context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.Diagnostics;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using System.Security;
using System.Threading;
namespace System.Runtime
{
internal static partial class Fx
{
private const string defaultEventSource = "System.Runtime";
#if DEBUG
private const string AssertsFailFastName = "AssertsFailFast";
private const string BreakOnExceptionTypesName = "BreakOnExceptionTypes";
private const string FastDebugName = "FastDebug";
private const string StealthDebuggerName = "StealthDebugger";
private static bool s_breakOnExceptionTypesRetrieved;
private static Type[] s_breakOnExceptionTypesCache;
#endif
private static ExceptionTrace s_exceptionTrace;
private static EtwDiagnosticTrace s_diagnosticTrace;
private static bool? s_isUap;
private static ExceptionHandler s_asynchronousThreadExceptionHandler;
// Lazily computed, cached flag; a benign race (two threads both computing it) is acceptable.
internal static bool IsUap
{
    get
    {
        if (!s_isUap.HasValue)
        {
            // NOTE(review): UAP detection is keyed off OSDescription being exactly
            // "Microsoft Windows" — presumably the value reported inside a UWP
            // container; confirm against the runtimes this ships on.
            s_isUap = "Microsoft Windows".Equals(RuntimeInformation.OSDescription, StringComparison.Ordinal);
        }
        return s_isUap.Value;
    }
}
// Lazily-created ExceptionTrace for this component. A benign race is tolerated:
// a true singleton is not required, so no lock is taken (same as the original).
internal static ExceptionTrace Exception
{
    get
    {
        ExceptionTrace trace = s_exceptionTrace;
        if (trace == null)
        {
            trace = new ExceptionTrace(defaultEventSource, Trace);
            s_exceptionTrace = trace;
        }
        return trace;
    }
}
// Lazily-initialized process-wide diagnostic trace. Losing a race simply
// creates an extra EtwDiagnosticTrace; no lock is needed.
internal static EtwDiagnosticTrace Trace
{
    get
    {
        return s_diagnosticTrace ?? (s_diagnosticTrace = InitializeTracing());
    }
}
// Factory for the diagnostic trace consumed by the Trace property.
private static EtwDiagnosticTrace InitializeTracing() => new EtwDiagnosticTrace();
/// <summary>
/// Handler consulted by HandleAtThreadBase when an exception escapes a thunked
/// callback; null means no handler is installed and the exception is rethrown.
/// </summary>
public static ExceptionHandler AsynchronousThreadExceptionHandler
{
    get => s_asynchronousThreadExceptionHandler;
    set => s_asynchronousThreadExceptionHandler = value;
}
// Guard form: asserts (debug builds) and throws InternalException when the
// condition does not hold; no-op otherwise.
public static void AssertAndThrow(bool condition, string description)
{
    if (condition)
    {
        return;
    }
    AssertAndThrow(description);
}
// Asserts (debug builds), traces a ship-assert event, and always throws
// InternalException. Declared to return Exception so callers can write
// 'throw Fx.AssertAndThrow(...)' and let tools see that execution stops.
[MethodImpl(MethodImplOptions.NoInlining)]
public static Exception AssertAndThrow(string description)
{
    Fx.Assert(description);
    TraceCore.ShipAssertExceptionMessage(Trace, description);
    throw new InternalException(description);
}
// Guard form of AssertAndThrowFatal(string): throws FatalInternalException
// (which IsFatal treats as fatal) when the condition does not hold.
public static void AssertAndThrowFatal(bool condition, string description)
{
    if (condition)
    {
        return;
    }
    AssertAndThrowFatal(description);
}
// Asserts (debug builds), traces a ship-assert event, and always throws
// FatalInternalException — a type IsFatal() recognizes, so this exception
// will not be swallowed by generic catch-and-continue handlers.
[MethodImpl(MethodImplOptions.NoInlining)]
public static Exception AssertAndThrowFatal(string description)
{
    Fx.Assert(description);
    TraceCore.ShipAssertExceptionMessage(Trace, description);
    throw new FatalInternalException(description);
}
// Guard form: terminates the process via AssertAndFailFast(string) when the
// condition does not hold.
public static void AssertAndFailFast(bool condition, string description)
{
    if (condition)
    {
        return;
    }
    AssertAndFailFast(description);
}
// This never returns. The Exception return type lets you write 'throw AssertAndFailFast()' which tells the compiler/tools that
// execution stops.
[Tag.SecurityNote(Critical = "Calls into critical method Environment.FailFast",
    Safe = "The side affect of the app crashing is actually intended here")]
[SecuritySafeCritical]
[MethodImpl(MethodImplOptions.NoInlining)]
public static Exception AssertAndFailFast(string description)
{
    Fx.Assert(description);
    string failFastMessage = InternalSR.FailFastMessage(description);

    // The catch is here to force the finally to run, as finallys don't run until the stack walk gets to a catch.
    // The catch makes sure that the finally will run before the stack-walk leaves the frame, but the code inside is impossible to reach.
    try
    {
        try
        {
            // Trace first; the FailFast in the finally guarantees the process
            // still dies even if tracing itself throws.
            Fx.Exception.TraceFailFast(failFastMessage);
        }
        finally
        {
            Environment.FailFast(failFastMessage);
        }
    }
    catch
    {
        throw;
    }

    return null; // we'll never get here since we've just fail-fasted
}
/// <summary>
/// Walks an exception — unwrapping the wrapper types the CLR uses — to decide
/// whether it represents a fatal condition that must not be swallowed.
/// </summary>
public static bool IsFatal(Exception exception)
{
    for (Exception current = exception; current != null;)
    {
        // Directly fatal exception types.
        if (current is FatalException ||
            current is OutOfMemoryException ||
            current is FatalInternalException)
        {
            return true;
        }

        // TypeInitializationException and TargetInvocationException aren't fatal
        // themselves, but the CLR uses them to wrap other exceptions — unwrap
        // and keep checking.
        if (current is TypeInitializationException ||
            current is TargetInvocationException)
        {
            current = current.InnerException;
            continue;
        }

        // AggregateException carries a collection of inner exceptions (the
        // singular InnerException is included in it); recurse into each one.
        AggregateException aggregate = current as AggregateException;
        if (aggregate != null)
        {
            foreach (Exception inner in aggregate.InnerExceptions)
            {
                if (IsFatal(inner))
                {
                    return true;
                }
            }
        }

        // Neither fatal nor a wrapper we unwrap further.
        return false;
    }
    return false;
}
// This method should be only used for debug build.
// Hard-wired off in this flavor; the AssertsFailFastName switch constant at the
// top of the class suggests other builds read a debug switch instead.
internal static bool AssertsFailFast
{
    get
    {
        return false;
    }
}
// This property should be only used for debug build.
// Returns the exception types the debugger should break on (from the
// BreakOnExceptionTypes debug switch), or null when unset/non-debug.
internal static Type[] BreakOnExceptionTypes
{
    get
    {
#if DEBUG
        if (!Fx.s_breakOnExceptionTypesRetrieved)
        {
            object value;
            if (TryGetDebugSwitch(Fx.BreakOnExceptionTypesName, out value))
            {
                string[] typeNames = value as string[];
                if (typeNames != null && typeNames.Length > 0)
                {
                    List<Type> types = new List<Type>(typeNames.Length);
                    for (int i = 0; i < typeNames.Length; i++)
                    {
                        // Type.GetType(name, throwOnError: false) yields null for
                        // unresolvable names; skip those instead of caching null
                        // entries (the original added the nulls to the array).
                        Type type = Type.GetType(typeNames[i], false);
                        if (type != null)
                        {
                            types.Add(type);
                        }
                    }
                    if (types.Count != 0)
                    {
                        Fx.s_breakOnExceptionTypesCache = types.ToArray();
                    }
                }
            }
            Fx.s_breakOnExceptionTypesRetrieved = true;
        }
        return Fx.s_breakOnExceptionTypesCache;
#else
        return null;
#endif
    }
}
// This property should be only used for debug build.
// Hard-wired off here; the StealthDebuggerName switch constant suggests other
// builds may read a debug switch instead.
internal static bool StealthDebugger
{
    get
    {
        return false;
    }
}
#if DEBUG
// Stub: debug-switch retrieval is not wired up in this build, so callers
// (e.g. BreakOnExceptionTypes) always see "switch not set".
private static bool TryGetDebugSwitch(string name, out object value)
{
    value = null;
    return false;
}
#endif
// Wraps an AsyncCallback so exceptions escaping it are routed through
// Fx.HandleAtThreadBase (see AsyncThunk.UnhandledExceptionFrame).
public static AsyncCallback ThunkCallback(AsyncCallback callback)
{
    // Consistency fix: the IOCompletionCallback overload asserts non-null;
    // do the same here so a null callback is caught in debug at creation
    // time rather than surfacing later from the thunk frame.
    Fx.Assert(callback != null, "Trying to create a ThunkCallback with a null callback method");
    return (new AsyncThunk(callback)).ThunkFrame;
}
// Wraps an Action<T1> so exceptions escaping it are routed through
// Fx.HandleAtThreadBase (see ActionThunk<T1>.UnhandledExceptionFrame).
public static Action<T1> ThunkCallback<T1>(Action<T1> callback)
{
    // Consistency fix: match the null-callback debug assert used by the
    // IOCompletionCallback overload.
    Fx.Assert(callback != null, "Trying to create a ThunkCallback with a null callback method");
    return (new ActionThunk<T1>(callback)).ThunkFrame;
}
#pragma warning disable CS3002 // Return type is not CLS-compliant
#pragma warning disable CS3001 // Argument type is not CLS-compliant
// Wraps an IOCompletionCallback so exceptions escaping it on an I/O completion
// thread are routed through Fx.HandleAtThreadBase (see IOCompletionThunk).
public static IOCompletionCallback ThunkCallback(IOCompletionCallback callback)
#pragma warning restore CS3001 // Argument type is not CLS-compliant
#pragma warning restore CS3002 // Return type is not CLS-compliant
{
    Fx.Assert(callback != null, "Trying to create a ThunkCallback with a null callback method");
    return (new IOCompletionThunk(callback)).ThunkFrame;
}
[SuppressMessage(FxCop.Category.ReliabilityBasic, FxCop.Rule.UseNewGuidHelperRule,
    Justification = "These are the core methods that should be used for all other Guid(string) calls.")]
// Parses a Guid, converting any parse failure into the standard internal-error
// path (AssertAndThrow -> InternalException). Guid.TryParse accepts exactly the
// formats the Guid(string) constructor does, so this replaces the original
// try/finally + success-flag pattern without changing the failure contract.
public static Guid CreateGuid(string guidString)
{
    Guid result;
    if (!Guid.TryParse(guidString, out result))
    {
        AssertAndThrow("Creation of the Guid failed.");
    }
    return result;
}
[SuppressMessage(FxCop.Category.ReliabilityBasic, FxCop.Rule.UseNewGuidHelperRule,
    Justification = "These are the core methods that should be used for all other Guid(string) calls.")]
// Non-throwing Guid parse. Guid.TryParse fails on exactly the inputs for which
// the Guid(string) constructor throws Argument/Format/OverflowException (the
// three exception types the original caught), so the contract is preserved
// without using exceptions for control flow; 'result' is Guid.Empty on failure.
public static bool TryCreateGuid(string guidString, out Guid result)
{
    return Guid.TryParse(guidString, out result);
}
// Allocates a byte[] of the requested size, tracing OOM before rethrowing it.
public static byte[] AllocateByteArray(int size)
{
    try
    {
        // Safe to catch OOM from this as long as the ONLY thing it does is a simple allocation of a primitive type (no method calls).
        return new byte[size];
    }
    catch (OutOfMemoryException exception)
    {
        // Desktop wraps the OOM inside a new InsufficientMemoryException, traces, and then throws it.
        // Project N and K trace and throw the original OOM. InsufficientMemoryException does not exist in N and K.
        // AsError is called for its tracing side effect; its return value is
        // deliberately discarded and the original OOM is rethrown.
        Fx.Exception.AsError(exception);
        throw;
    }
}
// Allocates a char[] of the requested size, tracing OOM before rethrowing it.
// Mirrors AllocateByteArray.
public static char[] AllocateCharArray(int size)
{
    try
    {
        // Safe to catch OOM from this as long as the ONLY thing it does is a simple allocation of a primitive type (no method calls).
        return new char[size];
    }
    catch (OutOfMemoryException exception)
    {
        // Desktop wraps the OOM inside a new InsufficientMemoryException, traces, and then throws it.
        // Project N and K trace and throw the original OOM. InsufficientMemoryException does not exist in N and K.
        // AsError is called for its tracing side effect; the original OOM is rethrown.
        Fx.Exception.AsError(exception);
        throw;
    }
}
[SuppressMessage(FxCop.Category.Design, FxCop.Rule.DoNotCatchGeneralExceptionTypes,
    Justification = "Don't want to hide the exception which is about to crash the process.")]
[Tag.SecurityNote(Miscellaneous = "Must not call into PT code as it is called within a CER.")]
// Best-effort tracing of an exception that is already on its way to crashing
// the process/thread; guaranteed never to throw itself.
private static void TraceExceptionNoThrow(Exception exception)
{
    try
    {
        // This call exits the CER. However, when still inside a catch, normal ThreadAbort is prevented.
        // Rude ThreadAbort will still be allowed to terminate processing.
        Fx.Exception.TraceUnhandledException(exception);
    }
    catch
    {
        // This empty catch is only acceptable because we are a) in a CER and b) processing an exception
        // which is about to crash the process anyway.
    }
}
[SuppressMessage(FxCop.Category.Design, FxCop.Rule.DoNotCatchGeneralExceptionTypes,
    Justification = "Don't want to hide the exception which is about to crash the process.")]
[SuppressMessage(FxCop.Category.ReliabilityBasic, FxCop.Rule.IsFatalRule,
    Justification = "Don't want to hide the exception which is about to crash the process.")]
[Tag.SecurityNote(Miscellaneous = "Must not call into PT code as it is called within a CER.")]
// Last-chance handler invoked by the thunks when an exception escapes a
// callback at the base of a thread. Traces the exception, then gives the
// installed AsynchronousThreadExceptionHandler a chance to handle it.
// Returns true only if the handler handled the exception (caller won't rethrow).
private static bool HandleAtThreadBase(Exception exception)
{
    // This area is too sensitive to do anything but return.
    if (exception == null)
    {
        Fx.Assert("Null exception in HandleAtThreadBase.");
        return false;
    }

    TraceExceptionNoThrow(exception);

    try
    {
        ExceptionHandler handler = Fx.AsynchronousThreadExceptionHandler;
        return handler == null ? false : handler.HandleException(exception);
    }
    catch (Exception secondException)
    {
        // Don't let a new exception hide the original exception.
        TraceExceptionNoThrow(secondException);
    }

    return false;
}
// Enables end-to-end activity tracing when either ActionItem ETW event is on.
private static void UpdateLevel(EtwDiagnosticTrace trace)
{
    if (trace != null &&
        (TraceCore.ActionItemCallbackInvokedIsEnabled(trace) ||
         TraceCore.ActionItemScheduledIsEnabled(trace)))
    {
        trace.SetEnd2EndActivityTracingEnabled(true);
    }
}
// Convenience overload operating on the process-wide trace.
private static void UpdateLevel() => UpdateLevel(Fx.Trace);
// Contract for handlers installed via Fx.AsynchronousThreadExceptionHandler.
// Return true to indicate the exception was handled (the thunk will not rethrow).
public abstract class ExceptionHandler
{
    [Tag.SecurityNote(Miscellaneous = "Must not call into PT code as it is called within a CER.")]
    public abstract bool HandleException(Exception exception);
}
// This can't derive from Thunk since T would be unsafe.
// Wraps an IOCompletionCallback so exceptions escaping the callback on an I/O
// completion thread are offered to Fx.HandleAtThreadBase before rethrowing.
unsafe sealed class IOCompletionThunk
{
    // Fixed at construction; 'readonly' + explicit 'private' added (the field was
    // a mutable, default-visibility field in the original for no apparent reason).
    private readonly IOCompletionCallback callback;

    public IOCompletionThunk(IOCompletionCallback callback)
    {
        this.callback = callback;
    }

    // Delegate to hand to the native I/O API in place of the raw callback.
    public IOCompletionCallback ThunkFrame
    {
        get
        {
            return new IOCompletionCallback(UnhandledExceptionFrame);
        }
    }

    private void UnhandledExceptionFrame(uint error, uint bytesRead, NativeOverlapped* nativeOverlapped)
    {
        // Legacy CER setup kept from the original (a no-op on .NET Core).
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
            callback(error, bytesRead, nativeOverlapped);
        }
        catch (Exception exception)
        {
            // Rethrow unless the installed handler swallows the exception.
            if (!Fx.HandleAtThreadBase(exception))
            {
                throw;
            }
        }
    }
}
public static class Tag
{
public enum CacheAttrition
{
None,
ElementOnTimer,
// A finalizer/WeakReference based cache, where the elements are held by WeakReferences (or hold an
// inner object by a WeakReference), and the weakly-referenced object has a finalizer which cleans the
// item from the cache.
ElementOnGC,
// A cache that provides a per-element token, delegate, interface, or other piece of context that can
// be used to remove the element (such as IDisposable).
ElementOnCallback,
FullPurgeOnTimer,
FullPurgeOnEachAccess,
PartialPurgeOnTimer,
PartialPurgeOnEachAccess,
}
public enum ThrottleAction
{
Reject,
Pause,
}
public enum ThrottleMetric
{
Count,
Rate,
Other,
}
public enum Location
{
InProcess,
OutOfProcess,
LocalSystem,
LocalOrRemoteSystem, // as in a file that might live on a share
RemoteSystem,
}
public enum SynchronizationKind
{
LockStatement,
MonitorWait,
MonitorExplicit,
InterlockedNoSpin,
InterlockedWithSpin,
// Same as LockStatement if the field type is object.
FromFieldType,
}
[Flags]
public enum BlocksUsing
{
MonitorEnter,
MonitorWait,
ManualResetEvent,
AutoResetEvent,
AsyncResult,
IAsyncResult,
PInvoke,
InputQueue,
ThreadNeutralSemaphore,
PrivatePrimitive,
OtherInternalPrimitive,
OtherFrameworkPrimitive,
OtherInterop,
Other,
NonBlocking, // For use by non-blocking SynchronizationPrimitives such as IOThreadScheduler
}
public static class Strings
{
internal const string ExternallyManaged = "externally managed";
internal const string AppDomain = "AppDomain";
internal const string DeclaringInstance = "instance of declaring class";
internal const string Unbounded = "unbounded";
internal const string Infinite = "infinite";
}
// Debug-only annotation marking a member as intentionally accessible to the
// named friend assembly.
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor | AttributeTargets.Property | AttributeTargets.Class,
    AllowMultiple = true, Inherited = false)]
[Conditional("DEBUG")]
public sealed class FriendAccessAllowedAttribute : Attribute
{
    public FriendAccessAllowedAttribute(string assemblyName) :
        base()
    {
        AssemblyName = assemblyName;
    }

    public string AssemblyName { get; set; }
}
public static class Throws
{
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor,
AllowMultiple = true, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class TimeoutAttribute : ThrowsAttribute
{
public TimeoutAttribute() :
this("The operation timed out.")
{
}
public TimeoutAttribute(string diagnosis) :
base(typeof(TimeoutException), diagnosis)
{
}
}
}
// Annotates a field that acts as a cache; defaults describe an unbounded,
// never-expiring, per-declaring-instance cache.
[AttributeUsage(AttributeTargets.Field)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class CacheAttribute : Attribute
{
    public CacheAttribute(Type elementType, CacheAttrition cacheAttrition)
    {
        Scope = Strings.DeclaringInstance;
        SizeLimit = Strings.Unbounded;
        Timeout = Strings.Infinite;
        ElementType = elementType ?? throw Fx.Exception.ArgumentNull("elementType");
        CacheAttrition = cacheAttrition;
    }

    public Type ElementType { get; }
    // Get-only auto-property for consistency with ElementType (replaces the
    // hand-written readonly-field-backed property; externally identical).
    public CacheAttrition CacheAttrition { get; }
    public string Scope { get; set; }
    public string SizeLimit { get; set; }
    public string Timeout { get; set; }
}
[AttributeUsage(AttributeTargets.Field)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class QueueAttribute : Attribute
{
public QueueAttribute(Type elementType)
{
Scope = Strings.DeclaringInstance;
SizeLimit = Strings.Unbounded;
ElementType = elementType ?? throw Fx.Exception.ArgumentNull("elementType");
}
public Type ElementType { get; }
public string Scope { get; set; }
public string SizeLimit { get; set; }
public bool StaleElementsRemovedImmediately { get; set; }
public bool EnqueueThrowsIfFull { get; set; }
}
// Annotates a field that implements a throttle; 'limit' is required and
// non-empty, the default scope is the AppDomain.
[AttributeUsage(AttributeTargets.Field)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class ThrottleAttribute : Attribute
{
    public ThrottleAttribute(ThrottleAction throttleAction, ThrottleMetric throttleMetric, string limit)
    {
        Scope = Strings.AppDomain;
        if (string.IsNullOrEmpty(limit))
        {
            throw Fx.Exception.ArgumentNullOrEmpty("limit");
        }
        ThrottleAction = throttleAction;
        ThrottleMetric = throttleMetric;
        Limit = limit;
    }

    public ThrottleAction ThrottleAction { get; }
    public ThrottleMetric ThrottleMetric { get; }
    // Get-only auto-property for consistency with ThrottleAction/ThrottleMetric
    // (replaces the hand-written readonly-field-backed property).
    public string Limit { get; }
    public string Scope { get; set; }
}
// Annotates a member that owns or touches an external resource (file, process,
// remote system, ...) for analysis purposes.
[AttributeUsage(AttributeTargets.Field | AttributeTargets.Method | AttributeTargets.Constructor,
    AllowMultiple = true, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class ExternalResourceAttribute : Attribute
{
    public ExternalResourceAttribute(Location location, string description)
    {
        Location = location;
        Description = description;
    }

    public Location Location { get; }
    // Get-only auto-property for consistency with Location (replaces the
    // hand-written readonly-field-backed property).
    public string Description { get; }
}
// Set on a class when that class uses lock (this) - acts as though it were on a field
// private object this;
[AttributeUsage(AttributeTargets.Field | AttributeTargets.Class, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class SynchronizationObjectAttribute : Attribute
{
public SynchronizationObjectAttribute()
{
Blocking = true;
Scope = Strings.DeclaringInstance;
Kind = SynchronizationKind.FromFieldType;
}
public bool Blocking { get; set; }
public string Scope { get; set; }
public SynchronizationKind Kind { get; set; }
}
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct, Inherited = true)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class SynchronizationPrimitiveAttribute : Attribute
{
public SynchronizationPrimitiveAttribute(BlocksUsing blocksUsing)
{
BlocksUsing = blocksUsing;
}
public BlocksUsing BlocksUsing { get; }
public bool SupportsAsync { get; set; }
public bool Spins { get; set; }
public string ReleaseMethod { get; set; }
}
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class BlockingAttribute : Attribute
{
public BlockingAttribute()
{
}
public string CancelMethod { get; set; }
public Type CancelDeclaringType { get; set; }
public string Conditional { get; set; }
}
// Sometime a method will call a conditionally-blocking method in such a way that it is guaranteed
// not to block (i.e. the condition can be Asserted false). Such a method can be marked as
// GuaranteeNonBlocking as an assertion that the method doesn't block despite calling a blocking method.
//
// Methods that don't call blocking methods and aren't marked as Blocking are assumed not to block, so
// they do not require this attribute.
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class GuaranteeNonBlockingAttribute : Attribute
{
public GuaranteeNonBlockingAttribute()
{
}
}
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class NonThrowingAttribute : Attribute
{
public NonThrowingAttribute()
{
}
}
// Documents an exception type a member may throw, with a required diagnosis
// string. Base of the attribute hierarchy (see TimeoutAttribute).
[SuppressMessage(FxCop.Category.Performance, "CA1813:AvoidUnsealedAttributes",
    Justification = "This is intended to be an attribute hierarchy. It does not affect product perf.")]
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor,
    AllowMultiple = true, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public class ThrowsAttribute : Attribute
{
    public ThrowsAttribute(Type exceptionType, string diagnosis)
    {
        if (string.IsNullOrEmpty(diagnosis))
        {
            throw Fx.Exception.ArgumentNullOrEmpty("diagnosis");
        }
        ExceptionType = exceptionType ?? throw Fx.Exception.ArgumentNull("exceptionType");
        Diagnosis = diagnosis;
    }

    public Type ExceptionType { get; }
    // Get-only auto-property for consistency with ExceptionType (replaces the
    // hand-written readonly-field-backed property).
    public string Diagnosis { get; }
}
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class InheritThrowsAttribute : Attribute
{
public InheritThrowsAttribute()
{
}
public Type FromDeclaringType { get; set; }
public string From { get; set; }
}
[AttributeUsage(AttributeTargets.Property, AllowMultiple = false, Inherited = true)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class KnownXamlExternalAttribute : Attribute
{
public KnownXamlExternalAttribute()
{
}
}
// Marks a type (or assembly) as intentionally visible to XAML; defaults to visible.
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Struct, AllowMultiple = false, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class XamlVisibleAttribute : Attribute
{
    public XamlVisibleAttribute() : this(true)
    {
    }

    public XamlVisibleAttribute(bool visible)
    {
        Visible = visible;
    }

    public bool Visible { get; private set; }
}
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Module | AttributeTargets.Class |
AttributeTargets.Struct | AttributeTargets.Enum | AttributeTargets.Constructor | AttributeTargets.Method |
AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Event | AttributeTargets.Interface |
AttributeTargets.Delegate, AllowMultiple = false, Inherited = false)]
[Conditional("CODE_ANALYSIS_CDF")]
public sealed class SecurityNoteAttribute : Attribute
{
public SecurityNoteAttribute()
{
}
public string Critical
{
get;
set;
}
public string Safe
{
get;
set;
}
public string Miscellaneous
{
get;
set;
}
}
}
// Base helper that captures a delegate so derived thunks can expose it wrapped
// in an unhandled-exception frame (see ActionThunk<T1> / AsyncThunk).
internal abstract class Thunk<T> where T : class
{
    [Tag.SecurityNote(Critical = "Make these safe to use in SecurityCritical contexts.")]
    [SecurityCritical]
    private readonly T _callback;   // readonly: assigned only in the constructor

    [Tag.SecurityNote(Critical = "Accesses critical field.", Safe = "Data provided by caller.")]
    [SecuritySafeCritical]
    protected Thunk(T callback)
    {
        _callback = callback;
    }

    internal T Callback
    {
        [Tag.SecurityNote(Critical = "Accesses critical field.", Safe = "Data is not privileged.")]
        [SecuritySafeCritical]
        get
        {
            return _callback;
        }
    }
}
// Thunk for Action<T1> callbacks: wraps the callback in an unhandled-exception
// frame so escaping exceptions are offered to Fx.AsynchronousThreadExceptionHandler.
internal sealed class ActionThunk<T1> : Thunk<Action<T1>>
{
    public ActionThunk(Action<T1> callback) : base(callback)
    {
    }

    // Delegate to hand to async APIs in place of the raw callback.
    public Action<T1> ThunkFrame
    {
        get
        {
            return new Action<T1>(UnhandledExceptionFrame);
        }
    }

    // NOTE(review): the SecurityNote text mentions PrepareConstrainedRegions, but
    // this method no longer calls it (unlike IOCompletionThunk) — the annotation
    // looks stale; confirm before relying on it.
    [Tag.SecurityNote(Critical = "Calls PrepareConstrainedRegions which has a LinkDemand",
        Safe = "Guaranteed not to call into PT user code from the finally.")]
    [SecuritySafeCritical]
    private void UnhandledExceptionFrame(T1 result)
    {
        try
        {
            Callback(result);
        }
        catch (Exception exception)
        {
            // Rethrow unless the installed handler swallows the exception.
            if (!Fx.HandleAtThreadBase(exception))
            {
                throw;
            }
        }
    }
}
// Thunk for AsyncCallback callbacks; mirrors ActionThunk<T1>.
internal sealed class AsyncThunk : Thunk<AsyncCallback>
{
    public AsyncThunk(AsyncCallback callback) : base(callback)
    {
    }

    // Delegate to hand to Begin/End-style APIs in place of the raw callback.
    public AsyncCallback ThunkFrame
    {
        get
        {
            return new AsyncCallback(UnhandledExceptionFrame);
        }
    }

    // NOTE(review): as in ActionThunk, the SecurityNote mentions
    // PrepareConstrainedRegions but it is not called here — likely stale.
    [Tag.SecurityNote(Critical = "Calls PrepareConstrainedRegions which has a LinkDemand",
        Safe = "Guaranteed not to call into PT user code from the finally.")]
    [SecuritySafeCritical]
    private void UnhandledExceptionFrame(IAsyncResult result)
    {
        try
        {
            Callback(result);
        }
        catch (Exception exception)
        {
            // Rethrow unless the installed handler swallows the exception.
            if (!Fx.HandleAtThreadBase(exception))
            {
                throw;
            }
        }
    }
}
// Thrown by Fx.AssertAndThrow when an internal invariant fails in a ship build;
// the message is formatted via InternalSR.ShipAssertExceptionMessage.
[Serializable]
internal class InternalException : SystemException
{
    public InternalException(string description) : base(InternalSR.ShipAssertExceptionMessage(description)) { }

    // Legacy binary-serialization constructor.
    protected InternalException(SerializationInfo info, StreamingContext context) : base(info, context) { }
}
// Fatal variant thrown by Fx.AssertAndThrowFatal; recognized by Fx.IsFatal so
// generic catch-and-continue handlers will not swallow it.
[Serializable]
internal class FatalInternalException : InternalException
{
    public FatalInternalException(string description) : base(description) { }

    // Legacy binary-serialization constructor.
    protected FatalInternalException(SerializationInfo info, StreamingContext context) : base(info, context) { }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.IO;
using System.Xml.Serialization;
using System.Text;
using System.Xml.Linq;
// Verify that jit test project files specify DebugType properly.
// Returns error status (-1) if any project files are in error.
internal class ScanProjectFiles
{
private static bool s_showNeedsFixOnly = true;
private static bool s_tryAndFix = true;
private static int s_projCount = 0;
private static int s_needsFixCount = 0;
private static int s_fixedCount = 0;
// Entry point. Returns 100 on success / nothing-to-check, -1 on usage error or
// when unfixed projects remain.
private static int Main(string[] args)
{
    // If invoked w/o args, locate jit test project dir from
    // CORE_ROOT, and scan only.
    //
    // If invoked w args, locate and try to fix project files.
    string projectRoot = null;

    if (args.Length == 0)
    {
        s_tryAndFix = false;

        // CORE_ROOT should be something like
        //   c:\repos\coreclr\bin\tests\Windows_NT.x64.Checked\Tests\Core_Root
        // or
        //   D:\j\workspace\x64_release_w---0575cb46\bin\tests\Windows_NT.x64.Release\Tests\Core_Root
        // We want
        //   c:\repos\coreclr\tests\src\JIT
        string coreRoot = System.Environment.GetEnvironmentVariable("CORE_ROOT");

        if (coreRoot == null)
        {
            Console.WriteLine("CORE_ROOT must be set");
            return -1;
        }

        // Fix: use an ordinal search (CA1310) — the culture-sensitive
        // IndexOf(string) overload can misbehave for path scanning.
        int binIndex = coreRoot.IndexOf("bin", StringComparison.Ordinal);

        if (binIndex < 0)
        {
            Console.WriteLine("No bin directory found in CORE_ROOT path `{0}`," +
                " so no checking will be performed.", coreRoot);
            return 100;
        }

        string repoRoot = coreRoot.Substring(0, binIndex);
        projectRoot = Path.Combine(repoRoot, "tests", "src", "JIT");
    }
    else if (args.Length != 1)
    {
        Console.WriteLine("Usage: CheckProjects [<dir>]");
        Console.WriteLine("If optional <dir> is specified,"
            + " all project files under <dir> will be scanned and updates will be attempted.");
        return -1;
    }
    else
    {
        projectRoot = args[0];
    }

    Console.WriteLine("Scanning{0}projects under {1}",
        s_tryAndFix ? " and attempting to update " : " ", projectRoot);

    if (!Directory.Exists(projectRoot))
    {
        Console.WriteLine("Project directory does not exist, so no checking will be performed.");
        return 100;
    }

    DirectoryInfo projectRootDir = new DirectoryInfo(projectRoot);

    foreach (FileInfo f in projectRootDir.GetFiles("*.*proj", SearchOption.AllDirectories))
    {
        // JIT\config projects are infrastructure, not tests — skip them.
        if (!f.FullName.Contains("JIT\\config") && !f.FullName.Contains("JIT/config"))
        {
            ParseAndUpdateProj(f.FullName, s_tryAndFix);
        }
    }

    Console.WriteLine("{0} projects, {1} needed fixes, {2} were fixed",
        s_projCount, s_needsFixCount, s_fixedCount);

    // Return error status if there are unfixed projects
    return (s_needsFixCount == 0 ? 100 : -1);
}
// Load up a project file and look for key attributes.
// Optionally try and update. Return true if modified.
//
// BUGFIX (see inline comment): the "ConflictingDebugType" check previously
// compared an element's value against itself, so conflicts between multiple
// <DebugType> elements were essentially never reported.
private static bool ParseAndUpdateProj(string projFile, bool tryUpdate)
{
    s_projCount++;

    // Guess at expected settings by looking for suffixes...
    string projFileBase = Path.GetFileNameWithoutExtension(projFile);
    bool isDebugTypeTest = projFileBase.EndsWith("_d") || projFileBase.EndsWith("_do") || projFileBase.EndsWith("_dbg");
    bool isRelTypeTest = projFileBase.EndsWith("_r") || projFileBase.EndsWith("_ro") || projFileBase.EndsWith("_rel");
    bool isNotOptTypeTest = projFileBase.EndsWith("_r") || projFileBase.EndsWith("_d");
    bool isOptTypeTest = projFileBase.EndsWith("_ro") || projFileBase.EndsWith("_do") || projFileBase.EndsWith("_opt");
    bool isSpecificTest = isDebugTypeTest || isRelTypeTest;
    bool updated = false;

    try
    {
        XElement root = XElement.Load(projFile);
        string nn = "{" + root.Name.NamespaceName + "}";
        IEnumerable<XElement> props = from el in root.Descendants(nn + "PropertyGroup") select el;

        // Local helper: persist the modified document and update bookkeeping.
        void SaveUpdatedProject()
        {
            using (StreamWriter outFile = File.CreateText(projFile))
            {
                root.Save(outFile);
                updated = true;
                s_fixedCount++;
            }
        }

        bool hasReleaseCondition = false;
        bool hasDebugCondition = false;
        string oddness = null;
        string optimizeOddness = null;
        string debugVal = null;
        string optimizeVal = null;
        bool needsFix = false;
        XElement bestPropertyGroupNode = null;
        XElement lastPropertyGroupNode = null;
        List<XElement> debugTypePropertyGroupNodes = new List<XElement>();
        List<XElement> optimizePropertyGroupNodes = new List<XElement>();

        foreach (XElement prop in props)
        {
            lastPropertyGroupNode = prop;
            XAttribute condition = prop.Attribute("Condition");
            bool isReleaseCondition = false;
            bool isDebugCondition = false;

            if (condition != null)
            {
                isReleaseCondition = condition.Value.Contains("Release");
                isDebugCondition = condition.Value.Contains("Debug");
                if (isReleaseCondition || isDebugCondition)
                {
                    // Remember a conditional rel/debug group as the preferred
                    // insertion point for fixes.
                    bestPropertyGroupNode = prop;
                }
            }

            XElement debugType = prop.Element(nn + "DebugType");
            if (debugType != null)
            {
                debugTypePropertyGroupNodes.Add(prop);

                // If <DebugType> appears multiple times, all should agree.
                string newDebugVal = debugType.Value;
                if (newDebugVal.Equals(""))
                {
                    newDebugVal = "blank";
                }
                if (debugVal != null)
                {
                    // BUGFIX: compare the previously seen value (debugVal) with the
                    // new one. The original compared debugType.Value to newDebugVal —
                    // two views of the *same* element — so a genuine conflict between
                    // two <DebugType> elements went undetected.
                    if (!debugVal.Equals(newDebugVal))
                    {
                        oddness = "ConflictingDebugType";
                    }
                }
                debugVal = newDebugVal;

                if (condition != null)
                {
                    if (isReleaseCondition == isDebugCondition)
                    {
                        oddness = "RelDebugDisagree";
                    }
                    hasReleaseCondition |= isReleaseCondition;
                    hasDebugCondition |= isDebugCondition;
                }
                else
                {
                    if (hasReleaseCondition || hasDebugCondition)
                    {
                        oddness = "CondAndUncond";
                    }
                }
            }

            XElement optimize = prop.Element(nn + "Optimize");
            if (optimize != null)
            {
                // Consistency fix: store the PropertyGroup (as the DebugType list
                // does), not the <Optimize> element. The list is only consulted via
                // Count, so behavior is unchanged, but the name now tells the truth.
                optimizePropertyGroupNodes.Add(prop);

                string newOptimizeVal = optimize.Value;
                if (string.IsNullOrWhiteSpace(newOptimizeVal))
                {
                    newOptimizeVal = "False";
                }
                if (optimizeVal != null && !optimizeVal.Equals(newOptimizeVal, StringComparison.InvariantCultureIgnoreCase))
                {
                    optimizeOddness = "MultipleConflictValues";
                }
                optimizeVal = newOptimizeVal;
            }
        }

        if (oddness == null)
        {
            if (hasReleaseCondition && !hasDebugCondition)
            {
                oddness = "RelButNotDbg";
            }
            else if (!hasReleaseCondition && hasDebugCondition)
            {
                oddness = "DbgButNotRel";
            }
        }

        bool hasDebugType = debugTypePropertyGroupNodes.Count > 0;
        bool hasOptimize = optimizePropertyGroupNodes.Count > 0;

        // Analyze suffix convention mismatches
        string suffixNote = "SuffixNone";
        if (isSpecificTest)
        {
            if (!hasDebugType || oddness != null || hasReleaseCondition || hasDebugCondition)
            {
                suffixNote = "SuffixProblem";
                needsFix = true;
            }
            else
            {
                if (isRelTypeTest)
                {
                    if (debugVal.Equals("pdbonly", StringComparison.OrdinalIgnoreCase)
                        || debugVal.Equals("none", StringComparison.OrdinalIgnoreCase)
                        || debugVal.Equals("blank", StringComparison.OrdinalIgnoreCase)
                        || debugVal.Equals("embedded", StringComparison.OrdinalIgnoreCase))
                    {
                        suffixNote = "SuffixRelOk";
                    }
                    else
                    {
                        suffixNote = "SuffixRelTestNot";
                        needsFix = true;
                    }
                }
                else if (isDebugTypeTest)
                {
                    if (debugVal.Equals("full", StringComparison.OrdinalIgnoreCase))
                    {
                        suffixNote = "SuffixDbgOk";
                    }
                    else
                    {
                        suffixNote = "SuffixDbgTestNot";
                        needsFix = true;
                    }
                }
            }
        }

        if (!hasDebugType)
        {
            needsFix = true;
        }
        if (oddness != null)
        {
            needsFix = true;
        }

        if (needsFix || !s_showNeedsFixOnly)
        {
            if (!hasDebugType)
            {
                Console.WriteLine("{0} DebugType-n/a-{1}", projFile, suffixNote);
            }
            else if (oddness != null)
            {
                Console.WriteLine("{0} DebugType-Odd-{1}-{2}", projFile, oddness, suffixNote);
            }
            else if (hasReleaseCondition || hasDebugCondition)
            {
                Console.WriteLine("{0} DebugType-{1}-Conditional-{2}", projFile, debugVal, suffixNote);
            }
            else
            {
                Console.WriteLine("{0} DebugType-{1}-Unconditional-{2}", projFile, debugVal, suffixNote);
            }
        }

        if (optimizeOddness != null)
        {
            needsFix = true;
        }

        if (!needsFix)
        {
            if (isOptTypeTest)
            {
                needsFix = DetermineIfOptimizeSettingNeedsFix(true, optimizeVal);
            }
            else if (isNotOptTypeTest)
            {
                needsFix = DetermineIfOptimizeSettingNeedsFix(false, optimizeVal);
            }
        }

        if (needsFix || !s_showNeedsFixOnly)
        {
            if (!hasOptimize)
            {
                Console.WriteLine("{0} Optimize-n/a", projFile);
            }
            else if (optimizeOddness != null)
            {
                Console.WriteLine("{0} Optimize-Odd-{1}", projFile, optimizeOddness);
            }
            else
            {
                Console.WriteLine("{0} Optimize-{1}-Conflict", projFile, optimizeVal);
            }
        }

        if (needsFix)
        {
            s_needsFixCount++;
        }

        // If a fix is needed, give it a shot!
        if (!needsFix || !tryUpdate)
        {
            return false;
        }

        // Add new elements just after the conditional rel/debug
        // property group entries, if possible.
        if (bestPropertyGroupNode == null)
        {
            bestPropertyGroupNode = lastPropertyGroupNode;
        }
        if (bestPropertyGroupNode == null)
        {
            Console.WriteLine(".... no prop group, can't fix");
            return false;
        }

        if (debugTypePropertyGroupNodes.Count == 0)
        {
            // Fix projects that don't mention debug type at all.
            Console.WriteLine(".... no DebugType, attempting fix ....");
            XElement newPropGroup = new XElement(nn + "PropertyGroup",
                new XElement(nn + "DebugType", isDebugTypeTest ? "Full" : "PdbOnly"),
                new XElement(nn + "Optimize", isNotOptTypeTest ? "False" : "True"));
            bestPropertyGroupNode.AddAfterSelf(newPropGroup);

            // Write out updated project file
            SaveUpdatedProject();
        }
        else if (debugTypePropertyGroupNodes.Count == 1)
        {
            // Fix projects with just one mention of debug type.
            Console.WriteLine(".... one DebugType, attempting fix ....");
            XElement prop = debugTypePropertyGroupNodes.First();
            XAttribute condition = prop.Attribute("Condition");

            // If there is no condition then this is likely a suffix mismatch
            if ((condition == null) && suffixNote.Equals("SuffixDbgTestNot"))
            {
                Console.WriteLine("Unconditional debug test w/ suffix issue");

                // Do case analysis of suffix and debugType/Opt, then update.
                XElement debugType = prop.Element(nn + "DebugType");
                XElement optimize = prop.Element(nn + "Optimize");

                // We know DebugType is set, but Optimize may not be.
                if (optimize == null)
                {
                    optimize = new XElement(nn + "Optimize");
                    prop.Add(optimize);
                }

                bool modified = false;
                if (isDebugTypeTest && !isOptTypeTest)
                {
                    // "d" suffix --
                    debugType.Value = "full";
                    optimize.Value = "False";
                    modified = true;
                }
                else if (isDebugTypeTest && isOptTypeTest)
                {
                    // "do" suffix --
                    debugType.Value = "full";
                    optimize.Value = "True";
                    modified = true;
                }

                if (modified)
                {
                    // Write out updated project file
                    SaveUpdatedProject();
                }
            }
            else
            {
                // Hoist the settings into an unconditional copy of the group.
                XElement newPropGroup = new XElement(prop);
                newPropGroup.RemoveAttributes();
                prop.RemoveNodes();
                bestPropertyGroupNode.AddAfterSelf(newPropGroup);

                // Write out updated project file
                SaveUpdatedProject();
            }
        }
        else
        {
            // Multiple property groups specifying DebugType. Remove any that are conditional.
            Console.WriteLine(".... multiple DebugTypes, attempting fix ....");
            bool modified = false;
            foreach (XElement prop in debugTypePropertyGroupNodes)
            {
                XAttribute condition = prop.Attribute("Condition");
                if (condition != null)
                {
                    prop.RemoveNodes();
                    modified = true;
                }
            }

            if (modified)
            {
                // Write out updated project file
                SaveUpdatedProject();
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("{0} DebugType-fail {1}", projFile, e.Message);
    }

    return updated;
}
/// <summary>
/// Determines whether the project's optimize setting needs a fix.
/// </summary>
/// <param name="isOptType">Whether optimization is expected. This is the baseline for the check.</param>
/// <param name="optimizeVal">The optimize value in the project file's <Optimize /> property.</param>
/// <returns>True if a fix is needed; otherwise false.</returns>
private static bool DetermineIfOptimizeSettingNeedsFix(bool isOptType, string optimizeVal)
{
    if (optimizeVal == null)
    {
        // A missing <Optimize /> entry is only a problem when optimization is expected.
        return isOptType;
    }
    // The setting is present: it needs fixing whenever it disagrees with the
    // expected "True"/"False" string produced by bool.ToString(), ignoring case.
    string expected = isOptType.ToString();
    return !optimizeVal.Equals(expected, StringComparison.InvariantCultureIgnoreCase);
}
}
| |
// ZlibStream.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// Time-stamp: <2010-January-09 12:03:25>
//
// ------------------------------------------------------------------
//
// This module defines the ZlibStream class, which is similar in idea to
// the System.IO.Compression.DeflateStream and
// System.IO.Compression.GZipStream classes in the .NET BCL.
//
// ------------------------------------------------------------------
using System;
using System.IO;
namespace Ionic.Zlib
{
/// <summary>
/// Represents a Zlib stream for compression or decompression.
/// </summary>
/// <remarks>
///
/// <para>
/// The ZlibStream is a <see
/// href="http://en.wikipedia.org/wiki/Decorator_pattern">Decorator</see> on a <see
/// cref="System.IO.Stream"/>. It adds ZLIB compression or decompression to any
/// stream.
/// </para>
///
/// <para> Using this stream, applications can compress or decompress data via
/// stream <c>Read()</c> and <c>Write()</c> operations. Either compression or
/// decompression can occur through either reading or writing. The compression
/// format used is ZLIB, which is documented in <see
/// href="http://www.ietf.org/rfc/rfc1950.txt">IETF RFC 1950</see>, "ZLIB Compressed
/// Data Format Specification version 3.3". This implementation of ZLIB always uses
/// DEFLATE as the compression method. (see <see
/// href="http://www.ietf.org/rfc/rfc1951.txt">IETF RFC 1951</see>, "DEFLATE
/// Compressed Data Format Specification version 1.3.") </para>
///
/// <para>
/// The ZLIB format allows for varying compression methods, window sizes, and dictionaries.
/// This implementation always uses the DEFLATE compression method, a preset dictionary,
/// and 15 window bits by default.
/// </para>
///
/// <para>
/// This class is similar to <see cref="DeflateStream"/>, except that it adds the
/// RFC1950 header and trailer bytes to a compressed stream when compressing, or expects
/// the RFC1950 header and trailer bytes when decompressing. It is also similar to the
/// <see cref="GZipStream"/>.
/// </para>
/// </remarks>
/// <seealso cref="DeflateStream" />
/// <seealso cref="GZipStream" />
internal class ZlibStream : System.IO.Stream
{
internal ZlibBaseStream _baseStream;
bool _disposed;
/// <summary>
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>.
/// </summary>
/// <remarks>
///
/// <para>
/// When mode is <c>CompressionMode.Compress</c>, the <c>ZlibStream</c> will use the
/// default compression level. The "captive" stream will be closed when the
/// <c>ZlibStream</c> is closed.
/// </para>
///
/// </remarks>
///
/// <example>
/// This example uses a <c>ZlibStream</c> to compress a file, and writes the compressed
/// data to another file.
/// <code>
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (var raw = System.IO.File.Create(fileToCompress + ".zlib"))
/// {
/// using (Stream compressor = new ZlibStream(raw, CompressionMode.Compress))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n;
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// compressor.Write(buffer, 0, n);
/// }
/// }
/// }
/// }
/// </code>
/// <code lang="VB">
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using raw As FileStream = File.Create(fileToCompress & ".zlib")
/// Using compressor As Stream = New ZlibStream(raw, CompressionMode.Compress)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// End Using
/// </code>
/// </example>
///
/// <param name="stream">The stream which will be read or written.</param>
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
public ZlibStream(System.IO.Stream stream, CompressionMode mode)
    : this(stream, mode, CompressionLevel.Default, false)
{
    // Delegates to the full constructor: default compression level, and the
    // captive stream is closed when this stream is closed (leaveOpen: false).
}
/// <summary>
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c> and
/// the specified <c>CompressionLevel</c>.
/// </summary>
///
/// <remarks>
///
/// <para>
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is ignored.
/// The "captive" stream will be closed when the <c>ZlibStream</c> is closed.
/// </para>
///
/// </remarks>
///
/// <example>
/// This example uses a <c>ZlibStream</c> to compress data from a file, and writes the
/// compressed data to another file.
///
/// <code>
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (var raw = System.IO.File.Create(fileToCompress + ".zlib"))
/// {
/// using (Stream compressor = new ZlibStream(raw,
/// CompressionMode.Compress,
/// CompressionLevel.BestCompression))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n;
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// compressor.Write(buffer, 0, n);
/// }
/// }
/// }
/// }
/// </code>
///
/// <code lang="VB">
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using raw As FileStream = File.Create(fileToCompress & ".zlib")
/// Using compressor As Stream = New ZlibStream(raw, CompressionMode.Compress, CompressionLevel.BestCompression)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// End Using
/// </code>
/// </example>
///
/// <param name="stream">The stream to be read or written while deflating or inflating.</param>
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
public ZlibStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level)
    : this(stream, mode, level, false)
{
    // Delegates to the full constructor; the captive stream is closed when
    // this stream is closed (leaveOpen: false).
}
/// <summary>
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>, and
/// explicitly specify whether the captive stream should be left open after
/// Deflation or Inflation.
/// </summary>
///
/// <remarks>
///
/// <para>
/// When mode is <c>CompressionMode.Compress</c>, the <c>ZlibStream</c> will use
/// the default compression level.
/// </para>
///
/// <para>
/// This constructor allows the application to request that the captive stream
/// remain open after the deflation or inflation occurs. By default, after
/// <c>Close()</c> is called on the stream, the captive stream is also
/// closed. In some cases this is not desired, for example if the stream is a
/// <see cref="System.IO.MemoryStream"/> that will be re-read after
/// compression. Specify true for the <paramref name="leaveOpen"/> parameter to leave the stream
/// open.
/// </para>
///
/// <para>
/// See the other overloads of this constructor for example code.
/// </para>
///
/// </remarks>
///
/// <param name="stream">The stream which will be read or written. This is called the
/// "captive" stream in other places in this documentation.</param>
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
/// <param name="leaveOpen">true if the application would like the stream to remain
/// open after inflation/deflation.</param>
public ZlibStream(System.IO.Stream stream, CompressionMode mode, bool leaveOpen)
    : this(stream, mode, CompressionLevel.Default, leaveOpen)
{
    // Delegates to the full constructor with the default compression level.
}
/// <summary>
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c> and
/// the specified <c>CompressionLevel</c>, and explicitly specify whether the
/// stream should be left open after Deflation or Inflation.
/// </summary>
///
/// <remarks>
///
/// <para>
/// This constructor allows the application to request that the captive stream
/// remain open after the deflation or inflation occurs. By default, after
/// <c>Close()</c> is called on the stream, the captive stream is also closed. In
/// some cases this is not desired, for example if the stream is a <see
/// cref="System.IO.MemoryStream"/> that will be re-read after compression.
/// Specify true for the <paramref name="leaveOpen"/> parameter to leave the stream open.
/// </para>
///
/// <para>
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is ignored.
/// </para>
///
/// </remarks>
///
/// <example>
/// This example shows how to use a ZlibStream to compress the data from a file,
/// and store the result into another file. The filestream remains open to allow
/// additional data to be written to it.
/// <code>
/// using (var output = System.IO.File.Create(fileToCompress + ".zlib"))
/// {
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (Stream compressor = new ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, true))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n;
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// compressor.Write(buffer, 0, n);
/// }
/// }
/// }
/// // can write additional data to the output stream here
/// }
/// </code>
/// <code lang="VB">
/// Using output As FileStream = File.Create(fileToCompress & ".zlib")
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using compressor As Stream = New ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, True)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// ' can write additional data to the output stream here.
/// End Using
/// </code>
/// </example>
///
/// <param name="stream">The stream which will be read or written.</param>
///
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
///
/// <param name="leaveOpen">
/// true if the application would like the stream to remain open after inflation/deflation.
/// </param>
///
/// <param name="level">
/// A tuning knob to trade speed for effectiveness. This parameter is effective only when
/// mode is <c>CompressionMode.Compress</c>.
/// </param>
public ZlibStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
{
    // All other constructors funnel here. The ZLIB flavor selects the
    // RFC 1950 header/trailer handling in the shared base stream.
    _baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.ZLIB, leaveOpen);
}
#region Zlib properties
/// <summary>
/// This property sets the flush behavior on the stream.
/// See <see cref="FlushType"/> for the available settings.
/// </summary>
virtual public FlushType FlushMode
{
    get
    {
        // Consistency fix: the setter and every other member of this class
        // (CanRead, CanWrite, Flush, Read, Write, BufferSize) reject use
        // after disposal, but the original getter did not.
        if (_disposed) throw new ObjectDisposedException("ZlibStream");
        return (this._baseStream._flushMode);
    }
    set
    {
        if (_disposed) throw new ObjectDisposedException("ZlibStream");
        this._baseStream._flushMode = value;
    }
}
/// <summary>
/// The size of the working buffer for the compression codec.
/// </summary>
///
/// <remarks>
/// <para>
/// The working buffer is used for all stream operations. The default size is
/// 1024 bytes. The minimum size is 128 bytes. You may get better performance
/// with a larger buffer. Then again, you might not. You would have to test
/// it.
/// </para>
///
/// <para>
/// Set this before the first call to <c>Read()</c> or <c>Write()</c> on the
/// stream. If you try to set it afterwards, it will throw.
/// </para>
/// </remarks>
public int BufferSize
{
    get
    {
        return _baseStream._bufferSize;
    }
    set
    {
        // The buffer size may only be changed before the codec allocates its
        // working buffer, i.e. before the first Read() or Write() call.
        if (_disposed) throw new ObjectDisposedException("ZlibStream");
        if (_baseStream._workingBuffer != null)
            throw new ZlibException("The working buffer is already set.");
        if (value < ZlibConstants.WorkingBufferSizeMin)
            throw new ZlibException(String.Format("Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value, ZlibConstants.WorkingBufferSizeMin));
        _baseStream._bufferSize = value;
    }
}
/// <summary> Returns the total number of bytes input so far.</summary>
virtual public long TotalIn
{
    // Running counter maintained by the underlying _z codec object.
    get { return this._baseStream._z.TotalBytesIn; }
}
/// <summary> Returns the total number of bytes output so far.</summary>
virtual public long TotalOut
{
    get { return this._baseStream._z.TotalBytesOut; }
}
#endregion
#region System.IO.Stream methods
/// <summary>
/// Dispose the stream.
/// </summary>
/// <remarks>
/// This may or may not result in a <c>Close()</c> call on the captive stream.
/// See the constructors that have a <c>leaveOpen</c> parameter for more information.
/// </remarks>
protected override void Dispose(bool disposing)
{
    try
    {
        if (_disposed)
            return; // already disposed; the finally block still runs base.Dispose
        if (disposing && this._baseStream != null)
        {
            // Whether the captive stream is closed too depends on the
            // leaveOpen flag supplied at construction.
            this._baseStream.Close();
        }
        _disposed = true;
    }
    finally
    {
        base.Dispose(disposing);
    }
}
/// <summary>
/// Indicates whether the stream can be read.
/// </summary>
/// <remarks>
/// The return value depends on whether the captive stream supports reading.
/// </remarks>
public override bool CanRead
{
    get
    {
        // Mirrors the captive stream's readability; throws after disposal.
        if (_disposed) throw new ObjectDisposedException("ZlibStream");
        return _baseStream._stream.CanRead;
    }
}
/// <summary>
/// Indicates whether the stream supports Seek operations.
/// </summary>
/// <remarks>
/// Always returns false.
/// </remarks>
public override bool CanSeek
{
    // Compressed streams are strictly sequential; seeking is never supported.
    get { return false; }
}
/// <summary>
/// Indicates whether the stream can be written.
/// </summary>
/// <remarks>
/// The return value depends on whether the captive stream supports writing.
/// </remarks>
public override bool CanWrite
{
    get
    {
        // Mirrors the captive stream's writability; throws after disposal.
        if (_disposed) throw new ObjectDisposedException("ZlibStream");
        return _baseStream._stream.CanWrite;
    }
}
/// <summary>
/// Flush the stream.
/// </summary>
public override void Flush()
{
    if (_disposed) throw new ObjectDisposedException("ZlibStream");
    // Delegate to the wrapped base stream.
    _baseStream.Flush();
}
/// <summary>
/// Reading this property always throws a <see cref="NotImplementedException"/>.
/// </summary>
public override long Length
{
    // Length is not knowable for streaming (de)compression.
    get { throw new NotImplementedException(); }
}
/// <summary>
/// The position of the stream pointer.
/// </summary>
///
/// <remarks>
/// Setting this property always throws a <see
/// cref="NotImplementedException"/>. Reading will return the total bytes
/// written out, if used in writing, or the total bytes read in, if used in
/// reading. The count may refer to compressed bytes or uncompressed bytes,
/// depending on how you've used the stream.
/// </remarks>
public override long Position
{
    get
    {
        // Report bytes produced when writing, bytes consumed when reading;
        // zero if the stream has not yet been used in either direction.
        var mode = this._baseStream._streamMode;
        if (mode == Ionic.Zlib.ZlibBaseStream.StreamMode.Writer)
        {
            return this._baseStream._z.TotalBytesOut;
        }
        return mode == Ionic.Zlib.ZlibBaseStream.StreamMode.Reader
            ? this._baseStream._z.TotalBytesIn
            : 0;
    }
    set { throw new NotImplementedException(); }
}
/// <summary>
/// Read data from the stream.
/// </summary>
///
/// <remarks>
///
/// <para>
/// If you wish to use the <c>ZlibStream</c> to compress data while reading,
/// you can create a <c>ZlibStream</c> with <c>CompressionMode.Compress</c>,
/// providing an uncompressed data stream. Then call <c>Read()</c> on that
/// <c>ZlibStream</c>, and the data read will be compressed. If you wish to
/// use the <c>ZlibStream</c> to decompress data while reading, you can create
/// a <c>ZlibStream</c> with <c>CompressionMode.Decompress</c>, providing a
/// readable compressed data stream. Then call <c>Read()</c> on that
/// <c>ZlibStream</c>, and the data will be decompressed as it is read.
/// </para>
///
/// <para>
/// A <c>ZlibStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but
/// not both.
/// </para>
///
/// </remarks>
/// <param name="buffer">The buffer into which the read data should be placed.</param>
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
public override int Read(byte[] buffer, int offset, int count)
{
    if (_disposed) throw new ObjectDisposedException("ZlibStream");
    // Whether this compresses or decompresses depends on the CompressionMode
    // the stream was created with; the base stream does the actual work.
    return _baseStream.Read(buffer, offset, count);
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
public override long Seek(long offset, System.IO.SeekOrigin origin)
{
    // Seeking is not supported (CanSeek is false).
    throw new NotImplementedException();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
public override void SetLength(long value)
{
    // The length of a (de)compression stream cannot be set.
    throw new NotImplementedException();
}
/// <summary>
/// Write data to the stream.
/// </summary>
///
/// <remarks>
///
/// <para>
/// If you wish to use the <c>ZlibStream</c> to compress data while writing,
/// you can create a <c>ZlibStream</c> with <c>CompressionMode.Compress</c>,
/// and a writable output stream. Then call <c>Write()</c> on that
/// <c>ZlibStream</c>, providing uncompressed data as input. The data sent to
/// the output stream will be the compressed form of the data written. If you
/// wish to use the <c>ZlibStream</c> to decompress data while writing, you
/// can create a <c>ZlibStream</c> with <c>CompressionMode.Decompress</c>, and a
/// writable output stream. Then call <c>Write()</c> on that stream,
/// providing previously compressed data. The data sent to the output stream
/// will be the decompressed form of the data written.
/// </para>
///
/// <para>
/// A <c>ZlibStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but not both.
/// </para>
/// </remarks>
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
{
    if (_disposed) throw new ObjectDisposedException("ZlibStream");
    // Whether this compresses or decompresses depends on the CompressionMode
    // the stream was created with; the base stream does the actual work.
    _baseStream.Write(buffer, offset, count);
}
#endregion
/// <summary>
/// Compress a string into a byte array using ZLIB.
/// </summary>
///
/// <remarks>
/// Uncompress it with <see cref="ZlibStream.UncompressString(byte[])"/>.
/// </remarks>
///
/// <seealso cref="ZlibStream.UncompressString(byte[])"/>
/// <seealso cref="ZlibStream.CompressBuffer(byte[])"/>
///
/// <param name="s">
/// A string to compress. The string will first be encoded
/// using UTF8, then compressed.
/// </param>
///
/// <returns>The string in compressed form</returns>
public static byte[] CompressString(String s)
{
    using (var output = new MemoryStream())
    {
        // The helper writes s (UTF-8 encoded) through the compressor;
        // presumably it also closes the compressor so all data is flushed
        // into the MemoryStream before ToArray() — see ZlibBaseStream.
        Stream compressor =
            new ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression);
        ZlibBaseStream.CompressString(s, compressor);
        return output.ToArray();
    }
}
/// <summary>
/// Compress a byte array into a new byte array using ZLIB.
/// </summary>
///
/// <remarks>
/// Uncompress it with <see cref="ZlibStream.UncompressBuffer(byte[])"/>.
/// </remarks>
///
/// <seealso cref="ZlibStream.CompressString(string)"/>
/// <seealso cref="ZlibStream.UncompressBuffer(byte[])"/>
///
/// <param name="b">
/// A buffer to compress.
/// </param>
///
/// <returns>The data in compressed form</returns>
public static byte[] CompressBuffer(byte[] b)
{
    using (var output = new MemoryStream())
    {
        // The helper writes b through the compressor; presumably it also
        // closes the compressor so all data is flushed — see ZlibBaseStream.
        Stream compressor =
            new ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression);
        ZlibBaseStream.CompressBuffer(b, compressor);
        return output.ToArray();
    }
}
/// <summary>
/// Uncompress a ZLIB-compressed byte array into a single string.
/// </summary>
///
/// <seealso cref="ZlibStream.CompressString(String)"/>
/// <seealso cref="ZlibStream.UncompressBuffer(byte[])"/>
///
/// <param name="compressed">
/// A buffer containing ZLIB-compressed data.
/// </param>
///
/// <returns>The uncompressed string</returns>
public static String UncompressString(byte[] compressed)
{
    using (var source = new MemoryStream(compressed))
    {
        // Inflate the ZLIB-wrapped data and decode the result as a string.
        Stream decompressor =
            new ZlibStream(source, CompressionMode.Decompress);
        return ZlibBaseStream.UncompressString(compressed, decompressor);
    }
}
/// <summary>
/// Uncompress a ZLIB-compressed byte array into a byte array.
/// </summary>
///
/// <seealso cref="ZlibStream.CompressBuffer(byte[])"/>
/// <seealso cref="ZlibStream.UncompressString(byte[])"/>
///
/// <param name="compressed">
/// A buffer containing ZLIB-compressed data.
/// </param>
///
/// <returns>The data in uncompressed form</returns>
public static byte[] UncompressBuffer(byte[] compressed)
{
    using (var source = new MemoryStream(compressed))
    {
        // Inflate the ZLIB-wrapped data back into a raw byte array.
        Stream decompressor =
            new ZlibStream(source, CompressionMode.Decompress);
        return ZlibBaseStream.UncompressBuffer(compressed, decompressor);
    }
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Composition;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.UnitTests.Diagnostics
{
public class DiagnosticStateTests
{
[Fact, Trait(Roslyn.Test.Utilities.Traits.Feature, Roslyn.Test.Utilities.Traits.Features.Diagnostics)]
public void SerializationTest_Document()
{
    // Verifies that document-level diagnostic data survives a persist/restore
    // round trip through the in-memory storage registered for the
    // "DiagnosticTest" workspace kind (see PersistentStorageServiceFactory below).
    using (var workspace = new TestWorkspace(TestExportProvider.ExportProviderWithCSharpAndVisualBasic, workspaceKind: "DiagnosticTest"))
    {
        // Arrange: two distinct versions and three diagnostics covering
        // Info/Warning/Error severities, with and without optional metadata
        // (custom tags, title, description, help link).
        var utcTime = DateTime.UtcNow;
        var version1 = VersionStamp.Create(utcTime);
        var version2 = VersionStamp.Create(utcTime.AddDays(1));
        var document = workspace.CurrentSolution.AddProject("TestProject", "TestProject", LanguageNames.CSharp).AddDocument("TestDocument", "");
        var diagnostics = new[]
        {
            new DiagnosticData(
                "test1", "Test", "test1 message", "test1 message format",
                DiagnosticSeverity.Info, DiagnosticSeverity.Info, false, 1, ImmutableArray<string>.Empty,
                workspace, document.Project.Id, document.Id,
                new TextSpan(10, 20), "mappedFile1", 10, 10, 20, 20, "originalFile1", 30, 30, 40, 40),
            new DiagnosticData(
                "test2", "Test", "test2 message", "test2 message format",
                DiagnosticSeverity.Warning, DiagnosticSeverity.Warning, true, 0, ImmutableArray.Create<string>("Test2"),
                workspace, document.Project.Id, document.Id,
                new TextSpan(30, 40), "mappedFile2", 50, 50, 60, 60, "originalFile2", 70, 70, 80, 80, title: "test2 title", description: "test2 description", helpLink: "http://test2link"),
            new DiagnosticData(
                "test3", "Test", "test3 message", "test3 message format",
                DiagnosticSeverity.Error, DiagnosticSeverity.Warning, true, 2, ImmutableArray.Create<string>("Test3", "Test3_2"),
                workspace, document.Project.Id, document.Id,
                new TextSpan(50, 60), "mappedFile3", 90, 90, 100, 100, "originalFile3", 110, 110, 120, 120, title: "test3 title", description: "test3 description", helpLink: "http://test3link"),
        };
        // Act: persist against the document, then read the data back.
        var original = new DiagnosticAnalyzerService.AnalysisData(version1, version2, diagnostics.ToImmutableArray());
        var state = new DiagnosticAnalyzerService.DiagnosticIncrementalAnalyzer.DiagnosticState("Test", VersionStamp.Default, LanguageNames.CSharp);
        state.PersistAsync(document, original, CancellationToken.None).Wait();
        var recovered = state.TryGetExistingDataAsync(document, CancellationToken.None).Result;
        // Assert: versions and every diagnostic field survive serialization.
        Assert.Equal(original.TextVersion, recovered.TextVersion);
        Assert.Equal(original.DataVersion, recovered.DataVersion);
        AssertDiagnostics(original.Items, recovered.Items);
    }
}
[Fact, Trait(Roslyn.Test.Utilities.Traits.Feature, Roslyn.Test.Utilities.Traits.Features.Diagnostics)]
public void SerializationTest_Project()
{
    // Same round-trip check as SerializationTest_Document, but persisting at
    // project scope: the diagnostics carry no DocumentId or location data.
    using (var workspace = new TestWorkspace(TestExportProvider.ExportProviderWithCSharpAndVisualBasic, workspaceKind: "DiagnosticTest"))
    {
        // Arrange: project-level diagnostics (no text spans / file mappings).
        var utcTime = DateTime.UtcNow;
        var version1 = VersionStamp.Create(utcTime);
        var version2 = VersionStamp.Create(utcTime.AddDays(1));
        var document = workspace.CurrentSolution.AddProject("TestProject", "TestProject", LanguageNames.CSharp).AddDocument("TestDocument", "");
        var diagnostics = new[]
        {
            new DiagnosticData(
                "test1", "Test", "test1 message", "test1 message format",
                DiagnosticSeverity.Info, DiagnosticSeverity.Info, false, 1, ImmutableArray<string>.Empty,
                workspace, document.Project.Id, description: "test1 description", helpLink: "http://test1link"),
            new DiagnosticData(
                "test2", "Test", "test2 message", "test2 message format",
                DiagnosticSeverity.Warning, DiagnosticSeverity.Warning, true, 0, ImmutableArray.Create<string>("Test2"),
                workspace, document.Project.Id),
            new DiagnosticData(
                "test3", "Test", "test3 message", "test3 message format",
                DiagnosticSeverity.Error, DiagnosticSeverity.Warning, true, 2, ImmutableArray.Create<string>("Test3", "Test3_2"),
                workspace, document.Project.Id, description: "test3 description", helpLink: "http://test3link"),
        };
        // Act: persist against the project, then read the data back.
        var original = new DiagnosticAnalyzerService.AnalysisData(version1, version2, diagnostics.ToImmutableArray());
        var state = new DiagnosticAnalyzerService.DiagnosticIncrementalAnalyzer.DiagnosticState("Test", VersionStamp.Default, LanguageNames.CSharp);
        state.PersistAsync(document.Project, original, CancellationToken.None).Wait();
        var recovered = state.TryGetExistingDataAsync(document.Project, CancellationToken.None).Result;
        // Assert: versions and every diagnostic field survive serialization.
        Assert.Equal(original.TextVersion, recovered.TextVersion);
        Assert.Equal(original.DataVersion, recovered.DataVersion);
        AssertDiagnostics(original.Items, recovered.Items);
    }
}
/// <summary>
/// Asserts that two diagnostic arrays are element-wise identical,
/// field by field.
/// </summary>
/// <param name="items1">The expected (original) diagnostics.</param>
/// <param name="items2">The actual (round-tripped) diagnostics.</param>
private void AssertDiagnostics(ImmutableArray<DiagnosticData> items1, ImmutableArray<DiagnosticData> items2)
{
    // Bug fix: the original compared items1.Length against itself, so a
    // length mismatch between the two arrays was never detected (and a
    // shorter items2 would surface as an index-out-of-range instead).
    Assert.Equal(items1.Length, items2.Length);
    for (var i = 0; i < items1.Length; i++)
    {
        Assert.Equal(items1[i].Id, items2[i].Id);
        Assert.Equal(items1[i].Category, items2[i].Category);
        Assert.Equal(items1[i].Message, items2[i].Message);
        Assert.Equal(items1[i].MessageFormat, items2[i].MessageFormat);
        Assert.Equal(items1[i].Severity, items2[i].Severity);
        Assert.Equal(items1[i].IsEnabledByDefault, items2[i].IsEnabledByDefault);
        Assert.Equal(items1[i].WarningLevel, items2[i].WarningLevel);
        Assert.Equal(items1[i].DefaultSeverity, items2[i].DefaultSeverity);
        // Custom tags are compared element-wise as well.
        Assert.Equal(items1[i].CustomTags.Count, items2[i].CustomTags.Count);
        for (var j = 0; j < items1[i].CustomTags.Count; j++)
        {
            Assert.Equal(items1[i].CustomTags[j], items2[i].CustomTags[j]);
        }
        Assert.Equal(items1[i].Workspace, items2[i].Workspace);
        Assert.Equal(items1[i].ProjectId, items2[i].ProjectId);
        Assert.Equal(items1[i].DocumentId, items2[i].DocumentId);
        // The text span is only meaningful when HasTextSpan is set.
        Assert.Equal(items1[i].HasTextSpan, items2[i].HasTextSpan);
        if (items1[i].HasTextSpan)
        {
            Assert.Equal(items1[i].TextSpan, items2[i].TextSpan);
        }
        Assert.Equal(items1[i].MappedFilePath, items2[i].MappedFilePath);
        Assert.Equal(items1[i].MappedStartLine, items2[i].MappedStartLine);
        Assert.Equal(items1[i].MappedStartColumn, items2[i].MappedStartColumn);
        Assert.Equal(items1[i].MappedEndLine, items2[i].MappedEndLine);
        Assert.Equal(items1[i].MappedEndColumn, items2[i].MappedEndColumn);
        Assert.Equal(items1[i].OriginalFilePath, items2[i].OriginalFilePath);
        Assert.Equal(items1[i].OriginalStartLine, items2[i].OriginalStartLine);
        Assert.Equal(items1[i].OriginalStartColumn, items2[i].OriginalStartColumn);
        Assert.Equal(items1[i].OriginalEndLine, items2[i].OriginalEndLine);
        Assert.Equal(items1[i].OriginalEndColumn, items2[i].OriginalEndColumn);
        Assert.Equal(items1[i].Description, items2[i].Description);
        Assert.Equal(items1[i].HelpLink, items2[i].HelpLink);
    }
}
/// <summary>
/// Test-only persistence service, exported for the "DiagnosticTest" workspace
/// kind, that stores streams in an in-memory dictionary instead of on disk.
/// </summary>
[ExportWorkspaceServiceFactory(typeof(IPersistentStorageService), "DiagnosticTest"), Shared]
public class PersistentStorageServiceFactory : IWorkspaceServiceFactory
{
    public IWorkspaceService CreateService(HostWorkspaceServices workspaceServices)
    {
        return new Service();
    }
    public class Service : IPersistentStorageService
    {
        // A single shared storage instance: every solution sees the same map.
        private readonly Storage _instance = new Storage();
        IPersistentStorage IPersistentStorageService.GetStorage(Solution solution)
        {
            return _instance;
        }
        internal class Storage : IPersistentStorage
        {
            // Keys are either the bare name (solution scope) or a
            // Tuple of (project|document, name) for the scoped overloads.
            private readonly Dictionary<object, Stream> _map = new Dictionary<object, Stream>();
            public Task<Stream> ReadStreamAsync(string name, CancellationToken cancellationToken = default(CancellationToken))
            {
                return ReadAsync(name);
            }
            public Task<Stream> ReadStreamAsync(Project project, string name, CancellationToken cancellationToken = default(CancellationToken))
            {
                return ReadAsync(Tuple.Create(project, name));
            }
            public Task<Stream> ReadStreamAsync(Document document, string name, CancellationToken cancellationToken = default(CancellationToken))
            {
                return ReadAsync(Tuple.Create(document, name));
            }
            public Task<bool> WriteStreamAsync(string name, Stream stream, CancellationToken cancellationToken = default(CancellationToken))
            {
                return WriteAsync(name, stream);
            }
            public Task<bool> WriteStreamAsync(Project project, string name, Stream stream, CancellationToken cancellationToken = default(CancellationToken))
            {
                return WriteAsync(Tuple.Create(project, name), stream);
            }
            public Task<bool> WriteStreamAsync(Document document, string name, Stream stream, CancellationToken cancellationToken = default(CancellationToken))
            {
                return WriteAsync(Tuple.Create(document, name), stream);
            }
            // Shared read path: rewind so repeated reads of the same entry
            // start from the beginning. Throws KeyNotFoundException for an
            // unknown key (acceptable in this test fixture).
            private Task<Stream> ReadAsync(object key)
            {
                var stream = _map[key];
                stream.Position = 0;
                return Task.FromResult(stream);
            }
            // Shared write path: copy into a private MemoryStream so the
            // caller may dispose its stream afterwards. The original code
            // looked the key up twice per write; here the key is computed
            // once and the dictionary assigned once.
            private Task<bool> WriteAsync(object key, Stream stream)
            {
                var copy = new MemoryStream();
                stream.CopyTo(copy);
                _map[key] = copy;
                return SpecializedTasks.True;
            }
            protected virtual void Dispose(bool disposing)
            {
                // Nothing to release; MemoryStream holds no unmanaged resources.
            }
            public void Dispose()
            {
                Dispose(true);
            }
        }
    }
}
}
}
| |
using System;
using System.Data;
using Csla;
using Csla.Data;
using ParentLoadSoftDelete.DataAccess;
using ParentLoadSoftDelete.DataAccess.ERCLevel;
namespace ParentLoadSoftDelete.Business.ERCLevel
{
/// <summary>
/// F09_CityColl (editable child list).<br/>
/// This is a generated base class of <see cref="F09_CityColl"/> business object.
/// </summary>
/// <remarks>
/// This class is child of <see cref="F08_Region"/> editable child object.<br/>
/// The items of the collection are <see cref="F10_City"/> objects.
/// </remarks>
[Serializable]
public partial class F09_CityColl : BusinessListBase<F09_CityColl, F10_City>
{

    #region Collection Business Methods

    /// <summary>
    /// Removes a <see cref="F10_City"/> item from the collection.
    /// </summary>
    /// <param name="city_ID">The City_ID of the item to be removed.</param>
    public void Remove(int city_ID)
    {
        // Locate the first matching child and let the base list handle the removal.
        var match = FindF10_CityByParentProperties(city_ID);
        if (match != null)
        {
            Remove(match);
        }
    }

    /// <summary>
    /// Determines whether a <see cref="F10_City"/> item is in the collection.
    /// </summary>
    /// <param name="city_ID">The City_ID of the item to search for.</param>
    /// <returns><c>true</c> if the F10_City is a collection item; otherwise, <c>false</c>.</returns>
    public bool Contains(int city_ID)
    {
        return FindF10_CityByParentProperties(city_ID) != null;
    }

    /// <summary>
    /// Determines whether a <see cref="F10_City"/> item is in the collection's DeletedList.
    /// </summary>
    /// <param name="city_ID">The City_ID of the item to search for.</param>
    /// <returns><c>true</c> if the F10_City is a deleted collection item; otherwise, <c>false</c>.</returns>
    public bool ContainsDeleted(int city_ID)
    {
        foreach (var deletedCity in DeletedList)
        {
            if (deletedCity.City_ID == city_ID)
            {
                return true;
            }
        }
        return false;
    }

    #endregion

    #region Find Methods

    /// <summary>
    /// Finds a <see cref="F10_City"/> item of the <see cref="F09_CityColl"/> collection, based on item key properties.
    /// </summary>
    /// <param name="city_ID">The City_ID.</param>
    /// <returns>A <see cref="F10_City"/> object, or <c>null</c> if no item matches.</returns>
    public F10_City FindF10_CityByParentProperties(int city_ID)
    {
        foreach (var city in this)
        {
            if (city.City_ID.Equals(city_ID))
            {
                return city;
            }
        }
        return null;
    }

    #endregion

    #region Factory Methods

    /// <summary>
    /// Factory method. Creates a new <see cref="F09_CityColl"/> collection.
    /// </summary>
    /// <returns>A reference to the created <see cref="F09_CityColl"/> collection.</returns>
    internal static F09_CityColl NewF09_CityColl()
    {
        return DataPortal.CreateChild<F09_CityColl>();
    }

    /// <summary>
    /// Factory method. Loads a <see cref="F09_CityColl"/> object from the given SafeDataReader.
    /// </summary>
    /// <param name="dr">The SafeDataReader to use.</param>
    /// <returns>A reference to the fetched <see cref="F09_CityColl"/> object.</returns>
    internal static F09_CityColl GetF09_CityColl(SafeDataReader dr)
    {
        var collection = new F09_CityColl();
        // Tell the framework this collection is a child object before loading it.
        collection.MarkAsChild();
        collection.Fetch(dr);
        return collection;
    }

    #endregion

    #region Constructor

    /// <summary>
    /// Initializes a new instance of the <see cref="F09_CityColl"/> class.
    /// </summary>
    /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
    [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
    public F09_CityColl()
    {
        // Use factory methods and do not use direct creation.
        // This constructor only marks the list as a child and configures its edit rules.
        MarkAsChild();
        var savedRaiseListChangedEvents = RaiseListChangedEvents;
        RaiseListChangedEvents = false;
        AllowNew = true;
        AllowEdit = true;
        AllowRemove = true;
        RaiseListChangedEvents = savedRaiseListChangedEvents;
    }

    #endregion

    #region Data Access

    /// <summary>
    /// Loads all <see cref="F09_CityColl"/> collection items from the given SafeDataReader.
    /// </summary>
    /// <param name="dr">The SafeDataReader to use.</param>
    private void Fetch(SafeDataReader dr)
    {
        var savedRaiseListChangedEvents = RaiseListChangedEvents;
        RaiseListChangedEvents = false;
        var hookArgs = new DataPortalHookArgs(dr);
        OnFetchPre(hookArgs);
        while (dr.Read())
        {
            Add(F10_City.GetF10_City(dr));
        }
        OnFetchPost(hookArgs);
        RaiseListChangedEvents = savedRaiseListChangedEvents;
    }

    /// <summary>
    /// Loads <see cref="F10_City"/> items on the F09_CityObjects collection.
    /// </summary>
    /// <param name="collection">The grand parent <see cref="F07_RegionColl"/> collection.</param>
    internal void LoadItems(F07_RegionColl collection)
    {
        foreach (var city in this)
        {
            // Re-parent each city under its owning region, suppressing list-changed events while adding.
            var region = collection.FindF08_RegionByParentProperties(city.parent_Region_ID);
            var savedRaiseListChangedEvents = region.F09_CityObjects.RaiseListChangedEvents;
            region.F09_CityObjects.RaiseListChangedEvents = false;
            region.F09_CityObjects.Add(city);
            region.F09_CityObjects.RaiseListChangedEvents = savedRaiseListChangedEvents;
        }
    }

    #endregion

    #region DataPortal Hooks

    /// <summary>
    /// Occurs after setting query parameters and before the fetch operation.
    /// </summary>
    partial void OnFetchPre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after the fetch operation (object or collection is fully loaded and set up).
    /// </summary>
    partial void OnFetchPost(DataPortalHookArgs args);

    #endregion

}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
namespace DalSic
{
/// <summary>
/// Strongly-typed collection for the SysParentesco class.
/// </summary>
[Serializable]
public partial class SysParentescoCollection : ActiveList<SysParentesco, SysParentescoCollection>
{
    public SysParentescoCollection() {}

    /// <summary>
    /// Filters an existing collection based on the set criteria. This is an in-memory filter.
    /// Only <c>Comparison.Equals</c> criteria are evaluated; any other comparison type
    /// leaves the item in place. Thanks to developingchris for this!
    /// </summary>
    /// <returns>The same SysParentescoCollection instance, with non-matching items removed.</returns>
    public SysParentescoCollection Filter()
    {
        // Iterate backwards so removing the current item does not shift the
        // indices of items still to be visited.
        for (int i = this.Count - 1; i > -1; i--)
        {
            SysParentesco o = this[i];
            foreach (SubSonic.Where w in this.wheres)
            {
                bool remove = false;
                System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
                // Guard against a Where clause that names a column with no matching
                // property — the original dereferenced pi unconditionally and would
                // throw NullReferenceException here.
                if (pi != null && pi.CanRead)
                {
                    object val = pi.GetValue(o, null);
                    switch (w.Comparison)
                    {
                        case SubSonic.Comparison.Equals:
                            // object.Equals is null-safe, unlike val.Equals(...) which
                            // threw when the property value was null.
                            if (!object.Equals(val, w.ParameterValue))
                            {
                                remove = true;
                            }
                            break;
                    }
                }
                if (remove)
                {
                    this.Remove(o);
                    break; // item is gone; move on to the next index
                }
            }
        }
        return this;
    }
}
/// <summary>
/// This is an ActiveRecord class which wraps the Sys_Parentesco table.
/// </summary>
[Serializable]
public partial class SysParentesco : ActiveRecord<SysParentesco>, IActiveRecord
{
    #region .ctors and Default Settings

    /// <summary>Creates a new, empty record marked as new.</summary>
    public SysParentesco()
    {
        SetSQLProps();
        InitSetDefaults();
        MarkNew();
    }

    private void InitSetDefaults() { SetDefaults(); }

    /// <summary>Creates a new record, optionally forcing database column defaults.</summary>
    public SysParentesco(bool useDatabaseDefaults)
    {
        SetSQLProps();
        if(useDatabaseDefaults)
            ForceDefaults();
        MarkNew();
    }

    /// <summary>Loads an existing record by primary key.</summary>
    public SysParentesco(object keyID)
    {
        SetSQLProps();
        InitSetDefaults();
        LoadByKey(keyID);
    }

    /// <summary>Loads an existing record by an arbitrary column name/value pair.</summary>
    public SysParentesco(string columnName, object columnValue)
    {
        SetSQLProps();
        InitSetDefaults();
        LoadByParam(columnName,columnValue);
    }

    protected static void SetSQLProps() { GetTableSchema(); }

    #endregion

    #region Schema and Query Accessor

    /// <summary>Creates a new query against the Sys_Parentesco schema.</summary>
    public static Query CreateQuery() { return new Query(Schema); }

    public static TableSchema.Table Schema
    {
        get
        {
            if (BaseSchema == null)
                SetSQLProps();
            return BaseSchema;
        }
    }

    /// <summary>
    /// Defines one column and appends it to <paramref name="schema"/>. Extracted from
    /// GetTableSchema to replace sixteen near-identical 11-line blocks. Every
    /// Sys_Parentesco column in the original schema was NOT NULL and writable, so
    /// IsNullable/IsReadOnly are fixed rather than parameterized.
    /// </summary>
    private static void AddSchemaColumn(TableSchema.Table schema, string columnName, DbType dataType, int maxLength, bool autoIncrement, bool isPrimaryKey, bool isForeignKey, string defaultSetting, string foreignKeyTableName)
    {
        TableSchema.TableColumn column = new TableSchema.TableColumn(schema);
        column.ColumnName = columnName;
        column.DataType = dataType;
        column.MaxLength = maxLength;
        column.AutoIncrement = autoIncrement;
        column.IsNullable = false;
        column.IsPrimaryKey = isPrimaryKey;
        column.IsForeignKey = isForeignKey;
        column.IsReadOnly = false;
        column.DefaultSetting = defaultSetting;
        column.ForeignKeyTableName = foreignKeyTableName;
        schema.Columns.Add(column);
    }

    private static void GetTableSchema()
    {
        if(!IsSchemaInitialized)
        {
            //Schema declaration
            TableSchema.Table schema = new TableSchema.Table("Sys_Parentesco", TableType.Table, DataService.GetInstance("sicProvider"));
            schema.Columns = new TableSchema.TableColumnCollection();
            schema.SchemaName = @"dbo";
            // Column order matters: the Typed Columns region below indexes by position.
            AddSchemaColumn(schema, "idParentesco", DbType.Int32, 0, true, true, false, @"", "");
            AddSchemaColumn(schema, "nombre", DbType.String, 50, false, false, false, @"", "");
            AddSchemaColumn(schema, "apellido", DbType.String, 50, false, false, false, @"", "");
            AddSchemaColumn(schema, "idTipoDocumento", DbType.Int32, 0, false, false, true, @"", "Sys_TipoDocumento");
            AddSchemaColumn(schema, "numeroDocumento", DbType.Int32, 0, false, false, false, @"", "");
            AddSchemaColumn(schema, "fechaNacimiento", DbType.DateTime, 0, false, false, false, @"(((1)/(1))/(1900))", "");
            AddSchemaColumn(schema, "idProvincia", DbType.Int32, 0, false, false, true, @"((0))", "Sys_Provincia");
            AddSchemaColumn(schema, "idPais", DbType.Int32, 0, false, false, true, @"((0))", "Sys_Pais");
            AddSchemaColumn(schema, "idSituacionLaboral", DbType.Int32, 0, false, false, true, @"((0))", "Sys_SituacionLaboral");
            AddSchemaColumn(schema, "idNivelInstruccion", DbType.Int32, 0, false, false, true, @"((0))", "Sys_NivelInstruccion");
            AddSchemaColumn(schema, "idProfesion", DbType.Int32, 0, false, false, true, @"((0))", "Sys_Profesion");
            AddSchemaColumn(schema, "idPaciente", DbType.Int32, 0, false, false, true, @"", "Sys_Paciente");
            AddSchemaColumn(schema, "tipoParentesco", DbType.String, 50, false, false, false, @"", "");
            AddSchemaColumn(schema, "idUsuario", DbType.Int32, 0, false, false, false, @"", "");
            AddSchemaColumn(schema, "fechaModificacion", DbType.DateTime, 0, false, false, false, @"", "");
            AddSchemaColumn(schema, "idAntecedente", DbType.Int32, 0, false, false, true, @"((0))", "Sys_Antecedente");
            BaseSchema = schema;
            //add this schema to the provider
            //so we can query it later
            DataService.Providers["sicProvider"].AddSchema("Sys_Parentesco",schema);
        }
    }

    #endregion

    #region Props

    /// <summary>Gets or sets the idParentesco column value (identity primary key).</summary>
    [XmlAttribute("IdParentesco")]
    [Bindable(true)]
    public int IdParentesco
    {
        get { return GetColumnValue<int>(Columns.IdParentesco); }
        set { SetColumnValue(Columns.IdParentesco, value); }
    }

    /// <summary>Gets or sets the nombre column value.</summary>
    [XmlAttribute("Nombre")]
    [Bindable(true)]
    public string Nombre
    {
        get { return GetColumnValue<string>(Columns.Nombre); }
        set { SetColumnValue(Columns.Nombre, value); }
    }

    /// <summary>Gets or sets the apellido column value.</summary>
    [XmlAttribute("Apellido")]
    [Bindable(true)]
    public string Apellido
    {
        get { return GetColumnValue<string>(Columns.Apellido); }
        set { SetColumnValue(Columns.Apellido, value); }
    }

    /// <summary>Gets or sets the idTipoDocumento column value (FK to Sys_TipoDocumento).</summary>
    [XmlAttribute("IdTipoDocumento")]
    [Bindable(true)]
    public int IdTipoDocumento
    {
        get { return GetColumnValue<int>(Columns.IdTipoDocumento); }
        set { SetColumnValue(Columns.IdTipoDocumento, value); }
    }

    /// <summary>Gets or sets the numeroDocumento column value.</summary>
    [XmlAttribute("NumeroDocumento")]
    [Bindable(true)]
    public int NumeroDocumento
    {
        get { return GetColumnValue<int>(Columns.NumeroDocumento); }
        set { SetColumnValue(Columns.NumeroDocumento, value); }
    }

    /// <summary>Gets or sets the fechaNacimiento column value.</summary>
    [XmlAttribute("FechaNacimiento")]
    [Bindable(true)]
    public DateTime FechaNacimiento
    {
        get { return GetColumnValue<DateTime>(Columns.FechaNacimiento); }
        set { SetColumnValue(Columns.FechaNacimiento, value); }
    }

    /// <summary>Gets or sets the idProvincia column value (FK to Sys_Provincia).</summary>
    [XmlAttribute("IdProvincia")]
    [Bindable(true)]
    public int IdProvincia
    {
        get { return GetColumnValue<int>(Columns.IdProvincia); }
        set { SetColumnValue(Columns.IdProvincia, value); }
    }

    /// <summary>Gets or sets the idPais column value (FK to Sys_Pais).</summary>
    [XmlAttribute("IdPais")]
    [Bindable(true)]
    public int IdPais
    {
        get { return GetColumnValue<int>(Columns.IdPais); }
        set { SetColumnValue(Columns.IdPais, value); }
    }

    /// <summary>Gets or sets the idSituacionLaboral column value (FK to Sys_SituacionLaboral).</summary>
    [XmlAttribute("IdSituacionLaboral")]
    [Bindable(true)]
    public int IdSituacionLaboral
    {
        get { return GetColumnValue<int>(Columns.IdSituacionLaboral); }
        set { SetColumnValue(Columns.IdSituacionLaboral, value); }
    }

    /// <summary>Gets or sets the idNivelInstruccion column value (FK to Sys_NivelInstruccion).</summary>
    [XmlAttribute("IdNivelInstruccion")]
    [Bindable(true)]
    public int IdNivelInstruccion
    {
        get { return GetColumnValue<int>(Columns.IdNivelInstruccion); }
        set { SetColumnValue(Columns.IdNivelInstruccion, value); }
    }

    /// <summary>Gets or sets the idProfesion column value (FK to Sys_Profesion).</summary>
    [XmlAttribute("IdProfesion")]
    [Bindable(true)]
    public int IdProfesion
    {
        get { return GetColumnValue<int>(Columns.IdProfesion); }
        set { SetColumnValue(Columns.IdProfesion, value); }
    }

    /// <summary>Gets or sets the idPaciente column value (FK to Sys_Paciente).</summary>
    [XmlAttribute("IdPaciente")]
    [Bindable(true)]
    public int IdPaciente
    {
        get { return GetColumnValue<int>(Columns.IdPaciente); }
        set { SetColumnValue(Columns.IdPaciente, value); }
    }

    /// <summary>Gets or sets the tipoParentesco column value.</summary>
    [XmlAttribute("TipoParentesco")]
    [Bindable(true)]
    public string TipoParentesco
    {
        get { return GetColumnValue<string>(Columns.TipoParentesco); }
        set { SetColumnValue(Columns.TipoParentesco, value); }
    }

    /// <summary>Gets or sets the idUsuario column value.</summary>
    [XmlAttribute("IdUsuario")]
    [Bindable(true)]
    public int IdUsuario
    {
        get { return GetColumnValue<int>(Columns.IdUsuario); }
        set { SetColumnValue(Columns.IdUsuario, value); }
    }

    /// <summary>Gets or sets the fechaModificacion column value.</summary>
    [XmlAttribute("FechaModificacion")]
    [Bindable(true)]
    public DateTime FechaModificacion
    {
        get { return GetColumnValue<DateTime>(Columns.FechaModificacion); }
        set { SetColumnValue(Columns.FechaModificacion, value); }
    }

    /// <summary>Gets or sets the idAntecedente column value (FK to Sys_Antecedente).</summary>
    [XmlAttribute("IdAntecedente")]
    [Bindable(true)]
    public int IdAntecedente
    {
        get { return GetColumnValue<int>(Columns.IdAntecedente); }
        set { SetColumnValue(Columns.IdAntecedente, value); }
    }

    #endregion

    #region ForeignKey Properties

    /// <summary>
    /// Returns a SysSituacionLaboral ActiveRecord object related to this SysParentesco
    /// (fetched by IdSituacionLaboral on every get).
    /// </summary>
    public DalSic.SysSituacionLaboral SysSituacionLaboral
    {
        get { return DalSic.SysSituacionLaboral.FetchByID(this.IdSituacionLaboral); }
        set { SetColumnValue("idSituacionLaboral", value.IdSituacionLaboral); }
    }

    /// <summary>
    /// Returns a SysProfesion ActiveRecord object related to this SysParentesco
    /// (fetched by IdProfesion on every get).
    /// </summary>
    public DalSic.SysProfesion SysProfesion
    {
        get { return DalSic.SysProfesion.FetchByID(this.IdProfesion); }
        set { SetColumnValue("idProfesion", value.IdProfesion); }
    }

    /// <summary>
    /// Returns a SysAntecedente ActiveRecord object related to this SysParentesco
    /// (fetched by IdAntecedente on every get).
    /// </summary>
    public DalSic.SysAntecedente SysAntecedente
    {
        get { return DalSic.SysAntecedente.FetchByID(this.IdAntecedente); }
        set { SetColumnValue("idAntecedente", value.IdAntecedente); }
    }

    /// <summary>
    /// Returns a SysNivelInstruccion ActiveRecord object related to this SysParentesco
    /// (fetched by IdNivelInstruccion on every get).
    /// </summary>
    public DalSic.SysNivelInstruccion SysNivelInstruccion
    {
        get { return DalSic.SysNivelInstruccion.FetchByID(this.IdNivelInstruccion); }
        set { SetColumnValue("idNivelInstruccion", value.IdNivelInstruccion); }
    }

    /// <summary>
    /// Returns a SysPaciente ActiveRecord object related to this SysParentesco
    /// (fetched by IdPaciente on every get).
    /// </summary>
    public DalSic.SysPaciente SysPaciente
    {
        get { return DalSic.SysPaciente.FetchByID(this.IdPaciente); }
        set { SetColumnValue("idPaciente", value.IdPaciente); }
    }

    /// <summary>
    /// Returns a SysPai ActiveRecord object related to this SysParentesco
    /// (fetched by IdPais on every get).
    /// </summary>
    public DalSic.SysPai SysPai
    {
        get { return DalSic.SysPai.FetchByID(this.IdPais); }
        set { SetColumnValue("idPais", value.IdPais); }
    }

    /// <summary>
    /// Returns a SysTipoDocumento ActiveRecord object related to this SysParentesco
    /// (fetched by IdTipoDocumento on every get).
    /// </summary>
    public DalSic.SysTipoDocumento SysTipoDocumento
    {
        get { return DalSic.SysTipoDocumento.FetchByID(this.IdTipoDocumento); }
        set { SetColumnValue("idTipoDocumento", value.IdTipoDocumento); }
    }

    /// <summary>
    /// Returns a SysProvincium ActiveRecord object related to this SysParentesco
    /// (fetched by IdProvincia on every get).
    /// </summary>
    public DalSic.SysProvincium SysProvincium
    {
        get { return DalSic.SysProvincium.FetchByID(this.IdProvincia); }
        set { SetColumnValue("idProvincia", value.IdProvincia); }
    }

    #endregion

    //no ManyToMany tables defined (0)

    #region ObjectDataSource support

    /// <summary>
    /// Builds a SysParentesco populated with the given non-key column values.
    /// Shared by Insert and Update, which previously duplicated these assignments.
    /// </summary>
    private static SysParentesco BuildItem(string varNombre,string varApellido,int varIdTipoDocumento,int varNumeroDocumento,DateTime varFechaNacimiento,int varIdProvincia,int varIdPais,int varIdSituacionLaboral,int varIdNivelInstruccion,int varIdProfesion,int varIdPaciente,string varTipoParentesco,int varIdUsuario,DateTime varFechaModificacion,int varIdAntecedente)
    {
        SysParentesco item = new SysParentesco();
        item.Nombre = varNombre;
        item.Apellido = varApellido;
        item.IdTipoDocumento = varIdTipoDocumento;
        item.NumeroDocumento = varNumeroDocumento;
        item.FechaNacimiento = varFechaNacimiento;
        item.IdProvincia = varIdProvincia;
        item.IdPais = varIdPais;
        item.IdSituacionLaboral = varIdSituacionLaboral;
        item.IdNivelInstruccion = varIdNivelInstruccion;
        item.IdProfesion = varIdProfesion;
        item.IdPaciente = varIdPaciente;
        item.TipoParentesco = varTipoParentesco;
        item.IdUsuario = varIdUsuario;
        item.FechaModificacion = varFechaModificacion;
        item.IdAntecedente = varIdAntecedente;
        return item;
    }

    /// <summary>
    /// Saves the item under the identity of the current web user when running in a web
    /// context, otherwise under the current thread principal.
    /// </summary>
    private static void SaveForObjectDataSource(SysParentesco item)
    {
        if (System.Web.HttpContext.Current != null)
            item.Save(System.Web.HttpContext.Current.User.Identity.Name);
        else
            item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
    }

    /// <summary>
    /// Inserts a record, can be used with the Object Data Source
    /// </summary>
    public static void Insert(string varNombre,string varApellido,int varIdTipoDocumento,int varNumeroDocumento,DateTime varFechaNacimiento,int varIdProvincia,int varIdPais,int varIdSituacionLaboral,int varIdNivelInstruccion,int varIdProfesion,int varIdPaciente,string varTipoParentesco,int varIdUsuario,DateTime varFechaModificacion,int varIdAntecedente)
    {
        SysParentesco item = BuildItem(varNombre, varApellido, varIdTipoDocumento, varNumeroDocumento, varFechaNacimiento, varIdProvincia, varIdPais, varIdSituacionLaboral, varIdNivelInstruccion, varIdProfesion, varIdPaciente, varTipoParentesco, varIdUsuario, varFechaModificacion, varIdAntecedente);
        SaveForObjectDataSource(item);
    }

    /// <summary>
    /// Updates a record, can be used with the Object Data Source
    /// </summary>
    public static void Update(int varIdParentesco,string varNombre,string varApellido,int varIdTipoDocumento,int varNumeroDocumento,DateTime varFechaNacimiento,int varIdProvincia,int varIdPais,int varIdSituacionLaboral,int varIdNivelInstruccion,int varIdProfesion,int varIdPaciente,string varTipoParentesco,int varIdUsuario,DateTime varFechaModificacion,int varIdAntecedente)
    {
        SysParentesco item = BuildItem(varNombre, varApellido, varIdTipoDocumento, varNumeroDocumento, varFechaNacimiento, varIdProvincia, varIdPais, varIdSituacionLaboral, varIdNivelInstruccion, varIdProfesion, varIdPaciente, varTipoParentesco, varIdUsuario, varFechaModificacion, varIdAntecedente);
        item.IdParentesco = varIdParentesco;
        item.IsNew = false; // force an UPDATE rather than an INSERT
        SaveForObjectDataSource(item);
    }

    #endregion

    #region Typed Columns

    // Positional accessors into the schema; indices follow the AddSchemaColumn
    // calls in GetTableSchema above.
    public static TableSchema.TableColumn IdParentescoColumn
    {
        get { return Schema.Columns[0]; }
    }
    public static TableSchema.TableColumn NombreColumn
    {
        get { return Schema.Columns[1]; }
    }
    public static TableSchema.TableColumn ApellidoColumn
    {
        get { return Schema.Columns[2]; }
    }
    public static TableSchema.TableColumn IdTipoDocumentoColumn
    {
        get { return Schema.Columns[3]; }
    }
    public static TableSchema.TableColumn NumeroDocumentoColumn
    {
        get { return Schema.Columns[4]; }
    }
    public static TableSchema.TableColumn FechaNacimientoColumn
    {
        get { return Schema.Columns[5]; }
    }
    public static TableSchema.TableColumn IdProvinciaColumn
    {
        get { return Schema.Columns[6]; }
    }
    public static TableSchema.TableColumn IdPaisColumn
    {
        get { return Schema.Columns[7]; }
    }
    public static TableSchema.TableColumn IdSituacionLaboralColumn
    {
        get { return Schema.Columns[8]; }
    }
    public static TableSchema.TableColumn IdNivelInstruccionColumn
    {
        get { return Schema.Columns[9]; }
    }
    public static TableSchema.TableColumn IdProfesionColumn
    {
        get { return Schema.Columns[10]; }
    }
    public static TableSchema.TableColumn IdPacienteColumn
    {
        get { return Schema.Columns[11]; }
    }
    public static TableSchema.TableColumn TipoParentescoColumn
    {
        get { return Schema.Columns[12]; }
    }
    public static TableSchema.TableColumn IdUsuarioColumn
    {
        get { return Schema.Columns[13]; }
    }
    public static TableSchema.TableColumn FechaModificacionColumn
    {
        get { return Schema.Columns[14]; }
    }
    public static TableSchema.TableColumn IdAntecedenteColumn
    {
        get { return Schema.Columns[15]; }
    }

    #endregion

    #region Columns Struct

    // Raw column names as stored in the database; used as keys by the Props region.
    public struct Columns
    {
        public static string IdParentesco = @"idParentesco";
        public static string Nombre = @"nombre";
        public static string Apellido = @"apellido";
        public static string IdTipoDocumento = @"idTipoDocumento";
        public static string NumeroDocumento = @"numeroDocumento";
        public static string FechaNacimiento = @"fechaNacimiento";
        public static string IdProvincia = @"idProvincia";
        public static string IdPais = @"idPais";
        public static string IdSituacionLaboral = @"idSituacionLaboral";
        public static string IdNivelInstruccion = @"idNivelInstruccion";
        public static string IdProfesion = @"idProfesion";
        public static string IdPaciente = @"idPaciente";
        public static string TipoParentesco = @"tipoParentesco";
        public static string IdUsuario = @"idUsuario";
        public static string FechaModificacion = @"fechaModificacion";
        public static string IdAntecedente = @"idAntecedente";
    }

    #endregion

    #region Update PK Collections

    #endregion

    #region Deep Save

    #endregion
}
}
| |
using System;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore;
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.IdentityModel.Tokens;
using System.Text;
using Kroeg.Server.Configuration;
using Kroeg.Server.Middleware;
using Kroeg.EntityStore.Models;
using Kroeg.EntityStore.Store;
using Microsoft.AspNetCore.Http;
using Kroeg.Server.Services.Template;
using Microsoft.AspNetCore.Authentication.JwtBearer;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.AspNetCore.Identity;
using Npgsql;
using System.Data;
using System.Data.Common;
using Npgsql.Logging;
using System.Runtime.Loader;
using System.Collections.Generic;
using Kroeg.Services;
using System.IO;
using Kroeg.ActivityPub.Shared;
using Kroeg.ActivityPub.ServerToServer;
using Kroeg.ActivityPub.ClientToServer;
using Kroeg.ActivityPub;
using Kroeg.EntityStore;
using Kroeg.EntityStore.Services;
using Kroeg.EntityStore.Notifier;
using Kroeg.EntityStore.Salmon;
using Kroeg.ActivityPub.Services;
namespace Kroeg.Server
{
public class Startup
{
public Startup(IHostingEnvironment env)
{
    // Compose configuration in precedence order: base appsettings.json (required),
    // per-environment override (optional), then environment variables on top.
    Configuration = new ConfigurationBuilder()
        .SetBasePath(env.ContentRootPath)
        .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
        .AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true)
        .AddEnvironmentVariables()
        .Build();
}
// Root of the composed application configuration; assigned once in the constructor.
public IConfigurationRoot Configuration { get; }
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
// Add framework services.
services.AddMvc();
services.AddScoped<NpgsqlConnection>((svc) => new NpgsqlConnection(Configuration.GetConnectionString("Default")));
services.AddScoped<DbConnection, NpgsqlConnection>((svc) => svc.GetService<NpgsqlConnection>());
services.AddTransient<IUserStore<APUser>, KroegUserStore>();
services.AddTransient<IUserPasswordStore<APUser>, KroegUserStore>();
services.AddTransient<IRoleStore<IdentityRole>, KroegUserStore>();
services.AddIdentity<APUser, IdentityRole>()
.AddDefaultTokenProviders();
services.AddAuthorization(options =>
{
options.AddPolicy("admin", policy => policy.RequireClaim("admin"));
options.AddPolicy("pass", policy => policy.AddAuthenticationSchemes(IdentityConstants.ApplicationScheme).RequireAuthenticatedUser());
});
services.Configure<IdentityOptions>(options =>
{
options.Password.RequireDigit = false;
options.Password.RequiredLength = 0;
options.Password.RequireNonAlphanumeric = false;
options.Password.RequireUppercase = false;
options.Password.RequireLowercase = false;
});
var signingKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(Configuration.GetSection("Kroeg")["TokenSigningKey"]));
var tokenSettings = new JwtTokenSettings
{
Audience =Configuration.GetSection("Kroeg")["BaseUri"],
Issuer = Configuration.GetSection("Kroeg")["BaseUri"],
ExpiryTime = TimeSpan.FromDays(30),
Credentials = new SigningCredentials(signingKey, SecurityAlgorithms.HmacSha256),
ValidationParameters = new TokenValidationParameters
{
ValidateIssuerSigningKey = true,
IssuerSigningKey = signingKey,
ValidateIssuer = true,
ValidIssuer = Configuration.GetSection("Kroeg")["BaseUri"],
ValidateAudience = true,
ValidAudience = Configuration.GetSection("Kroeg")["BaseUri"],
ValidateLifetime = true,
ClockSkew = TimeSpan.Zero
}
};
services.AddSingleton(tokenSettings);
services.AddSingleton(new ServerConfig(Configuration.GetSection("Kroeg")));
services.AddSingleton<URLService>(a => new URLService(a.GetService<ServerConfig>()) {
EntityNames = Configuration.GetSection("EntityNames")
});
services.AddScoped<INotifier, LocalNotifier>();
services.AddSingleton(Configuration);
services.AddTransient<IAuthorizer, DefaultAuthorizer>();
services.AddTransient<DeliveryService>();
services.AddTransient<RelevantEntitiesService>();
services.AddScoped<TripleEntityStore>();
services.AddScoped<CollectionTools>();
services.AddScoped<FakeEntityService>();
services.AddScoped<EntityFlattener>();
services.AddScoped<KeyService>();
services.AddScoped<IEntityStore>((provider) =>
{
var triple = provider.GetRequiredService<TripleEntityStore>();
var flattener = provider.GetRequiredService<EntityFlattener>();
var httpAccessor = provider.GetService<IHttpContextAccessor>();
var fakeEntityService = provider.GetService<FakeEntityService>();
var keyService = provider.GetService<KeyService>();
var retrieving = new RetrievingEntityStore(triple, flattener, provider, keyService, httpAccessor);
return new FakeEntityStore(fakeEntityService, retrieving);
});
services.AddSingleton<TemplateService>();
services.AddTransient<SignatureVerifier>();
services.AddAuthentication(o => {
o.DefaultAuthenticateScheme = JwtBearerDefaults.AuthenticationScheme;
o.DefaultChallengeScheme = IdentityConstants.ApplicationScheme;
o.DefaultSignInScheme = IdentityConstants.ApplicationScheme;
})
.AddJwtBearer((options) => {
options.TokenValidationParameters = tokenSettings.ValidationParameters;
options.Audience =Configuration.GetSection("Kroeg")["BaseUri"];
options.ClaimsIssuer = Configuration.GetSection("Kroeg")["BaseUri"];
});
services.ConfigureApplicationCookie((options) => {
options.Cookie.Name = "Kroeg.Auth";
options.LoginPath = "/auth/login";
});
var typeMap = new Dictionary<string, Type>();
foreach (var module in Configuration.GetSection("Kroeg").GetSection("Modules").GetChildren())
{
var assembly = AssemblyLoadContext.Default.LoadFromAssemblyPath(Path.Combine(Directory.GetCurrentDirectory(), module.Value));
foreach (var type in assembly.GetTypes())
{
if (type.IsSubclassOf(typeof(BaseHandler)))
typeMap[type.FullName] = type;
}
}
ServerConfig.ClientToServerHandlers.AddRange(new Type[] {
typeof(ObjectWrapperHandler),
typeof(ActivityMissingFieldsHandler),
typeof(CreateActivityHandler),
// commit changes before modifying collections
typeof(UpdateDeleteActivityHandler),
typeof(CommitChangesHandler),
typeof(AcceptRejectFollowHandler),
typeof(FollowLikeHandler),
typeof(AddRemoveActivityHandler),
typeof(UndoActivityHandler),
typeof(BlockHandler),
typeof(CreateActorHandler),
typeof(DeliveryHandler)
});
ServerConfig.ServerToServerHandlers.AddRange(new Type[] {
typeof(VerifyOwnershipHandler),
typeof(DeleteHandler),
typeof(FollowResponseHandler),
typeof(LikeFollowAnnounceHandler),
typeof(AddRemoveActivityHandler),
typeof(UndoHandler),
typeof(CreateHandler),
typeof(DeliveryHandler)
});
ServerConfig.Converters.AddRange(new IConverterFactory[]
{
new AS2ConverterFactory()
});
foreach (var extra in Configuration.GetSection("Kroeg").GetSection("Filters").GetChildren())
{
if (typeMap.ContainsKey(extra.Value))
{
ServerConfig.ClientToServerHandlers.Add(typeMap[extra.Value]);
ServerConfig.ServerToServerHandlers.Add(typeMap[extra.Value]);
}
}
services.AddScoped<DatabaseManager>();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
//
// BUGFIX: this was declared 'async void', so any exception thrown while installing the
// JSON-LD context was unobservable and the host finished starting before the context was
// ready. The hosting layer invokes Configure by reflection and only requires a void return,
// so we block on the task instead; there is no synchronization context in ASP.NET Core
// startup, so this cannot deadlock.
public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory)
{
    loggerFactory.AddConsole(Configuration.GetSection("Logging"));
    loggerFactory.AddDebug(LogLevel.Trace);

    // Pipeline order matters: authentication must run before the entity middleware and MVC.
    app.UseAuthentication();
    app.UseWebSockets();
    app.UseStaticFiles();
    app.UseDeveloperExceptionPage();
    app.UseMiddleware<GetEntityMiddleware>();
    app.UseMvc();

    // Ensure the database schema exists before any background worker touches it.
    app.ApplicationServices.GetRequiredService<DatabaseManager>().EnsureExists();

    // Spin up the configured number of background task queuers, each in its own DI scope.
    // NOTE(review): the instances are intentionally fire-and-forget; the queuer presumably
    // starts its own worker in its constructor — confirm against BackgroundTaskQueuer.
    for (int i = 0; i < int.Parse(Configuration.GetSection("Kroeg")["BackgroundThreads"]); i++)
    {
        var serviceProvider = app.ApplicationServices.GetRequiredService<IServiceScopeFactory>().CreateScope().ServiceProvider;
        ActivatorUtilities.CreateInstance<BackgroundTaskQueuer>(serviceProvider);
    }

    var sevc = app.ApplicationServices.GetRequiredService<ServerConfig>();

    // Install the JSON-LD context synchronously so startup does not race against it and
    // failures surface as startup failures instead of being silently dropped.
    ActivityStreams.ASObject.SetContext(JsonLDConfig.GetContext(true), sevc.BaseUri + "render/context")
        .GetAwaiter().GetResult();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Cache
{
    /// <summary>
    /// Cache metrics used to obtain statistics on cache itself.
    /// </summary>
    public interface ICacheMetrics
    {
        /// <summary>
        /// The number of get requests that were satisfied by the cache.
        /// </summary>
        /// <returns>
        /// The number of hits.
        /// </returns>
        long CacheHits { get; }
        /// <summary>
        /// This is a measure of cache efficiency.
        /// </summary>
        /// <returns>
        /// The percentage of successful hits, as a decimal e.g. 75.
        /// </returns>
        float CacheHitPercentage { get; }
        /// <summary>
        /// A miss is a get request that is not satisfied.
        /// </summary>
        /// <returns>
        /// The number of misses.
        /// </returns>
        long CacheMisses { get; }
        /// <summary>
        /// Returns the percentage of cache accesses that did not find a requested entry in the cache.
        /// </summary>
        /// <returns>
        /// The percentage of accesses that failed to find anything.
        /// </returns>
        float CacheMissPercentage { get; }
        /// <summary>
        /// The total number of requests to the cache. This will be equal to the sum of the hits and misses.
        /// </summary>
        /// <returns>
        /// The number of gets.
        /// </returns>
        long CacheGets { get; }
        /// <summary>
        /// The total number of puts to the cache.
        /// </summary>
        /// <returns>
        /// The number of puts.
        /// </returns>
        long CachePuts { get; }
        /// <summary>
        /// The total number of removals from the cache. This does not include evictions, where the cache itself
        /// initiates the removal to make space.
        /// </summary>
        /// <returns>
        /// The number of removals.
        /// </returns>
        long CacheRemovals { get; }
        /// <summary>
        /// The total number of evictions from the cache. An eviction is a removal initiated by the cache itself
        /// to free up space. An eviction is not treated as a removal and does not appear in the removal counts.
        /// </summary>
        /// <returns>
        /// The number of evictions.
        /// </returns>
        long CacheEvictions { get; }
        /// <summary>
        /// The mean time to execute gets.
        /// </summary>
        /// <returns>
        /// The time in ms.
        /// </returns>
        float AverageGetTime { get; }
        /// <summary>
        /// The mean time to execute puts.
        /// </summary>
        /// <returns>
        /// The time in ms.
        /// </returns>
        float AveragePutTime { get; }
        /// <summary>
        /// The mean time to execute removes.
        /// </summary>
        /// <returns>
        /// The time in ms.
        /// </returns>
        float AverageRemoveTime { get; }
        /// <summary>
        /// The mean time to execute tx commit.
        /// </summary>
        /// <returns>
        /// The time in ms.
        /// </returns>
        float AverageTxCommitTime { get; }
        /// <summary>
        /// The mean time to execute tx rollbacks.
        /// </summary>
        /// <returns>
        /// The time in ms.
        /// </returns>
        float AverageTxRollbackTime { get; }
        /// <summary>
        /// Gets total number of transaction commits.
        /// </summary>
        /// <returns>
        /// Number of transaction commits.
        /// </returns>
        long CacheTxCommits { get; }
        /// <summary>
        /// Gets total number of transaction rollbacks.
        /// </summary>
        /// <returns>
        /// Number of transaction rollbacks.
        /// </returns>
        long CacheTxRollbacks { get; }
        /// <summary>
        /// Gets cache name.
        /// </summary>
        /// <returns>
        /// Cache name.
        /// </returns>
        string CacheName { get; }
        /// <summary>
        /// The total number of get requests to the off-heap memory.
        /// </summary>
        /// <returns>
        /// The number of gets.
        /// </returns>
        long OffHeapGets { get; }
        /// <summary>
        /// The total number of put requests to the off-heap memory.
        /// </summary>
        /// <returns>
        /// The number of puts.
        /// </returns>
        long OffHeapPuts { get; }
        /// <summary>
        /// The total number of removals from the off-heap memory. This does not include evictions.
        /// </summary>
        /// <returns>
        /// The number of removals.
        /// </returns>
        long OffHeapRemovals { get; }
        /// <summary>
        /// The total number of evictions from the off-heap memory.
        /// </summary>
        /// <returns>
        /// The number of evictions.
        /// </returns>
        long OffHeapEvictions { get; }
        /// <summary>
        /// The number of get requests that were satisfied by the off-heap memory.
        /// </summary>
        /// <returns>
        /// The off-heap hits number.
        /// </returns>
        long OffHeapHits { get; }
        /// <summary>
        /// Gets the percentage of hits on off-heap memory.
        /// </summary>
        /// <returns>
        /// The percentage of hits on off-heap memory.
        /// </returns>
        float OffHeapHitPercentage { get; }
        /// <summary>
        /// A miss is a get request that is not satisfied by off-heap memory.
        /// </summary>
        /// <returns>
        /// The off-heap misses number.
        /// </returns>
        long OffHeapMisses { get; }
        /// <summary>
        /// Gets the percentage of misses on off-heap memory.
        /// </summary>
        /// <returns>
        /// The percentage of misses on off-heap memory.
        /// </returns>
        float OffHeapMissPercentage { get; }
        /// <summary>
        /// Gets number of entries stored in off-heap memory.
        /// </summary>
        /// <returns>
        /// Number of entries stored in off-heap memory.
        /// </returns>
        long OffHeapEntriesCount { get; }
        /// <summary>
        /// Gets the number of primary entries stored in off-heap memory.
        /// </summary>
        /// <returns>
        /// Number of primary entries stored in off-heap memory.
        /// </returns>
        long OffHeapPrimaryEntriesCount { get; }
        /// <summary>
        /// Gets number of backup entries stored in off-heap memory.
        /// </summary>
        /// <returns>
        /// Number of backup entries stored in off-heap memory.
        /// </returns>
        long OffHeapBackupEntriesCount { get; }
        /// <summary>
        /// Gets memory size allocated in off-heap.
        /// </summary>
        /// <returns>
        /// Memory size allocated in off-heap.
        /// </returns>
        long OffHeapAllocatedSize { get; }
        /// <summary>
        /// Gets number of non-null values in the cache.
        /// </summary>
        /// <returns>
        /// Number of non-null values in the cache.
        /// </returns>
        int Size { get; }
        /// <summary>
        /// Gets number of non-null values in the cache.
        /// </summary>
        /// <returns>
        /// Number of non-null values in the cache.
        /// </returns>
        long CacheSize { get; }
        /// <summary>
        /// Gets number of keys in the cache, possibly with null values.
        /// </summary>
        /// <returns>
        /// Number of keys in the cache.
        /// </returns>
        int KeySize { get; }
        /// <summary>
        /// Returns true if this cache is empty.
        /// </summary>
        /// <returns>
        /// True if this cache is empty.
        /// </returns>
        bool IsEmpty { get; }
        /// <summary>
        /// Gets current size of evict queue used to batch up evictions.
        /// </summary>
        /// <returns>
        /// Current size of evict queue.
        /// </returns>
        int DhtEvictQueueCurrentSize { get; }
        /// <summary>
        /// Gets transaction per-thread map size.
        /// </summary>
        /// <returns>
        /// Thread map size.
        /// </returns>
        int TxThreadMapSize { get; }
        /// <summary>
        /// Gets transaction per-Xid map size.
        /// </summary>
        /// <returns>
        /// Transaction per-Xid map size.
        /// </returns>
        int TxXidMapSize { get; }
        /// <summary>
        /// Gets committed transaction queue size.
        /// </summary>
        /// <returns>
        /// Committed transaction queue size.
        /// </returns>
        int TxCommitQueueSize { get; }
        /// <summary>
        /// Gets prepared transaction queue size.
        /// </summary>
        /// <returns>
        /// Prepared transaction queue size.
        /// </returns>
        int TxPrepareQueueSize { get; }
        /// <summary>
        /// Gets start version counts map size.
        /// </summary>
        /// <returns>
        /// Start version counts map size.
        /// </returns>
        int TxStartVersionCountsSize { get; }
        /// <summary>
        /// Gets number of cached committed transaction IDs.
        /// </summary>
        /// <returns>
        /// Number of cached committed transaction IDs.
        /// </returns>
        int TxCommittedVersionsSize { get; }
        /// <summary>
        /// Gets number of cached rolled back transaction IDs.
        /// </summary>
        /// <returns>
        /// Number of cached rolled back transaction IDs.
        /// </returns>
        int TxRolledbackVersionsSize { get; }
        /// <summary>
        /// Gets transaction DHT per-thread map size.
        /// </summary>
        /// <returns>
        /// DHT thread map size.
        /// </returns>
        int TxDhtThreadMapSize { get; }
        /// <summary>
        /// Gets transaction DHT per-Xid map size.
        /// </summary>
        /// <returns>
        /// Transaction DHT per-Xid map size.
        /// </returns>
        int TxDhtXidMapSize { get; }
        /// <summary>
        /// Gets committed DHT transaction queue size.
        /// </summary>
        /// <returns>
        /// Committed DHT transaction queue size.
        /// </returns>
        int TxDhtCommitQueueSize { get; }
        /// <summary>
        /// Gets prepared DHT transaction queue size.
        /// </summary>
        /// <returns>
        /// Prepared DHT transaction queue size.
        /// </returns>
        int TxDhtPrepareQueueSize { get; }
        /// <summary>
        /// Gets DHT start version counts map size.
        /// </summary>
        /// <returns>
        /// DHT start version counts map size.
        /// </returns>
        int TxDhtStartVersionCountsSize { get; }
        /// <summary>
        /// Gets number of cached committed DHT transaction IDs.
        /// </summary>
        /// <returns>
        /// Number of cached committed DHT transaction IDs.
        /// </returns>
        int TxDhtCommittedVersionsSize { get; }
        /// <summary>
        /// Gets number of cached rolled back DHT transaction IDs.
        /// </summary>
        /// <returns>
        /// Number of cached rolled back DHT transaction IDs.
        /// </returns>
        int TxDhtRolledbackVersionsSize { get; }
        /// <summary>
        /// Returns true if write-behind is enabled.
        /// </summary>
        /// <returns>
        /// True if write-behind is enabled.
        /// </returns>
        bool IsWriteBehindEnabled { get; }
        /// <summary>
        /// Gets the maximum size of the write-behind buffer. When the count of unique keys in write buffer exceeds
        /// this value, the buffer is scheduled for write to the underlying store.
        /// <para />
        /// If this value is 0, then flush is performed only on time-elapsing basis.
        /// </summary>
        /// <returns>
        /// Buffer size that triggers flush procedure.
        /// </returns>
        int WriteBehindFlushSize { get; }
        /// <summary>
        /// Gets the number of flush threads that will perform store update operations.
        /// </summary>
        /// <returns>
        /// Count of worker threads.
        /// </returns>
        int WriteBehindFlushThreadCount { get; }
        /// <summary>
        /// Gets the cache flush frequency. All pending operations on the underlying store will be performed
        /// within time interval not less then this value.
        /// <para /> If this value is 0, then flush is performed only when buffer size exceeds flush size.
        /// </summary>
        /// <returns>
        /// Flush frequency in milliseconds.
        /// </returns>
        long WriteBehindFlushFrequency { get; }
        /// <summary>
        /// Gets the maximum count of similar (put or remove) operations that can be grouped to a single batch.
        /// </summary>
        /// <returns>
        /// Maximum size of batch.
        /// </returns>
        int WriteBehindStoreBatchSize { get; }
        /// <summary>
        /// Gets count of write buffer overflow events since initialization.
        /// Each overflow event causes the ongoing flush operation to be performed synchronously.
        /// </summary>
        /// <returns>
        /// Count of cache overflow events since start.
        /// </returns>
        int WriteBehindTotalCriticalOverflowCount { get; }
        /// <summary>
        /// Gets count of write buffer overflow events in progress at the moment.
        /// Each overflow event causes the ongoing flush operation to be performed synchronously.
        /// </summary>
        /// <returns>
        /// Count of write buffer overflow events in progress.
        /// </returns>
        int WriteBehindCriticalOverflowCount { get; }
        /// <summary>
        /// Gets count of cache entries that are in a store-retry state.
        /// An entry is assigned a store-retry state when underlying store failed due some reason
        /// and cache has enough space to retain this entry till the next try.
        /// </summary>
        /// <returns>
        /// Count of entries in store-retry state.
        /// </returns>
        int WriteBehindErrorRetryCount { get; }
        /// <summary>
        /// Gets count of entries that were processed by the write-behind store
        /// and have not been flushed to the underlying store yet.
        /// </summary>
        /// <returns>
        /// Total count of entries in cache store internal buffer.
        /// </returns>
        int WriteBehindBufferSize { get; }
        /// <summary>
        /// Determines the required type of keys for this cache, if any.
        /// </summary>
        /// <returns>
        /// The fully qualified class name of the key type, or "java.lang.Object" if the type is undefined.
        /// </returns>
        string KeyType { get; }
        /// <summary>
        /// Determines the required type of values for this cache, if any.
        /// </summary>
        /// <returns>
        /// The fully qualified class name of the value type, or "java.lang.Object" if the type is undefined.
        /// </returns>
        string ValueType { get; }
        /// <summary>
        /// Whether storeByValue true or storeByReference false. When true, both keys and values are stored by value.
        /// <para />
        /// When false, both keys and values are stored by reference. Caches stored by reference are capable of
        /// mutation by any threads holding the reference.
        /// The effects are:
        /// - if the key is mutated, then the key may not be retrievable or removable
        /// - if the value is mutated, then all threads in the JVM can potentially observe those mutations, subject
        /// to the normal Java Memory Model rules.
        /// Storage by reference only applies to the local heap.
        /// If an entry is moved off heap it will need to be transformed into a representation.
        /// Any mutations that occur after transformation may not be reflected in the cache.
        /// <para />
        /// When a cache is storeByValue, any mutation to the key or value does not affect the key or value
        /// stored in the cache.
        /// <para />
        /// The default value is true.
        /// </summary>
        /// <returns>
        /// True if the cache is store by value.
        /// </returns>
        bool IsStoreByValue { get; }
        /// <summary>
        /// Checks whether statistics collection is enabled in this cache.
        /// <para />
        /// The default value is false.
        /// </summary>
        /// <returns>
        /// True if statistics collection is enabled.
        /// </returns>
        bool IsStatisticsEnabled { get; }
        /// <summary>
        /// Checks whether management is enabled on this cache.
        /// <para />
        /// The default value is false.
        /// </summary>
        /// <returns>
        /// True if management is enabled.
        /// </returns>
        bool IsManagementEnabled { get; }
        /// <summary>
        /// Determines if a cache should operate in read-through mode.
        /// <para />
        /// The default value is false.
        /// </summary>
        /// <returns>
        /// True when a cache is in "read-through" mode.
        /// </returns>
        bool IsReadThrough { get; }
        /// <summary>
        /// Determines if a cache should operate in "write-through" mode.
        /// <para />
        /// Will appropriately cause the configured CacheWriter to be invoked.
        /// <para />
        /// The default value is false.
        /// </summary>
        /// <returns>
        /// True when a cache is in "write-through" mode.
        /// </returns>
        bool IsWriteThrough { get; }
        /// <summary>
        /// Checks whether cache topology is valid for read operations.
        /// </summary>
        /// <returns>
        /// True when cache topology is valid for reading.
        /// </returns>
        bool IsValidForReading { get; }
        /// <summary>
        /// Checks whether cache topology is valid for write operations.
        /// </summary>
        /// <returns>
        /// True when cache topology is valid for writing.
        /// </returns>
        bool IsValidForWriting { get; }
        /// <summary>
        /// Gets total number of partitions on current node.
        /// </summary>
        /// <returns>
        /// Total number of partitions on current node.
        /// </returns>
        int TotalPartitionsCount { get; }
        /// <summary>
        /// Gets number of currently rebalancing partitions on current node.
        /// </summary>
        /// <returns>
        /// Number of currently rebalancing partitions on current node.
        /// </returns>
        int RebalancingPartitionsCount { get; }
        /// <summary>
        /// Gets estimated number of keys to be rebalanced on current node.
        /// </summary>
        /// <returns>
        /// Estimated number of keys to be rebalanced on current node.
        /// </returns>
        long KeysToRebalanceLeft { get; }
        /// <summary>
        /// Gets estimated rebalancing speed in keys.
        /// </summary>
        /// <returns>
        /// Estimated rebalancing speed in keys.
        /// </returns>
        long RebalancingKeysRate { get; }
        /// <summary>
        /// Gets estimated rebalancing speed in bytes.
        /// </summary>
        /// <returns>
        /// Estimated rebalancing speed in bytes.
        /// </returns>
        long RebalancingBytesRate { get; }
        /// <summary>
        /// Gets the number of cache entries in heap memory, including entries held by active transactions,
        /// entries in onheap cache and near entries.
        /// </summary>
        /// <returns>
        /// Number of entries in heap memory.
        /// </returns>
        long HeapEntriesCount { get; }
        /// <summary>
        /// Gets estimated rebalancing finish time.
        /// </summary>
        /// <returns>
        /// Estimated rebalancing finish time.
        /// </returns>
        long EstimatedRebalancingFinishTime { get; }
        /// <summary>
        /// Gets rebalancing start time.
        /// </summary>
        /// <returns>
        /// Rebalancing start time.
        /// </returns>
        long RebalancingStartTime { get; }
        /// <summary>
        /// Gets number of partitions that need to be cleared before actual rebalance start.
        /// </summary>
        /// <returns>
        /// Number of clearing partitions for rebalance.
        /// </returns>
        long RebalanceClearingPartitionsLeft { get; }
        /// <summary>
        /// Gets number of already rebalanced keys.
        /// </summary>
        /// <returns>
        /// Number of already rebalanced keys.
        /// </returns>
        long RebalancedKeys { get; }
        /// <summary>
        /// Gets number of estimated keys to rebalance.
        /// </summary>
        /// <returns>
        /// Number of estimated keys to rebalance.
        /// </returns>
        long EstimatedRebalancingKeys { get; }
    }
}
| |
using System;
using System.Collections.Generic;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using Microsoft.Xna.Framework.Net;
using Microsoft.Xna.Framework.Storage;
using ControlLibrary.MKI062V2;
using System.Threading;
using RPY;
namespace AngleEstimationApp
{
/// <summary>
/// This is the main type for your game
/// </summary>
public class Game : Microsoft.Xna.Framework.Game
{
    private string COMport;
    INEMO2_Device dev = new INEMO2_Device();
    GraphicsDeviceManager graphics;
    SpriteBatch spriteBatch;

    // 3D transforms used to render the orientation cube.
    Matrix worldMatrix;
    Matrix cameraMatrix;
    Matrix projectionMatrix;
    BasicEffect cubeEffect;
    BasicShape cube = new BasicShape(new Vector3(7, 5, 1), new Vector3(0, 0, 0));

    // Orientation estimators; only the one selected by 'alg' is ever instantiated.
    AHRSalgorithm AHRS;
    KalmanFilter KalmanFilter;
    ComplementaryFilter ComplementaryFilter;

    // Extra rotation used to re-zero the displayed frame (captured on left click).
    Quaternion AuxFrame = new Quaternion(0, 0, 0, 1);
    Texture2D background;
    Rectangle mainFrame;
    int alg;                 // filter selector: 1 = complementary, 2 = AHRS, 3 = Kalman
    AcquisitionThread acq;   // created in Initialize(); null before that
    Thread workerThread;
    public double dt;        // sample period in seconds (1 / param.freq)

    /// <summary>
    /// Creates the game and instantiates the orientation filter selected by <paramref name="alg"/>.
    /// </summary>
    /// <param name="param">Acquisition/filter parameters (COM port number, frequency, tuning values).</param>
    /// <param name="alg">Filter selector: 1 = complementary filter, 2 = AHRS, 3 = Kalman filter.</param>
    public Game(Parameters param, int alg)
    {
        graphics = new GraphicsDeviceManager(this);
        Content.RootDirectory = "Content";
        this.IsMouseVisible = true;
        this.alg = alg;
        dt = 1.0 / param.freq;
        this.COMport = "PL=PL_001{PN=COM" + param.porta + ", SENDMODE=B}";
        if (alg == 1) //Complementary filter
            ComplementaryFilter = new ComplementaryFilter(param);
        if (alg == 2) //AHRS
        {
            AHRS = new AHRSalgorithm();
            AHRS.setParamaters(param.AHRSpar1, param.AHRSpar2, param.freq);
        }
        if (alg == 3) //Kalman filter
            KalmanFilter = new KalmanFilter(param);
        this.Exiting += new EventHandler(Game1_Exiting);
    }

    // Disconnect from the device and terminate the process when the game exits.
    void Game1_Exiting(object sender, EventArgs e)
    {
        // BUGFIX: acq is only created in Initialize(); guard against an exit that
        // happens before initialization to avoid a NullReferenceException.
        if (acq != null)
            acq.Disconnect();
        Environment.Exit(0);
    }

    /// <summary>
    /// Allows the game to perform any initialization
    /// it needs to before starting to run.
    /// Sets up the camera/projection, connects to the iNEMO device and
    /// starts the acquisition worker thread.
    /// </summary>
    protected override void Initialize()
    {
        base.Initialize();
        this.BeginRun();
        initializeWorld();
        acq = new AcquisitionThread(COMport, dev, this); //connette iNEMO e prepara per acquisizione
        workerThread = new Thread(acq.DoWork);
        workerThread.Start();
        //imutag = new IMUtagv1p0serial(IMUtagCOMport, settings); //connect the iNEMO
        //imutag.packetReceived += new IMUtagv1p0serial.onDataPacketReceived(imutag_packetReceived);
    }

    /// <summary>
    /// LoadContent will be called once
    /// per game and is the place to load
    /// all of your content.
    /// </summary>
    protected override void LoadContent()
    {
        // Create a new SpriteBatch, which can be used to draw textures.
        spriteBatch = new SpriteBatch(GraphicsDevice);
        // Load the background content.
        background = Content.Load<Texture2D>("Textures\\myback");
        // Set the rectangle parameters.
        mainFrame = new Rectangle(0, 0, GraphicsDevice.Viewport.Width, GraphicsDevice.Viewport.Height);
        cube.shapeTexture = Content.Load<Texture2D>("Textures\\mytext");
    }

    /// <summary>
    /// UnloadContent will be called once per game
    /// and is the place to unload all content.
    /// </summary>
    protected override void UnloadContent()
    {
        // TODO: Unload any non ContentManager content here
    }

    /// <summary>
    /// Updates the world matrix from the currently selected filter and handles input:
    /// Back button exits, right click toggles fullscreen, left click (AHRS/Kalman)
    /// captures the current orientation as the new reference frame.
    /// </summary>
    /// <param name="gameTime">
    /// Provides a snapshot of timing values.</param>
    protected override void Update(GameTime gameTime)
    {
        // Allows the game to exit
        if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed) this.Exit();
        if (Mouse.GetState().RightButton == ButtonState.Pressed) graphics.ToggleFullScreen();
        if (alg == 1) //Complementary filter
        {
            double[] anglesFiltered = ComplementaryFilter.getAnglesFiltered();
            Matrix m = Matrix.CreateRotationZ((float)(anglesFiltered[2] * Math.PI / 180));
            Matrix m1 = Matrix.CreateRotationY(-(float)(anglesFiltered[1] * Math.PI / 180));
            Matrix m2 = Matrix.CreateRotationX((float)(anglesFiltered[0] * Math.PI / 180));
            Matrix m3 = m * m1 * m2;
            // BUGFIX: standard rotation-matrix -> quaternion conversion uses the plain
            // trace (1 + M11 + M22 + M33), not the squared diagonal elements.
            double qw = Math.Sqrt(1 + m3.M11 + m3.M22 + m3.M33) / 2;
            double q1 = (m3.M32 - m3.M23) / (4 * qw);
            double q2 = (m3.M13 - m3.M31) / (4 * qw);
            double q3 = (m3.M21 - m3.M12) / (4 * qw);
            worldMatrix = Matrix.CreateFromQuaternion(AuxFrame * new Quaternion((float)q1, (float)q2, (float)q3, (float)qw));
            //worldMatrix = Matrix.CreateFromYawPitchRoll((float)(anglesFiltered[1] * Math.PI / 180), -(float)(anglesFiltered[0] * Math.PI / 180), -(float)(anglesFiltered[2] * Math.PI / 180));
        }
        if (alg == 2) //AHRS
            if (Mouse.GetState().LeftButton == ButtonState.Pressed) AuxFrame = new Quaternion((float)-AHRS.SEq_2, (float)-AHRS.SEq_3, (float)-AHRS.SEq_4, (float)AHRS.SEq_1);
        if (alg == 3) //Kalman filter
            if (Mouse.GetState().LeftButton == ButtonState.Pressed) AuxFrame = new Quaternion((float)-KalmanFilter.q_filt2, (float)-KalmanFilter.q_filt3, (float)-KalmanFilter.q_filt4, (float)KalmanFilter.q_filt1);
        base.Update(gameTime);
    }

    /// <summary>
    /// Draws the background sprite and the textured cube oriented by the active filter.
    /// </summary>
    /// <param name="gameTime">
    /// Provides a snapshot of timing values.</param>
    protected override void Draw(GameTime gameTime)
    {
        graphics.GraphicsDevice.Clear(Color.CornflowerBlue);
        // Draw the background.
        // Start building the sprite.
        spriteBatch.Begin(SpriteBlendMode.AlphaBlend);
        // Draw the background.
        spriteBatch.Draw(background, mainFrame, Color.White);
        // End building the sprite.
        spriteBatch.End();
        cubeEffect.Begin();
        if (alg == 1) //Complementary filter
        {
            double[] anglesFiltered = ComplementaryFilter.getAnglesFiltered();
            //Console.Out.Write(anglesFiltered[0] + " " + anglesFiltered[1] + " " + anglesFiltered[2]+"\n");
            worldMatrix = Matrix.CreateFromYawPitchRoll((float)(anglesFiltered[1] * Math.PI / 180), -(float)(anglesFiltered[0] * Math.PI / 180), -(float)(anglesFiltered[2] * Math.PI / 180));
        }
        if (alg == 2) //AHRS
            worldMatrix = Matrix.CreateFromQuaternion(AuxFrame * new Quaternion((float)AHRS.SEq_2, (float)AHRS.SEq_3, (float)AHRS.SEq_4, (float)AHRS.SEq_1));
        if (alg == 3) //Kalman filter
            // NOTE(review): q_filt2/q_filt3 are negated here but q_filt4 is not, unlike the
            // AuxFrame capture in Update() which negates all three — confirm this asymmetry
            // is intentional (axis convention) and not a sign typo.
            worldMatrix = Matrix.CreateFromQuaternion(AuxFrame * new Quaternion((float)-KalmanFilter.q_filt2, (float)-KalmanFilter.q_filt3, (float)KalmanFilter.q_filt4, (float)KalmanFilter.q_filt1));
        cubeEffect.World = worldMatrix;
        foreach (EffectPass pass in cubeEffect.CurrentTechnique.Passes)
        {
            pass.Begin();
            cubeEffect.Texture = cube.shapeTexture;
            cube.RenderShape(GraphicsDevice);
            pass.End();
        }
        cubeEffect.End();
        base.Draw(gameTime);
    }

    /// <summary>
    /// Sets up the camera, projection and basic effect used to render the cube.
    /// </summary>
    public void initializeWorld()
    {
        cameraMatrix = Matrix.CreateLookAt(new Vector3(0, -30, 1), new Vector3(0, 0, 0), new Vector3(0, 1, 0));
        // BUGFIX: Width and Height are ints, so "Width / Height" was integer division
        // (aspect ratio truncated to 0 or 1, distorting the projection). Cast to float.
        projectionMatrix = Matrix.CreatePerspectiveFieldOfView(MathHelper.PiOver4, (float)Window.ClientBounds.Width / Window.ClientBounds.Height, 1.0f, 50.0f);
        worldMatrix = Matrix.Identity;
        cubeEffect = new BasicEffect(GraphicsDevice, null);
        cubeEffect.World = worldMatrix;
        cubeEffect.View = cameraMatrix;
        cubeEffect.Projection = projectionMatrix;
        cubeEffect.TextureEnabled = true;
    }

    /// <summary>
    /// Callback invoked by the acquisition thread for each sensor frame;
    /// feeds the raw gyro/accelerometer/magnetometer samples to the active filter.
    /// </summary>
    /// <param name="data">One frame of iNEMO sensor data.</param>
    public void PacketReceived(INEMO2_FrameData data)
    {
        if (alg == 1) //Complementary filter
            ComplementaryFilter.complementaryFiltering(data.Gyroscope.X, data.Gyroscope.Y, data.Gyroscope.Z,
                data.Accelometer.X, data.Accelometer.Y, data.Accelometer.Z,
                data.Magnetometer.X, data.Magnetometer.Y, data.Magnetometer.Z);
        if (alg == 2) //AHRS
            AHRS.update(-data.Gyroscope.X, -data.Gyroscope.Y, -data.Gyroscope.Z,
                -data.Accelometer.X, data.Accelometer.Y, -data.Accelometer.Z,
                data.Magnetometer.X, data.Magnetometer.Y, -data.Magnetometer.Z); // update filter with sensor data
        if (alg == 3) //Kalman filter
            KalmanFilter.filter_step(data.Gyroscope.X, data.Gyroscope.Y, data.Gyroscope.Z,
                data.Accelometer.X, data.Accelometer.Y, data.Accelometer.Z,
                data.Magnetometer.X, data.Magnetometer.Y, data.Magnetometer.Z);
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Composition;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.DocumentationComments;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.SignatureHelp;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp.SignatureHelp
{
[ExportSignatureHelpProvider("GenericNameSignatureHelpProvider", LanguageNames.CSharp), Shared]
internal partial class GenericNameSignatureHelpProvider : AbstractCSharpSignatureHelpProvider
{
public override bool IsTriggerCharacter(char ch)
{
    // Generic signature help starts on "<" (opens the type argument list)
    // or "," (separates type arguments within it).
    switch (ch)
    {
        case '<':
        case ',':
            return true;
        default:
            return false;
    }
}
public override bool IsRetriggerCharacter(char ch)
{
    // ">" closes the type argument list, ending the signature help session.
    switch (ch)
    {
        case '>':
            return true;
        default:
            return false;
    }
}
// Tries to locate the generic name the caret is inside of, returning its
// identifier token and the "<" that opens its type argument list.
protected virtual bool TryGetGenericIdentifier(
    SyntaxNode root, int position,
    ISyntaxFactsService syntaxFacts,
    SignatureHelpTriggerReason triggerReason,
    CancellationToken cancellationToken,
    out SyntaxToken genericIdentifier, out SyntaxToken lessThanToken)
{
    // Default the out parameters; they are only populated on success.
    genericIdentifier = default(SyntaxToken);
    lessThanToken = default(SyntaxToken);

    GenericNameSyntax name;
    if (!CommonSignatureHelpUtilities.TryGetSyntax(root, position, syntaxFacts, triggerReason, IsTriggerToken, IsArgumentListToken, cancellationToken, out name))
    {
        return false;
    }

    genericIdentifier = name.Identifier;
    lessThanToken = name.TypeArgumentList.LessThanToken;
    return true;
}
// A token triggers generic signature help when it is a real (non-None) single-character
// trigger token sitting directly inside a generic name's type argument list.
private bool IsTriggerToken(SyntaxToken token)
{
    if (token.IsKind(SyntaxKind.None))
    {
        return false;
    }

    if (token.ValueText.Length != 1 || !IsTriggerCharacter(token.ValueText[0]))
    {
        return false;
    }

    return token.Parent is TypeArgumentListSyntax &&
        token.Parent.Parent is GenericNameSyntax;
}
// True when the token lies within the generic name's type argument list span,
// excluding the closing ">" (which ends the session rather than belonging to it).
private bool IsArgumentListToken(GenericNameSyntax node, SyntaxToken token)
{
    var typeArgumentList = node.TypeArgumentList;
    if (typeArgumentList == null)
    {
        return false;
    }

    return typeArgumentList.Span.Contains(token.SpanStart) &&
        token != typeArgumentList.GreaterThanToken;
}
protected override async Task<SignatureHelpItems> GetItemsWorkerAsync(Document document, int position, SignatureHelpTriggerInfo triggerInfo, CancellationToken cancellationToken)
{
var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
SyntaxToken genericIdentifier, lessThanToken;
if (!TryGetGenericIdentifier(root, position, document.GetLanguageService<ISyntaxFactsService>(), triggerInfo.TriggerReason, cancellationToken,
out genericIdentifier, out lessThanToken))
{
return null;
}
var simpleName = genericIdentifier.Parent as SimpleNameSyntax;
if (simpleName == null)
{
return null;
}
var beforeDotExpression = simpleName.IsRightSideOfDot() ? simpleName.GetLeftSideOfDot() : null;
var semanticModel = await document.GetSemanticModelForNodeAsync(simpleName, cancellationToken).ConfigureAwait(false);
var leftSymbol = beforeDotExpression == null
? null
: semanticModel.GetSymbolInfo(beforeDotExpression, cancellationToken).GetAnySymbol() as INamespaceOrTypeSymbol;
var leftType = beforeDotExpression == null
? null
: semanticModel.GetTypeInfo(beforeDotExpression, cancellationToken).Type as INamespaceOrTypeSymbol;
var leftContainer = leftSymbol ?? leftType;
var isBaseAccess = beforeDotExpression is BaseExpressionSyntax;
var namespacesOrTypesOnly = SyntaxFacts.IsInNamespaceOrTypeContext(simpleName);
var includeExtensions = leftSymbol == null && leftType != null;
var name = genericIdentifier.ValueText;
var symbols = isBaseAccess
? semanticModel.LookupBaseMembers(position, name)
: namespacesOrTypesOnly
? semanticModel.LookupNamespacesAndTypes(position, leftContainer, name)
: semanticModel.LookupSymbols(position, leftContainer, name, includeExtensions);
var within = semanticModel.GetEnclosingNamedTypeOrAssembly(position, cancellationToken);
if (within == null)
{
return null;
}
var symbolDisplayService = document.Project.LanguageServices.GetService<ISymbolDisplayService>();
var accessibleSymbols =
symbols.WhereAsArray(s => s.GetArity() > 0)
.WhereAsArray(s => s is INamedTypeSymbol || s is IMethodSymbol)
.FilterToVisibleAndBrowsableSymbols(document.ShouldHideAdvancedMembers(), semanticModel.Compilation)
.Sort(symbolDisplayService, semanticModel, genericIdentifier.SpanStart);
if (!accessibleSymbols.Any())
{
return null;
}
var anonymousTypeDisplayService = document.Project.LanguageServices.GetService<IAnonymousTypeDisplayService>();
var documentationCommentFormattingService = document.Project.LanguageServices.GetService<IDocumentationCommentFormattingService>();
var textSpan = GetTextSpan(genericIdentifier, lessThanToken);
var syntaxFacts = document.GetLanguageService<ISyntaxFactsService>();
return CreateSignatureHelpItems(accessibleSymbols.Select(s =>
Convert(s, lessThanToken, semanticModel, symbolDisplayService, anonymousTypeDisplayService, documentationCommentFormattingService, cancellationToken)).ToList(),
textSpan, GetCurrentArgumentState(root, position, syntaxFacts, textSpan, cancellationToken));
}
public override SignatureHelpState GetCurrentArgumentState(SyntaxNode root, int position, ISyntaxFactsService syntaxFacts, TextSpan currentSpan, CancellationToken cancellationToken)
{
SyntaxToken genericIdentifier, lessThanToken;
if (!TryGetGenericIdentifier(root, position, syntaxFacts, SignatureHelpTriggerReason.InvokeSignatureHelpCommand, cancellationToken,
out genericIdentifier, out lessThanToken))
{
return null;
}
GenericNameSyntax genericName;
if (genericIdentifier.TryParseGenericName(cancellationToken, out genericName))
{
// Because we synthesized the generic name, it will have an index starting at 0
// instead of at the actual position it's at in the text. Because of this, we need to
// offset the position we are checking accordingly.
var offset = genericIdentifier.SpanStart - genericName.SpanStart;
position -= offset;
return SignatureHelpUtilities.GetSignatureHelpState(genericName.TypeArgumentList, position);
}
return null;
}
protected virtual TextSpan GetTextSpan(SyntaxToken genericIdentifier, SyntaxToken lessThanToken)
{
Contract.ThrowIfFalse(lessThanToken.Parent is TypeArgumentListSyntax && lessThanToken.Parent.Parent is GenericNameSyntax);
return SignatureHelpUtilities.GetSignatureHelpSpan(((GenericNameSyntax)lessThanToken.Parent.Parent).TypeArgumentList);
}
private SignatureHelpItem Convert(
ISymbol symbol,
SyntaxToken lessThanToken,
SemanticModel semanticModel,
ISymbolDisplayService symbolDisplayService,
IAnonymousTypeDisplayService anonymousTypeDisplayService,
IDocumentationCommentFormattingService documentationCommentFormattingService,
CancellationToken cancellationToken)
{
var position = lessThanToken.SpanStart;
SignatureHelpItem item;
if (symbol is INamedTypeSymbol)
{
var namedType = (INamedTypeSymbol)symbol;
item = CreateItem(
symbol, semanticModel, position,
symbolDisplayService, anonymousTypeDisplayService,
false,
symbol.GetDocumentationPartsFactory(semanticModel, position, documentationCommentFormattingService),
GetPreambleParts(namedType, semanticModel, position),
GetSeparatorParts(),
GetPostambleParts(namedType),
namedType.TypeParameters.Select(p => Convert(p, semanticModel, position, documentationCommentFormattingService, cancellationToken)).ToList());
}
else
{
var method = (IMethodSymbol)symbol;
item = CreateItem(
symbol, semanticModel, position,
symbolDisplayService, anonymousTypeDisplayService,
false,
c => symbol.GetDocumentationParts(semanticModel, position, documentationCommentFormattingService, c).Concat(GetAwaitableUsage(method, semanticModel, position)),
GetPreambleParts(method, semanticModel, position),
GetSeparatorParts(),
GetPostambleParts(method, semanticModel, position),
method.TypeParameters.Select(p => Convert(p, semanticModel, position, documentationCommentFormattingService, cancellationToken)).ToList());
}
return item;
}
private static readonly SymbolDisplayFormat s_minimallyQualifiedFormat =
SymbolDisplayFormat.MinimallyQualifiedFormat.WithGenericsOptions(
SymbolDisplayFormat.MinimallyQualifiedFormat.GenericsOptions | SymbolDisplayGenericsOptions.IncludeVariance);
private SignatureHelpSymbolParameter Convert(
ITypeParameterSymbol parameter,
SemanticModel semanticModel,
int position,
IDocumentationCommentFormattingService formatter,
CancellationToken cancellationToken)
{
return new SignatureHelpSymbolParameter(
parameter.Name,
isOptional: false,
documentationFactory: parameter.GetDocumentationPartsFactory(semanticModel, position, formatter),
displayParts: parameter.ToMinimalDisplayParts(semanticModel, position, s_minimallyQualifiedFormat),
selectedDisplayParts: GetSelectedDisplayParts(parameter, semanticModel, position, cancellationToken));
}
private IList<SymbolDisplayPart> GetSelectedDisplayParts(
ITypeParameterSymbol typeParam,
SemanticModel semanticModel,
int position,
CancellationToken cancellationToken)
{
var parts = new List<SymbolDisplayPart>();
if (TypeParameterHasConstraints(typeParam))
{
parts.Add(Space());
parts.Add(Keyword(SyntaxKind.WhereKeyword));
parts.Add(Space());
parts.Add(new SymbolDisplayPart(SymbolDisplayPartKind.TypeParameterName, typeParam, typeParam.Name));
parts.Add(Space());
parts.Add(Punctuation(SyntaxKind.ColonToken));
parts.Add(Space());
bool needComma = false;
// class/struct constraint must be first
if (typeParam.HasReferenceTypeConstraint)
{
parts.Add(Keyword(SyntaxKind.ClassKeyword));
needComma = true;
}
else if (typeParam.HasValueTypeConstraint)
{
parts.Add(Keyword(SyntaxKind.StructKeyword));
needComma = true;
}
foreach (var baseType in typeParam.ConstraintTypes)
{
if (needComma)
{
parts.Add(Punctuation(SyntaxKind.CommaToken));
parts.Add(Space());
}
parts.AddRange(baseType.ToMinimalDisplayParts(semanticModel, position));
needComma = true;
}
// ctor constraint must be last
if (typeParam.HasConstructorConstraint)
{
if (needComma)
{
parts.Add(Punctuation(SyntaxKind.CommaToken));
parts.Add(Space());
}
parts.Add(Keyword(SyntaxKind.NewKeyword));
parts.Add(Punctuation(SyntaxKind.OpenParenToken));
parts.Add(Punctuation(SyntaxKind.CloseParenToken));
}
}
return parts;
}
private static bool TypeParameterHasConstraints(ITypeParameterSymbol typeParam)
{
return !typeParam.ConstraintTypes.IsDefaultOrEmpty || typeParam.HasConstructorConstraint ||
typeParam.HasReferenceTypeConstraint || typeParam.HasValueTypeConstraint;
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
//
//#define DEBUG_PHOTOMAIL
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
using System.Windows.Forms;
using mshtml;
using OpenLiveWriter.Mail;
using OpenLiveWriter.BlogClient;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.CoreServices.Diagnostics;
using OpenLiveWriter.Extensibility.BlogClient;
using OpenLiveWriter.HtmlEditor;
using OpenLiveWriter.HtmlParser.Parser;
using OpenLiveWriter.Interop.Com;
using OpenLiveWriter.Interop.Com.Ribbon;
using OpenLiveWriter.Interop.Windows;
using OpenLiveWriter.Localization;
using OpenLiveWriter.Mshtml;
using OpenLiveWriter.Mshtml.Mshtml_Interop;
using OpenLiveWriter.PostEditor.Autoreplace;
using OpenLiveWriter.PostEditor.ContentSources;
using OpenLiveWriter.PostEditor.PostHtmlEditing;
using IDropTarget = OpenLiveWriter.Interop.Com.IDropTarget;
namespace OpenLiveWriter.PostEditor
{
// COM-visible factory that hosts (e.g. mail clients) use to initialize the shared
// editing environment and create IContentEditor instances.
[ClassInterface(ClassInterfaceType.None)]
[Guid("FC51FC7A-9BB1-4472-86E4-34911D298922")]
[ComVisible(true)]
public class ContentEditorFactory : IContentEditorFactory
{
#region IContentEditorFactory Members
// Trace listener that forwards tracing to the host-supplied logger; created in
// Initialize, disposed in Shutdown.
private RedirectionLogger _logger;
// One-time environment setup: global editor options, application environment,
// content sources, and redirection of Trace output to the host's logger.
// Failures are traced, flushed, and rethrown so the host sees the error.
public void Initialize(string registrySettingsPath, IContentEditorLogger logger, IContentTarget contentTarget, ISettingsProvider settingsProvider)
{
try
{
GlobalEditorOptions.Init(contentTarget, settingsProvider);
HtmlEditorControl.AllowCachedEditor();
Assembly assembly = Assembly.GetExecutingAssembly();
ApplicationEnvironment.Initialize(assembly, Path.GetDirectoryName(assembly.Location), registrySettingsPath, contentTarget.ProductName);
ContentSourceManager.Initialize(false);
// Replace any default listeners with the host's logger (when provided).
Trace.Listeners.Clear();
if (logger != null)
{
_logger = new RedirectionLogger(logger);
Trace.Listeners.Add(_logger);
}
#if DEBUG
Trace.Listeners.Add(new DefaultTraceListener());
#endif
}
catch (Exception e)
{
Trace.Fail("Failed to initialize Shared Canvas: " + e);
Trace.Flush();
throw;
}
}
// Tears down cached editor state and trace redirection set up by Initialize.
public void Shutdown()
{
HtmlEditorControl.DisposeCachedEditor();
TempFileManager.Instance.Dispose();
Trace.Listeners.Clear();
if (_logger != null)
_logger.Dispose();
}
// Creates an editor with an empty new post.
public IContentEditor CreateEditor(IContentEditorSite contentEditorSite, IInternetSecurityManager internetSecurityManager, string wysiwygHtml, int dlControlFlags)
{
return new ContentEditorProxy(this, contentEditorSite, internetSecurityManager, wysiwygHtml, null, dlControlFlags);
}
// Creates an editor loaded from a previously saved draft file on disk.
public IContentEditor CreateEditorFromDraft(IContentEditorSite contentEditorSite, IInternetSecurityManager internetSecurityManager, string wysiwygHtml, string pathToDraftFile, int dlControlFlags)
{
return new ContentEditorProxy(this, contentEditorSite, internetSecurityManager, wysiwygHtml, null, pathToDraftFile, dlControlFlags);
}
// Creates an editor seeded from an existing IHTMLDocument2.
public IContentEditor CreateEditorFromHtmlDocument(IContentEditorSite contentEditorSite, IInternetSecurityManager internetSecurityManager, IHTMLDocument2 htmlDocument, HtmlInsertOptions options, int dlControlFlags)
{
return new ContentEditorProxy(this, contentEditorSite, internetSecurityManager, htmlDocument, options, dlControlFlags, null, null);
}
// Creates an editor from a moniker (mail scenario): resolves the moniker to HTML
// using the given codepage, enables RTL mode for RTL codepages, and substitutes a
// minimal empty document when the moniker yields no HTML.
public IContentEditor CreateEditorFromMoniker(IContentEditorSite contentEditorSite, IInternetSecurityManager internetSecurityManager, IMoniker moniker, uint codepage, HtmlInsertOptions options, string color, int dlControlFlags, string wpost)
{
codepage = EmailShim.GetCodepage(codepage);
string name;
string html = HTMLDocumentHelper.MonikerToString(moniker, codepage, out name);
if (CultureHelper.IsRtlCodepage(codepage))
{
EmailContentTarget target =
GlobalEditorOptions.ContentTarget as EmailContentTarget;
if (target != null)
{
target.EnableRtlMode();
}
}
if (string.IsNullOrEmpty(html))
html = "<html><body></body></html>";
html = EmailShim.GetContentHtml(name, html);
// Create a IHtmlDocument2 from the html which will then be loaded into the editor
IHTMLDocument2 htmlDocument;
htmlDocument = HTMLDocumentHelper.StringToHTMLDoc(html, name);
return new ContentEditorProxy(this, contentEditorSite, internetSecurityManager, htmlDocument, options, dlControlFlags, color, wpost);
}
#endregion
#region IContentEditorFactory Members
// Warm-up hook: exercises the expensive startup paths (culture, parser, a
// throwaway ContentEditor) so the first real editor creation is faster.
public void DoPreloadWork()
{
ContentEditorProxy.ApplyInstalledCulture();
SimpleHtmlParser.Create();
BlogClientHelper.FormatUrl("", "", "", "");
ContentEditor contentEditor = new ContentEditor(null, new Panel(), null, new BlogPostHtmlEditorControl.BlogPostHtmlEditorSecurityManager(), new ContentEditorProxy.ContentEditorTemplateStrategy(), MshtmlOptions.DEFAULT_DLCTL);
contentEditor.Dispose();
}
#endregion
}
/// <summary>
/// TraceListener that redirects Writer trace output to a host-supplied
/// IContentEditorLogger. Trace failures (category == ErrText.FailText) are
/// forwarded at the error level; everything else at the "blab" level.
/// </summary>
public class RedirectionLogger : TraceListener
{
    // Logging levels understood by the host-side logger.
    public enum ContentEditorLoggingLevel
    {
        Log_Error = 0,
        Log_Terse = 1,
        Log_Verbose = 2,
        Log_Blab = 4,
        Log_Always = 6
    };

    private IContentEditorLogger _logger;

    public RedirectionLogger(IContentEditorLogger logger)
    {
        _logger = logger;
    }

    // All Write/WriteLine variants funnel through WriteLine(message, category).
    public override void Write(string message)
    {
        WriteLine(message);
    }

    public override void WriteLine(string message)
    {
        WriteLine(message, null);
    }

    public override void WriteLine(string message, string category)
    {
        try
        {
            // Map the trace category onto the host logger's level.
            ContentEditorLoggingLevel level = category == ErrText.FailText
                ? ContentEditorLoggingLevel.Log_Error
                : ContentEditorLoggingLevel.Log_Blab;
            _logger.WriteLine(message, (int)level);
        }
        catch (Exception)
        {
            // Deliberately swallowed: IContentEditorLogger should not throw, but if
            // it does we must not call Debug/Trace here, since that would re-enter
            // this listener and could cause an infinite loop / stack overflow.
        }
    }

    public override void Write(string message, string category)
    {
        WriteLine(message, category);
    }
}
// Adapter exposing the internal ContentEditor through the COM-facing
// IContentEditor interface. Owns the hosting Panel, the main-frame adapter, the
// editing context, and the lifetime of the content editor site reference.
public class ContentEditorProxy : IContentEditor
{
private ContentEditor contentEditor;
private MainFrameWindowAdapter mainFrame;
private IBlogPostEditingContext context;
// Panel re-parented into the host window; the editor renders into it.
private Panel panel;
private ContentEditorAccountAdapter accountAdapter;
private IContentEditorSite _contentEditorSite;
// New empty post.
public ContentEditorProxy(ContentEditorFactory factory, IContentEditorSite contentEditorSite, IInternetSecurityManager internetSecurityManager, string wysiwygHTML, string previewHTML, int dlControlFlags)
{
ContentEditorProxyCore(factory, contentEditorSite, internetSecurityManager, wysiwygHTML, previewHTML, new BlogPostEditingContext(ContentEditorAccountAdapter.AccountId, new BlogPost()), new ContentEditorTemplateStrategy(), dlControlFlags, null);
}
// Post loaded from an existing draft file on disk.
public ContentEditorProxy(ContentEditorFactory factory, IContentEditorSite contentEditorSite, IInternetSecurityManager internetSecurityManager, string wysiwygHTML, string previewHTML, string pathToFile, int dlControlFlags)
{
ContentEditorProxyCore(factory, contentEditorSite, internetSecurityManager, wysiwygHTML, previewHTML, PostEditorFile.GetExisting(new FileInfo(pathToFile)).Load(false), new ContentEditorTemplateStrategy(), dlControlFlags, null);
}
// Seeded from an IHTMLDocument2: the document's body becomes the theme template
// (with "{post-body}" as placeholder) and the original body content is queued for
// insertion once the editor document completes. 'wpost' optionally names a saved
// post file whose editing context is reused (with its contents cleared).
public ContentEditorProxy(ContentEditorFactory factory, IContentEditorSite contentEditorSite, IInternetSecurityManager internetSecurityManager, IHTMLDocument2 htmlDocument, HtmlInsertOptions options, int dlControlFlags, string color, string wpost)
{
string content = htmlDocument.body.innerHTML;
htmlDocument.body.innerHTML = "{post-body}";
string wysiwygHTML = HTMLDocumentHelper.HTMLDocToString(htmlDocument);
BlogPost documentToBeLoaded = null;
IBlogPostEditingContext editingContext = null;
if (string.IsNullOrEmpty(wpost) || !File.Exists(wpost))
{
documentToBeLoaded = new BlogPost();
editingContext = new BlogPostEditingContext(ContentEditorAccountAdapter.AccountId,
documentToBeLoaded);
}
else
{
PostEditorFile wpostxFile = PostEditorFile.GetExisting(new FileInfo(wpost));
editingContext = wpostxFile.Load(false);
editingContext.BlogPost.Contents = "";
}
if (!string.IsNullOrEmpty(content))
delayedInsertOperations.Enqueue(new DelayedInsert(content, options));
ContentEditorProxyCore(factory, contentEditorSite, internetSecurityManager, wysiwygHTML, null, editingContext, new ContentEditorTemplateStrategy(), dlControlFlags, color);
}
// HTML + insert options queued until the editor's document is complete.
private class DelayedInsert
{
public readonly string Content;
public readonly HtmlInsertOptions Options;
public DelayedInsert(string content, HtmlInsertOptions options)
{
Content = content;
Options = options;
}
}
// Inserts queued before DocumentComplete; drained in the DocumentComplete handler.
private Queue<DelayedInsert> delayedInsertOperations = new Queue<DelayedInsert>();
/// <summary>
/// Initializes the IContentEditor.
/// </summary>
/// <param name="factory"></param>
/// <param name="contentEditorSite"></param>
/// <param name="internetSecurityManager"></param>
/// <param name="wysiwygHTML"></param>
/// <param name="previewHTML"></param>
/// <param name="newEditingContext"></param>
/// <param name="templateStrategy"></param>
/// <param name="dlControlFlags">
/// For Mail, these flags should always include DLCTL_DLIMAGES | DLCTL_VIDEOS | DLCTL_BGSOUNDS so that local
/// images, videos and sounds are loaded. To block external content, it should also include
/// DLCTL_PRAGMA_NO_CACHE | DLCTL_FORCEOFFLINE | DLCTL_NO_CLIENTPULL so that external images are not loaded
/// and are displayed as a red X instead.
/// </param>
/// <param name="color"></param>
private void ContentEditorProxyCore(ContentEditorFactory factory, IContentEditorSite contentEditorSite, IInternetSecurityManager internetSecurityManager, string wysiwygHTML, string previewHTML, IBlogPostEditingContext newEditingContext, BlogPostHtmlEditorControl.TemplateStrategy templateStrategy, int dlControlFlags, string color)
{
try
{
Debug.Assert(contentEditorSite is IUIFramework, "IContentEditorSite must also implement IUIFramework");
Debug.Assert(contentEditorSite is IDropTarget, "IContentEditorSite must also implement IDropTarget");
ApplyInstalledCulture();
this.factory = factory;
_wysiwygHTML = wysiwygHTML;
_previewHTML = previewHTML;
_contentEditorSite = contentEditorSite;
// Size the hosting panel from the host window (with a 200px floor) and
// re-parent it into the host's HWND.
IntPtr p = _contentEditorSite.GetWindowHandle();
WINDOWINFO info = new WINDOWINFO();
User32.GetWindowInfo(p, ref info);
panel = new Panel();
panel.Top = 0;
panel.Left = 0;
panel.Width = Math.Max(info.rcWindow.Width, 200);
panel.Height = Math.Max(info.rcWindow.Height, 200);
panel.CreateControl();
User32.SetParent(panel.Handle, p);
accountAdapter = new ContentEditorAccountAdapter();
mainFrame = new MainFrameWindowAdapter(p, panel, _contentEditorSite, accountAdapter.Id);
context = newEditingContext;
contentEditor = new ContentEditor(mainFrame, panel, mainFrame, internetSecurityManager, templateStrategy, dlControlFlags);
// Prevents asserts
contentEditor.DisableSpelling();
contentEditor.OnEditorAccountChanged(accountAdapter);
contentEditor.DocumentComplete += new EventHandler(blogPostHtmlEditor_DocumentComplete);
contentEditor.GotFocus += new EventHandler(contentEditor_GotFocus);
contentEditor.LostFocus += new EventHandler(contentEditor_LostFocus);
contentEditor.Initialize(context, accountAdapter, wysiwygHTML, previewHTML, false);
if (!string.IsNullOrEmpty(color))
{
contentEditor.IndentColor = color;
}
}
catch (Exception ex)
{
// Something went wrong, make sure we don't reuse a cached editor
HtmlEditorControl.DisposeCachedEditor();
Trace.Fail(ex.ToString());
Trace.Flush();
throw;
}
}
// Guards against SetFocus re-entering while we are notifying the host of a focus
// gain (see SetFocus).
private bool _inGotFocusHandler = false;
void contentEditor_GotFocus(object sender, EventArgs e)
{
_inGotFocusHandler = true;
_contentEditorSite.OnGotFocus();
_inGotFocusHandler = false;
}
void contentEditor_LostFocus(object sender, EventArgs e)
{
_contentEditorSite.OnLostFocus();
}
// Applies the UI culture configured via global editor options.
public static void ApplyInstalledCulture()
{
CultureHelper.ApplyUICulture(GlobalEditorOptions.GetSetting<string>(ContentEditorSetting.Language));
}
private bool _documentComplete = false;
// Once the editor document is ready: apply any deferred view-mode change, drain
// queued HTML inserts, then notify the host.
void blogPostHtmlEditor_DocumentComplete(object sender, EventArgs e)
{
_documentComplete = true;
if (_editMode != null && _editMode.HasValue)
{
EditingMode mode = _editMode.Value;
_editMode = null;
ChangeView(mode);
return;
}
while (delayedInsertOperations.Count > 0)
{
DelayedInsert insert = delayedInsertOperations.Dequeue();
InsertHtml(insert.Content, insert.Options);
}
_contentEditorSite.OnDocumentComplete();
}
#region IContentEditor Members
// Saves the current post to 'fileName'.
// NOTE(review): the preserveDirty parameter is not used by this implementation,
// and 'false' is always passed for the save flag — confirm this is intended.
public void Save(string fileName, bool preserveDirty)
{
contentEditor.SaveChanges(context.BlogPost, BlogPostSaveOptions.DefaultOptions);
context.LocalFile.SaveContentEditorFile(context, fileName, false);
}
public string Publish(IPublishOperation imageConverter)
{
return contentEditor.Publish(imageConverter);
}
// Produces the final publishable HTML document: publishes the body, applies the
// default font and direction wrappers, and merges it into the wysiwyg template.
public IHTMLDocument2 GetPublishDocument()
{
string body = contentEditor.Publish(null);
// Before we drop the body into the template, we wrap the whole thing in the user's default font
// This will help cover for any blocks of text that while editing had their font set by body style. We
// cannot send the body style in emails because it will get stripped by some email providers.
body = contentEditor.CurrentDefaultFont.ApplyFontToBody(body);
// We also need to wrap the email in a default direction because we support LTR/RTL per paragraph but
// we inherit the default direction from the body.
// NOTE: Now that we set the direction of the body (a few lines below) this may not be needed. It is
// currently kept to avoid possible regressions with external mail providers
string dir = contentEditor.IsRTLTemplate ? "rtl" : "ltr";
body = string.Format(CultureInfo.InvariantCulture, "<div dir=\"{0}\">{1}</div>", dir, body);
// This forms the whole html document by combining the theme and the body and then turning it into an IHTMLDocument2
// This is needed for WLM so they can reuse packaging code.
// We wrap the html document with a class that improves the representation of smart content for an email's plain text MIME part.
// In order to minimize the potential for regressions we only wrap in the case of a photomail.
IHTMLDocument2 publishDocument = HTMLDocumentHelper.StringToHTMLDoc(_wysiwygHTML.Replace("{post-body}", body), "");
// WinLive 262662: although many features work by wrapping the email in a direction div, the
// email as a whole is determined by the direction defined in the body
publishDocument.body.setAttribute("dir", dir, 1);
return publishDocument;
}
public void SetSize(int width, int height)
{
panel.Size = new Size(width, height);
}
private ContentEditorFactory factory;
private string _wysiwygHTML;
private string _previewHTML;
public void SetTheme(string wysiwygHTML)
{
// We need to track the wysiwygHTML and previewHTML so that we can use it in the Load() function
// where we have to call Initialize again which requires the theme to be passed in
_wysiwygHTML = wysiwygHTML;
_previewHTML = null;
contentEditor.SetTheme(_wysiwygHTML, null, false);
}
// NOTE(review): only the RTL aspect of the culture is honored here — sobitOptions
// and useAutoCorrect are ignored by this implementation. Confirm intended.
public void SetSpellingOptions(string bcp47Code, uint sobitOptions, bool useAutoCorrect)
{
if (CultureHelper.IsRtlCulture(bcp47Code))
{
EmailContentTarget target =
GlobalEditorOptions.ContentTarget as EmailContentTarget;
if (target != null)
{
target.EnableRtlMode();
}
}
}
public void DisableSpelling()
{
contentEditor.DisableSpelling();
}
public void AutoreplaceEmoticons(bool enabled)
{
AutoreplaceSettings.EnableEmoticonsReplacement = enabled;
}
#endregion
#region IDisposable Members
// Unsubscribes event handlers, disposes owned components, and releases the COM
// reference to the editor site. Fields are nulled to make stale use obvious.
public void Dispose()
{
contentEditor.DocumentComplete -= new EventHandler(blogPostHtmlEditor_DocumentComplete);
contentEditor.GotFocus -= new EventHandler(contentEditor_GotFocus);
contentEditor.LostFocus -= new EventHandler(contentEditor_LostFocus);
contentEditor.Dispose();
panel.Dispose();
accountAdapter.Dispose();
mainFrame.Dispose();
Marshal.ReleaseComObject(_contentEditorSite);
_contentEditorSite = null;
accountAdapter = null;
contentEditor = null;
panel = null;
context = null;
}
// View mode requested before the document completed; applied on DocumentComplete.
private EditingMode? _editMode;
// Switches the editor view; if the document is not yet complete the request is
// remembered and applied from the DocumentComplete handler instead.
public void ChangeView(EditingMode editingMode)
{
if (!_documentComplete)
{
_editMode = editingMode;
return;
}
try
{
switch (editingMode)
{
case EditingMode.Wysiwyg:
contentEditor.ChangeToWysiwygMode();
return;
case EditingMode.Source:
contentEditor.ChangeToCodeMode();
return;
case EditingMode.Preview:
contentEditor.ChangeToPreviewMode();
return;
case EditingMode.PlainText:
contentEditor.ChangeToPlainTextMode();
return;
}
}
catch (Exception ex)
{
Debug.Fail(ex.ToString());
throw;
}
Debug.Fail("Unknown value for editingView: " + editingMode.ToString() + "\r\nAccepted values Wysiwyg, Source, Preview, PlainText");
}
// Focuses the editor body, unless focus is already there or we are currently
// inside the GotFocus notification (which would re-enter).
public void SetFocus()
{
if (!_inGotFocusHandler && !contentEditor.DocumentHasFocus())
contentEditor.FocusBody();
}
// Inserted HTML is always flagged as external content.
public void InsertHtml(string html, HtmlInsertOptions options)
{
contentEditor.InsertHtml(html, (HtmlInsertionOptions)options | HtmlInsertionOptions.ExternalContent);
}
public void ChangeSelection(SelectionPosition position)
{
contentEditor.ChangeSelection(position);
}
#endregion
#region IUICommandHandler Members
// Ribbon command plumbing: all calls delegate to the editor's CommandManager.
public int Execute(uint commandId, CommandExecutionVerb verb, PropertyKeyRef key, PropVariantRef currentValue, IUISimplePropertySet commandExecutionProperties)
{
return contentEditor.CommandManager.Execute(commandId, verb, key, currentValue, commandExecutionProperties);
}
public int UpdateProperty(uint commandId, ref PropertyKey key, PropVariantRef currentValue, out PropVariant newValue)
{
return contentEditor.CommandManager.UpdateProperty(commandId, ref key, currentValue, out newValue);
}
#endregion
#region Implementation of IUICommandHandlerOverride
public int OverrideProperty(uint commandId, ref PropertyKey key, PropVariantRef overrideValue)
{
return contentEditor.CommandManager.OverrideProperty(commandId, ref key, overrideValue);
}
public int CancelOverride(uint commandId, ref PropertyKey key)
{
return contentEditor.CommandManager.CancelOverride(commandId, ref key);
}
#endregion
// Template strategy for the shared-canvas scenario: the body is editable and there
// is no title element.
internal class ContentEditorTemplateStrategy : BlogPostHtmlEditorControl.TemplateStrategy
{
public override string OnBodyInserted(string bodyContents)
{
return bodyContents;
}
public override string OnTitleInserted(string title)
{
return null;
}
// Makes the document body editable and normalizes its layout/direction.
public override void OnDocumentComplete(IHTMLDocument2 doc)
{
doc.body.style.overflow = "auto";
doc.body.id = BODY_FRAGMENT_ID;
((IHTMLElement3)doc.body).contentEditable = "true";
doc.body.style.width = "100%";
if (GlobalEditorOptions.SupportsFeature(ContentEditorFeature.RTLDirectionDefault))
doc.body.setAttribute("dir", "rtl", 0);
else
doc.body.setAttribute("dir", "ltr", 0);
IHTMLElement2 body = (IHTMLElement2)doc.body;
body.runtimeStyle.padding = "0px 0px 0px 0px";
body.runtimeStyle.borderWidth = "0px";
}
public override IHTMLElement PostBodyElement(IHTMLDocument2 doc)
{
return doc.body;
}
public override IHTMLElement TitleElement(IHTMLDocument2 doc)
{
return null;
}
}
#region IContentEditor Members
public bool GetDirtyState()
{
return contentEditor.IsDirty;
}
public void SetDirtyState(bool newState)
{
contentEditor.IsDirty = newState;
}
public void SetDefaultFont(string fontSetting)
{
contentEditor.SetDefaultFont(fontSetting);
}
#endregion
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Management.RecoveryServices;
using Microsoft.Azure.Management.RecoveryServices.Models;
namespace Microsoft.Azure.Management.RecoveryServices
{
public static partial class VaultOperationsExtensions
{
/// <summary>
/// Creates a vault. Synchronous wrapper over the asynchronous
/// BeginCreatingAsync operation.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the job collection.
/// </param>
/// <param name='vaultName'>
/// Required. The name of the vault to create.
/// </param>
/// <param name='vaultCreationInput'>
/// Required. Vault object to be created.
/// </param>
/// <returns>
/// The response model for the Vm group object.
/// </returns>
public static VaultCreateResponse BeginCreating(this IVaultOperations operations, string resourceGroupName, string vaultName, VaultCreateArgs vaultCreationInput)
{
    // Start the async operation on the default scheduler, then block for its result.
    Task<Task<VaultCreateResponse>> wrapper = Task.Factory.StartNew(
        (object state) => ((IVaultOperations)state).BeginCreatingAsync(resourceGroupName, vaultName, vaultCreationInput),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return wrapper.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates a vault. Convenience overload that runs without cancellation
/// support, forwarding to the CancellationToken-taking overload.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the job collection.
/// </param>
/// <param name='vaultName'>
/// Required. The name of the vault to create.
/// </param>
/// <param name='vaultCreationInput'>
/// Required. Vault object to be created.
/// </param>
/// <returns>
/// The response model for the Vm group object.
/// </returns>
public static Task<VaultCreateResponse> BeginCreatingAsync(this IVaultOperations operations, string resourceGroupName, string vaultName, VaultCreateArgs vaultCreationInput)
    => operations.BeginCreatingAsync(resourceGroupName, vaultName, vaultCreationInput, CancellationToken.None);
/// <summary>
/// Deletes a vault. Synchronous wrapper over the asynchronous
/// BeginDeletingAsync operation.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the job collection.
/// </param>
/// <param name='vaultName'>
/// Required. The name of the vault to delete.
/// </param>
/// <returns>
/// The response body contains the status of the specified asynchronous
/// operation, indicating whether it has succeeded, is inprogress, or
/// has failed. Note that this status is distinct from the HTTP status
/// code returned for the Get Operation Status operation itself. If
/// the asynchronous operation succeeded, the response body includes
/// the HTTP status code for the successful request. If the
/// asynchronous operation failed, the response body includes the HTTP
/// status code for the failed request, and also includes error
/// information regarding the failure.
/// </returns>
public static RecoveryServicesOperationStatusResponse BeginDeleting(this IVaultOperations operations, string resourceGroupName, string vaultName)
{
    // Start the async operation on the default scheduler, then block for its result.
    Task<Task<RecoveryServicesOperationStatusResponse>> wrapper = Task.Factory.StartNew(
        (object state) => ((IVaultOperations)state).BeginDeletingAsync(resourceGroupName, vaultName),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return wrapper.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a vault. Convenience overload that runs without cancellation
/// support, forwarding to the CancellationToken-taking overload.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the job collection.
/// </param>
/// <param name='vaultName'>
/// Required. The name of the vault to delete.
/// </param>
/// <returns>
/// The response body contains the status of the specified asynchronous
/// operation, indicating whether it has succeeded, is inprogress, or
/// has failed. Note that this status is distinct from the HTTP status
/// code returned for the Get Operation Status operation itself. If
/// the asynchronous operation succeeded, the response body includes
/// the HTTP status code for the successful request. If the
/// asynchronous operation failed, the response body includes the HTTP
/// status code for the failed request, and also includes error
/// information regarding the failure.
/// </returns>
public static Task<RecoveryServicesOperationStatusResponse> BeginDeletingAsync(this IVaultOperations operations, string resourceGroupName, string vaultName)
    => operations.BeginDeletingAsync(resourceGroupName, vaultName, CancellationToken.None);
/// <summary>
/// Creates a vault.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the (resource group) cloud service containing
/// the job collection.
/// </param>
/// <param name='vaultName'>
/// Optional. The name of the vault to create.
/// </param>
/// <param name='vaultCreationInput'>
/// Required. Vault object to be created.
/// </param>
/// <returns>
/// The response body reports the status of the asynchronous operation:
/// succeeded, in progress, or failed. This status is distinct from the
/// HTTP status code returned by the Get Operation Status call itself; on
/// success it carries the HTTP status code of the underlying request, and
/// on failure it carries that code together with error information.
/// </returns>
public static RecoveryServicesOperationStatusResponse Create(this IVaultOperations operations, string resourceGroupName, string vaultName, VaultCreateArgs vaultCreationInput)
{
    // Schedule the async variant on the default scheduler, unwrap the nested
    // task, then block the caller until the service call completes.
    Task<Task<RecoveryServicesOperationStatusResponse>> wrapped = Task.Factory.StartNew(
        state => ((IVaultOperations)state).CreateAsync(resourceGroupName, vaultName, vaultCreationInput),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return wrapped.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates a vault.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the (resource group) cloud service containing
/// the job collection.
/// </param>
/// <param name='vaultName'>
/// Optional. The name of the vault to create.
/// </param>
/// <param name='vaultCreationInput'>
/// Required. Vault object to be created.
/// </param>
/// <returns>
/// The response body reports the status of the asynchronous operation:
/// succeeded, in progress, or failed. This status is distinct from the
/// HTTP status code returned by the Get Operation Status call itself; on
/// success it carries the HTTP status code of the underlying request, and
/// on failure it carries that code together with error information.
/// </returns>
public static Task<RecoveryServicesOperationStatusResponse> CreateAsync(this IVaultOperations operations, string resourceGroupName, string vaultName, VaultCreateArgs vaultCreationInput)
{
    // Forward to the cancellable overload with a token that never fires.
    return operations.CreateAsync(resourceGroupName, vaultName, vaultCreationInput, CancellationToken.None);
}
/// <summary>
/// Deletes a vault.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the (Resource Group) cloud service containing
/// the job collection.
/// </param>
/// <param name='vaultName'>
/// Required. The name of the vault to delete.
/// </param>
/// <returns>
/// The response body reports the status of the asynchronous operation:
/// succeeded, in progress, or failed. This status is distinct from the
/// HTTP status code returned by the Get Operation Status call itself; on
/// success it carries the HTTP status code of the underlying request, and
/// on failure it carries that code together with error information.
/// </returns>
public static RecoveryServicesOperationStatusResponse Delete(this IVaultOperations operations, string resourceGroupName, string vaultName)
{
    // Schedule the async variant on the default scheduler, unwrap the nested
    // task, then block the caller until the service call completes.
    Task<Task<RecoveryServicesOperationStatusResponse>> wrapped = Task.Factory.StartNew(
        state => ((IVaultOperations)state).DeleteAsync(resourceGroupName, vaultName),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return wrapped.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a vault.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the (Resource Group) cloud service containing
/// the job collection.
/// </param>
/// <param name='vaultName'>
/// Required. The name of the vault to delete.
/// </param>
/// <returns>
/// The response body reports the status of the asynchronous operation:
/// succeeded, in progress, or failed. This status is distinct from the
/// HTTP status code returned by the Get Operation Status call itself; on
/// success it carries the HTTP status code of the underlying request, and
/// on failure it carries that code together with error information.
/// </returns>
public static Task<RecoveryServicesOperationStatusResponse> DeleteAsync(this IVaultOperations operations, string resourceGroupName, string vaultName)
{
    // Forward to the cancellable overload with a token that never fires.
    return operations.DeleteAsync(resourceGroupName, vaultName, CancellationToken.None);
}
/// <summary>
/// Gets the vaults.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the (resource group?) cloud service
/// containing the vault collection.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// The response model for Vault.
/// </returns>
public static VaultListResponse Get(this IVaultOperations operations, string resourceGroupName, CustomRequestHeaders customRequestHeaders)
{
    // Schedule the async variant on the default scheduler, unwrap the nested
    // task, then block the caller until the service call completes.
    Task<Task<VaultListResponse>> wrapped = Task.Factory.StartNew(
        state => ((IVaultOperations)state).GetAsync(resourceGroupName, customRequestHeaders),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return wrapped.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets the vaults.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the (resource group?) cloud service
/// containing the vault collection.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// The response model for Vault.
/// </returns>
public static Task<VaultListResponse> GetAsync(this IVaultOperations operations, string resourceGroupName, CustomRequestHeaders customRequestHeaders)
{
    // Forward to the cancellable overload with a token that never fires.
    return operations.GetAsync(resourceGroupName, customRequestHeaders, CancellationToken.None);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using Avalonia.Controls.Diagnostics;
using Avalonia.Controls.Generators;
using Avalonia.Controls.Platform;
using Avalonia.Controls.Primitives;
using Avalonia.Controls.Primitives.PopupPositioning;
using Avalonia.Controls.Templates;
using Avalonia.Input;
using Avalonia.Input.Platform;
using Avalonia.Interactivity;
using Avalonia.Layout;
using Avalonia.Styling;
#nullable enable
namespace Avalonia.Controls
{
/// <summary>
/// A control context menu.
/// </summary>
/// <summary>
/// A control context menu.
/// </summary>
public class ContextMenu : MenuBase, ISetterValue, IPopupHostProvider
{
    /// <summary>
    /// Defines the <see cref="HorizontalOffset"/> property.
    /// </summary>
    public static readonly StyledProperty<double> HorizontalOffsetProperty =
        Popup.HorizontalOffsetProperty.AddOwner<ContextMenu>();

    /// <summary>
    /// Defines the <see cref="VerticalOffset"/> property.
    /// </summary>
    public static readonly StyledProperty<double> VerticalOffsetProperty =
        Popup.VerticalOffsetProperty.AddOwner<ContextMenu>();

    /// <summary>
    /// Defines the <see cref="PlacementAnchor"/> property.
    /// </summary>
    public static readonly StyledProperty<PopupAnchor> PlacementAnchorProperty =
        Popup.PlacementAnchorProperty.AddOwner<ContextMenu>();

    /// <summary>
    /// Defines the <see cref="PlacementConstraintAdjustment"/> property.
    /// </summary>
    public static readonly StyledProperty<PopupPositionerConstraintAdjustment> PlacementConstraintAdjustmentProperty =
        Popup.PlacementConstraintAdjustmentProperty.AddOwner<ContextMenu>();

    /// <summary>
    /// Defines the <see cref="PlacementGravity"/> property.
    /// </summary>
    public static readonly StyledProperty<PopupGravity> PlacementGravityProperty =
        Popup.PlacementGravityProperty.AddOwner<ContextMenu>();

    /// <summary>
    /// Defines the <see cref="PlacementMode"/> property.
    /// </summary>
    public static readonly StyledProperty<PlacementMode> PlacementModeProperty =
        Popup.PlacementModeProperty.AddOwner<ContextMenu>();

    /// <summary>
    /// Defines the <see cref="PlacementRect"/> property.
    /// </summary>
    // FIX: this previously called AvaloniaProperty.Register<Popup, Rect?>(...),
    // which registered a *second* property named "PlacementRect" on Popup
    // instead of making ContextMenu an owner of the existing one. AddOwner
    // matches every sibling placement property above.
    public static readonly StyledProperty<Rect?> PlacementRectProperty =
        Popup.PlacementRectProperty.AddOwner<ContextMenu>();

    /// <summary>
    /// Defines the <see cref="WindowManagerAddShadowHint"/> property.
    /// </summary>
    public static readonly StyledProperty<bool> WindowManagerAddShadowHintProperty =
        Popup.WindowManagerAddShadowHintProperty.AddOwner<ContextMenu>();

    /// <summary>
    /// Defines the <see cref="PlacementTarget"/> property.
    /// </summary>
    public static readonly StyledProperty<Control?> PlacementTargetProperty =
        Popup.PlacementTargetProperty.AddOwner<ContextMenu>();

    private static readonly ITemplate<IPanel> DefaultPanel =
        new FuncTemplate<IPanel>(() => new StackPanel { Orientation = Orientation.Vertical });

    private Popup? _popup;
    private List<Control>? _attachedControls;
    private IInputElement? _previousFocus;
    private Action<IPopupHost?>? _popupHostChangedHandler;

    /// <summary>
    /// Initializes a new instance of the <see cref="ContextMenu"/> class.
    /// </summary>
    public ContextMenu()
        : this(new DefaultMenuInteractionHandler(true))
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="ContextMenu"/> class.
    /// </summary>
    /// <param name="interactionHandler">The menu interaction handler.</param>
    public ContextMenu(IMenuInteractionHandler interactionHandler)
        : base(interactionHandler)
    {
    }

    /// <summary>
    /// Initializes static members of the <see cref="ContextMenu"/> class.
    /// </summary>
    static ContextMenu()
    {
        ItemsPanelProperty.OverrideDefaultValue<ContextMenu>(DefaultPanel);
        PlacementModeProperty.OverrideDefaultValue<ContextMenu>(PlacementMode.Pointer);
        ContextMenuProperty.Changed.Subscribe(ContextMenuChanged);
    }

    /// <summary>
    /// Gets or sets the Horizontal offset of the context menu in relation to the <see cref="PlacementTarget"/>.
    /// </summary>
    public double HorizontalOffset
    {
        get { return GetValue(HorizontalOffsetProperty); }
        set { SetValue(HorizontalOffsetProperty, value); }
    }

    /// <summary>
    /// Gets or sets the Vertical offset of the context menu in relation to the <see cref="PlacementTarget"/>.
    /// </summary>
    public double VerticalOffset
    {
        get { return GetValue(VerticalOffsetProperty); }
        set { SetValue(VerticalOffsetProperty, value); }
    }

    /// <summary>
    /// Gets or sets the anchor point on the <see cref="PlacementRect"/> when <see cref="PlacementMode"/>
    /// is <see cref="PlacementMode.AnchorAndGravity"/>.
    /// </summary>
    public PopupAnchor PlacementAnchor
    {
        get { return GetValue(PlacementAnchorProperty); }
        set { SetValue(PlacementAnchorProperty, value); }
    }

    /// <summary>
    /// Gets or sets a value describing how the context menu position will be adjusted if the
    /// unadjusted position would result in the context menu being partly constrained.
    /// </summary>
    public PopupPositionerConstraintAdjustment PlacementConstraintAdjustment
    {
        get { return GetValue(PlacementConstraintAdjustmentProperty); }
        set { SetValue(PlacementConstraintAdjustmentProperty, value); }
    }

    /// <summary>
    /// Gets or sets a value which defines in what direction the context menu should open
    /// when <see cref="PlacementMode"/> is <see cref="PlacementMode.AnchorAndGravity"/>.
    /// </summary>
    public PopupGravity PlacementGravity
    {
        get { return GetValue(PlacementGravityProperty); }
        set { SetValue(PlacementGravityProperty, value); }
    }

    /// <summary>
    /// Gets or sets the placement mode of the context menu in relation to the <see cref="PlacementTarget"/>.
    /// </summary>
    public PlacementMode PlacementMode
    {
        get { return GetValue(PlacementModeProperty); }
        set { SetValue(PlacementModeProperty, value); }
    }

    /// <summary>
    /// Gets or sets a hint to the window manager that a shadow should be added to the
    /// context menu's popup window.
    /// </summary>
    public bool WindowManagerAddShadowHint
    {
        get { return GetValue(WindowManagerAddShadowHintProperty); }
        set { SetValue(WindowManagerAddShadowHintProperty, value); }
    }

    /// <summary>
    /// Gets or sets the anchor rectangle within the parent that the context menu will be placed
    /// relative to when <see cref="PlacementMode"/> is <see cref="PlacementMode.AnchorAndGravity"/>.
    /// </summary>
    /// <remarks>
    /// The placement rect defines a rectangle relative to <see cref="PlacementTarget"/> around
    /// which the popup will be opened, with <see cref="PlacementAnchor"/> determining which edge
    /// of the placement target is used.
    ///
    /// If unset, the anchor rectangle will be the bounds of the <see cref="PlacementTarget"/>.
    /// </remarks>
    public Rect? PlacementRect
    {
        get { return GetValue(PlacementRectProperty); }
        set { SetValue(PlacementRectProperty, value); }
    }

    /// <summary>
    /// Gets or sets the control that is used to determine the popup's position.
    /// </summary>
    public Control? PlacementTarget
    {
        get { return GetValue(PlacementTargetProperty); }
        set { SetValue(PlacementTargetProperty, value); }
    }

    /// <summary>
    /// Occurs when the value of the
    /// <see cref="P:Avalonia.Controls.ContextMenu.IsOpen" />
    /// property is changing from false to true.
    /// </summary>
    public event CancelEventHandler? ContextMenuOpening;

    /// <summary>
    /// Occurs when the value of the
    /// <see cref="P:Avalonia.Controls.ContextMenu.IsOpen" />
    /// property is changing from true to false.
    /// </summary>
    public event CancelEventHandler? ContextMenuClosing;

    /// <summary>
    /// Called when the <see cref="Control.ContextMenu"/> property changes on a control.
    /// </summary>
    /// <param name="e">The event args.</param>
    private static void ContextMenuChanged(AvaloniaPropertyChangedEventArgs e)
    {
        var control = (Control)e.Sender;

        if (e.OldValue is ContextMenu oldMenu)
        {
            // Detach the context-request/visual-tree handlers and release the
            // popup's logical parent so the old menu can be reused elsewhere.
            control.ContextRequested -= ControlContextRequested;
            control.DetachedFromVisualTree -= ControlDetachedFromVisualTree;
            oldMenu._attachedControls?.Remove(control);
            ((ISetLogicalParent?)oldMenu._popup)?.SetParent(null);
        }

        if (e.NewValue is ContextMenu newMenu)
        {
            newMenu._attachedControls ??= new List<Control>();
            newMenu._attachedControls.Add(control);
            control.ContextRequested += ControlContextRequested;
            control.DetachedFromVisualTree += ControlDetachedFromVisualTree;
        }
    }

    protected override void OnPropertyChanged<T>(AvaloniaPropertyChangedEventArgs<T> change)
    {
        base.OnPropertyChanged(change);

        // The popup is created lazily; keep the already-created popup in sync
        // with the shadow hint.
        if (change.Property == WindowManagerAddShadowHintProperty && _popup != null)
        {
            _popup.WindowManagerAddShadowHint = change.NewValue.GetValueOrDefault<bool>();
        }
    }

    /// <summary>
    /// Opens the menu.
    /// </summary>
    public override void Open() => Open(null);

    /// <summary>
    /// Opens a context menu on the specified control.
    /// </summary>
    /// <param name="control">The control.</param>
    public void Open(Control? control)
    {
        if (control is null && (_attachedControls is null || _attachedControls.Count == 0))
        {
            throw new ArgumentNullException(nameof(control));
        }

        if (control is object &&
            _attachedControls is object &&
            !_attachedControls.Contains(control))
        {
            // FIX: exception message previously said "ContentMenu".
            throw new ArgumentException(
                "Cannot show ContextMenu on a different control to the one it is attached to.",
                nameof(control));
        }

        control ??= _attachedControls![0];
        Open(control, PlacementTarget ?? control, false);
    }

    /// <summary>
    /// Closes the menu.
    /// </summary>
    public override void Close()
    {
        if (!IsOpen)
        {
            return;
        }

        if (_popup != null && _popup.IsVisible)
        {
            _popup.IsOpen = false;
        }
    }

    void ISetterValue.Initialize(ISetter setter)
    {
        // ContextMenu can be assigned to the ContextMenu property in a setter. This overrides
        // the behavior defined in Control which requires controls to be wrapped in a <template>.
        if (!(setter is Setter s && s.Property == ContextMenuProperty))
        {
            throw new InvalidOperationException(
                "Cannot use a control as a Setter value. Wrap the control in a <Template>.");
        }
    }

    IPopupHost? IPopupHostProvider.PopupHost => _popup?.Host;

    event Action<IPopupHost?>? IPopupHostProvider.PopupHostChanged
    {
        add => _popupHostChangedHandler += value;
        remove => _popupHostChangedHandler -= value;
    }

    protected override IItemContainerGenerator CreateItemContainerGenerator()
    {
        return new MenuItemContainerGenerator(this);
    }

    private void Open(Control control, Control placementTarget, bool requestedByPointer)
    {
        if (IsOpen)
        {
            return;
        }

        if (_popup == null)
        {
            _popup = new Popup
            {
                HorizontalOffset = HorizontalOffset,
                VerticalOffset = VerticalOffset,
                PlacementAnchor = PlacementAnchor,
                PlacementConstraintAdjustment = PlacementConstraintAdjustment,
                PlacementGravity = PlacementGravity,
                PlacementMode = PlacementMode,
                PlacementRect = PlacementRect,
                IsLightDismissEnabled = true,
                OverlayDismissEventPassThrough = true,
                WindowManagerAddShadowHint = WindowManagerAddShadowHint,
            };

            _popup.Opened += PopupOpened;
            _popup.Closed += PopupClosed;
            _popup.Closing += PopupClosing;
            _popup.KeyUp += PopupKeyUp;
        }

        if (_popup.Parent != control)
        {
            ((ISetLogicalParent)_popup).SetParent(null);
            ((ISetLogicalParent)_popup).SetParent(control);
        }

        // When opened via keyboard (not pointer) a Pointer placement makes no
        // sense, so fall back to opening below the placement target.
        _popup.PlacementMode = !requestedByPointer && PlacementMode == PlacementMode.Pointer
            ? PlacementMode.Bottom
            : PlacementMode;
        _popup.PlacementTarget = placementTarget;

        _popup.Child = this;
        IsOpen = true;
        _popup.IsOpen = true;

        RaiseEvent(new RoutedEventArgs
        {
            RoutedEvent = MenuOpenedEvent,
            Source = this,
        });
    }

    private void PopupOpened(object sender, EventArgs e)
    {
        // Remember the previously-focused element so it can be restored when
        // the popup closes.
        _previousFocus = FocusManager.Instance?.Current;
        Focus();

        _popupHostChangedHandler?.Invoke(_popup!.Host);
    }

    private void PopupClosing(object sender, CancelEventArgs e)
    {
        e.Cancel = CancelClosing();
    }

    private void PopupClosed(object sender, EventArgs e)
    {
        foreach (var i in LogicalChildren)
        {
            if (i is MenuItem menuItem)
            {
                menuItem.IsSubMenuOpen = false;
            }
        }

        SelectedIndex = -1;
        IsOpen = false;

        if (_attachedControls is null || _attachedControls.Count == 0)
        {
            ((ISetLogicalParent)_popup!).SetParent(null);
        }

        // HACK: Reset the focus when the popup is closed. We need to fix this so it's automatic.
        FocusManager.Instance?.Focus(_previousFocus);

        RaiseEvent(new RoutedEventArgs
        {
            RoutedEvent = MenuClosedEvent,
            Source = this,
        });

        _popupHostChangedHandler?.Invoke(null);
    }

    private void PopupKeyUp(object sender, KeyEventArgs e)
    {
        if (IsOpen)
        {
            var keymap = AvaloniaLocator.Current.GetService<PlatformHotkeyConfiguration>();

            // FIX: GetService may return null (e.g. in headless scenarios);
            // previously this dereferenced keymap unconditionally and could NRE.
            if (keymap is object
                && keymap.OpenContextMenu.Any(k => k.Matches(e))
                && !CancelClosing())
            {
                Close();
                e.Handled = true;
            }
        }
    }

    private static void ControlContextRequested(object sender, ContextRequestedEventArgs e)
    {
        if (sender is Control control
            && control.ContextMenu is ContextMenu contextMenu
            && !e.Handled
            && !contextMenu.CancelOpening())
        {
            var requestedByPointer = e.TryGetPosition(null, out _);
            contextMenu.Open(control, e.Source as Control ?? control, requestedByPointer);
            e.Handled = true;
        }
    }

    private static void ControlDetachedFromVisualTree(object sender, VisualTreeAttachmentEventArgs e)
    {
        if (sender is Control control
            && control.ContextMenu is ContextMenu contextMenu)
        {
            contextMenu.Close();
        }
    }

    // Raises ContextMenuClosing and reports whether a subscriber cancelled.
    private bool CancelClosing()
    {
        var eventArgs = new CancelEventArgs();
        ContextMenuClosing?.Invoke(this, eventArgs);
        return eventArgs.Cancel;
    }

    // Raises ContextMenuOpening and reports whether a subscriber cancelled.
    private bool CancelOpening()
    {
        var eventArgs = new CancelEventArgs();
        ContextMenuOpening?.Invoke(this, eventArgs);
        return eventArgs.Cancel;
    }
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Diagnostics;
using QuantConnect.Data;
package com.quantconnect.lean.Indicators
{
/**
* Provides a base type for all indicators
*/
* <typeparam name="T The type of data input into this indicator</typeparam>
// NOTE(review): this block reads like an automated C# -> Java translation
// (a C# [DebuggerDisplay] attribute, 'boolean'/'String' types, BigDecimal.ZERO,
// '@Override' used as a modifier, untyped locals such as 'nextResult', and
// stray '@param'/'@returns' lines at statement level). As written it compiles
// as neither C# nor Java — confirm the generating toolchain before editing.
// Comments below describe intent only; they are not verified by compilation.
[DebuggerDisplay( "{ToDetailedString()}")]
public abstract partial class IndicatorBase<T> : IIndicator<T>
    where T : BaseData
{
    // Most recent input seen by Update(); used to reject out-of-order data
    // and to de-duplicate repeated pushes of the same instance.
    /**the most recent input that was given to this indicator</summary>
    private T _previousInput;

    /**
     * Event handler that fires after this indicator is updated
     */
    public event IndicatorUpdatedHandler Updated;

    /**
     * Initializes a new instance of the Indicator class using the specified name.
     */
    * @param name The name of this indicator
    protected IndicatorBase( String name) {
        Name = name;
        // Sentinel state: time == DateTime.MinValue means "never updated".
        Current = new IndicatorDataPoint(DateTime.MinValue, BigDecimal.ZERO);
    }

    /**
     * Gets a name for this indicator
     */
    public String Name { get; private set; }

    /**
     * Gets a flag indicating when this indicator is ready and fully initialized
     */
    public abstract boolean IsReady { get; }

    /**
     * Gets the current state of this indicator. If the state has not been updated
     * then the time on the value will equal DateTime.MinValue.
     */
    public IndicatorDataPoint Current { get; protected set; }

    /**
     * Gets the number of samples processed by this indicator
     */
    public long Samples { get; private set; }

    /**
     * Updates the state of this indicator with the given value and returns true
     * if this indicator is ready, false otherwise
     */
    * @param input The value to use to update this indicator
    @returns True if this indicator is ready, false otherwise
    public boolean Update(T input) {
        // Forward-only contract: inputs must arrive in non-decreasing time order.
        if( _previousInput != null && input.Time < _previousInput.Time) {
            // if we receive a time in the past, throw
            throw new IllegalArgumentException( String.format( "This is a forward only indicator: %1$s Input: %2$s Previous: %3$s", Name, input.Time.toString( "u"), _previousInput.Time.toString( "u")));
        }
        // Reference-equality check skips re-processing the exact same instance.
        if( !ReferenceEquals(input, _previousInput)) {
            // compute a new value and update our previous time
            Samples++;
            _previousInput = input;
            nextResult = ValidateAndComputeNextValue(input);
            if( nextResult.Status == IndicatorStatus.Success) {
                Current = new IndicatorDataPoint(input.Time, nextResult.Value);

                // let others know we've produced a new data point
                OnUpdated(Current);
            }
        }
        return IsReady;
    }

    /**
     * Resets this indicator to its initial state
     */
    public void Reset() {
        // Restore the constructor's sentinel state.
        Samples = 0;
        _previousInput = null;
        Current = new IndicatorDataPoint(DateTime.MinValue, default(decimal));
    }

    /**
     * Compares the current object with another object of the same type.
     */
    @returns
     * A value that indicates the relative order of the objects being compared. The return value has the following meanings: Value Meaning Less than zero This object is less than the <paramref name="other"/> parameter.Zero This object is equal to <paramref name="other"/>. Greater than zero This object is greater than <paramref name="other"/>.
     *
     * @param other An object to compare with this object.
    public int CompareTo(IIndicator<T> other) {
        if( ReferenceEquals(other, null )) {
            // everything is greater than null via MSDN
            return 1;
        }
        // Ordering delegates to the current data point values.
        return Current.CompareTo(other.Current);
    }

    /**
     * Compares the current instance with another object of the same type and returns an integer that indicates whether the current instance precedes, follows, or occurs in the same position in the sort order as the other object.
     */
    @returns
     * A value that indicates the relative order of the objects being compared. The return value has these meanings: Value Meaning Less than zero This instance precedes <paramref name="obj"/> in the sort order. Zero This instance occurs in the same position in the sort order as <paramref name="obj"/>. Greater than zero This instance follows <paramref name="obj"/> in the sort order.
     *
     * @param obj An object to compare with this instance. <exception cref="T:System.ArgumentException"><paramref name="obj"/> is not the same type as this instance. </exception><filterpriority>2</filterpriority>
    public int CompareTo(object obj) {
        other = obj as IndicatorBase<T>;
        if( other == null ) {
            // Non-indicator comparands are a caller error.
            throw new IllegalArgumentException( "Object must be of type " + GetType().GetBetterTypeName());
        }
        return CompareTo(other);
    }

    /**
     * Determines whether the specified object is equal to the current object.
     */
    @returns
     * true if the specified object is equal to the current object; otherwise, false.
     *
     * @param obj The object to compare with the current object.
    public @Override boolean Equals(object obj) {
        // this implementation acts as a liason to prevent inconsistency between the operators
        // == and != against primitive types. the core impl for equals between two indicators
        // is still reference equality, however, when comparing value types (floats/int, ect..)
        // we'll use value type semantics on Current.Value
        // because of this, we shouldn't need to @Override GetHashCode as well since we're still
        // solely relying on reference semantics (think hashset/dictionary impls)
        if( ReferenceEquals(obj, null )) return false;
        if( obj.GetType().IsSubclassOf(typeof (IndicatorBase<>))) return ReferenceEquals(this, obj);

        // the obj is not an indicator, so let's check for value types, try converting to decimal
        converted = new BigDecimal( obj);
        return Current.Value == converted;
    }

    /**
     * toString Overload for Indicator Base
     */
    @returns String representation of the indicator
    public @Override String toString() {
        return Current.Value.toString( "#######0.0####");
    }

    /**
     * Provides a more detailed String of this indicator in the form of {Name} - {Value}
     */
    @returns A detailed String of this indicator's current state
    public String ToDetailedString() {
        return String.format( "%1$s - %2$s", Name, this);
    }

    /**
     * Computes the next value of this indicator from the given state
     */
    * @param input The input given to the indicator
    @returns A new value for this indicator
    protected abstract BigDecimal ComputeNextValue(T input);

    /**
     * Computes the next value of this indicator from the given state
     * and returns an instance of the <see cref="IndicatorResult"/> class
     */
    * @param input The input given to the indicator
    @returns An IndicatorResult object including the status of the indicator
    protected IndicatorResult ValidateAndComputeNextValue(T input) {
        // default implementation always returns IndicatorStatus.Success
        return new IndicatorResult(ComputeNextValue(input));
    }

    /**
     * Event invocator for the Updated event
     */
    * @param consolidated This is the new piece of data produced by this indicator
    protected void OnUpdated(IndicatorDataPoint consolidated) {
        // Snapshot the delegate before invoking to avoid a race with unsubscribe.
        handler = Updated;
        if( handler != null ) handler(this, consolidated);
    }
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="ClusterClientSpec.cs" company="Akka.NET Project">
// Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using Akka.Actor;
using Akka.Cluster.TestKit;
using Akka.Cluster.Tools.Client;
using Akka.Cluster.Tools.PublishSubscribe;
using Akka.Cluster.Tools.PublishSubscribe.Internal;
using Akka.Configuration;
using Akka.Remote.TestKit;
using Akka.Remote.Transport;
using Akka.Util.Internal;
using FluentAssertions;
namespace Akka.Cluster.Tools.Tests.MultiNode.Client
{
public class ClusterClientSpecConfig : MultiNodeConfig
{
public RoleName Client { get; }
public RoleName First { get; }
public RoleName Second { get; }
public RoleName Third { get; }
public RoleName Fourth { get; }
public ClusterClientSpecConfig()
{
Client = Role("client");
First = Role("first");
Second = Role("second");
Third = Role("third");
Fourth = Role("fourth");
CommonConfig = ConfigurationFactory.ParseString(@"
akka.loglevel = DEBUG
akka.actor.provider = ""Akka.Cluster.ClusterActorRefProvider, Akka.Cluster""
akka.remote.log-remote-lifecycle-events = off
akka.cluster.auto-down-unreachable-after = 0s
akka.cluster.client.heartbeat-interval = 1s
akka.cluster.client.acceptable-heartbeat-pause = 3s
akka.cluster.client.refresh-contacts-interval = 1s
# number-of-contacts must be >= 4 because we shutdown all but one in the end
akka.cluster.client.receptionist.number-of-contacts = 4
akka.cluster.client.receptionist.heartbeat-interval = 10s
akka.cluster.client.receptionist.acceptable-heartbeat-pause = 10s
akka.cluster.client.receptionist.failure-detection-interval = 1s
akka.test.filter-leeway = 10s
")
.WithFallback(ClusterClientReceptionist.DefaultConfig())
.WithFallback(DistributedPubSub.DefaultConfig());
TestTransport = true;
}
#region Helpers
public class Reply
{
public Reply(object msg, Address node)
{
Msg = msg;
Node = node;
}
public object Msg { get; }
public Address Node { get; }
}
public class TestService : ReceiveActor
{
public TestService(IActorRef testActorRef)
{
Receive<string>(cmd => cmd.Equals("shutdown"), msg =>
{
Context.System.Terminate();
});
ReceiveAny(msg =>
{
testActorRef.Forward(msg);
Sender.Tell(new Reply(msg.ToString() + "-ack", Cluster.Get(Context.System).SelfAddress));
});
}
}
public class Service : ReceiveActor
{
public Service()
{
ReceiveAny(msg => Sender.Tell(msg));
}
}
/// <summary>
/// Subscribes to a <see cref="ClusterClient"/>'s contact-point events and keeps
/// the most recent set of receptionist paths, queryable with
/// <see cref="GetLatestContactPoints"/>.
/// </summary>
public class TestClientListener : ReceiveActor
{
    #region TestClientListener messages

    /// <summary>Request message asking for the currently known contact points.</summary>
    public sealed class GetLatestContactPoints
    {
        public static readonly GetLatestContactPoints Instance = new GetLatestContactPoints();
        private GetLatestContactPoints() { }
    }

    /// <summary>Response carrying the currently known contact points.</summary>
    public sealed class LatestContactPoints : INoSerializationVerificationNeeded
    {
        public LatestContactPoints(ImmutableHashSet<ActorPath> contactPoints)
        {
            ContactPoints = contactPoints;
        }
        public ImmutableHashSet<ActorPath> ContactPoints { get; }
    }

    #endregion

    private readonly IActorRef _targetClient;
    private ImmutableHashSet<ActorPath> _latestContactPoints;

    public TestClientListener(IActorRef targetClient)
    {
        _targetClient = targetClient;
        _latestContactPoints = ImmutableHashSet<ActorPath>.Empty;

        // Query: hand back whatever we currently know.
        Receive<GetLatestContactPoints>(_ =>
        {
            Sender.Tell(new LatestContactPoints(_latestContactPoints));
        });

        // Full snapshot pushed by the client on subscription.
        Receive<ContactPoints>(snapshot =>
        {
            _latestContactPoints = snapshot.ContactPointsList;
        });

        // Incremental add after the initial snapshot.
        Receive<ContactPointAdded>(added =>
        {
            _latestContactPoints = _latestContactPoints.Add(added.ContactPoint);
        });

        // Incremental removal after the initial snapshot.
        Receive<ContactPointRemoved>(removed =>
        {
            _latestContactPoints = _latestContactPoints.Remove(removed.ContactPoint);
        });
    }

    protected override void PreStart()
    {
        _targetClient.Tell(SubscribeContactPoints.Instance);
    }
}
/// <summary>
/// Subscribes to a receptionist's cluster-client events and keeps the most
/// recent set of connected clients, queryable with
/// <see cref="GetLatestClusterClients"/>.
/// </summary>
public class TestReceptionistListener : ReceiveActor
{
    #region TestReceptionistListener messages

    /// <summary>Request message asking for the currently known cluster clients.</summary>
    public sealed class GetLatestClusterClients
    {
        public static readonly GetLatestClusterClients Instance = new GetLatestClusterClients();
        private GetLatestClusterClients() { }
    }

    /// <summary>Response carrying the currently known cluster clients.</summary>
    public sealed class LatestClusterClients : INoSerializationVerificationNeeded
    {
        public LatestClusterClients(ImmutableHashSet<IActorRef> clusterClients)
        {
            ClusterClients = clusterClients;
        }
        public ImmutableHashSet<IActorRef> ClusterClients { get; }
    }

    #endregion

    private readonly IActorRef _targetReceptionist;
    private ImmutableHashSet<IActorRef> _latestClusterClients;

    public TestReceptionistListener(IActorRef targetReceptionist)
    {
        _targetReceptionist = targetReceptionist;
        _latestClusterClients = ImmutableHashSet<IActorRef>.Empty;

        // Query: hand back whatever we currently know.
        Receive<GetLatestClusterClients>(_ =>
        {
            Sender.Tell(new LatestClusterClients(_latestClusterClients));
        });

        // Full snapshot pushed by the receptionist on subscription.
        Receive<ClusterClients>(snapshot =>
        {
            _latestClusterClients = snapshot.ClusterClientsList;
        });

        // Incremental add after the initial snapshot.
        Receive<ClusterClientUp>(up =>
        {
            _latestClusterClients = _latestClusterClients.Add(up.ClusterClient);
        });

        // Incremental removal after the initial snapshot.
        Receive<ClusterClientUnreachable>(gone =>
        {
            _latestClusterClients = _latestClusterClients.Remove(gone.ClusterClient);
        });
    }

    protected override void PreStart()
    {
        _targetReceptionist.Tell(SubscribeClusterClients.Instance);
    }
}
#endregion
}
// Concrete per-node subclasses required by the multi-node test runner:
// one class per participating role so every node executes its own instance
// of the shared ClusterClientSpec.
public class ClusterClientMultiNode1 : ClusterClientSpec { }
public class ClusterClientMultiNode2 : ClusterClientSpec { }
public class ClusterClientMultiNode3 : ClusterClientSpec { }
public class ClusterClientMultiNode4 : ClusterClientSpec { }
public class ClusterClientMultiNode5 : ClusterClientSpec { }
/// <summary>
/// Multi-node spec for Akka.NET's ClusterClient: a client node outside the
/// cluster talks to services registered at cluster-side receptionists, and must
/// survive receptionist shutdown and network partitions. The sub-tests share
/// cluster state and run strictly in the order listed in
/// <see cref="ClusterClientSpecs"/>.
/// </summary>
public abstract class ClusterClientSpec : MultiNodeClusterSpec
{
private readonly ClusterClientSpecConfig _config;
protected ClusterClientSpec() : this(new ClusterClientSpecConfig())
{
}
protected ClusterClientSpec(ClusterClientSpecConfig config) : base(config)
{
_config = config;
// All four server roles start out alive; tests remove entries from this set
// as they deliberately shut nodes down.
_remainingServerRoleNames = ImmutableHashSet.Create(_config.First, _config.Second, _config.Third, _config.Fourth);
}
// Every role (servers and client) takes part in the barrier protocol.
protected override int InitialParticipantsValueFactory
{
get { return Roles.Count; }
}
// Joins 'from' to the cluster seeded at 'to', starts a receptionist on it,
// then synchronizes all nodes on a "<role>-joined" barrier.
private void Join(RoleName from, RoleName to)
{
RunOn(() =>
{
Cluster.Join(Node(to).Address);
CreateReceptionist();
}, from);
EnterBarrier(from.Name + "-joined");
}
// Merely touching the extension spins up the receptionist on this node.
private void CreateReceptionist()
{
ClusterClientReceptionist.Get(Sys);
}
// Waits until the local DistributedPubSub mediator reports 'expected'
// registered entries (receptionist registrations are pub-sub backed).
private void AwaitCount(int expected)
{
AwaitAssert(() =>
{
DistributedPubSub.Get(Sys).Mediator.Tell(Count.Instance);
ExpectMsg<int>().Should().Be(expected);
});
}
// Reverse lookup: which still-alive server role owns this address?
// Returns null when the address matches no remaining server role.
private RoleName GetRoleName(Address address)
{
return _remainingServerRoleNames.FirstOrDefault(r => Node(r).Address.Equals(address));
}
private ImmutableHashSet<RoleName> _remainingServerRoleNames;
// Initial contact points handed to freshly created clients: the system-level
// receptionist paths of the remaining servers, excluding First and Fourth.
private ImmutableHashSet<ActorPath> InitialContacts
{
get
{
return _remainingServerRoleNames.Remove(_config.First).Remove(_config.Fourth).Select(r => Node(r) / "system" / "receptionist").ToImmutableHashSet();
}
}
// Single driver fact: the steps below mutate shared cluster state and MUST
// run in exactly this order on every node.
[MultiNodeFact(Skip = "Disable due to known issues with this spec which are currently under investigation")]
public void ClusterClientSpecs()
{
ClusterClient_must_startup_cluster();
ClusterClient_must_communicate_to_any_node_in_cluster();
ClusterClient_must_demonstrate_usage();
ClusterClient_must_report_events();
ClusterClient_must_reestablish_connection_to_another_receptionist_when_server_is_shutdown();
ClusterClient_must_reestablish_connection_to_receptionist_after_partition();
//ClusterClient_must_reestablish_connection_to_receptionist_after_server_restart();
}
// Forms the four-node cluster (First is the seed) and registers "testService"
// on Fourth; all servers then see one registration via pub-sub.
public void ClusterClient_must_startup_cluster()
{
Within(30.Seconds(), () =>
{
Join(_config.First, _config.First);
Join(_config.Second, _config.First);
Join(_config.Third, _config.First);
Join(_config.Fourth, _config.First);
RunOn(() =>
{
var service = Sys.ActorOf(Props.Create(() => new ClusterClientSpecConfig.TestService(TestActor)), "testService");
ClusterClientReceptionist.Get(Sys).RegisterService(service);
}, _config.Fourth);
RunOn(() =>
{
AwaitCount(1);
}, _config.First, _config.Second, _config.Third, _config.Fourth);
EnterBarrier("after-1");
});
}
// The client sends via Send with localAffinity; the ack comes back and the
// payload arrives at the probe on Fourth (where testService lives).
public void ClusterClient_must_communicate_to_any_node_in_cluster()
{
Within(10.Seconds(), () =>
{
RunOn(() =>
{
var c = Sys.ActorOf(ClusterClient.Props(ClusterClientSettings.Create(Sys).WithInitialContacts(InitialContacts)), "client1");
c.Tell(new ClusterClient.Send("/user/testService", "hello", localAffinity: true));
ExpectMsg<ClusterClientSpecConfig.Reply>().Msg.Should().Be("hello-ack");
Sys.Stop(c);
}, _config.Client);
RunOn(() =>
{
ExpectMsg("hello");
}, _config.Fourth);
EnterBarrier("after-2");
});
}
// Documentation sample: one serviceA on host1, one serviceB each on host2 and
// host3; Send reaches one service, SendToAll reaches both serviceB instances.
public void ClusterClient_must_demonstrate_usage()
{
var host1 = _config.First;
var host2 = _config.Second;
var host3 = _config.Third;
Within(15.Seconds(), () =>
{
//#server
RunOn(() =>
{
var serviceA = Sys.ActorOf(Props.Create<ClusterClientSpecConfig.Service>(), "serviceA");
ClusterClientReceptionist.Get(Sys).RegisterService(serviceA);
}, host1);
RunOn(() =>
{
var serviceB = Sys.ActorOf(Props.Create<ClusterClientSpecConfig.Service>(), "serviceB");
ClusterClientReceptionist.Get(Sys).RegisterService(serviceB);
}, host2, host3);
//#server
RunOn(() =>
{
AwaitCount(4);
}, host1, host2, host3, _config.Fourth);
EnterBarrier("services-replicated");
//#client
RunOn(() =>
{
var c = Sys.ActorOf(ClusterClient.Props(ClusterClientSettings.Create(Sys).WithInitialContacts(InitialContacts)), "client");
c.Tell(new ClusterClient.Send("/user/serviceA", "hello", localAffinity: true));
c.Tell(new ClusterClient.SendToAll("/user/serviceB", "hi"));
}, _config.Client);
//#client
RunOn(() =>
{
// note that "hi" was sent to 2 "serviceB"
var received = ReceiveN(3);
received.ToImmutableHashSet().Should().BeEquivalentTo(ImmutableHashSet.Create("hello", "hi"));
}, _config.Client);
// strange, barriers fail without this sleep
Thread.Sleep(1000);
EnterBarrier("after-3");
});
}
// Verifies the event APIs: the client reports its contact points and a
// receptionist reports its connected clients.
public void ClusterClient_must_report_events()
{
Within(15.Seconds(), () =>
{
RunOn(() =>
{
// Re-use the long-lived "client" actor created in the usage demo above.
var c = Sys.ActorSelection("/user/client").ResolveOne(Dilated(1.Seconds())).Result;
var l = Sys.ActorOf(
Props.Create(() => new ClusterClientSpecConfig.TestClientListener(c)),
"reporter-client-listener");
var expectedContacts = ImmutableHashSet.Create(_config.First, _config.Second, _config.Third, _config.Fourth)
.Select(_ => Node(_) / "system" / "receptionist");
Within(10.Seconds(), () =>
{
AwaitAssert(() =>
{
var probe = CreateTestProbe();
l.Tell(ClusterClientSpecConfig.TestClientListener.GetLatestContactPoints.Instance, probe.Ref);
probe.ExpectMsg<ClusterClientSpecConfig.TestClientListener.LatestContactPoints>()
.ContactPoints.Should()
.BeEquivalentTo(expectedContacts);
});
});
}, _config.Client);
EnterBarrier("reporter-client-listener-tested");
RunOn(() =>
{
// Only run this test on a node that knows about our client. It could be that no node knows
// but there isn't a means of expressing that at least one of the nodes needs to pass the test.
var r = ClusterClientReceptionist.Get(Sys).Underlying;
r.Tell(GetClusterClients.Instance);
var cps = ExpectMsg<ClusterClients>();
if (cps.ClusterClientsList.Any(c => c.Path.Name.Equals("client")))
{
Log.Info("Testing that the receptionist has just one client");
var l = Sys.ActorOf(
Props.Create(() => new ClusterClientSpecConfig.TestReceptionistListener(r)),
"reporter-receptionist-listener");
var c = Sys
.ActorSelection(Node(_config.Client) / "user" / "client")
.ResolveOne(Dilated(2.Seconds())).Result;
var expectedClients = ImmutableHashSet.Create(c);
Within(10.Seconds(), () =>
{
AwaitAssert(() =>
{
var probe = CreateTestProbe();
l.Tell(ClusterClientSpecConfig.TestReceptionistListener.GetLatestClusterClients.Instance, probe.Ref);
probe.ExpectMsg<ClusterClientSpecConfig.TestReceptionistListener.LatestClusterClients>()
.ClusterClients.Should()
.BeEquivalentTo(expectedClients);
});
});
}
}, _config.First, _config.Second, _config.Third);
EnterBarrier("after-6");
});
}
// Kills the receptionist node the client happened to connect to and checks
// the client fails over to another receptionist.
public void ClusterClient_must_reestablish_connection_to_another_receptionist_when_server_is_shutdown()
{
Within(30.Seconds(), () =>
{
RunOn(() =>
{
var service2 = Sys.ActorOf(Props.Create(() => new ClusterClientSpecConfig.TestService(TestActor)), "service2");
ClusterClientReceptionist.Get(Sys).RegisterService(service2);
AwaitCount(8);
}, _config.First, _config.Second, _config.Third, _config.Fourth);
EnterBarrier("service2-replicated");
RunOn(() =>
{
var c = Sys.ActorOf(ClusterClient.Props(ClusterClientSettings.Create(Sys).WithInitialContacts(InitialContacts)), "client2");
c.Tell(new ClusterClient.Send("/user/service2", "bonjour", localAffinity: true));
var reply = ExpectMsg<ClusterClientSpecConfig.Reply>();
reply.Msg.Should().Be("bonjour-ack");
RoleName receptionistRoleName = GetRoleName(reply.Node);
if (receptionistRoleName == null)
{
throw new Exception("Unexpected missing role name: " + reply.Node);
}
// Kill the node that answered and forget it; the client must then find
// one of the remaining receptionists.
TestConductor.Exit(receptionistRoleName, 0).Wait();
_remainingServerRoleNames = _remainingServerRoleNames.Remove(receptionistRoleName);
Within(Remaining - 3.Seconds(), () =>
{
AwaitAssert(() =>
{
c.Tell(new ClusterClient.Send("/user/service2", "hi again", localAffinity: true));
ExpectMsg<ClusterClientSpecConfig.Reply>(1.Seconds()).Msg.Should().Be("hi again-ack");
});
});
Sys.Stop(c);
}, _config.Client);
EnterBarrier("verified-3");
// NOTE(review): this ReceiveWhile runs on EVERY node (it is not wrapped in
// RunOn), draining any "hi again" payloads forwarded to the local TestActor
// by a service2 instance; anything else fails the node — confirm intended.
ReceiveWhile(2.Seconds(), msg =>
{
if (msg.Equals("hi again")) return msg;
else throw new Exception("Unexpected message: " + msg);
});
EnterBarrier("verified-4");
RunOn(() =>
{
// Locate the test listener from a previous test and see that it agrees
// with what the client is telling it about what receptionists are alive
var l = Sys.ActorSelection("/user/reporter-client-listener");
var expectedContacts = _remainingServerRoleNames.Select(c => Node(c) / "system" / "receptionist");
Within(10.Seconds(), () =>
{
AwaitAssert(() =>
{
var probe = CreateTestProbe();
l.Tell(ClusterClientSpecConfig.TestClientListener.GetLatestContactPoints.Instance, probe.Ref);
probe.ExpectMsg<ClusterClientSpecConfig.TestClientListener.LatestContactPoints>()
.ContactPoints.Should()
.BeEquivalentTo(expectedContacts);
});
});
}, _config.Client);
EnterBarrier("after-4");
});
}
// Shrinks the cluster to a single receptionist, blackholes the link between
// client and that node, then heals it and checks the client reconnects.
public void ClusterClient_must_reestablish_connection_to_receptionist_after_partition()
{
Within(30.Seconds(), () =>
{
RunOn(() =>
{
var c = Sys.ActorOf(ClusterClient.Props(ClusterClientSettings.Create(Sys).WithInitialContacts(InitialContacts)), "client3");
c.Tell(new ClusterClient.Send("/user/service2", "bonjour2", localAffinity: true));
var reply = ExpectMsg<ClusterClientSpecConfig.Reply>();
reply.Msg.Should().Be("bonjour2-ack");
RoleName receptionistRoleName = GetRoleName(reply.Node);
if (receptionistRoleName == null)
{
throw new Exception("Unexpected missing role name: " + reply.Node);
}
// shutdown all but the one that the client is connected to
_remainingServerRoleNames.Where(r => !r.Equals(receptionistRoleName)).ForEach(r =>
{
TestConductor.Exit(r, 0).Wait();
});
_remainingServerRoleNames = ImmutableHashSet.Create(receptionistRoleName);
// network partition between client and server
TestConductor.Blackhole(_config.Client, receptionistRoleName, ThrottleTransportAdapter.Direction.Both).Wait();
c.Tell(new ClusterClient.Send("/user/service2", "ping", localAffinity: true));
// if we would use remote watch the failure detector would trigger and
// connection quarantined
ExpectNoMsg(5.Seconds());
TestConductor.PassThrough(_config.Client, receptionistRoleName, ThrottleTransportAdapter.Direction.Both).Wait();
var expectedAddress = GetAddress(receptionistRoleName);
AwaitAssert(() =>
{
var probe = CreateTestProbe();
c.Tell(new ClusterClient.Send("/user/service2", "bonjour3", localAffinity: true), probe.Ref);
var reply2 = probe.ExpectMsg<ClusterClientSpecConfig.Reply>(1.Seconds());
reply2.Msg.Should().Be("bonjour3-ack");
reply2.Node.Should().Be(expectedAddress);
});
Sys.Stop(c);
}, _config.Client);
EnterBarrier("after-5");
});
}
// Currently excluded from the driver fact above (see the commented-out call
// in ClusterClientSpecs): restarts the last server on the same port and
// expects the client to reconnect to the reborn receptionist.
public void ClusterClient_must_reestablish_connection_to_receptionist_after_server_restart()
{
Within(30.Seconds(), () =>
{
RunOn(() =>
{
_remainingServerRoleNames.Count.Should().Be(1);
var remainingContacts = _remainingServerRoleNames.Select(r => Node(r) / "system" / "receptionist").ToList();
var c = Sys.ActorOf(ClusterClient.Props(ClusterClientSettings.Create(Sys).WithInitialContacts(remainingContacts)), "client4");
c.Tell(new ClusterClient.Send("/user/service2", "bonjour4", localAffinity: true));
var reply = ExpectMsg<ClusterClientSpecConfig.Reply>(10.Seconds());
reply.Msg.Should().Be("bonjour4-ack");
reply.Node.Should().Be(remainingContacts.First().Address);
var logSource = $"{(Sys as ExtendedActorSystem).Provider.DefaultAddress}/user/client4";
EventFilter.Info(start: "Connected to", source: logSource).ExpectOne(() =>
{
EventFilter.Info(start: "Lost contact", source: logSource).ExpectOne(() =>
{
// shutdown server
TestConductor.Shutdown(_remainingServerRoleNames.First()).Wait();
});
});
c.Tell(new ClusterClient.Send("/user/service2", "shutdown", localAffinity: true));
Thread.Sleep(2000); // to ensure that it is sent out before shutting down system
}, _config.Client);
RunOn(() =>
{
Sys.WhenTerminated.Wait(20.Seconds());
// start new system on same port
var sys2 = ActorSystem.Create(
Sys.Name,
ConfigurationFactory.ParseString("akka.remote.helios.tcp.port=" + Cluster.Get(Sys).SelfAddress.Port).WithFallback(Sys.Settings.Config));
Cluster.Get(sys2).Join(Cluster.Get(sys2).SelfAddress);
var service2 = sys2.ActorOf(Props.Create(() => new ClusterClientSpecConfig.TestService(TestActor)), "service2");
ClusterClientReceptionist.Get(sys2).RegisterService(service2);
sys2.WhenTerminated.Wait(20.Seconds());
}, _remainingServerRoleNames.ToArray());
});
}
}
}
| |
/***************************************************************************************************************************************
* Copyright (C) 2001-2012 LearnLift USA *
* Contact: Learnlift USA, 12 Greenway Plaza, Suite 1510, Houston, Texas 77046, support@memorylifter.com *
* *
* This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License *
* as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty *
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public License along with this library; if not, *
* write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *
***************************************************************************************************************************************/
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Text;
using System.Collections.Generic;
using MLifterTest.BusinessLayer;
using System.IO;
using System.Reflection;
using MLifter.DAL;
using MLifter.DAL.Interfaces;
using MLifter.DAL.XML;
namespace MLifterTest.DAL
{
/// <summary>
///This is a test class for MLifter.DAL.XML.XmlQueryOptions and is intended
///to contain all MLifter.DAL.XML.XmlQueryOptions Unit Tests.
///Most tests repeatedly reopen the XML-backed learning module to verify that
///changes survive a save/reload round trip.
///</summary>
[TestClass()]
public class XmlTest
{
// Supplied by the MSTest runner for the current test run.
private TestContext testContextInstance;
// Shared XML-backed user created once in MyClassInitialize; every test opens
// the same test learning module through it.
private static IUser user = null;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
/// <summary>
///Gets the shared XML-backed user created in MyClassInitialize.
///</summary>
public static IUser User
{
get
{
return user;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//
[ClassInitialize()]
public static void MyClassInitialize(TestContext testContext)
{
DictionaryTest.MyClassCleanup(); //in case the LM is still open
DictionaryTest.ExtractTestDictionary();
// Create the XML-backed user against the extracted test learning module;
// the login and error delegates are no-ops for this test setup.
user = UserFactory.Create((GetLoginInformation)delegate(UserStruct u, ConnectionStringStruct c) { return u; },
new ConnectionStringStruct(DatabaseType.Xml, DictionaryTest.testDic, false), (DataAccessErrorDelegate)delegate { return; }, testContext);
Assert.IsTrue(File.Exists(DictionaryTest.testDic), "Test Learning Module file cannot be found.");
}
//
//Use ClassCleanup to run code after all tests in a class have run
//
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
// Shared RNG, delegated to DictionaryTest so all tests use one seed/source.
private static Random random
{
get { return DictionaryTest.Random; }
}
// Convenience coin flip, delegated to DictionaryTest.
private static bool GetRandBool()
{
return DictionaryTest.GetRandBool();
}
// Number of save/reload round trips performed by the repetitive tests.
private readonly int loopcount = 100;
/// <summary>
/// Tests the query options: assigns random values to the default settings,
/// saves, reopens the LM and verifies each value persisted.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-03</remarks>
[TestMethod()]
public void QueryOptionsTest()
{
Dictionary<string, object> values;
using (IDictionary target = user.Open())
{
values = GetInitialValues(target.DefaultSettings);
}
for (int i = 0; i < loopcount; i++)
{
using (IDictionary target = user.Open())
{
CheckAndAssign(target.DefaultSettings, values);
target.Save();
}
}
}
/// <summary>
/// Checks the property values of the target object for changes, and assigns new random values.
/// Only properties captured earlier by GetInitialValues (keys of 'values') are touched.
/// </summary>
/// <param name="target">The target.</param>
/// <param name="values">The values.</param>
/// <remarks>Documented by Dev02, 2008-06-03</remarks>
private static void CheckAndAssign(object target, Dictionary<string, object> values)
{
//check if values are equal and assign new random values
foreach (PropertyInfo info in target.GetType().GetProperties())
{
if (values.ContainsKey(info.Name))
{
Assert.AreEqual(values[info.Name], info.GetValue(target, null), info.Name + " returned an other value than the set one.");
object newvalue = null;
if (info.PropertyType == typeof(bool))
newvalue = GetRandBool();
else if (info.PropertyType.IsEnum)
{
// Pick a random defined member of the enum type.
Type enumType = info.PropertyType;
newvalue = Enum.GetValues(enumType).GetValue(random.Next(Enum.GetValues(enumType).Length));
}
else if (info.PropertyType == typeof(int))
newvalue = random.Next(99) + 1; //exception for MultipleChoice - AnswerCount
info.SetValue(target, newvalue, null);
values[info.Name] = newvalue;
}
}
}
/// <summary>
/// Gets the initial property values of the target object: all readable+writable
/// bool, enum and int properties (excluding ESnoozeMode).
/// </summary>
/// <param name="target">The target.</param>
/// <returns></returns>
/// <remarks>Documented by Dev02, 2008-06-03</remarks>
private static Dictionary<string, object> GetInitialValues(object target)
{
Dictionary<string, object> values = new Dictionary<string, object>();
foreach (PropertyInfo info in target.GetType().GetProperties())
{
if (info.CanRead && info.CanWrite && (info.PropertyType == typeof(bool) || info.PropertyType.IsEnum || info.PropertyType == typeof(int))
&& info.PropertyType != typeof(ESnoozeMode)) //special exception for ESnoozeMode: not all values are allowed
{
Assert.IsFalse(values.ContainsKey(info.Name), "Duplicate property name.");
values.Add(info.Name, info.GetValue(target, null));
}
}
return values;
}
/// <summary>
/// Tests the multiple choice options.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-03</remarks>
[TestMethod()]
public void MultipleChoiceOptionsTest()
{
using (IDictionary target = user.Open())
{
Dictionary<string, object> values = GetInitialValues(target.DefaultSettings.MultipleChoiceOptions);
// NOTE(review): explicit Dispose inside the enclosing using means Dispose
// runs twice on 'target' — presumably IDictionary.Dispose is idempotent;
// confirm against the DAL implementation.
target.Dispose();
for (int i = 0; i < loopcount; i++)
{
using (IDictionary target2 = user.Open())
{
CheckAndAssign(target2.DefaultSettings.MultipleChoiceOptions, values);
target2.Save();
}
}
}
}
/// <summary>
/// Tests the query types.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-03</remarks>
[TestMethod()]
public void QueryTypesTest()
{
using (IDictionary target = user.Open())
{
Dictionary<string, object> values = GetInitialValues(target.DefaultSettings.QueryTypes);
// Same early-Dispose-inside-using pattern as MultipleChoiceOptionsTest.
target.Dispose();
for (int i = 0; i < loopcount; i++)
{
using (IDictionary target2 = user.Open())
{
CheckAndAssign(target2.DefaultSettings.QueryTypes, values);
target2.Save();
}
}
}
}
/// <summary>
/// Tests the snooze options.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-03</remarks>
[TestMethod()]
public void SnoozeOptionsTest()
{
using (IDictionary target = user.Open())
{
Dictionary<string, object> values = GetInitialValues(target.DefaultSettings.SnoozeOptions);
// Same early-Dispose-inside-using pattern as MultipleChoiceOptionsTest.
target.Dispose();
for (int i = 0; i < loopcount; i++)
{
using (IDictionary target2 = user.Open())
{
CheckAndAssign(target2.DefaultSettings.SnoozeOptions, values);
target2.Save();
}
}
}
}
/// <summary>
/// Tests the card generation: adds a card (via two alternative code paths),
/// saves, reopens, deletes it again and verifies the count each time.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-03</remarks>
[TestMethod()]
public void CardAddRemoveTest()
{
for (int i = 0; i < loopcount / 2; i++)
{
int cardcount = 0;
int newcardid = 0;
using (IDictionary target = user.Open())
{
cardcount = target.Cards.Count;
// Randomly exercise both ways of creating a card: AddNew() vs.
// Create() followed by a manual Add().
if (GetRandBool())
newcardid = target.Cards.AddNew().Id;
else
{
ICard card = target.Cards.Create();
target.Cards.Cards.Add(card);
newcardid = card.Id;
}
Assert.IsTrue(newcardid > 0, "New card did not have a valid ID.");
target.Save();
}
//reopen the LM file
using (IDictionary target = user.Open())
{
Assert.AreEqual(cardcount + 1, target.Cards.Count, "New card was not added properly.");
target.Cards.Delete(newcardid);
target.Save();
}
//reopen the LM file
using (IDictionary target = user.Open())
{
Assert.AreEqual(cardcount, target.Cards.Count, "New card was not deleted properly.");
}
}
}
/// <summary>
/// Tests card data assignment: box number, timestamp, answer and answer
/// distractor must survive a save/reload round trip on every iteration.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-05</remarks>
[TestMethod()]
public void CardDataTest()
{
//create new card
using (IDictionary target = user.Open())
{
ICard card = target.Cards.AddNew();
int cardid = card.Id;
int boxno = card.Box;
DateTime timestamp = card.Timestamp;
string answer = "1";
card.Answer.AddWord(card.Answer.CreateWord(answer, WordType.Word, true));
card.AnswerDistractors.AddWord(card.AnswerDistractors.CreateWord(answer, WordType.Distractor, true));
target.Save();
// NOTE(review): 'target' is disposed here but the enclosing using disposes
// it again at the end; 'card' is re-fetched from target2 inside the loop
// before any further use, so the stale reference itself is never read.
target.Dispose();
for (int i = 0; i < loopcount; i++)
{
using (IDictionary target2 = user.Open())
{
card = target2.Cards.Get(cardid);
Assert.AreEqual(boxno, card.Box, "Box was not saved properly.");
Assert.AreEqual(timestamp, card.Timestamp, "Timestamp was not saved properly.");
Assert.AreEqual(1, card.Answer.Words.Count, "Not all answers were saved properly.");
Assert.AreEqual(1, card.AnswerDistractors.Words.Count, "Not all answer distractors were saved properly.");
Assert.AreEqual(answer, card.Answer.Words[0].Word, "Answer was not saved properly.");
Assert.AreEqual(answer, card.AnswerDistractors.Words[0].Word, "Answer distractor was not saved properly.");
// New random values for the next round trip; Box may become -1 (pool).
boxno = card.Box = random.Next(target2.Boxes.Box.Count + 1) - 1;
timestamp = card.Timestamp = DateTime.Now;
if (GetRandBool()) //test different methods to change the word
{
answer = card.Answer.Words[0].Word = random.Next(100).ToString();
card.AnswerDistractors.Words[0].Word = answer;
}
else
{
answer = random.Next(100).ToString();
card.Answer.Words.Clear();
card.Answer.AddWord(card.Answer.CreateWord(answer, WordType.Word, true));
card.AnswerDistractors.Words.Clear();
card.AnswerDistractors.AddWord(card.AnswerDistractors.CreateWord(answer, WordType.Distractor, true));
}
target2.Save();
}
}
}
}
/// <summary>
/// A test for the AddMedia function: copies existing media files to a temp
/// folder, attaches them to a new card (via two alternative code paths),
/// then reopens the LM and verifies the media files are resolvable.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-03</remarks>
[TestMethod(), TestCategory("Deprecated")]
public void AddMediaObjectsTest()
{
string workingDirectory = Environment.CurrentDirectory;
int newcardid = 0;
int newmediacount = 0;
using (IDictionary target = user.Open())
{
//get temp folder - probe MLifterTest0, MLifterTest1, ... until unused
DirectoryInfo tempfolder;
int tempfolderindex = 0;
do
{
tempfolder = new DirectoryInfo(Path.Combine(Path.GetTempPath(), "MLifterTest" + tempfolderindex.ToString()));
tempfolderindex++;
} while (tempfolder.Exists);
tempfolder.Create();
try
{
List<KeyValuePair<FileInfo, EMedia>> mediafiles = new List<KeyValuePair<FileInfo, EMedia>>();
//get some Media files
foreach (ICard card in target.Cards.Cards)
{
foreach (IMedia media in card.QuestionMedia)
{
FileInfo mediafile = new FileInfo(media.Filename);
if (mediafile.Exists)
{
mediafile = mediafile.CopyTo(Path.Combine(tempfolder.FullName, mediafile.Name), true);
mediafiles.Add(new KeyValuePair<FileInfo, EMedia>(mediafile, media.MediaType));
}
}
}
Assert.IsTrue(mediafiles.Count > 0, "No Media file found in sample LM.");
//add the Media files to a new card
ICard newcard = target.Cards.AddNew();
newcardid = newcard.Id;
foreach (KeyValuePair<FileInfo, EMedia> mediafile in mediafiles)
{
IMedia media = newcard.CreateMedia(mediafile.Value, mediafile.Key.FullName, true, true, false);
if (GetRandBool()) //both ways must work
newcard.AddMedia(media, Side.Question);
else
newcard.QuestionMedia.Add(media);
newmediacount++;
}
//AddMedia has changed the current directory
Environment.CurrentDirectory = workingDirectory;
target.Save();
}
finally
{
if (tempfolder != null && tempfolder.Exists)
tempfolder.Delete(true);
}
}
//reopen file
using (IDictionary target = user.Open())
{
ICard card = target.Cards.Get(newcardid);
//Assert.IsTrue(card.QuestionMedia.Count == newmediacount && card.AnswerMedia.Count == 0, "Not all Media files were added properly to the card.");
//Can currently not be checked: Audio files get cleaned up (only the last one survives) [ML-1320]
int foundmediacount = 0;
foreach (IMedia media in card.QuestionMedia)
{
//check if file exists in Media directory
Assert.IsTrue(File.Exists(media.Filename), "Media file could not be found: " + media.Filename);
foundmediacount++;
}
Assert.IsTrue(newmediacount == 0 || foundmediacount > 0, "There were Media objects added, but could not be found afterwards.");
target.Cards.Delete(newcardid);
target.Save();
}
}
/// <summary>
/// Tests the box size change functions: box sizes must only change when the
/// moved card's chapter is part of the selected learn chapters.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-04</remarks>
[TestMethod()]
public void BoxSizesTest()
{
using (IDictionary target = user.Open())
{
//add new chapter, add new card in chapter
IChapter chapter = target.Chapters.AddNew();
chapter.Title = "TestChapter";
ICard card = target.Cards.AddNew();
int poolsize = target.Boxes.Box[0].Size;
int boxsize = target.Boxes.Box[5].Size;
card.Chapter = chapter.Id;
Assert.AreEqual(0, card.Box, "Newly created card is not in pool.");
Assert.AreEqual(true, card.Active, "Newly created card is not active.");
card.Box = 5;
Assert.AreEqual(poolsize - 1, target.Boxes.Box[0].Size, "Card was not removed from pool.");
Assert.AreEqual(boxsize, target.Boxes.Box[5].Size, "Box size did change, although moved card is not in querychapters."); //[ML-1321]
target.DefaultSettings.SelectedLearnChapters.Add(chapter.Id);
Assert.AreEqual(boxsize + 1, target.Boxes.Box[5].Size, "Box size did not change, although moved card is in querychapters.");
target.DefaultSettings.SelectedLearnChapters.Remove(chapter.Id);
Assert.AreEqual(boxsize, target.Boxes.Box[5].Size, "Box size did change during deactivation, although moved card is not in querychapters.");
Assert.AreEqual(poolsize - 1, target.Boxes.Box[0].Size, "Poolsize did increase during activation, although activated card is not in querychapters.");
// Cleanup: remove the temporary card (no Save, so nothing persists).
target.Cards.Delete(card.Id);
}
}
/// <summary>
/// Tests chapter adding and removing: adds a random number of chapters,
/// saves, reopens to verify, then deletes them again and re-verifies.
/// </summary>
/// <remarks>Documented by Dev02, 2008-06-04</remarks>
[TestMethod()]
public void ChaptersAddRemoveTest()
{
int existingchaptercount;
int addedchaptercount = random.Next(100);
using (IDictionary target = user.Open())
{
Assert.AreEqual(target.Chapters.Count, target.Chapters.Chapters.Count, "Chapter count must be equal.");
existingchaptercount = target.Chapters.Count;
for (int i = 0; i < addedchaptercount; i++)
{
IChapter chapter = null;
chapter = target.Chapters.AddNew();
// Titles encode the expected final index so the reopen pass can verify.
chapter.Title = (existingchaptercount + i).ToString();
Assert.IsTrue(chapter.Id >= 0, "New chapter did not have a valid ID.");
Assert.AreEqual(0, chapter.ActiveSize, "New chapter must have an active size of 0.");
Assert.AreEqual(0, chapter.Size, "New chapter must have a size of 0.");
}
Assert.AreEqual(target.Chapters.Count, target.Chapters.Chapters.Count, "Chapter count must be equal.");
target.Save();
}
//reopen LM
using (IDictionary target = user.Open())
{
Assert.AreEqual(target.Chapters.Count, target.Chapters.Chapters.Count, "Chapter count must be equal.");
Assert.AreEqual(existingchaptercount + addedchaptercount, target.Chapters.Count, "Chapter count missmatch - not all chapters were created properly.");
List<int> deleteIDs = new List<int>();
for (int i = 0; i < target.Chapters.Count; i++)
{
if (i >= existingchaptercount)
{
Assert.AreEqual(i.ToString(), target.Chapters.Chapters[i].Title, "Chapter title was not saved successfully.");
deleteIDs.Add(target.Chapters.Chapters[i].Id);
}
}
foreach (int deleteID in deleteIDs)
target.Chapters.Delete(deleteID);
target.Save();
}
//reopen LM
using (IDictionary target = user.Open())
{
Assert.AreEqual(existingchaptercount, target.Chapters.Count, "Chapter count missmatch - not all chapters were deleted properly.");
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using TopIndexMaker.Models;
namespace TopIndexMaker
{
public class TopIndexMaker {
/// <summary>
/// The base directory for the top-level index. Settable only from within this
/// class (e.g. via <see cref="Start"/>); external callers read it via the getter.
/// </summary>
public string baseDirectory {
get {
return _baseDirectory;
}
private set {
_baseDirectory = value;
}
}
// Backing field; defaults to the empty string, never null.
private string _baseDirectory = "";
/// <summary>
/// The page title for the top-level albums page. The parameterized constructor
/// defaults it to <see cref="DefaultTitle"/>; the blank constructor leaves it
/// unset (null) until assigned.
/// </summary>
public string baseTitle { get; set; }
/// <summary>
/// This is the default title to be used for any non-specified title.
/// </summary>
public const string DefaultTitle = "Albums";
/// <summary>
/// All albums found under <see cref="baseDirectory"/> (populated by enumerateAlbums).
/// </summary>
private List<Album> albums;
/// <summary>
/// All html which will actually appear in index.html will be placed here.
/// </summary>
private StringBuilder html;
/// <summary>
/// The template for HTML manipulation from which to work
/// </summary>
private string templateHTML;
/// <summary>
/// The template for album-level div html manipulation from which to work
/// </summary>
private string templateBodySnippet;
/// <summary>
/// Storage location for the style.css self-reference stream. Should probably be moved elsewhere.
/// </summary>
private Stream styleInStream;
/// <summary>
/// All program-level option flags are stored here.
/// </summary>
public enum Options {
// NOTE(review): the exact effect of each flag is implemented outside this
// view; here they only toggle the two booleans below.
preserveUnderscores,
preserveIndexLink
};
// Flag states, reset by resetInformation() and enabled from the Options
// passed to the parameterized constructor.
private bool isPreservingUnderscores;
private bool isPreservingIndexLinks;
/// <summary>
/// Default blank constructor: loads the embedded templates and resets all
/// mutable state. <see cref="baseDirectory"/> stays "" and
/// <see cref="baseTitle"/> stays null until assigned elsewhere.
/// </summary>
public TopIndexMaker() {
loadEmbeddedData();
resetInformation();
}
/// <summary>
/// Default constructor for general use: records the directory and title, loads
/// the embedded templates, resets state, then enables any requested options.
/// </summary>
/// <param name="baseDirectory">The base directory for index creation</param>
/// <param name="baseTitle">(optional) The title of the base album</param>
/// <param name="options">(optional) Array of any other specific options/flags</param>
public TopIndexMaker(string baseDirectory, string baseTitle = DefaultTitle, params Options[] options) {
    this.baseDirectory = baseDirectory;
    this.baseTitle = baseTitle;
    loadEmbeddedData();
    resetInformation();

    // resetInformation() cleared both flags, so only the flags requested via
    // 'options' are switched back on here.
    foreach (Options option in options) {
        if (option == Options.preserveUnderscores) {
            this.isPreservingUnderscores = true;
        } else if (option == Options.preserveIndexLink) {
            this.isPreservingIndexLinks = true;
        }
    }
}
/// <summary>
/// Initialization/reset routine: clears option flags, the album list, and the
/// HTML output buffer.
/// </summary>
private void resetInformation() {
    isPreservingUnderscores = false;
    isPreservingIndexLinks = false;
    albums = new List<Album>();
    html = new StringBuilder();
}
/// <summary>
/// Loads any embedded information/files from within the "Resources" folder which
/// are necessary for execution: the page template, the per-album body snippet,
/// and the stylesheet stream.
/// </summary>
private void loadEmbeddedData() {
    var assembly = typeof(TopIndexMaker).GetTypeInfo().Assembly;
    // (Removed an unused GetManifestResourceNames() call that was left over here.)
    templateHTML = readEmbeddedText(assembly, "ftopindexmaker.Resources.htmlTemplate.html");
    templateBodySnippet = readEmbeddedText(assembly, "ftopindexmaker.Resources.bodySnippet.html");
    // Kept open for the lifetime of this instance; copied to disk in outputAll().
    styleInStream = assembly.GetManifestResourceStream("ftopindexmaker.Resources.style.css");
}
/// <summary>
/// Reads an embedded resource as text. Throws if the resource name is unknown
/// (GetManifestResourceStream returns null and the StreamReader ctor throws).
/// </summary>
/// <param name="assembly">Assembly containing the embedded resource.</param>
/// <param name="resourceName">Fully-qualified manifest resource name.</param>
/// <returns>The resource contents as a string.</returns>
private static string readEmbeddedText(Assembly assembly, string resourceName) {
    using (Stream templateStream = assembly.GetManifestResourceStream(resourceName))
    {
        using (StreamReader reader = new StreamReader(templateStream))
        {
            return reader.ReadToEnd();
        }
    }
}
/// <summary>
/// Default external entry point for starting the actual input/output mechanism.
/// Supports additional runs by specifying the baseDirectory.
/// </summary>
/// <param name="baseDirectory">(optional) The base directory for index creation</param>
public void Start(string baseDirectory = null) {
    // A null/empty argument keeps whatever directory was configured previously.
    this.baseDirectory = string.IsNullOrEmpty(baseDirectory) ? this.baseDirectory : baseDirectory;
    enumerateAlbums();
    setupHTML();
    outputAll();
}
/// <summary>
/// Debug method, insert when necessary and use to check for problems.
/// Prints each album's 1-based position, title, and first image URL.
/// </summary>
private void debugAlbums() {
    for (int i = 0; i < albums.Count; i++) {
        Album album = albums[i];
        Console.WriteLine("Album #" + (i + 1).ToString() + ": " + album.Title + " :: " + album.FirstImageFileUrl);
    }
}
/// <summary>
/// Adds all valid albums found in the current baseDirectory to this.albums.
/// Errors (e.g. a missing directory) are reported to the console, not rethrown.
/// </summary>
private void enumerateAlbums() {
    try {
        foreach (var dir in Directory.EnumerateDirectories(baseDirectory)) {
            var album = getAlbum(dir);
            if (album == null) {
                continue;
            }
            albums.Add(album);
        }
    }
    catch (Exception ex) {
        Console.WriteLine("Error: " + ex.Message);
    }
}
/// <summary>
/// Returns an album if the directory is a valid album, otherwise returns null.
/// </summary>
/// <param name="dir">The directory from which to fetch the album</param>
/// <returns>The album, or null if invalid.</returns>
private Album getAlbum(string dir) {
    var candidate = new Album(dir, isPreservingUnderscores);
    return candidate.isValid ? candidate : null;
}
/// <summary>
/// Takes the actual albums and creates the index.html contents (stored in "html").
/// Fills {TITLE}, {BACKGROUND}, {NOISE} and {BODY} placeholders in the template.
/// </summary>
private void setupHTML() {
    html.Append(templateHTML);
    html.Replace("{TITLE}", baseTitle);
    if (albums.Count == 0) {
        // No albums found: blank the remaining placeholders instead of throwing
        // an out-of-range exception on albums[0] below.
        html.Replace("{BACKGROUND}", string.Empty);
        html.Replace("{NOISE}", string.Empty);
        html.Replace("{BODY}", string.Empty);
        return;
    }
    html.Replace("{BACKGROUND}", albums[0].BackgroundUrl);
    html.Replace("{NOISE}", albums[0].NoiseUrl);
    StringBuilder body = new StringBuilder();
    StringBuilder divBuilder = new StringBuilder();
    // Yes, we do the sort here - allows for the background image to be "randomly"
    // chosen through unsorted list indexing above.
    // Probably a good candidate for a user choice via params flag.
    albums = albums.OrderBy(d => d.Title).ToList();
    foreach (Album album in albums) {
        divBuilder.Clear();
        divBuilder.Append(templateBodySnippet);
        divBuilder.Replace("{LINK}", isPreservingIndexLinks ? album.Url + "index.html" : album.Url);
        divBuilder.Replace("{IMG}", album.FirstImageFileUrl);
        divBuilder.Replace("{TITLE}", album.Title);
        body.Append(divBuilder);
    }
    html.Replace("{BODY}", body.ToString());
}
/// <summary>
/// Outputs the final index.html to file, along with style.css, and wraps things up.
/// I/O errors are reported to the console, not rethrown.
/// </summary>
private void outputAll() {
    try {
        File.WriteAllText(Path.Combine(baseDirectory, "index.html"), html.ToString());
        using (var styleOutStream = File.Create(Path.Combine(baseDirectory, "style.css")))
        {
            // Rewind so repeat runs copy the stylesheet from the start.
            styleInStream.Seek(0, SeekOrigin.Begin);
            styleInStream.CopyTo(styleOutStream);
        }
        Console.WriteLine("Files written successfully! Check output folder: " + baseDirectory);
    }
    catch (Exception ex) {
        Console.WriteLine("Error: " + ex.Message);
    }
}
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Transformer.cs" company="Solidsoft Reply Ltd.">
// Copyright (c) 2015 Solidsoft Reply Limited. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace SolidsoftReply.Esb.Libraries.Resolution
{
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Xml;
using System.Xml.Xsl;
using SolidsoftReply.Esb.Libraries.Resolution.Properties;
/// <summary>
/// Transform helper class. Loads a compiled map class by class name and assembly
/// strong name, extracts its XSLT and extension objects via reflection, and
/// applies it to one or two XML messages.
/// </summary>
[Serializable]
public class Transformer
{
    /// <summary>
    /// Cache of strong names of schemas.
    /// </summary>
    internal static readonly SchemaStrongNameCache SchemaStrongNameCache = new SchemaStrongNameCache();

    /// <summary>
    /// Apply a map to an aggregation of two XML messages.
    /// </summary>
    /// <param name="mapFullName">Map full name (Class, strong name)</param>
    /// <param name="messageIn1">The first XML message to be transformed</param>
    /// <param name="messageIn2">The second XML message to be transformed</param>
    /// <returns>The transformed XML document</returns>
    public static TransformResults Transform(string mapFullName, XmlDocument messageIn1, XmlDocument messageIn2)
    {
        // Wrap both documents in the aggregation root element expected by
        // two-input maps, then delegate to the single-message overload.
        var merge = new XmlDocument();
        var xml = new StringBuilder();
        xml.Append(string.Format("<r:Root xmlns:r='{0}'>", Resources.UriAggSchema));
        xml.Append("<InputMessagePart_0>");
        if (messageIn1.DocumentElement != null)
        {
            xml.Append(messageIn1.DocumentElement.OuterXml);
        }
        xml.Append("</InputMessagePart_0>");
        xml.Append("<InputMessagePart_1>");
        if (messageIn2.DocumentElement != null)
        {
            xml.Append(messageIn2.DocumentElement.OuterXml);
        }
        xml.Append("</InputMessagePart_1>");
        xml.Append("</r:Root>");
        merge.LoadXml(xml.ToString());
        return Transform(mapFullName, merge);
    }

    /// <summary>
    /// Apply a map to an XML message.
    /// </summary>
    /// <param name="mapFullName">Map full name (Class, strong name)</param>
    /// <param name="messageIn">XML message to be transformed</param>
    /// <returns>The transformed XML document</returns>
    /// <exception cref="ArgumentNullException">Thrown when messageIn is null.</exception>
    /// <exception cref="ArgumentException">Thrown when mapFullName has no comma separator.</exception>
    public static TransformResults Transform(string mapFullName, XmlDocument messageIn)
    {
        // An absent map name is treated as "no transformation required".
        if (string.IsNullOrWhiteSpace(mapFullName))
        {
            return new TransformResults();
        }

        // Check parameters before any use. (A second, unreachable null/empty
        // check of mapFullName was removed: IsNullOrWhiteSpace above already
        // guarantees it is non-empty at this point.)
        if (messageIn == null)
        {
            throw new ArgumentNullException("messageIn");
        }

        var msgOut = new XmlDocument();
        StringWriter writer = null;

        Debug.Write("[Resolver] Transform using the map in " + mapFullName);

        // Extract the class name and the strong name from the MapFullName
        var className = mapFullName.Split(',')[0];
        var pos = mapFullName.IndexOf(',');

        if (pos == -1)
        {
            throw new ArgumentException(string.Format(Resources.ExceptionMapFullName, mapFullName));
        }

        var strongName = mapFullName.Substring(pos);
        strongName = strongName.Trim();

        // Ordinal comparison: the separator is syntactic, not linguistic.
        if (strongName.StartsWith(",", StringComparison.Ordinal))
        {
            strongName = strongName.Substring(1);
        }

        strongName = strongName.Trim();
        TransformResults transformResults;

        try
        {
            // Load the map
            var mapAssembly = Assembly.Load(strongName);
            var map = mapAssembly.CreateInstance(className);

            if (map == null)
            {
                throw new EsbResolutionException(
                    string.Format(Resources.ExceptionMapClassInvalid, className));
            }

            // Extract the xslt
            var xmlContentProp = map.GetType().GetProperty("XmlContent");
            var xsl = xmlContentProp.GetValue(map, null);

            // Extract xsl and extension objects
            var xsltArgumentsProp = map.GetType().GetProperty("XsltArgumentListContent");
            var xsltArguments = xsltArgumentsProp.GetValue(map, null);

            // Extract source schemas
            var sourceSchemasProp = map.GetType().GetProperty("SourceSchemas");
            var sourceSchemas = (string[])sourceSchemasProp.GetValue(map, null);

            // Extract target schemas
            var targetSchemasProp = map.GetType().GetProperty("TargetSchemas");
            var targetSchemas = (string[])targetSchemasProp.GetValue(map, null);

            // Load all the external assemblies declared as extension objects
            var xmlExtension = new XmlDocument();
            var xslArgList = new XsltArgumentList();

            if (xsltArguments != null)
            {
                // Load the argument list and create all the needed instances
                xmlExtension.LoadXml(xsltArguments.ToString());
                var xmlExtensionNodes = xmlExtension.SelectNodes(Resources.XPathExtensionObject);

                if (xmlExtensionNodes != null)
                {
                    foreach (XmlNode extObjNode in xmlExtensionNodes)
                    {
                        var extAttributes = extObjNode.Attributes;

                        if (extAttributes == null)
                        {
                            continue;
                        }

                        var namespaceNode = extAttributes.GetNamedItem("Namespace");
                        var assemblyNode = extAttributes.GetNamedItem("AssemblyName");
                        var classNode = extAttributes.GetNamedItem("ClassName");
                        var extAssembly = Assembly.Load(assemblyNode.InnerText);
                        var extObj = extAssembly.CreateInstance(classNode.InnerText);

                        if (extObj != null)
                        {
                            xslArgList.AddExtensionObject(namespaceNode.InnerText, extObj);
                        }
                    }
                }
            }

            // Apply xsl to msg in. Scripting and the document() function are
            // enabled, matching the capabilities BizTalk-style maps rely on.
            var xslDoc = new XmlDocument();
            xslDoc.LoadXml(xsl.ToString());
            var settings = new XsltSettings(true, true);
            var xlsTrans = new XslCompiledTransform();
            xlsTrans.Load(xslDoc, settings, new XmlUrlResolver());
            writer = new StringWriter();

            if (messageIn.DocumentElement != null)
            {
                xlsTrans.Transform(new XmlNodeReader(messageIn.DocumentElement), xslArgList, writer);
            }

            writer.Flush();

            try
            {
                // Return the msg out
                msgOut.LoadXml(writer.ToString());
            }
            catch (Exception)
            {
                // Log the error here with useful information! If the map fails (e.g., the wrong
                // map is configured in the Service Mediation policy) the Load may fail with an unhelpful
                // 'Root not missing' error. We need to log an additional error here that records what
                // map was being applied.
                var inMessageType = messageIn.DocumentElement == null
                    ? "<source message is empty>"
                    : string.Format(
                        "{0}#{1}",
                        messageIn.DocumentElement.NamespaceURI,
                        messageIn.DocumentElement.LocalName);
                var message = string.Format(
                    "A transformation failed for map {0} and message of type {1}. Is the correct map configured in the ESB service mediation policy?",
                    mapFullName,
                    inMessageType);
                EventLog.WriteEntry("Application", message, EventLogEntryType.Error, 3);
                throw;
            }

            transformResults = new TransformResults(messageIn, msgOut, xslDoc, xslArgList, sourceSchemas, targetSchemas, (from schemaName in targetSchemas select SchemaStrongNameCache.GetSchemaStrongName(map.GetType(), schemaName)).ToList());
        }
        finally
        {
            if (writer != null)
            {
                writer.Close();
            }
        }

        return transformResults;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Runtime.CompilerServices;
using Loon.Utils;
using Loon.Core.Graphics.Opengl;
namespace Loon.Core.Graphics
{
/// <summary>
/// Base class for UI components that hold child components. Keeps the children
/// in a layer-sorted array, forwards update/render/validation calls to them and
/// handles focus traversal between them.
/// </summary>
public abstract class LContainer : LComponent
{
/// <summary>
/// Default child comparator: orders components by layer value, highest first.
/// </summary>
public sealed class InnerComponent : IComparer<LComponent>
{
public int Compare(LComponent o1, LComponent o2)
{
// Descending order by GetLayer().
return o2.GetLayer() - o1.GetLayer();
}
}
private static readonly IComparer<LComponent> DEFAULT_COMPARATOR = new InnerComponent();
// Lock flag exposed to subclasses; not read within this class.
protected internal bool locked;
private IComparer<LComponent> comparator = LContainer.DEFAULT_COMPARATOR;
// Child storage; live entries occupy indices [0, childCount).
private LComponent[] childs = new LComponent[0];
private int childCount = 0;
// The component most recently added (see Get()).
private LComponent latestInserted = null;
public LContainer(int x, int y, int w, int h): base(x, y, w, h)
{
// Containers themselves do not take focus; their children do.
this.SetFocusable(false);
}
public override bool IsContainer()
{
return true;
}
/// <summary>
/// Adds a component (no-op if already present): reparents it to this container,
/// grows the child array placing it in slot 0, registers it with the desktop
/// and re-sorts the children by layer.
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public void Add(LComponent comp)
{
if (this.Contains(comp))
{
return;
}
if (comp.GetContainer() != null)
{
comp.SetContainer(null);
}
comp.SetContainer(this);
// Expand with the new empty slot at the front, then fill it.
this.childs = (LComponent[])CollectionUtils.Expand(this.childs, 1,
false);
this.childs[0] = comp;
this.childCount++;
this.desktop.SetDesktop(comp);
this.SortComponents();
this.latestInserted = comp;
}
/// <summary>
/// Inserts a component at the given index. Unlike Add(LComponent), a component
/// that already has a container triggers an exception instead of a reparent.
/// NOTE(review): index is not range-checked; out-of-range values will throw
/// from the array accesses below.
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public void Add(LComponent comp, int index)
{
if (comp.GetContainer() != null)
{
throw new InvalidOperationException(comp
+ " already reside in another container!!!");
}
comp.SetContainer(this);
LComponent[] newChilds = new LComponent[this.childs.Length + 1];
this.childCount++;
// Copy existing children into every slot except the insertion index.
int ctr = 0;
for (int i = 0; i < this.childCount; i++)
{
if (i != index)
{
newChilds[i] = this.childs[ctr];
ctr++;
}
}
this.childs = newChilds;
this.childs[index] = comp;
this.desktop.SetDesktop(comp);
this.SortComponents();
this.latestInserted = comp;
}
/// <summary>
/// True if the component is currently one of this container's children.
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public bool Contains(LComponent comp)
{
if (comp == null)
{
return false;
}
if (childs == null)
{
return false;
}
for (int i = 0; i < this.childCount; i++)
{
if (childs[i] != null && comp.Equals(childs[i]))
{
return true;
}
}
return false;
}
/// <summary>
/// Removes the given component; returns its former index, or -1 if not found.
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public int Remove(LComponent comp)
{
for (int i = 0; i < this.childCount; i++)
{
if (this.childs[i] == comp)
{
this.Remove(i);
return i;
}
}
return -1;
}
/// <summary>
/// Removes every child that is (or derives from) the given type; returns the
/// number removed. Iterates backwards so removals do not shift unvisited entries.
/// </summary>
public int Remove(Type clazz)
{
if (clazz == null)
{
return -1;
}
int count = 0;
for (int i = childCount; i > 0; i--)
{
int index = i - 1;
LComponent comp = this.childs[index];
Type cls = comp.GetType();
// NOTE(review): the clazz == null disjunct is dead after the early
// return above; the remaining checks match exact type or subtype.
if (clazz == null || (object)clazz == (object)cls || clazz.IsInstanceOfType(comp)
|| clazz.Equals(cls))
{
this.Remove(index);
count++;
}
}
return count;
}
/// <summary>
/// Removes and returns the child at the given index, unregistering it from the
/// desktop and detaching it from this container.
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public LComponent Remove(int index)
{
LComponent comp = this.childs[index];
this.desktop.SetComponentStat(comp, false);
comp.SetContainer(null);
// comp.dispose();
this.childs = (LComponent[])CollectionUtils.Cut(this.childs, index);
this.childCount--;
return comp;
}
/// <summary>
/// Detaches all children and empties the child array (children are not disposed).
/// </summary>
public void Clear()
{
this.desktop.ClearComponentsStat(this.childs);
for (int i = 0; i < this.childCount; i++)
{
this.childs[i].SetContainer(null);
// this.childs[i].dispose();
}
this.childs = new LComponent[0];
this.childCount = 0;
}
/// <summary>
/// Replaces oldComp with newComp at the same index.
/// NOTE(review): if oldComp is absent, Remove returns -1 and the Add below
/// receives index -1 -- confirm callers guarantee membership.
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public void Replace(LComponent oldComp, LComponent newComp)
{
int index = this.Remove(oldComp);
this.Add(newComp, index);
}
/// <summary>
/// Updates this container and all children. Holds a lock on the childs array
/// while iterating.
/// </summary>
public override void Update(long timer)
{
if (isClose)
{
return;
}
if (!this.IsVisible())
{
return;
}
lock (childs)
{
base.Update(timer);
LComponent component;
for (int i = 0; i < this.childCount; i++)
{
component = childs[i];
if (component != null)
{
component.Update(timer);
}
}
}
}
/// <summary>
/// Revalidates positions of this container and all children; turns on elastic
/// clipping (see SetElastic) when any child lies outside the container bounds.
/// </summary>
public override void ValidatePosition()
{
if (isClose)
{
return;
}
base.ValidatePosition();
for (int i = 0; i < this.childCount; i++)
{
this.childs[i].ValidatePosition();
}
if (!this.elastic)
{
for (int i_0 = 0; i_0 < this.childCount; i_0++)
{
if (this.childs[i_0].GetX() > this.GetWidth()
|| this.childs[i_0].GetY() > this.GetHeight()
|| this.childs[i_0].GetX() + this.childs[i_0].GetWidth() < 0
|| this.childs[i_0].GetY() + this.childs[i_0].GetHeight() < 0)
{
SetElastic(true);
break;
}
}
}
}
protected internal override void ValidateSize()
{
base.ValidateSize();
for (int i = 0; i < this.childCount; i++)
{
this.childs[i].ValidateSize();
}
}
/// <summary>
/// Renders this container and its children, clipping to the container bounds
/// when elastic mode is active. Holds a lock on the childs array while drawing.
/// </summary>
public override void CreateUI(GLEx g)
{
if (isClose)
{
return;
}
if (!this.IsVisible())
{
return;
}
lock (childs)
{
base.CreateUI(g);
if (this.elastic)
{
g.SetClip(this.GetScreenX(), this.GetScreenY(),
this.GetWidth(), this.GetHeight());
}
this.RenderComponents(g);
if (this.elastic)
{
g.ClearClip();
}
}
}
// Draws children from the last index to the first, so index 0 (the front of
// the sorted array) is painted last, on top.
protected internal void RenderComponents(GLEx g)
{
for (int i = this.childCount - 1; i >= 0; i--)
{
this.childs[i].CreateUI(g);
}
}
/// <summary>
/// Moves the component to slot 0 (the front) and re-sorts.
/// </summary>
public void SendToFront(LComponent comp)
{
if (this.childCount <= 1 || this.childs[0] == comp)
{
return;
}
// (Redundant with the guard above; kept as-is.)
if (childs[0] == comp)
{
return;
}
for (int i = 0; i < this.childCount; i++)
{
if (this.childs[i] == comp)
{
this.childs = (LComponent[])CollectionUtils
.Cut(this.childs, i);
this.childs = (LComponent[])CollectionUtils.Expand(
this.childs, 1, false);
this.childs[0] = comp;
this.SortComponents();
break;
}
}
}
/// <summary>
/// Moves the component to the last slot (the back) and re-sorts.
/// </summary>
public void SendToBack(LComponent comp)
{
if (this.childCount <= 1 || this.childs[this.childCount - 1] == comp)
{
return;
}
// (Redundant with the guard above; kept as-is.)
if (childs[this.childCount - 1] == comp)
{
return;
}
for (int i = 0; i < this.childCount; i++)
{
if (this.childs[i] == comp)
{
this.childs = (LComponent[])CollectionUtils
.Cut(this.childs, i);
this.childs = (LComponent[])CollectionUtils.Expand(
this.childs, 1, true);
this.childs[this.childCount - 1] = comp;
this.SortComponents();
break;
}
}
}
/// <summary>
/// Sorts the child array with the current comparator (layer order by default).
/// </summary>
public void SortComponents()
{
Array.Sort(this.childs, this.comparator);
}
/// <summary>
/// Moves focus backwards (towards lower indices, with wraparound) from the
/// given component until some child accepts focus or a full cycle completes.
/// </summary>
protected internal void TransferFocus(LComponent component)
{
for (int i = 0; i < this.childCount; i++)
{
if (component == this.childs[i])
{
int j = i;
do
{
if (--i < 0)
{
i = this.childCount - 1;
}
if (i == j)
{
return;
}
} while (!this.childs[i].RequestFocus());
break;
}
}
}
/// <summary>
/// Moves focus forwards (towards higher indices, with wraparound) from the
/// given component until some child accepts focus or a full cycle completes.
/// </summary>
protected internal void TransferFocusBackward(LComponent component)
{
for (int i = 0; i < this.childCount; i++)
{
if (component == this.childs[i])
{
int j = i;
do
{
if (++i >= this.childCount)
{
i = 0;
}
if (i == j)
{
return;
}
} while (!this.childs[i].RequestFocus());
break;
}
}
}
/// <summary>
/// True if this container or any child is selected.
/// </summary>
public override bool IsSelected()
{
if (!base.IsSelected())
{
for (int i = 0; i < this.childCount; i++)
{
if (this.childs[i].IsSelected())
{
return true;
}
}
return false;
}
else
{
return true;
}
}
public bool IsElastic()
{
return this.elastic;
}
/// <summary>
/// Enables/disables elastic clipping. Only containers larger than 128 in width
/// or height may enable it; smaller ones are forced to false.
/// </summary>
public void SetElastic(bool b)
{
if (GetWidth() > 128 || GetHeight() > 128)
{
this.elastic = b;
}
else
{
this.elastic = false;
}
}
public IComparer<LComponent> GetComparator()
{
return this.comparator;
}
/// <summary>
/// Replaces the child comparator and immediately re-sorts. Null is rejected.
/// </summary>
public void SetComparator(IComparer<LComponent> c)
{
if (c == null)
{
throw new NullReferenceException("Comparator can not null !");
}
this.comparator = c;
this.SortComponents();
}
/// <summary>
/// Returns the deepest component containing the point, recursing into child
/// containers; returns this container if no child matches, or null if the
/// point is outside this container entirely.
/// </summary>
public LComponent FindComponent(int x1, int y1)
{
if (!this.Intersects(x1, y1))
{
return null;
}
for (int i = 0; i < this.childCount; i++)
{
if (this.childs[i].Intersects(x1, y1))
{
LComponent comp = (!this.childs[i].IsContainer()) ? this.childs[i]
: ((LContainer)this.childs[i]).FindComponent(x1, y1);
return comp;
}
}
return this;
}
public int GetComponentCount()
{
return this.childCount;
}
// Returns the backing array directly; callers must not mutate it.
public LComponent[] GetComponents()
{
return this.childs;
}
/// <summary>
/// Returns the most recently added component.
/// </summary>
public LComponent Get()
{
return this.latestInserted;
}
/// <summary>
/// Disposes this container and, when autoDestroy is set, all children too.
/// </summary>
public override void Dispose()
{
base.Dispose();
if (autoDestroy)
{
if (childs != null)
{
foreach (LComponent c in childs)
{
if (c != null)
{
c.Dispose();
}
}
}
}
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.InteropServices;
using ALinq.Mapping;
using ALinq.SqlClient;
namespace ALinq
{
/// <summary>
/// Tracks object identity per meta-type so that materialization can reuse a
/// single CLR instance per primary key. Create instances via CreateIdentityManager.
/// </summary>
internal abstract class IdentityManager
{
// Methods
/// <summary>
/// Returns a no-op manager for read-only use, otherwise a caching one.
/// </summary>
internal static IdentityManager CreateIdentityManager(bool asReadOnly)
{
if (asReadOnly)
{
return new ReadOnlyIdentityManager();
}
return new StandardIdentityManager();
}
internal abstract object Find(MetaType type, object[] keyValues);
internal abstract object FindLike(MetaType type, object instance);
internal abstract object InsertLookup(MetaType type, object instance);
internal abstract bool RemoveLike(MetaType type, object instance);
// Nested Types
/// <summary>
/// No-op identity manager: nothing is cached; Find/FindLike return null and
/// InsertLookup echoes the instance back unchanged.
/// </summary>
private class ReadOnlyIdentityManager : IdentityManager
{
// Methods
internal override object Find(MetaType type, object[] keyValues)
{
return null;
}
internal override object FindLike(MetaType type, object instance)
{
return null;
}
internal override object InsertLookup(MetaType type, object instance)
{
return instance;
}
internal override bool RemoveLike(MetaType type, object instance)
{
return false;
}
}
/// <summary>
/// Standard identity manager: keeps one IdentityCache per inheritance-root
/// meta-type, with a one-entry memo (currentType/currentCache) to avoid
/// repeated dictionary lookups for consecutive calls on the same type.
/// </summary>
private class StandardIdentityManager : IdentityManager
{
// Fields
private readonly Dictionary<MetaType, IdentityCache> caches;
private IdentityCache currentCache;
private MetaType currentType;
public StandardIdentityManager()
{
caches = new Dictionary<MetaType, IdentityCache>();
}
// Methods
internal override object Find(MetaType type, object[] keyValues)
{
SetCurrent(type);
return currentCache.Find(keyValues);
}
internal override object FindLike(MetaType type, object instance)
{
SetCurrent(type);
return currentCache.FindLike(instance);
}
/// <summary>
/// Builds a key-manager chain over the type's identity members: a
/// SingleKeyManager for member 0, then a MultiKeyManager wrapped around it
/// for each additional member. Created reflectively because the member
/// types are only known at runtime.
/// </summary>
private static KeyManager GetKeyManager(MetaType type)
{
int count = type.IdentityMembers.Count;
MetaDataMember member = type.IdentityMembers[0];
var manager = (KeyManager)Activator.CreateInstance(typeof(SingleKeyManager<,>).MakeGenericType(new[] { type.Type, member.Type }), BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance, null, new object[] { member.StorageAccessor, 0 }, null);
for (int i = 1; i < count; i++)
{
member = type.IdentityMembers[i];
manager = (KeyManager)Activator.CreateInstance(typeof(MultiKeyManager<,,>).MakeGenericType(new[] { type.Type, member.Type, manager.KeyType }), BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance, null, new object[] { member.StorageAccessor, i, manager }, null);
}
return manager;
}
internal override object InsertLookup(MetaType type, object instance)
{
SetCurrent(type);
return currentCache.InsertLookup(instance);
}
internal override bool RemoveLike(MetaType type, object instance)
{
SetCurrent(type);
return currentCache.RemoveLike(instance);
}
/// <summary>
/// Points currentCache at the cache for the type's inheritance root,
/// creating the cache (reflectively, parameterized by the key manager's
/// key type) on first use.
/// </summary>
private void SetCurrent(MetaType type)
{
type = type.InheritanceRoot;
if (currentType != type)
{
if (!caches.TryGetValue(type, out currentCache))
{
KeyManager keyManager = GetKeyManager(type);
currentCache = (IdentityCache)Activator.CreateInstance(typeof(IdentityCache<,>).MakeGenericType(new[] { type.Type, keyManager.KeyType }), BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance, null, new object[] { keyManager }, null);
caches.Add(type, currentCache);
}
currentType = type;
}
}
// Nested Types
/// <summary>
/// Non-generic cache contract used by StandardIdentityManager.
/// </summary>
internal abstract class IdentityCache
{
// Methods
internal abstract object Find(object[] keyValues);
internal abstract object FindLike(object instance);
internal abstract object InsertLookup(object instance);
internal abstract bool RemoveLike(object instance);
}
/// <summary>
/// Hand-rolled hash table mapping key K to instance T. buckets holds 1-based
/// slot indices (0 means empty); slots chain through their next field; freed
/// slots are recycled via freeList.
/// </summary>
internal class IdentityCache<T, K> : IdentityCache
{
// Fields
private int[] buckets;
private readonly IEqualityComparer<K> comparer;
private int count;
private int freeList;
private readonly KeyManager<T, K> keyManager;
//private Slot<T, K>[] slots;
private Slot[] slots;
// Methods
public IdentityCache(KeyManager<T, K> keyManager)
{
this.keyManager = keyManager;
comparer = keyManager.Comparer;
buckets = new int[7];
//slots = new Slot<T, K>[7];
slots = new Slot[7];
// -1 marks an empty free list.
freeList = -1;
}
internal override object Find(object[] keyValues)
{
K local;
if (keyManager.TryCreateKeyFromValues(keyValues, out local))
{
T local2 = default(T);
if (Find(local, ref local2, false))
{
return local2;
}
}
return null;
}
/// <summary>
/// Looks up key; on a hit copies the cached instance into value and returns
/// true. When add is set and the key is absent, inserts value into a free or
/// new slot. Note: returns false even when an entry was just added
/// (insert-if-absent semantics).
/// </summary>
private bool Find(K key, ref T value, bool add)
{
// Mask the hash to a non-negative value before bucketing.
int num = comparer.GetHashCode(key) & 0x7fffffff;
for (int i = buckets[num % buckets.Length] - 1; i >= 0; i = slots[i].next)
{
if ((slots[i].hashCode == num) && comparer.Equals(slots[i].key, key))
{
value = slots[i].value;
return true;
}
}
if (add)
{
int tmpFreeList;
if (this.freeList >= 0)
{
// Reuse a previously-removed slot.
tmpFreeList = this.freeList;
this.freeList = this.slots[tmpFreeList].next;
}
else
{
if (this.count == this.slots.Length)
{
this.Resize();
}
tmpFreeList = this.count;
this.count++;
}
// Link the slot at the head of its bucket's chain.
int index = num % this.buckets.Length;
this.slots[tmpFreeList].hashCode = num;
this.slots[tmpFreeList].key = key;
this.slots[tmpFreeList].value = value;
this.slots[tmpFreeList].next = this.buckets[index] - 1;
this.buckets[index] = tmpFreeList + 1;
}
return false;
}
internal override object FindLike(object instance)
{
T local = (T)instance;
K key = this.keyManager.CreateKeyFromInstance(local);
if (this.Find(key, ref local, false))
{
return local;
}
return null;
}
// Returns the already-cached instance for the same key, or inserts and
// returns the given instance.
internal override object InsertLookup(object instance)
{
T local = (T)instance;
K key = keyManager.CreateKeyFromInstance(local);
Find(key, ref local, true);
return local;
}
/// <summary>
/// Removes the entry whose key matches the given instance's key: unlinks
/// the slot from its bucket chain and pushes it onto the free list.
/// </summary>
internal override bool RemoveLike(object instance)
{
T local = (T)instance;
K local2 = keyManager.CreateKeyFromInstance(local);
int num = comparer.GetHashCode(local2) & 0x7fffffff;
int index = num % this.buckets.Length;
// num3 tracks the previous slot in the chain (-1 = head).
int num3 = -1;
for (int i = this.buckets[index] - 1; i >= 0; i = this.slots[i].next)
{
if ((this.slots[i].hashCode == num) && this.comparer.Equals(this.slots[i].key, local2))
{
if (num3 < 0)
{
this.buckets[index] = this.slots[i].next + 1;
}
else
{
this.slots[num3].next = this.slots[i].next;
}
this.slots[i].hashCode = -1;
this.slots[i].value = default(T);
this.slots[i].next = this.freeList;
this.freeList = i;
return true;
}
num3 = i;
}
return false;
}
// Grows storage to (count * 2) + 1 and rehashes every occupied slot.
private void Resize()
{
int num = (this.count * 2) + 1;
var numArray = new int[num];
//Slot<T, K>[] destinationArray = new Slot<T, K>[num];
var destinationArray = new Slot[num];
Array.Copy(this.slots, 0, destinationArray, 0, this.count);
for (int i = 0; i < this.count; i++)
{
int index = destinationArray[i].hashCode % num;
destinationArray[i].next = numArray[index] - 1;
numArray[index] = i + 1;
}
buckets = numArray;
slots = destinationArray;
}
// Nested Types
// One hash-table entry: cached hash, key, value and 0-based chain link.
[StructLayout(LayoutKind.Sequential)]
internal struct Slot
{
internal int hashCode;
internal K key;
internal T value;
internal int next;
}
}
/// <summary>
/// Non-generic base so key managers of differing key types can be chained.
/// </summary>
internal abstract class KeyManager
{
// Methods
protected KeyManager()
{
}
// Properties
internal abstract Type KeyType { get; }
}
/// <summary>
/// Extracts a key of type K from an instance of T, or from a raw value array.
/// </summary>
internal abstract class KeyManager<T, K> : IdentityManager.StandardIdentityManager.KeyManager
{
// Methods
protected KeyManager()
{
}
internal abstract K CreateKeyFromInstance(T instance);
internal abstract bool TryCreateKeyFromValues(object[] values, out K k);
// Properties
internal abstract IEqualityComparer<K> Comparer { get; }
}
/// <summary>
/// A composite key pairing one member value with the remainder of the key.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
internal struct MultiKey<T1, T2>
{
private readonly T1 value1;
private readonly T2 value2;
internal MultiKey(T1 value1, T2 value2)
{
this.value1 = value1;
this.value2 = value2;
}
// Nested Types
// Combines per-component comparers for equality and hashing.
internal class Comparer : IEqualityComparer<MultiKey<T1, T2>>, IEqualityComparer
{
// Fields
private readonly IEqualityComparer<T1> comparer1;
private readonly IEqualityComparer<T2> comparer2;
// Methods
internal Comparer(IEqualityComparer<T1> comparer1, IEqualityComparer<T2> comparer2)
{
this.comparer1 = comparer1;
this.comparer2 = comparer2;
}
public bool Equals(MultiKey<T1, T2> x, MultiKey<T1, T2> y)
{
return (this.comparer1.Equals(x.value1, y.value1) && this.comparer2.Equals(x.value2, y.value2));
}
public int GetHashCode(MultiKey<T1, T2> x)
{
return (this.comparer1.GetHashCode(x.value1) ^ comparer2.GetHashCode(x.value2));
}
bool System.Collections.IEqualityComparer.Equals(object x, object y)
{
return this.Equals((MultiKey<T1, T2>)x, (MultiKey<T1, T2>)y);
}
int IEqualityComparer.GetHashCode(object x)
{
return this.GetHashCode((MultiKey<T1, T2>)x);
}
}
}
/// <summary>
/// Key manager for composite keys: reads member V1 at the given offset and
/// delegates the rest of the key to the chained next manager.
/// </summary>
internal class MultiKeyManager<T, V1, V2> : KeyManager<T, MultiKey<V1, V2>>
{
// Fields
private readonly MetaAccessor<T, V1> accessor;
private IEqualityComparer<MultiKey<V1, V2>> comparer;
private readonly KeyManager<T, V2> next;
private readonly int offset;
// Methods
internal MultiKeyManager(MetaAccessor<T, V1> accessor, int offset, IdentityManager.StandardIdentityManager.KeyManager<T, V2> next)
{
this.accessor = accessor;
this.next = next;
this.offset = offset;
}
internal override MultiKey<V1, V2> CreateKeyFromInstance(T instance)
{
return new MultiKey<V1, V2>(this.accessor.GetValue(instance), this.next.CreateKeyFromInstance(instance));
}
// Fails (returns false) when a value-typed component is null or when any
// chained component fails.
internal override bool TryCreateKeyFromValues(object[] values, out MultiKey<V1, V2> k)
{
V2 local;
object obj2 = values[this.offset];
if ((obj2 == null) && typeof(V1).IsValueType)
{
k = new MultiKey<V1, V2>();
return false;
}
if (!this.next.TryCreateKeyFromValues(values, out local))
{
k = new MultiKey<V1, V2>();
return false;
}
k = new MultiKey<V1, V2>((V1)obj2, local);
return true;
}
// Properties
internal override IEqualityComparer<MultiKey<V1, V2>> Comparer
{
get
{
// Lazily composed from the component comparers.
if (this.comparer == null)
{
this.comparer = new MultiKey<V1, V2>.Comparer(EqualityComparer<V1>.Default, this.next.Comparer);
}
return this.comparer;
}
}
internal override Type KeyType
{
get
{
return typeof(MultiKey<V1, V2>);
}
}
}
/// <summary>
/// Key manager for a single-member key read via a MetaAccessor.
/// </summary>
internal class SingleKeyManager<T, V> : KeyManager<T, V>
{
// Fields
private readonly MetaAccessor<T, V> accessor;
private IEqualityComparer<V> comparer;
private readonly bool isKeyNullAssignable;
private readonly int offset;
// Methods
internal SingleKeyManager(MetaAccessor<T, V> accessor, int offset)
{
this.accessor = accessor;
this.offset = offset;
this.isKeyNullAssignable = TypeSystem.IsNullAssignable(typeof(V));
}
internal override V CreateKeyFromInstance(T instance)
{
return this.accessor.GetValue(instance);
}
// Fails (returns false) when the raw value is null but V cannot hold null.
internal override bool TryCreateKeyFromValues(object[] values, out V v)
{
object obj2 = values[this.offset];
if ((obj2 == null) && !this.isKeyNullAssignable)
{
v = default(V);
return false;
}
v = (V)obj2;
return true;
}
// Properties
internal override IEqualityComparer<V> Comparer
{
get
{
if (this.comparer == null)
{
this.comparer = EqualityComparer<V>.Default;
}
return this.comparer;
}
}
internal override Type KeyType
{
get
{
return typeof(V);
}
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using Keen.Core;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Serialization;
namespace Keen.Dataset
{
/// <summary>
/// Datasets implements the IDataset interface which represents the Keen.IO Cached Datasets
/// API methods.
/// </summary>
internal class Datasets : IDataset
{
// Presumably the API's maximum page size for listing definitions -- TODO confirm
// against the Keen Cached Datasets API docs (not referenced in this chunk).
private const int MaxDatasetDefinitionListLimit = 100;
/// <summary>
/// Serializer settings matching the Keen wire format: snake_case member names,
/// defaults omitted on serialization (but populated on deserialization), and
/// compact output.
/// </summary>
private static readonly JsonSerializerSettings SerializerSettings =
new JsonSerializerSettings
{
ContractResolver = new DefaultContractResolver
{
NamingStrategy = new SnakeCaseNamingStrategy()
},
DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate,
Formatting = Formatting.None
};
// HTTP client bound to the project's server base URL.
private readonly IKeenHttpClient _keenHttpClient;
// Relative URL of the project's cached-datasets resource.
private readonly string _cachedDatasetRelativeUrl;
private readonly string _masterKey;
private readonly string _readKey;
/// <summary>
/// Constructs a Datasets client from project settings and an HTTP client
/// provider. Validates that settings, provider and a well-formed KeenUrl are
/// present before wiring up the client and relative resource URL.
/// </summary>
/// <param name="prjSettings">Project settings (URL, project id, API keys).</param>
/// <param name="keenHttpClientProvider">Factory for IKeenHttpClient instances.</param>
internal Datasets(IProjectSettings prjSettings,
                  IKeenHttpClientProvider keenHttpClientProvider)
{
    if (prjSettings == null)
    {
        throw new ArgumentNullException(nameof(prjSettings),
            "Project Settings must be provided.");
    }

    if (keenHttpClientProvider == null)
    {
        throw new ArgumentNullException(nameof(keenHttpClientProvider),
            "A KeenHttpClient provider must be provided.");
    }

    var keenUrl = prjSettings.KeenUrl;

    if (string.IsNullOrWhiteSpace(keenUrl) ||
        !Uri.IsWellFormedUriString(keenUrl, UriKind.Absolute))
    {
        throw new KeenException(
            "A properly formatted KeenUrl must be provided via Project Settings.");
    }

    _keenHttpClient = keenHttpClientProvider.GetForUrl(new Uri(keenUrl));
    _cachedDatasetRelativeUrl =
        KeenHttpClient.GetRelativeUrl(prjSettings.ProjectId,
                                      KeenConstants.DatasetsResource);
    _masterKey = prjSettings.MasterKey;
    _readKey = prjSettings.ReadKey;
}
/// <summary>
/// Gets results for a cached dataset, for the series identified by indexBy,
/// restricted to the given timeframe.
/// </summary>
/// <param name="datasetName">Name of the cached dataset.</param>
/// <param name="indexBy">The index_by value identifying the result series.</param>
/// <param name="timeframe">Relative or absolute timeframe string.</param>
/// <returns>The parsed JSON response from the Keen API.</returns>
/// <exception cref="KeenException">On missing arguments, API error payloads, or
/// non-success HTTP status.</exception>
public async Task<JObject> GetResultsAsync(string datasetName,
                                           string indexBy,
                                           string timeframe)
{
    if (string.IsNullOrWhiteSpace(datasetName))
    {
        throw new KeenException("A dataset name is required.");
    }

    if (string.IsNullOrWhiteSpace(indexBy))
    {
        throw new KeenException("A value to index by is required.");
    }

    if (string.IsNullOrWhiteSpace(timeframe))
    {
        // Fixed copy/paste typo in this message (was "A timeframe by is required.").
        throw new KeenException("A timeframe is required.");
    }

    if (string.IsNullOrWhiteSpace(_readKey))
    {
        throw new KeenException("An API ReadKey is required to get dataset results.");
    }

    var datasetResultsUrl = $"{GetDatasetUrl(datasetName)}/results";

    // Absolute timeframes can have reserved characters like ':', and index_by can be
    // any valid JSON member name, which can have all sorts of stuff, so we escape here.
    var url = $"{datasetResultsUrl}?" +
              $"index_by={Uri.EscapeDataString(indexBy)}" +
              $"&timeframe={Uri.EscapeDataString(timeframe)}";

    var responseMsg = await _keenHttpClient
        .GetAsync(url, _readKey)
        .ConfigureAwait(continueOnCapturedContext: false);

    var responseString = await responseMsg
        .Content
        .ReadAsStringAsync()
        .ConfigureAwait(continueOnCapturedContext: false);

    var response = JObject.Parse(responseString);

    // Surface any API-level error payload before checking the raw HTTP status.
    KeenUtil.CheckApiErrorCode(response);

    if (!responseMsg.IsSuccessStatusCode)
    {
        throw new KeenException($"Request failed with status: {responseMsg.StatusCode}");
    }

    return response;
}
/// <summary>
/// Retrieves the definition of the named dataset.
/// </summary>
/// <param name="datasetName">Name of the dataset whose definition is requested.</param>
/// <returns>The deserialized dataset definition.</returns>
public async Task<DatasetDefinition> GetDefinitionAsync(string datasetName)
{
    if (string.IsNullOrWhiteSpace(datasetName))
    {
        throw new KeenException("A dataset name is required.");
    }

    if (string.IsNullOrWhiteSpace(_readKey))
    {
        throw new KeenException("An API ReadKey is required to get dataset results.");
    }

    var httpResponse = await _keenHttpClient
        .GetAsync(GetDatasetUrl(datasetName), _readKey)
        .ConfigureAwait(false);

    var rawJson = await httpResponse
        .Content
        .ReadAsStringAsync()
        .ConfigureAwait(false);

    // Surface any API-level error carried in the payload before checking
    // the transport-level status code.
    KeenUtil.CheckApiErrorCode(JObject.Parse(rawJson));

    if (!httpResponse.IsSuccessStatusCode)
    {
        throw new KeenException($"Request failed with status: {httpResponse.StatusCode}");
    }

    return JsonConvert.DeserializeObject<DatasetDefinition>(rawJson,
                                                            SerializerSettings);
}
/// <summary>
/// Lists a single page of dataset definitions for the project.
/// </summary>
/// <param name="limit">Maximum number of definitions to return (default 10).</param>
/// <param name="afterName">Dataset name after which this page starts, or null for the first page.</param>
/// <returns>A page of dataset definitions plus paging metadata.</returns>
public async Task<DatasetDefinitionCollection> ListDefinitionsAsync(
    int limit = 10,
    string afterName = null)
{
    if (string.IsNullOrWhiteSpace(_readKey))
    {
        throw new KeenException("An API ReadKey is required to get dataset results.");
    }

    // limit is just an int, so no need to encode here.
    var listUrl = $"{_cachedDatasetRelativeUrl}?limit={limit}";

    if (!string.IsNullOrWhiteSpace(afterName))
    {
        // afterName should be a valid dataset name, which can only be
        // alphanumerics, '_' and '-', so we don't escape here.
        listUrl = $"{listUrl}&after_name={afterName}";
    }

    var httpResponse = await _keenHttpClient
        .GetAsync(listUrl, _readKey)
        .ConfigureAwait(false);

    var rawJson = await httpResponse
        .Content
        .ReadAsStringAsync()
        .ConfigureAwait(false);

    // Payload error codes are checked before the HTTP status, matching the
    // other read methods in this class.
    KeenUtil.CheckApiErrorCode(JObject.Parse(rawJson));

    if (!httpResponse.IsSuccessStatusCode)
    {
        throw new KeenException($"Request failed with status: {httpResponse.StatusCode}");
    }

    return JsonConvert.DeserializeObject<DatasetDefinitionCollection>(rawJson,
                                                                      SerializerSettings);
}
/// <summary>
/// Retrieves every dataset definition for the project by paging through
/// ListDefinitionsAsync until the server-reported total has been fetched.
/// </summary>
/// <returns>All dataset definitions for the project.</returns>
/// <exception cref="KeenException">Thrown when a page fetch fails or returns no data
/// before the reported total is reached.</exception>
public async Task<IEnumerable<DatasetDefinition>> ListAllDefinitionsAsync()
{
    var allDefinitions = new List<DatasetDefinition>();

    // Fetch the first page at the maximum page size.
    var firstSet = await ListDefinitionsAsync(MaxDatasetDefinitionListLimit)
        .ConfigureAwait(continueOnCapturedContext: false);

    if (null == firstSet?.Datasets)
    {
        throw new KeenException("Failed to fetch definition list");
    }

    if (!firstSet.Datasets.Any())
    {
        return allDefinitions;
    }

    // firstSet.Count is the server-reported total; if the first page already
    // holds everything, return it directly without copying.
    if (firstSet.Count <= firstSet.Datasets.Count())
    {
        return firstSet.Datasets;
    }

    allDefinitions.AddRange(firstSet.Datasets);

    // Page through the remainder, using the last fetched definition's name as
    // the cursor. An empty or null page before reaching the reported total is
    // treated as an error rather than looping forever.
    do
    {
        var nextSet = await ListDefinitionsAsync(MaxDatasetDefinitionListLimit,
                                                 allDefinitions.Last().DatasetName)
            .ConfigureAwait(continueOnCapturedContext: false);

        if (null == nextSet?.Datasets || !nextSet.Datasets.Any())
        {
            throw new KeenException("Failed to fetch definition list");
        }

        allDefinitions.AddRange(nextSet.Datasets);
    } while (firstSet.Count > allDefinitions.Count);

    return allDefinitions;
}
/// <summary>
/// Deletes the named dataset. Requires the project's MasterKey.
/// </summary>
/// <param name="datasetName">Name of the dataset to delete.</param>
/// <exception cref="KeenException">Thrown on a missing argument, missing master key,
/// an API-reported error code, or an unexpected HTTP status.</exception>
public async Task DeleteDatasetAsync(string datasetName)
{
    if (string.IsNullOrWhiteSpace(datasetName))
    {
        throw new KeenException("A dataset name is required.");
    }

    if (string.IsNullOrWhiteSpace(_masterKey))
    {
        // FIX: message previously said "...is required to get dataset results."
        // (copy/pasted from the read methods); deletion is what needs the key here.
        throw new KeenException("An API MasterKey is required to delete a dataset.");
    }

    var responseMsg = await _keenHttpClient
        .DeleteAsync(GetDatasetUrl(datasetName), _masterKey)
        .ConfigureAwait(continueOnCapturedContext: false);

    var responseString = await responseMsg
        .Content
        .ReadAsStringAsync()
        .ConfigureAwait(continueOnCapturedContext: false);

    // A successful delete returns 204 No Content; anything else is an error.
    // Let the payload's API error code (if any) produce the more specific
    // exception before falling back to the generic status failure.
    if (HttpStatusCode.NoContent != responseMsg.StatusCode)
    {
        var response = JObject.Parse(responseString);
        KeenUtil.CheckApiErrorCode(response);
        throw new KeenException($"Request failed with status: {responseMsg.StatusCode}");
    }
}
/// <summary>
/// Creates a dataset from the given definition. Requires the project's MasterKey.
/// </summary>
/// <param name="dataset">Definition of the dataset to create; validated before the request.</param>
/// <returns>The dataset definition as returned by the server.</returns>
/// <exception cref="KeenException">Thrown on a missing/invalid definition, missing master key,
/// an API-reported error code, or a non-success HTTP status.</exception>
public async Task<DatasetDefinition> CreateDatasetAsync(DatasetDefinition dataset)
{
    if (string.IsNullOrWhiteSpace(_masterKey))
    {
        // FIX: message previously said "...is required to get dataset results."
        // (copy/pasted from the read methods); creation is what needs the key here.
        throw new KeenException("An API MasterKey is required to create a dataset.");
    }

    // Validate
    if (null == dataset)
    {
        throw new KeenException("An instance of DatasetDefinition must be provided");
    }

    // This throws if dataset is not valid.
    dataset.Validate();

    var content = JsonConvert.SerializeObject(dataset, SerializerSettings);

    // Datasets are created via PUT to the dataset's own URL.
    var responseMsg = await _keenHttpClient
        .PutAsync(GetDatasetUrl(dataset.DatasetName), _masterKey, content)
        .ConfigureAwait(continueOnCapturedContext: false);

    var responseString = await responseMsg
        .Content
        .ReadAsStringAsync()
        .ConfigureAwait(continueOnCapturedContext: false);

    var response = JObject.Parse(responseString);
    KeenUtil.CheckApiErrorCode(response);

    if (!responseMsg.IsSuccessStatusCode)
    {
        throw new KeenException($"Request failed with status: {responseMsg.StatusCode}");
    }

    return JsonConvert.DeserializeObject<DatasetDefinition>(responseString,
                                                            SerializerSettings);
}
// Builds the relative URL for a single dataset resource. With no name this
// yields the collection URL with a trailing slash (matching original behavior).
private string GetDatasetUrl(string datasetName = null) =>
    $"{_cachedDatasetRelativeUrl}/{datasetName}";
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.AzureStack.Management.Fabric.Admin
{
using Microsoft.AzureStack;
using Microsoft.AzureStack.Management;
using Microsoft.AzureStack.Management.Fabric;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Microsoft.Rest.Azure.OData;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// LogicalSubnetsOperations operations.
/// </summary>
internal partial class LogicalSubnetsOperations : IServiceOperations<FabricAdminClient>, ILogicalSubnetsOperations
{
/// <summary>
/// Initializes a new instance of the LogicalSubnetsOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal LogicalSubnetsOperations(FabricAdminClient client)
{
    // Same null check as before, expressed via null-coalescing throw.
    Client = client ?? throw new System.ArgumentNullException("client");
}
/// <summary>
/// Gets a reference to the FabricAdminClient this operations class
/// executes requests against.
/// </summary>
public FabricAdminClient Client { get; private set; }
/// <summary>
/// Get a logical subnet by name.
/// (Fixed: previous generated summary wrongly said "Get a list of all
/// volumes at a location.")
/// </summary>
/// <param name='location'>
/// Location of the resource.
/// </param>
/// <param name='logicalNetwork'>
/// Name of the logical network.
/// </param>
/// <param name='logicalSubnet'>
/// Name of the logical subnet.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<LogicalSubnet>> GetWithHttpMessagesAsync(string location, string logicalNetwork, string logicalSubnet, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before doing any work.
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    if (location == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "location");
    }
    if (logicalNetwork == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "logicalNetwork");
    }
    if (logicalSubnet == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "logicalSubnet");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("location", location);
        tracingParameters.Add("logicalNetwork", logicalNetwork);
        tracingParameters.Add("logicalSubnet", logicalSubnet);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
    }
    // Construct URL. Note {location} appears twice in the template (in the
    // resource group name and the fabric location); Replace handles both.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/System.{location}/providers/Microsoft.Fabric.Admin/fabricLocations/{location}/logicalNetworks/{logicalNetwork}/logicalSubnets/{logicalSubnet}").ToString();
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    _url = _url.Replace("{location}", System.Uri.EscapeDataString(location));
    _url = _url.Replace("{logicalNetwork}", System.Uri.EscapeDataString(logicalNetwork));
    _url = _url.Replace("{logicalSubnet}", System.Uri.EscapeDataString(logicalSubnet));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    // Custom headers override any defaults set above.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;  // GET has no body; kept for the exception wrapper below.
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: build a CloudException, preferring the service-supplied
        // error body when it can be deserialized.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Failure path disposes the transport objects; on success the caller
        // owns them via the returned result.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<LogicalSubnet>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<LogicalSubnet>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Get a list of all logical subnets.
/// </summary>
/// <param name='location'>
/// Location of the resource.
/// </param>
/// <param name='logicalNetwork'>
/// Name of the logical network.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<LogicalSubnet>>> ListWithHttpMessagesAsync(string location, string logicalNetwork, ODataQuery<LogicalSubnet> odataQuery = default(ODataQuery<LogicalSubnet>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before doing any work.
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    if (location == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "location");
    }
    if (logicalNetwork == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "logicalNetwork");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("odataQuery", odataQuery);
        tracingParameters.Add("location", location);
        tracingParameters.Add("logicalNetwork", logicalNetwork);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
    }
    // Construct URL. Note {location} appears twice in the template (in the
    // resource group name and the fabric location); Replace handles both.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/System.{location}/providers/Microsoft.Fabric.Admin/fabricLocations/{location}/logicalNetworks/{logicalNetwork}/logicalSubnets").ToString();
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    _url = _url.Replace("{location}", System.Uri.EscapeDataString(location));
    _url = _url.Replace("{logicalNetwork}", System.Uri.EscapeDataString(logicalNetwork));
    List<string> _queryParameters = new List<string>();
    // OData filter string is appended verbatim (already query-formatted).
    if (odataQuery != null)
    {
        var _odataFilter = odataQuery.ToString();
        if (!string.IsNullOrEmpty(_odataFilter))
        {
            _queryParameters.Add(_odataFilter);
        }
    }
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    // Custom headers override any defaults set above.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;  // GET has no body; kept for the exception wrapper below.
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: build a CloudException, preferring the service-supplied
        // error body when it can be deserialized.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Failure path disposes the transport objects; on success the caller
        // owns them via the returned result.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<LogicalSubnet>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<LogicalSubnet>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Get a list of all logical subnets.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<LogicalSubnet>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (nextPageLink == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("nextPageLink", nextPageLink);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
    }
    // Construct URL. The next-page link is already a complete URL; it is not
    // escaped so any query string it carries is preserved.
    string _url = "{nextLink}";
    _url = _url.Replace("{nextLink}", nextPageLink);
    // Generated scaffold: next-page requests add no query parameters, so this
    // list is always empty and the branch below never fires.
    List<string> _queryParameters = new List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    // Custom headers override any defaults set above.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;  // GET has no body; kept for the exception wrapper below.
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: build a CloudException, preferring the service-supplied
        // error body when it can be deserialized.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Failure path disposes the transport objects; on success the caller
        // owns them via the returned result.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<LogicalSubnet>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<LogicalSubnet>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
}
}
| |
using System;
using RestSharp;
using RestSharp.Authenticators;
using System.Collections.Generic;
using System.Configuration;
using CallfireApiClient.Api.Common.Model;
using System.Collections;
using System.Text;
using System.IO;
using System.Net;
using RestSharp.Serialization;
namespace CallfireApiClient
{
/// <summary>
/// REST client which makes HTTP calls to Callfire service
/// </summary>
public class RestApiClient
{
// Serializer registered for both request bodies and "application/json" responses (see constructor).
private readonly IRestSerializer JsonSerializer;
private static Logger Logger = new Logger();
// appSettings section, loaded once by the static constructor.
private static KeyValueConfigurationCollection ApplicationConfig;
// Backing field for the ClientConfig property below.
private ClientConfig _ClientConfig = new ClientConfig();
// Client configuration. Assigning a new config re-points the underlying
// RestSharp client at the configured API base path (when one is set) and
// re-applies proxy settings.
public ClientConfig ClientConfig
{
    get
    {
        return _ClientConfig;
    }
    set
    {
        _ClientConfig = value;
        // Keep the current BaseUrl when the new config has no ApiBasePath.
        RestClient.BaseUrl = !string.IsNullOrWhiteSpace(value.ApiBasePath) ? new Uri(value.ApiBasePath) : RestClient.BaseUrl;
        SetUpRestClientProxy();
    }
}
/// <summary>
/// RestSharp client configured to query Callfire API
/// </summary>
/// <returns>RestSharp client interface</returns>
public IRestClient RestClient { get; set; }
/// <summary>
/// Returns HTTP request filters associated with API client.
/// Populated in the constructor, so never null after construction.
/// </summary>
/// <value>active filters.</value>
public SortedSet<RequestFilter> Filters { get; }
/// <summary>
/// Loads client configuration from the application config file once per process.
/// </summary>
static RestApiClient() {
    ApplicationConfig = LoadAppSettings();
}
/// <summary>
/// Gets the client configuration loaded at type initialization.
/// </summary>
/// <value>configuration properties collection</value>
public static KeyValueConfigurationCollection getApplicationConfig() => ApplicationConfig;
/// <summary>
/// REST API client constructor
/// </summary>
/// <param name="authenticator">
/// authentication API authentication method
/// </param>
public RestApiClient(IAuthenticator authenticator)
{
    // Copy base-path and proxy settings from the loaded app config.
    SetAppSettings();
    JsonSerializer = new CallfireJsonConverter();
    // UseSerializer wires request serialization; AddHandler below registers
    // the same converter for deserializing "application/json" responses.
    RestClient = new RestClient(_ClientConfig.ApiBasePath).UseSerializer(JsonSerializer);
    RestClient.Authenticator = authenticator;
    // e.g. "<assembly-name>-csharp-<assembly-version>"
    RestClient.UserAgent = this.GetType().Assembly.GetName().Name + "-csharp-" + this.GetType().Assembly.GetName().Version;
    RestClient.AddHandler("application/json", JsonSerializer);
    Filters = new SortedSet<RequestFilter>();
    SetUpRestClientProxy();
}
/// <summary>
/// Loads the client's appSettings configuration section from the
/// assembly's exe configuration file.
/// </summary>
public static KeyValueConfigurationCollection LoadAppSettings()
{
    var assemblyPath = typeof(RestApiClient).Assembly.Location;
    var exeConfig = ConfigurationManager.OpenExeConfiguration(assemblyPath);
    return ((AppSettingsSection)exeConfig.GetSection("appSettings")).Settings;
}
/// <summary>
/// Copies base-path and proxy settings from the loaded app config
/// into the client configuration.
/// </summary>
private void SetAppSettings()
{
    // Base path: fall back to the default when the setting is absent or blank.
    var basePathSetting = ApplicationConfig[ClientConstants.CONFIG_API_BASE_PATH];
    _ClientConfig.ApiBasePath = (basePathSetting == null || string.IsNullOrWhiteSpace(basePathSetting.Value))
        ? ClientConstants.API_BASE_PATH_DEFAULT_VALUE
        : basePathSetting.Value;

    // Proxy settings (either may be absent).
    var proxyAddress = ApplicationConfig[ClientConstants.PROXY_ADDRESS_PROPERTY]?.Value;
    var proxyCredentials = ApplicationConfig[ClientConstants.PROXY_CREDENTIALS_PROPERTY]?.Value;
    ConfigureProxyParameters(proxyAddress, proxyCredentials);
}
/// <summary>
/// Configure app proxy parameters.
/// </summary>
/// <param name="proxyAddress">proxy address as "host" or "host:port"; ignored when null/empty</param>
/// <param name="proxyCredentials">optional credentials as "username:password"</param>
private void ConfigureProxyParameters(String proxyAddress, String proxyCredentials)
{
    if (String.IsNullOrEmpty(proxyAddress))
    {
        return;
    }
    // FIX: placeholders were SLF4J-style "{}"; use indexed placeholders
    // consistent with the other Logger.Debug calls in this file.
    Logger.Debug("Configuring proxy host for client: {0} auth: {1}", proxyAddress, proxyCredentials);
    char[] delimiterChars = { ':' };
    String[] parsedAddress = proxyAddress.Split(delimiterChars);
    String[] parsedCredentials = (proxyCredentials == null ? "" : proxyCredentials).Split(delimiterChars);
    int portValue = parsedAddress.Length > 1 ? ClientUtils.StrToIntDef(parsedAddress[1], ClientConstants.DEFAULT_PROXY_PORT) : ClientConstants.DEFAULT_PROXY_PORT;
    // FIX: previously the proxy host/port were only stored when credentials
    // were supplied AND well-formed, so a credential-less proxy was silently
    // ignored and SetUpRestClientProxy never applied it. Store the address
    // whenever one is configured; credentials remain optional.
    _ClientConfig.ProxyAddress = parsedAddress[0];
    _ClientConfig.ProxyPort = portValue;
    if (!String.IsNullOrEmpty(proxyCredentials))
    {
        if (parsedCredentials.Length > 1)
        {
            _ClientConfig.ProxyLogin = parsedCredentials[0];
            _ClientConfig.ProxyPassword = parsedCredentials[1];
        }
        else
        {
            Logger.Debug("Proxy credentials have wrong format, must be username:password");
        }
    }
}
/// <summary>
/// Applies the configured proxy (if any) to the underlying RestSharp client.
/// </summary>
private void SetUpRestClientProxy()
{
    if (String.IsNullOrEmpty(_ClientConfig.ProxyAddress))
    {
        Logger.Debug("Proxy wasn't configured, please check input parameters");
        return;
    }
    RestClient.Proxy = new WebProxy(_ClientConfig.ProxyAddress, _ClientConfig.ProxyPort)
    {
        Credentials = new NetworkCredential(_ClientConfig.ProxyLogin, _ClientConfig.ProxyPassword)
    };
}
/// <summary>
/// Performs GET request to specified path
/// <summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="request">optional finder request with query parameters</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
// Convenience overload: converts the optional finder request into query
// parameters and delegates to the query-parameter overload.
public T Get<T>(String path, CallfireModel request = null) where T : new()
    => Get<T>(path, ClientUtils.BuildQueryParams(request));
/// <summary>
/// Performs GET request to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="queryParams">query parameters</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public T Get<T>(string path, IEnumerable<KeyValuePair<string, object>> queryParams) where T : new()
{
    Logger.Debug("GET request to {0} with params: {1}", path, queryParams);
    // Accept any media type the server chooses to return.
    var getRequest = CreateRestRequest(path, Method.GET, queryParams);
    getRequest.AddHeader("Accept", "*/*");
    return DoRequest<T>(getRequest);
}
/// <summary>
/// Performs GET request to specified path
/// </summary>
/// <param name="path">relative API request path</param>
/// <param name="queryParams">query parameters</param>
/// <returns>stream with file data</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public Stream GetFileData(string path, IEnumerable<KeyValuePair<string, object>> queryParams = null)
{
    Logger.Debug("GET request to {0} with params: {1}", path, queryParams);
    var restRequest = CreateRestRequest(path, Method.GET, queryParams);
    restRequest.OnBeforeDeserialization = resp =>
    {
        // Strip a leading UTF-8 byte order mark, if present, so it does not
        // pollute the downloaded content.
        string byteOrderMarkUtf8 = Encoding.UTF8.GetString(Encoding.UTF8.GetPreamble());
        // Guard against a null body (e.g. transport failure) before probing for the BOM;
        // the previous code dereferenced resp.Content unconditionally and could throw.
        if (resp.Content != null && resp.Content.StartsWith(byteOrderMarkUtf8))
            resp.Content = resp.Content.Remove(0, byteOrderMarkUtf8.Length);
    };
    restRequest.AddHeader("Accept", "*/*");
    return DoRequest(restRequest);
}
/// <summary>
/// Performs POST request with body to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="payload">optional object to send</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
// Convenience overload with no query parameters.
public T Post<T>(String path, object payload = null) where T : new()
    => Post<T>(path, payload, ClientUtils.EMPTY_MAP);
/// <summary>
/// Performs POST request with body to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="payload">object to send</param>
/// <param name="queryParams">query parameters</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public T Post<T>(String path, object payload, IEnumerable<KeyValuePair<string, object>> queryParams) where T : new()
{
    var postRequest = CreateRestRequest(path, Method.POST, queryParams);
    if (payload == null)
    {
        Logger.Debug("POST request to {0} params: {1}", path, queryParams);
    }
    else
    {
        // Validate model payloads before serializing them into the body.
        validatePayload(payload);
        postRequest.AddJsonBody(payload);
        Logger.Debug("POST request to {0} params: {1} entity: \n{2}", path, queryParams, payload);
    }
    return DoRequest<T>(postRequest);
}
/// <summary>
/// Performs POST request with binary body to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="fileName">name of file</param>
/// <param name="filePath">path to file</param>
/// <param name="contentType">optional media type of the uploaded file, defaults to the client's default file content type</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public T PostFile<T>(String path, string fileName, string filePath, string contentType = null) where T : new()
{
    var uploadRequest = CreateRestRequest(path, Method.POST);
    uploadRequest.AddHeader("Content-Type", "multipart/form-data");
    // Fall back to the default media type when none was specified.
    string effectiveContentType = contentType ?? ClientConstants.DEFAULT_FILE_CONTENT_TYPE;
    uploadRequest.AddFileBytes("file", File.ReadAllBytes(filePath), Path.GetFileName(filePath), effectiveContentType);
    uploadRequest.AddParameter("name", fileName);
    return DoRequest<T>(uploadRequest);
}
/// <summary>
/// Performs POST request with binary body to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="filePath">path to file</param>
/// <param name="formParams">form parameters to include in request</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public T PostFile<T>(String path, string filePath, IList<KeyValuePair<string, object>> formParams) where T : new()
{
    var uploadRequest = CreateRestRequest(path, Method.POST);
    uploadRequest.AddHeader("Content-Type", "multipart/form-data");
    uploadRequest.AddFileBytes("file", File.ReadAllBytes(filePath), Path.GetFileName(filePath), ClientConstants.DEFAULT_FILE_CONTENT_TYPE);
    // Attach each supplied form field alongside the file part.
    foreach (var formParam in formParams)
    {
        uploadRequest.AddParameter(formParam.Key, formParam.Value.ToString());
    }
    return DoRequest<T>(uploadRequest);
}
/// <summary>
/// Performs POST request with binary body to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="fileName">name of file</param>
/// <param name="filePath">path to file</param>
/// <param name="queryParams">query parameters</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public T PostFile<T>(String path, string fileName, string filePath, IList<KeyValuePair<string, object>> queryParams) where T : new()
{
    var uploadRequest = CreateRestRequest(path, Method.POST, queryParams);
    uploadRequest.AddHeader("Content-Type", "multipart/form-data");
    // Upload the file contents under the server-side part name "file".
    uploadRequest.AddFileBytes("file", File.ReadAllBytes(filePath), Path.GetFileName(filePath), ClientConstants.DEFAULT_FILE_CONTENT_TYPE);
    uploadRequest.AddParameter("name", fileName);
    return DoRequest<T>(uploadRequest);
}
/// <summary>
/// Performs POST request with binary body to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="fileData">binary file data to upload</param>
/// <param name="fileName">name of file</param>
/// <param name="contentType">media type for file uploaded</param>
/// <param name="queryParams">query parameters</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public T PostFile<T>(String path, byte[] fileData, string fileName, string contentType = null, IList<KeyValuePair<string, object>> queryParams = null) where T : new()
{
    var uploadRequest = CreateRestRequest(path, Method.POST, queryParams);
    uploadRequest.AddHeader("Content-Type", "multipart/form-data");
    // Fall back to the default media type when none was specified.
    uploadRequest.AddFileBytes("file", fileData, fileName, contentType ?? ClientConstants.DEFAULT_FILE_CONTENT_TYPE);
    uploadRequest.AddParameter("name", fileName);
    return DoRequest<T>(uploadRequest);
}
/// <summary>
/// Performs PUT request with body to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="payload">optional object to send</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
// Convenience overload with no query parameters.
public T Put<T>(String path, object payload = null) where T : new()
    => Put<T>(path, payload, ClientUtils.EMPTY_MAP);
/// <summary>
/// Performs PUT request with body to specified path
/// </summary>
/// <typeparam name="T">The type of object to create and populate with the returned data.</typeparam>
/// <param name="path">relative API request path</param>
/// <param name="payload">object to send</param>
/// <param name="queryParams">query parameters</param>
/// <returns>mapped object</returns>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public T Put<T>(String path, object payload, IEnumerable<KeyValuePair<string, object>> queryParams) where T : new()
{
    var putRequest = CreateRestRequest(path, Method.PUT, queryParams);
    if (payload == null)
    {
        Logger.Debug("PUT request to {0} params: {1}", path, queryParams);
    }
    else
    {
        // Validate model payloads before serializing them into the body.
        validatePayload(payload);
        putRequest.AddJsonBody(payload);
        Logger.Debug("PUT request to {0} params: {1} entity: \n{2}", path, queryParams, payload);
    }
    return DoRequest<T>(putRequest);
}
/// <summary>
/// Performs DELETE request to specified path
/// </summary>
/// <param name="path">relative API request path</param>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
// Convenience overload with no query parameters.
public void Delete(String path)
    => Delete(path, ClientUtils.EMPTY_MAP);
/// <summary>
/// Performs DELETE request to specified path with query parameters
/// </summary>
/// <param name="path">relative API request path</param>
/// <param name="queryParams">query parameters</param>
/// <exception cref="BadRequestException"> in case HTTP response code is 400 - Bad request, the request was formatted improperly.</exception>
/// <exception cref="UnauthorizedException"> in case HTTP response code is 401 - Unauthorized, API Key missing or invalid.</exception>
/// <exception cref="AccessForbiddenException"> in case HTTP response code is 403 - Forbidden, insufficient permissions.</exception>
/// <exception cref="ResourceNotFoundException"> in case HTTP response code is 404 - NOT FOUND, the resource requested does not exist.</exception>
/// <exception cref="InternalServerErrorException"> in case HTTP response code is 500 - Internal Server Error.</exception>
/// <exception cref="CallfireApiException"> in case HTTP response code is something different from codes listed above.</exception>
/// <exception cref="CallfireClientException"> in case error has occurred in client.</exception>
public void Delete(String path, IEnumerable<KeyValuePair<string, object>> queryParams)
{
    Logger.Debug("DELETE request to {0} with params: {1}", path, queryParams);
    // The response body is discarded; errors still surface via VerifyResponse.
    DoRequest<object>(CreateRestRequest(path, Method.DELETE, queryParams));
}
private T DoRequest<T>(IRestRequest request) where T : new()
{
    FilterRequest(request);
    var restResponse = RestClient.Execute<T>(request);
    // An absent body short-circuits verification; the caller receives the type default.
    if (restResponse.Content == null)
    {
        Logger.Debug("received http code {0} with null entity, returning null", restResponse.StatusCode);
        return default(T);
    }
    VerifyResponse(restResponse);
    Logger.Debug("received entity: {0}", restResponse.Content);
    return restResponse.Data;
}
/// <summary>
/// Executes the given request and streams the raw response body into memory.
/// Returns null when the response carried no content.
/// </summary>
private Stream DoRequest(IRestRequest request)
{
    FilterRequest(request);
    Stream downloadedStream = new MemoryStream();
    // Copy the raw response body into the in-memory stream as it arrives.
    request.ResponseWriter = (ms) => ms.CopyTo(downloadedStream);
    var response = RestClient.Execute(request);
    if (response.Content == null)
    {
        Logger.Debug("received http code {0} with null file data, returning null", response.StatusCode);
        return null;
    }
    Logger.Debug("received file data: {0}", response.Content);
    VerifyResponse(response);
    // CopyTo leaves the stream positioned at its end; rewind so callers
    // can read the downloaded data from the beginning.
    downloadedStream.Position = 0;
    return downloadedStream;
}
private void VerifyResponse(IRestResponse response)
{
    var statusCode = (int)response.StatusCode;
    // Below 400 there is no API error; only transport-level failures are reported.
    if (statusCode < 400)
    {
        if (response.ErrorException != null)
        {
            Logger.Error("request has failed: {0}", response.ErrorException);
            throw new CallfireClientException(response.ErrorMessage, response.ErrorException);
        }
        return;
    }
    // Try to parse the structured error body; fall back to a synthetic message
    // built from the raw content when deserialization fails.
    ErrorMessage message;
    try
    {
        message = JsonSerializer.Deserialize<ErrorMessage>(response);
    }
    catch (Exception e)
    {
        Logger.Error("deserialization of ErrorMessage has failed: {0}", e);
        message = new ErrorMessage(statusCode, response.Content, ClientConstants.GENERIC_HELP_LINK);
    }
    // Map well-known HTTP status codes onto typed exceptions.
    switch (statusCode)
    {
        case 400: throw new BadRequestException(message);
        case 401: throw new UnauthorizedException(message);
        case 403: throw new AccessForbiddenException(message);
        case 404: throw new ResourceNotFoundException(message);
        case 500: throw new InternalServerErrorException(message);
        default: throw new CallfireApiException(message);
    }
}
private IRestRequest CreateRestRequest(string path, Method method, IEnumerable<KeyValuePair<string, object>> queryParams = null)
{
    var request = new RestRequest(path, method);
    request.AddHeader("Content-type", "application/json");
    request.RequestFormat = DataFormat.Json;
    request.JsonSerializer = JsonSerializer;
    if (queryParams == null)
    {
        return request;
    }
    foreach (var queryParam in queryParams)
    {
        var values = queryParam.Value as ICollection;
        if (values != null)
        {
            // Collection-valued parameters expand into one query entry per element.
            foreach (var value in values)
            {
                request.AddQueryParameter(queryParam.Key, value.ToString());
            }
        }
        else if (queryParam.Value != null)
        {
            request.AddQueryParameter(queryParam.Key, queryParam.Value.ToString());
        }
    }
    return request;
}
private void FilterRequest(IRestRequest request)
{
    // Give every registered filter a chance to adjust the outgoing request.
    foreach (RequestFilter requestFilter in Filters)
    {
        requestFilter.Filter(request);
    }
}
private void validatePayload(Object payload)
{
    // `as` yields null for both null input and non-model payloads,
    // so a single null check covers both original conditions.
    var model = payload as CallfireModel;
    if (model != null)
    {
        model.validate();
    }
}
}
}
| |
/*
* DocuSign REST API
*
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2.1
* Contact: devcenter@docusign.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using SwaggerDateConverter = DocuSign.eSign.Client.SwaggerDateConverter;
namespace DocuSign.eSign.Model
{
/// <summary>
/// SettingsMetadata
/// </summary>
[DataContract]
public partial class SettingsMetadata : IEquatable<SettingsMetadata>, IValidatableObject
{
    /// <summary>
    /// Initializes a new, empty instance of the <see cref="SettingsMetadata" /> class.
    /// </summary>
    public SettingsMetadata()
    {
        // Empty Constructor
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="SettingsMetadata" /> class.
    /// </summary>
    /// <param name="Is21CFRPart11">When set to **true**, indicates that this module is enabled on the account..</param>
    /// <param name="Options">Options.</param>
    /// <param name="Rights">Rights.</param>
    /// <param name="UiHint">UiHint.</param>
    /// <param name="UiOrder">UiOrder.</param>
    /// <param name="UiType">UiType.</param>
    public SettingsMetadata(string Is21CFRPart11 = default(string), List<string> Options = default(List<string>), string Rights = default(string), string UiHint = default(string), string UiOrder = default(string), string UiType = default(string))
    {
        this.Is21CFRPart11 = Is21CFRPart11;
        this.Options = Options;
        this.Rights = Rights;
        this.UiHint = UiHint;
        this.UiOrder = UiOrder;
        this.UiType = UiType;
    }
    /// <summary>
    /// When set to **true**, indicates that this module is enabled on the account.
    /// </summary>
    /// <value>When set to **true**, indicates that this module is enabled on the account.</value>
    [DataMember(Name="is21CFRPart11", EmitDefaultValue=false)]
    public string Is21CFRPart11 { get; set; }
    /// <summary>
    /// Gets or Sets Options
    /// </summary>
    [DataMember(Name="options", EmitDefaultValue=false)]
    public List<string> Options { get; set; }
    /// <summary>
    /// Gets or Sets Rights
    /// </summary>
    [DataMember(Name="rights", EmitDefaultValue=false)]
    public string Rights { get; set; }
    /// <summary>
    /// Gets or Sets UiHint
    /// </summary>
    [DataMember(Name="uiHint", EmitDefaultValue=false)]
    public string UiHint { get; set; }
    /// <summary>
    /// Gets or Sets UiOrder
    /// </summary>
    [DataMember(Name="uiOrder", EmitDefaultValue=false)]
    public string UiOrder { get; set; }
    /// <summary>
    /// Gets or Sets UiType
    /// </summary>
    [DataMember(Name="uiType", EmitDefaultValue=false)]
    public string UiType { get; set; }
    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var sb = new StringBuilder();
        sb.Append("class SettingsMetadata {\n");
        sb.Append("  Is21CFRPart11: ").Append(Is21CFRPart11).Append("\n");
        sb.Append("  Options: ").Append(Options).Append("\n");
        sb.Append("  Rights: ").Append(Rights).Append("\n");
        sb.Append("  UiHint: ").Append(UiHint).Append("\n");
        sb.Append("  UiOrder: ").Append(UiOrder).Append("\n");
        sb.Append("  UiType: ").Append(UiType).Append("\n");
        sb.Append("}\n");
        return sb.ToString();
    }
    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }
    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        // credit: http://stackoverflow.com/a/10454552/677735
        return this.Equals(obj as SettingsMetadata);
    }
    /// <summary>
    /// Returns true if SettingsMetadata instances are equal
    /// </summary>
    /// <param name="other">Instance of SettingsMetadata to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(SettingsMetadata other)
    {
        // credit: http://stackoverflow.com/a/10454552/677735
        if (other == null)
            return false;
        return
            (
                this.Is21CFRPart11 == other.Is21CFRPart11 ||
                this.Is21CFRPart11 != null &&
                this.Is21CFRPart11.Equals(other.Is21CFRPart11)
            ) &&
            (
                this.Options == other.Options ||
                // Both sides must be non-null before SequenceEqual: the original code
                // threw ArgumentNullException when this.Options was set but other.Options was null.
                this.Options != null && other.Options != null &&
                this.Options.SequenceEqual(other.Options)
            ) &&
            (
                this.Rights == other.Rights ||
                this.Rights != null &&
                this.Rights.Equals(other.Rights)
            ) &&
            (
                this.UiHint == other.UiHint ||
                this.UiHint != null &&
                this.UiHint.Equals(other.UiHint)
            ) &&
            (
                this.UiOrder == other.UiOrder ||
                this.UiOrder != null &&
                this.UiOrder.Equals(other.UiOrder)
            ) &&
            (
                this.UiType == other.UiType ||
                this.UiType != null &&
                this.UiType.Equals(other.UiType)
            );
    }
    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        // credit: http://stackoverflow.com/a/263416/677735
        unchecked // Overflow is fine, just wrap
        {
            int hash = 41;
            // Suitable nullity checks etc, of course :)
            if (this.Is21CFRPart11 != null)
                hash = hash * 59 + this.Is21CFRPart11.GetHashCode();
            if (this.Options != null)
                hash = hash * 59 + this.Options.GetHashCode();
            if (this.Rights != null)
                hash = hash * 59 + this.Rights.GetHashCode();
            if (this.UiHint != null)
                hash = hash * 59 + this.UiHint.GetHashCode();
            if (this.UiOrder != null)
                hash = hash * 59 + this.UiOrder.GetHashCode();
            if (this.UiType != null)
                hash = hash * 59 + this.UiType.GetHashCode();
            return hash;
        }
    }
    /// <summary>
    /// Validates the instance; this generated model has no validation rules.
    /// </summary>
    /// <param name="validationContext">Validation context (unused)</param>
    /// <returns>An empty sequence of validation results</returns>
    public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
    {
        yield break;
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using System.IO;
using System.Collections;
using NUnit.Framework;
using Microsoft.Build.Framework;
using Microsoft.Build.BuildEngine;
using Microsoft.Build.BuildEngine.Shared;
namespace Microsoft.Build.UnitTests
{
[TestFixture]
public class CacheManager_Tests
{
// Successful build result that will be added to the cache by the tests
BuildResult resultWithOutputs;
// Build result whose targets were skipped, so it must not be cached
BuildResult uncacheableResult;
// Build result representing a failed build
BuildResult failedResult;
[SetUp]
public void Initialize()
{
    // Create an item with a known Include value so that tests can verify
    // the item is faithfully recreated when it comes back out of the cache.
    BuildItem item = new BuildItem("BuildItem1", "Item1");
    item.Include = "TestInclude1";
    Dictionary<object, object> outputsByTarget = new Dictionary<object, object>();
    outputsByTarget.Add("TaskItems", new BuildItem[] { item });
    // Per-target completion states for the three result flavors under test.
    Hashtable successByTarget = new Hashtable(StringComparer.OrdinalIgnoreCase);
    successByTarget.Add("TaskItems", Target.BuildState.CompletedSuccessfully);
    Hashtable failureByTarget = new Hashtable(StringComparer.OrdinalIgnoreCase);
    failureByTarget.Add("TaskItems", Target.BuildState.CompletedUnsuccessfully);
    Hashtable skippedByTarget = new Hashtable(StringComparer.OrdinalIgnoreCase);
    skippedByTarget.Add("TaskItems", Target.BuildState.Skipped);
    resultWithOutputs = new BuildResult(outputsByTarget, successByTarget, true, 1, 1, 2, true, string.Empty, string.Empty, 0, 0, 0);
    failedResult = new BuildResult(outputsByTarget, failureByTarget, false, 1, 1, 2, true, string.Empty, string.Empty, 0, 0, 0);
    uncacheableResult = new BuildResult(outputsByTarget, skippedByTarget, true, 1, 1, 2, true, string.Empty, string.Empty, 0, 0, 0);
}
/// <summary>
/// Test basic operation add/remove
/// </summary>
[Test]
public void CheckBasicOperation()
{
    CacheManager manager = new CacheManager("3.5");
    // Asking twice for the same project/properties/version must return the same scope.
    CacheScope scopeA = manager.GetCacheScope("Test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.Items);
    Assert.IsNotNull(scopeA, "Cache should not have an entry");
    CacheScope scopeAAgain = manager.GetCacheScope("Test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.Items);
    Assert.AreEqual(scopeA, scopeAAgain, "Expected to get the same scope");
    // A different project must yield a distinct scope.
    CacheScope scopeB = manager.GetCacheScope("Test1.proj", new BuildPropertyGroup(), "3.5", CacheContentType.Items);
    Assert.IsNotNull(scopeB, "Cache should not have an entry");
    Assert.AreNotEqual(scopeA, scopeB, "Expected to get different scopes");
    // Add an entry to each scope and verify it lands in the right one.
    CacheEntry entryA = new BuildResultCacheEntry("TestEntry", null, true);
    CacheEntry entryB = new BuildResultCacheEntry("TestEntry1", null, true);
    manager.SetCacheEntries(new CacheEntry[] { entryA }, "Test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.Items);
    Assert.IsNotNull(scopeA.GetCacheEntry("TestEntry"), "Cache should have an entry");
    Assert.IsNotNull(manager.GetCacheEntries(new string[] { "TestEntry" }, "Test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.Items)[0],
        "Cache should have an entry");
    manager.SetCacheEntries(new CacheEntry[] { entryB }, "Test1.proj", new BuildPropertyGroup(), "3.5", CacheContentType.Items);
    Assert.IsNotNull(scopeB.GetCacheEntry("TestEntry1"), "Cache should have an entry");
    Assert.IsNotNull(manager.GetCacheEntries(new string[] { "TestEntry1" }, "Test1.proj", new BuildPropertyGroup(), "3.5", CacheContentType.Items)[0],
        "Cache should have an entry");
    // Clearing the cache must produce a brand-new scope on the next lookup.
    manager.ClearCache();
    Assert.AreNotEqual(scopeA, manager.GetCacheScope("Test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.Items),
        "Expected to get different scopes");
}
/// <summary>
/// Verifies GetCachedBuildResult: null/zero-length target lists, missing/empty/mismatched
/// scopes, and fully cached results for single- and multi-target requests.
/// </summary>
[Test]
public void TestRequestCaching()
{
CacheManager cacheManager = new CacheManager("3.5");
ArrayList actuallyBuiltTargets;
// Test the case where we pass in null targets
Dictionary<string, string> dictionary = new Dictionary<string,string>();
BuildRequest emptyRequest = new BuildRequest(1, "test.proj", null, dictionary, null, 1, true, false);
Assert.IsNull(cacheManager.GetCachedBuildResult(emptyRequest, out actuallyBuiltTargets), "Expect a null return value if T=null");
// Test the case where we pass in length 0 targets
BuildRequest length0Request = new BuildRequest(1, "test.proj", new string[0], dictionary, null, 1, true, false);
Assert.IsNull(cacheManager.GetCachedBuildResult(length0Request, out actuallyBuiltTargets), "Expect a null return value if T.Length=0");
// Test the case when the scope doesn't exist
string[] targets = new string[1]; targets[0] = "Target1";
BuildRequest length1Request = new BuildRequest(1, "test.proj", targets, new BuildPropertyGroup(), null, 1, true, false);
Assert.IsNull(cacheManager.GetCachedBuildResult(length1Request, out actuallyBuiltTargets), "Expect a null return value if no scope");
// Test the case when the scope exists but is empty
CacheScope cacheScope = cacheManager.GetCacheScope("test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.BuildResults);
Assert.IsNull(cacheManager.GetCachedBuildResult(length1Request, out actuallyBuiltTargets), "Expect a null return value if scope is empty");
// Test the case when the scope exists but contains wrong data (a result for a
// different target than the one requested)
CacheEntry cacheEntry = new BuildResultCacheEntry("Target2", null, true);
cacheManager.SetCacheEntries(new CacheEntry[] { cacheEntry }, "test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.BuildResults);
Assert.IsNull(cacheManager.GetCachedBuildResult(length1Request, out actuallyBuiltTargets), "Expect a null return value if scope contains wrong data");
// Test the case when everything is correct. The marker entries (default/initial
// targets and project id) are needed for a cached result to be usable.
cacheScope.AddCacheEntry(new PropertyCacheEntry(Constants.defaultTargetCacheName, string.Empty));
cacheScope.AddCacheEntry(new PropertyCacheEntry(Constants.initialTargetCacheName, string.Empty));
cacheScope.AddCacheEntry(new PropertyCacheEntry(Constants.projectIdCacheName, "1"));
cacheEntry = new BuildResultCacheEntry("Target1", null, true);
cacheManager.SetCacheEntries(new CacheEntry[] { cacheEntry }, "test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.BuildResults);
BuildResult buildResult = cacheManager.GetCachedBuildResult(length1Request, out actuallyBuiltTargets);
Assert.IsNotNull(buildResult, "Expect a cached value if scope contains data");
Assert.AreEqual(1, actuallyBuiltTargets.Count);
Assert.AreEqual("Target1", actuallyBuiltTargets[0]);
Assert.AreEqual(1, buildResult.ResultByTarget.Count);
Assert.AreEqual(Target.BuildState.CompletedSuccessfully, buildResult.ResultByTarget["Target1"]);
// Test the case when the scope contains partially correct data
// (Target2 is cached, Target3 is not — the request must not be served from cache)
targets = new string[2]; targets[0] = "Target2"; targets[1] = "Target3";
BuildRequest length2Request = new BuildRequest(1, "test.proj", targets, new BuildPropertyGroup(), null, 1, true, false);
Assert.IsNull(cacheManager.GetCachedBuildResult(length2Request, out actuallyBuiltTargets), "Expect a null return value if partial data in the scope");
// Test the correctness case for multiple targets
cacheEntry = new BuildResultCacheEntry("Target3", null, true);
cacheManager.SetCacheEntries(new CacheEntry[] { cacheEntry }, "test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.BuildResults);
buildResult = cacheManager.GetCachedBuildResult(length2Request, out actuallyBuiltTargets);
Assert.IsNotNull(buildResult, "Expect a cached value if scope contains data");
Assert.AreEqual(2, actuallyBuiltTargets.Count);
Assert.AreEqual("Target2", actuallyBuiltTargets[0]);
Assert.AreEqual("Target3", actuallyBuiltTargets[1]);
Assert.AreEqual(2, buildResult.ResultByTarget.Count);
Assert.AreEqual(Target.BuildState.CompletedSuccessfully, buildResult.ResultByTarget["Target2"]);
Assert.AreEqual(Target.BuildState.CompletedSuccessfully, buildResult.ResultByTarget["Target3"]);
Assert.AreEqual(1, buildResult.ProjectId);
}
/// <summary>
/// Test request caching with default/initial targets: a request with null targets
/// resolves to initial + default targets; a specific request still includes the
/// initial targets first.
/// </summary>
[Test]
public void TestRequestCachingDefaultInitialTargets()
{
CacheManager cacheManager = new CacheManager("3.5");
ArrayList actuallyBuiltTargets;
// Declare default targets (Target1;Target2), one initial target (Initial1) and project id 5.
CacheScope cacheScope = cacheManager.GetCacheScope("test.proj", new BuildPropertyGroup(), null, CacheContentType.BuildResults);
cacheScope.AddCacheEntry(new PropertyCacheEntry(Constants.defaultTargetCacheName, "Target1;Target2"));
cacheScope.AddCacheEntry(new PropertyCacheEntry(Constants.initialTargetCacheName, "Initial1"));
cacheScope.AddCacheEntry(new PropertyCacheEntry(Constants.projectIdCacheName, "5"));
// Cache results: Target2 failed (false); everything else succeeded.
CacheEntry cacheEntry = new BuildResultCacheEntry("Initial1", null, true);
cacheManager.SetCacheEntries(new CacheEntry[] { cacheEntry }, "test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.BuildResults);
cacheEntry = new BuildResultCacheEntry("Target1", null, true);
cacheManager.SetCacheEntries(new CacheEntry[] { cacheEntry }, "test.proj", new BuildPropertyGroup(), null, CacheContentType.BuildResults);
cacheEntry = new BuildResultCacheEntry("Target2", null, false);
cacheManager.SetCacheEntries(new CacheEntry[] { cacheEntry }, "test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.BuildResults);
cacheEntry = new BuildResultCacheEntry("Target3", null, true);
cacheManager.SetCacheEntries(new CacheEntry[] { cacheEntry }, "test.proj", new BuildPropertyGroup(), null, CacheContentType.BuildResults);
// Default target request (null targets): expect Initial1, then the default targets;
// overall EvaluationResult is false because Target2 failed.
BuildRequest defaultRequest = new BuildRequest(1, "test.proj", null, new BuildPropertyGroup(), null, 1, true, false);
BuildResult buildResult = cacheManager.GetCachedBuildResult(defaultRequest, out actuallyBuiltTargets);
Assert.IsNotNull(buildResult, "Expect a cached value if scope contains data");
Assert.AreEqual(3, actuallyBuiltTargets.Count);
Assert.AreEqual("Initial1", actuallyBuiltTargets[0]);
Assert.AreEqual("Target1", actuallyBuiltTargets[1]);
Assert.AreEqual("Target2", actuallyBuiltTargets[2]);
Assert.AreEqual(3, buildResult.ResultByTarget.Count);
Assert.AreEqual(Target.BuildState.CompletedSuccessfully, buildResult.ResultByTarget["Initial1"]);
Assert.AreEqual(Target.BuildState.CompletedSuccessfully, buildResult.ResultByTarget["Target1"]);
Assert.AreEqual(Target.BuildState.CompletedUnsuccessfully, buildResult.ResultByTarget["Target2"]);
Assert.AreEqual(false, buildResult.EvaluationResult);
Assert.AreEqual(5, buildResult.ProjectId);
// Specific target request: initial targets still run first, then the requested target;
// all involved targets succeeded so EvaluationResult is true.
BuildRequest specificRequest = new BuildRequest(1, "test.proj", new string[] { "Target3" }, new BuildPropertyGroup(), null, 1, true, false);
buildResult = cacheManager.GetCachedBuildResult(specificRequest, out actuallyBuiltTargets);
Assert.IsNotNull(buildResult, "Expect a cached value if scope contains data");
Assert.AreEqual(2, actuallyBuiltTargets.Count);
Assert.AreEqual("Initial1", actuallyBuiltTargets[0]);
Assert.AreEqual("Target3", actuallyBuiltTargets[1]);
Assert.AreEqual(2, buildResult.ResultByTarget.Count);
Assert.AreEqual(Target.BuildState.CompletedSuccessfully, buildResult.ResultByTarget["Initial1"]);
Assert.AreEqual(Target.BuildState.CompletedSuccessfully, buildResult.ResultByTarget["Target3"]);
Assert.AreEqual(true, buildResult.EvaluationResult);
Assert.AreEqual(5, buildResult.ProjectId);
}
/// <summary>
/// Verifies that clearing a cache scope discards it, so the next lookup
/// for the same project creates a fresh scope instance.
/// </summary>
[Test]
public void TestClearCacheScope()
{
    CacheManager manager = new CacheManager("3.5");

    // Create a scope and populate it with a couple of marker entries.
    CacheScope scope = manager.GetCacheScope("test.proj", new BuildPropertyGroup(), null, CacheContentType.BuildResults);
    scope.AddCacheEntry(new PropertyCacheEntry(Constants.defaultTargetCacheName, "Target1;Target2"));
    scope.AddCacheEntry(new PropertyCacheEntry(Constants.initialTargetCacheName, "Initial1"));

    // After clearing, a lookup must return a different scope object.
    manager.ClearCacheScope("test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.BuildResults);
    Assert.AreNotEqual(scope, manager.GetCacheScope("test.proj", new BuildPropertyGroup(), "3.5", CacheContentType.BuildResults), "Expected to get different scopes");
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Net;
using System.Text;
using System.Web.Http;
using System.Web.Http.ModelBinding;
using AutoMapper;
using ClientDependency.Core;
using Examine.LuceneEngine;
using Examine.LuceneEngine.Providers;
using Newtonsoft.Json;
using Umbraco.Core;
using Umbraco.Core.Logging;
using Umbraco.Core.Models.Membership;
using Umbraco.Core.Services;
using Umbraco.Web.Models.ContentEditing;
using Umbraco.Web.Mvc;
using System.Linq;
using Umbraco.Core.Models.EntityBase;
using Umbraco.Core.Models;
using Umbraco.Web.WebApi.Filters;
using umbraco.cms.businesslogic.packager;
using Constants = Umbraco.Core.Constants;
using Examine;
using Examine.LuceneEngine.SearchCriteria;
using Examine.SearchCriteria;
using Umbraco.Web.Dynamics;
using umbraco;
using System.Text.RegularExpressions;
namespace Umbraco.Web.Editors
{
/// <summary>
/// The API controller used for getting entity objects, basic name, icon, id representation of umbraco objects that are based on CMSNode
/// </summary>
/// <remarks>
/// Some objects such as macros are not based on CMSNode
/// </remarks>
[EntityControllerConfiguration]
[PluginController("UmbracoApi")]
public class EntityController : UmbracoAuthorizedJsonController
{
/// <summary>
/// Searches for results based on the entity type
/// </summary>
/// <param name="query">The search text; a blank query short-circuits to an empty result</param>
/// <param name="type">The entity type to search against</param>
/// <param name="searchFrom">
/// A starting point for the search, generally a node id, but for members this is a member type alias
/// </param>
/// <returns>The matching entities, or an empty sequence for a blank query</returns>
[HttpGet]
public IEnumerable<EntityBasic> Search(string query, UmbracoEntityTypes type, string searchFrom = null)
{
    //TODO: Should we restrict search results based on what app the user has access to?
    // - Theoretically you shouldn't be able to see member data if you don't have access to members right?
    if (string.IsNullOrEmpty(query) == false)
    {
        return ExamineSearch(query, type, searchFrom);
    }
    return Enumerable.Empty<EntityBasic>();
}
/// <summary>
/// Searches for all content that the user is allowed to see (based on their allowed sections)
/// </summary>
/// <param name="query">The search text; a blank query short-circuits to an empty result</param>
/// <returns>One EntityTypeSearchResult per section the user may edit, in Content, Media, Members order</returns>
/// <remarks>
/// Even though a normal entity search will allow any user to search on a entity type that they may not have access to edit, we need
/// to filter these results to the sections they are allowed to edit since this search function is explicitly for the global search
/// so if we showed entities that they weren't allowed to edit they would get errors when clicking on the result.
///
/// The reason a user is allowed to search individual entity types that they are not allowed to edit is because those search
/// methods might be used in things like pickers in the content editor.
/// </remarks>
[HttpGet]
public IEnumerable<EntityTypeSearchResult> SearchAll(string query)
{
    if (string.IsNullOrEmpty(query))
        return Enumerable.Empty<EntityTypeSearchResult>();

    var allowedSections = Security.CurrentUser.AllowedSections.ToArray();

    // Map each searchable section to its entity type; array order determines result order
    // (previously three copy-pasted if-blocks — behavior is unchanged).
    var searchableSections = new[]
    {
        new KeyValuePair<string, UmbracoEntityTypes>(Constants.Applications.Content, UmbracoEntityTypes.Document),
        new KeyValuePair<string, UmbracoEntityTypes>(Constants.Applications.Media, UmbracoEntityTypes.Media),
        new KeyValuePair<string, UmbracoEntityTypes>(Constants.Applications.Members, UmbracoEntityTypes.Member)
    };

    var result = new List<EntityTypeSearchResult>();
    foreach (var section in searchableSections)
    {
        // Only include entity types whose section the user is allowed to edit.
        if (allowedSections.InvariantContains(section.Key) == false) continue;

        result.Add(new EntityTypeSearchResult
        {
            Results = ExamineSearch(query, section.Value),
            EntityType = section.Value.ToString()
        });
    }
    return result;
}
/// <summary>
/// Gets the path for a given node ID
/// </summary>
/// <param name="id">The node id to look up</param>
/// <param name="type">The entity type of the node</param>
/// <returns>The ancestor ids (root first) parsed from the entity's comma-separated Path</returns>
public IEnumerable<int> GetPath(int id, UmbracoEntityTypes type)
{
    var entity = GetResultForId(id, type);
    var segments = entity.Path.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
    return segments.Select(int.Parse);
}
/// <summary>
/// Gets an entity by it's unique id if the entity supports that
/// </summary>
/// <param name="id">The entity's unique (Guid) key</param>
/// <param name="type">The entity type to resolve the key against</param>
/// <returns>The mapped entity; a 404 HttpResponseException is thrown by the lookup when not found</returns>
public EntityBasic GetByKey(Guid id, UmbracoEntityTypes type)
{
return GetResultForKey(id, type);
}
/// <summary>
/// Gets an entity by a xpath query
/// </summary>
/// <param name="query">The xpath expression, optionally starting with $current/$parent/$site/$root</param>
/// <param name="nodeContextId">The node id used to resolve the $ pseudo-variables</param>
/// <param name="type">Must be UmbracoEntityTypes.Document; anything else throws</param>
/// <returns>The matching entity, or null when no published node matches the query</returns>
public EntityBasic GetByQuery(string query, int nodeContextId, UmbracoEntityTypes type)
{
    // Only content is addressable via xpath in the published cache.
    if (type != UmbracoEntityTypes.Document)
        throw new ArgumentException("Get by query is only compatible with entities of type Document"); // fixed typo: "enitities"

    var q = ParseXPathQuery(query, nodeContextId);
    var node = Umbraco.TypedContentSingleAtXPath(q);

    if (node == null)
        return null;

    return GetById(node.Id, type);
}
//PP: wip in progress on the query parser
// Expands the $current/$parent/$site/$root pseudo-variable at the START of an xpath
// query into concrete xpath rooted at the nearest ancestor of <paramref name="id"/>
// that is published (unpublished nodes are not addressable in the published cache).
// Queries that do not start with '$' are returned unchanged.
private string ParseXPathQuery(string query, int id)
{
//no need to parse it
if (!query.StartsWith("$"))
return query;
//get full path: ids from the node itself up to the root (nearest first, via Reverse)
Func<int, IEnumerable<string>> getPath = delegate(int nodeid){
var ent = Services.EntityService.Get(nodeid);
return ent.Path.Split(',').Reverse();
};
//get nearest published item; returns -1 when nothing along the path is published
Func<IEnumerable<string>, int> getClosestPublishedAncestor = (path =>
{
foreach (var _id in path)
{
var item = Umbraco.TypedContent(int.Parse(_id));
if (item != null)
return item.Id;
}
return -1;
});
var rootXpath = "descendant::*[@id={0}]";
//parseable items: each handler rewrites its own variable prefix
var vars = new Dictionary<string, Func<string, string>>();
vars.Add("$current", q => {
var _id = getClosestPublishedAncestor(getPath(id));
return q.Replace("$current", string.Format(rootXpath, _id));
});
vars.Add("$parent", q =>
{
//remove the first item in the array if its the current node
//this happens when current is published, but we are looking for its parent specifically
var path = getPath(id);
if(path.ElementAt(0) == id.ToString()){
path = path.Skip(1);
}
var _id = getClosestPublishedAncestor(path);
return q.Replace("$parent", string.Format(rootXpath, _id));
});
vars.Add("$site", q =>
{
var _id = getClosestPublishedAncestor(getPath(id));
return q.Replace("$site", string.Format(rootXpath, _id) + "/ancestor-or-self::*[@level = 1]");
});
vars.Add("$root", q =>
{
return q.Replace("$root", string.Empty);
});
// Only the first matching variable is expanded (the query starts with exactly one).
// NOTE(review): iteration relies on Dictionary preserving insertion order — confirm
// the variable keys can never prefix-collide if more are added.
foreach (var varible in vars)
{
if (query.StartsWith(varible.Key))
{
query = varible.Value.Invoke(query);
break;
}
}
return query;
}
/// <summary>
/// Gets an entity by its integer id
/// </summary>
/// <param name="id">The node id</param>
/// <param name="type">The entity type to resolve the id against</param>
/// <returns>The mapped entity; the lookup throws a 404 HttpResponseException when not found</returns>
public EntityBasic GetById(int id, UmbracoEntityTypes type)
{
return GetResultForId(id, type);
}
/// <summary>
/// Gets a collection of entities by their integer ids
/// </summary>
/// <param name="ids">The node ids; a missing (null) parameter yields a 404</param>
/// <param name="type">The entity type to resolve the ids against</param>
public IEnumerable<EntityBasic> GetByIds([FromUri]int[] ids, UmbracoEntityTypes type)
{
    // Model binding failed / parameter omitted entirely.
    if (ids == null) throw new HttpResponseException(HttpStatusCode.NotFound);

    return GetResultForIds(ids, type);
}
/// <summary>
/// Gets a collection of entities by their unique (Guid) keys
/// </summary>
/// <param name="ids">The entity keys; a missing (null) parameter yields a 404</param>
/// <param name="type">The entity type to resolve the keys against</param>
public IEnumerable<EntityBasic> GetByKeys([FromUri]Guid[] ids, UmbracoEntityTypes type)
{
    // Model binding failed / parameter omitted entirely.
    if (ids == null) throw new HttpResponseException(HttpStatusCode.NotFound);

    return GetResultForKeys(ids, type);
}
/// <summary>
/// Gets the child entities of the given node id
/// </summary>
public IEnumerable<EntityBasic> GetChildren(int id, UmbracoEntityTypes type)
{
return GetResultForChildren(id, type);
}
/// <summary>
/// Gets the ancestor entities of the given node id (derived from its Path)
/// </summary>
public IEnumerable<EntityBasic> GetAncestors(int id, UmbracoEntityTypes type)
{
return GetResultForAncestors(id, type);
}
/// <summary>
/// Gets all entities of the given type, optionally narrowed by a dynamic-linq post filter
/// </summary>
/// <param name="type">The entity type to list</param>
/// <param name="postFilter">Optional dynamic where-clause applied after retrieval</param>
/// <param name="postFilterParams">Optional parameters substituted into the post filter</param>
public IEnumerable<EntityBasic> GetAll(UmbracoEntityTypes type, string postFilter, [FromUri]IDictionary<string, object> postFilterParams)
{
return GetResultForAll(type, postFilter, postFilterParams);
}
/// <summary>
/// Searches for results based on the entity type
/// </summary>
/// <param name="query">The user-entered search text</param>
/// <param name="entityType">Member, Media or Document — any other type throws NotSupportedException</param>
/// <param name="searchFrom">
/// A starting point for the search, generally a node id, but for members this is a member type alias
/// </param>
/// <returns>The mapped search results, capped at 200 hits</returns>
private IEnumerable<EntityBasic> ExamineSearch(string query, UmbracoEntityTypes entityType, string searchFrom = null)
{
var sb = new StringBuilder();
string type;
var searcher = Constants.Examine.InternalSearcher;
var fields = new[] { "id", "__NodeId" };
//TODO: WE should really just allow passing in a lucene raw query
// Per entity type: pick the searcher/index-type name, the extra fields to match,
// and (optionally) a scope clause restricting where results may come from.
switch (entityType)
{
case UmbracoEntityTypes.Member:
searcher = Constants.Examine.InternalMemberSearcher;
type = "member";
fields = new[] { "id", "__NodeId", "email", "loginName"};
// For members searchFrom is a member type alias, not a node id.
if (searchFrom != null && searchFrom != Constants.Conventions.MemberTypes.AllMembersListId && searchFrom.Trim() != "-1")
{
sb.Append("+__NodeTypeAlias:");
sb.Append(searchFrom);
sb.Append(" ");
}
break;
case UmbracoEntityTypes.Media:
type = "media";
var mediaSearchFrom = int.MinValue;
// Restrict to the subtree of the user's media start node, or of an explicit
// numeric searchFrom (searchFrom takes precedence when > 0).
if (Security.CurrentUser.StartMediaId > 0 ||
//if searchFrom is specified and it is greater than 0
(searchFrom != null && int.TryParse(searchFrom, out mediaSearchFrom) && mediaSearchFrom > 0))
{
sb.Append("+__Path: \\-1*\\,");
sb.Append(mediaSearchFrom > 0
? mediaSearchFrom.ToString(CultureInfo.InvariantCulture)
: Security.CurrentUser.StartMediaId.ToString(CultureInfo.InvariantCulture));
sb.Append("\\,* ");
}
break;
case UmbracoEntityTypes.Document:
type = "content";
var contentSearchFrom = int.MinValue;
// Same subtree restriction as media, against the content start node.
if (Security.CurrentUser.StartContentId > 0 ||
//if searchFrom is specified and it is greater than 0
(searchFrom != null && int.TryParse(searchFrom, out contentSearchFrom) && contentSearchFrom > 0))
{
sb.Append("+__Path: \\-1*\\,");
sb.Append(contentSearchFrom > 0
? contentSearchFrom.ToString(CultureInfo.InvariantCulture)
: Security.CurrentUser.StartContentId.ToString(CultureInfo.InvariantCulture));
sb.Append("\\,* ");
}
break;
default:
throw new NotSupportedException("The " + typeof(EntityController) + " currently does not support searching against object type " + entityType);
}
var internalSearcher = (LuceneSearcher)ExamineManager.Instance.SearchProviderCollection[searcher];
//build a lucene query:
// the __nodeName will be boosted 10x without wildcards
// then __nodeName will be matched normally with wildcards
// the rest will be normal without wildcards
//check if text is surrounded by single or double quotes, if so, then exact match
var surroundedByQuotes = Regex.IsMatch(query, "^\".*?\"$")
|| Regex.IsMatch(query, "^\'.*?\'$");
if (surroundedByQuotes)
{
//strip quotes, escape string, the replace again
query = query.Trim(new[] { '\"', '\'' });
query = Lucene.Net.QueryParsers.QueryParser.Escape(query);
// Nothing left after stripping/escaping — no point hitting the index.
if (query.IsNullOrWhiteSpace())
{
return new List<EntityBasic>();
}
//add back the surrounding quotes
query = string.Format("{0}{1}{0}", "\"", query);
//node name exactly boost x 10
sb.Append("+(__nodeName: (");
sb.Append(query.ToLower());
sb.Append(")^10.0 ");
foreach (var f in fields)
{
//additional fields normally
sb.Append(f);
sb.Append(": (");
sb.Append(query);
sb.Append(") ");
}
}
else
{
// Blank (or quotes-only) query — no point hitting the index.
if (query.Trim(new[] { '\"', '\'' }).IsNullOrWhiteSpace())
{
return new List<EntityBasic>();
}
query = Lucene.Net.QueryParsers.QueryParser.Escape(query);
var querywords = query.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
//node name exactly boost x 10
sb.Append("+(__nodeName:");
sb.Append("\"");
sb.Append(query.ToLower());
sb.Append("\"");
sb.Append("^10.0 ");
//node name normally with wildcards
sb.Append(" __nodeName:");
sb.Append("(");
foreach (var w in querywords)
{
sb.Append(w.ToLower());
sb.Append("* ");
}
sb.Append(") ");
foreach (var f in fields)
{
//additional fields normally
sb.Append(f);
sb.Append(":");
sb.Append("(");
foreach (var w in querywords)
{
sb.Append(w.ToLower());
sb.Append("* ");
}
sb.Append(")");
sb.Append(" ");
}
}
//must match index type (also closes the "+(" group opened above)
sb.Append(") +__IndexType:");
sb.Append(type);
var raw = internalSearcher.CreateSearchCriteria().RawQuery(sb.ToString());
//limit results to 200 to avoid huge over processing (CPU)
var result = internalSearcher.Search(raw, 200);
// Map raw search results to EntityBasic per entity type.
switch (entityType)
{
case UmbracoEntityTypes.Member:
return MemberFromSearchResults(result);
case UmbracoEntityTypes.Media:
return MediaFromSearchResults(result);
case UmbracoEntityTypes.Document:
return ContentFromSearchResults(result);
default:
throw new NotSupportedException("The " + typeof(EntityController) + " currently does not support searching against object type " + entityType);
}
}
/// <summary>
/// Returns a collection of entities for members based on search results
/// (summary previously said "media" — copy/paste error)
/// </summary>
/// <param name="results">The raw Examine search results for the member index</param>
/// <returns>The mapped entities with member icon, email and key populated where available</returns>
private IEnumerable<EntityBasic> MemberFromSearchResults(ISearchResults results)
{
    var mapped = Mapper.Map<IEnumerable<EntityBasic>>(results).ToArray();
    //add additional data
    foreach (var m in mapped)
    {
        //if no icon could be mapped, it will be set to document, so change it to user
        if (m.Icon == "icon-document")
        {
            m.Icon = "icon-user";
        }
        var searchResult = results.First(x => x.Id.ToInvariantString() == m.Id.ToString());
        if (searchResult.Fields.ContainsKey("email") && searchResult.Fields["email"] != null)
        {
            // Reuse searchResult instead of re-running results.First(...) a second time.
            m.AdditionalData["Email"] = searchResult.Fields["email"];
        }
        if (searchResult.Fields.ContainsKey("__key") && searchResult.Fields["__key"] != null)
        {
            Guid key;
            if (Guid.TryParse(searchResult.Fields["__key"], out key))
            {
                m.Key = key;
            }
        }
    }
    return mapped;
}
/// <summary>
/// Returns a collection of entities for media based on search results
/// </summary>
/// <param name="results">The raw Examine search results for the media index</param>
/// <returns>The mapped entities with the media icon applied where the mapper fell back to the document icon</returns>
private IEnumerable<EntityBasic> MediaFromSearchResults(ISearchResults results)
{
    var mapped = Mapper.Map<IEnumerable<EntityBasic>>(results).ToArray();
    // The mapper defaults unknown icons to "icon-document"; media should show a picture.
    foreach (var entity in mapped.Where(x => x.Icon == "icon-document"))
    {
        entity.Icon = "icon-picture";
    }
    return mapped;
}
/// <summary>
/// Returns a collection of entities for content based on search results
/// </summary>
/// <param name="results">The raw Examine search results for the content index</param>
/// <returns>The mapped entities, each annotated with its nice Url when the id converts to int</returns>
private IEnumerable<EntityBasic> ContentFromSearchResults(ISearchResults results)
{
    var mapped = Mapper.Map<ISearchResults, IEnumerable<EntityBasic>>(results).ToArray();
    // Attach the friendly Url for every result whose id is an integer.
    foreach (var entity in mapped)
    {
        var conversion = entity.Id.TryConvertTo<int>();
        if (conversion.Success == false) continue;
        entity.AdditionalData["Url"] = Umbraco.NiceUrl(conversion.Result);
    }
    return mapped;
}
/// <summary>
/// Maps the children of the given node id to EntityBasic for entity types that have
/// an UmbracoObjectTypes equivalent; all other types throw NotSupportedException.
/// </summary>
private IEnumerable<EntityBasic> GetResultForChildren(int id, UmbracoEntityTypes entityType)
{
var objectType = ConvertToObjectType(entityType);
if (objectType.HasValue)
{
//TODO: Need to check for Object types that support hierarchic here, some might not.
return Services.EntityService.GetChildren(id, objectType.Value)
.WhereNotNull()
.Select(Mapper.Map<EntityBasic>);
}
//now we need to convert the unknown ones
switch (entityType)
{
case UmbracoEntityTypes.Domain:
case UmbracoEntityTypes.Language:
case UmbracoEntityTypes.User:
case UmbracoEntityTypes.Macro:
default:
throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + entityType);
}
}
/// <summary>
/// Maps the ancestors of the given node id (taken from its comma-separated Path,
/// which includes the node itself) to EntityBasic for entity types that have an
/// UmbracoObjectTypes equivalent; all other types throw NotSupportedException.
/// </summary>
private IEnumerable<EntityBasic> GetResultForAncestors(int id, UmbracoEntityTypes entityType)
{
var objectType = ConvertToObjectType(entityType);
if (objectType.HasValue)
{
//TODO: Need to check for Object types that support hierarchic here, some might not.
var ids = Services.EntityService.Get(id).Path.Split(',').Select(int.Parse).Distinct().ToArray();
return Services.EntityService.GetAll(objectType.Value, ids)
.WhereNotNull()
.Select(Mapper.Map<EntityBasic>);
}
//now we need to convert the unknown ones
switch (entityType)
{
case UmbracoEntityTypes.PropertyType:
case UmbracoEntityTypes.PropertyGroup:
case UmbracoEntityTypes.Domain:
case UmbracoEntityTypes.Language:
case UmbracoEntityTypes.User:
case UmbracoEntityTypes.Macro:
default:
throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + entityType);
}
}
/// <summary>
/// Gets the result for the entity list based on the type
/// </summary>
/// <param name="entityType"></param>
/// <param name="postFilter">A string where filter that will filter the results dynamically with linq - optional</param>
/// <param name="postFilterParams">the parameters to fill in the string where filter - optional</param>
/// <returns>The mapped entities; types without an UmbracoObjectTypes equivalent are handled
/// case-by-case (Macro, PropertyType, PropertyGroup, User) and the rest throw</returns>
private IEnumerable<EntityBasic> GetResultForAll(UmbracoEntityTypes entityType, string postFilter = null, IDictionary<string, object> postFilterParams = null)
{
var objectType = ConvertToObjectType(entityType);
if (objectType.HasValue)
{
//TODO: Should we order this by something ?
var entities = Services.EntityService.GetAll(objectType.Value).WhereNotNull().Select(Mapper.Map<EntityBasic>);
return ExecutePostFilter(entities, postFilter, postFilterParams);
}
//now we need to convert the unknown ones
switch (entityType)
{
case UmbracoEntityTypes.Macro:
//Get all macros from the macro service
var macros = Services.MacroService.GetAll().WhereNotNull().OrderBy(x => x.Name);
var filteredMacros = ExecutePostFilter(macros, postFilter, postFilterParams);
return filteredMacros.Select(Mapper.Map<EntityBasic>);
case UmbracoEntityTypes.PropertyType:
//get all document types, then combine all property types into one list
//(de-duplicated by alias across content and media types)
var propertyTypes = Services.ContentTypeService.GetAllContentTypes().Cast<IContentTypeComposition>()
.Concat(Services.ContentTypeService.GetAllMediaTypes())
.ToArray()
.SelectMany(x => x.PropertyTypes)
.DistinctBy(composition => composition.Alias);
var filteredPropertyTypes = ExecutePostFilter(propertyTypes, postFilter, postFilterParams);
return Mapper.Map<IEnumerable<PropertyType>, IEnumerable<EntityBasic>>(filteredPropertyTypes);
case UmbracoEntityTypes.PropertyGroup:
//get all document types, then combine all property groups into one list
//(de-duplicated by name across content and media types)
var propertyGroups = Services.ContentTypeService.GetAllContentTypes().Cast<IContentTypeComposition>()
.Concat(Services.ContentTypeService.GetAllMediaTypes())
.ToArray()
.SelectMany(x => x.PropertyGroups)
.DistinctBy(composition => composition.Name);
var filteredpropertyGroups = ExecutePostFilter(propertyGroups, postFilter, postFilterParams);
return Mapper.Map<IEnumerable<PropertyGroup>, IEnumerable<EntityBasic>>(filteredpropertyGroups);
case UmbracoEntityTypes.User:
int total;
var users = Services.UserService.GetAll(0, int.MaxValue, out total);
var filteredUsers = ExecutePostFilter(users, postFilter, postFilterParams);
return Mapper.Map<IEnumerable<IUser>, IEnumerable<EntityBasic>>(filteredUsers);
case UmbracoEntityTypes.Domain:
case UmbracoEntityTypes.Language:
default:
throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + entityType);
}
}
/// <summary>
/// Gets the entities for the given Guid keys, returned in the same order as requested
/// (missing keys are silently dropped); throws for types without an UmbracoObjectTypes equivalent.
/// </summary>
private IEnumerable<EntityBasic> GetResultForKeys(IEnumerable<Guid> keys, UmbracoEntityTypes entityType)
{
    var keysArray = keys.ToArray();
    if (keysArray.Any() == false) return Enumerable.Empty<EntityBasic>();

    var objectType = ConvertToObjectType(entityType);
    if (objectType.HasValue)
    {
        var entities = Services.EntityService.GetAll(objectType.Value, keysArray)
            .WhereNotNull()
            .Select(Mapper.Map<EntityBasic>);

        // entities are in "some" order, put them back in the requested order.
        // BUGFIX: the lookup must be keyed by Key (the Guid) — keying by Id meant
        // the Guid probes below could never match, so this always returned empty.
        var xref = entities.ToDictionary(x => x.Key);
        var result = keysArray.Select(x => xref.ContainsKey(x) ? xref[x] : null).Where(x => x != null);
        return result;
    }
    //now we need to convert the unknown ones
    switch (entityType)
    {
        case UmbracoEntityTypes.PropertyType:
        case UmbracoEntityTypes.PropertyGroup:
        case UmbracoEntityTypes.Domain:
        case UmbracoEntityTypes.Language:
        case UmbracoEntityTypes.User:
        case UmbracoEntityTypes.Macro:
        default:
            throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + entityType);
    }
}
/// <summary>
/// Gets the entities for the given integer ids, returned in the same order as requested
/// (missing ids are silently dropped); throws for types without an UmbracoObjectTypes equivalent.
/// </summary>
private IEnumerable<EntityBasic> GetResultForIds(IEnumerable<int> ids, UmbracoEntityTypes entityType)
{
var idsArray = ids.ToArray();
if (idsArray.Any() == false) return Enumerable.Empty<EntityBasic>();
var objectType = ConvertToObjectType(entityType);
if (objectType.HasValue)
{
var entities = Services.EntityService.GetAll(objectType.Value, idsArray)
.WhereNotNull()
.Select(Mapper.Map<EntityBasic>);
// entities are in "some" order, put them back in order
var xref = entities.ToDictionary(x => x.Id);
var result = idsArray.Select(x => xref.ContainsKey(x) ? xref[x] : null).Where(x => x != null);
return result;
}
//now we need to convert the unknown ones
switch (entityType)
{
case UmbracoEntityTypes.PropertyType:
case UmbracoEntityTypes.PropertyGroup:
case UmbracoEntityTypes.Domain:
case UmbracoEntityTypes.Language:
case UmbracoEntityTypes.User:
case UmbracoEntityTypes.Macro:
default:
throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + entityType);
}
}
/// <summary>
/// Gets a single entity by its Guid key; throws 404 when not found and
/// NotSupportedException for types without an UmbracoObjectTypes equivalent.
/// </summary>
private EntityBasic GetResultForKey(Guid key, UmbracoEntityTypes entityType)
{
    var objectType = ConvertToObjectType(entityType);
    if (objectType.HasValue == false)
    {
        // No UmbracoObjectTypes mapping exists (macros, users, etc.)
        switch (entityType)
        {
            case UmbracoEntityTypes.PropertyType:
            case UmbracoEntityTypes.PropertyGroup:
            case UmbracoEntityTypes.Domain:
            case UmbracoEntityTypes.Language:
            case UmbracoEntityTypes.User:
            case UmbracoEntityTypes.Macro:
            default:
                throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + entityType);
        }
    }

    var found = Services.EntityService.GetByKey(key, objectType.Value);
    if (found == null)
    {
        throw new HttpResponseException(HttpStatusCode.NotFound);
    }
    return Mapper.Map<EntityBasic>(found);
}
/// <summary>
/// Gets a single entity by its integer id; throws 404 when not found and
/// NotSupportedException for types without an UmbracoObjectTypes equivalent.
/// </summary>
private EntityBasic GetResultForId(int id, UmbracoEntityTypes entityType)
{
    var objectType = ConvertToObjectType(entityType);
    if (objectType.HasValue == false)
    {
        // No UmbracoObjectTypes mapping exists (macros, users, etc.)
        switch (entityType)
        {
            case UmbracoEntityTypes.PropertyType:
            case UmbracoEntityTypes.PropertyGroup:
            case UmbracoEntityTypes.Domain:
            case UmbracoEntityTypes.Language:
            case UmbracoEntityTypes.User:
            case UmbracoEntityTypes.Macro:
            default:
                throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + entityType);
        }
    }

    var found = Services.EntityService.Get(id, objectType.Value);
    if (found == null)
    {
        throw new HttpResponseException(HttpStatusCode.NotFound);
    }
    return Mapper.Map<EntityBasic>(found);
}
/// <summary>
/// Maps an UmbracoEntityTypes value to its UmbracoObjectTypes equivalent, or null
/// when no equivalent exists (Macro, User, Domain, Language, ...).
/// </summary>
private static UmbracoObjectTypes? ConvertToObjectType(UmbracoEntityTypes entityType)
{
    switch (entityType)
    {
        case UmbracoEntityTypes.Document:
            return UmbracoObjectTypes.Document;
        case UmbracoEntityTypes.Media:
            return UmbracoObjectTypes.Media;
        case UmbracoEntityTypes.MemberType:
            // BUGFIX: previously returned UmbracoObjectTypes.MediaType (copy/paste error),
            // which made member-type lookups resolve against the media type tree.
            return UmbracoObjectTypes.MemberType;
        case UmbracoEntityTypes.Template:
            return UmbracoObjectTypes.Template;
        case UmbracoEntityTypes.MemberGroup:
            return UmbracoObjectTypes.MemberGroup;
        case UmbracoEntityTypes.ContentItem:
            return UmbracoObjectTypes.ContentItem;
        case UmbracoEntityTypes.MediaType:
            return UmbracoObjectTypes.MediaType;
        case UmbracoEntityTypes.DocumentType:
            return UmbracoObjectTypes.DocumentType;
        case UmbracoEntityTypes.Stylesheet:
            return UmbracoObjectTypes.Stylesheet;
        case UmbracoEntityTypes.Member:
            return UmbracoObjectTypes.Member;
        case UmbracoEntityTypes.DataType:
            return UmbracoObjectTypes.DataType;
        default:
            //There is no UmbracoEntity conversion (things like Macros, Users, etc...)
            return null;
    }
}
/// <summary>
/// Executes the post filter against a collection of objects
/// </summary>
/// <typeparam name="T">The element type being filtered</typeparam>
/// <param name="entities">The collection to filter</param>
/// <param name="postFilter">A dynamic-linq where clause; null/blank means no filtering</param>
/// <param name="postFilterParams">Optional parameters substituted into the filter</param>
/// <returns>The filtered (materialized) collection, or the original collection when no filter is given</returns>
private IEnumerable<T> ExecutePostFilter<T>(IEnumerable<T> entities, string postFilter, IDictionary<string, object> postFilterParams)
{
    // Nothing to apply — hand the collection back untouched.
    if (postFilter.IsNullOrWhiteSpace())
    {
        return entities;
    }

    var queryable = entities.AsQueryable();
    return postFilterParams == null
        ? queryable.Where(postFilter).ToArray()
        : queryable.Where(postFilter, postFilterParams).ToArray();
}
}
}
| |
using UnityEditor;
using UnityEngine;
using System.Collections.Generic;
using System.IO;
/// <summary>
/// Editor-side utilities for 2D Toolkit: version/release naming helpers,
/// management of the project-wide asset index (Assets/-tk2d.asset), and
/// miscellaneous editor helpers. The static constructor runs at editor load
/// (via [InitializeOnLoad]) to hook Undo/Redo.
/// </summary>
[InitializeOnLoad]
public static class tk2dEditorUtility
{
    public static double version = 1.92;
    public static int releaseId = 0; // < -10000 = alpha, other negative = beta release, 0 = final, positive = final patch

    // Hook the undoRedoPerformed callback via reflection - the field is
    // non-public in this Unity version, so absence means no undo support.
    static tk2dEditorUtility() {
        System.Reflection.FieldInfo undoCallback = typeof(EditorApplication).GetField("undoRedoPerformed", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Static);
        if (undoCallback != null) {
            undoCallback.SetValue(null, (EditorApplication.CallbackFunction)OnUndoRedo);
        }
        else {
            Debug.LogError("tk2d Undo/Redo callback failed. Undo/Redo not supported in this version of Unity.");
        }
    }

    // Rebuild any selected tk2d sprites / text meshes whose generated geometry
    // may have been invalidated by the undo/redo operation.
    static void OnUndoRedo() {
        foreach (GameObject go in Selection.gameObjects) {
            tk2dBaseSprite spr = go.GetComponent<tk2dBaseSprite>();
            if (spr != null) {
                spr.ForceBuild();
            }
            tk2dTextMesh tm = go.GetComponent<tk2dTextMesh>();
            if (tm != null) {
                tm.ForceBuild();
            }
        }
    }

    /// <summary>
    /// Human-readable release string, e.g. "1.92 final" or "1.92 beta 2".
    /// </summary>
    public static string ReleaseStringIdentifier(double _version, int _releaseId)
    {
        string id = _version.ToString();
        if (_releaseId == 0) id += " final";
        else if (_releaseId > 0) id += " final + patch " + _releaseId.ToString();
        else if (_releaseId < -10000) id += " alpha " + (-_releaseId - 10000).ToString();
        else if (_releaseId < 0) id += " beta " + (-_releaseId).ToString();
        return id;
    }

    /// <summary>
    /// Release filename for the current version
    /// </summary>
    public static string CurrentReleaseFileName(string product, double _version, int _releaseId)
    {
        string id = product + _version.ToString();
        if (_releaseId == 0) id += "final";
        else if (_releaseId > 0) id += "final_patch" + _releaseId.ToString();
        // Fixed: the alpha branch previously appended " alpha " with spaces,
        // producing a filename containing spaces - inconsistent with the
        // space-free "final"/"final_patch"/"beta" branches above.
        else if (_releaseId < -10000) id += "alpha" + (-_releaseId - 10000).ToString();
        else if (_releaseId < 0) id += "beta" + (-_releaseId).ToString();
        return id;
    }

    [MenuItem(tk2dMenu.root + "About", false, 10100)]
    public static void About2DToolkit()
    {
        EditorUtility.DisplayDialog("About 2D Toolkit",
                                    "2D Toolkit Version " + ReleaseStringIdentifier(version, releaseId) + "\n" +
                                    "Copyright (c) 2011 Unikron Software Ltd",
                                    "Ok");
    }

    [MenuItem(tk2dMenu.root + "Documentation", false, 10098)]
    public static void LaunchWikiDocumentation()
    {
        Application.OpenURL("http://www.2dtoolkit.com/doc");
    }

    [MenuItem(tk2dMenu.root + "Forum", false, 10099)]
    public static void LaunchForum()
    {
        Application.OpenURL("http://www.2dtoolkit.com/forum");
    }

    // Deletes and recreates the tk2d index asset, then rebuilds resources.
    [MenuItem(tk2dMenu.root + "Rebuild Index", false, 1)]
    public static void RebuildIndex()
    {
        AssetDatabase.DeleteAsset(indexPath);
        AssetDatabase.Refresh();
        CreateIndex();

        // Now rebuild system object
        tk2dSystemUtility.RebuildResources();
    }

    [MenuItem(tk2dMenu.root + "Preferences...", false, 1)]
    public static void ShowPreferences()
    {
        EditorWindow.GetWindow( typeof(tk2dPreferencesEditor), true, "2D Toolkit Preferences" );
    }

    // name is the filename of the prefab EXCLUDING .prefab.
    // Returns a unique asset path near the current selection, or prompts the
    // user for a location when nothing is selected.
    public static string CreateNewPrefab(string name)
    {
        Object obj = Selection.activeObject;
        string assetPath = AssetDatabase.GetAssetPath(obj);
        if (assetPath.Length == 0)
        {
            assetPath = tk2dGuiUtility.SaveFileInProject("Create...", "Assets/", name, "prefab");
        }
        else
        {
            // is a directory
            string path = System.IO.Directory.Exists(assetPath) ? assetPath : System.IO.Path.GetDirectoryName(assetPath);
            assetPath = AssetDatabase.GenerateUniqueAssetPath(path + "/" + name + ".prefab");
        }
        return assetPath;
    }

    const string indexPath = "Assets/-tk2d.asset";
    // Cached index instance; invalidated by UnloadUnusedAssets().
    static tk2dIndex index = null;

    // Returns the cached index, loading it from indexPath on first use.
    // May return null when no index asset exists yet.
    public static tk2dIndex GetExistingIndex()
    {
        if (index == null)
        {
            index = Resources.LoadAssetAtPath(indexPath, typeof(tk2dIndex)) as tk2dIndex;
        }
        return index;
    }

    public static tk2dIndex ForceCreateIndex()
    {
        CreateIndex();
        return GetExistingIndex();
    }

    // Returns the existing index, rebuilding it when missing or out of date.
    public static tk2dIndex GetOrCreateIndex()
    {
        tk2dIndex thisIndex = GetExistingIndex();
        if (thisIndex == null || thisIndex.version != tk2dIndex.CURRENT_VERSION)
        {
            CreateIndex();
            thisIndex = GetExistingIndex();
        }
        return thisIndex;
    }

    // Marks the cached index dirty so Unity persists it, and resets sprite GUI caches.
    public static void CommitIndex()
    {
        if (index)
        {
            EditorUtility.SetDirty(index);
            tk2dSpriteGuiUtility.ResetCache();
        }
    }

    // Scans every prefab in the project and rebuilds the index asset from the
    // tk2d components found. Also rebuilds any sprite collections whose data
    // version is out of date.
    static void CreateIndex()
    {
        tk2dIndex newIndex = ScriptableObject.CreateInstance<tk2dIndex>();
        newIndex.version = tk2dIndex.CURRENT_VERSION;
        newIndex.hideFlags = HideFlags.DontSave; // get this to not be destroyed in Unity 4.1

        List<string> rebuildSpriteCollectionPaths = new List<string>();

        // check all prefabs to see if we can find any objects we are interested in
        List<string> allPrefabPaths = new List<string>();
        Stack<string> paths = new Stack<string>();
        paths.Push(Application.dataPath);
        while (paths.Count != 0)
        {
            string path = paths.Pop();
            string[] files = Directory.GetFiles(path, "*.prefab");
            foreach (var file in files)
            {
                // dataPath ends in "Assets"; subtracting 6 keeps the leading
                // "Assets" folder so the path is project-relative.
                allPrefabPaths.Add(file.Substring(Application.dataPath.Length - 6));
            }

            foreach (string subdirs in Directory.GetDirectories(path))
                paths.Push(subdirs);
        }

        // Check all prefabs
        int currPrefabCount = 1;
        foreach (string prefabPath in allPrefabPaths)
        {
            EditorUtility.DisplayProgressBar("Rebuilding Index", "Scanning project folder...", (float)currPrefabCount / (float)(allPrefabPaths.Count));

            GameObject iterGo = AssetDatabase.LoadAssetAtPath( prefabPath, typeof(GameObject) ) as GameObject;
            if (!iterGo) continue;

            tk2dSpriteCollection spriteCollection = iterGo.GetComponent<tk2dSpriteCollection>();
            tk2dSpriteCollectionData spriteCollectionData = iterGo.GetComponent<tk2dSpriteCollectionData>();
            tk2dFont font = iterGo.GetComponent<tk2dFont>();
            tk2dSpriteAnimation anim = iterGo.GetComponent<tk2dSpriteAnimation>();

            if (spriteCollection)
            {
                tk2dSpriteCollectionData thisSpriteCollectionData = spriteCollection.spriteCollection;
                if (thisSpriteCollectionData)
                {
                    // Old-format data needs a rebuild after indexing completes.
                    if (thisSpriteCollectionData.version < 1)
                    {
                        rebuildSpriteCollectionPaths.Add( AssetDatabase.GetAssetPath(spriteCollection ));
                    }
                    newIndex.AddSpriteCollectionData( thisSpriteCollectionData );
                }
            }
            else if (spriteCollectionData)
            {
                // Loose collection data: only add if not already indexed via
                // its owning sprite collection.
                string guid = AssetDatabase.AssetPathToGUID(AssetDatabase.GetAssetPath(spriteCollectionData));
                bool present = false;
                foreach (var v in newIndex.GetSpriteCollectionIndex())
                {
                    if (v.spriteCollectionDataGUID == guid)
                    {
                        present = true;
                        break;
                    }
                }
                if (!present && guid != "")
                    newIndex.AddSpriteCollectionData(spriteCollectionData);
            }
            else if (font)
            {
                newIndex.AddOrUpdateFont(font); // unfortunate but necessary
            }
            else if (anim)
            {
                newIndex.AddSpriteAnimation(anim);
            }
            else
            {
                iterGo = null;
                System.GC.Collect();
            }

            tk2dEditorUtility.UnloadUnusedAssets();
            ++currPrefabCount;
        }
        EditorUtility.ClearProgressBar();

        // Create index
        newIndex.hideFlags = 0; // to save it
        AssetDatabase.CreateAsset(newIndex, indexPath);
        AssetDatabase.SaveAssets();

        // unload all unused assets
        tk2dEditorUtility.UnloadUnusedAssets();

        // Rebuild invalid sprite collections
        if (rebuildSpriteCollectionPaths.Count > 0)
        {
            EditorUtility.DisplayDialog("Upgrade required",
                                        "Please wait while your sprite collection is upgraded.",
                                        "Ok");

            int count = 1;
            foreach (var scPath in rebuildSpriteCollectionPaths)
            {
                tk2dSpriteCollection sc = AssetDatabase.LoadAssetAtPath(scPath, typeof(tk2dSpriteCollection)) as tk2dSpriteCollection;
                EditorUtility.DisplayProgressBar("Rebuilding Sprite Collections", "Rebuilding Sprite Collection: " + sc.name, (float)count / (float)(rebuildSpriteCollectionPaths.Count));

                tk2dSpriteCollectionBuilder.Rebuild(sc);
                sc = null;
                tk2dEditorUtility.UnloadUnusedAssets();

                ++count;
            }
            EditorUtility.ClearProgressBar();
        }

        index = newIndex;
        tk2dSpriteGuiUtility.ResetCache();
    }

    // returns null if nothing is found
    [System.ObsoleteAttribute]
    static T[] FindPrefabsInProjectWithComponent<T>() where T : Component
    {
        List<T> allGens = new List<T>();
        Stack<string> paths = new Stack<string>();
        paths.Push(Application.dataPath);
        while (paths.Count != 0)
        {
            string path = paths.Pop();
            string[] files = Directory.GetFiles(path, "*.prefab");
            foreach (var file in files)
            {
                GameObject go = AssetDatabase.LoadAssetAtPath( file.Substring(Application.dataPath.Length - 6), typeof(GameObject) ) as GameObject;
                if (!go) continue;

                T gen = go.GetComponent<T>();
                if (gen)
                {
                    allGens.Add(gen);
                }
            }

            foreach (string subdirs in Directory.GetDirectories(path))
                paths.Push(subdirs);
        }

        if (allGens.Count == 0) return null;

        T[] allGensArray = new T[allGens.Count];
        for (int i = 0; i < allGens.Count; ++i)
            allGensArray[i] = allGens[i];
        return allGensArray;
    }

    // Creates a GameObject with a unique name in the scene, parented to the
    // current scene selection (when the selection is not an asset).
    public static GameObject CreateGameObjectInScene(string name)
    {
        string realName = name;
        int counter = 0;
        while (GameObject.Find(realName) != null)
        {
            realName = name + counter++;
        }

        GameObject go = new GameObject(realName);
        if (Selection.activeGameObject != null)
        {
            // Empty asset path means the selection lives in the scene, so it
            // is a valid parent.
            string assetPath = AssetDatabase.GetAssetPath(Selection.activeGameObject);
            if (assetPath.Length == 0) go.transform.parent = Selection.activeGameObject.transform;
        }
        go.transform.localPosition = Vector3.zero;
        go.transform.localRotation = Quaternion.identity;
        go.transform.localScale = Vector3.one;
        return go;
    }

    // Draws the XY outline of the mesh bounds (z ignored) using scene handles.
    public static void DrawMeshBounds(Mesh mesh, Transform transform, Color c)
    {
        var e = mesh.bounds.extents;
        Vector3[] boundPoints = new Vector3[] {
            mesh.bounds.center + new Vector3(-e.x, e.y, 0.0f),
            mesh.bounds.center + new Vector3( e.x, e.y, 0.0f),
            mesh.bounds.center + new Vector3( e.x,-e.y, 0.0f),
            mesh.bounds.center + new Vector3(-e.x,-e.y, 0.0f),
            mesh.bounds.center + new Vector3(-e.x, e.y, 0.0f) };

        for (int i = 0; i < boundPoints.Length; ++i)
            boundPoints[i] = transform.TransformPoint(boundPoints[i]);

        Handles.color = c;
        Handles.DrawPolyLine(boundPoints);
    }

    // Unloads unused assets; temporarily clears the selection so selected
    // objects don't pin assets, and invalidates the cached index.
    public static void UnloadUnusedAssets()
    {
        Object[] previousSelectedObjects = Selection.objects;
        Selection.objects = new Object[0];

        System.GC.Collect();
        EditorUtility.UnloadUnusedAssets();

        index = null;

        Selection.objects = previousSelectedObjects;
    }

    public static void CollectAndUnloadUnusedAssets()
    {
        System.GC.Collect();
        System.GC.WaitForPendingFinalizers();
        EditorUtility.UnloadUnusedAssets();
    }

    public static void DeleteAsset(UnityEngine.Object obj)
    {
        if (obj == null) return;
        UnityEditor.AssetDatabase.DeleteAsset(UnityEditor.AssetDatabase.GetAssetPath(obj));
    }

    // Version-portable "is this object a prefab asset" check.
    public static bool IsPrefab(Object obj)
    {
#if (UNITY_3_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4)
        return AssetDatabase.GetAssetPath(obj).Length != 0;
#else
        return (PrefabUtility.GetPrefabType(obj) == PrefabType.Prefab);
#endif
    }

    // Version-portable GameObject activation.
    public static void SetGameObjectActive(GameObject go, bool active)
    {
#if UNITY_3_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4 || UNITY_3_5 || UNITY_3_6 || UNITY_3_7 || UNITY_3_8 || UNITY_3_9
        go.SetActiveRecursively(active);
#else
        go.SetActive(active);
#endif
    }

    // Version-portable GameObject active query.
    public static bool IsGameObjectActive(GameObject go)
    {
#if UNITY_3_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4 || UNITY_3_5 || UNITY_3_6 || UNITY_3_7 || UNITY_3_8 || UNITY_3_9
        return go.active;
#else
        return go.activeSelf;
#endif
    }
}
| |
// Copyright (c) Brock Allen & Dominick Baier. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
using System;
using System.Linq;
using System.Threading.Tasks;
using IdentityServer4.Events;
using IdentityServer4.Extensions;
using IdentityServer4.Models;
using IdentityServer4.Services;
using IdentityServer4.Stores;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
namespace IdentityServer.Device
{
/// <summary>
/// MVC controller for the OAuth device flow: captures/confirms the user code
/// shown on the device, gathers consent, and reports the outcome back to
/// IdentityServer via <see cref="IDeviceFlowInteractionService"/>.
/// Requires an authenticated user ([Authorize]).
/// </summary>
[Authorize]
[SecurityHeaders]
public class DeviceController : Controller
{
    private readonly IDeviceFlowInteractionService _interaction;
    private readonly IClientStore _clientStore;
    private readonly IResourceStore _resourceStore;
    private readonly IEventService _events;
    private readonly ILogger<DeviceController> _logger;

    public DeviceController(
        IDeviceFlowInteractionService interaction,
        IClientStore clientStore,
        IResourceStore resourceStore,
        IEventService eventService,
        ILogger<DeviceController> logger)
    {
        _interaction = interaction;
        _clientStore = clientStore;
        _resourceStore = resourceStore;
        _events = eventService;
        _logger = logger;
    }

    /// <summary>
    /// Entry point. Without a user_code query parameter, shows the capture
    /// form; with one, shows the confirmation view (ConfirmUserCode = true
    /// asks the user to verify the code matches their device).
    /// </summary>
    [HttpGet]
    public async Task<IActionResult> Index([FromQuery(Name = "user_code")] string userCode)
    {
        if (string.IsNullOrWhiteSpace(userCode)) return View("UserCodeCapture");

        var vm = await BuildViewModelAsync(userCode);
        if (vm == null) return View("Error");

        vm.ConfirmUserCode = true;
        return View("UserCodeConfirmation", vm);
    }

    /// <summary>
    /// Handles the posted user code from the capture form and shows the
    /// consent/confirmation view for it.
    /// </summary>
    [HttpPost]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> UserCodeCapture(string userCode)
    {
        var vm = await BuildViewModelAsync(userCode);
        if (vm == null) return View("Error");

        return View("UserCodeConfirmation", vm);
    }

    /// <summary>
    /// Handles the posted consent decision for the device authorization.
    /// </summary>
    /// <exception cref="ArgumentNullException">When no model was bound.</exception>
    [HttpPost]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> Callback(DeviceAuthorizationInputModel model)
    {
        if (model == null) throw new ArgumentNullException(nameof(model));

        var result = await ProcessConsent(model);
        if (result.HasValidationError) return View("Error");

        return View("Success");
    }

    // Core consent processing: interprets the user's yes/no decision, raises
    // the corresponding audit event, and hands the consent response back to
    // IdentityServer. Returns an empty result when the user code no longer
    // resolves to a pending authorization request.
    private async Task<ProcessConsentResult> ProcessConsent(DeviceAuthorizationInputModel model)
    {
        var result = new ProcessConsentResult();

        var request = await _interaction.GetAuthorizationContextAsync(model.UserCode);
        if (request == null) return result;

        ConsentResponse grantedConsent = null;

        // user clicked 'no' - send back the standard 'access_denied' response
        if (model.Button == "no")
        {
            grantedConsent = ConsentResponse.Denied;

            // emit event
            await _events.RaiseAsync(new ConsentDeniedEvent(User.GetSubjectId(), request.ClientId, request.ScopesRequested));
        }
        // user clicked 'yes' - validate the data
        else if (model.Button == "yes")
        {
            // if the user consented to some scope, build the response model
            if (model.ScopesConsented != null && model.ScopesConsented.Any())
            {
                var scopes = model.ScopesConsented;
                // Strip offline_access when it's disabled, even if posted back.
                if (ConsentOptions.EnableOfflineAccess == false)
                {
                    scopes = scopes.Where(x => x != IdentityServer4.IdentityServerConstants.StandardScopes.OfflineAccess);
                }

                grantedConsent = new ConsentResponse
                {
                    RememberConsent = model.RememberConsent,
                    ScopesConsented = scopes.ToArray()
                };

                // emit event
                await _events.RaiseAsync(new ConsentGrantedEvent(User.GetSubjectId(), request.ClientId, request.ScopesRequested, grantedConsent.ScopesConsented, grantedConsent.RememberConsent));
            }
            else
            {
                result.ValidationError = ConsentOptions.MustChooseOneErrorMessage;
            }
        }
        else
        {
            result.ValidationError = ConsentOptions.InvalidSelectionErrorMessage;
        }

        if (grantedConsent != null)
        {
            // communicate outcome of consent back to identityserver
            await _interaction.HandleRequestAsync(model.UserCode, grantedConsent);

            // indicate that's it ok to redirect back to authorization endpoint
            result.RedirectUri = model.ReturnUrl;
            result.ClientId = request.ClientId;
        }
        else
        {
            // we need to redisplay the consent UI
            result.ViewModel = await BuildViewModelAsync(model.UserCode, model);
        }

        return result;
    }

    // Resolves the user code to its authorization context, validates the
    // client and requested scopes, and builds the consent view model.
    // Returns null (with an error logged) when anything fails to resolve.
    private async Task<DeviceAuthorizationViewModel> BuildViewModelAsync(string userCode, DeviceAuthorizationInputModel model = null)
    {
        var request = await _interaction.GetAuthorizationContextAsync(userCode);
        if (request != null)
        {
            var client = await _clientStore.FindEnabledClientByIdAsync(request.ClientId);
            if (client != null)
            {
                var resources = await _resourceStore.FindEnabledResourcesByScopeAsync(request.ScopesRequested);
                if (resources != null && (resources.IdentityResources.Any() || resources.ApiResources.Any()))
                {
                    return CreateConsentViewModel(userCode, model, client, resources);
                }
                else
                {
                    _logger.LogError("No scopes matching: {0}", request.ScopesRequested.Aggregate((x, y) => x + ", " + y));
                }
            }
            else
            {
                _logger.LogError("Invalid client id: {0}", request.ClientId);
            }
        }

        return null;
    }

    // Builds the consent view model from the client and resource definitions.
    // When 'model' is null (first display) all scopes are pre-checked;
    // otherwise the user's previous selections are preserved.
    private DeviceAuthorizationViewModel CreateConsentViewModel(string userCode, DeviceAuthorizationInputModel model, Client client, Resources resources)
    {
        var vm = new DeviceAuthorizationViewModel
        {
            UserCode = userCode,

            RememberConsent = model?.RememberConsent ?? true,
            ScopesConsented = model?.ScopesConsented ?? Enumerable.Empty<string>(),

            ClientName = client.ClientName ?? client.ClientId,
            ClientUrl = client.ClientUri,
            ClientLogoUrl = client.LogoUri,
            AllowRememberConsent = client.AllowRememberConsent
        };

        vm.IdentityScopes = resources.IdentityResources.Select(x => CreateScopeViewModel(x, vm.ScopesConsented.Contains(x.Name) || model == null)).ToArray();
        vm.ResourceScopes = resources.ApiResources.SelectMany(x => x.Scopes).Select(x => CreateScopeViewModel(x, vm.ScopesConsented.Contains(x.Name) || model == null)).ToArray();
        if (ConsentOptions.EnableOfflineAccess && resources.OfflineAccess)
        {
            // offline_access is synthesized - it's not part of the resource stores.
            vm.ResourceScopes = vm.ResourceScopes.Union(new[]
            {
                GetOfflineAccessScope(vm.ScopesConsented.Contains(IdentityServer4.IdentityServerConstants.StandardScopes.OfflineAccess) || model == null)
            });
        }

        return vm;
    }

    // Maps an identity resource to a displayable scope; required scopes are
    // always checked.
    private ScopeViewModel CreateScopeViewModel(IdentityResource identity, bool check)
    {
        return new ScopeViewModel
        {
            Name = identity.Name,
            DisplayName = identity.DisplayName,
            Description = identity.Description,
            Emphasize = identity.Emphasize,
            Required = identity.Required,
            Checked = check || identity.Required
        };
    }

    // Maps an API scope to a displayable scope; required scopes are always
    // checked.
    public ScopeViewModel CreateScopeViewModel(Scope scope, bool check)
    {
        return new ScopeViewModel
        {
            Name = scope.Name,
            DisplayName = scope.DisplayName,
            Description = scope.Description,
            Emphasize = scope.Emphasize,
            Required = scope.Required,
            Checked = check || scope.Required
        };
    }

    // Synthesizes the offline_access scope entry shown alongside resource scopes.
    private ScopeViewModel GetOfflineAccessScope(bool check)
    {
        return new ScopeViewModel
        {
            Name = IdentityServer4.IdentityServerConstants.StandardScopes.OfflineAccess,
            DisplayName = ConsentOptions.OfflineAccessDisplayName,
            Description = ConsentOptions.OfflineAccessDescription,
            Emphasize = true,
            Checked = check
        };
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Reflection.Internal;
namespace System.Reflection.Metadata.Ecma335
{
/// <summary>
/// Provides extension methods for working with certain raw elements of the Ecma 335 metadata tables and heaps.
/// </summary>
public static class MetadataReaderExtensions
{
    /// <summary>
    /// Returns the number of rows in the specified table.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="tableIndex"/> is not a valid table index.</exception>
    public static int GetTableRowCount(this MetadataReader reader, TableIndex tableIndex)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        if ((int)tableIndex >= TableIndexExtensions.Count)
        {
            Throw.TableIndexOutOfRange();
        }

        return (int)reader.TableRowCounts[(int)tableIndex];
    }

    /// <summary>
    /// Returns the size of a row in the specified table.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="tableIndex"/> is not a valid table index.</exception>
    public static int GetTableRowSize(this MetadataReader reader, TableIndex tableIndex)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        switch (tableIndex)
        {
            case TableIndex.Module: return reader.ModuleTable.RowSize;
            case TableIndex.TypeRef: return reader.TypeRefTable.RowSize;
            case TableIndex.TypeDef: return reader.TypeDefTable.RowSize;
            case TableIndex.FieldPtr: return reader.FieldPtrTable.RowSize;
            case TableIndex.Field: return reader.FieldTable.RowSize;
            case TableIndex.MethodPtr: return reader.MethodPtrTable.RowSize;
            case TableIndex.MethodDef: return reader.MethodDefTable.RowSize;
            case TableIndex.ParamPtr: return reader.ParamPtrTable.RowSize;
            case TableIndex.Param: return reader.ParamTable.RowSize;
            case TableIndex.InterfaceImpl: return reader.InterfaceImplTable.RowSize;
            case TableIndex.MemberRef: return reader.MemberRefTable.RowSize;
            case TableIndex.Constant: return reader.ConstantTable.RowSize;
            case TableIndex.CustomAttribute: return reader.CustomAttributeTable.RowSize;
            case TableIndex.FieldMarshal: return reader.FieldMarshalTable.RowSize;
            case TableIndex.DeclSecurity: return reader.DeclSecurityTable.RowSize;
            case TableIndex.ClassLayout: return reader.ClassLayoutTable.RowSize;
            case TableIndex.FieldLayout: return reader.FieldLayoutTable.RowSize;
            case TableIndex.StandAloneSig: return reader.StandAloneSigTable.RowSize;
            case TableIndex.EventMap: return reader.EventMapTable.RowSize;
            case TableIndex.EventPtr: return reader.EventPtrTable.RowSize;
            case TableIndex.Event: return reader.EventTable.RowSize;
            case TableIndex.PropertyMap: return reader.PropertyMapTable.RowSize;
            case TableIndex.PropertyPtr: return reader.PropertyPtrTable.RowSize;
            case TableIndex.Property: return reader.PropertyTable.RowSize;
            case TableIndex.MethodSemantics: return reader.MethodSemanticsTable.RowSize;
            case TableIndex.MethodImpl: return reader.MethodImplTable.RowSize;
            case TableIndex.ModuleRef: return reader.ModuleRefTable.RowSize;
            case TableIndex.TypeSpec: return reader.TypeSpecTable.RowSize;
            case TableIndex.ImplMap: return reader.ImplMapTable.RowSize;
            case TableIndex.FieldRva: return reader.FieldRvaTable.RowSize;
            case TableIndex.EncLog: return reader.EncLogTable.RowSize;
            case TableIndex.EncMap: return reader.EncMapTable.RowSize;
            case TableIndex.Assembly: return reader.AssemblyTable.RowSize;
            case TableIndex.AssemblyProcessor: return reader.AssemblyProcessorTable.RowSize;
            case TableIndex.AssemblyOS: return reader.AssemblyOSTable.RowSize;
            case TableIndex.AssemblyRef: return reader.AssemblyRefTable.RowSize;
            case TableIndex.AssemblyRefProcessor: return reader.AssemblyRefProcessorTable.RowSize;
            case TableIndex.AssemblyRefOS: return reader.AssemblyRefOSTable.RowSize;
            case TableIndex.File: return reader.FileTable.RowSize;
            case TableIndex.ExportedType: return reader.ExportedTypeTable.RowSize;
            case TableIndex.ManifestResource: return reader.ManifestResourceTable.RowSize;
            case TableIndex.NestedClass: return reader.NestedClassTable.RowSize;
            case TableIndex.GenericParam: return reader.GenericParamTable.RowSize;
            case TableIndex.MethodSpec: return reader.MethodSpecTable.RowSize;
            case TableIndex.GenericParamConstraint: return reader.GenericParamConstraintTable.RowSize;

            default:
                throw new ArgumentOutOfRangeException("tableIndex");
        }
    }

    /// <summary>
    /// Returns the offset from the start of metadata to the specified table.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="tableIndex"/> is not a valid table index.</exception>
    public static unsafe int GetTableMetadataOffset(this MetadataReader reader, TableIndex tableIndex)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        // Pointer arithmetic against the start of the whole metadata block.
        return (int)(reader.GetTableMetadataBlock(tableIndex).Pointer - reader.Block.Pointer);
    }

    // Returns the memory block backing the specified table. Keep this switch
    // in sync with GetTableRowSize above.
    private static MemoryBlock GetTableMetadataBlock(this MetadataReader reader, TableIndex tableIndex)
    {
        Debug.Assert(reader != null);

        switch (tableIndex)
        {
            case TableIndex.Module: return reader.ModuleTable.Block;
            case TableIndex.TypeRef: return reader.TypeRefTable.Block;
            case TableIndex.TypeDef: return reader.TypeDefTable.Block;
            case TableIndex.FieldPtr: return reader.FieldPtrTable.Block;
            case TableIndex.Field: return reader.FieldTable.Block;
            case TableIndex.MethodPtr: return reader.MethodPtrTable.Block;
            case TableIndex.MethodDef: return reader.MethodDefTable.Block;
            case TableIndex.ParamPtr: return reader.ParamPtrTable.Block;
            case TableIndex.Param: return reader.ParamTable.Block;
            case TableIndex.InterfaceImpl: return reader.InterfaceImplTable.Block;
            case TableIndex.MemberRef: return reader.MemberRefTable.Block;
            case TableIndex.Constant: return reader.ConstantTable.Block;
            case TableIndex.CustomAttribute: return reader.CustomAttributeTable.Block;
            case TableIndex.FieldMarshal: return reader.FieldMarshalTable.Block;
            case TableIndex.DeclSecurity: return reader.DeclSecurityTable.Block;
            case TableIndex.ClassLayout: return reader.ClassLayoutTable.Block;
            case TableIndex.FieldLayout: return reader.FieldLayoutTable.Block;
            case TableIndex.StandAloneSig: return reader.StandAloneSigTable.Block;
            case TableIndex.EventMap: return reader.EventMapTable.Block;
            case TableIndex.EventPtr: return reader.EventPtrTable.Block;
            case TableIndex.Event: return reader.EventTable.Block;
            case TableIndex.PropertyMap: return reader.PropertyMapTable.Block;
            case TableIndex.PropertyPtr: return reader.PropertyPtrTable.Block;
            case TableIndex.Property: return reader.PropertyTable.Block;
            case TableIndex.MethodSemantics: return reader.MethodSemanticsTable.Block;
            case TableIndex.MethodImpl: return reader.MethodImplTable.Block;
            case TableIndex.ModuleRef: return reader.ModuleRefTable.Block;
            case TableIndex.TypeSpec: return reader.TypeSpecTable.Block;
            case TableIndex.ImplMap: return reader.ImplMapTable.Block;
            case TableIndex.FieldRva: return reader.FieldRvaTable.Block;
            case TableIndex.EncLog: return reader.EncLogTable.Block;
            case TableIndex.EncMap: return reader.EncMapTable.Block;
            case TableIndex.Assembly: return reader.AssemblyTable.Block;
            case TableIndex.AssemblyProcessor: return reader.AssemblyProcessorTable.Block;
            case TableIndex.AssemblyOS: return reader.AssemblyOSTable.Block;
            case TableIndex.AssemblyRef: return reader.AssemblyRefTable.Block;
            case TableIndex.AssemblyRefProcessor: return reader.AssemblyRefProcessorTable.Block;
            case TableIndex.AssemblyRefOS: return reader.AssemblyRefOSTable.Block;
            case TableIndex.File: return reader.FileTable.Block;
            case TableIndex.ExportedType: return reader.ExportedTypeTable.Block;
            case TableIndex.ManifestResource: return reader.ManifestResourceTable.Block;
            case TableIndex.NestedClass: return reader.NestedClassTable.Block;
            case TableIndex.GenericParam: return reader.GenericParamTable.Block;
            case TableIndex.MethodSpec: return reader.MethodSpecTable.Block;
            case TableIndex.GenericParamConstraint: return reader.GenericParamConstraintTable.Block;

            default:
                throw new ArgumentOutOfRangeException("tableIndex");
        }
    }

    /// <summary>
    /// Returns the size of the specified heap.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="heapIndex"/> is not a valid heap index.</exception>
    public static int GetHeapSize(this MetadataReader reader, HeapIndex heapIndex)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        return reader.GetMetadataBlock(heapIndex).Length;
    }

    /// <summary>
    /// Returns the offset from the start of metadata to the specified heap.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="heapIndex"/> is not a valid heap index.</exception>
    public static unsafe int GetHeapMetadataOffset(this MetadataReader reader, HeapIndex heapIndex)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        return (int)(reader.GetMetadataBlock(heapIndex).Pointer - reader.Block.Pointer);
    }

    /// <summary>
    /// Returns the memory block of the specified heap.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="heapIndex"/> is not a valid heap index.</exception>
    private static MemoryBlock GetMetadataBlock(this MetadataReader reader, HeapIndex heapIndex)
    {
        Debug.Assert(reader != null);

        switch (heapIndex)
        {
            case HeapIndex.UserString:
                return reader.UserStringStream.Block;

            case HeapIndex.String:
                return reader.StringStream.Block;

            case HeapIndex.Blob:
                return reader.BlobStream.Block;

            case HeapIndex.Guid:
                return reader.GuidStream.Block;

            default:
                throw new ArgumentOutOfRangeException("heapIndex");
        }
    }

    /// <summary>
    /// Returns a handle to the UserString that follows the given one in the UserString heap or a nil handle if it is the last one.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    public static UserStringHandle GetNextHandle(this MetadataReader reader, UserStringHandle handle)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        return reader.UserStringStream.GetNextHandle(handle);
    }

    /// <summary>
    /// Returns a handle to the Blob that follows the given one in the Blob heap or a nil handle if it is the last one.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    public static BlobHandle GetNextHandle(this MetadataReader reader, BlobHandle handle)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        return reader.BlobStream.GetNextHandle(handle);
    }

    /// <summary>
    /// Returns a handle to the String that follows the given one in the String heap or a nil handle if it is the last one.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    public static StringHandle GetNextHandle(this MetadataReader reader, StringHandle handle)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        return reader.StringStream.GetNextHandle(handle);
    }

    /// <summary>
    /// Enumerates entries of EnC log.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    public static IEnumerable<EditAndContinueLogEntry> GetEditAndContinueLogEntries(this MetadataReader reader)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        // Row ids are 1-based in ECMA-335 metadata tables.
        for (int rid = 1; rid <= reader.EncLogTable.NumberOfRows; rid++)
        {
            yield return new EditAndContinueLogEntry(
                new EntityHandle(reader.EncLogTable.GetToken(rid)),
                reader.EncLogTable.GetFuncCode(rid));
        }
    }

    /// <summary>
    /// Enumerates entries of EnC map.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    public static IEnumerable<EntityHandle> GetEditAndContinueMapEntries(this MetadataReader reader)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        for (int rid = 1; rid <= reader.EncMapTable.NumberOfRows; rid++)
        {
            yield return new EntityHandle(reader.EncMapTable.GetToken(rid));
        }
    }

    /// <summary>
    /// Enumerate types that define one or more properties.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    /// <returns>
    /// The resulting sequence corresponds exactly to entries in PropertyMap table,
    /// i.e. n-th returned <see cref="TypeDefinitionHandle"/> is stored in n-th row of PropertyMap.
    /// </returns>
    public static IEnumerable<TypeDefinitionHandle> GetTypesWithProperties(this MetadataReader reader)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        for (int rid = 1; rid <= reader.PropertyMapTable.NumberOfRows; rid++)
        {
            yield return reader.PropertyMapTable.GetParentType(rid);
        }
    }

    /// <summary>
    /// Enumerate types that define one or more events.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
    /// <returns>
    /// The resulting sequence corresponds exactly to entries in EventMap table,
    /// i.e. n-th returned <see cref="TypeDefinitionHandle"/> is stored in n-th row of EventMap.
    /// </returns>
    public static IEnumerable<TypeDefinitionHandle> GetTypesWithEvents(this MetadataReader reader)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        for (int rid = 1; rid <= reader.EventMapTable.NumberOfRows; rid++)
        {
            yield return reader.EventMapTable.GetParentType(rid);
        }
    }
}
}
| |
//
// EndgamePicker.cs
//
// Authors:
// Alan McGovern alan.mcgovern@gmail.com
//
// Copyright (C) 2008 Alan McGovern
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Text;
using OctoTorrent.Client.Messages;
using OctoTorrent.Common;
using OctoTorrent.Client.Messages.FastPeer;
using OctoTorrent.Client.Messages.Standard;
namespace OctoTorrent.Client
{
// Keep a list of all the pieces which have not yet being fully downloaded
// From this list we will make requests for all the blocks until the piece is complete.
// Piece picker used during "endgame" mode: every remaining piece is tracked here and the
// same block may be requested from several peers at once so the tail of the download is
// not stalled by one slow peer. Duplicate requests are cancelled as soon as a copy of the
// block arrives (see ValidatePiece).
public class EndGamePicker : PiecePicker
{
// Matches requests whose underlying block request has timed out.
static Predicate<Request> TimedOut = delegate (Request r) { return r.Block.RequestTimedOut; };
// Matches requests whose block is not currently assigned to any peer.
// NOTE(review): this predicate is not used anywhere in the visible code.
static Predicate<Request> NotRequested = delegate (Request r) { return r.Block.RequestedOff == null; };
// Struct to link a request for a block to a peer
// This way we can have multiple requests for the same block
class Request
{
public Request(PeerId peer, Block block)
{
Peer = peer;
Block = block;
}
public Block Block;
public PeerId Peer;
}
// This list stores all the pieces which have not yet been completed. If a piece is *not* in this list
// we don't need to download it.
// NOTE(review): 'pieces' is only assigned in Initialise; calling ExportActiveRequests or
// PickPiece before Initialise would throw NullReferenceException — confirm callers always
// call Initialise first.
List<Piece> pieces;
// These are all the requests for the individual blocks
List<Request> requests;
public EndGamePicker()
: base(null)
{
requests = new List<Request>();
}
// Cancels a pending request when the predicate returns 'true'
// Decrements each matching peer's outstanding-request counter, optionally sends a Cancel
// message, then removes the matching entries. The predicate is evaluated twice per entry
// (once in the loop, once by RemoveAll), so it must be side-effect free and stable.
void CancelWhere(Predicate<Request> predicate, bool sendCancel)
{
for (int i = 0; i < requests.Count; i++)
{
Request r = requests[i];
if (predicate(r))
{
r.Peer.AmRequestingPiecesCount--;
if (sendCancel)
r.Peer.Enqueue(new CancelMessage(r.Block.PieceIndex, r.Block.StartOffset, r.Block.RequestLength));
}
}
requests.RemoveAll(predicate);
}
// Drops every request whose block has timed out. No Cancel message is sent.
public override void CancelTimedOutRequests()
{
CancelWhere(TimedOut, false);
}
// Endgame mode never "continues" a partially requested piece; PickPiece handles everything.
public override RequestMessage ContinueExistingRequest(PeerId peer)
{
return null;
}
// Number of outstanding block requests (duplicates included).
public override int CurrentRequestCount()
{
return requests.Count;
}
// Snapshot of the incomplete pieces currently being tracked.
public override List<Piece> ExportActiveRequests()
{
return new List<Piece>(pieces);
}
// Takes over from a regular picker: records the remaining pieces and rebuilds the
// request list from any blocks that are already assigned to a peer.
public override void Initialise(BitField bitfield, TorrentFile[] files, IEnumerable<Piece> requests)
{
// 'Requests' should contain a list of all the pieces we need to complete
pieces = new List<Piece>(requests);
foreach (Piece piece in pieces)
{
for (int i = 0; i < piece.BlockCount; i++)
if (piece.Blocks[i].RequestedOff != null)
this.requests.Add(new Request(piece.Blocks[i].RequestedOff, piece.Blocks[i]));
}
}
// A peer is interesting if it has at least one piece at all (in endgame we want
// every piece we don't yet have).
public override bool IsInteresting(BitField bitfield)
{
return !bitfield.AllFalse;
}
// Picks at most one block for this peer: first an entirely unrequested block, then a
// duplicate of an already-requested (but not yet received) block.
public override MessageBundle PickPiece(PeerId id, BitField peerBitfield, List<PeerId> otherPeers, int count, int startIndex, int endIndex)
{
// Only request 2 pieces at a time in endgame mode
// to prevent a *massive* overshoot
if (id.IsChoking || id.AmRequestingPiecesCount > 2)
return null;
LoadPieces(id, peerBitfield);
// 1) See if there are any blocks which have not been requested at all. Request the block if the peer has it
foreach (Piece p in pieces)
{
if(!peerBitfield[p.Index] || p.AllBlocksRequested)
continue;
for (int i = 0; i < p.BlockCount; i++)
{
if (p.Blocks[i].Requested)
continue;
p.Blocks[i].Requested = true;
Request request = new Request(id, p.Blocks[i]);
requests.Add(request);
return new MessageBundle(request.Block.CreateRequest(id));
}
}
// 2) For each block with an existing request, add another request. We do a search from the start
// of the list to the end. So when we add a duplicate request, move both requests to the end of the list
foreach (Piece p in pieces)
{
if (!peerBitfield[p.Index])
continue;
for (int i = 0; i < p.BlockCount; i++)
{
if (p.Blocks[i].Received || AlreadyRequested(p.Blocks[i], id))
continue;
// 'c' caps the scan at one pass over the original list length so entries moved
// to the end are not revisited; 'j--' re-examines the index a removal shifted into.
int c = requests.Count;
for (int j = 0; j < requests.Count - 1 && (c-- > 0); j++)
{
if (requests[j].Block.PieceIndex == p.Index && requests[j].Block.StartOffset == p.Blocks[i].StartOffset)
{
Request r = requests[j];
requests.RemoveAt(j);
requests.Add(r);
j--;
}
}
p.Blocks[i].Requested = true;
Request request = new Request(id, p.Blocks[i]);
requests.Add(request);
return new MessageBundle(request.Block.CreateRequest(id));
}
}
return null;
}
// Ensures every piece the peer advertises is present in 'pieces'.
// NOTE(review): this adds freshly-constructed Piece objects even for pieces we may
// already have verified; presumably Initialise seeded only the missing ones — confirm.
void LoadPieces(PeerId id, BitField b)
{
int length = b.Length;
for (int i = b.FirstTrue(0, length); i != -1; i = b.FirstTrue(i + 1, length))
if (!pieces.Exists(delegate(Piece p) { return p.Index == i; }))
pieces.Add(new Piece(i, id.TorrentManager.Torrent.PieceLength, id.TorrentManager.Torrent.Size));
}
// True if this exact block has already been requested from this exact peer
// (duplicates across *different* peers are allowed in endgame).
private bool AlreadyRequested(Block block, PeerId id)
{
bool b = requests.Exists(delegate(Request r) {
return r.Block.PieceIndex == block.PieceIndex &&
r.Block.StartOffset == block.StartOffset &&
r.Peer == id;
});
return b;
}
public override void Reset()
{
// Though if you reset an EndGamePicker it really means that you should be using a regular picker now
// NOTE(review): this clears the list without decrementing the peers'
// AmRequestingPiecesCount the way CancelWhere does — verify that is intentional.
requests.Clear();
}
// Removes one peer's request for a specific block. No Cancel message is sent.
public override void CancelRequest(PeerId peer, int piece, int startOffset, int length)
{
CancelWhere(delegate (Request r) {
return r.Block.PieceIndex == piece &&
r.Block.StartOffset == startOffset &&
r.Block.RequestLength == length &&
peer.Equals(r.Peer);
}, false);
}
// Removes every request assigned to the given peer (e.g. on disconnect).
public override void CancelRequests(PeerId peer)
{
CancelWhere(delegate(Request r) { return r.Peer == peer; }, false);
}
// Validates an incoming block: if we really requested it from this peer, cancels all
// duplicate requests for the same block on other peers, marks the block received, and
// retires the piece once all of its blocks have arrived.
public override bool ValidatePiece(PeerId peer, int pieceIndex, int startOffset, int length, out Piece piece)
{
foreach (Request r in requests)
{
// When we get past this block, it means we've found a valid request for this piece
if (r.Block.PieceIndex != pieceIndex || r.Block.StartOffset != startOffset || r.Block.RequestLength != length || r.Peer != peer)
continue;
// All the other requests for this block need to be cancelled.
foreach (Piece p in pieces)
{
if (p.Index != pieceIndex)
continue;
// Send real Cancel messages to the other peers holding duplicates.
CancelWhere(delegate(Request req) {
return req.Block.PieceIndex == pieceIndex &&
req.Block.StartOffset == startOffset &&
req.Block.RequestLength == length &&
req.Peer != peer;
}, true);
// Mark the block as received
p.Blocks[startOffset / Piece.BlockSize].Received = true;
// Once a piece is completely received, remove it from our list.
// If a piece *fails* the hashcheck, we need to add it back into the list so
// we download it again.
if (p.AllBlocksReceived)
pieces.Remove(p);
// Safe despite iterating 'requests': we return immediately after mutating it.
requests.Remove(r);
piece = p;
peer.AmRequestingPiecesCount--;
return true;
}
}
// The request was not valid
piece = null;
return false;
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.SolutionCrawler
{
internal sealed partial class SolutionCrawlerRegistrationService
{
private sealed partial class WorkCoordinator
{
// Idle processor that reacts to semantic changes in a document. When a change hint
// (a SyntaxPath to the changed member) is available it tries to enqueue only the
// minimal set of affected documents; otherwise it falls back to re-analyzing whole
// dependent projects via the nested ProjectProcessor.
private sealed class SemanticChangeProcessor : IdleProcessor
{
// Log-message factory for Logger.Log; avoids building the string when logging is off.
private static readonly Func<int, DocumentId, bool, string> s_enqueueLogger = (t, i, b) => string.Format("[{0}] {1} - hint: {2}", t, i.ToString(), b);
// Counting semaphore released once per enqueued item; ExecuteAsync awaits it,
// so releases and dequeues must stay 1:1.
private readonly SemaphoreSlim _gate;
private readonly Registration _registration;
// Next pipeline stage: project-granularity work.
private readonly ProjectProcessor _processor;
// Protects _pendingWork.
private readonly NonReentrantLock _workGate;
// At most one pending Data per document; re-enqueues coalesce (see Enqueue).
private readonly Dictionary<DocumentId, Data> _pendingWork;
public SemanticChangeProcessor(
IAsynchronousOperationListener listener,
Registration registration,
IncrementalAnalyzerProcessor documentWorkerProcessor,
int backOffTimeSpanInMS,
int projectBackOffTimeSpanInMS,
CancellationToken cancellationToken) :
base(listener, backOffTimeSpanInMS, cancellationToken)
{
_gate = new SemaphoreSlim(initialCount: 0);
_registration = registration;
_processor = new ProjectProcessor(listener, registration, documentWorkerProcessor, projectBackOffTimeSpanInMS, cancellationToken);
_workGate = new NonReentrantLock();
_pendingWork = new Dictionary<DocumentId, Data>();
// Base class starts the processing loop; must be the last thing in the ctor.
Start();
}
// Completes when both this processor and the downstream project processor finish.
public override Task AsyncProcessorTask
{
get
{
return Task.WhenAll(base.AsyncProcessorTask, _processor.AsyncProcessorTask);
}
}
protected override Task WaitAsync(CancellationToken cancellationToken)
{
return _gate.WaitAsync(cancellationToken);
}
// Processes one queued document: try the cheap hint-based path first, otherwise
// fall back to full project-dependency analysis. The async token is disposed on
// both paths to signal the operation listener.
protected override async Task ExecuteAsync()
{
var data = Dequeue();
// we have a hint. check whether we can take advantage of it
if (await TryEnqueueFromHint(data.Document, data.ChangedMember).ConfigureAwait(continueOnCapturedContext: false))
{
data.AsyncToken.Dispose();
return;
}
EnqueueFullProjectDependency(data.Document);
data.AsyncToken.Dispose();
}
private Data Dequeue()
{
return DequeueWorker(_workGate, _pendingWork, this.CancellationToken);
}
// Returns true if the hint was sufficient to enqueue targeted work; false means
// the caller must fall back to whole-project analysis.
private async Task<bool> TryEnqueueFromHint(Document document, SyntaxPath changedMember)
{
if (changedMember == null)
{
return false;
}
// see whether we already have semantic model. otherwise, use the expansive full project dependency one
// TODO: if there is a reliable way to track changed member, we could use GetSemanticModel here which could
// rebuild compilation from scratch
SemanticModel model;
SyntaxNode declarationNode;
if (!document.TryGetSemanticModel(out model) ||
!changedMember.TryResolve(await document.GetSyntaxRootAsync(this.CancellationToken).ConfigureAwait(false), out declarationNode))
{
return false;
}
var symbol = model.GetDeclaredSymbol(declarationNode, this.CancellationToken);
if (symbol == null)
{
return false;
}
return await TryEnqueueFromMemberAsync(document, symbol).ConfigureAwait(false) ||
await TryEnqueueFromTypeAsync(document, symbol).ConfigureAwait(false);
}
// For a changed type: private types only affect their own declarations; internal
// types affect this assembly plus friend assemblies; anything wider is not handled
// here (returns false so the caller does full dependency analysis).
private async Task<bool> TryEnqueueFromTypeAsync(Document document, ISymbol symbol)
{
if (!IsType(symbol))
{
return false;
}
if (symbol.DeclaredAccessibility == Accessibility.Private)
{
await EnqueueWorkItemAsync(document, symbol).ConfigureAwait(false);
Logger.Log(FunctionId.WorkCoordinator_SemanticChange_EnqueueFromType, symbol.Name);
return true;
}
if (IsInternal(symbol))
{
var assembly = symbol.ContainingAssembly;
EnqueueFullProjectDependency(document, assembly);
return true;
}
return false;
}
// For a changed member: a private member only affects its containing type's
// declarations; otherwise defer to the containing type's accessibility.
private async Task<bool> TryEnqueueFromMemberAsync(Document document, ISymbol symbol)
{
if (!IsMember(symbol))
{
return false;
}
var typeSymbol = symbol.ContainingType;
if (symbol.DeclaredAccessibility == Accessibility.Private)
{
await EnqueueWorkItemAsync(document, symbol).ConfigureAwait(false);
Logger.Log(FunctionId.WorkCoordinator_SemanticChange_EnqueueFromMember, symbol.Name);
return true;
}
if (typeSymbol == null)
{
return false;
}
return await TryEnqueueFromTypeAsync(document, typeSymbol).ConfigureAwait(false);
}
// Enqueues the declaration locations of the symbol's containing type (or the
// symbol itself when it has no containing type, e.g. a top-level type).
private Task EnqueueWorkItemAsync(Document document, ISymbol symbol)
{
return EnqueueWorkItemAsync(document, symbol.ContainingType != null ? symbol.ContainingType.Locations : symbol.Locations);
}
// Enqueues every *other* document (within the same project) that contains one of
// the given source locations; the changed document itself is skipped.
private async Task EnqueueWorkItemAsync(Document thisDocument, ImmutableArray<Location> locations)
{
var solution = thisDocument.Project.Solution;
var projectId = thisDocument.Id.ProjectId;
foreach (var location in locations)
{
Contract.Requires(location.IsInSource);
var document = solution.GetDocument(location.SourceTree, projectId);
if (document == null || thisDocument == document)
{
continue;
}
await _processor.EnqueueWorkItemAsync(document).ConfigureAwait(false);
}
}
// True for any accessibility that is visible to friend (InternalsVisibleTo) assemblies.
private bool IsInternal(ISymbol symbol)
{
return symbol.DeclaredAccessibility == Accessibility.Internal ||
symbol.DeclaredAccessibility == Accessibility.ProtectedAndInternal ||
symbol.DeclaredAccessibility == Accessibility.ProtectedOrInternal;
}
private bool IsType(ISymbol symbol)
{
return symbol.Kind == SymbolKind.NamedType;
}
private bool IsMember(ISymbol symbol)
{
return symbol.Kind == SymbolKind.Event ||
symbol.Kind == SymbolKind.Field ||
symbol.Kind == SymbolKind.Method ||
symbol.Kind == SymbolKind.Property;
}
// Enqueues project-level work for everything that may depend on this document's
// project. If 'internalVisibleToAssembly' is given, dependent projects whose
// compilations are available and have no friend access are filtered out.
private void EnqueueFullProjectDependency(Document document, IAssemblySymbol internalVisibleToAssembly = null)
{
var self = document.Project.Id;
// if there is no hint (this can happen for cases such as solution/project load and etc),
// we can postpone it even further
if (internalVisibleToAssembly == null)
{
_processor.Enqueue(self, needDependencyTracking: true);
return;
}
// most likely we got here since we are called due to typing.
// calculate dependency here and register each affected project to the next pipe line
var solution = document.Project.Solution;
foreach (var projectId in GetProjectsToAnalyze(solution, self))
{
var project = solution.GetProject(projectId);
if (project == null)
{
continue;
}
Compilation compilation;
if (project.TryGetCompilation(out compilation))
{
var assembly = compilation.Assembly;
if (assembly != null && !assembly.IsSameAssemblyOrHasFriendAccessTo(internalVisibleToAssembly))
{
continue;
}
}
_processor.Enqueue(projectId);
}
Logger.Log(FunctionId.WorkCoordinator_SemanticChange_FullProjects, internalVisibleToAssembly == null ? "full" : "internals");
}
// Queues (or coalesces) work for a document. If the document is already pending,
// the entry is replaced without releasing the gate again; the hint is kept only
// when both enqueues agree on it, otherwise it is dropped (null = no hint).
public void Enqueue(Document document, SyntaxPath changedMember)
{
this.UpdateLastAccessTime();
using (_workGate.DisposableWait(this.CancellationToken))
{
Data data;
if (_pendingWork.TryGetValue(document.Id, out data))
{
// create new async token and dispose old one.
var newAsyncToken = this.Listener.BeginAsyncOperation("EnqueueSemanticChange");
data.AsyncToken.Dispose();
_pendingWork[document.Id] = new Data(document, data.ChangedMember == changedMember ? changedMember : null, newAsyncToken);
return;
}
_pendingWork.Add(document.Id, new Data(document, changedMember, this.Listener.BeginAsyncOperation("EnqueueSemanticChange")));
_gate.Release();
}
Logger.Log(FunctionId.WorkCoordinator_SemanticChange_Enqueue, s_enqueueLogger, Environment.TickCount, document.Id, changedMember != null);
}
// Removes and returns an arbitrary (first-enumerated) entry from the map under the
// gate. Callers only invoke this after the semaphore guaranteed the map is non-empty.
private static TValue DequeueWorker<TKey, TValue>(NonReentrantLock gate, Dictionary<TKey, TValue> map, CancellationToken cancellationToken)
{
using (gate.DisposableWait(cancellationToken))
{
var first = default(KeyValuePair<TKey, TValue>);
foreach (var kv in map)
{
first = kv;
break;
}
// this is only one that removes data from the queue. so, it should always succeed
var result = map.Remove(first.Key);
Contract.Requires(result);
return first.Value;
}
}
// Returns the given project plus the projects depending on it — direct dependents
// only when the DirectDependencyPropagationOnly option is set, transitive otherwise.
private static IEnumerable<ProjectId> GetProjectsToAnalyze(Solution solution, ProjectId projectId)
{
var graph = solution.GetProjectDependencyGraph();
if (solution.Workspace.Options.GetOption(InternalSolutionCrawlerOptions.DirectDependencyPropagationOnly))
{
return graph.GetProjectsThatDirectlyDependOnThisProject(projectId).Concat(projectId);
}
// re-analyzing all transitive dependencies is very expensive. by default we will only
// re-analyze direct dependency for now. and consider flipping the default only if we must.
return graph.GetProjectsThatTransitivelyDependOnThisProject(projectId).Concat(projectId);
}
// One pending unit of document work: the document, the optional change hint, and
// the async token that keeps the operation listener alive until processed.
private struct Data
{
public readonly Document Document;
public readonly SyntaxPath ChangedMember;
public readonly IAsyncToken AsyncToken;
public Data(Document document, SyntaxPath changedMember, IAsyncToken asyncToken)
{
this.AsyncToken = asyncToken;
this.Document = document;
this.ChangedMember = changedMember;
}
}
// Second pipeline stage: expands project-level work items into per-document work
// items for the incremental analyzer processor, optionally following the project
// dependency graph first.
private class ProjectProcessor : IdleProcessor
{
private static readonly Func<int, ProjectId, string> s_enqueueLogger = (t, i) => string.Format("[{0}] {1}", t, i.ToString());
// Counting semaphore paired 1:1 with _pendingWork entries (same pattern as the outer class).
private readonly SemaphoreSlim _gate;
private readonly Registration _registration;
private readonly IncrementalAnalyzerProcessor _processor;
// Protects _pendingWork.
private readonly NonReentrantLock _workGate;
private readonly Dictionary<ProjectId, Data> _pendingWork;
public ProjectProcessor(
IAsynchronousOperationListener listener,
Registration registration,
IncrementalAnalyzerProcessor processor,
int backOffTimeSpanInMS,
CancellationToken cancellationToken) :
base(listener, backOffTimeSpanInMS, cancellationToken)
{
_registration = registration;
_processor = processor;
_gate = new SemaphoreSlim(initialCount: 0);
_workGate = new NonReentrantLock();
_pendingWork = new Dictionary<ProjectId, Data>();
// Base class starts the processing loop; must be the last thing in the ctor.
Start();
}
// Queues project-level work; duplicate enqueues of a pending project are ignored
// (including the needDependencyTracking flag of the later enqueue).
public void Enqueue(ProjectId projectId, bool needDependencyTracking = false)
{
this.UpdateLastAccessTime();
using (_workGate.DisposableWait(this.CancellationToken))
{
// the project is already in the queue. nothing needs to be done
if (_pendingWork.ContainsKey(projectId))
{
return;
}
var data = new Data(projectId, needDependencyTracking, this.Listener.BeginAsyncOperation("EnqueueWorkItemForSemanticChangeAsync"));
_pendingWork.Add(projectId, data);
_gate.Release();
}
Logger.Log(FunctionId.WorkCoordinator_Project_Enqueue, s_enqueueLogger, Environment.TickCount, projectId);
}
// Hands a single document to the incremental analyzer processor as a
// SemanticChanged work item, honoring the language's priority service.
public async Task EnqueueWorkItemAsync(Document document)
{
// we are shutting down
this.CancellationToken.ThrowIfCancellationRequested();
// call to this method is serialized. and only this method does the writing.
var priorityService = document.GetLanguageService<IWorkCoordinatorPriorityService>();
var isLowPriority = priorityService != null && await priorityService.IsLowPriorityAsync(document, this.CancellationToken).ConfigureAwait(false);
_processor.Enqueue(
new WorkItem(document.Id, document.Project.Language, InvocationReasons.SemanticChanged,
isLowPriority, this.Listener.BeginAsyncOperation("Semantic WorkItem")));
}
protected override Task WaitAsync(CancellationToken cancellationToken)
{
return _gate.WaitAsync(cancellationToken);
}
// Processes one queued project, optionally expanding it to its dependent projects.
protected override async Task ExecuteAsync()
{
var data = Dequeue();
using (data.AsyncToken)
{
// Re-resolve the project against the *current* solution; it may be gone by now.
var project = _registration.CurrentSolution.GetProject(data.ProjectId);
if (project == null)
{
return;
}
if (!data.NeedDependencyTracking)
{
await EnqueueWorkItemAsync(project).ConfigureAwait(false);
return;
}
// do dependency tracking here with current solution
var solution = _registration.CurrentSolution;
foreach (var projectId in GetProjectsToAnalyze(solution, data.ProjectId))
{
project = solution.GetProject(projectId);
await EnqueueWorkItemAsync(project).ConfigureAwait(false);
}
}
}
private Data Dequeue()
{
return DequeueWorker(_workGate, _pendingWork, this.CancellationToken);
}
// Null-tolerant: GetProject above may return null for projects removed meanwhile.
private async Task EnqueueWorkItemAsync(Project project)
{
if (project == null)
{
return;
}
foreach (var document in project.Documents)
{
await EnqueueWorkItemAsync(document).ConfigureAwait(false);
}
}
// One pending unit of project work; the async token keeps the operation
// listener alive until the project has been expanded.
private struct Data
{
public readonly IAsyncToken AsyncToken;
public readonly ProjectId ProjectId;
public readonly bool NeedDependencyTracking;
public Data(ProjectId projectId, bool needDependencyTracking, IAsyncToken asyncToken)
{
this.AsyncToken = asyncToken;
this.ProjectId = projectId;
this.NeedDependencyTracking = needDependencyTracking;
}
}
}
}
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.Simplification;
using Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel.Collections;
using Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel.InternalElements;
using Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel.Interop;
using Microsoft.VisualStudio.LanguageServices.Implementation.Interop;
using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem;
using Microsoft.VisualStudio.LanguageServices.Implementation.Utilities;
using Roslyn.Utilities;
namespace Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel
{
/// <summary>
/// Implementations of EnvDTE.FileCodeModel for both languages.
/// </summary>
public sealed partial class FileCodeModel : AbstractCodeModelObject, EnvDTE.FileCodeModel, EnvDTE80.FileCodeModel2, ICodeElementContainer<AbstractCodeElement>, IVBFileCodeModelEvents, ICSCodeModelRefactoring
{
internal static ComHandle<EnvDTE80.FileCodeModel2, FileCodeModel> Create(
    CodeModelState state,
    object parent,
    DocumentId documentId,
    ITextManagerAdapter textManagerAdapter)
{
    // Construct the model, then hand it out wrapped in a COM aggregate handle so
    // callers always interact with it through the COM-visible interface.
    var fileCodeModel = new FileCodeModel(state, parent, documentId, textManagerAdapter);
    return fileCodeModel.GetComHandle<EnvDTE80.FileCodeModel2, FileCodeModel>();
}
// Keeps the parent object (e.g. the ProjectItem) alive via a COM aggregate handle.
private readonly ComHandle<object, object> _parentHandle;
/// <summary>
/// Don't use directly. Instead, call <see cref="GetDocumentId()"/>.
/// </summary>
private DocumentId _documentId;
// Note: these are only valid when the underlying file is being renamed. Do not use.
// See OnRename/TryGetDocumentId for how these re-resolve _documentId after a rename.
private ProjectId _incomingProjectId;
private string _incomingFilePath;
// Snapshot of the document taken just before a rename; used as a fallback by TryGetDocument.
private Document _previousDocument;
private readonly ITextManagerAdapter _textManagerAdapter;
// Maps node keys to their COM code elements; entries are weak and cleaned up lazily.
private readonly CleanableWeakComHandleTable<SyntaxNodeKey, EnvDTE.CodeElement> _codeElementTable;
// These are used during batching.
private bool _batchMode;
private List<AbstractKeyedCodeElement> _batchElements;
// While a batch is open, edits accumulate here instead of being applied to the workspace.
private Document _batchDocument;
// track state to make sure we open editor only once
// (reference count of nested InitializeEditor/ReleaseEditor calls)
private int _editCount;
private IInvisibleEditor _invisibleEditor;
private SyntaxTree _lastSyntaxTree;
// Private: instances are created only through the static Create factory, which
// wraps the model in a COM aggregate handle.
private FileCodeModel(
CodeModelState state,
object parent,
DocumentId documentId,
ITextManagerAdapter textManagerAdapter)
: base(state)
{
Debug.Assert(documentId != null);
Debug.Assert(textManagerAdapter != null);
_parentHandle = new ComHandle<object, object>(parent);
_documentId = documentId;
_textManagerAdapter = textManagerAdapter;
_codeElementTable = new CleanableWeakComHandleTable<SyntaxNodeKey, EnvDTE.CodeElement>();
_batchMode = false;
_batchDocument = null;
// Cache the current tree so later code can detect whether the file changed.
_lastSyntaxTree = GetSyntaxTree();
}
internal ITextManagerAdapter TextManagerAdapter
{
get { return _textManagerAdapter; }
}
/// <summary>
/// Internally, we store the DocumentId for the document that the FileCodeModel represents. If the underlying file
/// is renamed, the DocumentId will become invalid because the Roslyn VS workspace treats file renames as a remove/add pair.
/// To work around this, the FileCodeModel is notified when a file rename is about to occur. At that point, the
/// <see cref="_documentId"/> field is null'd out and <see cref="_incomingFilePath"/> is set to the name of the new file.
/// The next time that a FileCodeModel operation occurs that requires the DocumentId, it will be retrieved from the workspace
/// using the <see cref="_incomingFilePath"/>.
/// </summary>
internal void OnRename(string newFilePath)
{
    Debug.Assert(_editCount == 0, "FileCodeModel have an open edit and the underlying file is being renamed. This is a bug.");

    if (_documentId != null)
    {
        // Keep a snapshot of the document so operations that race with the rename
        // still have something to work against (see TryGetDocument).
        _previousDocument = Workspace.CurrentSolution.GetDocument(_documentId);

        // Capture the project id while we still hold a valid DocumentId. The previous
        // code dereferenced _documentId unconditionally below, which threw a
        // NullReferenceException if a second rename arrived before the id was
        // re-resolved; in that case we now keep the previously captured project id.
        _incomingProjectId = _documentId.ProjectId;
    }

    _incomingFilePath = newFilePath;
    _documentId = null;
}
// Tears down the invisible editor (if any) regardless of the edit ref-count,
// then lets the base class do its shutdown work.
internal override void Shutdown()
{
if (_invisibleEditor != null)
{
// we are shutting down, so do not worry about editCount. If the editor is still alive, dispose it.
CodeModelService.DetachFormatTrackingToBuffer(_invisibleEditor.TextBuffer);
_invisibleEditor.Dispose();
_invisibleEditor = null;
}
base.Shutdown();
}
// Returns the DocumentId for this model, re-resolving it from the rename bookkeeping
// (_incomingProjectId/_incomingFilePath) when the cached id was invalidated by a rename.
private bool TryGetDocumentId(out DocumentId documentId)
{
    // Fast path: we already have a valid id.
    if (_documentId != null)
    {
        documentId = _documentId;
        return true;
    }

    documentId = null;

    // No cached id — unless a rename is in flight there is nothing to resolve from.
    if (_incomingProjectId == null || _incomingFilePath == null)
    {
        return false;
    }

    var workspace = (VisualStudioWorkspaceImpl)this.State.Workspace;
    var project = workspace.ProjectTracker.GetProject(_incomingProjectId);
    var hostDocument = project?.GetCurrentDocumentFromPath(_incomingFilePath);
    if (hostDocument == null)
    {
        return false;
    }

    // Successfully re-resolved: cache the new id and clear the rename bookkeeping.
    _documentId = hostDocument.Id;
    _incomingProjectId = null;
    _incomingFilePath = null;
    _previousDocument = null;

    documentId = _documentId;
    return true;
}

// Like TryGetDocumentId, but throws E_UNEXPECTED when the id cannot be determined.
internal DocumentId GetDocumentId()
{
    if (_documentId != null)
    {
        return _documentId;
    }

    return TryGetDocumentId(out var documentId)
        ? documentId
        : throw Exceptions.ThrowEUnexpected();
}
// Re-keys an existing code element in the table (e.g. after its underlying node was
// renamed or moved). Throws if the old key is missing or maps to a different element.
internal void UpdateCodeElementNodeKey(AbstractKeyedCodeElement keyedElement, SyntaxNodeKey oldNodeKey, SyntaxNodeKey newNodeKey)
{
if (!_codeElementTable.TryGetValue(oldNodeKey, out var codeElement))
{
throw new InvalidOperationException($"Could not find {oldNodeKey} in Code Model element table.");
}
_codeElementTable.Remove(oldNodeKey);
// Sanity check: the element stored under the old key must be the one being re-keyed.
var managedElement = ComAggregate.GetManagedObject<AbstractKeyedCodeElement>(codeElement);
if (!object.Equals(managedElement, keyedElement))
{
throw new InvalidOperationException($"Unexpected failure in Code Model while updating node keys {oldNodeKey} -> {newNodeKey}");
}
_codeElementTable.Add(newNodeKey, codeElement);
}
// Registers a newly created code element under its node key, evicting any stale
// element previously cached under the same key.
internal void OnCodeElementCreated(SyntaxNodeKey nodeKey, EnvDTE.CodeElement element)
{
// If we're creating an element with the same node key as an element that's already in the table, just remove
// the old element. The old element will continue to function but the new element will replace it in the cache.
if (_codeElementTable.ContainsKey(nodeKey))
{
_codeElementTable.Remove(nodeKey);
}
_codeElementTable.Add(nodeKey, element);
}
// Drops a deleted element's cache entry.
internal void OnCodeElementDeleted(SyntaxNodeKey nodeKey)
{
_codeElementTable.Remove(nodeKey);
}
// Returns the cached code element for the given node if one exists and is still
// valid; otherwise creates (and caches) a fresh one. Throws if a valid cached
// element exists but is not of the requested type.
internal T GetOrCreateCodeElement<T>(SyntaxNode node)
{
    var nodeKey = CodeModelService.TryGetNodeKey(node);
    if (!nodeKey.IsEmpty)
    {
        // Since the node already has a key, check to see if a code element already
        // exists for it. If so, return that element if it's still valid; otherwise,
        // remove it from the table.
        if (_codeElementTable.TryGetValue(nodeKey, out var codeElement))
        {
            if (codeElement != null)
            {
                // TryGetManagedObject can return null (e.g. if the aggregate does not
                // wrap an AbstractCodeElement). The previous code dereferenced the
                // result unconditionally and could throw NullReferenceException; a
                // null managed object is now treated as an invalid cache entry and
                // falls through to be removed and recreated below.
                var element = ComAggregate.TryGetManagedObject<AbstractCodeElement>(codeElement);
                if (element != null && element.IsValidNode())
                {
                    if (codeElement is T)
                    {
                        return (T)codeElement;
                    }

                    throw new InvalidOperationException($"Found a valid code element for {nodeKey}, but it is not of type, {typeof(T).ToString()}");
                }
            }
        }

        // Go ahead and remove the nodeKey from the table. At this point, we'll be creating a new one.
        _codeElementTable.Remove(nodeKey);
    }

    return (T)CodeModelService.CreateInternalCodeElement(this.State, this, node);
}
// Increments the edit ref-count, opening the invisible editor and attaching format
// tracking on the 0 -> 1 transition. Must be balanced by ReleaseEditor.
private void InitializeEditor()
{
_editCount++;
if (_editCount == 1)
{
Debug.Assert(_invisibleEditor == null);
_invisibleEditor = Workspace.OpenInvisibleEditor(GetDocumentId());
CodeModelService.AttachFormatTrackingToBuffer(_invisibleEditor.TextBuffer);
}
}
// Decrements the edit ref-count, tearing the editor down on the 1 -> 0 transition.
private void ReleaseEditor()
{
Debug.Assert(_editCount >= 1);
_editCount--;
if (_editCount == 0)
{
Debug.Assert(_invisibleEditor != null);
CodeModelService.DetachFormatTrackingToBuffer(_invisibleEditor.TextBuffer);
_invisibleEditor.Dispose();
_invisibleEditor = null;
}
}
// Runs 'action' with the invisible editor open, guaranteeing the editor is released
// even when the action throws. Delegates to the Func-based overload so the
// open/close bookkeeping lives in a single place.
internal void EnsureEditor(Action action)
{
    EnsureEditor<object>(() =>
    {
        action();
        return null;
    });
}

// Runs 'action' with the invisible editor open and returns its result; the editor
// is released even when the action throws.
internal T EnsureEditor<T>(Func<T> action)
{
    InitializeEditor();
    try
    {
        return action();
    }
    finally
    {
        ReleaseEditor();
    }
}
// Runs an editing action against the current document, formats the result (first the
// formatter annotation pass, then the elastic-trivia pass — the order matters), and
// applies or batches the changes.
internal void PerformEdit(Func<Document, Document> action)
{
EnsureEditor(() =>
{
Debug.Assert(_invisibleEditor != null);
var document = GetDocument();
var workspace = document.Project.Solution.Workspace;
var result = action(document);
var formatted = Formatter.FormatAsync(result, Formatter.Annotation).WaitAndGetResult_CodeModel(CancellationToken.None);
formatted = Formatter.FormatAsync(formatted, SyntaxAnnotation.ElasticAnnotation).WaitAndGetResult_CodeModel(CancellationToken.None);
ApplyChanges(workspace, formatted);
});
}
// Like PerformEdit, but the action also produces a value (typically the inserted
// node) that is returned to the caller. Note: no formatting pass is run here.
internal T PerformEdit<T>(Func<Document, Tuple<T, Document>> action) where T : SyntaxNode
{
return EnsureEditor(() =>
{
Debug.Assert(_invisibleEditor != null);
var document = GetDocument();
var workspace = document.Project.Solution.Workspace;
var result = action(document);
ApplyChanges(workspace, result.Item2);
return result.Item1;
});
}
// While a batch is open the edited document is only stashed; it is applied to the
// workspace when the batch ends. Outside a batch, changes are applied immediately.
private void ApplyChanges(Workspace workspace, Document document)
{
if (IsBatchOpen)
{
_batchDocument = document;
}
else
{
workspace.TryApplyChanges(document.Project.Solution);
}
}
// Returns the document this model represents, or throws E_FAIL when it cannot be found.
internal Document GetDocument()
{
if (!TryGetDocument(out var document))
{
throw Exceptions.ThrowEFail();
}
return document;
}
// Resolves the current document, preferring (in order): the open batch document,
// the pre-rename snapshot when the id cannot be re-resolved yet, and finally the
// workspace's current solution.
internal bool TryGetDocument(out Document document)
{
if (IsBatchOpen && _batchDocument != null)
{
document = _batchDocument;
return true;
}
if (!TryGetDocumentId(out var documentId) && _previousDocument != null)
{
document = _previousDocument;
}
else
{
// Note: GetDocumentId() throws if the id still cannot be resolved.
document = Workspace.CurrentSolution.GetDocument(GetDocumentId());
}
return document != null;
}
// Synchronous convenience accessors over the current document. Each blocks on the
// corresponding async API via the code-model-specific wait helper.

internal SyntaxTree GetSyntaxTree()
    => GetDocument()
        .GetSyntaxTreeAsync(CancellationToken.None)
        .WaitAndGetResult_CodeModel(CancellationToken.None);

internal SyntaxNode GetSyntaxRoot()
    => GetDocument()
        .GetSyntaxRootAsync(CancellationToken.None)
        .WaitAndGetResult_CodeModel(CancellationToken.None);

internal SemanticModel GetSemanticModel()
    => GetDocument()
        .GetSemanticModelAsync(CancellationToken.None)
        .WaitAndGetResult_CodeModel(CancellationToken.None);

internal Compilation GetCompilation()
    => GetDocument().Project
        .GetCompilationAsync(CancellationToken.None)
        .WaitAndGetResult_CodeModel(CancellationToken.None);
// The project that owns this file's document.
internal ProjectId GetProjectId()
    => GetDocumentId().ProjectId;

// The VS project-system object backing that project.
internal AbstractProject GetAbstractProject()
    => ((VisualStudioWorkspaceImpl)Workspace).ProjectTracker.GetProject(GetProjectId());

// Resolves a node key against the current syntax tree.
internal SyntaxNode LookupNode(SyntaxNodeKey nodeKey)
    => CodeModelService.LookupNode(nodeKey, GetSyntaxTree());

// Typed variant; returns null when the resolved node is not a TSyntaxNode.
internal TSyntaxNode LookupNode<TSyntaxNode>(SyntaxNodeKey nodeKey)
    where TSyntaxNode : SyntaxNode
    => CodeModelService.LookupNode(nodeKey, GetSyntaxTree()) as TSyntaxNode;
// EnvDTE entry points that insert top-level constructs into the file. Each runs
// under EnsureEditor so the invisible editor is open for the duration of the edit.

public EnvDTE.CodeAttribute AddAttribute(string name, string value, object position)
    => EnsureEditor(() => AddAttribute(GetSyntaxRoot(), name, value, position, target: CodeModelService.AssemblyAttributeString));

public EnvDTE.CodeClass AddClass(string name, object position, object bases, object implementedInterfaces, EnvDTE.vsCMAccess access)
    => EnsureEditor(() => AddClass(GetSyntaxRoot(), name, position, bases, implementedInterfaces, access));

public EnvDTE.CodeDelegate AddDelegate(string name, object type, object position, EnvDTE.vsCMAccess access)
    => EnsureEditor(() => AddDelegate(GetSyntaxRoot(), name, type, position, access));

public EnvDTE.CodeEnum AddEnum(string name, object position, object bases, EnvDTE.vsCMAccess access)
    => EnsureEditor(() => AddEnum(GetSyntaxRoot(), name, position, bases, access));
public EnvDTE.CodeFunction AddFunction(string name, EnvDTE.vsCMFunction kind, object type, object position, EnvDTE.vsCMAccess access)
{
throw Exceptions.ThrowEFail();
}
public EnvDTE80.CodeImport AddImport(string name, object position, string alias)
{
return EnsureEditor(() =>
{
return AddImport(GetSyntaxRoot(), name, position, alias);
});
}
public EnvDTE.CodeInterface AddInterface(string name, object position, object bases, EnvDTE.vsCMAccess access)
{
return EnsureEditor(() =>
{
return AddInterface(GetSyntaxRoot(), name, position, bases, access);
});
}
public EnvDTE.CodeNamespace AddNamespace(string name, object position)
{
return EnsureEditor(() =>
{
return AddNamespace(GetSyntaxRoot(), name, position);
});
}
public EnvDTE.CodeStruct AddStruct(string name, object position, object bases, object implementedInterfaces, EnvDTE.vsCMAccess access)
{
return EnsureEditor(() =>
{
return AddStruct(GetSyntaxRoot(), name, position, bases, implementedInterfaces, access);
});
}
// Adding a variable directly at file level is not supported; fail with E_FAIL.
public EnvDTE.CodeVariable AddVariable(string name, object type, object position, EnvDTE.vsCMAccess access)
    => throw Exceptions.ThrowEFail();
// Maps a text point to the innermost element of the requested scope.
// point.AbsoluteCharOffset can't be used: the native GetAbsoluteOffset
// (env\msenv\textmgr\autoutil.cpp) counts each newline as one character, so we
// recompute the offset from line/column ourselves. See DevDiv2 530496.
public EnvDTE.CodeElement CodeElementFromPoint(EnvDTE.TextPoint point, EnvDTE.vsCMElement scope)
{
    var element = CodeElementFromPosition(GetPositionFromTextPoint(point), scope);
    return element ?? throw Exceptions.ThrowEFail();
}
// Converts a 1-based TextPoint (line, column) to a 0-based character position
// in the document's current text.
private int GetPositionFromTextPoint(EnvDTE.TextPoint point)
{
    var lineIndex = point.Line - 1;
    var columnIndex = point.LineCharOffset - 1;
    var text = GetDocument().GetTextAsync(CancellationToken.None).WaitAndGetResult_CodeModel(CancellationToken.None);
    return text.Lines[lineIndex].Start + columnIndex;
}
// Maps a character position to the innermost code-model element matching the
// requested scope kind, or returns null when nothing at the position matches.
internal EnvDTE.CodeElement CodeElementFromPosition(int position, EnvDTE.vsCMElement scope)
{
    var root = GetSyntaxRoot();
    var leftToken = SyntaxFactsService.FindTokenOnLeftOfPosition(root, position);
    var rightToken = SyntaxFactsService.FindTokenOnRightOfPosition(root, position);

    // We apply a set of heuristics to determine which member we pick to start searching.
    var token = leftToken;
    if (leftToken != rightToken)
    {
        if (leftToken.Span.End == position && rightToken.SpanStart == position)
        {
            // If both tokens are touching, we prefer identifiers and keywords to
            // separators. Note that the language doesn't allow both tokens to be a
            // keyword or identifier.
            if (SyntaxFactsService.IsKeyword(rightToken) ||
                SyntaxFactsService.IsIdentifier(rightToken))
            {
                token = rightToken;
            }
        }
        else if (leftToken.Span.End < position && rightToken.SpanStart <= position)
        {
            // If only the right token is touching, we have to use it.
            token = rightToken;
        }
    }

    // If we ended up using the left token but the position is after that token,
    // walk up to the first node who's last token is not the leftToken. By doing this, we
    // ensure that we don't find members when the position is actually between them.
    // In that case, we should find the enclosing type or namespace.
    var parent = token.Parent;
    if (token == leftToken && position > token.Span.End)
    {
        while (parent != null)
        {
            if (parent.GetLastToken() == token)
            {
                parent = parent.Parent;
            }
            else
            {
                break;
            }
        }
    }

    // Search upward from the chosen node for the nearest ancestor matching the
    // requested scope.
    var node = parent != null
        ? parent.AncestorsAndSelf().FirstOrDefault(n => CodeModelService.MatchesScope(n, scope))
        : null;

    if (node == null)
    {
        return null;
    }

    if (scope == EnvDTE.vsCMElement.vsCMElementAttribute ||
        scope == EnvDTE.vsCMElement.vsCMElementImportStmt ||
        scope == EnvDTE.vsCMElement.vsCMElementParameter ||
        scope == EnvDTE.vsCMElement.vsCMElementOptionStmt ||
        scope == EnvDTE.vsCMElement.vsCMElementInheritsStmt ||
        scope == EnvDTE.vsCMElement.vsCMElementImplementsStmt ||
        (scope == EnvDTE.vsCMElement.vsCMElementFunction && CodeModelService.IsAccessorNode(node)))
    {
        // Attributes, imports, parameters, Option, Inherits and Implements
        // don't have node keys of their own and won't be included in our
        // collection of elements. Delegate to the service to create these.
        return CodeModelService.CreateInternalCodeElement(State, this, node);
    }

    return GetOrCreateCodeElement<EnvDTE.CodeElement>(node);
}
// The top-level elements of this file, exposed as a namespace collection
// rooted at the empty node key.
public EnvDTE.CodeElements CodeElements
    => NamespaceCollection.Create(this.State, this, this, SyntaxNodeKey.Empty);
// The owning project item, or null if the handle no longer resolves to one.
public EnvDTE.ProjectItem Parent => _parentHandle.Object as EnvDTE.ProjectItem;
// Deletes a code element. Accepts either the element itself or anything the
// CodeElements collection's Item accepts (e.g. a name or index).
public void Remove(object element)
{
    var codeElement = ComAggregate.TryGetManagedObject<AbstractCodeElement>(element)
        ?? ComAggregate.TryGetManagedObject<AbstractCodeElement>(this.CodeElements.Item(element));

    if (codeElement == null)
    {
        throw new ArgumentException(ServicesVSResources.Element_is_not_valid, nameof(element));
    }

    codeElement.Delete();
}
// COM entry point: reports failure via an HRESULT instead of throwing.
int IVBFileCodeModelEvents.StartEdit()
{
    try
    {
        InitializeEditor();

        // An edit count of 1 means this is the outermost StartEdit on the
        // stack, so turn on batching and start collecting touched elements.
        if (_editCount == 1)
        {
            _batchMode = true;
            _batchElements = new List<AbstractKeyedCodeElement>();
        }

        return VSConstants.S_OK;
    }
    catch (Exception ex)
    {
        return Marshal.GetHRForException(ex);
    }
}
// COM entry point: closes a batch opened by StartEdit, flushing deferred work
// (simplification, formatting, node-key reacquisition) on the outermost call.
// Reports failure via an HRESULT instead of throwing.
int IVBFileCodeModelEvents.EndEdit()
{
    try
    {
        // Only the outermost EndEdit (edit count back at 1) performs the flush.
        if (_editCount == 1)
        {
            List<ValueTuple<AbstractKeyedCodeElement, SyntaxPath>> elementAndPaths = null;

            // Capture a SyntaxPath for each batched element whose node still
            // resolves, so its node key can be reacquired after the tree is
            // rewritten below.
            if (_batchElements.Count > 0)
            {
                foreach (var element in _batchElements)
                {
                    var node = element.LookupNode();
                    if (node != null)
                    {
                        elementAndPaths = elementAndPaths ?? new List<ValueTuple<AbstractKeyedCodeElement, SyntaxPath>>();
                        elementAndPaths.Add(ValueTuple.Create(element, new SyntaxPath(node)));
                    }
                }
            }

            if (_batchDocument != null)
            {
                // perform expensive operations at once
                var newDocument = Simplifier.ReduceAsync(_batchDocument, Simplifier.Annotation, cancellationToken: CancellationToken.None).WaitAndGetResult_CodeModel(CancellationToken.None);
                _batchDocument.Project.Solution.Workspace.TryApplyChanges(newDocument.Project.Solution);

                // done using batch document
                _batchDocument = null;
            }

            // Ensure the file is prettylisted, even if we didn't make any edits
            CodeModelService.EnsureBufferFormatted(_invisibleEditor.TextBuffer);

            if (elementAndPaths != null)
            {
                foreach (var elementAndPath in elementAndPaths)
                {
                    // make sure the element is there.
                    if (_codeElementTable.TryGetValue(elementAndPath.Item1.NodeKey, out var existingElement))
                    {
                        elementAndPath.Item1.ReacquireNodeKey(elementAndPath.Item2, CancellationToken.None);
                    }

                    // make sure existing element doesn't go away (weak reference) in the middle of
                    // updating the node key
                    GC.KeepAlive(existingElement);
                }
            }

            _batchMode = false;
            _batchElements = null;
        }

        return VSConstants.S_OK;
    }
    catch (Exception ex)
    {
        return Marshal.GetHRForException(ex);
    }
    finally
    {
        // Balances the InitializeEditor call made in StartEdit.
        ReleaseEditor();
    }
}
// Opens a batch edit session; pair with EndBatch. Converts a failure HRESULT
// from the explicit interface implementation back into an exception.
public void BeginBatch()
{
    ErrorHandler.ThrowOnFailure(((IVBFileCodeModelEvents)this).StartEdit());
}
// Closes a batch edit session opened by BeginBatch. Converts a failure
// HRESULT from the explicit interface implementation back into an exception.
public void EndBatch()
{
    ErrorHandler.ThrowOnFailure(((IVBFileCodeModelEvents)this).EndEdit());
}
// True while a StartEdit/EndEdit batch is in progress.
public bool IsBatchOpen => _batchMode && _editCount > 0;
// Element IDs are not supported by this code model implementation.
public EnvDTE.CodeElement ElementFromID(string id) => throw new NotImplementedException();
// Reports an error status when the current syntax tree contains any
// error-severity diagnostics; otherwise the parse is considered complete.
public EnvDTE80.vsCMParseStatus ParseStatus
{
    get
    {
        var hasErrors = GetSyntaxTree().GetDiagnostics().Any(d => d.Severity == DiagnosticSeverity.Error);
        return hasErrors
            ? EnvDTE80.vsCMParseStatus.vsCMParseStatusError
            : EnvDTE80.vsCMParseStatus.vsCMParseStatusComplete;
    }
}
// Flushes pending code-model change events.
public void Synchronize() => FireEvents();
// ICodeElementContainer: this container's elements are the file's top-level elements.
EnvDTE.CodeElements ICodeElementContainer<AbstractCodeElement>.GetCollection() => CodeElements;
// Snapshots a (node key, syntax path) pair for every live keyed element whose
// node still resolves in the current tree.
internal List<GlobalNodeKey> GetCurrentNodeKeys()
{
    var result = new List<GlobalNodeKey>();

    foreach (var element in _codeElementTable.Values)
    {
        var keyedElement = ComAggregate.TryGetManagedObject<AbstractKeyedCodeElement>(element);
        if (keyedElement != null && keyedElement.TryLookupNode(out var node))
        {
            result.Add(new GlobalNodeKey(keyedElement.NodeKey, new SyntaxPath(node)));
        }
    }

    return result;
}
// Reacquires each element's node key from its recorded syntax path after the
// tree has changed.
internal void ResetElementKeys(List<GlobalNodeKey> globalNodeKeys)
{
    globalNodeKeys.ForEach(ResetElementKey);
}
private void ResetElementKey(GlobalNodeKey globalNodeKey)
{
    // Failure to find the element is not an error -- it just means the code
    // element didn't exist...
    if (!_codeElementTable.TryGetValue(globalNodeKey.NodeKey, out var element))
    {
        return;
    }

    var keyedElement = ComAggregate.GetManagedObject<AbstractKeyedCodeElement>(element);
    keyedElement?.ReacquireNodeKey(globalNodeKey.Path, default(CancellationToken));
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsAzureCompositeModelClient
{
using System.Linq;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// PolymorphismOperations operations.
/// </summary>
internal partial class PolymorphismOperations : Microsoft.Rest.IServiceOperations<AzureCompositeModel>, IPolymorphismOperations
{
/// <summary>
/// Initializes a new instance of the PolymorphismOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client; all operations are issued through it.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when <paramref name="client"/> is null.
/// </exception>
internal PolymorphismOperations(AzureCompositeModel client)
{
    if (client == null)
    {
        // nameof keeps the parameter name refactor-safe; the runtime value is
        // identical to the previous "client" string literal.
        throw new System.ArgumentNullException(nameof(client));
    }
    this.Client = client;
}
/// <summary>
/// Gets a reference to the AzureCompositeModel service client that this
/// operations class issues its requests through.
/// </summary>
public AzureCompositeModel Client { get; private set; }
/// <summary>
/// Get complex types that are polymorphic
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
// NOTE: AutoRest-generated operation -- hand edits will be lost if the client
// is regenerated. Issues a GET to complex/polymorphism/valid and deserializes
// the polymorphic Fish payload.
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Fish>> GetValidWithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    // Tracing
    bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
        System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "GetValid", tracingParameters);
    }
    // Construct URL
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "complex/polymorphism/valid").ToString();
    // This operation defines no query parameters; the list and check below are
    // generator boilerplate and the branch never runs.
    System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += "?" + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    // Caller-supplied headers replace any default header of the same name.
    if (customHeaders != null)
    {
        foreach (var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    // GET carries no body: _requestContent stays null and is only used in the
    // error wrapper below.
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Any status other than 200 is surfaced as an ErrorException carrying the
    // request/response pair; the transport objects are disposed here because
    // they are not handed back to the caller on this path.
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    // On success the request/response objects are returned (not disposed) so
    // the caller can inspect them.
    var _result = new Microsoft.Rest.Azure.AzureOperationResponse<Fish>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Fish>(_responseContent, this.Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Put complex types that are polymorphic
/// </summary>
/// <param name='complexBody'>
/// Please put a salmon that looks like this:
/// {
/// 'fishtype':'Salmon',
/// 'location':'alaska',
/// 'iswild':true,
/// 'species':'king',
/// 'length':1.0,
/// 'siblings':[
/// {
/// 'fishtype':'Shark',
/// 'age':6,
/// 'birthday': '2012-01-05T01:00:00Z',
/// 'length':20.0,
/// 'species':'predator',
/// },
/// {
/// 'fishtype':'Sawshark',
/// 'age':105,
/// 'birthday': '1900-01-05T01:00:00Z',
/// 'length':10.0,
/// 'picture': new Buffer([255, 255, 255, 255,
/// 254]).toString('base64'),
/// 'species':'dangerous',
/// },
/// {
/// 'fishtype': 'goblin',
/// 'age': 1,
/// 'birthday': '2015-08-08T00:00:00Z',
/// 'length': 30.0,
/// 'species': 'scary',
/// 'jawsize': 5
/// }
/// ]
/// };
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
// NOTE: AutoRest-generated operation -- hand edits will be lost if the client
// is regenerated. Issues a PUT to complex/polymorphism/valid with the given
// polymorphic Fish body.
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> PutValidWithHttpMessagesAsync(Fish complexBody, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    if (complexBody == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "complexBody");
    }
    // Generator boilerplate: this null check is redundant (null was already
    // rejected above); left as-is because the file is regenerated.
    if (complexBody != null)
    {
        complexBody.Validate();
    }
    // Tracing
    bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
        System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
        tracingParameters.Add("complexBody", complexBody);
        tracingParameters.Add("cancellationToken", cancellationToken);
        Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "PutValid", tracingParameters);
    }
    // Construct URL
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "complex/polymorphism/valid").ToString();
    // This operation defines no query parameters; the list and check below are
    // generator boilerplate and the branch never runs.
    System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += "?" + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("PUT");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    // Caller-supplied headers replace any default header of the same name.
    if (customHeaders != null)
    {
        foreach (var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    if (complexBody != null)
    {
        _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(complexBody, this.Client.SerializationSettings);
        _httpRequest.Content = new System.Net.Http.StringContent(_requestContent, System.Text.Encoding.UTF8);
        _httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
    }
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Any status other than 200 is surfaced as an ErrorException carrying the
    // request/response pair; the transport objects are disposed here because
    // they are not handed back to the caller on this path.
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    // On success the request/response objects are returned (not disposed) so
    // the caller can inspect them.
    var _result = new Microsoft.Rest.Azure.AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Put complex types that are polymorphic, attempting to omit required
/// 'birthday' field - the request should not be allowed from the client
/// </summary>
/// <param name='complexBody'>
/// Please attempt put a sawshark that looks like this, the client should not
/// allow this data to be sent:
/// {
/// "fishtype": "sawshark",
/// "species": "snaggle toothed",
/// "length": 18.5,
/// "age": 2,
/// "birthday": "2013-06-01T01:00:00Z",
/// "location": "alaska",
/// "picture": base64(FF FF FF FF FE),
/// "siblings": [
/// {
/// "fishtype": "shark",
/// "species": "predator",
/// "birthday": "2012-01-05T01:00:00Z",
/// "length": 20,
/// "age": 6
/// },
/// {
/// "fishtype": "sawshark",
/// "species": "dangerous",
/// "picture": base64(FF FF FF FF FE),
/// "length": 10,
/// "age": 105
/// }
/// ]
/// }
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
// NOTE: AutoRest-generated operation -- hand edits will be lost if the client
// is regenerated. Issues a PUT to complex/polymorphism/missingrequired/invalid;
// client-side validation (Validate below) is expected to reject a body that
// omits required fields before the request is sent.
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> PutValidMissingRequiredWithHttpMessagesAsync(Fish complexBody, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    if (complexBody == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "complexBody");
    }
    // Generator boilerplate: this null check is redundant (null was already
    // rejected above); left as-is because the file is regenerated.
    if (complexBody != null)
    {
        complexBody.Validate();
    }
    // Tracing
    bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
        System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
        tracingParameters.Add("complexBody", complexBody);
        tracingParameters.Add("cancellationToken", cancellationToken);
        Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "PutValidMissingRequired", tracingParameters);
    }
    // Construct URL
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "complex/polymorphism/missingrequired/invalid").ToString();
    // This operation defines no query parameters; the list and check below are
    // generator boilerplate and the branch never runs.
    System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += "?" + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("PUT");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    // Caller-supplied headers replace any default header of the same name.
    if (customHeaders != null)
    {
        foreach (var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    if (complexBody != null)
    {
        _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(complexBody, this.Client.SerializationSettings);
        _httpRequest.Content = new System.Net.Http.StringContent(_requestContent, System.Text.Encoding.UTF8);
        _httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
    }
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Any status other than 200 is surfaced as an ErrorException carrying the
    // request/response pair; the transport objects are disposed here because
    // they are not handed back to the caller on this path.
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    // On success the request/response objects are returned (not disposed) so
    // the caller can inspect them.
    var _result = new Microsoft.Rest.Azure.AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
}
}
| |
/// Copyright (C) 2012 SASAKI Takahiro All Rights Reserved ///
using System;
using System.IO;
using System.Text;
using System.Net;
using System.Diagnostics;
using System.Runtime.InteropServices;
// An Ethernet/IP/UDP frame split into its three protocol headers plus the UDP
// payload bytes.
public class EthIpUdpData {
    public ethernet_hdstr eth;
    public ip_hdstr ip;
    public udp_hdstr udp;
    public byte[] data;

    // Wire layout of the stacked headers, used to carve up the raw frame.
    [StructLayout(LayoutKind.Sequential)]
    public struct eth_ip_udp_hdr {
        public ethernet_hdstr eth;
        public ip_hdstr ip;
        public udp_hdstr udp;
    }

    protected const short ETH_TYPE_IP = 0x0800;    // EtherType for IPv4
    protected const byte IP_PROTOCOL_UDP = 17;     // IP protocol number for UDP

    // Instances are only produced by the factory below.
    private EthIpUdpData(){}

    // Parses a raw Ethernet frame. Returns null when the buffer is shorter
    // than the combined headers, not IPv4, or not UDP.
    static EthIpUdpData GetEthIpUdpData(byte[] ethIpUdpData){
        eth_ip_udp_hdr header = new eth_ip_udp_hdr();
        if( ethIpUdpData.Length < Marshal.SizeOf(header)) return null;

        header = BinUtil.Bytes2Structure<eth_ip_udp_hdr>(ethIpUdpData);

        // Header fields arrive in network byte order. NetworkToHostOrder makes
        // the intent explicit; the previous HostToNetworkOrder calls performed
        // the same byte swap but read backwards.
        header.eth.type = IPAddress.NetworkToHostOrder(header.eth.type);
        if( header.eth.type != ETH_TYPE_IP ) return null;
        if( header.ip.protocol != IP_PROTOCOL_UDP ) return null;

        header.udp.src = IPAddress.NetworkToHostOrder(header.udp.src);
        header.udp.dst = IPAddress.NetworkToHostOrder(header.udp.dst);
        header.udp.length = IPAddress.NetworkToHostOrder(header.udp.length);
        header.udp.checksum = IPAddress.NetworkToHostOrder(header.udp.checksum);

        EthIpUdpData result = new EthIpUdpData();
        result.eth = header.eth;
        result.ip = header.ip;
        result.udp = header.udp;
        // NOTE(review): udp.length covers the 8-byte UDP header plus payload,
        // so passing it unchanged may request 8 bytes beyond the payload --
        // confirm whether DumpLib.GetBytes clamps to the buffer.
        result.data = DumpLib.GetBytes(ethIpUdpData, Marshal.SizeOf(header), header.udp.length );
        return result;
    }
}
// A UDP datagram split into its header and payload bytes.
public class UdpData {
    public udp_hdstr udp;
    public byte[] data;

    // Instances are only produced by the factory below.
    private UdpData(){}

    // Parses a raw UDP datagram. Returns null when the buffer is shorter than
    // a UDP header.
    static UdpData GetUdpData(byte[] udpData){
        UdpData result = new UdpData();
        if( udpData.Length < Marshal.SizeOf(result.udp)) return null;

        // (Removed a dead "new udp_hdstr()" assignment here: the value was
        // immediately overwritten by the parse below.)
        result.udp = BinUtil.Bytes2Structure<udp_hdstr>(udpData);

        // Header fields arrive in network byte order. NetworkToHostOrder makes
        // the intent explicit; the previous HostToNetworkOrder calls performed
        // the same byte swap but read backwards.
        result.udp.src = IPAddress.NetworkToHostOrder(result.udp.src);
        result.udp.dst = IPAddress.NetworkToHostOrder(result.udp.dst);
        result.udp.length = IPAddress.NetworkToHostOrder(result.udp.length);
        result.udp.checksum = IPAddress.NetworkToHostOrder(result.udp.checksum);

        // NOTE(review): udp.length includes the 8-byte UDP header, so this may
        // request 8 bytes beyond the payload -- confirm whether
        // DumpLib.GetBytes clamps to the buffer.
        result.data = DumpLib.GetBytes(udpData, Marshal.SizeOf(result.udp), result.udp.length );
        return result;
    }
}
// One captured frame: the pcap per-record header plus the raw link-layer bytes.
public class CapFrame {
    public pcap_pkthdr PcapFrameHeader;
    public byte[] data;

    // Wire layout of the stacked Ethernet/IP/UDP headers.
    [StructLayout(LayoutKind.Sequential)]
    public struct udp_frame {
        public ethernet_hdstr eth;
        public ip_hdstr ip;
        public udp_hdstr udp;
    }

    protected const short ETH_TYPE_IP = 0x0800;    // EtherType for IPv4
    protected const byte IP_PROTOCOL_UDP = 17;     // IP protocol number for UDP

    // Extracts the UDP payload of this frame, or null if it is not IPv4/UDP.
    public byte[] GetUdpData()
    {
        return this.GetUdpData(this.data);
    }

    // Extracts the UDP payload from a raw Ethernet frame. Returns null when
    // the buffer is too short, not IPv4, or not UDP.
    public byte[] GetUdpData(byte[] data)
    {
        byte[] bytes = null;
        udp_frame frame = new udp_frame();
        if( data.Length >= Marshal.SizeOf(frame))
        {
            frame = BinUtil.Bytes2Structure<udp_frame>(data);

            // Header fields arrive in network byte order. NetworkToHostOrder
            // makes the intent explicit; the previous HostToNetworkOrder calls
            // performed the same byte swap but read backwards.
            frame.eth.type = IPAddress.NetworkToHostOrder(frame.eth.type);
            if( frame.eth.type != ETH_TYPE_IP ) return null;
            if( frame.ip.protocol != IP_PROTOCOL_UDP ) return null;

            frame.udp.src = IPAddress.NetworkToHostOrder(frame.udp.src);
            frame.udp.dst = IPAddress.NetworkToHostOrder(frame.udp.dst);
            frame.udp.length = IPAddress.NetworkToHostOrder(frame.udp.length);
            frame.udp.checksum = IPAddress.NetworkToHostOrder(frame.udp.checksum);

            // NOTE(review): udp.length includes the 8-byte UDP header, so this
            // may request 8 bytes beyond the payload -- confirm whether
            // DumpLib.GetBytes clamps to the buffer.
            bytes = DumpLib.GetBytes(data, Marshal.SizeOf(frame), frame.udp.length );
        }
        return bytes;
    }

    // Byte-order helpers for unsigned 32-/16-bit header fields.
    protected uint Decode(uint t)
    {
        return (uint)IPAddress.NetworkToHostOrder( (Int32)t );
    }
    protected ushort Decode(ushort t)
    {
        return (ushort)IPAddress.NetworkToHostOrder( (Int16)t );
    }

    // pcap timestamps are seconds/microseconds since the Unix epoch (UTC).
    // Marking the epoch as UTC makes the ToLocalTime conversion below explicit;
    // behavior is unchanged because ToLocalTime already treats an Unspecified
    // kind as UTC.
    protected static readonly DateTime dtEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

    // Local-time timestamp of this frame, "yyyy/MM/dd HH:mm:ss.ffffff".
    public string GetTimeString()
    {
        DateTime dt = dtEpoch.AddSeconds(this.PcapFrameHeader.tv_sec);
        string str = dt.ToLocalTime().ToString("yyyy/MM/dd HH:mm:ss")
            + String.Format(".{0:D6}", this.PcapFrameHeader.tv_usec);
        return str;
    }

    // Local-time timestamp for the given epoch seconds/microseconds.
    public string GetTimeString(uint tv_sec, uint tv_usec)
    {
        DateTime dt = dtEpoch.AddSeconds(tv_sec);
        string str = dt.ToLocalTime().ToString("yyyy/MM/dd HH:mm:ss")
            + String.Format(".{0:D6}", tv_usec);
        return str;
    }

    // UTC timestamp for the given epoch seconds/microseconds.
    public string GetTimeStringUtc(uint tv_sec, uint tv_usec)
    {
        DateTime dt = dtEpoch.AddSeconds(tv_sec);
        string str = dt.ToString("yyyy/MM/dd HH:mm:ss")
            + String.Format(".{0:D6}", tv_usec);
        return str;
    }
}
public class CapRead {
// When true, UDP checksums are honored; NOTE(review): no code in this class
// reads the flag -- presumably consumed elsewhere, confirm against callers.
public bool UdpChecksumEnabled = true;
// Scratch header storage for the most recently parsed protocol layers.
protected pcap_pkthdr pcap_header;
protected ethernet_hdstr ethernet_header;
protected ip_hdstr ip_header;
protected udp_hdstr udp_header;
// tcp_hdstr tcp_header;
// 24-byte pcap global header this reader expects; the leading bytes
// 0xD4 0xC3 0xB2 0xA1 are the little-endian encoding of the pcap magic
// 0xA1B2C3D4 (appears to be a standard v2.4 Ethernet capture header).
private byte[] FILE_HEADER =
{0xD4, 0xC3, 0xB2, 0xA1, 0x02, 0x00, 0x04, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xFF, 0xFF, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00};
// Global header actually read from the opened file (stored by Open).
private byte[] READ_FILE_HEADER;
// Reader over the capture file; null when no file is open.
protected BinaryReader br = null;
public CapRead()
{
}
public void Open(string FileName)
{
br = new BinaryReader(File.Open( FileName , FileMode.Open) );
READ_FILE_HEADER = br.ReadBytes( FILE_HEADER.Length);
}
public CapFrame ReadFrame()
{
byte[] data;
CapFrame cap = new CapFrame();
data = br.ReadBytes( Marshal.SizeOf(cap.PcapFrameHeader) );
if( data.Length < Marshal.SizeOf(cap.PcapFrameHeader) ) return null;
cap.PcapFrameHeader = BinUtil.Bytes2Structure<pcap_pkthdr>(data);
cap.data = br.ReadBytes( (int)cap.PcapFrameHeader.caplen );
return cap;
}
public void Close()
{
if( br != null )
{
br.Close();
br = null;
}
}
#if CAPREAD_UTEST
// public static readonly DateTime dtEpoch = new DateTime(1970, 1, 1);
static public void Main(string[] args)
{
string ReadFile;
if( args.Length > 0 )
{
ReadFile = args[0];
}
else
{
ReadFile = System.Windows.Forms.Application.StartupPath + @"\test.cap";
}
CapRead cr = null;
try{
cr = new CapRead();
cr.Open( ReadFile );
int count=0;
CapFrame frame = null;
frame = cr.ReadFrame();
while ( null != frame )
{
count++;
// Console.Write("No.{0}", count);
/// Console.Write(" {0}", frame.GetTimeString());
// Console.WriteLine();
// Console.Write(" Frame[tv_sec={0}, tv_usec={1}, caplen={2}, len={3}]",
// frame.PcapFrameHeader.tv_sec,
// frame.PcapFrameHeader.tv_usec,
// frame.PcapFrameHeader.caplen,
// frame.PcapFrameHeader.len);
// DateTime dt = dtEpoch.AddSeconds(frame.PcapFrameHeader.tv_sec);
// Console.Write(dt.ToLocalTime().ToString("(yyyy/MM/dd HH:mm:ss") );
// Console.Write(".{0:D6})", frame.PcapFrameHeader.tv_usec);
// Console.Write("({0})", frame.GetTimeString());
// Console.Write(" DataLength={0}", frame.data.Length);
// Console.WriteLine();
frame.GetUdpData();
// Console.WriteLine();
frame = cr.ReadFrame();
}
Console.WriteLine("Count:{0}", count);
}catch(Exception e)
{
Console.WriteLine(e);
}finally{
if( null != cr) cr.Close();
}
Console.WriteLine();
Console.WriteLine("### Program Finished.");
Console.ReadLine();
}
#endif
}
| |
// CRC32.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2011 Dino Chiesa.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// Last Saved: <2011-August-02 18:25:54>
//
// ------------------------------------------------------------------
//
// This module defines the CRC32 class, which can do the CRC32 algorithm, using
// arbitrary starting polynomials, and bit reversal. The bit reversal is what
// distinguishes this CRC-32 used in BZip2 from the CRC-32 that is used in PKZIP
// files, or GZIP files. This class does both.
//
// ------------------------------------------------------------------
using System;
using Interop = System.Runtime.InteropServices;
namespace Ionic.Crc
{
/// <summary>
/// Computes a CRC-32. The CRC-32 algorithm is parameterized - you
/// can set the polynomial and enable or disable bit
/// reversal. This can be used for GZIP, BZip2, or ZIP.
/// </summary>
/// <remarks>
/// This type is used internally by DotNetZip; it is generally not used
/// directly by applications wishing to create, read, or manipulate zip
/// archive files.
/// </remarks>
[Interop.GuidAttribute("ebc25cf6-9120-4283-b972-0e5520d0000C")]
[Interop.ComVisible(true)]
#if !NETCF
[Interop.ClassInterface(Interop.ClassInterfaceType.AutoDispatch)]
#endif
internal class CRC32
{
    /// <summary>
    /// Indicates the total number of bytes applied to the CRC.
    /// </summary>
    public Int64 TotalBytesRead
    {
        get
        {
            return _TotalBytesRead;
        }
    }

    /// <summary>
    /// Indicates the current CRC for all blocks slurped in.
    /// </summary>
    public Int32 Crc32Result
    {
        get
        {
            // The register holds the inverted running CRC; finalize by
            // inverting once more.
            return unchecked((Int32)(~_register));
        }
    }

    /// <summary>
    /// Returns the CRC32 for the specified stream.
    /// </summary>
    /// <param name="input">The stream over which to calculate the CRC32</param>
    /// <returns>the CRC32 calculation</returns>
    public Int32 GetCrc32(System.IO.Stream input)
    {
        return GetCrc32AndCopy(input, null);
    }

    /// <summary>
    /// Returns the CRC32 for the specified stream, and writes the input into the
    /// output stream.
    /// </summary>
    /// <param name="input">The stream over which to calculate the CRC32</param>
    /// <param name="output">The stream into which to deflate the input</param>
    /// <returns>the CRC32 calculation</returns>
    /// <exception cref="ArgumentNullException">when <paramref name="input"/> is null</exception>
    public Int32 GetCrc32AndCopy(System.IO.Stream input, System.IO.Stream output)
    {
        // Fix: throw the specific ArgumentNullException rather than a bare
        // Exception (still caught by any existing catch (Exception) handler).
        if (input == null)
            throw new ArgumentNullException("input", "The input stream must not be null.");
        unchecked
        {
            byte[] buffer = new byte[BUFFER_SIZE];
            _TotalBytesRead = 0;
            int count = input.Read(buffer, 0, BUFFER_SIZE);
            if (output != null) output.Write(buffer, 0, count);
            while (count > 0)
            {
                // SlurpBlock also advances _TotalBytesRead; the original code
                // additionally incremented it here, double-counting every byte.
                SlurpBlock(buffer, 0, count);
                count = input.Read(buffer, 0, BUFFER_SIZE);
                if (output != null) output.Write(buffer, 0, count);
            }
            return (Int32)(~_register);
        }
    }

    /// <summary>
    /// Get the CRC32 for the given (word,byte) combo. This is a
    /// computation defined by PKzip for PKZIP 2.0 (weak) encryption.
    /// </summary>
    /// <param name="W">The word to start with.</param>
    /// <param name="B">The byte to combine it with.</param>
    /// <returns>The CRC-ized result.</returns>
    public Int32 ComputeCrc32(Int32 W, byte B)
    {
        return _InternalComputeCrc32((UInt32)W, B);
    }

    internal Int32 _InternalComputeCrc32(UInt32 W, byte B)
    {
        return (Int32)(crc32Table[(W ^ B) & 0xFF] ^ (W >> 8));
    }

    /// <summary>
    /// Update the value for the running CRC32 using the given block of bytes.
    /// This is useful when using the CRC32() class in a Stream.
    /// </summary>
    /// <param name="block">block of bytes to slurp</param>
    /// <param name="offset">starting point in the block</param>
    /// <param name="count">how many bytes within the block to slurp</param>
    /// <exception cref="ArgumentNullException">when <paramref name="block"/> is null</exception>
    public void SlurpBlock(byte[] block, int offset, int count)
    {
        if (block == null)
            throw new ArgumentNullException("block", "The data buffer must not be null.");
        for (int i = 0; i < count; i++)
        {
            byte b = block[offset + i];
            if (this.reverseBits)
            {
                // bzip-style (MSB-first) update.
                UInt32 temp = (_register >> 24) ^ b;
                _register = (_register << 8) ^ crc32Table[temp];
            }
            else
            {
                // zip/gzip-style (LSB-first) update.
                UInt32 temp = (_register & 0x000000FF) ^ b;
                _register = (_register >> 8) ^ crc32Table[temp];
            }
        }
        _TotalBytesRead += count;
    }

    /// <summary>
    /// Process one byte in the CRC.
    /// </summary>
    /// <param name = "b">the byte to include into the CRC . </param>
    public void UpdateCRC(byte b)
    {
        if (this.reverseBits)
        {
            UInt32 temp = (_register >> 24) ^ b;
            _register = (_register << 8) ^ crc32Table[temp];
        }
        else
        {
            UInt32 temp = (_register & 0x000000FF) ^ b;
            _register = (_register >> 8) ^ crc32Table[temp];
        }
    }

    /// <summary>
    /// Process a run of N identical bytes into the CRC.
    /// </summary>
    /// <remarks>
    /// <para>
    /// This method serves as an optimization for updating the CRC when a
    /// run of identical bytes is found. Rather than passing in a buffer of
    /// length n, containing all identical bytes b, this method accepts the
    /// byte value and the length of the (virtual) buffer - the length of
    /// the run.
    /// </para>
    /// </remarks>
    /// <param name = "b">the byte to include into the CRC. </param>
    /// <param name = "n">the number of times that byte should be repeated. </param>
    public void UpdateCRC(byte b, int n)
    {
        // Note: the original guarded the table index with (temp >= 0) ? temp
        // : (temp + 256); temp is unsigned, so that branch was dead code.
        while (n-- > 0)
        {
            if (this.reverseBits)
            {
                UInt32 temp = (_register >> 24) ^ b;
                _register = (_register << 8) ^ crc32Table[temp];
            }
            else
            {
                UInt32 temp = (_register & 0x000000FF) ^ b;
                _register = (_register >> 8) ^ crc32Table[temp];
            }
        }
    }

    // Reverse the bit order of a 32-bit value.
    private static uint ReverseBits(uint data)
    {
        unchecked
        {
            uint ret = data;
            ret = (ret & 0x55555555) << 1 | (ret >> 1) & 0x55555555;
            ret = (ret & 0x33333333) << 2 | (ret >> 2) & 0x33333333;
            ret = (ret & 0x0F0F0F0F) << 4 | (ret >> 4) & 0x0F0F0F0F;
            ret = (ret << 24) | ((ret & 0xFF00) << 8) | ((ret >> 8) & 0xFF00) | (ret >> 24);
            return ret;
        }
    }

    // Reverse the bit order of a byte (multiply/mask trick).
    private static byte ReverseBits(byte data)
    {
        unchecked
        {
            uint u = (uint)data * 0x00020202;
            uint m = 0x01044010;
            uint s = u & m;
            uint t = (u << 2) & (m << 1);
            return (byte)((0x01001001 * (s + t)) >> 24);
        }
    }

    // Build the 256-entry lookup table for the configured polynomial,
    // bit-reversing entries when reverseBits is set (BZip2 variant).
    private void GenerateLookupTable()
    {
        crc32Table = new UInt32[256];
        unchecked
        {
            UInt32 dwCrc;
            byte i = 0;
            do
            {
                dwCrc = i;
                for (byte j = 8; j > 0; j--)
                {
                    if ((dwCrc & 1) == 1)
                    {
                        dwCrc = (dwCrc >> 1) ^ dwPolynomial;
                    }
                    else
                    {
                        dwCrc >>= 1;
                    }
                }
                if (reverseBits)
                {
                    crc32Table[ReverseBits(i)] = ReverseBits(dwCrc);
                }
                else
                {
                    crc32Table[i] = dwCrc;
                }
                i++;
            } while (i != 0); // byte wraps 255 -> 0, terminating after 256 entries
        }
#if VERBOSE
        Console.WriteLine();
        Console.WriteLine("private static readonly UInt32[] crc32Table = {");
        for (int i = 0; i < crc32Table.Length; i+=4)
        {
            Console.Write("   ");
            for (int j=0; j < 4; j++)
            {
                Console.Write(" 0x{0:X8}U,", crc32Table[i+j]);
            }
            Console.WriteLine();
        }
        Console.WriteLine("};");
        Console.WriteLine();
#endif
    }

    // Multiply a GF(2) 32x32 matrix by a 32-bit vector.
    private uint gf2_matrix_times(uint[] matrix, uint vec)
    {
        uint sum = 0;
        int i = 0;
        while (vec != 0)
        {
            if ((vec & 0x01) == 0x01)
                sum ^= matrix[i];
            vec >>= 1;
            i++;
        }
        return sum;
    }

    // square = mat * mat in GF(2).
    private void gf2_matrix_square(uint[] square, uint[] mat)
    {
        for (int i = 0; i < 32; i++)
            square[i] = gf2_matrix_times(mat, mat[i]);
    }

    /// <summary>
    /// Combines the given CRC32 value with the current running total.
    /// </summary>
    /// <remarks>
    /// This is useful when using a divide-and-conquer approach to
    /// calculating a CRC. Multiple threads can each calculate a
    /// CRC32 on a segment of the data, and then combine the
    /// individual CRC32 values at the end. (Same technique as zlib's
    /// crc32_combine.)
    /// </remarks>
    /// <param name="crc">the crc value to be combined with this one</param>
    /// <param name="length">the length of data the CRC value was calculated on</param>
    public void Combine(int crc, int length)
    {
        uint[] even = new uint[32];     // even-power-of-two zeros operator
        uint[] odd = new uint[32];      // odd-power-of-two zeros operator
        if (length == 0)
            return;
        uint crc1 = ~_register;
        uint crc2 = (uint)crc;
        // put operator for one zero bit in odd
        odd[0] = this.dwPolynomial;  // the CRC-32 polynomial
        uint row = 1;
        for (int i = 1; i < 32; i++)
        {
            odd[i] = row;
            row <<= 1;
        }
        // put operator for two zero bits in even
        gf2_matrix_square(even, odd);
        // put operator for four zero bits in odd
        gf2_matrix_square(odd, even);
        uint len2 = (uint)length;
        // apply len2 zeros to crc1 (first square will put the operator for one
        // zero byte, eight zero bits, in even)
        do
        {
            // apply zeros operator for this bit of len2
            gf2_matrix_square(even, odd);
            if ((len2 & 1) == 1)
                crc1 = gf2_matrix_times(even, crc1);
            len2 >>= 1;
            if (len2 == 0)
                break;
            // another iteration of the loop with odd and even swapped
            gf2_matrix_square(odd, even);
            if ((len2 & 1) == 1)
                crc1 = gf2_matrix_times(odd, crc1);
            len2 >>= 1;
        } while (len2 != 0);
        crc1 ^= crc2;
        _register = ~crc1;
        return;
    }

    /// <summary>
    /// Create an instance of the CRC32 class using the default settings: no
    /// bit reversal, and a polynomial of 0xEDB88320.
    /// </summary>
    public CRC32() : this(false)
    {
    }

    /// <summary>
    /// Create an instance of the CRC32 class, specifying whether to reverse
    /// data bits or not.
    /// </summary>
    /// <param name='reverseBits'>
    /// specify true if the instance should reverse data bits.
    /// </param>
    /// <remarks>
    /// <para>
    /// In the CRC-32 used by BZip2, the bits are reversed. Therefore if you
    /// want a CRC32 with compatibility with BZip2, you should pass true
    /// here. In the CRC-32 used by GZIP and PKZIP, the bits are not
    /// reversed; Therefore if you want a CRC32 with compatibility with
    /// those, you should pass false.
    /// </para>
    /// </remarks>
    public CRC32(bool reverseBits) :
        this(unchecked((int)0xEDB88320), reverseBits)
    {
    }

    /// <summary>
    /// Create an instance of the CRC32 class, specifying the polynomial and
    /// whether to reverse data bits or not.
    /// </summary>
    /// <param name='polynomial'>
    /// The polynomial to use for the CRC, expressed in the reversed (LSB)
    /// format: the highest ordered bit in the polynomial value is the
    /// coefficient of the 0th power; the second-highest order bit is the
    /// coefficient of the 1 power, and so on. Expressed this way, the
    /// polynomial for the CRC-32 used in IEEE 802.3 (Ethernet/ZIP/GZIP) is
    /// 0xEDB88320. (The original comment said "CRC-32C", which is the
    /// Castagnoli variant, 0x82F63B78 - a different polynomial.)
    /// </param>
    /// <param name='reverseBits'>
    /// specify true if the instance should reverse data bits.
    /// </param>
    ///
    /// <remarks>
    /// <para>
    /// In the CRC-32 used by BZip2, the bits are reversed. Therefore if you
    /// want a CRC32 with compatibility with BZip2, you should pass true
    /// here for the <c>reverseBits</c> parameter. In the CRC-32 used by
    /// GZIP and PKZIP, the bits are not reversed; Therefore if you want a
    /// CRC32 with compatibility with those, you should pass false for the
    /// <c>reverseBits</c> parameter.
    /// </para>
    /// </remarks>
    public CRC32(int polynomial, bool reverseBits)
    {
        this.reverseBits = reverseBits;
        this.dwPolynomial = (uint)polynomial;
        this.GenerateLookupTable();
    }

    /// <summary>
    /// Reset the CRC-32 class - clear the CRC "remainder register."
    /// </summary>
    /// <remarks>
    /// <para>
    /// Use this when employing a single instance of this class to compute
    /// multiple, distinct CRCs on multiple, distinct data blocks.
    /// </para>
    /// </remarks>
    public void Reset()
    {
        _register = 0xFFFFFFFFU;
    }

    // private member vars
    private UInt32 dwPolynomial;
    private Int64 _TotalBytesRead;
    private bool reverseBits;
    private UInt32[] crc32Table;
    private const int BUFFER_SIZE = 8192;
    // Running CRC register, stored inverted (pre-conditioned to all ones).
    private UInt32 _register = 0xFFFFFFFFU;
}
/// <summary>
/// A Stream that calculates a CRC32 (a checksum) on all bytes read,
/// or on all bytes written.
/// </summary>
///
/// <remarks>
/// <para>
/// This class can be used to verify the CRC of a ZipEntry when
/// reading from a stream, or to calculate a CRC when writing to a
/// stream. The stream should be used to either read, or write, but
/// not both. If you intermix reads and writes, the results are not
/// defined.
/// </para>
///
/// <para>
/// This class is intended primarily for use internally by the
/// DotNetZip library.
/// </para>
/// </remarks>
internal class CrcCalculatorStream : System.IO.Stream, System.IDisposable
{
    // Sentinel meaning "no length limit was specified". Kept as a (negative)
    // magic value because the private ctor relies on it to skip validation.
    private static readonly Int64 UnsetLengthLimit = -99;
    internal System.IO.Stream _innerStream;
    private CRC32 _Crc32;
    // Initialized from the sentinel rather than a duplicated literal -99 so
    // the two can never drift apart.
    private Int64 _lengthLimit = UnsetLengthLimit;
    private bool _leaveOpen;

    /// <summary>
    /// The default constructor.
    /// </summary>
    /// <remarks>
    /// <para>
    /// Instances returned from this constructor will leave the underlying
    /// stream open upon Close(). The stream uses the default CRC32
    /// algorithm, which implies a polynomial of 0xEDB88320.
    /// </para>
    /// </remarks>
    /// <param name="stream">The underlying stream</param>
    public CrcCalculatorStream(System.IO.Stream stream)
        : this(true, CrcCalculatorStream.UnsetLengthLimit, stream, null)
    {
    }

    /// <summary>
    /// The constructor allows the caller to specify how to handle the
    /// underlying stream at close.
    /// </summary>
    /// <remarks>
    /// <para>
    /// The stream uses the default CRC32 algorithm, which implies a
    /// polynomial of 0xEDB88320.
    /// </para>
    /// </remarks>
    /// <param name="stream">The underlying stream</param>
    /// <param name="leaveOpen">true to leave the underlying stream
    /// open upon close of the <c>CrcCalculatorStream</c>; false otherwise.</param>
    public CrcCalculatorStream(System.IO.Stream stream, bool leaveOpen)
        : this(leaveOpen, CrcCalculatorStream.UnsetLengthLimit, stream, null)
    {
    }

    /// <summary>
    /// A constructor allowing the specification of the length of the stream
    /// to read.
    /// </summary>
    /// <remarks>
    /// <para>
    /// The stream uses the default CRC32 algorithm, which implies a
    /// polynomial of 0xEDB88320.
    /// </para>
    /// <para>
    /// Instances returned from this constructor will leave the underlying
    /// stream open upon Close().
    /// </para>
    /// </remarks>
    /// <param name="stream">The underlying stream</param>
    /// <param name="length">The length of the stream to slurp</param>
    /// <exception cref="ArgumentOutOfRangeException">when length is negative</exception>
    public CrcCalculatorStream(System.IO.Stream stream, Int64 length)
        : this(true, length, stream, null)
    {
        // Fix: the original threw ArgumentException with "length" as the
        // *message*; ArgumentOutOfRangeException with the parameter name is
        // correct and still derives from ArgumentException for old callers.
        if (length < 0)
            throw new ArgumentOutOfRangeException("length");
    }

    /// <summary>
    /// A constructor allowing the specification of the length of the stream
    /// to read, as well as whether to keep the underlying stream open upon
    /// Close().
    /// </summary>
    /// <remarks>
    /// <para>
    /// The stream uses the default CRC32 algorithm, which implies a
    /// polynomial of 0xEDB88320.
    /// </para>
    /// </remarks>
    /// <param name="stream">The underlying stream</param>
    /// <param name="length">The length of the stream to slurp</param>
    /// <param name="leaveOpen">true to leave the underlying stream
    /// open upon close of the <c>CrcCalculatorStream</c>; false otherwise.</param>
    /// <exception cref="ArgumentOutOfRangeException">when length is negative</exception>
    public CrcCalculatorStream(System.IO.Stream stream, Int64 length, bool leaveOpen)
        : this(leaveOpen, length, stream, null)
    {
        if (length < 0)
            throw new ArgumentOutOfRangeException("length");
    }

    /// <summary>
    /// A constructor allowing the specification of the length of the stream
    /// to read, as well as whether to keep the underlying stream open upon
    /// Close(), and the CRC32 instance to use.
    /// </summary>
    /// <remarks>
    /// <para>
    /// The stream uses the specified CRC32 instance, which allows the
    /// application to specify how the CRC gets calculated.
    /// </para>
    /// </remarks>
    /// <param name="stream">The underlying stream</param>
    /// <param name="length">The length of the stream to slurp</param>
    /// <param name="leaveOpen">true to leave the underlying stream
    /// open upon close of the <c>CrcCalculatorStream</c>; false otherwise.</param>
    /// <param name="crc32">the CRC32 instance to use to calculate the CRC32</param>
    /// <exception cref="ArgumentOutOfRangeException">when length is negative</exception>
    public CrcCalculatorStream(System.IO.Stream stream, Int64 length, bool leaveOpen,
                               CRC32 crc32)
        : this(leaveOpen, length, stream, crc32)
    {
        if (length < 0)
            throw new ArgumentOutOfRangeException("length");
    }

    // This ctor is private - no validation is done here. This is to allow the use
    // of a (specific) negative value for the _lengthLimit, to indicate that there
    // is no length set. So we validate the length limit in those ctors that use an
    // explicit param, otherwise we don't validate, because it could be our special
    // value.
    private CrcCalculatorStream
        (bool leaveOpen, Int64 length, System.IO.Stream stream, CRC32 crc32)
        : base()
    {
        _innerStream = stream;
        _Crc32 = crc32 ?? new CRC32();
        _lengthLimit = length;
        _leaveOpen = leaveOpen;
    }

    /// <summary>
    /// Gets the total number of bytes run through the CRC32 calculator.
    /// </summary>
    ///
    /// <remarks>
    /// This is either the total number of bytes read, or the total number of
    /// bytes written, depending on the direction of this stream.
    /// </remarks>
    public Int64 TotalBytesSlurped
    {
        get { return _Crc32.TotalBytesRead; }
    }

    /// <summary>
    /// Provides the current CRC for all blocks slurped in.
    /// </summary>
    /// <remarks>
    /// <para>
    /// The running total of the CRC is kept as data is written or read
    /// through the stream. read this property after all reads or writes to
    /// get an accurate CRC for the entire stream.
    /// </para>
    /// </remarks>
    public Int32 Crc
    {
        get { return _Crc32.Crc32Result; }
    }

    /// <summary>
    /// Indicates whether the underlying stream will be left open when the
    /// <c>CrcCalculatorStream</c> is Closed.
    /// </summary>
    /// <remarks>
    /// <para>
    /// Set this at any point before calling <see cref="Close()"/>.
    /// </para>
    /// </remarks>
    public bool LeaveOpen
    {
        get { return _leaveOpen; }
        set { _leaveOpen = value; }
    }

    /// <summary>
    /// Read from the stream
    /// </summary>
    /// <param name="buffer">the buffer to read</param>
    /// <param name="offset">the offset at which to start</param>
    /// <param name="count">the number of bytes to read</param>
    /// <returns>the number of bytes actually read</returns>
    public override int Read(byte[] buffer, int offset, int count)
    {
        int bytesToRead = count;
        // Need to limit the # of bytes returned, if the stream is intended to have
        // a definite length. This is especially useful when returning a stream for
        // the uncompressed data directly to the application. The app won't
        // necessarily read only the UncompressedSize number of bytes. For example
        // wrapping the stream returned from OpenReader() into a StreamReader() and
        // calling ReadToEnd() on it, We can "over-read" the zip data and get a
        // corrupt string. The length limits that, prevents that problem.
        if (_lengthLimit != CrcCalculatorStream.UnsetLengthLimit)
        {
            if (_Crc32.TotalBytesRead >= _lengthLimit) return 0; // EOF
            Int64 bytesRemaining = _lengthLimit - _Crc32.TotalBytesRead;
            if (bytesRemaining < count) bytesToRead = (int)bytesRemaining;
        }
        int n = _innerStream.Read(buffer, offset, bytesToRead);
        if (n > 0) _Crc32.SlurpBlock(buffer, offset, n);
        return n;
    }

    /// <summary>
    /// Write to the stream.
    /// </summary>
    /// <param name="buffer">the buffer from which to write</param>
    /// <param name="offset">the offset at which to start writing</param>
    /// <param name="count">the number of bytes to write</param>
    public override void Write(byte[] buffer, int offset, int count)
    {
        if (count > 0) _Crc32.SlurpBlock(buffer, offset, count);
        _innerStream.Write(buffer, offset, count);
    }

    /// <summary>
    /// Indicates whether the stream supports reading.
    /// </summary>
    public override bool CanRead
    {
        get { return _innerStream.CanRead; }
    }

    /// <summary>
    /// Indicates whether the stream supports seeking. Always false.
    /// </summary>
    public override bool CanSeek
    {
        get { return false; }
    }

    /// <summary>
    /// Indicates whether the stream supports writing.
    /// </summary>
    public override bool CanWrite
    {
        get { return _innerStream.CanWrite; }
    }

    /// <summary>
    /// Flush the stream.
    /// </summary>
    public override void Flush()
    {
        _innerStream.Flush();
    }

    /// <summary>
    /// Returns the length of the underlying stream, or the length limit if
    /// one was specified at construction.
    /// </summary>
    public override long Length
    {
        get
        {
            if (_lengthLimit == CrcCalculatorStream.UnsetLengthLimit)
                return _innerStream.Length;
            else return _lengthLimit;
        }
    }

    /// <summary>
    /// The getter for this property returns the total bytes read.
    /// If you use the setter, it will throw
    /// <see cref="NotSupportedException"/>.
    /// </summary>
    public override long Position
    {
        get { return _Crc32.TotalBytesRead; }
        set { throw new NotSupportedException(); }
    }

    /// <summary>
    /// Seeking is not supported on this stream. This method always throws
    /// <see cref="NotSupportedException"/>
    /// </summary>
    /// <param name="offset">N/A</param>
    /// <param name="origin">N/A</param>
    /// <returns>N/A</returns>
    public override long Seek(long offset, System.IO.SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    /// <summary>
    /// This method always throws
    /// <see cref="NotSupportedException"/>
    /// </summary>
    /// <param name="value">N/A</param>
    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    void IDisposable.Dispose()
    {
        Close();
    }

    /// <summary>
    /// Closes the stream, closing the underlying stream unless LeaveOpen is set.
    /// </summary>
    public override void Close()
    {
        base.Close();
        if (!_leaveOpen)
            _innerStream.Close();
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Media3D;
using System.Windows.Threading;
using System.Windows.Documents;
using MS.Internal;
using MS.Internal.PresentationFramework; // SafeSecurityHelper
namespace System.Windows
{
internal static class BroadcastEventHelper
{
/// <summary>
/// Add the loaded callback to the MediaContext queue
/// </summary>
internal static void AddLoadedCallback(DependencyObject d, DependencyObject logicalParent)
{
    Debug.Assert(d is FrameworkElement || d is FrameworkContentElement);
    DispatcherOperationCallback callback = new DispatcherOperationCallback(BroadcastEventHelper.BroadcastLoadedEvent);
    // Register with the MediaContext's pending LoadedOrUnloadedCallbacks
    // list so the broadcast fires before the next render.
    LoadedOrUnloadedOperation pendingOp = MediaContext.From(d.Dispatcher).AddLoadedOrUnloadedCallback(callback, d);
    // Also post to the dispatcher queue as a fallback, in case the tree
    // change never triggers a layout pass.
    DispatcherOperation dispatcherOp = d.Dispatcher.BeginInvoke(DispatcherPriority.Loaded, callback, d);
    // Record the pending state as [media-context op, dispatcher op, logical parent].
    d.SetValue(FrameworkElement.LoadedPendingPropertyKey, new object[] { pendingOp, dispatcherOp, logicalParent });
}
/// <summary>
/// Remove the loaded callback from the MediaContext queue
/// </summary>
internal static void RemoveLoadedCallback(DependencyObject d, object[] loadedPending)
{
    Debug.Assert(d is FrameworkElement || d is FrameworkContentElement);
    // Nothing to tear down when no Loaded broadcast is pending.
    if (loadedPending == null)
    {
        return;
    }
    Debug.Assert(loadedPending.Length == 3);
    // Clear the LoadedPending property.
    d.ClearValue(FrameworkElement.LoadedPendingPropertyKey);
    // Abort the backup dispatcher operation if it has not yet run.
    DispatcherOperation dispatcherOp = (DispatcherOperation)loadedPending[1];
    if (dispatcherOp.Status == DispatcherOperationStatus.Pending)
    {
        dispatcherOp.Abort();
    }
    // Unregister from the MediaContext's pending LoadedOrUnloadedCallbacks list.
    MediaContext.From(d.Dispatcher).RemoveLoadedOrUnloadedCallback((LoadedOrUnloadedOperation)loadedPending[0]);
}
/// <summary>
/// Add the unloaded callback to the MediaContext queue
/// </summary>
internal static void AddUnloadedCallback(DependencyObject d, DependencyObject logicalParent)
{
    Debug.Assert(d is FrameworkElement || d is FrameworkContentElement);
    DispatcherOperationCallback callback = new DispatcherOperationCallback(BroadcastEventHelper.BroadcastUnloadedEvent);
    // Register with the MediaContext's pending LoadedOrUnloadedCallbacks
    // list so the broadcast fires before the next render.
    LoadedOrUnloadedOperation pendingOp = MediaContext.From(d.Dispatcher).AddLoadedOrUnloadedCallback(callback, d);
    // Also post to the dispatcher queue as a fallback, in case the tree
    // change never triggers a layout pass.
    DispatcherOperation dispatcherOp = d.Dispatcher.BeginInvoke(DispatcherPriority.Loaded, callback, d);
    // Record the pending state as [media-context op, dispatcher op, logical parent].
    d.SetValue(FrameworkElement.UnloadedPendingPropertyKey, new object[] { pendingOp, dispatcherOp, logicalParent });
}
/// <summary>
/// Remove the unloaded callback from the MediaContext queue
/// </summary>
internal static void RemoveUnloadedCallback(DependencyObject d, object[] unloadedPending)
{
    Debug.Assert(d is FrameworkElement || d is FrameworkContentElement);
    // Nothing to tear down when no Unloaded broadcast is pending.
    if (unloadedPending == null)
    {
        return;
    }
    Debug.Assert(unloadedPending.Length == 3);
    // Clear the UnloadedPending property.
    d.ClearValue(FrameworkElement.UnloadedPendingPropertyKey);
    // Abort the backup dispatcher operation if it has not yet run.
    DispatcherOperation dispatcherOp = (DispatcherOperation)unloadedPending[1];
    if (dispatcherOp.Status == DispatcherOperationStatus.Pending)
    {
        dispatcherOp.Abort();
    }
    // Unregister from the MediaContext's pending LoadedOrUnloadedCallbacks list.
    MediaContext.From(d.Dispatcher).RemoveLoadedOrUnloadedCallback((LoadedOrUnloadedOperation)unloadedPending[0]);
}
/// <summary>
/// Fire the [Loaded/Unloaded] broadcast events based upon the old and new parent values.
/// This method is called from ChangeLogicalParent() and OnVisualParentChanged().
/// </summary>
/// <param name="d">
/// Node to begin the broadcast
/// </param>
/// <param name="oldParent">
/// Old Parent
/// </param>
/// <param name="newParent">
/// New Parent
/// </param>
internal static void BroadcastLoadedOrUnloadedEvent(
    DependencyObject d,
    DependencyObject oldParent,
    DependencyObject newParent)
{
    if (oldParent == null && newParent != null)
    {
        // Added to a tree: broadcast Loaded only if the new parent is
        // itself loaded. This happens when the node is attached to its
        // loaded visual parent.
        if (IsLoadedHelper(newParent))
        {
            FireLoadedOnDescendentsHelper(d);
        }
    }
    else if (oldParent != null && newParent == null)
    {
        // Removed from a tree: broadcast Unloaded only if the old parent
        // was loaded. This happens when the node is detached from its
        // loaded visual parent.
        if (IsLoadedHelper(oldParent))
        {
            FireUnloadedOnDescendentsHelper(d);
        }
    }
}
/// <summary>
/// Broadcast the Loaded event when UI is rendered and ready for user interaction.
/// </summary>
/// <param name="root">
/// Root of the sub-tree that the broadcast will start at
/// </param>
internal static object BroadcastLoadedEvent(object root)
{
    DependencyObject rootDO = (DependencyObject)root;
    object[] pending = (object[])rootDO.GetValue(FrameworkElement.LoadedPendingProperty);
    Debug.Assert(pending != null && pending.Length == 3,
        "The LoadedPendingProperty must be set if we have reached this far");
    // Capture the loaded state before tearing down the pending record.
    bool wasLoaded = IsLoadedHelper(rootDO);
    // Remove the Loaded callback from the MediaContext's queue.
    RemoveLoadedCallback(rootDO, pending);
    BroadcastLoadedSynchronously(rootDO, wasLoaded);
    return null;
}
internal static void BroadcastLoadedSynchronously(DependencyObject rootDO, bool isLoaded)
{
    // A parent's Loaded broadcast may already have loaded this node before
    // its own queued item was dequeued; in that case there is nothing to do.
    if (isLoaded)
    {
        return;
    }
    BroadcastEventHelper.BroadcastEvent(rootDO, FrameworkElement.LoadedEvent);
}
/// <summary>
/// Broadcast the Unloaded event when the element is detached from a Loaded Tree
/// </summary>
/// <param name="root">
/// Root of the sub-tree that the broadcast will start at
/// </param>
internal static object BroadcastUnloadedEvent(object root)
{
    DependencyObject rootDO = (DependencyObject)root;
    object[] pending = (object[])rootDO.GetValue(FrameworkElement.UnloadedPendingProperty);
    Debug.Assert(pending != null && pending.Length == 3,
        "The UnloadedPendingProperty must be set if we have reached this far");
    // Capture the loaded state before tearing down the pending record.
    bool wasLoaded = IsLoadedHelper(rootDO);
    // Remove the Unloaded callback from the MediaContext's queue.
    RemoveUnloadedCallback(rootDO, pending);
    BroadcastUnloadedSynchronously(rootDO, wasLoaded);
    return null;
}
internal static void BroadcastUnloadedSynchronously(DependencyObject rootDO, bool isLoaded)
{
    // A parent's Unloaded broadcast may already have unloaded this node
    // before its own queued item was dequeued; nothing to do in that case.
    if (!isLoaded)
    {
        return;
    }
    BroadcastEvent(rootDO, FrameworkElement.UnloadedEvent);
}
// Shared callback invoked for each node visited by the DescendentsWalker
// during a Loaded/Unloaded broadcast (see OnBroadcastCallback).
private static VisitedCallback<BroadcastEventData> BroadcastDelegate = new VisitedCallback<BroadcastEventData>(OnBroadcastCallback);
// Payload threaded through the tree walk: the broadcast root, the routed
// event being raised, and the list that accumulates the nodes to raise it on.
private struct BroadcastEventData
{
internal BroadcastEventData(DependencyObject root, RoutedEvent routedEvent, List<DependencyObject> eventRoute)
{
Root = root;
RoutedEvent = routedEvent;
EventRoute = eventRoute;
}
// Root of the sub-tree being broadcast to.
internal DependencyObject Root;
// The routed event (Loaded or Unloaded) being broadcast.
internal RoutedEvent RoutedEvent;
// Collects, in walk order, the nodes on which the event must be raised.
internal List<DependencyObject> EventRoute;
}
/// <summary>
/// Broadcast the Loaded/Unloaded event in the sub-tree starting at the given root
/// </summary>
/// <param name="root">
/// Root of the sub-tree that the event will be broadcast to
/// </param>
/// <param name="routedEvent">
/// RoutedEventID for the event we wish to broadcast
/// </param>
private static void BroadcastEvent(DependencyObject root, RoutedEvent routedEvent)
{
    // Phase 1: walk the sub-tree and collect every node that should
    // receive the event.
    List<DependencyObject> targets = new List<DependencyObject>();
    DescendentsWalker<BroadcastEventData> walker = new DescendentsWalker<BroadcastEventData>(
        TreeWalkPriority.VisualTree, BroadcastDelegate, new BroadcastEventData(root, routedEvent, targets));
    walker.StartWalk(root);
    // Phase 2: raise the event on each collected node, in walk order.
    bool isLoadedEvent = (routedEvent == FrameworkElement.LoadedEvent);
    foreach (DependencyObject d in targets)
    {
        RoutedEventArgs args = new RoutedEventArgs(routedEvent, d);
        FrameworkObject fo = new FrameworkObject(d, true /*throwIfNeither*/);
        if (isLoadedEvent)
        {
            fo.OnLoaded(args);
        }
        else
        {
            fo.OnUnloaded(args);
        }
    }
}
// Callback on visiting each node in the descendency during a broadcast event.
// Returns true to continue the walk into d's subtree, false to prune the walk
// at d. Nodes that should have the event raised on them are appended to
// data.EventRoute; the actual raise happens afterwards in BroadcastEvent.
// NOTE: visitedViaVisualTree is required by the VisitedCallback signature but
// is not used here.
private static bool OnBroadcastCallback(DependencyObject d, BroadcastEventData data, bool visitedViaVisualTree)
{
DependencyObject root = data.Root;
RoutedEvent routedEvent = data.RoutedEvent;
List<DependencyObject> eventRoute = data.EventRoute;
if (FrameworkElement.DType.IsInstanceOfType(d))
{
// If this is a FrameworkElement
FrameworkElement fe = (FrameworkElement)d;
if (fe != root && routedEvent == FrameworkElement.LoadedEvent && fe.UnloadedPending != null)
{
// If there is a pending Unloaded event wait till we've broadcast
// that event before we can fire the new Loaded event.
fe.FireLoadedOnDescendentsInternal();
}
else if (fe != root && routedEvent == FrameworkElement.UnloadedEvent && fe.LoadedPending != null)
{
// If there is a pending Loaded event abort it because we are now
// being Unloaded.
RemoveLoadedCallback(fe, fe.LoadedPending);
}
else
{
// The root itself never has its pending callbacks cancelled here;
// only descendents are checked below.
if (fe != root)
{
if (routedEvent == FrameworkElement.LoadedEvent && fe.LoadedPending != null)
{
// If there is a pending Loaded event abort it because we are now
// being Loaded.
RemoveLoadedCallback(fe, fe.LoadedPending);
}
else if (routedEvent == FrameworkElement.UnloadedEvent && fe.UnloadedPending != null)
{
// If there is a pending Unloaded event abort it because we are now
// being Unloaded.
RemoveUnloadedCallback(fe, fe.UnloadedPending);
}
}
// If element has handlers fire the event and continue to walk down the tree
if (fe.SubtreeHasLoadedChangeHandler)
{
// We cannot assert this condition here for the following reason.
// If the [Un]LoadedHandler is added to the current node after the parent
// for this node has been [Un]Loaded but before the current node has been [Un]Loaded
// (example: within the [Un]Loaded handler for the parent), then the IsLoaded
// cache on the current node has been updated to match that of the parent,
// and this Assert will be violated. See BroadcastEventHelper.UpdateHasHandlerFlag
// for further description.
// Debug.Assert(IsLoaded == [false/true],
// "Element should have been [Un]loaded before it is [Un]Loaded back again");
fe.IsLoadedCache = (routedEvent == FrameworkElement.LoadedEvent);
eventRoute.Add(fe);
// Continue walk down subtree
return true;
}
}
}
else
{
// If this is a FrameworkContentElement
// (mirror of the FrameworkElement branch above)
FrameworkContentElement fce = (FrameworkContentElement)d;
if (fce != root && routedEvent == FrameworkElement.LoadedEvent && fce.UnloadedPending != null)
{
// If there is a pending Unloaded event wait till we've broadcast
// that event before we can fire the new Loaded event.
fce.FireLoadedOnDescendentsInternal();
}
else if (fce != root && routedEvent == FrameworkElement.UnloadedEvent && fce.LoadedPending != null)
{
// If there is a pending Loaded event abort it because we are now
// being Unloaded.
RemoveLoadedCallback(fce, fce.LoadedPending);
}
else
{
if (fce != root)
{
if (routedEvent == FrameworkElement.LoadedEvent && fce.LoadedPending != null)
{
// If there is a pending Loaded event abort it because we are now
// being Loaded.
RemoveLoadedCallback(fce, fce.LoadedPending);
}
else if (routedEvent == FrameworkElement.UnloadedEvent && fce.UnloadedPending != null)
{
// If there is a pending Unloaded event abort it because we are now
// being Unloaded.
RemoveUnloadedCallback(fce, fce.UnloadedPending);
}
}
// If element has handlers fire the event and continue to walk down the tree
if (fce.SubtreeHasLoadedChangeHandler)
{
// We cannot assert this condition here for the following reason.
// If the [Un]LoadedHandler is added to the current node after the parent
// for this node has been [Un]Loaded but before the current node has been [Un]Loaded
// (example: within the [Un]Loaded handler for the parent), then the IsLoaded
// cache on the current node has been updated to match that of the parent,
// and this Assert will be violated. See BroadcastEventHelper.UpdateHasHandlerFlag
// for further description.
// Debug.Assert(IsLoaded == [false/true],
// "Element should have been [Un]loaded before it is [Un]Loaded back again");
fce.IsLoadedCache = (routedEvent == FrameworkElement.LoadedEvent);
eventRoute.Add(fce);
// Continue walk down subtree
return true;
}
}
}
// Stop walk down subtree
return false;
}
// Reads the SubtreeHasLoadedChangeHandler flag for an arbitrary DependencyObject.
// Only FE/FCE nodes carry the flag; any other node reports false.
private static bool SubtreeHasLoadedChangeHandlerHelper(DependencyObject d)
{
    if (FrameworkElement.DType.IsInstanceOfType(d))
    {
        return ((FrameworkElement)d).SubtreeHasLoadedChangeHandler;
    }

    if (FrameworkContentElement.DType.IsInstanceOfType(d))
    {
        return ((FrameworkContentElement)d).SubtreeHasLoadedChangeHandler;
    }

    return false;
}
// Dispatches FireLoadedOnDescendentsInternal to the FE or FCE flavor of d.
// NOTE(review): the else branch assumes any non-FE node here is an FCE — the
// cast will throw otherwise; callers appear to guarantee this.
private static void FireLoadedOnDescendentsHelper(DependencyObject d)
{
    if (FrameworkElement.DType.IsInstanceOfType(d))
    {
        ((FrameworkElement)d).FireLoadedOnDescendentsInternal();
        return;
    }

    ((FrameworkContentElement)d).FireLoadedOnDescendentsInternal();
}
// Dispatches FireUnloadedOnDescendentsInternal to the FE or FCE flavor of d.
// NOTE(review): the else branch assumes any non-FE node here is an FCE — the
// cast will throw otherwise; callers appear to guarantee this.
private static void FireUnloadedOnDescendentsHelper(DependencyObject d)
{
    if (FrameworkElement.DType.IsInstanceOfType(d))
    {
        ((FrameworkElement)d).FireUnloadedOnDescendentsInternal();
        return;
    }

    ((FrameworkContentElement)d).FireUnloadedOnDescendentsInternal();
}
// Reports the IsLoaded state of an arbitrary DependencyObject by wrapping it
// in a FrameworkObject, which resolves the FE/FCE flavor internally.
private static bool IsLoadedHelper(DependencyObject d)
{
    return new FrameworkObject(d).IsLoaded;
}
// Helper that determines loaded state by querying up the tree. This is only
// invoked when the IsLoaded cache on the given node is not valid.
internal static bool IsParentLoaded(DependencyObject d)
{
    FrameworkObject fo = new FrameworkObject(d);
    DependencyObject parent = fo.EffectiveParent;

    if (parent != null)
    {
        // Defer to the parent's loaded state.
        return IsLoadedHelper(parent);
    }

    Visual rootVisual = d as Visual;
    if (rootVisual != null)
    {
        // No parent means this is the root element; it is loaded iff it is
        // connected to a PresentationSource.
        return SafeSecurityHelper.IsConnectedToPresentationSource(rootVisual);
    }

    Visual3D rootVisual3D = d as Visual3D;
    if (rootVisual3D != null)
    {
        // IsConnectedToPresentationSource only takes a Visual, so check the
        // 2D visual that contains this 3D object instead.
        Visual containing2D = VisualTreeHelper.GetContainingVisual2D(rootVisual3D);
        return containing2D != null && SafeSecurityHelper.IsConnectedToPresentationSource(containing2D);
    }

    return false;
}
/// <summary>
/// Returns the root FrameworkElementFactory of the VisualTree of the template
/// that created the given templated parent. (The previous summary incorrectly
/// described this as a handler check.)
/// </summary>
/// <param name="templatedParent">
/// The caller must pass in a non-null templatedParent; the assert below also
/// requires it to be a FrameworkElement with a template.
/// </param>
internal static FrameworkElementFactory GetFEFTreeRoot(DependencyObject templatedParent)
{
FrameworkObject fo = new FrameworkObject(templatedParent, true);
Debug.Assert( fo.IsFE );
FrameworkTemplate templatedParentTemplate = fo.FE.TemplateInternal;
FrameworkElementFactory fefTree = templatedParentTemplate.VisualTree;
return fefTree;
}
/// <summary>
/// Update the Has[Loaded/UnLoaded]Handler flags if required.
/// This method is called from OnNewParent/OnVisualParentChanged.
/// </summary>
/// <param name="d">
/// Node whose parent changed
/// </param>
/// <param name="oldParent">
/// Old Parent (null when attaching)
/// </param>
/// <param name="newParent">
/// New Parent (null when detaching)
/// </param>
internal static void AddOrRemoveHasLoadedChangeHandlerFlag(
    DependencyObject d,
    DependencyObject oldParent,
    DependencyObject newParent)
{
    // Only sub-trees that actually contain a Loaded/Unloaded change handler
    // need to push the flag into the new (or out of the old) ancestry.
    if (!SubtreeHasLoadedChangeHandlerHelper(d))
    {
        return;
    }

    if (oldParent == null && newParent != null)
    {
        // Attaching to a parent: a sub-tree with a handler got added.
        AddHasLoadedChangeHandlerFlagInAncestry(newParent);
    }
    else if (oldParent != null && newParent == null)
    {
        // Detaching from a parent: a sub-tree with a handler got removed.
        RemoveHasLoadedChangeHandlerFlagInAncestry(oldParent);
    }
}
// Propagates the HasLoadedChangeHandler flag up the ancestry of d (add side).
internal static void AddHasLoadedChangeHandlerFlagInAncestry(DependencyObject d)
{
    UpdateHasLoadedChangeHandlerFlagInAncestry(d, /*addHandler*/ true);
}

// Propagates the HasLoadedChangeHandler flag up the ancestry of d (remove side).
internal static void RemoveHasLoadedChangeHandlerFlagInAncestry(DependencyObject d)
{
    UpdateHasLoadedChangeHandlerFlagInAncestry(d, /*addHandler*/ false);
}
/// <summary>
/// Evaluate the HasLoadedChangeHandler flag for the given node by checking
/// the node itself and the flags cached on its visual and logical children.
/// </summary>
/// <param name="fo">
/// Node to examine; only FE/FCE nodes can carry the flag.
/// </param>
private static bool AreThereLoadedChangeHandlersInSubtree(ref FrameworkObject fo)
{
    // The HasHandler flag can be evaluated only for a FE/FCE.
    if (!fo.IsValid)
    {
        return false;
    }

    // The node itself has a Loaded/Unloaded change handler.
    if (fo.ThisHasLoadedChangeEventHandler)
    {
        return true;
    }

    // Does any visual child advertise a handler somewhere below it?
    if (fo.IsFE)
    {
        Visual visual = (Visual)fo.FE;
        int childCount = VisualTreeHelper.GetChildrenCount(visual);
        for (int index = 0; index < childCount; index++)
        {
            FrameworkElement feChild = VisualTreeHelper.GetChild(visual, index) as FrameworkElement;
            if (feChild != null && feChild.SubtreeHasLoadedChangeHandler)
            {
                return true;
            }
        }
    }

    // Does any logical child advertise a handler somewhere below it?
    foreach (object item in LogicalTreeHelper.GetChildren(fo.DO))
    {
        DependencyObject doChild = item as DependencyObject;
        if (doChild != null && SubtreeHasLoadedChangeHandlerHelper(doChild))
        {
            return true;
        }
    }

    return false;
}
/// <summary>
/// This is a recursive function that walks up the tree Adding or Removing
/// HasLoadedChangeHandler bits. It also inits the IsLoadedCache on Add.
/// </summary>
/// <param name="d">
/// Node to update
/// </param>
/// <param name="addHandler">
/// Is it an AddHandler/ Add Child with Handler Operation
/// </param>
private static void UpdateHasLoadedChangeHandlerFlagInAncestry(DependencyObject d, bool addHandler)
{
FrameworkObject fo = new FrameworkObject(d);
if (!addHandler)
{
// On removal, keep the flag if anything in this sub-tree still has a handler.
if ( AreThereLoadedChangeHandlersInSubtree(ref fo) )
return; // done
}
if (fo.IsValid)
{
// Only recurse further if the flag actually changes on this node;
// an unchanged flag means the ancestry is already up to date.
if (fo.SubtreeHasLoadedChangeHandler != addHandler)
{
DependencyObject coreParent = (fo.IsFE) ? VisualTreeHelper.GetParent(fo.FE) : null;
DependencyObject logicalParent = fo.Parent;
// 'parent' records which ancestor the IsLoaded cache should be pulled
// from on the add path (see UpdateIsLoadedCache call below).
DependencyObject parent = null;
fo.SubtreeHasLoadedChangeHandler = addHandler;
// Propagate the change to your visual ancestry
if (coreParent != null)
{
UpdateHasLoadedChangeHandlerFlagInAncestry(coreParent, addHandler);
parent = coreParent;
}
// Propagate the change to your logical ancestry
if (logicalParent != null && logicalParent != coreParent)
{
UpdateHasLoadedChangeHandlerFlagInAncestry(logicalParent, addHandler);
// For an FCE the logical parent wins as the IsLoaded source.
if (fo.IsFCE)
parent = logicalParent;
}
// Propagate the change to your mentor, if any
if (logicalParent == null && coreParent == null)
{
parent = Helper.FindMentor(fo.DO.InheritanceContext);
if (parent != null)
{
fo.ChangeSubtreeHasLoadedChangedHandler(parent);
}
}
if(addHandler)
{
// The HasLoadedChangeHandler flag is used for two purposes.
// 1. To indicate that the sub-tree starting at the current node has
// handlers for Loaded / Unloaded event. So broadcast logic
// can walk down that path to fire the events.
// 2. To indicate that the IsLoaded cache on the node is valid.
// If we are adding a handler:
// On the POP side of the recursion, as we come back down from the root,
// pull the value of IsLoadedCache from the parent in to the child.
if (fo.IsFE)
{
UpdateIsLoadedCache(fo.FE, parent);
}
else
{
UpdateIsLoadedCache(fo.FCE, parent);
}
}
}
}
else // neither a FE or an FCE
{
DependencyObject coreParent = null;
Visual v;
Visual3D v3D;
ContentElement ce;
// This is neither an FE nor and FCE
// Propagate the change to your visual ancestry
if ((v = d as Visual) != null)
{
coreParent = VisualTreeHelper.GetParent(v);
}
else if ((ce = d as ContentElement) != null)
{
coreParent = ContentOperations.GetParent(ce);
}
else if ((v3D = d as Visual3D) != null)
{
coreParent = VisualTreeHelper.GetParent(v3D);
}
if (coreParent != null)
{
UpdateHasLoadedChangeHandlerFlagInAncestry(coreParent, addHandler);
}
}
}
/// <summary>
/// Updates the IsLoadedCache on the current FrameworkElement.
/// </summary>
/// <param name="fe">Element whose cache is refreshed</param>
/// <param name="parent">Ancestor to inherit the loaded state from, or null at the root</param>
private static void UpdateIsLoadedCache(
    FrameworkElement fe,
    DependencyObject parent)
{
    if (fe.GetValue(FrameworkElement.LoadedPendingProperty) != null)
    {
        // A Loaded broadcast is still pending, so the cache cannot be valid yet.
        fe.IsLoadedCache = false;
        return;
    }

    if (parent != null)
    {
        // Inherit the loaded state from the ancestor.
        fe.IsLoadedCache = IsLoadedHelper(parent);
    }
    else
    {
        // This is the root visual: loaded iff connected to a PresentationSource.
        fe.IsLoadedCache = SafeSecurityHelper.IsConnectedToPresentationSource(fe);
    }
}
/// <summary>
/// Updates the IsLoadedCache on the current FrameworkContentElement.
/// </summary>
/// <param name="fce">Element whose cache is refreshed</param>
/// <param name="parent">Logical ancestor to inherit the loaded state from</param>
private static void UpdateIsLoadedCache(
    FrameworkContentElement fce,
    DependencyObject parent)
{
    if (fce.GetValue(FrameworkElement.LoadedPendingProperty) != null)
    {
        // A Loaded broadcast is still pending, so the cache cannot be valid yet.
        fce.IsLoadedCache = false;
    }
    else
    {
        // Inherit the loaded state from the logical ancestry.
        fce.IsLoadedCache = IsLoadedHelper(parent);
    }
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.SolutionCrawler;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Versions;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.TodoComments
{
/// <summary>
/// Incremental analyzer that scans documents for TODO-style comments, persists
/// the results via TodoCommentState, and raises task-list-updated notifications.
/// </summary>
internal partial class TodoCommentIncrementalAnalyzer : IIncrementalAnalyzer
{
public const string Name = "Todo Comment Document Worker";
// Provider that owns this analyzer; used to raise task list updates (may be null in tests).
private readonly TodoCommentIncrementalAnalyzerProvider _owner;
private readonly Workspace _workspace;
private readonly IOptionService _optionService;
// Source of the comment tokens (e.g. "TODO") to search for.
private readonly TodoCommentTokens _todoCommentTokens;
// Per-document cache/persistence of previously computed items.
private readonly TodoCommentState _state;
public TodoCommentIncrementalAnalyzer(Workspace workspace, IOptionService optionService, TodoCommentIncrementalAnalyzerProvider owner, TodoCommentTokens todoCommentTokens)
{
_workspace = workspace;
_optionService = optionService;
_owner = owner;
_todoCommentTokens = todoCommentTokens;
_state = new TodoCommentState();
}
// Clears the in-memory cache for the document and persists an empty result
// (default versions) so stale data is not reused next session.
public Task DocumentResetAsync(Document document, CancellationToken cancellationToken)
{
// remove cache
_state.Remove(document.Id);
return _state.PersistAsync(document, new Data(VersionStamp.Default, VersionStamp.Default, ImmutableArray<TodoItem>.Empty), cancellationToken);
}
// Main analysis entry point: re-scans the document for TODO comments unless
// previously persisted data is still valid for the current text/syntax versions.
public async Task AnalyzeSyntaxAsync(Document document, CancellationToken cancellationToken)
{
Contract.ThrowIfFalse(document.IsFromPrimaryBranch());
// it has an assumption that this will not be called concurrently for same document.
// in fact, in current design, it won't be even called concurrently for different documents.
// but, can be called concurrently for different documents in future if we choose to.
if (!_optionService.GetOption(InternalFeatureOnOffOptions.TodoComments))
{
return;
}
// use tree version so that things like compiler option changes are considered
var textVersion = await document.GetTextVersionAsync(cancellationToken).ConfigureAwait(false);
var syntaxVersion = await document.GetSyntaxVersionAsync(cancellationToken).ConfigureAwait(false);
var existingData = await _state.TryGetExistingDataAsync(document, cancellationToken).ConfigureAwait(false);
if (existingData != null)
{
// check whether we can use the data as it is (can happen when re-using persisted data from previous VS session)
if (CheckVersions(document, textVersion, syntaxVersion, existingData))
{
Contract.Requires(_workspace == document.Project.Solution.Workspace);
RaiseTaskListUpdated(_workspace, document.Project.Solution, document.Id, existingData.Items);
return;
}
}
var service = document.GetLanguageService<ITodoCommentService>();
if (service == null)
{
// Language has no TODO-comment support; nothing to do.
return;
}
var comments = await service.GetTodoCommentsAsync(document, _todoCommentTokens.GetTokens(_workspace), cancellationToken).ConfigureAwait(false);
var items = await CreateItemsAsync(document, comments, cancellationToken).ConfigureAwait(false);
var data = new Data(textVersion, syntaxVersion, items);
await _state.PersistAsync(document, data, cancellationToken).ConfigureAwait(false);
// * NOTE * cancellation can't throw after this point.
// Only notify when something could have changed (old or new list non-empty,
// or we have never seen this document before).
if (existingData == null || existingData.Items.Length > 0 || data.Items.Length > 0)
{
Contract.Requires(_workspace == document.Project.Solution.Workspace);
RaiseTaskListUpdated(_workspace, document.Project.Solution, document.Id, data.Items);
}
}
// Converts raw TodoComment hits into TodoItems with resolved line/column and
// mapped (#line-aware) file positions.
private async Task<ImmutableArray<TodoItem>> CreateItemsAsync(Document document, IList<TodoComment> comments, CancellationToken cancellationToken)
{
var items = ImmutableArray.CreateBuilder<TodoItem>();
if (comments != null)
{
var text = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);
var syntaxTree = document.SupportsSyntaxTree ? await document.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false) : null;
foreach (var comment in comments)
{
items.Add(CreateItem(document, text, syntaxTree, comment));
}
}
return items.ToImmutable();
}
private TodoItem CreateItem(Document document, SourceText text, SyntaxTree tree, TodoComment comment)
{
// make sure given position is within valid text range.
var textSpan = new TextSpan(Math.Min(text.Length, Math.Max(0, comment.Position)), 0);
var location = tree == null ? Location.Create(document.FilePath, textSpan, text.Lines.GetLinePositionSpan(textSpan)) : tree.GetLocation(textSpan);
var originalLineInfo = location.GetLineSpan();
var mappedLineInfo = location.GetMappedLineSpan();
return new TodoItem(
comment.Descriptor.Priority,
comment.Message,
document.Project.Solution.Workspace,
document.Id,
mappedLine: mappedLineInfo.StartLinePosition.Line,
originalLine: originalLineInfo.StartLinePosition.Line,
mappedColumn: mappedLineInfo.StartLinePosition.Character,
originalColumn: originalLineInfo.StartLinePosition.Character,
mappedFilePath: mappedLineInfo.GetMappedFilePathIfExist(),
originalFilePath: document.FilePath);
}
// Returns the last computed items for the document, or empty when unknown.
public ImmutableArray<TodoItem> GetTodoItems(Workspace workspace, DocumentId id, CancellationToken cancellationToken)
{
var document = workspace.CurrentSolution.GetDocument(id);
if (document == null)
{
return ImmutableArray<TodoItem>.Empty;
}
// TODO let's think about what to do here. for now, let call it synchronously. also, there is no actual asynch-ness for the
// TryGetExistingDataAsync, API just happen to be async since our persistent API is async API. but both caller and implementor are
// actually not async.
var existingData = _state.TryGetExistingDataAsync(document, cancellationToken).WaitAndGetResult(cancellationToken);
if (existingData == null)
{
return ImmutableArray<TodoItem>.Empty;
}
return existingData.Items;
}
private static bool CheckVersions(Document document, VersionStamp textVersion, VersionStamp syntaxVersion, Data existingData)
{
// first check full version to see whether we can reuse data in same session, if we can't, check timestamp only version to see whether
// we can use it cross-session.
return document.CanReusePersistedTextVersion(textVersion, existingData.TextVersion) &&
document.CanReusePersistedSyntaxTreeVersion(syntaxVersion, existingData.SyntaxVersion);
}
internal ImmutableArray<TodoItem> GetItems_TestingOnly(DocumentId documentId)
{
return _state.GetItems_TestingOnly(documentId);
}
// Forwards a task-list update to the owning provider, if any.
private void RaiseTaskListUpdated(Workspace workspace, Solution solution, DocumentId documentId, ImmutableArray<TodoItem> items)
{
if (_owner != null)
{
_owner.RaiseTaskListUpdated(documentId, workspace, solution, documentId.ProjectId, documentId, items);
}
}
// Drops cached state for the document and clears its task-list entries.
// NOTE(review): passes a null solution to the update — presumably allowed by
// the provider for removal notifications; confirm against the owner's contract.
public void RemoveDocument(DocumentId documentId)
{
_state.Remove(documentId);
RaiseTaskListUpdated(_workspace, null, documentId, ImmutableArray<TodoItem>.Empty);
}
// Only the token-list option affects computed results, so only it forces reanalysis.
public bool NeedsReanalysisOnOptionChanged(object sender, OptionChangedEventArgs e)
{
return e.Option == TodoCommentOptions.TokenList;
}
// Persisted payload: the versions the items were computed against plus the items.
private class Data
{
public readonly VersionStamp TextVersion;
public readonly VersionStamp SyntaxVersion;
public readonly ImmutableArray<TodoItem> Items;
public Data(VersionStamp textVersion, VersionStamp syntaxVersion, ImmutableArray<TodoItem> items)
{
this.TextVersion = textVersion;
this.SyntaxVersion = syntaxVersion;
this.Items = items;
}
}
#region not used
// The remaining IIncrementalAnalyzer members are intentionally no-ops.
public Task NewSolutionSnapshotAsync(Solution solution, CancellationToken cancellationToken)
{
return SpecializedTasks.EmptyTask;
}
public Task DocumentOpenAsync(Document document, CancellationToken cancellationToken)
{
return SpecializedTasks.EmptyTask;
}
public Task DocumentCloseAsync(Document document, CancellationToken cancellationToken)
{
return SpecializedTasks.EmptyTask;
}
public Task AnalyzeDocumentAsync(Document document, SyntaxNode bodyOpt, CancellationToken cancellationToken)
{
return SpecializedTasks.EmptyTask;
}
public Task AnalyzeProjectAsync(Project project, bool semanticsChanged, CancellationToken cancellationToken)
{
return SpecializedTasks.EmptyTask;
}
public void RemoveProject(ProjectId projectId)
{
}
#endregion
}
}
| |
// ********************************************************************************************************
// Product Name: DotSpatial.Symbology.Forms.dll
// Description: The core assembly for the DotSpatial 6.0 distribution.
// ********************************************************************************************************
// The contents of this file are subject to the MIT License (MIT)
// you may not use this file except in compliance with the License. You may obtain a copy of the License at
// http://dotspatial.codeplex.com/license
//
// Software distributed under the License is distributed on an "AS IS" basis, WITHOUT WARRANTY OF
// ANY KIND, either expressed or implied. See the License for the specific language governing rights and
// limitations under the License.
//
// The Original Code is DotSpatial.dll
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 4/10/2009 9:45:17 AM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
//
// ********************************************************************************************************
using System;
using System.ComponentModel;
using System.Data;
using System.Diagnostics;
using System.Windows.Forms;
namespace DotSpatial.Symbology.Forms
{
/// <summary>
/// SelectByAttributes
/// </summary>
/// <summary>
/// Dialog that lets the user select features in a feature layer by building a
/// SQL-style attribute expression and choosing how the result modifies the
/// current selection (replace/append/subtract/select-from).
/// </summary>
public class SelectByAttributes : Form
{
    private Button btnApply;
    private Button btnOk;
    private ComboBox cmbLayers;
    private ComboBox cmbMethod;
    private Label lblLayer;
    private Label lblMethod;

    #region Private Variables

    // Layer currently targeted by the expression; set from cmbLayers.
    private IFeatureLayer _activeLayer;
    // Map frame whose feature layers populate the layer combo box.
    private IFrame _mapFrame;
    private Button btnClose;
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private IContainer components = null;
    private SQLQueryControl sqlQueryControl1;
    private ToolTip ttHelp;

    #endregion

    #region Windows Form Designer generated code

    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        this.components = new Container();
        ComponentResourceManager resources = new ComponentResourceManager(typeof(SelectByAttributes));
        this.lblLayer = new Label();
        this.cmbLayers = new ComboBox();
        this.lblMethod = new Label();
        this.cmbMethod = new ComboBox();
        this.btnOk = new Button();
        this.btnApply = new Button();
        this.ttHelp = new ToolTip(this.components);
        this.btnClose = new Button();
        this.sqlQueryControl1 = new SQLQueryControl();
        this.SuspendLayout();
        //
        // lblLayer
        //
        this.lblLayer.AccessibleDescription = null;
        this.lblLayer.AccessibleName = null;
        resources.ApplyResources(this.lblLayer, "lblLayer");
        this.lblLayer.Font = null;
        this.lblLayer.Name = "lblLayer";
        this.ttHelp.SetToolTip(this.lblLayer, resources.GetString("lblLayer.ToolTip"));
        //
        // cmbLayers
        //
        this.cmbLayers.AccessibleDescription = null;
        this.cmbLayers.AccessibleName = null;
        resources.ApplyResources(this.cmbLayers, "cmbLayers");
        this.cmbLayers.BackgroundImage = null;
        this.cmbLayers.Font = null;
        this.cmbLayers.FormattingEnabled = true;
        this.cmbLayers.Name = "cmbLayers";
        this.ttHelp.SetToolTip(this.cmbLayers, resources.GetString("cmbLayers.ToolTip"));
        this.cmbLayers.SelectedIndexChanged += new EventHandler(this.cmbLayers_SelectedIndexChanged);
        //
        // lblMethod
        //
        this.lblMethod.AccessibleDescription = null;
        this.lblMethod.AccessibleName = null;
        resources.ApplyResources(this.lblMethod, "lblMethod");
        this.lblMethod.Font = null;
        this.lblMethod.Name = "lblMethod";
        this.ttHelp.SetToolTip(this.lblMethod, resources.GetString("lblMethod.ToolTip"));
        //
        // cmbMethod
        //
        this.cmbMethod.AccessibleDescription = null;
        this.cmbMethod.AccessibleName = null;
        resources.ApplyResources(this.cmbMethod, "cmbMethod");
        this.cmbMethod.BackgroundImage = null;
        this.cmbMethod.Font = null;
        this.cmbMethod.FormattingEnabled = true;
        this.cmbMethod.Items.AddRange(new object[] {
        resources.GetString("cmbMethod.Items"),
        resources.GetString("cmbMethod.Items1"),
        resources.GetString("cmbMethod.Items2"),
        resources.GetString("cmbMethod.Items3")});
        this.cmbMethod.Name = "cmbMethod";
        this.ttHelp.SetToolTip(this.cmbMethod, resources.GetString("cmbMethod.ToolTip"));
        //
        // btnOk
        //
        this.btnOk.AccessibleDescription = null;
        this.btnOk.AccessibleName = null;
        resources.ApplyResources(this.btnOk, "btnOk");
        this.btnOk.BackgroundImage = null;
        this.btnOk.Font = null;
        this.btnOk.Name = "btnOk";
        this.ttHelp.SetToolTip(this.btnOk, resources.GetString("btnOk.ToolTip"));
        this.btnOk.UseVisualStyleBackColor = true;
        this.btnOk.Click += new EventHandler(this.btnOk_Click);
        //
        // btnApply
        //
        this.btnApply.AccessibleDescription = null;
        this.btnApply.AccessibleName = null;
        resources.ApplyResources(this.btnApply, "btnApply");
        this.btnApply.BackgroundImage = null;
        this.btnApply.Font = null;
        this.btnApply.Name = "btnApply";
        this.ttHelp.SetToolTip(this.btnApply, resources.GetString("btnApply.ToolTip"));
        this.btnApply.UseVisualStyleBackColor = true;
        this.btnApply.Click += new EventHandler(this.btnApply_Click);
        //
        // btnClose
        //
        this.btnClose.AccessibleDescription = null;
        this.btnClose.AccessibleName = null;
        resources.ApplyResources(this.btnClose, "btnClose");
        this.btnClose.BackgroundImage = null;
        // this.btnClose.DialogResult = DialogResult.Cancel;
        this.btnClose.Font = null;
        this.btnClose.Name = "btnClose";
        this.ttHelp.SetToolTip(this.btnClose, resources.GetString("btnClose.ToolTip"));
        this.btnClose.UseVisualStyleBackColor = true;
        this.btnClose.Click += new EventHandler(this.btnClose_Click);
        //
        // sqlQueryControl1
        //
        this.sqlQueryControl1.AccessibleDescription = null;
        this.sqlQueryControl1.AccessibleName = null;
        resources.ApplyResources(this.sqlQueryControl1, "sqlQueryControl1");
        this.sqlQueryControl1.AttributeSource = null;
        this.sqlQueryControl1.BackgroundImage = null;
        this.sqlQueryControl1.ExpressionText = string.Empty;
        this.sqlQueryControl1.Font = null;
        this.sqlQueryControl1.Name = "sqlQueryControl1";
        this.sqlQueryControl1.Table = null;
        this.ttHelp.SetToolTip(this.sqlQueryControl1, resources.GetString("sqlQueryControl1.ToolTip"));
        //
        // SelectByAttributes
        //
        this.AcceptButton = this.btnOk;
        this.AccessibleDescription = null;
        this.AccessibleName = null;
        resources.ApplyResources(this, "$this");
        this.BackgroundImage = null;
        this.CancelButton = this.btnClose;
        this.Controls.Add(this.sqlQueryControl1);
        this.Controls.Add(this.btnClose);
        this.Controls.Add(this.btnApply);
        this.Controls.Add(this.btnOk);
        this.Controls.Add(this.cmbMethod);
        this.Controls.Add(this.lblMethod);
        this.Controls.Add(this.cmbLayers);
        this.Controls.Add(this.lblLayer);
        this.Font = null;
        // this.FormBorderStyle = FormBorderStyle.FixedDialog;
        this.HelpButton = true;
        this.Icon = null;
        this.MaximizeBox = false;
        this.MinimizeBox = false;
        this.Name = "SelectByAttributes";
        this.ShowIcon = false;
        this.ttHelp.SetToolTip(this, resources.GetString("$this.ToolTip"));
        this.ResumeLayout(false);
        this.PerformLayout();
    }

    #endregion

    #region Constructors

    /// <summary>
    /// Creates a new instance of SelectByAttributes with no map frame; the
    /// layer list stays empty until <see cref="MapFrame"/> is assigned.
    /// </summary>
    public SelectByAttributes()
    {
        InitializeComponent();
        Configure();
    }

    /// <summary>
    /// Creates a new instance of SelectByAttributes
    /// </summary>
    /// <param name="mapFrame">the MapFrame containing the layers</param>
    public SelectByAttributes(IFrame mapFrame)
    {
        _mapFrame = mapFrame;
        InitializeComponent();
        Configure();
    }

    /// <summary>
    /// (Re)populates the layer combo box with the feature layers of the current
    /// map frame and resets the method selection.
    /// </summary>
    private void Configure()
    {
        DataTable dt = new DataTable();
        dt.Columns.Add("Name", typeof(string));
        dt.Columns.Add("Value", typeof(IFeatureLayer));
        // BUGFIX: the parameterless constructor calls Configure() before any
        // map frame has been assigned; enumerating a null _mapFrame threw a
        // NullReferenceException. Guard and leave the list empty instead.
        if (_mapFrame != null)
        {
            foreach (ILayer layer in _mapFrame)
            {
                IFeatureLayer fl = layer as IFeatureLayer;
                if (fl != null)
                {
                    DataRow dr = dt.NewRow();
                    dr["Name"] = fl.LegendText;
                    dr["Value"] = fl;
                    dt.Rows.Add(dr);
                }
            }
        }
        cmbLayers.DataSource = dt;
        cmbLayers.DisplayMember = "Name";
        cmbLayers.ValueMember = "Value";
        cmbMethod.SelectedIndex = 0;
        if (cmbLayers.Items.Count > 0) cmbLayers.SelectedIndex = 0;
    }

    #endregion

    #region Methods

    #endregion

    #region Properties

    /// <summary>
    /// Gets or sets the map frame to use for this control
    /// </summary>
    public IFrame MapFrame
    {
        get { return _mapFrame; }
        set
        {
            _mapFrame = value;
            Configure();
        }
    }

    #endregion

    #region Events

    #endregion

    #region Event Handlers

    #endregion

    #region Private Functions

    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing && (components != null))
        {
            components.Dispose();
        }
        base.Dispose(disposing);
    }

    #endregion

    // Resolves the newly selected layer and hands its attribute data to the
    // query control (full DataTable when attributes are loaded or in edit
    // mode; otherwise the dataset itself as a paged attribute source).
    private void cmbLayers_SelectedIndexChanged(object sender, EventArgs e)
    {
        // SelectedValue may surface either the DataRowView or the bound value
        // depending on binding state, so handle both shapes.
        DataRowView drv = cmbLayers.SelectedValue as DataRowView;
        if (drv != null)
        {
            _activeLayer = drv.Row["Value"] as IFeatureLayer;
        }
        else
        {
            _activeLayer = cmbLayers.SelectedValue as IFeatureLayer;
        }
        if (_activeLayer == null) return;
        // Eagerly load attributes only for reasonably small datasets.
        if (!_activeLayer.DataSet.AttributesPopulated && _activeLayer.DataSet.NumRows() < 50000)
        {
            _activeLayer.DataSet.FillAttributes();
        }
        if (_activeLayer.EditMode || _activeLayer.DataSet.AttributesPopulated)
        {
            sqlQueryControl1.Table = _activeLayer.DataSet.DataTable;
        }
        else
        {
            sqlQueryControl1.AttributeSource = _activeLayer.DataSet;
        }
    }

    private void btnApply_Click(object sender, EventArgs e)
    {
        ApplyFilter();
    }

    // Applies the current expression to the active layer's selection using the
    // chosen modification mode; errors are logged and reported to the user.
    private void ApplyFilter()
    {
        string filter = sqlQueryControl1.ExpressionText;
        if (_activeLayer != null)
        {
            try
            {
                _activeLayer.SelectByAttribute(filter, GetSelectMode());
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.ToString());
                MessageBox.Show("There was an error attempting to apply this expression.");
            }
        }
    }

    // Maps the method combo box index to the corresponding selection mode.
    private ModifySelectionMode GetSelectMode()
    {
        switch (cmbMethod.SelectedIndex)
        {
            case 0: return ModifySelectionMode.Replace;
            case 1: return ModifySelectionMode.Append;
            case 2: return ModifySelectionMode.Subtract;
            case 3: return ModifySelectionMode.SelectFrom;
        }
        // No selection (index -1) falls back to Replace.
        return ModifySelectionMode.Replace;
    }

    private void btnOk_Click(object sender, EventArgs e)
    {
        ApplyFilter();
        Close();
    }

    private void btnClose_Click(object sender, EventArgs e)
    {
        Close();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Security;
namespace System.Runtime.InteropServices.WindowsRuntime
{
// Matches the shape of IEnumerable<T>.GetEnumerator; used by
// GetEnumerator_Variance_Stub to invoke the delegate target resolved for an
// ambiguous variant call. Covariant in T so a delegate over a derived element
// type can stand in for a base one.
internal delegate IEnumerator<T> GetEnumerator_Delegate<out T>();
// This is a set of stub methods implementing the support for the IEnumerable`1 interface on WinRT
// objects that implement IIterable`1. Used by the interop mashaling infrastructure.
//
// The methods on this class must be written VERY carefully to avoid introducing security holes.
// That's because they are invoked with special "this"! The "this" object
// for all of these methods are not IterableToEnumerableAdapter objects. Rather, they are of type
// IIterable<T>. No actual IterableToEnumerableAdapter object is ever instantiated. Thus, you will
// see a lot of expressions that cast "this" to "IIterable<T>".
// Stub holder: never instantiated. Each method below runs with a "this" that is
// really an IIterable<T> (see the comment above the class), which is why every
// access to "this" goes through Unsafe.As rather than a normal cast.
internal sealed class IterableToEnumerableAdapter
{
private IterableToEnumerableAdapter()
{
Debug.Fail("This class is never instantiated");
}
// This method is invoked when GetEnumerator is called on a WinRT-backed implementation of IEnumerable<T>.
internal IEnumerator<T> GetEnumerator_Stub<T>()
{
// Reinterpret "this" as the underlying WinRT iterable and wrap its first iterator.
IIterable<T> _this = Unsafe.As<IIterable<T>>(this);
return new IteratorToEnumeratorAdapter<T>(_this.First());
}
// This method is invoked when GetEnumerator is called on a WinRT-backed implementation of IEnumerable<T>
// and it is possible that the implementation supports IEnumerable<Type>/IEnumerable<string>/IEnumerable<Exception>/
// IEnumerable<array>/IEnumerable<delegate> rather than IEnumerable<T> because T is assignable from Type/string/
// Exception/array/delegate via co-variance.
internal IEnumerator<T> GetEnumerator_Variance_Stub<T>() where T : class
{
bool fUseString;
// Ask the stub helpers which concrete interface the object actually implements;
// a non-null delegate means a previously-resolved stub should handle the call.
Delegate target = System.StubHelpers.StubHelpers.GetTargetForAmbiguousVariantCall(
this,
typeof(IEnumerable<T>).TypeHandle.Value,
out fUseString);
if (target != null)
{
return (Unsafe.As<GetEnumerator_Delegate<T>>(target))();
}
if (fUseString)
{
// The object implements IEnumerable<string>; the string enumerator is
// reinterpreted as IEnumerator<T> via co-variance (T is a reference type).
return Unsafe.As<IEnumerator<T>>(GetEnumerator_Stub<string>());
}
return GetEnumerator_Stub<T>();
}
}
// Stub holder for the non-generic IEnumerable case: never instantiated; "this"
// is really an IBindableIterable when GetEnumerator_Stub runs.
internal sealed class BindableIterableToEnumerableAdapter
{
private BindableIterableToEnumerableAdapter()
{
Debug.Fail("This class is never instantiated");
}
// Bridges a non-generic IBindableIterator to the generic IIterator<object>
// shape expected by IteratorToEnumeratorAdapter. GetMany is unsupported
// because the underlying non-generic iterator has no bulk-read operation.
private sealed class NonGenericToGenericIterator : IIterator<object>
{
private IBindableIterator iterator;
public NonGenericToGenericIterator(IBindableIterator iterator)
{ this.iterator = iterator; }
public object Current { get { return iterator.Current; } }
public bool HasCurrent { get { return iterator.HasCurrent; } }
public bool MoveNext() { return iterator.MoveNext(); }
public int GetMany(object[] items) { throw new NotSupportedException(); }
}
// This method is invoked when GetEnumerator is called on a WinRT-backed implementation of IEnumerable.
internal IEnumerator GetEnumerator_Stub()
{
// Reinterpret "this" as the WinRT iterable, then adapt its iterator to IEnumerator.
IBindableIterable _this = Unsafe.As<IBindableIterable>(this);
return new IteratorToEnumeratorAdapter<object>(new NonGenericToGenericIterator(_this.First()));
}
}
// Adapter class which holds a Windows Runtime IIterator<T>, exposing it as a managed IEnumerator<T>
// There are a few implementation differences between the Iterator and IEnumerator which need to be
// addressed. Iterator starts at index 0 while IEnumerator starts at index -1 as a result of which
// the first call to IEnumerator.Current is correct only after calling MoveNext().
// Also IEnumerator throws an exception when we call Current after reaching the end of collection.
internal sealed class IteratorToEnumeratorAdapter<T> : IEnumerator<T>
{
// The wrapped WinRT iterator.
private IIterator<T> m_iterator;
// True while the iterator still had an element on the last advance; starts
// true so the first MoveNext call is not short-circuited.
private bool m_hadCurrent;
// Cached copy of the iterator's current element (avoids a COM call per
// Current access and preserves the last value after the collection ends).
private T m_current;
// False until the first MoveNext call; Current throws while false.
private bool m_isInitialized;
internal IteratorToEnumeratorAdapter(IIterator<T> iterator)
{
Debug.Assert(iterator != null);
m_iterator = iterator;
m_hadCurrent = true;
m_isInitialized = false;
}
public T Current
{
get
{
// The enumerator has not been advanced to the first element yet.
if (!m_isInitialized)
ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumNotStarted();
// The enumerator has reached the end of the collection
if (!m_hadCurrent)
ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumEnded();
return m_current;
}
}
// Non-generic view: same checks as the typed property, boxed return.
object IEnumerator.Current
{
get
{
// The enumerator has not been advanced to the first element yet.
if (!m_isInitialized)
ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumNotStarted();
// The enumerator has reached the end of the collection
if (!m_hadCurrent)
ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumEnded();
return m_current;
}
}
public bool MoveNext()
{
// If we've passed the end of the iteration, IEnumerable<T> should return false, while
// IIterable will fail the interface call
if (!m_hadCurrent)
{
return false;
}
// IIterators start at index 0, rather than -1. If this is the first call, we need to just
// check HasCurrent rather than actually moving to the next element
try
{
if (!m_isInitialized)
{
m_hadCurrent = m_iterator.HasCurrent;
m_isInitialized = true;
}
else
{
m_hadCurrent = m_iterator.MoveNext();
}
// We want to save away the current value for two reasons:
// 1. Accessing .Current is cheap on other iterators, so having it be a property which is a
// simple field access preserves the expected performance characteristics (as opposed to
// triggering a COM call every time the property is accessed)
//
// 2. This allows us to preserve the same semantics as generic collection iteration when iterating
// beyond the end of the collection - namely that Current continues to return the last value
// of the collection
if (m_hadCurrent)
{
m_current = m_iterator.Current;
}
}
catch (Exception e)
{
// Translate E_CHANGED_STATE into an InvalidOperationException for an updated enumeration
if (Marshal.GetHRForException(e) == HResults.E_CHANGED_STATE)
{
ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumFailedVersion();
}
else
{
throw;
}
}
return m_hadCurrent;
}
// WinRT iterators cannot be rewound, so Reset is unsupported.
public void Reset()
{
throw new NotSupportedException();
}
// Nothing to release; the underlying iterator's lifetime is managed by the RCW.
public void Dispose()
{
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace TestCases.HSSF.Extractor
{
using System;
using NPOI.HSSF;
using TestCases;
using NUnit.Framework;
using System.IO;
using TestCases.HSSF;
using NPOI.HSSF.Extractor;
/**
* Unit tests for the Excel 5/95 and Excel 4 (and older) text
* extractor
*/
[TestFixture]
public class TestOldExcelExtractor : POITestCase
{
    /// <summary>
    /// Opens the named sample workbook and builds an OldExcelExtractor over an
    /// in-memory copy of its contents.
    /// </summary>
    private static OldExcelExtractor CreateExtractor(String sampleFileName)
    {
        Stream is1 = HSSFTestDataSamples.OpenSampleFileStream(sampleFileName);
        // Copy into a MemoryStream so the extractor owns a seekable buffer.
        MemoryStream ms = new MemoryStream();
        is1.CopyTo(ms);
        ms.Position = 0;
        // NOTE: the former catch (Exception e) { throw e; } wrapper was removed;
        // it only rethrew while destroying the original stack trace.
        // NOTE(review): the source stream is intentionally left open (the
        // original had a commented-out Close call); the extractor reads only
        // the MemoryStream copy, so closing is1 should be safe - confirm.
        return new OldExcelExtractor(ms);
    }
    [Test]
    public void TestSimpleExcel3()
    {
        OldExcelExtractor extractor = CreateExtractor("testEXCEL_3.xls");
        // Check we can call GetText without error
        String text = extractor.Text;
        // Check we find a few words we expect in there
        AssertContains(text, "Season beginning August");
        AssertContains(text, "USDA");
        // Check we find a few numbers we expect in there
        AssertContains(text, "347");
        AssertContains(text, "228");
        // Check we find a few string-literal dates in there
        AssertContains(text, "1981/82");
        // Check the type
        Assert.AreEqual(3, extractor.BiffVersion);
        Assert.AreEqual(0x10, extractor.FileType);
    }
    [Test]
    public void TestSimpleExcel4()
    {
        OldExcelExtractor extractor = CreateExtractor("testEXCEL_4.xls");
        // Check we can call GetText without error
        String text = extractor.Text;
        // Check we find a few words we expect in there
        AssertContains(text, "Size");
        AssertContains(text, "Returns");
        // Check we find a few numbers we expect in there
        AssertContains(text, "11");
        AssertContains(text, "784");
        // Check the type
        Assert.AreEqual(4, extractor.BiffVersion);
        Assert.AreEqual(0x10, extractor.FileType);
    }
    [Test]
    public void TestSimpleExcel5()
    {
        foreach (String ver in new String[] { "5", "95" })
        {
            OldExcelExtractor extractor = CreateExtractor("testEXCEL_" + ver + ".xls");
            // Check we can call GetText without error
            String text = extractor.Text;
            // Check we find a few words we expect in there
            AssertContains(text, "Sample Excel");
            AssertContains(text, "Written and saved");
            // Check we find a few numbers we expect in there
            AssertContains(text, "15");
            AssertContains(text, "169");
            // Check we got the sheet names (new formats only)
            AssertContains(text, "Sheet: Feuil3");
            // Check the type
            Assert.AreEqual(5, extractor.BiffVersion);
            Assert.AreEqual(0x05, extractor.FileType);
        }
    }
    [Test]
    public void TestStrings()
    {
        OldExcelExtractor extractor = CreateExtractor("testEXCEL_4.xls");
        String text = extractor.Text;
        // Simple strings
        AssertContains(text, "Table 10 -- Examination Coverage:");
        AssertContains(text, "Recommended and Average Recommended Additional Tax After");
        AssertContains(text, "Individual income tax returns, total");
        // More complicated strings
        AssertContains(text, "$100,000 or more");
        AssertContains(text, "S corporation returns, Form 1120S [10,15]");
        AssertContains(text, "individual income tax return \u201Cshort forms.\u201D");
        // Formula based strings
        // TODO Find some then test
    }
    [Test]
    public void TestFormattedNumbersExcel4()
    {
        OldExcelExtractor extractor = CreateExtractor("testEXCEL_4.xls");
        String text = extractor.Text;
        // Simple numbers
        AssertContains(text, "151");
        AssertContains(text, "784");
        // Numbers which come from formulas
        AssertContains(text, "0.398"); // TODO Rounding
        AssertContains(text, "624");
        // Formatted numbers
        // TODO
        // AssertContains(text, "55,624");
        // AssertContains(text, "11,743,477");
    }
    [Test]
    public void TestFormattedNumbersExcel5()
    {
        foreach (String ver in new String[] { "5", "95" })
        {
            OldExcelExtractor extractor = CreateExtractor("testEXCEL_" + ver + ".xls");
            String text = extractor.Text;
            // Simple numbers
            AssertContains(text, "1");
            // Numbers which come from formulas
            AssertContains(text, "13");
            AssertContains(text, "169");
            // Formatted numbers
            // TODO
            // AssertContains(text, "100.00%");
            // AssertContains(text, "155.00%");
            // AssertContains(text, "1,125");
            // AssertContains(text, "189,945");
            // AssertContains(text, "1,234,500");
            // AssertContains(text, "$169.00");
            // AssertContains(text, "$1,253.82");
        }
    }
    /// <summary>
    /// Verifies the FileInfo-based constructor path also extracts text.
    /// </summary>
    [Test]
    public void TestFromFile()
    {
        foreach (String ver in new String[] { "4", "5", "95" })
        {
            String filename = "testEXCEL_" + ver + ".xls";
            FileInfo f = HSSFTestDataSamples.GetSampleFile(filename);
            OldExcelExtractor extractor = new OldExcelExtractor(f);
            String text = extractor.Text;
            Assert.IsNotNull(text);
            Assert.IsTrue(text.Length > 100);
        }
    }
}
}
| |
namespace SADFM.Infrastructure.Persistence
{
using System;
using System.Data.Entity;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using Models;
// Entity Framework 6 context for the SADFM application (database-first style).
// Exposes one DbSet per table and performs all column/relationship mapping via
// the fluent API in OnModelCreating.
public partial class SADFMContext : DbContext
{
// Connection string is resolved from the "SADFMContext" entry in the
// application configuration file.
public SADFMContext()
: base("name=SADFMContext")
{
}
// One DbSet per mapped table.
public virtual DbSet<Account> Accounts { get; set; }
public virtual DbSet<AccountType> AccountTypes { get; set; }
public virtual DbSet<AdmissionStatus> AdmissionStatus { get; set; }
public virtual DbSet<AdmissionType> AdmissionTypes { get; set; }
public virtual DbSet<Careplan> Careplans { get; set; }
public virtual DbSet<Case> Cases { get; set; }
public virtual DbSet<City> Cities { get; set; }
public virtual DbSet<Country> Countries { get; set; }
public virtual DbSet<Facility> Facilities { get; set; }
public virtual DbSet<FacilityGroup> FacilityGroups { get; set; }
public virtual DbSet<FacilityPermission> FacilityPermissions { get; set; }
public virtual DbSet<FacilityServiceType> FacilityServiceTypes { get; set; }
public virtual DbSet<Frequency> Frequencies { get; set; }
public virtual DbSet<Gender> Genders { get; set; }
public virtual DbSet<ICD10Code> ICD10Code { get; set; }
public virtual DbSet<Language> Languages { get; set; }
public virtual DbSet<MedicalScheme> MedicalSchemes { get; set; }
public virtual DbSet<Patient> Patients { get; set; }
public virtual DbSet<PatientCarePlan> PatientCarePlans { get; set; }
public virtual DbSet<PatientFacility> PatientFacilities { get; set; }
public virtual DbSet<Permission> Permissions { get; set; }
public virtual DbSet<ProfessionalBody> ProfessionalBodies { get; set; }
public virtual DbSet<Province> Provinces { get; set; }
public virtual DbSet<Race> Races { get; set; }
public virtual DbSet<ResidentialEnviromentType> ResidentialEnviromentTypes { get; set; }
public virtual DbSet<Role> Roles { get; set; }
public virtual DbSet<RoleGroup> RoleGroups { get; set; }
public virtual DbSet<ScoreItem> ScoreItems { get; set; }
public virtual DbSet<ScoreValue> ScoreValues { get; set; }
public virtual DbSet<ServiceType> ServiceTypes { get; set; }
public virtual DbSet<ServiceTypeGroup> ServiceTypeGroups { get; set; }
public virtual DbSet<Status> Status { get; set; }
public virtual DbSet<Title> Titles { get; set; }
// Fluent-API model configuration.
// - IsUnicode(false) maps the string property to a non-Unicode (varchar) column.
// - WillCascadeOnDelete(false) disables cascade delete on the relationship.
// - The Map(...) calls configure many-to-many join tables and their key columns.
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
// Account: varchar columns plus its relationships to cases, permissions,
// patients and patient-facility assignments.
modelBuilder.Entity<Account>()
.Property(e => e.Username)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.Password)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.EmailAddress)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.Firstname)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.Lastname)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.IDNumberType)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.IDNumber)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.SecurityQuestionAnswer)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.PracticeNumber)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.Property(e => e.RegistrationNumber)
.IsUnicode(false);
modelBuilder.Entity<Account>()
.HasMany(e => e.Cases)
.WithOptional(e => e.LastUpdatedAccount)
.HasForeignKey(e => e.LastUpdatedAccountId);
modelBuilder.Entity<Account>()
.HasMany(e => e.FacilityPermissions)
.WithRequired(e => e.Account)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Account>()
.HasMany(e => e.Patients)
.WithOptional(e => e.LastUpdatedAccount)
.HasForeignKey(e => e.LastUpdatedAccountId);
modelBuilder.Entity<Account>()
.HasMany(e => e.PatientFacilities)
.WithRequired(e => e.Account)
.HasForeignKey(e => e.TeamMemberId)
.WillCascadeOnDelete(false);
modelBuilder.Entity<AccountType>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<AccountType>()
.HasMany(e => e.Accounts)
.WithRequired(e => e.AccountType)
.WillCascadeOnDelete(false);
modelBuilder.Entity<AdmissionStatus>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<AdmissionType>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Careplan>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Careplan>()
.HasMany(e => e.PatientCarePlans)
.WithRequired(e => e.Careplan)
.WillCascadeOnDelete(false);
// Case: referring/treating doctor contact details are all varchar.
modelBuilder.Entity<Case>()
.Property(e => e.ReferringDoctorName)
.IsUnicode(false);
modelBuilder.Entity<Case>()
.Property(e => e.ReferringDoctorContact)
.IsUnicode(false);
modelBuilder.Entity<Case>()
.Property(e => e.ReferringDoctorEmailAddress)
.IsUnicode(false);
modelBuilder.Entity<Case>()
.Property(e => e.ReferringDoctorPracticeNumber)
.IsUnicode(false);
modelBuilder.Entity<Case>()
.Property(e => e.TreatingDoctorName)
.IsUnicode(false);
modelBuilder.Entity<Case>()
.Property(e => e.TreatingDoctorContact)
.IsUnicode(false);
modelBuilder.Entity<Case>()
.Property(e => e.TreatingDoctorEmail)
.IsUnicode(false);
modelBuilder.Entity<Case>()
.Property(e => e.TreatingDoctorPracticeNumber)
.IsUnicode(false);
modelBuilder.Entity<City>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<City>()
.HasMany(e => e.FacilityServiceTypes)
.WithRequired(e => e.City)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Country>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Country>()
.HasMany(e => e.FacilityServiceTypes)
.WithRequired(e => e.Country)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Country>()
.HasMany(e => e.Provinces)
.WithRequired(e => e.Country)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Facility>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Facility>()
.HasMany(e => e.FacilityPermissions)
.WithRequired(e => e.Facility)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Facility>()
.HasMany(e => e.FacilityServiceTypes)
.WithRequired(e => e.Facility)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Facility>()
.HasMany(e => e.PatientCarePlans)
.WithRequired(e => e.Facility)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Facility>()
.HasMany(e => e.PatientFacilities)
.WithRequired(e => e.Facility)
.WillCascadeOnDelete(false);
// Many-to-many: Facility <-> MedicalScheme via FacilityMedicalScheme.
modelBuilder.Entity<Facility>()
.HasMany(e => e.MedicalSchemes)
.WithMany(e => e.Facilities)
.Map(m => m.ToTable("FacilityMedicalScheme").MapLeftKey("FacilityId").MapRightKey("MedicalSchemeId"));
modelBuilder.Entity<FacilityGroup>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<FacilityGroup>()
.HasMany(e => e.Facilities)
.WithRequired(e => e.FacilityGroup)
.WillCascadeOnDelete(false);
// Many-to-many: FacilityPermission <-> Permission via FacilityPermissionsLink.
modelBuilder.Entity<FacilityPermission>()
.HasMany(e => e.Permissions)
.WithMany(e => e.FacilityPermissions)
.Map(m => m.ToTable("FacilityPermissionsLink").MapLeftKey("FacilityPermissionsId").MapRightKey("PermissionId"));
modelBuilder.Entity<FacilityServiceType>()
.Property(e => e.ContactNumber)
.IsUnicode(false);
modelBuilder.Entity<FacilityServiceType>()
.Property(e => e.Street)
.IsUnicode(false);
modelBuilder.Entity<FacilityServiceType>()
.Property(e => e.PostalCode)
.IsUnicode(false);
modelBuilder.Entity<Frequency>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Frequency>()
.HasMany(e => e.PatientCarePlans)
.WithRequired(e => e.Frequency)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Gender>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<ICD10Code>()
.Property(e => e.Code)
.IsUnicode(false);
modelBuilder.Entity<ICD10Code>()
.Property(e => e.Description)
.IsUnicode(false);
modelBuilder.Entity<Language>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<MedicalScheme>()
.Property(e => e.Name)
.IsUnicode(false);
// Patient: demographic and next-of-kin columns are varchar.
modelBuilder.Entity<Patient>()
.Property(e => e.Firstname)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.Lastname)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.IDNumber)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.ContactNumber)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.Street)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.PostalCode)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.NextOfKinName)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.NextOfKinContact)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.MedicalSchemeMembershipNumber)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.NextOfKinEmailAddress)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.Property(e => e.NextOfKinRelationship)
.IsUnicode(false);
modelBuilder.Entity<Patient>()
.HasMany(e => e.Cases)
.WithRequired(e => e.Patient)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Patient>()
.HasMany(e => e.PatientCarePlans)
.WithRequired(e => e.Patient)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Patient>()
.HasMany(e => e.PatientFacilities)
.WithRequired(e => e.Patient)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Permission>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<ProfessionalBody>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Province>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Province>()
.HasMany(e => e.Cities)
.WithRequired(e => e.Province)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Province>()
.HasMany(e => e.FacilityServiceTypes)
.WithRequired(e => e.Province)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Race>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<ResidentialEnviromentType>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Role>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<RoleGroup>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<RoleGroup>()
.HasMany(e => e.Roles)
.WithRequired(e => e.RoleGroup)
.WillCascadeOnDelete(false);
modelBuilder.Entity<ScoreItem>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<ScoreItem>()
.Property(e => e.Description)
.IsUnicode(false);
modelBuilder.Entity<ScoreItem>()
.Property(e => e.ToolTip)
.IsUnicode(false);
// Self-referencing hierarchy: a score item may have sub-items.
modelBuilder.Entity<ScoreItem>()
.HasMany(e => e.SubScoreItems)
.WithOptional(e => e.ParentScoreItem)
.HasForeignKey(e => e.ParentScoreItemId);
modelBuilder.Entity<ScoreItem>()
.HasMany(e => e.ScoreValues)
.WithRequired(e => e.ScoreItem)
.WillCascadeOnDelete(false);
modelBuilder.Entity<ScoreValue>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<ScoreValue>()
.Property(e => e.Description)
.IsUnicode(false);
modelBuilder.Entity<ScoreValue>()
.Property(e => e.ToolTip)
.IsUnicode(false);
modelBuilder.Entity<ServiceType>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<ServiceType>()
.Property(e => e.Code)
.IsUnicode(false);
modelBuilder.Entity<ServiceTypeGroup>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<ServiceTypeGroup>()
.Property(e => e.Code)
.IsUnicode(false);
modelBuilder.Entity<ServiceTypeGroup>()
.Property(e => e.Type)
.IsUnicode(false);
modelBuilder.Entity<ServiceTypeGroup>()
.HasMany(e => e.ServiceTypes)
.WithRequired(e => e.ServiceTypeGroup)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Status>()
.Property(e => e.Name)
.IsUnicode(false);
modelBuilder.Entity<Status>()
.HasMany(e => e.Accounts)
.WithRequired(e => e.Status)
.WillCascadeOnDelete(false);
modelBuilder.Entity<Title>()
.Property(e => e.Name)
.IsUnicode(false);
}
}
}
| |
using Lucene.Net.Support;
using Lucene.Net.Support.IO;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;// Used only for WRITE_LOCK_NAME in deprecated create=true case:
using System.Runtime.CompilerServices;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
namespace Lucene.Net.Store
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using Constants = Lucene.Net.Util.Constants;
using IOUtils = Lucene.Net.Util.IOUtils;
/// <summary>
/// Base class for <see cref="Directory"/> implementations that store index
/// files in the file system.
/// <para/>
/// There are currently three core
/// subclasses:
///
/// <list type="bullet">
///
/// <item><description> <see cref="SimpleFSDirectory"/> is a straightforward
/// implementation using <see cref="FileStream"/>.
/// However, it has poor concurrent performance
/// (multiple threads will bottleneck) as it
/// synchronizes when multiple threads read from the
/// same file.</description></item>
///
/// <item><description> <see cref="NIOFSDirectory"/> uses java.nio's
/// FileChannel's positional io when reading to avoid
/// synchronization when reading from the same file.
/// Unfortunately, due to a Windows-only <a
/// href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6265734">Sun
/// JRE bug</a> this is a poor choice for Windows, but
/// on all other platforms this is the preferred
/// choice. Applications using <see cref="Thread.Interrupt()"/> or
/// <see cref="Task{TResult}"/> should use
/// <see cref="SimpleFSDirectory"/> instead. See <see cref="NIOFSDirectory"/> java doc
/// for details.</description></item>
///
/// <item><description> <see cref="MMapDirectory"/> uses memory-mapped IO when
/// reading. This is a good choice if you have plenty
/// of virtual memory relative to your index size, eg
/// if you are running on a 64 bit runtime, or you are
/// running on a 32 bit runtime but your index sizes are
/// small enough to fit into the virtual memory space.
/// <para/>
/// Applications using <see cref="Thread.Interrupt()"/> or
/// <see cref="Task{TResult}"/> should use
/// <see cref="SimpleFSDirectory"/> instead. See <see cref="MMapDirectory"/>
/// doc for details.</description></item>
/// </list>
///
/// Unfortunately, because of system peculiarities, there is
/// no single overall best implementation. Therefore, we've
/// added the <see cref="Open(string)"/> method (or one of its overloads), to allow Lucene to choose
/// the best <see cref="FSDirectory"/> implementation given your
/// environment, and the known limitations of each
/// implementation. For users who have no reason to prefer a
/// specific implementation, it's best to simply use
/// <see cref="Open(string)"/> (or one of its overloads). For all others, you should instantiate the
/// desired implementation directly.
///
/// <para/>The locking implementation is by default
/// <see cref="NativeFSLockFactory"/>, but can be changed by
/// passing in a custom <see cref="LockFactory"/> instance.
/// </summary>
/// <seealso cref="Directory"/>
public abstract class FSDirectory : BaseDirectory
{
/// <summary>
/// Default read chunk size: 8192 bytes (this is the size up to which the runtime
/// does not allocate additional arrays while reading/writing) </summary>
[Obsolete("this constant is no longer used since Lucene 4.5.")]
public const int DEFAULT_READ_CHUNK_SIZE = 8192;
protected readonly DirectoryInfo m_directory; // The underlying filesystem directory
// LUCENENET specific: No such thing as "stale files" in .NET, since Flush(true) writes everything to disk before
// our FileStream is disposed.
//protected readonly ISet<string> m_staleFiles = new ConcurrentHashSet<string>(); // Files written, but not yet sync'ed
#pragma warning disable 612, 618
private int chunkSize = DEFAULT_READ_CHUNK_SIZE;
#pragma warning restore 612, 618
/// <summary>
/// Create a new <see cref="FSDirectory"/> for the named location using the
/// default lock factory (<see cref="NativeFSLockFactory"/>) (ctor for subclasses). </summary>
/// <param name="dir"> the path of the directory </param>
protected FSDirectory(DirectoryInfo dir)
: this(dir, null)
{
}
/// <summary>
/// Create a new <see cref="FSDirectory"/> for the named location (ctor for subclasses). </summary>
/// <param name="path"> the path of the directory </param>
/// <param name="lockFactory"> the lock factory to use, or null for the default
/// (<seealso cref="NativeFSLockFactory"/>); </param>
/// <exception cref="IOException"> if there is a low-level I/O error </exception>
/// <exception cref="DirectoryNotFoundException"> if a regular file exists at <paramref name="path"/> </exception>
protected internal FSDirectory(DirectoryInfo path, LockFactory lockFactory)
{
// new ctors use always NativeFSLockFactory as default:
if (lockFactory == null)
{
lockFactory = new NativeFSLockFactory();
}
// Store the canonical (resolved) form of the path.
m_directory = new DirectoryInfo(path.GetCanonicalPath());
// A regular file at this location can never serve as an index directory.
if (File.Exists(path.FullName))
{
throw new DirectoryNotFoundException("file '" + path.FullName + "' exists but is not a directory");
}
SetLockFactory(lockFactory);
}
/// <summary>
/// Creates an <see cref="FSDirectory"/> instance, trying to pick the
/// best implementation given the current environment.
/// The directory returned uses the <see cref="NativeFSLockFactory"/>.
///
/// <para/>Currently this returns <see cref="MMapDirectory"/> for most Solaris
/// and Windows 64-bit runtimes, <see cref="NIOFSDirectory"/> for other
/// non-Windows runtimes, and <see cref="SimpleFSDirectory"/> for other
/// runtimes on Windows. It is highly recommended that you consult the
/// implementation's documentation for your platform before
/// using this method.
///
/// <para/><b>NOTE</b>: this method may suddenly change which
/// implementation is returned from release to release, in
/// the event that higher performance defaults become
/// possible; if the precise implementation is important to
/// your application, please instantiate it directly,
/// instead. For optimal performance you should consider using
/// <see cref="MMapDirectory"/> on 64 bit runtimes.
///
/// <para/>See <see cref="FSDirectory"/>.
/// </summary>
public static FSDirectory Open(DirectoryInfo path)
{
    // Defer to the two-argument overload; null selects the default
    // NativeFSLockFactory.
    return Open(path, lockFactory: null);
}
/// <summary>
/// Just like <see cref="Open(DirectoryInfo)"/>, but
/// allows you to specify the directory as a <see cref="string"/>.
/// </summary>
/// <param name="path">The path (to a directory) to open</param>
/// <returns>An open <see cref="FSDirectory"/></returns>
public static FSDirectory Open(string path) // LUCENENET specific overload for ease of use with .NET
{
    // Wrap the string path and defer to the DirectoryInfo-based overload
    // with the default (null => NativeFSLockFactory) lock factory.
    DirectoryInfo dir = new DirectoryInfo(path);
    return Open(dir, null);
}
/// <summary>
/// Just like <see cref="Open(DirectoryInfo)"/>, but allows you to
/// also specify a custom <see cref="LockFactory"/>.
/// </summary>
public static FSDirectory Open(DirectoryInfo path, LockFactory lockFactory)
{
    // 64-bit Windows/Solaris/Linux runtimes get memory-mapped IO; 32-bit
    // Windows falls back to SimpleFSDirectory; everything else uses
    // NIOFSDirectory.
    bool preferMMap = (Constants.WINDOWS || Constants.SUN_OS || Constants.LINUX) && Constants.RUNTIME_IS_64BIT; /*&&
        MMapDirectory.UNMAP_SUPPORTED*/ // LUCENENET specific - unmap hack not needed
    if (preferMMap)
    {
        return new MMapDirectory(path, lockFactory);
    }
    if (Constants.WINDOWS)
    {
        return new SimpleFSDirectory(path, lockFactory);
    }
    return new NIOFSDirectory(path, lockFactory);
}
/// <summary>
/// Just like <see cref="Open(DirectoryInfo, LockFactory)"/>, but
/// allows you to specify the directory as a <see cref="string"/>.
/// </summary>
/// <param name="path">The path (to a directory) to open</param>
/// <param name="lockFactory">The lock factory to use, or null for the default</param>
/// <returns>An open <see cref="FSDirectory"/></returns>
public static FSDirectory Open(string path, LockFactory lockFactory) // LUCENENET specific overload for ease of use with .NET
{
return Open(new DirectoryInfo(path), lockFactory);
}
public override void SetLockFactory(LockFactory lockFactory)
{
    base.SetLockFactory(lockFactory);
    // For filesystem-based lock factories: if no lock directory was
    // configured, adopt this directory as the lock dir. In either
    // "locks live in the index dir" case the lock prefix is cleared.
    FSLockFactory fsLockFactory = lockFactory as FSLockFactory;
    if (fsLockFactory == null)
    {
        return;
    }
    DirectoryInfo lockDir = fsLockFactory.LockDir;
    if (lockDir == null)
    {
        // if the lock factory has no lockDir set, use the this directory as lockDir
        fsLockFactory.SetLockDir(m_directory);
        fsLockFactory.LockPrefix = null;
    }
    else if (lockDir.GetCanonicalPath().Equals(m_directory.GetCanonicalPath(), StringComparison.Ordinal))
    {
        fsLockFactory.LockPrefix = null;
    }
}
/// <summary>
/// Lists all files (not subdirectories) in the
/// directory. This method never returns <c>null</c> (throws
/// <seealso cref="IOException"/> instead).
/// </summary>
/// <exception cref="DirectoryNotFoundException"> if the directory
/// does not exist, or does exist but is not a
/// directory or is invalid (for example, it is on an unmapped drive). </exception>
/// <exception cref="SecurityException">The caller does not have the required permission.</exception>
public static string[] ListAll(DirectoryInfo dir)
{
    if (!System.IO.Directory.Exists(dir.FullName))
    {
        throw new DirectoryNotFoundException("directory '" + dir + "' does not exist");
    }
    if (File.Exists(dir.FullName))
    {
        throw new DirectoryNotFoundException("file '" + dir + "' exists but is not a directory");
    }
    // Subdirectories are excluded; only plain file names are returned.
    return dir.EnumerateFiles().Select(fi => fi.Name).ToArray();
}
/// <summary>
/// Lists all files (not subdirectories) in the
/// directory. </summary>
/// <seealso cref="ListAll(DirectoryInfo)"/>
public override string[] ListAll()
{
    EnsureOpen(); // reject calls on a disposed directory
    // Delegate to the static overload using this directory's location.
    return ListAll(m_directory);
}
/// <summary>
/// Returns <c>true</c> if and only if a file with the given name exists
/// in this directory. </summary>
[Obsolete("this method will be removed in 5.0")]
public override bool FileExists(string name)
{
    EnsureOpen();
    string fullPath = Path.Combine(m_directory.FullName, name);
    return File.Exists(fullPath);
}
/// <summary>
/// Returns the length in bytes of a file in the directory. </summary>
public override long FileLength(string name)
{
    EnsureOpen();
    var fileInfo = new FileInfo(Path.Combine(m_directory.FullName, name));
    // Read Length before the existence check, matching the original access order.
    long length = fileInfo.Length;
    if (length != 0 || fileInfo.Exists)
    {
        return length;
    }
    throw new FileNotFoundException(name);
}
/// <summary>
/// Removes an existing file in the directory. </summary>
/// <param name="name">The name of the file to delete (relative to this directory).</param>
/// <exception cref="FileNotFoundException">If the file does not exist.</exception>
/// <exception cref="IOException">If the file cannot be deleted.</exception>
public override void DeleteFile(string name)
{
    EnsureOpen();
    FileInfo file = new FileInfo(Path.Combine(m_directory.FullName, name));
    // LUCENENET specific: We need to explicitly throw when the file has already been deleted,
    // since FileInfo doesn't do that for us.
    // (An enhancement carried over from Lucene 8.2.0)
    if (!File.Exists(file.FullName))
    {
        throw new FileNotFoundException("Cannot delete " + file + " because it doesn't exist.");
    }
    try
    {
        file.Delete();
    }
    catch (Exception e)
    {
        throw new IOException("Cannot delete " + file, e);
    }
    // Do the post-delete verification OUTSIDE the try block. Previously this
    // throw sat inside the try, so the catch above re-wrapped our own
    // IOException in another IOException with itself as the inner exception.
    if (File.Exists(file.FullName))
    {
        throw new IOException("Cannot delete " + file);
    }
    // LUCENENET specific: No such thing as "stale files" in .NET, since Flush(true) writes everything to disk before
    // our FileStream is disposed.
    //m_staleFiles.Remove(name);
}
/// <summary>
/// Creates an <see cref="IndexOutput"/> for the file with the given name. </summary>
/// <param name="name">The name of the file to create (relative to this directory).</param>
/// <param name="context">The <see cref="IOContext"/>; not used by this implementation.</param>
public override IndexOutput CreateOutput(string name, IOContext context)
{
    EnsureOpen();
    // Creates the directory if missing and deletes any pre-existing file with this name.
    EnsureCanWrite(name);
    return new FSIndexOutput(this, name);
}
/// <summary>
/// Ensures the file <paramref name="name"/> can be written: creates the
/// directory if it is missing and deletes any existing file with that name.
/// </summary>
/// <param name="name">The name of the file about to be written (relative to this directory).</param>
/// <exception cref="IOException">If the directory cannot be created or an existing file cannot be deleted.</exception>
protected virtual void EnsureCanWrite(string name)
{
    if (!m_directory.Exists)
    {
        try
        {
            m_directory.Create();
        }
        catch (Exception e)
        {
            // Preserve the underlying failure as the inner exception rather
            // than discarding it (the previous bare catch lost the cause).
            throw new IOException("Cannot create directory: " + m_directory, e);
        }
    }
    FileInfo file = new FileInfo(Path.Combine(m_directory.FullName, name));
    if (file.Exists) // delete existing, if any
    {
        try
        {
            file.Delete();
        }
        catch (Exception e)
        {
            throw new IOException("Cannot overwrite: " + file, e);
        }
    }
}
/// <summary>
/// Hook called by <see cref="FSIndexOutput"/> when an output is disposed.
/// The implementation is intentionally a no-op in this port; see the note below.
/// </summary>
protected virtual void OnIndexOutputClosed(FSIndexOutput io)
{
    // LUCENENET specific: No such thing as "stale files" in .NET, since Flush(true) writes everything to disk before
    // our FileStream is disposed.
    //m_staleFiles.Add(io.name);
}
/// <summary>
/// In this port, only verifies that the directory is still open; the Java
/// fsync machinery is unnecessary here (see the retained notes below).
/// </summary>
public override void Sync(ICollection<string> names)
{
    EnsureOpen();
    // LUCENENET specific: No such thing as "stale files" in .NET, since Flush(true) writes everything to disk before
    // our FileStream is disposed. Therefore, there is nothing else to do in this method.
    //ISet<string> toSync = new HashSet<string>(names);
    //toSync.IntersectWith(m_staleFiles);
    //// LUCENENET specific: Fsync breaks concurrency here.
    //// Part of a solution suggested by Vincent Van Den Berghe: http://apache.markmail.org/message/hafnuhq2ydhfjmi2
    ////foreach (var name in toSync)
    ////{
    ////    Fsync(name);
    ////}
    //// fsync the directory itself, but only if there was any file fsynced before
    //// (otherwise it can happen that the directory does not yet exist)!
    //if (toSync.Count > 0)
    //{
    //    IOUtils.Fsync(m_directory.FullName, true);
    //}
    //m_staleFiles.ExceptWith(toSync);
}
/// <summary>
/// Returns a lock id derived from a polynomial hash (Java-style
/// <c>String.hashCode</c>) of this directory's canonical path.
/// </summary>
public override string GetLockID()
{
    EnsureOpen();
    string dirName; // name to be hashed
    try
    {
        dirName = m_directory.GetCanonicalPath();
    }
    catch (IOException e)
    {
        throw new Exception(e.ToString(), e);
    }
    int digest = 0;
    foreach (char ch in dirName)
    {
        digest = 31 * digest + ch;
    }
    return "lucene-" + digest.ToString("x", CultureInfo.InvariantCulture);
}
/// <summary>
/// Closes the store to future operations. </summary>
/// <param name="disposing"><c>true</c> when called from <c>Dispose()</c>; <c>false</c> from a finalizer.</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // NOTE(review): presumably checked by EnsureOpen(), which guards every
        // public operation in this class — confirm against the base class.
        IsOpen = false;
    }
}
/// <summary> the underlying filesystem directory </summary>
public virtual DirectoryInfo Directory
{
    get
    {
        EnsureOpen(); // reject access on a disposed directory
        return m_directory;
    }
}
/// <summary>
/// For debug output. </summary>
public override string ToString()
{
    return $"{GetType().Name}@{m_directory} lockFactory={LockFactory}";
}
/// <summary>
/// this setting has no effect anymore. </summary>
/// <exception cref="ArgumentException">If the value set is zero or negative.</exception>
[Obsolete("this is no longer used since Lucene 4.5.")]
public int ReadChunkSize
{
    get => chunkSize;
    set
    {
        // Kept only for backward compatibility; the value is validated and stored
        // but (per the Obsolete attribute) no longer consumed.
        if (value <= 0)
            throw new ArgumentException("chunkSize must be positive");
        this.chunkSize = value;
    }
}
/// <summary>
/// Writes output with <see cref="FileStream.Write(byte[], int, int)"/>
/// </summary>
// LUCENENET specific: Since FileStream does its own buffering, this class was refactored
// to do all checksum operations as well as writing to the FileStream. By doing this we eliminate
// the extra set of buffers that were only creating unnecessary memory allocations and copy operations.
protected class FSIndexOutput : BufferedIndexOutput
{
    // Buffer size passed both to the base class and to the FileStream.
    private const int CHUNK_SIZE = DEFAULT_BUFFER_SIZE;
    private readonly FSDirectory parent; // owning directory; notified via OnIndexOutputClosed on dispose
    internal readonly string name;       // file name relative to parent.m_directory
    private readonly FileStream file;
    private volatile bool isOpen; // remember if the file is open, so that we don't try to close it more than once
    private readonly CRC32 crc = new CRC32(); // running checksum of every byte written
    public FSIndexOutput(FSDirectory parent, string name)
        : base(CHUNK_SIZE, null)
    {
        this.parent = parent;
        this.name = name;
        // FileShare.ReadWrite allows readers to open the file while it is being written.
        file = new FileStream(
            path: Path.Combine(parent.m_directory.FullName, name),
            mode: FileMode.OpenOrCreate,
            access: FileAccess.Write,
            share: FileShare.ReadWrite,
            bufferSize: CHUNK_SIZE);
        isOpen = true;
    }
    /// <inheritdoc/>
    public override void WriteByte(byte b)
    {
        if (!isOpen)
            throw new ObjectDisposedException(nameof(FSIndexOutput));
        // Fold the byte into the checksum before writing so Checksum tracks all written bytes.
        crc.Update(b);
        file.WriteByte(b);
    }
    /// <inheritdoc/>
    public override void WriteBytes(byte[] b, int offset, int length)
    {
        if (!isOpen)
            throw new ObjectDisposedException(nameof(FSIndexOutput));
        crc.Update(b, offset, length);
        file.Write(b, offset, length);
    }
    /// <inheritdoc/>
    protected internal override void FlushBuffer(byte[] b, int offset, int size)
    {
        if (!isOpen)
            throw new ObjectDisposedException(nameof(FSIndexOutput));
        crc.Update(b, offset, size);
        file.Write(b, offset, size);
    }
    /// <inheritdoc/>
    [MethodImpl(MethodImplOptions.NoInlining)]
    public override void Flush()
    {
        if (!isOpen)
            throw new ObjectDisposedException(nameof(FSIndexOutput));
        file.Flush();
    }
    /// <inheritdoc/>
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            parent.OnIndexOutputClosed(this);
            // only close the file if it has not been closed yet
            if (isOpen)
            {
                IOException priorE = null;
                try
                {
                    // flushToDisk: true forces the OS to write buffered data to stable storage.
                    file.Flush(flushToDisk: true);
                }
                catch (IOException ioe)
                {
                    // Remember the flush failure but still dispose the stream below.
                    priorE = ioe;
                }
                finally
                {
                    isOpen = false;
                    IOUtils.DisposeWhileHandlingException(priorE, file);
                }
            }
        }
    }
    /// <summary>
    /// Random-access methods </summary>
    // NOTE(review): seeking does not rewind the running CRC, so Checksum after a
    // seek-and-overwrite may not match the final file contents — presumably
    // acceptable for this obsolete API; confirm before relying on it.
    [Obsolete("(4.1) this method will be removed in Lucene 5.0")]
    public override void Seek(long pos)
    {
        if (!isOpen)
            throw new ObjectDisposedException(nameof(FSIndexOutput));
        file.Seek(pos, SeekOrigin.Begin);
    }
    /// <inheritdoc/>
    public override long Length => file.Length;
    // LUCENENET NOTE: FileStream doesn't have a way to set length
    /// <inheritdoc/>
    public override long Checksum => crc.Value; // LUCENENET specific - need to override, since we are buffering locally
    /// <inheritdoc/>
    public override long GetFilePointer() // LUCENENET specific - need to override, since we are buffering locally
    {
        return file.Position;
    }
}
// LUCENENET specific: Fsync is pointless in .NET, since we are
// calling FileStream.Flush(true) before the stream is disposed
// which means we never need it at the point in Java where it is called.
//protected virtual void Fsync(string name)
//{
// IOUtils.Fsync(Path.Combine(m_directory.FullName, name), false);
//}
}
}
| |
//
// Copyright (c) 2004-2018 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
#region
using System;
using System.Collections.Generic;
using System.Linq;
using NLog.Config;
using NLog.Targets;
using Xunit;
#endregion
namespace NLog.UnitTests.Config
{
/// <summary>
/// Tests for the programmatic configuration API of <c>LoggingConfiguration</c>:
/// adding/removing targets and logging rules, and the string rendering of
/// <c>LoggingRule</c>.
/// </summary>
public class ConfigApiTests
{
    // Target added under a key different from its own Name: lookup uses the key.
    [Fact]
    public void AddTarget_testname()
    {
        var config = new LoggingConfiguration();
        config.AddTarget("name1", new FileTarget {Name = "File"});
        var allTargets = config.AllTargets;
        Assert.NotNull(allTargets);
        Assert.Single(allTargets);
        //maybe confusing, but the name of the target is not changed, only the one of the key.
        Assert.Equal("File", allTargets.First().Name);
        Assert.NotNull(config.FindTargetByName<FileTarget>("name1"));
        config.RemoveTarget("name1");
        allTargets = config.AllTargets;
        Assert.Empty(allTargets);
    }
    // Null key is rejected with ArgumentException.
    [Fact]
    public void AddTarget_WithName_NullNameParam()
    {
        var config = new LoggingConfiguration();
        // ex is unused; the Assert.Throws call itself is the assertion.
        Exception ex = Assert.Throws<ArgumentException>(() => config.AddTarget(name: null, target: new FileTarget { Name = "name1" }));
    }
    // Null target is rejected with ArgumentNullException.
    [Fact]
    public void AddTarget_WithName_NullTargetParam()
    {
        var config = new LoggingConfiguration();
        Exception ex = Assert.Throws<ArgumentNullException>(() => config.AddTarget(name: "Name1", target: null));
    }
    [Fact]
    public void AddTarget_TargetOnly_NullParam()
    {
        var config = new LoggingConfiguration();
        Exception ex = Assert.Throws<ArgumentNullException>(() => config.AddTarget(target: null));
    }
    // Key and target name differ: the target keeps its own Name.
    [Fact]
    public void AddTarget_testname_param()
    {
        var config = new LoggingConfiguration();
        config.AddTarget("name1", new FileTarget {Name = "name2"});
        var allTargets = config.AllTargets;
        Assert.NotNull(allTargets);
        Assert.Single(allTargets);
        //maybe confusing, but the name of the target is not changed, only the one of the key.
        Assert.Equal("name2", allTargets.First().Name);
        Assert.NotNull(config.FindTargetByName<FileTarget>("name1"));
    }
    // Single-argument overload: the target's own Name becomes the key.
    [Fact]
    public void AddTarget_testname_fromtarget()
    {
        var config = new LoggingConfiguration();
        config.AddTarget(new FileTarget {Name = "name2"});
        var allTargets = config.AllTargets;
        Assert.NotNull(allTargets);
        Assert.Single(allTargets);
        Assert.Equal("name2", allTargets.First().Name);
        Assert.NotNull(config.FindTargetByName<FileTarget>("name2"));
    }
    // Min/max rule enables exactly Info..Error (inclusive range).
    [Fact]
    public void AddRule_min_max()
    {
        var config = new LoggingConfiguration();
        config.AddTarget(new FileTarget {Name = "File"});
        config.AddRule(LogLevel.Info, LogLevel.Error, "File", "*a");
        Assert.NotNull(config.LoggingRules);
        Assert.Equal(1, config.LoggingRules.Count);
        var rule1 = config.LoggingRules.FirstOrDefault();
        Assert.NotNull(rule1);
        Assert.False(rule1.Final);
        Assert.Equal("*a", rule1.LoggerNamePattern);
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Fatal));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Error));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Warn));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Info));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Debug));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Trace));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Off));
    }
    // AddRuleForAllLevels enables every level except Off.
    [Fact]
    public void AddRule_all()
    {
        var config = new LoggingConfiguration();
        config.AddTarget(new FileTarget {Name = "File"});
        config.AddRuleForAllLevels("File", "*a");
        Assert.NotNull(config.LoggingRules);
        Assert.Equal(1, config.LoggingRules.Count);
        var rule1 = config.LoggingRules.FirstOrDefault();
        Assert.NotNull(rule1);
        Assert.False(rule1.Final);
        Assert.Equal("*a", rule1.LoggerNamePattern);
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Fatal));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Error));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Warn));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Info));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Debug));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Trace));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Off));
    }
    // AddRuleForOneLevel enables exactly the single requested level.
    [Fact]
    public void AddRule_onelevel()
    {
        var config = new LoggingConfiguration();
        config.AddTarget(new FileTarget {Name = "File"});
        config.AddRuleForOneLevel(LogLevel.Error, "File", "*a");
        Assert.NotNull(config.LoggingRules);
        Assert.Equal(1, config.LoggingRules.Count);
        var rule1 = config.LoggingRules.FirstOrDefault();
        Assert.NotNull(rule1);
        Assert.False(rule1.Final);
        Assert.Equal("*a", rule1.LoggerNamePattern);
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Fatal));
        Assert.True(rule1.IsLoggingEnabledForLevel(LogLevel.Error));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Warn));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Info));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Debug));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Trace));
        Assert.False(rule1.IsLoggingEnabledForLevel(LogLevel.Off));
    }
    // Adding a rule by target instance, then a second target with the same
    // name: AllTargets still contains a single entry for that name.
    [Fact]
    public void AddRule_with_target()
    {
        var config = new LoggingConfiguration();
        var fileTarget = new FileTarget {Name = "File"};
        config.AddRuleForOneLevel(LogLevel.Error, fileTarget, "*a");
        Assert.NotNull(config.LoggingRules);
        Assert.Equal(1, config.LoggingRules.Count);
        config.AddTarget(new FileTarget {Name = "File"});
        var allTargets = config.AllTargets;
        Assert.NotNull(allTargets);
        Assert.Single(allTargets);
        Assert.Equal("File", allTargets.First().Name);
        Assert.NotNull(config.FindTargetByName<FileTarget>("File"));
    }
    // Referencing an unregistered target name by string fails.
    [Fact]
    public void AddRule_missingtarget()
    {
        var config = new LoggingConfiguration();
        Assert.Throws<NLogConfigurationException>(() => config.AddRuleForOneLevel(LogLevel.Error, "File", "*a"));
    }
    // InitializeAll must not duplicate or drop targets.
    [Fact]
    public void CheckAllTargets()
    {
        var config = new LoggingConfiguration();
        var fileTarget = new FileTarget {Name = "File", FileName = "file"};
        config.AddRuleForOneLevel(LogLevel.Error, fileTarget, "*a");
        config.AddTarget(fileTarget);
        Assert.Single(config.AllTargets);
        Assert.Equal(fileTarget, config.AllTargets[0]);
        config.InitializeAll();
        Assert.Single(config.AllTargets);
        Assert.Equal(fileTarget, config.AllTargets[0]);
    }
    // ToString with only a minimum level: min..Fatal is rendered.
    [Fact]
    public void LogRuleToStringTest_min()
    {
        var target = new FileTarget {Name = "file1"};
        var loggingRule = new LoggingRule("*", LogLevel.Error, target);
        var s = loggingRule.ToString();
        Assert.Equal("logNamePattern: (:All) levels: [ Error Fatal ] appendTo: [ file1 ]", s);
    }
    [Fact]
    public void LogRuleToStringTest_minAndMax()
    {
        var target = new FileTarget {Name = "file1"};
        var loggingRule = new LoggingRule("*", LogLevel.Debug, LogLevel.Error, target);
        var s = loggingRule.ToString();
        Assert.Equal("logNamePattern: (:All) levels: [ Debug Info Warn Error ] appendTo: [ file1 ]", s);
    }
    // No levels supplied: empty level list is rendered.
    [Fact]
    public void LogRuleToStringTest_none()
    {
        var target = new FileTarget {Name = "file1"};
        var loggingRule = new LoggingRule("*", target);
        var s = loggingRule.ToString();
        Assert.Equal("logNamePattern: (:All) levels: [ ] appendTo: [ file1 ]", s);
    }
    // A non-wildcard pattern is rendered with the Equals match mode.
    [Fact]
    public void LogRuleToStringTest_filter()
    {
        var target = new FileTarget {Name = "file1"};
        var loggingRule = new LoggingRule("namespace.comp1", target);
        var s = loggingRule.ToString();
        Assert.Equal("logNamePattern: (namespace.comp1:Equals) levels: [ ] appendTo: [ file1 ]", s);
    }
    [Fact]
    public void LogRuleToStringTest_multiple_targets()
    {
        var target = new FileTarget {Name = "file1"};
        var target2 = new FileTarget {Name = "file2"};
        var loggingRule = new LoggingRule("namespace.comp1", target);
        loggingRule.Targets.Add(target2);
        var s = loggingRule.ToString();
        Assert.Equal("logNamePattern: (namespace.comp1:Equals) levels: [ ] appendTo: [ file1 file2 ]", s);
    }
    // SetLoggingLevels enables the given inclusive range on a fresh rule.
    [Fact]
    public void LogRuleSetLoggingLevels_enables()
    {
        var rule = new LoggingRule();
        rule.SetLoggingLevels(LogLevel.Warn, LogLevel.Fatal);
        Assert.Equal(rule.Levels, new[] { LogLevel.Warn, LogLevel.Error, LogLevel.Fatal });
    }
    // SetLoggingLevels also disables any previously-enabled levels outside the range.
    [Fact]
    public void LogRuleSetLoggingLevels_disables()
    {
        var rule = new LoggingRule();
        rule.EnableLoggingForLevels(LogLevel.MinLevel, LogLevel.MaxLevel);
        rule.SetLoggingLevels(LogLevel.Warn, LogLevel.Fatal);
        Assert.Equal(rule.Levels, new[] { LogLevel.Warn, LogLevel.Error, LogLevel.Fatal });
    }
    // DisableLoggingForLevels removes an inclusive range, leaving the rest enabled.
    [Fact]
    public void LogRuleDisableLoggingLevels()
    {
        var rule = new LoggingRule();
        rule.EnableLoggingForLevels(LogLevel.MinLevel, LogLevel.MaxLevel);
        rule.DisableLoggingForLevels(LogLevel.Warn, LogLevel.Fatal);
        Assert.Equal(rule.Levels, new[] { LogLevel.Trace, LogLevel.Debug, LogLevel.Info });
    }
}
}
| |
//
// Author:
// Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2015 Jb Evain
// Copyright (c) 2008 - 2011 Novell, Inc.
//
// Licensed under the MIT/X11 license.
//
using System;
using System.Collections.Generic;
using System.IO;
using Mono.Collections.Generic;
using Microsoft.Cci.Pdb;
using Mono.Cecil.Cil;
namespace Mono.Cecil.Pdb {
// Reads method debug information (sequence points, scopes, locals, constants,
// imports, iterator/async metadata) from a native Windows PDB via the
// Microsoft.Cci.Pdb reader.
public class NativePdbReader : ISymbolReader {
	// Age and signature GUID read from the image's CodeView debug directory;
	// used to verify the PDB matches the image.
	int age;
	Guid guid;
	readonly Disposable<Stream> pdb_file;
	// Caches keyed so each document/function/import is materialized once.
	readonly Dictionary<string, Document> documents = new Dictionary<string, Document> ();
	readonly Dictionary<uint, PdbFunction> functions = new Dictionary<uint, PdbFunction> ();
	readonly Dictionary<PdbScope, ImportDebugInformation> imports = new Dictionary<PdbScope, ImportDebugInformation> ();
	internal NativePdbReader (Disposable<Stream> file)
	{
		this.pdb_file = file;
	}
#if !READ_ONLY
	public ISymbolWriterProvider GetWriterProvider ()
	{
		return new NativePdbWriterProvider ();
	}
#endif
	/*
	uint Magic = 0x53445352;
	Guid Signature;
	uint Age;
	string FileName;
	*/
	// Validates the CodeView ("RSDS") entry of the image debug header and, on
	// success, loads the PDB's functions. Returns false when the header does
	// not describe a matching native PDB.
	public bool ProcessDebugHeader (ImageDebugHeader header)
	{
		if (!header.HasEntries)
			return false;
		var entry = header.GetCodeViewEntry ();
		if (entry == null)
			return false;
		var directory = entry.Directory;
		if (directory.Type != ImageDebugType.CodeView)
			return false;
		if (directory.MajorVersion != 0 || directory.MinorVersion != 0)
			return false;
		var data = entry.Data;
		// Minimum layout: 4-byte magic + 16-byte GUID + 4-byte age (see comment above).
		if (data.Length < 24)
			return false;
		var magic = ReadInt32 (data, 0);
		if (magic != 0x53445352) // "RSDS"
			return false;
		var guid_bytes = new byte [16];
		Buffer.BlockCopy (data, 4, guid_bytes, 0, 16);
		this.guid = new Guid (guid_bytes);
		this.age = ReadInt32 (data, 20);
		return PopulateFunctions ();
	}
	// Little-endian int32 read from a byte array.
	static int ReadInt32 (byte [] bytes, int start)
	{
		return (bytes [start]
			| (bytes [start + 1] << 8)
			| (bytes [start + 2] << 16)
			| (bytes [start + 3] << 24));
	}
	// Loads all functions from the PDB into the token-indexed cache, disposing
	// the PDB stream when done. Returns false if the PDB's GUID does not match
	// the one from the image debug header.
	bool PopulateFunctions ()
	{
		using (pdb_file) {
			Dictionary<uint, PdbTokenLine> tokenToSourceMapping;
			string sourceServerData;
			// NOTE: these locals shadow the fields of the same name; only the
			// GUID is compared below (the local age is not checked).
			int age;
			Guid guid;
			var funcs = PdbFile.LoadFunctions (pdb_file.value, out tokenToSourceMapping, out sourceServerData, out age, out guid);
			if (this.guid != guid)
				return false;
			foreach (PdbFunction function in funcs)
				functions.Add (function.token, function);
		}
		return true;
	}
	// Builds the MethodDebugInformation for one method: sequence points, the
	// scope tree (with locals/constants/imports), and iterator/async state
	// machine metadata when present. Returns null if the PDB has no entry for
	// the method's token.
	public MethodDebugInformation Read (MethodDefinition method)
	{
		var method_token = method.MetadataToken;
		PdbFunction function;
		if (!functions.TryGetValue (method_token.ToUInt32 (), out function))
			return null;
		var symbol = new MethodDebugInformation (method);
		ReadSequencePoints (function, symbol);
		// Root scope: either the first PDB scope or a synthetic scope spanning the whole body.
		symbol.scope = !function.scopes.IsNullOrEmpty ()
			? ReadScopeAndLocals (function.scopes [0], symbol)
			: new ScopeDebugInformation { Start = new InstructionOffset (0), End = new InstructionOffset ((int) function.length) };
		// Using/import info may be shared with another method; resolve it via that method's token.
		if (function.tokenOfMethodWhoseUsingInfoAppliesToThisMethod != method.MetadataToken.ToUInt32 () && function.tokenOfMethodWhoseUsingInfoAppliesToThisMethod != 0)
			symbol.scope.import = GetImport (function.tokenOfMethodWhoseUsingInfoAppliesToThisMethod, method.Module);
		// Remaining top-level scopes are nested under the root, positioned by offset range.
		if (function.scopes.Length > 1) {
			for (int i = 1; i < function.scopes.Length; i++) {
				var s = ReadScopeAndLocals (function.scopes [i], symbol);
				if (!AddScope (symbol.scope.Scopes, s))
					symbol.scope.Scopes.Add (s);
			}
		}
		if (function.iteratorScopes != null) {
			var state_machine = new StateMachineScopeDebugInformation ();
			foreach (var iterator_scope in function.iteratorScopes) {
				state_machine.Scopes.Add (new StateMachineScope ((int) iterator_scope.Offset, (int) (iterator_scope.Offset + iterator_scope.Length + 1)));
			}
			symbol.CustomDebugInformations.Add (state_machine);
		}
		if (function.synchronizationInformation != null) {
			var async_debug_info = new AsyncMethodBodyDebugInformation ((int) function.synchronizationInformation.GeneratedCatchHandlerOffset);
			foreach (var synchronization_point in function.synchronizationInformation.synchronizationPoints) {
				async_debug_info.Yields.Add (new InstructionOffset ((int) synchronization_point.SynchronizeOffset));
				async_debug_info.Resumes.Add (new InstructionOffset ((int) synchronization_point.ContinuationOffset));
				async_debug_info.ResumeMethods.Add (method);
			}
			symbol.CustomDebugInformations.Add (async_debug_info);
			symbol.StateMachineKickOffMethod = (MethodDefinition) method.Module.LookupToken ((int) function.synchronizationInformation.kickoffMethodToken);
		}
		return symbol;
	}
	// Converts an array of PDB scopes, skipping null entries.
	Collection<ScopeDebugInformation> ReadScopeAndLocals (PdbScope [] scopes, MethodDebugInformation info)
	{
		var symbols = new Collection<ScopeDebugInformation> (scopes.Length);
		foreach (PdbScope scope in scopes)
			if (scope != null)
				symbols.Add (ReadScopeAndLocals (scope, info));
		return symbols;
	}
	// Converts one PDB scope: offset range, local variables, constants,
	// used-namespace imports, and (recursively) child scopes.
	ScopeDebugInformation ReadScopeAndLocals (PdbScope scope, MethodDebugInformation info)
	{
		var parent = new ScopeDebugInformation ();
		parent.Start = new InstructionOffset ((int) scope.offset);
		parent.End = new InstructionOffset ((int) (scope.offset + scope.length));
		if (!scope.slots.IsNullOrEmpty ()) {
			parent.variables = new Collection<VariableDebugInformation> (scope.slots.Length);
			foreach (PdbSlot slot in scope.slots) {
				if (slot.flags == 1) // parameter names
					continue;
				var index = (int) slot.slot;
				var variable = new VariableDebugInformation (index, slot.name);
				if (slot.flags == 4)
					variable.IsDebuggerHidden = true;
				parent.variables.Add (variable);
			}
		}
		if (!scope.constants.IsNullOrEmpty ()) {
			parent.constants = new Collection<ConstantDebugInformation> (scope.constants.Length);
			foreach (var constant in scope.constants) {
				var type = info.Method.Module.Read (constant, (c, r) => r.ReadConstantSignature (new MetadataToken (c.token)));
				var value = constant.value;
				// Object "null" is encoded as integer
				if (type != null && !type.IsValueType && value is int && (int) value == 0)
					value = null;
				parent.constants.Add (new ConstantDebugInformation (constant.name, type, value));
			}
		}
		if (!scope.usedNamespaces.IsNullOrEmpty ()) {
			// Imports are cached per scope so shared scopes resolve to the same instance.
			ImportDebugInformation import;
			if (imports.TryGetValue (scope, out import)) {
				parent.import = import;
			} else {
				import = GetImport (scope, info.Method.Module);
				imports.Add (scope, import);
				parent.import = import;
			}
		}
		parent.scopes = ReadScopeAndLocals (scope.scopes, info);
		return parent;
	}
	// Inserts scope into the first (sub-)scope whose offset range contains it;
	// returns false when no enclosing scope is found.
	static bool AddScope (Collection<ScopeDebugInformation> scopes, ScopeDebugInformation scope)
	{
		foreach (var sub_scope in scopes) {
			if (sub_scope.HasScopes && AddScope (sub_scope.Scopes, scope))
				return true;
			if (scope.Start.Offset >= sub_scope.Start.Offset && scope.End.Offset <= sub_scope.End.Offset) {
				sub_scope.Scopes.Add (scope);
				return true;
			}
		}
		return false;
	}
	// Resolves the import information attached to another method's (single)
	// scope, caching the result. Returns null when the token is unknown or the
	// method does not have exactly one scope.
	ImportDebugInformation GetImport (uint token, ModuleDefinition module)
	{
		PdbFunction function;
		if (!functions.TryGetValue (token, out function))
			return null;
		if (function.scopes.Length != 1)
			return null;
		var scope = function.scopes [0];
		ImportDebugInformation import;
		if (imports.TryGetValue (scope, out import))
			return import;
		import = GetImport (scope, module);
		imports.Add (scope, import);
		return import;
	}
	// Decodes the PDB "used namespace" strings into import targets. The first
	// character selects the kind: 'U' namespace, 'T' type, 'A' alias
	// ("Aalias UtargetNs" / "Aalias TtargetType"), '*' namespace, '@' only
	// "P:"-prefixed entries are kept as namespaces.
	static ImportDebugInformation GetImport (PdbScope scope, ModuleDefinition module)
	{
		if (scope.usedNamespaces.IsNullOrEmpty ())
			return null;
		var import = new ImportDebugInformation ();
		foreach (var used_namespace in scope.usedNamespaces) {
			if (string.IsNullOrEmpty (used_namespace))
				continue;
			ImportTarget target = null;
			var value = used_namespace.Substring (1);
			switch (used_namespace [0]) {
			case 'U':
				target = new ImportTarget (ImportTargetKind.ImportNamespace) { @namespace = value };
				break;
			case 'T': {
				var type = module.GetType (value, runtimeName: true);
				if (type != null)
					target = new ImportTarget (ImportTargetKind.ImportType) { type = type };
				break;
			}
			case 'A':
				var index = used_namespace.IndexOf (' ');
				if (index < 0) {
					// Malformed alias entry: fall back to treating the whole string as a namespace.
					target = new ImportTarget (ImportTargetKind.ImportNamespace) { @namespace = used_namespace };
					break;
				}
				var alias_value = used_namespace.Substring (1, index - 1);
				var alias_target_value = used_namespace.Substring (index + 2);
				switch (used_namespace [index + 1]) {
				case 'U':
					target = new ImportTarget (ImportTargetKind.DefineNamespaceAlias) { alias = alias_value, @namespace = alias_target_value };
					break;
				case 'T':
					var type = module.GetType (alias_target_value, runtimeName: true);
					if (type != null)
						target = new ImportTarget (ImportTargetKind.DefineTypeAlias) { alias = alias_value, type = type };
					break;
				}
				break;
			case '*':
				target = new ImportTarget (ImportTargetKind.ImportNamespace) { @namespace = value };
				break;
			case '@':
				if (!value.StartsWith ("P:"))
					continue;
				target = new ImportTarget (ImportTargetKind.ImportNamespace) { @namespace = value.Substring (2) };
				break;
			}
			if (target != null)
				import.Targets.Add (target);
		}
		return import;
	}
	// Populates info.sequence_points from the function's line records, if any.
	void ReadSequencePoints (PdbFunction function, MethodDebugInformation info)
	{
		if (function.lines == null)
			return;
		info.sequence_points = new Collection<SequencePoint> ();
		foreach (PdbLines lines in function.lines)
			ReadLines (lines, info);
	}
	void ReadLines (PdbLines lines, MethodDebugInformation info)
	{
		var document = GetDocument (lines.file);
		foreach (var line in lines.lines)
			ReadLine (line, document, info);
	}
	// Converts one PDB line record into a SequencePoint on the given document.
	static void ReadLine (PdbLine line, Document document, MethodDebugInformation info)
	{
		var sequence_point = new SequencePoint ((int) line.offset, document);
		sequence_point.StartLine = (int) line.lineBegin;
		sequence_point.StartColumn = (int) line.colBegin;
		sequence_point.EndLine = (int) line.lineEnd;
		sequence_point.EndColumn = (int) line.colEnd;
		info.sequence_points.Add (sequence_point);
	}
	// Returns the cached Document for a source file, creating it on first use.
	Document GetDocument (PdbSource source)
	{
		string name = source.name;
		Document document;
		if (documents.TryGetValue (name, out document))
			return document;
		document = new Document (name) {
			Language = source.language.ToLanguage (),
			LanguageVendor = source.vendor.ToVendor (),
			Type = source.doctype.ToType (),
		};
		documents.Add (name, document);
		return document;
	}
	public void Dispose ()
	{
		pdb_file.Dispose ();
	}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
namespace System.Reflection
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Globalization;
using System.Runtime;
using System.Runtime.ConstrainedExecution;
using System.Runtime.InteropServices;
#if FEATURE_REMOTING
using System.Runtime.Remoting.Metadata;
#endif //FEATURE_REMOTING
using System.Runtime.Serialization;
using System.Security;
using System.Security.Permissions;
using System.Threading;
using MemberListType = System.RuntimeType.MemberListType;
using RuntimeTypeCache = System.RuntimeType.RuntimeTypeCache;
using System.Runtime.CompilerServices;
// Abstract base class for constructor reflection. Exposes the metadata names
// of instance/type constructors and the Invoke overloads; also implements the
// legacy _ConstructorInfo COM-interop interface (non-CoreCLR builds only).
[Serializable]
[ClassInterface(ClassInterfaceType.None)]
[ComDefaultInterface(typeof(_ConstructorInfo))]
#pragma warning disable 618
[PermissionSetAttribute(SecurityAction.InheritanceDemand, Name = "FullTrust")]
#pragma warning restore 618
[System.Runtime.InteropServices.ComVisible(true)]
public abstract class ConstructorInfo : MethodBase, _ConstructorInfo
{
    #region Static Members
    // Metadata name of an instance constructor.
    [System.Runtime.InteropServices.ComVisible(true)]
    public readonly static String ConstructorName = ".ctor";
    // Metadata name of a type (static) constructor.
    [System.Runtime.InteropServices.ComVisible(true)]
    public readonly static String TypeConstructorName = ".cctor";
    #endregion
    #region Constructor
    protected ConstructorInfo() { }
    #endregion
    #if !FEATURE_CORECLR
    public static bool operator ==(ConstructorInfo left, ConstructorInfo right)
    {
        if (ReferenceEquals(left, right))
            return true;
        // Runtime-provided constructors are unique objects, so anything that
        // was not reference-equal above cannot be equal; also handles nulls.
        if ((object)left == null || (object)right == null ||
            left is RuntimeConstructorInfo || right is RuntimeConstructorInfo)
        {
            return false;
        }
        // User-defined subclasses decide equality themselves.
        return left.Equals(right);
    }
    public static bool operator !=(ConstructorInfo left, ConstructorInfo right)
    {
        return !(left == right);
    }
    #endif // !FEATURE_CORECLR
    public override bool Equals(object obj)
    {
        return base.Equals(obj);
    }
    public override int GetHashCode()
    {
        return base.GetHashCode();
    }
    #region Internal Members
    // Overridden by runtime subclasses; the base has no return type to report.
    internal virtual Type GetReturnType() { throw new NotImplementedException(); }
    #endregion
    #region MemberInfo Overrides
    [System.Runtime.InteropServices.ComVisible(true)]
    public override MemberTypes MemberType { get { return System.Reflection.MemberTypes.Constructor; } }
    #endregion
    #region Public Abstract\Virtual Members
    public abstract Object Invoke(BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture);
    #endregion
    #region Public Members
    [DebuggerStepThroughAttribute]
    [Diagnostics.DebuggerHidden]
    public Object Invoke(Object[] parameters)
    {
        // Theoretically we should set up a LookForMyCaller stack mark here and pass that along.
        // But to maintain backward compatibility we can't switch to calling an
        // internal overload that takes a stack mark.
        // Fortunately the stack walker skips all the reflection invocation frames including this one.
        // So this method will never be returned by the stack walker as the caller.
        // See SystemDomain::CallersMethodCallbackWithStackMark in AppDomain.cpp.
        return Invoke(BindingFlags.Default, null, parameters, null);
    }
    #endregion
    #if !FEATURE_CORECLR
    #region COM Interop Support
    // Explicit _ConstructorInfo implementations simply forward to the managed
    // surface; the dispatch-style members are not supported.
    Type _ConstructorInfo.GetType()
    {
        return base.GetType();
    }
    Object _ConstructorInfo.Invoke_2(Object obj, BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture)
    {
        return Invoke(obj, invokeAttr, binder, parameters, culture);
    }
    Object _ConstructorInfo.Invoke_3(Object obj, Object[] parameters)
    {
        return Invoke(obj, parameters);
    }
    Object _ConstructorInfo.Invoke_4(BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture)
    {
        return Invoke(invokeAttr, binder, parameters, culture);
    }
    Object _ConstructorInfo.Invoke_5(Object[] parameters)
    {
        return Invoke(parameters);
    }
    void _ConstructorInfo.GetTypeInfoCount(out uint pcTInfo)
    {
        throw new NotImplementedException();
    }
    void _ConstructorInfo.GetTypeInfo(uint iTInfo, uint lcid, IntPtr ppTInfo)
    {
        throw new NotImplementedException();
    }
    void _ConstructorInfo.GetIDsOfNames([In] ref Guid riid, IntPtr rgszNames, uint cNames, uint lcid, IntPtr rgDispId)
    {
        throw new NotImplementedException();
    }
    // If you implement this method, make sure to include _ConstructorInfo.Invoke in VM\DangerousAPIs.h and
    // include _ConstructorInfo in SystemDomain::IsReflectionInvocationMethod in AppDomain.cpp.
    void _ConstructorInfo.Invoke(uint dispIdMember, [In] ref Guid riid, uint lcid, short wFlags, IntPtr pDispParams, IntPtr pVarResult, IntPtr pExcepInfo, IntPtr puArgErr)
    {
        throw new NotImplementedException();
    }
    #endregion
    #endif
}
[Serializable]
// Runtime-backed implementation of ConstructorInfo for constructors declared in
// RuntimeTypes. State is populated from the runtime method handle and cached.
internal sealed class RuntimeConstructorInfo : ConstructorInfo, ISerializable, IRuntimeMethodInfo
{
#region Private Data Members
// Declaring type; volatile because it can be published from multiple threads.
private volatile RuntimeType m_declaringType;
// Cache of reflected-type information (also answers IsGlobal / constructor lists).
private RuntimeTypeCache m_reflectedTypeCache;
// Lazily built result of ToString(); see ToString() below.
private string m_toString;
private ParameterInfo[] m_parameters = null; // Created lazily when GetParameters() is called.
#pragma warning disable 169
private object _empty1; // These empties are used to ensure that RuntimeConstructorInfo and RuntimeMethodInfo are have a layout which is sufficiently similar
private object _empty2;
private object _empty3;
#pragma warning restore 169
// Raw runtime method handle value (wrapped on demand by IRuntimeMethodInfo.Value).
private IntPtr m_handle;
private MethodAttributes m_methodAttributes;
private BindingFlags m_bindingFlags;
// Lazily created signature; volatile for safe one-time publication.
private volatile Signature m_signature;
// Cached invocation flags; INVOCATION_FLAGS_INITIALIZED marks the cache as valid.
private INVOCATION_FLAGS m_invocationFlags;
#if FEATURE_APPX
// Returns true when this ctor is a framework API that is NOT approved for use
// from Windows 8 Profile (immersive) apps; used by the profile API check.
private bool IsNonW8PFrameworkAPI()
{
// Array pseudo-constructors are always allowed.
if (DeclaringType.IsArray && IsPublic && !IsStatic)
return false;
RuntimeAssembly rtAssembly = GetRuntimeAssembly();
if (rtAssembly.IsFrameworkAssembly())
{
int ctorToken = rtAssembly.InvocableAttributeCtorToken;
// Framework member without the "invocable" attribute => blocked.
if (System.Reflection.MetadataToken.IsNullToken(ctorToken) ||
!CustomAttribute.IsAttributeDefined(GetRuntimeModule(), MetadataToken, ctorToken))
return true;
}
// Also blocked when the declaring type itself is a non-W8P framework API.
if (GetRuntimeType().IsNonW8PFrameworkAPI())
return true;
return false;
}
internal override bool IsDynamicallyInvokable
{
get
{
return !AppDomain.ProfileAPICheck || !IsNonW8PFrameworkAPI();
}
}
#endif // FEATURE_APPX
// Computes (once) and caches the flags that drive Invoke(): whether invocation is
// allowed at all, whether security demands are needed, and special-case markers.
internal INVOCATION_FLAGS InvocationFlags
{
[System.Security.SecuritySafeCritical]
get
{
if ((m_invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_INITIALIZED) == 0)
{
INVOCATION_FLAGS invocationFlags = INVOCATION_FLAGS.INVOCATION_FLAGS_IS_CTOR; // this is a given
Type declaringType = DeclaringType;
//
// first take care of all the NO_INVOKE cases.
if ( declaringType == typeof(void) ||
(declaringType != null && declaringType.ContainsGenericParameters) ||
((CallingConvention & CallingConventions.VarArgs) == CallingConventions.VarArgs) ||
((Attributes & MethodAttributes.RequireSecObject) == MethodAttributes.RequireSecObject))
{
// We don't need other flags if this method cannot be invoked
invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NO_INVOKE;
}
else if (IsStatic || declaringType != null && declaringType.IsAbstract)
{
// .cctors and ctors of abstract types can be Invoke(obj,...)'d on an
// existing instance but cannot be used to create a new instance.
invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NO_CTOR_INVOKE;
}
else
{
// this should be an invocable method, determine the other flags that participate in invocation
invocationFlags |= RuntimeMethodHandle.GetSecurityFlags(this);
if ( (invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY) == 0 &&
((Attributes & MethodAttributes.MemberAccessMask) != MethodAttributes.Public ||
(declaringType != null && declaringType.NeedsReflectionSecurityCheck)) )
{
// If method is non-public, or declaring type is not visible
invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY;
}
// Check for attempt to create a delegate class, we demand unmanaged
// code permission for this since it's hard to validate the target address.
if (typeof(Delegate).IsAssignableFrom(DeclaringType))
invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_IS_DELEGATE_CTOR;
}
#if FEATURE_APPX
if (AppDomain.ProfileAPICheck && IsNonW8PFrameworkAPI())
invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NON_W8P_FX_API;
#endif // FEATURE_APPX
// Publish the cached value; INITIALIZED marks the computation as done.
m_invocationFlags = invocationFlags | INVOCATION_FLAGS.INVOCATION_FLAGS_INITIALIZED;
}
return m_invocationFlags;
}
}
#endregion
#region Constructor
/// <summary>
/// Builds a RuntimeConstructorInfo from a runtime handle plus pre-computed
/// attributes/binding flags (callers supply values consistent with the handle).
/// </summary>
[System.Security.SecurityCritical] // auto-generated
internal RuntimeConstructorInfo(
RuntimeMethodHandleInternal handle, RuntimeType declaringType, RuntimeTypeCache reflectedTypeCache,
MethodAttributes methodAttributes, BindingFlags bindingFlags)
{
Contract.Ensures(methodAttributes == RuntimeMethodHandle.GetAttributes(handle));
m_bindingFlags = bindingFlags;
m_reflectedTypeCache = reflectedTypeCache;
m_declaringType = declaringType;
m_handle = handle.Value;
m_methodAttributes = methodAttributes;
}
#endregion
#if FEATURE_REMOTING
#region Legacy Remoting Cache
// The size of CachedData is accounted for by BaseObjectWithCachedData in object.h.
// This member is currently being used by Remoting for caching remoting data. If you
// need to cache data here, talk to the Remoting team to work out a mechanism, so that
// both caching systems can happily work together.
private RemotingMethodCachedData m_cachedData;
internal RemotingMethodCachedData RemotingCache
{
get
{
// This grabs an internal copy of m_cachedData and uses
// that instead of looking at m_cachedData directly because
// the cache may get cleared asynchronously. This prevents
// us from having to take a lock.
RemotingMethodCachedData cache = m_cachedData;
if (cache == null)
{
cache = new RemotingMethodCachedData(this);
// Lock-free publish: only the first writer wins; everyone uses that instance.
RemotingMethodCachedData ret = Interlocked.CompareExchange(ref m_cachedData, cache, null);
if (ret != null)
cache = ret;
}
return cache;
}
}
#endregion
#endif //FEATURE_REMOTING
#region NonPublic Methods
// Wraps the raw IntPtr handle for callers that need the internal handle type.
RuntimeMethodHandleInternal IRuntimeMethodInfo.Value
{
[System.Security.SecuritySafeCritical]
get
{
return new RuntimeMethodHandleInternal(m_handle);
}
}
// Identity comparison used by the member-info cache: two wrappers are the same
// member iff they wrap the same runtime handle.
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
internal override bool CacheEquals(object o)
{
RuntimeConstructorInfo m = o as RuntimeConstructorInfo;
if ((object)m == null)
return false;
return m.m_handle == m_handle;
}
// Lazily created signature. Benign race: concurrent initializers may each build
// a Signature, but all results are equivalent, so no lock is taken.
private Signature Signature
{
get
{
if (m_signature == null)
m_signature = new Signature(this, m_declaringType);
return m_signature;
}
}
private RuntimeType ReflectedTypeInternal
{
get
{
return m_reflectedTypeCache.GetRuntimeType();
}
}
// Validates that 'target' is compatible with this member, throwing TargetException
// with the appropriate resource message on mismatch or missing target.
private void CheckConsistency(Object target)
{
if (target == null && IsStatic)
return;
if (!m_declaringType.IsInstanceOfType(target))
{
if (target == null)
throw new TargetException(Environment.GetResourceString("RFLCT.Targ_StatMethReqTarg"));
throw new TargetException(Environment.GetResourceString("RFLCT.Targ_ITargMismatch"));
}
}
internal BindingFlags BindingFlags { get { return m_bindingFlags; } }
// Differs from MethodHandle in that it will return a valid handle even for reflection only loaded types
internal RuntimeMethodHandle GetMethodHandle()
{
return new RuntimeMethodHandle(this);
}
// True when the declaring type exposes more than one constructor with this name.
internal bool IsOverloaded
{
get
{
return m_reflectedTypeCache.GetConstructorList(MemberListType.CaseSensitive, Name).Length > 1;
}
}
#endregion
#region Object Overrides
public override String ToString()
{
// "Void" really doesn't make sense here. But we'll keep it for compat reasons.
// Benign race on m_toString: concurrent callers may each compute the string.
if (m_toString == null)
m_toString = "Void " + FormatNameAndSig();
return m_toString;
}
#endregion
#region ICustomAttributeProvider
// 'inherit' is ignored for constructors: attributes are never inherited here.
public override Object[] GetCustomAttributes(bool inherit)
{
// typeof(object) acts as the "match any attribute type" filter.
return CustomAttribute.GetCustomAttributes(this, typeof(object) as RuntimeType);
}
public override Object[] GetCustomAttributes(Type attributeType, bool inherit)
{
if (attributeType == null)
throw new ArgumentNullException("attributeType");
Contract.EndContractBlock();
RuntimeType attributeRuntimeType = attributeType.UnderlyingSystemType as RuntimeType;
if (attributeRuntimeType == null)
throw new ArgumentException(Environment.GetResourceString("Arg_MustBeType"),"attributeType");
return CustomAttribute.GetCustomAttributes(this, attributeRuntimeType);
}
[System.Security.SecuritySafeCritical] // auto-generated
public override bool IsDefined(Type attributeType, bool inherit)
{
if (attributeType == null)
throw new ArgumentNullException("attributeType");
Contract.EndContractBlock();
RuntimeType attributeRuntimeType = attributeType.UnderlyingSystemType as RuntimeType;
if (attributeRuntimeType == null)
throw new ArgumentException(Environment.GetResourceString("Arg_MustBeType"),"attributeType");
return CustomAttribute.IsDefined(this, attributeRuntimeType);
}
public override IList<CustomAttributeData> GetCustomAttributesData()
{
return CustomAttributeData.GetCustomAttributesInternal(this);
}
#endregion
#region MemberInfo Overrides
public override String Name
{
[System.Security.SecuritySafeCritical] // auto-generated
get { return RuntimeMethodHandle.GetName(this); }
}
[System.Runtime.InteropServices.ComVisible(true)]
public override MemberTypes MemberType { get { return MemberTypes.Constructor; } }
// Global (module-level) members report a null declaring/reflected type.
public override Type DeclaringType
{
get
{
return m_reflectedTypeCache.IsGlobal ? null : m_declaringType;
}
}
public override Type ReflectedType
{
get
{
return m_reflectedTypeCache.IsGlobal ? null : ReflectedTypeInternal;
}
}
public override int MetadataToken
{
[System.Security.SecuritySafeCritical] // auto-generated
get { return RuntimeMethodHandle.GetMethodDef(this); }
}
public override Module Module
{
get { return GetRuntimeModule(); }
}
// Note: unlike DeclaringType, these always return the backing runtime objects.
internal RuntimeType GetRuntimeType() { return m_declaringType; }
internal RuntimeModule GetRuntimeModule() { return RuntimeTypeHandle.GetModule(m_declaringType); }
internal RuntimeAssembly GetRuntimeAssembly() { return GetRuntimeModule().GetRuntimeAssembly(); }
#endregion
#region MethodBase Overrides
// This seems to always returns System.Void.
internal override Type GetReturnType() { return Signature.ReturnType; }
// Returns the cached parameter array WITHOUT copying; callers must not mutate it.
[System.Security.SecuritySafeCritical] // auto-generated
internal override ParameterInfo[] GetParametersNoCopy()
{
if (m_parameters == null)
m_parameters = RuntimeParameterInfo.GetParameters(this, this, Signature);
return m_parameters;
}
// Public variant: returns a defensive copy so callers can't corrupt the cache.
[Pure]
public override ParameterInfo[] GetParameters()
{
ParameterInfo[] parameters = GetParametersNoCopy();
if (parameters.Length == 0)
return parameters;
ParameterInfo[] ret = new ParameterInfo[parameters.Length];
Array.Copy(parameters, ret, parameters.Length);
return ret;
}
public override MethodImplAttributes GetMethodImplementationFlags()
{
return RuntimeMethodHandle.GetImplAttributes(this);
}
public override RuntimeMethodHandle MethodHandle
{
get
{
// Handles are not available for reflection-only loaded members.
Type declaringType = DeclaringType;
if ((declaringType == null && Module.Assembly.ReflectionOnly) || declaringType is ReflectionOnlyType)
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_NotAllowedInReflectionOnly"));
return new RuntimeMethodHandle(this);
}
}
public override MethodAttributes Attributes
{
get
{
return m_methodAttributes;
}
}
public override CallingConventions CallingConvention
{
get
{
return Signature.CallingConvention;
}
}
#endregion
// Validates that instances of 'declaringType' can be created via this code path,
// throwing the historically-mandated exception type for each disallowed case.
// The specific exception types below are part of the public contract.
internal static void CheckCanCreateInstance(Type declaringType, bool isVarArg)
{
if (declaringType == null)
throw new ArgumentNullException("declaringType");
Contract.EndContractBlock();
// ctor is ReflectOnly
if (declaringType is ReflectionOnlyType)
throw new InvalidOperationException(Environment.GetResourceString("Arg_ReflectionOnlyInvoke"));
// ctor is declared on interface class
else if (declaringType.IsInterface)
throw new MemberAccessException(
String.Format(CultureInfo.CurrentUICulture, Environment.GetResourceString("Acc_CreateInterfaceEx"), declaringType));
// ctor is on an abstract class
else if (declaringType.IsAbstract)
throw new MemberAccessException(
String.Format(CultureInfo.CurrentUICulture, Environment.GetResourceString("Acc_CreateAbstEx"), declaringType));
// ctor is on a class that contains stack pointers
else if (declaringType.GetRootElementType() == typeof(ArgIterator))
throw new NotSupportedException();
// ctor is vararg
else if (isVarArg)
throw new NotSupportedException();
// ctor is generic or on a generic class
else if (declaringType.ContainsGenericParameters)
{
#if FEATURE_LEGACYNETCF
// Windows Phone 7.x quirk: older apps expect ArgumentException here.
if (CompatibilitySwitches.IsAppEarlierThanWindowsPhone8)
throw new ArgumentException(
String.Format(CultureInfo.CurrentUICulture, Environment.GetResourceString("Acc_CreateGenericEx"), declaringType));
else
#endif
throw new MemberAccessException(
String.Format(CultureInfo.CurrentUICulture, Environment.GetResourceString("Acc_CreateGenericEx"), declaringType));
}
// ctor is declared on System.Void
else if (declaringType == typeof(void))
throw new MemberAccessException(Environment.GetResourceString("Access_Void"));
}
// Diagnoses WHY this ctor is not invocable and always throws the matching exception.
internal void ThrowNoInvokeException()
{
CheckCanCreateInstance(DeclaringType, (CallingConvention & CallingConventions.VarArgs) == CallingConventions.VarArgs);
// ctor is .cctor
if ((Attributes & MethodAttributes.Static) == MethodAttributes.Static)
throw new MemberAccessException(Environment.GetResourceString("Acc_NotClassInit"));
throw new TargetException();
}
/// <summary>
/// Invokes this constructor on an EXISTING instance (does not allocate). Performs
/// invocability checks, security demands, argument-count validation and ByRef
/// copy-back before/after calling into the runtime.
/// </summary>
[System.Security.SecuritySafeCritical] // auto-generated
[DebuggerStepThroughAttribute]
[Diagnostics.DebuggerHidden]
[MethodImplAttribute(MethodImplOptions.NoInlining)] // Methods containing StackCrawlMark local var has to be marked non-inlineable
public override Object Invoke(
Object obj, BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture)
{
INVOCATION_FLAGS invocationFlags = InvocationFlags;
if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NO_INVOKE) != 0)
ThrowNoInvokeException();
// check basic method consistency. This call will throw if there are problems in the target/method relationship
CheckConsistency(obj);
#if FEATURE_APPX
if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NON_W8P_FX_API) != 0)
{
// Block non-profile framework APIs when the caller assembly is profile-checked.
StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller;
RuntimeAssembly caller = RuntimeAssembly.GetExecutingAssembly(ref stackMark);
if (caller != null && !caller.IsSafeForReflection())
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_APIInvalidForCurrentContext", FullName));
}
#endif
if (obj != null)
{
#if FEATURE_CORECLR
// For unverifiable code, we require the caller to be critical.
// Adding the INVOCATION_FLAGS_NEED_SECURITY flag makes that check happen
invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY;
#else // FEATURE_CORECLR
new SecurityPermission(SecurityPermissionFlag.SkipVerification).Demand();
#endif // FEATURE_CORECLR
}
#if !FEATURE_CORECLR
if ((invocationFlags &(INVOCATION_FLAGS.INVOCATION_FLAGS_RISKY_METHOD | INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY)) != 0)
{
if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_RISKY_METHOD) != 0)
CodeAccessPermission.Demand(PermissionType.ReflectionMemberAccess);
if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY) != 0)
RuntimeMethodHandle.PerformSecurityCheck(obj, this, m_declaringType, (uint)m_invocationFlags);
}
#endif // !FEATURE_CORECLR
Signature sig = Signature;
// get the signature
int formalCount = sig.Arguments.Length;
int actualCount =(parameters != null) ? parameters.Length : 0;
if (formalCount != actualCount)
throw new TargetParameterCountException(Environment.GetResourceString("Arg_ParmCnt"));
// if we are here we passed all the previous checks. Time to look at the arguments
if (actualCount > 0)
{
Object[] arguments = CheckArguments(parameters, binder, invokeAttr, culture, sig);
Object retValue = RuntimeMethodHandle.InvokeMethod(obj, arguments, sig, false);
// copy out. This should be made only if ByRef are present.
for (int index = 0; index < arguments.Length; index++)
parameters[index] = arguments[index];
return retValue;
}
return RuntimeMethodHandle.InvokeMethod(obj, null, sig, false);
}
// Returns the IL body of the constructor; demands ReflectionPermission because
// method bodies expose implementation details.
[System.Security.SecuritySafeCritical] // overrides SC member
#pragma warning disable 618
[ReflectionPermissionAttribute(SecurityAction.Demand, Flags = ReflectionPermissionFlag.MemberAccess)]
#pragma warning restore 618
public override MethodBody GetMethodBody()
{
MethodBody mb = RuntimeMethodHandle.GetMethodBody(this, ReflectedTypeInternal);
if (mb != null)
mb.m_methodBase = this;
return mb;
}
// Security transparency attributes, as computed by the runtime for this method.
public override bool IsSecurityCritical
{
get { return RuntimeMethodHandle.IsSecurityCritical(this); }
}
public override bool IsSecuritySafeCritical
{
get { return RuntimeMethodHandle.IsSecuritySafeCritical(this); }
}
public override bool IsSecurityTransparent
{
get { return RuntimeMethodHandle.IsSecurityTransparent(this); }
}
// Constructors are never generic themselves; only the declaring type can be.
public override bool ContainsGenericParameters
{
get
{
return (DeclaringType != null && DeclaringType.ContainsGenericParameters);
}
}
#endregion
#region ConstructorInfo Overrides
/// <summary>
/// Creates a NEW instance by invoking this constructor. Mirrors the instance
/// Invoke above but passes null as the target and asks the runtime to allocate.
/// </summary>
[System.Security.SecuritySafeCritical] // auto-generated
[DebuggerStepThroughAttribute]
[Diagnostics.DebuggerHidden]
[MethodImplAttribute(MethodImplOptions.NoInlining)] // Methods containing StackCrawlMark local var has to be marked non-inlineable
public override Object Invoke(BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture)
{
INVOCATION_FLAGS invocationFlags = InvocationFlags;
// get the declaring TypeHandle early for consistent exceptions in IntrospectionOnly context
RuntimeTypeHandle declaringTypeHandle = m_declaringType.TypeHandle;
if ((invocationFlags & (INVOCATION_FLAGS.INVOCATION_FLAGS_NO_INVOKE | INVOCATION_FLAGS.INVOCATION_FLAGS_CONTAINS_STACK_POINTERS | INVOCATION_FLAGS.INVOCATION_FLAGS_NO_CTOR_INVOKE)) != 0)
ThrowNoInvokeException();
#if FEATURE_APPX
if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NON_W8P_FX_API) != 0)
{
// Block non-profile framework APIs when the caller assembly is profile-checked.
StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller;
RuntimeAssembly caller = RuntimeAssembly.GetExecutingAssembly(ref stackMark);
if (caller != null && !caller.IsSafeForReflection())
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_APIInvalidForCurrentContext", FullName));
}
#endif
#if !FEATURE_CORECLR
if ((invocationFlags & (INVOCATION_FLAGS.INVOCATION_FLAGS_RISKY_METHOD | INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY | INVOCATION_FLAGS.INVOCATION_FLAGS_IS_DELEGATE_CTOR)) != 0)
{
if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_RISKY_METHOD) != 0)
CodeAccessPermission.Demand(PermissionType.ReflectionMemberAccess);
if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY) != 0)
RuntimeMethodHandle.PerformSecurityCheck(null, this, m_declaringType, (uint)(m_invocationFlags | INVOCATION_FLAGS.INVOCATION_FLAGS_CONSTRUCTOR_INVOKE));
// Delegate construction requires unmanaged-code permission (target address
// cannot be validated).
if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_IS_DELEGATE_CTOR) != 0)
new SecurityPermission(SecurityPermissionFlag.UnmanagedCode).Demand();
}
#endif // !FEATURE_CORECLR
// get the signature
Signature sig = Signature;
int formalCount = sig.Arguments.Length;
int actualCount =(parameters != null) ? parameters.Length : 0;
if (formalCount != actualCount)
throw new TargetParameterCountException(Environment.GetResourceString("Arg_ParmCnt"));
// We don't need to explicitly invoke the class constructor here,
// JIT/NGen will insert the call to .cctor in the instance ctor.
// if we are here we passed all the previous checks. Time to look at the arguments
if (actualCount > 0)
{
Object[] arguments = CheckArguments(parameters, binder, invokeAttr, culture, sig);
Object retValue = RuntimeMethodHandle.InvokeMethod(null, arguments, sig, true);
// copy out. This should be made only if ByRef are present.
for (int index = 0; index < arguments.Length; index++)
parameters[index] = arguments[index];
return retValue;
}
return RuntimeMethodHandle.InvokeMethod(null, null, sig, true);
}
#endregion
#region ISerializable Implementation
// Serializes this member as a MemberInfoSerializationHolder so it can be
// re-resolved against the declaring type on deserialization.
[System.Security.SecurityCritical] // auto-generated
public void GetObjectData(SerializationInfo info, StreamingContext context)
{
if (info == null)
throw new ArgumentNullException("info");
Contract.EndContractBlock();
MemberInfoSerializationHolder.GetSerializationInfo(
info,
Name,
ReflectedTypeInternal,
ToString(),
SerializationToString(),
MemberTypes.Constructor,
null);
}
internal string SerializationToString()
{
// We don't need the return type for constructors.
return FormatNameAndSig(true);
}
// Fast path used by the serialization infrastructure to run a deserialization ctor.
internal void SerializationInvoke(Object target, SerializationInfo info, StreamingContext context)
{
RuntimeMethodHandle.SerializationInvoke(this, target, info, ref context);
}
#endregion
}
}
}
| |
//---------------------------------------------------------------------
// <copyright file="ObjectQueryExecutionPlan.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// @owner [....]
//---------------------------------------------------------------------
namespace System.Data.Objects.Internal
{
using System;
using System.Data.Common;
using System.Data.Common.CommandTrees;
using System.Data.Common.Internal.Materialization;
using System.Data.Common.QueryCache;
using System.Data.Common.Utils;
using System.Data.EntityClient;
using System.Data.Metadata.Edm;
using System.Data.Objects;
using System.Diagnostics;
using CompiledQueryParameters = System.Collections.ObjectModel.ReadOnlyCollection<System.Collections.Generic.KeyValuePair<ObjectParameter, System.Data.Objects.ELinq.QueryParameterExpression>>;
/// <summary>
/// Represents the 'compiled' form of all elements (query + result assembly) required to execute a specific <see cref="ObjectQuery"/>
/// </summary>
internal sealed class ObjectQueryExecutionPlan
{
// Provider command definition used to create/execute store commands.
internal readonly DbCommandDefinition CommandDefinition;
// Factory that materializes store results into CLR objects.
internal readonly ShaperFactory ResultShaperFactory;
// EDM type of the overall query result (usually a CollectionType).
internal readonly TypeUsage ResultType;
internal readonly MergeOption MergeOption;
// Parameters captured from a compiled (CompiledQuery) LINQ query; may be null.
internal readonly CompiledQueryParameters CompiledQueryParameters;
/// <summary>If the query yields entities from a single entity set, the value is stored here.</summary>
private readonly EntitySet _singleEntitySet;
// Private: instances are only produced by the static Prepare factory below.
private ObjectQueryExecutionPlan(DbCommandDefinition commandDefinition, ShaperFactory resultShaperFactory, TypeUsage resultType, MergeOption mergeOption, EntitySet singleEntitySet, CompiledQueryParameters compiledQueryParameters)
{
Debug.Assert(commandDefinition != null, "A command definition is required");
Debug.Assert(resultShaperFactory != null, "A result shaper factory is required");
Debug.Assert(resultType != null, "A result type is required");
this.CommandDefinition = commandDefinition;
this.ResultShaperFactory = resultShaperFactory;
this.ResultType = resultType;
this.MergeOption = mergeOption;
this._singleEntitySet = singleEntitySet;
this.CompiledQueryParameters = compiledQueryParameters;
}
/// <summary>
/// Compiles an execution plan for the given query command tree: applies span
/// rewriting, builds the provider command definition, creates the result shaper
/// and attempts to identify a unique target entity set for the results.
/// </summary>
internal static ObjectQueryExecutionPlan Prepare(ObjectContext context, DbQueryCommandTree tree, Type elementType, MergeOption mergeOption, Span span, CompiledQueryParameters compiledQueryParameters, AliasGenerator aliasGenerator)
{
TypeUsage treeResultType = tree.Query.ResultType;
// Rewrite this tree for Span?
DbExpression spannedQuery = null;
SpanIndex spanInfo;
if (ObjectSpanRewriter.TryRewrite(tree, span, mergeOption, aliasGenerator, out spannedQuery, out spanInfo))
{
tree = DbQueryCommandTree.FromValidExpression(tree.MetadataWorkspace, tree.DataSpace, spannedQuery);
}
else
{
spanInfo = null;
}
DbConnection connection = context.Connection;
DbCommandDefinition definition = null;
// The connection is required to get to the CommandDefinition builder.
if (connection == null)
{
throw EntityUtil.InvalidOperation(System.Data.Entity.Strings.ObjectQuery_InvalidConnection);
}
DbProviderServices services = DbProviderServices.GetProviderServices(connection);
try
{
definition = services.CreateCommandDefinition(tree);
}
catch (EntityCommandCompilationException)
{
// If we're running against EntityCommand, we probably already caught the providers'
// exception and wrapped it, we don't want to do that again, so we'll just rethrow
// here instead.
throw;
}
catch (Exception e)
{
// we should not be wrapping all exceptions
if (EntityUtil.IsCatchableExceptionType(e))
{
// we don't wan't folks to have to know all the various types of exceptions that can
// occur, so we just rethrow a CommandDefinitionException and make whatever we caught
// the inner exception of it.
throw EntityUtil.CommandCompilation(System.Data.Entity.Strings.EntityClient_CommandDefinitionPreparationFailed, e);
}
throw;
}
if (definition == null)
{
throw EntityUtil.ProviderDoesNotSupportCommandTrees();
}
EntityCommandDefinition entityDefinition = (EntityCommandDefinition)definition;
QueryCacheManager cacheManager = context.Perspective.MetadataWorkspace.GetQueryCacheManager();
ShaperFactory shaperFactory = ShaperFactory.Create(elementType, cacheManager, entityDefinition.CreateColumnMap(null),
context.MetadataWorkspace, spanInfo, mergeOption, false);
// attempt to determine entity information for this query (e.g. which entity type and which entity set)
//EntityType rootEntityType = null;
EntitySet singleEntitySet = null;
if (treeResultType.EdmType.BuiltInTypeKind == BuiltInTypeKind.CollectionType)
{
// determine if the entity set is unambiguous given the entity type
if (null != entityDefinition.EntitySets)
{
foreach (EntitySet entitySet in entityDefinition.EntitySets)
{
if (null != entitySet)
{
if (entitySet.ElementType.IsAssignableFrom(((CollectionType)treeResultType.EdmType).TypeUsage.EdmType))
{
if (singleEntitySet == null)
{
// found a single match
singleEntitySet = entitySet;
}
else
{
// there's more than one matching entity set
singleEntitySet = null;
break;
}
}
}
}
}
}
return new ObjectQueryExecutionPlan(definition, shaperFactory, treeResultType, mergeOption, singleEntitySet, compiledQueryParameters);
}
/// <summary>
/// Returns the provider trace string for this plan's command, or an empty
/// string when the command definition is not an EntityCommandDefinition.
/// </summary>
internal string ToTraceString()
{
    EntityCommandDefinition entityCommandDef = this.CommandDefinition as EntityCommandDefinition;
    if (entityCommandDef == null)
    {
        return string.Empty;
    }
    return entityCommandDef.ToTraceString();
}
/// <summary>
/// Executes the plan against the given context: builds an EntityCommand, applies
/// timeout and parameter values, runs the store command and wraps the reader in
/// an ObjectResult that materializes TResultType instances.
/// </summary>
internal ObjectResult<TResultType> Execute<TResultType>(ObjectContext context, ObjectParameterCollection parameterValues)
{
DbDataReader storeReader = null;
try
{
// create entity command (just do this to snarf store command)
EntityCommandDefinition commandDefinition = (EntityCommandDefinition)this.CommandDefinition;
EntityCommand entityCommand = new EntityCommand((EntityConnection)context.Connection, commandDefinition);
// pass through parameters and timeout values
if (context.CommandTimeout.HasValue)
{
entityCommand.CommandTimeout = context.CommandTimeout.Value;
}
if (parameterValues != null)
{
foreach (ObjectParameter parameter in parameterValues)
{
// Parameters not present on the command are silently skipped.
int index = entityCommand.Parameters.IndexOf(parameter.Name);
if (index != -1)
{
entityCommand.Parameters[index].Value = parameter.Value ?? DBNull.Value;
}
}
}
// acquire store reader
storeReader = commandDefinition.ExecuteStoreCommands(entityCommand, CommandBehavior.Default);
ShaperFactory<TResultType> shaperFactory = (ShaperFactory<TResultType>)this.ResultShaperFactory;
Shaper<TResultType> shaper = shaperFactory.Create(storeReader, context, context.MetadataWorkspace, this.MergeOption, true);
// create materializer delegate
// For a collection result, items are shaped as the collection's element type.
TypeUsage resultItemEdmType;
if (ResultType.EdmType.BuiltInTypeKind == BuiltInTypeKind.CollectionType)
{
resultItemEdmType = ((CollectionType)ResultType.EdmType).TypeUsage;
}
else
{
resultItemEdmType = ResultType;
}
// Ownership of storeReader transfers to the ObjectResult on success.
return new ObjectResult<TResultType>(shaper, this._singleEntitySet, resultItemEdmType);
}
catch (Exception)
{
if (null != storeReader)
{
// Note: The caller is responsible for disposing reader if creating
// the enumerator fails.
storeReader.Dispose();
}
throw;
}
}
// Convenience wrapper: prepares a plan for the command tree (no span, no compiled
// query parameters) and executes it immediately.
internal static ObjectResult<TResultType> ExecuteCommandTree<TResultType>(ObjectContext context, DbQueryCommandTree query, MergeOption mergeOption)
{
Debug.Assert(context != null, "ObjectContext cannot be null");
Debug.Assert(query != null, "Command tree cannot be null");
ObjectQueryExecutionPlan execPlan = ObjectQueryExecutionPlan.Prepare(context, query, typeof(TResultType), mergeOption, null, null, System.Data.Common.CommandTrees.ExpressionBuilder.DbExpressionBuilder.AliasGenerator);
return execPlan.Execute<TResultType>(context, null);
}
}
}
| |
using System;
using System.Diagnostics;
using NBitcoin.BouncyCastle.Math.Raw;
using NBitcoin.BouncyCastle.Utilities;
namespace NBitcoin.BouncyCastle.Math.EC
{
// Base class for elements of the finite field underlying an elliptic curve.
// Subclasses provide the concrete field arithmetic; the virtual members here
// are generic defaults expressed in terms of the abstract primitives.
internal abstract class ECFieldElement
{
    public abstract BigInteger ToBigInteger();

    // Short name of the field this element belongs to (e.g. "Fp").
    public abstract string FieldName { get; }

    // Size of the field in bits.
    public abstract int FieldSize { get; }

    public abstract ECFieldElement Add(ECFieldElement b);
    public abstract ECFieldElement AddOne();
    public abstract ECFieldElement Subtract(ECFieldElement b);
    public abstract ECFieldElement Multiply(ECFieldElement b);
    public abstract ECFieldElement Divide(ECFieldElement b);
    public abstract ECFieldElement Negate();
    public abstract ECFieldElement Square();
    public abstract ECFieldElement Invert();
    public abstract ECFieldElement Sqrt();

    // Bit length of this element's integer representation.
    public virtual int BitLength
    {
        get { return ToBigInteger().BitLength; }
    }

    // An element equals one exactly when its bit length is 1.
    public virtual bool IsOne
    {
        get { return 1 == BitLength; }
    }

    public virtual bool IsZero
    {
        get { return ToBigInteger().SignValue == 0; }
    }

    // this*b - x*y
    public virtual ECFieldElement MultiplyMinusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
    {
        ECFieldElement left = Multiply(b);
        ECFieldElement right = x.Multiply(y);
        return left.Subtract(right);
    }

    // this*b + x*y
    public virtual ECFieldElement MultiplyPlusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
    {
        ECFieldElement left = Multiply(b);
        ECFieldElement right = x.Multiply(y);
        return left.Add(right);
    }

    // this^2 - x*y
    public virtual ECFieldElement SquareMinusProduct(ECFieldElement x, ECFieldElement y)
    {
        ECFieldElement left = Square();
        ECFieldElement right = x.Multiply(y);
        return left.Subtract(right);
    }

    // this^2 + x*y
    public virtual ECFieldElement SquarePlusProduct(ECFieldElement x, ECFieldElement y)
    {
        ECFieldElement left = Square();
        ECFieldElement right = x.Multiply(y);
        return left.Add(right);
    }

    // Repeated squaring: this^(2^pow). pow <= 0 returns this unchanged.
    public virtual ECFieldElement SquarePow(int pow)
    {
        ECFieldElement result = this;
        while (pow-- > 0)
        {
            result = result.Square();
        }
        return result;
    }

    public virtual bool TestBitZero()
    {
        return ToBigInteger().TestBit(0);
    }

    public override bool Equals(object obj)
    {
        ECFieldElement element = obj as ECFieldElement;
        return Equals(element);
    }

    // Value equality: same reference, or equal integer representations.
    public virtual bool Equals(ECFieldElement other)
    {
        if (other == null)
            return false;
        if (this == other)
            return true;
        return ToBigInteger().Equals(other.ToBigInteger());
    }

    public override int GetHashCode()
    {
        return ToBigInteger().GetHashCode();
    }

    public override string ToString()
    {
        return ToBigInteger().ToString(16);
    }

    // Big-endian unsigned encoding, zero-padded to the field's byte length.
    public virtual byte[] GetEncoded()
    {
        int byteLength = (FieldSize + 7) / 8;
        return BigIntegers.AsUnsignedByteArray(byteLength, ToBigInteger());
    }
}
// Element of a prime field Fp.
internal class FpFieldElement
: ECFieldElement
{
// q: the prime modulus; r: precomputed reduction helper (null => use plain Mod);
// x: the element's value, always in [0, q).
private readonly BigInteger q, r, x;
// Precomputes a constant used to speed up reduction mod p for primes of a
// special shape; returns null when no fast path applies.
// NOTE(review): appears to pick a special-form residue when the top 64 bits of p
// are all ones, and a quotient-based (negative) constant when the bit length is a
// multiple of 8 — confirm against the ModReduce implementation elsewhere in this file.
internal static BigInteger CalculateResidue(BigInteger p)
{
int bitLength = p.BitLength;
if(bitLength >= 96)
{
BigInteger firstWord = p.ShiftRight(bitLength - 64);
if(firstWord.LongValue == -1L)
{
// 2^bitLength - p
return BigInteger.One.ShiftLeft(bitLength).Subtract(p);
}
if((bitLength & 7) == 0)
{
// -(2^(2*bitLength) / p)
return BigInteger.One.ShiftLeft(bitLength << 1).Divide(p).Negate();
}
}
return null;
}
[Obsolete("Use ECCurve.FromBigInteger to construct field elements")]
public FpFieldElement(BigInteger q, BigInteger x)
: this(q, CalculateResidue(q), x)
{
}
internal FpFieldElement(BigInteger q, BigInteger r, BigInteger x)
{
// The value must be non-null and already reduced into [0, q).
if(x == null || x.SignValue < 0 || x.CompareTo(q) >= 0)
throw new ArgumentException("value invalid in Fp field element", "x");
this.q = q;
this.r = r;
this.x = x;
}
// The element's value as a non-negative integer in [0, q).
public override BigInteger ToBigInteger()
{
return x;
}
/**
* return the field name for this field.
*
* @return the string "Fp".
*/
public override string FieldName
{
get
{
return "Fp";
}
}
// Field size in bits = bit length of the prime modulus.
public override int FieldSize
{
get
{
return q.BitLength;
}
}
// The prime modulus of this field.
public BigInteger Q
{
get
{
return q;
}
}
// (x + b) mod q
public override ECFieldElement Add(
ECFieldElement b)
{
return new FpFieldElement(q, r, ModAdd(x, b.ToBigInteger()));
}
// (x + 1) mod q, with a cheap wrap check instead of a full reduction.
public override ECFieldElement AddOne()
{
BigInteger x2 = x.Add(BigInteger.One);
if(x2.CompareTo(q) == 0)
{
x2 = BigInteger.Zero;
}
return new FpFieldElement(q, r, x2);
}
// (x - b) mod q
public override ECFieldElement Subtract(
ECFieldElement b)
{
return new FpFieldElement(q, r, ModSubtract(x, b.ToBigInteger()));
}
// (x * b) mod q
public override ECFieldElement Multiply(
ECFieldElement b)
{
return new FpFieldElement(q, r, ModMult(x, b.ToBigInteger()));
}
// (this*b - x*y) mod q, reducing only once after the full-width subtraction.
public override ECFieldElement MultiplyMinusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
{
BigInteger ax = this.x, bx = b.ToBigInteger(), xx = x.ToBigInteger(), yx = y.ToBigInteger();
BigInteger ab = ax.Multiply(bx);
BigInteger xy = xx.Multiply(yx);
return new FpFieldElement(q, r, ModReduce(ab.Subtract(xy)));
}
// (this*b + x*y) mod q, with a pre-reduction step when the sum outgrows 2*q bits.
public override ECFieldElement MultiplyPlusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
{
BigInteger ax = this.x, bx = b.ToBigInteger(), xx = x.ToBigInteger(), yx = y.ToBigInteger();
BigInteger ab = ax.Multiply(bx);
BigInteger xy = xx.Multiply(yx);
BigInteger sum = ab.Add(xy);
// NOTE(review): this pre-reduction only triggers for the negative-residue
// (quotient-based) fast path — confirm against ModReduce.
if(r != null && r.SignValue < 0 && sum.BitLength > (q.BitLength << 1))
{
sum = sum.Subtract(q.ShiftLeft(q.BitLength));
}
return new FpFieldElement(q, r, ModReduce(sum));
}
// (x / b) mod q, implemented as multiplication by the modular inverse of b.
public override ECFieldElement Divide(
ECFieldElement b)
{
return new FpFieldElement(q, r, ModMult(x, ModInverse(b.ToBigInteger())));
}
public override ECFieldElement Negate()
{
return x.SignValue == 0 ? this : new FpFieldElement(q, r, q.Subtract(x));
}
public override ECFieldElement Square()
{
return new FpFieldElement(q, r, ModMult(x, x));
}
public override ECFieldElement SquareMinusProduct(ECFieldElement x, ECFieldElement y)
{
BigInteger ax = this.x, xx = x.ToBigInteger(), yx = y.ToBigInteger();
BigInteger aa = ax.Multiply(ax);
BigInteger xy = xx.Multiply(yx);
return new FpFieldElement(q, r, ModReduce(aa.Subtract(xy)));
}
public override ECFieldElement SquarePlusProduct(ECFieldElement x, ECFieldElement y)
{
BigInteger ax = this.x, xx = x.ToBigInteger(), yx = y.ToBigInteger();
BigInteger aa = ax.Multiply(ax);
BigInteger xy = xx.Multiply(yx);
BigInteger sum = aa.Add(xy);
if(r != null && r.SignValue < 0 && sum.BitLength > (q.BitLength << 1))
{
sum = sum.Subtract(q.ShiftLeft(q.BitLength));
}
return new FpFieldElement(q, r, ModReduce(sum));
}
public override ECFieldElement Invert()
{
// TODO Modular inversion can be faster for a (Generalized) Mersenne Prime.
return new FpFieldElement(q, r, ModInverse(x));
}
/**
* return a sqrt root - the routine verifies that the calculation
* returns the right value - if none exists it returns null.
*/
public override ECFieldElement Sqrt()
{
if(IsZero || IsOne)
return this;
if(!q.TestBit(0))
throw Platform.CreateNotImplementedException("even value of q");
if(q.TestBit(1)) // q == 4m + 3
{
BigInteger e = q.ShiftRight(2).Add(BigInteger.One);
return CheckSqrt(new FpFieldElement(q, r, x.ModPow(e, q)));
}
if(q.TestBit(2)) // q == 8m + 5
{
BigInteger t1 = x.ModPow(q.ShiftRight(3), q);
BigInteger t2 = ModMult(t1, x);
BigInteger t3 = ModMult(t2, t1);
if(t3.Equals(BigInteger.One))
{
return CheckSqrt(new FpFieldElement(q, r, t2));
}
// TODO This is constant and could be precomputed
BigInteger t4 = BigInteger.Two.ModPow(q.ShiftRight(2), q);
BigInteger y = ModMult(t2, t4);
return CheckSqrt(new FpFieldElement(q, r, y));
}
// q == 8m + 1
BigInteger legendreExponent = q.ShiftRight(1);
if(!(x.ModPow(legendreExponent, q).Equals(BigInteger.One)))
return null;
BigInteger X = this.x;
BigInteger fourX = ModDouble(ModDouble(X));
;
BigInteger k = legendreExponent.Add(BigInteger.One), qMinusOne = q.Subtract(BigInteger.One);
BigInteger U, V;
do
{
BigInteger P;
do
{
P = BigInteger.Arbitrary(q.BitLength);
}
while(P.CompareTo(q) >= 0
|| !ModReduce(P.Multiply(P).Subtract(fourX)).ModPow(legendreExponent, q).Equals(qMinusOne));
BigInteger[] result = LucasSequence(P, X, k);
U = result[0];
V = result[1];
if(ModMult(V, V).Equals(fourX))
{
return new FpFieldElement(q, r, ModHalfAbs(V));
}
}
while(U.Equals(BigInteger.One) || U.Equals(qMinusOne));
return null;
}
private ECFieldElement CheckSqrt(ECFieldElement z)
{
return z.Square().Equals(this) ? z : null;
}
private BigInteger[] LucasSequence(
BigInteger P,
BigInteger Q,
BigInteger k)
{
// TODO Research and apply "common-multiplicand multiplication here"
int n = k.BitLength;
int s = k.GetLowestSetBit();
Debug.Assert(k.TestBit(s));
BigInteger Uh = BigInteger.One;
BigInteger Vl = BigInteger.Two;
BigInteger Vh = P;
BigInteger Ql = BigInteger.One;
BigInteger Qh = BigInteger.One;
for(int j = n - 1; j >= s + 1; --j)
{
Ql = ModMult(Ql, Qh);
if(k.TestBit(j))
{
Qh = ModMult(Ql, Q);
Uh = ModMult(Uh, Vh);
Vl = ModReduce(Vh.Multiply(Vl).Subtract(P.Multiply(Ql)));
Vh = ModReduce(Vh.Multiply(Vh).Subtract(Qh.ShiftLeft(1)));
}
else
{
Qh = Ql;
Uh = ModReduce(Uh.Multiply(Vl).Subtract(Ql));
Vh = ModReduce(Vh.Multiply(Vl).Subtract(P.Multiply(Ql)));
Vl = ModReduce(Vl.Multiply(Vl).Subtract(Ql.ShiftLeft(1)));
}
}
Ql = ModMult(Ql, Qh);
Qh = ModMult(Ql, Q);
Uh = ModReduce(Uh.Multiply(Vl).Subtract(Ql));
Vl = ModReduce(Vh.Multiply(Vl).Subtract(P.Multiply(Ql)));
Ql = ModMult(Ql, Qh);
for(int j = 1; j <= s; ++j)
{
Uh = ModMult(Uh, Vl);
Vl = ModReduce(Vl.Multiply(Vl).Subtract(Ql.ShiftLeft(1)));
Ql = ModMult(Ql, Ql);
}
return new BigInteger[] { Uh, Vl };
}
protected virtual BigInteger ModAdd(BigInteger x1, BigInteger x2)
{
BigInteger x3 = x1.Add(x2);
if(x3.CompareTo(q) >= 0)
{
x3 = x3.Subtract(q);
}
return x3;
}
protected virtual BigInteger ModDouble(BigInteger x)
{
BigInteger _2x = x.ShiftLeft(1);
if(_2x.CompareTo(q) >= 0)
{
_2x = _2x.Subtract(q);
}
return _2x;
}
protected virtual BigInteger ModHalf(BigInteger x)
{
if(x.TestBit(0))
{
x = q.Add(x);
}
return x.ShiftRight(1);
}
protected virtual BigInteger ModHalfAbs(BigInteger x)
{
if(x.TestBit(0))
{
x = q.Subtract(x);
}
return x.ShiftRight(1);
}
protected virtual BigInteger ModInverse(BigInteger x)
{
int bits = FieldSize;
int len = (bits + 31) >> 5;
uint[] p = Nat.FromBigInteger(bits, q);
uint[] n = Nat.FromBigInteger(bits, x);
uint[] z = Nat.Create(len);
Mod.Invert(p, n, z);
return Nat.ToBigInteger(len, z);
}
protected virtual BigInteger ModMult(BigInteger x1, BigInteger x2)
{
return ModReduce(x1.Multiply(x2));
}
protected virtual BigInteger ModReduce(BigInteger x)
{
if(r == null)
{
x = x.Mod(q);
}
else
{
bool negative = x.SignValue < 0;
if(negative)
{
x = x.Abs();
}
int qLen = q.BitLength;
if(r.SignValue > 0)
{
BigInteger qMod = BigInteger.One.ShiftLeft(qLen);
bool rIsOne = r.Equals(BigInteger.One);
while(x.BitLength > (qLen + 1))
{
BigInteger u = x.ShiftRight(qLen);
BigInteger v = x.Remainder(qMod);
if(!rIsOne)
{
u = u.Multiply(r);
}
x = u.Add(v);
}
}
else
{
int d = ((qLen - 1) & 31) + 1;
BigInteger mu = r.Negate();
BigInteger u = mu.Multiply(x.ShiftRight(qLen - d));
BigInteger quot = u.ShiftRight(qLen + d);
BigInteger v = quot.Multiply(q);
BigInteger bk1 = BigInteger.One.ShiftLeft(qLen + d);
v = v.Remainder(bk1);
x = x.Remainder(bk1);
x = x.Subtract(v);
if(x.SignValue < 0)
{
x = x.Add(bk1);
}
}
while(x.CompareTo(q) >= 0)
{
x = x.Subtract(q);
}
if(negative && x.SignValue != 0)
{
x = q.Subtract(x);
}
}
return x;
}
protected virtual BigInteger ModSubtract(BigInteger x1, BigInteger x2)
{
BigInteger x3 = x1.Subtract(x2);
if(x3.SignValue < 0)
{
x3 = x3.Add(q);
}
return x3;
}
public override bool Equals(
object obj)
{
if(obj == this)
return true;
FpFieldElement other = obj as FpFieldElement;
if(other == null)
return false;
return Equals(other);
}
public virtual bool Equals(
FpFieldElement other)
{
return q.Equals(other.q) && base.Equals(other);
}
public override int GetHashCode()
{
return q.GetHashCode() ^ base.GetHashCode();
}
}
/**
* Class representing the Elements of the finite field
* <code>F<sub>2<sup>m</sup></sub></code> in polynomial basis (PB)
* representation. Both trinomial (Tpb) and pentanomial (Ppb) polynomial
* basis representations are supported. Gaussian normal basis (GNB)
* representation is not supported.
*/
internal class F2mFieldElement
: ECFieldElement
{
/**
* Indicates gaussian normal basis representation (GNB). Number chosen
* according to X9.62. GNB is not implemented at present.
*/
public const int Gnb = 1;
/**
* Indicates trinomial basis representation (Tpb). Number chosen
* according to X9.62.
*/
public const int Tpb = 2;
/**
* Indicates pentanomial basis representation (Ppb). Number chosen
* according to X9.62.
*/
public const int Ppb = 3;
/**
* Tpb or Ppb.
*/
private int representation;
/**
* The exponent <code>m</code> of <code>F<sub>2<sup>m</sup></sub></code>.
*/
private int m;
private int[] ks;
/**
* The <code>LongArray</code> holding the bits.
*/
private LongArray x;
/**
* Constructor for Ppb.
* @param m The exponent <code>m</code> of
* <code>F<sub>2<sup>m</sup></sub></code>.
* @param k1 The integer <code>k1</code> where <code>x<sup>m</sup> +
* x<sup>k3</sup> + x<sup>k2</sup> + x<sup>k1</sup> + 1</code>
* represents the reduction polynomial <code>f(z)</code>.
* @param k2 The integer <code>k2</code> where <code>x<sup>m</sup> +
* x<sup>k3</sup> + x<sup>k2</sup> + x<sup>k1</sup> + 1</code>
* represents the reduction polynomial <code>f(z)</code>.
* @param k3 The integer <code>k3</code> where <code>x<sup>m</sup> +
* x<sup>k3</sup> + x<sup>k2</sup> + x<sup>k1</sup> + 1</code>
* represents the reduction polynomial <code>f(z)</code>.
* @param x The BigInteger representing the value of the field element.
*/
public F2mFieldElement(
int m,
int k1,
int k2,
int k3,
BigInteger x)
{
if(x == null || x.SignValue < 0 || x.BitLength > m)
throw new ArgumentException("value invalid in F2m field element", "x");
if((k2 == 0) && (k3 == 0))
{
this.representation = Tpb;
this.ks = new int[] { k1 };
}
else
{
if(k2 >= k3)
throw new ArgumentException("k2 must be smaller than k3");
if(k2 <= 0)
throw new ArgumentException("k2 must be larger than 0");
this.representation = Ppb;
this.ks = new int[] { k1, k2, k3 };
}
this.m = m;
this.x = new LongArray(x);
}
/**
* Constructor for Tpb.
* @param m The exponent <code>m</code> of
* <code>F<sub>2<sup>m</sup></sub></code>.
* @param k The integer <code>k</code> where <code>x<sup>m</sup> +
* x<sup>k</sup> + 1</code> represents the reduction
* polynomial <code>f(z)</code>.
* @param x The BigInteger representing the value of the field element.
*/
public F2mFieldElement(
int m,
int k,
BigInteger x)
: this(m, k, 0, 0, x)
{
// Set k1 to k, and set k2 and k3 to 0
}
private F2mFieldElement(int m, int[] ks, LongArray x)
{
this.m = m;
this.representation = (ks.Length == 1) ? Tpb : Ppb;
this.ks = ks;
this.x = x;
}
public override int BitLength
{
get
{
return x.Degree();
}
}
public override bool IsOne
{
get
{
return x.IsOne();
}
}
public override bool IsZero
{
get
{
return x.IsZero();
}
}
public override bool TestBitZero()
{
return x.TestBitZero();
}
public override BigInteger ToBigInteger()
{
return x.ToBigInteger();
}
public override string FieldName
{
get
{
return "F2m";
}
}
public override int FieldSize
{
get
{
return m;
}
}
/**
* Checks, if the ECFieldElements <code>a</code> and <code>b</code>
* are elements of the same field <code>F<sub>2<sup>m</sup></sub></code>
* (having the same representation).
* @param a field element.
* @param b field element to be compared.
* @throws ArgumentException if <code>a</code> and <code>b</code>
* are not elements of the same field
* <code>F<sub>2<sup>m</sup></sub></code> (having the same
* representation).
*/
public static void CheckFieldElements(
ECFieldElement a,
ECFieldElement b)
{
if(!(a is F2mFieldElement) || !(b is F2mFieldElement))
{
throw new ArgumentException("Field elements are not "
+ "both instances of F2mFieldElement");
}
F2mFieldElement aF2m = (F2mFieldElement)a;
F2mFieldElement bF2m = (F2mFieldElement)b;
if(aF2m.representation != bF2m.representation)
{
// Should never occur
throw new ArgumentException("One of the F2m field elements has incorrect representation");
}
if((aF2m.m != bF2m.m) || !Arrays.AreEqual(aF2m.ks, bF2m.ks))
{
throw new ArgumentException("Field elements are not elements of the same field F2m");
}
}
public override ECFieldElement Add(
ECFieldElement b)
{
// No check performed here for performance reasons. Instead the
// elements involved are checked in ECPoint.F2m
// checkFieldElements(this, b);
LongArray iarrClone = this.x.Copy();
F2mFieldElement bF2m = (F2mFieldElement)b;
iarrClone.AddShiftedByWords(bF2m.x, 0);
return new F2mFieldElement(m, ks, iarrClone);
}
public override ECFieldElement AddOne()
{
return new F2mFieldElement(m, ks, x.AddOne());
}
public override ECFieldElement Subtract(
ECFieldElement b)
{
// Addition and subtraction are the same in F2m
return Add(b);
}
public override ECFieldElement Multiply(
ECFieldElement b)
{
// Right-to-left comb multiplication in the LongArray
// Input: Binary polynomials a(z) and b(z) of degree at most m-1
// Output: c(z) = a(z) * b(z) mod f(z)
// No check performed here for performance reasons. Instead the
// elements involved are checked in ECPoint.F2m
// checkFieldElements(this, b);
return new F2mFieldElement(m, ks, x.ModMultiply(((F2mFieldElement)b).x, m, ks));
}
public override ECFieldElement MultiplyMinusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
{
return MultiplyPlusProduct(b, x, y);
}
public override ECFieldElement MultiplyPlusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
{
LongArray ax = this.x, bx = ((F2mFieldElement)b).x, xx = ((F2mFieldElement)x).x, yx = ((F2mFieldElement)y).x;
LongArray ab = ax.Multiply(bx, m, ks);
LongArray xy = xx.Multiply(yx, m, ks);
if(ab == ax || ab == bx)
{
ab = (LongArray)ab.Copy();
}
ab.AddShiftedByWords(xy, 0);
ab.Reduce(m, ks);
return new F2mFieldElement(m, ks, ab);
}
public override ECFieldElement Divide(
ECFieldElement b)
{
// There may be more efficient implementations
ECFieldElement bInv = b.Invert();
return Multiply(bInv);
}
public override ECFieldElement Negate()
{
// -x == x holds for all x in F2m
return this;
}
public override ECFieldElement Square()
{
return new F2mFieldElement(m, ks, x.ModSquare(m, ks));
}
public override ECFieldElement SquareMinusProduct(ECFieldElement x, ECFieldElement y)
{
return SquarePlusProduct(x, y);
}
public override ECFieldElement SquarePlusProduct(ECFieldElement x, ECFieldElement y)
{
LongArray ax = this.x, xx = ((F2mFieldElement)x).x, yx = ((F2mFieldElement)y).x;
LongArray aa = ax.Square(m, ks);
LongArray xy = xx.Multiply(yx, m, ks);
if(aa == ax)
{
aa = (LongArray)aa.Copy();
}
aa.AddShiftedByWords(xy, 0);
aa.Reduce(m, ks);
return new F2mFieldElement(m, ks, aa);
}
public override ECFieldElement SquarePow(int pow)
{
return pow < 1 ? this : new F2mFieldElement(m, ks, x.ModSquareN(pow, m, ks));
}
public override ECFieldElement Invert()
{
return new F2mFieldElement(this.m, this.ks, this.x.ModInverse(m, ks));
}
public override ECFieldElement Sqrt()
{
return (x.IsZero() || x.IsOne()) ? this : SquarePow(m - 1);
}
/**
* @return the representation of the field
* <code>F<sub>2<sup>m</sup></sub></code>, either of
* {@link F2mFieldElement.Tpb} (trinomial
* basis representation) or
* {@link F2mFieldElement.Ppb} (pentanomial
* basis representation).
*/
public int Representation
{
get
{
return this.representation;
}
}
/**
* @return the degree <code>m</code> of the reduction polynomial
* <code>f(z)</code>.
*/
public int M
{
get
{
return this.m;
}
}
/**
* @return Tpb: The integer <code>k</code> where <code>x<sup>m</sup> +
* x<sup>k</sup> + 1</code> represents the reduction polynomial
* <code>f(z)</code>.<br/>
* Ppb: The integer <code>k1</code> where <code>x<sup>m</sup> +
* x<sup>k3</sup> + x<sup>k2</sup> + x<sup>k1</sup> + 1</code>
* represents the reduction polynomial <code>f(z)</code>.<br/>
*/
public int K1
{
get
{
return this.ks[0];
}
}
/**
* @return Tpb: Always returns <code>0</code><br/>
* Ppb: The integer <code>k2</code> where <code>x<sup>m</sup> +
* x<sup>k3</sup> + x<sup>k2</sup> + x<sup>k1</sup> + 1</code>
* represents the reduction polynomial <code>f(z)</code>.<br/>
*/
public int K2
{
get
{
return this.ks.Length >= 2 ? this.ks[1] : 0;
}
}
/**
* @return Tpb: Always set to <code>0</code><br/>
* Ppb: The integer <code>k3</code> where <code>x<sup>m</sup> +
* x<sup>k3</sup> + x<sup>k2</sup> + x<sup>k1</sup> + 1</code>
* represents the reduction polynomial <code>f(z)</code>.<br/>
*/
public int K3
{
get
{
return this.ks.Length >= 3 ? this.ks[2] : 0;
}
}
public override bool Equals(
object obj)
{
if(obj == this)
return true;
F2mFieldElement other = obj as F2mFieldElement;
if(other == null)
return false;
return Equals(other);
}
public virtual bool Equals(
F2mFieldElement other)
{
return ((this.m == other.m)
&& (this.representation == other.representation)
&& Arrays.AreEqual(this.ks, other.ks)
&& (this.x.Equals(other.x)));
}
public override int GetHashCode()
{
return x.GetHashCode() ^ m ^ Arrays.GetHashCode(ks);
}
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Reactive.Concurrency;
using System.Threading;
using Avalonia.Controls;
using Avalonia.Controls.Templates;
using Avalonia.Input;
using Avalonia.Input.Platform;
using Avalonia.Input.Raw;
using Avalonia.Layout;
using Avalonia.Platform;
using Avalonia.Styling;
using Avalonia.Threading;
namespace Avalonia
{
/// <summary>
/// Encapsulates a Avalonia application.
/// </summary>
/// <remarks>
/// The <see cref="Application"/> class encapsulates Avalonia application-specific
/// functionality, including:
/// - A global set of <see cref="DataTemplates"/>.
/// - A global set of <see cref="Styles"/>.
/// - A <see cref="FocusManager"/>.
/// - An <see cref="InputManager"/>.
/// - Registers services needed by the rest of Avalonia in the <see cref="RegisterServices"/>
/// method.
/// - Tracks the lifetime of the application.
/// </remarks>
public class Application : IApplicationLifecycle, IGlobalDataTemplates, IGlobalStyles, IStyleRoot, IResourceNode
{
/// <summary>
/// The application-global data templates.
/// </summary>
private DataTemplates _dataTemplates;
private readonly Lazy<IClipboard> _clipboard =
new Lazy<IClipboard>(() => (IClipboard)AvaloniaLocator.Current.GetService(typeof(IClipboard)));
private readonly Styler _styler = new Styler();
private Styles _styles;
private IResourceDictionary _resources;
/// <summary>
/// Initializes a new instance of the <see cref="Application"/> class.
/// </summary>
public Application()
{
OnExit += OnExiting;
}
/// <inheritdoc/>
public event EventHandler<ResourcesChangedEventArgs> ResourcesChanged;
/// <summary>
/// Gets the current instance of the <see cref="Application"/> class.
/// </summary>
/// <value>
/// The current instance of the <see cref="Application"/> class.
/// </value>
public static Application Current
{
get { return AvaloniaLocator.Current.GetService<Application>(); }
}
/// <summary>
/// Gets or sets the application's global data templates.
/// </summary>
/// <value>
/// The application's global data templates.
/// </value>
public DataTemplates DataTemplates => _dataTemplates ?? (_dataTemplates = new DataTemplates());
/// <summary>
/// Gets the application's focus manager.
/// </summary>
/// <value>
/// The application's focus manager.
/// </value>
public IFocusManager FocusManager
{
get;
private set;
}
/// <summary>
/// Gets the application's input manager.
/// </summary>
/// <value>
/// The application's input manager.
/// </value>
public InputManager InputManager
{
get;
private set;
}
/// <summary>
/// Gets the application clipboard.
/// </summary>
public IClipboard Clipboard => _clipboard.Value;
/// <summary>
/// Gets the application's global resource dictionary.
/// </summary>
public IResourceDictionary Resources
{
get => _resources ?? (Resources = new ResourceDictionary());
set
{
Contract.Requires<ArgumentNullException>(value != null);
var hadResources = false;
if (_resources != null)
{
hadResources = _resources.Count > 0;
_resources.ResourcesChanged -= ResourcesChanged;
}
_resources = value;
_resources.ResourcesChanged += ResourcesChanged;
if (hadResources || _resources.Count > 0)
{
ResourcesChanged?.Invoke(this, new ResourcesChangedEventArgs());
}
}
}
/// <summary>
/// Gets the application's global styles.
/// </summary>
/// <value>
/// The application's global styles.
/// </value>
/// <remarks>
/// Global styles apply to all windows in the application.
/// </remarks>
public Styles Styles => _styles ?? (_styles = new Styles());
/// <inheritdoc/>
bool IDataTemplateHost.IsDataTemplatesInitialized => _dataTemplates != null;
/// <summary>
/// Gets the styling parent of the application, which is null.
/// </summary>
IStyleHost IStyleHost.StylingParent => null;
/// <inheritdoc/>
bool IStyleHost.IsStylesInitialized => _styles != null;
/// <inheritdoc/>
bool IResourceProvider.HasResources => _resources?.Count > 0;
/// <inheritdoc/>
IResourceNode IResourceNode.ResourceParent => null;
/// <summary>
/// Initializes the application by loading XAML etc.
/// </summary>
public virtual void Initialize()
{
}
/// <summary>
/// Runs the application's main loop until the <see cref="ICloseable"/> is closed.
/// </summary>
/// <param name="closable">The closable to track</param>
public void Run(ICloseable closable)
{
var source = new CancellationTokenSource();
closable.Closed += OnExiting;
closable.Closed += (s, e) => source.Cancel();
Dispatcher.UIThread.MainLoop(source.Token);
}
/// <summary>
/// Runs the application's main loop until the <see cref="CancellationToken"/> is cancelled.
/// </summary>
/// <param name="token">The token to track</param>
public void Run(CancellationToken token)
{
Dispatcher.UIThread.MainLoop(token);
}
/// <summary>
/// Exits the application
/// </summary>
public void Exit()
{
OnExit?.Invoke(this, EventArgs.Empty);
}
/// <inheritdoc/>
bool IResourceProvider.TryGetResource(string key, out object value)
{
value = null;
return (_resources?.TryGetResource(key, out value) ?? false) ||
Styles.TryGetResource(key, out value);
}
/// <summary>
/// Sent when the application is exiting.
/// </summary>
public event EventHandler OnExit;
/// <summary>
/// Called when the application is exiting.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected virtual void OnExiting(object sender, EventArgs e)
{
}
/// <summary>
/// Register's the services needed by Avalonia.
/// </summary>
public virtual void RegisterServices()
{
AvaloniaSynchronizationContext.InstallIfNeeded();
FocusManager = new FocusManager();
InputManager = new InputManager();
AvaloniaLocator.CurrentMutable
.Bind<IAccessKeyHandler>().ToTransient<AccessKeyHandler>()
.Bind<IGlobalDataTemplates>().ToConstant(this)
.Bind<IGlobalStyles>().ToConstant(this)
.Bind<IFocusManager>().ToConstant(FocusManager)
.Bind<IInputManager>().ToConstant(InputManager)
.Bind<IKeyboardNavigationHandler>().ToTransient<KeyboardNavigationHandler>()
.Bind<IStyler>().ToConstant(_styler)
.Bind<ILayoutManager>().ToSingleton<LayoutManager>()
.Bind<IApplicationLifecycle>().ToConstant(this)
.Bind<IScheduler>().ToConstant(AvaloniaScheduler.Instance)
.Bind<IDragDropDevice>().ToConstant(DragDropDevice.Instance)
.Bind<IPlatformDragSource>().ToTransient<InProcessDragSource>();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Reflection;
using System.Runtime.CompilerServices;
using Xunit;
using static System.Linq.Expressions.Expression;
namespace System.Linq.Expressions.Tests
{
public class UnaryQuoteTests
{
        // Verifies that quoting preserves an explicitly-typed Block's result type:
        // a void-typed block stays void and an object-typed block stays object,
        // as observed through the quoted lambda's Body.Type at runtime.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void QuotePreservesTypingOfBlock(bool useInterpreter)
        {
            ParameterExpression x = Parameter(typeof(int));

            Expression<Func<int, Type>> f1 =
                Lambda<Func<int, Type>>(
                    Call(
                        typeof(UnaryQuoteTests).GetMethod(nameof(Quote1)),
                        Lambda(
                            Block(typeof(void), x)
                        )
                    ),
                    x
                );

            Assert.Equal(typeof(void), f1.Compile(useInterpreter)(42));

            ParameterExpression s = Parameter(typeof(string));

            Expression<Func<string, Type>> f2 =
                Lambda<Func<string, Type>>(
                    Call(
                        typeof(UnaryQuoteTests).GetMethod(nameof(Quote2)),
                        Lambda(
                            Block(typeof(object), s)
                        )
                    ),
                    s
                );

            Assert.Equal(typeof(object), f2.Compile(useInterpreter)("bar"));
        }
        // Call target for QuotePreservesTypingOfBlock: reports the body type of a quoted Action.
        public static Type Quote1(Expression<Action> e) => e.Body.Type;
        // Call target for QuotePreservesTypingOfBlock: reports the body type of a quoted Func&lt;object&gt;.
        public static Type Quote2(Expression<Func<object>> e) => e.Body.Type;
        // Quoting a parameterless Action lambda yields a LambdaExpression whose
        // body is the original method call, with no parameters.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Lambda_Action(bool useInterpreter)
        {
            Expression<Func<LambdaExpression>> f = () => GetQuote<Action>(() => Nop());

            var quote = f.Compile(useInterpreter)();

            Assert.Equal(0, quote.Parameters.Count);
            Assert.Equal(ExpressionType.Call, quote.Body.NodeType);

            var call = (MethodCallExpression)quote.Body;
            Assert.Equal(typeof(UnaryQuoteTests).GetMethod(nameof(Nop)), call.Method);
        }
        // Same as Quote_Lambda_Action, but constructs the Quote node through the
        // MakeUnary(ExpressionType.Quote, ...) factory instead of Quote(...).
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Lambda_Action_MakeUnary(bool useInterpreter)
        {
            Expression<Action> e = () => Nop();
            UnaryExpression q = MakeUnary(ExpressionType.Quote, e, null);
            Expression<Func<LambdaExpression>> f = Lambda<Func<LambdaExpression>>(q);

            var quote = f.Compile(useInterpreter)();

            Assert.Equal(0, quote.Parameters.Count);
            Assert.Equal(ExpressionType.Call, quote.Body.NodeType);

            var call = (MethodCallExpression)quote.Body;
            Assert.Equal(typeof(UnaryQuoteTests).GetMethod(nameof(Nop)), call.Method);
        }
        // A quoted identity lambda keeps its parameter: the quote's body is the
        // same node instance as its single parameter.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Lambda_IdentityFunc(bool useInterpreter)
        {
            Expression<Func<LambdaExpression>> f = () => GetQuote<Func<int, int>>(x => x);

            var quote = f.Compile(useInterpreter)();

            Assert.Equal(1, quote.Parameters.Count);
            Assert.Same(quote.Body, quote.Parameters[0]);
        }
        // A closed-over variable in a quoted lambda is rewritten to its runtime
        // value at quote time (boxed form checked via AssertIsBox).
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Lambda_Closure1(bool useInterpreter)
        {
            Expression<Func<int, LambdaExpression>> f = x => GetQuote<Func<int>>(() => x);

            var quote = f.Compile(useInterpreter)(42);

            Assert.Equal(0, quote.Parameters.Count);

            AssertIsBox(quote.Body, 42, useInterpreter);
        }
        // Two closed-over variables from different scopes are both rewritten to
        // their runtime values; the Add node's shape is preserved.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Lambda_Closure2(bool useInterpreter)
        {
            // Using an unchecked addition to ensure that an Add expression is used (and not AddChecked)
            Expression<Func<int, Func<int, LambdaExpression>>> f = x => y => GetQuote<Func<int>>(() => unchecked(x + y));

            var quote = f.Compile(useInterpreter)(1)(2);

            Assert.Equal(0, quote.Parameters.Count);
            Assert.Equal(ExpressionType.Add, quote.Body.NodeType);

            var add = (BinaryExpression)quote.Body;
            AssertIsBox(add.Left, 1, useInterpreter);
            AssertIsBox(add.Right, 2, useInterpreter);
        }
        // A block that captures nothing is reused as-is in the quote (reference
        // equality on the body).
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Block_Action(bool useInterpreter)
        {
            var expr =
                Block(
                    Call(typeof(UnaryQuoteTests).GetMethod(nameof(Nop)))
                );

            var f = BuildQuote<Func<LambdaExpression>, Action>(expr);

            var quote = f.Compile(useInterpreter)();

            Assert.Equal(0, quote.Parameters.Count);
            Assert.Same(expr, quote.Body);
        }
        // A block whose only variable is its own local (not captured from an
        // outer scope) is reused as-is in the quote.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Block_Local(bool useInterpreter)
        {
            var x = Parameter(typeof(int));

            var expr =
                Block(
                    new[] { x },
                    Assign(x, Constant(42)),
                    x
                );

            var f = BuildQuote<Func<LambdaExpression>, Func<int>>(expr);

            var quote = f.Compile(useInterpreter)();

            Assert.Equal(0, quote.Parameters.Count);
            Assert.Same(expr, quote.Body);
        }
        // The block-local x shadows the outer parameter x, so no rewriting
        // happens and the block is reused as-is (outer value 43 is ignored).
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Block_Local_Shadow(bool useInterpreter)
        {
            var x = Parameter(typeof(int));

            var expr =
                Block(
                    new[] { x },
                    Assign(x, Constant(42)),
                    x
                );

            var f = BuildQuote<Func<int, LambdaExpression>, Func<int>>(expr, x);

            var quote = f.Compile(useInterpreter)(43);

            Assert.Equal(0, quote.Parameters.Count);
            Assert.Same(expr, quote.Body);
        }
        // A block referencing a captured outer variable is rewritten: the new
        // block has the same shape but the capture is replaced by its boxed value.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Block_Closure(bool useInterpreter)
        {
            var x = Parameter(typeof(int));

            var expr =
                Block(
                    x
                );

            var f = BuildQuote<Func<int, LambdaExpression>, Func<int>>(expr, x);

            var quote = f.Compile(useInterpreter)(42);

            Assert.Equal(0, quote.Parameters.Count);

            var block = quote.Body as BlockExpression;
            Assert.NotNull(block);
            Assert.Equal(0, block.Variables.Count);
            Assert.Equal(1, block.Expressions.Count);

            AssertIsBox(block.Expressions[0], 42, useInterpreter);
        }
        // Mixed case: the rewritten block keeps its own local y (same node) and
        // its unchanged first expression, while the captured x becomes a boxed value.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_Block_LocalAndClosure(bool useInterpreter)
        {
            var x = Parameter(typeof(int));
            var y = Parameter(typeof(int));

            var expr =
                Block(
                    new[] { y },
                    Assign(y, Constant(2)),
                    Add(
                        x,
                        y
                    )
                );

            var f = BuildQuote<Func<int, LambdaExpression>, Func<int>>(expr, x);

            var quote = f.Compile(useInterpreter)(1);

            Assert.Equal(0, quote.Parameters.Count);

            var block = quote.Body as BlockExpression;
            Assert.NotNull(block);
            Assert.Equal(1, block.Variables.Count);
            Assert.Same(y, block.Variables[0]);
            Assert.Equal(2, block.Expressions.Count);
            Assert.Same(block.Expressions[0], expr.Expressions[0]);

            var expr1 = block.Expressions[1];
            Assert.Equal(ExpressionType.Add, expr1.NodeType);

            var add = (BinaryExpression)expr1;
            AssertIsBox(add.Left, 1, useInterpreter);
            Assert.Same(y, add.Right);
        }
        // A try/catch whose only variable is the catch's own exception variable
        // captures nothing, so the whole expression is reused as-is.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_CatchBlock_Local(bool useInterpreter)
        {
            var ex = Parameter(typeof(Exception));

            var expr =
                TryCatch(
                    Empty(),
                    Catch(
                        ex,
                        Empty()
                    )
                );

            var f = BuildQuote<Func<LambdaExpression>, Action>(expr);

            var quote = f.Compile(useInterpreter)();

            Assert.Equal(0, quote.Parameters.Count);
            Assert.Same(expr, quote.Body);
        }
        // Only the try body captures an outer variable: the body is rewritten to
        // a boxed value while the untouched catch handler is reused as-is.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_CatchBlock_Variable_Closure1(bool useInterpreter)
        {
            var x = Parameter(typeof(int));
            var ex = Parameter(typeof(Exception));

            var expr =
                TryCatch(
                    x,
                    Catch(
                        ex,
                        Constant(0)
                    )
                );

            var f = BuildQuote<Func<int, LambdaExpression>, Func<int>>(expr, x);

            var quote = f.Compile(useInterpreter)(42);

            Assert.Equal(0, quote.Parameters.Count);

            var @try = quote.Body as TryExpression;
            Assert.NotNull(@try);

            AssertIsBox(@try.Body, 42, useInterpreter);
            Assert.Null(@try.Fault);
            Assert.Null(@try.Finally);
            Assert.NotNull(@try.Handlers);
            Assert.Equal(1, @try.Handlers.Count);

            var handler = @try.Handlers[0];
            Assert.Same(expr.Handlers[0], handler);
        }
[Theory, ClassData(typeof(CompilationTypes))]
public void Quote_CatchBlock_Variable_Closure2(bool useInterpreter)
{
var x = Parameter(typeof(int));
var ex = Parameter(typeof(Exception));
var expr =
TryCatch(
Constant(0),
Catch(
ex,
x
)
);
var f = BuildQuote<Func<int, LambdaExpression>, Func<int>>(expr, x);
var quote = f.Compile(useInterpreter)(42);
Assert.Equal(0, quote.Parameters.Count);
var @try = quote.Body as TryExpression;
Assert.NotNull(@try);
Assert.Same(expr.Body, @try.Body);
Assert.Null(@try.Fault);
Assert.Null(@try.Finally);
Assert.NotNull(@try.Handlers);
Assert.Equal(1, @try.Handlers.Count);
var handler = @try.Handlers[0];
Assert.Null(handler.Filter);
Assert.Same(ex, handler.Variable);
AssertIsBox(@handler.Body, 42, useInterpreter);
}
        // Variant of Quote_CatchBlock_Variable_Closure1 with a type-only catch
        // (no exception variable): the unchanged handler is still reused as-is.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_CatchBlock_NoVariable_Closure1(bool useInterpreter)
        {
            var x = Parameter(typeof(int));

            var expr =
                TryCatch(
                    x,
                    Catch(
                        typeof(Exception),
                        Constant(0)
                    )
                );

            var f = BuildQuote<Func<int, LambdaExpression>, Func<int>>(expr, x);

            var quote = f.Compile(useInterpreter)(42);

            Assert.Equal(0, quote.Parameters.Count);

            var @try = quote.Body as TryExpression;
            Assert.NotNull(@try);

            AssertIsBox(@try.Body, 42, useInterpreter);
            Assert.Null(@try.Fault);
            Assert.Null(@try.Finally);
            Assert.NotNull(@try.Handlers);
            Assert.Equal(1, @try.Handlers.Count);

            var handler = @try.Handlers[0];
            Assert.Same(expr.Handlers[0], handler);
        }
[Theory, ClassData(typeof(CompilationTypes))]
public void Quote_CatchBlock_NoVariable_Closure2(bool useInterpreter)
{
var x = Parameter(typeof(int));
var expr =
TryCatch(
Constant(0),
Catch(
typeof(Exception),
x
)
);
var f = BuildQuote<Func<int, LambdaExpression>, Func<int>>(expr, x);
var quote = f.Compile(useInterpreter)(42);
Assert.Equal(0, quote.Parameters.Count);
var @try = quote.Body as TryExpression;
Assert.NotNull(@try);
Assert.Same(expr.Body, @try.Body);
Assert.Null(@try.Fault);
Assert.Null(@try.Finally);
Assert.NotNull(@try.Handlers);
Assert.Equal(1, @try.Handlers.Count);
var handler = @try.Handlers[0];
Assert.Null(handler.Filter);
Assert.Null(handler.Variable);
AssertIsBox(@handler.Body, 42, useInterpreter);
}
        // A RuntimeVariables node over a captured variable exposes a live,
        // writable view of that variable through IRuntimeVariables.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_RuntimeVariables_Closure(bool useInterpreter)
        {
            var x = Parameter(typeof(int));

            var expr =
                RuntimeVariables(
                    x
                );

            var f = BuildQuote<Func<int, Expression<Func<IRuntimeVariables>>>, Func<IRuntimeVariables>>(expr, x);

            var quote = f.Compile(useInterpreter)(42);

            var vars = quote.Compile(useInterpreter)();

            Assert.Equal(1, vars.Count);
            Assert.Equal(42, vars[0]);

            vars[0] = 43;
            Assert.Equal(43, vars[0]);
        }
        // A RuntimeVariables node over a block-local variable also exposes a
        // live, writable view of that variable.
        [Theory, ClassData(typeof(CompilationTypes))]
        public void Quote_RuntimeVariables_Local(bool useInterpreter)
        {
            var x = Parameter(typeof(int));

            var expr =
                Block(
                    new[] { x },
                    Assign(x, Constant(42)),
                    RuntimeVariables(
                        x
                    )
                );

            var f = BuildQuote<Func<Expression<Func<IRuntimeVariables>>>, Func<IRuntimeVariables>>(expr);

            var quote = f.Compile(useInterpreter)();

            var vars = quote.Compile(useInterpreter)();

            Assert.Equal(1, vars.Count);
            Assert.Equal(42, vars[0]);

            vars[0] = 43;
            Assert.Equal(43, vars[0]);
        }
[Theory, ClassData(typeof(CompilationTypes))]
public void Quote_RuntimeVariables_ClosureAndLocal(bool useInterpreter)
{
    // RuntimeVariables mixing a closed-over parameter and a block-local variable:
    // both must be readable and writable through the quoted IRuntimeVariables view.
    ParameterExpression closedOver = Parameter(typeof(int));
    ParameterExpression local = Parameter(typeof(int));
    BlockExpression body =
        Block(
            new[] { local },
            Assign(local, Constant(2)),
            RuntimeVariables(closedOver, local));

    Expression<Func<IRuntimeVariables>> quoted =
        BuildQuote<Func<int, Expression<Func<IRuntimeVariables>>>, Func<IRuntimeVariables>>(body, closedOver)
            .Compile(useInterpreter)(1);
    IRuntimeVariables vars = quoted.Compile(useInterpreter)();

    Assert.Equal(2, vars.Count);
    Assert.Equal(1, vars[0]);
    Assert.Equal(2, vars[1]);

    vars[0] = 3;
    vars[1] = 4;
    Assert.Equal(3, vars[0]);
    Assert.Equal(4, vars[1]);
}
[Fact]
public void NullLambda()
{
    // Quoting a null operand must be rejected eagerly with the parameter name "expression".
    Expression nullOperand = null;
    AssertExtensions.Throws<ArgumentNullException>("expression", () => Quote(nullOperand));
}
[Fact]
public void QuoteNonLamdba()
{
    // Only lambda expressions may be quoted; a delegate-typed constant is rejected.
    Func<int> zero = () => 0;
    ConstantExpression delegateConstant = Constant(zero);
    AssertExtensions.Throws<ArgumentException>("expression", () => Quote(delegateConstant));
}
[Fact]
public void CannotReduce()
{
    // A quote node is not reducible: Reduce is the identity and ReduceAndCheck throws.
    Expression<Func<int>> lambda = () => 2;
    Expression quoted = Expression.Quote(lambda);

    Assert.False(quoted.CanReduce);
    Assert.Same(quoted, quoted.Reduce());
    Assert.Throws<ArgumentException>(() => quoted.ReduceAndCheck());
}
[Fact]
public void TypeExplicitWithGeneralLambdaArgument()
{
    // Even when given a weakly typed LambdaExpression, the quote node's static
    // type is the strongly typed Expression<TDelegate>.
    LambdaExpression weaklyTyped = Lambda(Constant(2));
    Expression quoted = Quote(weaklyTyped);
    Assert.Equal(typeof(Expression<Func<int>>), quoted.Type);
}
// Asserts that <paramref name="expression"/> is the StrongBox access produced by
// runtime quoting of a hoisted variable holding <paramref name="value"/>.
private void AssertIsBox<T>(Expression expression, T value, bool isInterpreted)
{
    if (!isInterpreted)
    {
        AssertBox(expression, value);
        return;
    }

    // See https://github.com/dotnet/corefx/issues/11097 for the difference between
    // runtime expression quoting in the compiler and the interpreter: the interpreter
    // wraps the box field access in a Convert node.
    Assert.Equal(ExpressionType.Convert, expression.NodeType);
    UnaryExpression convert = (UnaryExpression)expression;
    Assert.Equal(typeof(T), convert.Type);
    AssertBox<object>(convert.Operand, value);
}
// Asserts that <paramref name="expression"/> reads StrongBox{T}.Value from a
// constant StrongBox{T} instance currently holding <paramref name="value"/>.
private void AssertBox<T>(Expression expression, T value)
{
    Assert.Equal(ExpressionType.MemberAccess, expression.NodeType);
    MemberExpression fieldAccess = (MemberExpression)expression;

    FieldInfo boxField = fieldAccess.Member as FieldInfo;
    Assert.NotNull(boxField);
    Assert.Equal(typeof(StrongBox<T>).GetField(nameof(StrongBox<T>.Value)), boxField);

    ConstantExpression boxConstant = fieldAccess.Expression as ConstantExpression;
    Assert.NotNull(boxConstant);

    StrongBox<T> box = boxConstant.Value as StrongBox<T>;
    Assert.NotNull(box);
    Assert.Equal(value, box.Value);
}
// Builds Lambda<TDelegate>(GetQuote<TQuoteType>(Quote(Lambda<TQuoteType>(body))), parameters),
// i.e. an outer lambda whose compilation surfaces the runtime-quoted inner lambda.
private static Expression<TDelegate> BuildQuote<TDelegate, TQuoteType>(Expression body, params ParameterExpression[] parameters)
{
    MethodInfo getQuote =
        typeof(UnaryQuoteTests).GetMethod(nameof(GetQuote)).MakeGenericMethod(typeof(TQuoteType));

    return Lambda<TDelegate>(
        Call(getQuote, Quote(Lambda<TQuoteType>(body))),
        parameters);
}
// Identity helper invoked from the generated expression trees; generic so the
// quoted lambda is surfaced with its strongly typed Expression<T> form intact.
public static Expression<T> GetQuote<T>(Expression<T> e) => e;
// Intentionally empty; provides a no-op method target for tests that need one.
public static void Nop() { }
}
}
| |
/*
* CID001c.cs - sq culture handler.
*
* Copyright (c) 2003 Southern Storm Software, Pty Ltd
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
// Generated from "sq.txt".
namespace I18N.Other
{
using System;
using System.Globalization;
using I18N.Common;
/// <summary>
/// Culture handler for Albanian ("sq"), culture identifier 0x001C.
/// All values are generated from "sq.txt"; do not hand-edit the data
/// without updating the generator input.
/// </summary>
public class CID001c : RootCulture
{
    // 0x001C is the LCID for neutral Albanian.
    public CID001c() : base(0x001C) {}
    public CID001c(int culture) : base(culture) {}

    // Neutral culture name.
    public override String Name
    {
        get
        {
            return "sq";
        }
    }
    // ISO 639-2 three-letter code.
    public override String ThreeLetterISOLanguageName
    {
        get
        {
            return "sqi";
        }
    }
    // Windows three-letter language code.
    public override String ThreeLetterWindowsLanguageName
    {
        get
        {
            return "SQI";
        }
    }
    // ISO 639-1 two-letter code.
    public override String TwoLetterISOLanguageName
    {
        get
        {
            return "sq";
        }
    }

    /// <summary>
    /// Albanian date/time formatting: native day/month names, "PD"/"MD"
    /// day-period designators, "-" as the date separator and "." as the
    /// time separator.
    /// </summary>
    public override DateTimeFormatInfo DateTimeFormat
    {
        get
        {
            DateTimeFormatInfo dfi = base.DateTimeFormat;
            dfi.AMDesignator = "PD";
            dfi.PMDesignator = "MD";
            dfi.AbbreviatedDayNames = new String[] {"Die", "H\u00EBn", "Mar", "M\u00EBr", "Enj", "Pre", "Sht"};
            dfi.DayNames = new String[] {"e diel", "e h\u00EBn\u00EB", "e mart\u00EB", "e m\u00EBrkur\u00EB", "e enjte", "e premte", "e shtun\u00EB"};
            // Month arrays carry a trailing empty slot for the (unused) 13th month.
            dfi.AbbreviatedMonthNames = new String[] {"Jan", "Shk", "Mar", "Pri", "Maj", "Qer", "Kor", "Gsh", "Sht", "Tet", "N\u00EBn", "Dhj", ""};
            dfi.MonthNames = new String[] {"janar", "shkurt", "mars", "prill", "maj", "qershor", "korrik", "gusht", "shtator", "tetor", "n\u00EBntor", "dhjetor", ""};
            dfi.DateSeparator = "-";
            dfi.TimeSeparator = ".";
            dfi.LongDatePattern = "dd MMMM yyyy";
            dfi.LongTimePattern = "h.mm.ss.tt z";
            dfi.ShortDatePattern = "yy-MM-dd";
            dfi.ShortTimePattern = "h.mm.tt";
            dfi.FullDateTimePattern = "dddd, dd MMMM yyyy h.mm.ss.tt z";
            // Pattern table keyed by standard format specifier. Some keys
            // ("f", "g", "t") repeat — presumably later entries act as
            // alternate patterns for the same specifier; TODO confirm
            // against I18NSetDateTimePatterns' contract.
            dfi.I18NSetDateTimePatterns(new String[] {
                "d:yy-MM-dd",
                "D:dddd, dd MMMM yyyy",
                "f:dddd, dd MMMM yyyy h.mm.ss.tt z",
                "f:dddd, dd MMMM yyyy h.mm.ss.tt z",
                "f:dddd, dd MMMM yyyy h:mm:ss.tt",
                "f:dddd, dd MMMM yyyy h.mm.tt",
                "F:dddd, dd MMMM yyyy HH.mm.ss",
                "g:yy-MM-dd h.mm.ss.tt z",
                "g:yy-MM-dd h.mm.ss.tt z",
                "g:yy-MM-dd h:mm:ss.tt",
                "g:yy-MM-dd h.mm.tt",
                "G:yy-MM-dd HH.mm.ss",
                "m:MMMM dd",
                "M:MMMM dd",
                "r:ddd, dd MMM yyyy HH':'mm':'ss 'GMT'",
                "R:ddd, dd MMM yyyy HH':'mm':'ss 'GMT'",
                "s:yyyy'-'MM'-'dd'T'HH':'mm':'ss",
                "t:h.mm.ss.tt z",
                "t:h.mm.ss.tt z",
                "t:h:mm:ss.tt",
                "t:h.mm.tt",
                "T:HH.mm.ss",
                "u:yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
                "U:dddd, dd MMMM yyyy HH:mm:ss",
                "y:yyyy MMMM",
                "Y:yyyy MMMM",
            });
            return dfi;
        }
        set
        {
            base.DateTimeFormat = value; // not used
        }
    }

    /// <summary>
    /// Albanian number formatting: "," as the decimal separator and "."
    /// as the group separator (the reverse of en-US).
    /// </summary>
    public override NumberFormatInfo NumberFormat
    {
        get
        {
            NumberFormatInfo nfi = base.NumberFormat;
            nfi.CurrencyDecimalSeparator = ",";
            nfi.CurrencyGroupSeparator = ".";
            nfi.NumberGroupSeparator = ".";
            nfi.PercentGroupSeparator = ".";
            nfi.NegativeSign = "-";
            nfi.NumberDecimalSeparator = ",";
            nfi.PercentDecimalSeparator = ",";
            nfi.PercentSymbol = "%";
            nfi.PerMilleSymbol = "\u2030";
            return nfi;
        }
        set
        {
            base.NumberFormat = value; // not used
        }
    }

    // Localized display name for the language code; falls back to the base lookup.
    public override String ResolveLanguage(String name)
    {
        switch(name)
        {
            case "sq": return "shqipe";
        }
        return base.ResolveLanguage(name);
    }

    // Localized display name for the country code; falls back to the base lookup.
    public override String ResolveCountry(String name)
    {
        switch(name)
        {
            case "AL": return "Shqip\u00EBria";
        }
        return base.ResolveCountry(name);
    }

    // Text info exposing the code pages used for Albanian text
    // (Central European ANSI/OEM/Mac and an EBCDIC variant).
    private class PrivateTextInfo : _I18NTextInfo
    {
        public PrivateTextInfo(int culture) : base(culture) {}

        public override int ANSICodePage
        {
            get
            {
                return 1250; // Windows Central European
            }
        }
        public override int EBCDICCodePage
        {
            get
            {
                return 20880;
            }
        }
        public override int MacCodePage
        {
            get
            {
                return 10029; // Mac Central European
            }
        }
        public override int OEMCodePage
        {
            get
            {
                return 852; // DOS Central European
            }
        }
        public override String ListSeparator
        {
            get
            {
                return ";";
            }
        }

    }; // class PrivateTextInfo

    public override TextInfo TextInfo
    {
        get
        {
            return new PrivateTextInfo(LCID);
        }
    }

}; // class CID001c
// Culture handler registered under the neutral language name "sq";
// behaviorally identical to CID001c.
public class CNsq : CID001c
{
    // The implicit base() call reaches the same CID001c() constructor.
    public CNsq() {}

}; // class CNsq
}; // namespace I18N.Other
| |
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Runtime.InteropServices;
using System.IO;
using Microsoft.Xna.Framework.Graphics;
#if MACOS
using MonoMac.CoreGraphics;
using MonoMac.AppKit;
using MonoMac.Foundation;
using MonoMac.CoreText;
using MonoMac.ImageIO;
#else
using CoreGraphics;
using UIKit;
using Foundation;
using CoreText;
using ImageIO;
#endif
namespace CocosSharp
{
public partial class CCLabel
{
// private static CTFont _font;
// private static CGBitmapContext _bitmap;
// private static IntPtr _bitmapData;
// private static CCColor4B _brush;
// private static Dictionary<char, KerningInfo> _abcValues = new Dictionary<char, KerningInfo>();
//
// private void CreateFont(string fontName, float fontSize, CCRawList<char> charset)
// {
//
// _font = CCLabelUtilities.CreateFont (fontName, fontSize);
//
// var value = new CCLabelUtilities.ABCFloat[1];
//
// _abcValues.Clear();;
//
// for (int i = 0; i < charset.Count; i++)
// {
// var ch = charset[i];
// CCLabelUtilities.GetCharABCWidthsFloat(ch, _font, out value);
// _abcValues.Add(ch, new KerningInfo() { A = value[0].abcfA, B = value[0].abcfB, C = value[0].abcfC });
// }
//
// }
//
// private float GetFontHeight()
// {
// return _font.GetHeight();
// }
//
// private CCSize GetMeasureString(string text)
// {
// return CCLabelUtilities.MeasureString(text, _font);
// }
//
// private KerningInfo GetKerningInfo(char ch)
// {
// return _abcValues[ch];
// }
//
// private void CreateBitmap(int width, int height)
// {
//// if (_bitmap == null || (_bitmap.Width < width || _bitmap.Height < height))
//// {
//
// _bitmap = CCLabelUtilities.CreateBitmap (width, height);
// //}
//
// //if (_brush == null)
// //{
// _brush = CCColor4B.White;
// //}
// }
//
// private unsafe byte* GetBitmapData(string s, out int stride)
// {
//
// var size = GetMeasureString(s);
//
// var w = (int)(Math.Ceiling(size.Width += 2));
// var h = (int)(Math.Ceiling(size.Height += 2));
//
// CreateBitmap(w, h);
//
// CCLabelUtilities.NativeDrawString(_bitmap, s, _font, _brush, new RectangleF(0,0,w,h));
// _bitmapData = _bitmap.Data;
//
// stride = (int)_bitmap.BytesPerRow;
//
// return (byte*)_bitmapData;
// }
// private static void SaveFileStream(String path, Stream stream)
// {
// var fileStream = new FileStream(path, FileMode.Create, FileAccess.Write);
// stream.CopyTo(fileStream);
// fileStream.Dispose();
// }
// Cache mapping a font file name to the PostScript name it resolved to, so
// each font file is loaded and registered with CoreText at most once.
static Dictionary<string, string> nativeFontDescriptors;

// Resolves a font file name to a name usable by the native font APIs.
// Names carrying a file extension are loaded from content, registered with the
// font manager, and mapped to their PostScript name; plain family names are
// returned unchanged.
string LoadFontFile (string fileName)
{
    CTFont nativeFont;
    // Size 0: the CTFont is created only to discover the PostScript name,
    // not to render with — presumably; TODO confirm CTFont(cgFont, 0, null) semantics.
    var dpiSize = 0;

    var ext = Path.GetExtension(fileName);

    if (!String.IsNullOrEmpty(ext))
    {
        // Lazily create the cache and short-circuit on a previously resolved name.
        if (nativeFontDescriptors == null)
            nativeFontDescriptors = new Dictionary<string, string> ();

        string fd = null;
        if (nativeFontDescriptors.TryGetValue(fileName, out fd))
            return fd;

        // We will not use CTFontManager.RegisterFontsForUrl (url, CTFontManagerScope.Process)
        // here. The reason is that there is no way we can be sure that the font can be
        // created so as to identify the family name afterwards. So instead we create a
        // CGFont from a data provider, create a CTFont to obtain the CTFontDescriptor,
        // and store the family name / font descriptor to be accessed later.
        try {
            var filePath = string.Empty;
            // NOTE(review): despite the method name, the out value is handed straight
            // to CGDataProvider — confirm whether this is a file path or raw bytes.
            CCContentManager.SharedContentManager.GetAssetStreamAsBytes(fileName, out filePath);
            var dataProvider = new CGDataProvider (filePath);
            var cgFont = CGFont.CreateFromProvider (dataProvider);

            try
            {
                nativeFont = new CTFont(cgFont, dpiSize, null);
                if (!nativeFontDescriptors.ContainsKey(fileName))
                {
                    nativeFontDescriptors.Add(fileName, nativeFont.PostScriptName);
                    NSError error;
                    var registered = CTFontManager.RegisterGraphicsFont(cgFont, out error);
                    if (!registered)
                    {
                        // If the error code is 105 then the font we are trying to register
                        // is already registered; we do not report that as an error.
                        if (error.Code != 105)
                            throw new ArgumentException("Error registering: " + Path.GetFileName(fileName));
                    }
                }

                return nativeFont.PostScriptName;
            }
            catch
            {
                // note: MS throws the same FileNotFoundException if the file exists
                // but isn't a valid font file.
                // NOTE(review): this also converts the ArgumentException thrown above
                // into FileNotFoundException, masking registration failures — confirm
                // whether that is intended.
                throw new System.IO.FileNotFoundException (fileName);
            }
        }
        catch (Exception)
        {
            // note: MS throws the same FileNotFoundException if the file exists
            // but isn't a valid font file.
            throw new System.IO.FileNotFoundException (fileName);
        }
    }

    // No extension: assume an installed font family name.
    return fileName;
}
#if MACOS
// Renders `text` into a texture using the Mac (AppKit) text stack.
// Font, color, alignment, line-break mode and dimensions come from
// `textDefinition`; returns an empty CCTexture2D on empty input or failure.
internal CCTexture2D CreateTextSprite(string text, CCFontDefinition textDefinition)
{
    if (string.IsNullOrEmpty(text))
        return new CCTexture2D();

    int imageWidth;
    int imageHeight;
    var textDef = textDefinition;

    // Scale font size and requested dimensions from points to texels.
    var contentScaleFactorWidth = CCLabel.DefaultTexelToContentSizeRatios.Width;
    var contentScaleFactorHeight = CCLabel.DefaultTexelToContentSizeRatios.Height;

    textDef.FontSize *= contentScaleFactorWidth;
    textDef.Dimensions.Width *= contentScaleFactorWidth;
    textDef.Dimensions.Height *= contentScaleFactorHeight;

    //bool hasPremultipliedAlpha;

    // Font resolution: a ".ttf" name is loaded from content; anything else is
    // looked up as an installed family. Arial is the last-resort fallback.
    NSFont font = null;
    var ext = System.IO.Path.GetExtension(textDef.FontName);

    if (!String.IsNullOrEmpty(ext) && ext.ToLower() == ".ttf")
    {
        try
        {
            textDef.FontName = LoadFontFile(textDef.FontName);
            font = NSFont.FromFontName(textDef.FontName, textDef.FontSize);
        }
        catch
        {
            CCLog.Log(".ttf {0} file not found or can not be loaded.", textDef.FontName);
        }
    }
    else
    {
        // font
        font = NSFontManager.SharedFontManager.FontWithFamily(textDef.FontName, NSFontTraitMask.Unbold | NSFontTraitMask.Unitalic, 0, textDef.FontSize);
    }

    if (font == null)
    {
        font = NSFontManager.SharedFontManager.FontWithFamily("Arial", NSFontTraitMask.Unbold | NSFontTraitMask.Unitalic, 0, textDef.FontSize);
        CCLog.Log("{0} not found.  Defaulting to Arial.", textDef.FontName);
    }

    // Always draw white; tinting happens elsewhere — presumably via the
    // sprite color; TODO confirm.
    var foregroundColor = NSColor.White;

    // Map engine alignment enums onto their AppKit equivalents.
    var horizontalAlignment = textDef.Alignment;
    var verticleAlignement = textDef.LineAlignment;

    var textAlign = (CCTextAlignment.Right == horizontalAlignment) ? NSTextAlignment.Right
        : (CCTextAlignment.Center == horizontalAlignment) ? NSTextAlignment.Center
        : NSTextAlignment.Left;

    // LineBreak
    var lineBreak = (CCLabelLineBreak.Character == textDef.LineBreak) ? NSLineBreakMode.CharWrapping
        : (CCLabelLineBreak.Word == textDef.LineBreak) ? NSLineBreakMode.ByWordWrapping
        : NSLineBreakMode.Clipping;

    var nsparagraphStyle = new NSMutableParagraphStyle();
    nsparagraphStyle.SetParagraphStyle(NSMutableParagraphStyle.DefaultParagraphStyle);
    nsparagraphStyle.LineBreakMode = lineBreak;
    nsparagraphStyle.Alignment = textAlign;

    // Create a new attributed string definition
    var nsAttributes = new NSStringAttributes ();

    // Font attribute
    nsAttributes.Font = font;
    nsAttributes.ForegroundColor = foregroundColor;
    nsAttributes.ParagraphStyle = nsparagraphStyle;

    var stringWithAttributes = new NSAttributedString(text, nsAttributes);

    var realDimensions = stringWithAttributes.Size;

    // Mac crashes if the width or height is 0
    if (realDimensions == SizeF.Empty)
    {
        CCLog.Log("Native string:", "Dimensions of native NSAttributedString can not be 0,0");
        return new CCTexture2D();
    }

    var dimensions = new SizeF(textDef.Dimensions.Width, textDef.Dimensions.Height);

    var layoutAvailable = true;

    //
    // * Note * This seems to only affect Mac because iOS works fine without this work around.
    // Right Alignment BoundingRectWithSize does not seem to be working correctly when the following conditions are set:
    //  1) Alignment Right
    //  2) No dimensions
    //  3) There are new line characters embedded in the string.
    //
    // So we set alignment to Left, calculate our bounds and then restore alignment afterwards before drawing.
    //
    if (dimensions.Width <= 0)
    {
        // 8388608 (2^23) acts as an effectively unbounded layout size.
        dimensions.Width = 8388608;
        layoutAvailable = false;

        // Set our alignment variables to left - see notes above.
        nsparagraphStyle.Alignment = NSTextAlignment.Left;
        stringWithAttributes.Dispose();
        stringWithAttributes = null;
        stringWithAttributes = new NSAttributedString(text, nsAttributes);
    }

    if (dimensions.Height <= 0)
    {
        dimensions.Height = 8388608;
        layoutAvailable = false;
    }

    // Calculate our bounding rectangle
    var boundingRect = stringWithAttributes.BoundingRectWithSize(new SizeF((int)dimensions.Width, (int)dimensions.Height),
        NSStringDrawingOptions.UsesLineFragmentOrigin);

    if (!layoutAvailable)
    {
        // Shrink the unbounded axes back to the measured text bounds.
        if (dimensions.Width == 8388608)
        {
            dimensions.Width = boundingRect.Width;

            // Restore our alignment before drawing - see notes above.
            nsparagraphStyle.Alignment = textAlign;
            stringWithAttributes.Dispose();
            stringWithAttributes = null;
            stringWithAttributes = new NSAttributedString(text, nsAttributes);
        }

        if (dimensions.Height == 8388608)
        {
            dimensions.Height = boundingRect.Height;
        }
    }

    imageWidth = (int)dimensions.Width;
    imageHeight = (int)dimensions.Height;

    // Horizontal offset of the text bounds inside the image.
    var xOffset = 0.0f;
    switch (textAlign) {
    case NSTextAlignment.Left:
        xOffset = 0;
        break;
    case NSTextAlignment.Center:
        xOffset = (dimensions.Width-boundingRect.Width)/2.0f;
        break;
    case NSTextAlignment.Right: xOffset = dimensions.Width-boundingRect.Width; break;
    default: break;
    }

    // Vertical offset (AppKit's origin is bottom-left, hence Top maps to the
    // remaining height).
    var yOffset = (CCVerticalTextAlignment.Top == verticleAlignement
        || boundingRect.Height >= dimensions.Height) ? (dimensions.Height - boundingRect.Height)  // align to top
        : (CCVerticalTextAlignment.Bottom == verticleAlignement) ? 0  // align to bottom
        : (imageHeight - boundingRect.Height) / 2.0f; // align to center

    //Find the rect that the string will draw into inside the dimensions
    var drawRect = new RectangleF(xOffset
        , yOffset
        , boundingRect.Width
        , boundingRect.Height);

    NSImage image = null;

    try
    {
        //Set antialias or not
        // NOTE(review): this touches CurrentContext before LockFocus switches the
        // drawing context to the image — confirm the flag applies to the image draw.
        NSGraphicsContext.CurrentContext.ShouldAntialias = textDef.isShouldAntialias;

        image = new NSImage(new SizeF(imageWidth, imageHeight));
        image.LockFocus();

        // set a default transform
        var transform = new NSAffineTransform();
        transform.Set();

        stringWithAttributes.DrawInRect(drawRect);

        image.UnlockFocus();

        // We will use Texture2D from stream here instead of CCTexture2D stream.
        var tex = Texture2D.FromStream(CCDrawManager.SharedDrawManager.XnaGraphicsDevice, image);

        // Debugging purposes
        //            var path = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
        //            var fileName = Path.Combine(path, "Label3.png");
        //            using (var stream = new FileStream(fileName, FileMode.Create, FileAccess.Write))
        //            {
        //                tex.SaveAsPng(stream, imageWidth, imageHeight);
        //            }

        // Create our texture of the label string.
        var texture = new CCTexture2D(tex);

        return texture;
    }
    catch (Exception exc)
    {
        CCLog.Log ("CCLabel: Error creating native label:{0}\n{1}", exc.Message, exc.StackTrace);
    }
    finally
    {
        // clean up the resources
        if (image != null)
        {
            image.Dispose ();
            image = null;
        }
        if (stringWithAttributes != null)
        {
            stringWithAttributes.Dispose ();
            stringWithAttributes = null;
        }
    }

    // Reached only on a drawing failure.
    return new CCTexture2D ();
}
#else
// Renders `text` into a texture using the iOS (UIKit) text stack.
// Font, color, alignment, line-break mode and dimensions come from
// `textDefinition`; returns an empty CCTexture2D on empty input or a drawing
// failure, and throws ArgumentOutOfRangeException when the measured string is 0x0.
//
// Fixes: removed the unused local `hasPremultipliedAlpha` (CS0219) and the
// unused `exc` in the font-loading catch (CS0168); behavior is unchanged.
internal CCTexture2D CreateTextSprite(string text, CCFontDefinition textDefinition)
{
    if (string.IsNullOrEmpty(text))
        return new CCTexture2D();

    int imageWidth;
    int imageHeight;
    var textDef = textDefinition;

    // Scale font size and requested dimensions from points to texels.
    var contentScaleFactorWidth = CCLabel.DefaultTexelToContentSizeRatios.Width;
    var contentScaleFactorHeight = CCLabel.DefaultTexelToContentSizeRatios.Height;

    textDef.FontSize *= contentScaleFactorWidth;
    textDef.Dimensions.Width *= contentScaleFactorWidth;
    textDef.Dimensions.Height *= contentScaleFactorHeight;

    // Font resolution: a ".ttf" name is loaded from content; anything else is
    // looked up as an installed font name. Arial is the last-resort fallback.
    UIFont font = null;
    var ext = System.IO.Path.GetExtension(textDef.FontName);

    if (!String.IsNullOrEmpty(ext) && ext.ToLower() == ".ttf")
    {
        try
        {
            textDef.FontName = LoadFontFile(textDef.FontName);
            font = UIFont.FromName(textDef.FontName, textDef.FontSize);
        }
        catch (Exception)
        {
            CCLog.Log(".ttf {0} file not found or can not be loaded.", textDef.FontName);
        }
    }
    else
    {
        // font
        font = UIFont.FromName (textDef.FontName, textDef.FontSize);
    }

    if (font == null)
    {
        font = UIFont.FromName ("Arial", textDef.FontSize);
        CCLog.Log("{0} not found.  Defaulting to Arial.", textDef.FontName);
    }

    // Always draw white; tinting happens elsewhere — presumably via the
    // sprite color; TODO confirm.
    var foregroundColor = UIColor.White;

    // Map engine alignment enums onto their UIKit equivalents.
    var horizontalAlignment = textDef.Alignment;
    var verticleAlignement = textDef.LineAlignment;

    var textAlign = (CCTextAlignment.Right == horizontalAlignment) ? UITextAlignment.Right
        : (CCTextAlignment.Center == horizontalAlignment) ? UITextAlignment.Center
        : UITextAlignment.Left;

    // LineBreak
    var lineBreak = (CCLabelLineBreak.Character == textDef.LineBreak) ? UILineBreakMode.CharacterWrap
        : (CCLabelLineBreak.Word == textDef.LineBreak) ? UILineBreakMode.WordWrap
        : UILineBreakMode.Clip;

    var nsparagraphStyle = (NSMutableParagraphStyle)NSParagraphStyle.Default.MutableCopy();
    nsparagraphStyle.LineBreakMode = lineBreak;
    nsparagraphStyle.Alignment = textAlign;

    // Create a new attributed string definition
    var nsAttributes = new UIStringAttributes ();

    // Font attribute
    nsAttributes.Font = font;
    nsAttributes.ForegroundColor = foregroundColor;
    nsAttributes.ParagraphStyle = nsparagraphStyle;

    var stringWithAttributes = new NSAttributedString(text, nsAttributes);

    var realDimensions = stringWithAttributes.Size;

    // A 0x0 string cannot be rendered into a texture.
    // NOTE(review): the first argument of ArgumentOutOfRangeException is the
    // parameter name; here message and name appear swapped — kept as-is to
    // preserve the observable exception.
    if (realDimensions == SizeF.Empty)
        throw new ArgumentOutOfRangeException("Native string:", "Dimensions of native NSAttributedString can not be 0,0");

    var dimensions = new CGSize(textDef.Dimensions.Width, textDef.Dimensions.Height);

    var layoutAvailable = true;

    // 8388608 (2^23) acts as an effectively unbounded layout size for axes
    // the caller did not constrain.
    if (dimensions.Width <= 0)
    {
        dimensions.Width = 8388608;
        layoutAvailable = false;
    }

    if (dimensions.Height <= 0)
    {
        dimensions.Height = 8388608;
        layoutAvailable = false;
    }

    // Measure the text within the (possibly unbounded) layout area.
    var boundingRect = stringWithAttributes.GetBoundingRect(new CGSize((int)dimensions.Width, (int)dimensions.Height),
        NSStringDrawingOptions.UsesLineFragmentOrigin, null);

    if (!layoutAvailable)
    {
        // Shrink the unbounded axes back to the measured text bounds.
        if (dimensions.Width == 8388608)
        {
            dimensions.Width = boundingRect.Width;
        }

        if (dimensions.Height == 8388608)
        {
            dimensions.Height = boundingRect.Height;
        }
    }

    imageWidth = (int)dimensions.Width;
    imageHeight = (int)dimensions.Height;

    // Horizontal offset of the text bounds inside the image.
    var xOffset = (nfloat)0.0f;
    switch (textAlign) {
    case UITextAlignment.Left:
        xOffset = 0;
        break;
    case UITextAlignment.Center:
        xOffset = (dimensions.Width-boundingRect.Width)/2.0f;
        break;
    case UITextAlignment.Right: xOffset = dimensions.Width-boundingRect.Width; break;
    default: break;
    }

    // Vertical offset (UIKit's origin is top-left, hence Bottom maps to the
    // remaining height).
    var yOffset = (CCVerticalTextAlignment.Bottom == verticleAlignement
        || boundingRect.Height >= dimensions.Height) ? (dimensions.Height - boundingRect.Height)  // align to bottom
        : (CCVerticalTextAlignment.Top == verticleAlignement) ? 0  // align to top
        : (imageHeight - boundingRect.Height) / 2.0f; // align to center

    //Find the rect that the string will draw into inside the dimensions
    var drawRect = new CGRect(xOffset
        , yOffset
        , boundingRect.Width
        , boundingRect.Height);

    UIImage image = null;
    CGContext context = null;

    try
    {
        UIGraphics.BeginImageContext (new CGSize(imageWidth,imageHeight));
        context = UIGraphics.GetCurrentContext ();

        //Set antialias or not
        context.SetShouldAntialias(textDef.isShouldAntialias);

        stringWithAttributes.DrawString(drawRect);

        image = UIGraphics.GetImageFromCurrentImageContext ();
        UIGraphics.EndImageContext();

        // We will use Texture2D from stream here instead of CCTexture2D stream.
        var tex = Texture2D.FromStream(CCDrawManager.SharedDrawManager.XnaGraphicsDevice, image);

        // Debugging purposes
        //            var path = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
        //            var fileName = Path.Combine(path, "Label3.png");
        //            using (var stream = new FileStream(fileName, FileMode.Create, FileAccess.Write))
        //            {
        //                tex.SaveAsPng(stream, imageWidth, imageHeight);
        //            }

        // Create our texture of the label string.
        var texture = new CCTexture2D(tex);

        return texture;
    }
    catch (Exception exc)
    {
        CCLog.Log ("CCLabel: Error creating native label:{0}\n{1}", exc.Message, exc.StackTrace);
    }
    finally
    {
        // clean up the resources
        if (image != null)
        {
            image.Dispose ();
            image = null;
        }
        if (context != null)
        {
            context.Dispose ();
            context = null;
        }
        if (stringWithAttributes != null)
        {
            stringWithAttributes.Dispose ();
            stringWithAttributes = null;
        }
    }

    // Reached only on a drawing failure.
    return new CCTexture2D ();
}
#endif
}
}
| |
// Copyright (C) 2009-2017 Luca Piccioni
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
using System;
using System.Diagnostics;
namespace OpenGL.Objects.State
{
/// <summary>
/// Generic render state.
/// </summary>
[DebuggerDisplay("GraphicsState: Id={StateIdentifier}")]
public abstract class GraphicsState : IGraphicsState
{
	#region Constructors

	/// <summary>
	/// Construct a GraphicsState. The state is inheritable by default.
	/// </summary>
	protected GraphicsState()
	{
	}

	/// <summary>
	/// Construct a GraphicsState, specifying whether it is inheritable.
	/// </summary>
	/// <param name="inheritable">
	/// A boolean indicating whether this state is inherited by derived state sets.
	/// </param>
	protected GraphicsState(bool inheritable)
	{
		// Inheritable flag
		_Inheritable = inheritable;
	}

	#endregion

	#region State Index

	/// <summary>
	/// Get the next state index.
	/// </summary>
	/// <returns>
	/// It returns the index to be assigned to the next GraphicsState implementation.
	/// </returns>
	protected static int NextStateIndex()
	{
		// NOTE(review): post-increment is not atomic; assumes state types are
		// registered from a single thread — confirm before concurrent use.
		return (_StateIndex++);
	}

	/// <summary>
	/// Get the total number of <see cref="IGraphicsState"/> implementations.
	/// </summary>
	/// <returns></returns>
	protected internal static int GetStateCount() { return (_StateIndex); }

	/// <summary>
	/// The current state index for GraphicsState.
	/// </summary>
	private static int _StateIndex;

	#endregion

	#region Equality Operators

	/// <summary>
	/// Compare two GraphicsState for equality
	/// </summary>
	/// <param name="state1">
	/// A <see cref="GraphicsState"/> to compare with <paramref name="state2"/>.
	/// </param>
	/// <param name="state2">
	/// A <see cref="GraphicsState"/> to compare with <paramref name="state1"/>.
	/// </param>
	/// <returns>
	/// It returns true if <paramref name="state1"/> equals to <paramref name="state2"/>, otherwise false.
	/// </returns>
	public static bool operator==(GraphicsState state1, GraphicsState state2)
	{
		return (Equals(state1, state2));
	}

	/// <summary>
	/// Compare two GraphicsState for inequality
	/// </summary>
	/// <param name="state1">
	/// A <see cref="GraphicsState"/> to compare with <paramref name="state2"/>.
	/// </param>
	/// <param name="state2">
	/// A <see cref="GraphicsState"/> to compare with <paramref name="state1"/>.
	/// </param>
	/// <returns>
	/// It returns true if <paramref name="state1"/> differs from <paramref name="state2"/>, otherwise false.
	/// </returns>
	public static bool operator!=(GraphicsState state1, GraphicsState state2)
	{
		return (!Equals(state1, state2));
	}

	#endregion

	#region Object Overrides

	/// <summary>
	/// Determines whether the specified <see cref="T:System.Object"/> is equal to the current <see cref="T:System.Object"/>.
	/// </summary>
	/// <param name="obj">
	/// The <see cref="T:System.Object"/> to compare with the current <see cref="T:System.Object"/>.
	/// </param>
	/// <returns>
	/// It returns true if the specified <see cref="T:System.Object"/> is equal to the current <see cref="T:System.Object"/>; otherwise, false.
	/// </returns>
	public override bool Equals(object obj)
	{
		if (ReferenceEquals(null, obj))
			return (false);
		if (ReferenceEquals(this, obj))
			return (true);

		// Use a safe cast instead of catching InvalidCastException: the
		// mismatched-type case is expected here, and exceptions shall not
		// be used for control flow.
		GraphicsState otherState = obj as GraphicsState;
		if (ReferenceEquals(otherState, null))
			return (false);

		return (Equals(otherState));
	}

	/// <summary>
	/// Serves as a hash function for a particular type. <see cref="M:System.Object.GetHashCode"/> is suitable for
	/// use in hashing algorithms and data structures like a hash table.
	/// </summary>
	/// <returns>
	/// A hash code for the current <see cref="T:System.Object"/>.
	/// </returns>
	public override int GetHashCode()
	{
		return (StateIdentifier.GetHashCode());
	}

	/// <summary>
	/// Represents the current <see cref="GraphicsState"/> for logging.
	/// </summary>
	/// <returns>
	/// A <see cref="String"/> that represents the current <see cref="GraphicsState"/>.
	/// </returns>
	public override string ToString()
	{
		return (StateIdentifier);
	}

	#endregion

	#region IGraphicsState Implementation

	/// <summary>
	/// The identifier of this GraphicsState.
	/// </summary>
	public abstract string StateIdentifier { get; }

	/// <summary>
	/// Unique index assigned to this GraphicsState.
	/// </summary>
	public abstract int StateIndex { get; }

	/// <summary>
	/// Flag indicating whether the state is context-bound.
	/// </summary>
	public virtual bool IsContextBound { get { return (true); } }

	/// <summary>
	/// Flag indicating whether the state can be applied on a <see cref="ShaderProgram"/>.
	/// </summary>
	public virtual bool IsProgramBound { get { return (false); } }

	/// <summary>
	/// Create or update resources defined by this IGraphicsState, based on the associated <see cref="ShaderProgram"/>.
	/// </summary>
	/// <param name="ctx">
	/// A <see cref="GraphicsContext"/> used for allocating resources.
	/// </param>
	/// <param name="shaderProgram">
	/// A <see cref="ShaderProgram"/> that will be used in conjunction with this IGraphicsState.
	/// </param>
	public virtual void Create(GraphicsContext ctx, ShaderProgram shaderProgram)
	{
	}

	/// <summary>
	/// Dispose resources allocated by <see cref="Create(GraphicsContext, ShaderProgram)"/>.
	/// </summary>
	public virtual void Delete()
	{
	}

	/// <summary>
	/// Apply the render state defined by this IGraphicsState.
	/// </summary>
	/// <param name="ctx">
	/// A <see cref="GraphicsContext"/> used in conjunction with <paramref name="shaderProgram"/>.
	/// </param>
	/// <param name="shaderProgram">
	/// The <see cref="ShaderProgram"/> holding the uniform state.
	/// </param>
	public abstract void Apply(GraphicsContext ctx, ShaderProgram shaderProgram);

	/// <summary>
	/// Performs a deep copy of this <see cref="IGraphicsState"/>.
	/// </summary>
	/// <returns>
	/// It returns the equivalent of this <see cref="IGraphicsState"/>, but all objects referenced
	/// are not referred by both instances.
	/// </returns>
	/// <remarks>
	/// NOTE(review): MemberwiseClone is a shallow copy; reference-typed fields in
	/// derived classes are shared unless the override deep-copies them.
	/// </remarks>
	public virtual IGraphicsState Push()
	{
		return ((GraphicsState)MemberwiseClone());
	}

	/// <summary>
	/// Merge this state with another one.
	/// </summary>
	/// <param name="state">
	/// A <see cref="IGraphicsState"/> having the same <see cref="StateIdentifier"/> of this state.
	/// </param>
	/// <remarks>
	/// <para>
	/// After a call to this routine, this IGraphicsState store the union of the previous information
	/// and of the information of <paramref name="state"/>.
	/// </para>
	/// <para>
	/// The semantic of the merge result is dependent on the actual implementation of this IGraphicsState. Normally
	/// the merge method will copy <paramref name="state"/> into this IGraphicsState, but other state could do
	/// different operations.
	/// </para>
	/// </remarks>
	public abstract void Merge(IGraphicsState state);

	/// <summary>
	/// Indicates whether the current object is equal to another object of the same type.
	/// </summary>
	/// <param name="other">
	/// A <see cref="GraphicsState"/> to compare to this GraphicsState.
	/// </param>
	/// <returns>
	/// It returns true if the current object is equal to <paramref name="other"/>.
	/// </returns>
	/// <remarks>
	/// <para>
	/// This method test only whether <paramref name="other"/> type equals to this type.
	/// </para>
	/// </remarks>
	/// <exception cref="ArgumentNullException">
	/// This exception is thrown if the parameter <paramref name="other"/> is null.
	/// </exception>
	public virtual bool Equals(IGraphicsState other)
	{
		if (other == null)
			throw new ArgumentNullException("other");

		return (other.StateIndex == StateIndex);
	}

	/// <summary>
	/// Flag indicating whether this state is inheritable.
	/// </summary>
	private bool _Inheritable = true;

	#endregion
}
}
| |
#region Proprietary Notice
// ****************************************************************************
// Copyright 2014 Vadim Zabavnov
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ****************************************************************************
// File Name: ModelBase.cs.
// Created: 2014/05/30/4:59 PM.
// Modified: 2015/06/13/10:41 AM.
// ****************************************************************************
#endregion
#region Usings
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Linq;
using System.Linq.Expressions;
#endregion
namespace Zabavnov.MVVM
{
using System.Collections.Generic;
/// <summary>
/// Base class for models implementing <see cref="INotifyPropertyChanged"/>.
/// Provides overloads for raising change notifications by property name or by
/// property lambda, optionally marshalled through an <see cref="IDispatcher"/>,
/// plus helpers to attach actions, command re-checks and data-provider resets
/// to property changes.
/// </summary>
/// <typeparam name="T">
/// The model type against which property lambdas are written.
/// </typeparam>
public abstract class ModelBase<T> : INotifyPropertyChanged
    where T : INotifyPropertyChanged
{
    #region Fields
    /// <summary>
    /// Property manager wired in the constructor to forward each reported
    /// property change to <see cref="RaisePropertyChanged(string[])"/> by name.
    /// </summary>
    protected readonly PropertyManager<T> _propertyManager;
    #endregion
    #region Constructors and Destructors
    /// <summary>
    /// Initializes the property manager with a callback that raises
    /// <see cref="PropertyChanged"/> for the changed property's name.
    /// </summary>
    protected ModelBase()
    {
        _propertyManager = new PropertyManager<T>(info => RaisePropertyChanged(info.Name));
    }
    #endregion
    #region Public Events
    /// <summary>
    /// Raised whenever a property value changes.
    /// </summary>
    public virtual event PropertyChangedEventHandler PropertyChanged;
    #endregion
    #region Methods
    /// <summary>
    /// Raises <see cref="PropertyChanged"/> for each of the given property names,
    /// invoking the handler through the supplied dispatcher.
    /// </summary>
    /// <param name="dispatcher">
    /// The dispatcher used to invoke the event handler.
    /// </param>
    /// <param name="names">
    /// Names of the changed properties.
    /// </param>
    [DebuggerStepThrough]
    protected internal void RaisePropertyChanged(IDispatcher dispatcher, params string[] names)
    {
        Contract.Requires(dispatcher != null);
        Contract.Requires(names != null);
        // Snapshot the handler so an unsubscribe on another thread cannot
        // null it between the check and the invocation.
        var handler = PropertyChanged;
        if(handler != null)
            names.Select(name => new PropertyChangedEventArgs(name)).ForEach(arg => dispatcher.Invoke(() => handler(this, arg)));
    }
    /// <summary>
    /// Raises <see cref="PropertyChanged"/> for each of the given property names
    /// directly on the calling thread.
    /// </summary>
    /// <param name="names">
    /// Names of the changed properties.
    /// </param>
    [DebuggerStepThrough]
    protected internal void RaisePropertyChanged(params string[] names)
    {
        Contract.Requires(names != null);
        var handler = PropertyChanged;
        if(handler != null)
            names.Select(name => new PropertyChangedEventArgs(name)).ForEach(arg => handler(this, arg));
    }
    /// <summary>
    /// Raises <see cref="PropertyChanged"/> for the properties identified by the
    /// given lambdas, using the direct (same-thread) dispatcher.
    /// </summary>
    /// <param name="propertyLambdas">
    /// Lambdas selecting the changed properties of <typeparamref name="T"/>.
    /// </param>
    [DebuggerStepThrough]
    protected internal void RaisePropertyChanged(params Expression<Func<T, object>>[] propertyLambdas)
    {
        Contract.Requires(propertyLambdas != null);
        Contract.Requires(propertyLambdas.Length > 0);
        RaisePropertyChanged(Dispatcher.DirectDispatcher, propertyLambdas);
    }
    /// <summary>
    /// Raises <see cref="PropertyChanged"/> for the properties identified by the
    /// given lambdas, invoking the handler through the supplied dispatcher.
    /// </summary>
    /// <param name="dispatcher">
    /// The dispatcher used to invoke the event handler.
    /// </param>
    /// <param name="propertyLambdas">
    /// Lambdas selecting the changed properties of <typeparamref name="T"/>.
    /// </param>
    [DebuggerStepThrough]
    protected internal void RaisePropertyChanged(IDispatcher dispatcher, params Expression<Func<T, object>>[] propertyLambdas)
    {
        Contract.Requires(dispatcher != null);
        Contract.Requires(propertyLambdas != null);
        Contract.Requires(propertyLambdas.Length > 0);
        var handler = PropertyChanged;
        if(handler != null)
        {
            foreach(var lambda in propertyLambdas)
            {
                // Resolve the member name from the lambda once per property.
                var args = new PropertyChangedEventArgs(lambda.GetMemberInfo().Name);
                dispatcher.Invoke(() => handler(this, args));
            }
        }
    }
    /// <summary>
    /// Raise change event for specified property(s)
    /// </summary>
    /// <param name="propertyLambda">Lambda selecting the first changed property.</param>
    /// <param name="propertyLambdas">Lambdas selecting any additional changed properties.</param>
    [DebuggerStepThrough]
    protected void RaisePropertyChanged(Expression<Func<object>> propertyLambda,
        params Expression<Func<object>>[] propertyLambdas)
    {
        Contract.Requires(propertyLambda != null);
        RaisePropertyChanged(Dispatcher.DirectDispatcher, propertyLambda, propertyLambdas);
    }
    /// <summary>
    /// Raises <see cref="PropertyChanged"/> for the first property plus any
    /// additional ones, invoking the handler through the supplied dispatcher.
    /// </summary>
    [DebuggerStepThrough]
    protected void RaisePropertyChanged(IDispatcher dispatcher, Expression<Func<object>> propertyLambda,
        params Expression<Func<object>>[] propertyLambdas)
    {
        Contract.Requires(dispatcher != null);
        Contract.Requires(propertyLambda != null);
        var handler = PropertyChanged;
        if (handler != null)
            // Prepend the mandatory lambda to the optional ones and raise once per property.
            RaisePropertyChanged(dispatcher,
                propertyLambdas.AddHead(propertyLambda).Select(z => new PropertyChangedEventArgs(z.GetPropertyName())));
    }
    /// <summary>
    /// Raises <see cref="PropertyChanged"/> once per supplied event-args
    /// instance, invoking the handler through the supplied dispatcher.
    /// </summary>
    [DebuggerStepThrough]
    protected void RaisePropertyChanged(IDispatcher dispatcher, IEnumerable<PropertyChangedEventArgs> args)
    {
        Contract.Requires(dispatcher != null);
        Contract.Requires(args != null);
        var handler = this.PropertyChanged;
        if (handler != null)
            args.ForEach(z => dispatcher.Invoke(() => handler(this, z)));
    }
    /// <summary>
    /// just a wrapper for <see cref="ExpressionExtensions.AttachActionTo(System.Action,System.Linq.Expressions.Expression{System.Func{object}},System.Linq.Expressions.Expression{System.Func{object}}[])" />
    /// </summary>
    /// <param name="actionToAttach">Action to run when any monitored property changes.</param>
    /// <param name="propertyLambdas">Lambdas selecting the properties to monitor.</param>
    protected void AttachActionTo(Action actionToAttach, params Expression<Func<object>>[] propertyLambdas)
    {
        Contract.Requires(actionToAttach != null);
        Contract.Requires(propertyLambdas != null);
        Contract.Requires(propertyLambdas.Length > 0);
        actionToAttach.AttachActionTo(propertyLambdas);
    }
    /// <summary>
    /// Arranges for <paramref name="providerToReset"/> to be reset — and a change
    /// notification raised for <paramref name="propertyLambdaToRaiseChangesOn"/> —
    /// whenever any of the monitored properties changes.
    /// </summary>
    /// <param name="providerToReset">The data provider to reset.</param>
    /// <param name="propertyLambdaToRaiseChangesOn">Property to raise the change notification on.</param>
    /// <param name="propertyExpressionsToMonitor">Properties whose changes trigger the reset.</param>
    /// <typeparam name="TProviderData">The provider's data type.</typeparam>
    [DebuggerStepThrough]
    protected void AttachResetAndRaiseOn<TProviderData>(
        IDataProvider<TProviderData> providerToReset,
        Expression<Func<T, object>> propertyLambdaToRaiseChangesOn,
        params Expression<Func<object>>[] propertyExpressionsToMonitor)
    {
        Contract.Requires(propertyExpressionsToMonitor != null);
        Contract.Requires(propertyExpressionsToMonitor.Length > 0);
        Contract.Requires(providerToReset != null);
        Contract.Requires(propertyLambdaToRaiseChangesOn != null);
        AttachActionTo(() => ResetAndRaise(providerToReset, propertyLambdaToRaiseChangesOn), propertyExpressionsToMonitor);
    }
    /// <summary>
    /// Check command on property changes
    /// </summary>
    /// <param name="commandToCheck">
    /// The command to check
    /// </param>
    /// <param name="propertyLambdas">
    /// The properties to check command on
    /// </param>
    protected void CheckCommandOn(ICommand commandToCheck, params Expression<Func<object>>[] propertyLambdas)
    {
        Contract.Requires(commandToCheck != null);
        Contract.Requires(propertyLambdas != null);
        Contract.Requires(propertyLambdas.Length > 0);
        //check command first
        commandToCheck.CanExecute();
        // ...then re-check it whenever any of the monitored properties changes.
        AttachActionTo(() => commandToCheck.CanExecute(), propertyLambdas);
    }
    /// <summary>
    /// Raises a change notification for <paramref name="propertyLambdaToRaise"/>
    /// whenever any of the monitored properties changes (direct dispatcher).
    /// </summary>
    /// <param name="propertyLambdaToRaise">Property to raise the change notification on.</param>
    /// <param name="propertyLambdasToMonitor">Properties whose changes trigger the notification.</param>
    protected void RaisePropertyChangedOn(
        Expression<Func<T, object>> propertyLambdaToRaise,
        params Expression<Func<object>>[] propertyLambdasToMonitor)
    {
        Contract.Requires(propertyLambdasToMonitor != null);
        Contract.Requires(propertyLambdaToRaise != null);
        ExpressionExtensions.AttachActionTo(() => RaisePropertyChanged(propertyLambdaToRaise), propertyLambdasToMonitor);
    }
    /// <summary>
    /// Raises a change notification for <paramref name="propertyLambdaToRaise"/>
    /// through the supplied dispatcher whenever any monitored property changes.
    /// </summary>
    /// <param name="dispatcher">The dispatcher used to invoke the event handler.</param>
    /// <param name="propertyLambdaToRaise">Property to raise the change notification on.</param>
    /// <param name="propertyLambdasToMonitor">Properties whose changes trigger the notification.</param>
    protected void RaisePropertyChangedOn(
        IDispatcher dispatcher,
        Expression<Func<T, object>> propertyLambdaToRaise,
        params Expression<Func<object>>[] propertyLambdasToMonitor)
    {
        Contract.Requires(dispatcher != null);
        Contract.Requires(propertyLambdaToRaise != null);
        Contract.Requires(propertyLambdasToMonitor != null);
        ExpressionExtensions.AttachActionTo(() => RaisePropertyChanged(dispatcher, propertyLambdaToRaise), propertyLambdasToMonitor);
    }
    /// <summary>
    /// Reset provider status if it is in <b>ready</b> status and raise notification
    /// </summary>
    /// <param name="provider">
    /// The provider to reset
    /// </param>
    /// <param name="propertyLambdas">
    /// The properties to raise notification on
    /// </param>
    [DebuggerStepThrough]
    protected void ResetAndRaise(IDataProvider provider, params Expression<Func<T, object>>[] propertyLambdas)
    {
        Contract.Requires(provider != null);
        Contract.Requires(propertyLambdas != null);
        Contract.Requires(propertyLambdas.Length > 0);
        ResetAndRaise(provider, Dispatcher.DirectDispatcher, propertyLambdas);
    }
    /// <summary>
    /// Reset provider status if it is in <b>ready</b> status and raise notification
    /// </summary>
    /// <param name="provider">
    /// The provider to reset
    /// </param>
    /// <param name="dispatcher">
    /// the dispatcher
    /// </param>
    /// <param name="propertyLambdas">
    /// The properties to raise notification on
    /// </param>
    [DebuggerStepThrough]
    protected void ResetAndRaise(IDataProvider provider, IDispatcher dispatcher, params Expression<Func<T, object>>[] propertyLambdas)
    {
        Contract.Requires(provider != null);
        Contract.Requires(dispatcher != null);
        Contract.Requires(propertyLambdas != null);
        Contract.Requires(propertyLambdas.Length > 0);
        // Only reset providers that have already produced (or are producing) data.
        if(provider.Status.Value != DataProviderStatus.NotReady)
        {
            provider.Reset();
            RaisePropertyChanged(dispatcher, propertyLambdas);
        }
    }
    // Code-contracts invariant: the property manager is assigned in the
    // constructor and is never null afterwards.
    [ContractInvariantMethod]
    private void ObjectInvariant()
    {
        Contract.Invariant(_propertyManager != null);
    }
    #endregion
}
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using ASC.FederatedLogin;
using ASC.FederatedLogin.Helpers;
using ASC.FederatedLogin.LoginProviders;
using Microsoft.Graph;
using Microsoft.OneDrive.Sdk;
using Newtonsoft.Json.Linq;
namespace ASC.Files.Thirdparty.OneDrive
{
/// <summary>
/// Thin synchronous wrapper over the OneDrive SDK client: item CRUD,
/// content download/upload and chunked (resumable) upload sessions.
/// Async SDK calls are blocked on via <c>.Result</c>/<c>.Wait()</c> throughout.
/// </summary>
internal class OneDriveStorage
{
    private OAuth20Token _token;

    /// <summary>
    /// Current OAuth access token. An expired token is refreshed transparently,
    /// and the cached client is invalidated so it gets rebuilt with the new token.
    /// </summary>
    private string AccessToken
    {
        get
        {
            if (_token == null) throw new Exception("Cannot create OneDrive session with given token");
            if (_token.IsExpired)
            {
                _token = OAuth20TokenHelper.RefreshToken<OneDriveLoginProvider>(_token);
                // The cached client was built with the old token; force a rebuild.
                _onedriveClientCache = null;
            }
            return _token.AccessToken;
        }
    }

    private OneDriveClient _onedriveClientCache;

    /// <summary>Lazily created SDK client authenticated with <see cref="AccessToken"/>.</summary>
    private OneDriveClient OnedriveClient
    {
        get { return _onedriveClientCache ?? (_onedriveClientCache = new OneDriveClient(new OneDriveAuthProvider(AccessToken))); }
    }

    /// <summary>True once <see cref="Open"/> has been called (until <see cref="Close"/>).</summary>
    public bool IsOpened { get; private set; }

    /// <summary>Upper bound for files uploaded through the chunked upload path (10 GB).</summary>
    public long MaxChunkedUploadFileSize = 10L * 1024L * 1024L * 1024L;

    /// <summary>Stores the token and marks the storage as opened. Idempotent.</summary>
    public void Open(OAuth20Token token)
    {
        if (IsOpened)
            return;
        _token = token;
        IsOpened = true;
    }

    public void Close()
    {
        IsOpened = false;
    }

    /// <summary>Returns true if the drive metadata can be fetched with the current token.</summary>
    public bool CheckAccess()
    {
        return OnedriveClient
            .Drive
            .Request()
            .GetAsync()
            .Result != null;
    }

    public static string RootPath = "/drive/root:";
    public static string ApiVersion = "v1.0";

    /// <summary>Joins a parent path and an item name with "/", treating null as empty.</summary>
    public static string MakeOneDrivePath(string parentPath, string name)
    {
        return (parentPath ?? "") + "/" + (name ?? "");
    }

    /// <summary>
    /// Fetches an item by id (drive root when <paramref name="itemId"/> is empty).
    /// Returns null when the service reports 404; rethrows any other failure.
    /// </summary>
    public Item GetItem(string itemId)
    {
        try
        {
            return GetItemRequest(itemId).Request().GetAsync().Result;
        }
        catch (Exception ex)
        {
            // Use "as", not a hard cast: the inner exception may not be a
            // ServiceException, and an InvalidCastException here would mask
            // the original error.
            var serviceException = ex.InnerException as ServiceException;
            if (serviceException != null && serviceException.StatusCode == HttpStatusCode.NotFound)
            {
                return null;
            }
            throw;
        }
    }

    /// <summary>
    /// Lists the children of a folder.
    /// NOTE(review): <paramref name="limit"/> is currently ignored — only the
    /// first page returned by the SDK is materialized; confirm whether paging
    /// or a page-size option is required here.
    /// </summary>
    public List<Item> GetItems(string folderId, int limit = 500)
    {
        return new List<Item>(GetItemRequest(folderId).Children.Request().GetAsync().Result);
    }

    /// <summary>
    /// Downloads a file's content stream, optionally seeking to <paramref name="offset"/>.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when the item is null or is not a file.</exception>
    public Stream DownloadStream(Item file, int offset = 0)
    {
        if (file == null || file.File == null) throw new ArgumentNullException("file");
        var fileStream = OnedriveClient
            .Drive
            .Items[file.Id]
            .Content
            .Request()
            .GetAsync()
            .Result;
        if (fileStream != null && offset > 0)
            fileStream.Seek(offset, SeekOrigin.Begin);
        return fileStream;
    }

    /// <summary>Creates a folder named <paramref name="title"/> under the given parent.</summary>
    public Item CreateFolder(string title, string parentId)
    {
        var newFolderItem = new Item
        {
            Folder = new Folder(),
            Name = title
        };
        return GetItemRequest(parentId)
            .Children
            .Request()
            .AddAsync(newFolderItem)
            .Result;
    }

    /// <summary>Uploads a file in a single PUT to the path-addressed location.</summary>
    public Item CreateFile(Stream fileStream, string title, string parentPath)
    {
        return OnedriveClient
            .Drive
            .Root
            .ItemWithPath(MakeOneDrivePath(parentPath, title))
            .Content
            .Request()
            .PutAsync<Item>(fileStream)
            .Result;
    }

    /// <summary>Deletes the given item.</summary>
    public void DeleteItem(Item item)
    {
        // Wait for completion: previously the task was dropped, so failures
        // were silently lost and the method could return before the delete
        // actually happened. Blocking matches the .Result style used elsewhere.
        OnedriveClient
            .Drive
            .Items[item.Id]
            .Request()
            .DeleteAsync()
            .Wait();
    }

    /// <summary>Moves an item into <paramref name="toFolderId"/>, renaming it to <paramref name="newItemName"/>.</summary>
    public Item MoveItem(string itemId, string newItemName, string toFolderId)
    {
        var updateItem = new Item { ParentReference = new ItemReference { Id = toFolderId }, Name = newItemName };
        return OnedriveClient
            .Drive
            .Items[itemId]
            .Request()
            .UpdateAsync(updateItem)
            .Result;
    }

    /// <summary>
    /// Copies an item into <paramref name="toFolderId"/> under a new name,
    /// polling the returned monitor until the server-side copy completes.
    /// </summary>
    public Item CopyItem(string itemId, string newItemName, string toFolderId)
    {
        var copyMonitor = OnedriveClient
            .Drive
            .Items[itemId]
            .Copy(newItemName, new ItemReference { Id = toFolderId })
            .Request()
            .PostAsync()
            .Result;
        return copyMonitor.PollForOperationCompletionAsync(null, CancellationToken.None).Result;
    }

    /// <summary>Renames an item in place.</summary>
    public Item RenameItem(string itemId, string newName)
    {
        var updateItem = new Item { Name = newName };
        return OnedriveClient
            .Drive
            .Items[itemId]
            .Request()
            .UpdateAsync(updateItem)
            .Result;
    }

    /// <summary>Replaces an existing file's content with the given stream.</summary>
    public Item SaveStream(string fileId, Stream fileStream)
    {
        return OnedriveClient
            .Drive
            .Items[fileId]
            .Content
            .Request()
            .PutAsync<Item>(fileStream)
            .Result;
    }

    /// <summary>Request builder for an item id, or the drive root when the id is empty.</summary>
    private IItemRequestBuilder GetItemRequest(string itemId)
    {
        return string.IsNullOrEmpty(itemId)
            ? OnedriveClient.Drive.Root
            : OnedriveClient.Drive.Items[itemId];
    }

    /// <summary>
    /// Starts a resumable upload session for the given file by POSTing to the
    /// createUploadSession endpoint; the session's Location is taken from the
    /// "uploadUrl" field of the JSON response.
    /// </summary>
    public ResumableUploadSession CreateResumableSession(Item onedriveFile, long contentLength)
    {
        if (onedriveFile == null) throw new ArgumentNullException("onedriveFile");
        var folderId = onedriveFile.ParentReference.Id;
        var fileName = onedriveFile.Name;
        var uploadUriBuilder = new UriBuilder(OneDriveLoginProvider.OneDriveApiUrl)
        {
            Path = "/" + ApiVersion + "/drive/items/" + folderId + ":/" + fileName + ":/oneDrive.createUploadSession"
        };
        var request = WebRequest.Create(uploadUriBuilder.Uri);
        request.Method = "POST";
        request.ContentLength = 0;
        request.ContentType = "application/json; charset=UTF-8";
        request.Headers.Add("Authorization", "Bearer " + AccessToken);
        var uploadSession = new ResumableUploadSession(onedriveFile.Id, folderId, contentLength);
        using (var response = request.GetResponse())
        using (var responseStream = response.GetResponseStream())
        {
            if (responseStream != null)
            {
                using (var readStream = new StreamReader(responseStream))
                {
                    var responseString = readStream.ReadToEnd();
                    var responseJson = JObject.Parse(responseString);
                    uploadSession.Location = responseJson.Value<string>("uploadUrl");
                }
            }
        }
        uploadSession.Status = ResumableUploadSessionStatus.Started;
        return uploadSession;
    }

    /// <summary>
    /// Uploads one chunk of <paramref name="chunkLength"/> bytes to an open
    /// session, advancing the byte counter on intermediate responses and
    /// marking the session Completed (capturing the new file id) when the
    /// service answers 200/201.
    /// </summary>
    public void Transfer(ResumableUploadSession oneDriveSession, Stream stream, long chunkLength)
    {
        if (stream == null)
            throw new ArgumentNullException("stream");
        if (oneDriveSession.Status != ResumableUploadSessionStatus.Started)
            throw new InvalidOperationException("Can't upload chunk for given upload session.");
        var request = WebRequest.Create(oneDriveSession.Location);
        request.Method = "PUT";
        request.ContentLength = chunkLength;
        request.Headers.Add("Authorization", "Bearer " + AccessToken);
        request.Headers.Add("Content-Range", string.Format("bytes {0}-{1}/{2}",
            oneDriveSession.BytesTransfered,
            oneDriveSession.BytesTransfered + chunkLength - 1,
            oneDriveSession.BytesToTransfer));
        using (var requestStream = request.GetRequestStream())
        {
            stream.CopyTo(requestStream);
        }
        using (var response = (HttpWebResponse)request.GetResponse())
        {
            if (response.StatusCode != HttpStatusCode.Created && response.StatusCode != HttpStatusCode.OK)
            {
                // Intermediate chunk accepted: just advance the progress counter.
                oneDriveSession.BytesTransfered += chunkLength;
            }
            else
            {
                // Final chunk: the item has been created/updated.
                oneDriveSession.Status = ResumableUploadSessionStatus.Completed;
                using (var responseStream = response.GetResponseStream())
                {
                    if (responseStream == null) return;
                    using (var readStream = new StreamReader(responseStream))
                    {
                        var responseString = readStream.ReadToEnd();
                        var responseJson = JObject.Parse(responseString);
                        oneDriveSession.FileId = responseJson.Value<string>("id");
                    }
                }
            }
        }
    }

    /// <summary>Aborts an upload session by DELETEing its upload URL.</summary>
    public void CancelTransfer(ResumableUploadSession oneDriveSession)
    {
        var request = WebRequest.Create(oneDriveSession.Location);
        request.Method = "DELETE";
        using (request.GetResponse())
        {
        }
    }
}
/// <summary>
/// Authentication provider that attaches a fixed OAuth bearer token to every
/// request issued by the OneDrive client.
/// </summary>
public class OneDriveAuthProvider : IAuthenticationProvider
{
    private readonly string _accessToken;

    /// <param name="accessToken">OAuth 2.0 access token to put on outgoing requests.</param>
    public OneDriveAuthProvider(string accessToken)
    {
        _accessToken = accessToken;
    }

    /// <summary>
    /// Sets the Authorization header ("bearer &lt;token&gt;") on the outgoing request.
    /// </summary>
    public Task AuthenticateRequestAsync(HttpRequestMessage request)
    {
        request.Headers.Authorization = new AuthenticationHeaderValue("bearer", _accessToken);
        // The work is synchronous: return the cached completed task instead of
        // the obscure (and allocating) Task.WhenAll() with no arguments.
        return Task.CompletedTask;
    }
}
/// <summary>
/// Lifecycle of a resumable upload session.
/// </summary>
public enum ResumableUploadSessionStatus
{
    // Session created but not yet started.
    None,
    // Session opened on the service; chunks may be transferred.
    Started,
    // Final chunk accepted; the file exists on the service.
    Completed,
    // Session cancelled.
    Aborted
}
/// <summary>
/// State of a chunked (resumable) OneDrive upload: target identifiers,
/// progress counters, the session's upload URL and its current status.
/// </summary>
[Serializable]
internal class ResumableUploadSession
{
    /// <summary>
    /// Creates a session for the given file/folder that expects
    /// <paramref name="bytesToTransfer"/> bytes, starting in the None state.
    /// </summary>
    public ResumableUploadSession(string fileId, string folderId, long bytesToTransfer)
    {
        FileId = fileId;
        FolderId = folderId;
        BytesToTransfer = bytesToTransfer;
        Status = ResumableUploadSessionStatus.None;
    }

    // Total number of bytes this session is expected to upload.
    public long BytesToTransfer { get; set; }

    // Number of bytes uploaded so far.
    public long BytesTransfered { get; set; }

    public string FileId { get; set; }

    public string FolderId { get; set; }

    public ResumableUploadSessionStatus Status { get; set; }

    // Upload URL of the session (populated from the service's "uploadUrl").
    public string Location { get; set; }
}
}
| |
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using System.Linq;
using FluentAssertions;
using Microsoft.DotNet.Cli.Utils;
using Microsoft.DotNet.TestFramework;
using Microsoft.DotNet.Tools.Test.Utilities;
using NuGet.Frameworks;
using NuGet.ProjectModel;
using NuGet.Versioning;
using Xunit;
using Microsoft.DotNet.Tools.Tests.Utilities;
using Microsoft.DotNet.CommandFactory;
using LocalizableStrings = Microsoft.DotNet.CommandFactory.LocalizableStrings;
namespace Microsoft.DotNet.Tests
{
/// <summary>
/// Tests for ProjectToolsCommandResolver: resolving per-project tool commands
/// to command specs, deps.json generation next to the tool lock file, and
/// locating tools restored into a NuGet fallback folder.
/// </summary>
public class GivenAProjectToolsCommandResolver : TestBase
{
    // Target framework the tool packages are resolved against.
    private static readonly NuGetFramework s_toolPackageFramework =
        NuGetFrameworks.NetCoreApp30;

    // Test asset that declares tool dependencies (e.g. "dotnet-portable").
    private const string TestProjectName = "AppWithToolDependency";

    [Fact]
    public void ItReturnsNullWhenCommandNameIsNull()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = null,
            CommandArguments = new string[] { "" },
            ProjectDirectory = "/some/directory"
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().BeNull();
    }

    [Fact]
    public void ItReturnsNullWhenProjectDirectoryIsNull()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "command",
            CommandArguments = new string[] { "" },
            ProjectDirectory = null
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().BeNull();
    }

    [Fact]
    public void ItReturnsNullWhenProjectDirectoryDoesNotContainAProjectFile()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        // Empty directory: no project file to read tool references from.
        var projectDirectory = TestAssets.CreateTestDirectory();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "command",
            CommandArguments = new string[] { "" },
            ProjectDirectory = projectDirectory.Root.FullName
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().BeNull();
    }

    [Fact]
    public void ItReturnsNullWhenCommandNameDoesNotExistInProjectTools()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get(TestProjectName)
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "nonexistent-command",
            CommandArguments = null,
            ProjectDirectory = testInstance.Root.FullName
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().BeNull();
    }

    [Fact]
    public void ItReturnsACommandSpecWithDOTNETAsFileNameAndCommandNameInArgsWhenCommandNameExistsInProjectTools()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get(TestProjectName)
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-portable",
            CommandArguments = null,
            ProjectDirectory = testInstance.Root.FullName
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull();
        // Project tools are launched through the muxer: "dotnet <tool args>".
        var commandFile = Path.GetFileNameWithoutExtension(result.Path);
        commandFile.Should().Be("dotnet");
        result.Args.Should().Contain(commandResolverArguments.CommandName);
    }

    [Fact]
    public void ItEscapesCommandArgumentsWhenReturningACommandSpec()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get(TestProjectName)
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-portable",
            CommandArguments = new[] { "arg with space" },
            ProjectDirectory = testInstance.Root.FullName
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull("Because the command is a project tool dependency");
        // Arguments containing whitespace must be quoted in the spec.
        result.Args.Should().Contain("\"arg with space\"");
    }

    [Fact]
    public void ItReturnsACommandSpecWithArgsContainingCommandPathWhenReturningACommandSpecAndCommandArgumentsAreNull()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get(TestProjectName)
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-portable",
            CommandArguments = null,
            ProjectDirectory = testInstance.Root.FullName
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull();
        var commandPath = result.Args.Trim('"');
        commandPath.Should().Contain("dotnet-portable.dll");
    }

    [Fact]
    public void ItReturnsACommandSpecWithArgsContainingCommandPathWhenInvokingAToolReferencedWithADifferentCasing()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get(TestProjectName)
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-prefercliruntime",
            CommandArguments = null,
            ProjectDirectory = testInstance.Root.FullName
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull();
        var commandPath = result.Args.Trim('"');
        commandPath.Should().Contain("dotnet-prefercliruntime.dll");
    }

    [Fact]
    public void ItWritesADepsJsonFileNextToTheLockfile()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get(TestProjectName)
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-portable",
            CommandArguments = null,
            ProjectDirectory = testInstance.Root.FullName
        };
        var repoDirectoriesProvider = new RepoDirectoriesProvider();
        var nugetPackagesRoot = repoDirectoriesProvider.NugetPackages;
        var toolPathCalculator = new ToolPathCalculator(nugetPackagesRoot);
        var lockFilePath = toolPathCalculator.GetLockFilePath(
            "dotnet-portable",
            new NuGetVersion("1.0.0"),
            s_toolPackageFramework);
        var directory = Path.GetDirectoryName(lockFilePath);
        // Remove any deps.json left over from previous runs so the assertion
        // below proves that Resolve() recreated it.
        var depsJsonFile = Directory
            .EnumerateFiles(directory)
            .FirstOrDefault(p => Path.GetFileName(p).EndsWith(FileNameSuffixes.DepsJson));
        if (depsJsonFile != null)
        {
            File.Delete(depsJsonFile);
        }
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull();
        new DirectoryInfo(directory)
            .Should().HaveFilesMatching("*.deps.json", SearchOption.TopDirectoryOnly);
    }

    [Fact]
    public void GenerateDepsJsonMethodDoesntOverwriteWhenDepsFileAlreadyExists()
    {
        var testInstance = TestAssets.Get(TestProjectName)
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var repoDirectoriesProvider = new RepoDirectoriesProvider();
        var nugetPackagesRoot = repoDirectoriesProvider.NugetPackages;
        var toolPathCalculator = new ToolPathCalculator(nugetPackagesRoot);
        var lockFilePath = toolPathCalculator.GetLockFilePath(
            "dotnet-portable",
            new NuGetVersion("1.0.0"),
            s_toolPackageFramework);
        var lockFile = new LockFileFormat().Read(lockFilePath);
        // NOTE: We must not use the real deps.json path here as it will interfere with tests running in parallel.
        var depsJsonFile = Path.GetTempFileName();
        File.WriteAllText(depsJsonFile, "temp");
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        projectToolsCommandResolver.GenerateDepsJsonFile(
            lockFile,
            s_toolPackageFramework,
            depsJsonFile,
            new SingleProjectInfo("dotnet-portable", "1.0.0", Enumerable.Empty<ResourceAssemblyInfo>()),
            GetToolDepsJsonGeneratorProject());
        // The pre-existing file must be left untouched.
        File.ReadAllText(depsJsonFile).Should().Be("temp");
        File.Delete(depsJsonFile);
    }

    [Fact]
    public void ItAddsFxVersionAsAParamWhenTheToolHasThePrefercliruntimeFile()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get("MSBuildTestApp")
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-prefercliruntime",
            CommandArguments = null,
            ProjectDirectory = testInstance.Root.FullName
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull();
        result.Args.Should().Contain("--fx-version 3.0.0");
    }

    [Fact]
    public void ItDoesNotAddFxVersionAsAParamWhenTheToolDoesNotHaveThePrefercliruntimeFile()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get(TestProjectName)
            .CreateInstance()
            .WithSourceFiles()
            .WithRestoreFiles();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-portable",
            CommandArguments = null,
            ProjectDirectory = testInstance.Root.FullName
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull();
        result.Args.Should().NotContain("--fx-version");
    }

    [Fact]
    public void ItFindsToolsLocatedInTheNuGetFallbackFolder()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get("AppWithFallbackFolderToolDependency")
            .CreateInstance()
            .WithSourceFiles()
            .WithNuGetConfigAndExternalRestoreSources(new RepoDirectoriesProvider().TestPackages);
        var testProjectDirectory = testInstance.Root.FullName;
        var fallbackFolder = Path.Combine(testProjectDirectory, "fallbackFolder");
        PopulateFallbackFolder(testProjectDirectory, fallbackFolder);
        var nugetConfig = UseNuGetConfigWithFallbackFolder(testInstance, fallbackFolder);
        new RestoreCommand()
            .WithWorkingDirectory(testProjectDirectory)
            .Execute($"--configfile {nugetConfig}")
            .Should()
            .Pass();
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-fallbackfoldertool",
            CommandArguments = null,
            ProjectDirectory = testProjectDirectory
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull();
        var commandPath = result.Args.Trim('"');
        commandPath.Should().Contain(Path.Combine(
            fallbackFolder,
            "dotnet-fallbackfoldertool",
            "1.0.0",
            "lib",
            "netcoreapp3.0",
            "dotnet-fallbackfoldertool.dll"));
    }

    [Fact]
    public void ItShowsAnErrorWhenTheToolDllIsNotFound()
    {
        var projectToolsCommandResolver = SetupProjectToolsCommandResolver();
        var testInstance = TestAssets.Get("AppWithFallbackFolderToolDependency")
            .CreateInstance()
            .WithSourceFiles()
            .WithNuGetConfigAndExternalRestoreSources(new RepoDirectoriesProvider().TestPackages);
        var testProjectDirectory = testInstance.Root.FullName;
        var fallbackFolder = Path.Combine(testProjectDirectory, "fallbackFolder");
        var nugetPackages = Path.Combine(testProjectDirectory, "nugetPackages");
        PopulateFallbackFolder(testProjectDirectory, fallbackFolder);
        var nugetConfig = UseNuGetConfigWithFallbackFolder(testInstance, fallbackFolder);
        new RestoreCommand()
            .WithWorkingDirectory(testProjectDirectory)
            .Execute($"--configfile {nugetConfig} /p:RestorePackagesPath={nugetPackages}")
            .Should()
            .Pass();
        // We need to run the tool once to generate the deps.json
        // otherwise we end up with a different error message.
        var commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-fallbackfoldertool",
            CommandArguments = null,
            ProjectDirectory = testProjectDirectory
        };
        var result = projectToolsCommandResolver.Resolve(commandResolverArguments);
        result.Should().NotBeNull();
        // Delete the tool's package from the fallback folder so the next
        // resolution fails to find the assemblies.
        Directory.Delete(Path.Combine(fallbackFolder, "dotnet-fallbackfoldertool"), true);
        commandResolverArguments = new CommandResolverArguments()
        {
            CommandName = "dotnet-fallbackfoldertool",
            CommandArguments = null,
            ProjectDirectory = testProjectDirectory
        };
        Action action = () => projectToolsCommandResolver.Resolve(commandResolverArguments);
        action.ShouldThrow<GracefulException>().WithMessage(
            string.Format(LocalizableStrings.CommandAssembliesNotFound, "dotnet-fallbackfoldertool"));
    }

    // Restores the test project with the fallback folder as the packages
    // directory, then strips the .tools directory so only package content remains.
    private void PopulateFallbackFolder(string testProjectDirectory, string fallbackFolder)
    {
        var nugetConfigPath = Path.Combine(testProjectDirectory, "NuGet.Config");
        new RestoreCommand()
            .WithWorkingDirectory(testProjectDirectory)
            .Execute($"--configfile {nugetConfigPath} --packages {fallbackFolder}")
            .Should()
            .Pass();
        Directory.Delete(Path.Combine(fallbackFolder, ".tools"), true);
    }

    // Rewrites the test instance's NuGet.Config to declare the given directory
    // as a fallback package folder, returning the config file's path.
    private string UseNuGetConfigWithFallbackFolder(TestAssetInstance testInstance, string fallbackFolder)
    {
        var nugetConfig = testInstance.Root.GetFile("NuGet.Config").FullName;
        File.WriteAllText(
            nugetConfig,
            $@"<?xml version=""1.0"" encoding=""utf-8""?>
            <configuration>
              <fallbackPackageFolders>
                <add key=""MachineWide"" value=""{fallbackFolder}""/>
              </fallbackPackageFolders>
            </configuration>
            ");
        return nugetConfig;
    }

    // Builds a resolver wired to the stage-2 SDK's MSBuild and the
    // CLI-runtime-preferring packaged command spec factory.
    private ProjectToolsCommandResolver SetupProjectToolsCommandResolver()
    {
        Environment.SetEnvironmentVariable(
            Constants.MSBUILD_EXE_PATH,
            Path.Combine(new RepoDirectoriesProvider().Stage2Sdk, "MSBuild.dll"));
        var packagedCommandSpecFactory = new PackagedCommandSpecFactoryWithCliRuntime();
        var projectToolsCommandResolver =
            new ProjectToolsCommandResolver(packagedCommandSpecFactory, new EnvironmentProvider());
        return projectToolsCommandResolver;
    }

    private string GetToolDepsJsonGeneratorProject()
    {
        // When using the product, the ToolDepsJsonGeneratorProject property is used to get this path, but for testing
        // we'll hard code the path inside the SDK since we don't have a project to evaluate here
        return Path.Combine(new RepoDirectoriesProvider().Stage2Sdk, "Sdks", "Microsoft.NET.Sdk", "targets", "GenerateDeps", "GenerateDeps.proj");
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: Provides some basic access to some environment
** functionality.
**
**
============================================================*/
namespace System
{
using System.Buffers;
using System.IO;
using System.Security;
using System.Resources;
using System.Globalization;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.Configuration.Assemblies;
using System.Runtime.InteropServices;
using System.Reflection;
using System.Diagnostics;
using Microsoft.Win32;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Runtime.ConstrainedExecution;
using System.Runtime.Versioning;
// Scope an environment variable lives in: the current process's environment
// block, the current user's registry hive, or the machine-wide registry hive.
public enum EnvironmentVariableTarget
{
    Process = 0,
    User = 1,
    Machine = 2,
}
internal static partial class Environment
{
// Assume the following constants include the terminating '\0' - use <, not <=
// System environment variables are stored in the registry, and have
// a size restriction that is separate from both normal environment
// variables and registry value name lengths, according to MSDN.
// MSDN doesn't detail whether the name is limited to 1024, or whether
// that includes the contents of the environment variable.
private const int MaxSystemEnvVariableLength = 1024;
private const int MaxUserEnvVariableLength = 255;
private const int MaxMachineNameLength = 256;

// Looks up the resource string value for key.
//
// if you change this method's signature then you must change the code that calls it
// in excep.cpp and probably you will have to visit mscorlib.h to add the new signature
// as well as metasig.h to create the new signature type
internal static String GetResourceStringLocal(String key)
{
    return SR.GetResourceString(key);
}

/*==================================TickCount===================================
**Action: Gets the number of ticks since the system was started.
**Returns: The number of ticks since the system was started.
**Arguments: None
**Exceptions: None
==============================================================================*/
// FCALL-backed; implemented inside the runtime.
public static extern int TickCount
{
    [MethodImplAttribute(MethodImplOptions.InternalCall)]
    get;
}

// Terminates this process with the given exit code. QCall into the runtime.
[DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
internal static extern void _Exit(int exitCode);

// Public wrapper over the _Exit QCall; terminates the process immediately.
public static void Exit(int exitCode)
{
    _Exit(exitCode);
}

// Exit code reported to the OS when the process terminates normally.
public static extern int ExitCode
{
    [MethodImplAttribute(MethodImplOptions.InternalCall)]
    get;
    [MethodImplAttribute(MethodImplOptions.InternalCall)]
    set;
}

// Note: The CLR's Watson bucketization code looks at the caller of the FCALL method
// to assign blame for crashes. Don't mess with this, such as by making it call
// another managed helper method, unless you consult with some CLR Watson experts.
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public static extern void FailFast(String message);

// This overload of FailFast will allow you to specify the exception object
// whose bucket details *could* be used when undergoing the failfast process.
// To be specific:
//
// 1) When invoked from within a managed EH clause (fault/finally/catch),
// if the exception object is preallocated, the runtime will try to find its buckets
// and use them. If the exception object is not preallocated, it will use the bucket
// details contained in the object (if any).
//
// 2) When invoked from outside the managed EH clauses (fault/finally/catch),
// if the exception object is preallocated, the runtime will use the callsite's
// IP for bucketing. If the exception object is not preallocated, it will use the bucket
// details contained in the object (if any).
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public static extern void FailFast(String message, Exception exception);

[MethodImplAttribute(MethodImplOptions.InternalCall)]
public static extern void FailFast(String message, Exception exception, String errorMessage);
#if FEATURE_WIN32_REGISTRY
// This is only used by RegistryKey on Windows.
// Expands %VAR% references in name via the Win32 ExpandEnvironmentStrings API,
// growing the buffer until the expanded result fits.
public static String ExpandEnvironmentVariables(String name)
{
    if (name == null)
        throw new ArgumentNullException(nameof(name));

    if (name.Length == 0)
    {
        return name;
    }

    int currentSize = 100;
    StringBuilder blob = new StringBuilder(currentSize); // A somewhat reasonable default size

    int size;

    blob.Length = 0;
    // ExpandEnvironmentStrings returns the required buffer size when the
    // supplied buffer is too small, and 0 on failure.
    size = Win32Native.ExpandEnvironmentStrings(name, blob, currentSize);
    if (size == 0)
        Marshal.ThrowExceptionForHR(Marshal.GetHRForLastWin32Error());

    // Grow and retry until the whole expansion fits in the buffer.
    while (size > currentSize)
    {
        currentSize = size;
        blob.Capacity = currentSize;
        blob.Length = 0;

        size = Win32Native.ExpandEnvironmentStrings(name, blob, currentSize);
        if (size == 0)
            Marshal.ThrowExceptionForHR(Marshal.GetHRForLastWin32Error());
    }

    return blob.ToString();
}
#endif // FEATURE_WIN32_REGISTRY
// Number of processors available to the process, as reported by the runtime.
[DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
private static extern Int32 GetProcessorCount();

public static int ProcessorCount
{
    get
    {
        return GetProcessorCount();
    }
}
/*==============================GetCommandLineArgs==============================
**Action: Gets the command line and splits it appropriately to deal with whitespace,
** quotes, and escape characters.
**Returns: A string array containing your command line arguments.
**Arguments: None
**Exceptions: None.
==============================================================================*/
public static String[] GetCommandLineArgs()
{
    /*
     * There are multiple entry points to a hosted app.
     * The host could use ::ExecuteAssembly() or ::CreateDelegate option
     * ::ExecuteAssembly() -> In this particular case, the runtime invokes the main
     * method based on the arguments set by the host, and we return those arguments
     *
     * ::CreateDelegate() -> In this particular case, the host is asked to create a
     * delegate based on the appDomain, assembly and methodDesc passed to it.
     * which the caller uses to invoke the method. In this particular case we do not have
     * any information on what arguments would be passed to the delegate.
     * So our best bet is to simply use the commandLine that was used to invoke the process.
     * in case it is present.
     */
    // Prefer the host-provided arguments when present; clone so callers cannot
    // mutate the cached array.
    if (s_CommandLineArgs != null)
        return (string[])s_CommandLineArgs.Clone();

    return GetCommandLineArgsNative();
}

[MethodImplAttribute(MethodImplOptions.InternalCall)]
private static extern String[] GetCommandLineArgsNative();

// Arguments supplied by the host, if any; recorded via SetCommandLineArgs.
private static string[] s_CommandLineArgs = null;

// Called by the runtime/host to record the arguments used to invoke the app.
private static void SetCommandLineArgs(string[] cmdLineArgs)
{
    s_CommandLineArgs = cmdLineArgs;
}
// Copies the raw Win32 environment block into a managed char[] (including the
// first of the two terminating NULs). The CER + finally guarantee the native
// block is freed even on asynchronous exceptions.
private unsafe static char[] GetEnvironmentCharArray()
{
    char[] block = null;

    RuntimeHelpers.PrepareConstrainedRegions();

    char* pStrings = null;

    try
    {
        pStrings = Win32Native.GetEnvironmentStrings();
        if (pStrings == null)
        {
            throw new OutOfMemoryException();
        }

        // Format for GetEnvironmentStrings is:
        // [=HiddenVar=value\0]* [Variable=value\0]* \0
        // See the description of Environment Blocks in MSDN's
        // CreateProcess page (null-terminated array of null-terminated strings).

        // Search for terminating \0\0 (two unicode \0's).
        char* p = pStrings;
        while (!(*p == '\0' && *(p + 1) == '\0'))
            p++;

        // len counts every char up to and including the first terminating NUL.
        int len = (int)(p - pStrings + 1);
        block = new char[len];

        fixed (char* pBlock = block)
            string.wstrcpy(pBlock, pStrings, len);
    }
    finally
    {
        if (pStrings != null)
            Win32Native.FreeEnvironmentStrings(pStrings);
    }

    return block;
}
/*===================================NewLine====================================
**Action: A property which returns the appropriate newline string for the given
** platform.
**Returns: \r\n on Win32.
**Arguments: None.
**Exceptions: None.
==============================================================================*/
public static String NewLine
{
    get
    {
#if PLATFORM_WINDOWS
        return "\r\n";
#else
        return "\n";
#endif // PLATFORM_WINDOWS
    }
}

/*===================================Version====================================
**Action: Returns the COM+ version struct, describing the build number.
**Returns:
**Arguments:
**Exceptions:
==============================================================================*/
public static Version Version
{
    get
    {
        // Previously this represented the File version of mscorlib.dll. Many other libraries in the framework and outside took dependencies on the first three parts of this version
        // remaining constant throughout 4.x. From 4.0 to 4.5.2 this was fine since the file version only incremented the last part.Starting with 4.6 we switched to a file versioning
        // scheme that matched the product version. In order to preserve compatibility with existing libraries, this needs to be hard-coded.
        return new Version(4, 0, 30319, 42000);
    }
}
#if !FEATURE_PAL
// Lazily evaluates (once per process) whether the OS is Windows 8 (6.2) or
// newer, using VerifyVersionInfoW with a >= condition mask on all four parts.
private static Lazy<bool> s_IsWindows8OrAbove = new Lazy<bool>(() =>
{
    ulong conditionMask = Win32Native.VerSetConditionMask(0, Win32Native.VER_MAJORVERSION, Win32Native.VER_GREATER_EQUAL);
    conditionMask = Win32Native.VerSetConditionMask(conditionMask, Win32Native.VER_MINORVERSION, Win32Native.VER_GREATER_EQUAL);
    conditionMask = Win32Native.VerSetConditionMask(conditionMask, Win32Native.VER_SERVICEPACKMAJOR, Win32Native.VER_GREATER_EQUAL);
    conditionMask = Win32Native.VerSetConditionMask(conditionMask, Win32Native.VER_SERVICEPACKMINOR, Win32Native.VER_GREATER_EQUAL);

    // Windows 8 version is 6.2
    var version = new Win32Native.OSVERSIONINFOEX { MajorVersion = 6, MinorVersion = 2, ServicePackMajor = 0, ServicePackMinor = 0 };

    return Win32Native.VerifyVersionInfoW(version,
        Win32Native.VER_MAJORVERSION | Win32Native.VER_MINORVERSION | Win32Native.VER_SERVICEPACKMAJOR | Win32Native.VER_SERVICEPACKMINOR,
        conditionMask);
});
internal static bool IsWindows8OrAbove => s_IsWindows8OrAbove.Value;
#endif

#if FEATURE_COMINTEROP
// Does the current version of Windows have Windows Runtime support?
private static Lazy<bool> s_IsWinRTSupported = new Lazy<bool>(() =>
{
    return WinRTSupported();
});
internal static bool IsWinRTSupported => s_IsWinRTSupported.Value;

[DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
[return: MarshalAs(UnmanagedType.Bool)]
private static extern bool WinRTSupported();
#endif // FEATURE_COMINTEROP
/*==================================StackTrace==================================
**Action: Returns a string representation of the current call stack.
**Returns: The current stack trace as text.
**Arguments: None.
**Exceptions: None.
==============================================================================*/
public static String StackTrace
{
    [MethodImpl(MethodImplOptions.NoInlining)] // Prevent inlining from affecting where the stacktrace starts
    get
    {
        return Internal.Runtime.Augments.EnvironmentAugments.StackTrace;
    }
}
// Builds the textual stack trace for the given exception, or for the current
// call stack when e is null.
internal static String GetStackTrace(Exception e, bool needFileInfo)
{
    // Note: Setting needFileInfo to true will start up COM and set our
    // apartment state. Try to not call this when passing "true"
    // before the EE's ExecuteMainMethod has had a chance to set up the
    // apartment state. --
    StackTrace st = (e == null)
        ? new StackTrace(needFileInfo)
        : new StackTrace(e, needFileInfo);

    // TraceFormat.Normal omits the trailing newline for backwards compatibility.
    return st.ToString(System.Diagnostics.StackTrace.TraceFormat.Normal);
}
// True once the runtime has begun shutting down. Implemented by the runtime.
public static extern bool HasShutdownStarted
{
    [MethodImplAttribute(MethodImplOptions.InternalCall)]
    get;
}

// This build always reports an interactive session.
internal static bool UserInteractive
{
    get
    {
        return true;
    }
}

// Managed thread id of the currently executing thread.
public static int CurrentManagedThreadId
{
    get
    {
        return Thread.CurrentThread.ManagedThreadId;
    }
}

// Processor the current thread is running on; implemented by the runtime.
internal static extern int CurrentProcessorNumber
{
    [MethodImplAttribute(MethodImplOptions.InternalCall)]
    get;
}
// The upper bits of t_executionIdCache are the executionId. The lower bits of
// the t_executionIdCache are counting down to get it periodically refreshed.
// TODO: Consider flushing the executionIdCache on Wait operations or similar
// actions that are likely to result in changing the executing core
[ThreadStatic]
private static int t_executionIdCache;

// Bit 16 and up hold the execution id; bits 0-15 hold the refresh countdown.
private const int ExecutionIdCacheShift = 16;
private const int ExecutionIdCacheCountDownMask = (1 << ExecutionIdCacheShift) - 1;
// Number of CurrentExecutionId reads between re-queries of the processor number.
private const int ExecutionIdRefreshRate = 5000;

// Re-queries the current processor number and repacks the thread-static cache
// with a fresh countdown. Returns the new execution id.
private static int RefreshExecutionId()
{
    int executionId = CurrentProcessorNumber;

    // On Unix, CurrentProcessorNumber is implemented in terms of sched_getcpu, which
    // doesn't exist on all platforms. On those it doesn't exist on, GetCurrentProcessorNumber
    // returns -1. As a fallback in that case and to spread the threads across the buckets
    // by default, we use the current managed thread ID as a proxy.
    if (executionId < 0) executionId = Environment.CurrentManagedThreadId;

    Debug.Assert(ExecutionIdRefreshRate <= ExecutionIdCacheCountDownMask);

    // Mask with Int32.MaxValue to ensure the execution Id is not negative
    t_executionIdCache = ((executionId << ExecutionIdCacheShift) & Int32.MaxValue) | ExecutionIdRefreshRate;

    return executionId;
}

// Cached processor number used as a hint for which per-core stack to access. It is periodically
// refreshed to trail the actual thread core affinity.
internal static int CurrentExecutionId
{
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    get
    {
        // Each read decrements the countdown in the low bits; when it hits
        // zero the id is refreshed from the OS.
        int executionIdCache = t_executionIdCache--;
        if ((executionIdCache & ExecutionIdCacheCountDownMask) == 0)
            return RefreshExecutionId();
        return (executionIdCache >> ExecutionIdCacheShift);
    }
}
// Returns the value of the given process-level environment variable, or null
// when it is not set.
public static string GetEnvironmentVariable(string variable)
{
    if (variable == null)
        throw new ArgumentNullException(nameof(variable));

    // Kept separate from the EnvironmentVariableTarget overload to help with
    // tree shaking in the common case.
    return GetEnvironmentVariableCore(variable);
}
// Returns the value of the variable from the requested target scope, or null
// when it is not set there.
internal static string GetEnvironmentVariable(string variable, EnvironmentVariableTarget target)
{
    if (variable == null)
        throw new ArgumentNullException(nameof(variable));

    ValidateTarget(target);

    return GetEnvironmentVariableCore(variable, target);
}
// Sets (or, for a null/empty value, deletes) a process-level environment
// variable after validating the name and normalizing the value.
public static void SetEnvironmentVariable(string variable, string value)
{
    ValidateVariableAndValue(variable, ref value);

    // separated from the EnvironmentVariableTarget overload to help with tree shaking in common case
    SetEnvironmentVariableCore(variable, value);
}

// Same as above but for an explicit target scope (process, user or machine).
internal static void SetEnvironmentVariable(string variable, string value, EnvironmentVariableTarget target)
{
    ValidateVariableAndValue(variable, ref value);
    ValidateTarget(target);

    SetEnvironmentVariableCore(variable, value, target);
}
// Validates an environment-variable name and normalizes an empty (or
// NUL-leading) value to null, which downstream code treats as "delete".
// The check order is part of the contract: null, zero-length, leading NUL,
// then embedded '='.
private static void ValidateVariableAndValue(string variable, ref string value)
{
    if (variable == null)
        throw new ArgumentNullException(nameof(variable));
    if (variable.Length == 0)
        throw new ArgumentException(SR.Argument_StringZeroLength, nameof(variable));
    if (variable[0] == '\0')
        throw new ArgumentException(SR.Argument_StringFirstCharIsZero, nameof(variable));
    if (variable.IndexOf('=') != -1)
        throw new ArgumentException(SR.Argument_IllegalEnvVarName, nameof(variable));

    if (string.IsNullOrEmpty(value) || value[0] == '\0')
        value = null; // explicitly null out an empty value
}
// Rejects any EnvironmentVariableTarget value outside the three defined members.
private static void ValidateTarget(EnvironmentVariableTarget target)
{
    switch (target)
    {
        case EnvironmentVariableTarget.Process:
        case EnvironmentVariableTarget.Machine:
        case EnvironmentVariableTarget.User:
            return;
        default:
            throw new ArgumentOutOfRangeException(nameof(target), target, SR.Format(SR.Arg_EnumIllegalVal, target));
    }
}
// Process-target lookup: starts with a small stack buffer and retries with a
// rented array when the value is longer.
private static string GetEnvironmentVariableCore(string variable)
{
    Span<char> buffer = stackalloc char[128]; // A somewhat reasonable default size
    return GetEnvironmentVariableCoreHelper(variable, buffer);
}

private static string GetEnvironmentVariableCoreHelper(string variable, Span<char> buffer)
{
    // Win32 returns the number of chars copied, or the required size when the
    // buffer is too small, or 0 on failure.
    int requiredSize = Win32Native.GetEnvironmentVariable(variable, buffer);

    if (requiredSize == 0 && Marshal.GetLastWin32Error() == Interop.Errors.ERROR_ENVVAR_NOT_FOUND)
    {
        // Variable is not set.
        return null;
    }

    if (requiredSize > buffer.Length)
    {
        // Retry once with a pooled buffer big enough for the whole value.
        char[] chars = ArrayPool<char>.Shared.Rent(requiredSize);
        try
        {
            return GetEnvironmentVariableCoreHelper(variable, chars);
        }
        finally
        {
            ArrayPool<char>.Shared.Return(chars);
        }
    }

    return new string(buffer.Slice(0, requiredSize));
}
// Target-aware lookup. Process goes to the environment block; User/Machine go
// to the registry. Without registry support (or when running as AppX) the
// non-process targets always report null.
private static string GetEnvironmentVariableCore(string variable, EnvironmentVariableTarget target)
{
    if (target == EnvironmentVariableTarget.Process)
        return GetEnvironmentVariableCore(variable);

    // NOTE: when FEATURE_WIN32_REGISTRY is undefined the bare braces below
    // become unconditional, so every non-process lookup returns null.
#if FEATURE_WIN32_REGISTRY
    if (AppDomain.IsAppXModel())
#endif
    {
        return null;
    }
#if FEATURE_WIN32_REGISTRY
    RegistryKey baseKey;
    string keyName;

    if (target == EnvironmentVariableTarget.Machine)
    {
        baseKey = Registry.LocalMachine;
        keyName = @"System\CurrentControlSet\Control\Session Manager\Environment";
    }
    else if (target == EnvironmentVariableTarget.User)
    {
        baseKey = Registry.CurrentUser;
        keyName = "Environment";
    }
    else
    {
        throw new ArgumentException(SR.Format(SR.Arg_EnumIllegalVal, (int)target));
    }

    using (RegistryKey environmentKey = baseKey.OpenSubKey(keyName, writable: false))
    {
        return environmentKey?.GetValue(variable) as string;
    }
#endif
}
// Lazily parses the process's environment block into name/value pairs,
// skipping the hidden "=X:=..." drive-cwd entries.
internal static IEnumerable<KeyValuePair<string, string>> EnumerateEnvironmentVariables()
{
    // Format for GetEnvironmentStrings is:
    // (=HiddenVar=value\0 | Variable=value\0)* \0
    // See the description of Environment Blocks in MSDN's
    // CreateProcess page (null-terminated array of null-terminated strings).
    // Note the =HiddenVar's aren't always at the beginning.

    // Copy strings out, parsing into pairs and inserting into the table.
    // The first few environment variable entries start with an '='.
    // The current working directory of every drive (except for those drives
    // you haven't cd'ed into in your DOS window) are stored in the
    // environment block (as =C:=pwd) and the program's exit code is
    // as well (=ExitCode=00000000).
    char[] block = GetEnvironmentCharArray();
    for (int i = 0; i < block.Length; i++)
    {
        int startKey = i;

        // Skip to key. On some old OS, the environment block can be corrupted.
        // Some will not have '=', so we need to check for '\0'.
        while (block[i] != '=' && block[i] != '\0')
            i++;
        if (block[i] == '\0')
            continue;

        // Skip over environment variables starting with '='
        if (i - startKey == 0)
        {
            while (block[i] != 0)
                i++;
            continue;
        }

        string key = new string(block, startKey, i - startKey);
        i++; // skip over '='

        int startValue = i;
        while (block[i] != 0)
            i++; // Read to end of this entry
        string value = new string(block, startValue, i - startValue); // skip over 0 handled by for loop's i++

        yield return new KeyValuePair<string, string>(key, value);
    }
}
// Dispatches enumeration by target: the process scope reads the environment
// block; User/Machine read the registry.
internal static IEnumerable<KeyValuePair<string, string>> EnumerateEnvironmentVariables(EnvironmentVariableTarget target)
{
    return target == EnvironmentVariableTarget.Process
        ? EnumerateEnvironmentVariables()
        : EnumerateEnvironmentVariablesFromRegistry(target);
}
// Enumerates User/Machine variables stored in the registry. Without registry
// support (or when running as AppX) this validates the target and yields nothing.
internal static IEnumerable<KeyValuePair<string, string>> EnumerateEnvironmentVariablesFromRegistry(EnvironmentVariableTarget target)
{
    // NOTE: when FEATURE_WIN32_REGISTRY is undefined the bare braces below
    // become unconditional, so the method always yields nothing.
#if FEATURE_WIN32_REGISTRY
    if (AppDomain.IsAppXModel())
#endif
    {
        // Without registry support we have nothing to return
        ValidateTarget(target);
        yield break;
    }
#if FEATURE_WIN32_REGISTRY
    RegistryKey baseKey;
    string keyName;
    if (target == EnvironmentVariableTarget.Machine)
    {
        baseKey = Registry.LocalMachine;
        keyName = @"System\CurrentControlSet\Control\Session Manager\Environment";
    }
    else if (target == EnvironmentVariableTarget.User)
    {
        baseKey = Registry.CurrentUser;
        keyName = @"Environment";
    }
    else
    {
        throw new ArgumentOutOfRangeException(nameof(target), target, SR.Format(SR.Arg_EnumIllegalVal, target));
    }

    using (RegistryKey environmentKey = baseKey.OpenSubKey(keyName, writable: false))
    {
        if (environmentKey != null)
        {
            foreach (string name in environmentKey.GetValueNames())
            {
                string value = environmentKey.GetValue(name, "").ToString();
                yield return new KeyValuePair<string, string>(name, value);
            }
        }
    }
#endif // FEATURE_WIN32_REGISTRY
}
// Process-target set/delete. A null (or empty) value deletes the variable.
// Win32 failures are mapped onto the closest managed exception.
private static void SetEnvironmentVariableCore(string variable, string value)
{
    // explicitly null out value if is the empty string.
    if (string.IsNullOrEmpty(value) || value[0] == '\0')
        value = null;

    if (!Win32Native.SetEnvironmentVariable(variable, value))
    {
        int errorCode = Marshal.GetLastWin32Error();
        switch (errorCode)
        {
            case Interop.Errors.ERROR_ENVVAR_NOT_FOUND:
                // Allow user to try to clear a environment variable
                return;
            case Interop.Errors.ERROR_FILENAME_EXCED_RANGE:
                // The error message from Win32 is "The filename or extension is too long",
                // which is not accurate.
                throw new ArgumentException(SR.Format(SR.Argument_LongEnvVarValue));
            case Interop.Errors.ERROR_NOT_ENOUGH_MEMORY:
            case Interop.Errors.ERROR_NO_SYSTEM_RESOURCES:
                throw new OutOfMemoryException(Interop.Kernel32.GetMessage(errorCode));
            default:
                throw new ArgumentException(Interop.Kernel32.GetMessage(errorCode));
        }
    }
}
// Target-aware set/delete. Process updates the environment block; User/Machine
// write the registry and broadcast WM_SETTINGCHANGE so other apps can refresh.
// Without registry support (or when running as AppX) non-process targets are
// silently ignored.
private static void SetEnvironmentVariableCore(string variable, string value, EnvironmentVariableTarget target)
{
    if (target == EnvironmentVariableTarget.Process)
    {
        SetEnvironmentVariableCore(variable, value);
        return;
    }

    // NOTE: when FEATURE_WIN32_REGISTRY is undefined the bare braces below
    // become unconditional, so non-process targets become a no-op.
#if FEATURE_WIN32_REGISTRY
    if (AppDomain.IsAppXModel())
#endif
    {
        // other targets ignored
        return;
    }
#if FEATURE_WIN32_REGISTRY
    // explicitly null out value if is the empty string.
    if (string.IsNullOrEmpty(value) || value[0] == '\0')
        value = null;

    RegistryKey baseKey;
    string keyName;

    if (target == EnvironmentVariableTarget.Machine)
    {
        baseKey = Registry.LocalMachine;
        keyName = @"System\CurrentControlSet\Control\Session Manager\Environment";
    }
    else if (target == EnvironmentVariableTarget.User)
    {
        // User-wide environment variables stored in the registry are limited to 255 chars for the environment variable name.
        const int MaxUserEnvVariableLength = 255;
        if (variable.Length >= MaxUserEnvVariableLength)
        {
            throw new ArgumentException(SR.Argument_LongEnvVarValue, nameof(variable));
        }

        baseKey = Registry.CurrentUser;
        keyName = "Environment";
    }
    else
    {
        throw new ArgumentException(SR.Format(SR.Arg_EnumIllegalVal, (int)target));
    }

    using (RegistryKey environmentKey = baseKey.OpenSubKey(keyName, writable: true))
    {
        if (environmentKey != null)
        {
            if (value == null)
            {
                // Null value means "delete"; ignore a missing value.
                environmentKey.DeleteValue(variable, throwOnMissingValue: false);
            }
            else
            {
                environmentKey.SetValue(variable, value);
            }
        }
    }

    // send a WM_SETTINGCHANGE message to all windows
    IntPtr r = Interop.User32.SendMessageTimeout(new IntPtr(Interop.User32.HWND_BROADCAST),
        Interop.User32.WM_SETTINGCHANGE, IntPtr.Zero, "Environment", 0, 1000, IntPtr.Zero);
    Debug.Assert(r != IntPtr.Zero, "SetEnvironmentVariable failed: " + Marshal.GetLastWin32Error());
#endif // FEATURE_WIN32_REGISTRY
}
}
}
| |
/**
* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
// @Generated by gentest/gentest.rb from gentest/fixtures/YGAlignSelfTest.html
using System;
using NUnit.Framework;
namespace Facebook.Yoga
{
[TestFixture]
public class YGAlignSelfTest
{
// Generated test (do not hand-edit logic): a 10x10 child with AlignSelf=Center
// in a 100x100 container must be centered on the cross axis (x=45) in both
// LTR and RTL layouts.
[Test]
public void Test_align_self_center()
{
    YogaNode root = new YogaNode();
    root.Width = 100;
    root.Height = 100;

    YogaNode root_child0 = new YogaNode();
    root_child0.AlignSelf = YogaAlign.Center;
    root_child0.Width = 10;
    root_child0.Height = 10;
    root.Insert(0, root_child0);
    root.StyleDirection = YogaDirection.LTR;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(45f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(10f, root_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child0.LayoutHeight);

    root.StyleDirection = YogaDirection.RTL;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(45f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(10f, root_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child0.LayoutHeight);
}
// Generated test (do not hand-edit logic): AlignSelf=FlexEnd pins the child to
// the cross-axis end — x=90 in LTR, x=0 in RTL.
[Test]
public void Test_align_self_flex_end()
{
    YogaNode root = new YogaNode();
    root.Width = 100;
    root.Height = 100;

    YogaNode root_child0 = new YogaNode();
    root_child0.AlignSelf = YogaAlign.FlexEnd;
    root_child0.Width = 10;
    root_child0.Height = 10;
    root.Insert(0, root_child0);
    root.StyleDirection = YogaDirection.LTR;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(90f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(10f, root_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child0.LayoutHeight);

    root.StyleDirection = YogaDirection.RTL;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(0f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(10f, root_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child0.LayoutHeight);
}
// Generated test (do not hand-edit logic): AlignSelf=FlexStart pins the child
// to the cross-axis start — x=0 in LTR, x=90 in RTL.
[Test]
public void Test_align_self_flex_start()
{
    YogaNode root = new YogaNode();
    root.Width = 100;
    root.Height = 100;

    YogaNode root_child0 = new YogaNode();
    root_child0.AlignSelf = YogaAlign.FlexStart;
    root_child0.Width = 10;
    root_child0.Height = 10;
    root.Insert(0, root_child0);
    root.StyleDirection = YogaDirection.LTR;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(0f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(10f, root_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child0.LayoutHeight);

    root.StyleDirection = YogaDirection.RTL;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(90f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(10f, root_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child0.LayoutHeight);
}
// Generated test (do not hand-edit logic): a child's AlignSelf=FlexEnd must
// override the container's AlignItems=FlexStart.
[Test]
public void Test_align_self_flex_end_override_flex_start()
{
    YogaNode root = new YogaNode();
    root.AlignItems = YogaAlign.FlexStart;
    root.Width = 100;
    root.Height = 100;

    YogaNode root_child0 = new YogaNode();
    root_child0.AlignSelf = YogaAlign.FlexEnd;
    root_child0.Width = 10;
    root_child0.Height = 10;
    root.Insert(0, root_child0);
    root.StyleDirection = YogaDirection.LTR;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(90f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(10f, root_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child0.LayoutHeight);

    root.StyleDirection = YogaDirection.RTL;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(0f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(10f, root_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child0.LayoutHeight);
}
// Generated test (do not hand-edit logic): two baseline-aligned row children of
// different heights — the shorter one (with a nested child) is shifted down
// (y=40) so the baselines line up, in both LTR and RTL.
[Test]
public void Test_align_self_baseline()
{
    YogaNode root = new YogaNode();
    root.FlexDirection = YogaFlexDirection.Row;
    root.Width = 100;
    root.Height = 100;

    YogaNode root_child0 = new YogaNode();
    root_child0.AlignSelf = YogaAlign.Baseline;
    root_child0.Width = 50;
    root_child0.Height = 50;
    root.Insert(0, root_child0);

    YogaNode root_child1 = new YogaNode();
    root_child1.AlignSelf = YogaAlign.Baseline;
    root_child1.Width = 50;
    root_child1.Height = 20;
    root.Insert(1, root_child1);

    YogaNode root_child1_child0 = new YogaNode();
    root_child1_child0.Width = 50;
    root_child1_child0.Height = 10;
    root_child1.Insert(0, root_child1_child0);
    root.StyleDirection = YogaDirection.LTR;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(0f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(50f, root_child0.LayoutWidth);
    Assert.AreEqual(50f, root_child0.LayoutHeight);

    Assert.AreEqual(50f, root_child1.LayoutX);
    Assert.AreEqual(40f, root_child1.LayoutY);
    Assert.AreEqual(50f, root_child1.LayoutWidth);
    Assert.AreEqual(20f, root_child1.LayoutHeight);

    Assert.AreEqual(0f, root_child1_child0.LayoutX);
    Assert.AreEqual(0f, root_child1_child0.LayoutY);
    Assert.AreEqual(50f, root_child1_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child1_child0.LayoutHeight);

    root.StyleDirection = YogaDirection.RTL;
    root.CalculateLayout();

    Assert.AreEqual(0f, root.LayoutX);
    Assert.AreEqual(0f, root.LayoutY);
    Assert.AreEqual(100f, root.LayoutWidth);
    Assert.AreEqual(100f, root.LayoutHeight);

    Assert.AreEqual(50f, root_child0.LayoutX);
    Assert.AreEqual(0f, root_child0.LayoutY);
    Assert.AreEqual(50f, root_child0.LayoutWidth);
    Assert.AreEqual(50f, root_child0.LayoutHeight);

    Assert.AreEqual(0f, root_child1.LayoutX);
    Assert.AreEqual(40f, root_child1.LayoutY);
    Assert.AreEqual(50f, root_child1.LayoutWidth);
    Assert.AreEqual(20f, root_child1.LayoutHeight);

    Assert.AreEqual(0f, root_child1_child0.LayoutX);
    Assert.AreEqual(0f, root_child1_child0.LayoutY);
    Assert.AreEqual(50f, root_child1_child0.LayoutWidth);
    Assert.AreEqual(10f, root_child1_child0.LayoutHeight);
}
}
}
| |
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using Pathfinding;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace Pathfinding {
// Hidden point node inserted by NodeLink3 at each end of a link. Exposes the
// link's portal edge to the path post-processing code via GetPortal.
public class NodeLink3Node : PointNode {
    public NodeLink3 link;   // owning link component
    public Vector3 portalA;  // one endpoint of the portal edge
    public Vector3 portalB;  // the other endpoint of the portal edge

    public NodeLink3Node (AstarPath active) : base(active) {}

    // Appends this node's portal edge to the left/right portal lists.
    // Returns false while the node is not yet fully connected (< 2 connections)
    // and throws if it has ended up with more than 2 connections.
    // When left is null the caller only wants to know that a portal exists.
    public override bool GetPortal (GraphNode other, List<Vector3> left, List<Vector3> right, bool backwards) {
        if (this.connections.Length < 2) return false;

        if (this.connections.Length != 2) throw new System.Exception("Invalid NodeLink3Node. Expected 2 connections, found " + this.connections.Length);

        //if ( other != connections[0] || other != connections[1] ) return false;

        if (left != null) {
            //Debug.DrawLine ( portalA, portalB, Color.red);
            left.Add(portalA);
            right.Add(portalB);

            /*
             * Vector3 normal = link.transform.forward;
             * Vector3 tangent = Vector3.Dot (normal, (Vector3)(other.Position - this.Position) ) > 0 ? link.transform.right*0.5f : -link.transform.right*0.5f;
             *
             * Debug.DrawLine ( link.transform.position -tangent * link.portalWidth, link.transform.position +tangent * link.portalWidth, Color.red);
             *
             * Debug.DrawRay ( link.transform.position -tangent * link.portalWidth, Vector3.up*5, Color.red);
             * Debug.Break ();
             * left.Add ( link.transform.position -tangent * link.portalWidth );
             * right.Add (link.transform.position +tangent * link.portalWidth );*/
        }

        return true;
    }

    // Given one endpoint of the link, returns the node on the far side of the
    // opposite endpoint (hops through the paired NodeLink3Node).
    public GraphNode GetOther (GraphNode a) {
        if (this.connections.Length < 2) return null;
        if (this.connections.Length != 2) throw new System.Exception("Invalid NodeLink3Node. Expected 2 connections, found " + this.connections.Length);

        return a == connections[0] ? (connections[1] as NodeLink3Node).GetOtherInternal(this) : (connections[0] as NodeLink3Node).GetOtherInternal(this);
    }

    // Returns whichever of this node's two connections is not 'a'.
    GraphNode GetOtherInternal (GraphNode a) {
        if (this.connections.Length < 2) return null;
        return a == connections[0] ? connections[1] : connections[0];
    }
}
[AddComponentMenu("Pathfinding/Link3")]
[HelpURL("http://arongranberg.com/astar/docs/class_pathfinding_1_1_node_link3.php")]
public class NodeLink3 : GraphModifier {
protected static Dictionary<GraphNode, NodeLink3> reference = new Dictionary<GraphNode, NodeLink3>();
// Returns the NodeLink3 registered for the given node, or null when the node
// does not belong to any link.
public static NodeLink3 GetNodeLink (GraphNode node) {
    NodeLink3 result;

    return reference.TryGetValue(node, out result) ? result : null;
}
/** End position of the link */
public Transform end;
/** The connection will be this times harder/slower to traverse.
* Note that values lower than one will not always make the pathfinder choose this path instead of another path even though this one should
* lead to a lower total cost unless you also adjust the Heuristic Scale in A* Inspector -> Settings -> Pathfinding or disable the heuristic altogether.
*/
public float costFactor = 1.0f;
/** Make a one-way connection */
public bool oneWay = false;
/* Delete existing connection instead of adding one */
//public bool deleteConnection = false;
//private bool createHiddenNodes = true;
// Start of the link is this component's own transform.
public Transform StartTransform {
    get { return transform; }
}

// End of the link, assigned in the inspector via the 'end' field.
public Transform EndTransform {
    get { return end; }
}

// Hidden point nodes created at each end of the link during scanning.
NodeLink3Node startNode;
NodeLink3Node endNode;
// Mesh nodes the link attaches to; reset on rescan and nulled when destroyed.
MeshNode connectedNode1, connectedNode2;
// NOTE(review): names suggest these are the link-end positions clamped onto
// connectedNode1/2 — set outside this view, confirm against Apply().
Vector3 clamped1, clamped2;
// True once InternalOnPostScan has run for the current graph data.
bool postScanCalled = false;

public GraphNode StartNode {
    get { return startNode; }
}

public GraphNode EndNode {
    get { return endNode; }
}
public override void OnPostScan () {
if (AstarPath.active.isScanning) {
InternalOnPostScan();
} else {
AstarPath.active.AddWorkItem(new AstarPath.AstarWorkItem(delegate(bool force) {
InternalOnPostScan();
return true;
}));
}
}
public void InternalOnPostScan () {
#if !ASTAR_NO_POINT_GRAPH
if (AstarPath.active.astarData.pointGraph == null) {
AstarPath.active.astarData.AddGraph(new PointGraph());
}
//Get nearest nodes from the first point graph, assuming both start and end transforms are nodes
startNode = AstarPath.active.astarData.pointGraph.AddNode(new NodeLink3Node(AstarPath.active), (Int3)StartTransform.position); //AstarPath.active.astarData.pointGraph.GetNearest(StartTransform.position).node as PointNode;
startNode.link = this;
endNode = AstarPath.active.astarData.pointGraph.AddNode(new NodeLink3Node(AstarPath.active), (Int3)EndTransform.position); //AstarPath.active.astarData.pointGraph.GetNearest(EndTransform.position).node as PointNode;
endNode.link = this;
#else
throw new System.Exception("Point graphs are not included. Check your A* Optimization settings.");
#endif
connectedNode1 = null;
connectedNode2 = null;
if (startNode == null || endNode == null) {
startNode = null;
endNode = null;
return;
}
postScanCalled = true;
reference[startNode] = this;
reference[endNode] = this;
Apply(true);
}
public override void OnGraphsPostUpdate () {
//if (connectedNode1 != null && connectedNode2 != null) {
if (!AstarPath.active.isScanning) {
if (connectedNode1 != null && connectedNode1.Destroyed) {
connectedNode1 = null;
}
if (connectedNode2 != null && connectedNode2.Destroyed) {
connectedNode2 = null;
}
if (!postScanCalled) {
OnPostScan();
} else {
//OnPostScan will also call this method
Apply(false);
}
}
}
protected override void OnEnable () {
base.OnEnable();
#if !ASTAR_NO_POINT_GRAPH
if (AstarPath.active != null && AstarPath.active.astarData != null && AstarPath.active.astarData.pointGraph != null) {
OnGraphsPostUpdate();
}
#endif
}
protected override void OnDisable () {
base.OnDisable();
postScanCalled = false;
if (startNode != null) reference.Remove(startNode);
if (endNode != null) reference.Remove(endNode);
if (startNode != null && endNode != null) {
startNode.RemoveConnection(endNode);
endNode.RemoveConnection(startNode);
if (connectedNode1 != null && connectedNode2 != null) {
startNode.RemoveConnection(connectedNode1);
connectedNode1.RemoveConnection(startNode);
endNode.RemoveConnection(connectedNode2);
connectedNode2.RemoveConnection(endNode);
}
}
}
void RemoveConnections (GraphNode node) {
//TODO, might be better to replace connection
node.ClearConnections(true);
}
[ContextMenu("Recalculate neighbours")]
void ContextApplyForce () {
if (Application.isPlaying) {
Apply(true);
if (AstarPath.active != null) {
AstarPath.active.FloodFill();
}
}
}
public void Apply (bool forceNewCheck) {
//TODO
//This function assumes that connections from the n1,n2 nodes never need to be removed in the future (e.g because the nodes move or something)
NNConstraint nn = NNConstraint.None;
nn.distanceXZ = true;
int graph = (int)startNode.GraphIndex;
//Search all graphs but the one which start and end nodes are on
nn.graphMask = ~(1 << graph);
bool same = true;
if (true) {
NNInfo n1 = AstarPath.active.GetNearest(StartTransform.position, nn);
same &= n1.node == connectedNode1 && n1.node != null;
connectedNode1 = n1.node as MeshNode;
clamped1 = n1.clampedPosition;
if (connectedNode1 != null) Debug.DrawRay((Vector3)connectedNode1.position, Vector3.up*5, Color.red);
}
if (true) {
NNInfo n2 = AstarPath.active.GetNearest(EndTransform.position, nn);
same &= n2.node == connectedNode2 && n2.node != null;
connectedNode2 = n2.node as MeshNode;
clamped2 = n2.clampedPosition;
if (connectedNode2 != null) Debug.DrawRay((Vector3)connectedNode2.position, Vector3.up*5, Color.cyan);
}
if (connectedNode2 == null || connectedNode1 == null) return;
startNode.SetPosition((Int3)StartTransform.position);
endNode.SetPosition((Int3)EndTransform.position);
if (same && !forceNewCheck) return;
RemoveConnections(startNode);
RemoveConnections(endNode);
uint cost = (uint)Mathf.RoundToInt(((Int3)(StartTransform.position-EndTransform.position)).costMagnitude*costFactor);
startNode.AddConnection(endNode, cost);
endNode.AddConnection(startNode, cost);
Int3 dir = connectedNode2.position - connectedNode1.position;
for (int a = 0; a < connectedNode1.GetVertexCount(); a++) {
Int3 va1 = connectedNode1.GetVertex(a);
Int3 va2 = connectedNode1.GetVertex((a+1) % connectedNode1.GetVertexCount());
if (Int3.DotLong((va2-va1).Normal2D(), dir) > 0) continue;
for (int b = 0; b < connectedNode2.GetVertexCount(); b++) {
Int3 vb1 = connectedNode2.GetVertex(b);
Int3 vb2 = connectedNode2.GetVertex((b+1) % connectedNode2.GetVertexCount());
if (Int3.DotLong((vb2-vb1).Normal2D(), dir) < 0) continue;
//Debug.DrawLine ((Vector3)va1, (Vector3)va2, Color.magenta);
//Debug.DrawLine ((Vector3)vb1, (Vector3)vb2, Color.cyan);
//Debug.Break ();
if (Int3.Angle((vb2-vb1), (va2-va1)) > (170.0/360.0f)*Mathf.PI*2) {
float t1 = 0;
float t2 = 1;
t2 = System.Math.Min(t2, VectorMath.ClosestPointOnLineFactor(va1, va2, vb1));
t1 = System.Math.Max(t1, VectorMath.ClosestPointOnLineFactor(va1, va2, vb2));
if (t2 < t1) {
Debug.LogError("Wait wut!? " + t1 + " " + t2 + " " + va1 + " " + va2 + " " + vb1 + " " + vb2+"\nTODO, fix this error");
} else {
Vector3 pa = (Vector3)(va2-va1)*t1 + (Vector3)va1;
Vector3 pb = (Vector3)(va2-va1)*t2 + (Vector3)va1;
startNode.portalA = pa;
startNode.portalB = pb;
endNode.portalA = pb;
endNode.portalB = pa;
//Add connections between nodes, or replace old connections if existing
connectedNode1.AddConnection(startNode, (uint)Mathf.RoundToInt(((Int3)(clamped1 - StartTransform.position)).costMagnitude*costFactor));
connectedNode2.AddConnection(endNode, (uint)Mathf.RoundToInt(((Int3)(clamped2 - EndTransform.position)).costMagnitude*costFactor));
startNode.AddConnection(connectedNode1, (uint)Mathf.RoundToInt(((Int3)(clamped1 - StartTransform.position)).costMagnitude*costFactor));
endNode.AddConnection(connectedNode2, (uint)Mathf.RoundToInt(((Int3)(clamped2 - EndTransform.position)).costMagnitude*costFactor));
return;
}
}
}
}
}
void DrawCircle (Vector3 o, float r, int detail, Color col) {
Vector3 prev = new Vector3(Mathf.Cos(0)*r, 0, Mathf.Sin(0)*r) + o;
Gizmos.color = col;
for (int i = 0; i <= detail; i++) {
float t = (i*Mathf.PI*2f)/detail;
Vector3 c = new Vector3(Mathf.Cos(t)*r, 0, Mathf.Sin(t)*r) + o;
Gizmos.DrawLine(prev, c);
prev = c;
}
}
private readonly static Color GizmosColor = new Color(206.0f/255.0f, 136.0f/255.0f, 48.0f/255.0f, 0.5f);
private readonly static Color GizmosColorSelected = new Color(235.0f/255.0f, 123.0f/255.0f, 32.0f/255.0f, 1.0f);
void DrawGizmoBezier (Vector3 p1, Vector3 p2) {
Vector3 dir = p2-p1;
if (dir == Vector3.zero) return;
Vector3 normal = Vector3.Cross(Vector3.up, dir);
Vector3 normalUp = Vector3.Cross(dir, normal);
normalUp = normalUp.normalized;
normalUp *= dir.magnitude*0.1f;
Vector3 p1c = p1+normalUp;
Vector3 p2c = p2+normalUp;
Vector3 prev = p1;
for (int i = 1; i <= 20; i++) {
float t = i/20.0f;
Vector3 p = AstarSplines.CubicBezier(p1, p1c, p2c, p2, t);
Gizmos.DrawLine(prev, p);
prev = p;
}
}
public virtual void OnDrawGizmosSelected () {
OnDrawGizmos(true);
}
public void OnDrawGizmos () {
OnDrawGizmos(false);
}
public void OnDrawGizmos (bool selected) {
Color col = selected ? GizmosColorSelected : GizmosColor;
if (StartTransform != null) {
DrawCircle(StartTransform.position, 0.4f, 10, col);
}
if (EndTransform != null) {
DrawCircle(EndTransform.position, 0.4f, 10, col);
}
//Gizmos.DrawLine ( transform.position - transform.right*0.5f*portalWidth, transform.position + transform.right*0.5f*portalWidth );
if (StartTransform != null && EndTransform != null) {
Gizmos.color = col;
DrawGizmoBezier(StartTransform.position, EndTransform.position);
if (selected) {
Vector3 cross = Vector3.Cross(Vector3.up, (EndTransform.position-StartTransform.position)).normalized;
DrawGizmoBezier(StartTransform.position+cross*0.1f, EndTransform.position+cross*0.1f);
DrawGizmoBezier(StartTransform.position-cross*0.1f, EndTransform.position-cross*0.1f);
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
    /// <summary>Common Unix errno error codes.</summary>
    internal enum Error
    {
        // These values were defined in src/Native/System.Native/fxerrno.h
        //
        // They compare against values obtained via Interop.Sys.GetLastError() not Marshal.GetLastWin32Error()
        // which obtains the raw errno that varies between unixes. The strong typing as an enum is meant to
        // prevent confusing the two. Casting to or from int is suspect. Use GetLastErrorInfo() if you need to
        // correlate these to the underlying platform values or obtain the corresponding error message.
        //
        SUCCESS = 0,
        E2BIG = 0x10001, // Argument list too long.
        EACCES = 0x10002, // Permission denied.
        EADDRINUSE = 0x10003, // Address in use.
        EADDRNOTAVAIL = 0x10004, // Address not available.
        EAFNOSUPPORT = 0x10005, // Address family not supported.
        EAGAIN = 0x10006, // Resource unavailable, try again (same value as EWOULDBLOCK),
        EALREADY = 0x10007, // Connection already in progress.
        EBADF = 0x10008, // Bad file descriptor.
        EBADMSG = 0x10009, // Bad message.
        EBUSY = 0x1000A, // Device or resource busy.
        ECANCELED = 0x1000B, // Operation canceled.
        ECHILD = 0x1000C, // No child processes.
        ECONNABORTED = 0x1000D, // Connection aborted.
        ECONNREFUSED = 0x1000E, // Connection refused.
        ECONNRESET = 0x1000F, // Connection reset.
        EDEADLK = 0x10010, // Resource deadlock would occur.
        EDESTADDRREQ = 0x10011, // Destination address required.
        EDOM = 0x10012, // Mathematics argument out of domain of function.
        EDQUOT = 0x10013, // Reserved.
        EEXIST = 0x10014, // File exists.
        EFAULT = 0x10015, // Bad address.
        EFBIG = 0x10016, // File too large.
        EHOSTUNREACH = 0x10017, // Host is unreachable.
        EIDRM = 0x10018, // Identifier removed.
        EILSEQ = 0x10019, // Illegal byte sequence.
        EINPROGRESS = 0x1001A, // Operation in progress.
        EINTR = 0x1001B, // Interrupted function.
        EINVAL = 0x1001C, // Invalid argument.
        EIO = 0x1001D, // I/O error.
        EISCONN = 0x1001E, // Socket is connected.
        EISDIR = 0x1001F, // Is a directory.
        ELOOP = 0x10020, // Too many levels of symbolic links.
        EMFILE = 0x10021, // File descriptor value too large.
        EMLINK = 0x10022, // Too many links.
        EMSGSIZE = 0x10023, // Message too large.
        EMULTIHOP = 0x10024, // Reserved.
        ENAMETOOLONG = 0x10025, // Filename too long.
        ENETDOWN = 0x10026, // Network is down.
        ENETRESET = 0x10027, // Connection aborted by network.
        ENETUNREACH = 0x10028, // Network unreachable.
        ENFILE = 0x10029, // Too many files open in system.
        ENOBUFS = 0x1002A, // No buffer space available.
        ENODEV = 0x1002C, // No such device.
        ENOENT = 0x1002D, // No such file or directory.
        ENOEXEC = 0x1002E, // Executable file format error.
        ENOLCK = 0x1002F, // No locks available.
        ENOLINK = 0x10030, // Reserved.
        ENOMEM = 0x10031, // Not enough space.
        ENOMSG = 0x10032, // No message of the desired type.
        ENOPROTOOPT = 0x10033, // Protocol not available.
        ENOSPC = 0x10034, // No space left on device.
        ENOSYS = 0x10037, // Function not supported.
        ENOTCONN = 0x10038, // The socket is not connected.
        ENOTDIR = 0x10039, // Not a directory or a symbolic link to a directory.
        ENOTEMPTY = 0x1003A, // Directory not empty.
        ENOTRECOVERABLE = 0x1003B, // State not recoverable.
        ENOTSOCK = 0x1003C, // Not a socket.
        ENOTSUP = 0x1003D, // Not supported (same value as EOPNOTSUP).
        ENOTTY = 0x1003E, // Inappropriate I/O control operation.
        ENXIO = 0x1003F, // No such device or address.
        EOVERFLOW = 0x10040, // Value too large to be stored in data type.
        EOWNERDEAD = 0x10041, // Previous owner died.
        EPERM = 0x10042, // Operation not permitted.
        EPIPE = 0x10043, // Broken pipe.
        EPROTO = 0x10044, // Protocol error.
        EPROTONOSUPPORT = 0x10045, // Protocol not supported.
        EPROTOTYPE = 0x10046, // Protocol wrong type for socket.
        ERANGE = 0x10047, // Result too large.
        EROFS = 0x10048, // Read-only file system.
        ESPIPE = 0x10049, // Invalid seek.
        ESRCH = 0x1004A, // No such process.
        ESTALE = 0x1004B, // Reserved.
        ETIMEDOUT = 0x1004D, // Connection timed out.
        ETXTBSY = 0x1004E, // Text file busy.
        EXDEV = 0x1004F, // Cross-device link.
        ESOCKTNOSUPPORT = 0x1005E, // Socket type not supported.
        EPFNOSUPPORT = 0x10060, // Protocol family not supported.
        ESHUTDOWN = 0x1006C, // Socket shutdown.
        EHOSTDOWN = 0x10070, // Host is down.
        ENODATA = 0x10071, // No data available.
        // Custom Error codes to track errors beyond kernel interface.
        EHOSTNOTFOUND = 0x20001, // Name lookup failed
        // POSIX permits these to have the same value and we make them always equal so
        // that CoreFX cannot introduce a dependency on distinguishing between them that
        // would not work on all platforms.
        EOPNOTSUPP = ENOTSUP, // Operation not supported on socket.
        EWOULDBLOCK = EAGAIN, // Operation would block.
    }
    // Represents a platform-agnostic Error and underlying platform-specific errno
    internal struct ErrorInfo
    {
        private Error _error;
        // Raw platform errno; -1 means "not yet computed" and is resolved lazily in RawErrno
        private int _rawErrno;
        /// <summary>Constructs an ErrorInfo from a raw platform-specific errno, converting it to the PAL Error.</summary>
        internal ErrorInfo(int errno)
        {
            _error = Interop.Sys.ConvertErrorPlatformToPal(errno);
            _rawErrno = errno;
        }
        /// <summary>Constructs an ErrorInfo from a PAL Error; the raw errno is computed lazily on first access.</summary>
        internal ErrorInfo(Error error)
        {
            _error = error;
            _rawErrno = -1;
        }
        /// <summary>The platform-agnostic error code.</summary>
        internal Error Error
        {
            get { return _error; }
        }
        /// <summary>The platform-specific errno, converted (and cached) from Error on first access if needed.</summary>
        internal int RawErrno
        {
            get { return _rawErrno == -1 ? (_rawErrno = Interop.Sys.ConvertErrorPalToPlatform(_error)) : _rawErrno; }
        }
        /// <summary>Returns the platform's error message text for this error (strerror).</summary>
        internal string GetErrorMessage()
        {
            return Interop.Sys.StrError(RawErrno);
        }
        public override string ToString()
        {
            return $"RawErrno: {RawErrno} Error: {Error} GetErrorMessage: {GetErrorMessage()}"; // No localization required; text is member names used for debugging purposes
        }
    }
    internal partial class Sys
    {
        /// <summary>Gets the last error as a platform-agnostic Error. Prefer GetLastErrorInfo() if the raw errno or message is also needed.</summary>
        internal static Error GetLastError()
        {
            return ConvertErrorPlatformToPal(Marshal.GetLastWin32Error());
        }
        /// <summary>Gets the last error along with its raw platform errno.</summary>
        internal static ErrorInfo GetLastErrorInfo()
        {
            return new ErrorInfo(Marshal.GetLastWin32Error());
        }
        /// <summary>Returns the error message for a raw platform errno via the native strerror_r shim.</summary>
        internal static unsafe string StrError(int platformErrno)
        {
            int maxBufferLength = 1024; // should be long enough for most any UNIX error
            byte* buffer = stackalloc byte[maxBufferLength];
            // Returns either a pointer into a static platform string or into our buffer
            byte* message = StrErrorR(platformErrno, buffer, maxBufferLength);
            if (message == null)
            {
                // This means the buffer was not large enough, but still contains
                // as much of the error message as possible and is guaranteed to
                // be null-terminated. We're not currently resizing/retrying because
                // maxBufferLength is large enough in practice, but we could do
                // so here in the future if necessary.
                message = buffer;
            }
            return Marshal.PtrToStringAnsi((IntPtr)message);
        }
        [DllImport(Libraries.SystemNative, EntryPoint = "SystemNative_ConvertErrorPlatformToPal")]
        internal static extern Error ConvertErrorPlatformToPal(int platformErrno);
        [DllImport(Libraries.SystemNative, EntryPoint = "SystemNative_ConvertErrorPalToPlatform")]
        internal static extern int ConvertErrorPalToPlatform(Error error);
        [DllImport(Libraries.SystemNative, EntryPoint = "SystemNative_StrErrorR")]
        private static extern unsafe byte* StrErrorR(int platformErrno, byte* buffer, int bufferSize);
    }
}
// NOTE: extension method can't be nested inside Interop class.
internal static class InteropErrorExtensions
{
    // Intended usage is e.g. Interop.Error.EFAIL.Info() for brevity
    // vs. new Interop.ErrorInfo(Interop.Error.EFAIL) for synthesizing
    // errors. Errors originated from the system should be obtained
    // via GetLastErrorInfo(), not GetLastError().Info() as that will
    // convert twice, which is not only inefficient but also lossy if
    // we ever encounter a raw errno that has no equivalent in the
    // Error enum.
    public static Interop.ErrorInfo Info(this Interop.Error error) =>
        new Interop.ErrorInfo(error);
}
| |
// Copyright (c) 2014 SIL International
// This software is licensed under the MIT License (http://opensource.org/licenses/MIT)
using System;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Windows.Forms;
using SIL.IO;
using SIL.Reporting;
using SIL.Windows.Forms.HtmlBrowser;
namespace SIL.Windows.Forms.GeckoBrowserAdapter
{
/// <summary>
/// This class is an adapter for GeckoFx' GeckoWebBrowser class. It is used by
/// SIL.Windows.Forms.HtmlBrowser.XWebBrowser.
///
/// Clients should NOT use this class directly. Instead they should use the XWebBrowser
/// class (or Gecko.GeckoWebBrowser if they need GeckoFx functionality).
/// </summary>
/// <remarks> The assembly containing this class gets loaded dynamically so
/// that we don't have a dependency on GeckoFx unless we want to use it.</remarks>
class GeckoFxWebBrowserAdapter: IWebBrowser
{
private const string GeckoBrowserType = "Gecko.GeckoWebBrowser";
private const string XpcomType = "Gecko.Xpcom";
private readonly Control _webBrowser;
internal static Assembly GeckoCoreAssembly;
internal static Assembly GeckoWinAssembly;
public GeckoFxWebBrowserAdapter(Control parent)
{
LoadGeckoAssemblies();
SetUpXulRunner();
_webBrowser = InstantiateGeckoWebBrowser();
parent.Controls.Add(_webBrowser);
var callbacks = parent as IWebBrowserCallbacks;
AddEventHandler(_webBrowser, "CanGoBackChanged", (sender, e) => callbacks.OnCanGoBackChanged(e));
AddEventHandler(_webBrowser, "CanGoForwardChanged", (sender, e) => callbacks.OnCanGoForwardChanged(e));
AddEventHandler(_webBrowser, "DocumentTitleChanged", (sender, e) => callbacks.OnDocumentTitleChanged(e));
AddEventHandler(_webBrowser, "StatusTextChanged", (sender, e) => callbacks.OnStatusTextChanged(e));
AddGeckoDefinedEventHandler(_webBrowser, "DocumentCompleted", "DocumentCompletedHandler");
AddGeckoDefinedEventHandler(_webBrowser, "Navigated", "NavigatedHandler");
AddGeckoDefinedEventHandler(_webBrowser, "Navigating", "NavigatingHandler");
AddGeckoDefinedEventHandler(_webBrowser, "CreateWindow2", "CreateWindow2Handler");
AddGeckoDefinedEventHandler(_webBrowser, "ProgressChanged", "WebProgressHandler");
AddGeckoDefinedEventHandler(_webBrowser, "DomClick", "DomClickHandler");
}
private static int XulRunnerVersion
{
get
{
var geckofx = GeckoCoreAssembly;
if (geckofx == null)
return 0;
var versionAttribute = geckofx.GetCustomAttributes(typeof(AssemblyFileVersionAttribute), true)
.FirstOrDefault() as AssemblyFileVersionAttribute;
return versionAttribute == null ? 0 : new Version(versionAttribute.Version).Major;
}
}
private static void SetUpXulRunner()
{
if (IsXpcomInitialized())
return;
string xulRunnerPath = Environment.GetEnvironmentVariable("XULRUNNER");
if (!Directory.Exists(xulRunnerPath))
{
xulRunnerPath = Path.Combine(FileLocationUtilities.DirectoryOfApplicationOrSolution, "xulrunner");
if (!Directory.Exists(xulRunnerPath))
{
// Gecko 45
xulRunnerPath = Path.Combine(FileLocationUtilities.DirectoryOfApplicationOrSolution,
Path.Combine("Firefox"));
if (!Directory.Exists(xulRunnerPath))
{
//if this is a programmer, go look in the lib directory
xulRunnerPath = Path.Combine(FileLocationUtilities.DirectoryOfApplicationOrSolution,
Path.Combine("lib", "xulrunner"));
}
//on my build machine, I really like to have the dir labelled with the version.
//but it's a hassle to update all the other parts (installer, build machine) with this number,
//so we only use it if we don't find the unnumbered alternative.
if (!Directory.Exists(xulRunnerPath))
{
xulRunnerPath = Path.Combine(FileLocationUtilities.DirectoryOfApplicationOrSolution,
Path.Combine("lib", "xulrunner" + XulRunnerVersion));
}
if (!Directory.Exists(xulRunnerPath))
{
throw new ConfigurationException(
"Can't find the directory where xulrunner (version {0}) is installed",
XulRunnerVersion);
}
}
}
InitializeXpcom(xulRunnerPath);
Application.ApplicationExit += OnApplicationExit;
}
#region Reflective methods for handling Gecko in a version agnostic way
private Uri GetGeckoNavigatedEventArgsUri(object eventArg)
{
var eventType = GeckoWinAssembly.GetType("Gecko.GeckoNavigatedEventArgs");
return GetUriValue(eventArg, eventType);
}
private Uri GetGeckoNavigatingEventArgsUri(object eventArg)
{
var eventType = GeckoCoreAssembly.GetType("Gecko.Events.GeckoNavigatingEventArgs") ??
GeckoWinAssembly.GetType("Gecko.GeckoNavigatingEventArgs"); //Try new ns then old ns
return GetUriValue(eventArg, eventType);
}
private static Uri GetUriValue(object eventArg, Type eventType)
{
var uriField = eventType.GetField("Uri");
return uriField.GetValue(eventArg) as Uri;
}
private void SetCancelEventArgsCancel(EventArgs eventArg, bool cancelValue)
{
var cancelArgs = eventArg as CancelEventArgs;
if(cancelArgs != null)
{
cancelArgs.Cancel = cancelValue;
}
}
private Uri GetBrowserUrl(object webBrowser)
{
return GetBrowserProperty<Uri>(webBrowser, "Url");
}
/// <summary>
/// This method will reflectively add a locally defined handler to an event of the
/// GeckoWebBrowser. If the type of the EventHandler or EventArg is defined in gecko
/// then the <code>AddGeckoDefinedEventhandler</code> must be used.
/// </summary>
private void AddEventHandler(Control webBrowser, string eventName, EventHandler action)
{
var webBrowserType = GeckoWinAssembly.GetType(GeckoBrowserType);
var browserEvent = webBrowserType.GetEvent(eventName);
browserEvent.AddEventHandler(webBrowser, action);
}
/// <summary>
/// This method will reflectively add a locally defined handler to an event defined in the GeckoWebBrowser.
/// This method will look up all the event types reflectively and can be used even when the EventArgs or
/// EventHandler types are defined in the gecko assembly.
/// </summary>
private void AddGeckoDefinedEventHandler(Control webBrowser, string eventName, string handlerName)
{
var webBrowserType = GeckoWinAssembly.GetType(GeckoBrowserType);
var browserEvent = webBrowserType.GetEvent(eventName);
if (browserEvent == null)
{
switch (eventName)
{
// CreateWindow2 is marked obsolete as far back as Geckofx29: [Obsolete("Merged to CreateWindow event, just use it")]
// It no longer exists at all in Geckofx60.
// But it is needed/used in Geckofx14 which we supposedly still support in this flexible code...
case "CreateWindow2":
browserEvent = webBrowserType.GetEvent("CreateWindow");
break;
default:
// let events take their course, so to speak... (crash upcoming)
break;
}
}
var eventArgsType = browserEvent.EventHandlerType;
var methodInfo = GetType().GetMethod(handlerName, BindingFlags.NonPublic | BindingFlags.Instance);
var docCompletedDelegate = Delegate.CreateDelegate(eventArgsType, this, methodInfo);
var addEventMethod = browserEvent.GetAddMethod();
addEventMethod.Invoke(webBrowser, new object[] { docCompletedDelegate });
}
// ReSharper disable UnusedMember.Local
// these Handlers are all used by reflection
private void WebProgressHandler(object sender, EventArgs e)
{
var geckoProgressArgsType = GeckoWinAssembly.GetType("Gecko.GeckoProgressEventArgs");
var currentProgressProp = geckoProgressArgsType.GetField("CurrentProgress");
var currentProgressVal = currentProgressProp.GetValue(e);
var maxProgressProp = geckoProgressArgsType.GetField("MaximumProgress");
var maxProgressVal = maxProgressProp.GetValue(e);
var callbacks = _webBrowser.Parent as IWebBrowserCallbacks;
callbacks.OnProgressChanged(new WebBrowserProgressChangedEventArgs((long)currentProgressVal, (long)maxProgressVal));
}
private void NavigatedHandler(object sender, EventArgs args)
{
var callbacks = _webBrowser.Parent as IWebBrowserCallbacks;
callbacks.OnNavigated(new WebBrowserNavigatedEventArgs(GetGeckoNavigatedEventArgsUri(args)));
}
private void DocumentCompletedHandler(object sender, EventArgs args)
{
var callbacks = _webBrowser.Parent as IWebBrowserCallbacks;
callbacks.OnDocumentCompleted(new WebBrowserDocumentCompletedEventArgs(GetBrowserUrl(_webBrowser)));
}
private void NavigatingHandler(object sender, EventArgs args)
{
var callbacks = _webBrowser.Parent as IWebBrowserCallbacks;
var ev = new WebBrowserNavigatingEventArgs(GetGeckoNavigatingEventArgsUri(args), string.Empty);
callbacks.OnNavigating(ev);
SetCancelEventArgsCancel(args, ev.Cancel);
}
private void CreateWindow2Handler(object sender, EventArgs args)
{
var callbacks = _webBrowser.Parent as IWebBrowserCallbacks;
var ev = new CancelEventArgs();
callbacks.OnNewWindow(ev);
SetCancelEventArgsCancel(args, ev.Cancel);
}
// ReSharper restore UnusedMember.Local
/// <summary>
/// Reflectively construct a GeckoWebBrowser and set the Dock property.
/// </summary>
/// <returns>a reflectively created GeckoWebBrowser as a Control</returns>
private Control InstantiateGeckoWebBrowser()
{
var browserType = GeckoWinAssembly.GetType(GeckoBrowserType);
var constructor = browserType.GetConstructor(new Type[] { });
var dockProp = browserType.GetProperty("Dock");
var geckoWebBrowser = constructor.Invoke(new object[] { });
dockProp.SetValue(geckoWebBrowser, DockStyle.Fill, BindingFlags.Default, null, null, null);
return geckoWebBrowser as Control;
}
/// <summary>
/// Attempt to load GeckoAssemblies from the programs running environment. Try and load modern gecko dlls which have
/// no version number in the filenames then fallback to trying to load geckofx 14.
/// </summary>
private static void LoadGeckoAssemblies()
{
if (GeckoCoreAssembly != null && GeckoWinAssembly != null)
return;
try
{
try
{
GeckoCoreAssembly = Assembly.Load("Geckofx-Core");
}
catch(FileNotFoundException)
{
//Fallback to geckofx version 14 name
GeckoCoreAssembly = Assembly.LoadFrom("geckofx-core-14.dll");
}
try
{
GeckoWinAssembly = Assembly.Load("Geckofx-Winforms");
}
catch(FileNotFoundException)
{
//Fallback to geckofx version 14 name
GeckoWinAssembly = Assembly.LoadFrom("Geckofx-Winforms-14.dll");
}
}
catch(Exception e)
{
MessageBox.Show("Unable to load geckofx dependancy. Files may not have been included in the build.",
"Failed to load geckofx", MessageBoxButtons.OK, MessageBoxIcon.Error);
throw new ApplicationException("Unable to load geckofx dependancy", e);
}
}
private T GetBrowserProperty<T>(object webBrowser, string propertyName)
{
var webBrowserType = GeckoWinAssembly.GetType(GeckoBrowserType);
var property = webBrowserType.GetProperty(propertyName, typeof(T));
return (T)property.GetValue(webBrowser, BindingFlags.Default, null, null, null);
}
private void SetBrowserProperty<T>(object webBrowser, string propertyName, T propertyValue)
{
var webBrowserType = GeckoWinAssembly.GetType(GeckoBrowserType);
var property = webBrowserType.GetProperty(propertyName, typeof(T));
property.SetValue(webBrowser, propertyValue, BindingFlags.Default, null, null, null);
}
/// <summary>
/// Call a browser method which returns a specific type.
/// Looks up the method name by reflection and calls that method
/// on the given webbrowser instance and return the value.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="webBrowser"></param>
/// <param name="methodName"></param>
/// <returns></returns>
private T CallBrowserMethod<T>(object webBrowser, string methodName)
{
var webBrowserType = GeckoWinAssembly.GetType(GeckoBrowserType);
var method = webBrowserType.GetMethod(methodName);
return (T)method.Invoke(webBrowser, BindingFlags.Default, null, null, null);
}
private static bool IsXpcomInitialized()
{
var xpcomType = GeckoCoreAssembly.GetType(XpcomType);
var initProp = xpcomType.GetProperty("IsInitialized");
var initialized = initProp.GetValue(null, BindingFlags.Static, null, null, null);
return (bool)initialized;
}
private static void InitializeXpcom(string xulRunnerPath)
{
var xpcomType = GeckoCoreAssembly.GetType(XpcomType);
var initMethod = xpcomType.GetMethod("Initialize", new [] { typeof(string) });
initMethod.Invoke(null, new object[] {xulRunnerPath});
}
private static void ShutdownXpcom()
{
var xpcomType = GeckoCoreAssembly.GetType(XpcomType);
var initMethod = xpcomType.GetMethod("Shutdown");
initMethod.Invoke(null, null);
}
/// <summary>
/// Look up a method name from the browser that matches the method name and
/// the type of the parameters given and then call that on the given webbrowser
/// instance.
/// </summary>
private bool CallBrowserMethod(object webBrowser, string methodName, object[] parameters)
{
var webBrowserType = GeckoWinAssembly.GetType(GeckoBrowserType);
var types = new Type[parameters == null ? 0 : parameters.Length];
for (var i = 0; parameters != null && i < parameters.Length; ++i)
{
types[i] = parameters[i].GetType();
}
var method = webBrowserType.GetMethod(methodName, types);
if(method == null)
{
return false;
}
method.Invoke(webBrowser, parameters);
return true;
}
#endregion
private static void OnApplicationExit(object sender, EventArgs e)
{
// We come here iff we initialized Xpcom. In that case we want to call shutdown,
// otherwise the app might not exit properly.
if (IsXpcomInitialized())
ShutdownXpcom();
Application.ApplicationExit -= OnApplicationExit;
}
private void DomClickHandler(object sender, EventArgs e)
{
var callbacks = _webBrowser.Parent as IWebBrowserCallbacks;
callbacks.OnDomClick(e);
}
internal void SetClass(object target, string val)
{
if (target == null)
return;
// target.SetAttribute("class", "selected")
var elementType = GeckoCoreAssembly.GetType("Gecko.GeckoHtmlElement");
var setAttrMethod = elementType.GetMethod("SetAttribute");
setAttrMethod.Invoke(target, new object[] {"class", val});
}
#region IWebBrowser Members
/// <summary>
/// Rather then adding more reflective methods just handle this property here at the adapter level.
/// </summary>
public bool AllowNavigation { get; set; }
/// <summary>
/// TODO: If this is necessary for the GeckoBrowser we need to figure out an implementation
/// </summary>
public bool AllowWebBrowserDrop { get; set; }
public bool CanGoBack
{
get { return AllowNavigation && GetBrowserProperty<bool>(_webBrowser, "CanGoBack"); }
}
public bool CanGoForward
{
get { return AllowNavigation && GetBrowserProperty<bool>(_webBrowser, "CanGoForward"); }
}
public void Dispose()
{
CallBrowserMethod(_webBrowser, "Dispose", null);
// Call GC.SupressFinalize to take this object off the finalization queue
// and prevent finalization code for this object from executing a second time.
GC.SuppressFinalize(this);
}
public string DocumentText
{
set
{
// we used to use LoadContent and fall back to LoadHtml if that method didn't
// work. However, I (EB) couldn't get LoadContent to work, so we now always use
// LoadHtml which should work in most cases unless it is a complex HTML page.
CallBrowserMethod(_webBrowser, "LoadHtml", new object[] { value });
}
}
public string DocumentTitle
{
get { return GetBrowserProperty<string>(_webBrowser, "DocumentTitle"); }
}
public bool Focused
{
get { return _webBrowser.Focused; }
}
public bool IsBusy
{
get { return GetBrowserProperty<bool>(_webBrowser, "IsBusy"); }
}
public bool IsWebBrowserContextMenuEnabled
{
get { return !GetBrowserProperty<bool>(_webBrowser, "NoDefaultContextMenu"); }
set { SetBrowserProperty(_webBrowser, "NoDefaultContextMenu", !value); }
}
public string StatusText
{
get { return GetBrowserProperty<string>(_webBrowser, "StatusText"); }
}
public Uri Url
{
get { return GetBrowserUrl(_webBrowser); }
set { CallBrowserMethod(_webBrowser, "Navigate", new object[] { value.OriginalString }); }
}
public bool GoBack()
{
if(AllowNavigation)
return CallBrowserMethod<bool>(_webBrowser, "GoBack");
return false;
}
public bool GoForward()
{
if(AllowNavigation)
return CallBrowserMethod<bool>(_webBrowser, "GoForward");
return false;
}
public void Navigate(string urlString)
{
CallBrowserMethod(_webBrowser, "Navigate", new object[] { urlString });
}
public void Navigate(Uri url)
{
Navigate(url.AbsoluteUri);
}
public void Refresh()
{
_webBrowser.Refresh();
}
public void Refresh(WebBrowserRefreshOption opt)
{
_webBrowser.Refresh();
}
public void Stop()
{
CallBrowserMethod(_webBrowser, "Stop", new object[] {});
}
/// <summary>
/// Scrolls the last child node of the document body into view.
/// All GeckoFx access goes through reflection because the Gecko assemblies are
/// loaded dynamically; if the document or body is not available this is a no-op.
/// </summary>
public void ScrollLastElementIntoView()
{
    // Look up the Gecko types by name from the dynamically loaded assemblies.
    var geckoDocumentType = GeckoCoreAssembly.GetType("Gecko.GeckoDocument");
    var geckoHtmlElementType = GeckoCoreAssembly.GetType("Gecko.GeckoHtmlElement");
    var geckoNodeListType = GeckoCoreAssembly.GetType("Gecko.GeckoNodeCollection");
    var webBrowserType = GeckoWinAssembly.GetType(GeckoBrowserType);
    var documentProperty = webBrowserType.GetProperty("Document", geckoDocumentType);
    var document = documentProperty.GetValue(_webBrowser, BindingFlags.Default, null, null, null);
    if(document != null)
    {
        var bodyProperty = geckoDocumentType.GetProperty("Body", geckoHtmlElementType);
        var body = bodyProperty.GetValue(document, BindingFlags.Default, null, null, null);
        if(body != null)
        {
            var childrenProperty = geckoHtmlElementType.GetProperty("ChildNodes", geckoNodeListType);
            var children = childrenProperty.GetValue(body, BindingFlags.Default, null, null, null);
            // The "Length" property's declared type changed between GeckoFx
            // versions, so probe for both signatures.
            var countLengthProp = geckoNodeListType.GetProperty("Length", typeof(uint)); // GeckoFx 60+
            if (countLengthProp == null)
            {
                countLengthProp = geckoNodeListType.GetProperty("Length", typeof(int)); // GeckoFx 45 used an int for this property
            }
            var countLength = Convert.ToInt32(countLengthProp.GetValue(children, BindingFlags.Default, null, null, null));
            if(countLength > 0)
            {
                // "Item" is presumably the indexer property on the node collection — TODO confirm.
                var lastChildProp = geckoNodeListType.GetProperty("Item");
                var lastchild = lastChildProp.GetValue(children, new object[] { countLength - 1 });
                var scrollIntoView = geckoHtmlElementType.GetMethod("ScrollIntoView");
                scrollIntoView.Invoke(lastchild, BindingFlags.Default, null, null, null);
            }
        }
    }
}
/// <summary>The wrapped Gecko browser control, exposed as an opaque object.</summary>
public object NativeBrowser => _webBrowser;
/// <summary>
/// Auto-property only: the value is stored but not applied anywhere in the
/// code visible here.
/// TODO: If Gecko browsers have keyboard shortcuts, write some code to enable/disable them here.
/// </summary>
public bool WebBrowserShortcutsEnabled { get; set; }
#endregion
}
}
| |
using Lucene.Net.Documents;
using Lucene.Net.Util;
using NUnit.Framework;
namespace Lucene.Net.Search
{
using Directory = Lucene.Net.Store.Directory;
using Document = Documents.Document;
using Field = Field;
using IndexReader = Lucene.Net.Index.IndexReader;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
using SpanFirstQuery = Lucene.Net.Search.Spans.SpanFirstQuery;
using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
using SpanNotQuery = Lucene.Net.Search.Spans.SpanNotQuery;
using SpanOrQuery = Lucene.Net.Search.Spans.SpanOrQuery;
using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
using Term = Lucene.Net.Index.Term;
/// <summary>
/// Tests primitive queries (ie: queries that rewrite to themselves) to
/// ensure they match the expected set of docs, and that the score of each
/// match is equal to the value of the score's explanation.
/// <para>
/// The assumption is that if all of the "primitive" queries work well,
/// then anything that rewrites to a primitive will work well also.
/// </para>
/// </summary>
/// <remarks>
/// Subclasses contain the actual tests; this base class only supplies the
/// shared index fixture and the query-building helper "macros".
/// </remarks>
[TestFixture]
public class TestExplanations : LuceneTestCaseWithReducedFloatPrecision
{
    // Shared static fixture state: created once in BeforeClassTestExplanations
    // and released in AfterClassTestExplanations.
    protected internal static IndexSearcher Searcher;
    protected internal static IndexReader Reader;
    protected internal static Directory Directory;
    // Field holding each document's ordinal; used as the key by ItemizedFilter.
    public const string KEY = "KEY";
    // boost on this field is the same as the iterator for the doc
    public const string FIELD = "field";
    // same contents, but no field boost
    public const string ALTFIELD = "alt";
    /// <summary>Tears down the shared searcher, reader and directory after all tests run.</summary>
    [TestFixtureTearDown]
    public static void AfterClassTestExplanations()
    {
        Searcher = null;
        Reader.Dispose();
        Reader = null;
        Directory.Dispose();
        Directory = null;
    }
    /// <summary>
    /// Builds the shared index: one document per entry of <see cref="DocFields"/>,
    /// with the doc's ordinal stored in <see cref="KEY"/> and its text in both
    /// <see cref="FIELD"/> (boosted by the ordinal) and <see cref="ALTFIELD"/> (unboosted).
    /// </summary>
    [TestFixtureSetUp]
    public static void BeforeClassTestExplanations()
    {
        Directory = NewDirectory();
        RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
        for (int i = 0; i < DocFields.Length; i++)
        {
            Document doc = new Document();
            doc.Add(NewStringField(KEY, "" + i, Field.Store.NO));
            Field f = NewTextField(FIELD, DocFields[i], Field.Store.NO);
            f.Boost = i;
            doc.Add(f);
            doc.Add(NewTextField(ALTFIELD, DocFields[i], Field.Store.NO));
            writer.AddDocument(doc);
        }
        Reader = writer.Reader;
        writer.Dispose();
        Searcher = NewSearcher(Reader);
    }
    // Document contents; document i is indexed with DocFields[i].
    protected internal static readonly string[] DocFields = new string[] { "w1 w2 w3 w4 w5", "w1 w3 w2 w3 zz", "w1 xx w2 yy w3", "w1 w3 xx w2 yy w3 zz" };
    /// <summary>
    /// check the expDocNrs first, then check the query (and the explanations) </summary>
    public virtual void Qtest(Query q, int[] expDocNrs)
    {
        CheckHits.CheckHitCollector(Random(), q, FIELD, Searcher, expDocNrs);
    }
    /// <summary>
    /// Tests a query using <see cref="Qtest"/> after wrapping it with both
    /// <see cref="OptB"/> and <see cref="ReqB"/>. </summary>
    /// <seealso cref="Qtest"/>
    /// <seealso cref="ReqB"/>
    /// <seealso cref="OptB"/>
    public virtual void Bqtest(Query q, int[] expDocNrs)
    {
        Qtest(ReqB(q), expDocNrs);
        Qtest(OptB(q), expDocNrs);
    }
    /// <summary>
    /// Convenience subclass of FieldCacheTermsFilter: filters on the KEY field
    /// (or a named field) by a set of integer keys converted to strings.
    /// </summary>
    public class ItemizedFilter : FieldCacheTermsFilter
    {
        // Converts each integer key to its decimal string form.
        internal static string[] Int2str(int[] terms)
        {
            string[] @out = new string[terms.Length];
            for (int i = 0; i < terms.Length; i++)
            {
                @out[i] = "" + terms[i];
            }
            return @out;
        }
        public ItemizedFilter(string keyField, int[] keys)
            : base(keyField, Int2str(keys))
        {
        }
        public ItemizedFilter(int[] keys)
            : base(KEY, Int2str(keys))
        {
        }
    }
    /// <summary>
    /// helper for generating MultiPhraseQueries: one Term per string, all in FIELD </summary>
    public static Term[] Ta(string[] s)
    {
        Term[] t = new Term[s.Length];
        for (int i = 0; i < s.Length; i++)
        {
            t[i] = new Term(FIELD, s[i]);
        }
        return t;
    }
    /// <summary>
    /// MACRO for SpanTermQuery </summary>
    public virtual SpanTermQuery St(string s)
    {
        return new SpanTermQuery(new Term(FIELD, s));
    }
    /// <summary>
    /// MACRO for SpanNotQuery </summary>
    public virtual SpanNotQuery Snot(SpanQuery i, SpanQuery e)
    {
        return new SpanNotQuery(i, e);
    }
    /// <summary>
    /// MACRO for SpanOrQuery containing two SpanTerm queries </summary>
    public virtual SpanOrQuery Sor(string s, string e)
    {
        return Sor(St(s), St(e));
    }
    /// <summary>
    /// MACRO for SpanOrQuery containing two SpanQueries </summary>
    public virtual SpanOrQuery Sor(SpanQuery s, SpanQuery e)
    {
        return new SpanOrQuery(s, e);
    }
    /// <summary>
    /// MACRO for SpanOrQuery containing three SpanTerm queries </summary>
    public virtual SpanOrQuery Sor(string s, string m, string e)
    {
        return Sor(St(s), St(m), St(e));
    }
    /// <summary>
    /// MACRO for SpanOrQuery containing three SpanQueries </summary>
    public virtual SpanOrQuery Sor(SpanQuery s, SpanQuery m, SpanQuery e)
    {
        return new SpanOrQuery(s, m, e);
    }
    /// <summary>
    /// MACRO for SpanNearQuery containing two SpanTerm queries </summary>
    public virtual SpanNearQuery Snear(string s, string e, int slop, bool inOrder)
    {
        return Snear(St(s), St(e), slop, inOrder);
    }
    /// <summary>
    /// MACRO for SpanNearQuery containing two SpanQueries </summary>
    public virtual SpanNearQuery Snear(SpanQuery s, SpanQuery e, int slop, bool inOrder)
    {
        return new SpanNearQuery(new SpanQuery[] { s, e }, slop, inOrder);
    }
    /// <summary>
    /// MACRO for SpanNearQuery containing three SpanTerm queries </summary>
    public virtual SpanNearQuery Snear(string s, string m, string e, int slop, bool inOrder)
    {
        return Snear(St(s), St(m), St(e), slop, inOrder);
    }
    /// <summary>
    /// MACRO for SpanNearQuery containing three SpanQueries </summary>
    public virtual SpanNearQuery Snear(SpanQuery s, SpanQuery m, SpanQuery e, int slop, bool inOrder)
    {
        return new SpanNearQuery(new SpanQuery[] { s, m, e }, slop, inOrder);
    }
    /// <summary>
    /// MACRO for SpanFirst(SpanTermQuery) </summary>
    public virtual SpanFirstQuery Sf(string s, int b)
    {
        return new SpanFirstQuery(St(s), b);
    }
    /// <summary>
    /// MACRO: Wraps a Query in a BooleanQuery so that it is optional, along
    /// with a second prohibited clause which will never match anything
    /// </summary>
    public virtual Query OptB(Query q)
    {
        BooleanQuery bq = new BooleanQuery(true);
        bq.Add(q, BooleanClause.Occur.SHOULD);
        bq.Add(new TermQuery(new Term("NEVER", "MATCH")), BooleanClause.Occur.MUST_NOT);
        return bq;
    }
    /// <summary>
    /// MACRO: Wraps a Query in a BooleanQuery so that it is required, along
    /// with a second optional clause which will match everything
    /// </summary>
    public virtual Query ReqB(Query q)
    {
        BooleanQuery bq = new BooleanQuery(true);
        bq.Add(q, BooleanClause.Occur.MUST);
        bq.Add(new TermQuery(new Term(FIELD, "w1")), BooleanClause.Occur.SHOULD);
        return bq;
    }
    /// <summary>
    /// Placeholder: JUnit freaks if you don't have one test ... making
    /// class abstract doesn't help
    /// </summary>
    [Test]
    public virtual void TestNoop()
    {
        /* NOOP */
    }
}
}
| |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Management.Automation;
using Microsoft.Azure.Commands.RecoveryServices.SiteRecovery.Properties;
using Microsoft.Azure.Management.RecoveryServices.SiteRecovery.Models;
namespace Microsoft.Azure.Commands.RecoveryServices.SiteRecovery
{
/// <summary>
/// Used to initiate a Test failover operation.
/// </summary>
[Cmdlet(
    VerbsLifecycle.Start,
    "AzureRmRecoveryServicesAsrTestFailoverJob",
    DefaultParameterSetName = ASRParameterSets.ByRPIObject,
    SupportsShouldProcess = true)]
[Alias(
    "Start-ASRTFO",
    "Start-ASRTestFailoverJob")]
[OutputType(typeof(ASRJob))]
public class StartAzureRmRecoveryServicesAsrTestFailoverJob : SiteRecoveryCmdletBase
{
    /// <summary>
    /// Gets or sets Recovery Plan object.
    /// </summary>
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPObject,
        Mandatory = true,
        ValueFromPipeline = true)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPObjectWithVMNetwork,
        Mandatory = true,
        ValueFromPipeline = true)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPObjectWithAzureVMNetworkId,
        Mandatory = true,
        ValueFromPipeline = true)]
    [ValidateNotNullOrEmpty]
    public ASRRecoveryPlan RecoveryPlan { get; set; }

    /// <summary>
    /// Gets or sets Replication Protected Item.
    /// </summary>
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPIObject,
        Mandatory = true,
        ValueFromPipeline = true)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPIObjectWithVMNetwork,
        Mandatory = true,
        ValueFromPipeline = true)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPIObjectWithAzureVMNetworkId,
        Mandatory = true,
        ValueFromPipeline = true)]
    [ValidateNotNullOrEmpty]
    public ASRReplicationProtectedItem ReplicationProtectedItem { get; set; }

    /// <summary>
    /// Gets or sets failover direction for the recovery plan.
    /// </summary>
    [Parameter(Mandatory = true)]
    [ValidateSet(
        Constants.PrimaryToRecovery,
        Constants.RecoveryToPrimary)]
    public string Direction { get; set; }

    /// <summary>
    /// Gets or sets Network.
    /// </summary>
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPObjectWithVMNetwork,
        Mandatory = true)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPIObjectWithVMNetwork,
        Mandatory = true)]
    public ASRNetwork VMNetwork { get; set; }

    /// <summary>
    /// Gets or sets Network.
    /// </summary>
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPObjectWithAzureVMNetworkId,
        Mandatory = true)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPIObjectWithAzureVMNetworkId,
        Mandatory = true)]
    public string AzureVMNetworkId { get; set; }

    /// <summary>
    /// Gets or sets Data encryption certificate file path for failover of Protected Item.
    /// </summary>
    [Parameter]
    [ValidateNotNullOrEmpty]
    public string DataEncryptionPrimaryCertFile { get; set; }

    /// <summary>
    /// Gets or sets Data encryption certificate file path for failover of Protected Item.
    /// </summary>
    [Parameter]
    [ValidateNotNullOrEmpty]
    public string DataEncryptionSecondaryCertFile { get; set; }

    /// <summary>
    /// Gets or sets Recovery Point object.
    /// </summary>
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPIObject,
        Mandatory = false)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPIObjectWithVMNetwork,
        Mandatory = false)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPIObjectWithAzureVMNetworkId,
        Mandatory = false)]
    [ValidateNotNullOrEmpty]
    public ASRRecoveryPoint RecoveryPoint { get; set; }

    /// <summary>
    /// Gets or sets Recovery Tag for the Recovery Point Type.
    /// </summary>
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPObject,
        Mandatory = false)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPObjectWithVMNetwork,
        Mandatory = false)]
    [Parameter(
        ParameterSetName = ASRParameterSets.ByRPObjectWithAzureVMNetworkId,
        Mandatory = false)]
    [ValidateNotNullOrEmpty]
    [ValidateSet(
        Constants.RecoveryTagLatest,
        Constants.RecoveryTagLatestAvailable,
        Constants.RecoveryTagLatestAvailableApplicationConsistent)]
    [DefaultValue(Constants.RecoveryTagLatestAvailable)]
    public string RecoveryTag { get; set; }

    /// <summary>
    /// ProcessRecord of the command: reads the optional encryption certificates,
    /// resolves the network input from the parameter set, then dispatches to the
    /// recovery-plan or protected-item test-failover path.
    /// </summary>
    public override void ExecuteSiteRecoveryCmdlet()
    {
        base.ExecuteSiteRecoveryCmdlet();

        if (this.ShouldProcess(
            "Protected item or Recovery plan",
            "Start test failover"))
        {
            // Certificates are passed to the service as base64-encoded pfx content.
            if (!string.IsNullOrEmpty(this.DataEncryptionPrimaryCertFile))
            {
                var certBytesPrimary = File.ReadAllBytes(this.DataEncryptionPrimaryCertFile);
                this.primaryKekCertpfx = Convert.ToBase64String(certBytesPrimary);
            }

            if (!string.IsNullOrEmpty(this.DataEncryptionSecondaryCertFile))
            {
                var certBytesSecondary =
                    File.ReadAllBytes(this.DataEncryptionSecondaryCertFile);
                this.secondaryKekCertpfx = Convert.ToBase64String(certBytesSecondary);
            }

            switch (this.ParameterSetName)
            {
                case ASRParameterSets.ByRPIObjectWithVMNetwork:
                case ASRParameterSets.ByRPObjectWithVMNetwork:
                    this.networkType = "VmNetworkAsInput";
                    this.networkId = this.VMNetwork.ID;
                    break;
                case ASRParameterSets.ByRPIObjectWithAzureVMNetworkId:
                case ASRParameterSets.ByRPObjectWithAzureVMNetworkId:
                    this.networkType = "VmNetworkAsInput";
                    this.networkId = this.AzureVMNetworkId;
                    break;
                case ASRParameterSets.ByRPIObject:
                case ASRParameterSets.ByRPObject:
                    this.networkType = "NoNetworkAttachAsInput";
                    this.networkId = null;
                    break;
            }

            if ((this.ParameterSetName == ASRParameterSets.ByRPObject) ||
                (this.ParameterSetName == ASRParameterSets.ByRPObjectWithVMNetwork) ||
                (this.ParameterSetName == ASRParameterSets.ByRPObjectWithAzureVMNetworkId))
            {
                this.StartRpTestFailover();
            }
            else
            {
                // RPI path: derive container and fabric names from the item's ARM id.
                this.protectionContainerName = Utilities.GetValueFromArmId(
                    this.ReplicationProtectedItem.ID,
                    ARMResourceTypeConstants.ReplicationProtectionContainers);
                this.fabricName = Utilities.GetValueFromArmId(
                    this.ReplicationProtectedItem.ID,
                    ARMResourceTypeConstants.ReplicationFabrics);
                this.StartRPITestFailover();
            }
        }
    }

    /// <summary>
    /// Starts RPI Test failover.
    /// </summary>
    private void StartRPITestFailover()
    {
        var testFailoverInputProperties = new TestFailoverInputProperties
        {
            FailoverDirection = this.Direction,
            NetworkId = this.networkId,
            NetworkType = this.networkType,
            ProviderSpecificDetails = new ProviderSpecificFailoverInput(),
            SkipTestFailoverCleanup = bool.TrueString
        };

        var input = new TestFailoverInput { Properties = testFailoverInputProperties };

        if (0 ==
            string.Compare(
                this.ReplicationProtectedItem.ReplicationProvider,
                Constants.HyperVReplicaAzure,
                StringComparison.OrdinalIgnoreCase))
        {
            if (this.Direction == Constants.PrimaryToRecovery)
            {
                var failoverInput = new HyperVReplicaAzureFailoverProviderInput
                {
                    PrimaryKekCertificatePfx = this.primaryKekCertpfx,
                    SecondaryKekCertificatePfx = this.secondaryKekCertpfx
                };
                input.Properties.ProviderSpecificDetails = failoverInput;
            }
            else
            {
                // BUGFIX: the exception was previously constructed but never
                // thrown, so an unsupported direction fell through silently.
                throw new ArgumentException(
                    Resources
                        .UnsupportedDirectionForTFO); // Throw Unsupported Direction Exception
            }
        }
        else if (string.Compare(
                     this.ReplicationProtectedItem.ReplicationProvider,
                     Constants.InMageAzureV2,
                     StringComparison.OrdinalIgnoreCase) ==
                 0)
        {
            // Validate if the Replication Protection Item is part of any Replication Group.
            Guid guidResult;
            var parseFlag = Guid.TryParse(
                ((ASRInMageAzureV2SpecificRPIDetails)this.ReplicationProtectedItem
                    .ProviderSpecificDetails).MultiVmGroupName,
                out guidResult);
            if (parseFlag == false ||
                guidResult == Guid.Empty ||
                string.Compare(
                    ((ASRInMageAzureV2SpecificRPIDetails)this.ReplicationProtectedItem
                        .ProviderSpecificDetails).MultiVmGroupName,
                    ((ASRInMageAzureV2SpecificRPIDetails)this.ReplicationProtectedItem
                        .ProviderSpecificDetails).MultiVmGroupId) !=
                0)
            {
                // Replication Group was created at the time of Protection.
                throw new InvalidOperationException(
                    string.Format(
                        Resources.UnsupportedReplicationProtectionActionForTestFailover,
                        this.ReplicationProtectedItem.ReplicationProvider));
            }

            // Validate the Direction as PrimaryToRecovery.
            if (this.Direction == Constants.PrimaryToRecovery)
            {
                // Set the InMageAzureV2 Provider specific input in the Test Failover Input.
                var failoverInput = new InMageAzureV2FailoverProviderInput
                {
                    RecoveryPointId = this.RecoveryPoint != null ? this.RecoveryPoint.ID : null
                };
                input.Properties.ProviderSpecificDetails = failoverInput;
            }
            else
            {
                // RecoveryToPrimary Direction is Invalid for InMageAzureV2.
                // BUGFIX: the exception was previously constructed but never thrown.
                throw new ArgumentException(Resources.InvalidDirectionForVMWareToAzure);
            }
        }
        else if (string.Compare(
                     this.ReplicationProtectedItem.ReplicationProvider,
                     Constants.InMage,
                     StringComparison.OrdinalIgnoreCase) ==
                 0)
        {
            throw new InvalidOperationException(
                string.Format(
                    Resources.UnsupportedReplicationProviderForTestFailover,
                    this.ReplicationProtectedItem.ReplicationProvider));
        }

        var response = this.RecoveryServicesClient.StartAzureSiteRecoveryTestFailover(
            this.fabricName,
            this.protectionContainerName,
            this.ReplicationProtectedItem.Name,
            input);

        var jobResponse = this.RecoveryServicesClient.GetAzureSiteRecoveryJobDetails(
            PSRecoveryServicesClient.GetJobIdFromReponseLocation(response.Location));

        this.WriteObject(new ASRJob(jobResponse));
    }

    /// <summary>
    /// Starts RP Test failover.
    /// </summary>
    private void StartRpTestFailover()
    {
        // Refresh RP Object
        var rp = this.RecoveryServicesClient.GetAzureSiteRecoveryRecoveryPlan(
            this.RecoveryPlan.Name);

        var recoveryPlanTestFailoverInputProperties =
            new RecoveryPlanTestFailoverInputProperties
            {
                FailoverDirection =
                    this.Direction == PossibleOperationsDirections.PrimaryToRecovery.ToString()
                        ? PossibleOperationsDirections.PrimaryToRecovery
                        : PossibleOperationsDirections.RecoveryToPrimary,
                NetworkId = this.networkId,
                NetworkType = this.networkType,
                ProviderSpecificDetails = new List<RecoveryPlanProviderSpecificFailoverInput>(),
                SkipTestFailoverCleanup = bool.TrueString
            };

        // Add one provider-specific input per replication provider in the plan.
        foreach (var replicationProvider in rp.Properties.ReplicationProviders)
        {
            if (0 ==
                string.Compare(
                    replicationProvider,
                    Constants.HyperVReplicaAzure,
                    StringComparison.OrdinalIgnoreCase))
            {
                if (this.Direction == Constants.PrimaryToRecovery)
                {
                    var recoveryPlanHyperVReplicaAzureFailoverInput =
                        new RecoveryPlanHyperVReplicaAzureFailoverInput
                        {
                            PrimaryKekCertificatePfx = this.primaryKekCertpfx,
                            SecondaryKekCertificatePfx = this.secondaryKekCertpfx,
                            VaultLocation = "dummy"
                        };
                    recoveryPlanTestFailoverInputProperties.ProviderSpecificDetails.Add(
                        recoveryPlanHyperVReplicaAzureFailoverInput);
                }
                else
                {
                    throw new ArgumentException(
                        Resources
                            .UnsupportedDirectionForTFO); // Throw Unsupported Direction Exception
                }
            }
            else if (string.Compare(
                         replicationProvider,
                         Constants.InMageAzureV2,
                         StringComparison.OrdinalIgnoreCase) ==
                     0)
            {
                // Check if the Direction is PrimaryToRecovery.
                if (this.Direction == Constants.PrimaryToRecovery)
                {
                    // Set the Recovery Point Types for InMage.
                    var recoveryPointType =
                        this.RecoveryTag ==
                        Constants.RecoveryTagLatestAvailableApplicationConsistent
                            ? InMageV2RpRecoveryPointType.LatestApplicationConsistent
                            : this.RecoveryTag == Constants.RecoveryTagLatest
                                ? InMageV2RpRecoveryPointType.Latest
                                : InMageV2RpRecoveryPointType.LatestProcessed;

                    // Create the InMageAzureV2 Provider specific input.
                    var recoveryPlanInMageAzureV2FailoverInput =
                        new RecoveryPlanInMageAzureV2FailoverInput
                        {
                            RecoveryPointType = recoveryPointType,
                            VaultLocation = "dummy"
                        };

                    // Add the InMageAzureV2 Provider specific input in the Test Failover Input.
                    recoveryPlanTestFailoverInputProperties.ProviderSpecificDetails.Add(
                        recoveryPlanInMageAzureV2FailoverInput);
                }
            }
            else if (string.Compare(
                         replicationProvider,
                         Constants.InMage,
                         StringComparison.OrdinalIgnoreCase) ==
                     0)
            {
                throw new InvalidOperationException(
                    string.Format(
                        Resources.UnsupportedReplicationProviderForTestFailover,
                        this.ReplicationProtectedItem.ReplicationProvider));
            }
        }

        var recoveryPlanTestFailoverInput =
            new RecoveryPlanTestFailoverInput
            {
                Properties = recoveryPlanTestFailoverInputProperties
            };

        var response = this.RecoveryServicesClient.StartAzureSiteRecoveryTestFailover(
            this.RecoveryPlan.Name,
            recoveryPlanTestFailoverInput);

        var jobResponse = this.RecoveryServicesClient.GetAzureSiteRecoveryJobDetails(
            PSRecoveryServicesClient.GetJobIdFromReponseLocation(response.Location));

        this.WriteObject(new ASRJob(jobResponse));
    }

    #region local parameters

    /// <summary>
    /// Network ID.
    /// </summary>
    private string networkId = string.Empty; // Network ARM Id

    /// <summary>
    /// Network Type (Logical network or VM network).
    /// </summary>
    private string networkType =
        string.Empty; // LogicalNetworkAsInput|VmNetworkAsInput|NoNetworkAttachAsInput

    /// <summary>
    /// Gets or sets Name of the PE.
    /// </summary>
    public string protectionEntityName;

    /// <summary>
    /// Gets or sets Name of the Protection Container.
    /// </summary>
    public string protectionContainerName;

    /// <summary>
    /// Gets or sets Name of the Fabric.
    /// </summary>
    public string fabricName;

    /// <summary>
    /// Primary Kek Cert pfx file.
    /// </summary>
    private string primaryKekCertpfx;

    /// <summary>
    /// Secondary Kek Cert pfx file.
    /// </summary>
    private string secondaryKekCertpfx;

    #endregion local parameters
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.Notebooks.V1.Tests
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedNotebookServiceClientTest
{
// Generated test (file header says "DO NOT EDIT"): verifies that
// NotebookServiceClient.GetInstance(GetInstanceRequest) forwards the request to
// the gRPC client and returns the same Instance object, with all strict-mock
// expectations satisfied.
[xunit::FactAttribute]
public void GetInstanceRequestObject()
{
    moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    GetInstanceRequest request = new GetInstanceRequest
    {
        Name = "name1c9368b0",
    };
    Instance expectedResponse = new Instance
    {
        InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]"),
        VmImage = new VmImage(),
        ContainerImage = new ContainerImage(),
        PostStartupScript = "post_startup_script0d185c71",
        ProxyUri = "proxy_uri73c5439d",
        InstanceOwners =
        {
            "instance_owners34378c03",
        },
        ServiceAccount = "service_accounta3c1b923",
        MachineType = "machine_type68ce40fa",
        AcceleratorConfig = new Instance.Types.AcceleratorConfig(),
        State = Instance.Types.State.Initializing,
        InstallGpuDriver = true,
        CustomGpuDriverPath = "custom_gpu_driver_path24577c2a",
        BootDiskType = Instance.Types.DiskType.PdSsd,
        BootDiskSizeGb = -3633932070397520346L,
        DiskEncryption = Instance.Types.DiskEncryption.Cmek,
        KmsKey = "kms_key0b21601b",
        NoPublicIp = true,
        NoProxyAccess = false,
        Network = "networkd22ce091",
        Subnet = "subnet0666554f",
        Labels =
        {
            {
                "key8a0b6e3c",
                "value60c16320"
            },
        },
        Metadata =
        {
            {
                "key8a0b6e3c",
                "value60c16320"
            },
        },
        CreateTime = new wkt::Timestamp(),
        UpdateTime = new wkt::Timestamp(),
        DataDiskType = Instance.Types.DiskType.Unspecified,
        DataDiskSizeGb = 8597103336273737467L,
        NoRemoveDataDisk = true,
        Disks =
        {
            new Instance.Types.Disk(),
        },
        UpgradeHistory =
        {
            new Instance.Types.UpgradeHistoryEntry(),
        },
        ShieldedInstanceConfig = new Instance.Types.ShieldedInstanceConfig(),
        ServiceAccountScopes =
        {
            "service_account_scopesd4b7f658",
        },
        Tags = { "tags52c47ad5", },
        NicType = Instance.Types.NicType.Gvnic,
        ReservationAffinity = new ReservationAffinity(),
    };
    mockGrpcClient.Setup(x => x.GetInstance(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
    Instance response = client.GetInstance(request);
    // Same-reference check: the client must not copy or transform the response.
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test (file header says "DO NOT EDIT"): async variant of
// GetInstanceRequestObject. Exercises GetInstanceAsync twice — once with
// CallSettings built from a CancellationToken and once with a bare
// CancellationToken — and expects the same mocked Instance both times.
[xunit::FactAttribute]
public async stt::Task GetInstanceRequestObjectAsync()
{
    moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    GetInstanceRequest request = new GetInstanceRequest
    {
        Name = "name1c9368b0",
    };
    Instance expectedResponse = new Instance
    {
        InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]"),
        VmImage = new VmImage(),
        ContainerImage = new ContainerImage(),
        PostStartupScript = "post_startup_script0d185c71",
        ProxyUri = "proxy_uri73c5439d",
        InstanceOwners =
        {
            "instance_owners34378c03",
        },
        ServiceAccount = "service_accounta3c1b923",
        MachineType = "machine_type68ce40fa",
        AcceleratorConfig = new Instance.Types.AcceleratorConfig(),
        State = Instance.Types.State.Initializing,
        InstallGpuDriver = true,
        CustomGpuDriverPath = "custom_gpu_driver_path24577c2a",
        BootDiskType = Instance.Types.DiskType.PdSsd,
        BootDiskSizeGb = -3633932070397520346L,
        DiskEncryption = Instance.Types.DiskEncryption.Cmek,
        KmsKey = "kms_key0b21601b",
        NoPublicIp = true,
        NoProxyAccess = false,
        Network = "networkd22ce091",
        Subnet = "subnet0666554f",
        Labels =
        {
            {
                "key8a0b6e3c",
                "value60c16320"
            },
        },
        Metadata =
        {
            {
                "key8a0b6e3c",
                "value60c16320"
            },
        },
        CreateTime = new wkt::Timestamp(),
        UpdateTime = new wkt::Timestamp(),
        DataDiskType = Instance.Types.DiskType.Unspecified,
        DataDiskSizeGb = 8597103336273737467L,
        NoRemoveDataDisk = true,
        Disks =
        {
            new Instance.Types.Disk(),
        },
        UpgradeHistory =
        {
            new Instance.Types.UpgradeHistoryEntry(),
        },
        ShieldedInstanceConfig = new Instance.Types.ShieldedInstanceConfig(),
        ServiceAccountScopes =
        {
            "service_account_scopesd4b7f658",
        },
        Tags = { "tags52c47ad5", },
        NicType = Instance.Types.NicType.Gvnic,
        ReservationAffinity = new ReservationAffinity(),
    };
    mockGrpcClient.Setup(x => x.GetInstanceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Instance>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
    Instance responseCallSettings = await client.GetInstanceAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    Instance responseCancellationToken = await client.GetInstanceAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test (file header says "DO NOT EDIT"): verifies the flattened
// overload NotebookServiceClient.GetInstance(string name) — the client is
// expected to build a GetInstanceRequest from the name and return the mocked
// Instance unchanged.
[xunit::FactAttribute]
public void GetInstance()
{
    moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    GetInstanceRequest request = new GetInstanceRequest
    {
        Name = "name1c9368b0",
    };
    Instance expectedResponse = new Instance
    {
        InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]"),
        VmImage = new VmImage(),
        ContainerImage = new ContainerImage(),
        PostStartupScript = "post_startup_script0d185c71",
        ProxyUri = "proxy_uri73c5439d",
        InstanceOwners =
        {
            "instance_owners34378c03",
        },
        ServiceAccount = "service_accounta3c1b923",
        MachineType = "machine_type68ce40fa",
        AcceleratorConfig = new Instance.Types.AcceleratorConfig(),
        State = Instance.Types.State.Initializing,
        InstallGpuDriver = true,
        CustomGpuDriverPath = "custom_gpu_driver_path24577c2a",
        BootDiskType = Instance.Types.DiskType.PdSsd,
        BootDiskSizeGb = -3633932070397520346L,
        DiskEncryption = Instance.Types.DiskEncryption.Cmek,
        KmsKey = "kms_key0b21601b",
        NoPublicIp = true,
        NoProxyAccess = false,
        Network = "networkd22ce091",
        Subnet = "subnet0666554f",
        Labels =
        {
            {
                "key8a0b6e3c",
                "value60c16320"
            },
        },
        Metadata =
        {
            {
                "key8a0b6e3c",
                "value60c16320"
            },
        },
        CreateTime = new wkt::Timestamp(),
        UpdateTime = new wkt::Timestamp(),
        DataDiskType = Instance.Types.DiskType.Unspecified,
        DataDiskSizeGb = 8597103336273737467L,
        NoRemoveDataDisk = true,
        Disks =
        {
            new Instance.Types.Disk(),
        },
        UpgradeHistory =
        {
            new Instance.Types.UpgradeHistoryEntry(),
        },
        ShieldedInstanceConfig = new Instance.Types.ShieldedInstanceConfig(),
        ServiceAccountScopes =
        {
            "service_account_scopesd4b7f658",
        },
        Tags = { "tags52c47ad5", },
        NicType = Instance.Types.NicType.Gvnic,
        ReservationAffinity = new ReservationAffinity(),
    };
    mockGrpcClient.Setup(x => x.GetInstance(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
    // Calls the name-only overload; the strict mock only matches if the client
    // produces a request equal to `request`.
    Instance response = client.GetInstance(request.Name);
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetInstanceAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetInstanceRequest request = new GetInstanceRequest();
    request.Name = "name1c9368b0";
    Instance expectedResponse = new Instance();
    expectedResponse.InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]");
    expectedResponse.VmImage = new VmImage();
    expectedResponse.ContainerImage = new ContainerImage();
    expectedResponse.PostStartupScript = "post_startup_script0d185c71";
    expectedResponse.ProxyUri = "proxy_uri73c5439d";
    expectedResponse.InstanceOwners.Add("instance_owners34378c03");
    expectedResponse.ServiceAccount = "service_accounta3c1b923";
    expectedResponse.MachineType = "machine_type68ce40fa";
    expectedResponse.AcceleratorConfig = new Instance.Types.AcceleratorConfig();
    expectedResponse.State = Instance.Types.State.Initializing;
    expectedResponse.InstallGpuDriver = true;
    expectedResponse.CustomGpuDriverPath = "custom_gpu_driver_path24577c2a";
    expectedResponse.BootDiskType = Instance.Types.DiskType.PdSsd;
    expectedResponse.BootDiskSizeGb = -3633932070397520346L;
    expectedResponse.DiskEncryption = Instance.Types.DiskEncryption.Cmek;
    expectedResponse.KmsKey = "kms_key0b21601b";
    expectedResponse.NoPublicIp = true;
    expectedResponse.NoProxyAccess = false;
    expectedResponse.Network = "networkd22ce091";
    expectedResponse.Subnet = "subnet0666554f";
    expectedResponse.Labels.Add("key8a0b6e3c", "value60c16320");
    expectedResponse.Metadata.Add("key8a0b6e3c", "value60c16320");
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.DataDiskType = Instance.Types.DiskType.Unspecified;
    expectedResponse.DataDiskSizeGb = 8597103336273737467L;
    expectedResponse.NoRemoveDataDisk = true;
    expectedResponse.Disks.Add(new Instance.Types.Disk());
    expectedResponse.UpgradeHistory.Add(new Instance.Types.UpgradeHistoryEntry());
    expectedResponse.ShieldedInstanceConfig = new Instance.Types.ShieldedInstanceConfig();
    expectedResponse.ServiceAccountScopes.Add("service_account_scopesd4b7f658");
    expectedResponse.Tags.Add("tags52c47ad5");
    expectedResponse.NicType = Instance.Types.NicType.Gvnic;
    expectedResponse.ReservationAffinity = new ReservationAffinity();
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetInstanceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Instance>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both name-based async overloads (CallSettings and CancellationToken).
    Instance viaCallSettings = await client.GetInstanceAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Instance viaCancellationToken = await client.GetInstanceAsync(request.Name, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void IsInstanceUpgradeableRequestObject()
{
    // Build the request and the canned response before wiring any mocks.
    IsInstanceUpgradeableRequest request = new IsInstanceUpgradeableRequest();
    request.NotebookInstance = "notebook_instance62ef1454";
    IsInstanceUpgradeableResponse expectedResponse = new IsInstanceUpgradeableResponse();
    expectedResponse.Upgradeable = false;
    expectedResponse.UpgradeVersion = "upgrade_version716e0863";
    expectedResponse.UpgradeInfo = "upgrade_info29eca0a2";
    expectedResponse.UpgradeImage = "upgrade_image84c2e9b0";
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.IsInstanceUpgradeable(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The request-object overload must pass the request through unchanged.
    IsInstanceUpgradeableResponse actual = client.IsInstanceUpgradeable(request);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task IsInstanceUpgradeableRequestObjectAsync()
{
    // Build the request and the canned response before wiring any mocks.
    IsInstanceUpgradeableRequest request = new IsInstanceUpgradeableRequest();
    request.NotebookInstance = "notebook_instance62ef1454";
    IsInstanceUpgradeableResponse expectedResponse = new IsInstanceUpgradeableResponse();
    expectedResponse.Upgradeable = false;
    expectedResponse.UpgradeVersion = "upgrade_version716e0863";
    expectedResponse.UpgradeInfo = "upgrade_info29eca0a2";
    expectedResponse.UpgradeImage = "upgrade_image84c2e9b0";
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.IsInstanceUpgradeableAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<IsInstanceUpgradeableResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both async request-object overloads (CallSettings and CancellationToken).
    IsInstanceUpgradeableResponse viaCallSettings = await client.IsInstanceUpgradeableAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    IsInstanceUpgradeableResponse viaCancellationToken = await client.IsInstanceUpgradeableAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetInstanceHealthRequestObject()
{
    // Build the request and the canned response before wiring any mocks.
    GetInstanceHealthRequest request = new GetInstanceHealthRequest();
    request.InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]");
    GetInstanceHealthResponse expectedResponse = new GetInstanceHealthResponse();
    expectedResponse.HealthState = GetInstanceHealthResponse.Types.HealthState.Unhealthy;
    expectedResponse.HealthInfo.Add("key8a0b6e3c", "value60c16320");
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetInstanceHealth(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The request-object overload must pass the request through unchanged.
    GetInstanceHealthResponse actual = client.GetInstanceHealth(request);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetInstanceHealthRequestObjectAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetInstanceHealthRequest request = new GetInstanceHealthRequest();
    request.InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]");
    GetInstanceHealthResponse expectedResponse = new GetInstanceHealthResponse();
    expectedResponse.HealthState = GetInstanceHealthResponse.Types.HealthState.Unhealthy;
    expectedResponse.HealthInfo.Add("key8a0b6e3c", "value60c16320");
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetInstanceHealthAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GetInstanceHealthResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both async request-object overloads (CallSettings and CancellationToken).
    GetInstanceHealthResponse viaCallSettings = await client.GetInstanceHealthAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    GetInstanceHealthResponse viaCancellationToken = await client.GetInstanceHealthAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetInstanceHealth()
{
    // Build the request and the canned response before wiring any mocks.
    GetInstanceHealthRequest request = new GetInstanceHealthRequest();
    request.InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]");
    GetInstanceHealthResponse expectedResponse = new GetInstanceHealthResponse();
    expectedResponse.HealthState = GetInstanceHealthResponse.Types.HealthState.Unhealthy;
    expectedResponse.HealthInfo.Add("key8a0b6e3c", "value60c16320");
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetInstanceHealth(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The string-name overload must build an equivalent request internally.
    GetInstanceHealthResponse actual = client.GetInstanceHealth(request.Name);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetInstanceHealthAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetInstanceHealthRequest request = new GetInstanceHealthRequest();
    request.InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]");
    GetInstanceHealthResponse expectedResponse = new GetInstanceHealthResponse();
    expectedResponse.HealthState = GetInstanceHealthResponse.Types.HealthState.Unhealthy;
    expectedResponse.HealthInfo.Add("key8a0b6e3c", "value60c16320");
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetInstanceHealthAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GetInstanceHealthResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both name-based async overloads (CallSettings and CancellationToken).
    GetInstanceHealthResponse viaCallSettings = await client.GetInstanceHealthAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    GetInstanceHealthResponse viaCancellationToken = await client.GetInstanceHealthAsync(request.Name, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetInstanceHealthResourceNames()
{
    // Build the request and the canned response before wiring any mocks.
    GetInstanceHealthRequest request = new GetInstanceHealthRequest();
    request.InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]");
    GetInstanceHealthResponse expectedResponse = new GetInstanceHealthResponse();
    expectedResponse.HealthState = GetInstanceHealthResponse.Types.HealthState.Unhealthy;
    expectedResponse.HealthInfo.Add("key8a0b6e3c", "value60c16320");
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetInstanceHealth(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The resource-name overload must build an equivalent request internally.
    GetInstanceHealthResponse actual = client.GetInstanceHealth(request.InstanceName);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetInstanceHealthResourceNamesAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetInstanceHealthRequest request = new GetInstanceHealthRequest();
    request.InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]");
    GetInstanceHealthResponse expectedResponse = new GetInstanceHealthResponse();
    expectedResponse.HealthState = GetInstanceHealthResponse.Types.HealthState.Unhealthy;
    expectedResponse.HealthInfo.Add("key8a0b6e3c", "value60c16320");
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetInstanceHealthAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GetInstanceHealthResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both resource-name async overloads (CallSettings and CancellationToken).
    GetInstanceHealthResponse viaCallSettings = await client.GetInstanceHealthAsync(request.InstanceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    GetInstanceHealthResponse viaCancellationToken = await client.GetInstanceHealthAsync(request.InstanceName, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetEnvironmentRequestObject()
{
    // Build the request and the canned response before wiring any mocks.
    GetEnvironmentRequest request = new GetEnvironmentRequest();
    request.Name = "name1c9368b0";
    Environment expectedResponse = new Environment();
    expectedResponse.EnvironmentName = EnvironmentName.FromProjectEnvironment("[PROJECT]", "[ENVIRONMENT]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.VmImage = new VmImage();
    expectedResponse.ContainerImage = new ContainerImage();
    expectedResponse.PostStartupScript = "post_startup_script0d185c71";
    expectedResponse.CreateTime = new wkt::Timestamp();
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetEnvironment(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The request-object overload must pass the request through unchanged.
    Environment actual = client.GetEnvironment(request);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetEnvironmentRequestObjectAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetEnvironmentRequest request = new GetEnvironmentRequest();
    request.Name = "name1c9368b0";
    Environment expectedResponse = new Environment();
    expectedResponse.EnvironmentName = EnvironmentName.FromProjectEnvironment("[PROJECT]", "[ENVIRONMENT]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.VmImage = new VmImage();
    expectedResponse.ContainerImage = new ContainerImage();
    expectedResponse.PostStartupScript = "post_startup_script0d185c71";
    expectedResponse.CreateTime = new wkt::Timestamp();
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetEnvironmentAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Environment>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both async request-object overloads (CallSettings and CancellationToken).
    Environment viaCallSettings = await client.GetEnvironmentAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Environment viaCancellationToken = await client.GetEnvironmentAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetEnvironment()
{
    // Build the request and the canned response before wiring any mocks.
    GetEnvironmentRequest request = new GetEnvironmentRequest();
    request.Name = "name1c9368b0";
    Environment expectedResponse = new Environment();
    expectedResponse.EnvironmentName = EnvironmentName.FromProjectEnvironment("[PROJECT]", "[ENVIRONMENT]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.VmImage = new VmImage();
    expectedResponse.ContainerImage = new ContainerImage();
    expectedResponse.PostStartupScript = "post_startup_script0d185c71";
    expectedResponse.CreateTime = new wkt::Timestamp();
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetEnvironment(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The string-name overload must build an equivalent request internally.
    Environment actual = client.GetEnvironment(request.Name);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetEnvironmentAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetEnvironmentRequest request = new GetEnvironmentRequest();
    request.Name = "name1c9368b0";
    Environment expectedResponse = new Environment();
    expectedResponse.EnvironmentName = EnvironmentName.FromProjectEnvironment("[PROJECT]", "[ENVIRONMENT]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.VmImage = new VmImage();
    expectedResponse.ContainerImage = new ContainerImage();
    expectedResponse.PostStartupScript = "post_startup_script0d185c71";
    expectedResponse.CreateTime = new wkt::Timestamp();
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetEnvironmentAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Environment>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both name-based async overloads (CallSettings and CancellationToken).
    Environment viaCallSettings = await client.GetEnvironmentAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Environment viaCancellationToken = await client.GetEnvironmentAsync(request.Name, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetScheduleRequestObject()
{
    // Build the request and the canned response before wiring any mocks.
    GetScheduleRequest request = new GetScheduleRequest();
    request.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    Schedule expectedResponse = new Schedule();
    expectedResponse.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.State = Schedule.Types.State.Deleting;
    expectedResponse.CronSchedule = "cron_schedulee564a4b3";
    expectedResponse.TimeZone = "time_zone73f23b20";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.RecentExecutions.Add(new Execution());
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetSchedule(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The request-object overload must pass the request through unchanged.
    Schedule actual = client.GetSchedule(request);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetScheduleRequestObjectAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetScheduleRequest request = new GetScheduleRequest();
    request.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    Schedule expectedResponse = new Schedule();
    expectedResponse.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.State = Schedule.Types.State.Deleting;
    expectedResponse.CronSchedule = "cron_schedulee564a4b3";
    expectedResponse.TimeZone = "time_zone73f23b20";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.RecentExecutions.Add(new Execution());
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetScheduleAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Schedule>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both async request-object overloads (CallSettings and CancellationToken).
    Schedule viaCallSettings = await client.GetScheduleAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Schedule viaCancellationToken = await client.GetScheduleAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetSchedule()
{
    // Build the request and the canned response before wiring any mocks.
    GetScheduleRequest request = new GetScheduleRequest();
    request.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    Schedule expectedResponse = new Schedule();
    expectedResponse.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.State = Schedule.Types.State.Deleting;
    expectedResponse.CronSchedule = "cron_schedulee564a4b3";
    expectedResponse.TimeZone = "time_zone73f23b20";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.RecentExecutions.Add(new Execution());
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetSchedule(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The string-name overload must build an equivalent request internally.
    Schedule actual = client.GetSchedule(request.Name);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetScheduleAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetScheduleRequest request = new GetScheduleRequest();
    request.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    Schedule expectedResponse = new Schedule();
    expectedResponse.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.State = Schedule.Types.State.Deleting;
    expectedResponse.CronSchedule = "cron_schedulee564a4b3";
    expectedResponse.TimeZone = "time_zone73f23b20";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.RecentExecutions.Add(new Execution());
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetScheduleAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Schedule>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both name-based async overloads (CallSettings and CancellationToken).
    Schedule viaCallSettings = await client.GetScheduleAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Schedule viaCancellationToken = await client.GetScheduleAsync(request.Name, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetScheduleResourceNames()
{
    // Build the request and the canned response before wiring any mocks.
    GetScheduleRequest request = new GetScheduleRequest();
    request.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    Schedule expectedResponse = new Schedule();
    expectedResponse.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.State = Schedule.Types.State.Deleting;
    expectedResponse.CronSchedule = "cron_schedulee564a4b3";
    expectedResponse.TimeZone = "time_zone73f23b20";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.RecentExecutions.Add(new Execution());
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetSchedule(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The resource-name overload must build an equivalent request internally.
    Schedule actual = client.GetSchedule(request.ScheduleName);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetScheduleResourceNamesAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetScheduleRequest request = new GetScheduleRequest();
    request.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    Schedule expectedResponse = new Schedule();
    expectedResponse.ScheduleName = ScheduleName.FromProjectLocationSchedule("[PROJECT]", "[LOCATION]", "[SCHEDULE]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.State = Schedule.Types.State.Deleting;
    expectedResponse.CronSchedule = "cron_schedulee564a4b3";
    expectedResponse.TimeZone = "time_zone73f23b20";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.RecentExecutions.Add(new Execution());
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetScheduleAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Schedule>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both resource-name async overloads (CallSettings and CancellationToken).
    Schedule viaCallSettings = await client.GetScheduleAsync(request.ScheduleName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Schedule viaCancellationToken = await client.GetScheduleAsync(request.ScheduleName, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetExecutionRequestObject()
{
    // Build the request and the canned response before wiring any mocks.
    GetExecutionRequest request = new GetExecutionRequest();
    request.ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]");
    Execution expectedResponse = new Execution();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.State = Execution.Types.State.Expired;
    expectedResponse.OutputNotebookFile = "output_notebook_file4938a367";
    expectedResponse.JobUri = "job_uria032473c";
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetExecution(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The request-object overload must pass the request through unchanged.
    Execution actual = client.GetExecution(request);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetExecutionRequestObjectAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetExecutionRequest request = new GetExecutionRequest();
    request.ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]");
    Execution expectedResponse = new Execution();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.State = Execution.Types.State.Expired;
    expectedResponse.OutputNotebookFile = "output_notebook_file4938a367";
    expectedResponse.JobUri = "job_uria032473c";
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetExecutionAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Execution>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both async request-object overloads (CallSettings and CancellationToken).
    Execution viaCallSettings = await client.GetExecutionAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Execution viaCancellationToken = await client.GetExecutionAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetExecution()
{
    // Build the request and the canned response before wiring any mocks.
    GetExecutionRequest request = new GetExecutionRequest();
    request.ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]");
    Execution expectedResponse = new Execution();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.State = Execution.Types.State.Expired;
    expectedResponse.OutputNotebookFile = "output_notebook_file4938a367";
    expectedResponse.JobUri = "job_uria032473c";
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetExecution(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // The string-name overload must build an equivalent request internally.
    Execution actual = client.GetExecution(request.Name);
    xunit::Assert.Same(expectedResponse, actual);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetExecutionAsync()
{
    // Build the request and the canned response before wiring any mocks.
    GetExecutionRequest request = new GetExecutionRequest();
    request.ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]");
    Execution expectedResponse = new Execution();
    expectedResponse.ExecutionTemplate = new ExecutionTemplate();
    expectedResponse.ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]");
    expectedResponse.DisplayName = "display_name137f65c2";
    expectedResponse.Description = "description2cf9da67";
    expectedResponse.CreateTime = new wkt::Timestamp();
    expectedResponse.UpdateTime = new wkt::Timestamp();
    expectedResponse.State = Execution.Types.State.Expired;
    expectedResponse.OutputNotebookFile = "output_notebook_file4938a367";
    expectedResponse.JobUri = "job_uria032473c";
    // Strict mock: any call that is not explicitly configured fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    grpcClientMock.Setup(x => x.GetExecutionAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Execution>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both name-based async overloads (CallSettings and CancellationToken).
    Execution viaCallSettings = await client.GetExecutionAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Execution viaCancellationToken = await client.GetExecutionAsync(request.Name, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public void GetExecutionResourceNames()
{
    // Strict mock: any unconfigured call on the underlying gRPC client fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock =
        new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient())
        .Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    GetExecutionRequest request = new GetExecutionRequest
    {
        ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]"),
    };
    Execution expectedResponse = new Execution
    {
        ExecutionTemplate = new ExecutionTemplate(),
        ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]"),
        DisplayName = "display_name137f65c2",
        Description = "description2cf9da67",
        CreateTime = new wkt::Timestamp(),
        UpdateTime = new wkt::Timestamp(),
        State = Execution.Types.State.Expired,
        OutputNotebookFile = "output_notebook_file4938a367",
        JobUri = "job_uria032473c",
    };
    grpcClientMock.Setup(x => x.GetExecution(request, moq::It.IsAny<grpccore::CallOptions>()))
        .Returns(expectedResponse);
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise the strongly-typed resource-name overload.
    Execution actualResponse = client.GetExecution(request.ExecutionName);
    xunit::Assert.Same(expectedResponse, actualResponse);
    grpcClientMock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetExecutionResourceNamesAsync()
{
    // Strict mock: any unconfigured call on the underlying gRPC client fails the test.
    moq::Mock<NotebookService.NotebookServiceClient> grpcClientMock =
        new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
    grpcClientMock.Setup(x => x.CreateOperationsClient())
        .Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    GetExecutionRequest request = new GetExecutionRequest
    {
        ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]"),
    };
    Execution expectedResponse = new Execution
    {
        ExecutionTemplate = new ExecutionTemplate(),
        ExecutionName = ExecutionName.FromProjectLocationExecution("[PROJECT]", "[LOCATION]", "[EXECUTION]"),
        DisplayName = "display_name137f65c2",
        Description = "description2cf9da67",
        CreateTime = new wkt::Timestamp(),
        UpdateTime = new wkt::Timestamp(),
        State = Execution.Types.State.Expired,
        OutputNotebookFile = "output_notebook_file4938a367",
        JobUri = "job_uria032473c",
    };
    grpcClientMock.Setup(x => x.GetExecutionAsync(request, moq::It.IsAny<grpccore::CallOptions>()))
        .Returns(new grpccore::AsyncUnaryCall<Execution>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    NotebookServiceClient client = new NotebookServiceClientImpl(grpcClientMock.Object, null);
    // Exercise both async overload flavours of the resource-name API.
    Execution viaCallSettings = await client.GetExecutionAsync(request.ExecutionName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, viaCallSettings);
    Execution viaCancellationToken = await client.GetExecutionAsync(request.ExecutionName, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, viaCancellationToken);
    grpcClientMock.VerifyAll();
}
}
}
| |
using System;
using System.Data;
using System.Data.SqlClient;
using Appleseed.Framework.Settings;
namespace Appleseed.Framework.Content.Data
{
/// <summary>
/// Author: Joe Audette
/// Created: 1/18/2004
/// Last Modified: 2/5/2004
///
/// Class that encapsulates all data logic necessary
/// for Blogs within the Portal database.
/// All access goes through stored procedures. Methods that return a
/// SqlDataReader hand connection ownership to the reader
/// (CommandBehavior.CloseConnection) — callers must close the reader;
/// all other methods close their connection before returning.
/// </summary>
public class BlogDB
{
    /// <summary>
    /// This is used as a common setting from Blogs
    /// </summary>
    public static string ImagesSetting = "ImageCollection";

    /// <summary>
    /// Creates a stored-procedure command on a fresh (closed) connection.
    /// </summary>
    /// <param name="storedProcName">Name of the stored procedure.</param>
    /// <param name="connection">Receives the new connection the command is bound to.</param>
    private static SqlCommand GetCommand(string storedProcName, out SqlConnection connection)
    {
        // Config.SqlConnectionString yields a new SqlConnection instance.
        connection = Config.SqlConnectionString;
        SqlCommand command = new SqlCommand(storedProcName, connection);
        // Mark the Command as a SPROC
        command.CommandType = CommandType.StoredProcedure;
        return command;
    }

    /// <summary>
    /// Adds an input parameter to the command. A size of 0 means the
    /// SqlDbType carries its own size (NText, DateTime, Bit, ...).
    /// </summary>
    private static SqlParameter AddParameter(SqlCommand command, string name, SqlDbType dbType, int size, object value)
    {
        SqlParameter parameter = size > 0
            ? new SqlParameter(name, dbType, size)
            : new SqlParameter(name, dbType);
        parameter.Value = value;
        command.Parameters.Add(parameter);
        return parameter;
    }

    /// <summary>
    /// Opens the connection and executes the command, returning a reader that
    /// closes the connection when the reader itself is closed.
    /// Fixes a connection leak in the original code: if ExecuteReader throws,
    /// no reader exists to take ownership, so the connection is closed here.
    /// </summary>
    private static SqlDataReader ExecuteReader(SqlCommand command, SqlConnection connection)
    {
        connection.Open();
        try
        {
            return command.ExecuteReader(CommandBehavior.CloseConnection);
        }
        catch
        {
            connection.Close();
            throw;
        }
    }

    /// <summary>
    /// Opens the connection, executes the command, and always closes the connection.
    /// </summary>
    private static void ExecuteNonQuery(SqlCommand command, SqlConnection connection)
    {
        connection.Open();
        try
        {
            command.ExecuteNonQuery();
        }
        finally
        {
            connection.Close();
        }
    }

    /// <summary>
    /// The GetBlogs method returns a SqlDataReader containing all of the
    /// Blog entries for a specific blog module from the database.
    /// <a href="GetBlogs.htm" style="color:green">GetBlogs Stored Procedure</a>
    /// </summary>
    /// <param name="moduleID">The module ID.</param>
    /// <returns>An open reader; closing it releases the connection.</returns>
    public SqlDataReader GetBlogs(int moduleID)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogsGet", out myConnection);
        AddParameter(myCommand, "@ModuleID", SqlDbType.Int, 4, moduleID);
        return ExecuteReader(myCommand, myConnection);
    }

    /// <summary>
    /// The GetBlogStats method returns a SqlDataReader containing all of the
    /// Blog statistics for a specific blog module from the database.
    /// <a href="GetBlogs.htm" style="color:green">GetBlogStats Stored Procedure</a>
    /// </summary>
    /// <param name="moduleID">The module ID.</param>
    /// <returns>An open reader; closing it releases the connection.</returns>
    public SqlDataReader GetBlogStats(int moduleID)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogStatsGet", out myConnection);
        AddParameter(myCommand, "@ModuleID", SqlDbType.Int, 4, moduleID);
        return ExecuteReader(myCommand, myConnection);
    }

    /// <summary>
    /// The GetBlogMonthArchive method returns a SqlDataReader containing the
    /// per-month archive for a specific blog module from the database.
    /// <a href="GetBlogs.htm" style="color:green">GetBlogMonthArchive Stored Procedure</a>
    /// </summary>
    /// <param name="moduleID">The module ID.</param>
    /// <returns>An open reader; closing it releases the connection.</returns>
    public SqlDataReader GetBlogMonthArchive(int moduleID)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogsByMonthArchiveGet", out myConnection);
        AddParameter(myCommand, "@ModuleID", SqlDbType.Int, 4, moduleID);
        return ExecuteReader(myCommand, myConnection);
    }

    /// <summary>
    /// The GetBlogEntriesByMonth method returns a SqlDataReader containing all
    /// Blog entries for the given month/year of a specific blog module.
    /// <a href="GetBlogs.htm" style="color:green">GetBlogEntriesByMonth Stored Procedure</a>
    /// </summary>
    /// <param name="month">The month.</param>
    /// <param name="year">The year.</param>
    /// <param name="moduleID">The module ID.</param>
    /// <returns>An open reader; closing it releases the connection.</returns>
    public SqlDataReader GetBlogEntriesByMonth(int month, int year, int moduleID)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogsByMonthGet", out myConnection);
        AddParameter(myCommand, "@Month", SqlDbType.Int, 4, month);
        AddParameter(myCommand, "@Year", SqlDbType.Int, 4, year);
        AddParameter(myCommand, "@ModuleID", SqlDbType.Int, 4, moduleID);
        return ExecuteReader(myCommand, myConnection);
    }

    /// <summary>
    /// The GetSingleBlog method returns a SqlDataReader containing details
    /// about a specific Blog from the Blogs database table.
    /// </summary>
    /// <param name="itemID">The item ID.</param>
    /// <returns>An open reader; closing it releases the connection.</returns>
    public SqlDataReader GetSingleBlog(int itemID)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogGetSingle", out myConnection);
        AddParameter(myCommand, "@ItemID", SqlDbType.Int, 4, itemID);
        return ExecuteReader(myCommand, myConnection);
    }

    /// <summary>
    /// The GetSingleBlogWithImages method returns a DataSet containing details
    /// about a specific Blog (including its image collection) from the
    /// Blogs database table.
    /// </summary>
    /// <param name="itemID">The item ID.</param>
    /// <param name="variation">The variation.</param>
    /// <returns>A disconnected DataSet; the connection is closed before returning.</returns>
    public DataSet GetSingleBlogWithImages(int itemID, string variation)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogGetSingleWithImages", out myConnection);
        AddParameter(myCommand, "@ItemID", SqlDbType.Int, 4, itemID);
        AddParameter(myCommand, "@Variation", SqlDbType.VarChar, 50, variation);
        SqlDataAdapter da = new SqlDataAdapter();
        da.SelectCommand = myCommand;
        DataSet ds = new DataSet();
        myConnection.Open();
        try
        {
            da.Fill(ds);
        }
        finally
        {
            myConnection.Close(); //by Manu fix close bug #2
        }
        return ds;
    }

    /// <summary>
    /// The DeleteBlog method deletes a specified Blog from
    /// the Blogs database table.
    /// </summary>
    /// <param name="itemID">The item ID.</param>
    public void DeleteBlog(int itemID)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogDelete", out myConnection);
        AddParameter(myCommand, "@ItemID", SqlDbType.Int, 4, itemID);
        ExecuteNonQuery(myCommand, myConnection);
    }

    /// <summary>
    /// The AddBlog method adds a new Blog within the
    /// Blogs database table, and returns ItemID value as a result.
    /// </summary>
    /// <param name="moduleID">The module ID.</param>
    /// <param name="userName">Name of the user; empty is stored as "unknown".</param>
    /// <param name="title">The title.</param>
    /// <param name="excerpt">The excerpt.</param>
    /// <param name="description">The description.</param>
    /// <param name="startDate">The start date.</param>
    /// <param name="isInNewsletter">if set to <c>true</c> [is in newsletter].</param>
    /// <returns>The ItemID of the newly created blog entry.</returns>
    public int AddBlog(int moduleID, string userName, string title, string excerpt, string description,
        DateTime startDate, bool isInNewsletter)
    {
        if (userName.Length < 1)
        {
            userName = "unknown";
        }
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogAdd", out myConnection);
        // @ItemID is an output parameter filled in by the sproc.
        SqlParameter parameterItemID = new SqlParameter("@ItemID", SqlDbType.Int, 4);
        parameterItemID.Direction = ParameterDirection.Output;
        myCommand.Parameters.Add(parameterItemID);
        AddParameter(myCommand, "@ModuleID", SqlDbType.Int, 4, moduleID);
        AddParameter(myCommand, "@UserName", SqlDbType.NVarChar, 100, userName);
        AddParameter(myCommand, "@Title", SqlDbType.NVarChar, 100, title);
        AddParameter(myCommand, "@Excerpt", SqlDbType.NVarChar, 512, excerpt);
        AddParameter(myCommand, "@Description", SqlDbType.NText, 0, description);
        AddParameter(myCommand, "@StartDate", SqlDbType.DateTime, 0, startDate);
        AddParameter(myCommand, "@IsInNewsletter", SqlDbType.Bit, 0, isInNewsletter);
        ExecuteNonQuery(myCommand, myConnection);
        return (int) parameterItemID.Value;
    }

    /// <summary>
    /// The UpdateBlog method updates a specified Blog within
    /// the Blogs database table.
    /// </summary>
    /// <param name="moduleID">The module ID.</param>
    /// <param name="itemID">The item ID.</param>
    /// <param name="userName">Name of the user; empty is stored as "unknown".</param>
    /// <param name="title">The title.</param>
    /// <param name="excerpt">The excerpt.</param>
    /// <param name="description">The description.</param>
    /// <param name="startDate">The start date.</param>
    /// <param name="isInNewsletter">if set to <c>true</c> [is in newsletter].</param>
    public void UpdateBlog(int moduleID, int itemID, string userName, string title, string excerpt,
        string description, DateTime startDate, bool isInNewsletter)
    {
        if (userName.Length < 1)
        {
            userName = "unknown";
        }
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogUpdate", out myConnection);
        AddParameter(myCommand, "@ItemID", SqlDbType.Int, 4, itemID);
        AddParameter(myCommand, "@ModuleID", SqlDbType.Int, 4, moduleID);
        AddParameter(myCommand, "@UserName", SqlDbType.NVarChar, 100, userName);
        AddParameter(myCommand, "@Title", SqlDbType.NVarChar, 100, title);
        AddParameter(myCommand, "@Excerpt", SqlDbType.NVarChar, 512, excerpt);
        AddParameter(myCommand, "@Description", SqlDbType.NText, 0, description);
        AddParameter(myCommand, "@StartDate", SqlDbType.DateTime, 0, startDate);
        AddParameter(myCommand, "@IsInNewsletter", SqlDbType.Bit, 0, isInNewsletter);
        ExecuteNonQuery(myCommand, myConnection);
    }

    /// <summary>
    /// Adds a new comment to the BlogComments database table for the Blog
    /// Entry with the given moduleID and itemID. Inputs are truncated to the
    /// column widths used by the stored procedure.
    /// </summary>
    /// <param name="moduleID">The module ID.</param>
    /// <param name="itemID">The item ID.</param>
    /// <param name="name">The name; empty is stored as "unknown".</param>
    /// <param name="title">The title (truncated to 100 chars).</param>
    /// <param name="url">The URL (truncated to 200 chars).</param>
    /// <param name="comment">The comment.</param>
    public void AddBlogComment(int moduleID, int itemID, string name, string title,
        string url, string comment)
    {
        if (name.Length < 1)
        {
            name = "unknown";
        }
        if (title.Length > 100)
        {
            title = title.Substring(0, 100);
        }
        if (name.Length > 100)
        {
            name = name.Substring(0, 100);
        }
        if (url.Length > 200)
        {
            url = url.Substring(0, 200);
        }
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogCommentAdd", out myConnection);
        AddParameter(myCommand, "@ModuleID", SqlDbType.Int, 4, moduleID);
        AddParameter(myCommand, "@ItemID", SqlDbType.Int, 4, itemID);
        AddParameter(myCommand, "@Name", SqlDbType.NVarChar, 100, name);
        AddParameter(myCommand, "@Title", SqlDbType.NVarChar, 100, title);
        AddParameter(myCommand, "@URL", SqlDbType.NVarChar, 200, url);
        AddParameter(myCommand, "@Comment", SqlDbType.NText, 0, comment);
        ExecuteNonQuery(myCommand, myConnection);
    }

    /// <summary>
    /// deletes a specified Blog Comment from
    /// the Blogs database table.
    /// </summary>
    /// <param name="commentID">The comment ID.</param>
    public void DeleteBlogComment(int commentID)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogCommentDelete", out myConnection);
        AddParameter(myCommand, "@BlogCommentID", SqlDbType.Int, 4, commentID);
        ExecuteNonQuery(myCommand, myConnection);
    }

    /// <summary>
    /// returns a SqlDataReader containing all of the
    /// Comments for the Blog entry specified by the moduleID and itemID.
    /// </summary>
    /// <param name="moduleID">The module ID.</param>
    /// <param name="itemID">The item ID.</param>
    /// <returns>An open reader; closing it releases the connection.</returns>
    public SqlDataReader GetBlogComments(int moduleID, int itemID)
    {
        SqlConnection myConnection;
        SqlCommand myCommand = GetCommand("rb_BlogCommentsGet", out myConnection);
        AddParameter(myCommand, "@ModuleID", SqlDbType.Int, 4, moduleID);
        AddParameter(myCommand, "@ItemID", SqlDbType.Int, 4, itemID);
        return ExecuteReader(myCommand, myConnection);
    }
}
}
| |
/**
* Copyright (c) 2014-present, Facebook, Inc. All rights reserved.
*
* You are hereby granted a non-exclusive, worldwide, royalty-free license to use,
* copy, modify, and distribute this software in source code or binary form for use
* in connection with the web services and APIs provided by Facebook.
*
* As with any software that integrates with the Facebook platform, your use of
* this software is subject to the Facebook Developer Principles and Policies
* [http://developers.facebook.com/policy/]. This copyright notice shall be
* included in all copies or substantial portions of the software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
namespace Facebook.Unity.Mobile.IOS
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
internal class IOSFacebook : MobileFacebook
{
    /// <summary>
    /// Canned JSON payload for a cancelled native dialog.
    /// NOTE(review): not referenced within this class — confirm usage before removing.
    /// </summary>
    private const string CancelledResponse = "{\"cancelled\":true}";

    // Backing field for LimitEventUsage; mirrored into the native SDK on set.
    private bool limitEventUsage;

    // Thin wrapper over the native iOS Facebook SDK entry points.
    private IIOSWrapper iosWrapper;

    public IOSFacebook()
        : this(new IOSWrapper(), new CallbackManager())
    {
    }

    public IOSFacebook(IIOSWrapper iosWrapper, CallbackManager callbackManager)
        : base(callbackManager)
    {
        this.iosWrapper = iosWrapper;
    }

    public enum FBInsightsFlushBehavior
    {
        FBInsightsFlushBehaviorAuto,
        FBInsightsFlushBehaviorExplicitOnly,
    }

    /// <summary>
    /// Gets or sets a value limiting app-event collection;
    /// the value is forwarded to the native SDK on every set.
    /// </summary>
    public override bool LimitEventUsage
    {
        get
        {
            return this.limitEventUsage;
        }

        set
        {
            this.limitEventUsage = value;
            this.iosWrapper.FBAppEventsSetLimitEventUsage(value);
        }
    }

    public override string SDKName
    {
        get
        {
            return "FBiOSSDK";
        }
    }

    public override string SDKVersion
    {
        get
        {
            return this.iosWrapper.FBSdkVersion();
        }
    }

    /// <summary>
    /// Initializes the base class and then the native SDK.
    /// </summary>
    public void Init(
        string appId,
        bool frictionlessRequests,
        HideUnityDelegate hideUnityDelegate,
        InitDelegate onInitComplete)
    {
        base.Init(
            hideUnityDelegate,
            onInitComplete);
        this.iosWrapper.Init(
            appId,
            frictionlessRequests,
            FacebookSettings.IosURLSuffix,
            Constants.UnitySDKUserAgentSuffixLegacy);
    }

    public override void LogInWithReadPermissions(
        IEnumerable<string> permissions,
        FacebookDelegate<ILoginResult> callback)
    {
        this.iosWrapper.LogInWithReadPermissions(this.AddCallback(callback), permissions.ToCommaSeparateList());
    }

    public override void LogInWithPublishPermissions(
        IEnumerable<string> permissions,
        FacebookDelegate<ILoginResult> callback)
    {
        this.iosWrapper.LogInWithPublishPermissions(this.AddCallback(callback), permissions.ToCommaSeparateList());
    }

    public override void LogOut()
    {
        base.LogOut();
        this.iosWrapper.LogOut();
    }

    public override void AppRequest(
        string message,
        OGActionType? actionType,
        string objectId,
        IEnumerable<string> to,
        IEnumerable<object> filters,
        IEnumerable<string> excludeIds,
        int? maxRecipients,
        string data,
        string title,
        FacebookDelegate<IAppRequestResult> callback)
    {
        this.ValidateAppRequestArgs(
            message,
            actionType,
            objectId,
            to,
            filters,
            excludeIds,
            maxRecipients,
            data,
            title,
            callback);

        // Only the first string filter is forwarded to the native layer.
        string mobileFilter = null;
        if (filters != null && filters.Any())
        {
            mobileFilter = filters.First() as string;
        }

        this.iosWrapper.AppRequest(
            this.AddCallback(callback),
            message,
            (actionType != null) ? actionType.ToString() : string.Empty,
            objectId != null ? objectId : string.Empty,
            to != null ? to.ToArray() : null,
            to != null ? to.Count() : 0,
            mobileFilter != null ? mobileFilter : string.Empty,
            excludeIds != null ? excludeIds.ToArray() : null,
            excludeIds != null ? excludeIds.Count() : 0,
            maxRecipients.HasValue,
            maxRecipients.HasValue ? maxRecipients.Value : 0,
            data,
            title);
    }

    public override void AppInvite(
        Uri appLinkUrl,
        Uri previewImageUrl,
        FacebookDelegate<IAppInviteResult> callback)
    {
        // Null or empty URIs are passed to the native layer as empty strings.
        string appLinkUrlStr = string.Empty;
        string previewImageUrlStr = string.Empty;
        if (appLinkUrl != null && !string.IsNullOrEmpty(appLinkUrl.AbsoluteUri))
        {
            appLinkUrlStr = appLinkUrl.AbsoluteUri;
        }

        if (previewImageUrl != null && !string.IsNullOrEmpty(previewImageUrl.AbsoluteUri))
        {
            previewImageUrlStr = previewImageUrl.AbsoluteUri;
        }

        this.iosWrapper.AppInvite(
            this.AddCallback(callback),
            appLinkUrlStr,
            previewImageUrlStr);
    }

    public override void ShareLink(
        Uri contentURL,
        string contentTitle,
        string contentDescription,
        Uri photoURL,
        FacebookDelegate<IShareResult> callback)
    {
        this.iosWrapper.ShareLink(
            this.AddCallback(callback),
            contentURL.AbsoluteUrlOrEmptyString(),
            contentTitle,
            contentDescription,
            photoURL.AbsoluteUrlOrEmptyString());
    }

    public override void FeedShare(
        string toId,
        Uri link,
        string linkName,
        string linkCaption,
        string linkDescription,
        Uri picture,
        string mediaSource,
        FacebookDelegate<IShareResult> callback)
    {
        string linkStr = link != null ? link.ToString() : string.Empty;
        string pictureStr = picture != null ? picture.ToString() : string.Empty;
        this.iosWrapper.FeedShare(
            this.AddCallback(callback),
            toId,
            linkStr,
            linkName,
            linkCaption,
            linkDescription,
            pictureStr,
            mediaSource);
    }

    public override void GameGroupCreate(
        string name,
        string description,
        string privacy,
        FacebookDelegate<IGroupCreateResult> callback)
    {
        this.iosWrapper.CreateGameGroup(this.AddCallback(callback), name, description, privacy);
    }

    public override void GameGroupJoin(
        string id,
        FacebookDelegate<IGroupJoinResult> callback)
    {
        // Consistency fix: route through AddCallback like every other method
        // (it performs the same Convert.ToInt32(AddFacebookDelegate(...)) internally).
        this.iosWrapper.JoinGameGroup(this.AddCallback(callback), id);
    }

    public override void AppEventsLogEvent(
        string logEvent,
        float? valueToSum,
        Dictionary<string, object> parameters)
    {
        NativeDict dict = MarshallDict(parameters);
        if (valueToSum.HasValue)
        {
            this.iosWrapper.LogAppEvent(logEvent, valueToSum.Value, dict.NumEntries, dict.Keys, dict.Values);
        }
        else
        {
            this.iosWrapper.LogAppEvent(logEvent, 0.0, dict.NumEntries, dict.Keys, dict.Values);
        }
    }

    public override void AppEventsLogPurchase(
        float logPurchase,
        string currency,
        Dictionary<string, object> parameters)
    {
        NativeDict dict = MarshallDict(parameters);
        this.iosWrapper.LogPurchaseAppEvent(logPurchase, currency, dict.NumEntries, dict.Keys, dict.Values);
    }

    public override void ActivateApp(string appId)
    {
        this.iosWrapper.FBSettingsActivateApp(appId);
    }

    public override void FetchDeferredAppLink(FacebookDelegate<IAppLinkResult> callback)
    {
        this.iosWrapper.FetchDeferredAppLink(this.AddCallback(callback));
    }

    public override void GetAppLink(
        FacebookDelegate<IAppLinkResult> callback)
    {
        // Consistency fix: use AddCallback (behaviorally identical, see AddCallback).
        this.iosWrapper.GetAppLink(this.AddCallback(callback));
    }

    public override void RefreshCurrentAccessToken(
        FacebookDelegate<IAccessTokenRefreshResult> callback)
    {
        // Consistency fix: use AddCallback (behaviorally identical, see AddCallback).
        this.iosWrapper.RefreshCurrentAccessToken(this.AddCallback(callback));
    }

    protected override void SetShareDialogMode(ShareDialogMode mode)
    {
        this.iosWrapper.SetShareDialogMode((int)mode);
    }

    /// <summary>
    /// Flattens a dictionary into parallel key/value string arrays for native marshaling.
    /// Values are converted via ToString (a null value will throw, as before).
    /// </summary>
    private static NativeDict MarshallDict(Dictionary<string, object> dict)
    {
        NativeDict res = new NativeDict();
        if (dict != null && dict.Count > 0)
        {
            res.Keys = new string[dict.Count];
            res.Values = new string[dict.Count];
            res.NumEntries = 0;
            foreach (KeyValuePair<string, object> kvp in dict)
            {
                res.Keys[res.NumEntries] = kvp.Key;
                res.Values[res.NumEntries] = kvp.Value.ToString();
                res.NumEntries++;
            }
        }

        return res;
    }

    /// <summary>
    /// Flattens a string dictionary into parallel key/value arrays for native marshaling.
    /// </summary>
    private static NativeDict MarshallDict(Dictionary<string, string> dict)
    {
        NativeDict res = new NativeDict();
        if (dict != null && dict.Count > 0)
        {
            res.Keys = new string[dict.Count];
            res.Values = new string[dict.Count];
            res.NumEntries = 0;
            foreach (KeyValuePair<string, string> kvp in dict)
            {
                res.Keys[res.NumEntries] = kvp.Key;
                res.Values[res.NumEntries] = kvp.Value;
                res.NumEntries++;
            }
        }

        return res;
    }

    /// <summary>
    /// Registers the callback and returns its integer request id for the native layer.
    /// </summary>
    private int AddCallback<T>(FacebookDelegate<T> callback) where T : IResult
    {
        string asyncId = this.CallbackManager.AddFacebookDelegate(callback);
        return Convert.ToInt32(asyncId);
    }

    /// <summary>
    /// Parallel key/value arrays used to marshal dictionaries across the native bridge.
    /// </summary>
    private class NativeDict
    {
        public NativeDict()
        {
            this.NumEntries = 0;
            this.Keys = null;
            this.Values = null;
        }

        public int NumEntries { get; set; }

        public string[] Keys { get; set; }

        public string[] Values { get; set; }
    }
}
}
| |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Material sourced from the bluePortal project (http://blueportal.codeplex.com).
// Licensed under the Microsoft Public License (available at http://www.opensource.org/licenses/ms-pl.html).
using System;
using System.Data;
using System.Collections;
using System.Configuration;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
namespace CSSFriendly
{
public class DataListAdapter : System.Web.UI.WebControls.Adapters.WebControlAdapter
{
private WebControlAdapterExtender _extender = null;
/// <summary>
/// Lazily creates (or re-creates) the extender for the currently adapted control.
/// </summary>
private WebControlAdapterExtender Extender
{
    get
    {
        // Rebuild when we have no extender yet, or when the adapted control changed.
        bool missing = (_extender == null) && (Control != null);
        bool stale = (_extender != null) && (Control != _extender.AdaptedControl);
        if (missing || stale)
        {
            _extender = new WebControlAdapterExtender(Control);
        }

        System.Diagnostics.Debug.Assert(_extender != null, "CSS Friendly adapters internal error", "Null extender instance");
        return _extender;
    }
}
/// <summary>
/// Effective column count for the rendered table. An explicit RepeatColumns wins;
/// 0 means "unlimited", which maps to the item count for horizontal layout and 1 otherwise.
/// Falls back to 1 when the adapted control is not a DataList.
/// </summary>
private int RepeatColumns
{
    get
    {
        DataList dataList = Control as DataList;
        if (dataList == null)
        {
            return 1;
        }

        if (dataList.RepeatColumns != 0)
        {
            return dataList.RepeatColumns;
        }

        return dataList.RepeatDirection == RepeatDirection.Horizontal
            ? dataList.Items.Count
            : 1;
    }
}
/// ///////////////////////////////////////////////////////////////////////////////
/// PROTECTED
protected override void OnInit(EventArgs e)
{
base.OnInit(e);
if (Extender.AdapterEnabled)
{
RegisterScripts();
}
}
protected override void RenderBeginTag(HtmlTextWriter writer)
{
if (Extender.AdapterEnabled)
{
Extender.RenderBeginTag(writer, "AspNet-DataList");
}
else
{
base.RenderBeginTag(writer);
}
}
protected override void RenderEndTag(HtmlTextWriter writer)
{
if (Extender.AdapterEnabled)
{
Extender.RenderEndTag(writer);
}
else
{
base.RenderEndTag(writer);
}
}
protected override void RenderContents(HtmlTextWriter writer)
{
if (Extender.AdapterEnabled)
{
DataList dataList = Control as DataList;
if (dataList != null)
{
writer.Indent++;
writer.WriteLine();
writer.WriteBeginTag("table");
writer.WriteAttribute("cellpadding", "0");
writer.WriteAttribute("cellspacing", "0");
writer.WriteAttribute("summary", Control.ToolTip);
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
if (dataList.HeaderTemplate != null)
{
PlaceHolder container = new PlaceHolder();
dataList.HeaderTemplate.InstantiateIn(container);
container.DataBind();
if ((container.Controls.Count == 1) && typeof(LiteralControl).IsInstanceOfType(container.Controls[0]))
{
writer.WriteLine();
writer.WriteBeginTag("caption");
writer.Write(HtmlTextWriter.TagRightChar);
LiteralControl literalControl = container.Controls[0] as LiteralControl;
writer.Write(literalControl.Text.Trim());
writer.WriteEndTag("caption");
}
else
{
writer.WriteLine();
writer.WriteBeginTag("thead");
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
writer.WriteLine();
writer.WriteBeginTag("tr");
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
writer.WriteLine();
writer.WriteBeginTag("th");
writer.WriteAttribute("colspan", RepeatColumns.ToString());
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
writer.WriteLine();
container.RenderControl(writer);
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("th");
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("tr");
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("thead");
}
}
if (dataList.FooterTemplate != null)
{
writer.WriteLine();
writer.WriteBeginTag("tfoot");
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
writer.WriteLine();
writer.WriteBeginTag("tr");
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
writer.WriteLine();
writer.WriteBeginTag("td");
writer.WriteAttribute("colspan", RepeatColumns.ToString());
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
PlaceHolder container = new PlaceHolder();
dataList.FooterTemplate.InstantiateIn(container);
container.DataBind();
container.RenderControl(writer);
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("td");
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("tr");
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("tfoot");
}
if (dataList.ItemTemplate != null)
{
writer.WriteLine();
writer.WriteBeginTag("tbody");
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
int nItemsInColumn = (int)Math.Ceiling(((Double)dataList.Items.Count) / ((Double)RepeatColumns));
for (int iItem = 0; iItem < dataList.Items.Count; iItem++)
{
int nRow = iItem / RepeatColumns;
int nCol = iItem % RepeatColumns;
int nDesiredIndex = iItem;
if (dataList.RepeatDirection == RepeatDirection.Vertical)
{
nDesiredIndex = (nCol * nItemsInColumn) + nRow;
}
if ((iItem % RepeatColumns) == 0)
{
writer.WriteLine();
writer.WriteBeginTag("tr");
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
}
writer.WriteLine();
writer.WriteBeginTag("td");
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
foreach (Control itemCtrl in dataList.Items[iItem].Controls)
{
itemCtrl.RenderControl(writer);
}
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("td");
if (((iItem + 1) % RepeatColumns) == 0)
{
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("tr");
}
}
if ((dataList.Items.Count % RepeatColumns) != 0)
{
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("tr");
}
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("tbody");
}
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("table");
writer.Indent--;
writer.WriteLine();
}
}
else
{
base.RenderContents(writer);
}
}
/// ///////////////////////////////////////////////////////////////////////////////
/// PRIVATE
private void RegisterScripts()
{
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Windows.Media.PathFigureCollection.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Windows.Media
{
public sealed partial class PathFigureCollection : System.Windows.Media.Animation.Animatable, IFormattable, System.Collections.IList, System.Collections.ICollection, IList<PathFigure>, ICollection<PathFigure>, IEnumerable<PathFigure>, System.Collections.IEnumerable
{
    // Auto-generated contract stub: every member is a placeholder that performs
    // no work and yields the default value of its declared return type.

    #region Methods and constructors
    public PathFigureCollection()
    {
    }

    public PathFigureCollection(int capacity)
    {
    }

    public PathFigureCollection(IEnumerable<PathFigure> collection)
    {
    }

    public void Add(PathFigure value)
    {
    }

    public void Clear()
    {
    }

    public PathFigureCollection Clone() => default(PathFigureCollection);

    protected override void CloneCore(System.Windows.Freezable source)
    {
    }

    public PathFigureCollection CloneCurrentValue() => default(PathFigureCollection);

    protected override void CloneCurrentValueCore(System.Windows.Freezable source)
    {
    }

    public bool Contains(PathFigure value) => default(bool);

    public void CopyTo(PathFigure[] array, int index)
    {
    }

    protected override System.Windows.Freezable CreateInstanceCore() => default(System.Windows.Freezable);

    protected override bool FreezeCore(bool isChecking) => default(bool);

    protected override void GetAsFrozenCore(System.Windows.Freezable source)
    {
    }

    protected override void GetCurrentValueAsFrozenCore(System.Windows.Freezable source)
    {
    }

    public PathFigureCollection.Enumerator GetEnumerator() => default(PathFigureCollection.Enumerator);

    public int IndexOf(PathFigure value) => default(int);

    public void Insert(int index, PathFigure value)
    {
    }

    public static PathFigureCollection Parse(string source) => default(PathFigureCollection);

    public bool Remove(PathFigure value) => default(bool);

    public void RemoveAt(int index)
    {
    }

    IEnumerator<PathFigure> System.Collections.Generic.IEnumerable<System.Windows.Media.PathFigure>.GetEnumerator() => default(IEnumerator<PathFigure>);

    void System.Collections.ICollection.CopyTo(Array array, int index)
    {
    }

    System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() => default(System.Collections.IEnumerator);

    int System.Collections.IList.Add(Object value) => default(int);

    bool System.Collections.IList.Contains(Object value) => default(bool);

    int System.Collections.IList.IndexOf(Object value) => default(int);

    void System.Collections.IList.Insert(int index, Object value)
    {
    }

    void System.Collections.IList.Remove(Object value)
    {
    }

    string System.IFormattable.ToString(string format, IFormatProvider provider) => default(string);

    public string ToString(IFormatProvider provider) => default(string);
    #endregion

    #region Properties and indexers
    public int Count => default(int);

    public PathFigure this[int index]
    {
        get { return default(PathFigure); }
        set { }
    }

    bool System.Collections.Generic.ICollection<System.Windows.Media.PathFigure>.IsReadOnly => default(bool);

    bool System.Collections.ICollection.IsSynchronized => default(bool);

    Object System.Collections.ICollection.SyncRoot => default(Object);

    bool System.Collections.IList.IsFixedSize => default(bool);

    bool System.Collections.IList.IsReadOnly => default(bool);

    Object System.Collections.IList.this[int index]
    {
        get { return default(Object); }
        set { }
    }
    #endregion
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Threading.Tasks;
using Microsoft.VisualStudio.Text;
using Xunit;
using Roslyn.Test.Utilities;
namespace Microsoft.VisualStudio.InteractiveWindow.UnitTests
{
/// <summary>
/// Tests for the interactive window's submission-history navigation:
/// HistoryPrevious/HistoryNext, their pattern-filtered overloads, and the
/// HistorySearchPrevious/HistorySearchNext incremental-search variants.
/// Each test drives a real <c>InteractiveWindowTestHost</c> and inspects the
/// current language buffer after every navigation step, so the order of
/// operations within a test is significant.
/// </summary>
public class InteractiveWindowHistoryTests : IDisposable
{
    #region Helpers
    private readonly InteractiveWindowTestHost _testHost;
    private readonly IInteractiveWindow _window;
    private readonly IInteractiveWindowOperations _operations;
    public InteractiveWindowHistoryTests()
    {
        _testHost = new InteractiveWindowTestHost();
        _window = _testHost.Window;
        _operations = _window.Operations;
    }
    void IDisposable.Dispose()
    {
        _testHost.Dispose();
    }
    /// <summary>
    /// Sets the active code to the specified text w/o executing it.
    /// </summary>
    private void SetActiveCode(string text)
    {
        using (var edit = _window.CurrentLanguageBuffer.CreateEdit(EditOptions.None, reiteratedVersionNumber: null, editTag: null))
        {
            edit.Replace(new Span(0, _window.CurrentLanguageBuffer.CurrentSnapshot.Length), text);
            edit.Apply();
        }
    }
    /// <summary>
    /// Inserts and executes each input in order, populating history oldest-first.
    /// </summary>
    private async Task InsertAndExecuteInputs(params string[] inputs)
    {
        foreach (var input in inputs)
        {
            await InsertAndExecuteInput(input).ConfigureAwait(true);
        }
    }
    /// <summary>
    /// Inserts the input into the current submission, verifies it landed, then executes it.
    /// </summary>
    private async Task InsertAndExecuteInput(string input)
    {
        _window.InsertCode(input);
        AssertCurrentSubmission(input);
        await ExecuteInput().ConfigureAwait(true);
    }
    /// <summary>
    /// Executes whatever is currently in the language buffer.
    /// </summary>
    private async Task ExecuteInput()
    {
        await ((InteractiveWindow)_window).ExecuteInputAsync().ConfigureAwait(true);
    }
    /// <summary>
    /// Asserts that the current (uncommitted) submission text equals <paramref name="expected"/>.
    /// </summary>
    private void AssertCurrentSubmission(string expected)
    {
        Assert.Equal(expected, _window.CurrentLanguageBuffer.CurrentSnapshot.GetText());
    }
    #endregion Helpers
    [WpfFact]
    public async Task CheckHistoryPrevious()
    {
        const string inputString = "1 ";
        await InsertAndExecuteInput(inputString).ConfigureAwait(true);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString);
    }
    [WpfFact]
    public async Task CheckHistoryPreviousWithAnEmptySubmission() {
        //submit, submit, submit, up, up, up
        const string inputString1 = "1 ";
        const string inputString2 = " ";
        const string inputString3 = "3 ";
        await InsertAndExecuteInput(inputString1).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString2).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString3).ConfigureAwait(true);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString3);
        //second input was empty, so it wasn't added to history
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
        //has reached the top, no change
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
    }
    [WpfFact]
    public async Task CheckHistoryPreviousNotCircular()
    {
        //submit, submit, up, up, up
        const string inputString1 = "1 ";
        const string inputString2 = "2 ";
        await InsertAndExecuteInput(inputString1).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString2).ConfigureAwait(true);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString2);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
        //this up should not be circular
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
    }
    [WpfFact]
    public async Task CheckHistoryPreviousAfterSubmittingEntryFromHistory()
    {
        //submit, submit, submit, up, up, submit, up, up, up
        const string inputString1 = "1 ";
        const string inputString2 = "2 ";
        const string inputString3 = "3 ";
        await InsertAndExecuteInput(inputString1).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString2).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString3).ConfigureAwait(true);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString3);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString2);
        await ExecuteInput().ConfigureAwait(true);
        //history navigation should start from the last history pointer
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString2);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
        //has reached the top, no change
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
    }
    [WpfFact]
    public async Task CheckHistoryPreviousAfterSubmittingNewEntryWhileNavigatingHistory()
    {
        //submit, submit, up, up, submit new, up, up, up
        const string inputString1 = "1 ";
        const string inputString2 = "2 ";
        const string inputString3 = "3 ";
        await InsertAndExecuteInput(inputString1).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString2).ConfigureAwait(true);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString2);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
        SetActiveCode(inputString3);
        AssertCurrentSubmission(inputString3);
        await ExecuteInput().ConfigureAwait(true);
        //History pointer should be reset. Previous should now bring up last entry
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString3);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString2);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
        //has reached the top, no change
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
    }
    [WpfFact]
    public async Task CheckHistoryNextNotCircular()
    {
        //submit, submit, down, up, down, down
        const string inputString1 = "1 ";
        const string inputString2 = "2 ";
        const string empty = "";
        await InsertAndExecuteInput(inputString1).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString2).ConfigureAwait(true);
        //Next should do nothing as history pointer is uninitialized and there is
        //no next entry. Buffer should be empty
        _operations.HistoryNext();
        AssertCurrentSubmission(empty);
        //Go back once entry
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString2);
        //Go fwd one entry - should do nothing as history pointer is at last entry
        //buffer should have same value as before
        _operations.HistoryNext();
        AssertCurrentSubmission(inputString2);
        //Next should again do nothing as it is the last item, buffer should have the same value
        _operations.HistoryNext();
        AssertCurrentSubmission(inputString2);
        //This is to make sure the window doesn't crash
        await ExecuteInput().ConfigureAwait(true);
        AssertCurrentSubmission(empty);
    }
    [WpfFact]
    public async Task CheckHistoryNextAfterSubmittingEntryFromHistory()
    {
        //submit, submit, submit, up, up, submit, down, down, down
        const string inputString1 = "1 ";
        const string inputString2 = "2 ";
        const string inputString3 = "3 ";
        await InsertAndExecuteInput(inputString1).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString2).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString3).ConfigureAwait(true);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString3);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString2);
        //submit inputString2 again. Should be added at the end of history
        await ExecuteInput().ConfigureAwait(true);
        //history navigation should start from the last history pointer
        _operations.HistoryNext();
        AssertCurrentSubmission(inputString3);
        //This next should take us to the InputString2 which was resubmitted
        _operations.HistoryNext();
        AssertCurrentSubmission(inputString2);
        //has reached the top, no change
        _operations.HistoryNext();
        AssertCurrentSubmission(inputString2);
    }
    [WpfFact]
    public async Task CheckHistoryNextAfterSubmittingNewEntryWhileNavigatingHistory()
    {
        //submit, submit, up, up, submit new, down, up
        const string inputString1 = "1 ";
        const string inputString2 = "2 ";
        const string inputString3 = "3 ";
        const string empty = "";
        await InsertAndExecuteInput(inputString1).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString2).ConfigureAwait(true);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString2);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString1);
        SetActiveCode(inputString3);
        AssertCurrentSubmission(inputString3);
        await ExecuteInput().ConfigureAwait(true);
        //History pointer should be reset. next should do nothing
        _operations.HistoryNext();
        AssertCurrentSubmission(empty);
        _operations.HistoryPrevious();
        AssertCurrentSubmission(inputString3);
    }
    [WpfFact]
    public async Task CheckUncommittedInputAfterNavigatingHistory()
    {
        //submit, submit, up, up, submit new, down, up
        const string inputString1 = "1 ";
        const string inputString2 = "2 ";
        const string uncommittedInput = nameof(uncommittedInput);
        await InsertAndExecuteInput(inputString1).ConfigureAwait(true);
        await InsertAndExecuteInput(inputString2).ConfigureAwait(true);
        //Add uncommitted input
        SetActiveCode(uncommittedInput);
        //Navigate history. This should save uncommitted input
        _operations.HistoryPrevious();
        //Navigate to next item at the end of history.
        //This should bring back uncommitted input
        _operations.HistoryNext();
        AssertCurrentSubmission(uncommittedInput);
    }
    [WpfFact]
    public async Task CheckHistoryPreviousAfterReset()
    {
        const string resetCommand1 = "#reset";
        const string resetCommand2 = "#reset ";
        await InsertAndExecuteInput(resetCommand1).ConfigureAwait(true);
        await InsertAndExecuteInput(resetCommand2).ConfigureAwait(true);
        _operations.HistoryPrevious(); AssertCurrentSubmission(resetCommand2);
        _operations.HistoryPrevious(); AssertCurrentSubmission(resetCommand1);
        _operations.HistoryPrevious(); AssertCurrentSubmission(resetCommand1);
    }
    [WpfFact]
    public async Task TestHistoryPrevious()
    {
        await InsertAndExecuteInputs("1", "2", "3").ConfigureAwait(true);
        _operations.HistoryPrevious(); AssertCurrentSubmission("3");
        _operations.HistoryPrevious(); AssertCurrentSubmission("2");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
    }
    [WpfFact]
    public async Task TestHistoryNext()
    {
        await InsertAndExecuteInputs("1", "2", "3").ConfigureAwait(true);
        SetActiveCode("4");
        _operations.HistoryNext(); AssertCurrentSubmission("4");
        _operations.HistoryNext(); AssertCurrentSubmission("4");
        _operations.HistoryPrevious(); AssertCurrentSubmission("3");
        _operations.HistoryPrevious(); AssertCurrentSubmission("2");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistoryNext(); AssertCurrentSubmission("2");
        _operations.HistoryNext(); AssertCurrentSubmission("3");
        _operations.HistoryNext(); AssertCurrentSubmission("4");
        _operations.HistoryNext(); AssertCurrentSubmission("4");
    }
    [WpfFact]
    public async Task TestHistoryPreviousWithPattern_NoMatch()
    {
        await InsertAndExecuteInputs("123", "12", "1").ConfigureAwait(true);
        _operations.HistoryPrevious("4"); AssertCurrentSubmission("");
        _operations.HistoryPrevious("4"); AssertCurrentSubmission("");
    }
    [WpfFact]
    public async Task TestHistoryPreviousWithPattern_PatternMaintained()
    {
        await InsertAndExecuteInputs("123", "12", "1").ConfigureAwait(true);
        _operations.HistoryPrevious("12"); AssertCurrentSubmission("12"); // Skip over non-matching entry.
        _operations.HistoryPrevious("12"); AssertCurrentSubmission("123");
        _operations.HistoryPrevious("12"); AssertCurrentSubmission("123");
    }
    [WpfFact]
    public async Task TestHistoryPreviousWithPattern_PatternDropped()
    {
        await InsertAndExecuteInputs("1", "2", "3").ConfigureAwait(true);
        _operations.HistoryPrevious("2"); AssertCurrentSubmission("2"); // Skip over non-matching entry.
        _operations.HistoryPrevious(null); AssertCurrentSubmission("1"); // Pattern isn't passed, so return to normal iteration.
        _operations.HistoryPrevious(null); AssertCurrentSubmission("1");
    }
    [WpfFact]
    public async Task TestHistoryPreviousWithPattern_PatternChanged()
    {
        await InsertAndExecuteInputs("10", "20", "15", "25").ConfigureAwait(true);
        _operations.HistoryPrevious("1"); AssertCurrentSubmission("15"); // Skip over non-matching entry.
        _operations.HistoryPrevious("2"); AssertCurrentSubmission("20"); // Skip over non-matching entry.
        _operations.HistoryPrevious("2"); AssertCurrentSubmission("20");
    }
    [WpfFact]
    public async Task TestHistoryNextWithPattern_NoMatch()
    {
        await InsertAndExecuteInputs("start", "1", "12", "123").ConfigureAwait(true);
        SetActiveCode("end");
        _operations.HistoryPrevious(); AssertCurrentSubmission("123");
        _operations.HistoryPrevious(); AssertCurrentSubmission("12");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistoryPrevious(); AssertCurrentSubmission("start");
        _operations.HistoryNext("4"); AssertCurrentSubmission("end");
        _operations.HistoryNext("4"); AssertCurrentSubmission("end");
    }
    [WpfFact]
    public async Task TestHistoryNextWithPattern_PatternMaintained()
    {
        await InsertAndExecuteInputs("start", "1", "12", "123").ConfigureAwait(true);
        SetActiveCode("end");
        _operations.HistoryPrevious(); AssertCurrentSubmission("123");
        _operations.HistoryPrevious(); AssertCurrentSubmission("12");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistoryPrevious(); AssertCurrentSubmission("start");
        _operations.HistoryNext("12"); AssertCurrentSubmission("12"); // Skip over non-matching entry.
        _operations.HistoryNext("12"); AssertCurrentSubmission("123");
        _operations.HistoryNext("12"); AssertCurrentSubmission("end");
    }
    [WpfFact]
    public async Task TestHistoryNextWithPattern_PatternDropped()
    {
        await InsertAndExecuteInputs("start", "3", "2", "1").ConfigureAwait(true);
        SetActiveCode("end");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistoryPrevious(); AssertCurrentSubmission("2");
        _operations.HistoryPrevious(); AssertCurrentSubmission("3");
        _operations.HistoryPrevious(); AssertCurrentSubmission("start");
        _operations.HistoryNext("2"); AssertCurrentSubmission("2"); // Skip over non-matching entry.
        _operations.HistoryNext(null); AssertCurrentSubmission("1"); // Pattern isn't passed, so return to normal iteration.
        _operations.HistoryNext(null); AssertCurrentSubmission("end");
    }
    [WpfFact]
    public async Task TestHistoryNextWithPattern_PatternChanged()
    {
        await InsertAndExecuteInputs("start", "25", "15", "20", "10").ConfigureAwait(true);
        SetActiveCode("end");
        _operations.HistoryPrevious(); AssertCurrentSubmission("10");
        _operations.HistoryPrevious(); AssertCurrentSubmission("20");
        _operations.HistoryPrevious(); AssertCurrentSubmission("15");
        _operations.HistoryPrevious(); AssertCurrentSubmission("25");
        _operations.HistoryPrevious(); AssertCurrentSubmission("start");
        _operations.HistoryNext("1"); AssertCurrentSubmission("15"); // Skip over non-matching entry.
        _operations.HistoryNext("2"); AssertCurrentSubmission("20"); // Skip over non-matching entry.
        _operations.HistoryNext("2"); AssertCurrentSubmission("end");
    }
    [WpfFact]
    public async Task TestHistorySearchPrevious()
    {
        await InsertAndExecuteInputs("123", "12", "1").ConfigureAwait(true);
        // Default search string is empty.
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("1"); // Pattern is captured before this step.
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("12");
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("123");
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("123");
    }
    [WpfFact]
    public async Task TestHistorySearchPreviousWithPattern()
    {
        await InsertAndExecuteInputs("123", "12", "1").ConfigureAwait(true);
        SetActiveCode("12");
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("12"); // Pattern is captured before this step.
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("123");
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("123");
    }
    [WpfFact]
    public async Task TestHistorySearchNextWithPattern()
    {
        await InsertAndExecuteInputs("12", "123", "12", "1").ConfigureAwait(true);
        SetActiveCode("end");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistoryPrevious(); AssertCurrentSubmission("12");
        _operations.HistoryPrevious(); AssertCurrentSubmission("123");
        _operations.HistoryPrevious(); AssertCurrentSubmission("12");
        _operations.HistorySearchNext(); AssertCurrentSubmission("123"); // Pattern is captured before this step.
        _operations.HistorySearchNext(); AssertCurrentSubmission("12");
        _operations.HistorySearchNext(); AssertCurrentSubmission("end");
    }
    [WpfFact]
    public async Task TestHistoryPreviousAndSearchPrevious()
    {
        await InsertAndExecuteInputs("200", "100", "30", "20", "10", "2", "1").ConfigureAwait(true);
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("10"); // Pattern is captured before this step.
        _operations.HistoryPrevious(); AssertCurrentSubmission("20"); // NB: Doesn't match pattern.
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("100"); // NB: Reuses existing pattern.
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("100");
        _operations.HistoryPrevious(); AssertCurrentSubmission("200");
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("200"); // No-op results in non-matching history entry after SearchPrevious.
    }
    [WpfFact]
    public async Task TestHistoryPreviousAndSearchPrevious_ExplicitPattern()
    {
        await InsertAndExecuteInputs("200", "100", "30", "20", "10", "2", "1").ConfigureAwait(true);
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("10"); // Pattern is captured before this step.
        _operations.HistoryPrevious("2"); AssertCurrentSubmission("20"); // NB: Doesn't match pattern.
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("100"); // NB: Reuses existing pattern.
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("100");
        _operations.HistoryPrevious("2"); AssertCurrentSubmission("200");
        _operations.HistorySearchPrevious(); AssertCurrentSubmission("200"); // No-op results in non-matching history entry after SearchPrevious.
    }
    [WpfFact]
    public async Task TestHistoryNextAndSearchNext()
    {
        await InsertAndExecuteInputs("1", "2", "10", "20", "30", "100", "200").ConfigureAwait(true);
        SetActiveCode("4");
        _operations.HistoryPrevious(); AssertCurrentSubmission("200");
        _operations.HistoryPrevious(); AssertCurrentSubmission("100");
        _operations.HistoryPrevious(); AssertCurrentSubmission("30");
        _operations.HistoryPrevious(); AssertCurrentSubmission("20");
        _operations.HistoryPrevious(); AssertCurrentSubmission("10");
        _operations.HistoryPrevious(); AssertCurrentSubmission("2");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistorySearchNext(); AssertCurrentSubmission("10"); // Pattern is captured before this step.
        _operations.HistoryNext(); AssertCurrentSubmission("20"); // NB: Doesn't match pattern.
        _operations.HistorySearchNext(); AssertCurrentSubmission("100"); // NB: Reuses existing pattern.
        _operations.HistorySearchNext(); AssertCurrentSubmission("4"); // Restoring input results in non-matching history entry after SearchNext.
        _operations.HistoryNext(); AssertCurrentSubmission("4");
    }
    [WpfFact]
    public async Task TestHistoryNextAndSearchNext_ExplicitPattern()
    {
        await InsertAndExecuteInputs("1", "2", "10", "20", "30", "100", "200").ConfigureAwait(true);
        SetActiveCode("4");
        _operations.HistoryPrevious(); AssertCurrentSubmission("200");
        _operations.HistoryPrevious(); AssertCurrentSubmission("100");
        _operations.HistoryPrevious(); AssertCurrentSubmission("30");
        _operations.HistoryPrevious(); AssertCurrentSubmission("20");
        _operations.HistoryPrevious(); AssertCurrentSubmission("10");
        _operations.HistoryPrevious(); AssertCurrentSubmission("2");
        _operations.HistoryPrevious(); AssertCurrentSubmission("1");
        _operations.HistorySearchNext(); AssertCurrentSubmission("10"); // Pattern is captured before this step.
        _operations.HistoryNext("2"); AssertCurrentSubmission("20"); // NB: Doesn't match pattern.
        _operations.HistorySearchNext(); AssertCurrentSubmission("100"); // NB: Reuses existing pattern.
        _operations.HistorySearchNext(); AssertCurrentSubmission("4"); // Restoring input results in non-matching history entry after SearchNext.
        _operations.HistoryNext("2"); AssertCurrentSubmission("4");
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
namespace System
{
// The class designed as to keep minimal the working set of Uri class.
// The idea is to stay with static helper methods and strings
internal static class IPv4AddressHelper
{
// Sentinel returned by the numeric parsers when the input is not a valid IPv4 address.
internal const long Invalid = -1;
// Note: the native parser cannot handle MaxIPv4Value, only MaxIPv4Value - 1
private const long MaxIPv4Value = UInt32.MaxValue;
// Radixes recognized by the non-canonical parser (e.g. "010" octal, "10" decimal, "0x10" hex).
private const int Octal = 8;
private const int Decimal = 10;
private const int Hex = 16;
// A dotted-quad IPv4 address always has exactly four labels (octets).
private const int NumberOfLabels = 4;
// methods
// Parse and canonicalize
/// <summary>
/// Parses the substring [start, end) as an IPv4 address and returns it in
/// canonical dotted-quad form; <paramref name="isLoopback"/> is set from the parse.
/// </summary>
internal static string ParseCanonicalName(string str, int start, int end, ref bool isLoopback)
{
    unsafe
    {
        // One byte per dotted-quad label.
        byte* labels = stackalloc byte[NumberOfLabels];
        isLoopback = Parse(str, labels, start, end);
        return string.Concat(
            labels[0].ToString(), ".",
            labels[1].ToString(), ".",
            labels[2].ToString(), ".",
            labels[3].ToString());
    }
}
// Only called from the IPv6Helper, only parse the canonical format
/// <summary>
/// Parses the canonical-form IPv4 substring [start, end) (used only by the IPv6
/// helper) and packs the four octets into a single big-endian 32-bit value.
/// </summary>
internal static int ParseHostNumber(string str, int start, int end)
{
    unsafe
    {
        byte* octets = stackalloc byte[NumberOfLabels];
        ParseCanonical(str, octets, start, end);

        // Accumulate octets most-significant first: equivalent to
        // (o0 << 24) + (o1 << 16) + (o2 << 8) + o3.
        int host = octets[0];
        host = (host << 8) + octets[1];
        host = (host << 8) + octets[2];
        host = (host << 8) + octets[3];
        return host;
    }
}
//
// IsValid
//
// Performs IsValid on a substring. Updates the index to where we
// believe the IPv4 address ends
//
// Inputs:
// <argument> name
// string containing possible IPv4 address
//
// <argument> start
// offset in <name> to start checking for IPv4 address
//
// <argument> end
// offset in <name> of the last character we can touch in the check
//
// Outputs:
// <argument> end
// index of last character in <name> we checked
//
// <argument> allowIPv6
// enables parsing IPv4 addresses embedded in IPv6 address literals
//
// <argument> notImplicitFile
// do not consider this URI holding an implicit filename
//
// <argument> unknownScheme
// the check is made on an unknown scheme (suppress IPv4 canonicalization)
//
// Assumes:
// The address string is terminated by either
// end of the string, characters ':' '/' '\' '?'
//
//
// Returns:
// bool
//
// Throws:
// Nothing
//
//Remark: MUST NOT be used unless all input indexes are verified and trusted.
/// <summary>
/// Validates a candidate IPv4 address in name[start..end), updating
/// <paramref name="end"/> to where the address was found to stop.
/// MUST NOT be used unless all input indexes are verified and trusted.
/// </summary>
internal static unsafe bool IsValid(char* name, int start, ref int end, bool allowIPv6, bool notImplicitFile, bool unknownScheme)
{
    // IPv6-embedded addresses must be canonical, and unknown schemes skip
    // IPv4 canonicalization; everything else accepts the legacy non-canonical forms.
    return (allowIPv6 || unknownScheme)
        ? IsValidCanonical(name, start, ref end, allowIPv6, notImplicitFile)
        : ParseNonCanonical(name, start, ref end, notImplicitFile) != Invalid;
}
//
// IsValidCanonical
//
// Checks if the substring is a valid canonical IPv4 address or an IPv4 address embedded in an IPv6 literal
// This is an attempt to parse ABNF productions from RFC3986, Section 3.2.2:
// IP-literal = "[" ( IPv6address / IPvFuture ) "]"
// IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
// dec-octet = DIGIT ; 0-9
// / %x31-39 DIGIT ; 10-99
// / "1" 2DIGIT ; 100-199
// / "2" %x30-34 DIGIT ; 200-249
// / "25" %x30-35 ; 250-255
//
internal static unsafe bool IsValidCanonical(char* name, int start, ref int end, bool allowIPv6, bool notImplicitFile)
{
    int dots = 0;              // number of '.' separators seen (must end at 3)
    int number = 0;            // value of the octet currently being parsed
    bool haveNumber = false;   // at least one digit seen in the current octet
    bool firstCharIsZero = false; // current octet started with '0' (leading-zero check)
    while (start < end)
    {
        char ch = name[start];
        if (allowIPv6)
        {
            // for ipv4 inside ipv6 the terminator is either ScopeId, prefix or ipv6 terminator
            if (ch == ']' || ch == '/' || ch == '%') break;
        }
        else if (ch == '/' || ch == '\\' || (notImplicitFile && (ch == ':' || ch == '?' || ch == '#')))
        {
            break;
        }
        if (ch <= '9' && ch >= '0')
        {
            if (!haveNumber && (ch == '0'))
            {
                if ((start + 1 < end) && name[start + 1] == '0')
                {
                    // 00 is not allowed as a prefix.
                    return false;
                }
                firstCharIsZero = true;
            }
            haveNumber = true;
            number = number * 10 + (name[start] - '0');
            if (number > 255)
            {
                // dec-octet must fit in one byte.
                return false;
            }
        }
        else if (ch == '.')
        {
            // Reject a zero-prefixed octet such as "01" at the dot boundary.
            if (!haveNumber || (number > 0 && firstCharIsZero))
            {
                // 0 is not allowed to prefix a number.
                return false;
            }
            ++dots;
            haveNumber = false;
            number = 0;
            firstCharIsZero = false;
        }
        else
        {
            // Any other character is invalid inside a canonical IPv4 address.
            return false;
        }
        ++start;
    }
    // Valid only as exactly four octets (three dots) with a final digit run.
    // NOTE(review): the leading-zero check above fires only at '.' boundaries,
    // so a zero-prefixed FINAL octet (e.g. "1.1.1.01") passes here — confirm
    // this matches the intended canonical grammar.
    bool res = (dots == 3) && haveNumber;
    if (res)
    {
        // Report where the address ended (terminator or end of input).
        end = start;
    }
    return res;
}
// Parse any canonical or non-canonical IPv4 formats and return a long between 0 and MaxIPv4Value.
// Return Invalid (-1) for failures.
// If the address has less than three dots, only the rightmost section is assumed to contain the combined value for
// the missing sections: 0xFF00FFFF == 0xFF.0x00.0xFF.0xFF == 0xFF.0xFFFF
internal static unsafe long ParseNonCanonical(char* name, int start, ref int end, bool notImplicitFile)
{
    int numberBase = Decimal;   // radix of the section currently being parsed
    char ch;
    long[] parts = new long[4]; // completed dotted sections (parts[dotCount] set at the end)
    long currentValue = 0;
    bool atLeastOneChar = false; // current section contains at least one digit
    // Parse one dotted section at a time
    int dotCount = 0; // Limit 3
    int current = start;
    for (; current < end; current++)
    {
        ch = name[current];
        currentValue = 0;
        // Figure out what base this section is in
        numberBase = Decimal;
        if (ch == '0')
        {
            // A bare leading '0' selects octal; "0x"/"0X" selects hex.
            numberBase = Octal;
            current++;
            atLeastOneChar = true; // the '0' itself is a valid digit ("0" alone is a section)
            if (current < end)
            {
                ch = name[current];
                if (ch == 'x' || ch == 'X')
                {
                    numberBase = Hex;
                    current++;
                    atLeastOneChar = false; // "0x" with no digits is not a number yet
                }
            }
        }
        // Parse this section
        for (; current < end; current++)
        {
            ch = name[current];
            int digitValue;
            // '0'-'9' are accepted for both Decimal and Hex; octal stops at '8'/'9'.
            if ((numberBase == Decimal || numberBase == Hex) && '0' <= ch && ch <= '9')
            {
                digitValue = ch - '0';
            }
            else if (numberBase == Octal && '0' <= ch && ch <= '7')
            {
                digitValue = ch - '0';
            }
            else if (numberBase == Hex && 'a' <= ch && ch <= 'f')
            {
                digitValue = ch + 10 - 'a';
            }
            else if (numberBase == Hex && 'A' <= ch && ch <= 'F')
            {
                digitValue = ch + 10 - 'A';
            }
            else
            {
                break; // Invalid/terminator
            }
            currentValue = (currentValue * numberBase) + digitValue;
            if (currentValue > MaxIPv4Value) // Overflow
            {
                return Invalid;
            }
            atLeastOneChar = true;
        }
        if (current < end && name[current] == '.')
        {
            if (dotCount >= 3 // Max of 3 dots and 4 segments
                || !atLeastOneChar // No empty segments: 1...1
                // Only the last segment can be more than 255 (if there are less than 3 dots)
                || currentValue > 0xFF)
            {
                return Invalid;
            }
            parts[dotCount] = currentValue;
            dotCount++;
            atLeastOneChar = false;
            continue;
        }
        // We don't get here unless We find an invalid character or a terminator
        break;
    }
    // Terminators
    if (!atLeastOneChar)
    {
        return Invalid; // Empty trailing segment: 1.1.1.
    }
    else if (current >= end)
    {
        // end of string, allowed
    }
    else if ((ch = name[current]) == '/' || ch == '\\' || (notImplicitFile && (ch == ':' || ch == '?' || ch == '#')))
    {
        // Stopped at a URI component delimiter: report where the address ended.
        end = current;
    }
    else
    {
        // not a valid terminating character
        return Invalid;
    }
    // The final (rightmost) section may carry the combined value of all missing sections.
    parts[dotCount] = currentValue;
    // Parsed, reassemble and check for overflows
    switch (dotCount)
    {
        case 0: // 0xFFFFFFFF
            if (parts[0] > MaxIPv4Value)
            {
                return Invalid;
            }
            return parts[0];
        case 1: // 0xFF.0xFFFFFF
            if (parts[1] > 0xffffff)
            {
                return Invalid;
            }
            return (parts[0] << 24) | (parts[1] & 0xffffff);
        case 2: // 0xFF.0xFF.0xFFFF
            if (parts[2] > 0xffff)
            {
                return Invalid;
            }
            return (parts[0] << 24) | ((parts[1] & 0xff) << 16) | (parts[2] & 0xffff);
        case 3: // 0xFF.0xFF.0xFF.0xFF
            if (parts[3] > 0xff)
            {
                return Invalid;
            }
            return (parts[0] << 24) | ((parts[1] & 0xff) << 16) | ((parts[2] & 0xff) << 8) | (parts[3] & 0xff);
        default:
            return Invalid;
    }
}
//
// Parse
//
// Convert this IPv4 address into a sequence of 4 8-bit numbers
//
// Converts the (already validated) address into a sequence of 4 big-endian
// 8-bit labels; returns true when the result is a 127.x.x.x loopback address.
private static unsafe bool Parse(string name, byte* numbers, int start, int end)
{
    fixed (char* chars = name)
    {
        // The parser may stop before 'end' (e.g. at a port separator), so let it
        // track its own end index without disturbing the caller's value.
        int parsedEnd = end;
        long value = IPv4AddressHelper.ParseNonCanonical(chars, start, ref parsedEnd, true);
        Debug.Assert(value != Invalid, "Failed to parse after already validated: " + name);
        unchecked
        {
            for (int i = 0; i < NumberOfLabels; i++)
            {
                numbers[i] = (byte)(value >> (8 * (NumberOfLabels - 1 - i)));
            }
        }
    }
    return numbers[0] == 127;
}
// Assumes:
// <Name> has been validated and contains only decimal digits in groups
// of 8-bit numbers and the characters '.'
// Address may terminate with ':' or with the end of the string
//
// Converts a validated canonical dotted-decimal address into four 8-bit labels.
// Each section may terminate at '.', ':' (port) or the end of the substring.
// Returns true when the address is a 127.x.x.x loopback address.
private static unsafe bool ParseCanonical(string name, byte* numbers, int start, int end)
{
    for (int label = 0; label < NumberOfLabels; ++label)
    {
        byte value = 0;
        while (start < end)
        {
            char c = name[start];
            if (c == '.' || c == ':')
                break;
            // Input is pre-validated, so every character here is a decimal digit.
            value = (byte)(value * 10 + (byte)(c - '0'));
            ++start;
        }
        numbers[label] = value;
        ++start; // skip the separator (harmlessly walks past the end on the last label)
    }
    return numbers[0] == 127;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Akka.Actor;
using Akka.Event;
using Akka.Remote.Transport.Streaming.Utils;
using Akka.Util.Internal;
using Google.ProtocolBuffers;
namespace Akka.Remote.Transport.Streaming
{
// This could be optimized with unsafe code
internal static class LittleEndian
{
    // Stores the four bytes of <paramref name="value"/> into the buffer,
    // least-significant byte first.
    public static void WriteInt32(int value, byte[] buffer, int offset)
    {
        unchecked
        {
            buffer[offset] = (byte)value;
            buffer[offset + 1] = (byte)(value >> 8);
            buffer[offset + 2] = (byte)(value >> 16);
            buffer[offset + 3] = (byte)(value >> 24);
        }
    }
    // Reassembles an int from four buffer bytes written least-significant first.
    public static int ReadInt32(byte[] buffer, int offset)
    {
        int result = buffer[offset + 3];
        result = (result << 8) | buffer[offset + 2];
        result = (result << 8) | buffer[offset + 1];
        result = (result << 8) | buffer[offset];
        return result;
    }
}
/// <summary>
/// An <see cref="AssociationHandle"/> that ships Akka.Remote payloads over a raw
/// <see cref="Stream"/>. Wire format: each frame is a 4-byte little-endian payload
/// length followed by the payload bytes. A dedicated read loop and write loop run
/// for the lifetime of the association; <see cref="StopRunner"/> coordinates shutdown.
/// </summary>
public class StreamAssociationHandle : AssociationHandle
{
    private readonly StreamTransportSettings _settings;
    private readonly Stream _stream;
    // Opaque transport-specific state; handed back to the settings callbacks on close/shutdown.
    private readonly object _state;
    // Outgoing payloads queued by Write() and drained by WriteLoop().
    private readonly AsyncQueue<ByteString> _writeQueue;
    private IHandleEventListener _eventListener;
    private readonly byte[] _readBuffer;
    private readonly byte[] _writeBuffer;
    // Completed with true by Initialize(); completed with false by StopRunner() so
    // that loops waiting on it before initialization can exit without touching the stream.
    private readonly TaskCompletionSource<bool> _initialized;
    // First completion wins: true = graceful stop requested/observed, false = a loop failed.
    private readonly TaskCompletionSource<bool> _disassociated;
    private readonly Task<bool> _stopped;
    private readonly Task<bool> _writeLoop;
    private readonly Task<bool> _readLoop;
    /// <summary>
    /// Returns true if gracefully stopped, otherwise false.
    /// </summary>
    public Task<bool> Stopped => _stopped;
    public StreamAssociationHandle(StreamTransportSettings settings, Stream stream, Address localAddress, Address remoteAddress, object state)
        : base(localAddress, remoteAddress)
    {
        _settings = settings;
        _stream = stream;
        _state = state;
        _writeQueue = new AsyncQueue<ByteString>(settings.RemoteDispatcher);
        _readBuffer = new byte[settings.StreamReadBufferSize];
        _writeBuffer = new byte[settings.StreamWriteBufferSize];
        _initialized = new TaskCompletionSource<bool>();
        _disassociated = new TaskCompletionSource<bool>();
        // The loops start immediately but both await _initialized before doing any I/O.
        _stopped = Task.Run(() => StopRunner());
        _writeLoop = Task.Run(() => WriteLoop());
        _readLoop = Task.Run(() => ReadLoop());
    }
    // Called by the transport once a listener is available; unblocks both loops.
    internal void Initialize(IHandleEventListener eventListener)
    {
        _eventListener = eventListener;
        // Whichever loop finishes first decides whether the stop was graceful.
        _readLoop.ContinueWith(task =>
        {
            bool gracefulStop = task.Result;
            //Log(gracefulStop, "Read loop completed");
            _disassociated.TrySetResult(gracefulStop);
        }, CancellationToken.None, TaskContinuationOptions.None, TaskScheduler.Default);
        _writeLoop.ContinueWith(task =>
        {
            bool gracefulStop = task.Result;
            //Log(gracefulStop, "Write loop completed");
            _disassociated.TrySetResult(gracefulStop);
        }, CancellationToken.None, TaskContinuationOptions.None, TaskScheduler.Default);
        _initialized.TrySetResult(true);
    }
    // Convenience overload: graceful events log as Info, failures as Warning.
    private void Log(bool isInfo, string message)
    {
        Log(isInfo ? LogLevel.InfoLevel : LogLevel.WarningLevel, message);
    }
    private void Log(LogLevel level, string message)
    {
        _settings.Log.Log(level, "{0} - ({1}:{2} -> {3}:{4})", message, LocalAddress.Host, LocalAddress.Port, RemoteAddress.Host, RemoteAddress.Port);
    }
    // Reads length-prefixed frames from the stream and notifies the event listener.
    // Returns true on a clean remote close (ReadAsync returned 0), false on failure.
    private async Task<bool> ReadLoop()
    {
        try
        {
            if (!await _initialized.Task)
                return false;
            byte[] readBuffer = _readBuffer;
            int bufferLength = readBuffer.Length;
            int readBytes = 0; // number of valid bytes currently in readBuffer
            int readIndex = 0; // index of the next unconsumed byte
            while (true)
            {
                int available = readBytes - readIndex;
                if (available < sizeof (int))
                {
                    // To simplify the message length read logic, we enforce the length bytes to be whole in the buffer
                    // Copy the partial length at the start of the buffer
                    if (available != 0)
                        Buffer.BlockCopy(readBuffer, readIndex, readBuffer, 0, available);
                    readIndex = 0;
                    readBytes = await _stream.ReadAsync(readBuffer, available, bufferLength - available).ConfigureAwait(false);
                    if (readBytes == 0)
                        return true;
                    // Adjust readBytes to include the partial length that we copied earlier
                    readBytes += available;
                    // NOTE(review): a single ReadAsync may still leave fewer than 4 valid
                    // bytes in the buffer, in which case the length read below consumes
                    // stale bytes — confirm short reads cannot occur with the streams used here.
                }
                int payloadLength = LittleEndian.ReadInt32(readBuffer, readIndex);
                readIndex += sizeof (int);
                byte[] payloadBuffer = null;
                // Reject negative or absurdly large frames outright.
                if (payloadLength < 0 || payloadLength > _settings.FrameSizeHardLimit)
                {
                    //TODO Log Error
                    return false;
                }
                if (payloadLength > _settings.MaximumFrameSize)
                {
                    // This could happen if the MaximumFrameSize is configured bigger on the remote association.
                    // Normally they are dropped before getting sent so we don't receive them.
                    #region Skip the bytes
                    //TODO Log Error
                    int payloadOffset = 0;
                    while (payloadOffset < payloadLength)
                    {
                        if (readIndex == readBytes)
                        {
                            readIndex = 0;
                            readBytes = await _stream.ReadAsync(readBuffer, 0, bufferLength).ConfigureAwait(false);
                            if (readBytes == 0)
                                return true;
                        }
                        available = Math.Min(readBytes - readIndex, payloadLength - payloadOffset);
                        readIndex += available;
                        payloadOffset += available;
                    }
                    #endregion
                }
                else if (payloadLength > _settings.ChunkedReadThreshold && payloadLength > bufferLength - readIndex)
                {
                    // The payload is chunked as a protection against denial of service. Otherwise a malicious remote endpoint
                    // could make the node allocate big buffers without sending the data first.
                    #region Read Chunked
                    //TODO Could allow direct read if Socket.Available include the full payload length
                    //TODO The chunked buffers could be pooled (See RecyclableMemoryStream)
                    var chunks = new List<byte[]>();
                    int payloadOffset = 0;
                    while (payloadOffset < payloadLength)
                    {
                        if (readIndex == readBytes)
                        {
                            readIndex = 0;
                            readBytes = await _stream.ReadAsync(readBuffer, 0, bufferLength).ConfigureAwait(false);
                            if (readBytes == 0)
                                return true;
                        }
                        available = Math.Min(readBytes - readIndex, payloadLength - payloadOffset);
                        byte[] chunk = new byte[available];
                        Buffer.BlockCopy(readBuffer, readIndex, chunk, 0, available);
                        chunks.Add(chunk);
                        readIndex += available;
                        payloadOffset += available;
                    }
                    // Full payload received: assemble the chunks into one contiguous buffer.
                    payloadOffset = 0;
                    payloadBuffer = new byte[payloadLength];
                    foreach (byte[] chunk in chunks)
                    {
                        int chunkLength = chunk.Length;
                        Buffer.BlockCopy(chunk, 0, payloadBuffer, payloadOffset, chunkLength);
                        payloadOffset += chunkLength;
                    }
                    #endregion
                }
                else
                {
                    // Small payload: copy straight from the read buffer, refilling as needed.
                    payloadBuffer = new byte[payloadLength];
                    int payloadOffset = 0;
                    while (payloadOffset < payloadLength)
                    {
                        if (readIndex == readBytes)
                        {
                            readIndex = 0;
                            readBytes = await _stream.ReadAsync(readBuffer, 0, bufferLength).ConfigureAwait(false);
                            if (readBytes == 0)
                                return true;
                        }
                        available = Math.Min(readBytes - readIndex, payloadLength - payloadOffset);
                        Buffer.BlockCopy(readBuffer, readIndex, payloadBuffer, payloadOffset, available);
                        readIndex += available;
                        payloadOffset += available;
                    }
                }
                // payloadBuffer is null when the oversized frame was skipped above.
                if (payloadBuffer != null)
                {
                    //Log(LogLevel.DebugLevel, "Payload received");
                    var payload = new InboundPayload(ByteString.Unsafe.FromBytes(payloadBuffer));
                    _eventListener.Notify(payload);
                }
            }
        }
        catch (Exception)
        {
            //TODO Log
            return false;
        }
    }
    // Queues a payload for the write loop. Always reports success; oversized
    // payloads are dropped later by WriteLoop.
    public override bool Write(ByteString payload)
    {
        //Log(LogLevel.DebugLevel, "Queuing outgoing payload");
        _writeQueue.Enqueue(payload);
        //TODO Implement backoff based on total queued message length
        // But make sure the backoff is not buggy in EndpointWriter first.
        return true;
    }
    // Drains the write queue, batching frames into _writeBuffer and flushing when the
    // buffer fills or the queue runs dry. Returns true on graceful completion
    // (queue completed/cancelled), false on failure.
    private async Task<bool> WriteLoop()
    {
        try
        {
            if (!await _initialized.Task)
                return false;
            byte[] writeBuffer = _writeBuffer;
            int bufferLength = writeBuffer.Length;
            int bufferOffset = 0; // number of buffered-but-unflushed bytes
            while (true)
            {
                var item = _writeQueue.DequeueAsync();
                ByteString payload;
                if (item.IsCompleted)
                {
                    if (item.IsCanceled)
                    {
                        if (bufferOffset > 0)
                        {
                            // We are closing, flush the buffer
                            await _stream.WriteAsync(writeBuffer, 0, bufferOffset).ConfigureAwait(false);
                        }
                        break;
                    }
                    payload = item.Value;
                }
                else
                {
                    if (bufferOffset > 0)
                    {
                        // We are about to wait for data, flush the buffer
                        await _stream.WriteAsync(writeBuffer, 0, bufferOffset).ConfigureAwait(false);
                        bufferOffset = 0;
                    }
                    await item;
                    if (item.IsCanceled)
                        break;
                    payload = item.Value;
                }
                if (bufferLength - bufferOffset < sizeof (int))
                {
                    // Not enough space to write payload length, flush the buffer
                    await _stream.WriteAsync(writeBuffer, 0, bufferOffset).ConfigureAwait(false);
                    bufferOffset = 0;
                }
                // Copy the length
                int payloadLength = payload.Length;
                if (payloadLength > _settings.MaximumFrameSize)
                {
                    //Drop the message
                    //TODO Log Error
                }
                else
                {
                    // Write the 4-byte little-endian length prefix, then the payload,
                    // flushing whenever the staging buffer fills up.
                    LittleEndian.WriteInt32(payloadLength, writeBuffer, bufferOffset);
                    bufferOffset += sizeof (int);
                    byte[] payloadBuffer = ByteString.Unsafe.GetBuffer(payload);
                    int payloadOffset = 0;
                    while (payloadOffset < payloadLength)
                    {
                        if (bufferLength == bufferOffset)
                        {
                            // Flush the buffer
                            await _stream.WriteAsync(writeBuffer, 0, bufferOffset).ConfigureAwait(false);
                            bufferOffset = 0;
                        }
                        int available = Math.Min(bufferLength - bufferOffset, payloadLength - payloadOffset);
                        Buffer.BlockCopy(payloadBuffer, payloadOffset, writeBuffer, bufferOffset, available);
                        payloadOffset += available;
                        bufferOffset += available;
                    }
                }
            }
        }
        catch (Exception)
        {
            //TODO Log
            return false;
        }
        return true;
    }
    // Shutdown coordinator: waits for disassociation, optionally attempts a graceful
    // drain, then releases the queue and stream and notifies the listener.
    private async Task<bool> StopRunner()
    {
        var gracefulStop = await _disassociated.Task;
        // Unblock loops that never got initialized so they can exit.
        _initialized.TrySetResult(false);
        if (gracefulStop)
            gracefulStop = await TryGracefulStop();
        Log(gracefulStop, "Association stopped");
        _writeQueue.Dispose();
        _settings.CloseStream(_stream, _state);
        // NOTE(review): if Disassociate() runs before Initialize(), _eventListener is
        // still null here and this throws — confirm the transport always initializes
        // the handle before it can be disassociated.
        _eventListener.Notify(new Disassociated(DisassociateInfo.Unknown));
        return gracefulStop;
    }
    // Attempts to drain the write queue and cleanly shut the stream down within
    // FlushWaitTimeout. Returns false on timeout, loop failure or any exception.
    private async Task<bool> TryGracefulStop()
    {
        try
        {
            _writeQueue.CompleteAdding();
            using (CancellationTokenSource cancel = new CancellationTokenSource(_settings.FlushWaitTimeout))
            {
                bool gracefulStop = await _writeLoop.WithCancellation(cancel.Token);
                if (!gracefulStop)
                    return false;
                _settings.ShutdownStreamGracefully(_stream, _state);
                gracefulStop = await _readLoop.WithCancellation(cancel.Token);
                if (!gracefulStop)
                    return false;
            }
        }
        catch (Exception)
        {
            //TODO Log
            return false;
        }
        return true;
    }
    // Requests a graceful stop; the actual teardown happens in StopRunner.
    public override void Disassociate()
    {
        //Log(true, "Disassociate");
        _disassociated.TrySetResult(true);
    }
}
}
| |
/* Copyright (c) Citrix Systems Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using Moq;
using NUnit.Framework;
using XenAdmin.Alerts;
using XenAdmin.Core;
using XenAdmin.Network;
using XenAdminTests.UnitTests.UnitTestHelper;
using XenAPI;
namespace XenAdminTests.UnitTests.AlertTests
{
/// <summary>
/// Unit tests for XenServerVersionAlert: verifies the alert's getter values and
/// mock interaction counts for every combination of included connections/hosts.
/// </summary>
[TestFixture, Category(TestCategories.Unit)]
public class XenServerUpdateAlertTests
{
    // Strict-behavior mocks: any call without an explicit setup fails the test.
    private Mock<IXenConnection> connA;
    private Mock<IXenConnection> connB;
    private Mock<Host> hostA;
    private Mock<Host> hostB;
    protected Cache cacheA;
    protected Cache cacheB;
    // Both connections and hosts attached: AppliesTo lists hosts before connections,
    // and the alert cannot be ignored.
    [Test]
    public void TestAlertWithConnectionAndHosts()
    {
        XenServerVersion ver = new XenServerVersion("1.2.3", "name", true, "http://url", new List<XenServerPatch>(), new DateTime(2011,4,1).ToString(), "123");
        var alert = new XenServerVersionAlert(ver);
        alert.IncludeConnection(connA.Object);
        alert.IncludeConnection(connB.Object);
        alert.IncludeHosts(new List<Host>() { hostA.Object, hostB.Object });
        IUnitTestVerifier validator = new VerifyGetters(alert);
        validator.Verify(new AlertClassUnitTestData
        {
            AppliesTo = "HostAName, HostBName, ConnAName, ConnBName",
            FixLinkText = "Go to Web Page",
            HelpID = "XenServerUpdateAlert",
            Description = "name is now available. Download the latest at the Citrix website.",
            HelpLinkText = "Help",
            Title = "name is now available",
            Priority = "Priority5"
        });
        Assert.IsFalse(alert.CanIgnore);
        // Each included object's Name getter must have been read exactly once.
        VerifyConnExpectations(Times.Once);
        VerifyHostsExpectations(Times.Once);
    }
    // Hosts only: AppliesTo contains the host names; connection mocks are never touched.
    [Test]
    public void TestAlertWithHostsAndNoConnection()
    {
        XenServerVersion ver = new XenServerVersion("1.2.3", "name", true, "http://url", new List<XenServerPatch>(), new DateTime(2011, 4, 1).ToString(), "123");
        var alert = new XenServerVersionAlert(ver);
        alert.IncludeHosts(new List<Host> { hostA.Object, hostB.Object });
        IUnitTestVerifier validator = new VerifyGetters(alert);
        validator.Verify(new AlertClassUnitTestData
        {
            AppliesTo = "HostAName, HostBName",
            FixLinkText = "Go to Web Page",
            HelpID = "XenServerUpdateAlert",
            Description = "name is now available. Download the latest at the Citrix website.",
            HelpLinkText = "Help",
            Title = "name is now available",
            Priority = "Priority5"
        });
        Assert.IsFalse(alert.CanIgnore);
        VerifyConnExpectations(Times.Never);
        VerifyHostsExpectations(Times.Once);
    }
    // Connections only: AppliesTo contains the connection names; host mocks are never touched.
    [Test]
    public void TestAlertWithConnectionAndNoHosts()
    {
        XenServerVersion ver = new XenServerVersion("1.2.3", "name", true, "http://url", new List<XenServerPatch>(), new DateTime(2011, 4, 1).ToString(), "123");
        var alert = new XenServerVersionAlert(ver);
        alert.IncludeConnection(connA.Object);
        alert.IncludeConnection(connB.Object);
        IUnitTestVerifier validator = new VerifyGetters(alert);
        validator.Verify(new AlertClassUnitTestData
        {
            AppliesTo = "ConnAName, ConnBName",
            FixLinkText = "Go to Web Page",
            HelpID = "XenServerUpdateAlert",
            Description = "name is now available. Download the latest at the Citrix website.",
            HelpLinkText = "Help",
            Title = "name is now available",
            Priority = "Priority5"
        });
        Assert.IsFalse(alert.CanIgnore);
        VerifyConnExpectations(Times.Once);
        VerifyHostsExpectations(Times.Never);
    }
    // Nothing attached: AppliesTo is empty and the alert becomes ignorable.
    [Test]
    public void TestAlertWithNoConnectionAndNoHosts()
    {
        XenServerVersion ver = new XenServerVersion("1.2.3", "name", true, "http://url", new List<XenServerPatch>(), new DateTime(2011, 4, 1).ToString(), "123");
        var alert = new XenServerVersionAlert(ver);
        IUnitTestVerifier validator = new VerifyGetters(alert);
        validator.Verify(new AlertClassUnitTestData
        {
            AppliesTo = string.Empty,
            FixLinkText = "Go to Web Page",
            HelpID = "XenServerUpdateAlert",
            Description = "name is now available. Download the latest at the Citrix website.",
            HelpLinkText = "Help",
            Title = "name is now available",
            Priority = "Priority5"
        });
        Assert.IsTrue(alert.CanIgnore);
        VerifyConnExpectations(Times.Never);
        VerifyHostsExpectations(Times.Never);
    }
    // The constructor dereferences the supplied version, so null must throw.
    [Test, ExpectedException(typeof(NullReferenceException))]
    public void TestAlertWithNullVersion()
    {
        var alert = new XenServerVersionAlert(null);
    }
    // Asserts how many times each connection mock's Name getter was read.
    private void VerifyConnExpectations(Func<Times> times)
    {
        connA.VerifyGet(n => n.Name, times());
        connB.VerifyGet(n => n.Name, times());
    }
    // Asserts how many times each host mock's Name getter was read.
    private void VerifyHostsExpectations(Func<Times> times)
    {
        hostA.VerifyGet(n => n.Name, times());
        hostB.VerifyGet(n => n.Name, times());
    }
    // Builds fresh strict mocks (with Name/Cache/Equals setups) before every test.
    [SetUp]
    public void TestSetUp()
    {
        connA = new Mock<IXenConnection>(MockBehavior.Strict);
        connA.Setup(n => n.Name).Returns("ConnAName");
        cacheA = new Cache();
        connA.Setup(x => x.Cache).Returns(cacheA);
        connB = new Mock<IXenConnection>(MockBehavior.Strict);
        connB.Setup(n => n.Name).Returns("ConnBName");
        cacheB = new Cache();
        connB.Setup(x => x.Cache).Returns(cacheB);
        hostA = new Mock<Host>(MockBehavior.Strict);
        hostA.Setup(n => n.Name).Returns("HostAName");
        // Equals is stubbed to reference equality so alert de-duplication works on mocks.
        hostA.Setup(n => n.Equals(It.IsAny<object>())).Returns((object o) => ReferenceEquals(o, hostA.Object));
        hostB = new Mock<Host>(MockBehavior.Strict);
        hostB.Setup(n => n.Name).Returns("HostBName");
        hostB.Setup(n => n.Equals(It.IsAny<object>())).Returns((object o) => ReferenceEquals(o, hostB.Object));
    }
    // Drop all fixture state so each test starts from a clean SetUp.
    [TearDown]
    public void TestTearDown()
    {
        cacheA = null;
        cacheB = null;
        connA = null;
        connB = null;
        hostA = null;
        hostB = null;
    }
}
}
| |
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.
using System;
using System.Drawing;
using DotSpatial.Serialization;
namespace DotSpatial.Symbology.Forms
{
/// <summary>
/// System.Font is notoriously difficult when serialization comes up. This replaces that
/// with a more serializable version.
/// </summary>
/// <summary>
/// System.Font is notoriously difficult when serialization comes up. This replaces that
/// with a more serializable version. The GDI+ font, brush and format objects are built
/// lazily by <see cref="Setup"/> and cached until a property change invalidates them.
/// </summary>
[Serializable]
public class TextFont : IDisposable
{
    #region Fields
    private StringAlignment _alignment;
    private Brush _brush;
    private bool _brushValid;
    private Color _color;
    private string _familyName;
    private StringFormatFlags _flags;
    private Font _font;
    private bool _fontValid;
    private StringFormat _format;
    private bool _formatValid;
    private StringAlignment _lineAlignment;
    private float _size;
    private FontStyle _style;
    private StringTrimming _trimming;
    #endregion
    #region Constructors
    /// <summary>
    /// Initializes a new instance of the <see cref="TextFont"/> class.
    /// Creates a default, black, 8pt sans serif font with a normal style.
    /// </summary>
    public TextFont()
    {
        Configure();
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="TextFont"/> class.
    /// Creates a sans serif, black, normal font of the specified size.
    /// </summary>
    /// <param name="size">The size to use.</param>
    public TextFont(float size)
    {
        Configure();
        _size = size;
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="TextFont"/> class.
    /// </summary>
    /// <param name="font">The font to use.</param>
    /// <param name="color">The color to use.</param>
    public TextFont(Font font, Color color)
    {
        Configure();
        _familyName = font.FontFamily.Name;
        _size = font.Size;
        _style = font.Style;
        _color = color;
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="TextFont"/> class.
    /// </summary>
    /// <param name="family">The font family to use.</param>
    /// <param name="size">The size to use.</param>
    /// <param name="style">The style to use.</param>
    /// <param name="color">The color to use.</param>
    public TextFont(FontFamily family, float size, FontStyle style, Color color)
    {
        Configure();
        _familyName = family.Name;
        _size = size;
        _style = style;
        _color = color;
    }
    #endregion
    #region Properties
    /// <summary>
    /// Gets or sets the alignment information on the vertical plane.
    /// </summary>
    [Serialize("Alignment")]
    public StringAlignment Alignment
    {
        get
        {
            return _alignment;
        }
        set
        {
            _alignment = value;
            _formatValid = false;
        }
    }
    /// <summary>
    /// Gets or sets the System.Color to use for the font color.
    /// </summary>
    [Serialize("Color")]
    public Color Color
    {
        get
        {
            return _color;
        }
        set
        {
            _color = value;
            _brushValid = false;
        }
    }
    /// <summary>
    /// Gets or sets the string family name for this font.
    /// </summary>
    [Serialize("FamilyName")]
    public string FamilyName
    {
        get
        {
            return _familyName;
        }
        set
        {
            _familyName = value;
            _fontValid = false;
        }
    }
    /// <summary>
    /// Gets or sets the string format flags.
    /// </summary>
    [Serialize("FormatFlags")]
    public StringFormatFlags FormatFlags
    {
        get
        {
            return _flags;
        }
        set
        {
            _flags = value;
            _formatValid = false;
        }
    }
    /// <summary>
    /// Gets or sets the line alignment on the horizontal plane.
    /// </summary>
    [Serialize("LineAlignment")]
    public StringAlignment LineAlignment
    {
        get
        {
            return _lineAlignment;
        }
        set
        {
            _lineAlignment = value;
            _formatValid = false;
        }
    }
    /// <summary>
    /// Gets or sets the floating point value controlling the size.
    /// </summary>
    [Serialize("Size")]
    public float Size
    {
        get
        {
            return _size;
        }
        set
        {
            _size = value;
            _fontValid = false;
        }
    }
    /// <summary>
    /// Gets or sets the style.
    /// </summary>
    [Serialize("Style")]
    public FontStyle Style
    {
        get
        {
            return _style;
        }
        set
        {
            _style = value;
            _fontValid = false;
        }
    }
    /// <summary>
    /// Gets or sets the StringTrimming options.
    /// </summary>
    [Serialize("Trimming")]
    public StringTrimming Trimming
    {
        get
        {
            return _trimming;
        }
        set
        {
            _trimming = value;
            _formatValid = false;
        }
    }
    #endregion
    #region Methods
    /// <summary>
    /// Disposes of the font, brush and format that are stored internally.
    /// </summary>
    public void Dispose()
    {
        _font?.Dispose();
        _brush?.Dispose();
        _format?.Dispose();
        // Drop the disposed instances and mark the caches invalid so a later
        // Setup() recreates them instead of handing back disposed GDI objects.
        _font = null;
        _brush = null;
        _format = null;
        _fontValid = false;
        _brushValid = false;
        _formatValid = false;
    }
    /// <summary>
    /// Draws the specified text to the specified graphics object in the specified location,
    /// but using all of the parameters specified by this TextFont object.
    /// </summary>
    /// <param name="g">The Graphics surface to draw to.</param>
    /// <param name="text">The string text to draw.</param>
    /// <param name="x">The x coordinate of the top left position.</param>
    /// <param name="y">The y coordinate of the top left position.</param>
    public void Draw(Graphics g, string text, float x, float y)
    {
        Setup();
        OnDraw(g, text, x, y);
    }
    /// <summary>
    /// Draws the specified text to the specified graphics object in the specified location,
    /// but using all of the parameters specified by this TextFont object.
    /// </summary>
    /// <param name="g">The Graphics surface to draw to.</param>
    /// <param name="text">The string text to draw.</param>
    /// <param name="location">The PointF describing the location to draw.</param>
    public void Draw(Graphics g, string text, PointF location)
    {
        Setup();
        OnDraw(g, text, location.X, location.Y);
    }
    /// <summary>
    /// Draws the specified text to the specified graphics object within the specified box.
    /// </summary>
    /// <param name="g">The graphics surface to draw to.</param>
    /// <param name="text">The text to draw.</param>
    /// <param name="box">The rectangular box to draw within.</param>
    public void Draw(Graphics g, string text, RectangleF box)
    {
        Setup();
        // Bug fix: this overload previously prepared the drawing resources but
        // never invoked the matching OnDraw, so nothing was ever rendered.
        OnDraw(g, text, box);
    }
    /// <summary>
    /// This returns the actual internal font. Be careful not to dispose this.
    /// </summary>
    /// <returns>A System.Font.</returns>
    public Font GetFont()
    {
        Setup(); // in case the font is not yet valid
        return _font;
    }
    /// <summary>
    /// Sets the font on this Textfont to the specified value.
    /// </summary>
    /// <param name="font">The font to use.</param>
    public void SetFont(Font font)
    {
        _familyName = font.Name;
        _size = font.Size;
        _style = font.Style;
        _font = font;
        _fontValid = true;
    }
    /// <summary>
    /// Assigns the parameters from the specified StringFormat class to the members of
    /// this TextFont.
    /// </summary>
    /// <param name="format">The StringFormat to apply to this object.</param>
    public void SetFormat(StringFormat format)
    {
        _alignment = format.Alignment;
        _flags = format.FormatFlags;
        _lineAlignment = format.LineAlignment;
        _trimming = format.Trimming;
        // Bug fix: invalidate the cached StringFormat (matching the property
        // setters) so the copied values take effect on the next draw.
        _formatValid = false;
    }
    /// <summary>
    /// Handles drawing for point location drawing.
    /// </summary>
    /// <param name="g">The Graphics surface to draw to.</param>
    /// <param name="text">The string to draw.</param>
    /// <param name="x">The x floating point value.</param>
    /// <param name="y">The y floating point value.</param>
    protected virtual void OnDraw(Graphics g, string text, float x, float y)
    {
        g.DrawString(text, _font, _brush, x, y, _format);
    }
    /// <summary>
    /// Handles drawing for drawing that falls within a rectangleF structure.
    /// </summary>
    /// <param name="g">The Graphics surface to draw to.</param>
    /// <param name="text">The string to draw.</param>
    /// <param name="box">The RectangleF structure.</param>
    protected virtual void OnDraw(Graphics g, string text, RectangleF box)
    {
        g.DrawString(text, _font, _brush, box, _format);
    }
    // Applies the default state: black 8pt regular generic-sans-serif text with
    // the default StringFormat settings.
    private void Configure()
    {
        // brush
        _color = Color.Black;
        // font
        _familyName = FontFamily.GenericSansSerif.Name;
        _size = 8;
        _style = FontStyle.Regular;
        // format
        StringFormat temp = new StringFormat();
        _alignment = temp.Alignment;
        _flags = temp.FormatFlags;
        _lineAlignment = temp.LineAlignment;
        _trimming = temp.Trimming;
        temp.Dispose();
    }
    // (Re)builds any invalidated GDI objects from the current field values.
    private void Setup()
    {
        if (!_brushValid || _brush == null)
        {
            _brush?.Dispose();
            _brush = new SolidBrush(_color);
            // Bug fix: mark the cache valid so subsequent draws reuse the brush.
            // Previously the flag was never set, so every draw disposed and
            // reallocated the brush (and likewise the font and format below).
            _brushValid = true;
        }
        if (!_fontValid || _font == null)
        {
            _font?.Dispose();
            _font = new Font(_familyName, _size, _style);
            _fontValid = true;
        }
        if (!_formatValid || _format == null)
        {
            _format?.Dispose();
            _format = new StringFormat
            {
                Alignment = _alignment,
                FormatFlags = _flags,
                LineAlignment = _lineAlignment,
                Trimming = _trimming
            };
            _formatValid = true;
        }
    }
    #endregion
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.Serialization.Formatters;
using uWebshop.Newtonsoft.Json.Converters;
using uWebshop.Newtonsoft.Json.Serialization;
using uWebshop.Newtonsoft.Json.Utilities;
using System.Runtime.Serialization;
using ErrorEventArgs = uWebshop.Newtonsoft.Json.Serialization.ErrorEventArgs;
namespace uWebshop.Newtonsoft.Json
{
/// <summary>
/// Serializes and deserializes objects into and from the JSON format.
/// The <see cref="JsonSerializer"/> enables you to control how objects are encoded into JSON.
/// </summary>
public class JsonSerializer
{
#region Properties_binder
// Enum-valued backing fields are internal so ApplySerializerSettings can read the
// matching JsonSerializerSettings fields and assign through the validating properties.
internal TypeNameHandling _typeNameHandling;
internal FormatterAssemblyStyle _typeNameAssemblyFormat;
internal PreserveReferencesHandling _preserveReferencesHandling;
internal ReferenceLoopHandling _referenceLoopHandling;
internal MissingMemberHandling _missingMemberHandling;
internal ObjectCreationHandling _objectCreationHandling;
internal NullValueHandling _nullValueHandling;
internal DefaultValueHandling _defaultValueHandling;
internal ConstructorHandling _constructorHandling;
internal JsonConverterCollection _converters;
internal IContractResolver _contractResolver;
internal ITraceWriter _traceWriter;
internal SerializationBinder _binder;
internal StreamingContext _context;
private IReferenceResolver _referenceResolver;
// Nullable fields distinguish "never explicitly set" (null) from a real value;
// the corresponding property getters fall back to the JsonSerializerSettings defaults.
private Formatting? _formatting;
private DateFormatHandling? _dateFormatHandling;
private DateTimeZoneHandling? _dateTimeZoneHandling;
private DateParseHandling? _dateParseHandling;
private FloatFormatHandling? _floatFormatHandling;
private FloatParseHandling? _floatParseHandling;
private StringEscapeHandling? _stringEscapeHandling;
private CultureInfo _culture;
private int? _maxDepth;
private bool _maxDepthSet; // tracks explicit assignment: null (no limit) is itself a valid MaxDepth value
private bool? _checkAdditionalContent;
private string _dateFormatString;
private bool _dateFormatStringSet; // tracks explicit assignment: null is itself a valid DateFormatString value
/// <summary>
/// Occurs when the <see cref="JsonSerializer"/> errors during serialization and deserialization.
/// </summary>
public virtual event EventHandler<ErrorEventArgs> Error;
/// <summary>
/// Gets or sets the <see cref="IReferenceResolver"/> used by the serializer when resolving references.
/// </summary>
public virtual IReferenceResolver ReferenceResolver
{
get { return GetReferenceResolver(); }
set
{
if (value == null)
throw new ArgumentNullException("value", "Reference resolver cannot be null.");
_referenceResolver = value;
}
}
/// <summary>
/// Gets or sets the <see cref="SerializationBinder"/> used by the serializer when resolving type names.
/// </summary>
public virtual SerializationBinder Binder
{
get { return _binder; }
set
{
if (value == null)
throw new ArgumentNullException("value", "Serialization binder cannot be null.");
_binder = value;
}
}
/// <summary>
/// Gets or sets the <see cref="ITraceWriter"/> used by the serializer when writing trace messages.
/// </summary>
/// <value>The trace writer.</value>
public virtual ITraceWriter TraceWriter
{
get { return _traceWriter; }
set { _traceWriter = value; }
}
/// <summary>
/// Gets or sets how type name writing and reading is handled by the serializer.
/// </summary>
public virtual TypeNameHandling TypeNameHandling
{
get { return _typeNameHandling; }
set
{
// Range check guards against casts of arbitrary ints to the enum.
if (value < TypeNameHandling.None || value > TypeNameHandling.Auto)
throw new ArgumentOutOfRangeException("value");
_typeNameHandling = value;
}
}
/// <summary>
/// Gets or sets how a type name assembly is written and resolved by the serializer.
/// </summary>
/// <value>The type name assembly format.</value>
public virtual FormatterAssemblyStyle TypeNameAssemblyFormat
{
get { return _typeNameAssemblyFormat; }
set
{
if (value < FormatterAssemblyStyle.Simple || value > FormatterAssemblyStyle.Full)
throw new ArgumentOutOfRangeException("value");
_typeNameAssemblyFormat = value;
}
}
/// <summary>
/// Gets or sets how object references are preserved by the serializer.
/// </summary>
public virtual PreserveReferencesHandling PreserveReferencesHandling
{
get { return _preserveReferencesHandling; }
set
{
if (value < PreserveReferencesHandling.None || value > PreserveReferencesHandling.All)
throw new ArgumentOutOfRangeException("value");
_preserveReferencesHandling = value;
}
}
/// <summary>
/// Get or set how reference loops (e.g. a class referencing itself) are handled.
/// </summary>
public virtual ReferenceLoopHandling ReferenceLoopHandling
{
get { return _referenceLoopHandling; }
set
{
if (value < ReferenceLoopHandling.Error || value > ReferenceLoopHandling.Serialize)
throw new ArgumentOutOfRangeException("value");
_referenceLoopHandling = value;
}
}
/// <summary>
/// Get or set how missing members (e.g. JSON contains a property that isn't a member on the object) are handled during deserialization.
/// </summary>
public virtual MissingMemberHandling MissingMemberHandling
{
get { return _missingMemberHandling; }
set
{
if (value < MissingMemberHandling.Ignore || value > MissingMemberHandling.Error)
throw new ArgumentOutOfRangeException("value");
_missingMemberHandling = value;
}
}
/// <summary>
/// Get or set how null values are handled during serialization and deserialization.
/// </summary>
public virtual NullValueHandling NullValueHandling
{
get { return _nullValueHandling; }
set
{
if (value < NullValueHandling.Include || value > NullValueHandling.Ignore)
throw new ArgumentOutOfRangeException("value");
_nullValueHandling = value;
}
}
/// <summary>
/// Get or set how default values are handled during serialization and deserialization.
/// </summary>
public virtual DefaultValueHandling DefaultValueHandling
{
get { return _defaultValueHandling; }
set
{
if (value < DefaultValueHandling.Include || value > DefaultValueHandling.IgnoreAndPopulate)
throw new ArgumentOutOfRangeException("value");
_defaultValueHandling = value;
}
}
/// <summary>
/// Gets or sets how objects are created during deserialization.
/// </summary>
/// <value>The object creation handling.</value>
public virtual ObjectCreationHandling ObjectCreationHandling
{
get { return _objectCreationHandling; }
set
{
if (value < ObjectCreationHandling.Auto || value > ObjectCreationHandling.Replace)
throw new ArgumentOutOfRangeException("value");
_objectCreationHandling = value;
}
}
/// <summary>
/// Gets or sets how constructors are used during deserialization.
/// </summary>
/// <value>The constructor handling.</value>
public virtual ConstructorHandling ConstructorHandling
{
get { return _constructorHandling; }
set
{
if (value < ConstructorHandling.Default || value > ConstructorHandling.AllowNonPublicDefaultConstructor)
throw new ArgumentOutOfRangeException("value");
_constructorHandling = value;
}
}
/// <summary>
/// Gets a collection <see cref="JsonConverter"/> that will be used during serialization.
/// </summary>
/// <value>Collection <see cref="JsonConverter"/> that will be used during serialization.</value>
public virtual JsonConverterCollection Converters
{
get
{
// Lazily created; internal code (GetMatchingConverter) tolerates a null _converters.
if (_converters == null)
_converters = new JsonConverterCollection();
return _converters;
}
}
/// <summary>
/// Gets or sets the contract resolver used by the serializer when
/// serializing .NET objects to JSON and vice versa.
/// </summary>
public virtual IContractResolver ContractResolver
{
get { return _contractResolver; }
set { _contractResolver = value ?? DefaultContractResolver.Instance; }
}
/// <summary>
/// Gets or sets the <see cref="StreamingContext"/> used by the serializer when invoking serialization callback methods.
/// </summary>
/// <value>The context.</value>
public virtual StreamingContext Context
{
get { return _context; }
set { _context = value; }
}
/// <summary>
/// Indicates how JSON text output is formatted.
/// </summary>
public virtual Formatting Formatting
{
get { return _formatting ?? JsonSerializerSettings.DefaultFormatting; }
set { _formatting = value; }
}
/// <summary>
/// Get or set how dates are written to JSON text.
/// </summary>
public virtual DateFormatHandling DateFormatHandling
{
get { return _dateFormatHandling ?? JsonSerializerSettings.DefaultDateFormatHandling; }
set { _dateFormatHandling = value; }
}
/// <summary>
/// Get or set how <see cref="DateTime"/> time zones are handling during serialization and deserialization.
/// </summary>
public virtual DateTimeZoneHandling DateTimeZoneHandling
{
get { return _dateTimeZoneHandling ?? JsonSerializerSettings.DefaultDateTimeZoneHandling; }
set { _dateTimeZoneHandling = value; }
}
/// <summary>
/// Get or set how date formatted strings, e.g. "\/Date(1198908717056)\/" and "2012-03-21T05:40Z", are parsed when reading JSON.
/// </summary>
public virtual DateParseHandling DateParseHandling
{
get { return _dateParseHandling ?? JsonSerializerSettings.DefaultDateParseHandling; }
set { _dateParseHandling = value; }
}
/// <summary>
/// Get or set how floating point numbers, e.g. 1.0 and 9.9, are parsed when reading JSON text.
/// </summary>
public virtual FloatParseHandling FloatParseHandling
{
get { return _floatParseHandling ?? JsonSerializerSettings.DefaultFloatParseHandling; }
set { _floatParseHandling = value; }
}
/// <summary>
/// Get or set how special floating point numbers, e.g. <see cref="F:System.Double.NaN"/>,
/// <see cref="F:System.Double.PositiveInfinity"/> and <see cref="F:System.Double.NegativeInfinity"/>,
/// are written as JSON text.
/// </summary>
public virtual FloatFormatHandling FloatFormatHandling
{
get { return _floatFormatHandling ?? JsonSerializerSettings.DefaultFloatFormatHandling; }
set { _floatFormatHandling = value; }
}
/// <summary>
/// Get or set how strings are escaped when writing JSON text.
/// </summary>
public virtual StringEscapeHandling StringEscapeHandling
{
get { return _stringEscapeHandling ?? JsonSerializerSettings.DefaultStringEscapeHandling; }
set { _stringEscapeHandling = value; }
}
/// <summary>
/// Get or set how <see cref="DateTime"/> and <see cref="DateTimeOffset"/> values are formatted when writing JSON text.
/// </summary>
public virtual string DateFormatString
{
get { return _dateFormatString ?? JsonSerializerSettings.DefaultDateFormatString; }
set
{
_dateFormatString = value;
_dateFormatStringSet = true;
}
}
/// <summary>
/// Gets or sets the culture used when reading JSON. Defaults to <see cref="CultureInfo.InvariantCulture"/>.
/// </summary>
public virtual CultureInfo Culture
{
get { return _culture ?? JsonSerializerSettings.DefaultCulture; }
set { _culture = value; }
}
/// <summary>
/// Gets or sets the maximum depth allowed when reading JSON. Reading past this depth will throw a <see cref="JsonReaderException"/>.
/// </summary>
public virtual int? MaxDepth
{
get { return _maxDepth; }
set
{
// null (no limit) passes this check; only non-positive limits are rejected.
if (value <= 0)
throw new ArgumentException("Value must be positive.", "value");
_maxDepth = value;
_maxDepthSet = true;
}
}
/// <summary>
/// Gets a value indicating whether there will be a check for additional JSON content after deserializing an object.
/// </summary>
/// <value>
/// <c>true</c> if there will be a check for additional JSON content after deserializing an object; otherwise, <c>false</c>.
/// </value>
public virtual bool CheckAdditionalContent
{
get { return _checkAdditionalContent ?? JsonSerializerSettings.DefaultCheckAdditionalContent; }
set { _checkAdditionalContent = value; }
}
// Distinguishes an explicit CheckAdditionalContent assignment from the global default.
internal bool IsCheckAdditionalContentSet()
{
return (_checkAdditionalContent != null);
}
#endregion
/// <summary>
/// Initializes a new instance of the <see cref="JsonSerializer"/> class,
/// seeding every option from the global defaults on <see cref="JsonSerializerSettings"/>.
/// </summary>
public JsonSerializer()
{
    // Reference and type-name behavior.
    _referenceLoopHandling = JsonSerializerSettings.DefaultReferenceLoopHandling;
    _preserveReferencesHandling = JsonSerializerSettings.DefaultPreserveReferencesHandling;
    _typeNameHandling = JsonSerializerSettings.DefaultTypeNameHandling;
    // Member/value handling.
    _missingMemberHandling = JsonSerializerSettings.DefaultMissingMemberHandling;
    _nullValueHandling = JsonSerializerSettings.DefaultNullValueHandling;
    _defaultValueHandling = JsonSerializerSettings.DefaultDefaultValueHandling;
    // Object construction.
    _objectCreationHandling = JsonSerializerSettings.DefaultObjectCreationHandling;
    _constructorHandling = JsonSerializerSettings.DefaultConstructorHandling;
    // Context, binding, culture and contract resolution.
    _context = JsonSerializerSettings.DefaultContext;
    _binder = DefaultSerializationBinder.Instance;
    _culture = JsonSerializerSettings.DefaultCulture;
    _contractResolver = DefaultContractResolver.Instance;
}
/// <summary>
/// Creates a new <see cref="JsonSerializer"/> instance.
/// The <see cref="JsonSerializer"/> will not use default settings.
/// </summary>
/// <returns>
/// A new <see cref="JsonSerializer"/> instance.
/// The <see cref="JsonSerializer"/> will not use default settings.
/// </returns>
public static JsonSerializer Create()
{
    // Unlike CreateDefault, this ignores JsonConvert.DefaultSettings entirely.
    var serializer = new JsonSerializer();
    return serializer;
}
/// <summary>
/// Creates a new <see cref="JsonSerializer"/> instance using the specified <see cref="JsonSerializerSettings"/>.
/// The <see cref="JsonSerializer"/> will not use default settings.
/// </summary>
/// <param name="settings">The settings to be applied to the <see cref="JsonSerializer"/>.</param>
/// <returns>
/// A new <see cref="JsonSerializer"/> instance using the specified <see cref="JsonSerializerSettings"/>.
/// The <see cref="JsonSerializer"/> will not use default settings.
/// </returns>
public static JsonSerializer Create(JsonSerializerSettings settings)
{
    JsonSerializer serializer = Create();
    if (settings == null)
    {
        // Nothing to apply; hand back a plain serializer.
        return serializer;
    }
    ApplySerializerSettings(serializer, settings);
    return serializer;
}
/// <summary>
/// Creates a new <see cref="JsonSerializer"/> instance.
/// The <see cref="JsonSerializer"/> will use default settings.
/// </summary>
/// <returns>
/// A new <see cref="JsonSerializer"/> instance.
/// The <see cref="JsonSerializer"/> will use default settings.
/// </returns>
public static JsonSerializer CreateDefault()
{
    // Snapshot the static delegate so another thread swapping
    // JsonConvert.DefaultSettings cannot race the null check.
    Func<JsonSerializerSettings> defaultSettingsCreator = JsonConvert.DefaultSettings;
    JsonSerializerSettings defaultSettings = null;
    if (defaultSettingsCreator != null)
    {
        defaultSettings = defaultSettingsCreator();
    }
    return Create(defaultSettings);
}
/// <summary>
/// Creates a new <see cref="JsonSerializer"/> instance using the specified <see cref="JsonSerializerSettings"/>.
/// The <see cref="JsonSerializer"/> will use default settings.
/// </summary>
/// <param name="settings">The settings to be applied to the <see cref="JsonSerializer"/>.</param>
/// <returns>
/// A new <see cref="JsonSerializer"/> instance using the specified <see cref="JsonSerializerSettings"/>.
/// The <see cref="JsonSerializer"/> will use default settings.
/// </returns>
public static JsonSerializer CreateDefault(JsonSerializerSettings settings)
{
    // Global defaults are applied first; the explicit settings then override them.
    JsonSerializer serializer = CreateDefault();
    if (settings == null)
    {
        return serializer;
    }
    ApplySerializerSettings(serializer, settings);
    return serializer;
}
// Copies every explicitly-set option from settings onto serializer.
// Serializer-specific options are assigned through the public properties (re-validating
// ranges); reader/writer-specific options are written straight to the backing fields so
// an unset setting never overrides a value already configured on a reader/writer.
private static void ApplySerializerSettings(JsonSerializer serializer, JsonSerializerSettings settings)
{
if (!CollectionUtils.IsNullOrEmpty(settings.Converters))
{
// insert settings converters at the beginning so they take precedence
// if user wants to remove one of the default converters they will have to do it manually
for (int i = 0; i < settings.Converters.Count; i++)
{
serializer.Converters.Insert(i, settings.Converters[i]);
}
}
// serializer specific
// Each nullable backing field is checked so only explicitly-set options are copied.
if (settings._typeNameHandling != null)
serializer.TypeNameHandling = settings.TypeNameHandling;
if (settings._typeNameAssemblyFormat != null)
serializer.TypeNameAssemblyFormat = settings.TypeNameAssemblyFormat;
if (settings._preserveReferencesHandling != null)
serializer.PreserveReferencesHandling = settings.PreserveReferencesHandling;
if (settings._referenceLoopHandling != null)
serializer.ReferenceLoopHandling = settings.ReferenceLoopHandling;
if (settings._missingMemberHandling != null)
serializer.MissingMemberHandling = settings.MissingMemberHandling;
if (settings._objectCreationHandling != null)
serializer.ObjectCreationHandling = settings.ObjectCreationHandling;
if (settings._nullValueHandling != null)
serializer.NullValueHandling = settings.NullValueHandling;
if (settings._defaultValueHandling != null)
serializer.DefaultValueHandling = settings.DefaultValueHandling;
if (settings._constructorHandling != null)
serializer.ConstructorHandling = settings.ConstructorHandling;
if (settings._context != null)
serializer.Context = settings.Context;
if (settings._checkAdditionalContent != null)
serializer._checkAdditionalContent = settings._checkAdditionalContent;
if (settings.Error != null)
serializer.Error += settings.Error;
if (settings.ContractResolver != null)
serializer.ContractResolver = settings.ContractResolver;
if (settings.ReferenceResolver != null)
serializer.ReferenceResolver = settings.ReferenceResolver;
if (settings.TraceWriter != null)
serializer.TraceWriter = settings.TraceWriter;
if (settings.Binder != null)
serializer.Binder = settings.Binder;
// reader/writer specific
// unset values won't override reader/writer set values
if (settings._formatting != null)
serializer._formatting = settings._formatting;
if (settings._dateFormatHandling != null)
serializer._dateFormatHandling = settings._dateFormatHandling;
if (settings._dateTimeZoneHandling != null)
serializer._dateTimeZoneHandling = settings._dateTimeZoneHandling;
if (settings._dateParseHandling != null)
serializer._dateParseHandling = settings._dateParseHandling;
// DateFormatString and MaxDepth use explicit "set" flags because null is a valid value for both.
if (settings._dateFormatStringSet)
{
serializer._dateFormatString = settings._dateFormatString;
serializer._dateFormatStringSet = settings._dateFormatStringSet;
}
if (settings._floatFormatHandling != null)
serializer._floatFormatHandling = settings._floatFormatHandling;
if (settings._floatParseHandling != null)
serializer._floatParseHandling = settings._floatParseHandling;
if (settings._stringEscapeHandling != null)
serializer._stringEscapeHandling = settings._stringEscapeHandling;
if (settings._culture != null)
serializer._culture = settings._culture;
if (settings._maxDepthSet)
{
serializer._maxDepth = settings._maxDepth;
serializer._maxDepthSet = settings._maxDepthSet;
}
}
/// <summary>
/// Populates the JSON values onto the target object.
/// </summary>
/// <param name="reader">The <see cref="TextReader"/> that contains the JSON structure to read values from.</param>
/// <param name="target">The target object to populate values onto.</param>
public void Populate(TextReader reader, object target)
{
    // Wrap the plain text reader in a JSON tokenizer and reuse the JsonReader overload.
    var jsonReader = new JsonTextReader(reader);
    Populate(jsonReader, target);
}
/// <summary>
/// Populates the JSON values onto the target object.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> that contains the JSON structure to read values from.</param>
/// <param name="target">The target object to populate values onto.</param>
public void Populate(JsonReader reader, object target)
{
// Public entry point; PopulateInternal is virtual so derived serializers can intercept.
PopulateInternal(reader, target);
}
// Core populate implementation: validates arguments and delegates to the internal reader.
internal virtual void PopulateInternal(JsonReader reader, object target)
{
    ValidationUtils.ArgumentNotNull(reader, "reader");
    ValidationUtils.ArgumentNotNull(target, "target");
    JsonSerializerInternalReader internalReader = new JsonSerializerInternalReader(this);
    internalReader.Populate(reader, target);
}
/// <summary>
/// Deserializes the Json structure contained by the specified <see cref="JsonReader"/>.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> that contains the JSON structure to deserialize.</param>
/// <returns>The <see cref="Object"/> being deserialized.</returns>
public object Deserialize(JsonReader reader)
{
// Untyped overload: forwards with objectType == null.
return Deserialize(reader, null);
}
/// <summary>
/// Deserializes the Json structure contained by the specified <see cref="StringReader"/>
/// into an instance of the specified type.
/// </summary>
/// <param name="reader">The <see cref="TextReader"/> containing the object.</param>
/// <param name="objectType">The <see cref="Type"/> of object being deserialized.</param>
/// <returns>The instance of <paramref name="objectType"/> being deserialized.</returns>
public object Deserialize(TextReader reader, Type objectType)
{
    // Tokenize the raw text and defer to the JsonReader overload.
    var jsonReader = new JsonTextReader(reader);
    return Deserialize(jsonReader, objectType);
}
/// <summary>
/// Deserializes the Json structure contained by the specified <see cref="JsonReader"/>
/// into an instance of the specified type.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> containing the object.</param>
/// <typeparam name="T">The type of the object to deserialize.</typeparam>
/// <returns>The instance of <typeparamref name="T"/> being deserialized.</returns>
public T Deserialize<T>(JsonReader reader)
{
// Generic convenience wrapper over the Type-based overload.
return (T) Deserialize(reader, typeof (T));
}
/// <summary>
/// Deserializes the Json structure contained by the specified <see cref="JsonReader"/>
/// into an instance of the specified type.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> containing the object.</param>
/// <param name="objectType">The <see cref="Type"/> of object being deserialized.</param>
/// <returns>The instance of <paramref name="objectType"/> being deserialized.</returns>
public object Deserialize(JsonReader reader, Type objectType)
{
// DeserializeInternal is virtual so derived serializers can intercept the core path.
return DeserializeInternal(reader, objectType);
}
// Core deserialization: temporarily pushes this serializer's options onto the reader,
// runs the internal reader (optionally wrapped for verbose tracing), then restores the
// reader's previous options so the caller-owned reader is left as it was found.
internal virtual object DeserializeInternal(JsonReader reader, Type objectType)
{
    ValidationUtils.ArgumentNotNull(reader, "reader");

    // Set serialization options onto the reader, remembering previous values.
    CultureInfo previousCulture = null;
    if (_culture != null && !_culture.Equals(reader.Culture))
    {
        previousCulture = reader.Culture;
        reader.Culture = _culture;
    }
    DateTimeZoneHandling? previousDateTimeZoneHandling = null;
    if (_dateTimeZoneHandling != null && reader.DateTimeZoneHandling != _dateTimeZoneHandling)
    {
        previousDateTimeZoneHandling = reader.DateTimeZoneHandling;
        reader.DateTimeZoneHandling = _dateTimeZoneHandling.Value;
    }
    DateParseHandling? previousDateParseHandling = null;
    if (_dateParseHandling != null && reader.DateParseHandling != _dateParseHandling)
    {
        previousDateParseHandling = reader.DateParseHandling;
        reader.DateParseHandling = _dateParseHandling.Value;
    }
    FloatParseHandling? previousFloatParseHandling = null;
    if (_floatParseHandling != null && reader.FloatParseHandling != _floatParseHandling)
    {
        previousFloatParseHandling = reader.FloatParseHandling;
        reader.FloatParseHandling = _floatParseHandling.Value;
    }
    // BUG FIX: the restore below used to run whenever _maxDepthSet was true, even if the
    // reader's MaxDepth was never actually overridden (reader.MaxDepth == _maxDepth).
    // In that case previousMaxDepth was still null, so the restore silently cleared the
    // reader's depth limit. Track whether an override really happened instead.
    int? previousMaxDepth = null;
    bool maxDepthChanged = false;
    if (_maxDepthSet && reader.MaxDepth != _maxDepth)
    {
        previousMaxDepth = reader.MaxDepth;
        reader.MaxDepth = _maxDepth;
        maxDepthChanged = true;
    }

    // When verbose tracing is enabled, wrap the reader so the consumed JSON can be logged.
    TraceJsonReader traceJsonReader = (TraceWriter != null && TraceWriter.LevelFilter >= TraceLevel.Verbose) ? new TraceJsonReader(reader) : null;

    var serializerReader = new JsonSerializerInternalReader(this);
    object value = serializerReader.Deserialize(traceJsonReader ?? reader, objectType, CheckAdditionalContent);

    if (traceJsonReader != null)
        TraceWriter.Trace(TraceLevel.Verbose, "Deserialized JSON: " + Environment.NewLine + traceJsonReader.GetJson(), null);

    // Reset reader back to previous options.
    if (previousCulture != null)
        reader.Culture = previousCulture;
    if (previousDateTimeZoneHandling != null)
        reader.DateTimeZoneHandling = previousDateTimeZoneHandling.Value;
    if (previousDateParseHandling != null)
        reader.DateParseHandling = previousDateParseHandling.Value;
    if (previousFloatParseHandling != null)
        reader.FloatParseHandling = previousFloatParseHandling.Value;
    if (maxDepthChanged)
        reader.MaxDepth = previousMaxDepth;

    return value;
}
/// <summary>
/// Serializes the specified <see cref="Object"/> and writes the Json structure
/// to a <c>Stream</c> using the specified <see cref="TextWriter"/>.
/// </summary>
/// <param name="textWriter">The <see cref="TextWriter"/> used to write the Json structure.</param>
/// <param name="value">The <see cref="Object"/> to serialize.</param>
public void Serialize(TextWriter textWriter, object value)
{
    // Wrap the text writer in a JSON emitter and reuse the JsonWriter overload.
    var jsonWriter = new JsonTextWriter(textWriter);
    Serialize(jsonWriter, value);
}
/// <summary>
/// Serializes the specified <see cref="Object"/> and writes the Json structure
/// using the specified <see cref="JsonWriter"/>.
/// </summary>
/// <param name="jsonWriter">The <see cref="JsonWriter"/> used to write the Json structure.</param>
/// <param name="value">The <see cref="Object"/> to serialize.</param>
/// <param name="objectType">
/// The type of the value being serialized.
/// This parameter is used when <see cref="TypeNameHandling"/> is Auto to write out the type name if the type of the value does not match.
/// Specifying the type is optional.
/// </param>
public void Serialize(JsonWriter jsonWriter, object value, Type objectType)
{
// SerializeInternal is virtual so derived serializers can intercept the core path.
SerializeInternal(jsonWriter, value, objectType);
}
/// <summary>
/// Serializes the specified <see cref="Object"/> and writes the Json structure
/// to a <c>Stream</c> using the specified <see cref="TextWriter"/>.
/// </summary>
/// <param name="textWriter">The <see cref="TextWriter"/> used to write the Json structure.</param>
/// <param name="value">The <see cref="Object"/> to serialize.</param>
/// <param name="objectType">
/// The type of the value being serialized.
/// This parameter is used when <see cref="TypeNameHandling"/> is Auto to write out the type name if the type of the value does not match.
/// Specifying the type is optional.
/// </param>
public void Serialize(TextWriter textWriter, object value, Type objectType)
{
    // Wrap the text writer in a JSON emitter and reuse the JsonWriter overload.
    var jsonWriter = new JsonTextWriter(textWriter);
    Serialize(jsonWriter, value, objectType);
}
/// <summary>
/// Serializes the specified <see cref="Object"/> and writes the Json structure
/// to a <c>Stream</c> using the specified <see cref="JsonWriter"/>.
/// </summary>
/// <param name="jsonWriter">The <see cref="JsonWriter"/> used to write the Json structure.</param>
/// <param name="value">The <see cref="Object"/> to serialize.</param>
public void Serialize(JsonWriter jsonWriter, object value)
{
// No declared type supplied; see the (JsonWriter, object, Type) overload for when
// passing objectType matters (TypeNameHandling.Auto).
SerializeInternal(jsonWriter, value, null);
}
// Core serialization: temporarily pushes this serializer's options onto the writer,
// runs the internal writer (optionally wrapped for verbose tracing), then restores the
// writer's previous options so the caller-owned writer is left as it was found.
internal virtual void SerializeInternal(JsonWriter jsonWriter, object value, Type objectType)
{
    ValidationUtils.ArgumentNotNull(jsonWriter, "jsonWriter");

    // Set serialization options onto the writer, remembering previous values.
    Formatting? previousFormatting = null;
    if (_formatting != null && jsonWriter.Formatting != _formatting)
    {
        previousFormatting = jsonWriter.Formatting;
        jsonWriter.Formatting = _formatting.Value;
    }
    DateFormatHandling? previousDateFormatHandling = null;
    if (_dateFormatHandling != null && jsonWriter.DateFormatHandling != _dateFormatHandling)
    {
        previousDateFormatHandling = jsonWriter.DateFormatHandling;
        jsonWriter.DateFormatHandling = _dateFormatHandling.Value;
    }
    DateTimeZoneHandling? previousDateTimeZoneHandling = null;
    if (_dateTimeZoneHandling != null && jsonWriter.DateTimeZoneHandling != _dateTimeZoneHandling)
    {
        previousDateTimeZoneHandling = jsonWriter.DateTimeZoneHandling;
        jsonWriter.DateTimeZoneHandling = _dateTimeZoneHandling.Value;
    }
    FloatFormatHandling? previousFloatFormatHandling = null;
    if (_floatFormatHandling != null && jsonWriter.FloatFormatHandling != _floatFormatHandling)
    {
        previousFloatFormatHandling = jsonWriter.FloatFormatHandling;
        jsonWriter.FloatFormatHandling = _floatFormatHandling.Value;
    }
    StringEscapeHandling? previousStringEscapeHandling = null;
    if (_stringEscapeHandling != null && jsonWriter.StringEscapeHandling != _stringEscapeHandling)
    {
        previousStringEscapeHandling = jsonWriter.StringEscapeHandling;
        jsonWriter.StringEscapeHandling = _stringEscapeHandling.Value;
    }
    CultureInfo previousCulture = null;
    if (_culture != null && !_culture.Equals(jsonWriter.Culture))
    {
        previousCulture = jsonWriter.Culture;
        jsonWriter.Culture = _culture;
    }
    // BUG FIX: the restore below used to run whenever _dateFormatStringSet was true, even
    // if the writer's DateFormatString was never actually overridden
    // (jsonWriter.DateFormatString == _dateFormatString). In that case
    // previousDateFormatString was still null, so the restore wiped the writer's format
    // string. Track whether an override really happened instead.
    string previousDateFormatString = null;
    bool dateFormatStringChanged = false;
    if (_dateFormatStringSet && jsonWriter.DateFormatString != _dateFormatString)
    {
        previousDateFormatString = jsonWriter.DateFormatString;
        jsonWriter.DateFormatString = _dateFormatString;
        dateFormatStringChanged = true;
    }

    // When verbose tracing is enabled, wrap the writer so the emitted JSON can be logged.
    TraceJsonWriter traceJsonWriter = (TraceWriter != null && TraceWriter.LevelFilter >= TraceLevel.Verbose) ? new TraceJsonWriter(jsonWriter) : null;

    var serializerWriter = new JsonSerializerInternalWriter(this);
    serializerWriter.Serialize(traceJsonWriter ?? jsonWriter, value, objectType);

    if (traceJsonWriter != null)
        TraceWriter.Trace(TraceLevel.Verbose, "Serialized JSON: " + Environment.NewLine + traceJsonWriter.GetJson(), null);

    // Reset writer back to previous options.
    if (previousFormatting != null)
        jsonWriter.Formatting = previousFormatting.Value;
    if (previousDateFormatHandling != null)
        jsonWriter.DateFormatHandling = previousDateFormatHandling.Value;
    if (previousDateTimeZoneHandling != null)
        jsonWriter.DateTimeZoneHandling = previousDateTimeZoneHandling.Value;
    if (previousFloatFormatHandling != null)
        jsonWriter.FloatFormatHandling = previousFloatFormatHandling.Value;
    if (previousStringEscapeHandling != null)
        jsonWriter.StringEscapeHandling = previousStringEscapeHandling.Value;
    if (dateFormatStringChanged)
        jsonWriter.DateFormatString = previousDateFormatString;
    if (previousCulture != null)
        jsonWriter.Culture = previousCulture;
}
// Lazily creates the reference resolver on first use.
internal IReferenceResolver GetReferenceResolver()
{
    if (_referenceResolver != null)
    {
        return _referenceResolver;
    }
    _referenceResolver = new DefaultReferenceResolver();
    return _referenceResolver;
}
// Instance wrapper over the static lookup using this serializer's converter list
// (_converters may still be null if Converters was never accessed; the static
// overload handles that case).
internal JsonConverter GetMatchingConverter(Type type)
{
return GetMatchingConverter(_converters, type);
}
// Returns the first converter in the list that can handle objectType, or null
// when the list is null/empty or no converter matches. First match wins, so
// ordering of the converter list determines precedence.
internal static JsonConverter GetMatchingConverter(IList<JsonConverter> converters, Type objectType)
{
#if DEBUG
    ValidationUtils.ArgumentNotNull(objectType, "objectType");
#endif
    if (converters == null)
    {
        return null;
    }
    foreach (JsonConverter converter in converters)
    {
        if (converter.CanConvert(objectType))
        {
            return converter;
        }
    }
    return null;
}
// Raises the Error event. The handler is copied to a local first so an
// unsubscription on another thread between the check and the invoke cannot
// cause a NullReferenceException.
internal void OnError(ErrorEventArgs e)
{
    EventHandler<ErrorEventArgs> handler = Error;
    if (handler == null)
    {
        return;
    }
    handler(this, e);
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Threading;
using System.Windows;
using Prism.Events;
using Prism.Properties;
using Prism.Regions.Behaviors;
using Microsoft.Practices.ServiceLocation;
using Perspex;
using Perspex.Controls;
using Prism.Common;
namespace Prism.Regions
{
/// <summary>
/// This class is responsible for maintaining a collection of regions and attaching regions to controls.
/// </summary>
/// <remarks>
/// This class supplies the attached properties that can be used for simple region creation from XAML.
/// </remarks>
public class RegionManager : IRegionManager
{
#region Static members (for XAML support)
// Perspex exposes attached-property changes as observables rather than
// metadata callbacks, so the change handlers are wired up here once.
static RegionManager()
{
RegionNameProperty.Changed.Subscribe(args => OnSetRegionNameCallback(args.Sender, args));
RegionContextProperty.Changed.Subscribe(args => OnRegionContextChanged(args.Sender, args));
}
// NOTE(review): WeakDelegatesManager appears to hold UpdatingRegions listeners
// weakly so subscribers can be collected — confirm in its implementation.
private static readonly WeakDelegatesManager updatingRegionsListeners = new WeakDelegatesManager();
/// <summary>
/// Identifies the RegionName attached property.
/// </summary>
/// <remarks>
/// When a control has both the <see cref="RegionNameProperty"/> and
/// <see cref="RegionManagerProperty"/> attached properties set to
/// a value different than <see langword="null" /> and there is a
/// <see cref="IRegionAdapter"/> mapping registered for the control, it
/// will create and adapt a new region for that control, and register it
/// in the <see cref="IRegionManager"/> with the specified region name.
/// </remarks>
public static readonly PerspexProperty<string> RegionNameProperty =
PerspexProperty.RegisterAttached<PerspexObject, string>("RegionName", typeof(RegionManager)); // owner type is RegionManager
// Invoked when the RegionName attached property changes on any element.
private static void OnSetRegionNameCallback(PerspexObject element, PerspexPropertyChangedEventArgs args)
{
    // Skip region creation at design time.
    if (IsInDesignMode(element))
    {
        return;
    }
    CreateRegion(element);
}
/// <summary>
/// Sets the <see cref="RegionNameProperty"/> attached property.
/// </summary>
/// <param name="regionTarget">The object to adapt. This is typically a container (i.e a control).</param>
/// <param name="regionName">The name of the region to register.</param>
public static void SetRegionName(PerspexObject regionTarget, string regionName)
{
    if (regionTarget == null)
    {
        throw new ArgumentNullException(nameof(regionTarget));
    }
    regionTarget.SetValue(RegionNameProperty, regionName);
}
/// <summary>
/// Gets the value for the <see cref="RegionNameProperty"/> attached property.
/// </summary>
/// <param name="regionTarget">The object to adapt. This is typically a container (i.e a control).</param>
/// <returns>The name of the region that should be created when
/// <see cref="RegionManagerProperty"/> is also set in this element.</returns>
public static string GetRegionName(PerspexObject regionTarget)
{
    if (regionTarget == null)
    {
        throw new ArgumentNullException(nameof(regionTarget));
    }
    string regionName = regionTarget.GetValue(RegionNameProperty);
    return regionName;
}
// Private attached property storing the ObservableObject<IRegion> wrapper that
// GetObservableRegion hands out (one wrapper per view).
private static readonly PerspexProperty<ObservableObject<IRegion>> ObservableRegionProperty =
PerspexProperty.RegisterAttached<PerspexObject, ObservableObject<IRegion>>("ObservableRegion", typeof(RegionManager));
/// <summary>
/// Returns the <see cref="ObservableObject{T}"/> wrapper that can hold an <see cref="IRegion"/>
/// for the given view, creating and attaching a new wrapper on first use.
///
/// Subscribe to <see cref="ObservableObject{T}.ValuePropertyChanged"/> on the returned
/// wrapper to be notified once the region has been created and assigned by the
/// <see cref="RegionAdapterBase{T}"/>.
/// </summary>
/// <param name="view">The view that will host the region.</param>
/// <returns>Wrapper that can hold an <see cref="IRegion"/> value and can notify when the <see cref="IRegion"/> value changes.</returns>
public static ObservableObject<IRegion> GetObservableRegion(PerspexObject view)
{
    if (view == null)
    {
        throw new ArgumentNullException(nameof(view));
    }

    var wrapper = view.GetValue(ObservableRegionProperty);
    if (wrapper != null)
    {
        return wrapper;
    }

    // First request for this view: attach a fresh, empty wrapper.
    wrapper = new ObservableObject<IRegion>();
    view.SetValue(ObservableRegionProperty, wrapper);
    return wrapper;
}
// Resolves a DelayedRegionCreationBehavior from the service locator and points
// it at the element; the behavior performs the actual region creation once the
// element is ready.
private static void CreateRegion(PerspexObject element)
{
IServiceLocator locator = ServiceLocator.Current;
DelayedRegionCreationBehavior regionCreationBehavior = locator.GetInstance<DelayedRegionCreationBehavior>();
regionCreationBehavior.TargetElement = element;
regionCreationBehavior.Attach();
}
/// <summary>
/// Identifies the RegionManager attached property.
/// </summary>
/// <remarks>
/// When a control has both the <see cref="RegionNameProperty"/> and
/// <see cref="RegionManagerProperty"/> attached properties set to
/// a value different than <see langword="null" /> and there is a
/// <see cref="IRegionAdapter"/> mapping registered for the control, it
/// will create and adapt a new region for that control, and register it
/// in the <see cref="IRegionManager"/> with the specified region name.
/// </remarks>
// A region is created for a control only when both RegionName and this
// RegionManager attached property are set on it (see class remarks above).
public static readonly PerspexProperty<IRegionManager> RegionManagerProperty =
PerspexProperty.RegisterAttached<PerspexObject, IRegionManager>("RegionManager", typeof (RegionManager));
/// <summary>
/// Gets the value of the <see cref="RegionManagerProperty"/> attached property.
/// </summary>
/// <param name="target">The target element.</param>
/// <returns>The <see cref="IRegionManager"/> attached to the <paramref name="target"/> element.</returns>
public static IRegionManager GetRegionManager(PerspexObject target)
{
if (target == null) throw new ArgumentNullException(nameof(target));
return target.GetValue(RegionManagerProperty);
}
/// <summary>
/// Stores the <see cref="RegionManagerProperty"/> attached property on an element.
/// </summary>
/// <param name="target">The target element.</param>
/// <param name="value">The value.</param>
public static void SetRegionManager(PerspexObject target, IRegionManager value)
{
    if (target == null)
    {
        throw new ArgumentNullException(nameof(target));
    }

    target.SetValue(RegionManagerProperty, value);
}
/// <summary>
/// Identifies the RegionContext attached property.
/// </summary>
// The value set here is mirrored into the element's observable region context
// (see OnRegionContextChanged below) so hosted views can observe it.
public static readonly PerspexProperty<object> RegionContextProperty =
PerspexProperty.RegisterAttached<PerspexObject, object>("RegionContext", typeof(RegionManager));
// Change handler for RegionContextProperty: propagates the new value into the
// element's observable region context so hosted views are notified.
private static void OnRegionContextChanged(PerspexObject depObj, PerspexPropertyChangedEventArgs e)
{
    // Fixed: resolve the observable context once instead of performing a
    // second identical lookup just to assign the value.
    var observableContext = RegionContext.GetObservableContext(depObj);
    if (observableContext.Value != e.NewValue)
    {
        observableContext.Value = e.NewValue;
    }
}
/// <summary>
/// Reads the <see cref="RegionContextProperty"/> attached property.
/// </summary>
/// <param name="target">The target element.</param>
/// <returns>The region context to pass to the contained views.</returns>
public static object GetRegionContext(PerspexObject target)
{
    if (target == null)
    {
        throw new ArgumentNullException(nameof(target));
    }

    return target.GetValue(RegionContextProperty);
}
/// <summary>
/// Writes the <see cref="RegionContextProperty"/> attached property onto an element.
/// </summary>
/// <param name="target">The target element.</param>
/// <param name="value">The value.</param>
public static void SetRegionContext(PerspexObject target, object value)
{
    if (target == null)
    {
        throw new ArgumentNullException(nameof(target));
    }

    target.SetValue(RegionContextProperty, value);
}
/// <summary>
/// Notification used by attached behaviors to update the region managers appropriately if needed to.
/// </summary>
/// <remarks>This event uses weak references to the event handler to prevent this static event from keeping the
/// target element alive longer than expected.</remarks>
public static event EventHandler UpdatingRegions
{
// updatingRegionsListeners is a weak-event helper declared elsewhere in this class.
add { updatingRegionsListeners.AddListener(value); }
remove { updatingRegionsListeners.RemoveListener(value); }
}
/// <summary>
/// Notifies attached behaviors to update the region managers appropriately if needed to.
/// </summary>
/// <remarks>
/// This method is normally called internally, and there is usually no need to call this from user code.
/// </remarks>
public static void UpdateRegions()
{
try
{
updatingRegionsListeners.Raise(null, EventArgs.Empty);
}
catch (TargetInvocationException ex)
{
// Unwrap to the root cause for the message text only.
// NOTE(review): ex.InnerException (not rootException) is used as the
// InnerException of the rethrown wrapper — confirm this is intentional.
Exception rootException = ex.GetRootException();
throw new UpdateRegionsException(string.Format(CultureInfo.CurrentCulture,
Resources.UpdateRegionException, rootException), ex.InnerException);
}
}
// Design-mode is a global flag in Perspex, so the element argument is unused.
private static bool IsInDesignMode(PerspexObject element) => Perspex.Design.IsDesignMode;
#endregion
private readonly RegionCollection regionCollection;
/// <summary>
/// Initializes a new instance of <see cref="RegionManager"/>.
/// </summary>
public RegionManager()
{
// The collection keeps a back-reference so newly added regions can have
// their RegionManager assigned to this instance.
regionCollection = new RegionCollection(this);
}
/// <summary>
/// Gets a collection of <see cref="IRegion"/> that identify each region by name. You can use this collection to add or remove regions to the current region manager.
/// </summary>
/// <value>A <see cref="IRegionCollection"/> with all the registered regions.</value>
public IRegionCollection Regions => regionCollection;
/// <summary>
/// Creates a new region manager.
/// </summary>
/// <returns>A new region manager that can be used as a different scope from the current region manager.</returns>
public IRegionManager CreateRegionManager() => new RegionManager();
/// <summary>
/// Add a view to the Views collection of a Region. Note that the region must already exist in this regionmanager.
/// </summary>
/// <param name="regionName">The name of the region to add a view to.</param>
/// <param name="view">The view to add to the views collection.</param>
/// <returns>The RegionManager, to easily add several views.</returns>
public IRegionManager AddToRegion(string regionName, object view)
{
    if (!Regions.ContainsRegionWithName(regionName))
    {
        // Fixed: format with CultureInfo.CurrentCulture like the rest of this
        // class (Thread.CurrentThread.CurrentCulture was used before), and use
        // nameof for the parameter name (same string at runtime).
        throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, Resources.RegionNotFound, regionName), nameof(regionName));
    }

    return Regions[regionName].Add(view);
}
/// <summary>
/// Associate a view with a region by registering a type. When the region gets displayed
/// this type will be resolved using the ServiceLocator into a concrete instance. The instance
/// will be added to the Views collection of the region.
/// </summary>
/// <param name="regionName">The name of the region to associate the view with.</param>
/// <param name="viewType">The type of the view to register.</param>
/// <returns>The regionmanager, for adding several views easily.</returns>
public IRegionManager RegisterViewWithRegion(string regionName, Type viewType)
{
    ServiceLocator.Current.GetInstance<IRegionViewRegistry>().RegisterViewWithRegion(regionName, viewType);
    return this;
}
/// <summary>
/// Associate a view with a region, using a delegate to resolve a concrete instance of the view.
/// When the region gets displayed, this delegate will be called and the result will be added to the
/// views collection of the region.
/// </summary>
/// <param name="regionName">The name of the region to associate the view with.</param>
/// <param name="getContentDelegate">The delegate used to resolve a concrete instance of the view.</param>
/// <returns>The regionmanager, for adding several views easily.</returns>
public IRegionManager RegisterViewWithRegion(string regionName, Func<object> getContentDelegate)
{
    ServiceLocator.Current.GetInstance<IRegionViewRegistry>().RegisterViewWithRegion(regionName, getContentDelegate);
    return this;
}
/// <summary>
/// Navigates the region with the given name to the supplied URI.
/// </summary>
/// <param name="regionName">The name of the region to call Navigate on.</param>
/// <param name="source">The URI of the content to display.</param>
/// <param name="navigationCallback">The navigation callback.</param>
public void RequestNavigate(string regionName, Uri source, Action<NavigationResult> navigationCallback)
{
    if (navigationCallback == null)
    {
        throw new ArgumentNullException(nameof(navigationCallback));
    }

    if (!Regions.ContainsRegionWithName(regionName))
    {
        // Unknown region: report a failed navigation rather than throwing.
        navigationCallback(new NavigationResult(new NavigationContext(null, source), false));
        return;
    }

    Regions[regionName].RequestNavigate(source, navigationCallback);
}
/// <summary>
/// Navigates the region with the given name to the supplied URI, discarding the result.
/// </summary>
/// <param name="regionName">The name of the region to call Navigate on.</param>
/// <param name="source">The URI of the content to display.</param>
public void RequestNavigate(string regionName, Uri source)
{
    // Delegate to the callback overload with a no-op callback.
    RequestNavigate(regionName, source, _ => { });
}
/// <summary>
/// Navigates the region with the given name to the supplied URI string.
/// </summary>
/// <param name="regionName">The name of the region to call Navigate on.</param>
/// <param name="source">The URI of the content to display.</param>
/// <param name="navigationCallback">The navigation callback.</param>
public void RequestNavigate(string regionName, string source, Action<NavigationResult> navigationCallback)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    var uri = new Uri(source, UriKind.RelativeOrAbsolute);
    RequestNavigate(regionName, uri, navigationCallback);
}
/// <summary>
/// Navigates the region with the given name to the supplied URI string, discarding the result.
/// </summary>
/// <param name="regionName">The name of the region to call Navigate on.</param>
/// <param name="source">The URI of the content to display.</param>
public void RequestNavigate(string regionName, string source)
{
    // Delegate to the callback overload with a no-op callback.
    RequestNavigate(regionName, source, _ => { });
}
/// <summary>
/// This method allows an IRegionManager to locate a specified region and navigate in it to the specified target Uri, passing a navigation callback and an instance of NavigationParameters, which holds a collection of object parameters.
/// </summary>
/// <param name="regionName">The name of the region where the navigation will occur.</param>
/// <param name="target">A Uri that represents the target where the region will navigate.</param>
/// <param name="navigationCallback">The navigation callback that will be executed after the navigation is completed.</param>
/// <param name="navigationParameters">An instance of NavigationParameters, which holds a collection of object parameters.</param>
public void RequestNavigate(string regionName, Uri target, Action<NavigationResult> navigationCallback, NavigationParameters navigationParameters)
{
    if (Regions.ContainsRegionWithName(regionName))
    {
        Regions[regionName].RequestNavigate(target, navigationCallback, navigationParameters);
    }
    else if (navigationCallback != null)
    {
        // Fixed inconsistency: the parameterless-NavigationParameters overload
        // reports a failed navigation for an unknown region, whereas this
        // overload silently did nothing.
        navigationCallback(new NavigationResult(new NavigationContext(null, target), false));
    }
}
/// <summary>
/// This method allows an IRegionManager to locate a specified region and navigate in it to the specified target string, passing a navigation callback and an instance of NavigationParameters, which holds a collection of object parameters.
/// </summary>
/// <param name="regionName">The name of the region where the navigation will occur.</param>
/// <param name="target">A string that represents the target where the region will navigate.</param>
/// <param name="navigationCallback">The navigation callback that will be executed after the navigation is completed.</param>
/// <param name="navigationParameters">An instance of NavigationParameters, which holds a collection of object parameters.</param>
public void RequestNavigate(string regionName, string target, Action<NavigationResult> navigationCallback, NavigationParameters navigationParameters)
{
    // Fixed: validate target like the other string-based overload does, so the
    // ArgumentNullException names this parameter rather than Uri's "uriString".
    if (target == null) throw new ArgumentNullException(nameof(target));
    RequestNavigate(regionName, new Uri(target, UriKind.RelativeOrAbsolute), navigationCallback, navigationParameters);
}
/// <summary>
/// This method allows an IRegionManager to locate a specified region and navigate in it to the specified target Uri, passing an instance of NavigationParameters, which holds a collection of object parameters.
/// </summary>
/// <param name="regionName">The name of the region where the navigation will occur.</param>
/// <param name="target">A Uri that represents the target where the region will navigate.</param>
/// <param name="navigationParameters">An instance of NavigationParameters, which holds a collection of object parameters.</param>
public void RequestNavigate(string regionName, Uri target, NavigationParameters navigationParameters)
{
    // Delegate to the callback overload with a no-op callback.
    RequestNavigate(regionName, target, _ => { }, navigationParameters);
}
/// <summary>
/// This method allows an IRegionManager to locate a specified region and navigate in it to the specified target string, passing an instance of NavigationParameters, which holds a collection of object parameters.
/// </summary>
/// <param name="regionName">The name of the region where the navigation will occur.</param>
/// <param name="target">A string that represents the target where the region will navigate.</param>
/// <param name="navigationParameters">An instance of NavigationParameters, which holds a collection of object parameters.</param>
public void RequestNavigate(string regionName, string target, NavigationParameters navigationParameters)
{
    // Fixed: validate target like the other string-based overloads, so the
    // ArgumentNullException names this parameter rather than Uri's "uriString".
    if (target == null) throw new ArgumentNullException(nameof(target));
    RequestNavigate(regionName, new Uri(target, UriKind.RelativeOrAbsolute), nr => { }, navigationParameters);
}
// Name-keyed collection of regions owned by a RegionManager. Each public
// member first calls RegionManager.UpdateRegions() so that attached behaviors
// get a chance to register pending regions before the collection is consulted.
private class RegionCollection : IRegionCollection
{
// Owning manager; assigned to every region added to this collection.
private readonly IRegionManager regionManager;
private readonly List<IRegion> regions;
public RegionCollection(IRegionManager regionManager)
{
this.regionManager = regionManager;
this.regions = new List<IRegion>();
}
// Raised when a region is added to or removed from the collection.
public event NotifyCollectionChangedEventHandler CollectionChanged;
public IEnumerator<IRegion> GetEnumerator()
{
UpdateRegions();
return this.regions.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
// Indexer by region name; throws KeyNotFoundException for unknown names.
public IRegion this[string regionName]
{
get
{
UpdateRegions();
IRegion region = GetRegionByName(regionName);
if (region == null)
{
throw new KeyNotFoundException(string.Format(CultureInfo.CurrentUICulture, Resources.RegionNotInRegionManagerException, regionName));
}
return region;
}
}
// Adds a named region, wiring its RegionManager back-reference and raising
// CollectionChanged. The region must already have a non-null, unique name.
public void Add(IRegion region)
{
if (region == null) throw new ArgumentNullException("region");
UpdateRegions();
if (region.Name == null)
{
throw new InvalidOperationException(Resources.RegionNameCannotBeEmptyException);
}
if (this.GetRegionByName(region.Name) != null)
{
throw new ArgumentException(string.Format(CultureInfo.InvariantCulture,
Resources.RegionNameExistsException, region.Name));
}
this.regions.Add(region);
region.RegionManager = this.regionManager;
// NOTE(review): the change notification always reports index 0 even though
// the region is appended at the end — confirm listeners ignore the index.
this.OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, region, 0));
}
// Removes the region with the given name, clearing its RegionManager
// back-reference; returns false when no such region exists.
public bool Remove(string regionName)
{
UpdateRegions();
bool removed = false;
IRegion region = GetRegionByName(regionName);
if (region != null)
{
removed = true;
this.regions.Remove(region);
region.RegionManager = null;
// NOTE(review): index 0 is reported here too, regardless of the
// region's actual position in the list.
this.OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, region, 0));
}
return removed;
}
public bool ContainsRegionWithName(string regionName)
{
UpdateRegions();
return GetRegionByName(regionName) != null;
}
/// <summary>
/// Adds a region to the regionmanager with the name received as argument.
/// </summary>
/// <param name="regionName">The name to be given to the region.</param>
/// <param name="region">The region to be added to the regionmanager.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="region"/> is <see langword="null"/>.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="regionName"/> and <paramref name="region"/>'s name do not match and the <paramref name="region"/> <see cref="IRegion.Name"/> is not <see langword="null"/>.</exception>
public void Add(string regionName, IRegion region)
{
if (region == null) throw new ArgumentNullException("region");
if (region.Name != null && region.Name != regionName)
{
throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, Resources.RegionManagerWithDifferentNameException, region.Name, regionName), "regionName");
}
if (region.Name == null)
{
region.Name = regionName;
}
Add(region);
}
// Linear scan; null when no region carries the requested name.
private IRegion GetRegionByName(string regionName)
{
return this.regions.FirstOrDefault(r => r.Name == regionName);
}
private void OnCollectionChanged(NotifyCollectionChangedEventArgs notifyCollectionChangedEventArgs)
{
// Copy to a local to stay safe against concurrent unsubscription.
var handler = this.CollectionChanged;
if (handler != null)
{
handler(this, notifyCollectionChangedEventArgs);
}
}
}
}
}
| |
/*
* This file is part of UniERM ReportDesigner, based on reportFU by Josh Wilson,
* the work of Kim Sheffield and the fyiReporting project.
*
* Prior Copyrights:
* _________________________________________________________
* |Copyright (C) 2010 devFU Pty Ltd, Josh Wilson and Others|
* | (http://reportfu.org) |
* =========================================================
* _________________________________________________________
* |Copyright (C) 2004-2008 fyiReporting Software, LLC |
* |For additional information, email info@fyireporting.com |
* |or visit the website www.fyiReporting.com. |
* =========================================================
*
* License:
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Diagnostics;
namespace Reporting.Rdl
{
/// <summary>
/// The PageTree object contains references to all the pages used within the Pdf.
/// All individual pages are referenced through the kids string
/// </summary>
internal class PdfPageTree:PdfBase
{
    private string pageTree;                        // rendered page-tree dictionary
    // Fixed: accumulate the /Kids array in a StringBuilder instead of repeated
    // string concatenation (which is O(n^2) over n pages). Seeded with the
    // opening "[ "; the closing ']' is supplied by the format string below.
    private readonly System.Text.StringBuilder kids;
    private int MaxPages;                           // number of pages added so far

    internal PdfPageTree(PdfAnchor pa):base(pa)
    {
        kids = new System.Text.StringBuilder("[ ");
        MaxPages = 0;
    }

    /// <summary>
    /// Add a page to the Page Tree. objNum is the object number of the page
    /// to be added.
    /// </summary>
    /// <param name="objNum">PDF object number of the page being referenced.</param>
    internal void AddPage(int objNum)
    {
        Debug.Assert(objNum >= 0 && objNum <= this.Current);
        MaxPages++;
        kids.AppendFormat("{0} 0 R ", objNum);
    }

    /// <summary>
    /// Returns the Page Tree dictionary encoded as UTF8 bytes.
    /// </summary>
    /// <returns>Encoded dictionary; <paramref name="size"/> receives its byte length.</returns>
    internal byte[] GetPageTree(long filePos,out int size)
    {
        // {2} formats the StringBuilder via ToString(); the ']' closes /Kids.
        pageTree = string.Format("\r\n{0} 0 obj<</Count {1}/Kids {2}]>> endobj\t",
            this.objectNum, MaxPages, kids);
        return this.GetUTF8Bytes(pageTree, filePos, out size);
    }
}
/// <summary>
/// This class represents individual pages within the pdf.
/// The contents of the page belong to this class
/// </summary>
// Builds one PDF page object as raw dictionary text. CreatePage opens the
// dictionary; AddResource/AddHyperlink/AddToolTip accumulate fragments; the
// final GetPageDict call assembles and encodes the whole object.
internal class PdfPage:PdfBase
{
private string page;            // page dictionary under construction
private string pageSize;        // "/MediaBox"-style rectangle text
private string fontRef;         // accumulated font resource entries
private string imageRef;        // accumulated XObject (image) entries
private string patternRef;      // accumulated pattern entries
private string colorSpaceRef;   // color-space entry (single /CS1 slot)
private string resourceDict,contents;
private string annotsDict;      // accumulated /Annots array (links, tooltips)
internal PdfPage(PdfAnchor pa):base(pa)
{
resourceDict=null;
contents=null;
pageSize=null;
fontRef=null;
imageRef=null;
annotsDict=null;
colorSpaceRef=null;
patternRef=null;
}
/// <summary>
/// Create The Pdf page: opens the page dictionary with its parent reference
/// and MediaBox/CropBox rectangles. Must be called before GetPageDict.
/// </summary>
internal void CreatePage(int refParent,PdfPageSize pSize)
{
pageSize=string.Format("[0 0 {0} {1}]",pSize.xWidth,pSize.yHeight);
page=string.Format("\r\n{0} 0 obj<</Type /Page/Parent {1} 0 R/Rotate 0/MediaBox {2}/CropBox {2}",
this.objectNum,refParent,pageSize);
}
// Appends a /Link annotation; the first call opens the /Annots array, which
// GetPageDict later closes. Note width/height adjust x/y to the PDF rect.
internal void AddHyperlink(float x, float y, float height, float width, string url)
{
if (annotsDict == null)
annotsDict = "\r/Annots [";
annotsDict += string.Format(@"<</Type /Annot /Subtype /Link /Rect [{0} {1} {2} {3}] /Border [0 0 0] /A <</S /URI /URI ({4})>>>>",
x, y, x+width, y-height, url);
}
// Appends a /Square annotation used as a tooltip/comment box.
internal void AddToolTip(float x, float y, float height, float width, string tooltip)
{
if (annotsDict == null)
annotsDict = "\r/Annots [";
annotsDict += string.Format(@"<</Type /Annot /Rect [{0} {1} {2} {3}] /Border [0 0 0] /IC [1.0 1.0 0.666656] /CA 0.00500488 /C [1.0 0.0 0.0] /Name/Comment /T(Value) /Contents({4}) /F 288 /Subtype/Square>>", /*/A <</S /URI /URI ({4})>>*/
x, y, x + width, y - height, tooltip);
}
/// <summary>
/// Add Pattern Resources to the pdf page.
/// </summary>
internal void AddResource(PdfPattern patterns,int contentRef)
{
foreach (PdfPatternEntry pat in patterns.Patterns.Values)
{
patternRef+=string.Format("/{0} {1} 0 R",pat.pattern,pat.objectNum);
}
if(contentRef>0)
{
contents=string.Format("/Contents {0} 0 R",contentRef);
}
}
/// <summary>
/// Add Font Resources to the pdf page.
/// </summary>
internal void AddResource(PdfFonts fonts,int contentRef)
{
foreach (PdfFontEntry font in fonts.Fonts.Values)
{
fontRef+=string.Format("/{0} {1} 0 R",font.font,font.objectNum);
}
if(contentRef>0)
{
contents=string.Format("/Contents {0} 0 R",contentRef);
}
}
// Registers a pattern object as the page's /CS1 color space.
// NOTE(review): contentRef is ignored by this overload — confirm intentional.
internal void AddResource(PatternObj po,int contentRef)
{
colorSpaceRef=string.Format("/CS1 {0} 0 R",po.objectNum);
}
/// <summary>
/// Add Image Resources to the pdf page.
/// </summary>
internal void AddResource(PdfImageEntry ie,int contentRef)
{
// The IndexOf guard ensures each image is referenced only once per page.
if (imageRef == null || imageRef.IndexOf("/"+ie.name) < 0) // only need it once per page
// imageRef+=string.Format("/XObject << /{0} {1} 0 R >>",ie.name,ie.objectNum);
imageRef+=string.Format("/{0} {1} 0 R ",ie.name,ie.objectNum);
if(contentRef>0)
{
contents=string.Format("/Contents {0} 0 R",contentRef);
}
}
/// <summary>
/// Get the Page Dictionary to be written to the file: closes the resource
/// dictionary, the optional /Annots array and the page object itself, then
/// encodes the result as UTF8 bytes (size receives the byte length).
/// </summary>
/// <returns></returns>
internal byte[] GetPageDict(long filePos,out int size)
{
System.Text.StringBuilder sb = new System.Text.StringBuilder();
//will need to add pattern here
sb.AppendFormat("/Resources<<\r\n/Font<<{0}>>",fontRef);
if (patternRef != null)
sb.AppendFormat("\r\n/Pattern <<{0}>>",patternRef);
if (colorSpaceRef != null)
sb.AppendFormat("\r\n/ColorSpace <<{0}>>",colorSpaceRef);
sb.Append("\r\n/ProcSet[/PDF/Text");
if (imageRef == null)
sb.Append("]>>");
else
sb.AppendFormat("\r\n/ImageB]/XObject <<{0}>>>>",imageRef);
resourceDict = sb.ToString();
//if (imageRef == null)
//    resourceDict=string.Format("/Resources<</Font<<{0}>>/ProcSet[/PDF/Text]>>",fontRef);
//else
//    resourceDict=string.Format("/Resources<</Font<<{0}>>/ProcSet[/PDF/Text/ImageB]/XObject <<{1}>>>>",fontRef, imageRef);
if (annotsDict != null)
page += (annotsDict+"]\r");
page+=resourceDict+"\r\n"+contents+">>\r\nendobj\r\n";
return this.GetUTF8Bytes(page,filePos,out size);
}
}
/// <summary>
/// Specify the page size in 1/72 inches units.
/// </summary>
internal struct PdfPageSize
{
    internal int xWidth;        // page width in points
    internal int yHeight;       // page height in points
    internal int leftMargin;
    internal int rightMargin;
    internal int topMargin;
    internal int bottomMargin;

    // All margins start at zero; use SetMargins to change them.
    internal PdfPageSize(int width, int height)
    {
        xWidth = width;
        yHeight = height;
        leftMargin = rightMargin = topMargin = bottomMargin = 0;
    }

    // Sets all four margins; note the argument order is L, T, R, B.
    internal void SetMargins(int L, int T, int R, int B)
    {
        leftMargin = L;
        topMargin = T;
        rightMargin = R;
        bottomMargin = B;
    }
}
}
| |
using System;
namespace Versioning
{
public class StockData : Sage_Container, IData
{
    /* Autogenerated by sage_wrapper_generator.pl.
       Refactored: every member used to repeat an identical 7-way version
       switch. The switch now runs exactly once, in the constructor, which
       binds a set of dispatch delegates to the wrapped version-specific
       object; each member simply invokes its delegate. Behavior, the public
       surface and the sd11..sd17 fields are unchanged. */
    SageDataObject110.StockData sd11;
    SageDataObject120.StockData sd12;
    SageDataObject130.StockData sd13;
    SageDataObject140.StockData sd14;
    SageDataObject150.StockData sd15;
    SageDataObject160.StockData sd16;
    SageDataObject170.StockData sd17;

    // Per-version dispatch targets, assigned once in the constructor.
    readonly Func<OpenMode, bool> m_open;
    readonly Action m_close;
    readonly Func<int, bool> m_read;
    readonly Func<int, bool> m_write;
    readonly Func<int, bool> m_seek;
    readonly Func<int, bool> m_lock;
    readonly Func<int, bool> m_unlock;
    readonly Func<object, object, bool> m_findFirst;
    readonly Func<object, object, bool> m_findNext;
    readonly Func<int> m_count;

    public StockData(object inner, int version)
        : base(version) {
        switch (m_version) {
            case 11: {
                sd11 = (SageDataObject110.StockData)inner;
                m_fields = new Fields(sd11.Fields,m_version);
                m_open = mode => sd11.Open((SageDataObject110.OpenMode)mode);
                m_close = sd11.Close;
                m_read = sd11.Read; m_write = sd11.Write; m_seek = sd11.Seek;
                m_lock = sd11.Lock; m_unlock = sd11.Unlock;
                m_findFirst = sd11.FindFirst; m_findNext = sd11.FindNext;
                m_count = sd11.Count;
                break;
            }
            case 12: {
                sd12 = (SageDataObject120.StockData)inner;
                m_fields = new Fields(sd12.Fields,m_version);
                m_open = mode => sd12.Open((SageDataObject120.OpenMode)mode);
                m_close = sd12.Close;
                m_read = sd12.Read; m_write = sd12.Write; m_seek = sd12.Seek;
                m_lock = sd12.Lock; m_unlock = sd12.Unlock;
                m_findFirst = sd12.FindFirst; m_findNext = sd12.FindNext;
                m_count = sd12.Count;
                break;
            }
            case 13: {
                sd13 = (SageDataObject130.StockData)inner;
                m_fields = new Fields(sd13.Fields,m_version);
                m_open = mode => sd13.Open((SageDataObject130.OpenMode)mode);
                m_close = sd13.Close;
                m_read = sd13.Read; m_write = sd13.Write; m_seek = sd13.Seek;
                m_lock = sd13.Lock; m_unlock = sd13.Unlock;
                m_findFirst = sd13.FindFirst; m_findNext = sd13.FindNext;
                m_count = sd13.Count;
                break;
            }
            case 14: {
                sd14 = (SageDataObject140.StockData)inner;
                m_fields = new Fields(sd14.Fields,m_version);
                m_open = mode => sd14.Open((SageDataObject140.OpenMode)mode);
                m_close = sd14.Close;
                m_read = sd14.Read; m_write = sd14.Write; m_seek = sd14.Seek;
                m_lock = sd14.Lock; m_unlock = sd14.Unlock;
                m_findFirst = sd14.FindFirst; m_findNext = sd14.FindNext;
                m_count = sd14.Count;
                break;
            }
            case 15: {
                sd15 = (SageDataObject150.StockData)inner;
                m_fields = new Fields(sd15.Fields,m_version);
                m_open = mode => sd15.Open((SageDataObject150.OpenMode)mode);
                m_close = sd15.Close;
                m_read = sd15.Read; m_write = sd15.Write; m_seek = sd15.Seek;
                m_lock = sd15.Lock; m_unlock = sd15.Unlock;
                m_findFirst = sd15.FindFirst; m_findNext = sd15.FindNext;
                m_count = sd15.Count;
                break;
            }
            case 16: {
                sd16 = (SageDataObject160.StockData)inner;
                m_fields = new Fields(sd16.Fields,m_version);
                m_open = mode => sd16.Open((SageDataObject160.OpenMode)mode);
                m_close = sd16.Close;
                m_read = sd16.Read; m_write = sd16.Write; m_seek = sd16.Seek;
                m_lock = sd16.Lock; m_unlock = sd16.Unlock;
                m_findFirst = sd16.FindFirst; m_findNext = sd16.FindNext;
                m_count = sd16.Count;
                break;
            }
            case 17: {
                sd17 = (SageDataObject170.StockData)inner;
                m_fields = new Fields(sd17.Fields,m_version);
                m_open = mode => sd17.Open((SageDataObject170.OpenMode)mode);
                m_close = sd17.Close;
                m_read = sd17.Read; m_write = sd17.Write; m_seek = sd17.Seek;
                m_lock = sd17.Lock; m_unlock = sd17.Unlock;
                m_findFirst = sd17.FindFirst; m_findNext = sd17.FindNext;
                m_count = sd17.Count;
                break;
            }
            default: throw new InvalidOperationException("ver");
        }
    }

    /* Autogenerated with data_generator.pl */
    const string ACCOUNT_REF = "ACCOUNT_REF";
    const string STOCKDATA = "StockData";

    /// <summary>Opens the wrapped stock data object in the given mode.</summary>
    public bool Open(OpenMode mode) { return m_open(mode); }

    /// <summary>Closes the wrapped stock data object.</summary>
    public void Close() { m_close(); }

    /// <summary>Reads the record with the given record number.</summary>
    public bool Read(int IRecNo) { return m_read(IRecNo); }

    /// <summary>Writes the record with the given record number.</summary>
    public bool Write(int IRecNo) { return m_write(IRecNo); }

    /// <summary>Positions on the record with the given record number.</summary>
    public bool Seek(int IRecNo) { return m_seek(IRecNo); }

    /// <summary>Locks the record with the given record number.</summary>
    public bool Lock(int IRecNo) { return m_lock(IRecNo); }

    /// <summary>Unlocks the record with the given record number.</summary>
    public bool Unlock(int IRecNo) { return m_unlock(IRecNo); }

    /// <summary>Finds the first record whose field matches the search value.</summary>
    public bool FindFirst(object varField, object varSearch) { return m_findFirst(varField, varSearch); }

    /// <summary>Finds the next record whose field matches the search value.</summary>
    public bool FindNext(object varField, object varSearch) { return m_findNext(varField, varSearch); }

    /// <summary>Gets the number of records in the wrapped object.</summary>
    public int Count {
        get { return m_count(); }
    }
}
| |
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using TXTextControl.ReportingCloud;
using System.IO;
using System.Collections.Generic;
namespace UnitTestsCoreWrapper
{
[TestClass]
public class UnitTest1
{
// Credentials and endpoint for the ReportingCloud backend.
// NOTE(review): username/password are empty in source control — they must be
// supplied (or replaced with an environment lookup) before these tests can run.
string sUsername = "";
string sPassword = "";
Uri uriBasePath = new Uri("https://api.reporting.cloud/");
[TestMethod()]
public void GetDocumentTrackedChangesTest()
{
    try
    {
        ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);

        // Request the tracked changes contained in the sample document.
        byte[] bDocument = File.ReadAllBytes("documents/tracked.tx");
        List<TrackedChange> trackedChanges = rc.Processing.Review.GetTrackedChanges(bDocument);

        // Fixed: the original computed the result but asserted nothing.
        Assert.IsNotNull(trackedChanges);
    }
    catch (Exception exc)
    {
        Assert.Fail(exc.Message);
    }
}
// Removes the first tracked change from the sample document and verifies the
// change count drops. NOTE(review): when modifiedDocument.Removed is false the
// test passes without asserting anything — confirm that is acceptable.
[TestMethod()]
public void RemoveDocumentTrackedChangeTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// load the sample document that contains tracked changes
byte[] bDocument = File.ReadAllBytes("documents/tracked.tx");
List<TrackedChange> trackedChanges = rc.Processing.Review.GetTrackedChanges(bDocument);
// NOTE(review): numTrackedChanges is never used below.
int numTrackedChanges = trackedChanges.Count;
TrackedChangeModifiedDocument modifiedDocument =
rc.Processing.Review.RemoveTrackedChange(bDocument, trackedChanges[0].Id, true);
if (modifiedDocument.Removed == true)
{
List<TrackedChange> trackedChangesModified =
rc.Processing.Review.GetTrackedChanges(modifiedDocument.Document);
Assert.IsFalse(trackedChanges.Count == trackedChangesModified.Count);
}
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
// Exercises API-key based authentication: obtains (or creates) a key, builds a
// second client with it and checks that account settings can be read.
[TestMethod()]
public void ReportingCloudAPIKeyTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
string sAPIKey;
bool bKeyCreated = false;
// create a new key, if no keys exist
if (rc.GetAccountAPIKeys().Count == 0)
{
sAPIKey = rc.CreateAccountAPIKey();
bKeyCreated = true;
}
else
sAPIKey = rc.GetAccountAPIKeys()[0].Key;
// create new instance with new API Key
ReportingCloud rc2 = new ReportingCloud(sAPIKey, uriBasePath);
// check account settings
var accountSettings = rc2.GetAccountSettings();
Assert.IsFalse(accountSettings.MaxDocuments == 0);
// remove the key again, but only if this test created it
if (bKeyCreated == true)
rc.DeleteAccountAPIKey(sAPIKey);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void GetAccountAPIKeysTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// create a new key
string sAPIKey = rc.CreateAccountAPIKey();
// get all keys
List<APIKey> lAPIKeys = rc.GetAccountAPIKeys();
// check, if at least 1 key is in list
Assert.IsFalse(lAPIKeys.Count == 0);
// clean up
rc.DeleteAccountAPIKey(sAPIKey);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void AvailableDictionariesTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
string[] saDictionaries = rc.GetAvailableDictionaries();
// check, if images are created
Assert.IsFalse(saDictionaries.Length == 0);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void ShareDocumentTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
List<Template> lTemplates = rc.ListTemplates();
string sSharedHash = rc.ShareDocument(lTemplates[0].TemplateName);
// check, if images are created
Assert.IsFalse(sSharedHash.Length == 0);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod]
public void UploadTemplateTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
rc.UploadTemplate(sTempFilename, bDocument);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void FindAndReplaceDocumentTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/replace_template.tx");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
rc.UploadTemplate(sTempFilename, bDocument);
// create a new FindAndReplaceBody object
FindAndReplaceBody body = new FindAndReplaceBody();
body.FindAndReplaceData = new List<string[]>()
{
new string[] { "%%TextToReplace%%", "ReplacedString" },
new string[] { "%%SecondTextToReplace%%", "ReplacedString2" }
};
// merge the document
byte[] results = rc.FindAndReplaceDocument(body, sTempFilename, ReturnFormat.HTML);
string bHtmlDocument = System.Text.Encoding.UTF8.GetString(results);
// check whether the created HTML contains the test string
Assert.IsTrue(bHtmlDocument.Contains("ReplacedString"));
// delete the template
rc.DeleteTemplate(sTempFilename);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void MergeDocumentTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
rc.UploadTemplate(sTempFilename, bDocument);
List<Invoice> invoices = new List<Invoice>();
// create dummy data
Invoice invoice = new Invoice();
invoice.yourcompany_companyname = "Text Control, LLC";
invoice.invoice_no = "Test_R667663";
invoice.billto_name = "<html><strong>Test</strong> <em>Company</em></html>";
Invoice invoice2 = new Invoice();
invoice2.yourcompany_companyname = "Text Control 2, LLC";
invoice2.invoice_no = "Test_R667663";
invoice2.billto_name = "<html><strong>Test</strong> <em>Company</em></html>";
invoices.Add(invoice);
invoices.Add(invoice2);
// create a new MergeBody object
MergeBody body = new MergeBody();
body.MergeData = invoices;
MergeSettings settings = new MergeSettings();
settings.Author = "Text Control GmbH";
settings.MergeHtml = true;
settings.Culture = "de-DE";
body.MergeSettings = settings;
// merge the document
List<byte[]> results = rc.MergeDocument(body, sTempFilename, ReturnFormat.HTML, false, false);
string bHtmlDocument = System.Text.Encoding.UTF8.GetString(results[0]);
// check whether the created HTML contains the test string
Assert.IsTrue(bHtmlDocument.Contains("Test_R667663"));
// delete the template
rc.DeleteTemplate(sTempFilename);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void AppendDocumentTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// create a new MergeBody object
AppendBody body = new AppendBody();
body.Documents.Add(new AppendDocument()
{
Document = File.ReadAllBytes("documents/invoice.tx"),
DocumentDivider = DocumentDivider.None
});
body.Documents.Add(new AppendDocument()
{
Document = File.ReadAllBytes("documents/sample_docx.docx"),
DocumentDivider = DocumentDivider.NewSection
});
DocumentSettings settings = new DocumentSettings();
settings.Author = "Text Control GmbH";
body.DocumentSettings = settings;
// append the documents
byte[] results = rc.AppendDocument(body, ReturnFormat.HTML, true);
string bHtmlDocument = System.Text.Encoding.UTF8.GetString(results);
// check whether the created HTML contains the test string
Assert.IsTrue(bHtmlDocument.Contains("<title>ReportingCloud Test Mode</title>"));
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void ConvertDocumentTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
byte[] bHtml = rc.ConvertDocument(bDocument, ReturnFormat.HTML);
Assert.IsTrue(System.Text.Encoding.UTF8.GetString(bHtml).Contains("INVOICE"));
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void GetTemplateInfoTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/test.doc");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".doc";
rc.UploadTemplate(sTempFilename, bDocument);
// get template information
TemplateInfo templateInfo = rc.GetTemplateInfo(sTempFilename);
// check, if images are created
Assert.IsFalse(templateInfo.TemplateName == "");
// delete temp file
rc.DeleteTemplate(sTempFilename);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void GetSuggestionsTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
string[] saSuggestions = rc.GetSuggestions("dooper", rc.GetAvailableDictionaries()[0], 10);
// check, if images are created
Assert.IsFalse(saSuggestions.Length == 0);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void GetAccountSettingsTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
AccountSettings settings = rc.GetAccountSettings();
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
rc.UploadTemplate(sTempFilename, bDocument);
// check, if the count went up
Assert.AreEqual(settings.UploadedTemplates + 1, rc.GetTemplateCount());
// delete temp document
rc.DeleteTemplate(sTempFilename);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void GetTemplateCountTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// store current template number
int iTemplateCount = rc.GetTemplateCount();
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
rc.UploadTemplate(sTempFilename, bDocument);
// check, if the count went up
Assert.AreEqual(iTemplateCount + 1, rc.GetTemplateCount());
// delete temp document
rc.DeleteTemplate(sTempFilename);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void DownloadTemplateTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload local test document
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
rc.UploadTemplate(sTempFilename, bDocument);
// download document
byte[] bTemplate = rc.DownloadTemplate(sTempFilename);
// compare documents
Assert.IsNotNull(bTemplate);
rc.DeleteTemplate(sTempFilename);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void GetTemplatePageCountTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
rc.UploadTemplate(sTempFilename, bDocument);
// check, if the count went up
Assert.AreEqual(1, rc.GetTemplatePageCount(sTempFilename));
// delete temp document
rc.DeleteTemplate(sTempFilename);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void ListTemplatesTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// list all templates
List<Template> templates = rc.ListTemplates();
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void ListFonts()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// list all templates
string[] fonts = rc.ListFonts();
foreach (string font in fonts)
{
Console.WriteLine(font);
}
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void GetDocumentThumbnailsTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
// create thumbnails
List<string> images = rc.GetDocumentThumbnails(bDocument, 20, 1, 1, ImageFormat.PNG);
// check, if images are created
Assert.IsFalse((images.Count == 0));
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
[TestMethod()]
public void GetTemplateThumbnailsTest()
{
try
{
ReportingCloud rc = new ReportingCloud(sUsername, sPassword, uriBasePath);
// upload 1 more document with unique file name
byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
rc.UploadTemplate(sTempFilename, bDocument);
// create thumbnails
List<string> images = rc.GetTemplateThumbnails(sTempFilename, 20, 1, 1, ImageFormat.PNG);
// check, if images are created
Assert.IsFalse((images.Count == 0));
// delete temp file
rc.DeleteTemplate(sTempFilename);
}
catch (Exception exc)
{
Assert.Fail(exc.Message);
}
}
public class Invoice
{
public string yourcompany_companyname { get; set; }
public string invoice_no { get; set; }
public string billto_name { get; set; }
}
}
}
| |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Globalization;
using System.Diagnostics;
using System.Linq;
namespace YetiVSI.DebugEngine.NatvisEngine
{
public class TypeName
{
    // Matches a C++-style identifier at the start of the input; '$' is allowed
    // because the debugger emits it in synthesized names.
    static readonly Regex IDENTIFIER_REGEX = new Regex("^[a-zA-Z$_][a-zA-Z$_0-9]*");
    // only decimal constants
    static readonly Regex NUMERIC_REGEX = new Regex("^-?[0-9]+");
    // helper patterns
    const string SIGN_PATTERN = @"(signed|unsigned)";
    static readonly string LONG_PATTERN = $@"(({SIGN_PATTERN}\s+)?(long\s+)?long)";
    // matches prefixes ending in '$' (e.g. void$Foo), these are checked for below
    static readonly Regex SIMPLE_TYPE_REGEX = new Regex(
        @"^unsigned\schar\s__attribute__\(\(ext_vector_type\(\d+\)\)\)|" + // ext vector
        @"^(" +
        @"(long\s+)?double|" + // double
        @"(long\s+)?float|" + // float
        @"char16_t|char32_t|wchar_t|" + // special chars
        @"u?int(8|16|32|64)_t|" + // special ints
        @"bool|void|" + // misc
        $@"(({SIGN_PATTERN}|{LONG_PATTERN})\s+)?int|" + // int
        $@"{LONG_PATTERN}|" + // long
        $@"({SIGN_PATTERN}\s+)?short(\s+int)?|" + // short
        $@"({SIGN_PATTERN}\s+)?char|" + // char
        $@"{SIGN_PATTERN}" + // signed/unsigned
        @")\b"
    );
    // Shared wildcard instance used for '*' entries in Natvis template lists;
    // Match() returns true for anything when IsWildcard is set.
    static readonly TypeName ANY_TYPE = new TypeName() {IsWildcard = true};

    // The exact source text this type was parsed from (set by MatchTypeName).
    public string FullyQualifiedName { get; private set; }
    // Enclosing scopes, outermost first (e.g. "A::B::C" -> [A, B] for base C).
    public IReadOnlyList<TypeName> Qualifiers { get; private set; }
    // Unqualified name; '*'/'&' suffixes are appended here by MatchTypeName.
    public string BaseName { get; set; }
    // template arguments
    public IReadOnlyList<TypeName> Args { get; private set; }
    // parameter types
    public IReadOnlyList<TypeName> Parameters { get; private set; }
    public bool IsWildcard { get; private set; }
    public bool IsArray { get; private set; }
    public bool IsFunction { get; private set; }
    // Array dimensions; -1 means an unsized dimension ("[]"). Only set when
    // IsArray is true.
    public int[] Dimensions { get; private set; }

    /// <summary>
    /// Return a parsed type name
    /// Acceptable name format:
    ///     typeName = ["const "] ([unqualifiedName "::"]* unqualifiedName | simpleTypeName)
    ///                ['*'] [Array] | functionDef
    ///     unqualifiedName = identifier | identifier '<' templateList '>'
    ///     templatelist = listElem | listElem ',' templateList
    ///     listElem = typeName | numericConstant | '*'
    ///     Array = '[]' | '[' [numericConstant ',']* numericConstant ']'
    ///     functionDef = typeName (parameterList)
    ///
    /// Returns null (and logs) when the input cannot be fully consumed.
    /// </summary>
    /// <param name="fullyQualifiedName"></param>
    /// <returns></returns>
    public static TypeName Parse(string fullyQualifiedName)
    {
        if (string.IsNullOrEmpty(fullyQualifiedName))
        {
            return null;
        }
        string rest;
        var t = MatchTypeName(fullyQualifiedName.Trim(), out rest);
        // Anything left over means the name did not parse completely.
        if (!string.IsNullOrWhiteSpace(rest))
        {
            Trace.WriteLine($"ERROR: Natvis failed to parse typename: {fullyQualifiedName}");
            return null;
        }
        return t;
    }

    /// <summary>
    /// Match this typeName to a candidate typeName. This type support wildcard matching against
    /// the candidate
    /// </summary>
    /// <param name="t">A type to compare against</param>
    /// <param name="score">Matching score that is updated during execution</param>
    /// <returns></returns>
    public bool Match(TypeName t, MatchScore score)
    {
        // A wildcard ('*') matches anything and contributes nothing to the score.
        if (IsWildcard)
        {
            return true;
        }
        if (Qualifiers.Count != t.Qualifiers.Count)
        {
            return false;
        }
        if (BaseName != t.BaseName)
        {
            return false;
        }
        // Qualifiers must match pairwise, in order.
        if (Qualifiers.Where((qualifier, i) => !qualifier.Match(t.Qualifiers[i], score)).Any())
        {
            return false;
        }
        // Template args must match one-for one, or if the last arg is a wildcard, it will
        // match any number of additional args.
        bool lastArgIsWildcard = Args.Count > 0 && Args[Args.Count - 1].IsWildcard;
        if (!(Args.Count == t.Args.Count || (Args.Count < t.Args.Count && lastArgIsWildcard)))
        {
            return false;
        }
        if (Args.Where((arg, i) => !arg.Match(t.Args[i], score)).Any())
        {
            return false;
        }
        if (IsArray != t.IsArray)
        {
            return false;
        }
        if (IsArray && Dimensions.Length != t.Dimensions.Length)
        {
            return false;
        }
        // A dimension of -1 (unsized "[]") matches any concrete dimension.
        if (IsArray && Dimensions.Where((dimension, i) => dimension != t.Dimensions[i])
                .Any(dimension => dimension != -1))
        {
            return false;
        }
        score.ExactTypeMatchCount++;
        score.ArgCountDifference += t.Args.Count - Args.Count;
        return true;
    }

    /// <summary>
    /// Parses one typeName from the start of <paramref name="name"/>, consuming
    /// qualifiers, pointer/reference suffixes, array suffixes and an optional
    /// function parameter list. Returns null on a parse failure.
    /// </summary>
    /// <param name="name">Trimmed string containing a type name</param>
    /// <param name="rest">Trimmed remainder of string after name match</param>
    /// <returns></returns>
    static TypeName MatchTypeName(string name, out string rest)
    {
        var original = name;
        if (name.StartsWith("const ", StringComparison.Ordinal))
        {
            // TODO: we just ignore const
            name = name.Substring(6).Trim();
        }
        // Try built-in types first; fall back to (possibly qualified) identifiers.
        var t = MatchSimpleTypeName(name, out rest);
        if (t == null)
        {
            var qualifiers = new List<TypeName>();
            t = MatchUnqualifiedName(name, out rest);
            while (t != null && rest.Length > 2 &&
                rest.StartsWith("::", StringComparison.Ordinal))
            {
                // process qualifiers
                qualifiers.Add(t);
                t = MatchUnqualifiedName(rest.Substring(2).Trim(), out rest);
            }
            if (t == null)
            {
                return null;
            }
            t.Qualifiers = qualifiers; // add qualifiers to the type
        }
        // Trailing const is ignored as well.
        if (rest.StartsWith("const", StringComparison.Ordinal))
        {
            rest = rest.Substring(5).Trim();
        }
        // Pointer/reference markers are folded into BaseName (e.g. "Foo*").
        while (rest.StartsWith("*", StringComparison.Ordinal) ||
            rest.StartsWith("&", StringComparison.Ordinal))
        {
            t.BaseName += rest[0];
            rest = rest.Substring(1).Trim();
            if (rest.StartsWith("const", StringComparison.Ordinal))
            {
                rest = rest.Substring(5).Trim();
            }
        }
        MatchArray(t, rest, out rest); // add array or pointer
        if (rest.StartsWith("(", StringComparison.Ordinal))
        {
            t.IsFunction = true;
            var parameters = new List<TypeName>();
            if (!MatchParameterList(rest.Substring(1).Trim(), out rest, parameters))
            {
                return null;
            }
            t.Parameters = parameters;
            if (rest.Length > 0 && rest[0] == ')')
            {
                rest = rest.Substring(1).Trim();
            }
            else
            {
                // Unterminated parameter list.
                return null;
            }
        }
        // complete the full name of the type
        t.FullyQualifiedName = original.Substring(0, original.Length - rest.Length);
        return t;
    }

    // Matches a built-in/simple type (int, unsigned long, wchar_t, ...) at the
    // start of 'name'; returns null if none matches.
    static TypeName MatchSimpleTypeName(string name, out string rest)
    {
        rest = string.Empty;
        var m = SIMPLE_TYPE_REGEX.Match(name);
        if (!m.Success) return null;
        // The simpleType regular expression will succeed for strings that look like
        // simple types, but are terminated by '$'.
        // Since the $ is a valid C++ identifier character we check it here to make
        // sure we haven't accidentally matched a prefix, e.g. int$Foo
        var r = name.Substring(m.Length);
        if (r.Length > 0 && r[0] == '$')
        {
            return null;
        }
        rest = r.Trim();
        return new TypeName(m.Value);
    }

    // Matches "identifier" or "identifier<templateList>"; returns null on failure.
    static TypeName MatchUnqualifiedName(string name, out string rest)
    {
        var basename = MatchIdentifier(name, out rest);
        if (string.IsNullOrEmpty(basename))
        {
            return null;
        }
        var t = new TypeName(basename);
        // No '<' means a plain identifier with no template arguments.
        if (rest.Length <= 0 || rest[0] != '<')
        {
            return t;
        }
        var args = new List<TypeName>();
        if (!MatchTemplateList(rest.Substring(1).Trim(), out rest, args) ||
            rest.Length < 1 || rest[0] != '>')
        {
            return null;
        }
        t.Args = args;
        rest = rest.Substring(1).Trim();
        return t;
    }

    // Consumes an optional array suffix ("[]" or "[N]") and records it on 't'.
    static void MatchArray(TypeName t, string name, out string rest)
    {
        if (name.StartsWith("[]", StringComparison.Ordinal))
        {
            // Unsized array: dimension -1 matches any size (see Match()).
            t.SetArraySize(new int[] {-1});
            rest = name.Substring(2).Trim();
        }
        else if (name.StartsWith("[", StringComparison.Ordinal))
        {
            // TODO: handle multiple dimensions
            var num = MatchConstant(name.Substring(1).Trim(), out rest);
            if (rest.StartsWith("]", StringComparison.Ordinal))
            {
                t.SetArraySize(new[] {int.Parse(num, CultureInfo.InvariantCulture)});
            }
            // NOTE(review): when the closing ']' is missing this still consumes one
            // character (and throws ArgumentOutOfRangeException if 'rest' is empty,
            // e.g. input ending in "[" or "[abc"). Confirm inputs are pre-validated.
            rest = rest.Substring(1).Trim();
        }
        else
        {
            rest = name;
        }
    }

    // Matches an identifier at the start of 'name'; returns "" when none matches.
    static string MatchIdentifier(string name, out string rest)
    {
        rest = string.Empty;
        var m = IDENTIFIER_REGEX.Match(name);
        if (m.Success)
        {
            rest = name.Substring(m.Length).Trim();
        }
        return m.Value;
    }

    // Matches a (possibly negative) decimal constant; returns "" when none matches.
    static string MatchConstant(string name, out string rest)
    {
        rest = string.Empty;
        var m = NUMERIC_REGEX.Match(name);
        if (m.Success)
        {
            rest = name.Substring(m.Length).Trim();
        }
        return m.Value;
    }

    // Recursively matches a comma-separated template argument list into 'args'.
    // Elements may be numeric constants, '*' wildcards, or nested type names.
    static bool MatchTemplateList(string templist, out string rest, ICollection<TypeName> args)
    {
        TypeName t;
        // no constants allowed in parameter lists
        var arg = MatchConstant(templist, out rest);
        if (!string.IsNullOrEmpty(arg))
        {
            var constantArg = new TypeName(arg) {FullyQualifiedName = arg};
            args.Add(constantArg);
        }
        else if (templist.StartsWith("*", StringComparison.Ordinal))
        {
            rest = templist.Substring(1).Trim();
            args.Add(TypeName.ANY_TYPE);
        }
        else if ((t = MatchTypeName(templist, out rest)) != null)
        {
            args.Add(t);
        }
        else
        {
            return false;
        }
        if (rest.Length > 1 && rest[0] == ',')
        {
            return MatchTemplateList(rest.Substring(1).Trim(), out rest, args);
        }
        return true;
    }

    // Matches a function parameter list (types only, no constants/wildcards)
    // up to -- but not including -- the closing ')'.
    static bool MatchParameterList(string plist, out string rest, ICollection<TypeName> args)
    {
        rest = plist;
        while (rest.Length > 0 && rest[0] != ')')
        {
            TypeName t;
            if ((t = MatchTypeName(rest, out rest)) == null)
            {
                return false;
            }
            args.Add(t);
            if (rest.Length > 1 && rest[0] == ',')
            {
                rest = rest.Substring(1).Trim();
            }
        }
        return true;
    }

    // Private: instances are created only via Parse()/the static matchers.
    TypeName()
    {
        Args = new List<TypeName>();
        Qualifiers = new List<TypeName>();
        Parameters = null;
        IsWildcard = false;
        IsArray = false;
    }

    TypeName(string name) : this()
    {
        BaseName = name;
    }

    // Marks this type as an array with the given dimensions (-1 = unsized).
    void SetArraySize(int[] dimensions)
    {
        IsArray = true;
        Dimensions = dimensions;
    }

    /// <summary>
    /// Score for Match(). Used to prefer exact template matches over wildcard matches.
    /// <example><![CDATA[
    /// Natvis types:
    /// 1) <T1, T2, *>
    /// 2) <T1, T2, *, *>
    /// 3) <T1, T2, T3>
    ///
    /// C++ types:
    /// <T1, T2, T3> should prefer 3) as Natvis type, even if it also matches 1).
    /// <T1, T2, T3, T4> should prefer 2), even if it also matches 1).
    /// ]]></example>
    /// </summary>
    public class MatchScore : IComparable<MatchScore>
    {
        public MatchScore()
        {
        }
        /// <summary>
        /// How many times types matched exactly. Higher is better.
        /// </summary>
        /// <example><![CDATA[
        /// <*, T2, *> and <T1, T2, T3, T4> -> 1 exact match
        /// ]]></example>
        public int ExactTypeMatchCount { get; set; }
        /// <summary>
        /// Difference of C++ and Natvis type template arg counts. Lower is better.
        /// </summary>
        /// <example><![CDATA[
        /// <T1, T2, *> and <T1, T2, T3> -> 0 (same number of args)
        /// <T1, T2, *> and <T1, T2, T3, T4> -> 1 (right type has one more arg)
        /// ]]></example>
        public int ArgCountDifference { get; set; }
        public int CompareTo(MatchScore other)
        {
            if (ExactTypeMatchCount != other.ExactTypeMatchCount)
            {
                // Since higher is better, do this.CompareTo(other).
                return ExactTypeMatchCount.CompareTo(other.ExactTypeMatchCount);
            }
            // Since lower is better, do other.CompareTo(this).
            return other.ArgCountDifference.CompareTo(ArgCountDifference);
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using NUnit.Framework;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using Directory = Lucene.Net.Store.Directory;
using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
using IndexSearcher = Lucene.Net.Search.IndexSearcher;
using ScoreDoc = Lucene.Net.Search.ScoreDoc;
using TermQuery = Lucene.Net.Search.TermQuery;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
namespace Lucene.Net.Index
{
[TestFixture]
public class TestIndexWriterDelete:LuceneTestCase
{
// Injected MockRAMDirectory failure that throws an IOException on the first
// directory operation performed AFTER an ApplyDeletes call has been observed
// on the stack. Used to test recovery when a flush fails right after deletes
// were applied.
private class AnonymousClassFailure:MockRAMDirectory.Failure
{
    public AnonymousClassFailure(TestIndexWriterDelete enclosingInstance)
    {
        InitBlock(enclosingInstance);
    }
    private void InitBlock(TestIndexWriterDelete enclosingInstance)
    {
        this.enclosingInstance = enclosingInstance;
    }
    private TestIndexWriterDelete enclosingInstance;
    public TestIndexWriterDelete Enclosing_Instance
    {
        get
        {
            return enclosingInstance;
        }
    }
    // sawMaybe: ApplyDeletes has been seen on a stack trace at least once.
    // failed: the injected IOException has already been thrown (throw only once).
    internal bool sawMaybe = false;
    internal bool failed = false;
    public override MockRAMDirectory.Failure Reset()
    {
        sawMaybe = false;
        failed = false;
        return this;
    }
    // Called by MockRAMDirectory on every operation; inspects the current stack
    // trace for the "ApplyDeletes" frame. Note the two if-blocks must run in this
    // order: first decide whether to fail, then (re)arm sawMaybe.
    public override void Eval(MockRAMDirectory dir)
    {
        if (sawMaybe && !failed)
        {
            bool seen = false;
            System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
            for (int i = 0; i < trace.FrameCount; i++)
            {
                System.Diagnostics.StackFrame sf = trace.GetFrame(i);
                if ("ApplyDeletes".Equals(sf.GetMethod().Name))
                {
                    seen = true;
                    break;
                }
            }
            if (!seen)
            {
                // Only fail once we are no longer in applyDeletes
                failed = true;
                throw new System.IO.IOException("fail after applyDeletes");
            }
        }
        if (!failed)
        {
            // Arm the failure once we observe an ApplyDeletes call in progress.
            System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
            for (int i = 0; i < trace.FrameCount; i++)
            {
                System.Diagnostics.StackFrame sf = trace.GetFrame(i);
                if ("ApplyDeletes".Equals(sf.GetMethod().Name))
                {
                    sawMaybe = true;
                    break;
                }
            }
        }
    }
}
// Injected MockRAMDirectory failure that throws an IOException on the very
// first directory operation after (re)arming, and never again until Reset().
private class AnonymousClassFailure1 : MockRAMDirectory.Failure
{
    private TestIndexWriterDelete enclosingInstance;

    // Set once the single injected failure has been thrown.
    internal bool failed = false;

    public AnonymousClassFailure1(TestIndexWriterDelete enclosingInstance)
    {
        this.enclosingInstance = enclosingInstance;
    }

    public TestIndexWriterDelete Enclosing_Instance
    {
        get { return enclosingInstance; }
    }

    // Re-arms the failure so the next Eval() throws again.
    public override MockRAMDirectory.Failure Reset()
    {
        failed = false;
        return this;
    }

    // Throws exactly once per arming; subsequent calls are no-ops.
    public override void Eval(MockRAMDirectory dir)
    {
        if (failed)
        {
            return;
        }
        failed = true;
        throw new System.IO.IOException("fail in add doc");
    }
}
// test the simple case: index two docs, verify a term hit, delete by that
// term, verify the hit is gone. Runs once with autoCommit=true (pass 0) and
// once with autoCommit=false (pass 1).
[Test]
public virtual void TestSimpleCase()
{
    System.String[] keywords = new System.String[]{"1", "2"};
    System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
    System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
    System.String[] text = new System.String[]{"Amsterdam", "Venice"};
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        Directory dir = new MockRAMDirectory();
        IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        modifier.SetUseCompoundFile(true);
        // flush each buffered delete term immediately
        modifier.SetMaxBufferedDeleteTerms(1);
        for (int i = 0; i < keywords.Length; i++)
        {
            Document doc = new Document();
            doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
            doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
            doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
            doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
            modifier.AddDocument(doc);
        }
        modifier.Optimize();
        modifier.Commit();
        // one doc mentions Amsterdam before the delete...
        Term term = new Term("city", "Amsterdam");
        int hitCount = GetHitCount(dir, term);
        Assert.AreEqual(1, hitCount);
        modifier.DeleteDocuments(term);
        modifier.Commit();
        // ...and none after it
        hitCount = GetHitCount(dir, term);
        Assert.AreEqual(0, hitCount);
        modifier.Close();
        dir.Close();
    }
}
// test when delete terms only apply to disk segments: everything is committed
// (nothing buffered in RAM) before the delete is issued.
[Test]
public virtual void TestNonRAMDelete()
{
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        Directory dir = new MockRAMDirectory();
        IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        modifier.SetMaxBufferedDocs(2);
        modifier.SetMaxBufferedDeleteTerms(2);
        int id = 0;
        int value_Renamed = 100;
        // all 7 docs share value=100, so one delete term removes them all
        for (int i = 0; i < 7; i++)
        {
            AddDoc(modifier, ++id, value_Renamed);
        }
        modifier.Commit();
        // after the commit nothing is buffered and at least one segment exists on disk
        Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
        Assert.IsTrue(0 < modifier.GetSegmentCount());
        modifier.Commit();
        IndexReader reader = IndexReader.Open(dir);
        Assert.AreEqual(7, reader.NumDocs());
        reader.Close();
        modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
        modifier.Commit();
        reader = IndexReader.Open(dir);
        Assert.AreEqual(0, reader.NumDocs());
        reader.Close();
        modifier.Close();
        dir.Close();
    }
}
// With a delete-term buffer of size 1, each buffered delete should trigger
// its own flush; three deletes therefore mean three delete flushes.
[Test]
public virtual void TestMaxBufferedDeletes()
{
    // pass 0 -> autoCommit=true, pass 1 -> autoCommit=false
    foreach (bool autoCommit in new bool[] { true, false })
    {
        Directory dir = new MockRAMDirectory();
        IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        writer.SetMaxBufferedDeleteTerms(1);
        for (int deleteCount = 0; deleteCount < 3; deleteCount++)
        {
            writer.DeleteDocuments(new Term("foobar", "1"));
        }
        Assert.AreEqual(3, writer.GetFlushDeletesCount());
        writer.Close();
        dir.Close();
    }
}
// test when delete terms only apply to ram segments: deletes are issued while
// the matching docs are still buffered. Inner loop: t=0 deletes by Term,
// t=1 deletes by TermQuery.
[Test]
public virtual void TestRAMDeletes()
{
    for (int pass = 0; pass < 2; pass++)
    {
        for (int t = 0; t < 2; t++)
        {
            bool autoCommit = (0 == pass);
            Directory dir = new MockRAMDirectory();
            IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
            // buffers large enough that nothing flushes during the adds/deletes
            modifier.SetMaxBufferedDocs(4);
            modifier.SetMaxBufferedDeleteTerms(4);
            int id = 0;
            int value_Renamed = 100;
            AddDoc(modifier, ++id, value_Renamed);
            if (0 == t)
                modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
            else
                modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
            AddDoc(modifier, ++id, value_Renamed);
            if (0 == t)
            {
                modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
                // two buffered delete operations, but only one distinct term
                Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
                Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
            }
            else
                modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
            AddDoc(modifier, ++id, value_Renamed);
            // everything is still in RAM -- no on-disk segment yet
            Assert.AreEqual(0, modifier.GetSegmentCount());
            modifier.Flush();
            modifier.Commit();
            // only the doc added after the last delete survives
            IndexReader reader = IndexReader.Open(dir);
            Assert.AreEqual(1, reader.NumDocs());
            int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
            Assert.AreEqual(1, hitCount);
            reader.Close();
            modifier.Close();
            dir.Close();
        }
    }
}
// test when delete terms apply to both disk and ram segments: 5 value=200 docs
// are committed to disk and 5 more are still buffered when the delete for
// value=200 arrives; only the 5 value=100 docs must survive.
[Test]
public virtual void TestBothDeletes()
{
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        Directory dir = new MockRAMDirectory();
        IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        // buffers large enough that nothing flushes implicitly
        modifier.SetMaxBufferedDocs(100);
        modifier.SetMaxBufferedDeleteTerms(100);
        int id = 0;
        int value_Renamed = 100;
        for (int i = 0; i < 5; i++)
        {
            AddDoc(modifier, ++id, value_Renamed);
        }
        value_Renamed = 200;
        for (int i = 0; i < 5; i++)
        {
            AddDoc(modifier, ++id, value_Renamed);
        }
        modifier.Commit();
        // 5 more value=200 docs stay buffered in RAM (not committed)
        for (int i = 0; i < 5; i++)
        {
            AddDoc(modifier, ++id, value_Renamed);
        }
        modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
        modifier.Commit();
        IndexReader reader = IndexReader.Open(dir);
        Assert.AreEqual(5, reader.NumDocs());
        // FIX: the reader and directory were leaked; close both like every
        // sibling test in this fixture does.
        reader.Close();
        modifier.Close();
        dir.Close();
    }
}
// test that batched delete terms are flushed together: single-term deletes
// and a Term[] batch both reduce the doc count as expected.
[Test]
public virtual void TestBatchDeletes()
{
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        Directory dir = new MockRAMDirectory();
        IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        modifier.SetMaxBufferedDocs(2);
        modifier.SetMaxBufferedDeleteTerms(2);
        int id = 0;
        int value_Renamed = 100;
        for (int i = 0; i < 7; i++)
        {
            AddDoc(modifier, ++id, value_Renamed);
        }
        modifier.Commit();
        IndexReader reader = IndexReader.Open(dir);
        Assert.AreEqual(7, reader.NumDocs());
        reader.Close();
        // delete docs id=1 and id=2 one term at a time
        id = 0;
        modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
        modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
        modifier.Commit();
        reader = IndexReader.Open(dir);
        Assert.AreEqual(5, reader.NumDocs());
        reader.Close();
        // delete docs id=3..5 as one batched Term[] call
        Term[] terms = new Term[3];
        for (int i = 0; i < terms.Length; i++)
        {
            terms[i] = new Term("id", System.Convert.ToString(++id));
        }
        modifier.DeleteDocuments(terms);
        modifier.Commit();
        reader = IndexReader.Open(dir);
        Assert.AreEqual(2, reader.NumDocs());
        reader.Close();
        modifier.Close();
        dir.Close();
    }
}
// test deleteAll(): the wipe is not visible on disk until commit, and docs
// added after the deleteAll (but before the commit) survive it.
[Test]
public virtual void TestDeleteAll()
{
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        Directory dir = new MockRAMDirectory();
        IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        modifier.SetMaxBufferedDocs(2);
        modifier.SetMaxBufferedDeleteTerms(2);
        int id = 0;
        int value_Renamed = 100;
        for (int i = 0; i < 7; i++)
        {
            AddDoc(modifier, ++id, value_Renamed);
        }
        modifier.Commit();
        IndexReader reader = IndexReader.Open(dir);
        Assert.AreEqual(7, reader.NumDocs());
        reader.Close();
        // Add 1 doc (so we will have something buffered)
        AddDoc(modifier, 99, value_Renamed);
        // Delete all
        modifier.DeleteAll();
        // Delete all shouldn't be on disk yet
        reader = IndexReader.Open(dir);
        Assert.AreEqual(7, reader.NumDocs());
        reader.Close();
        // Add a doc and update a doc (after the deleteAll, before the commit)
        AddDoc(modifier, 101, value_Renamed);
        UpdateDoc(modifier, 102, value_Renamed);
        // commit the delete all
        modifier.Commit();
        // Validate only the two post-deleteAll docs (101, 102) remain
        reader = IndexReader.Open(dir);
        Assert.AreEqual(2, reader.NumDocs());
        reader.Close();
        modifier.Close();
        dir.Close();
    }
}
// test rollback of deleteAll()
[Test]
public virtual void TestDeleteAllRollback()
{
    Directory dir = new MockRAMDirectory();
    // autoCommit=false so Rollback() below can discard the uncommitted DeleteAll.
    IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
    modifier.SetMaxBufferedDocs(2);
    modifier.SetMaxBufferedDeleteTerms(2);
    int id = 0;
    int value_Renamed = 100;
    // Seed with 7 committed docs.
    for (int i = 0; i < 7; i++)
    {
        AddDoc(modifier, ++id, value_Renamed);
    }
    modifier.Commit();
    // One extra uncommitted doc; discarded along with DeleteAll by Rollback().
    AddDoc(modifier, ++id, value_Renamed);
    IndexReader reader = IndexReader.Open(dir);
    Assert.AreEqual(7, reader.NumDocs());
    reader.Close();
    // Delete all
    modifier.DeleteAll();
    // Roll it back
    modifier.Rollback();
    modifier.Close();
    // Validate that the docs are still there
    reader = IndexReader.Open(dir);
    Assert.AreEqual(7, reader.NumDocs());
    reader.Close();
    dir.Close();
}
// test deleteAll() w/ near real-time reader
[Test]
public virtual void TestDeleteAllNRT()
{
    Directory dir = new MockRAMDirectory();
    // autoCommit=false so the DeleteAll stays uncommitted and can be rolled back.
    IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
    modifier.SetMaxBufferedDocs(2);
    modifier.SetMaxBufferedDeleteTerms(2);
    int id = 0;
    int value_Renamed = 100;
    // Seed with 7 committed docs.
    for (int i = 0; i < 7; i++)
    {
        AddDoc(modifier, ++id, value_Renamed);
    }
    modifier.Commit();
    // NRT reader obtained from the writer sees the committed docs.
    IndexReader reader = modifier.GetReader();
    Assert.AreEqual(7, reader.NumDocs());
    reader.Close();
    AddDoc(modifier, ++id, value_Renamed);
    AddDoc(modifier, ++id, value_Renamed);
    // Delete all
    modifier.DeleteAll();
    // The NRT reader must observe the uncommitted DeleteAll immediately.
    reader = modifier.GetReader();
    Assert.AreEqual(0, reader.NumDocs());
    reader.Close();
    // Roll it back
    modifier.Rollback();
    modifier.Close();
    // Validate that the docs are still there
    reader = IndexReader.Open(dir);
    Assert.AreEqual(7, reader.NumDocs());
    reader.Close();
    dir.Close();
}
// Replaces the document whose "id" field equals the given id with a freshly
// built document carrying the same three fields ("content", "id", "value").
private void UpdateDoc(IndexWriter modifier, int id, int value_Renamed)
{
    System.String idText = System.Convert.ToString(id);
    System.String valueText = System.Convert.ToString(value_Renamed);
    Document doc = new Document();
    doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field("id", idText, Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field("value", valueText, Field.Store.NO, Field.Index.NOT_ANALYZED));
    modifier.UpdateDocument(new Term("id", idText), doc);
}
// Adds a new document with the standard three test fields:
// "content" (analyzed, not stored), "id" (stored key) and "value".
private void AddDoc(IndexWriter modifier, int id, int value_Renamed)
{
    System.String idText = System.Convert.ToString(id);
    System.String valueText = System.Convert.ToString(value_Renamed);
    Document doc = new Document();
    doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field("id", idText, Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field("value", valueText, Field.Store.NO, Field.Index.NOT_ANALYZED));
    modifier.AddDocument(doc);
}
// Opens a searcher over the directory, counts the hits for the given term
// (capped at 1000 by the search call), closes the searcher and returns the count.
private int GetHitCount(Directory dir, Term term)
{
    IndexSearcher searcher = new IndexSearcher(dir);
    TermQuery query = new TermQuery(term);
    int hitCount = searcher.Search(query, null, 1000).TotalHits;
    searcher.Close();
    return hitCount;
}
[Test]
public virtual void TestDeletesOnDiskFull()
{
    // Exercise the disk-full scenario with buffered deletes (updates=false).
    TestOperationsOnDiskFull(false);
}
[Test]
public virtual void TestUpdatesOnDiskFull()
{
    // Exercise the disk-full scenario with document updates (updates=true).
    TestOperationsOnDiskFull(true);
}
/// <summary> Make sure if modifier tries to commit but hits disk full that modifier
/// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
/// </summary>
private void TestOperationsOnDiskFull(bool updates)
{
    bool debug = false;
    Term searchTerm = new Term("content", "aaa");
    int START_COUNT = 157; // docs in the seed index
    int END_COUNT = 144;   // docs left after the 13 deletes/updates succeed
    // Run once with autoCommit enabled (pass 0) and once without (pass 1).
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        // First build up a starting index:
        MockRAMDirectory startDir = new MockRAMDirectory();
        IndexWriter writer = new IndexWriter(startDir, autoCommit, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < 157; i++)
        {
            Document d = new Document();
            d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
            d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
            writer.AddDocument(d);
        }
        writer.Close();
        long diskUsage = startDir.SizeInBytes();
        // Start barely above the existing usage so the first cycles hit disk full.
        long diskFree = diskUsage + 10;
        System.IO.IOException err = null;
        bool done = false;
        // Iterate w/ ever increasing free disk space:
        while (!done)
        {
            MockRAMDirectory dir = new MockRAMDirectory(startDir);
            dir.SetPreventDoubleWrite(false);
            IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
            modifier.SetMaxBufferedDocs(1000); // use flush or close
            modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
            // For each disk size, first try to commit against
            // dir that will hit random IOExceptions & disk
            // full; after, give it infinite disk space & turn
            // off random IOExceptions & retry w/ same reader:
            bool success = false;
            for (int x = 0; x < 2; x++)
            {
                double rate = 0.1;
                double diskRatio = ((double) diskFree) / diskUsage;
                long thisDiskFree;
                System.String testName;
                if (0 == x)
                {
                    // First attempt: constrained disk plus random IOExceptions.
                    // The more headroom we have, the lower the injected failure rate.
                    thisDiskFree = diskFree;
                    if (diskRatio >= 2.0)
                    {
                        rate /= 2;
                    }
                    if (diskRatio >= 4.0)
                    {
                        rate /= 2;
                    }
                    if (diskRatio >= 6.0)
                    {
                        rate = 0.0;
                    }
                    if (debug)
                    {
                        System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
                    }
                    testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
                }
                else
                {
                    // Second attempt: unlimited disk, no injected failures.
                    thisDiskFree = 0;
                    rate = 0.0;
                    if (debug)
                    {
                        System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
                    }
                    testName = "reader re-use after disk full";
                }
                dir.SetMaxSizeInBytes(thisDiskFree);
                dir.SetRandomIOExceptionRate(rate, diskFree);
                try
                {
                    if (0 == x)
                    {
                        // Touch 13 docs (ids 12, 24, ..., 156): either update each
                        // in place or delete it, depending on the 'updates' flag.
                        int docId = 12;
                        for (int i = 0; i < 13; i++)
                        {
                            if (updates)
                            {
                                Document d = new Document();
                                d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
                                d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
                                modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
                            }
                            else
                            {
                                // deletes
                                modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
                                // modifier.setNorm(docId, "contents", (float)2.0);
                            }
                            docId += 12;
                        }
                    }
                    modifier.Close();
                    success = true;
                    if (0 == x)
                    {
                        // Succeeded on the constrained pass: no need to grow the disk further.
                        done = true;
                    }
                }
                catch (System.IO.IOException e)
                {
                    if (debug)
                    {
                        System.Console.Out.WriteLine("  hit IOException: " + e);
                        System.Console.Out.WriteLine(e.StackTrace);
                    }
                    err = e;
                    if (1 == x)
                    {
                        // With unlimited disk and no injected failures, any
                        // IOException is a real bug.
                        System.Console.Error.WriteLine(e.StackTrace);
                        Assert.Fail(testName + " hit IOException after disk space was freed up");
                    }
                }
                // If the close() succeeded, make sure there are
                // no unreferenced files.
                if (success)
                {
                    Lucene.Net.Util._TestUtil.CheckIndex(dir);
                    TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.close");
                }
                // Finally, verify index is not corrupt, and, if
                // we succeeded, we see all docs changed, and if
                // we failed, we see either all docs or no docs
                // changed (transactional semantics):
                IndexReader newReader = null;
                try
                {
                    newReader = IndexReader.Open(dir);
                }
                catch (System.IO.IOException e)
                {
                    System.Console.Error.WriteLine(e.StackTrace);
                    Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
                }
                IndexSearcher searcher = new IndexSearcher(newReader);
                ScoreDoc[] hits = null;
                try
                {
                    hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
                }
                catch (System.IO.IOException e)
                {
                    System.Console.Error.WriteLine(e.StackTrace);
                    Assert.Fail(testName + ": exception when searching: " + e);
                }
                int result2 = hits.Length;
                if (success)
                {
                    if (x == 0 && result2 != END_COUNT)
                    {
                        Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
                    }
                    else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
                    {
                        // It's possible that the first exception was
                        // "recoverable" wrt pending deletes, in which
                        // case the pending deletes are retained and
                        // then re-flushing (with plenty of disk
                        // space) will succeed in flushing the
                        // deletes:
                        Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
                    }
                }
                else
                {
                    // On hitting exception we still may have added
                    // all docs:
                    if (result2 != START_COUNT && result2 != END_COUNT)
                    {
                        System.Console.Error.WriteLine(err.StackTrace);
                        Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
                    }
                }
                searcher.Close();
                newReader.Close();
                if (result2 == END_COUNT)
                {
                    // All changes made it to disk; skip the unlimited-space retry.
                    break;
                }
            }
            dir.Close();
            // Try again with 10 more bytes of free space:
            diskFree += 10;
        }
    }
}
// This test tests that buffered deletes are cleared when
// an Exception is hit during flush.
[Test]
public virtual void TestErrorAfterApplyDeletes()
{
    // One-shot failure: fires on the first write after the deletes are applied.
    MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);
    // create a couple of files
    System.String[] keywords = new System.String[]{"1", "2"};
    System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
    System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
    System.String[] text = new System.String[]{"Amsterdam", "Venice"};
    // Run once with autoCommit enabled (pass 0) and once without (pass 1).
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        MockRAMDirectory dir = new MockRAMDirectory();
        IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        modifier.SetUseCompoundFile(true);
        modifier.SetMaxBufferedDeleteTerms(2);
        dir.FailOn(failure.Reset());
        for (int i = 0; i < keywords.Length; i++)
        {
            Document doc = new Document();
            doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
            doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
            doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
            doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
            modifier.AddDocument(doc);
        }
        // flush (and commit if ac)
        modifier.Optimize();
        modifier.Commit();
        // one of the two files hits
        Term term = new Term("city", "Amsterdam");
        int hitCount = GetHitCount(dir, term);
        Assert.AreEqual(1, hitCount);
        // open the writer again (closed above)
        // delete the doc
        // max buf del terms is two, so this is buffered
        modifier.DeleteDocuments(term);
        // add a doc (needed for the !ac case; see below)
        // doc remains buffered
        Document doc2 = new Document();
        modifier.AddDocument(doc2);
        // commit the changes, the buffered deletes, and the new doc
        // The failure object will fail on the first write after the del
        // file gets created when processing the buffered delete
        // in the ac case, this will be when writing the new segments
        // files so we really don't need the new doc, but it's harmless
        // in the !ac case, a new segments file won't be created but in
        // this case, creation of the cfs file happens next so we need
        // the doc (to test that it's okay that we don't lose deletes if
        // failing while creating the cfs file)
        bool failed = false;
        try
        {
            modifier.Commit();
        }
        catch (System.IO.IOException)
        {
            // Expected: the injected failure makes the first commit fail.
            // (The exception variable was unused and caused a CS0168 warning.)
            failed = true;
        }
        Assert.IsTrue(failed);
        // The commit above failed, so we need to retry it (which will
        // succeed, because the failure is a one-shot)
        modifier.Commit();
        hitCount = GetHitCount(dir, term);
        // Make sure the delete was successfully flushed:
        Assert.AreEqual(0, hitCount);
        modifier.Close();
        dir.Close();
    }
}
// This test tests that the files created by the docs writer before
// a segment is written are cleaned up if there's an i/o error
[Test]
public virtual void TestErrorInDocsWriterAdd()
{
    MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
    // create a couple of files
    System.String[] keywords = new System.String[]{"1", "2"};
    System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
    System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
    System.String[] text = new System.String[]{"Amsterdam", "Venice"};
    // Run once with autoCommit enabled (pass 0) and once without (pass 1).
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        MockRAMDirectory dir = new MockRAMDirectory();
        IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        dir.FailOn(failure.Reset());
        for (int i = 0; i < keywords.Length; i++)
        {
            Document doc = new Document();
            doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
            doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
            doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
            doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
            try
            {
                modifier.AddDocument(doc);
            }
            catch (System.IO.IOException)
            {
                // Injected failure hit during the add; stop adding docs.
                // (The exception variable was unused and caused a CS0168 warning.)
                break;
            }
        }
        // After the aborted add, a deleter pass must not leave any files
        // behind that were created by the docs writer for the dead segment.
        System.String[] startFiles = dir.ListAll();
        SegmentInfos infos = new SegmentInfos();
        infos.Read(dir);
        new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null, null);
        System.String[] endFiles = dir.ListAll();
        if (!SupportClass.CollectionsHelper.CompareStringArrays(startFiles, endFiles))
        {
            Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
        }
        modifier.Close();
        // Close the directory for consistency with the sibling tests
        // (it was previously leaked on every pass).
        dir.Close();
    }
}
// Joins the entries with "\n " separators for use in failure messages.
// String.Join performs the concatenation in a single pass, replacing the
// previous += loop which was O(n^2) in total characters.
private System.String ArrayToString(System.String[] l)
{
    return System.String.Join("\n ", l);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// In the desktop version of the framework, this file is generated from ProviderBase\DbConnectionHelper.cs
// #line 1 "e:\\fxdata\\src\\ndp\\fx\\src\\data\\system\\data\\providerbase\\dbconnectionhelper.cs"
using System.Data.Common;
using System.Data.ProviderBase;
using System.Diagnostics;
using System.Threading;
namespace System.Data.SqlClient
{
public sealed partial class SqlConnection : DbConnection
{
    // Factory shared by all SqlConnection instances.
    private static readonly DbConnectionFactory s_connectionFactory = SqlConnectionFactory.SingletonInstance;
    // Options parsed from the user-supplied connection string; null until a string is set.
    private DbConnectionOptions _userConnectionOptions;
    // Pool group the current connection string maps to; owns the effective ConnectionOptions.
    private DbConnectionPoolGroup _poolGroup;
    // Current inner-connection state object; swapped as the connection changes state.
    private DbConnectionInternal _innerConnection;
    // Count of Open -> Closed transitions (see SetInnerConnectionEvent).
    private int _closeCount;

    public SqlConnection() : base()
    {
        // No finalizer work is needed for this wrapper object itself.
        GC.SuppressFinalize(this);
        _innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
    }

    // Number of times this connection has transitioned from Open to Closed.
    internal int CloseCount
    {
        get
        {
            return _closeCount;
        }
    }

    internal DbConnectionFactory ConnectionFactory
    {
        get
        {
            return s_connectionFactory;
        }
    }

    // Effective (pool-group) connection options; null when no pool group is set.
    internal DbConnectionOptions ConnectionOptions
    {
        get
        {
            System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = PoolGroup;
            return ((null != poolGroup) ? poolGroup.ConnectionOptions : null);
        }
    }

    // Returns the user's connection string, hiding the password when the
    // current inner connection says it should be hidden.
    private string ConnectionString_Get()
    {
        bool hidePassword = InnerConnection.ShouldHidePassword;
        DbConnectionOptions connectionOptions = UserConnectionOptions;
        return ((null != connectionOptions) ? connectionOptions.UsersConnectionString(hidePassword) : "");
    }

    // Applies a new connection string (as a pool key). Only legal while the
    // inner connection allows it (i.e. the connection is not open); otherwise throws.
    private void ConnectionString_Set(DbConnectionPoolKey key)
    {
        DbConnectionOptions connectionOptions = null;
        System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = ConnectionFactory.GetConnectionPoolGroup(key, null, ref connectionOptions);
        DbConnectionInternal connectionInternal = InnerConnection;
        bool flag = connectionInternal.AllowSetConnectionString;
        if (flag)
        {
            // Briefly park the connection in the "busy" state so the swap of
            // options/pool group is not raced by another state change.
            flag = SetInnerConnectionFrom(DbConnectionClosedBusy.SingletonInstance, connectionInternal);
            if (flag)
            {
                _userConnectionOptions = connectionOptions;
                _poolGroup = poolGroup;
                _innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
            }
        }
        if (!flag)
        {
            throw ADP.OpenConnectionPropertySet(ADP.ConnectionString, connectionInternal.State);
        }
    }

    internal DbConnectionInternal InnerConnection
    {
        get
        {
            return _innerConnection;
        }
    }

    internal System.Data.ProviderBase.DbConnectionPoolGroup PoolGroup
    {
        get
        {
            return _poolGroup;
        }
        set
        {
            Debug.Assert(null != value, "null poolGroup");
            _poolGroup = value;
        }
    }

    internal DbConnectionOptions UserConnectionOptions
    {
        get
        {
            return _userConnectionOptions;
        }
    }

    // Aborts an open connection: marks it as previously opened and dooms the
    // inner connection so it is not returned to the pool in a usable state.
    internal void Abort(Exception e)
    {
        DbConnectionInternal innerConnection = _innerConnection;
        if (ConnectionState.Open == innerConnection.State)
        {
            Interlocked.CompareExchange(ref _innerConnection, DbConnectionClosedPreviouslyOpened.SingletonInstance, innerConnection);
            innerConnection.DoomThisConnection();
        }
    }

    internal void AddWeakReference(object value, int tag)
    {
        InnerConnection.AddWeakReference(value, tag);
    }

    override protected DbCommand CreateDbCommand()
    {
        DbCommand command = null;
        DbProviderFactory providerFactory = ConnectionFactory.ProviderFactory;
        command = providerFactory.CreateCommand();
        command.Connection = this;
        return command;
    }

    override protected void Dispose(bool disposing)
    {
        if (disposing)
        {
            // Drop the connection string state before closing.
            _userConnectionOptions = null;
            _poolGroup = null;
            Close();
        }
        // DisposeMe is a partial-method hook implemented elsewhere in this partial class.
        DisposeMe(disposing);
        base.Dispose(disposing);
    }

    partial void RepairInnerConnection();

    internal void NotifyWeakReference(int message)
    {
        InnerConnection.NotifyWeakReference(message);
    }

    // Validates that a connection string has been supplied before connecting.
    // Only callable while the connection is in the "connecting" state.
    internal void PermissionDemand()
    {
        Debug.Assert(DbConnectionClosedConnecting.SingletonInstance == _innerConnection, "not connecting");
        System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = PoolGroup;
        DbConnectionOptions connectionOptions = ((null != poolGroup) ? poolGroup.ConnectionOptions : null);
        if ((null == connectionOptions) || connectionOptions.IsEmpty)
        {
            throw ADP.NoConnectionString();
        }
        DbConnectionOptions userConnectionOptions = UserConnectionOptions;
        Debug.Assert(null != userConnectionOptions, "null UserConnectionOptions");
    }

    internal void RemoveWeakReference(object value)
    {
        InnerConnection.RemoveWeakReference(value);
    }

    // Swaps in a new inner connection and raises the matching StateChange event.
    // Also bumps _closeCount whenever the observable state leaves Open.
    internal void SetInnerConnectionEvent(DbConnectionInternal to)
    {
        Debug.Assert(null != _innerConnection, "null InnerConnection");
        Debug.Assert(null != to, "to null InnerConnection");
        // Only the Open bit matters for the externally visible state transition.
        ConnectionState originalState = _innerConnection.State & ConnectionState.Open;
        ConnectionState currentState = to.State & ConnectionState.Open;
        if ((originalState != currentState) && (ConnectionState.Closed == currentState))
        {
            // unchecked: wrap-around on overflow is acceptable for this counter.
            unchecked { _closeCount++; }
        }
        _innerConnection = to;
        if (ConnectionState.Closed == originalState && ConnectionState.Open == currentState)
        {
            OnStateChange(DbConnectionInternal.StateChangeOpen);
        }
        else if (ConnectionState.Open == originalState && ConnectionState.Closed == currentState)
        {
            OnStateChange(DbConnectionInternal.StateChangeClosed);
        }
        else
        {
            Debug.Assert(false, "unexpected state switch");
            if (originalState != currentState)
            {
                OnStateChange(new StateChangeEventArgs(originalState, currentState));
            }
        }
    }

    // Atomically replaces the inner connection only if it is still 'from'.
    // Returns false when another thread changed it first.
    internal bool SetInnerConnectionFrom(DbConnectionInternal to, DbConnectionInternal from)
    {
        Debug.Assert(null != _innerConnection, "null InnerConnection");
        Debug.Assert(null != from, "from null InnerConnection");
        Debug.Assert(null != to, "to null InnerConnection");
        bool result = (from == Interlocked.CompareExchange<DbConnectionInternal>(ref _innerConnection, to, from));
        return result;
    }

    // Unconditionally replaces the inner connection without raising events.
    internal void SetInnerConnectionTo(DbConnectionInternal to)
    {
        Debug.Assert(null != _innerConnection, "null InnerConnection");
        Debug.Assert(null != to, "to null InnerConnection");
        _innerConnection = to;
    }
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.ConstrainedExecution;
using System.Runtime.InteropServices;
using System.Security;
using System.Security.Cryptography;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
namespace Microsoft.Win32.SafeHandles {
/// <summary>
///     SafeHandle for buffers returned by the Axl APIs
/// </summary>
#if !FEATURE_CORESYSTEM
#pragma warning disable 618    // Have not migrated to v4 transparency yet
[System.Security.SecurityCritical(System.Security.SecurityCriticalScope.Everything)]
#pragma warning restore 618
#endif
internal sealed class SafeAxlBufferHandle : SafeHandleZeroOrMinusOneIsInvalid {
    // ownsHandle=true: this SafeHandle is responsible for freeing the buffer.
    private SafeAxlBufferHandle() : base(true) {
        return;
    }

    // Axl buffers are allocated on the process heap, so freeing goes through
    // GetProcessHeap/HeapFree below.
    [DllImport("kernel32")]
#if !FEATURE_CORESYSTEM
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#endif
    [SuppressUnmanagedCodeSecurity]
    private static extern IntPtr GetProcessHeap();

    [DllImport("kernel32")]
#if !FEATURE_CORESYSTEM
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#endif
    [SuppressUnmanagedCodeSecurity]
    [return: MarshalAs(UnmanagedType.Bool)]
    private static extern bool HeapFree(IntPtr hHeap, int dwFlags, IntPtr lpMem);

    protected override bool ReleaseHandle() {
        // _AxlFree is a wrapper around HeapFree on the process heap. Since it is not exported from mscorwks
        // we just call HeapFree directly. This needs to be updated if _AxlFree is ever changed.
        // Note: HeapFree's result is deliberately ignored; release always reports success.
        HeapFree(GetProcessHeap(), 0, handle);
        return true;
    }
}
/// <summary>
///     SafeHandle base class for CAPI handles (such as HCRYPTKEY and HCRYPTHASH) which must keep their
///     CSP alive as long as they stay alive as well. CAPI requires that all child handles belonging to a
///     HCRYPTPROV must be destroyed up before the reference count to the HCRYPTPROV drops to zero.
///     Since we cannot control the order of finalization between the two safe handles, SafeCapiHandleBase
///     maintains a native refcount on its parent HCRYPTPROV to ensure that if the corresponding
///     SafeCspKeyHandle is finalized first CAPI still keeps the provider alive.
/// </summary>
#if FEATURE_CORESYSTEM
[System.Security.SecurityCritical]
#else
#pragma warning disable 618    // Have not migrated to v4 transparency yet
[SecurityCritical(SecurityCriticalScope.Everything)]
#pragma warning restore 618
#endif
internal abstract class SafeCapiHandleBase : SafeHandleZeroOrMinusOneIsInvalid {
    // Raw parent HCRYPTPROV we hold a native reference on; IntPtr.Zero until set.
    private IntPtr m_csp;

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
    internal SafeCapiHandleBase() : base(true) {
    }

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    [DllImport("advapi32", SetLastError = true)]
#if !FEATURE_CORESYSTEM
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#endif
    [SuppressUnmanagedCodeSecurity]
    [return: MarshalAs(UnmanagedType.Bool)]
    private static extern bool CryptContextAddRef(IntPtr hProv,
                                                  IntPtr pdwReserved,
                                                  int dwFlags);

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    [DllImport("advapi32")]
#if !FEATURE_CORESYSTEM
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#endif
    [SuppressUnmanagedCodeSecurity]
    [return: MarshalAs(UnmanagedType.Bool)]
    private static extern bool CryptReleaseContext(IntPtr hProv, int dwFlags);

    // Parent HCRYPTPROV of this child handle. The setter takes a native
    // reference on the provider via CryptContextAddRef; ReleaseHandle releases it.
    protected IntPtr ParentCsp {
        get { return m_csp; }

#if !FEATURE_CORESYSTEM
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
#endif
        set {
            // We should not be resetting the parent CSP if it's already been set once - that will
            // lead to leaking the original handle.
            Debug.Assert(m_csp == IntPtr.Zero);

            int error = (int)CapiNative.ErrorCode.Success;

            // A successful call to CryptContextAddRef and an assignment of the handle value to our field
            // SafeHandle need to happen atomically, so we contain them within a CER.
            RuntimeHelpers.PrepareConstrainedRegions();
            try { }
            finally {
                if (CryptContextAddRef(value, IntPtr.Zero, 0)) {
                    m_csp = value;
                }
                else {
                    error = Marshal.GetLastWin32Error();
                }
            }

            if (error != (int)CapiNative.ErrorCode.Success) {
                throw new CryptographicException(error);
            }
        }
    }

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
#if !FEATURE_CORESYSTEM
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
#endif
    // Sets the parent provider from a SafeCspHandle, pinning the safe handle's
    // refcount while the raw handle value is extracted and ref'd natively.
    internal void SetParentCsp(SafeCspHandle parentCsp) {
        bool addedRef = false;
        RuntimeHelpers.PrepareConstrainedRegions();
        try {
            parentCsp.DangerousAddRef(ref addedRef);
            IntPtr rawParentHandle = parentCsp.DangerousGetHandle();
            ParentCsp = rawParentHandle;
        }
        finally {
            if (addedRef) {
                parentCsp.DangerousRelease();
            }
        }
    }

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    // Derived classes destroy their specific CAPI child handle here
    // (e.g. CryptDestroyHash / CryptDestroyKey).
    protected abstract bool ReleaseCapiChildHandle();

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    protected override sealed bool ReleaseHandle() {
        // Order is important here - we must destroy the child handle before the parent CSP
        bool destroyedChild = ReleaseCapiChildHandle();
        bool releasedCsp = true;

        if (m_csp != IntPtr.Zero) {
            releasedCsp = CryptReleaseContext(m_csp, 0);
        }

        return destroyedChild && releasedCsp;
    }
}
/// <summary>
///     SafeHandle for CAPI hash algorithms (HCRYPTHASH)
/// </summary>
#if FEATURE_CORESYSTEM
[System.Security.SecurityCritical]
#else
#pragma warning disable 618    // Have not migrated to v4 transparency yet
[System.Security.SecurityCritical(System.Security.SecurityCriticalScope.Everything)]
#pragma warning restore 618
#endif
internal sealed class SafeCapiHashHandle : SafeCapiHandleBase {
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    private SafeCapiHashHandle() {
    }

    /// <summary>
    ///     NULL hash handle
    /// </summary>
    // Note: returns a fresh instance wrapping IntPtr.Zero on every access.
    public static SafeCapiHashHandle InvalidHandle {
        get {
            SafeCapiHashHandle handle = new SafeCapiHashHandle();
            handle.SetHandle(IntPtr.Zero);
            return handle;
        }
    }

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    [DllImport("advapi32")]
#if !FEATURE_CORESYSTEM
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#endif
    [SuppressUnmanagedCodeSecurity]
    [return: MarshalAs(UnmanagedType.Bool)]
    private static extern bool CryptDestroyHash(IntPtr hHash);

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    // Destroys the HCRYPTHASH; the base class releases the parent CSP afterwards.
    protected override bool ReleaseCapiChildHandle() {
        return CryptDestroyHash(handle);
    }
}
/// <summary>
///     SafeHandle for CAPI keys (HCRYPTKEY)
/// </summary>
#if FEATURE_CORESYSTEM
[System.Security.SecurityCritical]
#else
#pragma warning disable 618    // Have not migrated to v4 transparency yet
[System.Security.SecurityCritical(System.Security.SecurityCriticalScope.Everything)]
#pragma warning restore 618
#endif
internal sealed class SafeCapiKeyHandle : SafeCapiHandleBase {
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    private SafeCapiKeyHandle() {
    }

    /// <summary>
    ///     NULL key handle
    /// </summary>
    // Note: returns a fresh instance wrapping IntPtr.Zero on every access.
    internal static SafeCapiKeyHandle InvalidHandle {
        [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
        get {
            SafeCapiKeyHandle handle = new SafeCapiKeyHandle();
            handle.SetHandle(IntPtr.Zero);
            return handle;
        }
    }

    [DllImport("advapi32")]
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#else
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#endif
    [SuppressUnmanagedCodeSecurity]
    [return: MarshalAs(UnmanagedType.Bool)]
    private static extern bool CryptDestroyKey(IntPtr hKey);

    /// <summary>
    ///     Make a copy of this key handle
    /// </summary>
    [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    internal SafeCapiKeyHandle Duplicate() {
        Contract.Requires(!IsInvalid && !IsClosed);
        Contract.Ensures(Contract.Result<SafeCapiKeyHandle>() != null && !Contract.Result<SafeCapiKeyHandle>().IsInvalid && !Contract.Result<SafeCapiKeyHandle>().IsClosed);

        SafeCapiKeyHandle duplicate = null;

        RuntimeHelpers.PrepareConstrainedRegions();
        try {
            if (!CapiNative.UnsafeNativeMethods.CryptDuplicateKey(this, IntPtr.Zero, 0, out duplicate)) {
                throw new CryptographicException(Marshal.GetLastWin32Error());
            }
        }
        finally {
            // Propagate the parent CSP in a CER-protected finally so the
            // duplicate also keeps the provider alive (see SafeCapiHandleBase).
            if (duplicate != null && !duplicate.IsInvalid && ParentCsp != IntPtr.Zero) {
                duplicate.ParentCsp = ParentCsp;
            }
        }

        return duplicate;
    }

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    // Destroys the HCRYPTKEY; the base class releases the parent CSP afterwards.
    protected override bool ReleaseCapiChildHandle() {
        return CryptDestroyKey(handle);
    }
}
/// <summary>
///     SafeHandle for crypto service providers (HCRYPTPROV)
/// </summary>
#if FEATURE_CORESYSTEM
[System.Security.SecurityCritical]
#else
#pragma warning disable 618    // Have not migrated to v4 transparency yet
[System.Security.SecurityCritical(System.Security.SecurityCriticalScope.Everything)]
#pragma warning restore 618
#endif
internal sealed class SafeCspHandle : SafeHandleZeroOrMinusOneIsInvalid {
    [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    private SafeCspHandle() : base(true) {
        return;
    }

    [DllImport("advapi32", SetLastError = true)]
#if !FEATURE_CORESYSTEM
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#endif
    [SuppressUnmanagedCodeSecurity]
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    [return: MarshalAs(UnmanagedType.Bool)]
    private static extern bool CryptContextAddRef(SafeCspHandle hProv,
                                                  IntPtr pdwReserved,
                                                  int dwFlags);

    [DllImport("advapi32")]
#if !FEATURE_CORESYSTEM
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#endif
    [SuppressUnmanagedCodeSecurity]
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    [return: MarshalAs(UnmanagedType.Bool)]
    private static extern bool CryptReleaseContext(IntPtr hProv, int dwFlags);

    /// <summary>
    ///     Create a second SafeCspHandle which refers to the same HCRYPTPROV
    /// </summary>
    [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    public SafeCspHandle Duplicate() {
        Contract.Requires(!IsInvalid && !IsClosed);

        // In the window between the call to CryptContextAddRef and when the raw handle value is assigned
        // into this safe handle, there's a second reference to the original safe handle that the CLR does
        // not know about, so we need to bump the reference count around this entire operation to ensure
        // that we don't have the original handle closed underneath us.
        bool acquired = false;
        RuntimeHelpers.PrepareConstrainedRegions();
        try {
            DangerousAddRef(ref acquired);
            IntPtr originalHandle = DangerousGetHandle();

            int error = (int)CapiNative.ErrorCode.Success;

            SafeCspHandle duplicate = new SafeCspHandle();

            // A successful call to CryptContextAddRef and an assignment of the handle value to the duplicate
            // SafeHandle need to happen atomically, so we contain them within a CER.
            RuntimeHelpers.PrepareConstrainedRegions();
            try { }
            finally {
                if (!CryptContextAddRef(this, IntPtr.Zero, 0)) {
                    error = Marshal.GetLastWin32Error();
                }
                else {
                    duplicate.SetHandle(originalHandle);
                }
            }

            // If we could not call CryptContextAddRef succesfully, then throw the error here otherwise
            // we should be in a valid state at this point.
            if (error != (int)CapiNative.ErrorCode.Success) {
                duplicate.Dispose();
                throw new CryptographicException(error);
            }
            else {
                Debug.Assert(!duplicate.IsInvalid, "Failed to duplicate handle successfully");
            }

            return duplicate;
        }
        finally {
            if (acquired) {
                DangerousRelease();
            }
        }
    }

#if FEATURE_CORESYSTEM
    [System.Security.SecurityCritical]
#endif
    protected override bool ReleaseHandle() {
        // Releases the native reference taken for this wrapper (either the
        // original acquisition or the CryptContextAddRef made in Duplicate).
        return CryptReleaseContext(handle, 0);
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace SourceParkAPI.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
    // Start each top-level generation with an empty reference map; the private
    // overload threads it through recursive calls (presumably to reuse
    // already-created instances -- its full logic lives in the overload below).
    Dictionary<Type, object> createdObjectReferences = new Dictionary<Type, object>();
    return GenerateObject(type, createdObjectReferences);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Windows.Forms;
using Bloom.Book;
using Bloom.Api;
using L10NSharp;
using SIL.IO;
using SIL.Media;
#if __MonoCS__
using SIL.Media.AlsaAudio;
#else
using SIL.Media.Naudio;
#endif
using SIL.Reporting;
using Timer = System.Windows.Forms.Timer;
using System.Collections.Generic;
// Note: it is for the benefit of this component that Bloom references NAudio. We don't use it directly,
// but Palaso.Media does, and we need to make sure it gets copied to our output.
namespace Bloom.Edit
{
public delegate AudioRecording Factory(); //autofac uses this
/// <summary>
/// This is a clean back-end service that provides recording to files
/// via some http requests from the server.
/// It also delivers real time microphone peak level numbers over a WebSocket.
/// The client can be found at audioRecording.ts.
/// </summary>
public class AudioRecording :IDisposable
{
private readonly BookSelection _bookSelection;
private AudioRecorder _recorder;
private bool _exitHookSet;
BloomWebSocketServer _webSocketServer;
private const string kWebsocketContext = "audio-recording"; // must match that found in audioRecording.tsx
public const string kPublishableExtension = "mp3";
public const string kRecordableExtension = "wav";
/// <summary>
/// The file we want to record to
/// </summary>
public string PathToTemporaryWav;
//the ultimate destination, after we've cleaned up the recording
public string PathToRecordableAudioForCurrentSegment;
private string _backupPathForRecordableAudio; // If we are about to replace a recording, save the old one here; a temp file.
private string _backupPathForPublishableAudio;
private DateTime _startRecording; // For tracking recording length.
LameEncoder _mp3Encoder = new LameEncoder();
/// <summary>
/// This timer introduces a brief delay from the mouse click to actually starting to record.
/// Based on HearThis behavior, I think the purpose is to avoid recording the click,
/// and perhaps also experience indicates the user typically pauses slightly between clicking and actually talking.
/// HearThis uses a system timer rather than this normal form timer because with the latter, when the button "captured" the mouse, the timer refused to fire.
/// I don't think we can capture the mouse (at least not attempting it yet) so Bloom does not have this problem and uses a regular Windows.Forms timer.
/// </summary>
private Timer _startRecordingTimer;
private double _previousLevel;
private bool _disposed;
// This is a bit of a kludge. The server needs to be able to retrieve the data from AudioDevicesJson.
// It would be quite messy to give the image server access to the EditingModel which owns the instance of AudioRecording.
// However in practice (and very likely we would preserve this even if we had more than one book open at a time)
// there is only one current AudioRecording object supporting the one EditingModel. This variable keeps track
// of the one most recently created and uses it in the AudioDevicesJson method, which the server can therefore
// call directly since it is static.
private static AudioRecording CurrentRecording { get; set; }
private ManualResetEvent _completingRecording; // Note: For simplicity, recommend that any function needing this lock should just check it regardless of the file path. The file paths get tricky with the multiple extensions possible, sequencing, etc., so for now, we recommend avoiding pre-mature optimization until needed.
private int _collectionAudioTrimEndMilliseconds;
public AudioRecording(BookSelection bookSelection, BloomWebSocketServer bloomWebSocketServer)
{
_bookSelection = bookSelection;
_startRecordingTimer = new Timer();
_startRecordingTimer.Interval = 300; // ms from click to actual recording
_startRecordingTimer.Tick += OnStartRecordingTimer_Elapsed;
_backupPathForRecordableAudio = Path.GetTempFileName();
_backupPathForPublishableAudio = Path.GetTempFileName();
CurrentRecording = this;
_webSocketServer = bloomWebSocketServer;
// We create the ManualResetEvent in the "set" (non-blocking) state initially. The idea is to allow HandleEndRecord() to run,
// but then block functions like HandleAudioFileRequest() which relies on the contents of the audio folder until Recorder_Stopped() has reported finishing saving the audio file.
_completingRecording = new ManualResetEvent(true);
}
public void RegisterWithApiHandler(BloomApiHandler apiHandler)
{
// HandleStartRecording seems to need to be on the UI thread in order for HandleEndRecord() to detect the correct state.
apiHandler.RegisterEndpointHandler("audio/startRecord", HandleStartRecording, true).Measureable();
// Note: This handler locks and unlocks a shared resource (_completeRecording lock).
// Any other handlers depending on this resource should not wait on the same thread (i.e. the UI thread) or deadlock can occur.
apiHandler.RegisterEndpointHandler("audio/endRecord", HandleEndRecord, true).Measureable();
// Any handler which retrieves information from the audio folder SHOULD wait on the _completeRecording lock (call WaitForRecordingToComplete()) to ensure that it sees
// a consistent state of the audio folder, and therefore should NOT run on the UI thread.
// Also, explicitly setting requiresSync to true (even tho that's default anyway) to make concurrency less complicated to think about
apiHandler.RegisterEndpointHandler("audio/checkForAnyRecording", HandleCheckForAnyRecording, false, true);
apiHandler.RegisterEndpointHandler("audio/checkForAllRecording", HandleCheckForAllRecording, false, true);
apiHandler.RegisterEndpointHandler("audio/deleteSegment", HandleDeleteSegment, false, true);
apiHandler.RegisterEndpointHandler("audio/checkForSegment", HandleCheckForSegment, false, true);
apiHandler.RegisterEndpointHandler("audio/wavFile", HandleAudioFileRequest, false, true);
// Doesn't matter whether these are on UI thread or not, so using the old default which was true
apiHandler.RegisterEndpointHandler("audio/currentRecordingDevice", HandleCurrentRecordingDevice, true);
apiHandler.RegisterEndpointHandler("audio/devices", HandleAudioDevices, true);
apiHandler.RegisterEndpointHandler("audio/copyAudioFile", HandleCopyAudioFile, false);
Debug.Assert(BloomServer.portForHttp > 0,"Need the server to be listening before this can be registered (BL-3337).");
}
// Does this page have any audio at all? Used to enable 'Listen to the whole page'.
private void HandleCheckForAnyRecording(ApiRequest request)
{
var ids = request.RequiredParam("ids");
var idList = ids.Split(',');
if (idList.Any())
{
WaitForRecordingToComplete(); // More straightforward to test for the existence of the files by waiting until all the files have been written.
}
foreach (var id in idList)
{
if (RobustFile.Exists(GetPathToRecordableAudioForSegment(id)))
{
request.PostSucceeded();
return;
}
if (RobustFile.Exists(GetPathToPublishableAudioForSegment(id)))
{
request.PostSucceeded();
return;
}
}
request.Failed("no audio");
}
private void HandleCheckForAllRecording(ApiRequest request)
{
var ids = request.RequiredParam("ids");
var idList = ids.Split(',');
if (idList.Any())
{
WaitForRecordingToComplete(); // More straightforward to test for the existence of the files by waiting until all the files have been written.
}
foreach (var id in idList)
{
if (!RobustFile.Exists(GetPathToRecordableAudioForSegment(id)) && !RobustFile.Exists(GetPathToPublishableAudioForSegment(id)))
{
request.ReplyWithBoolean(false);
return;
}
}
request.ReplyWithBoolean(true);
}
/// <summary>
/// Returns a json string like {"devices":["microphone", "Logitech Headset"], "productName":"Logitech Headset", "genericName":"Headset"},
/// except that in practice currrently the generic and product names are the same and not as helpful as the above.
/// Devices is a list of product names (of available recording devices), the productName and genericName refer to the
/// current selection (or will be null, if no current device).
/// </summary>
public void HandleAudioDevices(ApiRequest request)
{
try
{
var sb = new StringBuilder("{\"devices\":[");
sb.Append(string.Join(",", RecordingDevices.Select(d => "\""+d.ProductName+"\"")));
sb.Append("],\"productName\":");
if (CurrentRecording.RecordingDevice != null)
sb.Append("\"" + CurrentRecording.RecordingDevice.ProductName + "\"");
else
sb.Append("null");
sb.Append(",\"genericName\":");
if (CurrentRecording.RecordingDevice != null)
sb.Append("\"" + CurrentRecording.RecordingDevice.GenericName + "\"");
else
sb.Append("null");
sb.Append("}");
request.ReplyWithJson(sb.ToString());
}
catch (Exception e)
{
Logger.WriteError("AudioRecording could not find devices: ", e);
// BL-7272 shows an exception occurred somewhere, and it may have been here.
// If so, we just assume no input devices could be found.
request.ReplyWithJson("{\"devices\":[],\"productName\":null,\"genericName\":null}");
}
}
/// <summary>
/// Used to initiate sending the PeakLevelChanged notifications.
/// Currently this typically happens when the Recorder instance is created,
/// which is usually when the talking book tool asks for the AudioDevicesJson.
/// This is not very intuitive, but it's the most easily detectable event
/// that indicates that the talking book tool is actually active.
/// </summary>
public void BeginMonitoring()
{
if (!RecordingDevices.Contains(RecordingDevice))
{
RecordingDevice = RecordingDevices.FirstOrDefault();
}
if (RecordingDevice != null)
{
Recorder.BeginMonitoring();
}
}
private void SetPeakLevel(PeakLevelEventArgs args)
{
var level = Math.Round(args.Level, 3);
if(level != _previousLevel)
{
_previousLevel = level;
_webSocketServer.SendString(kWebsocketContext, "peakAudioLevel", level.ToString(CultureInfo.InvariantCulture));
}
}
private void HandleEndRecord(ApiRequest request)
{
if (Recorder.RecordingState != RecordingState.Recording)
{
//usually, this is a result of us getting the "end" before we actually started, because it was too quick
if(TestForTooShortAndSendFailIfSo(request))
{
_startRecordingTimer.Enabled = false;//we don't want it firing in a few milliseconds from now
return;
}
if (RecordingDevice == null && Recorder.RecordingState == RecordingState.NotYetStarted)
{
// We've already complained about no recording device, no need to complain about not recording.
request.PostSucceeded();
return;
}
//but this would handle it if there was some other reason
request.Failed("Got endRecording, but was not recording");
return;
}
Exception exceptionCaught = null;
try
{
// We never want this thread blocked, but we want to block HandleAudioFileRequest()
// until Recorder_Stopped() succeeds.
_completingRecording.Reset();
Debug.WriteLine("Stop recording");
Recorder.Stopped += Recorder_Stopped;
//note, this doesn't actually stop... more like... starts the stopping. It does mark the time
//we requested to stop. A few seconds later (2, looking at the library code today), it will
//actually close the file and raise the Stopped event
Recorder.Stop();
}
catch (Exception ex)
{
// Swallow the exception for now. One reason (based on HearThis comment) is that the user
// didn't hold the record button down long enough, we detect this below.
exceptionCaught = ex;
Recorder.Stopped -= Recorder_Stopped;
Debug.WriteLine("Error stopping recording: " + ex.Message);
}
if (TestForTooShortAndSendFailIfSo(request))
{
_completingRecording.Set(); // not saving a recording, so don't block HandleAudioFileRequest
return;
}
else if (exceptionCaught != null)
{
ResetRecorderOnError();
_completingRecording.Set(); // not saving a recording, so don't block HandleAudioFileRequest
request.Failed("Stopping the recording caught an exception: " + exceptionCaught.Message);
}
else
{
// Report success now that we're sure we succeeded.
request.PostSucceeded();
}
}
private void ResetRecorderOnError()
{
Debug.WriteLine("Resetting the audio recorder");
// Try to delete the file we were writing to.
try
{
RobustFile.Delete(PathToRecordableAudioForCurrentSegment);
}
catch (Exception error)
{
Logger.WriteError("Audio Recording trying to delete "+PathToRecordableAudioForCurrentSegment, error);
}
// The recorder may well be in a bad state. Throw it away and get a new one.
// But maintain the assigned recording device.
var currentMic = RecordingDevice.ProductName;
_recorder.Dispose();
CreateRecorder();
SetRecordingDevice(currentMic);
}
private void Recorder_Stopped(IAudioRecorder arg1, ErrorEventArgs arg2)
{
Recorder.Stopped -= Recorder_Stopped;
Directory.CreateDirectory(System.IO.Path.GetDirectoryName(PathToRecordableAudioForCurrentSegment)); // make sure audio directory exists
try
{
var minimum = TimeSpan.FromMilliseconds(300); // this is arbitrary
AudioRecorder.TrimWavFile(PathToTemporaryWav, PathToRecordableAudioForCurrentSegment, new TimeSpan(), TimeSpan.FromMilliseconds(_collectionAudioTrimEndMilliseconds), minimum);
RobustFile.Delete(PathToTemporaryWav); // Otherwise, these continue to clutter up the temp directory.
}
catch (Exception error)
{
Logger.WriteEvent(error.Message);
RobustFile.Copy(PathToTemporaryWav,PathToRecordableAudioForCurrentSegment, true);
}
//We could put this off entirely until we make the ePUB.
//I'm just gating this for now because maybe the thought was that it's better to do it a little at a time?
//That's fine so long as it doesn't make the UI unresponsive on slow machines.
var mp3Path = _mp3Encoder.Encode(PathToRecordableAudioForCurrentSegment);
// Got a good new recording, can safely clean up all backups related to old one.
foreach (var path in Directory.EnumerateFiles(
Path.GetDirectoryName(PathToRecordableAudioForCurrentSegment),
Path.GetFileNameWithoutExtension(PathToRecordableAudioForCurrentSegment)+ "*"+ ".bak"))
{
RobustFile.Delete(path);
}
// BL-7617 Don't keep .wav file after .mp3 is created successfully.
if (!string.IsNullOrEmpty(mp3Path) && File.Exists(mp3Path))
{
RobustFile.Delete(PathToRecordableAudioForCurrentSegment);
}
_completingRecording.Set(); // will release HandleAudioFileRequest if it is waiting.
}
private bool TestForTooShortAndSendFailIfSo(ApiRequest request)
{
if ((DateTime.Now - _startRecording) < TimeSpan.FromSeconds(0.5))
{
CleanUpAfterPressTooShort();
var msg = LocalizationManager.GetString("EditTab.Toolbox.TalkingBook.PleaseHoldMessage",
"Please hold the button down until you have finished recording",
"Appears when the speak/record button is pressed very briefly");
request.Failed(msg);
return true;
}
return false;
}
public void HandleStartRecording(ApiRequest request)
{
// Precondition: HandleStartRecording shouldn't run until the previous HandleEndRecord() is completely done with PathToRecordableAudioForCurrentSegment
// Unfortunately this is not as easy to ensure on the code side due to HandleStartRecord() not being able to be moved off the UI thread, and deadlock potential
// I found it too difficult to actually violate this precondition from the user side.
// Therefore, I just assume this to be true.
if (Recording)
{
request.Failed("Already recording");
return;
}
string segmentId = request.RequiredParam("id");
PathToRecordableAudioForCurrentSegment = GetPathToRecordableAudioForSegment(segmentId); // Careful! Overwrites the previous value of the member variable.
PathToTemporaryWav = Path.GetTempFileName();
if (Recorder.RecordingState == RecordingState.RequestedStop)
{
request.Failed(LocalizationManager.GetString("EditTab.Toolbox.TalkingBook.BadState",
"Bloom recording is in an unusual state, possibly caused by unplugging a microphone. You will need to restart.","This is very low priority for translation."));
}
// If someone unplugged the microphone we were planning to use switch to another.
// This also triggers selecting the first one initially.
if (!RecordingDevices.Contains(RecordingDevice))
{
RecordingDevice = RecordingDevices.FirstOrDefault();
}
if (RecordingDevice == null)
{
ReportNoMicrophone();
request.Failed("No Microphone");
return ;
}
if(Recording)
{
request.Failed( "Already Recording");
return;
}
if (!PrepareBackupFile(PathToRecordableAudioForCurrentSegment, ref _backupPathForRecordableAudio, request)) return;
// There are two possible scenarios when starting to record.
// 1. We have a recordable file and corresponding publishable file.
// In that case, we need to make sure to restore the publishable file if we restore the recordable one so they stay in sync.
// 2. We have an publishable file with no corresponding recordable file.
// In that case, we need to restore it if there is any problem creating a new recordable file.
if (!PrepareBackupFile(GetPathToPublishableAudioForSegment(segmentId), ref _backupPathForPublishableAudio, request)) return;
_startRecording = DateTime.Now;
_startRecordingTimer.Start();
request.ReplyWithText("starting record soon");
}
// We want to move the file specified in the first path to a new location to use
// as a backup while we typically replace it.
// A previous backup, possibly of the same or another file, is no longer needed (if it exists)
// and should be deleted, if possible, on a background thread.
// The path to the backup will be updated to the new backup.
// Typically the new name matches the original with the extension changed to .bak.
// If necessary (because the desired backup file already exists), we will add a counter
// to get the a name that is not in use.
// A goal is (for performance reasons) not to have to wait while a file is deleted
// (and definitely not while one is copied).
private static bool PrepareBackupFile(string path, ref string backupPath, ApiRequest request)
{
int counter = 0;
backupPath = path + ".bak";
var originalExtension = Path.GetExtension(path);
var pathWithNoExtension = Path.GetFileNameWithoutExtension(path);
while (File.Exists(backupPath))
{
counter++;
backupPath = pathWithNoExtension + counter + originalExtension + ".bak";
}
// An earlier version copied the file to a temp file. We can't MOVE to a file in the system temp
// directory, though, because we're not sure it is on the same volume. And sometimes the time
// required to copy the file was noticeable and resulted in the user starting to speak before
// the system started recording. So we pay the price of a small chance of backups being left
// around the book directory to avoid that danger.
if (RobustFile.Exists(path))
{
try
{
RobustFile.Move(path, backupPath);
}
catch (Exception err)
{
ErrorReport.NotifyUserOfProblem(err,
"The old copy of the recording at " + path +
" is locked up, so Bloom can't record over it at the moment. If it remains stuck, you may need to restart your computer.");
request.Failed("Audio file locked");
return false;
}
}
return true;
}
private string GetPathToPublishableAudioForSegment(string segmentId)
{
if (_bookSelection?.CurrentSelection?.FolderPath == null)
{
return "";
}
return Path.Combine(_bookSelection.CurrentSelection.FolderPath, "audio", $"{segmentId}.{kPublishableExtension}");
}
/// <summary>
/// Builds the path to the recordable (uncompressed) audio file for the given segment,
/// or returns an empty string when no book is currently selected.
/// </summary>
private string GetPathToRecordableAudioForSegment(string segmentId)
{
	var folderPath = _bookSelection?.CurrentSelection?.FolderPath;
	if (folderPath == null)
		return "";
	return Path.Combine(folderPath, "audio", $"{segmentId}.{kRecordableExtension}");
}
/// <summary>
/// True while audio capture is in progress, including the window after a stop
/// has been requested but before the recorder has actually stopped.
/// </summary>
public bool Recording =>
	Recorder.RecordingState == RecordingState.Recording ||
	Recorder.RecordingState == RecordingState.RequestedStop;
/// <summary>
/// Elapsed handler for _startRecordingTimer: stops the timer (so this fires once per
/// start request) and begins the actual audio capture, reporting success or failure
/// to the toolbox UI over the web socket.
/// </summary>
private void OnStartRecordingTimer_Elapsed(object sender, EventArgs e)
{
	_startRecordingTimer.Stop();
	Debug.WriteLine("Start actual recording");
	try
	{
		Recorder.BeginRecording(PathToTemporaryWav);
		// Let the toolbox know recording really started so it can update its state.
		_webSocketServer.SendString(kWebsocketContext, "recordStartStatus", "success");
	}
	catch (InvalidOperationException)
	{
		// Likely a case of BL-7568, which as far as we can figure isn't Bloom's fault.
		// Show a friendly message in the TalkingBook toolbox.
		_webSocketServer.SendString(kWebsocketContext, "recordStartStatus", "failure");
	}
}
/// <summary>
/// Undoes the side effects of a record button press that was too short to count:
/// stops any recording that managed to start, deletes the partial recordable and
/// publishable files, and restores any backups of a previous recording.
/// </summary>
private void CleanUpAfterPressTooShort()
{
	// Seems sometimes on a very short click the recording actually got started while we were informing the user
	// that he didn't click long enough. Before we try to delete the file where the recording is taking place,
	// we have to stop it; otherwise, we will get an exception trying to delete it.
	while (Recording)
	{
		try
		{
			Recorder.Stop();
			// Pump the message queue so the recorder's stop can actually complete.
			Application.DoEvents();
		}
		catch (Exception)
		{
			// Deliberately best-effort: keep trying until the recorder reports stopped.
		}
	}
	// Don't kid the user we have a recording for this.
	// Also, the absence of the file is how the UI knows to switch back to the state where 'speak'
	// is the expected action.
	try
	{
		// Delete doesn't throw if the FILE doesn't exist, but if the Directory doesn't, you're toast.
		// And the very first time a user tries this, the audio directory probably doesn't exist...
		if (Directory.Exists(Path.GetDirectoryName(PathToRecordableAudioForCurrentSegment)))
		{
			RobustFile.Delete(PathToRecordableAudioForCurrentSegment);
			// BL-6881: "Play btn sometimes enabled after too short audio", because the .mp3 version was left behind.
			var mp3Version = Path.ChangeExtension(PathToRecordableAudioForCurrentSegment, kPublishableExtension);
			RobustFile.Delete(mp3Version);
		}
	}
	catch (Exception error)
	{
		Logger.WriteError("Audio Recording trying to delete "+PathToRecordableAudioForCurrentSegment, error);
		Debug.Fail("can't delete the recording even after we stopped:"+error.Message);
	}
	// If we had a prior recording, restore it...button press may have been a mistake.
	if (RobustFile.Exists(_backupPathForRecordableAudio))
	{
		try
		{
			RobustFile.Move(_backupPathForRecordableAudio, PathToRecordableAudioForCurrentSegment);
		}
		catch (IOException e)
		{
			Logger.WriteError("Audio Recording could not restore backup " + _backupPathForRecordableAudio, e);
			// if we can't restore it we can't. Review: are there other exception types we should ignore? Should we bother the user?
		}
	}
	// Likewise restore the publishable (.mp3) backup, if any.
	if (RobustFile.Exists(_backupPathForPublishableAudio))
	{
		try
		{
			RobustFile.Move(_backupPathForPublishableAudio, Path.ChangeExtension(PathToRecordableAudioForCurrentSegment, kPublishableExtension));
		}
		catch (IOException e)
		{
			Logger.WriteError("Audio Recording could not restore backup " + _backupPathForPublishableAudio, e);
		}
	}
}
/// <summary>
/// The microphone the underlying recorder will capture from.
/// Simply delegates to the recorder's SelectedDevice.
/// </summary>
public IRecordingDevice RecordingDevice
{
	get { return Recorder.SelectedDevice; }
	set { Recorder.SelectedDevice = value; }
}
/// <summary>
/// Enumerates the audio input devices reported by the platform audio library
/// (ALSA when built for Mono/Linux, NAudio otherwise).
/// </summary>
private IEnumerable<IRecordingDevice> RecordingDevices
{
#if __MonoCS__
	get { return SIL.Media.AlsaAudio.RecordingDevice.Devices; }
#else
	get { return SIL.Media.Naudio.RecordingDevice.Devices; }
#endif
}
/// <summary>
/// Shows a localized message box telling the user no sound recording device is available.
/// </summary>
internal void ReportNoMicrophone()
{
	MessageBox.Show(null,
		LocalizationManager.GetString("EditTab.Toolbox.TalkingBook.NoMic", "This computer appears to have no sound recording device available. You will need one to record audio for a talking book."),
		LocalizationManager.GetString("EditTab.Toolbox.TalkingBook.NoInput", "No input device"));
}
/// <summary>
/// API handler that selects the recording device named in the POST body.
/// Fails the request for any HTTP method other than POST, or when no device
/// with the given name exists.
/// </summary>
public void HandleCurrentRecordingDevice(ApiRequest request)
{
	if (request.HttpMethod != HttpMethods.Post)
	{
		request.Failed("Only Post is currently supported");
		return;
	}
	var name = request.RequiredPostString();
	if (!SetRecordingDevice(name))
	{
		request.Failed("Could not find the device named " + name);
		return;
	}
	request.PostSucceeded();
}
/// <summary>
/// Makes the device whose ProductName matches <paramref name="micName"/> the current
/// recording device. Returns false when no such device is present.
/// </summary>
private bool SetRecordingDevice(string micName)
{
	var match = RecordingDevices.FirstOrDefault(device => device.ProductName == micName);
	if (match == null)
		return false;
	RecordingDevice = match;
	return true;
}
/// <summary>
/// API handler: replies "exists" when either the recordable or the publishable audio
/// file for the requested segment is on disk, otherwise "not found".
/// </summary>
private void HandleCheckForSegment(ApiRequest request)
{
	var segmentId = request.RequiredParam("id");
	// Make sure any in-progress recording has been flushed to disk before testing file existence.
	WaitForRecordingToComplete();
	var found = RobustFile.Exists(GetPathToRecordableAudioForSegment(segmentId))
		|| RobustFile.Exists(GetPathToPublishableAudioForSegment(segmentId));
	request.ReplyWithText(found ? "exists" : "not found");
}
/// <summary>
/// Returns the content of the requested .mp3 file.
/// </summary>
/// <param name="request"></param>
private void HandleAudioFileRequest(ApiRequest request)
{
	const string Api_Prefix = "bloom/";
	if (request.HttpMethod == HttpMethods.Get)
	{
		// RequiredParam() decodes the url parameters, so we don't need to do any UrlPathString decoding here.
		var idWithPrefix = request.RequiredParam("id");
		// Ordinal search: the prefix is a fixed machine string, not user-visible text,
		// so a culture-sensitive IndexOf would be both slower and potentially wrong.
		var bloomIndex = idWithPrefix.IndexOf(Api_Prefix, StringComparison.Ordinal);
		// BUGFIX: IndexOf returns -1 when the prefix is absent; previously that made the
		// Substring below silently chop characters off the front of the id. Fall back to
		// using the whole id in that case.
		var id = bloomIndex < 0 ? idWithPrefix : idWithPrefix.Substring(bloomIndex + Api_Prefix.Length);
		var segmentId = Path.GetFileNameWithoutExtension(id);
		// Make sure any in-progress save has been flushed to disk before we look for the file.
		WaitForRecordingToComplete();
		// return the audio file contents
		var mp3File = GetPathToPublishableAudioForSegment(segmentId);
		if (RobustFile.Exists(mp3File))
		{
			request.ReplyWithAudioFileContents(mp3File);
			return;
		}
		request.Failed("Somehow we don't have the .mp3 file.");
	}
	else
		request.Failed("Only Get is currently supported");
}
/// <summary>
/// Delete a recording segment, as requested by the Clear button in the talking book tool.
/// The corresponding mp3 should also be deleted.
/// </summary>
private void HandleDeleteSegment(ApiRequest request)
{
	var segmentId = request.RequiredParam("id");
	var recordablePath = GetPathToRecordableAudioForSegment(segmentId);
	var publishablePath = GetPathToPublishableAudioForSegment(segmentId);
	// Wait for any in-progress save to (potentially) flush to disk before deleting.
	WaitForRecordingToComplete();
	var allDeleted = true;
	// Delete whichever of the two representations are present; report any failure.
	foreach (var path in new[] { recordablePath, publishablePath })
	{
		if (RobustFile.Exists(path))
			allDeleted &= DeleteFileReportingAnyProblem(path);
	}
	if (allDeleted)
		request.PostSucceeded();
	else
		request.Failed("could not delete at least one file");
}
/// <summary>
/// Deletes the given file, notifying the user (and returning false) if an IO problem
/// prevents the deletion.
/// </summary>
private static bool DeleteFileReportingAnyProblem(string path)
{
	try
	{
		RobustFile.Delete(path);
		return true;
	}
	catch (IOException e)
	{
		var template = LocalizationManager.GetString("Errors.ProblemDeletingFile",
			"Bloom had a problem deleting this file: {0}");
		ErrorReport.NotifyUserOfProblem(e, string.Format(template, path) + Environment.NewLine + e.Message);
		return false;
	}
}
/// <summary>
/// Waits (if necessary) for any recordings to complete (regardless of where it is trying to save the recording)
/// </summary>
private void WaitForRecordingToComplete()
{
	// This will block if we ran HandleEndRecord, but haven't finished saving;
	// otherwise the event is signaled and we return immediately.
	_completingRecording.WaitOne();
}
/// <summary>
/// API handler: copies the publishable audio for one segment id to another segment id,
/// used when the user copies/pastes recorded text.
/// </summary>
private void HandleCopyAudioFile(ApiRequest request)
{
	var oldId = request.RequiredParam("oldId");
	var newId = request.RequiredParam("newId");
	var sourcePath = GetPathToPublishableAudioForSegment(oldId);
	var destinationPath = GetPathToPublishableAudioForSegment(newId);
	if (RobustFile.Exists(sourcePath))
		RobustFile.Copy(sourcePath, destinationPath);
	// If the old file doesn't exist, it's probably because one hasn't been recorded before the user decided
	// to copy and paste some text. See https://issues.bloomlibrary.org/youtrack/issue/BL-10291. Setting a
	// new id in the copied element without having actual audio behind it is perfectly okay.
	request.PostSucceeded();
}
// Palaso component to do the actual recording.
/// <summary>
/// Lazily-created Palaso AudioRecorder. Creation must happen on the UI thread of the
/// main Shell form (so level monitoring can run there), hence the Invoke dance below.
/// May return null if no suitable form can be found.
/// </summary>
private AudioRecorder Recorder
{
	get
	{
		// We postpone actually creating a recorder until something uses audio.
		// Typically it is created when the talking book tool requests AudioDevicesJson
		// to update the icon. At that point we start really sending volume requests.
		if (_recorder == null)
		{
			var formToInvokeOn = Application.OpenForms.Cast<Form>().FirstOrDefault(f => f is Shell);
			if (formToInvokeOn == null)
			{
				NonFatalProblem.Report(ModalIf.All, PassiveIf.All, "Bloom could not find a form on which to start the level monitoring code. Please restart Bloom.");
				return null;
			}
			// Marshal creation onto the Shell's UI thread if we aren't already on it.
			if(formToInvokeOn.InvokeRequired)
			{
				formToInvokeOn.Invoke((Action)(CreateRecorder));
			}
			else
			{
				CreateRecorder();
			}
		}
		return _recorder;
	}
}
/// <summary>
/// Creates the AudioRecorder, wires up peak-level monitoring, and (once only)
/// registers cleanup on application exit. Must run on the UI thread (see Recorder).
/// </summary>
private void CreateRecorder()
{
	_collectionAudioTrimEndMilliseconds =
		_bookSelection.CurrentSelection.CollectionSettings.AudioRecordingTrimEndMilliseconds;
	_recorder = new AudioRecorder(1);
	_recorder.PeakLevelChanged += ((s, e) => SetPeakLevel(e));
	// NOTE(review): original comment warns we could get here recursively if _recorder
	// isn't set by this point — BeginMonitoring may trigger code paths that read Recorder.
	BeginMonitoring();
	if (_exitHookSet)
		return;
	// We want to do this only once.
	Application.ApplicationExit += OnApplicationExit;
	_exitHookSet = true;
}
/// <summary>
/// Application-exit hook: disposes the recorder (if one was created), swallowing any
/// failure so shutdown cannot crash over audio cleanup.
/// </summary>
private void OnApplicationExit(object sender, EventArgs args)
{
	var recorder = _recorder;
	if (recorder == null)
		return;
	// Clear the field first so nothing else tries to use the recorder while it is disposed.
	_recorder = null;
	try
	{
		recorder.Dispose();
	}
	catch (Exception)
	{
		// Not sure how this can fail, but we don't need to crash if
		// something goes wrong trying to free the audio object.
		Debug.Fail("Something went wrong disposing of AudioRecorder");
	}
}
/// <summary>
/// Standard IDisposable entry point: runs managed cleanup and suppresses finalization.
/// </summary>
public virtual void Dispose()
{
	Dispose(true);
	GC.SuppressFinalize(this);
}
/// <summary>
/// Core of the dispose pattern. When <paramref name="disposing"/> is true, releases the
/// recorder and unhooks the ApplicationExit handler; idempotent via _disposed.
/// </summary>
protected virtual void Dispose(bool disposing)
{
	if (!_disposed)
	{
		if (disposing)
		{
			// dispose-only, i.e. non-finalizable logic
			if (_recorder != null)
			{
				_recorder.Dispose();
				_recorder = null;
				// Unsubscribe so the exit hook doesn't hold this instance alive.
				Application.ApplicationExit -= OnApplicationExit;
			}
		}
		// shared (dispose and finalizable) cleanup logic
		_disposed = true;
	}
}
// Finalizer used only as a debug aid: reports (in alpha builds) that an instance was
// never disposed. It performs no cleanup itself.
~AudioRecording()
{
	if (!_disposed)
	{
		NonFatalProblem.Report(ModalIf.Alpha,PassiveIf.Alpha,"AudioRecording was not disposed");
	}
}
}
}
| |
// ===========================================================
// Copyright (c) 2014-2015, Enrico Da Ros/kendar.org
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// ===========================================================
using System.Reflection;
using System.Threading;
using CoroutinesLib.Shared.Enumerators;
using CoroutinesLib.Shared.Enums;
using CoroutinesLib.Shared.Exceptions;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
namespace CoroutinesLib.Shared
{
/// <summary>
/// Base class for the values a coroutine yields back to its scheduler. Provides the
/// factory helpers (Wait/Return/YieldReturn/YieldBreak), fluent builders for running
/// nested coroutines, enumerables and tasks, and message-bus integration.
/// </summary>
public abstract class CoroutineResult : ICoroutineResult
{
	// Locate CoroutinesLib.RunnerFactory.Create by reflection so this shared assembly
	// does not need a compile-time reference to the runner assembly. _createMethod
	// stays null if that assembly is not loadable.
	static CoroutineResult()
	{
		var type = Type.GetType("CoroutinesLib.RunnerFactory,CoroutinesLib");
		if (type != null)
		{
			_createMethod = type.GetMethod("Create", BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static);
		}
	}
	/// <summary>
	/// Starts <paramref name="toRun"/> on a coroutines manager and returns a Task that
	/// completes when the coroutine reaches a NotRunning status (polling every 50ms).
	/// Throws (inside the task) if the coroutine reported an error.
	/// NOTE(review): if the runner assembly was not found, _createMethod is null here
	/// and this throws NullReferenceException — confirm that is acceptable.
	/// </summary>
	public static Task WaitForCoroutine(ICoroutineThread toRun)
	{
		Exception problem = null;
		var onError = new Action<Exception>((ex) =>
		{
			problem = ex;
		});
		var cmanager = (ICoroutinesManager)_createMethod.Invoke(null, new object[] { });
		cmanager.StartCoroutine(toRun, onError);
		var waitSlim = new ManualResetEventSlim(false);
		var task = new Task(() =>
		{
			// First wait until the coroutine has at least started (status below NotRunning)...
			while ((long)RunningStatus.NotRunning > (long)toRun.Status)
			{
				waitSlim.Wait(50);
			}
			// ...then wait until it has fully stopped.
			while (!toRun.Status.Is(RunningStatus.NotRunning))
			{
				waitSlim.Wait(50);
			}
			if (problem != null)
			{
				throw new Exception("Error running subtask", problem);
			}
		}, TaskCreationOptions.AttachedToParent);
		task.Start();
		return task;
	}
	// Bus used by SendMessage/PostMessage; must be assigned by the host before use.
	public static IMessageBus MessageBus { get; set; }
	// Shared singleton for the (stateless) Wait result; exposed via the Wait property.
	public static CoroutineResult _wait = new ConcreteCoroutineResult(ResultType.Wait);
	private static MethodInfo _createMethod;
	// True when this result asks the scheduler to keep waiting.
	public bool ShouldWait
	{
		get { return ResultType == ResultType.Wait; }
	}
	/// <summary>
	/// Wraps an enumerable of results in a named enumerator the scheduler can drive.
	/// </summary>
	public static ICoroutineResult Enumerable(IEnumerable<ICoroutineResult> enumerable, string name)
	{
		var result = new CoroutineResultEnumerator(string.Format("Enumerator for '{0}'.", name), enumerable.GetEnumerator());
		return result;
	}
	/// <summary>
	/// Tells the caller to wait for further result
	/// </summary>
	public static CoroutineResult Wait { get { return _wait; } }
	/// <summary>
	/// Create a "real" return. No current function code will be executed after this point.
	/// </summary>
	/// <param name="value"></param>
	/// <returns></returns>
	public static CoroutineResult Return(object value)
	{
		return new ConcreteCoroutineResult(ResultType.Return, value);
	}
	/// <summary>
	/// Create a "yield return" like return value.
	/// </summary>
	/// <param name="value"></param>
	/// <returns></returns>
	public static CoroutineResult YieldReturn(object value)
	{
		return new ConcreteCoroutineResult(ResultType.YieldReturn, value);
	}
	/// <summary>
	/// Like "yield break" stops the execution of the function.
	/// </summary>
	/// <returns></returns>
	public static CoroutineResult YieldBreak()
	{
		return new ConcreteCoroutineResult(ResultType.YieldBreak);
	}
	/// <summary>
	/// The type of result of the coroutine
	/// </summary>
	public ResultType ResultType { get; protected set; }
	/// <summary>
	/// The result content
	/// </summary>
	public object Result { get; protected set; }
	// Polls until the message completes (yielding its Response when non-null) or the
	// message's timeout passes, in which case a CoroutineTimeoutException is thrown.
	// NOTE(review): the exception is given (now - timeout), i.e. how far past the
	// deadline we got, not the configured timeout length — confirm intended.
	private static IEnumerable<ICoroutineResult> WaitForMessageSent(IMessage message)
	{
		var timeout = message.Timeout;
		var now = DateTime.UtcNow;
		while (now < timeout)
		{
			if (message.IsCompleted.Value)
			{
				if (message.Response != null)
				{
					yield return Return(message.Response);
				}
			}
			yield return Wait;
			now = DateTime.UtcNow;
		}
		throw new CoroutineTimeoutException((long)(now - timeout).TotalMilliseconds);
	}
	/// <summary>
	/// Builds a fluent result that sends <paramref name="message"/> on the MessageBus
	/// and lets the caller attach a response handler.
	/// </summary>
	public static IOnResponseMessage SendMessage(IMessage message)
	{
		var result = new FluentResultBuilder
		{
			Type = FluentResultType.Message,
			Message = message,
			MessageBus = MessageBus
		};
		return result;
	}
	/// <summary>
	/// Fire-and-forget post of <paramref name="message"/> on the MessageBus; the caller
	/// is told to wait one cycle.
	/// </summary>
	public static ICoroutineResult PostMessage(IMessage message)
	{
		MessageBus.Post(message);
		return Wait;
	}
	/// <summary>
	/// Runs a sub-coroutine (expressed as an enumerable) and captures its result.
	/// </summary>
	public static IOnFunctionResult RunAndGetResult(IEnumerable<ICoroutineResult> waitForItem, string name = null)
	{
		var result = new FluentResultBuilder
		{
			Type = FluentResultType.FunctionWithResult,
			Enumerator = waitForItem.GetEnumerator(),
			InstanceName = name
		};
		return result;
	}
	/// <summary>
	/// Runs a sub-coroutine (expressed as an enumerable), discarding any result.
	/// </summary>
	public static IOnComplete Run(IEnumerable<ICoroutineResult> waitForItem, string name = null)
	{
		var result = new FluentResultBuilder
		{
			Type = FluentResultType.FunctionWithoutResult,
			Enumerator = waitForItem.GetEnumerator(),
			InstanceName = name
		};
		return result;
	}
	/// <summary>
	/// Runs a sub-coroutine and invokes a callback for each item it yields.
	/// </summary>
	public static IForEachItem ForEachItem(IEnumerable<ICoroutineResult> waitForItem, string name = null)
	{
		var result = new FluentResultBuilder
		{
			Type = FluentResultType.ForeachFunction,
			Enumerator = waitForItem.GetEnumerator(),
			InstanceName = name
		};
		return result;
	}
	/// <summary>
	/// Wraps a Task so the scheduler can wait for it like any other coroutine.
	/// </summary>
	public static IOnComplete RunTask(Task taskToRun, string name = null)
	{
		var result = new FluentResultBuilder
		{
			Type = FluentResultType.FunctionWithoutResult,
			Task = taskToRun,
			Enumerator = RunTaskFunction(taskToRun).GetEnumerator(),
			InstanceName = name
		};
		return result;
	}
	// Drives a Task from a coroutine: starts it if it has not been started (best-effort;
	// Start throws for tasks not created as "cold"), yields Wait until it finishes, and
	// rethrows any task failure as a CoroutineTaskException.
	private static IEnumerable<ICoroutineResult> RunTaskFunction(Task task)
	{
		if (!task.IsCompleted && !task.IsFaulted && !task.IsCanceled &&
			task.Status != TaskStatus.Running &&
			//task.Status != TaskStatus.WaitingForActivation &&
			task.Status != TaskStatus.WaitingForChildrenToComplete &&
			//task.Status != TaskStatus.WaitingToRun &&
			task.Status != TaskStatus.RanToCompletion)
		{
			// ReSharper disable EmptyGeneralCatchClause
			try
			{
				task.Start();
			}
			catch (Exception)
			{
			}
			// ReSharper restore EmptyGeneralCatchClause
		}
		while (!task.IsCompleted && !task.IsFaulted && !task.IsCanceled)
		{
			yield return Wait;
		}
		if (task.IsFaulted)
		{
			throw new CoroutineTaskException("Error executing task", task.Exception);
		}
	}
	/// <summary>
	/// Wraps an ICoroutineThread so it can be awaited as a sub-coroutine.
	/// </summary>
	public static IOnComplete RunCoroutine(ICoroutineThread coroutine, string name = null)
	{
		var result = new FluentResultBuilder
		{
			Type = FluentResultType.CoroutineFunction,
			Coroutine = coroutine,
			InstanceName = name
		};
		return result;
	}
}
}
| |
#region license
// Copyright (c) 2009 Rodrigo B. de Oliveira (rbo@acm.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Rodrigo B. de Oliveira nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
//
// DO NOT EDIT THIS FILE!
//
// This file was generated automatically by astgen.boo.
//
namespace Boo.Lang.Compiler.Ast
{
using System.Collections;
using System.Runtime.Serialization;
// NOTE: this class is machine-generated by astgen.boo (see the file header: "DO NOT
// EDIT THIS FILE!"). Only comments have been added here; regenerate rather than hand-edit.
// AST node for a spliced parameter declaration (a parameter whose declaration and/or
// name is produced by a meta-programming splice expression).
[System.Serializable]
public partial class SpliceParameterDeclaration : ParameterDeclaration
{
	// The parameter declaration being spliced in.
	protected ParameterDeclaration _parameterDeclaration;
	// Expression producing the parameter's name, when spliced.
	protected Expression _nameExpression;

	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	new public SpliceParameterDeclaration CloneNode()
	{
		return (SpliceParameterDeclaration)Clone();
	}

	/// <summary>
	/// <see cref="Node.CleanClone"/>
	/// </summary>
	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	new public SpliceParameterDeclaration CleanClone()
	{
		return (SpliceParameterDeclaration)base.CleanClone();
	}

	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	override public NodeType NodeType
	{
		get { return NodeType.SpliceParameterDeclaration; }
	}

	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	override public void Accept(IAstVisitor visitor)
	{
		visitor.OnSpliceParameterDeclaration(this);
	}

	// Structural equality: compares this node's own fields plus the inherited
	// name/type/modifiers/attributes.
	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	override public bool Matches(Node node)
	{
		if (node == null) return false;
		if (NodeType != node.NodeType) return false;
		var other = ( SpliceParameterDeclaration)node;
		if (_name != other._name) return NoMatch("SpliceParameterDeclaration._name");
		if (!Node.Matches(_type, other._type)) return NoMatch("SpliceParameterDeclaration._type");
		if (_modifiers != other._modifiers) return NoMatch("SpliceParameterDeclaration._modifiers");
		if (!Node.AllMatch(_attributes, other._attributes)) return NoMatch("SpliceParameterDeclaration._attributes");
		if (!Node.Matches(_parameterDeclaration, other._parameterDeclaration)) return NoMatch("SpliceParameterDeclaration._parameterDeclaration");
		if (!Node.Matches(_nameExpression, other._nameExpression)) return NoMatch("SpliceParameterDeclaration._nameExpression");
		return true;
	}

	// Replaces a direct child node; returns true if a replacement happened.
	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	override public bool Replace(Node existing, Node newNode)
	{
		if (base.Replace(existing, newNode))
		{
			return true;
		}
		if (_type == existing)
		{
			this.Type = (TypeReference)newNode;
			return true;
		}
		if (_attributes != null)
		{
			Attribute item = existing as Attribute;
			if (null != item)
			{
				Attribute newItem = (Attribute)newNode;
				if (_attributes.Replace(item, newItem))
				{
					return true;
				}
			}
		}
		if (_parameterDeclaration == existing)
		{
			this.ParameterDeclaration = (ParameterDeclaration)newNode;
			return true;
		}
		if (_nameExpression == existing)
		{
			this.NameExpression = (Expression)newNode;
			return true;
		}
		return false;
	}

	// Deep clone; GetUninitializedObject bypasses constructors, then every field is
	// copied/cloned explicitly and children are re-parented to the clone.
	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	override public object Clone()
	{
		SpliceParameterDeclaration clone = (SpliceParameterDeclaration)FormatterServices.GetUninitializedObject(typeof(SpliceParameterDeclaration));
		clone._lexicalInfo = _lexicalInfo;
		clone._endSourceLocation = _endSourceLocation;
		clone._documentation = _documentation;
		clone._entity = _entity;
		if (_annotations != null) clone._annotations = (Hashtable)_annotations.Clone();
		clone._name = _name;
		if (null != _type)
		{
			clone._type = _type.Clone() as TypeReference;
			clone._type.InitializeParent(clone);
		}
		clone._modifiers = _modifiers;
		if (null != _attributes)
		{
			clone._attributes = _attributes.Clone() as AttributeCollection;
			clone._attributes.InitializeParent(clone);
		}
		if (null != _parameterDeclaration)
		{
			clone._parameterDeclaration = _parameterDeclaration.Clone() as ParameterDeclaration;
			clone._parameterDeclaration.InitializeParent(clone);
		}
		if (null != _nameExpression)
		{
			clone._nameExpression = _nameExpression.Clone() as Expression;
			clone._nameExpression.InitializeParent(clone);
		}
		return clone;
	}

	// Drops cached type-system state on this node and all children.
	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	override internal void ClearTypeSystemBindings()
	{
		_annotations = null;
		_entity = null;
		if (null != _type)
		{
			_type.ClearTypeSystemBindings();
		}
		if (null != _attributes)
		{
			_attributes.ClearTypeSystemBindings();
		}
		if (null != _parameterDeclaration)
		{
			_parameterDeclaration.ClearTypeSystemBindings();
		}
		if (null != _nameExpression)
		{
			_nameExpression.ClearTypeSystemBindings();
		}
	}

	// Setters re-parent the incoming child node to this node.
	[System.Xml.Serialization.XmlElement]
	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	public ParameterDeclaration ParameterDeclaration
	{
		get { return _parameterDeclaration; }
		set
		{
			if (_parameterDeclaration != value)
			{
				_parameterDeclaration = value;
				if (null != _parameterDeclaration)
				{
					_parameterDeclaration.InitializeParent(this);
				}
			}
		}
	}

	[System.Xml.Serialization.XmlElement]
	[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
	public Expression NameExpression
	{
		get { return _nameExpression; }
		set
		{
			if (_nameExpression != value)
			{
				_nameExpression = value;
				if (null != _nameExpression)
				{
					_nameExpression.InitializeParent(this);
				}
			}
		}
	}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
//
// All P/invokes used by System.Private.Interop and MCG generated code goes here.
//
// !!IMPORTANT!!
//
// Do not rely on MCG to generate marshalling code for these p/invokes as MCG might not see them at all
// due to not seeing dependency to those calls (before the MCG generated code is generated). Instead,
// always manually marshal the arguments
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace System.Runtime.InteropServices
{
/// <summary>
/// Manually-marshalled P/Invokes used by System.Private.Interop and MCG-generated code.
/// When CORECLR is defined, COM allocation/marshalling entry points are emulated on top
/// of Marshal (or stubbed with PlatformNotSupportedException); otherwise they are real
/// P/Invokes into ole32/OleAut32 (or the api-ms-win-core-com API set).
/// </summary>
[CLSCompliant(false)]
public static partial class ExternalInterop
{
	// Native library names, varying by target API surface.
	private static partial class Libraries
	{
#if TARGET_CORE_API_SET
		internal const string CORE_COM = "api-ms-win-core-com-l1-1-0.dll";
#else
		internal const string CORE_COM = "ole32.dll";
#endif
		// @TODO: What is the matching dll in CoreSys?
		// @TODO: Replace the below by the correspondent api-ms-win-core-...-0.dll
		internal const string CORE_COM_AUT = "OleAut32.dll";
	}
#if CORECLR
	// On CoreCLR, emulate CoTaskMem* on top of the HGlobal allocator.
	public static unsafe void* CoTaskMemAlloc(IntPtr size)
	{
		return Marshal.AllocHGlobal(size).ToPointer();
	}
	public static unsafe void CoTaskMemFree(void* pv)
	{
		Marshal.FreeHGlobal(new IntPtr(pv));
	}
	public static unsafe void SafeCoTaskMemFree(void* pv)
	{
		// Even though CoTaskMemFree is a no-op for NULLs, skipping the interop call entirely is faster
		if (pv != null)
			CoTaskMemFree(pv);
	}
	// Allocates a BSTR of dwSize characters copied from pStrIn.
	public static unsafe IntPtr SysAllocStringLen(char* pStrIn, UInt32 dwSize)
	{
		string srcString = new string(pStrIn, 0, checked((int)dwSize));
		return Marshal.StringToBSTR(srcString);
	}
	public static unsafe void SysFreeString(void* pBSTR)
	{
		SysFreeString(new IntPtr(pBSTR));
	}
	public static unsafe void SysFreeString(IntPtr pBSTR)
	{
		Marshal.FreeBSTR(pBSTR);
	}
	static internal void VariantClear(IntPtr pObject)
	{
		//Nop
	}
	// COM stream marshalling is not available on CoreCLR; these stubs throw.
	static internal unsafe int CoGetMarshalSizeMax(out ulong pulSize, ref Guid iid, IntPtr pUnk, Interop.COM.MSHCTX dwDestContext, IntPtr pvDestContext, Interop.COM.MSHLFLAGS mshlflags)
	{
		throw new PlatformNotSupportedException("CoGetMarshalSizeMax");
	}
	static internal unsafe int CoGetObjectContext(ref Guid iid, out IntPtr ppv)
	{
		throw new PlatformNotSupportedException("CoGetObjectContext");
	}
	static internal unsafe int CoMarshalInterface(IntPtr pStream, ref Guid iid, IntPtr pUnk, Interop.COM.MSHCTX dwDestContext, IntPtr pvDestContext, Interop.COM.MSHLFLAGS mshlflags)
	{
		throw new PlatformNotSupportedException("CoMarshalInterface");
	}
	static internal unsafe int CoUnmarshalInterface(IntPtr pStream, ref Guid iid, out IntPtr ppv)
	{
		throw new PlatformNotSupportedException("CoUnmarshalInterface");
	}
	static internal int CoReleaseMarshalData(IntPtr pStream)
	{
		// Nop in CoreCLR
		return 0;
	}
#else
	// Real P/Invokes into the COM runtime; pointer-based overloads are wrapped below
	// with ref/out-based convenience methods that pin their arguments with `fixed`.
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	public static extern unsafe void* CoTaskMemAlloc(IntPtr size);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	public extern static unsafe void CoTaskMemFree(void* pv);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	public static extern unsafe int CoGetContextToken(IntPtr* ppToken);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	static internal extern IntPtr CoTaskMemRealloc(IntPtr pv, IntPtr size);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	static internal unsafe extern int CoGetObjectContext(Guid* iid, void* ppv);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	static internal unsafe extern int CoCreateInstanceFromApp(
		Guid* clsid,
		IntPtr pUnkOuter,
		int context,
		IntPtr reserved,
		int count,
		IntPtr results
	);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	static internal unsafe extern int CoCreateFreeThreadedMarshaler(void* pOuter, void** ppunkMarshal);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	private extern unsafe static int CoMarshalInterface(IntPtr pStream, Guid* iid, IntPtr pUnk, Interop.COM.MSHCTX dwDestContext, IntPtr pvDestContext, Interop.COM.MSHLFLAGS mshlflags);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	private static unsafe extern int CoUnmarshalInterface(IntPtr pStream, Guid* iid, void** ppv);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	private static unsafe extern int CoGetMarshalSizeMax(ulong* pulSize, Guid* iid, IntPtr pUnk, Interop.COM.MSHCTX dwDestContext, IntPtr pvDestContext, Interop.COM.MSHLFLAGS mshlflags);
	[DllImport(Libraries.CORE_COM)]
	[McgGeneratedNativeCallCodeAttribute]
	[MethodImplAttribute(MethodImplOptions.NoInlining)]
	static internal extern int CoReleaseMarshalData(IntPtr pStream);
	[DllImport(Libraries.CORE_COM_AUT)]
	[McgGeneratedNativeCallCodeAttribute]
	[MethodImplAttribute(MethodImplOptions.NoInlining)]
	public static extern unsafe void SysFreeString(void* pBSTR);
	public static unsafe void SysFreeString(IntPtr pBstr)
	{
		SysFreeString((void*)pBstr);
	}
	[DllImport(Libraries.CORE_COM_AUT)]
	[McgGeneratedNativeCallCodeAttribute]
	[MethodImplAttribute(MethodImplOptions.NoInlining)]
	public static extern unsafe uint SysStringLen(void* pBSTR);
	public static unsafe uint SysStringLen(IntPtr pBSTR)
	{
		return SysStringLen((void*)pBSTR);
	}
	[DllImport(Libraries.CORE_COM_AUT)]
	[McgGeneratedNativeCallCodeAttribute]
	[MethodImplAttribute(MethodImplOptions.NoInlining)]
	public static extern unsafe IntPtr SysAllocString(IntPtr pStrIn);
	[DllImport(Libraries.CORE_COM_AUT)]
	[McgGeneratedNativeCallCodeAttribute]
	[MethodImplAttribute(MethodImplOptions.NoInlining)]
	public static extern unsafe char* SysAllocStringLen(char* pStrIn, uint len);
	[DllImport(Libraries.CORE_COM_AUT)]
	[McgGeneratedNativeCallCodeAttribute]
	static internal extern void VariantClear(IntPtr pObject);
	static internal unsafe int CoGetObjectContext(ref Guid iid, out IntPtr ppv)
	{
		fixed (void* unsafe_ppv = &ppv)
		{
			fixed (Guid* unsafe_iid = &iid)
			{
				return CoGetObjectContext(unsafe_iid, (void**)unsafe_ppv);
			}
		}
	}
	/// <summary>
	/// Marshal IUnknown * into IStream*
	/// </summary>
	/// <returns>HResult</returns>
	static internal unsafe int CoMarshalInterface(IntPtr pStream, ref Guid iid, IntPtr pUnk, Interop.COM.MSHCTX dwDestContext, IntPtr pvDestContext, Interop.COM.MSHLFLAGS mshlflags)
	{
		fixed (Guid* unsafe_iid = &iid)
		{
			return CoMarshalInterface(pStream, unsafe_iid, pUnk, dwDestContext, pvDestContext, mshlflags);
		}
	}
	/// <summary>
	/// Marshal IStream* into IUnknown*
	/// </summary>
	/// <returns>HResult</returns>
	static internal unsafe int CoUnmarshalInterface(IntPtr pStream, ref Guid iid, out IntPtr ppv)
	{
		fixed (Guid* unsafe_iid = &iid)
		{
			fixed (void* unsafe_ppv = &ppv)
			{
				return CoUnmarshalInterface(pStream, unsafe_iid, (void**)unsafe_ppv);
			}
		}
	}
	/// <summary>
	/// Returns an upper bound on the number of bytes needed to marshal the specified interface pointer to the specified object.
	/// </summary>
	/// <returns>HResult</returns>
	static internal unsafe int CoGetMarshalSizeMax(out ulong pulSize, ref Guid iid, IntPtr pUnk, Interop.COM.MSHCTX dwDestContext, IntPtr pvDestContext, Interop.COM.MSHLFLAGS mshlflags)
	{
		fixed (ulong* unsafe_pulSize = &pulSize)
		{
			fixed (Guid* unsafe_iid = &iid)
			{
				return CoGetMarshalSizeMax(unsafe_pulSize, unsafe_iid, pUnk, dwDestContext, pvDestContext, mshlflags);
			}
		}
	}
	public static unsafe int CoGetContextToken(out IntPtr ppToken)
	{
		ppToken = IntPtr.Zero;
		fixed (IntPtr* unsafePpToken = &ppToken)
		{
			return CoGetContextToken(unsafePpToken);
		}
	}
	public static unsafe void SafeCoTaskMemFree(void* pv)
	{
		// Even though CoTaskMemFree is a no-op for NULLs, skipping the interop call entirely is faster
		if (pv != null)
			CoTaskMemFree(pv);
	}
#endif //CORECLR
}
}
| |
using System;
using System.ComponentModel;
using System.Drawing;
using System.Runtime.InteropServices;
using LanExchange.SDK;
namespace LanExchange.Plugin.Windows.Utils
{
/// <summary>
/// Wraps the Windows Shell system image list (the shell's icon cache),
/// providing icon lookup by file name, icon extraction and drawing.
/// Uses the IImageList COM interface on Windows XP and above, falling back
/// to a classic HIMAGELIST handle obtained via SHGetFileInfo otherwise.
/// </summary>
[Localizable(false)]
public sealed class SysImageList : IDisposable
{
#region UnmanagedCode
// Fixed buffer sizes for the string fields of SHFILEINFO.
private const int MAX_PATH = 260;
private const int MAX_TYPE = 80;
[DllImport(ExternDll.Shell32, CharSet = CharSet.Unicode)]
private static extern IntPtr SHGetFileInfo (
string pszPath,
int dwFileAttributes,
ref SHFILEINFO psfi,
uint cbFileInfo,
uint uFlags);
//[DllImport("user32.dll")]
//private static extern int DestroyIcon(IntPtr hIcon);
private const int FILE_ATTRIBUTE_NORMAL = 0x80;
//private const int FILE_ATTRIBUTE_DIRECTORY = 0x10;
//private const int FORMAT_MESSAGE_ALLOCATE_BUFFER = 0x100;
//private const int FORMAT_MESSAGE_ARGUMENT_ARRAY = 0x2000;
//private const int FORMAT_MESSAGE_FROM_HMODULE = 0x800;
//private const int FORMAT_MESSAGE_FROM_STRING = 0x400;
//private const int FORMAT_MESSAGE_FROM_SYSTEM = 0x1000;
//private const int FORMAT_MESSAGE_IGNORE_INSERTS = 0x200;
//private const int FORMAT_MESSAGE_MAX_WIDTH_MASK = 0xFF;
//[DllImport("kernel32")]
//private extern static int FormatMessage (
// int dwFlags,
// IntPtr lpSource,
// int dwMessageId,
// int dwLanguageId,
// string lpBuffer,
// uint nSize,
// int argumentsLong);
//[DllImport("kernel32")]
//private extern static int GetLastError();
[DllImport(ExternDll.Comctl32)]
private extern static int ImageList_Draw(
IntPtr hIml,
int i,
IntPtr hdcDst,
int x,
int y,
int fStyle);
[DllImport(ExternDll.Comctl32)]
private extern static int ImageList_DrawIndirect(
ref IMAGELISTDRAWPARAMS pimldp);
[DllImport(ExternDll.Comctl32)]
private extern static int ImageList_GetIconSize(
IntPtr himl,
ref int cx,
ref int cy);
[DllImport(ExternDll.Comctl32)]
private extern static IntPtr ImageList_GetIcon(
IntPtr himl,
int i,
int flags);
/// <summary>
/// SHGetImageList is not exported correctly in XP. See KB316931
/// http://support.microsoft.com/default.aspx?scid=kb;EN-US;Q316931
/// Apparently (and hopefully) ordinal 727 isn't going to change.
/// </summary>
[DllImport(ExternDll.Shell32, EntryPoint = "#727")]
private extern static int SHGetImageList(
int iImageList,
ref Guid riid,
ref IImageList ppv
);
/// <summary>
/// Same shell32 ordinal as SHGetImageList, but marshals the result as a
/// raw handle; used by Create() to populate the Handle property.
/// </summary>
[DllImport(ExternDll.Shell32, EntryPoint = "#727")]
private extern static int SHGetImageListHandle(
int iImageList,
ref Guid riid,
ref IntPtr handle
);
#endregion
#region Private Enumerations
// Flags for SHGetFileInfo's uFlags parameter; only the values actually
// used by this class are uncommented.
[Flags]
private enum SHGetFileInfoConstants
{
//SHGFI_ICON = 0x100, // get icon
//SHGFI_DISPLAYNAME = 0x200, // get display name
//SHGFI_TYPENAME = 0x400, // get type name
//SHGFI_ATTRIBUTES = 0x800, // get attributes
//SHGFI_ICONLOCATION = 0x1000, // get icon location
//SHGFI_EXETYPE = 0x2000, // return exe type
SHGFI_SYSICONINDEX = 0x4000, // get system icon index
//SHGFI_LINKOVERLAY = 0x8000, // put a link overlay on icon
//SHGFI_SELECTED = 0x10000, // show icon in selected state
//SHGFI_ATTR_SPECIFIED = 0x20000, // get only specified attributes
//SHGFI_LARGEICON = 0x0, // get large icon
SHGFI_SMALLICON = 0x1, // get small icon
//SHGFI_OPENICON = 0x2, // get open icon
//SHGFI_SHELLICONSIZE = 0x4, // get shell size icon
//SHGFI_PIDL = 0x8, // pszPath is a pidl
SHGFI_USEFILEATTRIBUTES = 0x10, // use passed dwFileAttribute
//SHGFI_ADDOVERLAYS = 0x000000020, // apply the appropriate overlays
//SHGFI_OVERLAYINDEX = 0x000000040 // Get the index of the overlay
}
#endregion
#region Private ImageList structures
// Win32 RECT; the fields are written only by native code.
[StructLayout(LayoutKind.Sequential)]
private struct RECT
{
readonly int left;
readonly int top;
readonly int right;
readonly int bottom;
}
// Win32 POINT; the fields are written only by native code.
[StructLayout(LayoutKind.Sequential)]
private struct POINT
{
readonly int x;
readonly int y;
}
// Mirrors the native IMAGELISTDRAWPARAMS structure consumed by
// ImageList_DrawIndirect and IImageList.Draw. Field order and size must
// match the native layout exactly.
[StructLayout(LayoutKind.Sequential)]
private struct IMAGELISTDRAWPARAMS
{
public int cbSize;
public IntPtr himl;
public int i;
public IntPtr hdcDst;
public int x;
public int y;
public int cx;
public int cy;
private readonly int xBitmap; // x offset from the upperleft of bitmap
private readonly int yBitmap; // y offset from the upperleft of bitmap
private readonly int rgbBk;
public int rgbFg;
public int fStyle;
private readonly int dwRop;
private readonly int fState;
private readonly int Frame;
private readonly int crEffect;
}
// Mirrors the native IMAGEINFO structure (filled by IImageList.GetImageInfo).
[StructLayout(LayoutKind.Sequential)]
private struct IMAGEINFO
{
private readonly IntPtr hbmImage;
private readonly IntPtr hbmMask;
private readonly int Unused1;
private readonly int Unused2;
private readonly RECT rcImage;
}
// Mirrors the native SHFILEINFO structure; only iIcon is read by this
// class (see IconIndex), but the full layout is required for marshaling.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
private struct SHFILEINFO
{
private readonly IntPtr hIcon;
public readonly int iIcon;
private readonly int dwAttributes;
[MarshalAs(UnmanagedType.ByValTStr, SizeConst=MAX_PATH)]
private readonly string szDisplayName;
[MarshalAs(UnmanagedType.ByValTStr, SizeConst=MAX_TYPE)]
private readonly string szTypeName;
}
#endregion
#region Private ImageList COM Interop (XP)
// NOTE: for a [ComImport] IUnknown interface the member order defines the
// COM vtable slots — do not reorder or remove members.
[ComImport]
[Guid("46EB5926-582E-4017-9FDF-E8998DAA0950")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
//helpstring("Image List"),
interface IImageList
{
[PreserveSig]
int Add(
IntPtr hbmImage,
IntPtr hbmMask,
ref int pi);
[PreserveSig]
int ReplaceIcon(
int i,
IntPtr hicon,
ref int pi);
[PreserveSig]
int SetOverlayImage(
int iImage,
int iOverlay);
[PreserveSig]
int Replace(
int i,
IntPtr hbmImage,
IntPtr hbmMask);
[PreserveSig]
int AddMasked(
IntPtr hbmImage,
int crMask,
ref int pi);
[PreserveSig]
int Draw(
ref IMAGELISTDRAWPARAMS pimldp);
[PreserveSig]
int Remove(
int i);
[PreserveSig]
int GetIcon(
int i,
int flags,
ref IntPtr picon);
[PreserveSig]
int GetImageInfo(
int i,
ref IMAGEINFO pImageInfo);
[PreserveSig]
int Copy(
int iDst,
IImageList punkSrc,
int iSrc,
int uFlags);
[PreserveSig]
int Merge(
int i1,
IImageList punk2,
int i2,
int dx,
int dy,
ref Guid riid,
ref IntPtr ppv);
[PreserveSig]
int Clone(
ref Guid riid,
ref IntPtr ppv);
[PreserveSig]
int GetImageRect(
int i,
ref RECT prc);
[PreserveSig]
int GetIconSize(
ref int cx,
ref int cy);
[PreserveSig]
int SetIconSize(
int cx,
int cy);
[PreserveSig]
int GetImageCount(
ref int pi);
[PreserveSig]
int SetImageCount(
int uNewCount);
[PreserveSig]
int SetBkColor(
int clrBk,
ref int pclr);
[PreserveSig]
int GetBkColor(
ref int pclr);
[PreserveSig]
int BeginDrag(
int iTrack,
int dxHotspot,
int dyHotspot);
[PreserveSig]
int EndDrag();
[PreserveSig]
int DragEnter(
IntPtr hwndLock,
int x,
int y);
[PreserveSig]
int DragLeave(
IntPtr hwndLock);
[PreserveSig]
int DragMove(
int x,
int y);
[PreserveSig]
int SetDragCursorImage(
ref IImageList punk,
int iDrag,
int dxHotspot,
int dyHotspot);
[PreserveSig]
int DragShowNolock(
int fShow);
[PreserveSig]
int GetDragImage(
ref POINT ppt,
ref POINT pptHotspot,
ref Guid riid,
ref IntPtr ppv);
[PreserveSig]
int GetItemFlags(
int i,
ref int dwFlags);
[PreserveSig]
int GetOverlayImage(
int iOverlay,
ref int piIndex);
};
#endregion
#region Member Variables
private IntPtr hIml = IntPtr.Zero; // raw HIMAGELIST handle (also exposed via Handle)
private IImageList iImageList; // COM interface (XP+ path); null on older systems
private SysImageListSize size = SysImageListSize.SmallIcons;
private bool disposed;
#endregion
#region Implementation
#region Properties
/// <summary>
/// Gets the hImageList handle
/// </summary>
public IntPtr Handle
{
get
{
return hIml;
}
}
/// <summary>
/// Gets/sets the size of System Image List to retrieve.
/// Setting this property recreates the underlying image list.
/// </summary>
public SysImageListSize ImageListSize
{
get
{
return size;
}
set
{
size = value;
Create();
}
}
/// <summary>
/// Returns the size of the Image List Icons.
/// </summary>
public Size Size
{
get
{
int cx = 0;
int cy = 0;
// Use the COM interface when available, the raw handle otherwise.
if (iImageList == null)
{
ImageList_GetIconSize(
hIml,
ref cx,
ref cy);
}
else
{
iImageList.GetIconSize(ref cx, ref cy);
}
return new Size(cx, cy);
}
}
#endregion
#region Methods
/// <summary>
/// Returns a GDI+ copy of the icon from the ImageList
/// at the specified index.
/// </summary>
/// <param name="index">The index to get the icon for</param>
/// <returns>The specified icon, or null if it could not be retrieved</returns>
public Icon Icon(int index)
{
Icon icon;
IntPtr hIcon = IntPtr.Zero;
if (iImageList == null)
{
hIcon = ImageList_GetIcon(
hIml,
index,
(int)ImageListDrawItemConstants.ILD_TRANSPARENT);
}
else
{
iImageList.GetIcon(
index,
(int)ImageListDrawItemConstants.ILD_TRANSPARENT,
ref hIcon);
}
// hIcon stays IntPtr.Zero when retrieval fails; surface that as null.
if (hIcon != IntPtr.Zero)
icon = System.Drawing.Icon.FromHandle(hIcon);
else
icon = null;
return icon;
}
/// <summary>
/// Return the index of the icon for the specified file, always using
/// the cached version where possible.
/// </summary>
/// <param name="fileName">Filename to get icon for</param>
/// <returns>Index of the icon</returns>
public int IconIndex(string fileName)
{
return IconIndex(fileName, false);
}
/// <summary>
/// Returns the index of the icon for the specified file
/// </summary>
/// <param name="fileName">Filename to get icon for</param>
/// <param name="forceLoadFromDisk">If True, then hit the disk to get the icon,
/// otherwise only hit the disk if no cached icon is available.</param>
/// <returns>Index of the icon</returns>
public int IconIndex(
string fileName,
bool forceLoadFromDisk)
{
return IconIndex(
fileName,
forceLoadFromDisk,
ShellIconStateConstants.ShellIconStateNormal);
}
/// <summary>
/// Returns the index of the icon for the specified file
/// </summary>
/// <param name="fileName">Filename to get icon for</param>
/// <param name="forceLoadFromDisk">If True, then hit the disk to get the icon,
/// otherwise only hit the disk if no cached icon is available.</param>
/// <param name="iconState">Flags specifying the state of the icon
/// returned.</param>
/// <returns>Index of the icon, or 0 if the lookup failed</returns>
public int IconIndex(
string fileName,
bool forceLoadFromDisk,
ShellIconStateConstants iconState
)
{
var dwFlags = SHGetFileInfoConstants.SHGFI_SYSICONINDEX;
int dwAttr;
if (size == SysImageListSize.SmallIcons)
{
dwFlags |= SHGetFileInfoConstants.SHGFI_SMALLICON;
}
// We can choose whether to access the disk or not. If you don't
// hit the disk, you may get the wrong icon if the icon is
// not cached. Also only works for files.
if (!forceLoadFromDisk)
{
dwFlags |= SHGetFileInfoConstants.SHGFI_USEFILEATTRIBUTES;
dwAttr = FILE_ATTRIBUTE_NORMAL;
}
else
{
dwAttr = 0;
}
// sFileSpec can be any file. You can specify a
// file that does not exist and still get the
// icon, for example sFileSpec = "C:\PANTS.DOC"
var shfi = new SHFILEINFO();
var shfiSize = (uint)Marshal.SizeOf(shfi.GetType());
// The iconState bits are OR'ed into the flags passed to SHGetFileInfo.
IntPtr retVal = SHGetFileInfo(
fileName, dwAttr, ref shfi, shfiSize,
((uint)(dwFlags) | (uint)iconState));
if (retVal.Equals(IntPtr.Zero))
{
System.Diagnostics.Debug.Assert((!retVal.Equals(IntPtr.Zero)),"Failed to get icon index");
return 0;
}
return shfi.iIcon;
}
// NOTE(review): not read anywhere within this class — presumably set and
// consumed by callers; confirm before removing.
public string DisplayName { get; set; }
/// <summary>
/// Draws an image
/// </summary>
/// <param name="hdc">Device context to draw to</param>
/// <param name="index">Index of image to draw</param>
/// <param name="x">X Position to draw at</param>
/// <param name="y">Y Position to draw at</param>
public void DrawImage(
IntPtr hdc,
int index,
int x,
int y
)
{
DrawImage(hdc, index, x, y, ImageListDrawItemConstants.ILD_TRANSPARENT);
}
/// <summary>
/// Draws an image using the specified flags
/// </summary>
/// <param name="hdc">Device context to draw to</param>
/// <param name="index">Index of image to draw</param>
/// <param name="x">X Position to draw at</param>
/// <param name="y">Y Position to draw at</param>
/// <param name="flags">Drawing flags</param>
public void DrawImage(
IntPtr hdc,
int index,
int x,
int y,
ImageListDrawItemConstants flags
)
{
if (iImageList == null)
{
ImageList_Draw(
hIml,
index,
hdc,
x,
y,
(int)flags);
}
else
{
var pimldp = new IMAGELISTDRAWPARAMS();
pimldp.hdcDst = hdc;
pimldp.cbSize = Marshal.SizeOf(pimldp.GetType());
pimldp.i = index;
pimldp.x = x;
pimldp.y = y;
pimldp.rgbFg = -1; // CLR_DEFAULT-style foreground: -1 per original code
pimldp.fStyle = (int)flags;
iImageList.Draw(ref pimldp);
}
}
/// <summary>
/// Draws an image using the specified flags and specifies
/// the size to clip to (or to stretch to if ILD_SCALE
/// is provided).
/// </summary>
/// <param name="hdc">Device context to draw to</param>
/// <param name="index">Index of image to draw</param>
/// <param name="x">X Position to draw at</param>
/// <param name="y">Y Position to draw at</param>
/// <param name="flags">Drawing flags</param>
/// <param name="cx">Width to draw</param>
/// <param name="cy">Height to draw</param>
public void DrawImage(
IntPtr hdc,
int index,
int x,
int y,
ImageListDrawItemConstants flags,
int cx,
int cy
)
{
var pimldp = new IMAGELISTDRAWPARAMS();
pimldp.hdcDst = hdc;
pimldp.cbSize = Marshal.SizeOf(pimldp.GetType());
pimldp.i = index;
pimldp.x = x;
pimldp.y = y;
pimldp.cx = cx;
pimldp.cy = cy;
pimldp.fStyle = (int)flags;
// The raw-handle path needs himl set explicitly; the COM path carries
// the image list in the interface itself.
if (iImageList == null)
{
pimldp.himl = hIml;
ImageList_DrawIndirect(ref pimldp);
}
else
{
iImageList.Draw(ref pimldp);
}
}
/// <summary>
/// Determines if the system is running Windows XP
/// or above
/// </summary>
/// <returns>True if system is running XP or above, False otherwise</returns>
private static bool IsXpOrAbove()
{
bool ret = false;
// XP is NT 5.1; anything with major > 5 or 5.x (x >= 1) qualifies.
if (Environment.OSVersion.Version.Major > 5)
{
ret = true;
}
else if ((Environment.OSVersion.Version.Major == 5) &&
(Environment.OSVersion.Version.Minor >= 1))
{
ret = true;
}
return ret;
//return false;
}
/// <summary>
/// Creates the SystemImageList
/// </summary>
private void Create()
{
// forget last image list if any:
hIml = IntPtr.Zero;
if (IsXpOrAbove())
{
// Get the System IImageList object from the Shell:
var iidImageList = new Guid("46EB5926-582E-4017-9FDF-E8998DAA0950");
SHGetImageList(
(int)size,
ref iidImageList,
ref iImageList
);
// the image list handle is the IUnknown pointer, but
// using Marshal.GetIUnknownForObject doesn't return
// the right value. It really doesn't hurt to make
// a second call to get the handle:
SHGetImageListHandle((int)size, ref iidImageList, ref hIml);
}
else
{
// Prepare flags:
SHGetFileInfoConstants dwFlags = SHGetFileInfoConstants.SHGFI_USEFILEATTRIBUTES | SHGetFileInfoConstants.SHGFI_SYSICONINDEX ;
if (size == SysImageListSize.SmallIcons)
{
dwFlags |= SHGetFileInfoConstants.SHGFI_SMALLICON;
}
// Get image list
var shfi = new SHFILEINFO();
var shfiSize = (uint)Marshal.SizeOf(shfi.GetType());
// Call SHGetFileInfo to get the image list handle
// using an arbitrary file:
hIml = SHGetFileInfo(
".txt",
FILE_ATTRIBUTE_NORMAL,
ref shfi,
shfiSize,
(uint)dwFlags);
System.Diagnostics.Debug.Assert ((hIml != IntPtr.Zero),"Failed to create Image List");
}
}
#endregion
#region Constructor, Dispose, Destructor
/// <summary>
/// Creates a Small Icons SystemImageList
/// </summary>
public SysImageList()
{
Create();
}
/// <summary>
/// Creates a SystemImageList with the specified size
/// </summary>
/// <param name="size">Size of System ImageList</param>
public SysImageList(SysImageListSize size)
{
this.size = size;
Create();
}
/// <summary>
/// Clears up any resources associated with the SystemImageList
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Clears up any resources associated with the SystemImageList
/// when disposing is true.
/// </summary>
/// <param name="disposing">Whether the object is being disposed</param>
public void Dispose(bool disposing)
{
if (!disposed)
{
if (disposing)
{
// Release the COM reference to the shell image list; the raw
// hIml handle is owned by the shell and is not destroyed here.
if (iImageList != null)
{
Marshal.ReleaseComObject(iImageList);
}
iImageList = null;
}
}
// NOTE: the flag is set outside the if-block, so it is (re)assigned on
// every call regardless of prior state.
disposed = true;
}
/// <summary>
/// Finalise for SysImageList
/// </summary>
~SysImageList()
{
Dispose(false);
}
}
#endregion
#endregion
}
| |
#region license
// Copyright (c) 2004, Rodrigo B. de Oliveira (rbo@acm.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Rodrigo B. de Oliveira nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Reflection;
using Boo.Lang.Runtime;
namespace Boo.Lang
{
/// <summary>
/// boo language builtin functions.
/// </summary>
public class Builtins
{
// NOTE(review): appears to serve as a marker type for boo's duck typing —
// confirm against the compiler sources before documenting further.
public class duck
{
}
/// <summary>
/// The version of this boo runtime.
/// </summary>
public static System.Version BooVersion
{
get
{
return new System.Version("0.9.7.0");
}
}
/// <summary>
/// Writes <paramref name="o"/> to standard output followed by a newline.
/// </summary>
public static void print(object o)
{
Console.WriteLine(o);
}
/// <summary>
/// Reads a line of text from standard input.
/// </summary>
public static string gets()
{
return Console.ReadLine();
}
/// <summary>
/// Writes <paramref name="message"/> to standard output and reads a line
/// of text from standard input in response.
/// </summary>
public static string prompt(string message)
{
Console.Write(message);
return Console.ReadLine();
}
/// <summary>
/// Concatenates the items of <paramref name="enumerable"/> into a single
/// string, with <paramref name="separator"/> between consecutive items.
/// </summary>
public static string join(IEnumerable enumerable, string separator)
{
var sb = new StringBuilder();
var enumerator = enumerable.GetEnumerator();
// Dispose the enumerator when it happens to implement IDisposable.
using (enumerator as IDisposable)
{
if (enumerator.MoveNext())
{
sb.Append(enumerator.Current);
while (enumerator.MoveNext())
{
sb.Append(separator);
sb.Append(enumerator.Current);
}
}
}
return sb.ToString();
}
/// <summary>
/// Concatenates the items of <paramref name="enumerable"/> into a single
/// string, with the given separator character between consecutive items.
/// </summary>
public static string join(IEnumerable enumerable, char separator)
{
return join(enumerable, separator.ToString());
}
/// <summary>
/// Concatenates the items of <paramref name="enumerable"/> into a single
/// string, separated by single spaces.
/// </summary>
public static string join(IEnumerable enumerable)
{
return join(enumerable, " ");
}
/// <summary>
/// Lazily applies <paramref name="function"/> to each item of
/// <paramref name="enumerable"/>, yielding the results.
/// </summary>
public static IEnumerable map(object enumerable, ICallable function)
{
// NOTE: iterator method — these argument checks only run when the
// returned sequence is first enumerated, not at the call site.
if (null == enumerable) throw new ArgumentNullException("enumerable");
if (null == function) throw new ArgumentNullException("function");
object[] args = new object[1];
foreach (object item in iterator(enumerable))
{
args[0] = item;
yield return function.Call(args);
}
}
/// <summary>
/// Materializes <paramref name="enumerable"/> into an object array.
/// </summary>
public static object[] array(IEnumerable enumerable)
{
return new List(enumerable).ToArray();
}
//fast path, that implementation detail does not really need to be API
private static Array ArrayFromCollection(Type elementType, ICollection collection)
{
if (null == elementType)
throw new ArgumentNullException("elementType");
if (null == collection)
throw new ArgumentNullException("collection");
Array array = Array.CreateInstance(elementType, collection.Count);
if (RuntimeServices.IsPromotableNumeric(Type.GetTypeCode(elementType)))
{
// Numeric element type: promote each item individually before storing.
int i=0;
foreach (object item in collection)
{
object value = RuntimeServices.CheckNumericPromotion(item).ToType(elementType, null);
array.SetValue(value, i);
++i;
}
}
else
{
collection.CopyTo(array, 0);
}
return array;
}
/// <summary>
/// Materializes <paramref name="enumerable"/> into an array with element
/// type <paramref name="elementType"/>, applying numeric promotion to the
/// items when the element type is a promotable numeric type.
/// </summary>
[TypeInferenceRule(TypeInferenceRules.ArrayOfTypeReferencedByFirstArgument)]
public static Array array(Type elementType, IEnumerable enumerable)
{
if (null == elementType)
throw new ArgumentNullException("elementType");
if (null == enumerable)
throw new ArgumentNullException("enumerable");
#pragma warning disable 618 //obsolete
ICollection collection = enumerable as ICollection;
if (null != collection) //fast path
return ArrayFromCollection(elementType, collection);
#pragma warning restore 618
List l = null;
if (RuntimeServices.IsPromotableNumeric(Type.GetTypeCode(elementType)))
{
l = new List();
foreach (object item in enumerable)
{
object value = RuntimeServices.CheckNumericPromotion(item).ToType(elementType, null);
l.Add(value);
}
}
else
{
l = new List(enumerable);
}
return l.ToArray(elementType);
}
/// <summary>
/// Creates a one-dimensional array with the given element type and length.
/// </summary>
[TypeInferenceRule(TypeInferenceRules.ArrayOfTypeReferencedByFirstArgument)]
public static Array array(Type elementType, int length)
{
if (length < 0)
throw new ArgumentException("`length' cannot be negative", "length");
return matrix(elementType, length);
}
/// <summary>
/// Creates an array with the given element type and one length per dimension.
/// </summary>
public static Array matrix(Type elementType, params int[] lengths)
{
if (null == elementType)
throw new ArgumentNullException("elementType");
if (null == lengths || 0 == lengths.Length)
throw new ArgumentException("A matrix must have at least one dimension", "lengths");
return Array.CreateInstance(elementType, lengths);
}
#region generic array/matrix builtins (v0.9.2+)
// These generic overloads exist only as compile-time targets: the boo
// compiler replaces calls to them with direct array creation, so reaching
// one of these bodies at runtime indicates a compiler bug.
public static T[] array<T>(int length)
{
throw new NotSupportedException("Operation should have been optimized away by the compiler!");
}
public static T[,] matrix<T>(int length0, int length1)
{
throw new NotSupportedException("Operation should have been optimized away by the compiler!");
}
public static T[,,] matrix<T>(int length0, int length1, int length2)
{
throw new NotSupportedException("Operation should have been optimized away by the compiler!");
}
public static T[,,,] matrix<T>(int length0, int length1, int length2, int length3)
{
throw new NotSupportedException("Operation should have been optimized away by the compiler!");
}
#endregion
/// <summary>
/// Adapts <paramref name="enumerable"/> to IEnumerable via the runtime's
/// coercion rules.
/// </summary>
public static IEnumerable iterator(object enumerable)
{
return RuntimeServices.GetEnumerable(enumerable);
}
#if !NO_SYSTEM_PROCESS
/// <summary>
/// Starts <paramref name="filename"/> with the given arguments, with no
/// window and all three standard streams redirected, and returns the
/// running process.
/// </summary>
public static System.Diagnostics.Process shellp(string filename, string arguments)
{
var p = new System.Diagnostics.Process();
p.StartInfo.Arguments = arguments;
p.StartInfo.CreateNoWindow = true;
p.StartInfo.UseShellExecute = false;
p.StartInfo.RedirectStandardOutput = true;
p.StartInfo.RedirectStandardInput = true;
p.StartInfo.RedirectStandardError = true;
p.StartInfo.FileName = filename;
p.Start();
return p;
}
/// <summary>
/// Runs <paramref name="filename"/> with the given arguments, waits for
/// it to exit and returns its captured standard output.
/// </summary>
public static string shell(string filename, string arguments)
{
var p = shellp(filename, arguments);
// Read output before waiting to avoid deadlocking on a full pipe.
var output = p.StandardOutput.ReadToEnd();
p.WaitForExit();
return output;
}
#endif
/// <summary>
/// Yields a two-element object[] of { index, item } for each item of
/// <paramref name="enumerable"/>.
/// </summary>
public static IEnumerable<object[]> enumerate(object enumerable)
{
int i = 0;
foreach (object item in iterator(enumerable))
{
yield return new object[] { i++, item };
}
}
/// <summary>
/// Yields the integers from 0 (inclusive) to <paramref name="max"/>
/// (exclusive). Throws eagerly for negative <paramref name="max"/>.
/// </summary>
public static IEnumerable<int> range(int max)
{
if (max < 0) /* added for coherence with behavior of compiler-optimized
* for-in-range() loops, should compiler loops automatically
* inverse iteration in this case? */
{
throw new ArgumentOutOfRangeException("max < 0");
}
return range(0, max);
}
/// <summary>
/// Yields the integers from <paramref name="begin"/> (inclusive) to
/// <paramref name="end"/> (exclusive), counting up or down as needed.
/// Yields nothing when the bounds are equal.
/// </summary>
public static IEnumerable<int> range(int begin, int end)
{
if (begin < end)
{
for (int i = begin; i < end; ++i) yield return i;
}
else if (begin > end)
{
for (int i = begin; i > end; --i) yield return i;
}
}
/// <summary>
/// Yields the integers from <paramref name="begin"/> (inclusive) towards
/// <paramref name="end"/> (exclusive) in increments of <paramref name="step"/>.
/// </summary>
public static IEnumerable<int> range(int begin, int end, int step)
{
// NOTE: iterator method — the range checks below only run when the
// returned sequence is first enumerated, not at the call site.
if (0 ==step)
{
throw new ArgumentOutOfRangeException("step == 0");
}
if (step < 0)
{
if (begin < end)
{
throw new ArgumentOutOfRangeException("begin < end && step < 0");
}
for (int i = begin; i > end; i += step) yield return i;
}
else
{
if (begin > end)
{
throw new ArgumentOutOfRangeException("begin > end && step > 0");
}
for (int i = begin; i < end; i += step) yield return i;
}
}
/// <summary>
/// Returns the items of <paramref name="enumerable"/> in reverse order.
/// </summary>
public static IEnumerable reversed(object enumerable)
{
return new List(iterator(enumerable)).Reversed;
}
/// <summary>
/// Iterates the given enumerables in lockstep; each step yields one
/// object[] holding the current item of every input. Iteration stops as
/// soon as any input is exhausted.
/// </summary>
public static ZipEnumerator zip(params object[] enumerables)
{
// Enumerators are obtained eagerly, at the call site.
IEnumerator[] enumerators = new IEnumerator[enumerables.Length];
for (int i=0; i<enumerables.Length; ++i)
{
enumerators[i] = GetEnumerator(enumerables[i]);
}
return new ZipEnumerator(enumerators);
}
/// <summary>
/// Lazily concatenates the items of each argument, in argument order.
/// </summary>
public static IEnumerable<object> cat(params object[] args)
{
foreach (object e in args)
{
foreach (object item in iterator(e))
{
yield return item;
}
}
}
// Lockstep enumerator over several underlying enumerators; Current is an
// object[] with one element per input.
[EnumeratorItemType(typeof(object[]))]
public class ZipEnumerator : IEnumerator, IEnumerable, IDisposable
{
IEnumerator[] _enumerators;
internal ZipEnumerator(params IEnumerator[] enumerators)
{
_enumerators = enumerators;
}
// Disposes every underlying enumerator that supports IDisposable.
public void Dispose()
{
for (int i=0; i<_enumerators.Length; ++i)
{
IDisposable d = _enumerators[i] as IDisposable;
if (d != null)
d.Dispose();
}
}
public void Reset()
{
for (int i=0; i<_enumerators.Length; ++i)
{
_enumerators[i].Reset();
}
}
// Returns false as soon as any underlying enumerator is exhausted.
public bool MoveNext()
{
for (int i=0; i<_enumerators.Length; ++i)
{
if (!_enumerators[i].MoveNext())
{
return false;
}
}
return true;
}
// A fresh object[] snapshot of the current items, one per input.
public object Current
{
get
{
object[] current = new object[_enumerators.Length];
for (int i=0; i<current.Length; ++i)
{
current[i] = _enumerators[i].Current;
}
return current;
}
}
public IEnumerator GetEnumerator()
{
return this;
}
}
// Obtains an IEnumerator via the runtime's coercion rules (see iterator()).
private static IEnumerator GetEnumerator(object enumerable)
{
return RuntimeServices.GetEnumerable(enumerable).GetEnumerator();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
//
//
// This file contains the primary interface and management of tasks and queues.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace System.Threading.Tasks
{
/// <summary>
/// Represents an abstract scheduler for tasks.
/// </summary>
/// <remarks>
/// <para>
/// <see cref="System.Threading.Tasks.TaskScheduler">TaskScheduler</see> acts as the extension point for all
/// pluggable scheduling logic. This includes mechanisms such as how to schedule a task for execution, and
/// how scheduled tasks should be exposed to debuggers.
/// </para>
/// <para>
/// All members of the abstract <see cref="TaskScheduler"/> type are thread-safe
/// and may be used from multiple threads concurrently.
/// </para>
/// </remarks>
[DebuggerDisplay("Id={Id}")]
[DebuggerTypeProxy(typeof(SystemThreadingTasks_TaskSchedulerDebugView))]
public abstract class TaskScheduler
{
////////////////////////////////////////////////////////////
//
// User Provided Methods and Properties
//
/// <summary>
/// Queues a <see cref="System.Threading.Tasks.Task">Task</see> to the scheduler.
/// </summary>
/// <remarks>
/// <para>
/// A class derived from <see cref="System.Threading.Tasks.TaskScheduler">TaskScheduler</see>
/// implements this method to accept tasks being scheduled on the scheduler.
/// A typical implementation would store the task in an internal data structure, which would
/// be serviced by threads that would execute those tasks at some time in the future.
/// </para>
/// <para>
/// This method is only meant to be called by the .NET Framework and
/// should not be called directly by the derived class. This is necessary
/// for maintaining the consistency of the system.
/// </para>
/// </remarks>
/// <param name="task">The <see cref="System.Threading.Tasks.Task">Task</see> to be queued.</param>
/// <exception cref="System.ArgumentNullException">The <paramref name="task"/> argument is null.</exception>
protected internal abstract void QueueTask(Task task); // invoked by the runtime, not directly by derived classes (see remarks above)
/// <summary>
/// Determines whether the provided <see cref="System.Threading.Tasks.Task">Task</see>
/// can be executed synchronously in this call, and if it can, executes it.
/// </summary>
/// <remarks>
/// <para>
/// A class derived from <see cref="TaskScheduler">TaskScheduler</see> implements this function to
/// support inline execution of a task on a thread that initiates a wait on that task object. Inline
/// execution is optional, and the request may be rejected by returning false. However, better
/// scalability typically results the more tasks that can be inlined, and in fact a scheduler that
/// inlines too little may be prone to deadlocks. A proper implementation should ensure that a
/// request executing under the policies guaranteed by the scheduler can successfully inline. For
/// example, if a scheduler uses a dedicated thread to execute tasks, any inlining requests from that
/// thread should succeed.
/// </para>
/// <para>
/// If a scheduler decides to perform the inline execution, it should do so by calling to the base
/// TaskScheduler's
/// <see cref="TryExecuteTask">TryExecuteTask</see> method with the provided task object, propagating
/// the return value. It may also be appropriate for the scheduler to remove an inlined task from its
/// internal data structures if it decides to honor the inlining request. Note, however, that under
/// some circumstances a scheduler may be asked to inline a task that was not previously provided to
/// it with the <see cref="QueueTask"/> method.
/// </para>
/// <para>
/// The derived scheduler is responsible for making sure that the calling thread is suitable for
/// executing the given task as far as its own scheduling and execution policies are concerned.
/// </para>
/// </remarks>
/// <param name="task">The <see cref="System.Threading.Tasks.Task">Task</see> to be
/// executed.</param>
/// <param name="taskWasPreviouslyQueued">A Boolean denoting whether or not task has previously been
/// queued. If this parameter is True, then the task may have been previously queued (scheduled); if
/// False, then the task is known not to have been queued, and this call is being made in order to
/// execute the task inline without queueing it.</param>
/// <returns>A Boolean value indicating whether the task was executed inline.</returns>
/// <exception cref="System.ArgumentNullException">The <paramref name="task"/> argument is
/// null.</exception>
/// <exception cref="System.InvalidOperationException">The <paramref name="task"/> was already
/// executed.</exception>
protected abstract bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued); // result is validated by TryRunInline below
/// <summary>
/// Generates an enumerable of <see cref="System.Threading.Tasks.Task">Task</see> instances
/// currently queued to the scheduler waiting to be executed.
/// </summary>
/// <remarks>
/// <para>
/// A class derived from <see cref="TaskScheduler"/> implements this method in order to support
/// integration with debuggers. This method will only be invoked by the .NET Framework when the
/// debugger requests access to the data. The enumerable returned will be traversed by debugging
/// utilities to access the tasks currently queued to this scheduler, enabling the debugger to
/// provide a representation of this information in the user interface.
/// </para>
/// <para>
/// It is important to note that, when this method is called, all other threads in the process will
/// be frozen. Therefore, it's important to avoid synchronization with other threads that may lead to
/// blocking. If synchronization is necessary, the method should prefer to throw a <see
/// cref="System.NotSupportedException"/>
/// than to block, which could cause a debugger to experience delays. Additionally, this method and
/// the enumerable returned must not modify any globally visible state.
/// </para>
/// <para>
/// The returned enumerable should never be null. If there are currently no queued tasks, an empty
/// enumerable should be returned instead.
/// </para>
/// <para>
/// For developers implementing a custom debugger, this method shouldn't be called directly, but
/// rather this functionality should be accessed through the internal wrapper method
/// GetScheduledTasksForDebugger:
/// <c>internal Task[] GetScheduledTasksForDebugger()</c>. This method returns an array of tasks,
/// rather than an enumerable. In order to retrieve a list of active schedulers, a debugger may use
/// another internal method: <c>internal static TaskScheduler[] GetTaskSchedulersForDebugger()</c>.
/// This static method returns an array of all active TaskScheduler instances.
/// GetScheduledTasksForDebugger then may be used on each of these scheduler instances to retrieve
/// the list of scheduled tasks for each.
/// </para>
/// </remarks>
/// <returns>An enumerable that allows traversal of tasks currently queued to this scheduler.
/// </returns>
/// <exception cref="System.NotSupportedException">
/// This scheduler is unable to generate a list of queued tasks at this time.
/// </exception>
protected abstract IEnumerable<Task>? GetScheduledTasks(); // debugger support only; may return null or throw NotSupportedException (see remarks above)
/// <summary>
/// Indicates the maximum concurrency level this
/// <see cref="TaskScheduler"/> is able to support.
/// </summary>
public virtual int MaximumConcurrencyLevel => int.MaxValue; // default: effectively unlimited
////////////////////////////////////////////////////////////
//
// Internal overridable methods
//
/// <summary>
/// Attempts to execute the target task synchronously.
/// </summary>
/// <param name="task">The task to run.</param>
/// <param name="taskWasPreviouslyQueued">True if the task may have been previously queued,
/// false if the task was absolutely not previously queued.</param>
/// <returns>True if it ran, false otherwise.</returns>
internal bool TryRunInline(Task task, bool taskWasPreviouslyQueued)
{
// Do not inline unstarted tasks (i.e., task.ExecutingTaskScheduler == null).
// Do not inline TaskCompletionSource-style (a.k.a. "promise") tasks.
// No need to attempt inlining if the task body was already run (i.e. either TASK_STATE_DELEGATE_INVOKED or TASK_STATE_CANCELED bits set)
TaskScheduler? ets = task.ExecutingTaskScheduler;
// Delegate cross-scheduler inlining requests to target scheduler
if (ets != this && ets != null) return ets.TryRunInline(task, taskWasPreviouslyQueued);
// Refuse to inline when there is nothing runnable (no scheduler, no
// delegate, already invoked or canceled) or when the current stack is
// too deep to safely run user code.
if ((ets == null) ||
(task.m_action == null) ||
task.IsDelegateInvoked ||
task.IsCanceled ||
!RuntimeHelpers.TryEnsureSufficientExecutionStack())
{
return false;
}
// Task class will still call into TaskScheduler.TryRunInline rather than TryExecuteTaskInline() so that
// 1) we can adjust the return code from TryExecuteTaskInline in case a buggy custom scheduler lies to us
// 2) we maintain a mechanism for the TLS lookup optimization that we used to have for the ConcRT scheduler (will potentially introduce the same for TP)
// Fire the "task scheduled" ETW event before inlining, if tracing is on.
if (TplEventSource.Log.IsEnabled())
task.FireTaskScheduledIfNeeded(this);
bool inlined = TryExecuteTaskInline(task, taskWasPreviouslyQueued);
// If the custom scheduler returned true, we should either have the TASK_STATE_DELEGATE_INVOKED or TASK_STATE_CANCELED bit set
// Otherwise the scheduler is buggy
if (inlined && !(task.IsDelegateInvoked || task.IsCanceled))
{
throw new InvalidOperationException(SR.TaskScheduler_InconsistentStateAfterTryExecuteTaskInline);
}
return inlined;
}
/// <summary>
/// Attempts to dequeue a <see cref="System.Threading.Tasks.Task">Task</see> that was previously queued to
/// this scheduler.
/// </summary>
/// <param name="task">The <see cref="System.Threading.Tasks.Task">Task</see> to be dequeued.</param>
/// <returns>A Boolean denoting whether the <paramref name="task"/> argument was successfully dequeued.</returns>
/// <exception cref="System.ArgumentNullException">The <paramref name="task"/> argument is null.</exception>
protected internal virtual bool TryDequeue(Task task)
{
return false;
}
        /// <summary>
        /// Notifies the scheduler that a work item has made progress.
        /// </summary>
        /// <remarks>The base implementation is intentionally a no-op; schedulers that track
        /// progress can override this.</remarks>
        internal virtual void NotifyWorkItemProgress()
        {
        }
/// <summary>
/// Indicates whether this is a custom scheduler, in which case the safe code paths will be taken upon task entry
/// using a CAS to transition from queued state to executing.
/// </summary>
internal virtual bool RequiresAtomicStartTransition => true;
/// <summary>
/// Calls QueueTask() after performing any needed firing of events
/// </summary>
internal void InternalQueueTask(Task task)
{
Debug.Assert(task != null);
if (TplEventSource.Log.IsEnabled())
task.FireTaskScheduledIfNeeded(this);
this.QueueTask(task);
}
        ////////////////////////////////////////////////////////////
        //
        // Member variables
        //

        // The global container that keeps track of TaskScheduler instances for debugging purposes.
        // Lazily created (see AddToActiveTaskSchedulers); a ConditionalWeakTable so that being
        // tracked here does not keep a scheduler alive.
        private static ConditionalWeakTable<TaskScheduler, object?>? s_activeTaskSchedulers;

        // An AppDomain-wide default manager.
        private static readonly TaskScheduler s_defaultTaskScheduler = new ThreadPoolTaskScheduler();

        // static counter used to generate unique TaskScheduler IDs
        internal static int s_taskSchedulerIdCounter;

        // this TaskScheduler's unique ID; 0 means "not yet assigned" (see the Id property)
        private volatile int m_taskSchedulerId;
        ////////////////////////////////////////////////////////////
        //
        // Constructors and public properties
        //

        /// <summary>
        /// Initializes the <see cref="System.Threading.Tasks.TaskScheduler"/>.
        /// </summary>
        protected TaskScheduler()
        {
#if CORECLR // Debugger support
            // Register the scheduler in the active scheduler list. This is only relevant when debugging,
            // so we only pay the cost if the debugger is attached when the scheduler is created. This
            // means that the internal TaskScheduler.GetTaskSchedulersForDebugger() will only include
            // schedulers created while the debugger is attached.
            if (Debugger.IsAttached)
            {
                AddToActiveTaskSchedulers();
            }
#endif
        }
        /// <summary>Adds this scheduler to the active schedulers tracking collection for debugging purposes.</summary>
        private void AddToActiveTaskSchedulers()
        {
            ConditionalWeakTable<TaskScheduler, object?>? activeTaskSchedulers = s_activeTaskSchedulers;
            if (activeTaskSchedulers == null)
            {
                // Lazily create the table. Losing the CompareExchange race is fine: we
                // simply re-read the field and use whichever instance was published.
                Interlocked.CompareExchange(ref s_activeTaskSchedulers, new ConditionalWeakTable<TaskScheduler, object?>(), null);
                activeTaskSchedulers = s_activeTaskSchedulers;
            }
            activeTaskSchedulers.Add(this, null);
        }
        /// <summary>
        /// Gets the default <see cref="System.Threading.Tasks.TaskScheduler">TaskScheduler</see> instance.
        /// </summary>
        /// <remarks>Backed by <see cref="s_defaultTaskScheduler"/>, a ThreadPool-based scheduler.</remarks>
        public static TaskScheduler Default => s_defaultTaskScheduler;
        /// <summary>
        /// Gets the <see cref="System.Threading.Tasks.TaskScheduler">TaskScheduler</see>
        /// associated with the currently executing task.
        /// </summary>
        /// <remarks>
        /// When not called from within a task, <see cref="Current"/> will return the <see cref="Default"/> scheduler.
        /// </remarks>
        // Falls back to Default whenever InternalCurrent is null (no ambient task, or the
        // task was created with TaskCreationOptions.HideScheduler).
        public static TaskScheduler Current => InternalCurrent ?? Default;
/// <summary>
/// Gets the <see cref="System.Threading.Tasks.TaskScheduler">TaskScheduler</see>
/// associated with the currently executing task.
/// </summary>
/// <remarks>
/// When not called from within a task, <see cref="InternalCurrent"/> will return null.
/// </remarks>
internal static TaskScheduler? InternalCurrent
{
get
{
Task? currentTask = Task.InternalCurrent;
return ((currentTask != null)
&& ((currentTask.CreationOptions & TaskCreationOptions.HideScheduler) == 0)
) ? currentTask.ExecutingTaskScheduler : null;
}
}
        /// <summary>
        /// Creates a <see cref="TaskScheduler"/>
        /// associated with the current <see cref="System.Threading.SynchronizationContext"/>.
        /// </summary>
        /// <remarks>
        /// All <see cref="System.Threading.Tasks.Task">Task</see> instances queued to
        /// the returned scheduler will be executed through a call to the
        /// <see cref="System.Threading.SynchronizationContext.Post">Post</see> method
        /// on that context.
        /// </remarks>
        /// <returns>
        /// A <see cref="TaskScheduler"/> associated with
        /// the current <see cref="System.Threading.SynchronizationContext">SynchronizationContext</see>, as
        /// determined by <see cref="System.Threading.SynchronizationContext.Current">SynchronizationContext.Current</see>.
        /// </returns>
        /// <exception cref="System.InvalidOperationException">
        /// The current SynchronizationContext may not be used as a TaskScheduler.
        /// </exception>
        public static TaskScheduler FromCurrentSynchronizationContext()
        {
            // The scheduler's constructor captures SynchronizationContext.Current and
            // throws InvalidOperationException when there is no current context.
            return new SynchronizationContextTaskScheduler();
        }
        /// <summary>
        /// Gets the unique ID for this <see cref="TaskScheduler"/>.
        /// </summary>
        /// <remarks>Assigned lazily on first access; 0 is reserved as the "unassigned" sentinel.</remarks>
        public int Id
        {
            get
            {
                if (m_taskSchedulerId == 0)
                {
                    int newId;

                    // We need to repeat if Interlocked.Increment wraps around and returns 0.
                    // Otherwise next time this scheduler's Id is queried it will get a new value
                    do
                    {
                        newId = Interlocked.Increment(ref s_taskSchedulerIdCounter);
                    } while (newId == 0);

                    // Publish the id only if no other thread beat us to it; a losing thread
                    // discards its candidate and returns the winner's value below.
                    Interlocked.CompareExchange(ref m_taskSchedulerId, newId, 0);
                }

                return m_taskSchedulerId;
            }
        }
        /// <summary>
        /// Attempts to execute the provided <see cref="System.Threading.Tasks.Task">Task</see>
        /// on this scheduler.
        /// </summary>
        /// <remarks>
        /// <para>
        /// Scheduler implementations are provided with <see cref="System.Threading.Tasks.Task">Task</see>
        /// instances to be executed through either the <see cref="QueueTask"/> method or the
        /// <see cref="TryExecuteTaskInline"/> method. When the scheduler deems it appropriate to run the
        /// provided task, <see cref="TryExecuteTask"/> should be used to do so. TryExecuteTask handles all
        /// aspects of executing a task, including action invocation, exception handling, state management,
        /// and lifecycle control.
        /// </para>
        /// <para>
        /// <see cref="TryExecuteTask"/> must only be used for tasks provided to this scheduler by the .NET
        /// Framework infrastructure. It should not be used to execute arbitrary tasks obtained through
        /// custom mechanisms.
        /// </para>
        /// </remarks>
        /// <param name="task">
        /// A <see cref="System.Threading.Tasks.Task">Task</see> object to be executed.</param>
        /// <exception cref="System.InvalidOperationException">
        /// The <paramref name="task"/> is not associated with this scheduler.
        /// </exception>
        /// <returns>A Boolean that is true if <paramref name="task"/> was successfully executed, false if it
        /// was not. A common reason for execution failure is that the task had previously been executed or
        /// is in the process of being executed by another thread.</returns>
        protected bool TryExecuteTask(Task task)
        {
            // Guard against a scheduler trying to run a task that belongs to a different scheduler.
            if (task.ExecutingTaskScheduler != this)
            {
                throw new InvalidOperationException(SR.TaskScheduler_ExecuteTask_WrongTaskScheduler);
            }

            // ExecuteEntry performs the actual invocation plus the double-execute check.
            return task.ExecuteEntry();
        }
        ////////////////////////////////////////////////////////////
        //
        // Events
        //

        /// <summary>
        /// Occurs when a faulted <see cref="System.Threading.Tasks.Task"/>'s unobserved exception is about to trigger exception escalation
        /// policy, which, by default, would terminate the process.
        /// </summary>
        /// <remarks>
        /// This AppDomain-wide event provides a mechanism to prevent exception
        /// escalation policy (which, by default, terminates the process) from triggering.
        /// Each handler is passed a <see cref="System.Threading.Tasks.UnobservedTaskExceptionEventArgs"/>
        /// instance, which may be used to examine the exception and to mark it as observed.
        /// </remarks>
        // Raised via PublishUnobservedTaskException, which is called from the TaskExceptionHolder finalizer.
        public static event EventHandler<UnobservedTaskExceptionEventArgs>? UnobservedTaskException;
////////////////////////////////////////////////////////////
//
// Internal methods
//
// This is called by the TaskExceptionHolder finalizer.
internal static void PublishUnobservedTaskException(object sender, UnobservedTaskExceptionEventArgs ueea)
{
UnobservedTaskException?.Invoke(sender, ueea);
}
/// <summary>
/// Provides an array of all queued <see cref="System.Threading.Tasks.Task">Task</see> instances
/// for the debugger.
/// </summary>
/// <remarks>
/// The returned array is populated through a call to <see cref="GetScheduledTasks"/>.
/// Note that this function is only meant to be invoked by a debugger remotely.
/// It should not be called by any other codepaths.
/// </remarks>
/// <returns>An array of <see cref="System.Threading.Tasks.Task">Task</see> instances.</returns>
/// <exception cref="System.NotSupportedException">
/// This scheduler is unable to generate a list of queued tasks at this time.
/// </exception>
internal Task[]? GetScheduledTasksForDebugger()
{
// this can throw InvalidOperationException indicating that they are unable to provide the info
// at the moment. We should let the debugger receive that exception so that it can indicate it in the UI
IEnumerable<Task>? activeTasksSource = GetScheduledTasks();
if (activeTasksSource == null)
return null;
// If it can be cast to an array, use it directly
if (!(activeTasksSource is Task[] activeTasksArray))
{
activeTasksArray = (new List<Task>(activeTasksSource)).ToArray();
}
// touch all Task.Id fields so that the debugger doesn't need to do a lot of cross-proc calls to generate them
foreach (Task t in activeTasksArray)
{
_ = t.Id;
}
return activeTasksArray;
}
        /// <summary>
        /// Provides an array of all active <see cref="System.Threading.Tasks.TaskScheduler">TaskScheduler</see>
        /// instances for the debugger.
        /// </summary>
        /// <remarks>
        /// This function is only meant to be invoked by a debugger remotely.
        /// It should not be called by any other codepaths.
        /// </remarks>
        /// <returns>An array of <see cref="System.Threading.Tasks.TaskScheduler">TaskScheduler</see> instances.</returns>
        internal static TaskScheduler[] GetTaskSchedulersForDebugger()
        {
            if (s_activeTaskSchedulers == null)
            {
                // No schedulers were tracked. Just give back the default.
                return new TaskScheduler[] { s_defaultTaskScheduler };
            }

            // Snapshot the live entries of the weak table into a list.
            List<TaskScheduler> schedulers = new List<TaskScheduler>();
            foreach (KeyValuePair<TaskScheduler, object?> item in s_activeTaskSchedulers)
            {
                schedulers.Add(item.Key);
            }

            if (!schedulers.Contains(s_defaultTaskScheduler))
            {
                // Make sure the default is included, in case the debugger attached
                // after it was created.
                schedulers.Add(s_defaultTaskScheduler);
            }

            TaskScheduler[] arr = schedulers.ToArray();
            foreach (TaskScheduler scheduler in arr)
            {
                Debug.Assert(scheduler != null, "Table returned an incorrect Count or CopyTo failed");
                _ = scheduler.Id; // force Ids for debugger
            }

            return arr;
        }
/// <summary>
/// Nested class that provides debugger view for TaskScheduler
/// </summary>
internal sealed class SystemThreadingTasks_TaskSchedulerDebugView
{
private readonly TaskScheduler m_taskScheduler;
public SystemThreadingTasks_TaskSchedulerDebugView(TaskScheduler scheduler)
{
m_taskScheduler = scheduler;
}
// returns the scheduler's Id
public int Id => m_taskScheduler.Id;
// returns the scheduler's GetScheduledTasks
public IEnumerable<Task>? ScheduledTasks => m_taskScheduler.GetScheduledTasks();
}
}
/// <summary>
/// A TaskScheduler implementation that executes all tasks queued to it through a call to
/// <see cref="System.Threading.SynchronizationContext.Post"/> on the <see cref="System.Threading.SynchronizationContext"/>
/// that its associated with. The default constructor for this class binds to the current <see cref="System.Threading.SynchronizationContext"/>
/// </summary>
internal sealed class SynchronizationContextTaskScheduler : TaskScheduler
{
private readonly SynchronizationContext m_synchronizationContext;
/// <summary>
/// Constructs a SynchronizationContextTaskScheduler associated with <see cref="System.Threading.SynchronizationContext.Current"/>
/// </summary>
/// <exception cref="System.InvalidOperationException">This constructor expects <see cref="System.Threading.SynchronizationContext.Current"/> to be set.</exception>
internal SynchronizationContextTaskScheduler()
{
m_synchronizationContext = SynchronizationContext.Current ??
// make sure we have a synccontext to work with
throw new InvalidOperationException(SR.TaskScheduler_FromCurrentSynchronizationContext_NoCurrent);
}
/// <summary>
/// Implementation of <see cref="System.Threading.Tasks.TaskScheduler.QueueTask"/> for this scheduler class.
///
/// Simply posts the tasks to be executed on the associated <see cref="System.Threading.SynchronizationContext"/>.
/// </summary>
/// <param name="task"></param>
protected internal override void QueueTask(Task task)
{
m_synchronizationContext.Post(s_postCallback, (object)task);
}
/// <summary>
/// Implementation of <see cref="System.Threading.Tasks.TaskScheduler.TryExecuteTaskInline"/> for this scheduler class.
///
/// The task will be executed inline only if the call happens within
/// the associated <see cref="System.Threading.SynchronizationContext"/>.
/// </summary>
/// <param name="task"></param>
/// <param name="taskWasPreviouslyQueued"></param>
protected override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued)
{
if (SynchronizationContext.Current == m_synchronizationContext)
{
return TryExecuteTask(task);
}
else
{
return false;
}
}
protected override IEnumerable<Task>? GetScheduledTasks()
{
return null;
}
/// <summary>
/// Implements the <see cref="System.Threading.Tasks.TaskScheduler.MaximumConcurrencyLevel"/> property for
/// this scheduler class.
///
/// By default it returns 1, because a <see cref="System.Threading.SynchronizationContext"/> based
/// scheduler only supports execution on a single thread.
/// </summary>
public override int MaximumConcurrencyLevel => 1;
// preallocated SendOrPostCallback delegate
private static readonly SendOrPostCallback s_postCallback = s =>
{
Debug.Assert(s is Task);
((Task)s).ExecuteEntry(); // with double-execute check because SC could be buggy
};
}
/// <summary>
/// Provides data for the event that is raised when a faulted <see cref="System.Threading.Tasks.Task"/>'s
/// exception goes unobserved.
/// </summary>
/// <remarks>
/// The Exception property is used to examine the exception without marking it
/// as observed, whereas the <see cref="SetObserved"/> method is used to mark the exception
/// as observed. Marking the exception as observed prevents it from triggering exception escalation policy
/// which, by default, terminates the process.
/// </remarks>
public class UnobservedTaskExceptionEventArgs : EventArgs
{
private readonly AggregateException? m_exception;
internal bool m_observed = false;
/// <summary>
/// Initializes a new instance of the <see cref="UnobservedTaskExceptionEventArgs"/> class
/// with the unobserved exception.
/// </summary>
/// <param name="exception">The Exception that has gone unobserved.</param>
public UnobservedTaskExceptionEventArgs(AggregateException? exception) { m_exception = exception; }
/// <summary>
/// Marks the <see cref="Exception"/> as "observed," thus preventing it
/// from triggering exception escalation policy which, by default, terminates the process.
/// </summary>
public void SetObserved() { m_observed = true; }
/// <summary>
/// Gets whether this exception has been marked as "observed."
/// </summary>
public bool Observed => m_observed;
/// <summary>
/// The Exception that went unobserved.
/// </summary>
public AggregateException? Exception => m_exception;
}
}
| |
/*
* Copyright (C) 2005-2015 Christoph Rupp (chris@crupp.de).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Text;
using System.Runtime.InteropServices;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Hamster
{
  /// <summary>
  /// Raw P/Invoke bindings for the native hamsterdb-2.1.11 library, plus thin
  /// managed wrappers that marshal byte[] keys/records to and from the native
  /// key/record structures.
  /// </summary>
  internal sealed class NativeMethods
  {
    // Never instantiated: every member is static.
    private NativeMethods() {
    }

    // Sequential mirror of the native record structure (size, data pointer,
    // partial offset/size, flags). 'data' points at memory owned by the native
    // library after a successful call; wrappers copy it into managed arrays.
    [StructLayout(LayoutKind.Sequential)]
    unsafe struct RecordStruct
    {
      public Int32 size;
      public byte *data;
      public Int32 partial_offset;
      public Int32 partial_size;
      public Int32 flags;
    }

    // Sequential mirror of the native key structure. '_flags' is the library's
    // internal flags word and is never set by this wrapper.
    [StructLayout(LayoutKind.Sequential)]
    unsafe struct KeyStruct
    {
      public Int16 size;
      public byte *data;
      public Int32 flags;
      public Int32 _flags;
    }

    // Installs a global error-handler callback in the native library.
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_set_errhandler",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern void SetErrorHandler(ErrorHandler eh);

    // Translates a native status code into a human-readable message.
    // NOTE(review): declaring the return type as String makes the default
    // marshaller free the returned native buffer — presumably ham_strerror
    // returns a static string, in which case IntPtr + Marshal.PtrToStringAnsi
    // would be safer. TODO confirm against the native API.
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_strerror",
      CallingConvention=CallingConvention.Cdecl)]
    static public extern String StringError(int error);

    // Retrieves the native library's version triple.
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_get_version",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern void GetVersion(out int major, out int minor,
        out int revision);

    // --- Environment functions -------------------------------------------

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_create",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvCreate(out IntPtr handle, String fileName,
        int flags, int mode, Parameter[] parameters);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_open",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvOpen(out IntPtr handle, String fileName,
        int flags, Parameter[] parameters);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_create_db",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvCreateDatabase(IntPtr handle,
        out IntPtr dbhandle, short name, int flags,
        Parameter[] parameters);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_open_db",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvOpenDatabase(IntPtr handle,
        out IntPtr dbhandle, short name, int flags,
        Parameter[] parameters);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_rename_db",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvRenameDatabase(IntPtr handle,
        short oldName, short newName);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_erase_db",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvEraseDatabase(IntPtr handle,
        short name, int flags);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_flush",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvFlush(IntPtr handle, int flags);

    // Raw form of ham_env_get_database_names; callers pass an unmanaged
    // buffer and receive the actual name count through 'count'.
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_get_database_names",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvGetDatabaseNamesLow(IntPtr handle,
        IntPtr dbnames, ref int count);

    // Managed wrapper: allocates an unmanaged buffer for up to 2000 database
    // names (2 bytes per short), calls the native function, then copies the
    // results into a managed short[]. Returns 0 on success, else the native
    // status code (with 'names' set to null).
    // NOTE(review): environments holding more than 2000 databases presumably
    // make the native call fail rather than truncate — TODO confirm.
    static public int EnvGetDatabaseNames(IntPtr handle, out short[] names) {
      // alloc space for 2000 database names
      int count = 2000;
      IntPtr array = Marshal.AllocHGlobal(2*count);
      int st = EnvGetDatabaseNamesLow(handle, array, ref count);
      if (st != 0) {
        Marshal.FreeHGlobal(array);
        names = null;
        return st;
      }
      names = new short[count];
      Marshal.Copy(array, names, 0, count);
      Marshal.FreeHGlobal(array);
      return 0;
    }

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_env_close",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int EnvClose(IntPtr handle, int flags);

    // --- Transaction functions -------------------------------------------

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_txn_begin",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int TxnBegin(out IntPtr txnhandle, IntPtr envhandle,
        String filename, IntPtr reserved, int flags);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_txn_commit",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int TxnCommit(IntPtr handle, int flags);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_txn_abort",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int TxnAbort(IntPtr handle, int flags);

    // --- Database functions ----------------------------------------------

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_get_error",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int GetLastError(IntPtr handle);

    // TODO this is new, but lots of effort b/c of complex
    // marshalling. if you need this function pls drop me a mail.
    /*
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_get_parameters",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int GetParameters(IntPtr handle, Parameter[] parameters);
    */

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_get_env",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern IntPtr GetEnv(IntPtr handle);

    // TODO this is new, but lots of effort b/c of complex
    // marshalling. if you need this function pls drop me a mail.
    /*
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_key_get_approximate_match",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int KeyGetApproximateMatch(ref KeyStruct key);
    */

    // Native-compatible comparison callback over raw key pointers.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public delegate int CompareFunc(IntPtr handle,
        IntPtr lhs, int lhsLength,
        IntPtr rhs, int rhsLength);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_set_compare_func",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int SetCompareFunc(IntPtr handle,
        NativeMethods.CompareFunc foo);

    // Comparison callback for duplicate records; receives marshalled arrays.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public delegate int DuplicateCompareFunc(IntPtr handle,
        byte[] lhs, int lhsLength,
        byte[] rhs, int rhsLength);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_find",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int FindLow(IntPtr handle, IntPtr txnhandle,
        ref KeyStruct key, ref RecordStruct record, int flags);

    // Managed wrapper for ham_db_find: pins the key bytes, calls the native
    // function, and copies the resulting record (and, for approximate
    // matching, the possibly-modified key) back into managed arrays.
    // Returns 0 on success, else the native status code.
    static public unsafe int Find(IntPtr handle, IntPtr txnhandle,
        ref byte[] keydata, ref byte[] recdata, int flags) {
      KeyStruct key = new KeyStruct();
      RecordStruct record = new RecordStruct();
      key.size = (short)keydata.Length;
      fixed (byte *bk = keydata) {
        key.data = bk;
        int st = FindLow(handle, txnhandle, ref key, ref record, flags);
        if (st != 0)
          return st;
        // I didn't found a way to avoid the copying...
        IntPtr recData = new IntPtr(record.data);
        byte[] newRecData = new byte[record.size];
        Marshal.Copy(recData, newRecData, 0, record.size);
        recdata = newRecData;
        // also copy the key data if approx. matching was requested
        if ((flags & (HamConst.HAM_FIND_LT_MATCH | HamConst.HAM_FIND_GT_MATCH)) != 0) {
          IntPtr keyData = new IntPtr(key.data);
          byte[] newKeyData = new byte[key.size];
          Marshal.Copy(keyData, newKeyData, 0, key.size);
          keydata = newKeyData;
        }
        return 0;
      }
    }

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_insert",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int InsertLow(IntPtr handle, IntPtr txnhandle,
        ref KeyStruct key, ref RecordStruct record, int flags);

    // Managed wrapper for ham_db_insert: pins both arrays for the duration
    // of the native call. Returns the native status code.
    static public unsafe int Insert(IntPtr handle, IntPtr txnhandle,
        byte[] keyData, byte[] recordData, int flags) {
      KeyStruct key = new KeyStruct();
      RecordStruct record = new RecordStruct();
      fixed (byte* br = recordData, bk = keyData) {
        record.data = br;
        record.size = recordData.GetLength(0);
        key.data = bk;
        key.size = (short)keyData.GetLength(0);
        return InsertLow(handle, txnhandle, ref key, ref record, flags);
      }
    }

    // Record-number insert: the library assigns the key, which is copied
    // back into 'keydata' on success. Returns the native status code.
    static public unsafe int InsertRecNo(IntPtr handle, IntPtr txnhandle,
        ref byte[] keydata, byte[] recordData, int flags)
    {
      KeyStruct key = new KeyStruct();
      RecordStruct record = new RecordStruct();
      fixed (byte* br = recordData)
      {
        record.data = br;
        record.size = recordData.GetLength(0);
        key.data = null;
        key.size = 0;
        int st = InsertLow(handle, txnhandle, ref key, ref record, flags);
        if (st != 0)
          return st;
        IntPtr keyData = new IntPtr(key.data);
        byte[] newKeyData = new byte[key.size];
        Marshal.Copy(keyData, newKeyData, 0, key.size);
        keydata = newKeyData;
        return 0;
      }
    }

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_erase",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int EraseLow(IntPtr handle, IntPtr txnhandle,
        ref KeyStruct key, int flags);

    // Managed wrapper for ham_db_erase: pins the key bytes for the native call.
    static public unsafe int Erase(IntPtr handle, IntPtr txnhandle,
        byte[] data, int flags) {
      KeyStruct key = new KeyStruct();
      fixed (byte* b = data) {
        key.data = b;
        key.size = (short)data.GetLength(0);
        return EraseLow(handle, txnhandle, ref key, flags);
      }
    }

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_get_key_count",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int GetKeyCount(IntPtr handle, IntPtr txnhandle,
        int flags, out Int64 keycount);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_db_close",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int Close(IntPtr handle, int flags);

    // --- Cursor functions ------------------------------------------------

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_create",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int CursorCreate(out IntPtr chandle, IntPtr dbhandle,
        IntPtr txnhandle, int flags);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_clone",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int CursorClone(IntPtr handle, out IntPtr clone);

    // Three overloads of ham_cursor_move so callers can pass IntPtr.Zero
    // for the key, the record, or both without building structs.
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_move",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int CursorMoveLow(IntPtr handle,
        IntPtr key, IntPtr record, int flags);

    // overload: retrieves the key only
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_move",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int CursorMoveLow(IntPtr handle,
        ref KeyStruct key, IntPtr record, int flags);

    // overload: retrieves the record only
    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_move",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int CursorMoveLow(IntPtr handle,
        IntPtr key, ref RecordStruct record, int flags);

    // Moves the cursor without retrieving key or record data.
    static public int CursorMove(IntPtr handle, int flags) {
      return CursorMoveLow(handle, IntPtr.Zero, IntPtr.Zero, flags);
    }

    // Moves the cursor and returns a managed copy of the current record.
    // Throws DatabaseException on any non-zero native status.
    static unsafe public byte[] CursorGetRecord(IntPtr handle, int flags) {
      RecordStruct record = new RecordStruct();
      int st = CursorMoveLow(handle, IntPtr.Zero, ref record, flags);
      if (st == 0) {
        // I didn't found a way to avoid the copying...
        IntPtr recData = new IntPtr(record.data);
        byte[] newArray = new byte[record.size];
        Marshal.Copy(recData, newArray, 0, record.size);
        return newArray;
      }
      throw new DatabaseException(st);
    }

    // Moves the cursor and returns a managed copy of the current key.
    // Throws DatabaseException on any non-zero native status.
    static unsafe public byte[] CursorGetKey(IntPtr handle, int flags) {
      KeyStruct key = new KeyStruct();
      int st = CursorMoveLow(handle, ref key, IntPtr.Zero, flags);
      if (st == 0) {
        // I didn't found a way to avoid the copying...
        IntPtr keyData = new IntPtr(key.data);
        byte[] newArray = new byte[key.size];
        Marshal.Copy(keyData, newArray, 0, key.size);
        return newArray;
      }
      throw new DatabaseException(st);
    }

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_overwrite",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int CursorOverwriteLow(IntPtr handle,
        ref RecordStruct record, int flags);

    // Overwrites the record under the cursor with the supplied bytes.
    static unsafe public int CursorOverwrite(IntPtr handle, byte[] data, int flags) {
      RecordStruct record = new RecordStruct();
      fixed (byte* b = data) {
        record.data = b;
        record.size = data.GetLength(0);
        return CursorOverwriteLow(handle, ref record, flags);
      }
    }

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_find",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int CursorFindLow(IntPtr handle,
        ref KeyStruct key, ref RecordStruct record, int flags);

    // Positions the cursor on 'keydata' and copies the found record (and,
    // for approximate matching, the actual key) back into managed arrays.
    // Returns 0 on success, else the native status code.
    static unsafe public int CursorFind(IntPtr handle, ref byte[] keydata,
        ref byte[] recdata, int flags) {
      KeyStruct key = new KeyStruct();
      RecordStruct record = new RecordStruct();
      fixed (byte* bk = keydata) {
        key.data = bk;
        key.size = (short)keydata.Length;
        int st = CursorFindLow(handle, ref key, ref record, flags);
        if (st != 0)
          return st;
        // I didn't found a way to avoid the copying...
        IntPtr recData = new IntPtr(record.data);
        byte[] newRecData = new byte[record.size];
        Marshal.Copy(recData, newRecData, 0, record.size);
        recdata = newRecData;
        // also copy the key data if approx. matching was requested
        if ((flags & (HamConst.HAM_FIND_LT_MATCH | HamConst.HAM_FIND_GT_MATCH)) != 0) {
          IntPtr keyData = new IntPtr(key.data);
          byte[] newKeyData = new byte[key.size];
          Marshal.Copy(keyData, newKeyData, 0, key.size);
          keydata = newKeyData;
        }
        return 0;
      }
    }

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_insert",
      CallingConvention = CallingConvention.Cdecl)]
    static private extern int CursorInsertLow(IntPtr handle,
        ref KeyStruct key, ref RecordStruct record, int flags);

    // Inserts a key/record pair at the cursor position; pins both arrays
    // for the duration of the native call.
    static public unsafe int CursorInsert(IntPtr handle,
        byte[] keyData, byte[] recordData, int flags) {
      RecordStruct record = new RecordStruct();
      KeyStruct key = new KeyStruct();
      fixed (byte* br = recordData, bk = keyData) {
        record.data = br;
        record.size = recordData.GetLength(0);
        key.data = bk;
        key.size = (short)keyData.GetLength(0);
        return CursorInsertLow(handle, ref key, ref record, flags);
      }
    }

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_erase",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int CursorErase(IntPtr handle, int flags);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_get_duplicate_count",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int CursorGetDuplicateCount(IntPtr handle, out int count,
        int flags);

    [DllImport("hamsterdb-2.1.11.dll", EntryPoint = "ham_cursor_close",
      CallingConvention = CallingConvention.Cdecl)]
    static public extern int CursorClose(IntPtr handle);
  }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Compute
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
    /// <summary>
    /// ResourceSkusOperations operations.
    /// </summary>
    // NOTE(review): AutoRest-generated client. Hand edits will be overwritten on
    // regeneration; change the OpenAPI spec or generator settings instead.
    internal partial class ResourceSkusOperations : IServiceOperations<ComputeManagementClient>, IResourceSkusOperations
    {
        /// <summary>
        /// Initializes a new instance of the ResourceSkusOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        internal ResourceSkusOperations(ComputeManagementClient client)
        {
            if (client == null)
            {
                throw new System.ArgumentNullException("client");
            }
            Client = client;
        }
        /// <summary>
        /// Gets a reference to the ComputeManagementClient
        /// </summary>
        public ComputeManagementClient Client { get; private set; }
        /// <summary>
        /// Gets the list of Microsoft.Compute SKUs available for your Subscription.
        /// </summary>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse<IPage<ResourceSku>>> ListWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (Client.SubscriptionId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
            }
            // The service API version is pinned by the code generator.
            string apiVersion = "2017-03-30";
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("apiVersion", apiVersion);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
            }
            // Construct URL
            var _baseUrl = Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.Compute/skus").ToString();
            _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
            List<string> _queryParameters = new List<string>();
            if (apiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
            }
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
            }
            if (Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            // Any non-200 response is surfaced as CloudException, with the
            // CloudError body parsed into it when the payload is valid JSON.
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse<IPage<ResourceSku>>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    // Page1<T> is the generated concrete IPage<T> implementation.
                    _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<ResourceSku>>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }
        /// <summary>
        /// Gets the list of Microsoft.Compute SKUs available for your Subscription.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse<IPage<ResourceSku>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (nextPageLink == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
            }
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("nextPageLink", nextPageLink);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
            }
            // Construct URL
            // nextPageLink is the absolute URL returned by the previous page and
            // is used verbatim (no escaping, no api-version appended).
            string _url = "{nextLink}";
            _url = _url.Replace("{nextLink}", nextPageLink);
            List<string> _queryParameters = new List<string>();
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
            }
            if (Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse<IPage<ResourceSku>>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    // Page1<T> is the generated concrete IPage<T> implementation.
                    _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<ResourceSku>>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }
    }
}
| |
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.Entity;
using System.Linq;
using System.Threading.Tasks;
using Abp;
using Abp.Configuration.Startup;
using Abp.Domain.Uow;
using Abp.Runtime.Session;
using Abp.TestBase;
using FreeInventoryManager.EntityFramework;
using FreeInventoryManager.Migrations.SeedData;
using FreeInventoryManager.MultiTenancy;
using FreeInventoryManager.Users;
using Castle.MicroKernel.Registration;
using Effort;
using EntityFramework.DynamicFilters;
namespace FreeInventoryManager.Tests
{
    /// <summary>
    /// Base class for integration tests. Seeds host and default-tenant data into
    /// a transient in-memory (Effort) database, logs in as the default tenant
    /// admin, and provides session / DbContext helpers for derived test classes.
    /// </summary>
    public abstract class FreeInventoryManagerTestBase : AbpIntegratedTestBase<FreeInventoryManagerTestModule>
    {
        // Transient connection used for the host (and, in single-db mode, for all tenants).
        private DbConnection _hostDb;
        private Dictionary<int, DbConnection> _tenantDbs; //only used for db per tenant architecture
        protected FreeInventoryManagerTestBase()
        {
            //Seed initial data for host
            AbpSession.TenantId = null;
            UsingDbContext(context =>
            {
                new InitialHostDbBuilder(context).Create();
                new DefaultTenantCreator(context).Create();
            });
            //Seed initial data for default tenant
            AbpSession.TenantId = 1;
            UsingDbContext(context =>
            {
                new TenantRoleAndUserBuilder(context, 1).Create();
            });
            LoginAsDefaultTenantAdmin();
        }
        protected override void PreInitialize()
        {
            base.PreInitialize();
            /* You can switch database architecture here: */
            UseSingleDatabase();
            //UseDatabasePerTenant();
        }
        /* Uses single database for host and all tenants.
         */
        private void UseSingleDatabase()
        {
            // One shared transient connection, registered as a singleton so every
            // resolved DbContext uses the same in-memory database.
            _hostDb = DbConnectionFactory.CreateTransient();
            LocalIocManager.IocContainer.Register(
                Component.For<DbConnection>()
                    .UsingFactoryMethod(() => _hostDb)
                    .LifestyleSingleton()
                );
        }
        /* Uses single database for host and Default tenant,
         * but dedicated databases for all other tenants.
         */
        private void UseDatabasePerTenant()
        {
            _hostDb = DbConnectionFactory.CreateTransient();
            _tenantDbs = new Dictionary<int, DbConnection>();
            LocalIocManager.IocContainer.Register(
                Component.For<DbConnection>()
                    .UsingFactoryMethod((kernel) =>
                    {
                        // Guard the lazily-built tenant-connection map: the factory
                        // can be invoked from concurrent resolutions.
                        lock (_tenantDbs)
                        {
                            var currentUow = kernel.Resolve<ICurrentUnitOfWorkProvider>().Current;
                            var abpSession = kernel.Resolve<IAbpSession>();
                            var tenantId = currentUow != null ? currentUow.GetTenantId() : abpSession.TenantId;
                            if (tenantId == null || tenantId == 1) //host and default tenant are stored in host db
                            {
                                return _hostDb;
                            }
                            if (!_tenantDbs.ContainsKey(tenantId.Value))
                            {
                                _tenantDbs[tenantId.Value] = DbConnectionFactory.CreateTransient();
                            }
                            return _tenantDbs[tenantId.Value];
                        }
                    }, true)
                    .LifestyleTransient()
                );
        }
        #region UsingDbContext
        /// <summary>
        /// Temporarily switches <see cref="IAbpSession.TenantId"/> to the given
        /// value; disposing the returned handle restores the previous tenant.
        /// </summary>
        protected IDisposable UsingTenantId(int? tenantId)
        {
            var previousTenantId = AbpSession.TenantId;
            AbpSession.TenantId = tenantId;
            return new DisposeAction(() => AbpSession.TenantId = previousTenantId);
        }
        // The overloads below run a delegate against a fresh DbContext (data
        // filters disabled) for the current or an explicit tenant, then save.
        protected void UsingDbContext(Action<FreeInventoryManagerDbContext> action)
        {
            UsingDbContext(AbpSession.TenantId, action);
        }
        protected Task UsingDbContextAsync(Func<FreeInventoryManagerDbContext, Task> action)
        {
            return UsingDbContextAsync(AbpSession.TenantId, action);
        }
        protected T UsingDbContext<T>(Func<FreeInventoryManagerDbContext, T> func)
        {
            return UsingDbContext(AbpSession.TenantId, func);
        }
        protected Task<T> UsingDbContextAsync<T>(Func<FreeInventoryManagerDbContext, Task<T>> func)
        {
            return UsingDbContextAsync(AbpSession.TenantId, func);
        }
        protected void UsingDbContext(int? tenantId, Action<FreeInventoryManagerDbContext> action)
        {
            using (UsingTenantId(tenantId))
            {
                using (var context = LocalIocManager.Resolve<FreeInventoryManagerDbContext>())
                {
                    // Disable multi-tenancy/soft-delete filters so tests see raw data.
                    context.DisableAllFilters();
                    action(context);
                    context.SaveChanges();
                }
            }
        }
        protected async Task UsingDbContextAsync(int? tenantId, Func<FreeInventoryManagerDbContext, Task> action)
        {
            using (UsingTenantId(tenantId))
            {
                using (var context = LocalIocManager.Resolve<FreeInventoryManagerDbContext>())
                {
                    context.DisableAllFilters();
                    await action(context);
                    await context.SaveChangesAsync();
                }
            }
        }
        protected T UsingDbContext<T>(int? tenantId, Func<FreeInventoryManagerDbContext, T> func)
        {
            T result;
            using (UsingTenantId(tenantId))
            {
                using (var context = LocalIocManager.Resolve<FreeInventoryManagerDbContext>())
                {
                    context.DisableAllFilters();
                    result = func(context);
                    context.SaveChanges();
                }
            }
            return result;
        }
        protected async Task<T> UsingDbContextAsync<T>(int? tenantId, Func<FreeInventoryManagerDbContext, Task<T>> func)
        {
            T result;
            using (UsingTenantId(tenantId))
            {
                using (var context = LocalIocManager.Resolve<FreeInventoryManagerDbContext>())
                {
                    context.DisableAllFilters();
                    result = await func(context);
                    await context.SaveChangesAsync();
                }
            }
            return result;
        }
        #endregion
        #region Login
        protected void LoginAsHostAdmin()
        {
            LoginAsHost(User.AdminUserName);
        }
        protected void LoginAsDefaultTenantAdmin()
        {
            LoginAsTenant(Tenant.DefaultTenantName, User.AdminUserName);
        }
        protected void LogoutAsDefaultTenant()
        {
            LogoutAsTenant(Tenant.DefaultTenantName);
        }
        /// <summary>
        /// Sets the session to the given host user (TenantId = null).
        /// Throws if no such host user exists in the database.
        /// </summary>
        protected void LoginAsHost(string userName)
        {
            AbpSession.TenantId = null;
            var user =
                UsingDbContext(
                    context =>
                        context.Users.FirstOrDefault(u => u.TenantId == AbpSession.TenantId && u.UserName == userName));
            if (user == null)
            {
                throw new Exception("There is no user: " + userName + " for host.");
            }
            AbpSession.UserId = user.Id;
        }
        protected void LogoutAsHost()
        {
            Resolve<IMultiTenancyConfig>().IsEnabled = true;
            AbpSession.TenantId = null;
            AbpSession.UserId = null;
        }
        /// <summary>
        /// Sets the session to the given user of the given tenant.
        /// Throws if the tenant or the user cannot be found.
        /// </summary>
        protected void LoginAsTenant(string tenancyName, string userName)
        {
            var tenant = UsingDbContext(context => context.Tenants.FirstOrDefault(t => t.TenancyName == tenancyName));
            if (tenant == null)
            {
                throw new Exception("There is no tenant: " + tenancyName);
            }
            AbpSession.TenantId = tenant.Id;
            var user =
                UsingDbContext(
                    context =>
                        context.Users.FirstOrDefault(u => u.TenantId == AbpSession.TenantId && u.UserName == userName));
            if (user == null)
            {
                throw new Exception("There is no user: " + userName + " for tenant: " + tenancyName);
            }
            AbpSession.UserId = user.Id;
        }
        /// <summary>
        /// Clears the session user while staying on the given tenant.
        /// Throws if the tenant cannot be found.
        /// </summary>
        protected void LogoutAsTenant(string tenancyName)
        {
            var tenant = UsingDbContext(context => context.Tenants.FirstOrDefault(t => t.TenancyName == tenancyName));
            if (tenant == null)
            {
                throw new Exception("There is no tenant: " + tenancyName);
            }
            AbpSession.TenantId = tenant.Id;
            AbpSession.UserId = null;
        }
        #endregion
        /// <summary>
        /// Gets current user if <see cref="IAbpSession.UserId"/> is not null.
        /// Throws exception if it's null.
        /// </summary>
        protected async Task<User> GetCurrentUserAsync()
        {
            var userId = AbpSession.GetUserId();
            return await UsingDbContext(context => context.Users.SingleAsync(u => u.Id == userId));
        }
        /// <summary>
        /// Gets current tenant if <see cref="IAbpSession.TenantId"/> is not null.
        /// Throws exception if there is no current tenant.
        /// </summary>
        protected async Task<Tenant> GetCurrentTenantAsync()
        {
            var tenantId = AbpSession.GetTenantId();
            return await UsingDbContext(context => context.Tenants.SingleAsync(t => t.Id == tenantId));
        }
    }
}
| |
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.1.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace ApplicationGateway
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// VirtualNetworkPeeringsOperations operations.
/// </summary>
internal partial class VirtualNetworkPeeringsOperations : IServiceOperations<NetworkClient>, IVirtualNetworkPeeringsOperations
{
        /// <summary>
        /// Initializes a new instance of the VirtualNetworkPeeringsOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        internal VirtualNetworkPeeringsOperations(NetworkClient client)
        {
            // Fail fast: every operation in this class dereferences Client.
            if (client == null)
            {
                throw new System.ArgumentNullException("client");
            }
            Client = client;
        }
        /// <summary>
        /// Gets a reference to the NetworkClient used to issue requests
        /// (base URI, credentials, serialization settings).
        /// </summary>
        public NetworkClient Client { get; private set; }
/// <summary>
/// Deletes the specified virtual network peering.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkName'>
/// The name of the virtual network.
/// </param>
/// <param name='virtualNetworkPeeringName'>
/// The name of the virtual network peering.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string virtualNetworkPeeringName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Send request
AzureOperationResponse _response = await BeginDeleteWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, virtualNetworkPeeringName, customHeaders, cancellationToken).ConfigureAwait(false);
return await Client.GetPostOrDeleteOperationResultAsync(_response, customHeaders, cancellationToken).ConfigureAwait(false);
}
        /// <summary>
        /// Gets the specified virtual network peering.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='virtualNetworkName'>
        /// The name of the virtual network.
        /// </param>
        /// <param name='virtualNetworkPeeringName'>
        /// The name of the virtual network peering.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse<VirtualNetworkPeering>> GetWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string virtualNetworkPeeringName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (resourceGroupName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
            }
            if (virtualNetworkName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkName");
            }
            if (virtualNetworkPeeringName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkPeeringName");
            }
            if (Client.ApiVersion == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
            }
            if (Client.SubscriptionId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
            }
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("virtualNetworkName", virtualNetworkName);
                tracingParameters.Add("virtualNetworkPeeringName", virtualNetworkPeeringName);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
            }
            // Construct URL
            // Each placeholder value is percent-escaped before substitution.
            var _baseUrl = Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}").ToString();
            _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
            _url = _url.Replace("{virtualNetworkName}", System.Uri.EscapeDataString(virtualNetworkName));
            _url = _url.Replace("{virtualNetworkPeeringName}", System.Uri.EscapeDataString(virtualNetworkPeeringName));
            _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
            List<string> _queryParameters = new List<string>();
            if (Client.ApiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
            }
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
            }
            if (Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            // Any non-200 response is surfaced as CloudException, with the
            // CloudError body parsed into it when the payload is valid JSON.
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse<VirtualNetworkPeering>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<VirtualNetworkPeering>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }
/// <summary>
/// Creates or updates a peering in the specified virtual network.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkName'>
/// The name of the virtual network.
/// </param>
/// <param name='virtualNetworkPeeringName'>
/// The name of the peering.
/// </param>
/// <param name='virtualNetworkPeeringParameters'>
/// Parameters supplied to the create or update virtual network peering
/// operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<VirtualNetworkPeering>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string virtualNetworkPeeringName, VirtualNetworkPeering virtualNetworkPeeringParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Send Request
AzureOperationResponse<VirtualNetworkPeering> _response = await BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, virtualNetworkPeeringName, virtualNetworkPeeringParameters, customHeaders, cancellationToken).ConfigureAwait(false);
return await Client.GetPutOrPatchOperationResultAsync(_response, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets all virtual network peerings in a virtual network.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkName'>
/// The name of the virtual network.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<VirtualNetworkPeering>>> ListWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Fail fast on missing required parameters and client configuration before any network work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (virtualNetworkName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkName");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing: only pay for invocation-id and parameter capture when tracing is enabled.
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("virtualNetworkName", virtualNetworkName);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
    }
    // Construct URL: substitute escaped path parameters into the ARM route template.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{virtualNetworkName}", System.Uri.EscapeDataString(virtualNetworkName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: custom headers win over client defaults (existing keys are removed first).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: GET carries no body; the null content is still recorded in error wrappers below.
    string _requestContent = null;
    // Set Credentials: lets the credential provider attach e.g. an Authorization header.
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-success: wrap request/response in a CloudException, disposing the transport objects here.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: error-body parsing is best effort; fall back to the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result: on success the request/response are handed to the caller (who owns disposal).
    var _result = new AzureOperationResponse<IPage<VirtualNetworkPeering>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<VirtualNetworkPeering>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            // A malformed success payload is fatal: dispose transport objects and surface the raw content.
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Deletes the specified virtual network peering.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkName'>
/// The name of the virtual network.
/// </param>
/// <param name='virtualNetworkPeeringName'>
/// The name of the virtual network peering.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string virtualNetworkPeeringName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Fail fast on missing required parameters and client configuration before any network work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (virtualNetworkName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkName");
    }
    if (virtualNetworkPeeringName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkPeeringName");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("virtualNetworkName", virtualNetworkName);
        tracingParameters.Add("virtualNetworkPeeringName", virtualNetworkPeeringName);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "BeginDelete", tracingParameters);
    }
    // Construct URL: substitute escaped path parameters into the ARM route template.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{virtualNetworkName}", System.Uri.EscapeDataString(virtualNetworkName));
    _url = _url.Replace("{virtualNetworkPeeringName}", System.Uri.EscapeDataString(virtualNetworkPeeringName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("DELETE");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: custom headers win over client defaults (existing keys are removed first).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: DELETE carries no body; the null content is still recorded in error wrappers below.
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // 200 = deleted, 202 = delete accepted (long-running), 204 = already gone; anything else is an error.
    if ((int)_statusCode != 200 && (int)_statusCode != 204 && (int)_statusCode != 202)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: error-body parsing is best effort; fall back to the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result: on success the request/response are handed to the caller (who owns disposal).
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Creates or updates a peering in the specified virtual network.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkName'>
/// The name of the virtual network.
/// </param>
/// <param name='virtualNetworkPeeringName'>
/// The name of the peering.
/// </param>
/// <param name='virtualNetworkPeeringParameters'>
/// Parameters supplied to the create or update virtual network peering
/// operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<VirtualNetworkPeering>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string virtualNetworkPeeringName, VirtualNetworkPeering virtualNetworkPeeringParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Fail fast on missing required parameters and client configuration before any network work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (virtualNetworkName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkName");
    }
    if (virtualNetworkPeeringName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkPeeringName");
    }
    if (virtualNetworkPeeringParameters == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkPeeringParameters");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("virtualNetworkName", virtualNetworkName);
        tracingParameters.Add("virtualNetworkPeeringName", virtualNetworkPeeringName);
        tracingParameters.Add("virtualNetworkPeeringParameters", virtualNetworkPeeringParameters);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "BeginCreateOrUpdate", tracingParameters);
    }
    // Construct URL: substitute escaped path parameters into the ARM route template.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{virtualNetworkName}", System.Uri.EscapeDataString(virtualNetworkName));
    _url = _url.Replace("{virtualNetworkPeeringName}", System.Uri.EscapeDataString(virtualNetworkPeeringName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("PUT");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: custom headers win over client defaults (existing keys are removed first).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: PUT body is the peering parameters as JSON (UTF-8).
    string _requestContent = null;
    if(virtualNetworkPeeringParameters != null)
    {
        _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(virtualNetworkPeeringParameters, Client.SerializationSettings);
        _httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
        _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
    }
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // 200 = updated, 201 = created; anything else is an error.
    if ((int)_statusCode != 200 && (int)_statusCode != 201)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: error-body parsing is best effort; fall back to the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result: on success the request/response are handed to the caller (who owns disposal).
    var _result = new AzureOperationResponse<VirtualNetworkPeering>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response (200)
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<VirtualNetworkPeering>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            // A malformed success payload is fatal: dispose transport objects and surface the raw content.
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    // Deserialize Response (201)
    if ((int)_statusCode == 201)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<VirtualNetworkPeering>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            // A malformed success payload is fatal: dispose transport objects and surface the raw content.
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Gets all virtual network peerings in a virtual network.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<VirtualNetworkPeering>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Fail fast: the continuation link from the previous page is required.
    if (nextPageLink == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("nextPageLink", nextPageLink);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
    }
    // Construct URL: nextPageLink is already an absolute URL from the service, so it is used verbatim (not escaped).
    string _url = "{nextLink}";
    _url = _url.Replace("{nextLink}", nextPageLink);
    List<string> _queryParameters = new List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: custom headers win over client defaults (existing keys are removed first).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: GET carries no body; the null content is still recorded in error wrappers below.
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-success: wrap request/response in a CloudException, disposing the transport objects here.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: error-body parsing is best effort; fall back to the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result: on success the request/response are handed to the caller (who owns disposal).
    var _result = new AzureOperationResponse<IPage<VirtualNetworkPeering>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<VirtualNetworkPeering>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            // A malformed success payload is fatal: dispose transport objects and surface the raw content.
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using Avalonia.Controls.Generators;
using Avalonia.Controls.Primitives;
using Avalonia.Controls.Shapes;
using Avalonia.Input;
using Avalonia.Layout;
using Avalonia.LogicalTree;
using Avalonia.Media;
using Avalonia.VisualTree;
namespace Avalonia.Controls
{
/// <summary>
/// A drop-down list control.
/// </summary>
public class DropDown : SelectingItemsControl
{
    /// <summary>
    /// Defines the <see cref="IsDropDownOpen"/> property.
    /// </summary>
    public static readonly DirectProperty<DropDown, bool> IsDropDownOpenProperty =
        AvaloniaProperty.RegisterDirect<DropDown, bool>(
            nameof(IsDropDownOpen),
            o => o.IsDropDownOpen,
            (o, v) => o.IsDropDownOpen = v);

    /// <summary>
    /// Defines the <see cref="MaxDropDownHeight"/> property.
    /// </summary>
    public static readonly StyledProperty<double> MaxDropDownHeightProperty =
        AvaloniaProperty.Register<DropDown, double>(nameof(MaxDropDownHeight), 200);

    /// <summary>
    /// Defines the <see cref="SelectionBoxItem"/> property.
    /// </summary>
    public static readonly DirectProperty<DropDown, object> SelectionBoxItemProperty =
        AvaloniaProperty.RegisterDirect<DropDown, object>("SelectionBoxItem", o => o.SelectionBoxItem);

    private bool _isDropDownOpen;
    private Popup _popup;
    private object _selectionBoxItem;

    /// <summary>
    /// Initializes static members of the <see cref="DropDown"/> class.
    /// </summary>
    static DropDown()
    {
        FocusableProperty.OverrideDefaultValue<DropDown>(true);
        SelectedItemProperty.Changed.AddClassHandler<DropDown>(x => x.SelectedItemChanged);
    }

    /// <summary>
    /// Gets or sets a value indicating whether the dropdown is currently open.
    /// </summary>
    public bool IsDropDownOpen
    {
        get { return _isDropDownOpen; }
        set { SetAndRaise(IsDropDownOpenProperty, ref _isDropDownOpen, value); }
    }

    /// <summary>
    /// Gets or sets the maximum height for the dropdown list.
    /// </summary>
    public double MaxDropDownHeight
    {
        get { return GetValue(MaxDropDownHeightProperty); }
        set { SetValue(MaxDropDownHeightProperty, value); }
    }

    /// <summary>
    /// Gets or sets the item to display as the control's content.
    /// </summary>
    protected object SelectionBoxItem
    {
        get { return _selectionBoxItem; }
        set { SetAndRaise(SelectionBoxItemProperty, ref _selectionBoxItem, value); }
    }

    /// <inheritdoc/>
    protected override IItemContainerGenerator CreateItemContainerGenerator()
    {
        return new ItemContainerGenerator<DropDownItem>(
            this,
            DropDownItem.ContentProperty,
            DropDownItem.ContentTemplateProperty);
    }

    /// <inheritdoc/>
    protected override void OnAttachedToLogicalTree(LogicalTreeAttachmentEventArgs e)
    {
        base.OnAttachedToLogicalTree(e);
        this.UpdateSelectionBoxItem(this.SelectedItem);
    }

    /// <inheritdoc/>
    protected override void OnKeyDown(KeyEventArgs e)
    {
        base.OnKeyDown(e);

        if (!e.Handled)
        {
            // F4 or Alt+Down toggles the dropdown; Escape/Enter close it when open.
            if (e.Key == Key.F4 ||
                (e.Key == Key.Down && ((e.Modifiers & InputModifiers.Alt) != 0)))
            {
                IsDropDownOpen = !IsDropDownOpen;
                e.Handled = true;
            }
            else if (IsDropDownOpen && (e.Key == Key.Escape || e.Key == Key.Enter))
            {
                IsDropDownOpen = false;
                e.Handled = true;
            }
        }
    }

    /// <inheritdoc/>
    protected override void OnPointerPressed(PointerPressedEventArgs e)
    {
        // Only open the dropdown for presses that originate outside the popup.
        // BUGFIX: the previous code compared the visual root *instance* against
        // typeof(PopupRoot) with '!=', which is always true for any press; use a
        // type test on the event source's visual root instead.
        if (!IsDropDownOpen && !(((IVisual)e.Source).GetVisualRoot() is PopupRoot))
        {
            IsDropDownOpen = true;
            e.Handled = true;
        }

        if (!e.Handled)
        {
            // A press inside the popup selects the item under the pointer and closes the list.
            if (UpdateSelectionFromEventSource(e.Source))
            {
                _popup?.Close();
                e.Handled = true;
            }
        }

        base.OnPointerPressed(e);
    }

    /// <inheritdoc/>
    protected override void OnTemplateApplied(TemplateAppliedEventArgs e)
    {
        // Re-templating: detach from the old popup before grabbing the new one.
        if (_popup != null)
        {
            _popup.Opened -= PopupOpened;
        }

        _popup = e.NameScope.Get<Popup>("PART_Popup");
        _popup.Opened += PopupOpened;
    }

    /// <summary>
    /// Moves keyboard focus to the selected item's container when the popup opens.
    /// </summary>
    private void PopupOpened(object sender, EventArgs e)
    {
        var selectedIndex = SelectedIndex;

        if (selectedIndex != -1)
        {
            var container = ItemContainerGenerator.ContainerFromIndex(selectedIndex);
            container?.Focus();
        }
    }

    /// <summary>
    /// Keeps <see cref="SelectionBoxItem"/> in sync with the selected item.
    /// </summary>
    private void SelectedItemChanged(AvaloniaPropertyChangedEventArgs e)
    {
        UpdateSelectionBoxItem(e.NewValue);
    }

    /// <summary>
    /// Updates the content shown in the selection box. A control cannot appear in
    /// two places in the visual tree, so visual items are mirrored via a
    /// <see cref="VisualBrush"/>-filled rectangle instead of being re-parented.
    /// </summary>
    /// <param name="item">The newly selected item.</param>
    private void UpdateSelectionBoxItem(object item)
    {
        var contentControl = item as IContentControl;

        if (contentControl != null)
        {
            item = contentControl.Content;
        }

        var control = item as IControl;

        if (control != null)
        {
            control.Measure(Size.Infinity);

            SelectionBoxItem = new Rectangle
            {
                Width = control.DesiredSize.Width,
                Height = control.DesiredSize.Height,
                Fill = new VisualBrush
                {
                    Visual = control,
                    Stretch = Stretch.None,
                    AlignmentX = AlignmentX.Left,
                }
            };
        }
        else
        {
            SelectionBoxItem = item;
        }
    }
}
}
| |
// Copyright 2011 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#if SPATIAL
namespace Microsoft.Data.Spatial
#else
namespace Microsoft.Data.OData.Json
#endif
{
#region Namespaces
using System;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Xml;
#endregion Namespaces
/// <summary>
/// Provides helper method for converting data values to and from the OData JSON format.
/// </summary>
internal static class JsonValueUtils
{
/// <summary>
/// Constant tick value used for calculating tick values.
/// </summary>
private static readonly long JsonDateTimeMinTimeTicks = (new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)).Ticks;
/// <summary>
/// Writes a boolean as its JSON literal ("true" or "false").
/// </summary>
/// <param name="writer">The text writer that receives the output.</param>
/// <param name="value">The boolean value to write.</param>
internal static void WriteValue(TextWriter writer, bool value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");

    string literal = value ? JsonConstants.JsonTrueLiteral : JsonConstants.JsonFalseLiteral;
    writer.Write(literal);
}
/// <summary>
/// Writes an Int32 value as a bare JSON number.
/// </summary>
/// <param name="writer">The text writer that receives the output.</param>
/// <param name="value">The integer value to write.</param>
internal static void WriteValue(TextWriter writer, int value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");

    // Invariant formatting keeps the payload independent of the current culture.
    string text = value.ToString(CultureInfo.InvariantCulture);
    writer.Write(text);
}
/// <summary>
/// Writes a Single value as a JSON number, except for Infinity/NaN which are
/// written as quoted strings (they are not valid JSON number tokens).
/// </summary>
/// <param name="writer">The text writer that receives the output.</param>
/// <param name="value">The float value to write.</param>
internal static void WriteValue(TextWriter writer, float value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");

    if (!float.IsInfinity(value) && !float.IsNaN(value))
    {
        // float.ToString() supports a max scale of six, whereas float.MinValue and
        // float.MaxValue have 8 digits of scale, so XmlConvert is used for all
        // finite values to avoid precision loss.
        writer.Write(XmlConvert.ToString(value));
        return;
    }

    WriteQuoted(writer, value.ToString(null, CultureInfo.InvariantCulture));
}
/// <summary>
/// Write a short value as a bare JSON number.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">Short value to be written.</param>
internal static void WriteValue(TextWriter writer, short value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    // Invariant culture guarantees plain ASCII digits regardless of the current locale.
    string text = value.ToString(CultureInfo.InvariantCulture);
    writer.Write(text);
}
/// <summary>
/// Write a long value. The value is quoted because JSON only has a single
/// 'number' type and 64-bit integers would otherwise lose precision in
/// consumers that parse numbers as doubles.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">Long value to be written.</param>
internal static void WriteValue(TextWriter writer, long value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    string text = value.ToString(CultureInfo.InvariantCulture);
    WriteQuoted(writer, text);
}
/// <summary>
/// Write a double value. Finite values are written as bare JSON numbers;
/// infinities and NaN (not representable as JSON numbers) are written quoted.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">Double value to be written.</param>
internal static void WriteValue(TextWriter writer, double value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    if (!double.IsInfinity(value) && !double.IsNaN(value))
    {
        // double.ToString() supports a max scale of 14, whereas the extreme values
        // have 16 digits of scale; XmlConvert round-trips the full value.
        writer.Write(XmlConvert.ToString(value));
    }
    else
    {
        WriteQuoted(writer, value.ToString(null, CultureInfo.InvariantCulture));
    }
}
/// <summary>
/// Write a Guid value as a quoted JSON string.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">Guid value to be written.</param>
internal static void WriteValue(TextWriter writer, Guid value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    string text = value.ToString();
    WriteQuoted(writer, text);
}
/// <summary>
/// Write a decimal value. The value is quoted because JSON only has a single
/// 'number' type; writing decimals as strings prevents precision loss in
/// consumers that parse numbers as doubles.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">Decimal value to be written.</param>
internal static void WriteValue(TextWriter writer, decimal value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    string text = value.ToString(CultureInfo.InvariantCulture);
    WriteQuoted(writer, text);
}
/// <summary>
/// Write a DateTime value in either ISO 8601 or the legacy OData "\/Date(...)\/" format.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">DateTime value to be written.</param>
/// <param name="dateTimeFormat">The format to write out the DateTime value in.</param>
#if SPATIAL
[SuppressMessage("Microsoft.Globalization", "CA1303:Do not pass literals as localized parameters", MessageId = "Microsoft.Data.Spatial.JsonValueUtils.WriteQuoted(System.IO.TextWriter,System.String)", Justification = "Constant defined by the JSON spec.")]
#else
[SuppressMessage("Microsoft.Globalization", "CA1303:Do not pass literals as localized parameters", MessageId = "Microsoft.Data.OData.Json.JsonValueUtils.WriteQuoted(System.IO.TextWriter,System.String)", Justification = "Constant defined by the JSON spec.")]
#endif
internal static void WriteValue(TextWriter writer, DateTime value, ODataJsonDateTimeFormat dateTimeFormat)
{
DebugUtils.CheckNoExternalCallers();
Debug.Assert(writer != null, "writer != null");
switch (dateTimeFormat)
{
case ODataJsonDateTimeFormat.ISO8601DateTime:
{
// jsonDateTime= quotation-mark
// YYYY-MM-DDThh:mm:ss.sTZD
// [("+" / "-") offset]
// quotation-mark
string textValue = PlatformHelper.ConvertDateTimeToString(value);
WriteQuoted(writer, textValue);
}
break;
case ODataJsonDateTimeFormat.ODataDateTime:
{
// taken from the Atlas serializer
// DevDiv 41127: Never confuse atlas serialized strings with dates
// Serialized date: "\/Date(123)\/"
// sb.Append(@"""\/Date(");
// sb.Append((datetime.ToUniversalTime().Ticks - DatetimeMinTimeTicks) / 10000);
// sb.Append(@")\/""");
// Normalize to UTC first so the tick arithmetic below is stable for any DateTimeKind.
value = GetUniversalDate(value);
System.Diagnostics.Debug.Assert(value.Kind == DateTimeKind.Utc, "dateTime.Kind == DateTimeKind.Utc");
string textValue = String.Format(
CultureInfo.InvariantCulture,
JsonConstants.ODataDateTimeFormat,
DateTimeTicksToJsonTicks(value.Ticks));
WriteQuoted(writer, textValue);
}
break;
// NOTE(review): an unrecognized format value writes nothing at all — presumably
// unreachable because the enum has only these two members; confirm with callers.
}
}
/// <summary>
/// Write a DateTimeOffset value in either ISO 8601 or the legacy OData
/// "\/Date(ticks+offset)\/" format.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">DateTimeOffset value to be written.</param>
/// <param name="dateTimeFormat">The format to write out the DateTime value in.</param>
#if SPATIAL
[SuppressMessage("Microsoft.Globalization", "CA1303:Do not pass literals as localized parameters", MessageId = "Microsoft.Data.Spatial.JsonValueUtils.WriteQuoted(System.IO.TextWriter,System.String)", Justification = "Constant defined by the JSON spec.")]
#else
[SuppressMessage("Microsoft.Globalization", "CA1303:Do not pass literals as localized parameters", MessageId = "Microsoft.Data.OData.Json.JsonValueUtils.WriteQuoted(System.IO.TextWriter,System.String)", Justification = "Constant defined by the JSON spec.")]
#endif
internal static void WriteValue(TextWriter writer, DateTimeOffset value, ODataJsonDateTimeFormat dateTimeFormat)
{
DebugUtils.CheckNoExternalCallers();
Debug.Assert(writer != null, "writer != null");
// The offset is expressed in whole minutes; fractional-minute offsets are truncated.
Int32 offsetMinutes = (Int32)value.Offset.TotalMinutes;
switch (dateTimeFormat)
{
case ODataJsonDateTimeFormat.ISO8601DateTime:
{
// Uses the same format as DateTime but with offset:
// jsonDateTime= quotation-mark
// YYYY-MM-DDThh:mm:ss.sTZD
// [("+" / "-") offset]
// quotation-mark
//
// offset = 4DIGIT
string textValue = XmlConvert.ToString(value);
WriteQuoted(writer, textValue);
}
break;
case ODataJsonDateTimeFormat.ODataDateTime:
{
// Uses the same format as DateTime but with offset:
// jsonDateTime= quotation-mark
// "\/Date("
// ticks
// [("+" / "-") offset]
// ")\/"
// quotation-mark
//
// ticks = *DIGIT
// offset = 4DIGIT
// Note: value.Ticks is the local-clock tick count of the DateTimeOffset;
// the offset is appended separately so the reader can reconstruct the instant.
string textValue = String.Format(
CultureInfo.InvariantCulture,
JsonConstants.ODataDateTimeOffsetFormat,
DateTimeTicksToJsonTicks(value.Ticks),
offsetMinutes >= 0 ? JsonConstants.ODataDateTimeOffsetPlusSign : string.Empty,
offsetMinutes);
WriteQuoted(writer, textValue);
}
break;
// NOTE(review): an unrecognized format value writes nothing — presumably unreachable.
}
}
/// <summary>
/// Write a TimeSpan value as a quoted XML-duration string.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">TimeSpan value to be written.</param>
internal static void WriteValue(TextWriter writer, TimeSpan value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    string text = XmlConvert.ToString(value);
    WriteQuoted(writer, text);
}
/// <summary>
/// Write a byte value as a bare JSON number.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">Byte value to be written.</param>
internal static void WriteValue(TextWriter writer, byte value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    // Invariant culture guarantees plain ASCII digits regardless of the current locale.
    string text = value.ToString(CultureInfo.InvariantCulture);
    writer.Write(text);
}
/// <summary>
/// Write an sbyte value as a bare JSON number.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">SByte value to be written.</param>
internal static void WriteValue(TextWriter writer, sbyte value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    // Invariant culture guarantees plain ASCII digits regardless of the current locale.
    string text = value.ToString(CultureInfo.InvariantCulture);
    writer.Write(text);
}
/// <summary>
/// Write a string value. Null is written as the JSON 'null' literal;
/// anything else is written as a quoted, escaped JSON string.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="value">String value to be written.</param>
internal static void WriteValue(TextWriter writer, string value)
{
    DebugUtils.CheckNoExternalCallers();
    Debug.Assert(writer != null, "writer != null");
    if (value != null)
    {
        WriteEscapedJsonString(writer, value);
    }
    else
    {
        writer.Write(JsonConstants.JsonNullLiteral);
    }
}
/// <summary>
/// Writes the string value surrounded by quotes, with special characters escaped.
/// Escapes JSON control/meta characters, all characters below space, and all
/// characters above 0x7F (the latter as \uXXXX sequences).
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="inputString">Input string value.</param>
internal static void WriteEscapedJsonString(TextWriter writer, string inputString)
{
DebugUtils.CheckNoExternalCallers();
Debug.Assert(writer != null, "writer != null");
Debug.Assert(inputString != null, "The string value must not be null.");
writer.Write(JsonConstants.QuoteCharacter);
// Scan the input once, tracking the current run of characters that need no
// escaping; each run is written with a single Substring/Write call when an
// escapable character is hit (or at the end of the string).
int startIndex = 0;
int traversedCharacterCount = 0;
for (int i = 0; i < inputString.Length; i++)
{
char c = inputString[i];
// Append the unhandled characters (that do not require special treatment)
// to the string builder when special characters are detected.
// Note: '\r', '\t', '\n', '\b' and '\f' are all < ' ' as well; they are
// listed explicitly so the escaping intent is obvious.
if (c == '\r' || c == '\t' || c == '\"' ||
c == '\\' || c == '\n' || c < ' ' || c > 0x7F || c == '\b' || c == '\f')
{
// Flush out the unescaped characters we've built so far.
writer.Write(inputString.Substring(startIndex, traversedCharacterCount));
startIndex = i + 1;
traversedCharacterCount = 0;
}
else
{
traversedCharacterCount++;
continue;
}
// Emit the escape sequence for the special character just flushed past.
switch (c)
{
case '\r':
writer.Write("\\r");
break;
case '\t':
writer.Write("\\t");
break;
case '\"':
writer.Write("\\\"");
break;
case '\\':
writer.Write("\\\\");
break;
case '\n':
writer.Write("\\n");
break;
case '\b':
writer.Write("\\b");
break;
case '\f':
writer.Write("\\f");
break;
default:
// Any other character reaching here is a control character or non-ASCII;
// write it as a 4-hex-digit unicode escape (surrogate pairs become two escapes).
Debug.Assert((c < ' ') || (c > 0x7F), "Unexpected character.");
string value = string.Format(CultureInfo.InvariantCulture, "\\u{0:x4}", (int)c);
writer.Write(value);
break;
}
}
// Flush any trailing run of unescaped characters.
if (traversedCharacterCount > 0)
{
writer.Write(inputString.Substring(startIndex, traversedCharacterCount));
}
writer.Write(JsonConstants.QuoteCharacter);
}
/// <summary>
/// Converts the number of ticks from the JSON date time format to the one used
/// in the .NET DateTime or DateTimeOffset structure.
/// JSON ticks are milliseconds since 1970-01-01; .NET ticks are 100-nanosecond
/// units since 0001-01-01.
/// </summary>
/// <param name="ticks">The ticks from the JSON date time format.</param>
/// <returns>The ticks to use in the .NET DateTime or DateTimeOffset structure.</returns>
internal static long JsonTicksToDateTimeTicks(long ticks)
{
    DebugUtils.CheckNoExternalCallers();
    // Scale milliseconds to 100ns units, then shift from the 1970 epoch to 0001.
    long hundredNanosecondTicks = ticks * 10000;
    return hundredNanosecondTicks + JsonDateTimeMinTimeTicks;
}
/// <summary>
/// Write the string value surrounded by quote characters.
/// No escaping is performed here; callers must pass already-safe text.
/// </summary>
/// <param name="writer">The text writer to write the output to.</param>
/// <param name="text">String value to be written.</param>
private static void WriteQuoted(TextWriter writer, string text)
{
    // opening quote, payload, closing quote
    writer.Write(JsonConstants.QuoteCharacter);
    writer.Write(text);
    writer.Write(JsonConstants.QuoteCharacter);
}
/// <summary>
/// Converts the number of ticks from the .NET DateTime or DateTimeOffset structure
/// to the ticks used in the JSON date time format.
/// .NET ticks are 100-nanosecond units since 0001-01-01; JSON ticks are
/// milliseconds since 1970-01-01.
/// </summary>
/// <param name="ticks">The ticks from the .NET DateTime or DateTimeOffset structure.</param>
/// <returns>The ticks to use in the JSON date time format.</returns>
private static long DateTimeTicksToJsonTicks(long ticks)
{
    // Shift from the 0001 epoch to 1970, then scale 100ns units down to milliseconds.
    long ticksSinceJsonEpoch = ticks - JsonDateTimeMinTimeTicks;
    return ticksSinceJsonEpoch / 10000;
}
/// <summary>
/// Converts a given date time to its universal (UTC) date time equivalent.
/// Local times are converted; Unspecified times are relabeled as UTC without
/// shifting the tick count; UTC times are returned unchanged.
/// </summary>
/// <param name="value">The date time to convert to UTC.</param>
/// <returns>Universal date time equivalent of the value.</returns>
private static DateTime GetUniversalDate(DateTime value)
{
    if (value.Kind == DateTimeKind.Local)
    {
        return value.ToUniversalTime();
    }

    if (value.Kind == DateTimeKind.Unspecified)
    {
        // Reinterpret the same tick count as UTC; no offset adjustment is applied.
        return new DateTime(value.Ticks, DateTimeKind.Utc);
    }

    return value;
}
}
}
| |
using UnityEngine;
using System.Collections;
public class Main_Camera_GameDisplayScript_t : MonoBehaviour {
    // Baked animation curve, one sample per frame at 60 fps.
    float[] animVar;
    // Seconds per animation frame (1/60).
    float deltaTime;
    // Time.time captured at startup; the frame index is measured from here.
    float startTime;
    // Target component whose 't' field is driven by the baked curve.
    GameDisplayScript comp;
    // Current (fractional) frame index into animVar.
    public float numFrame;

    void Start(){
        startTime = Time.time;
        deltaTime = 1f/60f;

        // The baked curve is flat 0 for frames 0-351, eases in over frames 352-369,
        // and stays at 1 from frame 370 to the end. Build it with loops instead of
        // the original 560 generated one-per-frame assignments (same data).
        animVar = new float[560];
        for (int i = 0; i < 352; i++) {
            animVar[i] = 0.000000f;
        }
        float[] ramp = {
            0.000228f, 0.012067f, 0.040338f, 0.082925f, 0.137706f, 0.202567f,
            0.275384f, 0.354046f, 0.436427f, 0.520416f, 0.603891f, 0.684730f,
            0.760821f, 0.830040f, 0.890274f, 0.939400f, 0.975303f, 0.995863f
        };
        for (int i = 0; i < ramp.Length; i++) {
            animVar[352 + i] = ramp[i];
        }
        for (int i = 352 + ramp.Length; i < animVar.Length; i++) {
            animVar[i] = 1.000000f;
        }

        comp = gameObject.GetComponent<GameDisplayScript>();
    }

    void Update () {
        // Fractional frame index since startup.
        numFrame = (Time.time - startTime) / deltaTime;
        // Clamp just below the last frame so CeilToInt stays within the array bounds.
        if (numFrame >= (float)animVar.Length - 1) {
            numFrame = (float)animVar.Length - 1.01f;
        }
        // BUG FIX: the generated code computed 'numFrame - Mathf.Floor(numFrame)/deltaTime',
        // which by operator precedence subtracts Floor(numFrame)*60 and produces a large
        // negative alpha (clamped to 0 by Mathf.Lerp, so interpolation never happened).
        // The intended blend factor is the fractional part of the frame index.
        float alpha = numFrame - Mathf.Floor(numFrame);
        comp.t = Mathf.Lerp(animVar[Mathf.FloorToInt(numFrame)], animVar[Mathf.CeilToInt(numFrame)], alpha);
    }
}
| |
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Dynamic;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
using Bloom.Api;
using Bloom.Book;
using SIL.Code;
using SIL.Extensions;
using SIL.Reporting;
using SIL.Windows.Forms.ClearShare;
using L10NSharp;
using SIL.IO;
using SIL.Xml;
namespace Bloom.web.controllers
{
public class ImageApi
{
// The currently selected book; used to resolve image paths and page usage.
private readonly BookSelection _bookSelection;
// Image file names (CC license, placeholder and branding images) whose credits
// should never be pasted into the book; populated once in the constructor.
private readonly string[] _doNotPasteArray;
public ImageApi(BookSelection bookSelection)
{
    _bookSelection = bookSelection;
    // Cache the image files we don't want to paste image credits for:
    // the CC license image plus placeholder and branding images.
    _doNotPasteArray = GetImageFilesToNotPasteCreditsFor().ToArray();
}
// Collects the file names of images that should never receive pasted credits:
// the CC license image plus every SVG/PNG shipped in any branding directory.
// Placeholder images are handled elsewhere because they can carry digit suffixes.
private static IEnumerable<string> GetImageFilesToNotPasteCreditsFor()
{
    var imageFiles = new HashSet<string> { "license.png" };
    var brandingDirectory = BloomFileLocator.GetBrowserDirectory("branding");
    foreach (var brandDirectory in Directory.GetDirectories(brandingDirectory))
    {
        var brandImageNames = Directory.EnumerateFiles(brandDirectory)
            .Where(IsSvgOrPng)
            .Select(Path.GetFileName);
        imageFiles.AddRange(brandImageNames);
    }
    return imageFiles;
}
// True if the file name has an .svg or .png extension (case-insensitive).
// Uses an ordinal case-insensitive comparison on the extension instead of
// lower-casing the whole file name: same result, no throwaway string allocation,
// and no culture-sensitive casing surprises.
private static bool IsSvgOrPng(string filename)
{
    var extension = Path.GetExtension(filename);
    return extension.Equals(".svg", StringComparison.OrdinalIgnoreCase)
        || extension.Equals(".png", StringComparison.OrdinalIgnoreCase);
}
public void RegisterWithApiHandler(BloomApiHandler apiHandler)
{
    // Both endpoints only retrieve information about files (apart from reading
    // _bookSelection.CurrentSelection.FolderPath), hence the 'false' flag.
    apiHandler.RegisterEndpointHandler("image/info", HandleImageInfo, false);
    apiHandler.RegisterEndpointHandler("image/imageCreditsForWholeBook", HandleCopyImageCreditsForWholeBook, false);
}
/// <summary>
/// Returns a Dictionary keyed on image name that references an ordered set of page
/// numbers where that image is used, excluding images whose credits should not be
/// pasted (license/placeholder/branding images). Page numbers are strings both for
/// non-decimal numeral systems and because xmatter images are referenced by page
/// label (e.g. 'Front Cover').
/// Public for testing.
/// </summary>
/// <param name="domBody">The body element of the book's DOM.</param>
/// <param name="langs">Language priority list, or null.</param>
/// <returns>Image name to ordered page-reference list.</returns>
public Dictionary<string, List<string>> GetFilteredImageNameToPagesDictionary(XmlNode domBody, IEnumerable<string> langs = null)
{
    var creditedImages = GetWhichImagesAreUsedOnWhichPages(domBody, langs)
        .Where(kvp => !DoNotPasteCreditsImages(kvp.Key));
    var result = new Dictionary<string, List<string>>();
    result.AddRange(creditedImages);
    return result;
}
// Builds the HTML-paragraph credits text for every image used in the current book
// and replies to the API request with it, appending a note listing images that
// have no credit metadata.
private void HandleCopyImageCreditsForWholeBook(ApiRequest request)
{
// This method is called on a fileserver thread. To minimize the chance that the current selection somehow
// changes while it is running, we capture the things that depend on it in variables right at the start.
var domBody = _bookSelection.CurrentSelection.RawDom.DocumentElement.SelectSingleNode("//body");
var currentSelectionFolderPath = _bookSelection.CurrentSelection.FolderPath;
IEnumerable<string> langs;
if (request.CurrentCollectionSettings != null)
langs = _bookSelection.CurrentSelection.GetLanguagePrioritiesForLocalizedTextOnPage();
else
langs = new List<string> { "en" }; // emergency fall back -- probably never used.
var imageNameToPages = GetFilteredImageNameToPagesDictionary(domBody, langs);
// Key: a minimal-credits string; Value: the pages whose images carry that credit.
var credits = new Dictionary<string, List<string>>();
var missingCredits = new List<string>();
foreach (var kvp in imageNameToPages)
{
var path = currentSelectionFolderPath.CombineForPath(kvp.Key);
if (!RobustFile.Exists(path))
continue;
// Read the image's intellectual-property metadata from the file itself.
var meta = Metadata.FromFile(path);
string dummy;
var credit = meta.MinimalCredits(langs, out dummy);
if (string.IsNullOrEmpty(credit))
missingCredits.Add(kvp.Key);
if (!string.IsNullOrEmpty(credit))
{
var pageList = kvp.Value;
BuildCreditsDictionary(credits, credit, pageList);
}
}
// Format each distinct credit (with its page list) as its own <p> element.
var collectedCredits = CollectFormattedCredits(credits, langs);
var total = collectedCredits.Aggregate(new StringBuilder(), (all,credit) => {
all.AppendFormat("<p>{0}</p>{1}", credit, Environment.NewLine);
return all;
});
// Notify the user of images with missing credits.
if (missingCredits.Count > 0)
{
string dummyId;
var missing = LocalizationManager.GetString("EditTab.FrontMatter.PasteMissingCredits", "Missing credits:", "", langs, out dummyId);
var missingImage = LocalizationManager.GetString("EditTab.FrontMatter.ImageCreditMissing", " {0} (page {1})",
"The {0} is replaced by the filename of an image. The {1} is replaced by a reference to the first page in the book where that image occurs.", langs, out dummyId);
total.AppendFormat("<p>{0}", missing);
for (var i = 0; i < missingCredits.Count; ++i)
{
if (i > 0)
total.Append(",");
// List each uncredited image with the first page on which it appears.
total.AppendFormat(missingImage, missingCredits[i], imageNameToPages[missingCredits[i]].First());
}
total.AppendFormat("</p>{0}", System.Environment.NewLine);
}
request.ReplyWithText(total.ToString());
}
/// <summary>
/// Turns the credit -> pages dictionary into user-facing credit lines.
/// Internal for testing.
/// </summary>
/// <param name="credits">Key is metadata.MinimalCredits; Value is the list of pages whose images carry that credit.</param>
/// <param name="langs">Language priority list for localized strings; defaults to English.</param>
/// <returns>One formatted string per distinct credit source.</returns>
internal static IEnumerable<string> CollectFormattedCredits(Dictionary<string, List<string>> credits, IEnumerable<string> langs = null)
{
	if (langs == null)
		langs = new string[] { "en" };
	string idNotUsed;
	// We produce lines such as:
	//   Image on page 2 by John Doe, Copyright John Doe, 2016, CC-BY-NC. or
	//   Images on pages 1, 3-4 by Art of Reading, Copyright SIL International 2017, CC-BY-SA.
	// The goal is one string per credit source, listing the pages that apply.
	if (credits.Count == 1)
	{
		// Every image shares a single credit, so page numbers add nothing.
		// Like: Images by John Doe, Copyright John Doe, 2016, CC-BY-NC.
		var wholeBookFormat = LocalizationManager.GetString("EditTab.FrontMatter.BookSingleCredit", "Images by {0}.", "", langs, out idNotUsed);
		yield return string.Format(wholeBookFormat, credits.Keys.First());
		yield break;
	}
	var onePageFormat = LocalizationManager.GetString("EditTab.FrontMatter.SingleImageCredit", "Image on page {0} by {1}.", "", langs, out idNotUsed);
	var manyPagesFormat = LocalizationManager.GetString("EditTab.FrontMatter.MultipleImageCredit", "Images on pages {0} by {1}.", "", langs, out idNotUsed);
	foreach (var pair in credits) // we assume here that credits is built in page order
	{
		var pageNumbers = pair.Value;
		if (pageNumbers.Count == 1)
		{
			// a single page: use the singular format
			yield return string.Format(onePageFormat, pageNumbers[0], pair.Key);
		}
		else
		{
			// several pages: collapse them into a range list and use the plural format
			yield return string.Format(manyPagesFormat, CreateMultiplePageReference(pageNumbers), pair.Key);
		}
	}
}
// Formats a list of page references such as "1, 3–5, x7": consecutive numeric
// pages are collapsed into an en-dash range; non-numeric pages break any run.
private static string CreateMultiplePageReference(List<string> pages)
{
	const string ndash = "\u2013";
	var result = new StringBuilder(pages[0]);
	int lastNumericPage;
	if (!Int32.TryParse(pages[0], out lastNumericPage))
		lastNumericPage = -1; // sentinel: previous page was not numeric
	var inRange = false;
	for (var i = 1; i < pages.Count; i++)
	{
		var page = pages[i];
		int pageNumber;
		var isNumeric = Int32.TryParse(page, out pageNumber);
		if (isNumeric && pageNumber == lastNumericPage + 1)
		{
			// Page continues a consecutive run; open a range if we haven't already.
			if (!inRange)
			{
				inRange = true;
				result.Append(ndash);
			}
		}
		else
		{
			// Run broken: close any open range with its last page, then list this one.
			if (inRange)
			{
				result.Append(lastNumericPage.ToString());
				inRange = false;
			}
			result.Append(", " + page);
		}
		lastNumericPage = isNumeric ? pageNumber : -1;
	}
	if (inRange)
		result.Append(lastNumericPage.ToString());
	return result.ToString();
}
// True when 'page' parses as the integer immediately following prevPage,
// i.e. the two pages form (or extend) a consecutive numeric run.
private static bool PageShouldBeGroupedWithPreviousPage(int prevPage, string page)
{
	int pageNumber;
	return Int32.TryParse(page, out pageNumber) && pageNumber == prevPage + 1;
}
// Merges 'listOfPageUsages' into the entry for 'credit', creating the entry if needed.
// If the credit is already present, the new pages are appended to the existing list.
private static void BuildCreditsDictionary(Dictionary<string, List<string>> credits, string credit, List<string> listOfPageUsages)
{
	List<string> existingPages;
	if (credits.TryGetValue(credit, out existingPages))
	{
		// The list is held by reference, so appending to it already updates the
		// dictionary entry; the old re-assignment (credits[credit] = list) was redundant.
		existingPages.AddRange(listOfPageUsages);
	}
	else
	{
		credits.Add(credit, listOfPageUsages);
	}
}
/// <summary>
/// Determine whether or not a particular image should have its credits pasted.
/// </summary>
/// <param name="name">The image file name to check.</param>
/// <returns>
/// 'true' if 'name' is among the list of ones we don't want to paste image credits for;
/// includes the CC license image, placeholder and branding images (per _doNotPasteArray).
/// </returns>
private bool DoNotPasteCreditsImages(string name)
{
	// Lower-case once instead of twice; comparisons below are case-insensitive by design.
	var lowerName = name.ToLowerInvariant();
	return _doNotPasteArray.Contains(lowerName) || lowerName.StartsWith("placeholder");
}
/// <summary>
/// Returns a Dictionary that maps each image name to the list of page
/// numbers containing that image (no duplicates, in order of first occurrence).
/// </summary>
/// <param name="domBody">The body element of the book's DOM.</param>
/// <param name="langs">Language priorities used when reading page labels.</param>
public static Dictionary<string, List<string>> GetWhichImagesAreUsedOnWhichPages(XmlNode domBody, IEnumerable<string> langs)
{
	var pagesByImageName = new Dictionary<string, List<string>>();
	foreach (XmlElement img in HtmlDom.SelectChildImgAndBackgroundImageElements(domBody as XmlElement))
	{
		// Skip images outside any bloom-page and images supplied by branding.
		if (IsImgNotInAPage(img) || IsImgInsideBrandingElement(img))
			continue;
		var imageName = HtmlDom.GetImageElementUrl(img).PathOnly.NotEncoded;
		var pageNumber = HtmlDom.GetNumberOrLabelOfPageWhereElementLives(img, langs);
		if (string.IsNullOrWhiteSpace(pageNumber))
			continue; // This image is on a page with no pagenumber or something is drastically wrong.
		List<string> pagesForImage;
		if (!pagesByImageName.TryGetValue(imageName, out pagesForImage))
		{
			pagesByImageName.Add(imageName, new List<string> { pageNumber });
		}
		else if (!pagesForImage.Contains(pageNumber))
		{
			// Only record the first occurrence of an image on a given page.
			pagesForImage.Add(pageNumber);
		}
	}
	return pagesByImageName;
}
// True when the img element has no ancestor div whose class marks it as a
// bloom-page, i.e. the image lives outside any page of the book.
private static bool IsImgNotInAPage(XmlElement imgElement)
{
	var page = imgElement.SelectSingleNode("ancestor-or-self::div[contains(@class,'bloom-page')]");
	return !(page is XmlElement);
}
// True when the img element sits inside a div whose data-book attribute mentions
// branding; branding images are supplied by Bloom, not credited by the user.
private static bool IsImgInsideBrandingElement(XmlElement imgElement)
{
	var brandingAncestor = imgElement.SelectSingleNode("ancestor-or-self::div[contains(@data-book,'branding')]");
	return brandingAncestor is XmlElement;
}
/// <summary>
/// Get a json of stats about the image (name, bytes, width, height, bit depth).
/// It is used to populate a tooltip when you hover over an image container.
/// </summary>
private void HandleImageInfo(ApiRequest request)
{
	try
	{
		var fileName = request.RequiredParam("image");
		Guard.AgainstNull(_bookSelection.CurrentSelection, "CurrentBook");
		var path = Path.Combine(_bookSelection.CurrentSelection.FolderPath, fileName);
		while (!RobustFile.Exists(path) && fileName.Contains('%'))
		{
			var fileName1 = fileName;
			// We can be fed doubly-encoded filenames. So try to decode a second time and see if that works.
			// See https://silbloom.myjetbrains.com/youtrack/issue/BL-3749.
			// Effectively triple-encoded filenames have cropped up for particular books. Such files are
			// already handled okay by BloomServer.ProcessAnyFileContent(). This code can handle
			// any depth of url-encoding.
			// See https://silbloom.myjetbrains.com/youtrack/issue/BL-5757.
			fileName = System.Web.HttpUtility.UrlDecode(fileName);
			if (fileName == fileName1)
				break;
			path = Path.Combine(_bookSelection.CurrentSelection.FolderPath, fileName);
		}
		dynamic result = new ExpandoObject();
		result.name = fileName;
		// Missing files and svg files both get placeholder stats: we can't (or don't
		// try to) measure dimensions or bit depth for either, so the two formerly
		// duplicated branches are merged here.
		if (!RobustFile.Exists(path) || path.EndsWith("svg", StringComparison.OrdinalIgnoreCase))
		{
			result.bytes = -1;
			result.width = -1;
			result.height = -1;
			result.bitDepth = "unknown";
		}
		else
		{
			var fileInfo = new FileInfo(path);
			result.bytes = fileInfo.Length;
			// Using a stream this way, according to one source,
			// http://stackoverflow.com/questions/552467/how-do-i-reliably-get-an-image-dimensions-in-net-without-loading-the-image,
			// supposedly avoids loading the image into memory when we only want its dimensions
			using (var stream = RobustFile.OpenRead(path))
			using (var img = Image.FromStream(stream, false, false))
			{
				result.width = img.Width;
				result.height = img.Height;
				switch (img.PixelFormat)
				{
					case PixelFormat.Format32bppArgb:
					case PixelFormat.Format32bppRgb:
					case PixelFormat.Format32bppPArgb:
						result.bitDepth = "32";
						break;
					case PixelFormat.Format24bppRgb:
						result.bitDepth = "24";
						break;
					case PixelFormat.Format16bppArgb1555:
					case PixelFormat.Format16bppGrayScale:
					// The two 16-bit RGB formats were previously falling through to "unknown".
					case PixelFormat.Format16bppRgb555:
					case PixelFormat.Format16bppRgb565:
						result.bitDepth = "16";
						break;
					case PixelFormat.Format8bppIndexed:
						result.bitDepth = "8";
						break;
					// 4-bit indexed images were previously reported as "unknown".
					case PixelFormat.Format4bppIndexed:
						result.bitDepth = "4";
						break;
					case PixelFormat.Format1bppIndexed:
						result.bitDepth = "1";
						break;
					default:
						result.bitDepth = "unknown";
						break;
				}
			}
		}
		request.ReplyWithJson((object)result);
	}
	catch (Exception e)
	{
		Logger.WriteError("Error in server imageInfo/: url was " + request.LocalPath(), e);
		request.Failed(e.Message);
	}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
namespace Prism.Modularity
{
/// <summary>
/// Loads modules from an arbitrary location on the filesystem. This typeloader is only called if
/// <see cref="ModuleInfo"/> classes have a Ref parameter that starts with "file://".
/// This class is only used on the Desktop version of the Prism Library.
/// </summary>
public class FileModuleTypeLoader : IModuleTypeLoader, IDisposable
{
    private const string RefFilePrefix = "file://";
    private readonly IAssemblyResolver assemblyResolver;
    // Set of module Refs already loaded successfully; guarded by locking the set itself.
    private readonly HashSet<Uri> downloadedUris = new HashSet<Uri>();

    /// <summary>
    /// Initializes a new instance of the <see cref="FileModuleTypeLoader"/> class.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "This is disposed of in the Dispose method.")]
    public FileModuleTypeLoader()
        : this(new AssemblyResolver())
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="FileModuleTypeLoader"/> class.
    /// </summary>
    /// <param name="assemblyResolver">The assembly resolver.</param>
    public FileModuleTypeLoader(IAssemblyResolver assemblyResolver)
    {
        this.assemblyResolver = assemblyResolver;
    }

    /// <summary>
    /// Raised repeatedly to provide progress as modules are loaded in the background.
    /// </summary>
    public event EventHandler<ModuleDownloadProgressChangedEventArgs> ModuleDownloadProgressChanged;

    private void RaiseModuleDownloadProgressChanged(ModuleInfo moduleInfo, long bytesReceived, long totalBytesToReceive)
    {
        this.RaiseModuleDownloadProgressChanged(new ModuleDownloadProgressChangedEventArgs(moduleInfo, bytesReceived, totalBytesToReceive));
    }

    private void RaiseModuleDownloadProgressChanged(ModuleDownloadProgressChangedEventArgs e)
    {
        // ?.Invoke reads the delegate once, avoiding the race where the last handler
        // is removed between a null check and the invocation.
        this.ModuleDownloadProgressChanged?.Invoke(this, e);
    }

    /// <summary>
    /// Raised when a module is loaded or fails to load.
    /// </summary>
    public event EventHandler<LoadModuleCompletedEventArgs> LoadModuleCompleted;

    private void RaiseLoadModuleCompleted(ModuleInfo moduleInfo, Exception error)
    {
        this.RaiseLoadModuleCompleted(new LoadModuleCompletedEventArgs(moduleInfo, error));
    }

    private void RaiseLoadModuleCompleted(LoadModuleCompletedEventArgs e)
    {
        // Thread-safe raise; see RaiseModuleDownloadProgressChanged.
        this.LoadModuleCompleted?.Invoke(this, e);
    }

    /// <summary>
    /// Evaluates the <see cref="ModuleInfo.Ref"/> property to see if the current typeloader will be able to retrieve the <paramref name="moduleInfo"/>.
    /// Returns true if the <see cref="ModuleInfo.Ref"/> property starts with "file://", because this indicates that the file
    /// is a local file.
    /// </summary>
    /// <param name="moduleInfo">Module that should have it's type loaded.</param>
    /// <returns>
    /// <see langword="true"/> if the current typeloader is able to retrieve the module, otherwise <see langword="false"/>.
    /// </returns>
    /// <exception cref="ArgumentNullException">An <see cref="ArgumentNullException"/> is thrown if <paramref name="moduleInfo"/> is null.</exception>
    public bool CanLoadModuleType(ModuleInfo moduleInfo)
    {
        if (moduleInfo == null)
        {
            throw new ArgumentNullException(nameof(moduleInfo));
        }

        return moduleInfo.Ref != null && moduleInfo.Ref.StartsWith(RefFilePrefix, StringComparison.Ordinal);
    }

    /// <summary>
    /// Retrieves the <paramref name="moduleInfo"/>.
    /// </summary>
    /// <param name="moduleInfo">Module that should have it's type loaded.</param>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Exception is rethrown as part of a completion event")]
    public void LoadModuleType(ModuleInfo moduleInfo)
    {
        if (moduleInfo == null)
        {
            throw new ArgumentNullException(nameof(moduleInfo));
        }

        try
        {
            Uri uri = new Uri(moduleInfo.Ref, UriKind.RelativeOrAbsolute);

            // If this module has already been downloaded, I fire the completed event.
            if (this.IsSuccessfullyDownloaded(uri))
            {
                this.RaiseLoadModuleCompleted(moduleInfo, null);
            }
            else
            {
                // Strip the "file://" (and an optional extra "/") to get a filesystem path.
                string path;
                if (moduleInfo.Ref.StartsWith(RefFilePrefix + "/", StringComparison.Ordinal))
                {
                    path = moduleInfo.Ref.Substring(RefFilePrefix.Length + 1);
                }
                else
                {
                    path = moduleInfo.Ref.Substring(RefFilePrefix.Length);
                }

                long fileSize = -1L;
                if (File.Exists(path))
                {
                    FileInfo fileInfo = new FileInfo(path);
                    fileSize = fileInfo.Length;
                }

                // Although this isn't asynchronous, nor expected to take very long, I raise progress changed for consistency.
                this.RaiseModuleDownloadProgressChanged(moduleInfo, 0, fileSize);

                this.assemblyResolver.LoadAssemblyFrom(moduleInfo.Ref);

                // Although this isn't asynchronous, nor expected to take very long, I raise progress changed for consistency.
                this.RaiseModuleDownloadProgressChanged(moduleInfo, fileSize, fileSize);

                // I remember the downloaded URI.
                this.RecordDownloadSuccess(uri);

                this.RaiseLoadModuleCompleted(moduleInfo, null);
            }
        }
        catch (Exception ex)
        {
            // Failures are reported through the completion event rather than thrown.
            this.RaiseLoadModuleCompleted(moduleInfo, ex);
        }
    }

    private bool IsSuccessfullyDownloaded(Uri uri)
    {
        lock (this.downloadedUris)
        {
            return this.downloadedUris.Contains(uri);
        }
    }

    private void RecordDownloadSuccess(Uri uri)
    {
        lock (this.downloadedUris)
        {
            this.downloadedUris.Add(uri);
        }
    }

    #region Implementation of IDisposable

    /// <summary>
    /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
    /// </summary>
    /// <remarks>Calls <see cref="Dispose(bool)"/></remarks>.
    /// <filterpriority>2</filterpriority>
    public void Dispose()
    {
        this.Dispose(true);
        GC.SuppressFinalize(this);
    }

    /// <summary>
    /// Disposes the associated <see cref="AssemblyResolver"/>.
    /// </summary>
    /// <param name="disposing">When <see langword="true"/>, it is being called from the Dispose method.</param>
    protected virtual void Dispose(bool disposing)
    {
        IDisposable disposableResolver = this.assemblyResolver as IDisposable;
        if (disposableResolver != null)
        {
            disposableResolver.Dispose();
        }
    }

    #endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Test.Cryptography;
using Xunit;
namespace System.Security.Cryptography.X509Certificates.Tests
{
public static class PublicKeyTests
{
    // Note: PublicKey does not own the certificate's lifetime, so these helpers
    // may dispose the certificate and still return a usable PublicKey.
    private static PublicKey GetTestRsaKey()
    {
        using (var cert = new X509Certificate2(TestData.MsCertificate))
        {
            return cert.PublicKey;
        }
    }

    private static PublicKey GetTestDsaKey()
    {
        using (var cert = new X509Certificate2(TestData.DssCer))
        {
            return cert.PublicKey;
        }
    }

    private static PublicKey GetTestECDsaKey()
    {
        using (var cert = new X509Certificate2(TestData.ECDsa256Certificate))
        {
            return cert.PublicKey;
        }
    }

    /// <summary>
    /// First parameter is the cert, the second is a hash of "Hello"
    /// </summary>
    public static IEnumerable<object[]> BrainpoolCurves
    {
        get
        {
            yield return new object[] {
                TestData.ECDsabrainpoolP160r1_CertificatePemBytes,
                "9145C79DD4DF758EB377D13B0DB81F83CE1A63A4099DDC32FE228B06EB1F306423ED61B6B4AF4691".HexToByteArray() };

            yield return new object[] {
                TestData.ECDsabrainpoolP160r1_ExplicitCertificatePemBytes,
                "6D74F1C9BCBBA5A25F67E670B3DABDB36C24E8FAC3266847EB2EE7E3239208ADC696BB421AB380B4".HexToByteArray() };
        }
    }

    [Fact]
    public static void TestOid_RSA()
    {
        PublicKey pk = GetTestRsaKey();
        Assert.Equal("1.2.840.113549.1.1.1", pk.Oid.Value);
    }

    [Fact]
    public static void TestOid_DSA()
    {
        PublicKey pk = GetTestDsaKey();
        Assert.Equal("1.2.840.10040.4.1", pk.Oid.Value);
    }

    [Fact]
    public static void TestOid_ECDSA()
    {
        PublicKey pk = GetTestECDsaKey();
        Assert.Equal("1.2.840.10045.2.1", pk.Oid.Value);
    }

    [Fact]
    public static void TestPublicKey_Key_RSA()
    {
        PublicKey pk = GetTestRsaKey();
        using (AsymmetricAlgorithm alg = pk.Key)
        {
            Assert.NotNull(alg);
            Assert.Same(alg, pk.Key);
            Assert.Equal(2048, alg.KeySize);

            Assert.IsAssignableFrom(typeof(RSA), alg);
            VerifyKey_RSA( /* cert */ null, (RSA)alg);
        }
    }

    [Fact]
    public static void TestPublicKey_Key_DSA()
    {
        PublicKey pk = GetTestDsaKey();
        using (AsymmetricAlgorithm alg = pk.Key)
        {
            Assert.NotNull(alg);
            Assert.Same(alg, pk.Key);
            Assert.Equal(1024, alg.KeySize);

            Assert.IsAssignableFrom(typeof(DSA), alg);
            VerifyKey_DSA((DSA)alg);
        }
    }

    [Fact]
    public static void TestPublicKey_Key_ECDSA()
    {
        PublicKey pk = GetTestECDsaKey();

        // PublicKey.Key does not support ECDSA; callers must use GetECDsaPublicKey.
        Assert.Throws<NotSupportedException>(() => pk.Key);
    }

    private static void VerifyKey_DSA(DSA dsa)
    {
        DSAParameters dsaParameters = dsa.ExportParameters(false);

        byte[] expected_g = (
            "859B5AEB351CF8AD3FABAC22AE0350148FD1D55128472691709EC08481584413" +
            "E9E5E2F61345043B05D3519D88C021582CCEF808AF8F4B15BD901A310FEFD518" +
            "AF90ABA6F85F6563DB47AE214A84D0B7740C9394AA8E3C7BFEF1BEEDD0DAFDA0" +
            "79BF75B2AE4EDB7480C18B9CDFA22E68A06C0685785F5CFB09C2B80B1D05431D").HexToByteArray();
        byte[] expected_p = (
            "871018CC42552D14A5A9286AF283F3CFBA959B8835EC2180511D0DCEB8B97928" +
            "5708C800FC10CB15337A4AC1A48ED31394072015A7A6B525986B49E5E1139737" +
            "A794833C1AA1E0EAAA7E9D4EFEB1E37A65DBC79F51269BA41E8F0763AA613E29" +
            "C81C3B977AEEB3D3C3F6FEB25C270CDCB6AEE8CD205928DFB33C44D2F2DBE819").HexToByteArray();
        byte[] expected_q = "E241EDCF37C1C0E20AADB7B4E8FF7AA8FDE4E75D".HexToByteArray();
        byte[] expected_y = (
            "089A43F439B924BEF3529D8D6206D1FCA56A55CAF52B41D6CE371EBF07BDA132" +
            "C8EADC040007FCF4DA06C1F30504EBD8A77D301F5A4702F01F0D2A0707AC1DA3" +
            "8DD3251883286E12456234DA62EDA0DF5FE2FA07CD5B16F3638BECCA7786312D" +
            "A7D3594A4BB14E353884DA0E9AECB86E3C9BDB66FCA78EA85E1CC3F2F8BF0963").HexToByteArray();

        Assert.Equal(expected_g, dsaParameters.G);
        Assert.Equal(expected_p, dsaParameters.P);
        Assert.Equal(expected_q, dsaParameters.Q);
        Assert.Equal(expected_y, dsaParameters.Y);
    }

    [Fact]
    public static void TestEncodedKeyValue_RSA()
    {
        byte[] expectedPublicKey = (
            "3082010a0282010100e8af5ca2200df8287cbc057b7fadeeeb76ac28533f3adb" +
            "407db38e33e6573fa551153454a5cfb48ba93fa837e12d50ed35164eef4d7adb" +
            "137688b02cf0595ca9ebe1d72975e41b85279bf3f82d9e41362b0b40fbbe3bba" +
            "b95c759316524bca33c537b0f3eb7ea8f541155c08651d2137f02cba220b10b1" +
            "109d772285847c4fb91b90b0f5a3fe8bf40c9a4ea0f5c90a21e2aae3013647fd" +
            "2f826a8103f5a935dc94579dfb4bd40e82db388f12fee3d67a748864e162c425" +
            "2e2aae9d181f0e1eb6c2af24b40e50bcde1c935c49a679b5b6dbcef9707b2801" +
            "84b82a29cfbfa90505e1e00f714dfdad5c238329ebc7c54ac8e82784d37ec643" +
            "0b950005b14f6571c50203010001").HexToByteArray();

        PublicKey pk = GetTestRsaKey();
        Assert.Equal(expectedPublicKey, pk.EncodedKeyValue.RawData);
    }

    [Fact]
    public static void TestEncodedKeyValue_DSA()
    {
        byte[] expectedPublicKey = (
            "028180089a43f439b924bef3529d8d6206d1fca56a55caf52b41d6ce371ebf07" +
            "bda132c8eadc040007fcf4da06c1f30504ebd8a77d301f5a4702f01f0d2a0707" +
            "ac1da38dd3251883286e12456234da62eda0df5fe2fa07cd5b16f3638becca77" +
            "86312da7d3594a4bb14e353884da0e9aecb86e3c9bdb66fca78ea85e1cc3f2f8" +
            "bf0963").HexToByteArray();

        PublicKey pk = GetTestDsaKey();
        Assert.Equal(expectedPublicKey, pk.EncodedKeyValue.RawData);
    }

    [Fact]
    public static void TestEncodedKeyValue_ECDSA()
    {
        // Uncompressed key (04), then the X coord, then the Y coord.
        string expectedPublicKeyHex =
            "04" +
            "448D98EE08AEBA0D8B40F3C6DBD500E8B69F07C70C661771655228EA5A178A91" +
            "0EF5CB1759F6F2E062021D4F973F5BB62031BE87AE915CFF121586809E3219AF";

        PublicKey pk = GetTestECDsaKey();
        Assert.Equal(expectedPublicKeyHex, pk.EncodedKeyValue.RawData.ByteArrayToHex());
    }

    [Fact]
    public static void TestEncodedParameters_RSA()
    {
        PublicKey pk = GetTestRsaKey();

        // RSA has no key parameters, so the answer is always
        // DER:NULL (type 0x05, length 0x00)
        Assert.Equal(new byte[] { 0x05, 0x00 }, pk.EncodedParameters.RawData);
    }

    [Fact]
    public static void TestEncodedParameters_DSA()
    {
        byte[] expectedParameters = (
            "3082011F02818100871018CC42552D14A5A9286AF283F3CFBA959B8835EC2180" +
            "511D0DCEB8B979285708C800FC10CB15337A4AC1A48ED31394072015A7A6B525" +
            "986B49E5E1139737A794833C1AA1E0EAAA7E9D4EFEB1E37A65DBC79F51269BA4" +
            "1E8F0763AA613E29C81C3B977AEEB3D3C3F6FEB25C270CDCB6AEE8CD205928DF" +
            "B33C44D2F2DBE819021500E241EDCF37C1C0E20AADB7B4E8FF7AA8FDE4E75D02" +
            "818100859B5AEB351CF8AD3FABAC22AE0350148FD1D55128472691709EC08481" +
            "584413E9E5E2F61345043B05D3519D88C021582CCEF808AF8F4B15BD901A310F" +
            "EFD518AF90ABA6F85F6563DB47AE214A84D0B7740C9394AA8E3C7BFEF1BEEDD0" +
            "DAFDA079BF75B2AE4EDB7480C18B9CDFA22E68A06C0685785F5CFB09C2B80B1D" +
            "05431D").HexToByteArray();

        PublicKey pk = GetTestDsaKey();
        Assert.Equal(expectedParameters, pk.EncodedParameters.RawData);
    }

    [Fact]
    public static void TestEncodedParameters_ECDSA()
    {
        // OID: 1.2.840.10045.3.1.7
        string expectedParametersHex = "06082A8648CE3D030107";

        PublicKey pk = GetTestECDsaKey();
        Assert.Equal(expectedParametersHex, pk.EncodedParameters.RawData.ByteArrayToHex());
    }

    [Fact]
    public static void TestKey_RSA()
    {
        using (X509Certificate2 cert = new X509Certificate2(TestData.MsCertificate))
        // GetRSAPublicKey returns a new object each call; dispose it (was leaked before).
        using (RSA rsa = cert.GetRSAPublicKey())
        {
            VerifyKey_RSA(cert, rsa);
        }
    }

    private static void VerifyKey_RSA(X509Certificate2 cert, RSA rsa)
    {
        RSAParameters rsaParameters = rsa.ExportParameters(false);

        byte[] expectedModulus = (
            "E8AF5CA2200DF8287CBC057B7FADEEEB76AC28533F3ADB407DB38E33E6573FA5" +
            "51153454A5CFB48BA93FA837E12D50ED35164EEF4D7ADB137688B02CF0595CA9" +
            "EBE1D72975E41B85279BF3F82D9E41362B0B40FBBE3BBAB95C759316524BCA33" +
            "C537B0F3EB7EA8F541155C08651D2137F02CBA220B10B1109D772285847C4FB9" +
            "1B90B0F5A3FE8BF40C9A4EA0F5C90A21E2AAE3013647FD2F826A8103F5A935DC" +
            "94579DFB4BD40E82DB388F12FEE3D67A748864E162C4252E2AAE9D181F0E1EB6" +
            "C2AF24B40E50BCDE1C935C49A679B5B6DBCEF9707B280184B82A29CFBFA90505" +
            "E1E00F714DFDAD5C238329EBC7C54AC8E82784D37EC6430B950005B14F6571C5").HexToByteArray();

        byte[] expectedExponent = new byte[] { 0x01, 0x00, 0x01 };

        byte[] originalModulus = rsaParameters.Modulus;
        byte[] originalExponent = rsaParameters.Exponent;

        // Diagnostic retries: if the exported parameters are wrong, re-export (and
        // re-fetch the key when a cert is available) to log whether the corruption
        // is transient before the assertions below report the original values.
        if (!expectedModulus.SequenceEqual(rsaParameters.Modulus) ||
            !expectedExponent.SequenceEqual(rsaParameters.Exponent))
        {
            Console.WriteLine("Modulus or Exponent not equal");

            rsaParameters = rsa.ExportParameters(false);

            if (!expectedModulus.SequenceEqual(rsaParameters.Modulus) ||
                !expectedExponent.SequenceEqual(rsaParameters.Exponent))
            {
                Console.WriteLine("Second call to ExportParameters did not produce valid data either");
            }

            if (cert != null)
            {
                rsa = cert.GetRSAPublicKey();
                rsaParameters = rsa.ExportParameters(false);

                if (!expectedModulus.SequenceEqual(rsaParameters.Modulus) ||
                    !expectedExponent.SequenceEqual(rsaParameters.Exponent))
                {
                    Console.WriteLine("New key handle ExportParameters was not successful either");
                }
            }
        }

        Assert.Equal(expectedModulus, originalModulus);
        Assert.Equal(expectedExponent, originalExponent);
    }

    [Fact]
    public static void TestKey_RSA384_ValidatesSignature()
    {
        byte[] signature =
        {
            0x79, 0xD9, 0x3C, 0xBF, 0x54, 0xFA, 0x55, 0x8C,
            0x44, 0xC3, 0xC3, 0x83, 0x85, 0xBB, 0x78, 0x44,
            0xCD, 0x0F, 0x5A, 0x8E, 0x71, 0xC9, 0xC2, 0x68,
            0x68, 0x0A, 0x33, 0x93, 0x19, 0x37, 0x02, 0x06,
            0xE2, 0xF7, 0x67, 0x97, 0x3C, 0x67, 0xB3, 0xF4,
            0x11, 0xE0, 0x6E, 0xD2, 0x22, 0x75, 0xE7, 0x7C,
        };

        byte[] helloBytes = Encoding.ASCII.GetBytes("Hello");

        using (var cert = new X509Certificate2(TestData.Rsa384CertificatePemBytes))
        using (RSA rsa = cert.GetRSAPublicKey())
        {
            Assert.True(rsa.VerifyData(helloBytes, signature, HashAlgorithmName.SHA1, RSASignaturePadding.Pkcs1));
        }
    }

    [Theory, MemberData(nameof(BrainpoolCurves))]
    [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "brainpool GetECDsaPublicKey fails on current netfx")]
    public static void TestKey_ECDsabrainpool_PublicKey(byte[] curveData, byte[] notUsed)
    {
        byte[] helloBytes = Encoding.ASCII.GetBytes("Hello");

        try
        {
            using (var cert = new X509Certificate2(curveData))
            {
                using (ECDsa ec = cert.GetECDsaPublicKey())
                {
                    Assert.Equal(160, ec.KeySize);

                    // The public key should be unable to sign.
                    Assert.ThrowsAny<CryptographicException>(() => ec.SignData(helloBytes, HashAlgorithmName.SHA256));
                }
            }
        }
        catch (CryptographicException)
        {
            // Windows 7, Windows 8, Ubuntu 14, CentOS can fail. Verify known good platforms don't fail.
            Assert.False(PlatformDetection.IsWindows && PlatformDetection.WindowsVersion >= 10);
            Assert.False(PlatformDetection.IsUbuntu && !PlatformDetection.IsUbuntu1404);
        }
    }

    [Fact]
    public static void TestECDsaPublicKey()
    {
        byte[] helloBytes = Encoding.ASCII.GetBytes("Hello");

        using (var cert = new X509Certificate2(TestData.ECDsa384Certificate))
        using (ECDsa publicKey = cert.GetECDsaPublicKey())
        {
            Assert.Equal(384, publicKey.KeySize);

            // The public key should be unable to sign.
            Assert.ThrowsAny<CryptographicException>(() => publicKey.SignData(helloBytes, HashAlgorithmName.SHA256));
        }
    }

    [Fact]
    public static void TestECDsaPublicKey_ValidatesSignature()
    {
        // This signature was produced as the output of ECDsaCng.SignData with the same key
        // on .NET 4.6.  Ensure it is verified here as a data compatibility test.
        //
        // Note that since ECDSA signatures contain randomness as an input, this value is unlikely
        // to be reproduced by another equivalent program.
        byte[] existingSignature =
        {
            // r:
            0x7E, 0xD7, 0xEF, 0x46, 0x04, 0x92, 0x61, 0x27,
            0x9F, 0xC9, 0x1B, 0x7B, 0x8A, 0x41, 0x6A, 0xC6,
            0xCF, 0xD4, 0xD4, 0xD1, 0x73, 0x05, 0x1F, 0xF3,
            0x75, 0xB2, 0x13, 0xFA, 0x82, 0x2B, 0x55, 0x11,
            0xBE, 0x57, 0x4F, 0x20, 0x07, 0x24, 0xB7, 0xE5,
            0x24, 0x44, 0x33, 0xC3, 0xB6, 0x8F, 0xBC, 0x1F,

            // s:
            0x48, 0x57, 0x25, 0x39, 0xC0, 0x84, 0xB9, 0x0E,
            0xDA, 0x32, 0x35, 0x16, 0xEF, 0xA0, 0xE2, 0x34,
            0x35, 0x7E, 0x10, 0x38, 0xA5, 0xE4, 0x8B, 0xD3,
            0xFC, 0xE7, 0x60, 0x25, 0x4E, 0x63, 0xF7, 0xDB,
            0x7C, 0xBF, 0x18, 0xD6, 0xD3, 0x49, 0xD0, 0x93,
            0x08, 0xC5, 0xAA, 0xA6, 0xE5, 0xFD, 0xD0, 0x96,
        };

        byte[] helloBytes = Encoding.ASCII.GetBytes("Hello");

        using (var cert = new X509Certificate2(TestData.ECDsa384Certificate))
        using (ECDsa publicKey = cert.GetECDsaPublicKey())
        {
            Assert.Equal(384, publicKey.KeySize);

            bool isSignatureValid = publicKey.VerifyData(helloBytes, existingSignature, HashAlgorithmName.SHA256);
            Assert.True(isSignatureValid, "isSignatureValid");
        }
    }

    [Theory, MemberData(nameof(BrainpoolCurves))]
    [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "brainpool GetECDsaPublicKey fails on current netfx")]
    public static void TestECDsaPublicKey_BrainpoolP160r1_ValidatesSignature(byte[] curveData, byte[] existingSignature)
    {
        byte[] helloBytes = Encoding.ASCII.GetBytes("Hello");

        try
        {
            using (var cert = new X509Certificate2(curveData))
            {
                using (ECDsa publicKey = cert.GetECDsaPublicKey())
                {
                    Assert.Equal(160, publicKey.KeySize);

                    // It is an Elliptic Curve Cryptography public key.
                    Assert.Equal("1.2.840.10045.2.1", cert.PublicKey.Oid.Value);

                    bool isSignatureValid = publicKey.VerifyData(helloBytes, existingSignature, HashAlgorithmName.SHA256);
                    Assert.True(isSignatureValid, "isSignatureValid");

                    // Flip the last byte of the signature; verification must now fail.
                    unchecked
                    {
                        --existingSignature[existingSignature.Length - 1];
                    }

                    isSignatureValid = publicKey.VerifyData(helloBytes, existingSignature, HashAlgorithmName.SHA256);
                    Assert.False(isSignatureValid, "isSignatureValidNeg");
                }
            }
        }
        catch (CryptographicException)
        {
            // Windows 7, Windows 8, Ubuntu 14, CentOS can fail. Verify known good platforms don't fail.
            Assert.False(PlatformDetection.IsWindows && PlatformDetection.WindowsVersion >= 10);
            Assert.False(PlatformDetection.IsUbuntu && !PlatformDetection.IsUbuntu1404);
        }
    }

    [Fact]
    public static void TestECDsaPublicKey_NonSignatureCert()
    {
        using (var cert = new X509Certificate2(TestData.EccCert_KeyAgreement))
        using (ECDsa publicKey = cert.GetECDsaPublicKey())
        {
            // It is an Elliptic Curve Cryptography public key.
            Assert.Equal("1.2.840.10045.2.1", cert.PublicKey.Oid.Value);

            // But, due to KeyUsage, it shouldn't be used for ECDSA.
            Assert.Null(publicKey);
        }
    }

    [Fact]
    public static void TestECDsa224PublicKey()
    {
        using (var cert = new X509Certificate2(TestData.ECDsa224Certificate))
        {
            // It is an Elliptic Curve Cryptography public key.
            Assert.Equal("1.2.840.10045.2.1", cert.PublicKey.Oid.Value);

            ECDsa ecdsa;

            try
            {
                ecdsa = cert.GetECDsaPublicKey();
            }
            catch (CryptographicException)
            {
                // Windows 7, Windows 8, CentOS.
                return;
            }

            // Other Unix
            using (ecdsa)
            {
                byte[] data = ByteUtils.AsciiBytes("Hello");

                byte[] signature = (
                    // r
                    "8ede5053d546d35c1aba829bca3ecf493eb7a73f751548bd4cf2ad10" +
                    // s
                    "5e3da9d359001a6be18e2b4e49205e5219f30a9daeb026159f41b9de").HexToByteArray();

                Assert.True(ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA1));
            }
        }
    }

#if !NO_DSA_AVAILABLE
    [Fact]
    public static void TestDSAPublicKey()
    {
        using (var cert = new X509Certificate2(TestData.DssCer))
        using (DSA pubKey = cert.GetDSAPublicKey())
        {
            Assert.NotNull(pubKey);
            VerifyKey_DSA(pubKey);
        }
    }

    [Fact]
    public static void TestDSAPublicKey_VerifiesSignature()
    {
        byte[] data = { 1, 2, 3, 4, 5 };
        byte[] wrongData = { 0xFE, 2, 3, 4, 5 };
        byte[] signature =
            "B06E26CFC939F25B864F52ABD3288222363A164259B0027FFC95DBC88F9204F7A51A901F3005C9F7".HexToByteArray();

        using (var cert = new X509Certificate2(TestData.Dsa1024Cert))
        using (DSA pubKey = cert.GetDSAPublicKey())
        {
            Assert.True(pubKey.VerifyData(data, signature, HashAlgorithmName.SHA1), "pubKey verifies signature");
            Assert.False(pubKey.VerifyData(wrongData, signature, HashAlgorithmName.SHA1), "pubKey verifies tampered data");

            signature[0] ^= 0xFF;
            Assert.False(pubKey.VerifyData(data, signature, HashAlgorithmName.SHA1), "pubKey verifies tampered signature");
        }
    }

    [Fact]
    public static void TestDSAPublicKey_RSACert()
    {
        using (var cert = new X509Certificate2(TestData.Rsa384CertificatePemBytes))
        using (DSA pubKey = cert.GetDSAPublicKey())
        {
            Assert.Null(pubKey);
        }
    }

    [Fact]
    public static void TestDSAPublicKey_ECDSACert()
    {
        using (var cert = new X509Certificate2(TestData.ECDsa256Certificate))
        using (DSA pubKey = cert.GetDSAPublicKey())
        {
            Assert.Null(pubKey);
        }
    }
#endif

    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)]  // Uses P/Invokes
    public static void TestKey_ECDsaCng256()
    {
        TestKey_ECDsaCng(TestData.ECDsa256Certificate, TestData.ECDsaCng256PublicKey);
    }

    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)]  // Uses P/Invokes
    public static void TestKey_ECDsaCng384()
    {
        TestKey_ECDsaCng(TestData.ECDsa384Certificate, TestData.ECDsaCng384PublicKey);
    }

    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)]  // Uses P/Invokes
    public static void TestKey_ECDsaCng521()
    {
        TestKey_ECDsaCng(TestData.ECDsa521Certificate, TestData.ECDsaCng521PublicKey);
    }

    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)]  // Uses P/Invokes
    [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "brainpool GetECDsaPublicKey fails on current netfx")]
    public static void TestKey_BrainpoolP160r1()
    {
        if (PlatformDetection.WindowsVersion >= 10)
        {
            TestKey_ECDsaCng(TestData.ECDsabrainpoolP160r1_CertificatePemBytes, TestData.ECDsabrainpoolP160r1_PublicKey);
        }
    }

    // Exports the cert's ECDSA public key as a CNG ECC public blob and compares the
    // Q.X / Q.Y coordinates against the expected values.
    private static void TestKey_ECDsaCng(byte[] certBytes, TestData.ECDsaCngKeyValues expected)
    {
        using (X509Certificate2 cert = new X509Certificate2(certBytes))
        // Dispose the ECDsaCng and its CngKey; they were leaked before.
        using (ECDsaCng e = (ECDsaCng)(cert.GetECDsaPublicKey()))
        using (CngKey k = e.Key)
        {
            byte[] blob = k.Export(CngKeyBlobFormat.EccPublicBlob);
            using (BinaryReader br = new BinaryReader(new MemoryStream(blob)))
            {
                // Skip the BCRYPT_ECCKEY_BLOB magic to reach the key length.
                br.ReadInt32();
                int cbKey = br.ReadInt32();
                Assert.Equal(expected.QX.Length, cbKey);

                byte[] qx = br.ReadBytes(cbKey);
                byte[] qy = br.ReadBytes(cbKey);
                Assert.Equal<byte>(expected.QX, qx);
                Assert.Equal<byte>(expected.QY, qy);
            }
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web.Mvc;
using Orchard.ContentManagement;
using Orchard.ContentManagement.MetaData;
using Orchard.ContentManagement.MetaData.Models;
using Orchard.Core.Common.Models;
using Orchard.Core.Containers.Models;
using Orchard.Core.Containers.Services;
using Orchard.Core.Containers.ViewModels;
using Orchard.Core.Contents;
using Orchard.Core.Contents.ViewModels;
using Orchard.Core.Title.Models;
using Orchard.Data;
using Orchard.DisplayManagement;
using Orchard.Lists.Helpers;
using Orchard.Lists.ViewModels;
using Orchard.Localization;
using Orchard.Logging;
using Orchard.Mvc;
using Orchard.Mvc.Extensions;
using Orchard.UI.Navigation;
using Orchard.UI.Notify;
using ContentOptions = Orchard.Lists.ViewModels.ContentOptions;
using ContentsBulkAction = Orchard.Lists.ViewModels.ContentsBulkAction;
using ListContentsViewModel = Orchard.Lists.ViewModels.ListContentsViewModel;
namespace Orchard.Lists.Controllers {
/// <summary>
/// Admin UI controller for Orchard Lists: browsing container content items,
/// listing/ordering the items inside a container, and performing bulk
/// publish/unpublish/remove/move operations.
/// </summary>
public class AdminController : Controller {
    private readonly IContentManager _contentManager;
    private readonly IContentDefinitionManager _contentDefinitionManager;
    private readonly IOrchardServices _services;
    private readonly IContainerService _containerService;
    private readonly IListViewService _listViewService;
    private readonly ITransactionManager _transactionManager;

    public AdminController(
        IOrchardServices services,
        IContentDefinitionManager contentDefinitionManager,
        IShapeFactory shapeFactory,
        IContainerService containerService,
        IListViewService listViewService,
        ITransactionManager transactionManager) {
        _services = services;
        _contentManager = services.ContentManager;
        _contentDefinitionManager = contentDefinitionManager;
        // Property-injected defaults, replaced by the Orchard IoC container.
        T = NullLocalizer.Instance;
        Logger = NullLogger.Instance;
        Shape = shapeFactory;
        _containerService = containerService;
        _listViewService = listViewService;
        _transactionManager = transactionManager;
    }

    public Localizer T { get; set; }
    public ILogger Logger { get; set; }
    dynamic Shape { get; set; }

    /// <summary>
    /// Displays the paged list of container content items, optionally filtered
    /// by content type and ordered per the supplied options.
    /// </summary>
    public ActionResult Index(Core.Contents.ViewModels.ListContentsViewModel model, PagerParameters pagerParameters) {
        var query = _containerService.GetContainersQuery(VersionOptions.Latest);
        if (!String.IsNullOrEmpty(model.TypeName)) {
            var contentTypeDefinition = _contentDefinitionManager.GetTypeDefinition(model.TypeName);
            if (contentTypeDefinition == null)
                return HttpNotFound();
            model.TypeDisplayName = !String.IsNullOrWhiteSpace(contentTypeDefinition.DisplayName)
                ? contentTypeDefinition.DisplayName
                : contentTypeDefinition.Name;
            query = query.ForType(model.TypeName);
        }
        switch (model.Options.OrderBy) {
            case ContentsOrder.Modified:
                query = query.OrderByDescending<CommonPartRecord>(cr => cr.ModifiedUtc);
                break;
            case ContentsOrder.Published:
                query = query.OrderByDescending<CommonPartRecord>(cr => cr.PublishedUtc);
                break;
            case ContentsOrder.Created:
                query = query.OrderByDescending<CommonPartRecord>(cr => cr.CreatedUtc);
                break;
        }
        model.Options.SelectedFilter = model.TypeName;
        model.Options.FilterOptions = _containerService.GetContainerTypes()
            .Select(ctd => new KeyValuePair<string, string>(ctd.Name, ctd.DisplayName))
            .ToList().OrderBy(kvp => kvp.Value);
        var pager = new Pager(_services.WorkContext.CurrentSite, pagerParameters);
        var pagerShape = Shape.Pager(pager).TotalItemCount(query.Count());
        var pageOfLists = query.Slice(pager.GetStartIndex(), pager.PageSize);
        var listsShape = Shape.List();
        listsShape.AddRange(pageOfLists.Select(x => _contentManager.BuildDisplay(x, "SummaryAdmin")).ToList());
        var viewModel = Shape.ViewModel()
            .Lists(listsShape)
            .Pager(pagerShape)
            .Options(model.Options);
        return View(viewModel);
    }

    /// <summary>
    /// Applies the selected filter/order options by redirecting back to Index
    /// with the options encoded in the route values.
    /// </summary>
    [HttpPost, ActionName("Index")]
    [FormValueRequired("submit.Filter")]
    public ActionResult ListFilterPOST(ContentOptions options) {
        var routeValues = ControllerContext.RouteData.Values;
        if (options != null) {
            routeValues["Options.OrderBy"] = options.OrderBy;
            // Only accept filters that name an actual container type.
            if (_containerService.GetContainerTypes().Any(ctd => string.Equals(ctd.Name, options.SelectedFilter, StringComparison.OrdinalIgnoreCase))) {
                routeValues["id"] = options.SelectedFilter;
            }
            else {
                routeValues.Remove("id");
            }
        }
        return RedirectToAction("Index", routeValues);
    }

    /// <summary>
    /// Bulk publish/unpublish/remove of the checked lists on the Index screen.
    /// Any authorization failure cancels the transaction and returns 401.
    /// </summary>
    [HttpPost, ActionName("Index")]
    [FormValueRequired("submit.BulkEdit")]
    public ActionResult ListPOST(ContentOptions options, IEnumerable<int> itemIds, PagerParameters pagerParameters) {
        if (itemIds != null) {
            var checkedContentItems = _contentManager.GetMany<ContentItem>(itemIds, VersionOptions.Latest, QueryHints.Empty);
            switch (options.BulkAction) {
                case ContentsBulkAction.None:
                    break;
                case ContentsBulkAction.PublishNow:
                    foreach (var item in checkedContentItems) {
                        if (!_services.Authorizer.Authorize(Orchard.Core.Contents.Permissions.PublishContent, item, T("Couldn't publish selected lists."))) {
                            _transactionManager.Cancel();
                            return new HttpUnauthorizedResult();
                        }
                        _contentManager.Publish(item);
                    }
                    _services.Notifier.Success(T("Lists successfully published."));
                    break;
                case ContentsBulkAction.Unpublish:
                    foreach (var item in checkedContentItems) {
                        if (!_services.Authorizer.Authorize(Orchard.Core.Contents.Permissions.PublishContent, item, T("Couldn't unpublish selected lists."))) {
                            _transactionManager.Cancel();
                            return new HttpUnauthorizedResult();
                        }
                        _contentManager.Unpublish(item);
                    }
                    _services.Notifier.Success(T("Lists successfully unpublished."));
                    break;
                case ContentsBulkAction.Remove:
                    foreach (var item in checkedContentItems) {
                        if (!_services.Authorizer.Authorize(Orchard.Core.Contents.Permissions.DeleteContent, item, T("Couldn't remove selected lists."))) {
                            _transactionManager.Cancel();
                            return new HttpUnauthorizedResult();
                        }
                        _contentManager.Remove(item);
                    }
                    _services.Notifier.Success(T("Lists successfully removed."));
                    break;
                default:
                    throw new ArgumentOutOfRangeException();
            }
        }
        return RedirectToAction("Index", new { page = pagerParameters.Page, pageSize = pagerParameters.PageSize });
    }

    /// <summary>
    /// Entry point for creating a new list. With no type specified, goes to the
    /// type picker when more than one container type exists, otherwise straight
    /// to the Contents create screen for the single available type.
    /// </summary>
    public ActionResult Create(string id) {
        if (String.IsNullOrWhiteSpace(id)) {
            var containerTypes = _containerService.GetContainerTypes().ToList();
            if (containerTypes.Count > 1) {
                return RedirectToAction("SelectType");
            }
            return RedirectToAction("Create", new {id = containerTypes.First().Name});
        }
        return RedirectToAction("Create", "Admin", new {area = "Contents", id, returnUrl = Url.Action("Index", "Admin", new { area = "Orchard.Lists" })});
    }

    /// <summary>Shows the container-type picker used by <see cref="Create"/>.</summary>
    public ActionResult SelectType() {
        var viewModel = Shape.ViewModel().ContainerTypes(_containerService.GetContainerTypes().ToList());
        return View(viewModel);
    }

    /// <summary>
    /// Displays the items contained in a single list, ordered either by manual
    /// position (when positioning is enabled) or by the selected sort option.
    /// </summary>
    public ActionResult List(ListContentsViewModel model, PagerParameters pagerParameters) {
        var pager = new Pager(_services.WorkContext.CurrentSite, pagerParameters);
        var container = _contentManager.GetLatest(model.ContainerId);
        if (container == null || !container.Has<ContainerPart>()) {
            return HttpNotFound();
        }
        model.ContainerDisplayName = container.ContentManager.GetItemMetadata(container).DisplayText;
        if (string.IsNullOrEmpty(model.ContainerDisplayName)) {
            model.ContainerDisplayName = container.ContentType;
        }
        var query = GetListContentItemQuery(model.ContainerId);
        if (query == null) {
            return HttpNotFound();
        }
        var containerPart = container.As<ContainerPart>();
        if (containerPart.EnablePositioning) {
            query = OrderByPosition(query);
        }
        else {
            switch (model.Options.OrderBy) {
                case SortBy.Modified:
                    query = query.OrderByDescending<CommonPartRecord>(cr => cr.ModifiedUtc);
                    break;
                case SortBy.Published:
                    query = query.OrderByDescending<CommonPartRecord>(cr => cr.PublishedUtc);
                    break;
                case SortBy.Created:
                    query = query.OrderByDescending<CommonPartRecord>(cr => cr.CreatedUtc);
                    break;
                case SortBy.DisplayText:
                    // Note: This will obviously not work for items without a TitlePart, but we're OK with that.
                    query = query.OrderBy<TitlePartRecord>(cr => cr.Title);
                    break;
            }
        }
        var listView = containerPart.AdminListView.BuildDisplay(new BuildListViewDisplayContext {
            New = _services.New,
            Container = containerPart,
            ContentQuery = query,
            Pager = pager,
            ContainerDisplayName = model.ContainerDisplayName
        });
        var viewModel = Shape.ViewModel()
            .Pager(pager)
            .ListView(listView)
            .ListViewProvider(containerPart.AdminListView)
            .ListViewProviders(_listViewService.Providers.ToList())
            .Options(model.Options)
            .Container(container)
            .ContainerId(model.ContainerId)
            .ContainerDisplayName(model.ContainerDisplayName)
            .ContainerContentType(container.ContentType)
            .ItemContentTypes(container.As<ContainerPart>().ItemContentTypes.ToList())
            ;
        if (containerPart.Is<ContainablePart>()) {
            viewModel.ListNavigation(_services.New.ListNavigation(ContainablePart: containerPart.As<ContainablePart>()));
        }
        return View(viewModel);
    }

    /// <summary>Applies a new sort order to the List screen via route values.</summary>
    [HttpPost, ActionName("List")]
    [FormValueRequired("submit.Order")]
    public ActionResult ListOrderPOST(ContentOptions options) {
        var routeValues = ControllerContext.RouteData.Values;
        if (options != null) {
            routeValues["Options.OrderBy"] = options.OrderBy;
        }
        return RedirectToAction("List", routeValues);
    }

    /// <summary>
    /// Bulk operations on the checked items inside a list, including moving
    /// them to another list. Delegates to the Bulk* helpers, which return
    /// false on an authorization failure (mapped here to 401).
    /// </summary>
    [HttpPost, ActionName("List")]
    [FormValueRequired("submit.BulkEdit")]
    public ActionResult ListPOST(ContentOptions options, IEnumerable<int> itemIds, int? targetContainerId, PagerParameters pagerParameters, string returnUrl) {
        if (itemIds != null) {
            switch (options.BulkAction) {
                case ContentsBulkAction.None:
                    break;
                case ContentsBulkAction.PublishNow:
                    if (!BulkPublishNow(itemIds)) {
                        return new HttpUnauthorizedResult();
                    }
                    break;
                case ContentsBulkAction.Unpublish:
                    if (!BulkUnpublish(itemIds)) {
                        return new HttpUnauthorizedResult();
                    }
                    break;
                case ContentsBulkAction.Remove:
                    if (!BulkRemove(itemIds)) {
                        return new HttpUnauthorizedResult();
                    }
                    break;
                case ContentsBulkAction.RemoveFromList:
                    if (!BulkRemoveFromList(itemIds)) {
                        return new HttpUnauthorizedResult();
                    }
                    break;
                case ContentsBulkAction.MoveToList:
                    if (!BulkMoveToList(itemIds, targetContainerId)) {
                        return new HttpUnauthorizedResult();
                    }
                    break;
                default:
                    throw new ArgumentOutOfRangeException();
            }
        }
        return this.RedirectLocal(returnUrl, () => RedirectToAction("List", new { page = pagerParameters.Page, pageSize = pagerParameters.PageSize }));
    }

    /// <summary>
    /// Moves a single item to the top of the given container, notifying the
    /// user whether the item was added, moved between lists, or re-ordered.
    /// </summary>
    [HttpPost]
    public ActionResult Insert(int containerId, int itemId, PagerParameters pagerParameters) {
        var container = _containerService.Get(containerId, VersionOptions.Latest);
        var item = _contentManager.Get(itemId, VersionOptions.Latest, new QueryHints().ExpandParts<CommonPart, ContainablePart>());
        // Fix: guard against unknown ids; the original dereferenced these and
        // threw a NullReferenceException instead of returning a 404.
        if (container == null || item == null) {
            return HttpNotFound();
        }
        var commonPart = item.As<CommonPart>();
        var previousItemContainer = commonPart.Container;
        var itemMetadata = _contentManager.GetItemMetadata(item);
        var containerMetadata = _contentManager.GetItemMetadata(container);
        // First position + 1 puts the item above everything currently in the list.
        var position = _containerService.GetFirstPosition(containerId) + 1;
        LocalizedString message;
        if (previousItemContainer == null) {
            message = T("{0} was moved to <a href=\"{1}\">{2}</a>", itemMetadata.DisplayText, Url.RouteUrl(containerMetadata.AdminRouteValues), containerMetadata.DisplayText);
        }
        else if (previousItemContainer.Id != containerId) {
            var previousItemContainerMetadata = _contentManager.GetItemMetadata(commonPart.Container);
            message = T("{0} was moved from <a href=\"{3}\">{4}</a> to <a href=\"{1}\">{2}</a>",
                itemMetadata.DisplayText,
                Url.RouteUrl(containerMetadata.AdminRouteValues),
                containerMetadata.DisplayText,
                Url.RouteUrl(previousItemContainerMetadata.AdminRouteValues),
                previousItemContainerMetadata.DisplayText);
        }
        else {
            message = T("{0} is already part of this list and was moved to the top.", itemMetadata.DisplayText);
        }
        _containerService.MoveItem(item.As<ContainablePart>(), container, position);
        _services.Notifier.Information(message);
        return RedirectToAction("List", new { containerId, page = pagerParameters.Page, pageSize = pagerParameters.PageSize });
    }

    /// <summary>
    /// Persists a drag &amp; drop reorder of one page of a positioned list by
    /// rewriting the Position of every item on that page.
    /// </summary>
    [HttpPost]
    public ActionResult UpdatePositions(int containerId, int oldIndex, int newIndex, PagerParameters pagerParameters) {
        var pager = new Pager(_services.WorkContext.CurrentSite, pagerParameters);
        // Fix: check the query before ordering it; the original ordered first,
        // so the null guard below could never be reached.
        var query = GetListContentItemQuery(containerId);
        if (query == null) {
            return HttpNotFound();
        }
        query = OrderByPosition(query);
        var pageOfContentItems = query.Slice(pager.GetStartIndex(), pager.PageSize).ToList();
        var contentItem = pageOfContentItems[oldIndex];
        pageOfContentItems.Remove(contentItem);
        pageOfContentItems.Insert(newIndex, contentItem);
        // Positions descend down the page (higher position = earlier item).
        var index = pager.GetStartIndex() + pageOfContentItems.Count;
        foreach (var item in pageOfContentItems.Select(x => x.As<ContainablePart>())) {
            item.Position = --index;
            RePublish(item);
        }
        return new EmptyResult();
    }

    /// <summary>Reverses, shuffles or sorts the entire contents of a list.</summary>
    [ActionName("List")]
    [HttpPost, FormValueRequired("submit.ListOp")]
    public ActionResult ListOperation(int containerId, ListOperation operation, SortBy? sortBy, SortDirection? sortByDirection, PagerParameters pagerParameters) {
        var items = _containerService.GetContentItems(containerId, VersionOptions.Latest).Select(x => x.As<ContainablePart>());
        switch (operation) {
            case ViewModels.ListOperation.Reverse:
                _containerService.Reverse(items);
                _services.Notifier.Success(T("The list has been reversed."));
                break;
            case ViewModels.ListOperation.Shuffle:
                _containerService.Shuffle(items);
                _services.Notifier.Success(T("The list has been shuffled."));
                break;
            case ViewModels.ListOperation.Sort:
                _containerService.Sort(items, sortBy.GetValueOrDefault(), sortByDirection.GetValueOrDefault());
                _services.Notifier.Success(T("The list has been sorted."));
                break;
            default:
                _services.Notifier.Error(T("Please select an operation to perform on the list."));
                break;
        }
        return RedirectToAction("List", new {containerId, page = pagerParameters.Page, pageSize = pagerParameters.PageSize});
    }

    /// <summary>Switches the admin list-view (grid, table, …) used for a container.</summary>
    [HttpPost, ActionName("List")]
    [FormValueRequired("listViewName")]
    public ActionResult ChangeListView(int containerId, string listViewName, PagerParameters pagerParameters) {
        var container = _containerService.Get(containerId, VersionOptions.Latest);
        if (container == null || !container.Has<ContainerPart>()) {
            return HttpNotFound();
        }
        container.Record.AdminListViewName = listViewName;
        return RedirectToAction("List", new { containerId, page = pagerParameters.Page, pageSize = pagerParameters.PageSize });
    }

    /// <summary>
    /// Only publishes the content if it is already published.
    /// </summary>
    private void RePublish(IContent content) {
        if(content.ContentItem.VersionRecord.Published)
            _contentManager.Publish(content.ContentItem);
    }

    // Builds the query for the latest versions of all containable items that
    // live inside the given container.
    private IContentQuery<ContentItem> GetListContentItemQuery(int containerId) {
        var containableTypes = GetContainableTypes().Select(ctd => ctd.Name).ToList();
        if (containableTypes.Count == 0) {
            // Force the name to be matched against empty and return no items in the query
            containableTypes.Add(string.Empty);
        }
        var query = _contentManager
            .Query(VersionOptions.Latest, containableTypes.ToArray())
            .Join<CommonPartRecord>().Where(cr => cr.Container.Id == containerId);
        return query;
    }

    private IContentQuery<ContentItem> OrderByPosition(IContentQuery<ContentItem> query) {
        return query.Join<ContainablePartRecord>().OrderByDescending(x => x.Position);
    }

    // All content types that carry a ContainablePart and can therefore be listed.
    private IEnumerable<ContentTypeDefinition> GetContainableTypes() {
        return _contentDefinitionManager.ListTypeDefinitions().Where(ctd => ctd.Parts.Any(c => c.PartDefinition.Name == "ContainablePart"));
    }

    // Moves the selected items into the target container. Returns false only on
    // an authorization failure; validation problems notify the user and return true.
    private bool BulkMoveToList(IEnumerable<int> selectedIds, int? targetContainerId) {
        if (!targetContainerId.HasValue) {
            _services.Notifier.Information(T("Please select the list to move the items to."));
            return true;
        }
        var id = targetContainerId.Value;
        var targetContainer = _contentManager.Get<ContainerPart>(id);
        if (targetContainer == null) {
            _services.Notifier.Information(T("Please select the list to move the items to."));
            return true;
        }
        var itemContentTypes = targetContainer.ItemContentTypes.ToList();
        var containerDisplayText = _contentManager.GetItemMetadata(targetContainer).DisplayText ?? targetContainer.ContentItem.ContentType;
        var selectedItems = _contentManager.GetMany<ContainablePart>(selectedIds, VersionOptions.Latest, QueryHints.Empty);
        foreach (var item in selectedItems) {
            if (!_services.Authorizer.Authorize(Orchard.Core.Contents.Permissions.EditContent, item, T("Couldn't move selected content."))) {
                return false;
            }
            // Ensure the item can be in that container.
            if (itemContentTypes.Any() && itemContentTypes.All(x => x.Name != item.ContentItem.ContentType)) {
                _services.TransactionManager.Cancel();
                _services.Notifier.Warning(T("One or more items could not be moved to '{0}' because it is restricted to containing items of type '{1}'.", containerDisplayText, itemContentTypes.Select(x => x.DisplayName).ToOrString(T)));
                return true; // todo: transactions
            }
            _containerService.MoveItem(item, targetContainer);
        }
        _services.Notifier.Success(T("Content successfully moved to <a href=\"{0}\">{1}</a>.", Url.Action("List", new { containerId = targetContainerId }), containerDisplayText));
        return true;
    }

    // Detaches the selected items from their container. False on authorization failure.
    private bool BulkRemoveFromList(IEnumerable<int> itemIds) {
        var selectedItems = _contentManager.GetMany<ContainablePart>(itemIds, VersionOptions.Latest, QueryHints.Empty);
        foreach (var item in selectedItems) {
            if (!_services.Authorizer.Authorize(Orchard.Core.Contents.Permissions.EditContent, item, T("Couldn't remove selected content from the list."))) {
                _services.TransactionManager.Cancel();
                return false;
            }
            item.As<CommonPart>().Record.Container = null;
            _containerService.UpdateItemPath(item.ContentItem);
        }
        _services.Notifier.Success(T("Content successfully removed from the list."));
        return true;
    }

    // Removes (soft-deletes) the selected items. False on authorization failure.
    private bool BulkRemove(IEnumerable<int> itemIds) {
        foreach (var item in itemIds.Select(itemId => _contentManager.GetLatest(itemId))) {
            if (!_services.Authorizer.Authorize(Orchard.Core.Contents.Permissions.DeleteContent, item, T("Couldn't remove selected content."))) {
                _services.TransactionManager.Cancel();
                return false;
            }
            _contentManager.Remove(item);
        }
        _services.Notifier.Success(T("Content successfully removed."));
        return true;
    }

    // Unpublishes the selected items. False on authorization failure.
    private bool BulkUnpublish(IEnumerable<int> itemIds) {
        foreach (var item in itemIds.Select(itemId => _contentManager.GetLatest(itemId))) {
            if (!_services.Authorizer.Authorize(Orchard.Core.Contents.Permissions.PublishContent, item, T("Couldn't unpublish selected content."))) {
                _services.TransactionManager.Cancel();
                return false;
            }
            _contentManager.Unpublish(item);
        }
        _services.Notifier.Success(T("Content successfully unpublished."));
        return true;
    }

    // Publishes the selected items. False on authorization failure.
    private bool BulkPublishNow(IEnumerable<int> itemIds) {
        foreach (var item in itemIds.Select(itemId => _contentManager.GetLatest(itemId))) {
            if (!_services.Authorizer.Authorize(Orchard.Core.Contents.Permissions.PublishContent, item, T("Couldn't publish selected content."))) {
                _services.TransactionManager.Cancel();
                return false;
            }
            _contentManager.Publish(item);
        }
        _services.Notifier.Success(T("Content successfully published."));
        return true;
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.