context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using DebuggerTests;
using Microsoft.PythonTools.Debugger;
using Microsoft.PythonTools.Infrastructure;
using Microsoft.PythonTools.Repl;
using Microsoft.VisualStudio.InteractiveWindow.Commands;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using TestUtilities;
using TestUtilities.Mocks;
using TestUtilities.Python;
namespace PythonToolsTests {
[TestClass]
public class DebugReplEvaluatorTests {
    // Evaluator under test, the mock REPL window it prints to, and the
    // debuggee processes started by AttachAsync (torn down in TestClean).
    private PythonDebugReplEvaluator _evaluator;
    private MockReplWindow _window;
    private List<PythonProcess> _processes;

    [ClassInitialize]
    public static void DoDeployment(TestContext context) {
        AssertListener.Initialize();
    }

    // Folder containing the DebugReplTest*.py debuggee scripts.
    internal virtual string DebuggerTestPath {
        get {
            return TestData.GetPath(@"TestData\DebuggerProject\");
        }
    }

    // Interpreter the suite runs against; subclasses override this to re-run
    // every test on other Python versions (x86 preferred, x64 fallback).
    internal virtual PythonVersion Version {
        get {
            return PythonPaths.Python26 ?? PythonPaths.Python26_x64;
        }
    }

    [TestInitialize]
    public void TestInit() {
        // Marks the test inconclusive (rather than failing) when the
        // required interpreter is not installed on this machine.
        Version.AssertInstalled();
        var serviceProvider = PythonToolsTestUtilities.CreateMockServiceProvider();
        _evaluator = new PythonDebugReplEvaluator(serviceProvider);
        _window = new MockReplWindow(_evaluator);
        _evaluator._Initialize(_window);
        _processes = new List<PythonProcess>();
    }

    [TestCleanup]
    public void TestClean() {
        // Resume every attached debuggee so it can run to completion, then
        // forcibly terminate anything still alive after 5 seconds. All
        // failures here are best-effort: log and keep tearing down.
        foreach (var proc in _processes) {
            try {
                proc.ResumeAsync(TimeoutToken()).WaitAndUnwrapExceptions();
            } catch (Exception ex) {
                Console.WriteLine("Failed to continue process");
                Console.WriteLine(ex);
            }
            if (!proc.WaitForExit(5000)) {
                try {
                    proc.Terminate();
                } catch (Exception ex) {
                    Console.WriteLine("Failed to terminate process");
                    Console.WriteLine(ex);
                }
            }
        }
        // Dump the REPL transcript to help diagnose failures.
        if (_window != null) {
            Console.WriteLine("Stdout:");
            Console.Write(_window.Output);
            Console.WriteLine("Stderr:");
            Console.Write(_window.Error);
        }
    }

    // Evaluating an expression at a breakpoint shows both its printed and
    // repr forms in the REPL.
    [TestMethod, Priority(2)]
    public async Task DisplayVariables() {
        await AttachAsync("DebugReplTest1.py", 3);
        Assert.AreEqual("hello", ExecuteText("print(a)"));
        Assert.AreEqual("'hello'", ExecuteText("a"));
    }

    // Both frame locals and module globals are visible from the REPL.
    [TestMethod, Priority(3)]
    public async Task DisplayFunctionLocalsAndGlobals() {
        await AttachAsync("DebugReplTest2.py", 13);
        Assert.AreEqual("51", ExecuteText("print(innermost_val)"));
        Assert.AreEqual("5", ExecuteText("print(global_val)"));
    }

    // A failing expression produces no output and writes the Python
    // traceback to the error stream.
    [TestMethod, Priority(3)]
    public async Task ErrorInInput() {
        await AttachAsync("DebugReplTest2.py", 13);
        Assert.AreEqual("", ExecuteText("print(does_not_exist)", false));
        Assert.AreEqual(@"Traceback (most recent call last):
File ""<debug input>"", line 1, in <module>
NameError: name 'does_not_exist' is not defined
".Replace("\r\n", "\n"), _window.Error.Replace("\r\n", "\n"));
    }

    // Assignments made in the REPL are reflected in the debuggee's frame.
    [TestMethod, Priority(3)]
    public async Task ChangeVariables() {
        await AttachAsync("DebugReplTest2.py", 13);
        Assert.AreEqual("", ExecuteText("innermost_val = 1"));
        Assert.AreEqual("1", ExecuteText("print(innermost_val)"));
    }

    [TestMethod, Priority(3)]
    public async Task ChangeVariablesAndRefreshFrames() {
        // This is really a test for PythonProcess' RefreshFramesAsync
        // but it's convenient to have it here, as this is the exact
        // scenario where it's used in the product.
        // We call RefreshFramesAsync multiple times, which validates a bug fix.
        await AttachAsync("DebugReplTest2.py", 13);
        var process = _processes[0];
        var thread = process.GetThreads().FirstOrDefault();
        var variables = thread.Frames[0].Locals.ToArray();
        Assert.AreEqual("innermost_val", variables[0].ChildName);
        Assert.AreEqual("51", variables[0].StringRepr);
        // Refresh before changing anything, local variable should remain the same
        await process.RefreshThreadFramesAsync(thread.Id, TimeoutToken());
        variables = thread.Frames[0].Locals.ToArray();
        Assert.AreEqual("innermost_val", variables[0].ChildName);
        Assert.AreEqual("51", variables[0].StringRepr);
        Assert.AreEqual("", ExecuteText("innermost_val = 1"));
        Assert.AreEqual("1", ExecuteText("print(innermost_val)"));
        // This should now produce an updated local variable
        await process.RefreshThreadFramesAsync(thread.Id, TimeoutToken());
        variables = thread.Frames[0].Locals.ToArray();
        Assert.AreEqual("innermost_val", variables[0].ChildName);
        Assert.AreEqual("1", variables[0].StringRepr);
    }

    // The evaluator exposes the current frame plus every loaded module as a
    // selectable scope, and reports a file path for module scopes only.
    [TestMethod, Priority(0)]
    public async Task AvailableScopes() {
        await AttachAsync("DebugReplTest1.py", 3);
        // Value is the expected module file name (without extension);
        // null means the scope has no backing file (the current frame).
        var expectedData = new Dictionary<string, string>() {
            { "<Current Frame>", null },
            { "abc", "abc" },
            { "dis", "dis" },
        };
        Assert.IsTrue(_evaluator.EnableMultipleScopes);
        var scopes = _evaluator.GetAvailableScopes().ToArray();
        foreach (var expectedItem in expectedData) {
            CollectionAssert.Contains(scopes, expectedItem.Key);
        }
        var scopesAndPaths = _evaluator.GetAvailableScopesAndPaths().ToArray();
        foreach (var expectedItem in expectedData) {
            var actualItem = scopesAndPaths.SingleOrDefault(d => d.Key == expectedItem.Key);
            Assert.IsNotNull(actualItem);
            if (!string.IsNullOrEmpty(expectedItem.Value)) {
                // ChangeExtension(..., null) strips .py/.pyc before comparing.
                Assert.IsTrue(PathUtils.IsSamePath(Path.Combine(Version.PrefixPath, "lib", expectedItem.Value), Path.ChangeExtension(actualItem.Value, null)));
            } else {
                Assert.IsNull(actualItem.Value);
            }
        }
    }

    // Switching scope to a module and back to the current frame; the frame
    // scope is addressable by both its localized and legacy names.
    [TestMethod, Priority(0)]
    public virtual async Task ChangeModule() {
        await AttachAsync("DebugReplTest1.py", 3);
        Assert.AreEqual("'hello'", ExecuteText("a"));
        // Change to the dis module
        Assert.AreEqual("Current module changed to dis", ExecuteCommand(new SwitchModuleCommand(), "dis"));
        Assert.AreEqual("dis", _evaluator.CurrentScopeName);
        Assert.IsTrue(PathUtils.IsSamePath(Path.ChangeExtension(_evaluator.CurrentScopePath, null), Path.Combine(Version.PrefixPath, "lib", "dis")));
        Assert.AreEqual("", ExecuteText("test = 'world'"));
        Assert.AreEqual("'world'", ExecuteText("test"));
        // Change back to the current frame (using localized name)
        Assert.AreEqual("Current module changed to <Current Frame>", ExecuteCommand(new SwitchModuleCommand(), "<Current Frame>"));
        Assert.AreEqual("<Current Frame>", _evaluator.CurrentScopeName);
        Assert.AreEqual("", _evaluator.CurrentScopePath);
        // 'test' only exists in the dis module's scope, not in the frame.
        Assert.AreEqual("", ExecuteText("test", false));
        Assert.IsTrue(_window.Error.Contains("NameError:"));
        Assert.AreEqual("'hello'", ExecuteText("a"));
        // Change back to the current frame (using fixed and backwards compatible name)
        Assert.AreEqual("Current module changed to <Current Frame>", ExecuteCommand(new SwitchModuleCommand(), "<CurrentFrame>"));
        Assert.AreEqual("<Current Frame>", _evaluator.CurrentScopeName);
        Assert.AreEqual("", _evaluator.CurrentScopePath);
        Assert.AreEqual("", ExecuteText("test", false));
        Assert.IsTrue(_window.Error.Contains("NameError:"));
        Assert.AreEqual("'hello'", ExecuteText("a"));
    }

    // Navigating the call stack with the frame/frame-up/frame-down commands;
    // each frame exposes its own locals.
    [TestMethod, Priority(0)]
    public virtual async Task ChangeFrame() {
        await AttachAsync("DebugReplTest2.py", 13);
        // We are broken in the innermost function
        string stack;
        stack = ExecuteCommand(new DebugReplFramesCommand(), "");
        Assert.IsTrue(stack.StartsWith(@"=> Frame id=0, function=innermost
Frame id=1, function=inner
Frame id=2, function=outer
Frame id=3, function=<module>"));
        Assert.AreEqual("0", ExecuteCommand(new DebugReplFrameCommand(), ""));
        Assert.AreEqual("51", ExecuteText("print(innermost_val)"));
        // Move up the stack to the inner function
        Assert.AreEqual("Current frame changed to 1", ExecuteCommand(new DebugReplFrameUpCommand(), ""));
        stack = ExecuteCommand(new DebugReplFramesCommand(), "");
        Assert.IsTrue(stack.StartsWith(@" Frame id=0, function=innermost
=> Frame id=1, function=inner
Frame id=2, function=outer
Frame id=3, function=<module>"));
        Assert.AreEqual("1", ExecuteCommand(new DebugReplFrameCommand(), ""));
        Assert.AreEqual("50", ExecuteText("print(inner_val)"));
        // Move to frame 2, the outer function
        Assert.AreEqual("Current frame changed to 2", ExecuteCommand(new DebugReplFrameCommand(), "2"));
        Assert.AreEqual("2", ExecuteCommand(new DebugReplFrameCommand(), ""));
        Assert.AreEqual("10", ExecuteText("print(outer_val)"));
        // Move down the stack, back to the inner function
        Assert.AreEqual("Current frame changed to 1", ExecuteCommand(new DebugReplFrameDownCommand(), ""));
        Assert.AreEqual("1", ExecuteCommand(new DebugReplFrameCommand(), ""));
    }

    // Switching the active thread; expressions evaluate in the context of
    // whichever thread is current.
    [TestMethod, Priority(3)]
    [TestCategory("10s")]
    public async Task ChangeThread() {
        await AttachAsync("DebugReplTest3.py", 39);
        var threads = _processes[0].GetThreads();
        PythonThread main = threads.SingleOrDefault(t => t.Frames[0].FunctionName == "threadmain");
        PythonThread worker1 = threads.SingleOrDefault(t => t.Frames[0].FunctionName == "thread1");
        PythonThread worker2 = threads.SingleOrDefault(t => t.Frames[0].FunctionName == "thread2");
        // We are broken in the the main thread
        string text;
        text = ExecuteCommand(new DebugReplThreadsCommand(), "");
        // "=>" marks the currently selected thread in the listing.
        Assert.IsTrue(text.Contains(String.Format("=> Thread id={0}, name=", main.Id)));
        Assert.IsTrue(text.Contains(String.Format(" Thread id={0}, name=", worker1.Id)));
        Assert.IsTrue(text.Contains(String.Format(" Thread id={0}, name=", worker2.Id)));
        Assert.AreEqual(main.Id.ToString(), ExecuteCommand(new DebugReplThreadCommand(), ""));
        Assert.AreEqual("False", ExecuteText("t1_done"));
        Assert.AreEqual("False", ExecuteText("t2_done"));
        // Switch to worker thread 1
        Assert.AreEqual(String.Format("Current thread changed to {0}, frame 0", worker1.Id), ExecuteCommand(new DebugReplThreadCommand(), worker1.Id.ToString()));
        text = ExecuteCommand(new DebugReplThreadsCommand(), "");
        Assert.IsTrue(text.Contains(String.Format(" Thread id={0}, name=", main.Id)));
        Assert.IsTrue(text.Contains(String.Format("=> Thread id={0}, name=", worker1.Id)));
        Assert.IsTrue(text.Contains(String.Format(" Thread id={0}, name=", worker2.Id)));
        Assert.AreEqual(worker1.Id.ToString(), ExecuteCommand(new DebugReplThreadCommand(), ""));
        Assert.AreEqual("'thread1'", ExecuteText("t1_val"));
    }

    // Switching the active process when two debuggees are attached; each
    // process has its own globals.
    [TestMethod, Priority(0)]
    public virtual async Task ChangeProcess() {
        await AttachAsync("DebugReplTest4A.py", 3);
        await AttachAsync("DebugReplTest4B.py", 3);
        PythonProcess proc1 = _processes[0];
        PythonProcess proc2 = _processes[1];
        // We are broken in process 2 (the last one attached is the current one)
        string text;
        text = ExecuteCommand(new DebugReplProcessesCommand(), "");
        Assert.AreEqual(String.Format(@" Process id={0}, Language version={2}
=> Process id={1}, Language version={2}", proc1.Id, proc2.Id, Version.Version), text);
        // Switch to process 1
        Assert.AreEqual(String.Format("Current process changed to {0}", proc1.Id), ExecuteCommand(new DebugReplProcessCommand(), proc1.Id.ToString()));
        Assert.AreEqual(String.Format("{0}", proc1.Id), ExecuteCommand(new DebugReplProcessCommand(), String.Empty));
        Assert.AreEqual("'hello'", ExecuteText("a1"));
        Assert.AreEqual("30", ExecuteText("b1"));
        // Switch to process 2
        Assert.AreEqual(String.Format("Current process changed to {0}", proc2.Id), ExecuteCommand(new DebugReplProcessCommand(), proc2.Id.ToString()));
        Assert.AreEqual(String.Format("{0}", proc2.Id), ExecuteCommand(new DebugReplProcessCommand(), String.Empty));
        Assert.AreEqual("'world'", ExecuteText("a2"));
        Assert.AreEqual("60", ExecuteText("b2"));
    }

    // The debug REPL does not support aborting a running evaluation; the
    // attempt should surface an error message rather than fail.
    [TestMethod, Priority(3)]
    [TestCategory("10s")]
    public async Task Abort() {
        await AttachAsync("DebugReplTest5.py", 3);
        _window.ClearScreen();
        var execute = _evaluator.ExecuteText("for i in range(0,20): time.sleep(0.5)");
        _evaluator.AbortExecution();
        execute.Wait();
        Assert.IsTrue(execute.Result.IsSuccessful);
        Assert.AreEqual("Abort is not supported.", _window.Error.TrimEnd());
    }

    [TestMethod, Priority(0)]
    public async Task StepInto() {
        // Make sure that we don't step into the internal repl code
        // http://pytools.codeplex.com/workitem/777
        await AttachAsync("DebugReplTest6.py", 2);
        var thread = _processes[0].GetThreads()[0];
        await thread.StepIntoAsync(TimeoutToken());
        // Result of step into is not immediate
        Thread.Sleep(1000);
        // We should still be in the <module>, not in the internals of print in repl code
        foreach (var frame in thread.Frames) {
            Console.WriteLine("{0}:{1} [{2}]", frame.FunctionName, frame.LineNo, frame.FileName);
        }
        Assert.AreEqual(1, thread.Frames.Count);
        Assert.AreEqual("<module>", thread.Frames[0].FunctionName);
    }

    // Runs a REPL command synchronously, asserts it succeeded, and returns
    // the trimmed window output.
    private string ExecuteCommand(IInteractiveWindowCommand cmd, string args) {
        _window.ClearScreen();
        var execute = cmd.Execute(_window, args);
        execute.Wait();
        Assert.IsTrue(execute.Result.IsSuccessful);
        return _window.Output.TrimEnd();
    }

    // Evaluates Python source expecting success; see the overload below.
    private string ExecuteText(string executionText) {
        return ExecuteText(executionText, true);
    }

    // Evaluates Python source synchronously, asserts the expected success
    // state, and returns the trimmed window output (errors go to _window.Error).
    private string ExecuteText(string executionText, bool expectSuccess) {
        _window.ClearScreen();
        var execute = _evaluator.ExecuteText(executionText);
        execute.Wait();
        Assert.AreEqual(expectSuccess, execute.Result.IsSuccessful);
        return _window.Output.TrimEnd();
    }

    // Sets the event, ignoring the race where cleanup disposed it already.
    private void SafeSetEvent(AutoResetEvent evt) {
        try {
            evt.Set();
        } catch (ObjectDisposedException) {
        }
    }

    // Launches the given script under the debugger with a breakpoint at
    // lineNo, waits for the breakpoint to be hit (or the process to exit),
    // then attaches the REPL evaluator to the stopped process.
    private async Task AttachAsync(string filename, int lineNo) {
        var debugger = new PythonDebugger();
        PythonProcess process = debugger.DebugProcess(Version, DebuggerTestPath + filename, null, async (newproc, newthread) => {
            var breakPoint = newproc.AddBreakpointByFileExtension(lineNo, filename);
            await breakPoint.AddAsync(TimeoutToken());
        });
        _processes.Add(process);
        long? threadAtBreakpoint = null;
        using (var brkHit = new AutoResetEvent(false))
        using (var procExited = new AutoResetEvent(false)) {
            EventHandler<BreakpointHitEventArgs> breakpointHitHandler = (s, e) => {
                threadAtBreakpoint = e.Thread.Id;
                SafeSetEvent(brkHit);
            };
            EventHandler<ProcessExitedEventArgs> processExitedHandler = (s, e) => SafeSetEvent(procExited);
            process.BreakpointHit += breakpointHitHandler;
            process.ProcessExited += processExitedHandler;
            try {
                await process.StartAsync();
            } catch (Win32Exception ex) {
                _processes.Remove(process);
                if (ex.HResult == -2147467259 /*0x80004005*/) {
                    Assert.Inconclusive("Required Python interpreter is not installed");
                } else {
                    Assert.Fail("Process start failed:\r\n" + ex.ToString());
                }
            }
            // Index 0 is the breakpoint event; anything else (exit/timeout)
            // means the debuggee never reached the breakpoint.
            var handles = new[] { brkHit, procExited };
            if (WaitHandle.WaitAny(handles, 25000) != 0) {
                Assert.Fail("Failed to wait on event");
            }
            process.BreakpointHit -= breakpointHitHandler;
            process.ProcessExited -= processExitedHandler;
        }
        await _evaluator.AttachProcessAsync(process, new MockThreadIdMapper());
        // AttachProcessAsync calls InitializeAsync which sets the active
        // thread by using the DTE (which is null in these tests), so we
        // adjust it to the correct thread where breakpoint was hit.
        if (threadAtBreakpoint != null) {
            _evaluator.ChangeActiveThread(threadAtBreakpoint.Value, false);
        }
    }

    // Identity mapping between VS thread ids and Python thread ids, which
    // is sufficient for these tests (no real DTE involved).
    private class MockThreadIdMapper : IThreadIdMapper {
        public long? GetPythonThreadId(uint vsThreadId) {
            return vsThreadId;
        }
    }

    // Shared timeout for all debugger operations in this suite.
    protected static CancellationToken TimeoutToken() {
        return CancellationTokens.After5s;
    }
}
[TestClass]
public class DebugReplEvaluatorTests31 : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against Python 3.1 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.Python31 ?? PythonPaths.Python31_x64;
}
[TestClass]
public class DebugReplEvaluatorTests32 : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against Python 3.2 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.Python32 ?? PythonPaths.Python32_x64;
}
[TestClass]
public class DebugReplEvaluatorTests33 : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against Python 3.3 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.Python33 ?? PythonPaths.Python33_x64;
}
[TestClass]
public class DebugReplEvaluatorTests34 : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against Python 3.4 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.Python34 ?? PythonPaths.Python34_x64;
}
[TestClass]
public class DebugReplEvaluatorTests35 : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against Python 3.5 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.Python35 ?? PythonPaths.Python35_x64;
}
[TestClass]
public class DebugReplEvaluatorTests36 : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against Python 3.6 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.Python36 ?? PythonPaths.Python36_x64;
}
[TestClass]
public class DebugReplEvaluatorTests37 : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against Python 3.7 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.Python37 ?? PythonPaths.Python37_x64;
}
[TestClass]
public class DebugReplEvaluatorTests27 : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against Python 2.7 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.Python27 ?? PythonPaths.Python27_x64;
}
[TestClass]
public class DebugReplEvaluatorTestsIPy : DebugReplEvaluatorTests {
    [ClassInitialize]
    public static new void DoDeployment(TestContext context) => AssertListener.Initialize();

    // Re-runs the inherited suite against IronPython 2.7 (x86 preferred, x64 fallback).
    internal override PythonVersion Version => PythonPaths.IronPython27 ?? PythonPaths.IronPython27_x64;

    // These overrides exist solely to attach a different test priority
    // when running on IronPython; they delegate straight to the base tests.
    [TestMethod, Priority(2)]
    public override async Task ChangeFrame() => await base.ChangeFrame();

    [TestMethod, Priority(2)]
    public override async Task ChangeModule() => await base.ChangeModule();

    [TestMethod, Priority(2)]
    public override async Task ChangeProcess() => await base.ChangeProcess();
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Reflection.Metadata;
using System.Threading;
using Debug = System.Diagnostics.Debug;
using Internal.TypeSystem;
using Internal.NativeFormat;
namespace Internal.TypeSystem.Ecma
{
/// <summary>
/// Override of MetadataType that uses actual Ecma335 metadata.
/// Most members are computed lazily from the underlying
/// <see cref="TypeDefinition"/> and cached in fields.
/// </summary>
public sealed partial class EcmaType : MetadataType, EcmaModule.IEntityHandleObject
{
    private EcmaModule _module;
    private TypeDefinitionHandle _handle;
    private TypeDefinition _typeDefinition;

    // Cached values
    private string _typeName;                 // null until first access to Name
    private string _typeNamespace;            // null until first access to Namespace
    private TypeDesc[] _genericParameters;    // null until first access to Instantiation
    private MetadataType _baseType;           // 'this' is the not-yet-initialized sentinel
    private int _hashcode;                    // 0 means not yet computed

    internal EcmaType(EcmaModule module, TypeDefinitionHandle handle)
    {
        _module = module;
        _handle = handle;
        _typeDefinition = module.MetadataReader.GetTypeDefinition(handle);
        _baseType = this; // Not yet initialized flag
#if DEBUG
        // Initialize name eagerly in debug builds for convenience
        this.ToString();
#endif
    }

    public override int GetHashCode()
    {
        // Cached after first computation. A genuinely zero hash is simply
        // recomputed on every call; correctness is unaffected.
        if (_hashcode != 0)
        {
            return _hashcode;
        }
        int nameHash = TypeHashingAlgorithms.ComputeNameHashCode(this.GetFullName());
        TypeDesc containingType = ContainingType;
        if (containingType == null)
        {
            _hashcode = nameHash;
        }
        else
        {
            // Nested types mix in the declaring type's hash.
            _hashcode = TypeHashingAlgorithms.ComputeNestedTypeHashCode(containingType.GetHashCode(), nameHash);
        }
        return _hashcode;
    }

    // Explicit interface implementation: exposes the raw metadata handle to
    // EcmaModule's handle-to-object cache.
    EntityHandle EcmaModule.IEntityHandleObject.Handle
    {
        get
        {
            return _handle;
        }
    }

    // TODO: Use stable hashcode based on the type name?
    // public override int GetHashCode()
    // {
    // }

    public override TypeSystemContext Context
    {
        get
        {
            return _module.Context;
        }
    }

    // Populates _genericParameters from metadata. Publication for the
    // non-empty case uses CompareExchange so concurrent callers agree on
    // a single array instance.
    private void ComputeGenericParameters()
    {
        var genericParameterHandles = _typeDefinition.GetGenericParameters();
        int count = genericParameterHandles.Count;
        if (count > 0)
        {
            TypeDesc[] genericParameters = new TypeDesc[count];
            int i = 0;
            foreach (var genericParameterHandle in genericParameterHandles)
            {
                genericParameters[i++] = new EcmaGenericParameter(_module, genericParameterHandle);
            }
            Interlocked.CompareExchange(ref _genericParameters, genericParameters, null);
        }
        else
        {
            _genericParameters = TypeDesc.EmptyTypes;
        }
    }

    public override Instantiation Instantiation
    {
        get
        {
            if (_genericParameters == null)
                ComputeGenericParameters();
            return new Instantiation(_genericParameters);
        }
    }

    public override ModuleDesc Module
    {
        get
        {
            return _module;
        }
    }

    // Same module as Module, but statically typed as EcmaModule.
    public EcmaModule EcmaModule
    {
        get
        {
            return _module;
        }
    }

    public MetadataReader MetadataReader
    {
        get
        {
            return _module.MetadataReader;
        }
    }

    public TypeDefinitionHandle Handle
    {
        get
        {
            return _handle;
        }
    }

    // Resolves and caches the base type; replaces the 'this' sentinel with
    // either the resolved MetadataType or null (no base type).
    private MetadataType InitializeBaseType()
    {
        var baseTypeHandle = _typeDefinition.BaseType;
        if (baseTypeHandle.IsNil)
        {
            _baseType = null;
            return null;
        }

        var type = _module.GetType(baseTypeHandle) as MetadataType;
        if (type == null)
        {
            // Base type resolved to something that is not a metadata type;
            // the image is malformed.
            throw new BadImageFormatException();
        }
        _baseType = type;
        return type;
    }

    public override DefType BaseType
    {
        get
        {
            // _baseType == this means "not yet computed".
            if (_baseType == this)
                return InitializeBaseType();
            return _baseType;
        }
    }

    public override MetadataType MetadataBaseType
    {
        get
        {
            // _baseType == this means "not yet computed".
            if (_baseType == this)
                return InitializeBaseType();
            return _baseType;
        }
    }

    // Computes the requested subset of type flags (category, generic
    // variable tracking) from metadata and the base type.
    protected override TypeFlags ComputeTypeFlags(TypeFlags mask)
    {
        TypeFlags flags = 0;

        if ((mask & TypeFlags.ContainsGenericVariablesComputed) != 0)
        {
            flags |= TypeFlags.ContainsGenericVariablesComputed;

            // TODO: Do we really want to get the instantiation to figure out whether the type is generic?
            if (this.HasInstantiation)
                flags |= TypeFlags.ContainsGenericVariables;
        }

        if ((mask & TypeFlags.CategoryMask) != 0)
        {
            // Category is derived from the base type (ValueType/Enum) or,
            // failing that, from the Interface attribute bit.
            TypeDesc baseType = this.BaseType;

            if (_module.Context.IsWellKnownType(baseType, WellKnownType.ValueType))
            {
                flags |= TypeFlags.ValueType;
            }
            else
            if (_module.Context.IsWellKnownType(baseType, WellKnownType.Enum))
            {
                flags |= TypeFlags.Enum;
            }
            else
            {
                if ((_typeDefinition.Attributes & TypeAttributes.Interface) != 0)
                    flags |= TypeFlags.Interface;
                else
                    flags |= TypeFlags.Class;
            }

            // All other cases are handled during TypeSystemContext intitialization
        }

        Debug.Assert((flags & mask) != 0);
        return flags;
    }

    // Reads and caches the simple (unqualified) type name.
    private string InitializeName()
    {
        var metadataReader = this.MetadataReader;
        _typeName = metadataReader.GetString(_typeDefinition.Name);
        return _typeName;
    }

    public override string Name
    {
        get
        {
            if (_typeName == null)
                return InitializeName();
            return _typeName;
        }
    }

    // Reads and caches the namespace string.
    private string InitializeNamespace()
    {
        var metadataReader = this.MetadataReader;
        _typeNamespace = metadataReader.GetString(_typeDefinition.Namespace);
        return _typeNamespace;
    }

    public override string Namespace
    {
        get
        {
            if (_typeNamespace == null)
                return InitializeNamespace();
            return _typeNamespace;
        }
    }

    public override IEnumerable<MethodDesc> GetMethods()
    {
        foreach (var handle in _typeDefinition.GetMethods())
        {
            yield return (MethodDesc)_module.GetObject(handle);
        }
    }

    /// <summary>
    /// Finds a method by name and, optionally, signature. A null signature
    /// matches the first method with the given name. Returns null when no
    /// match exists.
    /// </summary>
    public override MethodDesc GetMethod(string name, MethodSignature signature)
    {
        var metadataReader = this.MetadataReader;
        var stringComparer = metadataReader.StringComparer;

        foreach (var handle in _typeDefinition.GetMethods())
        {
            if (stringComparer.Equals(metadataReader.GetMethodDefinition(handle).Name, name))
            {
                MethodDesc method = (MethodDesc)_module.GetObject(handle);
                if (signature == null || signature.Equals(method.Signature))
                    return method;
            }
        }

        return null;
    }

    // Returns the class constructor (.cctor) or null if the type has none.
    public override MethodDesc GetStaticConstructor()
    {
        var metadataReader = this.MetadataReader;
        var stringComparer = metadataReader.StringComparer;

        foreach (var handle in _typeDefinition.GetMethods())
        {
            var methodDefinition = metadataReader.GetMethodDefinition(handle);
            if ((methodDefinition.Attributes & MethodAttributes.SpecialName) != 0 &&
                stringComparer.Equals(methodDefinition.Name, ".cctor"))
            {
                MethodDesc method = (MethodDesc)_module.GetObject(handle);
                return method;
            }
        }

        return null;
    }

    public override MethodDesc GetFinalizer()
    {
        // System.Object defines Finalize but doesn't use it, so we can determine that a type has a Finalizer
        // by checking for a virtual method override that lands anywhere other than Object in the inheritance
        // chain.
        if (!HasBaseType)
            return null;

        TypeDesc objectType = Context.GetWellKnownType(WellKnownType.Object);
        MethodDesc decl = objectType.GetMethod("Finalize", null);

        if (decl != null)
        {
            MethodDesc impl = VirtualFunctionResolution.FindVirtualFunctionTargetMethodOnObjectType(decl, this);
            if (impl.OwningType != objectType)
            {
                return impl;
            }

            return null;
        }

        // TODO: Better exception type. Should be: "CoreLib doesn't have a required thing in it".
        throw new NotImplementedException();
    }

    public override IEnumerable<FieldDesc> GetFields()
    {
        foreach (var handle in _typeDefinition.GetFields())
        {
            var field = (EcmaField)_module.GetObject(handle);
            yield return field;
        }
    }

    // Finds a field by name; returns null when no field matches.
    public override FieldDesc GetField(string name)
    {
        var metadataReader = this.MetadataReader;
        var stringComparer = metadataReader.StringComparer;

        foreach (var handle in _typeDefinition.GetFields())
        {
            if (stringComparer.Equals(metadataReader.GetFieldDefinition(handle).Name, name))
            {
                var field = (EcmaField)_module.GetObject(handle);
                return field;
            }
        }

        return null;
    }

    public override IEnumerable<MetadataType> GetNestedTypes()
    {
        foreach (var handle in _typeDefinition.GetNestedTypes())
        {
            yield return (MetadataType)_module.GetObject(handle);
        }
    }

    // Finds a directly nested type by name; returns null when absent.
    public override MetadataType GetNestedType(string name)
    {
        var metadataReader = this.MetadataReader;
        var stringComparer = metadataReader.StringComparer;

        foreach (var handle in _typeDefinition.GetNestedTypes())
        {
            if (stringComparer.Equals(metadataReader.GetTypeDefinition(handle).Name, name))
                return (MetadataType)_module.GetObject(handle);
        }

        return null;
    }

    public TypeAttributes Attributes
    {
        get
        {
            return _typeDefinition.Attributes;
        }
    }

    // Declaring type for nested types; null for top-level types.
    public override MetadataType ContainingType
    {
        get
        {
            if (!_typeDefinition.Attributes.IsNested())
                return null;

            var handle = _typeDefinition.GetDeclaringType();
            return (MetadataType)_module.GetType(handle);
        }
    }

    public override bool HasCustomAttribute(string attributeNamespace, string attributeName)
    {
        return MetadataReader.HasCustomAttribute(_typeDefinition.GetCustomAttributes(),
            attributeNamespace, attributeName);
    }

    // "[AssemblyName]Full.Type.Name" — also used by the constructor in
    // DEBUG builds to force eager name initialization.
    public override string ToString()
    {
        return "[" + _module.GetName().Name + "]" + this.GetFullName();
    }

    /// <summary>
    /// Reads packing size, total size and (for explicit layout only) the
    /// per-instance-field offsets from the metadata layout information.
    /// </summary>
    public override ClassLayoutMetadata GetClassLayout()
    {
        TypeLayout layout = _typeDefinition.GetLayout();

        ClassLayoutMetadata result;
        result.PackingSize = layout.PackingSize;
        result.Size = layout.Size;

        // Skip reading field offsets if this is not explicit layout
        if (IsExplicitLayout)
        {
            var fieldDefinitionHandles = _typeDefinition.GetFields();
            var numInstanceFields = 0;

            // First pass: count instance fields so the array can be sized.
            foreach (var handle in fieldDefinitionHandles)
            {
                var fieldDefinition = MetadataReader.GetFieldDefinition(handle);
                if ((fieldDefinition.Attributes & FieldAttributes.Static) != 0)
                    continue;

                numInstanceFields++;
            }

            result.Offsets = new FieldAndOffset[numInstanceFields];

            // Second pass: record each instance field together with its offset.
            int index = 0;
            foreach (var handle in fieldDefinitionHandles)
            {
                var fieldDefinition = MetadataReader.GetFieldDefinition(handle);
                if ((fieldDefinition.Attributes & FieldAttributes.Static) != 0)
                    continue;

                // Note: GetOffset() returns -1 when offset was not set in the metadata which maps nicely
                // to FieldAndOffset.InvalidOffset.
                Debug.Assert(FieldAndOffset.InvalidOffset == -1);
                result.Offsets[index] =
                    new FieldAndOffset((FieldDesc)_module.GetObject(handle), fieldDefinition.GetOffset());

                index++;
            }
        }
        else
            result.Offsets = null;

        return result;
    }

    public override bool IsExplicitLayout
    {
        get
        {
            return (_typeDefinition.Attributes & TypeAttributes.ExplicitLayout) != 0;
        }
    }

    public override bool IsSequentialLayout
    {
        get
        {
            return (_typeDefinition.Attributes & TypeAttributes.SequentialLayout) != 0;
        }
    }

    public override bool IsBeforeFieldInit
    {
        get
        {
            return (_typeDefinition.Attributes & TypeAttributes.BeforeFieldInit) != 0;
        }
    }

    public override bool IsSealed
    {
        get
        {
            return (_typeDefinition.Attributes & TypeAttributes.Sealed) != 0;
        }
    }
}
}
| |
using Aardvark.Base;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
namespace Aardvark.Data.Vrml97
{
/// <summary>
/// Symbol table: interned <see cref="Symbol"/> constants for the names
/// used throughout the Vrml97 parser and parse tree.
/// </summary>
public static class Vrml97Sym
{
#pragma warning disable 1591
    // Parse-tree / scene keys.
    public static readonly Symbol Vrml97 = "Vrml97";
    public static readonly Symbol url = "url";
    public static readonly Symbol texture = "texture";
    public static readonly Symbol name = "name";
    public static readonly Symbol filename = "filename";
    public static readonly Symbol node = "node";
    public static readonly Symbol root = "root";

    // Appearance / texture-transform field names.
    public static readonly Symbol appearance = "appearance";
    public static readonly Symbol material = "material";
    public static readonly Symbol textureTransform = "textureTransform";
    public static readonly Symbol center = "center";
    public static readonly Symbol rotation = "rotation";
    public static readonly Symbol scale = "scale";
    public static readonly Symbol translation = "translation";
    public static readonly Symbol scaleOrientation = "scaleOrientation";

    // VRML97 keywords.
    public static readonly Symbol DEF = "DEF";
    public static readonly Symbol USE = "USE";
    public static readonly Symbol ROUTE = "ROUTE";
    public static readonly Symbol NULL = "NULL";
#pragma warning restore 1591
}
/// <summary>
/// Vrml97 parser.
/// Creates a parse tree from a file, or a stream reader.
///
/// Example:
/// Parser parser = new Parser("myVrmlFile.wrl");
/// SymMapBase parseTree = parser.Perform();
///
/// </summary>
internal class Parser : IDisposable
{
Stream m_inputStream;
#region Public interface.
/// <summary>
/// Creates a parser that reads VRML97 data from the supplied stream.
/// Call <see cref="Perform"/> to run the parse. The caller retains
/// ownership of the stream (this constructor does not create one).
/// </summary>
/// <param name="input">Input stream.</param>
/// <param name="fileName">Name recorded in the result map.</param>
public Parser(Stream input, string fileName)
{
    m_inputStream = input;
    m_tokenizer = new Tokenizer(input);
    m_result.TypeName = Vrml97Sym.Vrml97;
    m_result[Vrml97Sym.filename] = fileName;
}
/// <summary>
/// Creates a parser that reads VRML97 data from the given file.
/// Call <see cref="Perform"/> to run the parse. An internal
/// read-only FileStream is opened here, so Dispose must be called
/// once the parser is no longer needed.
/// </summary>
/// <param name="fileName">Input filename.</param>
public Parser(string fileName)
{
    m_result.TypeName = Vrml97Sym.Vrml97;
    m_result[Vrml97Sym.filename] = fileName;

    // Shared-read, synchronous stream with a 4 KiB buffer.
    var stream = new FileStream(
        fileName, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, false);
    m_inputStream = stream;
    m_tokenizer = new Tokenizer(stream);
}
/// <summary>
/// Parses the input data and returns the resulting parse tree
/// wrapped in a scene object.
/// </summary>
/// <returns>Parse tree.</returns>
public Vrml97Scene Perform()
{
    var nodes = new List<SymMapBase>();
    for (;;)
    {
        try
        {
            var parsed = ParseNode(m_tokenizer);
            if (parsed == null) break;  // end of input
            nodes.Add(parsed);
            // Yields the remainder of the time slice after each node;
            // presumably keeps the host responsive on big files -- TODO confirm.
            Thread.Sleep(0);
        }
        catch (ParseException e)
        {
            // Best-effort: report the problem and return whatever
            // has been parsed so far.
            Console.WriteLine("WARNING: Caught exception while parsing: {0}!", e.Message);
            Console.WriteLine("WARNING: Result may contain partial, incorrect or invalid data!");
            break;
        }
    }
    m_result[Vrml97Sym.root] = nodes;
    return new Vrml97Scene(m_result);
}
#endregion
#region Node specs.
/** Static constructor: builds the per-node-type parse table
 * (m_parseInfoMap). For each VRML97 node type it records either a
 * custom parse function (DEF/USE/ROUTE/NULL) or a table of field
 * names mapped to a field parser plus an optional default value. */
static Parser()
{
    // One shared delegate per field type; the tables below reference these.
var SFBool = new FieldParser(ParseSFBool);
//var MFBool = new FieldParser(ParseMFBool);
var SFColor = new FieldParser(ParseSFColor);
var MFColor = new FieldParser(ParseMFColor);
var SFFloat = new FieldParser(ParseSFFloat);
var MFFloat = new FieldParser(ParseMFFloat);
var SFImage = new FieldParser(ParseSFImage);
var SFInt32 = new FieldParser(ParseSFInt32);
var MFInt32 = new FieldParser(ParseMFInt32);
var SFNode = new FieldParser(ParseSFNode);
var MFNode = new FieldParser(ParseMFNode);
var SFRotation = new FieldParser(ParseSFRotation);
var MFRotation = new FieldParser(ParseMFRotation);
var SFString = new FieldParser(ParseSFString);
var MFString = new FieldParser(ParseMFString);
var SFTime = new FieldParser(ParseSFFloat); // SFTime values are parsed as plain floats
//var MFTime = new FieldParser(ParseMFFloat);
var SFVec2f = new FieldParser(ParseSFVec2f);
var MFVec2f = new FieldParser(ParseMFVec2f);
var SFVec3f = new FieldParser(ParseSFVec3f);
var MFVec3f = new FieldParser(ParseMFVec3f);
// Dictionary<string, (FieldParser, object)> fields;
// Lookup table for Vrml97 node types.
// For each node type a NodeParseInfo entry specifies how
// to handle this kind of node.
m_parseInfoMap = new SymbolDict<NodeParseInfo>
{
    // DEF
    [Vrml97Sym.DEF] = new NodeParseInfo(new NodeParser(ParseDEF)),
    // USE
    [Vrml97Sym.USE] = new NodeParseInfo(new NodeParser(ParseUSE)),
    // ROUTE
    [Vrml97Sym.ROUTE] = new NodeParseInfo(new NodeParser(ParseROUTE)),
    // NULL
    [Vrml97Sym.NULL] = new NodeParseInfo(new NodeParser(ParseNULL))
};
// Shared (parser, default) tuples for the bboxCenter/bboxSize fields
// that many grouping nodes have in common.
var defaultBBoxCenter = (SFVec3f, (object)V3f.Zero);
var defaultBBoxSize = (SFVec3f, (object)new V3f(-1, -1, -1));
// fdd: field definition with a default value; fd: field without one.
(FieldParser, object) fdd(FieldParser fp, object obj) => (fp, obj);
(FieldParser, object) fd(FieldParser fp) => (fp, null);
// Anchor
m_parseInfoMap["Anchor"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "children", fd(MFNode) },
        { "description", fd(SFString) },
        { "parameter", fd(MFString) },
        { "url", fd(MFString) },
        { "bboxCenter", defaultBBoxCenter},
        { "bboxSize", defaultBBoxSize}
    });
// Appearance
m_parseInfoMap["Appearance"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "material", fd(SFNode) },
        { "texture", fd(SFNode) },
        { "textureTransform", fd(SFNode) }
    });
// AudioClip
m_parseInfoMap["AudioClip"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "description", fd(SFString) },
        { "loop", fdd(SFBool, false) },
        { "pitch", fdd(SFFloat, 1.0f) },
        { "startTime", fdd(SFTime, 0.0f)},
        { "stopTime", fdd(SFTime, 0.0f)},
        { "url", fd(MFString)}
    });
// Background
m_parseInfoMap["Background"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "groundAngle", fd(MFFloat) },
        { "groundColor", fd(MFColor) },
        { "backUrl", fd(MFString) },
        { "bottomUrl", fd(MFString) },
        { "frontUrl", fd(MFString) },
        { "leftUrl", fd(MFString) },
        { "rightUrl", fd(MFString) },
        { "topUrl", fd(MFString) },
        { "skyAngle", fd(MFFloat) },
        { "skyColor", fdd(MFColor, C3f.Black) }
    });
// Billboard
m_parseInfoMap["Billboard"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "axisOfRotation", fdd(SFVec3f, new V3f(0.0f, 1.0f, 0.0f)) },
        { "children", fd(MFNode) },
        { "bboxCenter", defaultBBoxCenter},
        { "bboxSize", defaultBBoxSize}
    });
// Box
m_parseInfoMap["Box"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "size", fdd(SFVec3f, new V3f(2.0f, 2.0f, 2.0f)) }
    });
// Collision
m_parseInfoMap["Collision"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "children", fd(MFNode) },
        { "collide", fdd(SFBool, true) },
        { "bboxCenter", defaultBBoxCenter},
        { "bboxSize", defaultBBoxSize},
        { "proxy", fd(SFNode) }
    });
// Color
m_parseInfoMap["Color"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "color", fd(MFColor) }
    });
// ColorInterpolator
m_parseInfoMap["ColorInterpolator"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "key", fd(MFFloat) },
        { "keyValue", fd(MFColor) }
    });
// Cone
m_parseInfoMap["Cone"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "bottomRadius", fdd(SFFloat, 1.0f) },
        { "height", fdd(SFFloat, 2.0f) },
        { "side", fdd(SFBool, true) },
        { "bottom", fdd(SFBool, true) }
    });
// Coordinate
m_parseInfoMap["Coordinate"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "point", fd(MFVec3f) }
    });
// CoordinateInterpolator
m_parseInfoMap["CoordinateInterpolator"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "key", fd(MFFloat) },
        { "keyValue", fd(MFVec3f) }
    });
// Cylinder
m_parseInfoMap["Cylinder"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "bottom", fdd(SFBool, true) },
        { "height", fdd(SFFloat, 2.0f) },
        { "radius", fdd(SFFloat, 1.0f) },
        { "side", fdd(SFBool, true) },
        { "top", fdd(SFBool, true) }
    });
// CylinderSensor
m_parseInfoMap["CylinderSensor"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "autoOffset", fdd(SFBool, true) },
        { "diskAngle", fdd(SFFloat, 0.262f) },
        { "enabled", fdd(SFBool, true) },
        { "maxAngle", fdd(SFFloat, -1.0f) },
        { "minAngle", fdd(SFFloat, 0.0f) },
        { "offset", fdd(SFFloat, 0.0f) }
    });
// DirectionalLight
m_parseInfoMap["DirectionalLight"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "ambientIntensity", fdd(SFFloat, 0.0f) },
        { "color", fdd(SFColor, C3f.White) },
        { "direction", fdd(SFVec3f, new V3f(0.0f, 0.0f, -1.0f)) },
        { "intensity", fdd(SFFloat, 1.0f) },
        { "on", fdd(SFBool, true) }
    });
// ElevationGrid
m_parseInfoMap["ElevationGrid"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "color", fd(SFNode) },
        { "normal", fd(SFNode) },
        { "texCoord", fd(SFNode) },
        { "height", fd(MFFloat) },
        { "ccw", fdd(SFBool, true) },
        { "colorPerVertex", fdd(SFBool, true) },
        { "creaseAngle", fdd(SFFloat, 0.0f) },
        { "normalPerVertex", fdd(SFBool, true) },
        { "solid", fdd(SFBool, true) },
        { "xDimension", fdd(SFInt32, 0) },
        { "xSpacing", fdd(SFFloat, 1.0f) },
        { "zDimension", fdd(SFInt32, 0) },
        { "zSpacing", fdd(SFFloat, 1.0f) }
    });
// Extrusion
m_parseInfoMap["Extrusion"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "beginCap", fdd(SFBool, true) },
        { "ccw", fdd(SFBool, true) },
        { "convex", fdd(SFBool, true) },
        { "creaseAngle", fdd(SFFloat, 0.0f) },
        { "crossSection", fdd(MFVec2f, new List<V2f>() {new V2f(1.0f, 1.0f), new V2f(1.0f, -1.0f), new V2f(-1.0f, -1.0f), new V2f(-1.0f, 1.0f), new V2f(1.0f, 1.0f) }) },
        { "endCap", fdd(SFBool, true) },
        { "orientation", fdd(MFRotation, new V4f(0.0f, 0.0f, 1.0f, 0.0f)) },
        { "scale", fdd(MFVec2f, new V2f(1.0f, 1.0f)) },
        { "solid", fdd(SFBool, true) },
        { "spine", fdd(MFVec3f, new List<V3f>() { V3f.Zero, new V3f(0.0f, 1.0f, 0.0f) }) }
    });
// Fog
m_parseInfoMap["Fog"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "color", fdd(SFColor, C3f.White) },
        { "fogType", fdd(SFString, "LINEAR") },
        { "visibilityRange", fdd(SFFloat, 0.0f) }
    });
// FontStyle
m_parseInfoMap["FontStyle"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "family", fdd(MFString, "SERIF") },
        { "horizontal", fdd(SFBool, true) },
        { "justify", fdd(MFString, "BEGIN") },
        { "language", fd(SFString) },
        { "leftToRight", fdd(SFBool, true) },
        { "size", fdd(SFFloat, 1.0f) },
        { "spacing", fdd(SFFloat, 1.0f) },
        { "style", fdd(SFString, "PLAIN") },
        { "topToBottom", fdd(SFBool, true) }
    });
// Group
m_parseInfoMap["Group"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "children", fd(MFNode) },
        { "bboxCenter", defaultBBoxCenter },
        { "bboxSize", defaultBBoxSize }
    });
// ImageTexture
m_parseInfoMap["ImageTexture"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "url", fd(MFString) },
        { "repeatS", fdd(SFBool, true) },
        { "repeatT", fdd(SFBool, true) }
    });
// IndexedFaceSet
m_parseInfoMap["IndexedFaceSet"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "color", fd(SFNode) },
        { "coord", fd(SFNode) },
        { "normal", fd(SFNode) },
        { "texCoord", fd(SFNode) },
        { "ccw", fdd(SFBool, true) },
        { "colorIndex", fd(MFInt32) },
        { "colorPerVertex", fdd(SFBool, true) },
        { "convex", fdd(SFBool, true) },
        { "coordIndex", fd(MFInt32) },
        { "creaseAngle", fdd(SFFloat, 0.0f) },
        { "normalIndex", fd(MFInt32) },
        { "normalPerVertex", fdd(SFBool, true) },
        { "solid", fdd(SFBool, true) },
        { "texCoordIndex", fd(MFInt32) },
        { "edgeSharpness", fd(MFFloat) },
        { "edgeSharpnessIndex", fd(MFInt32) },
        { "neighborMesh", fd(MFString) },
        { "neighborIndex", fd(MFInt32) },
        { "neighborSide", fd(MFInt32) },
        { "neighborFace", fd(MFInt32) },
        { "meshName", fd(SFString) },
        { "topologyHoles", fd(SFInt32) }
    });
// IndexedLineSet
m_parseInfoMap["IndexedLineSet"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "color", fd(SFNode) },
        { "coord", fd(SFNode) },
        { "colorIndex", fd(MFInt32) },
        { "colorPerVertex", fdd(SFBool, true) },
        { "coordIndex", fd(MFInt32) }
    });
// Inline
m_parseInfoMap["Inline"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "url", fd(MFString) },
        { "bboxCenter", defaultBBoxCenter },
        { "bboxSize", defaultBBoxSize }
    });
// LOD
m_parseInfoMap["LOD"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "level", fd(MFNode) },
        { "center", defaultBBoxCenter },
        { "range", fd(MFFloat) }
    });
// Material
m_parseInfoMap["Material"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "ambientIntensity", fdd(SFFloat, 0.2f) },
        { "diffuseColor", fdd(SFColor, new C3f(0.8f, 0.8f, 0.8f)) },
        { "emissiveColor", fdd(SFColor, C3f.Black) },
        { "shininess", fdd(SFFloat, 0.2f) },
        { "specularColor", fdd(SFColor, C3f.Black) },
        { "transparency", fdd(SFFloat, 0.0f) }
    });
// MovieTexture
m_parseInfoMap["MovieTexture"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "loop", fdd(SFBool, false) },
        { "speed", fdd(SFFloat, 1.0f) },
        { "startTime", fdd(SFTime, 1.0f) },
        { "stopTime", fdd(SFTime, 1.0f) },
        { "url", fd(MFString) },
        { "repeatS", fdd(SFBool, true) },
        { "repeatT", fdd(SFBool, true) }
    });
// NavigationInfo
m_parseInfoMap["NavigationInfo"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "avatarSize", fdd(MFFloat, new List<float>() {0.25f, 1.6f, 0.75f}) },
        { "headlight", fdd(SFBool, true) },
        { "speed", fdd(SFFloat, 1.0f) },
        { "type", fdd(MFString, new List<string>() {"WALK", "ANY"}) },
        { "visibilityLimit", fdd(SFFloat, 0.0f) }
    });
// Normal
m_parseInfoMap["Normal"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "vector", fd(MFVec3f) }
    });
// NormalInterpolator
m_parseInfoMap["NormalInterpolator"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "key", fd(MFFloat) },
        { "keyValue", fd(MFVec3f) }
    });
// OrientationInterpolator
m_parseInfoMap["OrientationInterpolator"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "key", fd(MFFloat) },
        { "keyValue", fd(MFRotation) }
    });
// PixelTexture
m_parseInfoMap["PixelTexture"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "image", fdd(SFImage, new List<uint>() {0, 0, 0}) },
        { "repeatS", fdd(SFBool, true) },
        { "repeatT", fdd(SFBool, true) }
    });
// PlaneSensor
m_parseInfoMap["PlaneSensor"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "autoOffset", fdd(SFBool, true) },
        { "enabled", fdd(SFBool, true) },
        { "maxPosition", fdd(SFVec2f, new V2f(-1.0f, -1.0f)) },
        { "minPosition", fdd(SFVec2f, V2f.Zero) },
        { "offset", defaultBBoxCenter } // reuses (SFVec3f, V3f.Zero): offset is an SFVec3f defaulting to 0 0 0
    });
// PointLight
m_parseInfoMap["PointLight"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "ambientIntensity", fdd(SFFloat, 0.0f) },
        { "attenuation", fdd(SFVec3f, new V3f(1.0f, 0.0f, 0.0f)) },
        { "color", fdd(SFColor, C3f.White) },
        { "intensity", fdd(SFFloat, 1.0f) },
        { "location", defaultBBoxCenter },
        { "on", fdd(SFBool, true) },
        { "radius", fdd(SFFloat, 100.0f) }
    });
// PointSet
m_parseInfoMap["PointSet"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "color", fd(SFNode) },
        { "coord", fd(SFNode) }
    });
// PositionInterpolator
m_parseInfoMap["PositionInterpolator"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "key", fd(MFFloat) },
        { "keyValue", fd(MFVec3f) }
    });
// ProximitySensor
m_parseInfoMap["ProximitySensor"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "center", defaultBBoxCenter },
        { "size", defaultBBoxCenter },
        { "enabled", fdd(SFBool, true) }
    });
// ScalarInterpolator
m_parseInfoMap["ScalarInterpolator"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "key", fd(MFFloat) },
        { "keyValue", fd(MFFloat) }
    });
// Script
// skipped
// Shape
m_parseInfoMap["Shape"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "appearance", fd(SFNode) },
        { "geometry", fd(SFNode) },
    });
// Sound
m_parseInfoMap["Sound"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "direction", fdd(SFVec3f, new V3f(0.0f, 0.0f, 1.0f)) },
        { "intensity", fdd(SFFloat, 1.0f) },
        { "location", defaultBBoxCenter },
        { "maxBack", fdd(SFFloat, 10.0f) },
        { "maxFront", fdd(SFFloat, 10.0f) },
        { "minBack", fdd(SFFloat, 1.0f) },
        { "minFront", fdd(SFFloat, 1.0f) },
        { "priority", fdd(SFFloat, 0.0f) },
        { "source", fd(SFNode) },
        { "spatialize", fdd(SFBool, true) }
    });
// Sphere
m_parseInfoMap["Sphere"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "radius", fdd(SFFloat, 1.0f) }
    });
// SphereSensor
m_parseInfoMap["SphereSensor"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "autoOffset", fdd(SFBool, true) },
        { "enabled", fdd(SFBool, true) },
        { "offset", fdd(SFRotation, new V4f(0.0f, 1.0f, 0.0f, 0.0f)) }
    });
// SpotLight
m_parseInfoMap["SpotLight"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "ambientIntensity", fdd(SFFloat, 0.0f) },
        { "attenuation", fdd(SFVec3f, new V3f(1.0f, 0.0f, 0.0f)) },
        { "beamWidth", fdd(SFFloat, 1.570796f) },
        { "color", fdd(SFColor, C3f.White) },
        { "cutOffAngle", fdd(SFFloat, 0.785398f) },
        { "direction", fdd(SFVec3f, new V3f(0.0f, 0.0f, -1.0f)) },
        { "intensity", fdd(SFFloat, 1.0f) },
        { "location", defaultBBoxCenter },
        { "on", fdd(SFBool, true) },
        { "radius", fdd(SFFloat, 100.0f) }
    });
// Switch
m_parseInfoMap["Switch"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "choice", fd(MFNode) },
        { "whichChoice", fdd(SFInt32, -1) }
    });
// Text
m_parseInfoMap["Text"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "string", fd(MFString) },
        { "fontStyle", fd(SFNode) },
        { "length", fd(MFFloat) },
        { "maxExtent", fdd(SFFloat, 0.0f) }
    });
// TextureCoordinate
m_parseInfoMap["TextureCoordinate"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "point", fd(MFVec2f) }
    });
// TextureTransform
m_parseInfoMap["TextureTransform"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "center", fdd(SFVec2f, V2f.Zero) },
        { "rotation", fdd(SFFloat, 0.0f) },
        { "scale", fdd(SFVec2f, new V2f(1.0f, 1.0f)) },
        { "translation", fdd(SFVec2f, V2f.Zero) }
    });
// TimeSensor
m_parseInfoMap["TimeSensor"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "cycleInterval", fdd(SFTime, 1.0f) },
        { "enabled", fdd(SFBool, true) },
        { "loop", fdd(SFBool, false) },
        { "startTime", fdd(SFTime, 0.0f) },
        { "stopTime", fdd(SFTime, 0.0f) }
    });
// TouchSensor
m_parseInfoMap["TouchSensor"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "enabled", fdd(SFBool, true) }
    });
// Transform
m_parseInfoMap["Transform"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "center", defaultBBoxCenter },
        { "children", fd(MFNode) },
        { "rotation", fdd(SFRotation, new V4f(0.0f, 0.0f, 1.0f, 0.0f)) },
        { "scale", fdd(SFVec3f, new V3f(1.0f, 1.0f, 1.0f)) },
        { "scaleOrientation", fdd(SFRotation, new V4f(0.0f, 0.0f, 1.0f, 0.0f)) },
        { "translation", defaultBBoxCenter },
        { "bboxCenter", defaultBBoxCenter },
        { "bboxSize", defaultBBoxSize }
    });
// Viewpoint
m_parseInfoMap["Viewpoint"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "fieldOfView", fdd(SFFloat, 0.785398f) },
        { "jump", fdd(SFBool, true) },
        { "orientation", fdd(SFRotation, new V4f(0.0f, 0.0f, 1.0f, 0.0f)) },
        { "position", fdd(SFVec3f, new V3f(0.0f, 0.0f, 10.0f)) },
        { "description", fd(SFString) }
    });
// VisibilitySensor
m_parseInfoMap["VisibilitySensor"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "center", defaultBBoxCenter },
        { "enabled", fdd(SFBool, true) },
        { "size", defaultBBoxCenter }
    });
// WorldInfo
m_parseInfoMap["WorldInfo"] = new NodeParseInfo(
    new SymbolDict<(FieldParser, object)>()
    {
        { "title", fd(SFString) },
        { "info", fd(MFString) }
    });
}
// Parses "DEF <name> <node>": a named node definition.
private static SymMapBase ParseDEF(Tokenizer t)
{
    var map = new SymMapBase();
    map["name"] = t.NextNameToken().ToString();
    map["node"] = ParseNode(t);
    return map;
}
// Parses "USE <name>": a reference to a previously DEFed node.
private static SymMapBase ParseUSE(Tokenizer t)
    => new SymMapBase { ["name"] = t.NextNameToken().ToString() };
// Parses "ROUTE nodeOut.eventOut TO nodeIn.eventIn".
private static SymMapBase ParseROUTE(Tokenizer t)
{
    var route = new SymMapBase();
    route["out"] = t.NextNameToken().ToString(); // nodeNameId.eventOutId
    t.NextToken();                               // skip the "TO" keyword
    route["in"] = t.NextNameToken().ToString();  // nodeNameId.eventInId
    return route;
}
// "NULL" keyword: represents the absence of a node.
private static SymMapBase ParseNULL(Tokenizer t)
{
    return null;
}
#endregion
#region Helper functions.
// Reads a single boolean field value.
private static object ParseSFBool(Tokenizer t)
{
    return t.NextToken().ToBool();
}
// Reads one boolean or a bracketed list of booleans.
private static List<bool> ParseMFBool(Tokenizer t)
{
    var values = new List<bool>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        // Single unbracketed value.
        values.Add(tok.ToBool());
        return values;
    }
    // Bracketed list: read until the closing bracket.
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        values.Add(tok.ToBool());
    }
    return values;
}
// Reads a single float field value (also used for SFTime).
private static object ParseSFFloat(Tokenizer t)
{
    return t.NextToken().ToFloat();
}
// Reads one float or a bracketed list of floats.
private static List<float> ParseMFFloat(Tokenizer t)
{
    var values = new List<float>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        values.Add(tok.ToFloat());
        return values;
    }
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        values.Add(tok.ToFloat());
    }
    return values;
}
// Reads an SFImage: width, height, number of components, followed by
// width*height pixel values (one token per pixel).
private static List<uint> ParseSFImage(Tokenizer t)
{
    uint width = t.NextToken().ToUInt32();
    uint height = t.NextToken().ToUInt32();
    uint components = t.NextToken().ToUInt32();
    var image = new List<uint> { width, height, components };
    for (uint i = 0, pixelCount = width * height; i < pixelCount; i++)
    {
        image.Add(t.NextToken().ToUInt32());
    }
    return image;
}
// Reads a single 32-bit integer field value.
private static object ParseSFInt32(Tokenizer t)
{
    return t.NextToken().ToInt32();
}
// Reads one int or a bracketed list of ints.
private static List<int> ParseMFInt32(Tokenizer t)
{
    var values = new List<int>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        values.Add(tok.ToInt32());
        return values;
    }
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        values.Add(tok.ToInt32());
    }
    return values;
}
// Reads a single child node field value.
private static SymMapBase ParseSFNode(Tokenizer t)
{
    return ParseNode(t);
}
// Reads one node or a bracketed list of nodes.
private static List<SymMapBase> ParseMFNode(Tokenizer t)
{
    var nodes = new List<SymMapBase>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        // Single node: push the consumed token back so ParseNode sees it.
        t.PushBack(tok);
        nodes.Add(ParseNode(t));
        return nodes;
    }
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        t.PushBack(tok);
        nodes.Add(ParseNode(t));
    }
    return nodes;
}
// Reads a rotation: axis (x, y, z) plus angle, as four floats.
// C# evaluates arguments left to right, so the tokens are consumed in order.
private static object ParseSFRotation(Tokenizer t)
    => new V4f(
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat());
// Reads one rotation or a bracketed list of rotations.
private static List<V4f> ParseMFRotation(Tokenizer t)
{
    // Reads the y, z, w components following an already-consumed x component.
    V4f Rest(float x) => new V4f(
        x,
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat());
    var values = new List<V4f>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        values.Add(Rest(tok.ToFloat()));
        return values;
    }
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        values.Add(Rest(tok.ToFloat()));
    }
    return values;
}
// Reads a quoted string field value and returns its unquoted content.
private static string ParseSFString(Tokenizer t)
{
    return t.NextToken().GetCheckedUnquotedString();
}
// Reads one string or a bracketed list of strings.
private static List<string> ParseMFString(Tokenizer t)
{
    var values = new List<string>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        values.Add(tok.GetCheckedUnquotedString());
        return values;
    }
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        values.Add(tok.GetCheckedUnquotedString());
    }
    return values;
}
// Reads a 2D vector as two floats (tokens consumed left to right).
private static object ParseSFVec2f(Tokenizer t)
    => new V2f(t.NextToken().ToFloat(), t.NextToken().ToFloat());
// Reads one 2D vector or a bracketed list of 2D vectors.
private static List<V2f> ParseMFVec2f(Tokenizer t)
{
    // Reads the y component following an already-consumed x component.
    V2f Rest(float x) => new V2f(x, t.NextToken().ToFloat());
    var values = new List<V2f>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        values.Add(Rest(tok.ToFloat()));
        return values;
    }
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        values.Add(Rest(tok.ToFloat()));
    }
    return values;
}
// Reads a 3D vector as three floats (tokens consumed left to right).
private static object ParseSFVec3f(Tokenizer t)
    => new V3f(
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat());
// Reads one 3D vector or a bracketed list of 3D vectors.
private static List<V3f> ParseMFVec3f(Tokenizer t)
{
    // Reads the y and z components following an already-consumed x component.
    V3f Rest(float x) => new V3f(
        x,
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat());
    var values = new List<V3f>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        values.Add(Rest(tok.ToFloat()));
        return values;
    }
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        values.Add(Rest(tok.ToFloat()));
    }
    return values;
}
// Reads an RGB color as three floats (tokens consumed left to right).
private static object ParseSFColor(Tokenizer t)
    => new C3f(
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat());
// Reads one RGB color or a bracketed list of RGB colors.
private static List<C3f> ParseMFColor(Tokenizer t)
{
    // Reads the g and b components following an already-consumed r component.
    C3f Rest(float r) => new C3f(
        r,
        t.NextToken().ToFloat(),
        t.NextToken().ToFloat());
    var values = new List<C3f>();
    var tok = t.NextToken();
    if (!tok.IsBracketOpen)
    {
        values.Add(Rest(tok.ToFloat()));
        return values;
    }
    for (tok = t.NextToken(); !tok.IsBracketClose; tok = t.NextToken())
    {
        values.Add(Rest(tok.ToFloat()));
    }
    return values;
}
// Consumes the next token and verifies that it is an opening brace.
private static void ExpectBraceOpen(Tokenizer t)
{
    var tok = t.NextToken();
    if (!tok.IsBraceOpen)
    {
        throw new ParseException(
            "Token '{' expected. Found " + tok.ToString() + " instead!"
            );
    }
}
// Consumes the next token and verifies that it is a closing brace.
private static void ExpectBraceClose(Tokenizer t)
{
    var tok = t.NextToken();
    if (!tok.IsBraceClose)
    {
        throw new ParseException(
            "Token '}' expected. Found " + tok.ToString() + " instead!"
            );
    }
}
#endregion
#region Internal stuff.
// Parses a single node. Known node types (from m_parseInfoMap) are
// handled either by their custom parse function or by the generic
// field-table parser; unknown node types are skipped as raw text.
// Returns null at end of input.
private static SymMapBase ParseNode(Tokenizer t)
{
    // Next token is expected to be a Vrml97 node type.
    var nodeType = t.NextToken().ToString();
    if (nodeType == null) return null;
    SymMapBase node;
    // Single dictionary lookup instead of ContainsKey followed by the
    // indexer (which looked the key up twice).
    if (m_parseInfoMap.TryGetValue(nodeType, out NodeParseInfo info))
    {
        // If a field description is available for this type, use the
        // generic node parser, else the custom parse function.
        node = (info.FieldDefs == null) ?
            info.NodeParser(t) :
            ParseGenericNode(t, info);
    }
    else
    {
        // unknown node type
        node = ParseUnknownNode(t);
    }
    if (node != null)
        node.TypeName = nodeType;
    return node;
}
/**
 * Specifies how to parse a node: either via a custom NodeParser
 * (DEF/USE/ROUTE/NULL) or generically from a table of field
 * definitions (FieldDefs), each mapping a field name to its parser
 * and optional default value.
 **/
private struct NodeParseInfo
{
    // Cached so the ROUTE lookup below does not allocate a new delegate per call.
    private static readonly FieldParser s_routeFieldParser = new FieldParser(ParseROUTE);
    // readonly: struct fields are only assigned in the constructors.
    private readonly NodeParser m_parseFunction;
    public readonly SymbolDict<(FieldParser, object)> FieldDefs;
    public NodeParseInfo(NodeParser parseFunction)
        : this(parseFunction, null)
    { }
    public NodeParseInfo(
        SymbolDict<(FieldParser, object)> fields)
        : this(null, fields)
    { }
    public NodeParseInfo(
        NodeParser parseFunction,
        SymbolDict<(FieldParser, object)> fields)
    {
        m_parseFunction = parseFunction;
        FieldDefs = fields;
    }
    // Custom parse function, or null when FieldDefs drives generic parsing.
    public NodeParser NodeParser { get { return m_parseFunction; } }
    // Returns the parser for the given field name; ROUTE statements may
    // appear inside any node body and are handled uniformly.
    public FieldParser FieldParser(string fieldName)
    {
        if (fieldName == "ROUTE") return s_routeFieldParser;
        return FieldDefs[fieldName].Item1;
    }
    // Default value for the given field, or null if the field has none.
    public object DefaultValue(string fieldName)
    {
        return FieldDefs[fieldName].Item2;
    }
}
// Parses a node generically using the field definitions in 'info':
// first populates default values, then reads "fieldName value" pairs
// until the closing brace.
private static SymMapBase ParseGenericNode(
    Tokenizer t,
    NodeParseInfo info
    )
{
    var node = new SymMapBase();
    ExpectBraceOpen(t);
    // Pre-populate fields that have default values.
    foreach (var kvp in info.FieldDefs)
    {
        var defaultValue = kvp.Value.Item2;
        if (defaultValue != null) node[kvp.Key] = defaultValue;
    }
    var token = t.NextToken();
    while (!token.IsBraceClose)
    {
        var fieldName = token.ToString();
        node[fieldName] = info.FieldParser(fieldName)(t);
        token = t.NextToken();
        Thread.Sleep(0); // yield the time slice; keeps the host app responsive on huge files
    }
    return node;
}
// Skips over a node of unknown type by copying its raw, brace-balanced
// content into the result as a single string.
private static SymMapBase ParseUnknownNode(Tokenizer t)
{
    ExpectBraceOpen(t);
    var content = new StringBuilder("{");
    var depth = 1;
    while (depth > 0)
    {
        var token = t.NextToken();
        content.Append(" " + token);
        if (token.IsBraceOpen) depth++;
        if (token.IsBraceClose) depth--;
    }
    var node = new SymMapBase();
    node["unknownNode"] = true;
    node["content"] = content.ToString();
    return node;
}
/// <summary>
/// Disposes the input stream: either the FileStream created by the
/// file-name constructor, or the Stream passed in by the caller.
/// </summary>
public void Dispose() => m_inputStream.Dispose();
// Parses a complete node (used by the custom DEF/USE/ROUTE/NULL handlers).
private delegate SymMapBase NodeParser(Tokenizer t);
// Parses a single field value of a specific Vrml97 field type.
private delegate object FieldParser(Tokenizer t);
// Per-node-type parse instructions; populated once in the static constructor.
private static SymbolDict<NodeParseInfo> m_parseInfoMap;
// Top-level result map (filename, root node list); wrapped by Perform().
private SymMapBase m_result = new SymMapBase();
// Token source created over the input stream by the constructors.
private Tokenizer m_tokenizer;
#endregion
}
}
| |
/***************************************************************************
* AudioCdSource.cs
*
* Copyright (C) 2005-2006 Novell, Inc.
* Written by Aaron Bockover <aaron@abock.org>
****************************************************************************/
/* THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW:
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Data;
using System.Collections;
using System.Collections.Generic;
using Mono.Unix;
using Gtk;
using Banshee.Base;
using Banshee.Widgets;
namespace Banshee.Sources
{
public class AudioCdSource : Source, IImportSource
{
private AudioCdDisk disk;
private VBox box;
private Alignment container;
private AudioCdRipper ripper;
private ActionButton copy_button;
private HighlightMessageArea audiocd_statusbar;
public override string UnmapLabel {
get { return Catalog.GetString("Eject CD"); }
}
public override string UnmapIcon {
get { return "media-eject"; }
}
public override string GenericName {
get { return Catalog.GetString("Audio CD"); }
}
public AudioCdSource(AudioCdDisk disk) : base(disk.Title, 200)
{
this.disk = disk;
disk.Updated += OnDiskUpdated;
container = new Alignment(0.0f, 0.0f, 1.0f, 1.0f);
audiocd_statusbar = new HighlightMessageArea();
audiocd_statusbar.BorderWidth = 5;
audiocd_statusbar.LeftPadding = 15;
audiocd_statusbar.ButtonClicked += delegate { this.disk.QueryMetadata(); };
box = new VBox();
box.Spacing = 5;
box.PackStart(container, true, true, 0);
box.PackStart(audiocd_statusbar, false, false, 0);
container.Show();
box.Show();
CreateActions();
copy_button = new ActionButton(Globals.ActionManager["DuplicateDiscAction"]);
copy_button.Pixbuf = IconThemeUtils.LoadIcon(22, "media-cdrom", Stock.Cdrom);
SourceManager.SourceRemoved += OnSourceRemoved;
}
private void UpdateAudioCdStatus()
{
string status = null;
Gdk.Pixbuf icon = null;
switch(disk.Status) {
case AudioCdLookupStatus.ReadingDisk:
status = Catalog.GetString("Reading table of contents from CD...");
icon = IconThemeUtils.LoadIcon(22, "media-cdrom", "gnome-dev-cdrom-audio", "source-cd-audio");
audiocd_statusbar.ShowCloseButton = false;
break;
case AudioCdLookupStatus.SearchingMetadata:
status = Catalog.GetString("Searching for CD metadata...");
icon = IconThemeUtils.LoadIcon(22, "system-search", Stock.Find);
audiocd_statusbar.ShowCloseButton = false;
break;
case AudioCdLookupStatus.SearchingCoverArt:
status = Catalog.GetString("Searching for CD cover art...");
icon = IconThemeUtils.LoadIcon(22, "system-search", Stock.Find);
audiocd_statusbar.ShowCloseButton = false;
break;
case AudioCdLookupStatus.ErrorNoConnection:
status = Catalog.GetString("Cannot search for CD metadata: " +
"there is no available Internet connection");
icon = IconThemeUtils.LoadIcon(22, "network-wired", Stock.Network);
audiocd_statusbar.ShowCloseButton = true;
break;
case AudioCdLookupStatus.ErrorLookup:
status = Catalog.GetString("Could not fetch metadata for CD.");
icon = IconThemeUtils.LoadIcon(22, Stock.DialogError);
audiocd_statusbar.ShowCloseButton = true;
break;
case AudioCdLookupStatus.Success:
default:
status = null;
icon = null;
break;
}
if(disk.Status == AudioCdLookupStatus.ErrorLookup) {
audiocd_statusbar.ButtonLabel = Stock.Refresh;
audiocd_statusbar.ButtonUseStock = true;
} else {
audiocd_statusbar.ButtonLabel = null;
}
audiocd_statusbar.Visible = status != null;
audiocd_statusbar.Message = String.Format("<big>{0}</big>", GLib.Markup.EscapeText(status));
audiocd_statusbar.Pixbuf = icon;
}
private void CreateActions()
{
action_group = new Gtk.ActionGroup("AudioCD");
action_group.Add(new Gtk.ActionEntry [] {
new Gtk.ActionEntry("DuplicateDiscAction", null,
Catalog.GetString("Copy CD"), null, null,
delegate {
foreach(Banshee.Cdrom.IDrive drive in Banshee.Burner.BurnerCore.DriveFactory) {
if(drive.Device == disk.DeviceNode) {
Banshee.Burner.BurnerCore.DiscDuplicator.Duplicate(drive);
return;
}
}
})
});
Globals.ActionManager.UI.AddUiFromString(@"
<ui>
<popup name='AudioCDMenu' action='AudioCDMenuActions'>
<menuitem name='ImportSource' action='ImportSourceAction' />
<menuitem name='DuplicateDisc' action='DuplicateDiscAction' />
<separator />
<menuitem name='UnmapSource' action='UnmapSourceAction' />
</popup>
</ui>
");
Globals.ActionManager.UI.InsertActionGroup(action_group, 0);
}
public override bool Unmap()
{
if(!disk.Eject()) {
return false;
}
SourceManager.RemoveSource(this);
return true;
}
public override void Activate()
{
InterfaceElements.DetachPlaylistContainer();
container.Add(InterfaceElements.PlaylistContainer);
InterfaceElements.ActionButtonBox.PackStart(copy_button, false, false, 0);
UpdateAudioCdStatus();
}
public override void Deactivate()
{
InterfaceElements.ActionButtonBox.Remove(copy_button);
}
public void Import()
{
SourceManager.SetActiveSource(this);
ImportDisk();
OnUpdated();
}
private void ImportDisk()
{
if(disk.IsRipping) {
Console.WriteLine("CD is already ripping");
return;
}
disk.IsRipping = true;
ArrayList list = new ArrayList();
foreach(AudioCdTrackInfo track in disk.Tracks) {
if(track.CanRip) {
list.Add(track);
}
}
if(list.Count > 0) {
ripper = new AudioCdRipper();
ripper.Finished += OnRipperFinished;
ripper.HaveTrackInfo += OnRipperHaveTrackInfo;
foreach(AudioCdTrackInfo track in list) {
ripper.QueueTrack(track);
}
AudioCdTrackInfo playing_track = PlayerEngineCore.CurrentTrack as AudioCdTrackInfo;
if(playing_track != null && playing_track.Disk == disk) {
PlayerEngineCore.Close();
}
ripper.Start();
} else {
HigMessageDialog dialog = new HigMessageDialog(InterfaceElements.MainWindow, DialogFlags.Modal,
MessageType.Info, ButtonsType.Ok,
Catalog.GetString("Invalid Selection"),
Catalog.GetString("You must select at least one track to import.")
);
dialog.Run();
dialog.Destroy();
disk.IsRipping = false;
}
}
private void OnSourceRemoved(SourceEventArgs args)
{
if(args.Source == this && ripper != null) {
ripper.Cancel();
}
}
// Ripper produced (meta)data for a track; refresh the source display.
private void OnRipperHaveTrackInfo(object o, HaveTrackInfoArgs args)
{
OnUpdated();
}
// Rip completed: clear the ripping flag, drop the ripper instance and
// refresh the source display.
private void OnRipperFinished(object o, EventArgs args)
{
disk.IsRipping = false;
ripper = null;
OnUpdated();
}
// Disk data changed (e.g. a title lookup completed); update the source
// name and status. Marshalled to the main thread since this may be
// raised from a background thread and touches GUI state.
private void OnDiskUpdated(object o, EventArgs args)
{
ThreadAssist.ProxyToMain(delegate {
Name = disk.Title;
UpdateAudioCdStatus();
OnUpdated();
});
}
// Action group backing the audio CD context menu; created lazily the
// first time ActionPath is read.
private Gtk.ActionGroup action_group = null;
// UI path of this source's context menu popup. Ensures the menu actions
// exist before handing the path out.
public override string ActionPath {
    get {
        if(action_group == null) {
            CreateActions();
        }
        return "/AudioCDMenu";
    }
}
// Shared 22px source icon; LoadIcon falls back through the listed theme
// icon names until one resolves.
private static Gdk.Pixbuf icon = IconThemeUtils.LoadIcon(22, "media-cdrom", "gnome-dev-cdrom-audio", "source-cd-audio");
public override Gdk.Pixbuf Icon {
get { return icon; }
}
// Number of tracks on the disc.
public override int Count {
get { return disk.TrackCount; }
}
// The underlying audio CD disk backing this source.
public AudioCdDisk Disk {
get { return disk; }
}
// Tracks come straight from the disk's table of contents.
public override IEnumerable<TrackInfo> Tracks {
get { return disk.Tracks; }
}
// Searching within an audio CD source is not supported.
public override bool SearchEnabled {
get { return false; }
}
// Burning from an audio CD source is not supported.
public override bool CanWriteToCD {
get { return false; }
}
// The widget hosting this source's view (the container built elsewhere).
public override Gtk.Widget ViewWidget {
get { return box; }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.ObjectModel;
using System.IdentityModel.Tokens;
using System.Xml;
namespace System.ServiceModel.Security
{
public abstract class SecurityAlgorithmSuite
{
// Lazily created singleton suites. NOTE(review): the null-check/assign
// pattern below is unsynchronized, so a concurrent first access may
// construct more than one instance; the last assignment wins.
private static SecurityAlgorithmSuite s_basic256;
private static SecurityAlgorithmSuite s_tripleDes;
private static SecurityAlgorithmSuite s_basic256Sha256;
// The default algorithm suite is Basic256.
static public SecurityAlgorithmSuite Default
{
get
{
return Basic256;
}
}
static public SecurityAlgorithmSuite Basic256
{
get
{
if (s_basic256 == null)
{
s_basic256 = new Basic256SecurityAlgorithmSuite();
}
return s_basic256;
}
}
static public SecurityAlgorithmSuite TripleDes
{
get
{
if (s_tripleDes == null)
{
s_tripleDes = new TripleDesSecurityAlgorithmSuite();
}
return s_tripleDes;
}
}
static public SecurityAlgorithmSuite Basic256Sha256
{
get
{
if (s_basic256Sha256 == null)
{
s_basic256Sha256 = new Basic256Sha256SecurityAlgorithmSuite();
}
return s_basic256Sha256;
}
}
// Default algorithm URIs and key lengths; each concrete suite supplies
// its own values.
public abstract string DefaultCanonicalizationAlgorithm { get; }
public abstract string DefaultDigestAlgorithm { get; }
public abstract string DefaultEncryptionAlgorithm { get; }
public abstract int DefaultEncryptionKeyDerivationLength { get; }
public abstract string DefaultSymmetricKeyWrapAlgorithm { get; }
public abstract string DefaultAsymmetricKeyWrapAlgorithm { get; }
public abstract string DefaultSymmetricSignatureAlgorithm { get; }
public abstract string DefaultAsymmetricSignatureAlgorithm { get; }
public abstract int DefaultSignatureKeyDerivationLength { get; }
public abstract int DefaultSymmetricKeyLength { get; }
// XmlDictionaryString forms of the default algorithm URIs, used for
// efficient XML writing. Base implementations return null; concrete
// suites override them with entries from the static dictionary.
internal virtual XmlDictionaryString DefaultCanonicalizationAlgorithmDictionaryString { get { return null; } }
internal virtual XmlDictionaryString DefaultDigestAlgorithmDictionaryString { get { return null; } }
internal virtual XmlDictionaryString DefaultEncryptionAlgorithmDictionaryString { get { return null; } }
internal virtual XmlDictionaryString DefaultSymmetricKeyWrapAlgorithmDictionaryString { get { return null; } }
internal virtual XmlDictionaryString DefaultAsymmetricKeyWrapAlgorithmDictionaryString { get { return null; } }
internal virtual XmlDictionaryString DefaultSymmetricSignatureAlgorithmDictionaryString { get { return null; } }
internal virtual XmlDictionaryString DefaultAsymmetricSignatureAlgorithmDictionaryString { get { return null; } }
protected SecurityAlgorithmSuite() { }
// By default an algorithm is "supported" only when it equals the suite's
// corresponding default. Key derivation is the exception: both the
// Feb 2005 and Dec 2005 P_SHA1 derivation URIs are accepted.
public virtual bool IsCanonicalizationAlgorithmSupported(string algorithm) { return algorithm == DefaultCanonicalizationAlgorithm; }
public virtual bool IsDigestAlgorithmSupported(string algorithm) { return algorithm == DefaultDigestAlgorithm; }
public virtual bool IsEncryptionAlgorithmSupported(string algorithm) { return algorithm == DefaultEncryptionAlgorithm; }
public virtual bool IsEncryptionKeyDerivationAlgorithmSupported(string algorithm) { return (algorithm == SecurityAlgorithms.Psha1KeyDerivation) || (algorithm == SecurityAlgorithms.Psha1KeyDerivationDec2005); }
public virtual bool IsSymmetricKeyWrapAlgorithmSupported(string algorithm) { return algorithm == DefaultSymmetricKeyWrapAlgorithm; }
public virtual bool IsAsymmetricKeyWrapAlgorithmSupported(string algorithm) { return algorithm == DefaultAsymmetricKeyWrapAlgorithm; }
public virtual bool IsSymmetricSignatureAlgorithmSupported(string algorithm) { return algorithm == DefaultSymmetricSignatureAlgorithm; }
public virtual bool IsAsymmetricSignatureAlgorithmSupported(string algorithm) { return algorithm == DefaultAsymmetricSignatureAlgorithm; }
public virtual bool IsSignatureKeyDerivationAlgorithmSupported(string algorithm) { return (algorithm == SecurityAlgorithms.Psha1KeyDerivation) || (algorithm == SecurityAlgorithms.Psha1KeyDerivationDec2005); }
public abstract bool IsSymmetricKeyLengthSupported(int length);
public abstract bool IsAsymmetricKeyLengthSupported(int length);
// Selects a signature algorithm and key for the given token: walks the
// token's keys in order and, for each key, prefers the suite's symmetric
// signature algorithm over the asymmetric one. Throws when the token has
// no keys at all, or no key supports either algorithm.
internal void GetSignatureAlgorithmAndKey(SecurityToken token, out string signatureAlgorithm, out SecurityKey key, out XmlDictionaryString signatureAlgorithmDictionaryString)
{
ReadOnlyCollection<SecurityKey> keys = token.SecurityKeys;
if (keys == null || keys.Count == 0)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.Format(SR.SigningTokenHasNoKeys, token)));
}
for (int i = 0; i < keys.Count; i++)
{
if (keys[i].IsSupportedAlgorithm(DefaultSymmetricSignatureAlgorithm))
{
signatureAlgorithm = DefaultSymmetricSignatureAlgorithm;
signatureAlgorithmDictionaryString = DefaultSymmetricSignatureAlgorithmDictionaryString;
key = keys[i];
return;
}
else if (keys[i].IsSupportedAlgorithm(DefaultAsymmetricSignatureAlgorithm))
{
signatureAlgorithm = DefaultAsymmetricSignatureAlgorithm;
signatureAlgorithmDictionaryString = DefaultAsymmetricSignatureAlgorithmDictionaryString;
key = keys[i];
return;
}
}
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.Format(SR.SigningTokenHasNoKeysSupportingTheAlgorithmSuite, token, this)));
}
}
// Basic256 suite: AES-256 encryption/key wrap, SHA-1 digest,
// HMAC-SHA1 / RSA-SHA1 signatures, RSA-OAEP asymmetric key wrap.
// String algorithm URIs are derived from the dictionary-string overrides.
public class Basic256SecurityAlgorithmSuite : SecurityAlgorithmSuite
{
public Basic256SecurityAlgorithmSuite() : base() { }
public override string DefaultCanonicalizationAlgorithm { get { return DefaultCanonicalizationAlgorithmDictionaryString.Value; } }
public override string DefaultDigestAlgorithm { get { return DefaultDigestAlgorithmDictionaryString.Value; } }
public override string DefaultEncryptionAlgorithm { get { return DefaultEncryptionAlgorithmDictionaryString.Value; } }
public override int DefaultEncryptionKeyDerivationLength { get { return 256; } }
public override string DefaultSymmetricKeyWrapAlgorithm { get { return DefaultSymmetricKeyWrapAlgorithmDictionaryString.Value; } }
public override string DefaultAsymmetricKeyWrapAlgorithm { get { return DefaultAsymmetricKeyWrapAlgorithmDictionaryString.Value; } }
public override string DefaultSymmetricSignatureAlgorithm { get { return DefaultSymmetricSignatureAlgorithmDictionaryString.Value; } }
public override string DefaultAsymmetricSignatureAlgorithm { get { return DefaultAsymmetricSignatureAlgorithmDictionaryString.Value; } }
public override int DefaultSignatureKeyDerivationLength { get { return 192; } }
public override int DefaultSymmetricKeyLength { get { return 256; } }
// Symmetric keys must be exactly 256 bits; RSA keys 1024-4096 bits.
public override bool IsSymmetricKeyLengthSupported(int length) { return length == 256; }
public override bool IsAsymmetricKeyLengthSupported(int length) { return length >= 1024 && length <= 4096; }
internal override XmlDictionaryString DefaultCanonicalizationAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.ExclusiveC14n; } }
internal override XmlDictionaryString DefaultDigestAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.Sha1Digest; } }
internal override XmlDictionaryString DefaultEncryptionAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.Aes256Encryption; } }
internal override XmlDictionaryString DefaultSymmetricKeyWrapAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.Aes256KeyWrap; } }
internal override XmlDictionaryString DefaultAsymmetricKeyWrapAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.RsaOaepKeyWrap; } }
internal override XmlDictionaryString DefaultSymmetricSignatureAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.HmacSha1Signature; } }
internal override XmlDictionaryString DefaultAsymmetricSignatureAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.RsaSha1Signature; } }
public override string ToString()
{
return "Basic256";
}
}
// TripleDes suite: 3DES encryption/key wrap, SHA-1 digest,
// HMAC-SHA1 / RSA-SHA1 signatures, RSA-OAEP asymmetric key wrap.
// String algorithm URIs are derived from the dictionary-string overrides.
public class TripleDesSecurityAlgorithmSuite : SecurityAlgorithmSuite
{
public TripleDesSecurityAlgorithmSuite() : base() { }
public override string DefaultCanonicalizationAlgorithm { get { return DefaultCanonicalizationAlgorithmDictionaryString.Value; } }
public override string DefaultDigestAlgorithm { get { return DefaultDigestAlgorithmDictionaryString.Value; } }
public override string DefaultEncryptionAlgorithm { get { return DefaultEncryptionAlgorithmDictionaryString.Value; } }
public override int DefaultEncryptionKeyDerivationLength { get { return 192; } }
public override string DefaultSymmetricKeyWrapAlgorithm { get { return DefaultSymmetricKeyWrapAlgorithmDictionaryString.Value; } }
// Consistency: dropped the lone "this." qualifier so this accessor
// matches every sibling accessor in these suite classes.
public override string DefaultAsymmetricKeyWrapAlgorithm { get { return DefaultAsymmetricKeyWrapAlgorithmDictionaryString.Value; } }
public override string DefaultSymmetricSignatureAlgorithm { get { return DefaultSymmetricSignatureAlgorithmDictionaryString.Value; } }
public override string DefaultAsymmetricSignatureAlgorithm { get { return DefaultAsymmetricSignatureAlgorithmDictionaryString.Value; } }
public override int DefaultSignatureKeyDerivationLength { get { return 192; } }
public override int DefaultSymmetricKeyLength { get { return 192; } }
// Symmetric keys 192-256 bits; RSA keys 1024-4096 bits.
public override bool IsSymmetricKeyLengthSupported(int length) { return length >= 192 && length <= 256; }
public override bool IsAsymmetricKeyLengthSupported(int length) { return length >= 1024 && length <= 4096; }
internal override XmlDictionaryString DefaultCanonicalizationAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.ExclusiveC14n; } }
internal override XmlDictionaryString DefaultDigestAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.Sha1Digest; } }
internal override XmlDictionaryString DefaultEncryptionAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.TripleDesEncryption; } }
internal override XmlDictionaryString DefaultSymmetricKeyWrapAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.TripleDesKeyWrap; } }
internal override XmlDictionaryString DefaultAsymmetricKeyWrapAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.RsaOaepKeyWrap; } }
internal override XmlDictionaryString DefaultSymmetricSignatureAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.HmacSha1Signature; } }
internal override XmlDictionaryString DefaultAsymmetricSignatureAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.RsaSha1Signature; } }
public override string ToString()
{
return "TripleDes";
}
}
// Basic256Sha256 suite: identical to Basic256 except the digest and
// signature algorithms are upgraded to their SHA-256 variants.
public class Basic256Sha256SecurityAlgorithmSuite : Basic256SecurityAlgorithmSuite
{
public Basic256Sha256SecurityAlgorithmSuite() : base() { }
internal override XmlDictionaryString DefaultDigestAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.Sha256Digest; } }
internal override XmlDictionaryString DefaultSymmetricSignatureAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.HmacSha256Signature; } }
internal override XmlDictionaryString DefaultAsymmetricSignatureAlgorithmDictionaryString { get { return XD.SecurityAlgorithmDictionary.RsaSha256Signature; } }
public override string ToString()
{
return "Basic256Sha256";
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using AutoMapper.Configuration;
using AutoMapper.Execution;
namespace AutoMapper
{
using Internal;
/// <summary>
/// Main configuration object holding all mapping configuration for a source and destination type
/// </summary>
[DebuggerDisplay("{SourceType.Name} -> {DestinationType.Name}")]
public class TypeMap
{
private readonly List<LambdaExpression> _afterMapActions = new List<LambdaExpression>();
private readonly List<LambdaExpression> _beforeMapActions = new List<LambdaExpression>();
private readonly HashSet<TypePair> _includedDerivedTypes = new HashSet<TypePair>();
private readonly HashSet<TypePair> _includedBaseTypes = new HashSet<TypePair>();
private readonly List<PropertyMap> _propertyMaps = new List<PropertyMap>();
private readonly List<PathMap> _pathMaps = new List<PathMap>();
private readonly List<SourceMemberConfig> _sourceMemberConfigs = new List<SourceMemberConfig>();
private readonly IList<PropertyMap> _inheritedMaps = new List<PropertyMap>();
private PropertyMap[] _orderedPropertyMaps;
private bool _sealed;
private readonly IList<TypeMap> _inheritedTypeMaps = new List<TypeMap>();
public TypeMap(TypeDetails sourceType, TypeDetails destinationType, MemberList memberList, ProfileMap profile)
{
SourceTypeDetails = sourceType;
DestinationTypeDetails = destinationType;
Types = new TypePair(sourceType.Type, destinationType.Type);
Profile = profile;
ConfiguredMemberList = memberList;
}
public PathMap FindOrCreatePathMapFor(LambdaExpression destinationExpression, MemberPath path, TypeMap typeMap)
{
var pathMap = _pathMaps.SingleOrDefault(p => p.MemberPath == path);
if(pathMap == null)
{
pathMap = new PathMap(destinationExpression, path, typeMap);
_pathMaps.Add(pathMap);
}
return pathMap;
}
public LambdaExpression MapExpression { get; private set; }
public TypePair Types { get; }
public ConstructorMap ConstructorMap { get; set; }
public TypeDetails SourceTypeDetails { get; }
public TypeDetails DestinationTypeDetails { get; }
public Type SourceType => SourceTypeDetails.Type;
public Type DestinationType => DestinationTypeDetails.Type;
public ProfileMap Profile { get; }
public LambdaExpression CustomMapper { get; set; }
public LambdaExpression CustomProjection { get; set; }
public LambdaExpression DestinationCtor { get; set; }
public Type DestinationTypeOverride { get; set; }
public Type DestinationTypeToUse => DestinationTypeOverride ?? DestinationType;
public bool ConstructDestinationUsingServiceLocator { get; set; }
public MemberList ConfiguredMemberList { get; }
public IEnumerable<TypePair> IncludedDerivedTypes => _includedDerivedTypes;
public IEnumerable<TypePair> IncludedBaseTypes => _includedBaseTypes;
public IEnumerable<LambdaExpression> BeforeMapActions => _beforeMapActions;
public IEnumerable<LambdaExpression> AfterMapActions => _afterMapActions;
public bool PreserveReferences { get; set; }
public LambdaExpression Condition { get; set; }
public int MaxDepth { get; set; }
public LambdaExpression Substitution { get; set; }
public LambdaExpression ConstructExpression { get; set; }
public Type TypeConverterType { get; set; }
public bool DisableConstructorValidation { get; set; }
public PropertyMap[] GetPropertyMaps() => _orderedPropertyMaps ?? _propertyMaps.Concat(_inheritedMaps).ToArray();
public IEnumerable<PathMap> PathMaps => _pathMaps;
public bool ConstructorParameterMatches(string destinationPropertyName)
{
return ConstructorMap?.CtorParams.Any(c => !c.DefaultValue && string.Equals(c.Parameter.Name, destinationPropertyName, StringComparison.OrdinalIgnoreCase)) == true;
}
public void AddPropertyMap(MemberInfo destProperty, IEnumerable<MemberInfo> resolvers)
{
var propertyMap = new PropertyMap(destProperty, this);
propertyMap.ChainMembers(resolvers);
_propertyMaps.Add(propertyMap);
}
public string[] GetUnmappedPropertyNames()
{
string GetPropertyName(PropertyMap pm) => ConfiguredMemberList == MemberList.Destination
? pm.DestinationProperty.Name
: pm.SourceMember != null
? pm.SourceMember.Name
: pm.DestinationProperty.Name;
string[] GetPropertyNames(IEnumerable<PropertyMap> propertyMaps) => propertyMaps.Where(pm => pm.IsMapped()).Select(GetPropertyName).ToArray();
var autoMappedProperties = GetPropertyNames(_propertyMaps);
var inheritedProperties = GetPropertyNames(_inheritedMaps);
IEnumerable<string> properties;
if(ConfiguredMemberList == MemberList.Destination)
{
properties = DestinationTypeDetails.PublicWriteAccessors
.Select(p => p.Name)
.Except(autoMappedProperties)
.Except(inheritedProperties);
}
else
{
var redirectedSourceMembers = _propertyMaps
.Where(pm => pm.IsMapped() && pm.SourceMember != null && pm.SourceMember.Name != pm.DestinationProperty.Name)
.Select(pm => pm.SourceMember.Name);
var ignoredSourceMembers = _sourceMemberConfigs
.Where(smc => smc.IsIgnored())
.Select(pm => pm.SourceMember.Name).ToList();
properties = SourceTypeDetails.PublicReadAccessors
.Select(p => p.Name)
.Except(autoMappedProperties)
.Except(inheritedProperties)
.Except(redirectedSourceMembers)
.Except(ignoredSourceMembers);
}
return properties.Where(memberName => !Profile.GlobalIgnores.Any(memberName.StartsWith)).ToArray();
}
public bool PassesCtorValidation()
{
if (DisableConstructorValidation)
return true;
if (DestinationCtor != null)
return true;
if (ConstructDestinationUsingServiceLocator)
return true;
if (ConstructorMap?.CanResolve == true)
return true;
if (DestinationTypeToUse.IsInterface())
return true;
if (DestinationTypeToUse.IsAbstract())
return true;
if (DestinationTypeToUse.IsGenericTypeDefinition())
return true;
if (DestinationTypeToUse.IsValueType())
return true;
var constructors = DestinationTypeToUse
.GetDeclaredConstructors()
.Where(ci => !ci.IsStatic);
//find a ctor with only optional args
var ctorWithOptionalArgs = constructors.FirstOrDefault(c => c.GetParameters().All(p => p.IsOptional));
return ctorWithOptionalArgs != null;
}
public PropertyMap FindOrCreatePropertyMapFor(MemberInfo destinationProperty)
{
var propertyMap = GetExistingPropertyMapFor(destinationProperty);
if (propertyMap != null) return propertyMap;
propertyMap = new PropertyMap(destinationProperty, this);
_propertyMaps.Add(propertyMap);
return propertyMap;
}
public void IncludeDerivedTypes(Type derivedSourceType, Type derivedDestinationType)
{
var derivedTypes = new TypePair(derivedSourceType, derivedDestinationType);
if (derivedTypes.Equals(Types))
{
throw new InvalidOperationException("You cannot include a type map into itself.");
}
_includedDerivedTypes.Add(derivedTypes);
}
public void IncludeBaseTypes(Type baseSourceType, Type baseDestinationType)
{
var baseTypes = new TypePair(baseSourceType, baseDestinationType);
if (baseTypes.Equals(Types))
{
throw new InvalidOperationException("You cannot include a type map into itself.");
}
_includedBaseTypes.Add(baseTypes);
}
internal void IgnorePaths(MemberInfo destinationMember)
{
foreach(var pathMap in _pathMaps.Where(pm => pm.MemberPath.First == destinationMember))
{
pathMap.Ignored = true;
}
}
public Type GetDerivedTypeFor(Type derivedSourceType)
{
if (DestinationTypeOverride != null)
{
return DestinationTypeOverride;
}
// This might need to be fixed for multiple derived source types to different dest types
var match = _includedDerivedTypes.FirstOrDefault(tp => tp.SourceType == derivedSourceType);
return match.DestinationType ?? DestinationType;
}
public bool TypeHasBeenIncluded(TypePair derivedTypes) => _includedDerivedTypes.Contains(derivedTypes);
public bool HasDerivedTypesToInclude() => _includedDerivedTypes.Any() || DestinationTypeOverride != null;
public void AddBeforeMapAction(LambdaExpression beforeMap)
{
if(!_beforeMapActions.Contains(beforeMap))
{
_beforeMapActions.Add(beforeMap);
}
}
public void AddAfterMapAction(LambdaExpression afterMap)
{
if(!_afterMapActions.Contains(afterMap))
{
_afterMapActions.Add(afterMap);
}
}
public void Seal(IConfigurationProvider configurationProvider, Stack<TypeMap> typeMapsPath = null)
{
if(_sealed)
{
return;
}
_sealed = true;
foreach (var inheritedTypeMap in _inheritedTypeMaps)
{
ApplyInheritedTypeMap(inheritedTypeMap);
}
_orderedPropertyMaps =
_propertyMaps
.Union(_inheritedMaps)
.OrderBy(map => map.MappingOrder).ToArray();
MapExpression = new TypeMapPlanBuilder(configurationProvider, this).CreateMapperLambda(typeMapsPath);
}
public PropertyMap GetExistingPropertyMapFor(MemberInfo destinationProperty)
{
if (!destinationProperty.DeclaringType.IsAssignableFrom(DestinationType))
return null;
var propertyMap =
_propertyMaps.FirstOrDefault(pm => pm.DestinationProperty.Name.Equals(destinationProperty.Name));
if (propertyMap != null)
return propertyMap;
propertyMap =
_inheritedMaps.FirstOrDefault(pm => pm.DestinationProperty.Name.Equals(destinationProperty.Name));
if (propertyMap == null)
return null;
var propertyInfo = propertyMap.DestinationProperty as PropertyInfo;
if (propertyInfo == null)
return propertyMap;
var baseAccessor = propertyInfo.GetGetMethod();
if (baseAccessor.IsAbstract || baseAccessor.IsVirtual)
return propertyMap;
var accessor = ((PropertyInfo)destinationProperty).GetGetMethod();
if (baseAccessor.DeclaringType == accessor.DeclaringType)
return propertyMap;
return null;
}
public void InheritTypes(TypeMap inheritedTypeMap)
{
foreach (var includedDerivedType in inheritedTypeMap._includedDerivedTypes
.Where(includedDerivedType => !_includedDerivedTypes.Contains(includedDerivedType)))
{
_includedDerivedTypes.Add(includedDerivedType);
}
}
public SourceMemberConfig FindOrCreateSourceMemberConfigFor(MemberInfo sourceMember)
{
var config = _sourceMemberConfigs.FirstOrDefault(smc => Equals(smc.SourceMember, sourceMember));
if (config != null) return config;
config = new SourceMemberConfig(sourceMember);
_sourceMemberConfigs.Add(config);
return config;
}
public void AddInheritedMap(TypeMap inheritedTypeMap)
{
_inheritedTypeMaps.Add(inheritedTypeMap);
}
public bool ShouldCheckForValid() => CustomMapper == null
&& CustomProjection == null
&& TypeConverterType == null
&& DestinationTypeOverride == null
&& ConfiguredMemberList != MemberList.None;
private void ApplyInheritedTypeMap(TypeMap inheritedTypeMap)
{
foreach (var inheritedMappedProperty in inheritedTypeMap.GetPropertyMaps().Where(m => m.IsMapped()))
{
var conventionPropertyMap = GetPropertyMaps()
.SingleOrDefault(m =>
m.DestinationProperty.Name == inheritedMappedProperty.DestinationProperty.Name);
if (conventionPropertyMap != null)
{
conventionPropertyMap.ApplyInheritedPropertyMap(inheritedMappedProperty);
}
else
{
var propertyMap = new PropertyMap(inheritedMappedProperty, this);
_inheritedMaps.Add(propertyMap);
}
}
//Include BeforeMap
foreach (var beforeMapAction in inheritedTypeMap._beforeMapActions)
{
AddBeforeMapAction(beforeMapAction);
}
//Include AfterMap
foreach (var afterMapAction in inheritedTypeMap._afterMapActions)
{
AddAfterMapAction(afterMapAction);
}
var notOverridenSourceConfigs =
inheritedTypeMap._sourceMemberConfigs.Where(
baseConfig => _sourceMemberConfigs.All(derivedConfig => derivedConfig.SourceMember != baseConfig.SourceMember));
_sourceMemberConfigs.AddRange(notOverridenSourceConfigs);
var notOverridenPathMaps =
inheritedTypeMap.PathMaps.Where(
baseConfig => PathMaps.All(derivedConfig => derivedConfig.MemberPath != baseConfig.MemberPath));
_pathMaps.AddRange(notOverridenPathMaps);
}
}
}
| |
using Microsoft.IdentityModel.S2S.Protocols.OAuth2;
using Microsoft.IdentityModel.Tokens;
using Microsoft.SharePoint.Client;
using System;
using System.Net;
using System.Security.Principal;
using System.Web;
using System.Web.Configuration;
namespace OfficeDevPnP.PartnerPack.ScheduledJob
{
/// <summary>
/// Encapsulates all the information from SharePoint.
/// </summary>
public abstract class SharePointContext
{
// Query-string keys of the SharePoint standard tokens.
public const string SPHostUrlKey = "SPHostUrl";
public const string SPAppWebUrlKey = "SPAppWebUrl";
public const string SPLanguageKey = "SPLanguage";
public const string SPClientTagKey = "SPClientTag";
public const string SPProductNumberKey = "SPProductNumber";
// NOTE(review): presumably used by derived classes to renew cached
// tokens shortly before they expire - confirm in the concrete providers.
protected static readonly TimeSpan AccessTokenLifetimeTolerance = TimeSpan.FromMinutes(5.0);
private readonly Uri spHostUrl;
private readonly Uri spAppWebUrl;
private readonly string spLanguage;
private readonly string spClientTag;
private readonly string spProductNumber;
// Cached access tokens as <AccessTokenString, UtcExpiresOn> pairs;
// validated with IsAccessTokenValid before reuse.
protected Tuple<string, DateTime> userAccessTokenForSPHost;
protected Tuple<string, DateTime> userAccessTokenForSPAppWeb;
protected Tuple<string, DateTime> appOnlyAccessTokenForSPHost;
protected Tuple<string, DateTime> appOnlyAccessTokenForSPAppWeb;
/// <summary>
/// Gets the SharePoint host url from QueryString of the specified HTTP request.
/// </summary>
/// <param name="httpRequest">The specified HTTP request.</param>
/// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain the SharePoint host url.</returns>
public static Uri GetSPHostUrl(HttpRequestBase httpRequest)
{
if (httpRequest == null)
{
throw new ArgumentNullException("httpRequest");
}
string spHostUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SPHostUrlKey]);
Uri spHostUrl;
// Only absolute http/https URLs are accepted; anything else yields null.
if (Uri.TryCreate(spHostUrlString, UriKind.Absolute, out spHostUrl) &&
(spHostUrl.Scheme == Uri.UriSchemeHttp || spHostUrl.Scheme == Uri.UriSchemeHttps))
{
return spHostUrl;
}
return null;
}
/// <summary>
/// Gets the SharePoint host url from QueryString of the specified HTTP request.
/// </summary>
/// <param name="httpRequest">The specified HTTP request.</param>
/// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain the SharePoint host url.</returns>
public static Uri GetSPHostUrl(HttpRequest httpRequest)
{
return GetSPHostUrl(new HttpRequestWrapper(httpRequest));
}
/// <summary>
/// The SharePoint host url.
/// </summary>
public Uri SPHostUrl
{
get { return this.spHostUrl; }
}
/// <summary>
/// The SharePoint app web url. May be <c>null</c> (not validated by the constructor).
/// </summary>
public Uri SPAppWebUrl
{
get { return this.spAppWebUrl; }
}
/// <summary>
/// The SharePoint language.
/// </summary>
public string SPLanguage
{
get { return this.spLanguage; }
}
/// <summary>
/// The SharePoint client tag.
/// </summary>
public string SPClientTag
{
get { return this.spClientTag; }
}
/// <summary>
/// The SharePoint product number.
/// </summary>
public string SPProductNumber
{
get { return this.spProductNumber; }
}
/// <summary>
/// The user access token for the SharePoint host.
/// </summary>
public abstract string UserAccessTokenForSPHost
{
get;
}
/// <summary>
/// The user access token for the SharePoint app web.
/// </summary>
public abstract string UserAccessTokenForSPAppWeb
{
get;
}
/// <summary>
/// The app only access token for the SharePoint host.
/// </summary>
public abstract string AppOnlyAccessTokenForSPHost
{
get;
}
/// <summary>
/// The app only access token for the SharePoint app web.
/// </summary>
public abstract string AppOnlyAccessTokenForSPAppWeb
{
get;
}
/// <summary>
/// Constructor. Validates all inputs except <paramref name="spAppWebUrl"/>,
/// which is allowed to be <c>null</c>.
/// </summary>
/// <param name="spHostUrl">The SharePoint host url.</param>
/// <param name="spAppWebUrl">The SharePoint app web url.</param>
/// <param name="spLanguage">The SharePoint language.</param>
/// <param name="spClientTag">The SharePoint client tag.</param>
/// <param name="spProductNumber">The SharePoint product number.</param>
protected SharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber)
{
if (spHostUrl == null)
{
throw new ArgumentNullException("spHostUrl");
}
if (string.IsNullOrEmpty(spLanguage))
{
throw new ArgumentNullException("spLanguage");
}
if (string.IsNullOrEmpty(spClientTag))
{
throw new ArgumentNullException("spClientTag");
}
if (string.IsNullOrEmpty(spProductNumber))
{
throw new ArgumentNullException("spProductNumber");
}
this.spHostUrl = spHostUrl;
this.spAppWebUrl = spAppWebUrl;
this.spLanguage = spLanguage;
this.spClientTag = spClientTag;
this.spProductNumber = spProductNumber;
}
/// <summary>
/// Creates a user ClientContext for the SharePoint host.
/// </summary>
/// <returns>A ClientContext instance.</returns>
public ClientContext CreateUserClientContextForSPHost()
{
return CreateClientContext(this.SPHostUrl, this.UserAccessTokenForSPHost);
}
/// <summary>
/// Creates a user ClientContext for the SharePoint app web.
/// </summary>
/// <returns>A ClientContext instance.</returns>
public ClientContext CreateUserClientContextForSPAppWeb()
{
return CreateClientContext(this.SPAppWebUrl, this.UserAccessTokenForSPAppWeb);
}
/// <summary>
/// Creates app only ClientContext for the SharePoint host.
/// </summary>
/// <returns>A ClientContext instance.</returns>
public ClientContext CreateAppOnlyClientContextForSPHost()
{
return CreateClientContext(this.SPHostUrl, this.AppOnlyAccessTokenForSPHost);
}
/// <summary>
/// Creates an app only ClientContext for the SharePoint app web.
/// </summary>
/// <returns>A ClientContext instance.</returns>
public ClientContext CreateAppOnlyClientContextForSPAppWeb()
{
return CreateClientContext(this.SPAppWebUrl, this.AppOnlyAccessTokenForSPAppWeb);
}
/// <summary>
/// Gets the database connection string from SharePoint for autohosted app.
/// This method is deprecated because the autohosted option is no longer available.
/// </summary>
[ObsoleteAttribute("This method is deprecated because the autohosted option is no longer available.", true)]
public string GetDatabaseConnectionString()
{
throw new NotSupportedException("This method is deprecated because the autohosted option is no longer available.");
}
/// <summary>
/// Determines if the specified access token is valid.
/// It considers an access token as not valid if it is null, or it has expired.
/// </summary>
/// <param name="accessToken">The access token to verify.</param>
/// <returns>True if the access token is valid.</returns>
protected static bool IsAccessTokenValid(Tuple<string, DateTime> accessToken)
{
return accessToken != null &&
!string.IsNullOrEmpty(accessToken.Item1) &&
accessToken.Item2 > DateTime.UtcNow;
}
/// <summary>
/// Creates a ClientContext with the specified SharePoint site url and the access token.
/// Returns <c>null</c> when either the url or the token is missing.
/// </summary>
/// <param name="spSiteUrl">The site url.</param>
/// <param name="accessToken">The access token.</param>
/// <returns>A ClientContext instance.</returns>
private static ClientContext CreateClientContext(Uri spSiteUrl, string accessToken)
{
if (spSiteUrl != null && !string.IsNullOrEmpty(accessToken))
{
return TokenHelper.GetClientContextWithAccessToken(spSiteUrl.AbsoluteUri, accessToken);
}
return null;
}
}
/// <summary>
/// Redirection status.
/// </summary>
public enum RedirectionStatus
{
/// <summary>A valid SharePoint context exists; no redirection needed.</summary>
Ok,
/// <summary>The caller should redirect to SharePoint to authenticate.</summary>
ShouldRedirect,
/// <summary>Redirection is not possible (e.g. POST request or missing host url).</summary>
CanNotRedirect
}
/// <summary>
/// Provides SharePointContext instances.
/// </summary>
public abstract class SharePointContextProvider
{
// The process-wide provider instance; set by the static constructor and
// replaceable via Register().
private static SharePointContextProvider current;
/// <summary>
/// The current SharePointContextProvider instance.
/// </summary>
public static SharePointContextProvider Current
{
get { return SharePointContextProvider.current; }
}
/// <summary>
/// Initializes the default SharePointContextProvider instance.
/// </summary>
static SharePointContextProvider()
{
// High-trust (server-to-server) apps use the high-trust provider;
// everything else defaults to the ACS (low-trust) provider.
if (!TokenHelper.IsHighTrustApp())
{
SharePointContextProvider.current = new SharePointAcsContextProvider();
}
else
{
SharePointContextProvider.current = new SharePointHighTrustContextProvider();
}
}
/// <summary>
/// Registers the specified SharePointContextProvider instance as current.
/// It should be called by Application_Start() in Global.asax.
/// </summary>
/// <param name="provider">The SharePointContextProvider to be set as current.</param>
public static void Register(SharePointContextProvider provider)
{
    if (provider != null)
    {
        current = provider;
    }
    else
    {
        throw new ArgumentNullException("provider");
    }
}
/// <summary>
/// Checks if it is necessary to redirect to SharePoint for user to authenticate.
/// </summary>
/// <param name="httpContext">The HTTP context.</param>
/// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
/// <returns>Redirection status.</returns>
public static RedirectionStatus CheckRedirectionStatus(HttpContextBase httpContext, out Uri redirectUrl)
{
if (httpContext == null)
{
throw new ArgumentNullException("httpContext");
}
redirectUrl = null;
bool contextTokenExpired = false;
try
{
if (SharePointContextProvider.Current.GetSharePointContext(httpContext) != null)
{
return RedirectionStatus.Ok;
}
}
catch (SecurityTokenExpiredException)
{
contextTokenExpired = true;
}
const string SPHasRedirectedToSharePointKey = "SPHasRedirectedToSharePoint";
if (!string.IsNullOrEmpty(httpContext.Request.QueryString[SPHasRedirectedToSharePointKey]) && !contextTokenExpired)
{
return RedirectionStatus.CanNotRedirect;
}
Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
if (spHostUrl == null)
{
return RedirectionStatus.CanNotRedirect;
}
if (StringComparer.OrdinalIgnoreCase.Equals(httpContext.Request.HttpMethod, "POST"))
{
return RedirectionStatus.CanNotRedirect;
}
Uri requestUrl = httpContext.Request.Url;
var queryNameValueCollection = HttpUtility.ParseQueryString(requestUrl.Query);
// Removes the values that are included in {StandardTokens}, as {StandardTokens} will be inserted at the beginning of the query string.
queryNameValueCollection.Remove(SharePointContext.SPHostUrlKey);
queryNameValueCollection.Remove(SharePointContext.SPAppWebUrlKey);
queryNameValueCollection.Remove(SharePointContext.SPLanguageKey);
queryNameValueCollection.Remove(SharePointContext.SPClientTagKey);
queryNameValueCollection.Remove(SharePointContext.SPProductNumberKey);
// Adds SPHasRedirectedToSharePoint=1.
queryNameValueCollection.Add(SPHasRedirectedToSharePointKey, "1");
UriBuilder returnUrlBuilder = new UriBuilder(requestUrl);
returnUrlBuilder.Query = queryNameValueCollection.ToString();
// Inserts StandardTokens.
const string StandardTokens = "{StandardTokens}";
string returnUrlString = returnUrlBuilder.Uri.AbsoluteUri;
returnUrlString = returnUrlString.Insert(returnUrlString.IndexOf("?") + 1, StandardTokens + "&");
// Constructs redirect url.
string redirectUrlString = TokenHelper.GetAppContextTokenRequestUrl(spHostUrl.AbsoluteUri, Uri.EscapeDataString(returnUrlString));
redirectUrl = new Uri(redirectUrlString, UriKind.Absolute);
return RedirectionStatus.ShouldRedirect;
}
/// <summary>
/// Checks if it is necessary to redirect to SharePoint for user to authenticate.
/// </summary>
/// <param name="httpContext">The HTTP context.</param>
/// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
/// <returns>Redirection status.</returns>
public static RedirectionStatus CheckRedirectionStatus(HttpContext httpContext, out Uri redirectUrl)
{
return CheckRedirectionStatus(new HttpContextWrapper(httpContext), out redirectUrl);
}
/// <summary>
/// Creates a SharePointContext instance with the specified HTTP request.
/// </summary>
/// <param name="httpRequest">The HTTP request.</param>
/// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
public SharePointContext CreateSharePointContext(HttpRequestBase httpRequest)
{
if (httpRequest == null)
{
throw new ArgumentNullException("httpRequest");
}
// SPHostUrl
Uri spHostUrl = SharePointContext.GetSPHostUrl(httpRequest);
if (spHostUrl == null)
{
return null;
}
// SPAppWebUrl
string spAppWebUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SharePointContext.SPAppWebUrlKey]);
Uri spAppWebUrl;
if (!Uri.TryCreate(spAppWebUrlString, UriKind.Absolute, out spAppWebUrl) ||
!(spAppWebUrl.Scheme == Uri.UriSchemeHttp || spAppWebUrl.Scheme == Uri.UriSchemeHttps))
{
spAppWebUrl = null;
}
// SPLanguage
string spLanguage = httpRequest.QueryString[SharePointContext.SPLanguageKey];
if (string.IsNullOrEmpty(spLanguage))
{
return null;
}
// SPClientTag
string spClientTag = httpRequest.QueryString[SharePointContext.SPClientTagKey];
if (string.IsNullOrEmpty(spClientTag))
{
return null;
}
// SPProductNumber
string spProductNumber = httpRequest.QueryString[SharePointContext.SPProductNumberKey];
if (string.IsNullOrEmpty(spProductNumber))
{
return null;
}
return CreateSharePointContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, httpRequest);
}
/// <summary>
/// Creates a SharePointContext instance with the specified HTTP request.
/// </summary>
/// <param name="httpRequest">The HTTP request.</param>
/// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
public SharePointContext CreateSharePointContext(HttpRequest httpRequest)
{
return CreateSharePointContext(new HttpRequestWrapper(httpRequest));
}
/// <summary>
/// Gets a SharePointContext instance associated with the specified HTTP context.
/// </summary>
/// <param name="httpContext">The HTTP context.</param>
/// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
public SharePointContext GetSharePointContext(HttpContextBase httpContext)
{
if (httpContext == null)
{
throw new ArgumentNullException("httpContext");
}
Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
if (spHostUrl == null)
{
return null;
}
SharePointContext spContext = LoadSharePointContext(httpContext);
if (spContext == null || !ValidateSharePointContext(spContext, httpContext))
{
spContext = CreateSharePointContext(httpContext.Request);
if (spContext != null)
{
SaveSharePointContext(spContext, httpContext);
}
}
return spContext;
}
/// <summary>
/// Gets a SharePointContext instance associated with the specified HTTP context.
/// </summary>
/// <param name="httpContext">The HTTP context.</param>
/// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
public SharePointContext GetSharePointContext(HttpContext httpContext)
{
return GetSharePointContext(new HttpContextWrapper(httpContext));
}
/// <summary>
/// Creates a SharePointContext instance.
/// </summary>
/// <param name="spHostUrl">The SharePoint host url.</param>
/// <param name="spAppWebUrl">The SharePoint app web url.</param>
/// <param name="spLanguage">The SharePoint language.</param>
/// <param name="spClientTag">The SharePoint client tag.</param>
/// <param name="spProductNumber">The SharePoint product number.</param>
/// <param name="httpRequest">The HTTP request.</param>
/// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
protected abstract SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest);
/// <summary>
/// Validates if the given SharePointContext can be used with the specified HTTP context.
/// </summary>
/// <param name="spContext">The SharePointContext.</param>
/// <param name="httpContext">The HTTP context.</param>
/// <returns>True if the given SharePointContext can be used with the specified HTTP context.</returns>
protected abstract bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext);
/// <summary>
/// Loads the SharePointContext instance associated with the specified HTTP context.
/// </summary>
/// <param name="httpContext">The HTTP context.</param>
/// <returns>The SharePointContext instance. Returns <c>null</c> if not found.</returns>
protected abstract SharePointContext LoadSharePointContext(HttpContextBase httpContext);
/// <summary>
/// Saves the specified SharePointContext instance associated with the specified HTTP context.
/// <c>null</c> is accepted for clearing the SharePointContext instance associated with the HTTP context.
/// </summary>
/// <param name="spContext">The SharePointContext instance to be saved, or <c>null</c>.</param>
/// <param name="httpContext">The HTTP context.</param>
protected abstract void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext);
}
#region ACS
/// <summary>
/// Encapsulates all the information from SharePoint in ACS mode.
/// </summary>
public class SharePointAcsContext : SharePointContext
{
    // Raw context token string received from SharePoint.
    private readonly string contextToken;
    // Parsed/validated form of the same token.
    private readonly SharePointContextToken contextTokenObj;

    /// <summary>
    /// The context token. Returns <c>null</c> once the token's ValidTo time has passed.
    /// </summary>
    public string ContextToken
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextToken : null; }
    }

    /// <summary>
    /// The context token's "CacheKey" claim. Returns <c>null</c> once the token has expired.
    /// </summary>
    public string CacheKey
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.CacheKey : null; }
    }

    /// <summary>
    /// The context token's "refreshtoken" claim. Returns <c>null</c> once the token has expired.
    /// </summary>
    public string RefreshToken
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.RefreshToken : null; }
    }

    /// <summary>
    /// User access token for the SharePoint host, obtained from ACS and cached in a
    /// field inherited from the base SharePointContext class.
    /// </summary>
    public override string UserAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPHostUrl.Authority));
        }
    }

    /// <summary>
    /// User access token for the app web, or <c>null</c> when there is no app web.
    /// </summary>
    public override string UserAccessTokenForSPAppWeb
    {
        get
        {
            if (this.SPAppWebUrl == null)
            {
                return null;
            }
            return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPAppWebUrl.Authority));
        }
    }

    /// <summary>
    /// App-only access token for the SharePoint host (no user identity).
    /// </summary>
    public override string AppOnlyAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPHostUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPHostUrl)));
        }
    }

    /// <summary>
    /// App-only access token for the app web, or <c>null</c> when there is no app web.
    /// </summary>
    public override string AppOnlyAccessTokenForSPAppWeb
    {
        get
        {
            if (this.SPAppWebUrl == null)
            {
                return null;
            }
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPAppWebUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPAppWebUrl)));
        }
    }

    /// <summary>
    /// Initializes an ACS-mode context. Both the raw and the parsed context token are required.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when contextToken is null/empty or contextTokenObj is null.</exception>
    public SharePointAcsContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, string contextToken, SharePointContextToken contextTokenObj)
        : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
    {
        if (string.IsNullOrEmpty(contextToken))
        {
            throw new ArgumentNullException("contextToken");
        }
        if (contextTokenObj == null)
        {
            throw new ArgumentNullException("contextTokenObj");
        }
        this.contextToken = contextToken;
        this.contextTokenObj = contextTokenObj;
    }

    /// <summary>
    /// Ensures the access token is valid and returns it.
    /// </summary>
    /// <param name="accessToken">The access token to verify.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    /// <returns>The access token string, or <c>null</c> when no valid token could be obtained.</returns>
    private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
    {
        RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);
        return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
    }

    /// <summary>
    /// Renews the access token if it is not valid.
    /// </summary>
    /// <param name="accessToken">The access token to renew.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
    {
        if (IsAccessTokenValid(accessToken))
        {
            return;
        }
        try
        {
            OAuth2AccessTokenResponse oAuth2AccessTokenResponse = tokenRenewalHandler();
            DateTime expiresOn = oAuth2AccessTokenResponse.ExpiresOn;
            if ((expiresOn - oAuth2AccessTokenResponse.NotBefore) > AccessTokenLifetimeTolerance)
            {
                // Make the access token get renewed a bit earlier than the time when it expires
                // so that the calls to SharePoint with it will have enough time to complete successfully.
                expiresOn -= AccessTokenLifetimeTolerance;
            }
            accessToken = Tuple.Create(oAuth2AccessTokenResponse.AccessToken, expiresOn);
        }
        catch (WebException)
        {
            // Best-effort: a network failure during renewal is swallowed here;
            // the caller then receives null from GetAccessTokenString.
        }
    }
}
/// <summary>
/// Default provider for SharePointAcsContext.
/// </summary>
public class SharePointAcsContextProvider : SharePointContextProvider
{
    private const string SPContextKey = "SPContext";
    private const string SPCacheKeyKey = "SPCacheKey";

    /// <summary>
    /// Builds a SharePointAcsContext from the context token carried by the request,
    /// or returns <c>null</c> when the token is missing or fails validation.
    /// </summary>
    protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
    {
        string rawContextToken = TokenHelper.GetContextTokenFromRequest(httpRequest);
        if (string.IsNullOrEmpty(rawContextToken))
        {
            return null;
        }

        SharePointContextToken parsedToken;
        try
        {
            parsedToken = TokenHelper.ReadAndValidateContextToken(rawContextToken, httpRequest.Url.Authority);
        }
        catch (WebException)
        {
            // ACS could not be reached to validate the token.
            return null;
        }
        catch (AudienceUriValidationFailedException)
        {
            // The token was issued for a different audience.
            return null;
        }

        return new SharePointAcsContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, rawContextToken, parsedToken);
    }

    /// <summary>
    /// A cached ACS context is valid only when it matches the current request:
    /// same SPHostUrl, matching SPCacheKey cookie, an unexpired context token,
    /// and (when the request carries a token) the same context token.
    /// </summary>
    protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        var acsContext = spContext as SharePointAcsContext;
        if (acsContext == null)
        {
            return false;
        }

        Uri requestSpHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
        string requestContextToken = TokenHelper.GetContextTokenFromRequest(httpContext.Request);
        HttpCookie cacheKeyCookie = httpContext.Request.Cookies[SPCacheKeyKey];
        string cookieCacheKey = cacheKeyCookie == null ? null : cacheKeyCookie.Value;

        if (requestSpHostUrl != acsContext.SPHostUrl)
        {
            return false;
        }
        if (string.IsNullOrEmpty(acsContext.CacheKey) || cookieCacheKey != acsContext.CacheKey)
        {
            return false;
        }
        if (string.IsNullOrEmpty(acsContext.ContextToken))
        {
            return false;
        }
        return string.IsNullOrEmpty(requestContextToken) || requestContextToken == acsContext.ContextToken;
    }

    protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
    {
        // Session-backed storage keyed by SPContextKey.
        return httpContext.Session[SPContextKey] as SharePointAcsContext;
    }

    protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        var acsContext = spContext as SharePointAcsContext;
        if (acsContext != null)
        {
            // Persist the cache key in a secure, http-only cookie so later requests
            // can be validated against the session-cached context.
            var cacheKeyCookie = new HttpCookie(SPCacheKeyKey)
            {
                Value = acsContext.CacheKey,
                Secure = true,
                HttpOnly = true
            };
            httpContext.Response.AppendCookie(cacheKeyCookie);
        }
        // Storing null (non-ACS or null input) clears any cached context.
        httpContext.Session[SPContextKey] = acsContext;
    }
}
#endregion ACS
#region HighTrust
/// <summary>
/// Encapsulates all the information from SharePoint in HighTrust (S2S) mode.
/// </summary>
public class SharePointHighTrustContext : SharePointContext
{
    private readonly WindowsIdentity logonUserIdentity;

    /// <summary>
    /// The Windows identity for the current user.
    /// </summary>
    public WindowsIdentity LogonUserIdentity
    {
        get { return this.logonUserIdentity; }
    }

    /// <summary>
    /// User access token for the SharePoint host, minted locally via S2S with the
    /// current user's Windows identity. Cached in a field inherited from the base class.
    /// </summary>
    public override string UserAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, this.LogonUserIdentity));
        }
    }

    /// <summary>
    /// User access token for the app web, or <c>null</c> when there is no app web.
    /// </summary>
    public override string UserAccessTokenForSPAppWeb
    {
        get
        {
            if (this.SPAppWebUrl == null)
            {
                return null;
            }
            return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, this.LogonUserIdentity));
        }
    }

    /// <summary>
    /// App-only access token for the SharePoint host (a null identity yields an app-only token).
    /// </summary>
    public override string AppOnlyAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, null));
        }
    }

    /// <summary>
    /// App-only access token for the app web, or <c>null</c> when there is no app web.
    /// </summary>
    public override string AppOnlyAccessTokenForSPAppWeb
    {
        get
        {
            if (this.SPAppWebUrl == null)
            {
                return null;
            }
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, null));
        }
    }

    /// <summary>
    /// Initializes a high-trust context. The authenticated Windows identity is required.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="logonUserIdentity"/> is null.</exception>
    public SharePointHighTrustContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, WindowsIdentity logonUserIdentity)
        : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
    {
        if (logonUserIdentity == null)
        {
            throw new ArgumentNullException("logonUserIdentity");
        }
        this.logonUserIdentity = logonUserIdentity;
    }

    /// <summary>
    /// Ensures the access token is valid and returns it.
    /// </summary>
    /// <param name="accessToken">The access token to verify.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    /// <returns>The access token string, or <c>null</c> when no valid token could be obtained.</returns>
    private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
    {
        RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);
        return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
    }

    /// <summary>
    /// Renews the access token if it is not valid.
    /// Tokens are minted locally, so expiry is computed from the configured
    /// HighTrustAccessTokenLifetime rather than from a server response.
    /// </summary>
    /// <param name="accessToken">The access token to renew.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
    {
        if (IsAccessTokenValid(accessToken))
        {
            return;
        }
        DateTime expiresOn = DateTime.UtcNow.Add(TokenHelper.HighTrustAccessTokenLifetime);
        if (TokenHelper.HighTrustAccessTokenLifetime > AccessTokenLifetimeTolerance)
        {
            // Make the access token get renewed a bit earlier than the time when it expires
            // so that the calls to SharePoint with it will have enough time to complete successfully.
            expiresOn -= AccessTokenLifetimeTolerance;
        }
        accessToken = Tuple.Create(tokenRenewalHandler(), expiresOn);
    }
}
/// <summary>
/// Default provider for SharePointHighTrustContext.
/// </summary>
public class SharePointHighTrustContextProvider : SharePointContextProvider
{
    private const string SPContextKey = "SPContext";

    /// <summary>
    /// Builds a SharePointHighTrustContext from the request's Windows identity,
    /// or returns <c>null</c> for anonymous, guest, or incomplete identities.
    /// </summary>
    protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
    {
        WindowsIdentity identity = httpRequest.LogonUserIdentity;
        bool usableIdentity = identity != null
            && identity.IsAuthenticated
            && !identity.IsGuest
            && identity.User != null;
        if (!usableIdentity)
        {
            return null;
        }
        return new SharePointHighTrustContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, identity);
    }

    /// <summary>
    /// A cached high-trust context is valid only when the request's SPHostUrl and
    /// authenticated Windows user match those captured in the context.
    /// </summary>
    protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        var highTrustContext = spContext as SharePointHighTrustContext;
        if (highTrustContext == null)
        {
            return false;
        }
        Uri requestSpHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
        WindowsIdentity identity = httpContext.Request.LogonUserIdentity;
        return requestSpHostUrl == highTrustContext.SPHostUrl
            && identity != null
            && identity.IsAuthenticated
            && !identity.IsGuest
            && identity.User == highTrustContext.LogonUserIdentity.User;
    }

    protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
    {
        // Session-backed storage keyed by SPContextKey.
        return httpContext.Session[SPContextKey] as SharePointHighTrustContext;
    }

    protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        // Storing null (non-high-trust or null input) clears any cached context.
        httpContext.Session[SPContextKey] = spContext as SharePointHighTrustContext;
    }
}
#endregion HighTrust
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using Hyak.Common;
using Microsoft.Azure.Management.DataLake.AnalyticsJob.Models;
namespace Microsoft.Azure.Management.DataLake.AnalyticsJob.Models
{
/// <summary>
/// The Data Lake Analytics U-SQL specific job properties.
/// (Tool-generated model class; fields are simple backing stores for the properties.)
/// </summary>
public partial class USqlProperties : JobProperties
{
    private string _algebraFilePath;

    /// <summary>
    /// Optional. Gets or sets the U-SQL algebra file path after the job
    /// has completed.
    /// </summary>
    public string AlgebraFilePath
    {
        get { return this._algebraFilePath; }
        set { this._algebraFilePath = value; }
    }

    private string _compileMode;

    /// <summary>
    /// Optional. Gets or sets the compile mode for the job.
    /// </summary>
    public string CompileMode
    {
        get { return this._compileMode; }
        set { this._compileMode = value; }
    }

    private JobDataPath _debugData;

    /// <summary>
    /// Optional. Gets or sets the job specific debug data locations.
    /// </summary>
    public JobDataPath DebugData
    {
        get { return this._debugData; }
        set { this._debugData = value; }
    }

    private IList<Diagnostics> _diagnostics;

    /// <summary>
    /// Optional. Gets or sets the diagnostics for the job.
    /// </summary>
    public IList<Diagnostics> Diagnostics
    {
        get { return this._diagnostics; }
        set { this._diagnostics = value; }
    }

    private IList<JobResource> _resources;

    /// <summary>
    /// Optional. Gets or sets the list of resources that are required by
    /// the job.
    /// </summary>
    public IList<JobResource> Resources
    {
        get { return this._resources; }
        set { this._resources = value; }
    }

    private System.Guid? _rootProcessNodeId;

    /// <summary>
    /// Optional. Gets or sets the ID used to identify the job manager
    /// coordinating job execution. This value should not be set by the
    /// user and will be ignored if it is.
    /// </summary>
    public System.Guid? RootProcessNodeId
    {
        get { return this._rootProcessNodeId; }
        set { this._rootProcessNodeId = value; }
    }

    private JobStatistics _statistics;

    /// <summary>
    /// Optional. Gets or sets the job specific statistics.
    /// </summary>
    public JobStatistics Statistics
    {
        get { return this._statistics; }
        set { this._statistics = value; }
    }

    private System.TimeSpan? _totalCompilationTime;

    /// <summary>
    /// Optional. Gets or sets the total time this job spent compiling.
    /// This value should not be set by the user and will be ignored if it
    /// is.
    /// </summary>
    public System.TimeSpan? TotalCompilationTime
    {
        get { return this._totalCompilationTime; }
        set { this._totalCompilationTime = value; }
    }

    private System.TimeSpan? _totalPausedTime;

    /// <summary>
    /// Optional. Gets or sets the total time this job spent paused. This
    /// value should not be set by the user and will be ignored if it is.
    /// </summary>
    public System.TimeSpan? TotalPausedTime
    {
        get { return this._totalPausedTime; }
        set { this._totalPausedTime = value; }
    }

    private System.TimeSpan? _totalQueuedTime;

    /// <summary>
    /// Optional. Gets or sets the total time this job spent queued. This
    /// value should not be set by the user and will be ignored if it is.
    /// </summary>
    public System.TimeSpan? TotalQueuedTime
    {
        get { return this._totalQueuedTime; }
        set { this._totalQueuedTime = value; }
    }

    private System.TimeSpan? _totalRunningTime;

    /// <summary>
    /// Optional. Gets or sets the total time this job spent executing.
    /// This value should not be set by the user and will be ignored if it
    /// is.
    /// </summary>
    public System.TimeSpan? TotalRunningTime
    {
        get { return this._totalRunningTime; }
        set { this._totalRunningTime = value; }
    }

    private int? _yarnApplicationId;

    /// <summary>
    /// Optional. Gets or sets the ID used to identify the yarn application
    /// executing the job. This value should not be set by the user and
    /// will be ignored if it is.
    /// </summary>
    public int? YarnApplicationId
    {
        get { return this._yarnApplicationId; }
        set { this._yarnApplicationId = value; }
    }

    private long? _yarnApplicationTimeStamp;

    /// <summary>
    /// Optional. Gets or sets the timestamp (in ticks) for the yarn
    /// application executing the job. This value should not be set by the
    /// user and will be ignored if it is.
    /// </summary>
    public long? YarnApplicationTimeStamp
    {
        get { return this._yarnApplicationTimeStamp; }
        set { this._yarnApplicationTimeStamp = value; }
    }

    /// <summary>
    /// Initializes a new instance of the USqlProperties class.
    /// Collection properties start as empty LazyList instances (Hyak.Common).
    /// </summary>
    public USqlProperties()
    {
        this.Diagnostics = new LazyList<Diagnostics>();
        this.Resources = new LazyList<JobResource>();
    }

    /// <summary>
    /// Initializes a new instance of the USqlProperties class with
    /// required arguments.
    /// </summary>
    /// <param name="script">Required. The job script; assigned to the Script property declared on the base JobProperties class.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="script"/> is null.</exception>
    public USqlProperties(string script)
        : this()
    {
        if (script == null)
        {
            throw new ArgumentNullException("script");
        }
        this.Script = script;
    }
}
}
| |
/*
Copyright 2012 Michael Edwards
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//-CRE-
using System;
using Glass.Mapper.Configuration;
using Glass.Mapper.Pipelines.ConfigurationResolver.Tasks.OnDemandResolver;
using Glass.Mapper.Sc.Configuration;
using Sitecore.Data;
using Sitecore.Data.Items;
using Sitecore.Mvc.Configuration;
using Sitecore.Mvc.Data;
using Sitecore.Mvc.Extensions;
using Sitecore.Mvc.Pipelines.Response.GetModel;
using Sitecore.Mvc.Presentation;
namespace Glass.Mapper.Sc.Pipelines.Response
{
/// <summary>
/// Sitecore MVC GetModel pipeline processor that resolves a Glass.Mapper model for the
/// current rendering, trying (for layouts) the item and layout definition, then the
/// rendering properties, then the rendering definition item's Model field.
/// </summary>
public class GetModel : GetModelProcessor
{
    /// <summary>
    /// The model type field
    /// </summary>
    public const string ModelTypeField = "Model Type";

    /// <summary>
    /// The model field
    /// </summary>
    public const string ModelField = "Model";

    /// <summary>
    /// Initializes a new instance of the <see cref="GetModel"/> class.
    /// </summary>
    public GetModel()
    {
        ContextName = "Default";
    }

    /// <summary>
    /// Gets or sets the name of the context.
    /// </summary>
    /// <value>
    /// The name of the context (Glass.Mapper context to resolve types against).
    /// </value>
    public string ContextName { get; set; }

    /// <summary>
    /// Processes the specified args. Tries each model source in turn until one produces
    /// a non-null result; does nothing when args.Result is already set.
    /// </summary>
    /// <param name="args">The args.</param>
    public override void Process(GetModelArgs args)
    {
        if (args.Result == null)
        {
            Rendering rendering = args.Rendering;
            // Layout renderings may declare their model on the item or on the layout definition.
            if (rendering.RenderingType == "Layout")
            {
                args.Result = GetFromItem(rendering, args);
                if (args.Result == null)
                {
                    args.Result = GetFromLayout(rendering, args);
                }
            }
            if (args.Result == null)
            {
                args.Result = GetFromPropertyValue(rendering, args);
            }
            if (args.Result == null)
            {
                args.Result = GetFromField(rendering, args);
            }
        }
    }

    /// <summary>
    /// Gets the model from the rendering definition item's Model field.
    /// </summary>
    /// <param name="rendering">The rendering.</param>
    /// <param name="args">The args.</param>
    /// <returns>The resolved model, or <c>null</c>.</returns>
    protected virtual object GetFromField(Rendering rendering, GetModelArgs args)
    {
        Item obj = ObjectExtensions.ValueOrDefault<RenderingItem, Item>(rendering.RenderingItem, (Func<RenderingItem, Item>)(i => i.InnerItem));
        if (obj == null)
            return (object)null;
        else
            // NOTE(review): rendering.Item is dereferenced without a null check here and in the
            // methods below — confirm upstream always supplies a context item.
            return GetObject(obj[ModelField], rendering.Item.Database, rendering);
    }

    /// <summary>
    /// Gets the model from the rendering's "Model" property value.
    /// </summary>
    /// <param name="rendering">The rendering.</param>
    /// <param name="args">The args.</param>
    /// <returns>The resolved model, or <c>null</c>.</returns>
    protected virtual object GetFromPropertyValue(Rendering rendering, GetModelArgs args)
    {
        string model = rendering.Properties[ModelField];
        if (StringExtensions.IsWhiteSpaceOrNull(model))
            return (object)null;
        else
            return GetObject(model, rendering.Item.Database, rendering);
    }

    /// <summary>
    /// Gets the model from the layout item referenced by the rendering's "LayoutId" property.
    /// </summary>
    /// <param name="rendering">The rendering.</param>
    /// <param name="args">The args.</param>
    /// <returns>The resolved model, or <c>null</c>.</returns>
    protected virtual object GetFromLayout(Rendering rendering, GetModelArgs args)
    {
        string pathOrId = rendering.Properties["LayoutId"];
        if (StringExtensions.IsWhiteSpaceOrNull(pathOrId))
            return (object)null;
        string model = ObjectExtensions.ValueOrDefault<Item, string>(MvcSettings.GetRegisteredObject<ItemLocator>().GetItem(pathOrId), (Func<Item, string>)(i => i["Model"]));
        if (StringExtensions.IsWhiteSpaceOrNull(model))
            return (object) null;
        else
            return GetObject(model, rendering.Item.Database, rendering);
    }

    /// <summary>
    /// Gets the model from the context item's "MvcLayoutModel" field.
    /// </summary>
    /// <param name="rendering">The rendering.</param>
    /// <param name="args">The args.</param>
    /// <returns>The resolved model, or <c>null</c>.</returns>
    protected virtual object GetFromItem(Rendering rendering, GetModelArgs args)
    {
        string model = ObjectExtensions.ValueOrDefault<Item, string>(rendering.Item, (Func<Item, string>)(i => i["MvcLayoutModel"]));
        if (StringExtensions.IsWhiteSpaceOrNull(model))
            return (object) null;
        else
            return GetObject(model, rendering.Item.Database, rendering);
    }

    /// <summary>
    /// Gets the object.
    /// </summary>
    /// <param name="model">The model: a .NET type name, a Sitecore path, or an item ID.</param>
    /// <param name="db">The db.</param>
    /// <param name="renderingItem">The rendering whose data source (if any) supplies the item to map.</param>
    /// <returns>The mapped model instance, or <c>null</c>.</returns>
    /// <exception cref="Glass.Mapper.MapperException">Failed to find context {0}.Formatted(ContextName)</exception>
    public object GetObject(string model, Database db, Rendering renderingItem)
    {
        if (model.IsNullOrEmpty())
            return null;
        //must be a path to a Model item
        // NOTE(review): this indirection recurses via the item's "Model Type" field; a field
        // that points back to its own item would never terminate — confirm content constraints.
        if (model.StartsWith("/sitecore"))
        {
            var target = db.GetItem(model);
            if (target == null)
                return null;
            string newModel = target[ModelTypeField];
            return GetObject(newModel, db, renderingItem);
        }
        //if guid must be that to Model item
        Guid targetId;
        if (Guid.TryParse(model, out targetId))
        {
            var target = db.GetItem(new ID(targetId));
            if (target == null)
                return null;
            string newModel = target[ModelTypeField];
            return GetObject(newModel, db, renderingItem);
        }
        // Otherwise treat the value as a type name; unresolvable types are skipped silently.
        var type = Type.GetType(model, false);
        if (type == null)
            return null;
        var context = Context.Contexts[ContextName];
        if (context == null) throw new MapperException("Failed to find context {0}".Formatted(ContextName));
        //this is really aggressive
        if (!context.TypeConfigurations.ContainsKey(type))
        {
            //if the config is null then it is probably an ondemand mapping so we have to load the ondemand part
            IConfigurationLoader loader =
                new OnDemandLoader<SitecoreTypeConfiguration>(type);
            context.Load(loader);
        }
        ISitecoreContext scContext = new SitecoreContext(context);
        // Prefer the rendering's data source item when one is set; otherwise map the current item.
        if (renderingItem.DataSource.IsNotNullOrEmpty())
        {
            var item = scContext.Database.GetItem(renderingItem.DataSource);
            return scContext.CreateType(type, item, false, false, null);
        }
        return scContext.GetCurrentItem(type);
    }
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="NaccacheSternEngine.cs">
// Copyright (c) 2014 Alexander Logger.
// Copyright (c) 2000 - 2013 The Legion of the Bouncy Castle Inc. (http://www.bouncycastle.org).
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
using System;
using System.Collections;
using System.Diagnostics;
using BigMath;
using Raksha.Crypto.Parameters;
using Raksha.Math;
using Raksha.Utilities;
namespace Raksha.Crypto.Engines
{
/// <summary>
/// NaccacheStern Engine.
/// </summary>
/// <remarks>
/// For details on this cipher, please see http://www.gemplus.com/smart/rd/publications/pdf/NS98pkcs.pdf
/// </remarks>
public class NaccacheSternEngine : IAsymmetricBlockCipher
{
// Emit verbose diagnostic output via Debug.WriteLine when true.
private bool _debug;
// True when initialized for encryption, false for decryption.
private bool _forEncryption;
// Key parameters supplied to Init (private key when decrypting).
private NaccacheSternKeyParameters _key;
// Per-small-prime lookup tables built by Init for decryption:
// _lookup[i][j] = g^(j * phi(n) / p_i) mod n (see Init).
private IList[] _lookup;

/// <summary>
/// Enables verbose debug output during initialization and processing.
/// </summary>
public bool ShowDebug
{
    get { return _debug; }
    set { _debug = value; }
}

/// <summary>
/// The name of this algorithm: "NaccacheStern".
/// </summary>
public string AlgorithmName
{
    get { return "NaccacheStern"; }
}
/**
 * Initializes this algorithm. Must be called before all other Functions.
 * For decryption, precomputes per-prime discrete-log lookup tables.
 *
 * @see org.bouncycastle.crypto.AsymmetricBlockCipher#init(bool,
 *      org.bouncycastle.crypto.CipherParameters)
 */
public void Init(bool forEncryption, ICipherParameters parameters)
{
    _forEncryption = forEncryption;
    // Unwrap an optional randomness wrapper to reach the key parameters.
    if (parameters is ParametersWithRandom)
    {
        parameters = ((ParametersWithRandom) parameters).Parameters;
    }
    _key = (NaccacheSternKeyParameters) parameters;
    // construct lookup table for faster decryption if necessary
    if (!_forEncryption)
    {
        if (_debug)
        {
            Debug.WriteLine("Constructing lookup Array");
        }
        var priv = (NaccacheSternPrivateKeyParameters) _key;
        IList primes = priv.SmallPrimesList;
        _lookup = new IList[primes.Count];
        // For each small prime p_i, precompute g^(j * phi(n) / p_i) mod n for j = 0..p_i-1.
        for (int i = 0; i < primes.Count; i++)
        {
            var actualPrime = (BigInteger) primes[i];
            int actualPrimeValue = actualPrime.IntValue;
            _lookup[i] = Platform.CreateArrayList(actualPrimeValue);
            // j = 0 entry: g^0 = 1.
            _lookup[i].Add(BigInteger.One);
            if (_debug)
            {
                Debug.WriteLine("Constructing lookup ArrayList for " + actualPrimeValue);
            }
            BigInteger accJ = BigInteger.Zero;
            for (int j = 1; j < actualPrimeValue; j++)
            {
                // accJ accumulates j * phi(n) by repeated addition instead of
                // multiplying afresh on each iteration.
                // BigInteger bigJ = BigInteger.ValueOf(j);
                // accJ = priv.PhiN.Multiply(bigJ);
                accJ = accJ.Add(priv.PhiN);
                BigInteger comp = accJ.Divide(actualPrime);
                _lookup[i].Add(priv.G.ModPow(comp, priv.Modulus));
            }
        }
    }
}
/**
* Returns the input block size of this algorithm.
*
* @see org.bouncycastle.crypto.AsymmetricBlockCipher#GetInputBlockSize()
*/
public int GetInputBlockSize()
{
if (_forEncryption)
{
// We can only encrypt values up to lowerSigmaBound
return (_key.LowerSigmaBound + 7)/8 - 1;
}
// We pad to modulus-size bytes for easier decryption.
// return key.Modulus.ToByteArray().Length;
return _key.Modulus.BitLength/8 + 1;
}
/**
* Returns the output block size of this algorithm.
*
* @see org.bouncycastle.crypto.AsymmetricBlockCipher#GetOutputBlockSize()
*/
public int GetOutputBlockSize()
{
if (_forEncryption)
{
// encrypted Data is always padded up to modulus size
// return key.Modulus.ToByteArray().Length;
return _key.Modulus.BitLength/8 + 1;
}
// decrypted Data has upper limit lowerSigmaBound
return (_key.LowerSigmaBound + 7)/8 - 1;
}
/**
* Process a single Block using the Naccache-Stern algorithm.
*
* @see org.bouncycastle.crypto.AsymmetricBlockCipher#ProcessBlock(byte[],
* int, int)
*/
public byte[] ProcessBlock(byte[] inBytes, int inOff, int length)
{
if (_key == null)
{
throw new InvalidOperationException("NaccacheStern engine not initialised");
}
if (length > (GetInputBlockSize() + 1))
{
throw new DataLengthException("input too large for Naccache-Stern cipher.\n");
}
if (!_forEncryption)
{
// At decryption make sure that we receive padded data blocks
if (length < GetInputBlockSize())
{
throw new InvalidCipherTextException("BlockLength does not match modulus for Naccache-Stern cipher.\n");
}
}
// transform input into BigInteger
var input = new BigInteger(1, inBytes, inOff, length);
if (_debug)
{
Debug.WriteLine("input as BigInteger: " + input);
}
byte[] output;
if (_forEncryption)
{
output = Encrypt(input);
}
else
{
IList plain = Platform.CreateArrayList();
var priv = (NaccacheSternPrivateKeyParameters) _key;
IList primes = priv.SmallPrimesList;
// Get Chinese Remainders of CipherText
for (int i = 0; i < primes.Count; i++)
{
BigInteger exp = input.ModPow(priv.PhiN.Divide((BigInteger) primes[i]), priv.Modulus);
IList al = _lookup[i];
if (_lookup[i].Count != ((BigInteger) primes[i]).IntValue)
{
if (_debug)
{
Debug.WriteLine("Prime is " + primes[i] + ", lookup table has size " + al.Count);
}
throw new InvalidCipherTextException("Error in lookup Array for " + ((BigInteger) primes[i]).IntValue +
": Size mismatch. Expected ArrayList with length " + ((BigInteger) primes[i]).IntValue + " but found ArrayList of length " + _lookup[i].Count);
}
int lookedup = al.IndexOf(exp);
if (lookedup == -1)
{
if (_debug)
{
Debug.WriteLine("Actual prime is " + primes[i]);
Debug.WriteLine("Decrypted value is " + exp);
Debug.WriteLine("LookupList for " + primes[i] + " with size " + _lookup[i].Count + " is: ");
for (int j = 0; j < _lookup[i].Count; j++)
{
Debug.WriteLine(_lookup[i][j]);
}
}
throw new InvalidCipherTextException("Lookup failed");
}
plain.Add(BigInteger.ValueOf(lookedup));
}
BigInteger test = ChineseRemainder(plain, primes);
// Should not be used as an oracle, so reencrypt output to see
// if it corresponds to input
// this breaks probabilisic encryption, so disable it. Anyway, we do
// use the first n primes for key generation, so it is pretty easy
// to guess them. But as stated in the paper, this is not a security
// breach. So we can just work with the correct sigma.
// if (debug) {
// Debug.WriteLine("Decryption is " + test);
// }
// if ((key.G.ModPow(test, key.Modulus)).Equals(input)) {
// output = test.ToByteArray();
// } else {
// if(debug){
// Debug.WriteLine("Engine seems to be used as an oracle,
// returning null");
// }
// output = null;
// }
output = test.ToByteArray();
}
return output;
}
/**
* Encrypts a BigInteger aka Plaintext with the public key.
*
* @param plain
* The BigInteger to encrypt
* @return The byte[] representation of the encrypted BigInteger (i.e.
* crypted.toByteArray())
*/
public byte[] Encrypt(BigInteger plain)
{
// Always return modulus size values 0-padded at the beginning
// 0-padding at the beginning is correctly parsed by BigInteger :)
// byte[] output = key.Modulus.ToByteArray();
// Array.Clear(output, 0, output.Length);
var output = new byte[_key.Modulus.BitLength/8 + 1];
byte[] tmp = _key.G.ModPow(plain, _key.Modulus).ToByteArray();
Array.Copy(tmp, 0, output, output.Length - tmp.Length, tmp.Length);
if (_debug)
{
Debug.WriteLine("Encrypted value is: " + new BigInteger(output));
}
return output;
}
/**
* Adds the contents of two encrypted blocks mod sigma
*
* @param block1
* the first encrypted block
* @param block2
* the second encrypted block
* @return encrypt((block1 + block2) mod sigma)
* @throws InvalidCipherTextException
*/
public byte[] AddCryptedBlocks(byte[] block1, byte[] block2)
{
// check for correct blocksize
if (_forEncryption)
{
if ((block1.Length > GetOutputBlockSize()) || (block2.Length > GetOutputBlockSize()))
{
throw new InvalidCipherTextException("BlockLength too large for simple addition.\n");
}
}
else
{
if ((block1.Length > GetInputBlockSize()) || (block2.Length > GetInputBlockSize()))
{
throw new InvalidCipherTextException("BlockLength too large for simple addition.\n");
}
}
// calculate resulting block
var m1Crypt = new BigInteger(1, block1);
var m2Crypt = new BigInteger(1, block2);
BigInteger m1m2Crypt = m1Crypt.Multiply(m2Crypt);
m1m2Crypt = m1m2Crypt.Mod(_key.Modulus);
if (_debug)
{
Debug.WriteLine("c(m1) as BigInteger:....... " + m1Crypt);
Debug.WriteLine("c(m2) as BigInteger:....... " + m2Crypt);
Debug.WriteLine("c(m1)*c(m2)%n = c(m1+m2)%n: " + m1m2Crypt);
}
//byte[] output = key.Modulus.ToByteArray();
//Array.Clear(output, 0, output.Length);
var output = new byte[_key.Modulus.BitLength/8 + 1];
byte[] m1m2CryptBytes = m1m2Crypt.ToByteArray();
Array.Copy(m1m2CryptBytes, 0, output, output.Length - m1m2CryptBytes.Length, m1m2CryptBytes.Length);
return output;
}
/**
* Convenience Method for data exchange with the cipher.
*
* Determines blocksize and splits data to blocksize.
*
* @param data the data to be processed
* @return the data after it went through the NaccacheSternEngine.
* @throws InvalidCipherTextException
*/
public byte[] ProcessData(byte[] data)
{
if (_debug)
{
Debug.WriteLine(string.Empty);
}
if (data.Length > GetInputBlockSize())
{
int inBlocksize = GetInputBlockSize();
int outBlocksize = GetOutputBlockSize();
if (_debug)
{
Debug.WriteLine("Input blocksize is: " + inBlocksize + " bytes");
Debug.WriteLine("Output blocksize is: " + outBlocksize + " bytes");
Debug.WriteLine("Data has length:.... " + data.Length + " bytes");
}
int datapos = 0;
int retpos = 0;
var retval = new byte[(data.Length/inBlocksize + 1)*outBlocksize];
while (datapos < data.Length)
{
byte[] tmp;
if (datapos + inBlocksize < data.Length)
{
tmp = ProcessBlock(data, datapos, inBlocksize);
datapos += inBlocksize;
}
else
{
tmp = ProcessBlock(data, datapos, data.Length - datapos);
datapos += data.Length - datapos;
}
if (_debug)
{
Debug.WriteLine("new datapos is " + datapos);
}
if (tmp != null)
{
tmp.CopyTo(retval, retpos);
retpos += tmp.Length;
}
else
{
if (_debug)
{
Debug.WriteLine("cipher returned null");
}
throw new InvalidCipherTextException("cipher returned null");
}
}
var ret = new byte[retpos];
Array.Copy(retval, 0, ret, 0, retpos);
if (_debug)
{
Debug.WriteLine("returning " + ret.Length + " bytes");
}
return ret;
}
if (_debug)
{
Debug.WriteLine("data size is less then input block size, processing directly");
}
return ProcessBlock(data, 0, data.Length);
}
/**
* Computes the integer x that is expressed through the given primes and the
* congruences with the chinese remainder theorem (CRT).
*
* @param congruences
* the congruences c_i
* @param primes
* the primes p_i
* @return an integer x for that x % p_i == c_i
*/
private static BigInteger ChineseRemainder(IList congruences, IList primes)
{
BigInteger retval = BigInteger.Zero;
BigInteger all = BigInteger.One;
for (int i = 0; i < primes.Count; i++)
{
all = all.Multiply((BigInteger) primes[i]);
}
for (int i = 0; i < primes.Count; i++)
{
var a = (BigInteger) primes[i];
BigInteger b = all.Divide(a);
BigInteger b2 = b.ModInverse(a);
BigInteger tmp = b.Multiply(b2);
tmp = tmp.Multiply((BigInteger) congruences[i]);
retval = retval.Add(tmp);
}
return retval.Mod(all);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace jcIQAPI.Tests.WebAPI.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
// Seed the factory chain with the default object-generator based factory;
// callers may insert overrides before it or append fallbacks after it.
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langref="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="api"/> is null.</exception>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
// NOTE(review): Dictionary.Add assumes at most one direct action sample per
// media type; duplicate MediaType keys here would throw ArgumentException.
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
// Direct action samples added above take precedence over generated ones.
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters, or null if none is registered.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object, or null if no registered sample or factory produced one.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
/// <returns>The resolved body type, or null when no request body parameter or response type applies.</returns>
/// <exception cref="InvalidEnumArgumentException">If <paramref name="sampleDirection"/> is not a defined value.</exception>
/// <exception cref="ArgumentNullException">If <paramref name="api"/> is null.</exception>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>
/// A <see cref="TextSample"/> containing the serialized (and, for JSON/XML, pretty-printed)
/// sample, or an <see cref="InvalidSample"/> describing why generation failed.
/// </returns>
/// <exception cref="ArgumentNullException">If <paramref name="formatter"/> or <paramref name="mediaType"/> is null.</exception>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
// NOTE(review): synchronous wait on the async write; this is the standard
// Web API help-page template pattern for generating samples at page-render time.
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
/// <summary>
/// Unwraps an <see cref="AggregateException"/> (as thrown by Task.Wait) to its first
/// inner exception; any other exception is returned unchanged.
/// </summary>
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
/// <summary>
/// Default factory for sample objects: delegates to <see cref="ObjectGenerator.GenerateObject(Type)"/>.
/// </summary>
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
/// <summary>
/// Pretty-prints a JSON string with indentation; returns the input unchanged if it cannot be parsed.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
/// <summary>
/// Pretty-prints an XML string; returns the input unchanged if it cannot be parsed.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
/// <summary>
/// Determines whether the formatter can read (request) or write (response) the given type.
/// Returns false for any other <see cref="SampleDirection"/> value.
/// </summary>
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
/// <summary>
/// Enumerates the entries in <see cref="ActionSamples"/> that match the given controller,
/// action, parameter set (or the "*" wildcard) and direction. Comparison of names is
/// case-insensitive; parameter-name matching ignores order.
/// </summary>
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
/// <summary>
/// Wraps a raw string sample in a <see cref="TextSample"/>; non-string samples are returned unchanged.
/// </summary>
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/iam/admin/v1/iam.proto
// Original file comments:
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#region Designer generated code
using System;
using System.Threading;
using System.Threading.Tasks;
using Grpc.Core;
namespace Google.Iam.Admin.V1 {
/// <summary>
/// Creates and manages service account objects.
///
/// Service account is an account that belongs to your project instead
/// of to an individual end user. It is used to authenticate calls
/// to a Google API.
///
/// To create a service account, specify the `project_id` and `account_id`
/// for the account. The `account_id` is unique within the project, and used
/// to generate the service account email address and a stable
/// `unique_id`.
///
/// All other methods can identify accounts using the format
/// `projects/{project}/serviceAccounts/{account}`.
/// Using `-` as a wildcard for the project will infer the project from
/// the account. The `account` value can be the `email` address or the
/// `unique_id` of the service account.
/// </summary>
public static class IAM
{
static readonly string __ServiceName = "google.iam.admin.v1.IAM";
static readonly Marshaller<global::Google.Iam.Admin.V1.ListServiceAccountsRequest> __Marshaller_ListServiceAccountsRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.ListServiceAccountsRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.ListServiceAccountsResponse> __Marshaller_ListServiceAccountsResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.ListServiceAccountsResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.GetServiceAccountRequest> __Marshaller_GetServiceAccountRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.GetServiceAccountRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.ServiceAccount> __Marshaller_ServiceAccount = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.ServiceAccount.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.CreateServiceAccountRequest> __Marshaller_CreateServiceAccountRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.CreateServiceAccountRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.DeleteServiceAccountRequest> __Marshaller_DeleteServiceAccountRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.DeleteServiceAccountRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Protobuf.WellKnownTypes.Empty> __Marshaller_Empty = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Protobuf.WellKnownTypes.Empty.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest> __Marshaller_ListServiceAccountKeysRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse> __Marshaller_ListServiceAccountKeysResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest> __Marshaller_GetServiceAccountKeyRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.ServiceAccountKey> __Marshaller_ServiceAccountKey = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.ServiceAccountKey.Parser.ParseFrom);
// Marshallers that translate between protobuf messages and wire bytes for the
// IAM RPCs declared below: serialize via ToByteArray, parse via the message's
// generated Parser. (Marshallers for the earlier request/response types are
// declared above this chunk.)
static readonly Marshaller<global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest> __Marshaller_CreateServiceAccountKeyRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest> __Marshaller_DeleteServiceAccountKeyRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.SignBlobRequest> __Marshaller_SignBlobRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.SignBlobRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.SignBlobResponse> __Marshaller_SignBlobResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.SignBlobResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.Cloud.Iam.V1.GetIamPolicyRequest> __Marshaller_GetIamPolicyRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.GetIamPolicyRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Cloud.Iam.V1.Policy> __Marshaller_Policy = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.Policy.Parser.ParseFrom);
static readonly Marshaller<global::Google.Cloud.Iam.V1.SetIamPolicyRequest> __Marshaller_SetIamPolicyRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.SetIamPolicyRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Cloud.Iam.V1.TestIamPermissionsRequest> __Marshaller_TestIamPermissionsRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.TestIamPermissionsRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> __Marshaller_TestIamPermissionsResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.TestIamPermissionsResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.QueryGrantableRolesRequest> __Marshaller_QueryGrantableRolesRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.QueryGrantableRolesRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Iam.Admin.V1.QueryGrantableRolesResponse> __Marshaller_QueryGrantableRolesResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Iam.Admin.V1.QueryGrantableRolesResponse.Parser.ParseFrom);
// Static method descriptors: one per RPC of the IAM service. All fourteen are
// unary calls; each pairs the service name and RPC name with the request and
// response marshallers declared above.
static readonly Method<global::Google.Iam.Admin.V1.ListServiceAccountsRequest, global::Google.Iam.Admin.V1.ListServiceAccountsResponse> __Method_ListServiceAccounts = new Method<global::Google.Iam.Admin.V1.ListServiceAccountsRequest, global::Google.Iam.Admin.V1.ListServiceAccountsResponse>(
MethodType.Unary,
__ServiceName,
"ListServiceAccounts",
__Marshaller_ListServiceAccountsRequest,
__Marshaller_ListServiceAccountsResponse);
static readonly Method<global::Google.Iam.Admin.V1.GetServiceAccountRequest, global::Google.Iam.Admin.V1.ServiceAccount> __Method_GetServiceAccount = new Method<global::Google.Iam.Admin.V1.GetServiceAccountRequest, global::Google.Iam.Admin.V1.ServiceAccount>(
MethodType.Unary,
__ServiceName,
"GetServiceAccount",
__Marshaller_GetServiceAccountRequest,
__Marshaller_ServiceAccount);
static readonly Method<global::Google.Iam.Admin.V1.CreateServiceAccountRequest, global::Google.Iam.Admin.V1.ServiceAccount> __Method_CreateServiceAccount = new Method<global::Google.Iam.Admin.V1.CreateServiceAccountRequest, global::Google.Iam.Admin.V1.ServiceAccount>(
MethodType.Unary,
__ServiceName,
"CreateServiceAccount",
__Marshaller_CreateServiceAccountRequest,
__Marshaller_ServiceAccount);
// Note: UpdateServiceAccount takes the ServiceAccount message itself as the
// request type (no dedicated *Request wrapper in this API version).
static readonly Method<global::Google.Iam.Admin.V1.ServiceAccount, global::Google.Iam.Admin.V1.ServiceAccount> __Method_UpdateServiceAccount = new Method<global::Google.Iam.Admin.V1.ServiceAccount, global::Google.Iam.Admin.V1.ServiceAccount>(
MethodType.Unary,
__ServiceName,
"UpdateServiceAccount",
__Marshaller_ServiceAccount,
__Marshaller_ServiceAccount);
static readonly Method<global::Google.Iam.Admin.V1.DeleteServiceAccountRequest, global::Google.Protobuf.WellKnownTypes.Empty> __Method_DeleteServiceAccount = new Method<global::Google.Iam.Admin.V1.DeleteServiceAccountRequest, global::Google.Protobuf.WellKnownTypes.Empty>(
MethodType.Unary,
__ServiceName,
"DeleteServiceAccount",
__Marshaller_DeleteServiceAccountRequest,
__Marshaller_Empty);
static readonly Method<global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest, global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse> __Method_ListServiceAccountKeys = new Method<global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest, global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse>(
MethodType.Unary,
__ServiceName,
"ListServiceAccountKeys",
__Marshaller_ListServiceAccountKeysRequest,
__Marshaller_ListServiceAccountKeysResponse);
static readonly Method<global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest, global::Google.Iam.Admin.V1.ServiceAccountKey> __Method_GetServiceAccountKey = new Method<global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest, global::Google.Iam.Admin.V1.ServiceAccountKey>(
MethodType.Unary,
__ServiceName,
"GetServiceAccountKey",
__Marshaller_GetServiceAccountKeyRequest,
__Marshaller_ServiceAccountKey);
static readonly Method<global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest, global::Google.Iam.Admin.V1.ServiceAccountKey> __Method_CreateServiceAccountKey = new Method<global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest, global::Google.Iam.Admin.V1.ServiceAccountKey>(
MethodType.Unary,
__ServiceName,
"CreateServiceAccountKey",
__Marshaller_CreateServiceAccountKeyRequest,
__Marshaller_ServiceAccountKey);
static readonly Method<global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest, global::Google.Protobuf.WellKnownTypes.Empty> __Method_DeleteServiceAccountKey = new Method<global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest, global::Google.Protobuf.WellKnownTypes.Empty>(
MethodType.Unary,
__ServiceName,
"DeleteServiceAccountKey",
__Marshaller_DeleteServiceAccountKeyRequest,
__Marshaller_Empty);
static readonly Method<global::Google.Iam.Admin.V1.SignBlobRequest, global::Google.Iam.Admin.V1.SignBlobResponse> __Method_SignBlob = new Method<global::Google.Iam.Admin.V1.SignBlobRequest, global::Google.Iam.Admin.V1.SignBlobResponse>(
MethodType.Unary,
__ServiceName,
"SignBlob",
__Marshaller_SignBlobRequest,
__Marshaller_SignBlobResponse);
// The three IAM-policy RPCs reuse the shared google.cloud.iam.v1 message types.
static readonly Method<global::Google.Cloud.Iam.V1.GetIamPolicyRequest, global::Google.Cloud.Iam.V1.Policy> __Method_GetIamPolicy = new Method<global::Google.Cloud.Iam.V1.GetIamPolicyRequest, global::Google.Cloud.Iam.V1.Policy>(
MethodType.Unary,
__ServiceName,
"GetIamPolicy",
__Marshaller_GetIamPolicyRequest,
__Marshaller_Policy);
static readonly Method<global::Google.Cloud.Iam.V1.SetIamPolicyRequest, global::Google.Cloud.Iam.V1.Policy> __Method_SetIamPolicy = new Method<global::Google.Cloud.Iam.V1.SetIamPolicyRequest, global::Google.Cloud.Iam.V1.Policy>(
MethodType.Unary,
__ServiceName,
"SetIamPolicy",
__Marshaller_SetIamPolicyRequest,
__Marshaller_Policy);
static readonly Method<global::Google.Cloud.Iam.V1.TestIamPermissionsRequest, global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> __Method_TestIamPermissions = new Method<global::Google.Cloud.Iam.V1.TestIamPermissionsRequest, global::Google.Cloud.Iam.V1.TestIamPermissionsResponse>(
MethodType.Unary,
__ServiceName,
"TestIamPermissions",
__Marshaller_TestIamPermissionsRequest,
__Marshaller_TestIamPermissionsResponse);
static readonly Method<global::Google.Iam.Admin.V1.QueryGrantableRolesRequest, global::Google.Iam.Admin.V1.QueryGrantableRolesResponse> __Method_QueryGrantableRoles = new Method<global::Google.Iam.Admin.V1.QueryGrantableRolesRequest, global::Google.Iam.Admin.V1.QueryGrantableRolesResponse>(
MethodType.Unary,
__ServiceName,
"QueryGrantableRoles",
__Marshaller_QueryGrantableRolesRequest,
__Marshaller_QueryGrantableRolesResponse);
/// <summary>Service descriptor for the IAM service, taken from the generated reflection data.</summary>
public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
{
get { return global::Google.Iam.Admin.V1.IamReflection.Descriptor.Services[0]; }
}
/// <summary>Base class for server-side implementations of IAM</summary>
public abstract class IAMBase
{
    // Every handler below defaults to an UNIMPLEMENTED status; concrete
    // services override just the RPCs they support. The shared factory keeps
    // the fourteen stubs uniform.
    static RpcException UnimplementedException()
    {
        return new RpcException(new Status(StatusCode.Unimplemented, ""));
    }
    /// <summary>
    /// Lists [ServiceAccounts][google.iam.admin.v1.ServiceAccount] for a project.
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.ListServiceAccountsResponse> ListServiceAccounts(global::Google.Iam.Admin.V1.ListServiceAccountsRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Gets a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.ServiceAccount> GetServiceAccount(global::Google.Iam.Admin.V1.GetServiceAccountRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Creates a [ServiceAccount][google.iam.admin.v1.ServiceAccount]
    /// and returns it.
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.ServiceAccount> CreateServiceAccount(global::Google.Iam.Admin.V1.CreateServiceAccountRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Updates a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
    ///
    /// Currently, only the following fields are updatable:
    /// `display_name` .
    /// The `etag` is mandatory.
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.ServiceAccount> UpdateServiceAccount(global::Google.Iam.Admin.V1.ServiceAccount request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Deletes a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Protobuf.WellKnownTypes.Empty> DeleteServiceAccount(global::Google.Iam.Admin.V1.DeleteServiceAccountRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Lists [ServiceAccountKeys][google.iam.admin.v1.ServiceAccountKey].
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse> ListServiceAccountKeys(global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Gets the [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
    /// by key id.
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.ServiceAccountKey> GetServiceAccountKey(global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Creates a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
    /// and returns it.
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.ServiceAccountKey> CreateServiceAccountKey(global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Deletes a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey].
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Protobuf.WellKnownTypes.Empty> DeleteServiceAccountKey(global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Signs a blob using a service account's system-managed private key.
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.SignBlobResponse> SignBlob(global::Google.Iam.Admin.V1.SignBlobRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Returns the IAM access control policy for a
    /// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Iam.V1.Policy> GetIamPolicy(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Sets the IAM access control policy for a
    /// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Iam.V1.Policy> SetIamPolicy(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Tests the specified permissions against the IAM access control policy
    /// for a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> TestIamPermissions(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
    /// <summary>
    /// Queries roles that can be granted on a particular resource.
    /// A role is grantable if it can be used as the role in a binding for a policy
    /// for that resource.
    /// </summary>
    /// <param name="request">The request received from the client.</param>
    /// <param name="context">The context of the server-side call handler being invoked.</param>
    /// <returns>The response to send back to the client (wrapped by a task).</returns>
    public virtual global::System.Threading.Tasks.Task<global::Google.Iam.Admin.V1.QueryGrantableRolesResponse> QueryGrantableRoles(global::Google.Iam.Admin.V1.QueryGrantableRolesRequest request, ServerCallContext context)
    {
        throw UnimplementedException();
    }
}
/// <summary>Client for IAM</summary>
public class IAMClient : ClientBase<IAMClient>
{
/// <summary>Creates a new client for IAM</summary>
/// <param name="channel">The channel to use to make remote calls.</param>
public IAMClient(Channel channel) : base(channel)
{
}
/// <summary>Creates a new client for IAM that uses a custom <c>CallInvoker</c>.</summary>
/// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
public IAMClient(CallInvoker callInvoker) : base(callInvoker)
{
}
/// <summary>Protected parameterless constructor to allow creation of test doubles (e.g. mocks that override the virtual call methods).</summary>
protected IAMClient() : base()
{
}
/// <summary>Protected constructor to allow creation of configured clients, e.g. by <c>WithHost</c>-style copy methods on the base class.</summary>
/// <param name="configuration">The client configuration.</param>
protected IAMClient(ClientBaseConfiguration configuration) : base(configuration)
{
}
/// <summary>
/// Lists [ServiceAccounts][google.iam.admin.v1.ServiceAccount] for a project.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ListServiceAccountsResponse ListServiceAccounts(global::Google.Iam.Admin.V1.ListServiceAccountsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return ListServiceAccounts(request, options);
}
/// <summary>
/// Lists [ServiceAccounts][google.iam.admin.v1.ServiceAccount] for a project.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ListServiceAccountsResponse ListServiceAccounts(global::Google.Iam.Admin.V1.ListServiceAccountsRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_ListServiceAccounts, null, options, request);
}
/// <summary>
/// Lists [ServiceAccounts][google.iam.admin.v1.ServiceAccount] for a project.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ListServiceAccountsResponse> ListServiceAccountsAsync(global::Google.Iam.Admin.V1.ListServiceAccountsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return ListServiceAccountsAsync(request, options);
}
/// <summary>
/// Lists [ServiceAccounts][google.iam.admin.v1.ServiceAccount] for a project.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ListServiceAccountsResponse> ListServiceAccountsAsync(global::Google.Iam.Admin.V1.ListServiceAccountsRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_ListServiceAccounts, null, options, request);
}
/// <summary>
/// Gets a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccount GetServiceAccount(global::Google.Iam.Admin.V1.GetServiceAccountRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return GetServiceAccount(request, options);
}
/// <summary>
/// Gets a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccount GetServiceAccount(global::Google.Iam.Admin.V1.GetServiceAccountRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_GetServiceAccount, null, options, request);
}
/// <summary>
/// Gets a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccount> GetServiceAccountAsync(global::Google.Iam.Admin.V1.GetServiceAccountRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return GetServiceAccountAsync(request, options);
}
/// <summary>
/// Gets a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccount> GetServiceAccountAsync(global::Google.Iam.Admin.V1.GetServiceAccountRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_GetServiceAccount, null, options, request);
}
/// <summary>
/// Creates a [ServiceAccount][google.iam.admin.v1.ServiceAccount]
/// and returns it.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccount CreateServiceAccount(global::Google.Iam.Admin.V1.CreateServiceAccountRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return CreateServiceAccount(request, options);
}
/// <summary>
/// Creates a [ServiceAccount][google.iam.admin.v1.ServiceAccount]
/// and returns it.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccount CreateServiceAccount(global::Google.Iam.Admin.V1.CreateServiceAccountRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_CreateServiceAccount, null, options, request);
}
/// <summary>
/// Creates a [ServiceAccount][google.iam.admin.v1.ServiceAccount]
/// and returns it.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccount> CreateServiceAccountAsync(global::Google.Iam.Admin.V1.CreateServiceAccountRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return CreateServiceAccountAsync(request, options);
}
/// <summary>
/// Creates a [ServiceAccount][google.iam.admin.v1.ServiceAccount]
/// and returns it.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccount> CreateServiceAccountAsync(global::Google.Iam.Admin.V1.CreateServiceAccountRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_CreateServiceAccount, null, options, request);
}
/// <summary>
/// Updates a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
///
/// Currently, only the following fields are updatable:
/// `display_name` .
/// The `etag` is mandatory.
/// </summary>
/// <param name="request">The request to send to the server (the ServiceAccount message itself; this RPC has no dedicated request wrapper).</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccount UpdateServiceAccount(global::Google.Iam.Admin.V1.ServiceAccount request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return UpdateServiceAccount(request, options);
}
/// <summary>
/// Updates a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
///
/// Currently, only the following fields are updatable:
/// `display_name` .
/// The `etag` is mandatory.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccount UpdateServiceAccount(global::Google.Iam.Admin.V1.ServiceAccount request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_UpdateServiceAccount, null, options, request);
}
/// <summary>
/// Updates a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
///
/// Currently, only the following fields are updatable:
/// `display_name` .
/// The `etag` is mandatory.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccount> UpdateServiceAccountAsync(global::Google.Iam.Admin.V1.ServiceAccount request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return UpdateServiceAccountAsync(request, options);
}
/// <summary>
/// Updates a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
///
/// Currently, only the following fields are updatable:
/// `display_name` .
/// The `etag` is mandatory.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccount> UpdateServiceAccountAsync(global::Google.Iam.Admin.V1.ServiceAccount request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_UpdateServiceAccount, null, options, request);
}
/// <summary>
/// Deletes a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Protobuf.WellKnownTypes.Empty DeleteServiceAccount(global::Google.Iam.Admin.V1.DeleteServiceAccountRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return DeleteServiceAccount(request, options);
}
/// <summary>
/// Deletes a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Protobuf.WellKnownTypes.Empty DeleteServiceAccount(global::Google.Iam.Admin.V1.DeleteServiceAccountRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_DeleteServiceAccount, null, options, request);
}
/// <summary>
/// Deletes a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> DeleteServiceAccountAsync(global::Google.Iam.Admin.V1.DeleteServiceAccountRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return DeleteServiceAccountAsync(request, options);
}
/// <summary>
/// Deletes a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> DeleteServiceAccountAsync(global::Google.Iam.Admin.V1.DeleteServiceAccountRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_DeleteServiceAccount, null, options, request);
}
/// <summary>
/// Lists [ServiceAccountKeys][google.iam.admin.v1.ServiceAccountKey].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse ListServiceAccountKeys(global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return ListServiceAccountKeys(request, options);
}
/// <summary>
/// Lists [ServiceAccountKeys][google.iam.admin.v1.ServiceAccountKey].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse ListServiceAccountKeys(global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_ListServiceAccountKeys, null, options, request);
}
/// <summary>
/// Lists [ServiceAccountKeys][google.iam.admin.v1.ServiceAccountKey].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse> ListServiceAccountKeysAsync(global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return ListServiceAccountKeysAsync(request, options);
}
/// <summary>
/// Lists [ServiceAccountKeys][google.iam.admin.v1.ServiceAccountKey].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ListServiceAccountKeysResponse> ListServiceAccountKeysAsync(global::Google.Iam.Admin.V1.ListServiceAccountKeysRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_ListServiceAccountKeys, null, options, request);
}
/// <summary>
/// Gets the [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
/// by key id.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccountKey GetServiceAccountKey(global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return GetServiceAccountKey(request, options);
}
/// <summary>
/// Gets the [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
/// by key id.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccountKey GetServiceAccountKey(global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_GetServiceAccountKey, null, options, request);
}
/// <summary>
/// Gets the [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
/// by key id.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccountKey> GetServiceAccountKeyAsync(global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return GetServiceAccountKeyAsync(request, options);
}
/// <summary>
/// Gets the [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
/// by key id.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccountKey> GetServiceAccountKeyAsync(global::Google.Iam.Admin.V1.GetServiceAccountKeyRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_GetServiceAccountKey, null, options, request);
}
/// <summary>
/// Creates a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
/// and returns it.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccountKey CreateServiceAccountKey(global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return CreateServiceAccountKey(request, options);
}
/// <summary>
/// Creates a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
/// and returns it.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.ServiceAccountKey CreateServiceAccountKey(global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_CreateServiceAccountKey, null, options, request);
}
/// <summary>
/// Creates a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
/// and returns it.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccountKey> CreateServiceAccountKeyAsync(global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return CreateServiceAccountKeyAsync(request, options);
}
/// <summary>
/// Creates a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]
/// and returns it.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.ServiceAccountKey> CreateServiceAccountKeyAsync(global::Google.Iam.Admin.V1.CreateServiceAccountKeyRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_CreateServiceAccountKey, null, options, request);
}
/// <summary>
/// Deletes a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Protobuf.WellKnownTypes.Empty DeleteServiceAccountKey(global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return DeleteServiceAccountKey(request, options);
}
/// <summary>
/// Deletes a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Protobuf.WellKnownTypes.Empty DeleteServiceAccountKey(global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_DeleteServiceAccountKey, null, options, request);
}
/// <summary>
/// Deletes a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> DeleteServiceAccountKeyAsync(global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return DeleteServiceAccountKeyAsync(request, options);
}
/// <summary>
/// Deletes a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> DeleteServiceAccountKeyAsync(global::Google.Iam.Admin.V1.DeleteServiceAccountKeyRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_DeleteServiceAccountKey, null, options, request);
}
/// <summary>
/// Signs a blob using a service account's system-managed private key.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.SignBlobResponse SignBlob(global::Google.Iam.Admin.V1.SignBlobRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return SignBlob(request, options);
}
/// <summary>
/// Signs a blob using a service account's system-managed private key.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.SignBlobResponse SignBlob(global::Google.Iam.Admin.V1.SignBlobRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.BlockingUnaryCall(__Method_SignBlob, null, options, request);
}
/// <summary>
/// Signs a blob using a service account's system-managed private key.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.SignBlobResponse> SignBlobAsync(global::Google.Iam.Admin.V1.SignBlobRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    CallOptions options = new CallOptions(headers, deadline, cancellationToken);
    return SignBlobAsync(request, options);
}
/// <summary>
/// Signs a blob using a service account's system-managed private key.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.SignBlobResponse> SignBlobAsync(global::Google.Iam.Admin.V1.SignBlobRequest request, CallOptions options)
{
    // null host: let the channel's target decide where the call goes.
    return CallInvoker.AsyncUnaryCall(__Method_SignBlob, null, options, request);
}
/// <summary>
/// Returns the IAM access control policy for a
/// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
public virtual global::Google.Cloud.Iam.V1.Policy GetIamPolicy(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return GetIamPolicy(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Returns the IAM access control policy for a
/// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
public virtual global::Google.Cloud.Iam.V1.Policy GetIamPolicy(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_GetIamPolicy, null, options, request);
}
/// <summary>
/// Returns the IAM access control policy for a
/// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
public virtual AsyncUnaryCall<global::Google.Cloud.Iam.V1.Policy> GetIamPolicyAsync(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return GetIamPolicyAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Returns the IAM access control policy for a
/// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object representing the asynchronous invocation.</returns>
public virtual AsyncUnaryCall<global::Google.Cloud.Iam.V1.Policy> GetIamPolicyAsync(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_GetIamPolicy, null, options, request);
}
/// <summary>
/// Sets the IAM access control policy for a
/// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
public virtual global::Google.Cloud.Iam.V1.Policy SetIamPolicy(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Bundle the individual call settings and forward to the CallOptions overload.
    var callOptions = new CallOptions(headers, deadline, cancellationToken);
    return SetIamPolicy(request, callOptions);
}
/// <summary>
/// Sets the IAM access control policy for a
/// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Cloud.Iam.V1.Policy SetIamPolicy(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_SetIamPolicy, null, options, request);
}
/// <summary>
/// Sets the IAM access control policy for a
/// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
public virtual AsyncUnaryCall<global::Google.Cloud.Iam.V1.Policy> SetIamPolicyAsync(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Bundle the individual call settings and forward to the CallOptions overload.
    var callOptions = new CallOptions(headers, deadline, cancellationToken);
    return SetIamPolicyAsync(request, callOptions);
}
/// <summary>
/// Sets the IAM access control policy for a
/// [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object representing the asynchronous invocation.</returns>
public virtual AsyncUnaryCall<global::Google.Cloud.Iam.V1.Policy> SetIamPolicyAsync(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_SetIamPolicy, null, options, request);
}
/// <summary>
/// Tests the specified permissions against the IAM access control policy
/// for a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
public virtual global::Google.Cloud.Iam.V1.TestIamPermissionsResponse TestIamPermissions(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Bundle the individual call settings and forward to the CallOptions overload.
    var callOptions = new CallOptions(headers, deadline, cancellationToken);
    return TestIamPermissions(request, callOptions);
}
/// <summary>
/// Tests the specified permissions against the IAM access control policy
/// for a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Cloud.Iam.V1.TestIamPermissionsResponse TestIamPermissions(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_TestIamPermissions, null, options, request);
}
/// <summary>
/// Tests the specified permissions against the IAM access control policy
/// for a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
public virtual AsyncUnaryCall<global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> TestIamPermissionsAsync(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Bundle the individual call settings and forward to the CallOptions overload.
    var callOptions = new CallOptions(headers, deadline, cancellationToken);
    return TestIamPermissionsAsync(request, callOptions);
}
/// <summary>
/// Tests the specified permissions against the IAM access control policy
/// for a [ServiceAccount][google.iam.admin.v1.ServiceAccount].
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object representing the asynchronous invocation.</returns>
public virtual AsyncUnaryCall<global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> TestIamPermissionsAsync(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_TestIamPermissions, null, options, request);
}
/// <summary>
/// Queries roles that can be granted on a particular resource.
/// A role is grantable if it can be used as the role in a binding for a policy
/// for that resource.
/// </summary>
public virtual global::Google.Iam.Admin.V1.QueryGrantableRolesResponse QueryGrantableRoles(global::Google.Iam.Admin.V1.QueryGrantableRolesRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Bundle the individual call settings and forward to the CallOptions overload.
    var callOptions = new CallOptions(headers, deadline, cancellationToken);
    return QueryGrantableRoles(request, callOptions);
}
/// <summary>
/// Queries roles that can be granted on a particular resource.
/// A role is grantable if it can be used as the role in a binding for a policy
/// for that resource.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::Google.Iam.Admin.V1.QueryGrantableRolesResponse QueryGrantableRoles(global::Google.Iam.Admin.V1.QueryGrantableRolesRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_QueryGrantableRoles, null, options, request);
}
/// <summary>
/// Queries roles that can be granted on a particular resource.
/// A role is grantable if it can be used as the role in a binding for a policy
/// for that resource.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.QueryGrantableRolesResponse> QueryGrantableRolesAsync(global::Google.Iam.Admin.V1.QueryGrantableRolesRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Bundle the individual call settings and forward to the CallOptions overload.
    var callOptions = new CallOptions(headers, deadline, cancellationToken);
    return QueryGrantableRolesAsync(request, callOptions);
}
/// <summary>
/// Queries roles that can be granted on a particular resource.
/// A role is grantable if it can be used as the role in a binding for a policy
/// for that resource.
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object representing the asynchronous invocation.</returns>
public virtual AsyncUnaryCall<global::Google.Iam.Admin.V1.QueryGrantableRolesResponse> QueryGrantableRolesAsync(global::Google.Iam.Admin.V1.QueryGrantableRolesRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_QueryGrantableRoles, null, options, request);
}
/// <summary>Creates a new instance of client from the given <c>ClientBaseConfiguration</c>.</summary>
// Required override from Grpc.Core's ClientBase<T>; the gRPC infrastructure calls
// this when it needs to clone the client with a modified configuration.
protected override IAMClient NewInstance(ClientBaseConfiguration configuration)
{
return new IAMClient(configuration);
}
}
/// <summary>Creates service definition that can be registered with a server</summary>
/// <param name="serviceImpl">An object implementing the server-side handling logic.</param>
/// <returns>The service definition binding every IAM RPC method to its handler.</returns>
public static ServerServiceDefinition BindService(IAMBase serviceImpl)
{
// Each generated method descriptor is paired with the corresponding handler on
// the service implementation; the order mirrors the RPC declarations in the proto.
return ServerServiceDefinition.CreateBuilder()
.AddMethod(__Method_ListServiceAccounts, serviceImpl.ListServiceAccounts)
.AddMethod(__Method_GetServiceAccount, serviceImpl.GetServiceAccount)
.AddMethod(__Method_CreateServiceAccount, serviceImpl.CreateServiceAccount)
.AddMethod(__Method_UpdateServiceAccount, serviceImpl.UpdateServiceAccount)
.AddMethod(__Method_DeleteServiceAccount, serviceImpl.DeleteServiceAccount)
.AddMethod(__Method_ListServiceAccountKeys, serviceImpl.ListServiceAccountKeys)
.AddMethod(__Method_GetServiceAccountKey, serviceImpl.GetServiceAccountKey)
.AddMethod(__Method_CreateServiceAccountKey, serviceImpl.CreateServiceAccountKey)
.AddMethod(__Method_DeleteServiceAccountKey, serviceImpl.DeleteServiceAccountKey)
.AddMethod(__Method_SignBlob, serviceImpl.SignBlob)
.AddMethod(__Method_GetIamPolicy, serviceImpl.GetIamPolicy)
.AddMethod(__Method_SetIamPolicy, serviceImpl.SetIamPolicy)
.AddMethod(__Method_TestIamPermissions, serviceImpl.TestIamPermissions)
.AddMethod(__Method_QueryGrantableRoles, serviceImpl.QueryGrantableRoles).Build();
}
}
}
#endregion
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
// Test driver for Sse2.DivideScalar(Vector128<double>, Vector128<double>).
// Runs every invocation pattern (direct call, reflection, static/local/class/
// struct operands) when the intrinsic is supported; otherwise verifies that the
// fallback path throws PlatformNotSupportedException. Throws if any scenario
// reported a mismatch. NOTE: auto-generated from the shared template (see file
// header) — update the template, not this file.
private static void DivideScalarDouble()
{
var test = new SimpleBinaryOpTest__DivideScalarDouble();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (Sse2.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (Sse2.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (Sse2.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (Sse2.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (Sse2.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
// Exercises Sse2.DivideScalar over Vector128<double> operands via every supported
// call pattern, writing results into pinned, aligned buffers and validating each
// one. Auto-generated from the shared template (see file header).
public sealed unsafe class SimpleBinaryOpTest__DivideScalarDouble
{
// Holds the two input arrays and the output array in pinned, over-allocated
// byte buffers so that aligned pointers (needed by LoadAlignedVector128) can
// be derived; Dispose must be called to release the GC handles.
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Double[] inArray1, Double[] inArray2, Double[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Double>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Double>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Double>();
// Buffers are allocated at alignment * 2 bytes so an aligned pointer can be
// found within them; each array must fit in that space.
if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
// Pin the buffers so the aligned raw pointers below remain stable.
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Double, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Double, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
// Aligned pointers into the pinned buffers.
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
// Rounds the pointer up to the next multiple of expectedAlignment
// (expectedAlignment must be a power of two).
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
// Struct-typed operands, used to validate passing vector fields of a struct.
private struct TestStruct
{
public Vector128<Double> _fld1;
public Vector128<Double> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
// Reuses the class's static scratch arrays to fill the struct fields
// with fresh random data.
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
return testStruct;
}
// Invokes the intrinsic on this struct's fields and validates via the test class.
public void RunStructFldScenario(SimpleBinaryOpTest__DivideScalarDouble testClass)
{
var result = Sse2.DivideScalar(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
// Same as above, but loads the operands through pinned pointers.
public void RunStructFldScenario_Load(SimpleBinaryOpTest__DivideScalarDouble testClass)
{
fixed (Vector128<Double>* pFld1 = &_fld1)
fixed (Vector128<Double>* pFld2 = &_fld2)
{
var result = Sse2.DivideScalar(
Sse2.LoadVector128((Double*)(pFld1)),
Sse2.LoadVector128((Double*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
// Element counts derived from the vector size (2 doubles per Vector128).
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double);
// Shared scratch arrays used to generate operand data.
private static Double[] _data1 = new Double[Op1ElementCount];
private static Double[] _data2 = new Double[Op2ElementCount];
// Static (class-variable) operands.
private static Vector128<Double> _clsVar1;
private static Vector128<Double> _clsVar2;
// Instance-field operands.
private Vector128<Double> _fld1;
private Vector128<Double> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__DivideScalarDouble()
{
// Populate the static operands once with random doubles.
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
}
public SimpleBinaryOpTest__DivideScalarDouble()
{
Succeeded = true;
// Fill the instance-field operands, then regenerate fresh data for the
// pointer-based DataTable so the two sets are independent.
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
_dataTable = new DataTable(_data1, _data2, new Double[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Sse2.IsSupported;
public bool Succeeded { get; set; }
// Direct call with operands read from unaligned memory.
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Sse2.DivideScalar(
Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Direct call with operands loaded via Sse2.LoadVector128.
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Sse2.DivideScalar(
Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Direct call with operands loaded via Sse2.LoadAlignedVector128 (relies on
// DataTable's aligned pointers).
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Sse2.DivideScalar(
Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Invocation through reflection, operands read from unaligned memory.
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Sse2).GetMethod(nameof(Sse2.DivideScalar), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Invocation through reflection, operands loaded via LoadVector128.
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Sse2).GetMethod(nameof(Sse2.DivideScalar), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
.Invoke(null, new object[] {
Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Invocation through reflection, operands loaded via LoadAlignedVector128.
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Sse2).GetMethod(nameof(Sse2.DivideScalar), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
.Invoke(null, new object[] {
Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Operands supplied from static fields.
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Sse2.DivideScalar(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
// Operands supplied from static fields, pinned and loaded through pointers.
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<Double>* pClsVar1 = &_clsVar1)
fixed (Vector128<Double>* pClsVar2 = &_clsVar2)
{
var result = Sse2.DivideScalar(
Sse2.LoadVector128((Double*)(pClsVar1)),
Sse2.LoadVector128((Double*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
// Operands held in local variables, read unaligned.
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr);
var result = Sse2.DivideScalar(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
// Operands held in local variables, loaded via LoadVector128.
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr));
var op2 = Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr));
var result = Sse2.DivideScalar(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
// Operands held in local variables, loaded via LoadAlignedVector128.
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var op1 = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr));
var op2 = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr));
var result = Sse2.DivideScalar(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
// Operands taken from the fields of a locally constructed class instance.
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__DivideScalarDouble();
var result = Sse2.DivideScalar(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
// Same as above, with the fields pinned and loaded through pointers.
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__DivideScalarDouble();
fixed (Vector128<Double>* pFld1 = &test._fld1)
fixed (Vector128<Double>* pFld2 = &test._fld2)
{
var result = Sse2.DivideScalar(
Sse2.LoadVector128((Double*)(pFld1)),
Sse2.LoadVector128((Double*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
// Operands taken from this instance's fields.
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Sse2.DivideScalar(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
// Same as above, with the fields pinned and loaded through pointers.
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<Double>* pFld1 = &_fld1)
fixed (Vector128<Double>* pFld2 = &_fld2)
{
var result = Sse2.DivideScalar(
Sse2.LoadVector128((Double*)(pFld1)),
Sse2.LoadVector128((Double*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
// Operands taken from the fields of a local struct.
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Sse2.DivideScalar(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
// Same as above, loading the fields through raw pointers (no pinning needed:
// the struct lives on the stack).
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = Sse2.DivideScalar(
Sse2.LoadVector128((Double*)(&test._fld1)),
Sse2.LoadVector128((Double*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
// Delegates to TestStruct so the intrinsic runs against struct instance fields.
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
// On hardware without SSE2 support the intrinsic must throw
// PlatformNotSupportedException; anything else is a failure.
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
// Copies vector operands and the raw result into arrays for validation.
private void ValidateResult(Vector128<Double> op1, Vector128<Double> op2, void* result, [CallerMemberName] string method = "")
{
Double[] inArray1 = new Double[Op1ElementCount];
Double[] inArray2 = new Double[Op2ElementCount];
Double[] outArray = new Double[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Double>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
// Copies pointer-based operands and the raw result into arrays for validation.
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Double[] inArray1 = new Double[Op1ElementCount];
Double[] inArray2 = new Double[Op2ElementCount];
Double[] outArray = new Double[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Double>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Double>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Double>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
// Checks scalar-divide semantics: element 0 must equal left[0] / right[0];
// the remaining elements must be passed through unchanged from the left
// operand. Bit-level comparison avoids tolerance issues with NaN/signed zero.
private void ValidateResult(Double[] left, Double[] right, Double[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (BitConverter.DoubleToInt64Bits(left[0] / right[0]) != BitConverter.DoubleToInt64Bits(result[0]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (BitConverter.DoubleToInt64Bits(left[i]) != BitConverter.DoubleToInt64Bits(result[i]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Sse2)}.{nameof(Sse2.DivideScalar)}<Double>(Vector128<Double>, Vector128<Double>): {method} failed:");
TestLibrary.TestFramework.LogInformation($"    left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($"   right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($"  result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
namespace Nancy.Demo.Hosting.Aspnet
{
using System;
using System.Linq;
using Nancy.Demo.Hosting.Aspnet.Metadata;
using Nancy.Demo.Hosting.Aspnet.Models;
using Nancy.Routing;
using Security;
// Demo module showcasing Nancy features: route registration, view engines
// (Razor, Spark, Nustache, DotLiquid, SSVE), content negotiation, sessions,
// CSRF protection, file upload, and error handling. Each route below is a
// self-contained example.
public class MainModule : NancyModule
{
public MainModule(IRouteCacheProvider routeCacheProvider)
{
// Root: renders the route cache so all registered routes can be inspected.
Get["/"] = x => {
return View["routes", routeCacheProvider.GetCache()];
};
// Localized text resources accessed through the dynamic Context.Text API.
Get["/texts"] = parameters => {
return (string)this.Context.Text.Menu.Home;
};
// Route metadata retrieval examples.
Get["/meta"] = parameters =>
{
return Negotiate
.WithModel(routeCacheProvider.GetCache().RetrieveMetadata<MyRouteMetadata>())
.WithView("meta");
};
Get["/uber-meta"] = parameters =>
{
return Negotiate
.WithModel(routeCacheProvider.GetCache().RetrieveMetadata<MyUberRouteMetadata>().OfType<MyUberRouteMetadata>())
.WithView("uber-meta");
};
Get["/text"] = x =>
{
var value = (string)this.Context.Text.Menu.Home;
return string.Concat("Value of 'Home' resource key in the Menu resource file: ", value);
};
// Content negotiation: model, per-media-range model, view, and custom header.
Get["/negotiated"] = parameters => {
return Negotiate
.WithModel(new RatPack {FirstName = "Nancy "})
.WithMediaRangeModel("text/html", new RatPack {FirstName = "Nancy fancy pants"})
.WithView("negotiatedview")
.WithHeader("X-Custom", "SomeValue");
};
// Route parameter capture ({name} is available on the parameters object).
Get["/user/{name}"] = parameters =>
{
return (string)parameters.name;
};
// Same path registered twice with different filters; only the route whose
// filter returns true is invoked.
Get["/filtered", r => true] = x => {
return "This is a route with a filter that always returns true.";
};
Get["/filtered", r => false] = x => {
return "This is also a route, but filtered out so should never be hit.";
};
// Regex route with a named capture group plus a normal parameter.
Get[@"/(?<foo>\d{2,4})/{bar}"] = x => {
return string.Format("foo: {0}<br/>bar: {1}", x.foo, x.bar);
};
Get["/test"] = x => {
return "Test";
};
// View-engine examples (each resolved by its file extension / engine).
Get["/nustache"] = parameters => {
return View["nustache", new { name = "Nancy", value = 1000000 }];
};
Get["/dotliquid"] = parameters => {
return View["dot", new { name = "dot" }];
};
Get["/javascript"] = x => {
return View["javascript.html"];
};
Get["/static"] = x => {
return View["static"];
};
Get["/razor"] = x => {
var model = new RatPack { FirstName = "Frank" };
return View["razor.cshtml", model];
};
Get["/razor-divzero"] = x =>
{
var model = new { FirstName = "Frank", Number = 22 };
return View["razor-divzero.cshtml", model];
};
Get["/razorError"] = x =>
{
var model = new RatPack { FirstName = "Frank" };
return View["razor-error.cshtml", model];
};
Get["/razor-simple"] = x =>
{
var model = new RatPack { FirstName = "Frank" };
return View["razor-simple.cshtml", model];
};
Get["/razor-dynamic"] = x =>
{
return View["razor.cshtml", new { FirstName = "Frank" }];
};
Get["/razor-cs-strong"] = x =>
{
return View["razor-strong.cshtml", new RatPack { FirstName = "Frank" }];
};
Get["/razor-vb-strong"] = x =>
{
return View["razor-strong.vbhtml", new RatPack { FirstName = "Frank" }];
};
Get["/razor2"] = _ => new Razor2();
Get["/ssve"] = x =>
{
var model = new RatPack { FirstName = "You" };
return View["ssve.sshtml", model];
};
// View resolved by model-name convention rather than an explicit view name.
Get["/viewmodelconvention"] = x => {
return View[new SomeViewModel()];
};
Get["/spark"] = x => {
var model = new RatPack { FirstName = "Bright" };
return View["spark.spark", model];
};
Get["/spark-anon"] = x =>
{
var model = new { FirstName = "Anonymous" };
return View["anon.spark", model];
};
// Serialization helpers.
Get["/json"] = x => {
var model = new RatPack { FirstName = "Andy" };
return this.Response.AsJson(model);
};
Get["/xml"] = x => {
var model = new RatPack { FirstName = "Andy" };
return this.Response.AsXml(model);
};
// Session examples: a string value and an arbitrary serializable object.
Get["/session"] = x => {
var value = Session["moo"] ?? "";
var output = "Current session value is: " + value;
if (String.IsNullOrEmpty(value.ToString()))
{
Session["moo"] = "I've created a session!";
}
return output;
};
Get["/sessionObject"] = x => {
var value = Session["baa"] ?? "null";
var output = "Current session value is: " + value;
if (value.ToString() == "null")
{
Session["baa"] = new Payload(27, true, "some random string value");
}
return output;
};
// Deliberate failure to demonstrate error handling.
Get["/error"] = x =>
{
throw new NotSupportedException("This is an exception thrown in a route.");
};
Get["/customErrorHandler"] = _ => HttpStatusCode.ImATeapot;
// CSRF: token without expiry (session token) and with a 20-second expiry.
Get["/csrf"] = x => this.View["csrf", new { Blurb = "CSRF without an expiry using the 'session' token" }];
Post["/csrf"] = x =>
{
this.ValidateCsrfToken();
return string.Format("Hello {0}!", this.Request.Form.Name);
};
Get["/csrfWithExpiry"] = x =>
{
// Create a new one because we have an expiry to check
this.CreateNewCsrfToken();
return this.View["csrf", new { Blurb = "You have 20 seconds to submit the page.. TICK TOCK :-)" }];
};
Post["/csrfWithExpiry"] = x =>
{
this.ValidateCsrfToken(TimeSpan.FromSeconds(20));
return string.Format("Hello {0}!", this.Request.Form.Name);
};
Get["/viewNotFound"] = _ => View["I-do-not-exist"];
// File upload: echoes details of the first uploaded file, if any.
Get["/fileupload"] = x =>
{
return View["FileUpload", new { Posted = "Nothing" }];
};
Post["/fileupload"] = x =>
{
var file = this.Request.Files.FirstOrDefault();
string fileDetails = "Nothing";
if (file != null)
{
fileDetails = string.Format("{3} - {0} ({1}) {2}bytes", file.Name, file.ContentType, file.Value.Length, file.Key);
}
return View["FileUpload", new { Posted = fileDetails }];
};
// Named route (referenceable by name, e.g. for link generation).
Get["NamedRoute", "/namedRoute"] = _ => "I am a named route!";
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*=============================================================================
**
**
**
** Purpose: Domains represent an application within the runtime. Objects can
** not be shared between domains and each domain can be configured
** independently.
**
**
=============================================================================*/
namespace System
{
using System;
using System.Reflection;
using System.Runtime;
using System.Runtime.CompilerServices;
using System.Security;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Runtime.InteropServices;
using System.Reflection.Emit;
using CultureInfo = System.Globalization.CultureInfo;
using System.IO;
using AssemblyHashAlgorithm = System.Configuration.Assemblies.AssemblyHashAlgorithm;
using System.Text;
using System.Runtime.ConstrainedExecution;
using System.Runtime.Versioning;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.ExceptionServices;
internal sealed class AppDomain
{
        // Domain security information
        // These fields initialized from the other side only. (NOTE: order
        // of these fields cannot be changed without changing the layout in
        // the EE- AppDomainBaseObject in this case)

        // Optional host-supplied manager for this domain; created lazily by
        // CreateAppDomainManager() and exposed via the DomainManager property.
        private AppDomainManager _domainManager;
        // Backing store for SetData/GetData; allocated on first use (see LocalStore).
        private Dictionary<String, Object> _LocalStore;
        // Configuration for this domain; assigned by SetupFusionStore and required
        // to be non-null before FusionStore is read.
        private AppDomainSetup _FusionStore;
        // Raised (via OnAssemblyLoadEvent, called from the VM) after an assembly loads.
        public event AssemblyLoadEventHandler AssemblyLoad;

        private ResolveEventHandler _TypeResolve;
        // Raised when type resolution fails; a handler may return the assembly that
        // defines the type. Add/remove are serialized with lock(this) — NOTE(review):
        // locking on 'this' is the historical pattern in this file; kept as-is.
        public event ResolveEventHandler TypeResolve
        {
            add
            {
                lock (this)
                {
                    _TypeResolve += value;
                }
            }
            remove
            {
                lock (this)
                {
                    _TypeResolve -= value;
                }
            }
        }

        private ResolveEventHandler _ResourceResolve;
        // Raised when a resource cannot be found; a handler may return the assembly
        // that contains the resource. Same lock(this) serialization as TypeResolve.
        public event ResolveEventHandler ResourceResolve
        {
            add
            {
                lock (this)
                {
                    _ResourceResolve += value;
                }
            }
            remove
            {
                lock (this)
                {
                    _ResourceResolve -= value;
                }
            }
        }

        private ResolveEventHandler _AssemblyResolve;
        // Raised when assembly resolution fails; a handler may return the resolved
        // assembly. Same lock(this) serialization as TypeResolve.
        public event ResolveEventHandler AssemblyResolve
        {
            add
            {
                lock (this)
                {
                    _AssemblyResolve += value;
                }
            }
            remove
            {
                lock (this)
                {
                    _AssemblyResolve -= value;
                }
            }
        }

        // Backing delegates for the ProcessExit / DomainUnload / UnhandledException
        // events declared further down.
        private EventHandler _processExit;
        private EventHandler _domainUnload;
        private UnhandledExceptionEventHandler _unhandledException;
        // The compat flags are set at domain creation time to indicate that the given breaking
        // changes (named in the strings) should not be used in this domain. We only use the
        // keys, the values are ignored.
        private Dictionary<String, object> _compatFlags;
        // Delegate that will hold references to FirstChance exception notifications
        private EventHandler<FirstChanceExceptionEventArgs> _firstChanceException;
        private IntPtr _pDomain; // this is an unmanaged pointer (AppDomain * m_pDomain) used from the VM.
        // Set by InitializeCompatibilityFlags(); must only transition false -> true once.
        private bool _compatFlagsInitialized;
        internal const String TargetFrameworkNameAppCompatSetting = "TargetFrameworkName";
#if FEATURE_APPX
        // Cached AppX flags; fetched from the VM once, on first access of Flags.
        private static APPX_FLAGS s_flags;
        //
        // Keep in sync with vm\appdomainnative.cpp
        //
        [Flags]
        private enum APPX_FLAGS
        {
            APPX_FLAGS_INITIALIZED = 0x01,
            APPX_FLAGS_APPX_MODEL = 0x02,
            APPX_FLAGS_APPX_DESIGN_MODE = 0x04,
            APPX_FLAGS_APPX_MASK = APPX_FLAGS_APPX_MODEL |
                                   APPX_FLAGS_APPX_DESIGN_MODE,
        }
        // Lazily fetches and caches the process AppX flags. A zero value means
        // "not yet fetched"; the assert relies on the VM always setting
        // APPX_FLAGS_INITIALIZED, so a cached value is never mistaken for "unset".
        private static APPX_FLAGS Flags
        {
            get
            {
                if (s_flags == 0)
                    s_flags = nGetAppXFlags();
                Debug.Assert(s_flags != 0);
                return s_flags;
            }
        }
#endif // FEATURE_APPX
#if FEATURE_APPX
        // QCall into the VM to read the AppX flags for the current process.
        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        [return: MarshalAs(UnmanagedType.I4)]
        private static extern APPX_FLAGS nGetAppXFlags();
#endif
/// <summary>
/// Get a handle used to make a call into the VM pointing to this domain
/// </summary>
internal AppDomainHandle GetNativeHandle()
{
// This should never happen under normal circumstances. However, there ar ways to create an
// uninitialized object through remoting, etc.
if (_pDomain.IsNull())
{
throw new InvalidOperationException(SR.Argument_InvalidHandle);
}
return new AppDomainHandle(_pDomain);
}
/// <summary>
/// If this AppDomain is configured to have an AppDomain manager then create the instance of it.
/// This method is also called from the VM to create the domain manager in the default domain.
/// </summary>
private void CreateAppDomainManager()
{
Debug.Assert(_domainManager == null, "_domainManager == null");
AppDomainSetup adSetup = FusionStore;
String trustedPlatformAssemblies = (String)(GetData("TRUSTED_PLATFORM_ASSEMBLIES"));
if (trustedPlatformAssemblies != null)
{
String platformResourceRoots = (String)(GetData("PLATFORM_RESOURCE_ROOTS"));
if (platformResourceRoots == null)
{
platformResourceRoots = String.Empty;
}
String appPaths = (String)(GetData("APP_PATHS"));
if (appPaths == null)
{
appPaths = String.Empty;
}
String appNiPaths = (String)(GetData("APP_NI_PATHS"));
if (appNiPaths == null)
{
appNiPaths = String.Empty;
}
String appLocalWinMD = (String)(GetData("APP_LOCAL_WINMETADATA"));
if (appLocalWinMD == null)
{
appLocalWinMD = String.Empty;
}
SetupBindingPaths(trustedPlatformAssemblies, platformResourceRoots, appPaths, appNiPaths, appLocalWinMD);
}
InitializeCompatibilityFlags();
}
        /// <summary>
        /// Initialize the compatibility flags to non-NULL values.
        /// This method is also called from the VM when the default domain doesn't have a domain manager.
        /// </summary>
        private void InitializeCompatibilityFlags()
        {
            AppDomainSetup adSetup = FusionStore;
            // set up shim flags regardless of whether we create a DomainManager in this method.
            if (adSetup.GetCompatibilityFlags() != null)
            {
                _compatFlags = new Dictionary<String, object>(adSetup.GetCompatibilityFlags(), StringComparer.OrdinalIgnoreCase);
            }
            // for perf, we don't initialize the _compatFlags dictionary when we don't need to. However, we do need to make a
            // note that we've run this method, because IsCompatibilityFlagsSet needs to return different values for the
            // case where the compat flags have been setup.
            Debug.Assert(!_compatFlagsInitialized);
            _compatFlagsInitialized = true;
            CompatibilitySwitches.InitializeSwitches();
        }
        /// <summary>
        /// Returns whether the current AppDomain follows the AppX rules.
        /// Always false when FEATURE_APPX is not compiled in.
        /// </summary>
        [Pure]
        internal static bool IsAppXModel()
        {
#if FEATURE_APPX
            return (Flags & APPX_FLAGS.APPX_FLAGS_APPX_MODEL) != 0;
#else
            return false;
#endif
        }
        /// <summary>
        /// Returns the setting of the AppXDevMode config switch.
        /// True only when BOTH the AppX-model and design-mode bits are set.
        /// </summary>
        [Pure]
        internal static bool IsAppXDesignMode()
        {
#if FEATURE_APPX
            return (Flags & APPX_FLAGS.APPX_FLAGS_APPX_MASK) == (APPX_FLAGS.APPX_FLAGS_APPX_MODEL | APPX_FLAGS.APPX_FLAGS_APPX_DESIGN_MODE);
#else
            return false;
#endif
        }
        /// <summary>
        /// Checks (and throws on failure) if the domain supports Assembly.LoadFrom.
        /// No-op outside the AppX model.
        /// </summary>
        [Pure]
        internal static void CheckLoadFromSupported()
        {
#if FEATURE_APPX
            if (IsAppXModel())
                throw new NotSupportedException(SR.Format(SR.NotSupported_AppX, "Assembly.LoadFrom"));
#endif
        }
        /// <summary>
        /// Checks (and throws on failure) if the domain supports Assembly.LoadFile.
        /// No-op outside the AppX model.
        /// </summary>
        [Pure]
        internal static void CheckLoadFileSupported()
        {
#if FEATURE_APPX
            if (IsAppXModel())
                throw new NotSupportedException(SR.Format(SR.NotSupported_AppX, "Assembly.LoadFile"));
#endif
        }
        /// <summary>
        /// Checks (and throws on failure) if the domain supports Assembly.Load(byte[] ...).
        /// No-op outside the AppX model.
        /// </summary>
        [Pure]
        internal static void CheckLoadByteArraySupported()
        {
#if FEATURE_APPX
            if (IsAppXModel())
                throw new NotSupportedException(SR.Format(SR.NotSupported_AppX, "Assembly.Load(byte[], ...)"));
#endif
        }
public AppDomainManager DomainManager
{
get
{
return _domainManager;
}
}
public static AppDomain CurrentDomain
{
get
{
return Thread.GetDomain();
}
}
public String BaseDirectory
{
get
{
return FusionStore.ApplicationBase;
}
}
public override String ToString()
{
StringBuilder sb = StringBuilderCache.Acquire();
String fn = nGetFriendlyName();
if (fn != null)
{
sb.Append(SR.Loader_Name + fn);
sb.Append(Environment.NewLine);
}
return StringBuilderCache.GetStringAndRelease(sb);
}
        // VM call that enumerates the assemblies loaded into this domain.
        [MethodImpl(MethodImplOptions.InternalCall)]
        private extern Assembly[] nGetAssemblies(bool forIntrospection);
        // Thin managed wrapper over nGetAssemblies.
        internal Assembly[] GetAssemblies(bool forIntrospection)
        {
            return nGetAssemblies(forIntrospection);
        }
        // this is true when we've removed the handles etc so really can't do anything
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        internal extern bool IsUnloadingForcedFinalize();
        // this is true when we've just started going through the finalizers and are forcing objects to finalize
        // so must be aware that certain infrastructure may have gone away
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        public extern bool IsFinalizingForUnload();
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        internal static extern void PublishAnonymouslyHostedDynamicMethodsAssembly(RuntimeAssembly assemblyHandle);
        /// <summary>
        /// Stores <paramref name="data"/> in the domain-local store under
        /// <paramref name="name"/>, replacing any previous entry.
        /// Locks the store's SyncRoot so writes never interleave with GetData reads.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="name"/> is null.</exception>
        public void SetData(string name, object data)
        {
            if (name == null)
                throw new ArgumentNullException(nameof(name));
            lock (((ICollection)LocalStore).SyncRoot)
            {
                LocalStore[name] = data;
            }
        }
[Pure]
public Object GetData(string name)
{
if (name == null)
throw new ArgumentNullException(nameof(name));
object data;
lock (((ICollection)LocalStore).SyncRoot)
{
LocalStore.TryGetValue(name, out data);
}
if (data == null)
return null;
return data;
}
        // Deprecated: fiber-scheduled managed threads do not have a stable OS thread id.
        [Obsolete("AppDomain.GetCurrentThreadId has been deprecated because it does not provide a stable Id when managed threads are running on fibers (aka lightweight threads). To get a stable identifier for a managed thread, use the ManagedThreadId property on Thread. http://go.microsoft.com/fwlink/?linkid=14202", false)]
        [DllImport(Interop.Libraries.Kernel32)]
        public static extern int GetCurrentThreadId();
        // AppDomain instances are created by the VM only; managed construction is blocked.
        private AppDomain()
        {
            throw new NotSupportedException(SR.NotSupported_Constructor);
        }
        // VM call that creates the native fusion/binding context for this domain.
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        internal extern void nCreateContext();
        // QCall into the VM that installs the binder's probing paths.
        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        private static extern void nSetupBindingPaths(String trustedPlatformAssemblies, String platformResourceRoots, String appPath, String appNiPaths, String appLocalWinMD);
        // Thin managed wrapper over nSetupBindingPaths.
        internal void SetupBindingPaths(String trustedPlatformAssemblies, String platformResourceRoots, String appPath, String appNiPaths, String appLocalWinMD)
        {
            nSetupBindingPaths(trustedPlatformAssemblies, platformResourceRoots, appPath, appNiPaths, appLocalWinMD);
        }
        // VM call that returns this domain's friendly name (may be null).
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern String nGetFriendlyName();
        // support reliability for certain event handlers, if the target
        // methods also participate in this discipline. If caller passes
        // an existing MulticastDelegate, then we could use a MDA to indicate
        // that reliability is not guaranteed. But if it is a single cast
        // scenario, we can make it work.

        // Raised when the process is exiting. The handler is pre-prepared
        // (PrepareContractedDelegate) BEFORE being published under lock(this) so it
        // can run reliably during shutdown. Null subscriptions are silently ignored.
        public event EventHandler ProcessExit
        {
            add
            {
                if (value != null)
                {
                    RuntimeHelpers.PrepareContractedDelegate(value);
                    lock (this)
                        _processExit += value;
                }
            }
            remove
            {
                lock (this)
                    _processExit -= value;
            }
        }
        // Raised when this domain is unloading; same prepare-then-publish pattern
        // as ProcessExit.
        public event EventHandler DomainUnload
        {
            add
            {
                if (value != null)
                {
                    RuntimeHelpers.PrepareContractedDelegate(value);
                    lock (this)
                        _domainUnload += value;
                }
            }
            remove
            {
                lock (this)
                    _domainUnload -= value;
            }
        }
        // Raised for exceptions that are not caught anywhere on a thread; same
        // prepare-then-publish pattern as ProcessExit.
        public event UnhandledExceptionEventHandler UnhandledException
        {
            add
            {
                if (value != null)
                {
                    RuntimeHelpers.PrepareContractedDelegate(value);
                    lock (this)
                        _unhandledException += value;
                }
            }
            remove
            {
                lock (this)
                    _unhandledException -= value;
            }
        }
        // This is the event managed code can wireup against to be notified
        // about first chance exceptions.
        //
        // To register/unregister the callback, the code must be SecurityCritical.
        public event EventHandler<FirstChanceExceptionEventArgs> FirstChanceException
        {
            add
            {
                if (value != null)
                {
                    RuntimeHelpers.PrepareContractedDelegate(value);
                    lock (this)
                        _firstChanceException += value;
                }
            }
            remove
            {
                lock (this)
                    _firstChanceException -= value;
            }
        }
private void OnAssemblyLoadEvent(RuntimeAssembly LoadedAssembly)
{
AssemblyLoadEventHandler eventHandler = AssemblyLoad;
if (eventHandler != null)
{
AssemblyLoadEventArgs ea = new AssemblyLoadEventArgs(LoadedAssembly);
eventHandler(this, ea);
}
}
// This method is called by the VM.
private RuntimeAssembly OnResourceResolveEvent(RuntimeAssembly assembly, String resourceName)
{
ResolveEventHandler eventHandler = _ResourceResolve;
if (eventHandler == null)
return null;
Delegate[] ds = eventHandler.GetInvocationList();
int len = ds.Length;
for (int i = 0; i < len; i++)
{
Assembly asm = ((ResolveEventHandler)ds[i])(this, new ResolveEventArgs(resourceName, assembly));
RuntimeAssembly ret = GetRuntimeAssembly(asm);
if (ret != null)
return ret;
}
return null;
}
// This method is called by the VM
private RuntimeAssembly OnTypeResolveEvent(RuntimeAssembly assembly, String typeName)
{
ResolveEventHandler eventHandler = _TypeResolve;
if (eventHandler == null)
return null;
Delegate[] ds = eventHandler.GetInvocationList();
int len = ds.Length;
for (int i = 0; i < len; i++)
{
Assembly asm = ((ResolveEventHandler)ds[i])(this, new ResolveEventArgs(typeName, assembly));
RuntimeAssembly ret = GetRuntimeAssembly(asm);
if (ret != null)
return ret;
}
return null;
}
// This method is called by the VM.
private RuntimeAssembly OnAssemblyResolveEvent(RuntimeAssembly assembly, String assemblyFullName)
{
ResolveEventHandler eventHandler = _AssemblyResolve;
if (eventHandler == null)
{
return null;
}
Delegate[] ds = eventHandler.GetInvocationList();
int len = ds.Length;
for (int i = 0; i < len; i++)
{
Assembly asm = ((ResolveEventHandler)ds[i])(this, new ResolveEventArgs(assemblyFullName, assembly));
RuntimeAssembly ret = GetRuntimeAssembly(asm);
if (ret != null)
return ret;
}
return null;
}
#if FEATURE_COMINTEROP
        // Called by VM - code:CLRPrivTypeCacheWinRT::RaiseDesignerNamespaceResolveEvent
        // Forwards the designer-namespace resolve request to the WinRT metadata helper.
        private string[] OnDesignerNamespaceResolveEvent(string namespaceName)
        {
            return System.Runtime.InteropServices.WindowsRuntime.WindowsRuntimeMetadata.OnDesignerNamespaceResolveEvent(this, namespaceName);
        }
#endif // FEATURE_COMINTEROP
        // The setup information for this domain. Asserts that SetupFusionStore /
        // SetupDomain has already run, since _FusionStore is only assigned there.
        internal AppDomainSetup FusionStore
        {
            get
            {
                Debug.Assert(_FusionStore != null,
                             "Fusion store has not been correctly setup in this domain");
                return _FusionStore;
            }
        }
internal static RuntimeAssembly GetRuntimeAssembly(Assembly asm)
{
if (asm == null)
return null;
RuntimeAssembly rtAssembly = asm as RuntimeAssembly;
if (rtAssembly != null)
return rtAssembly;
AssemblyBuilder ab = asm as AssemblyBuilder;
if (ab != null)
return ab.InternalAssembly;
return null;
}
private Dictionary<String, Object> LocalStore
{
get
{
if (_LocalStore != null)
return _LocalStore;
else
{
_LocalStore = new Dictionary<String, Object>();
return _LocalStore;
}
}
}
        // QCall into the VM that installs the native DLL probing directories.
        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        private static extern void nSetNativeDllSearchDirectories(string paths);
        // Installs 'info' as this domain's setup. When the host supplied no
        // ApplicationBase, defaults are derived from the module file name first.
        // NOTE(review): 'oldInfo' is unused in this implementation — presumably kept
        // for signature compatibility with callers; confirm before removing.
        private void SetupFusionStore(AppDomainSetup info, AppDomainSetup oldInfo)
        {
            Debug.Assert(info != null);
            if (info.ApplicationBase == null)
            {
                info.SetupDefaults(RuntimeEnvironment.GetModuleFileName(), imageLocationAlreadyNormalized: true);
            }
            nCreateContext();
            // This must be the last action taken: FusionStore treats a non-null
            // _FusionStore as "fully set up".
            _FusionStore = info;
        }
// Used to switch into other AppDomain and call SetupRemoteDomain.
// We cannot simply call through the proxy, because if there
// are any remoting sinks registered, they can add non-mscorlib
// objects to the message (causing an assembly load exception when
// we try to deserialize it on the other side)
private static object PrepareDataForSetup(String friendlyName,
AppDomainSetup setup,
string[] propertyNames,
string[] propertyValues)
{
AppDomainSetup newSetup = new AppDomainSetup(setup, false);
// Remove the special AppDomainCompatSwitch entries from the set of name value pairs
// And add them to the AppDomainSetup
//
// This is only supported on CoreCLR through ICLRRuntimeHost2.CreateAppDomainWithManager
// Desktop code should use System.AppDomain.CreateDomain() or
// System.AppDomainManager.CreateDomain() and add the flags to the AppDomainSetup
List<String> compatList = new List<String>();
if (propertyNames != null && propertyValues != null)
{
for (int i = 0; i < propertyNames.Length; i++)
{
if (String.Compare(propertyNames[i], "AppDomainCompatSwitch", StringComparison.OrdinalIgnoreCase) == 0)
{
compatList.Add(propertyValues[i]);
propertyNames[i] = null;
propertyValues[i] = null;
}
}
if (compatList.Count > 0)
{
newSetup.SetCompatibilitySwitches(compatList);
}
}
return new Object[]
{
friendlyName,
newSetup,
propertyNames,
propertyValues
};
} // PrepareDataForSetup
        // Bootstraps a newly created domain: applies host-supplied name/value
        // properties, installs the fusion store and friendly name, and creates the
        // AppDomainManager. 'arg' is the object array produced by PrepareDataForSetup.
        // Statement order here is load-bearing (see the native-DLL comment below);
        // do not reorder the two property loops.
        private static Object Setup(Object arg)
        {
            Object[] args = (Object[])arg;
            String friendlyName = (String)args[0];
            AppDomainSetup setup = (AppDomainSetup)args[1];
            string[] propertyNames = (string[])args[2]; // can contain null elements
            string[] propertyValues = (string[])args[3]; // can contain null elements
            AppDomain ad = AppDomain.CurrentDomain;
            AppDomainSetup newSetup = new AppDomainSetup(setup, false);
            if (propertyNames != null && propertyValues != null)
            {
                for (int i = 0; i < propertyNames.Length; i++)
                {
                    // We want to set native dll probing directories before any P/Invokes have a
                    // chance to fire. The Path class, for one, has P/Invokes.
                    if (propertyNames[i] == "NATIVE_DLL_SEARCH_DIRECTORIES")
                    {
                        if (propertyValues[i] == null)
                            throw new ArgumentNullException("NATIVE_DLL_SEARCH_DIRECTORIES");
                        string paths = propertyValues[i];
                        // NOTE(review): an empty list breaks out of the scan entirely
                        // (rather than continue) — presumably intentional; confirm.
                        if (paths.Length == 0)
                            break;
                        nSetNativeDllSearchDirectories(paths);
                    }
                }
                for (int i = 0; i < propertyNames.Length; i++)
                {
                    if (propertyNames[i] == "APPBASE") // make sure in sync with Fusion
                    {
                        if (propertyValues[i] == null)
                            throw new ArgumentNullException("APPBASE");
                        if (PathInternal.IsPartiallyQualified(propertyValues[i]))
                            throw new ArgumentException(SR.Argument_AbsolutePathRequired);
                        newSetup.ApplicationBase = NormalizePath(propertyValues[i], fullCheck: true);
                    }
                    else if (propertyNames[i] == "TRUSTED_PLATFORM_ASSEMBLIES" ||
                        propertyNames[i] == "PLATFORM_RESOURCE_ROOTS" ||
                        propertyNames[i] == "APP_PATHS" ||
                        propertyNames[i] == "APP_NI_PATHS")
                    {
                        // Path-list properties are validated and normalized before storage.
                        string values = propertyValues[i];
                        if (values == null)
                            throw new ArgumentNullException(propertyNames[i]);
                        ad.SetData(propertyNames[i], NormalizeAppPaths(values));
                    }
                    else if (propertyNames[i] != null)
                    {
                        ad.SetData(propertyNames[i], propertyValues[i]); // just propagate
                    }
                }
            }
            ad.SetupFusionStore(newSetup, null); // makes FusionStore a ref to newSetup
            // technically, we don't need this, newSetup refers to the same object as FusionStore
            // but it's confusing since it isn't immediately obvious whether we have a ref or a copy
            AppDomainSetup adSetup = ad.FusionStore;
            // set up the friendly name
            ad.nSetupFriendlyName(friendlyName);
            ad.CreateAppDomainManager(); // could modify FusionStore's object
            return null;
        }
private static string NormalizeAppPaths(string values)
{
int estimatedLength = values.Length + 1; // +1 for extra separator temporarily added at end
StringBuilder sb = StringBuilderCache.Acquire(estimatedLength);
for (int pos = 0; pos < values.Length; pos++)
{
string path;
int nextPos = values.IndexOf(Path.PathSeparator, pos);
if (nextPos == -1)
{
path = values.Substring(pos);
pos = values.Length - 1;
}
else
{
path = values.Substring(pos, nextPos - pos);
pos = nextPos;
}
// Skip empty directories
if (path.Length == 0)
continue;
if (PathInternal.IsPartiallyQualified(path))
throw new ArgumentException(SR.Argument_AbsolutePathRequired);
string appPath = NormalizePath(path, fullCheck: true);
sb.Append(appPath);
sb.Append(Path.PathSeparator);
}
// Strip the last separator
if (sb.Length > 0)
{
sb.Remove(sb.Length - 1, 1);
}
return StringBuilderCache.GetStringAndRelease(sb);
}
        // Resolves 'path' to its absolute, normalized form. NOTE(review): 'fullCheck'
        // is accepted for signature compatibility but not consulted —
        // Path.GetFullPath always performs the full normalization here.
        internal static string NormalizePath(string path, bool fullCheck)
        {
            return Path.GetFullPath(path);
        }
        // This routine is called from unmanaged code to
        // set the default fusion context.
        // NOTE(review): the parameters are unused in this implementation — presumably
        // retained for the VM's calling convention; confirm before changing.
        private void SetupDomain(bool allowRedirects, String path, String configFile, String[] propertyNames, String[] propertyValues)
        {
            // It is possible that we could have multiple threads initializing
            // the default domain. We will just take the winner of these two.
            // (eg. one thread doing a com call and another doing attach for IJW)
            lock (this)
            {
                if (_FusionStore == null)
                {
                    AppDomainSetup setup = new AppDomainSetup();
                    // always use internet permission set
                    SetupFusionStore(setup, null);
                }
            }
        }
        // VM call that stores the domain's friendly name on the native side.
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void nSetupFriendlyName(string friendlyName);
        // Returns a defensive COPY of the setup information so callers cannot
        // mutate this domain's live configuration.
        public AppDomainSetup SetupInformation
        {
            get
            {
                return new AppDomainSetup(FusionStore, true);
            }
        }
        // VM call: returns the interned instance of 'str' if one exists, else null.
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        internal extern String IsStringInterned(String str);
        // VM call: interns 'str' if needed and returns the interned instance.
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        internal extern String GetOrInternString(String str);
public bool IsFullyTrusted
{
get
{
return true;
}
}
public Int32 Id
{
get
{
return GetId();
}
}
[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern Int32 GetId();
}
    /// <summary>
    /// Handle used to marshal an AppDomain to the VM (eg QCall). When marshaled via a QCall, the target
    /// method in the VM will receive a QCall::AppDomainHandle parameter.
    /// </summary>
    internal struct AppDomainHandle
    {
        // Raw unmanaged pointer to the native AppDomain object.
        private IntPtr m_appDomainHandle;
        // Note: generally an AppDomainHandle should not be directly constructed, instead the
        // code:System.AppDomain.GetNativeHandle method should be called to get the handle for a specific
        // AppDomain.
        internal AppDomainHandle(IntPtr domainHandle)
        {
            m_appDomainHandle = domainHandle;
        }
    }
}
| |
//#define TRACEDEBUG
//#define TRACEDEBUGTICKS
//#define TRACELOG
using System;
using System.Collections.Generic;
using System.Linq;
using System.ComponentModel;
using System.IO;
using System.Net;
using System.Net.Mime;
using W3C.Soap;
using Microsoft.Ccr.Core;
using Microsoft.Dss.Core;
using Microsoft.Dss.Core.Attributes;
using Microsoft.Dss.ServiceModel.Dssp;
using Microsoft.Dss.Services.Serializer;
using Microsoft.Dss.ServiceModel.DsspServiceBase;
using Microsoft.Dss.Core.DsspHttp;
using Microsoft.Dss.Core.DsspHttpUtilities;
using bumper = Microsoft.Robotics.Services.ContactSensor.Proxy;
//using bumper = TrackRoamer.Robotics.Services.TrackRoamerServices.Bumper.Proxy;
using drive = Microsoft.Robotics.Services.Drive.Proxy;
using trdrive = TrackRoamer.Robotics.Services.TrackRoamerDrive.Proxy;
using encoder = Microsoft.Robotics.Services.Encoder.Proxy;
using sicklrf = Microsoft.Robotics.Services.Sensors.SickLRF.Proxy;
using powerbrick = TrackRoamer.Robotics.Services.TrackRoamerBrickPower.Proxy;
using proxibrick = TrackRoamer.Robotics.Services.TrackRoamerBrickProximityBoard.Proxy;
using pololumaestro = TrackRoamer.Robotics.Hardware.PololuMaestroService.Proxy;
using dssp = Microsoft.Dss.ServiceModel.Dssp;
using TrackRoamer.Robotics.LibMapping;
using TrackRoamer.Robotics.Utility.LibSystem;
namespace TrackRoamer.Robotics.Services.TrackRoamerBehaviors
{
/// <summary>
/// takes care of all the DSS Service plumbing to allow derived class handle higher level behavior related code
/// </summary>
partial class TrackRoamerBehaviorsService : DsspServiceBase
{
        #region Unit Testing
        //protected bool _doUnitTest = true; // "true" disables laser and calls simple behaviors like "Straight" or "Square" to verify that the plumbing works.
        //protected bool _doDecisionStraightForward = true; // "true" disables Decide so that it always decides to just move straight forward. Useful for timing evaluation and bumper debugging.
        protected bool _doUnitTest = false; // "true" disables laser and calls simple behaviors like "Straight" or "Square" to verify that the plumbing works.
        protected bool _unitTestSensorsOn = false; // "true" will leave laser, proximity and other sensors on; false turns them all off.
        protected bool _doDecisionStraightForward = false; // "true" disables Decide so that it always decides to just move straight forward. Useful for timing evaluation and bumper debugging.
        protected bool _doDecisionDontMove = false; // "true" disables Decide so that it does nothing, and no commands are issued. Useful for wheel encoder and bumper debugging.
        protected bool _doSimulatedLaser = false; // "true" disables real laser and replaces it with simulated wide-open laser view
        protected bool _simulatedLaserRandomize = true; // if _doSimulatedLaser=true, some randomization is imposed on laser frames, otherwise it is wide open.
        protected int _simulatedLaserWatchdogInterval = 10000; // if _doSimulatedLaser=true, a frame comes every ... ms
        protected bool _testBumpMode = false; // "true" disables Decide and disregards moving speed to allow debugging (while stationary) of simple moves activated by events from the Bumper/Whiskers
        // Drive parameters used by the unit-test behaviors; the velocity/power values
        // are filled in by performUnitTest() from the Moderate* service defaults.
        double utForwardVelocity;
        double utTurnPower;
        double utPowerScale = 1.0d;
        const int repeatCount = 1; // Number of times to repeat behavior
        // Values for "exact" movements using DriveDistance and RotateDegrees
        protected float driveDistanceMeters = 10.0f * (float)Distance.METERS_PER_FOOT; // Drive 0.5f = 50cm 5.0f = 5m
        protected float rotateAngle = 90.0f; // Turn 90 degrees to the right (+) or left (-)
        // this will be called after all initialization:
        // Entry point for the unit-test behaviors: seeds drive parameters from the
        // service's Moderate* defaults, ensures collision state exists, and spawns
        // exactly one of the behavior iterators below (the others are kept
        // commented out as selectable alternatives).
        private void performUnitTest()
        {
            LogInfo("DriveBehaviorServiceBase: performUnitTest() Started");
            utForwardVelocity = ModerateForwardVelocityMmSec;
            utTurnPower = ModerateTurnPower;
            // Lazily create collision state so behaviors can record turn/move results.
            if (_state.collisionState == null)
            {
                _state.collisionState = new CollisionState();
            }
            // to test drive operation, execute the geometric pattern
            //SpawnIterator(BehaviorMoveForward);
            //SpawnIterator(BehaviorExercisePololuMaestro);
            //SpawnIterator(BehaviorPushForward);
            SpawnIterator(BehaviorTurn);
            //SpawnIterator(BehaviorTurnAndMoveForward);
            //SpawnIterator(BehaviorSquare);
            //SpawnIterator(BehaviorStraight);
            // SpawnIterator(BehaviorStraightInterruptTurn);
            //SpawnIterator(BehaviorKata);
            // NOTE: SpawnIterator only schedules the iterator; this method returns
            // immediately while the behavior runs asynchronously under CCR.
            LogInfo("DriveBehaviorServiceBase: performUnitTest() finished");
        }
        #region BehaviorExercisePololuMaestro
        // Iterator to execute the Behavior
        // It is important to use an Iterator so that it can relinquish control
        // when there is nothing to do, i.e. yield return
        // Sweeps one servo channel through 50 positions (1020..2000 at 20/step),
        // posting a Pololu Maestro "set" command each second.
        protected IEnumerator<ITask> BehaviorExercisePololuMaestro()
        {
            LogInfo("DriveBehaviorServiceBase: BehaviorExercisePololuMaestro() Started");
            Talker.Say(10, "waiting for Behavior Exercise Pololu Maestro");
            // Wait for the robot to initialize, otherwise it will miss the initial command
            for (int i = 10; i > 0; i--)
            {
                LogInfo(LogGroups.Console, i.ToString());
                yield return Timeout(1000);
            }
            Talker.Say(10, "starting Behavior Exercise Pololu Maestro");
            // Wait for settling time
            yield return Timeout(settlingTime);
            byte channel = ServoChannelMap.leftGunTilt;
            for (int i = 1; i <= 50; i++)
            {
                int servoPos = 1000 + 20 * i;
                //Talker.Say(10, "servo " + servoPos);
                // Target is servoPos * 4 — presumably the Maestro expects targets in
                // quarter-microsecond units; TODO confirm against the Maestro protocol.
                pololumaestro.ChannelValuePair cvp = new pololumaestro.ChannelValuePair() { Channel = channel, Target = (ushort)(servoPos * 4) };
                List<pololumaestro.ChannelValuePair> channelValues = new List<pololumaestro.ChannelValuePair>();
                channelValues.Add(cvp);
                pololumaestro.PololuMaestroCommand cmd = new pololumaestro.PololuMaestroCommand() { Command = "set", ChannelValues = channelValues };
                _pololuMaestroPort.Post(new pololumaestro.SendPololuMaestroCommand(cmd));
                // wait some time
                yield return Timeout(1000);
            }
            Talker.Say(10, "Behavior Exercise Pololu Maestro finished");
            // done
            yield break;
        }
        #endregion // BehaviorExercisePololuMaestro
        #region BehaviorKata
        // Runs the first Kata whose name matches "avoid.*" via the KataRunner
        // iterator; logs an error if no such Kata is registered.
        protected IEnumerator<ITask> BehaviorKata()
        {
            LogInfo("DriveBehaviorServiceBase: BehaviorKata() Started");
            Talker.Say(2, "starting Behavior Kata");
            // Wait for settling time
            yield return Timeout(15000);
            /*
            SpawnIterator<TurnAndMoveParameters, Handler>(
                new TurnAndMoveParameters()
                {
                    rotateAngle = 30,
                    rotatePower = MaximumTurnPower,
                    speed = (int)Math.Round(ModerateForwardVelocity),
                    desiredMovingState = MovingState.FreeForwards
                },
                delegate()
                {
                },
                TurnAndMoveForward);
             * */
            /*
            Kata kata = new Kata() { name="My Kata" };
            kata.Add(new KataStep()
                {
                    name = "Turn 30 backup 400",
                    rotateAngle = 30,
                    rotatePower = MaximumTurnPower,
                    speed = (int)Math.Round(ModerateForwardVelocity),
                    distance = -400,
                    desiredMovingState = MovingState.Unknown
                }
            );
            kata.Add(new KataStep()
                {
                    name = "Turn -30 forward 400",
                    rotateAngle = -30,
                    rotatePower = MaximumTurnPower,
                    speed = (int)Math.Round(ModerateForwardVelocity),
                    distance = 400,
                    desiredMovingState = MovingState.Unknown
                }
            );
            */
            // Pick the first registered Kata whose name matches the pattern.
            Kata kata = KataHelper.KataByName("avoid.*").FirstOrDefault();
            if (kata != null)
            {
                Talker.Say(5, "kata " + kata.name);
                // KataRunner executes the steps; the delegate runs on completion and
                // reports success plus how many steps completed.
                SpawnIterator<Kata, Handler>(
                    kata,
                    delegate()
                    {
                        Talker.Say(5, "kata success: " + kata.success + " count: " + kata.successfulStepsCount);
                    },
                    KataRunner);
            }
            else
            {
                LogError("DriveBehaviorServiceBase: BehaviorKata() - cannot find appropriate Kata");
            }
            LogInfo("DriveBehaviorServiceBase: BehaviorKata() finished");
        }
        #endregion // BehaviorKata
        #region BehaviorMoveForward
        // Iterator to execute the Behavior
        // It is important to use an Iterator so that it can relinquish control
        // when there is nothing to do, i.e. yield return
        // Drives straight ahead for ~10 seconds at the unit-test velocity, then
        // stops explicitly (the drive never stops on its own for speed commands).
        protected IEnumerator<ITask> BehaviorMoveForward()
        {
            LogInfo("DriveBehaviorServiceBase: BehaviorMoveForward() Started");
            Talker.Say(10, "waiting for Behavior Move Forward");
            // Wait for the robot to initialize, otherwise it will
            // miss the initial command
            for (int i = 10; i > 0; i--)
            {
                LogInfo(LogGroups.Console, i.ToString());
                yield return Timeout(1000);
            }
            // Wait for settling time
            yield return Timeout(settlingTime);
            Talker.Say(10, "starting Behavior Move Forward");
            double speedMms = utForwardVelocity * utPowerScale;
            // a fire-and-forget command to move forward:
            SetDriveSpeed(speedMms, speedMms);
            // wait some time
            for (int i = 10; i > 0; i--)
            {
                LogInfo(i.ToString());
                yield return Timeout(1000);
            }
            // we expect the drive to stop at the command, not by completion:
            StopMoving();
            Talker.Say(10, "Behavior Move Forward finished");
            // done
            yield break;
        }
        #endregion // BehaviorMoveForward
        #region BehaviorPushForward
        // Iterator to execute the Behavior
        // It is important to use an Iterator so that it can relinquish control
        // when there is nothing to do, i.e. yield return
        // Repeatedly re-issues the forward speed command (50 pushes, 2s apart) —
        // useful for testing that the drive keeps accepting commands under load.
        protected IEnumerator<ITask> BehaviorPushForward()
        {
            LogInfo("DriveBehaviorServiceBase: BehaviorPushForward() Started");
            Talker.Say(10, "waiting for Behavior Push Forward");
            // Wait for the robot to initialize, otherwise it will
            // miss the initial command
            for (int i = 10; i > 0; i--)
            {
                LogInfo(LogGroups.Console, i.ToString());
                yield return Timeout(1000);
            }
            Talker.Say(10, "starting Behavior Push Forward");
            // Wait for settling time
            yield return Timeout(settlingTime);
            for (int i = 1; i <= 50; i++)
            {
                Talker.Say(10, "push " + i);
                double speedMms = utForwardVelocity * utPowerScale;
                // a fire-and-forget command to move forward:
                SetDriveSpeed(speedMms, speedMms);
                // wait some time
                LogInfo(i.ToString());
                yield return Timeout(2000);
            }
            // we expect the drive to stop at the command, not by completion:
            StopMoving();
            Talker.Say(10, "Behavior Push Forward finished");
            // done
            yield break;
        }
        #endregion // BehaviorPushForward
        #region BehaviorTurnAndMoveForward
        // Iterator to execute the Behavior
        // It is important to use an Iterator so that it can relinquish control
        // when there is nothing to do, i.e. yield return
        // Turns by rotateAngle, waits for the turn to complete, then (only on
        // success) drives forward for ~10 seconds and stops. Turn state in
        // _mapperVicinity is updated on both the success and failure paths.
        protected IEnumerator<ITask> BehaviorTurnAndMoveForward()
        {
            LogInfo("DriveBehaviorServiceBase: BehaviorTurnAndMoveForward() Started");
            Talker.Say(2, "starting Behavior Turn And Move Forward");
            // Wait for settling time
            yield return Timeout(settlingTime);
            LogInfo(LogGroups.Console, "Turning " + (rotateAngle > 0.0d ? "Right " : "Left "));
            bool success = true;
            Fault fault = null;
            // First turn:
            yield return Arbiter.Choice(
                TurnByAngle((int)rotateAngle, utTurnPower * utPowerScale),
                delegate(DefaultUpdateResponseType response) { success = true; },
                delegate(Fault f) { success = false; fault = f; }
            );
            // If the RotateDegrees was accepted, then wait for it to complete.
            // It is important not to wait if the request failed.
            if (success)
            {
                DriveStageContainer driveStage = new DriveStageContainer();
                yield return WaitForCompletion(driveStage);
                LogInfo("WaitForCompletion() returned: " + driveStage.DriveStage);
                if (_mapperVicinity.turnState != null)
                {
                    _mapperVicinity.turnState.finished = DateTime.Now;
                    _mapperVicinity.turnState.hasFinished = true;
                }
                // Only a Completed stage counts as a successful turn.
                success = driveStage.DriveStage == drive.DriveStage.Completed;
            }
            else
            {
                // Request was rejected — mark the turn canceled so the mapper state
                // reflects that no rotation took place.
                if (_mapperVicinity.turnState != null)
                {
                    _mapperVicinity.turnState.finished = DateTime.Now;
                    _mapperVicinity.turnState.hasFinished = true;
                    _mapperVicinity.turnState.wasCanceled = true;
                }
                LogError("Error occurred on TurnByAngle: " + fault);
            }
            if (success)
            {
                // Wait for settling time
                //yield return Timeout(settlingTime);
                double speedMms = utForwardVelocity * utPowerScale;
                // a fire-and-forget command to move forward:
                SetDriveSpeed(speedMms, speedMms);
                // wait some time
                for (int i = 10; i > 0; i--)
                {
                    LogInfo(i.ToString());
                    yield return Timeout(1000);
                }
                // we expect the drive to stop at the command, not by completion:
                StopMoving();
                Talker.Say(2, "Behavior Turn And Move Forward finished");
            }
            else
            {
                Talker.Say(2, "Behavior Turn And Move Forward canceled");
            }
            // done
            yield break;
        }
        #endregion // BehaviorTurnAndMoveForward
#region BehaviorTurn
// Iterator to execute the Behavior
// It is important to use an Iterator so that it can relinquish control
// when there is nothing to do, i.e. yield return
protected IEnumerator<ITask> BehaviorTurn()
{
LogInfo("DriveBehaviorServiceBase: BehaviorTurn() Started");
Talker.Say(2, "starting Behavior Turn And Move Forward");
// Wait for the robot to initialize, otherwise it will
// miss the initial command
for (int i = 10; i > 0; i--)
{
LogInfo(LogGroups.Console, i.ToString());
yield return Timeout(1000);
}
LogInfo(LogGroups.Console, "Turning " + (rotateAngle > 0.0d ? "Right " : "Left "));
bool success = true;
Fault fault = null;
// Turn:
yield return Arbiter.Choice(
TurnByAngle((int)rotateAngle, utTurnPower * utPowerScale),
delegate(DefaultUpdateResponseType response) { success = true; },
delegate(Fault f) { success = false; fault = f; }
);
// If the RotateDegrees was accepted, then wait for it to complete.
// It is important not to wait if the request failed.
if (success)
{
DriveStageContainer driveStage = new DriveStageContainer();
yield return WaitForCompletion(driveStage);
LogInfo("WaitForCompletion() returned: " + driveStage.DriveStage);
if (_mapperVicinity.turnState != null)
{
_mapperVicinity.turnState.finished = DateTime.Now;
_mapperVicinity.turnState.hasFinished = true;
}
success = driveStage.DriveStage == drive.DriveStage.Completed;
}
else
{
if (_mapperVicinity.turnState != null)
{
_mapperVicinity.turnState.finished = DateTime.Now;
_mapperVicinity.turnState.hasFinished = true;
_mapperVicinity.turnState.wasCanceled = true;
}
LogError("Error occurred on TurnByAngle: " + fault);
}
// done
yield break;
}
#endregion // BehaviorTurn
#region BehaviorStraight
// Iterator to execute the Behavior
// It is important to use an Iterator so that it can relinquish control
// when there is nothing to do, i.e. yield return
        /// <summary>
        /// CCR iterator behavior: after a ~10 second startup countdown, drive
        /// straight ahead driveDistanceMeters using controlled (encoder-based)
        /// Translate moves, repeating the leg repeatCount times.
        /// </summary>
        protected IEnumerator<ITask> BehaviorStraight()
        {
            LogInfo("DriveBehaviorServiceBase: BehaviorStraight() Started");
            // Wait for the robot to initialize, otherwise it will miss the initial command
            for (int i = 10; i > 0; i--)
            {
                LogInfo(LogGroups.Console, i.ToString());
                yield return Timeout(1000);
            }
            Talker.Say(2, "starting Behavior Straight");
            LogInfo("Starting STRAIGHT using Controlled Moves ...");
            // Make sure that the drive is enabled first!
            //EnableMotor();
            for (int times = 1; times <= repeatCount; times++)
            {
                // Wait for settling time
                yield return Timeout(settlingTime);
                // This code uses the Translate operation to control the robot. These are not precise,
                // but they should be better than using timers and they should also work regardless of the type of robot.
                bool success = true;
                Fault fault = null;
                LogInfo("Drive Straight Ahead - starting step " + times);
                // Drive straight ahead (Translate takes millimeters, hence * 1000)
                yield return Arbiter.Choice(
                    Translate((int)(driveDistanceMeters * 1000.0d), utForwardVelocity * utPowerScale),
                    delegate(DefaultUpdateResponseType response) { success = true; },
                    delegate(Fault f) { success = false; fault = f; }
                );
                // If the DriveDistance was accepted, then wait for it to complete.
                // It is important not to wait if the request failed.
                // NOTE: This approach only works if you always wait for a
                // completion message. If you send any other drive request
                // while the current one is active, then the current motion
                // will be canceled, i.e. cut short.
                if (success)
                {
                    DriveStageContainer driveStage = new DriveStageContainer();
                    yield return WaitForCompletion(driveStage);
                    LogInfo("WaitForCompletion() returned: " + driveStage.DriveStage);
                    success = driveStage.DriveStage == drive.DriveStage.Completed;
                }
                else
                {
                    LogError("Error occurred on Translate: " + fault);
                }
                LogInfo("Drive Straight Ahead - finished step " + times);
            }
            // And finally make sure that the robot is stopped!
            //StopMoving();
            LogInfo("STRAIGHT Finished, robot stopped");
            Talker.Say(2, "Behavior Straight finished");
            yield break;
        }
#endregion // BehaviorStraight
#region BehaviorStraightInterruptTurn
// Iterator to execute the Behavior
// It is important to use an Iterator so that it can relinquish control
// when there is nothing to do, i.e. yield return
protected IEnumerator<ITask> BehaviorStraightInterruptTurn()
{
LogInfo("DriveBehaviorServiceBase: BehaviorStraightInterruptTurn() Started ----------------------------------------------");
//Talker.Say(2, "starting Behavior Straight Interrupt Turn");
for (int times = 1; times <= repeatCount; times++)
{
SpawnIterator(BehaviorStraightInterruptTurn_Straight);
yield return Timeout(10000);
SpawnIterator(BehaviorStraightInterruptTurn_Turn);
LogInfo("DriveBehaviorServiceBase: BehaviorStraightInterruptTurn() - finished step " + times);
}
LogInfo("DriveBehaviorServiceBase: BehaviorStraightInterruptTurn() Finished ----------------------------------------------");
//Talker.Say(2, "Behavior Straight Interrupt Turn finished");
yield break;
}
protected IEnumerator<ITask> BehaviorStraightInterruptTurn_Straight()
{
LogInfo("DriveBehaviorServiceBase: BehaviorStraightInterruptTurn_Straight() Started");
bool success = true;
Fault fault = null;
// Drive straight ahead
yield return Arbiter.Choice(
// 3 meters takes about 30 seconds:
Translate((int)(3.0d * 1000.0d), utForwardVelocity * utPowerScale),
delegate(DefaultUpdateResponseType response) { success = true; },
delegate(Fault f) { success = false; fault = f; }
);
// If the DriveDistance was accepted, then wait for it to complete.
// It is important not to wait if the request failed.
if (success)
{
DriveStageContainer driveStage = new DriveStageContainer();
yield return WaitForCompletion(driveStage);
LogInfo("WaitForCompletion() returned: " + driveStage.DriveStage);
success = driveStage.DriveStage == drive.DriveStage.Completed;
}
else
{
LogError("Error occurred on Translate: " + fault);
}
if (success)
{
LogInfo("BehaviorStraightInterruptTurn_Straight() Finished --------");
}
else
{
LogInfo("BehaviorStraightInterruptTurn_Straight() Canceled --------");
}
yield break;
}
// Iterator to execute the Behavior
// It is important to use an Iterator so that it can relinquish control
// when there is nothing to do, i.e. yield return
        /// <summary>
        /// CCR iterator: turns in place by rotateAngle, waits for the turn to
        /// complete, and records the outcome in _mapperVicinity.turnState.
        /// Spawned to interrupt the straight leg of BehaviorStraightInterruptTurn.
        /// </summary>
        protected IEnumerator<ITask> BehaviorStraightInterruptTurn_Turn()
        {
            LogInfo("DriveBehaviorServiceBase: BehaviorStraightInterruptTurn_Turn() Started");
            bool success = true;
            Fault fault = null;
            LogInfo(LogGroups.Console, "Turning " + (rotateAngle > 0.0d ? "Right " : "Left "));
            // turn first: the Choice completes on accept/reject, not on motion end
            yield return Arbiter.Choice(
                TurnByAngle((int)rotateAngle, utTurnPower * utPowerScale),
                delegate(DefaultUpdateResponseType response) { success = true; },
                delegate(Fault f) { success = false; fault = f; }
            );
            // If the RotateDegrees was accepted, then wait for it to complete.
            // It is important not to wait if the request failed.
            if (success)
            {
                DriveStageContainer driveStage = new DriveStageContainer();
                yield return WaitForCompletion(driveStage);
                LogInfo("WaitForCompletion() returned: " + driveStage.DriveStage);
                // mark the turn finished for downstream consumers of turnState:
                if (_mapperVicinity.turnState != null)
                {
                    _mapperVicinity.turnState.finished = DateTime.Now;
                    _mapperVicinity.turnState.hasFinished = true;
                }
                success = driveStage.DriveStage == drive.DriveStage.Completed;
            }
            else
            {
                // request rejected - mark turnState as canceled:
                if (_mapperVicinity.turnState != null)
                {
                    _mapperVicinity.turnState.finished = DateTime.Now;
                    _mapperVicinity.turnState.hasFinished = true;
                    _mapperVicinity.turnState.wasCanceled = true;
                }
                LogError("Error occurred on TurnByAngle: " + fault);
            }
            if (success)
            {
                LogInfo("BehaviorStraightInterruptTurn_Turn() Finished --------");
            }
            else
            {
                LogInfo("BehaviorStraightInterruptTurn_Turn() Canceled --------");
            }
            yield break;
        }
#endregion // BehaviorStraightInterruptTurn
#region BehaviorSquare
// Iterator to execute the Behavior
// It is important to use an Iterator so that it can relinquish control
// when there is nothing to do, i.e. yield return
        /// <summary>
        /// CCR iterator behavior: after a ~10 second startup countdown, drives the
        /// four sides of a square (straight leg of driveDistanceMeters followed by a
        /// rotateAngle turn per side), repeating the whole square repeatCount times.
        /// </summary>
        IEnumerator<ITask> BehaviorSquare()
        {
            LogInfo(LogGroups.Console, "DriveBehaviorServiceBase: BehaviorSquare Started");
            Talker.Say(2, "starting Behavior Square");
            // Wait for the robot to initialize, otherwise it will
            // miss the initial command
            for (int i = 10; i > 0; i--)
            {
                LogInfo(LogGroups.Console, i.ToString());
                yield return Timeout(1000);
            }
            LogInfo(LogGroups.Console, "Starting SQUARE using Controlled Moves ...");
            // Make sure that the drive is enabled first!
            //EnableMotor();
            // 'times' is declared outside the loop so the completion message below
            // can report how many full squares were driven:
            int times = 1;
            for (; times <= repeatCount; times++)
            {
                // Drive along the four sides of a square
                for (int side = 0; side < 4; side++)
                {
                    bool success = true;
                    Fault fault = null;
                    LogInfo(LogGroups.Console, "Driving Straight Ahead - side " + side);
                    // Drive straight ahead (Translate takes millimeters, hence * 1000)
                    yield return Arbiter.Choice(
                        Translate((int)(driveDistanceMeters * 1000.0d), utForwardVelocity * utPowerScale),
                        delegate(DefaultUpdateResponseType response) { success = true; },
                        delegate(Fault f) { success = false; fault = f; }
                    );
                    // If the DriveDistance was accepted, then wait for it to complete.
                    // It is important not to wait if the request failed.
                    // NOTE: This approach only works if you always wait for a
                    // completion message. If you send any other drive request
                    // while the current one is active, then the current motion
                    // will be canceled, i.e. cut short.
                    if (success)
                    {
                        DriveStageContainer driveStage = new DriveStageContainer();
                        yield return WaitForCompletion(driveStage);
                        LogInfo("WaitForCompletion() returned: " + driveStage.DriveStage);
                        success = driveStage.DriveStage == drive.DriveStage.Completed;
                    }
                    else
                    {
                        LogError("Error occurred on Translate: " + fault);
                    }
                    // Wait for settling time
                    yield return Timeout(settlingTime);
                    LogInfo(LogGroups.Console, "Turning " + (rotateAngle > 0.0d ? "Right " : "Left ") + " - side " + side);
                    // Now turn:
                    yield return Arbiter.Choice(
                        TurnByAngle((int)rotateAngle, utTurnPower * utPowerScale),
                        delegate(DefaultUpdateResponseType response) { success = true; },
                        delegate(Fault f) { success = false; fault = f; }
                    );
                    // If the RotateDegrees was accepted, then wait for it to complete.
                    // It is important not to wait if the request failed.
                    if (success)
                    {
                        DriveStageContainer driveStage = new DriveStageContainer();
                        yield return WaitForCompletion(driveStage);
                        LogInfo("WaitForCompletion() returned: " + driveStage.DriveStage);
                        // mark the turn finished for downstream consumers of turnState:
                        if (_mapperVicinity.turnState != null)
                        {
                            _mapperVicinity.turnState.finished = DateTime.Now;
                            _mapperVicinity.turnState.hasFinished = true;
                        }
                        success = driveStage.DriveStage == drive.DriveStage.Completed;
                    }
                    else
                    {
                        // request rejected - mark turnState as canceled:
                        if (_mapperVicinity.turnState != null)
                        {
                            _mapperVicinity.turnState.finished = DateTime.Now;
                            _mapperVicinity.turnState.hasFinished = true;
                            _mapperVicinity.turnState.wasCanceled = true;
                        }
                        LogError("Error occurred on TurnByAngle: " + fault);
                    }
                    // Wait for settling time
                    yield return Timeout(settlingTime);
                }
            }
            // And finally make sure that the robot is stopped!
            //StopMoving();
            LogInfo(LogGroups.Console, "BehaviorSquare Finished after completing " + (times-1) + " cycles; robot stopped");
            Talker.Say(2, "Behavior Square finished");
            yield break;
        }
#endregion // BehaviorSquare
#endregion // Unit Testing
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using RailwaysTicketsUA.Web.Api.Areas.HelpPage.ModelDescriptions;
using RailwaysTicketsUA.Web.Api.Areas.HelpPage.Models;
namespace RailwaysTicketsUA.Web.Api.Areas.HelpPage
{
    // NOTE(review): this is the standard ASP.NET Web API HelpPage scaffolding class
    // (Microsoft.AspNet.WebApi.HelpPage package); kept token-identical to the
    // upstream template, with documentation added to the private helpers.
    public static class HelpPageConfigurationExtensions
    {
        // Prefix for the HttpConfiguration.Properties keys under which generated
        // HelpPageApiModel instances are cached (one per ApiDescription id).
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";
        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }
        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }
        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }
        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }
        /// <summary>
        /// Sets the sample request directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }
        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }
        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }
        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }
        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }
        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }
        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }
        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }
        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // lazily created once and cached in config.Properties (thread-safe GetOrAdd)
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }
        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }
        /// <summary>
        /// Gets the model description generator.
        /// </summary>
        /// <param name="config">The configuration.</param>
        /// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
        public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
        {
            // lazily created once and cached in config.Properties (thread-safe GetOrAdd)
            return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
                typeof(ModelDescriptionGenerator),
                k => InitializeModelDescriptionGenerator(config));
        }
        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config);
                    config.Properties.TryAdd(modelId, model);
                }
            }
            // NOTE: returns null when no ApiDescription matches apiDescriptionId
            return (HelpPageApiModel)model;
        }
        /// <summary>
        /// Builds the HelpPageApiModel for one ApiDescription: URI parameters,
        /// request/resource model descriptions, and sample requests/responses.
        /// </summary>
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel()
            {
                ApiDescription = apiDescription,
            };
            ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
            HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
            GenerateUriParameters(apiModel, modelGenerator);
            GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
            GenerateResourceDescription(apiModel, modelGenerator);
            GenerateSamples(apiModel, sampleGenerator);
            return apiModel;
        }
        /// <summary>
        /// Populates apiModel.UriParameters from the FromUri parameter descriptions:
        /// complex types without a string TypeConverter are flattened into their
        /// properties; everything else (including undeclared route parameters,
        /// which have a null descriptor) is added as a single parameter.
        /// </summary>
        private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromUri)
                {
                    HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
                    Type parameterType = null;
                    ModelDescription typeDescription = null;
                    ComplexTypeModelDescription complexTypeDescription = null;
                    if (parameterDescriptor != null)
                    {
                        parameterType = parameterDescriptor.ParameterType;
                        typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                        complexTypeDescription = typeDescription as ComplexTypeModelDescription;
                    }
                    // Example:
                    // [TypeConverter(typeof(PointConverter))]
                    // public class Point
                    // {
                    //     public Point(int x, int y)
                    //     {
                    //         X = x;
                    //         Y = y;
                    //     }
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
                    //
                    // public class Point
                    // {
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Regular complex class Point will have properties X and Y added to UriParameters collection.
                    if (complexTypeDescription != null
                        && !IsBindableWithTypeConverter(parameterType))
                    {
                        foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                        {
                            apiModel.UriParameters.Add(uriParameter);
                        }
                    }
                    else if (parameterDescriptor != null)
                    {
                        ParameterDescription uriParameter =
                            AddParameterDescription(apiModel, apiParameter, typeDescription);
                        if (!parameterDescriptor.IsOptional)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                        }
                        object defaultValue = parameterDescriptor.DefaultValue;
                        if (defaultValue != null)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                        }
                    }
                    else
                    {
                        Debug.Assert(parameterDescriptor == null);
                        // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                        // when source is FromUri. Ignored in request model and among resource parameters but listed
                        // as a simple string here.
                        ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                        AddParameterDescription(apiModel, apiParameter, modelDescription);
                    }
                }
            }
        }
        /// <summary>
        /// True when the type has a TypeConverter that can convert from string,
        /// i.e. the model binder can bind the whole type from a single URI value.
        /// </summary>
        private static bool IsBindableWithTypeConverter(Type parameterType)
        {
            if (parameterType == null)
            {
                return false;
            }
            return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
        }
        /// <summary>
        /// Creates a ParameterDescription for the given API parameter, appends it to
        /// apiModel.UriParameters and returns it so callers can add annotations.
        /// </summary>
        private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
            ApiParameterDescription apiParameter, ModelDescription typeDescription)
        {
            ParameterDescription parameterDescription = new ParameterDescription
            {
                Name = apiParameter.Name,
                Documentation = apiParameter.Documentation,
                TypeDescription = typeDescription,
            };
            apiModel.UriParameters.Add(parameterDescription);
            return parameterDescription;
        }
        /// <summary>
        /// Sets apiModel.RequestModelDescription from the FromBody parameter, or,
        /// for actions taking a raw HttpRequestMessage, from the actual type
        /// registered with the sample generator (if any).
        /// </summary>
        private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromBody)
                {
                    Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
                    apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    apiModel.RequestDocumentation = apiParameter.Documentation;
                }
                else if (apiParameter.ParameterDescriptor != null &&
                    apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
                {
                    Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
                    if (parameterType != null)
                    {
                        apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    }
                }
            }
        }
        /// <summary>
        /// Sets apiModel.ResourceDescription from the action's response type
        /// (preferring the documented ResponseType over the declared return type).
        /// </summary>
        private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
            Type responseType = response.ResponseType ?? response.DeclaredType;
            if (responseType != null && responseType != typeof(void))
            {
                apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
            }
        }
        /// <summary>
        /// Fills apiModel.SampleRequests/SampleResponses; generation failures are
        /// reported via apiModel.ErrorMessages instead of propagating.
        /// </summary>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
        {
            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
                foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
                    "An exception has occurred while generating the sample. Exception message: {0}",
                    HelpPageSampleGenerator.UnwrapException(e).Message));
            }
        }
        /// <summary>
        /// Finds the body parameter of an action (FromBody or raw HttpRequestMessage)
        /// and resolves its resource type; returns false when the action has none.
        /// </summary>
        private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
        {
            parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
                p => p.Source == ApiParameterSource.FromBody ||
                    (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));
            if (parameterDescription == null)
            {
                resourceType = null;
                return false;
            }
            resourceType = parameterDescription.ParameterDescriptor.ParameterType;
            if (resourceType == typeof(HttpRequestMessage))
            {
                HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            }
            if (resourceType == null)
            {
                parameterDescription = null;
                return false;
            }
            return true;
        }
        /// <summary>
        /// Creates the ModelDescriptionGenerator and pre-populates it with a model
        /// description for every API's resource (body) parameter type.
        /// </summary>
        private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
        {
            ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
            Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
            foreach (ApiDescription api in apis)
            {
                ApiParameterDescription parameterDescription;
                Type parameterType;
                if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
                {
                    modelGenerator.GetOrCreateModelDescription(parameterType);
                }
            }
            return modelGenerator;
        }
        /// <summary>
        /// If the sample is an InvalidSample, surfaces its error message on the model.
        /// </summary>
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample != null)
            {
                apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
            }
        }
    }
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Threading;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.ErrorReporting;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Notification;
using Microsoft.VisualStudio.ComponentModelHost;
using Microsoft.VisualStudio.LanguageServices.Implementation.EditAndContinue;
using Microsoft.VisualStudio.LanguageServices.Implementation.TaskList;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.TextManager.Interop;
using Microsoft.VisualStudio.Utilities;
using Roslyn.Utilities;
namespace Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem
{
// NOTE: Microsoft.VisualStudio.LanguageServices.TypeScript.TypeScriptProject derives from AbstractProject.
internal abstract partial class AbstractProject : ForegroundThreadAffinitizedObject, IVisualStudioHostProject
{
internal static object RuleSetErrorId = new object();
/// <summary>
/// The path to the project file itself. This is intentionally kept private, to avoid having to deal with people who
/// want the file path without realizing they need to deal with renames. If you need the folder of the project, just
/// use <see cref="ContainingDirectoryPathOpt" /> which is internal and doesn't change for a project.
/// </summary>
private string _filePathOpt;
private string _objOutputPathOpt;
private string _binOutputPathOpt;
private readonly List<ProjectReference> _projectReferences = new List<ProjectReference>();
private readonly List<VisualStudioMetadataReference> _metadataReferences = new List<VisualStudioMetadataReference>();
private readonly Dictionary<DocumentId, IVisualStudioHostDocument> _documents = new Dictionary<DocumentId, IVisualStudioHostDocument>();
private readonly Dictionary<string, IVisualStudioHostDocument> _documentMonikers = new Dictionary<string, IVisualStudioHostDocument>(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, VisualStudioAnalyzer> _analyzers = new Dictionary<string, VisualStudioAnalyzer>(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<DocumentId, IVisualStudioHostDocument> _additionalDocuments = new Dictionary<DocumentId, IVisualStudioHostDocument>();
/// <summary>
/// The list of files which have been added to the project but we aren't tracking since they
/// aren't real source files. Sometimes we're asked to add silly things like HTML files or XAML
/// files, and if those are open in a strange editor we just bail.
/// </summary>
private readonly ISet<string> _untrackedDocuments = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// The path to a metadata reference that was converted to project references.
/// </summary>
private readonly Dictionary<string, ProjectReference> _metadataFileNameToConvertedProjectReference = new Dictionary<string, ProjectReference>(StringComparer.OrdinalIgnoreCase);
private bool _pushingChangesToWorkspaceHosts;
// PERF: Create these event handlers once to be shared amongst all documents (the sender arg identifies which document and project)
private static readonly EventHandler<bool> s_documentOpenedEventHandler = OnDocumentOpened;
private static readonly EventHandler<bool> s_documentClosingEventHandler = OnDocumentClosing;
private static readonly EventHandler s_documentUpdatedOnDiskEventHandler = OnDocumentUpdatedOnDisk;
private static readonly EventHandler<bool> s_additionalDocumentOpenedEventHandler = OnAdditionalDocumentOpened;
private static readonly EventHandler<bool> s_additionalDocumentClosingEventHandler = OnAdditionalDocumentClosing;
private static readonly EventHandler s_additionalDocumentUpdatedOnDiskEventHandler = OnAdditionalDocumentUpdatedOnDisk;
private readonly DiagnosticDescriptor _errorReadingRulesetRule = new DiagnosticDescriptor(
id: IDEDiagnosticIds.ErrorReadingRulesetId,
title: ServicesVSResources.ErrorReadingRuleset,
messageFormat: ServicesVSResources.Error_reading_ruleset_file_0_1,
category: FeaturesResources.Roslyn_HostError,
defaultSeverity: DiagnosticSeverity.Error,
isEnabledByDefault: true);
/// <summary>
/// When a reference changes on disk we start a delayed task to update the <see cref="Workspace"/>.
/// It is delayed for two reasons: first, there are often a bunch of change notifications in quick succession
/// as the file is written. Second, we often get the first notification while something is still writing the
/// file, so we're unable to actually load it. To avoid both of these issues, we wait five seconds before
/// reloading the metadata. This <see cref="Dictionary{TKey, TValue}"/> holds on to
/// <see cref="CancellationTokenSource"/>s that allow us to cancel the existing reload task if another file
/// change comes in before we process it.
/// </summary>
private readonly Dictionary<VisualStudioMetadataReference, CancellationTokenSource> _changedReferencesPendingUpdate
= new Dictionary<VisualStudioMetadataReference, CancellationTokenSource>();
/// <summary>
/// Initializes the project and wires it to the tracker, workspace and VS services.
/// Statement order below matters: _filePathOpt is populated by
/// UpdateProjectDisplayNameAndFilePath and read by the Version/Id assignments after it.
/// </summary>
public AbstractProject(
    VisualStudioProjectTracker projectTracker,
    Func<ProjectId, IVsReportExternalErrors> reportExternalErrorCreatorOpt,
    string projectSystemName,
    string projectFilePath,
    IVsHierarchy hierarchy,
    string language,
    Guid projectGuid,
    IServiceProvider serviceProvider,
    VisualStudioWorkspaceImpl visualStudioWorkspaceOpt,
    HostDiagnosticUpdateSource hostDiagnosticUpdateSourceOpt,
    ICommandLineParserService commandLineParserServiceOpt = null)
{
    Contract.ThrowIfNull(projectSystemName);
    ServiceProvider = serviceProvider;
    Language = language;
    Hierarchy = hierarchy;
    Guid = projectGuid;
    var componentModel = (IComponentModel)serviceProvider.GetService(typeof(SComponentModel));
    ContentTypeRegistryService = componentModel.GetService<IContentTypeRegistryService>();
    this.RunningDocumentTable = (IVsRunningDocumentTable4)serviceProvider.GetService(typeof(SVsRunningDocumentTable));
    this.DisplayName = projectSystemName;
    this.ProjectTracker = projectTracker;
    ProjectSystemName = projectSystemName;
    Workspace = visualStudioWorkspaceOpt;
    CommandLineParserService = commandLineParserServiceOpt;
    HostDiagnosticUpdateSource = hostDiagnosticUpdateSourceOpt;
    // Sets _filePathOpt (read below); presumably also refreshes DisplayName — TODO confirm.
    UpdateProjectDisplayNameAndFilePath(projectSystemName, projectFilePath);
    if (_filePathOpt != null)
    {
        // Derive the version stamp from the project file's timestamp so it changes when the file does.
        Version = VersionStamp.Create(File.GetLastWriteTimeUtc(_filePathOpt));
    }
    else
    {
        Version = VersionStamp.Create();
    }
    Id = this.ProjectTracker.GetOrCreateProjectIdForPath(_filePathOpt ?? ProjectSystemName, ProjectSystemName);
    if (reportExternalErrorCreatorOpt != null)
    {
        ExternalErrorReporter = reportExternalErrorCreatorOpt(Id);
    }
    if (visualStudioWorkspaceOpt != null)
    {
        // Edit-and-continue support is only created when hosted in a real VS workspace.
        this.EditAndContinueImplOpt = new VsENCRebuildableProjectImpl(this);
    }
}
internal IServiceProvider ServiceProvider { get; }
/// <summary>
/// Indicates whether this project is a website type.
/// </summary>
public bool IsWebSite { get; protected set; }
/// <summary>
/// A full path to the project obj output binary, or null if the project doesn't have an obj output binary.
/// </summary>
internal string TryGetObjOutputPath() => _objOutputPathOpt;
/// <summary>
/// A full path to the project bin output binary, or null if the project doesn't have a bin output binary.
/// </summary>
internal string TryGetBinOutputPath() => _binOutputPathOpt;
public IRuleSetFile RuleSetFile { get; private set; }
protected VisualStudioProjectTracker ProjectTracker { get; }
protected IVsRunningDocumentTable4 RunningDocumentTable { get; }
protected IVsReportExternalErrors ExternalErrorReporter { get; }
internal HostDiagnosticUpdateSource HostDiagnosticUpdateSource { get; }
public ProjectId Id { get; }
public string Language { get; }
private ICommandLineParserService CommandLineParserService { get; }
public IVsHierarchy Hierarchy { get; }
/// <summary>
/// Guid of the project
///
/// it is not readonly since it can be changed while loading project
/// </summary>
public Guid Guid { get; protected set; }
/// <summary>
/// string (Guid) of the Hierarchy project type
/// </summary>
public string ProjectType { get; protected set; }
public Workspace Workspace { get; }
public VersionStamp Version { get; }
/// <summary>
/// The containing directory of the project. Null if none exists (consider Venus.)
/// </summary>
/// <summary>
/// The containing directory of the project file, or null when the project has no
/// file on disk (consider Venus).
/// </summary>
protected string ContainingDirectoryPathOpt
    => _filePathOpt == null ? null : Path.GetDirectoryName(_filePathOpt);
/// <summary>
/// The full path of the project file. Null if none exists (consider Venus.)
/// </summary>
public string ProjectFilePath => _filePathOpt;
/// <summary>
/// The public display name of the project. This name is not unique and may be shared
/// between multiple projects, especially in cases like Venus where the intellisense
/// projects will match the name of their logical parent project.
/// </summary>
public string DisplayName { get; protected set; }
internal string AssemblyName { get; private set; }
/// <summary>
/// The name of the project according to the project system. In "regular" projects this is
/// equivalent to <see cref="DisplayName"/>, but in Venus cases these will differ. The
/// ProjectSystemName is the 2_Default.aspx project name, whereas the regular display name
/// matches the display name of the project the user actually sees in the solution explorer.
/// These can be assumed to be unique within the Visual Studio workspace.
/// </summary>
public string ProjectSystemName { get; }
protected DocumentProvider DocumentProvider => this.ProjectTracker.DocumentProvider;
protected VisualStudioMetadataReferenceManager MetadataReferenceProvider => this.ProjectTracker.MetadataReferenceProvider;
protected IContentTypeRegistryService ContentTypeRegistryService { get; }
/// <summary>
/// Flag indicating if the latest design time build has succeeded for current project state.
/// </summary>
protected abstract bool LastDesignTimeBuildSucceeded { get; }
internal VsENCRebuildableProjectImpl EditAndContinueImplOpt { get; private set; }
/// <summary>
/// Override this method to validate references when creating <see cref="ProjectInfo"/> for current state.
/// By default, this method does nothing.
/// </summary>
protected virtual void ValidateReferences()
{
    // Intentionally a no-op in the base class; derived project types may override.
}
/// <summary>
/// Builds a <see cref="ProjectInfo"/> snapshot of the project's current state —
/// documents, references, analyzers and options — for handing to the workspace.
/// </summary>
public ProjectInfo CreateProjectInfoForCurrentState()
{
    // Give derived types a chance to check reference state before we snapshot it.
    ValidateReferences();
    var info = ProjectInfo.Create(
        this.Id,
        this.Version,
        this.DisplayName,
        AssemblyName ?? this.ProjectSystemName,
        this.Language,
        filePath: _filePathOpt,
        outputFilePath: this.TryGetObjOutputPath(),
        compilationOptions: CurrentCompilationOptions,
        parseOptions: CurrentParseOptions,
        documents: _documents.Values.Select(d => d.GetInitialState()),
        metadataReferences: _metadataReferences.Select(r => r.CurrentSnapshot),
        projectReferences: _projectReferences,
        analyzerReferences: _analyzers.Values.Select(a => a.GetReference()),
        additionalDocuments: _additionalDocuments.Values.Select(d => d.GetInitialState()));
    // A failed design-time build means the snapshot may be missing information.
    return info.WithHasAllInformation(hasAllInformation: LastDesignTimeBuildSucceeded);
}
/// <summary>
/// Returns the directories that should be searched for strong-name key files:
/// the project's containing directory and the obj output directory, when present.
/// </summary>
protected ImmutableArray<string> GetStrongNameKeyPaths()
{
    var outputPath = this.TryGetObjOutputPath();
    var containingDirectory = this.ContainingDirectoryPathOpt;
    if (containingDirectory == null && outputPath == null)
    {
        return ImmutableArray<string>.Empty;
    }
    var pathsBuilder = ImmutableArray.CreateBuilder<string>();
    if (containingDirectory != null)
    {
        pathsBuilder.Add(containingDirectory);
    }
    if (outputPath != null)
    {
        pathsBuilder.Add(Path.GetDirectoryName(outputPath));
    }
    return pathsBuilder.ToImmutable();
}
/// <summary>Returns an immutable snapshot of the current project-to-project references.</summary>
public ImmutableArray<ProjectReference> GetCurrentProjectReferences()
    => ImmutableArray.CreateRange(_projectReferences);
/// <summary>Returns an immutable snapshot of the current metadata references.</summary>
public ImmutableArray<VisualStudioMetadataReference> GetCurrentMetadataReferences()
    => ImmutableArray.CreateRange(_metadataReferences);
/// <summary>
/// Looks the id up first among source documents, then among additional (non-source)
/// documents; returns null if it is in neither map.
/// </summary>
public IVisualStudioHostDocument GetDocumentOrAdditionalDocument(DocumentId id)
{
    IVisualStudioHostDocument document;
    _documents.TryGetValue(id, out document);
    if (document != null)
    {
        return document;
    }
    _additionalDocuments.TryGetValue(id, out document);
    return document;
}
/// <summary>Returns a snapshot of the current source documents (safe to enumerate while mutating).</summary>
public IEnumerable<IVisualStudioHostDocument> GetCurrentDocuments()
    => _documents.Values.ToImmutableArrayOrEmpty();
/// <summary>Returns a snapshot of the current additional (non-source) documents.</summary>
public IEnumerable<IVisualStudioHostDocument> GetCurrentAdditionalDocuments()
    => _additionalDocuments.Values.ToImmutableArrayOrEmpty();
/// <summary>True when a document with the given RDT moniker belongs to this project.</summary>
public bool ContainsFile(string moniker)
    => _documentMonikers.ContainsKey(moniker);
/// <summary>Returns the document tracked under the given file path, or null if none.</summary>
public IVisualStudioHostDocument GetCurrentDocumentFromPath(string filePath)
{
    IVisualStudioHostDocument document;
    return _documentMonikers.TryGetValue(filePath, out document) ? document : null;
}
/// <summary>True when a metadata reference with the given path (case-insensitive) exists.</summary>
public bool HasMetadataReference(string filename)
    => _metadataReferences.Any(reference => StringComparer.OrdinalIgnoreCase.Equals(reference.FilePath, filename));
/// <summary>
/// Returns the single metadata reference with the given path, or null if there is none.
/// The incoming path is normalized first, since stored reference paths are always normalized.
/// </summary>
public VisualStudioMetadataReference TryGetCurrentMetadataReference(string filename)
{
    var normalizedPath = FileUtilities.NormalizeAbsolutePath(filename);
    return _metadataReferences.SingleOrDefault(
        reference => StringComparer.OrdinalIgnoreCase.Equals(reference.FilePath, normalizedPath));
}
/// <summary>True when this project currently holds a reference to the given project.</summary>
public bool CurrentProjectReferencesContains(ProjectId projectId)
    => _projectReferences.Any(reference => reference.ProjectId == projectId);
/// <summary>True when an analyzer with the given full path is registered on this project.</summary>
public bool CurrentProjectAnalyzersContains(string fullPath)
    => _analyzers.ContainsKey(fullPath);
// internal for testing purposes.
internal CompilationOptions CurrentCompilationOptions { get; private set; }
internal ParseOptions CurrentParseOptions { get; private set; }
/// <summary>
/// Returns a map from full path to <see cref="VisualStudioAnalyzer"/>.
/// This is a point-in-time copy; later analyzer additions/removals are not reflected.
/// </summary>
public ImmutableDictionary<string, VisualStudioAnalyzer> GetProjectAnalyzersMap() => _analyzers.ToImmutableDictionary();
/// <summary>
/// Derives an assembly name from an output path: strips the extension only when it is a
/// known assembly extension, since dev11 sometimes hands us paths with no extension at all.
/// </summary>
private static string GetAssemblyName(string outputPath)
{
    Contract.Requires(outputPath != null);
    string[] knownAssemblyExtensions = { ".exe", ".dll", ".netmodule", ".winmdobj" };
    foreach (var extension in knownAssemblyExtensions)
    {
        if (outputPath.EndsWith(extension, StringComparison.OrdinalIgnoreCase))
        {
            return Path.GetFileNameWithoutExtension(outputPath);
        }
    }
    return Path.GetFileName(outputPath);
}
/// <summary>
/// Whether metadata references whose path matches another project's output should be
/// converted into project-to-project references. Reads the workspace option when a
/// workspace is available; otherwise falls back to the option's default value.
/// </summary>
protected bool CanConvertToProjectReferences
{
    get
    {
        return this.Workspace == null
            ? InternalFeatureOnOffOptions.ProjectReferenceConversion.DefaultValue
            : this.Workspace.Options.GetOption(InternalFeatureOnOffOptions.ProjectReferenceConversion);
    }
}
/// <summary>
/// Adds a metadata reference by path, first attempting to convert it into a
/// project-to-project reference when the path is another project's output.
/// Always returns <see cref="VSConstants.S_OK"/> — see the comments below for why.
/// </summary>
protected int AddMetadataReferenceAndTryConvertingToProjectReferenceIfPossible(string filePath, MetadataReferenceProperties properties)
{
    // If this file is coming from a project, then we should convert it to a project reference instead
    AbstractProject project;
    if (this.CanConvertToProjectReferences && ProjectTracker.TryGetProjectByBinPath(filePath, out project))
    {
        var projectReference = new ProjectReference(project.Id, properties.Aliases, properties.EmbedInteropTypes);
        if (CanAddProjectReference(projectReference))
        {
            AddProjectReference(projectReference);
            // Remember the conversion so RemoveMetadataReference can undo it by path.
            _metadataFileNameToConvertedProjectReference.Add(filePath, projectReference);
            return VSConstants.S_OK;
        }
    }
    // regardless whether the file exists or not, we still record it. one of reason
    // we do that is some cross language p2p references might be resolved
    // after they are already reported as metadata references. since we use bin path
    // as a way to discover them, if we don't previously record the reference ourselves,
    // cross p2p references won't be resolved as p2p references when we finally have
    // all required information.
    //
    // it looks like
    // 1. project system sometimes won't guarantee build dependency for intellisense build
    //    if it is cross language dependency
    // 2. output path of referenced cross language project might be changed to right one
    //    once it is already added as a metadata reference.
    //
    // but this has one consequence. even if a user adds a project in the solution as
    // a metadata reference explicitly, that dll will be automatically converted back to p2p
    // reference.
    //
    // unfortunately there is no way to prevent this using information we have since,
    // at this point, we don't know whether it is a metadata reference added because
    // we don't have enough information yet for p2p reference or user explicitly added it
    // as a metadata reference.
    AddMetadataReferenceCore(this.MetadataReferenceProvider.CreateMetadataReference(this, filePath, properties));
    // here, we change behavior compared to old C# language service. regardless of file being exist or not,
    // we will always return S_OK. this is to support cross language p2p reference better.
    //
    // this should make project system to cache all cross language p2p references regardless
    // whether it actually exist in disk or not.
    // (see Roslyn bug 7315 for history - http://vstfdevdiv:8080/DevDiv_Projects/Roslyn/_workitems?_a=edit&id=7315)
    //
    // after this point, Roslyn will take care of non-exist metadata reference.
    //
    // But, this doesn't solve the issue where actual metadata reference
    // (not cross language p2p reference) is missing at the time project is opened.
    //
    // in that case, msbuild filter those actual metadata references out, so project system doesn't know
    // path to the reference. since it doesn't know where dll is, it can't (or currently doesn't)
    // setup file change notification either to find out when dll becomes available.
    //
    // at this point, user has 2 ways to recover missing metadata reference once it becomes available.
    //
    // one way is explicitly clicking that missing reference from solution explorer reference node.
    // the other is building the project. at that point, project system will refresh references
    // which will discover new dll and connect to us. once it is connected, we will take care of it.
    return VSConstants.S_OK;
}
/// <summary>
/// Removes the reference identified by path, undoing a metadata-to-project-reference
/// conversion if one was recorded for this path.
/// </summary>
protected void RemoveMetadataReference(string filePath)
{
    // Is this a reference we converted to a project reference?
    ProjectReference projectReference;
    if (_metadataFileNameToConvertedProjectReference.TryGetValue(filePath, out projectReference))
    {
        // We converted this, so remove the project reference instead
        RemoveProjectReference(projectReference);
        Contract.ThrowIfFalse(_metadataFileNameToConvertedProjectReference.Remove(filePath));
    }
    // Just a metadata reference, so remove all of those
    var referenceToRemove = TryGetCurrentMetadataReference(filePath);
    if (referenceToRemove != null)
    {
        RemoveMetadataReferenceCore(referenceToRemove, disposeReference: true);
    }
}
/// <summary>
/// Records a metadata reference, notifies workspace hosts if we're pushing, and
/// subscribes to on-disk change notifications so the reference can be reloaded.
/// </summary>
private void AddMetadataReferenceCore(VisualStudioMetadataReference reference)
{
    _metadataReferences.Add(reference);
    if (_pushingChangesToWorkspaceHosts)
    {
        // Capture the snapshot outside the lambda so every host observes the same one.
        var snapshot = reference.CurrentSnapshot;
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnMetadataReferenceAdded(this.Id, snapshot));
    }
    // See OnImportChanged for the delayed-reload handling.
    reference.UpdatedOnDisk += OnImportChanged;
}
/// <summary>
/// Removes a metadata reference and unhooks its change notification. Callers that are
/// about to re-add the same reference (see OnImportChangedAfterDelay) pass
/// disposeReference: false to keep the underlying object alive.
/// </summary>
private void RemoveMetadataReferenceCore(VisualStudioMetadataReference reference, bool disposeReference)
{
    _metadataReferences.Remove(reference);
    if (_pushingChangesToWorkspaceHosts)
    {
        // Capture the snapshot outside the lambda so every host observes the same one.
        var snapshot = reference.CurrentSnapshot;
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnMetadataReferenceRemoved(this.Id, snapshot));
    }
    reference.UpdatedOnDisk -= OnImportChanged;
    if (disposeReference)
    {
        reference.Dispose();
    }
}
/// <summary>
/// Called when a referenced metadata file changes on disk. Schedules a delayed reload
/// (see the comment on _changedReferencesPendingUpdate for why it is delayed), cancelling
/// any reload already pending for the same reference.
/// </summary>
private void OnImportChanged(object sender, EventArgs e)
{
    AssertIsForeground();
    VisualStudioMetadataReference reference = (VisualStudioMetadataReference)sender;
    CancellationTokenSource delayTaskCancellationTokenSource;
    // A previous change notification for this reference may still be waiting out its
    // delay; cancel it so only the newest reload runs.
    if (_changedReferencesPendingUpdate.TryGetValue(reference, out delayTaskCancellationTokenSource))
    {
        delayTaskCancellationTokenSource.Cancel();
    }
    delayTaskCancellationTokenSource = new CancellationTokenSource();
    _changedReferencesPendingUpdate[reference] = delayTaskCancellationTokenSource;
    // Continue on the current (foreground) synchronization context, since
    // OnImportChangedAfterDelay asserts it runs on the foreground thread.
    var task = Task.Delay(TimeSpan.FromSeconds(5), delayTaskCancellationTokenSource.Token)
        .ContinueWith(
            OnImportChangedAfterDelay,
            reference,
            delayTaskCancellationTokenSource.Token,
            TaskContinuationOptions.None,
            TaskScheduler.FromCurrentSynchronizationContext());
}
/// <summary>
/// Runs after the reload delay: refreshes the reference's snapshot by removing it
/// (without disposing), updating it, and re-adding it, so hosts see the new contents.
/// </summary>
private void OnImportChangedAfterDelay(Task previous, object state)
{
    AssertIsForeground();
    var reference = (VisualStudioMetadataReference)state;
    _changedReferencesPendingUpdate.Remove(reference);
    // Ensure that we are still referencing this binary
    if (_metadataReferences.Contains(reference))
    {
        // remove the old metadata reference
        this.RemoveMetadataReferenceCore(reference, disposeReference: false);
        // Signal to update the underlying reference snapshot
        reference.UpdateSnapshot();
        // add it back (it will now be based on the new file contents)
        this.AddMetadataReferenceCore(reference);
    }
}
/// <summary>
/// Called when an analyzer assembly changes on disk; re-registers it so the new
/// contents are picked up. The work is posted to the dispatcher — see below.
/// </summary>
private void OnAnalyzerChanged(object sender, EventArgs e)
{
    // Postpone handler's actions to prevent deadlock. This AnalyzeChanged event can
    // be invoked while the FileChangeService lock is held, and VisualStudioAnalyzer's
    // efforts to listen to file changes can lead to a deadlock situation.
    // Postponing the VisualStudioAnalyzer operations gives this thread the opportunity
    // to release the lock.
    Dispatcher.CurrentDispatcher.BeginInvoke(new Action(() => {
        VisualStudioAnalyzer analyzer = (VisualStudioAnalyzer)sender;
        // Remove-then-add re-registers the analyzer so the new file contents are used.
        RemoveAnalyzerReference(analyzer.FullPath);
        AddAnalyzerReference(analyzer.FullPath);
    }));
}
// Internal for unit testing
/// <summary>
/// Adds a project-to-project reference if it passes <see cref="CanAddProjectReference"/>
/// (no self-reference, no duplicate, no cycle), then notifies workspace hosts.
/// </summary>
internal void AddProjectReference(ProjectReference projectReference)
{
    // dev11 is sometimes calling us multiple times for the same data
    if (!CanAddProjectReference(projectReference))
    {
        return;
    }
    // always manipulate current state after workspace is told so it will correctly observe the initial state
    // NOTE(review): the code below updates _projectReferences BEFORE notifying hosts,
    // which appears to contradict this comment — confirm intended ordering.
    _projectReferences.Add(projectReference);
    if (_pushingChangesToWorkspaceHosts)
    {
        // This project is already pushed to listening workspace hosts, but it's possible that our target
        // project hasn't been yet. Get the dependent project into the workspace as well.
        var targetProject = this.ProjectTracker.GetProject(projectReference.ProjectId);
        this.ProjectTracker.StartPushingToWorkspaceAndNotifyOfOpenDocuments(SpecializedCollections.SingletonEnumerable(targetProject));
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnProjectReferenceAdded(this.Id, projectReference));
    }
}
/// <summary>
/// Determines whether the given project reference may be added: rejects self-references,
/// duplicates, and references that would create a cycle through a known project.
/// </summary>
protected bool CanAddProjectReference(ProjectReference projectReference)
{
    if (projectReference.ProjectId == this.Id)
    {
        // A project can never reference itself.
        return false;
    }
    if (_projectReferences.Contains(projectReference))
    {
        // Duplicate — we already hold this exact reference.
        return false;
    }
    // If the target project is known to the tracker, make sure adding the edge
    // would not close a reference cycle back to us.
    var targetProject = this.ProjectTracker.GetProject(projectReference.ProjectId);
    return targetProject == null || !targetProject.TransitivelyReferences(this.Id);
}
/// <summary>True when this project references the given project directly or transitively.</summary>
private bool TransitivelyReferences(ProjectId projectId)
    => TransitivelyReferencesWorker(projectId, new HashSet<ProjectId>());
/// <summary>
/// Depth-first search over the project-reference graph looking for <paramref name="projectId"/>.
/// The <paramref name="visited"/> set guards against revisiting projects (the graph may
/// already contain cycles from older data).
/// </summary>
private bool TransitivelyReferencesWorker(ProjectId projectId, HashSet<ProjectId> visited)
{
    visited.Add(this.Id);
    foreach (var reference in _projectReferences)
    {
        if (reference.ProjectId == projectId)
        {
            return true;
        }
        if (visited.Contains(reference.ProjectId))
        {
            continue;
        }
        var referencedProject = this.ProjectTracker.GetProject(reference.ProjectId);
        if (referencedProject != null &&
            referencedProject.TransitivelyReferencesWorker(projectId, visited))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Removes a project-to-project reference (which must be present) and notifies
/// workspace hosts when we're pushing changes.
/// </summary>
protected void RemoveProjectReference(ProjectReference projectReference)
{
    // Failing to find the reference here indicates a bookkeeping bug.
    Contract.ThrowIfFalse(_projectReferences.Remove(projectReference));
    if (!_pushingChangesToWorkspaceHosts)
    {
        return;
    }
    this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnProjectReferenceRemoved(this.Id, projectReference));
}
/// <summary>
/// Shared handler for a source document being opened: notifies hosts when the owning
/// project is already pushed; otherwise uses the open as the trigger to start pushing.
/// </summary>
private static void OnDocumentOpened(object sender, bool isCurrentContext)
{
    var document = (IVisualStudioHostDocument)sender;
    var owningProject = (AbstractProject)document.Project;
    if (!owningProject._pushingChangesToWorkspaceHosts)
    {
        StartPushingToWorkspaceAndNotifyOfOpenDocuments(owningProject);
        return;
    }
    owningProject.ProjectTracker.NotifyWorkspaceHosts(
        host => host.OnDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
}
/// <summary>
/// Shared handler for a source document closing: notifies hosts only when the owning
/// project is currently pushing changes.
/// </summary>
private static void OnDocumentClosing(object sender, bool updateActiveContext)
{
    var document = (IVisualStudioHostDocument)sender;
    var owningProject = (AbstractProject)document.Project;
    if (!owningProject._pushingChangesToWorkspaceHosts)
    {
        return;
    }
    owningProject.ProjectTracker.NotifyWorkspaceHosts(
        host => host.OnDocumentClosed(document.Id, document.GetOpenTextBuffer(), document.Loader, updateActiveContext));
}
/// <summary>
/// Shared handler for a source document's on-disk text changing: forwards to hosts
/// when the owning project is pushing changes.
/// </summary>
private static void OnDocumentUpdatedOnDisk(object sender, EventArgs e)
{
    var document = (IVisualStudioHostDocument)sender;
    var owningProject = (AbstractProject)document.Project;
    if (!owningProject._pushingChangesToWorkspaceHosts)
    {
        return;
    }
    owningProject.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentTextUpdatedOnDisk(document.Id));
}
/// <summary>
/// Shared handler for an additional (non-source) document being opened: notifies hosts
/// when pushing; otherwise uses the open as the trigger to start pushing the project.
/// </summary>
private static void OnAdditionalDocumentOpened(object sender, bool isCurrentContext)
{
    var document = (IVisualStudioHostDocument)sender;
    var owningProject = (AbstractProject)document.Project;
    if (!owningProject._pushingChangesToWorkspaceHosts)
    {
        StartPushingToWorkspaceAndNotifyOfOpenDocuments(owningProject);
        return;
    }
    owningProject.ProjectTracker.NotifyWorkspaceHosts(
        host => host.OnAdditionalDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
}
/// <summary>
/// Shared handler for an additional (non-source) document closing: notifies hosts
/// only when the owning project is pushing changes. The bool payload is unused here.
/// </summary>
private static void OnAdditionalDocumentClosing(object sender, bool notUsed)
{
    var document = (IVisualStudioHostDocument)sender;
    var owningProject = (AbstractProject)document.Project;
    if (!owningProject._pushingChangesToWorkspaceHosts)
    {
        return;
    }
    owningProject.ProjectTracker.NotifyWorkspaceHosts(
        host => host.OnAdditionalDocumentClosed(document.Id, document.GetOpenTextBuffer(), document.Loader));
}
/// <summary>
/// Shared handler for an additional document's on-disk text changing: forwards to hosts
/// when the owning project is pushing changes.
/// </summary>
private static void OnAdditionalDocumentUpdatedOnDisk(object sender, EventArgs e)
{
    var document = (IVisualStudioHostDocument)sender;
    var owningProject = (AbstractProject)document.Project;
    if (!owningProject._pushingChangesToWorkspaceHosts)
    {
        return;
    }
    owningProject.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentTextUpdatedOnDisk(document.Id));
}
/// <summary>
/// Adds a source file to the project. Files the document provider can't produce a
/// document for (e.g. open in an unsupported editor) are recorded as untracked instead.
/// </summary>
protected void AddFile(string filename, SourceCodeKind sourceCodeKind, Func<IVisualStudioHostDocument, bool> getIsCurrentContext, IReadOnlyList<string> folderNames)
{
    var document = this.DocumentProvider.TryGetDocumentForFile(
        this,
        folderNames,
        filePath: filename,
        sourceCodeKind: sourceCodeKind,
        canUseTextBuffer: CanUseTextBuffer);
    if (document == null)
    {
        // It's possible this file is open in some very strange editor. In that case, we'll just ignore it.
        // This might happen if somebody decides to mark a non-source-file as something to compile.
        // TODO: Venus does this for .aspx/.cshtml files which is completely unnecessary for Roslyn. We should remove that code.
        AddUntrackedFile(filename);
        return;
    }
    AddDocument(document, getIsCurrentContext(document));
}
/// <summary>
/// Whether the given text buffer is acceptable for hosting a document. The base class
/// accepts any buffer; derived projects may override to be more selective.
/// </summary>
protected virtual bool CanUseTextBuffer(ITextBuffer textBuffer) => true;
/// <summary>Records a file the project contains but we deliberately do not track as source.</summary>
protected void AddUntrackedFile(string filename) => _untrackedDocuments.Add(filename);
/// <summary>
/// Removes a file from the project: untracked files are simply forgotten; tracked
/// documents are removed via <see cref="RemoveDocument"/>.
/// </summary>
/// <exception cref="InvalidOperationException">
/// The file is neither untracked nor a known document of this project.
/// </exception>
protected void RemoveFile(string filename)
{
    // Remove this as an untracked file, if it is
    if (_untrackedDocuments.Remove(filename))
    {
        return;
    }
    IVisualStudioHostDocument document = this.GetCurrentDocumentFromPath(filename);
    if (document == null)
    {
        // Fix: the previous message read "finalProject" — an artifact of an over-eager rename.
        throw new InvalidOperationException("The document is not a part of the project.");
    }
    RemoveDocument(document);
}
/// <summary>
/// Registers a source document with the project, notifies hosts (including an open
/// notification if the document is already open), and hooks its lifecycle events.
/// </summary>
internal void AddDocument(IVisualStudioHostDocument document, bool isCurrentContext)
{
    // We do not want to allow message pumping/reentrancy when processing project system changes.
    using (Dispatcher.CurrentDispatcher.DisableProcessing())
    {
        _documents.Add(document.Id, document);
        _documentMonikers.Add(document.Key.Moniker, document);
        if (_pushingChangesToWorkspaceHosts)
        {
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentAdded(document.GetInitialState()));
            if (document.IsOpen)
            {
                this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
            }
        }
        // Subscribe the shared (static) handlers; the sender identifies the document.
        document.Opened += s_documentOpenedEventHandler;
        document.Closing += s_documentClosingEventHandler;
        document.UpdatedOnDisk += s_documentUpdatedOnDiskEventHandler;
        DocumentProvider.NotifyDocumentRegisteredToProject(document);
        // An already-open document on a not-yet-pushed project triggers the initial push.
        if (!_pushingChangesToWorkspaceHosts && document.IsOpen)
        {
            StartPushingToWorkspaceAndNotifyOfOpenDocuments();
        }
    }
}
/// <summary>
/// Unregisters a source document from the project, uninitializes it, and raises
/// the removal notification.
/// </summary>
internal void RemoveDocument(IVisualStudioHostDocument document)
{
    // We do not want to allow message pumping/reentrancy when processing project system changes.
    using (Dispatcher.CurrentDispatcher.DisableProcessing())
    {
        var moniker = document.Key.Moniker;
        _documents.Remove(document.Id);
        _documentMonikers.Remove(moniker);
        UninitializeDocument(document);
        OnDocumentRemoved(moniker);
    }
}
/// <summary>
/// Registers an additional (non-source) document, mirroring <see cref="AddDocument"/>.
/// NOTE(review): unlike AddDocument, this does not wrap its work in
/// Dispatcher.DisableProcessing — confirm whether that asymmetry is intentional.
/// </summary>
internal void AddAdditionalDocument(IVisualStudioHostDocument document, bool isCurrentContext)
{
    _additionalDocuments.Add(document.Id, document);
    _documentMonikers.Add(document.Key.Moniker, document);
    if (_pushingChangesToWorkspaceHosts)
    {
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentAdded(document.GetInitialState()));
        if (document.IsOpen)
        {
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
        }
    }
    // Subscribe the shared (static) handlers; the sender identifies the document.
    document.Opened += s_additionalDocumentOpenedEventHandler;
    document.Closing += s_additionalDocumentClosingEventHandler;
    document.UpdatedOnDisk += s_additionalDocumentUpdatedOnDiskEventHandler;
    DocumentProvider.NotifyDocumentRegisteredToProject(document);
    // An already-open document on a not-yet-pushed project triggers the initial push.
    if (!_pushingChangesToWorkspaceHosts && document.IsOpen)
    {
        StartPushingToWorkspaceAndNotifyOfOpenDocuments();
    }
}
/// <summary>
/// Unregisters an additional (non-source) document and uninitializes it.
/// Note: unlike RemoveDocument, no OnDocumentRemoved notification is raised here.
/// </summary>
internal void RemoveAdditionalDocument(IVisualStudioHostDocument document)
{
    _additionalDocuments.Remove(document.Id);
    _documentMonikers.Remove(document.Key.Moniker);
    UninitializeAdditionalDocument(document);
}
/// <summary>
/// Registers a generated source document. Mirrors <see cref="AddDocument"/> except it
/// does not raise OnDocumentAdded to hosts — only the open notification is sent.
/// </summary>
private void AddGeneratedDocument(IVisualStudioHostDocument document, bool isCurrentContext)
{
    // We do not want to allow message pumping/reentrancy when processing project system changes.
    using (Dispatcher.CurrentDispatcher.DisableProcessing())
    {
        _documents.Add(document.Id, document);
        _documentMonikers.Add(document.Key.Moniker, document);
        if (_pushingChangesToWorkspaceHosts)
        {
            if (document.IsOpen)
            {
                this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
            }
        }
        // Subscribe the shared (static) handlers; the sender identifies the document.
        document.Opened += s_documentOpenedEventHandler;
        document.Closing += s_documentClosingEventHandler;
        document.UpdatedOnDisk += s_documentUpdatedOnDiskEventHandler;
        DocumentProvider.NotifyDocumentRegisteredToProject(document);
        // An already-open document on a not-yet-pushed project triggers the initial push.
        if (!_pushingChangesToWorkspaceHosts && document.IsOpen)
        {
            StartPushingToWorkspaceAndNotifyOfOpenDocuments();
        }
    }
}
/// <summary>
/// Unregisters a generated document; identical teardown to <see cref="RemoveDocument"/>.
/// </summary>
private void RemoveGeneratedDocument(IVisualStudioHostDocument document)
{
    // We do not want to allow message pumping/reentrancy when processing project system changes.
    using (Dispatcher.CurrentDispatcher.DisableProcessing())
    {
        var moniker = document.Key.Moniker;
        _documents.Remove(document.Id);
        _documentMonikers.Remove(moniker);
        UninitializeDocument(document);
        OnDocumentRemoved(moniker);
    }
}
/// <summary>
/// Tears the project down: cancels pending reference reloads, uninitializes documents,
/// disposes references and analyzers, clears errors, and removes the project from the
/// tracker. The push flag is toggled carefully — see the inline comments.
/// </summary>
public virtual void Disconnect()
{
    AssertIsForeground();
    using (Workspace?.Services.GetService<IGlobalOperationNotificationService>()?.Start("Disconnect Project"))
    {
        // No sense in reloading any metadata references anymore.
        foreach (var cancellationTokenSource in _changedReferencesPendingUpdate.Values)
        {
            cancellationTokenSource.Cancel();
        }
        _changedReferencesPendingUpdate.Clear();
        var wasPushing = _pushingChangesToWorkspaceHosts;
        // disable pushing down to workspaces, so we don't get redundant workspace document removed events
        _pushingChangesToWorkspaceHosts = false;
        // The project is going away, so let's remove ourselves from the host. First, we
        // close and dispose of any remaining documents
        foreach (var document in this.GetCurrentDocuments())
        {
            UninitializeDocument(document);
        }
        foreach (var document in this.GetCurrentAdditionalDocuments())
        {
            UninitializeAdditionalDocument(document);
        }
        // Dispose metadata references.
        foreach (var reference in _metadataReferences)
        {
            reference.Dispose();
        }
        foreach (var analyzer in _analyzers.Values)
        {
            analyzer.Dispose();
        }
        // Make sure we clear out any external errors left when closing the project.
        ExternalErrorReporter?.ClearAllErrors();
        // Make sure we clear out any host errors left when closing the project.
        HostDiagnosticUpdateSource?.ClearAllDiagnosticsForProject(this.Id);
        ClearAnalyzerRuleSet();
        // reinstate pushing down to workspace, so the workspace project remove event fires
        _pushingChangesToWorkspaceHosts = wasPushing;
        this.ProjectTracker.RemoveProject(this);
        // Permanently stop pushing; this project instance is dead from here on.
        _pushingChangesToWorkspaceHosts = false;
        this.EditAndContinueImplOpt = null;
    }
}
/// <summary>
/// Called when another project's output path becomes known: if we currently hold a
/// plain metadata reference to that path, convert it into a project-to-project
/// reference (preserving aliases/embed-interop settings) and record the conversion.
/// </summary>
internal void TryProjectConversionForIntroducedOutputPath(string binPath, AbstractProject projectToReference)
{
    if (this.CanConvertToProjectReferences)
    {
        // We should not already have references for this, since we're only introducing the path for the first time
        Contract.ThrowIfTrue(_metadataFileNameToConvertedProjectReference.ContainsKey(binPath));
        var metadataReference = TryGetCurrentMetadataReference(binPath);
        if (metadataReference != null)
        {
            var projectReference = new ProjectReference(
                projectToReference.Id,
                metadataReference.Properties.Aliases,
                metadataReference.Properties.EmbedInteropTypes);
            if (CanAddProjectReference(projectReference))
            {
                RemoveMetadataReferenceCore(metadataReference, disposeReference: true);
                AddProjectReference(projectReference);
                // Record the conversion so it can be undone if the output path disappears.
                _metadataFileNameToConvertedProjectReference.Add(binPath, projectReference);
            }
        }
    }
}
/// <summary>
/// Reverses a conversion made by <see cref="TryProjectConversionForIntroducedOutputPath"/>: when
/// the referenced project's output path goes away, the project reference is replaced with a plain
/// metadata (file) reference to <paramref name="binPath"/>, preserving aliases and the
/// embed-interop setting. No-op if no conversion was recorded for this path.
/// </summary>
internal void UndoProjectReferenceConversionForDisappearingOutputPath(string binPath)
{
ProjectReference projectReference;
if (_metadataFileNameToConvertedProjectReference.TryGetValue(binPath, out projectReference))
{
// We converted this, so convert it back to a metadata reference
RemoveProjectReference(projectReference);
var metadataReferenceProperties = new MetadataReferenceProperties(
MetadataImageKind.Assembly,
projectReference.Aliases,
projectReference.EmbedInteropTypes);
AddMetadataReferenceCore(MetadataReferenceProvider.CreateMetadataReference(this, binPath, metadataReferenceProperties));
// The record must exist: we just retrieved it above, so Remove is expected to succeed.
Contract.ThrowIfFalse(_metadataFileNameToConvertedProjectReference.Remove(binPath));
}
}
/// <summary>
/// Applies new extern aliases to the reference for <paramref name="file"/>. If the file reference
/// was previously converted to a project reference, the aliases are applied to that project
/// reference instead; otherwise the metadata reference is removed and re-added with the new
/// aliases.
/// </summary>
protected void UpdateMetadataReferenceAliases(string file, ImmutableArray<string> aliases)
{
file = FileUtilities.NormalizeAbsolutePath(file);
// Have we converted these to project references?
ProjectReference convertedProjectReference;
if (_metadataFileNameToConvertedProjectReference.TryGetValue(file, out convertedProjectReference))
{
var project = ProjectTracker.GetProject(convertedProjectReference.ProjectId);
UpdateProjectReferenceAliases(project, aliases);
}
else
{
var existingReference = TryGetCurrentMetadataReference(file);
Contract.ThrowIfNull(existingReference);
// Replace the existing reference with one carrying the new aliases.
var newProperties = existingReference.Properties.WithAliases(aliases);
RemoveMetadataReferenceCore(existingReference, disposeReference: true);
AddMetadataReferenceCore(this.MetadataReferenceProvider.CreateMetadataReference(this, file, newProperties));
}
}
/// <summary>
/// Replaces the project reference to <paramref name="referencedProject"/> with one carrying the
/// given aliases, keeping the embed-interop setting. If this reference originated from a
/// metadata-to-project conversion, the conversion bookkeeping is updated as well.
/// </summary>
protected void UpdateProjectReferenceAliases(AbstractProject referencedProject, ImmutableArray<string> aliases)
{
// Exactly one reference to the given project is expected.
var projectReference = GetCurrentProjectReferences().Single(r => r.ProjectId == referencedProject.Id);
var newProjectReference = new ProjectReference(referencedProject.Id, aliases, projectReference.EmbedInteropTypes);
// Is this a project with converted references? If so, make sure we track it
string referenceBinPath = referencedProject.TryGetBinOutputPath();
if (referenceBinPath != null && _metadataFileNameToConvertedProjectReference.ContainsKey(referenceBinPath))
{
_metadataFileNameToConvertedProjectReference[referenceBinPath] = newProjectReference;
}
// Remove the existing reference first
RemoveProjectReference(projectReference);
AddProjectReference(newProjectReference);
}
/// <summary>
/// Tears down a document leaving the project: notifies workspace hosts (only while pushing is
/// enabled), unhooks the shared event handlers, and disposes the document.
/// </summary>
private void UninitializeDocument(IVisualStudioHostDocument document)
{
if (_pushingChangesToWorkspaceHosts)
{
// If the document is still open, report it closed before reporting it removed.
if (document.IsOpen)
{
this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentClosed(document.Id, document.GetOpenTextBuffer(), document.Loader, updateActiveContext: true));
}
this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentRemoved(document.Id));
}
// Detach the statically-shared handlers so the document no longer raises into this project.
document.Opened -= s_documentOpenedEventHandler;
document.Closing -= s_documentClosingEventHandler;
document.UpdatedOnDisk -= s_documentUpdatedOnDiskEventHandler;
document.Dispose();
}
/// <summary>
/// Same teardown as <see cref="UninitializeDocument"/>, but for additional (non-source)
/// documents: host notification while pushing, handler unhook, then dispose.
/// </summary>
private void UninitializeAdditionalDocument(IVisualStudioHostDocument document)
{
if (_pushingChangesToWorkspaceHosts)
{
// If the document is still open, report it closed before reporting it removed.
if (document.IsOpen)
{
this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentClosed(document.Id, document.GetOpenTextBuffer(), document.Loader));
}
this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentRemoved(document.Id));
}
// Detach the statically-shared handlers for additional documents.
document.Opened -= s_additionalDocumentOpenedEventHandler;
document.Closing -= s_additionalDocumentClosingEventHandler;
document.UpdatedOnDisk -= s_additionalDocumentUpdatedOnDiskEventHandler;
document.Dispose();
}
/// <summary>
/// Extension point for derived project types (named for document removal); the base
/// implementation intentionally does nothing.
/// </summary>
protected virtual void OnDocumentRemoved(string filePath)
{
}
/// <summary>
/// Turns on propagation of project/document changes to the workspace hosts.
/// </summary>
internal void StartPushingToWorkspaceHosts() => _pushingChangesToWorkspaceHosts = true;
/// <summary>
/// Turns off propagation of project/document changes to the workspace hosts.
/// </summary>
internal void StopPushingToWorkspaceHosts() => _pushingChangesToWorkspaceHosts = false;
/// <summary>
/// Starts pushing changes for this project; delegates to the static overload that routes
/// through the project tracker.
/// </summary>
internal void StartPushingToWorkspaceAndNotifyOfOpenDocuments() => StartPushingToWorkspaceAndNotifyOfOpenDocuments(this);
/// <summary>
/// Whether changes are currently being pushed to the workspace hosts.
/// </summary>
internal bool PushingChangesToWorkspaceHosts => _pushingChangesToWorkspaceHosts;
/// <summary>
/// Publishes or clears the "error reading ruleset" diagnostic for this project, depending on
/// whether the ruleset file loaded with an exception.
/// </summary>
protected void UpdateRuleSetError(IRuleSetFile ruleSetFile)
{
// No diagnostic sink available - nothing to report to.
if (this.HostDiagnosticUpdateSource == null)
{
return;
}
if (ruleSetFile == null ||
ruleSetFile.GetException() == null)
{
// No ruleset, or it loaded cleanly: remove any previously-reported error.
this.HostDiagnosticUpdateSource.ClearDiagnosticsForProject(this.Id, RuleSetErrorId);
}
else
{
// Surface the load failure as a project diagnostic built from the path and exception message.
var messageArguments = new string[] { ruleSetFile.FilePath, ruleSetFile.GetException().Message };
DiagnosticData diagnostic;
if (DiagnosticData.TryCreate(_errorReadingRulesetRule, messageArguments, this.Id, this.Workspace, out diagnostic))
{
this.HostDiagnosticUpdateSource.UpdateDiagnosticsForProject(this.Id, RuleSetErrorId, SpecializedCollections.SingletonEnumerable(diagnostic));
}
}
}
/// <summary>
/// Records a new intermediate (obj) output path and refreshes everything derived from it: the
/// metadata reference resolver, the assembly name, and the final (bin) output path. Workspace
/// hosts are notified of each individual change only while pushing is enabled.
/// </summary>
/// <param name="objOutputPath">New intermediate output path; only honored when absolute.</param>
/// <param name="hasSameBinAndObjOutputPaths">When true, the bin path is taken to equal
/// <paramref name="objOutputPath"/> rather than being queried from the hierarchy.</param>
protected void SetOutputPathAndRelatedData(string objOutputPath, bool hasSameBinAndObjOutputPaths = false)
{
if (PathUtilities.IsAbsolute(objOutputPath) && !string.Equals(_objOutputPathOpt, objOutputPath, StringComparison.OrdinalIgnoreCase))
{
// set obj output path if changed
_objOutputPathOpt = objOutputPath;
// Workspace can be null for tests.
if (this.Workspace != null)
{
// Rebuild the resolver so references resolve against the new output directory.
CurrentCompilationOptions = CurrentCompilationOptions.WithMetadataReferenceResolver(CreateMetadataReferenceResolver(
metadataService: this.Workspace.Services.GetService<IMetadataService>(),
projectDirectory: this.ContainingDirectoryPathOpt,
outputDirectory: Path.GetDirectoryName(_objOutputPathOpt)));
}
if (_pushingChangesToWorkspaceHosts)
{
this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnOptionsChanged(this.Id, CurrentCompilationOptions, CurrentParseOptions));
this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnOutputFilePathChanged(this.Id, _objOutputPathOpt));
}
}
// set assembly name if changed
// we use designTimeOutputPath to get assembly name since it is more reliable way to get the assembly name.
// otherwise, friend assembly all get messed up.
var newAssemblyName = GetAssemblyName(_objOutputPathOpt ?? this.ProjectSystemName);
if (!string.Equals(AssemblyName, newAssemblyName, StringComparison.Ordinal))
{
AssemblyName = newAssemblyName;
if (_pushingChangesToWorkspaceHosts)
{
this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAssemblyNameChanged(this.Id, AssemblyName));
}
}
// refresh final output path
string newBinOutputPath;
if (hasSameBinAndObjOutputPaths)
{
newBinOutputPath = objOutputPath;
}
else if (!TryGetOutputPathFromHierarchy(out newBinOutputPath))
{
newBinOutputPath = null;
}
if (newBinOutputPath != null && !string.Equals(_binOutputPathOpt, newBinOutputPath, StringComparison.OrdinalIgnoreCase))
{
string oldBinOutputPath = _binOutputPathOpt;
// set bin output path if changed
_binOutputPathOpt = newBinOutputPath;
// If the project has been hooked up with the project tracker, then update the bin path with the tracker.
if (this.ProjectTracker.GetProject(Id) != null)
{
this.ProjectTracker.UpdateProjectBinPath(this, oldBinOutputPath, _binOutputPathOpt);
}
}
}
/// <summary>
/// Updates only the display name; the project file path is left unchanged.
/// </summary>
protected void UpdateProjectDisplayName(string newDisplayName) => UpdateProjectDisplayNameAndFilePath(newDisplayName, newFilePath: null);
/// <summary>
/// Updates only the project file path; the display name is left unchanged.
/// </summary>
protected void UpdateProjectFilePath(string newFilePath) => UpdateProjectDisplayNameAndFilePath(newDisplayName: null, newFilePath: newFilePath);
/// <summary>
/// Applies a new display name and/or project file path; a null argument means "leave unchanged".
/// A new file path is accepted only when the file exists on disk. If anything actually changed
/// and pushing is enabled, a single rename notification is raised to the workspace hosts.
/// </summary>
protected void UpdateProjectDisplayNameAndFilePath(string newDisplayName, string newFilePath)
{
    var anyChange = false;

    // Display name: take any non-null value that differs from the current one.
    if (newDisplayName != null && !string.Equals(this.DisplayName, newDisplayName))
    {
        this.DisplayName = newDisplayName;
        anyChange = true;
    }

    // File path: only take it if it points at an existing file and differs from the cached value.
    if (newFilePath != null && File.Exists(newFilePath) && _filePathOpt != newFilePath)
    {
        Debug.Assert(PathUtilities.IsAbsolute(newFilePath));
        _filePathOpt = newFilePath;
        anyChange = true;
    }

    if (anyChange && _pushingChangesToWorkspaceHosts)
    {
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnProjectNameChanged(Id, this.DisplayName, _filePathOpt));
    }
}
/// <summary>
/// Static worker for <see cref="StartPushingToWorkspaceAndNotifyOfOpenDocuments()"/>: asks the
/// project tracker to start pushing for just this one project.
/// </summary>
private static void StartPushingToWorkspaceAndNotifyOfOpenDocuments(AbstractProject project)
{
// If a document is opened in a project but we haven't started pushing yet, we want to stop doing lazy
// loading for this project and get it up to date so the user gets a fast experience there. If the file
// was presented as open to us right away, then we'll never do this in OnDocumentOpened, so we should do
// it here. It's important to do this after everything else happens in this method, so we don't get
// strange ordering issues. It's still possible that this won't actually push changes if the workspace
// host isn't ready to receive events yet.
project.ProjectTracker.StartPushingToWorkspaceAndNotifyOfOpenDocuments(SpecializedCollections.SingletonEnumerable(project));
}
/// <summary>
/// Builds a metadata reference resolver whose assembly search paths are the project directory
/// followed by the output directory, skipping whichever of the two is null. Relative paths
/// resolve against the project directory (which may itself be null).
/// </summary>
private static MetadataReferenceResolver CreateMetadataReferenceResolver(IMetadataService metadataService, string projectDirectory, string outputDirectory)
{
    // Collect the non-null directories in the same order the search should use.
    var searchPaths = ImmutableArray.CreateBuilder<string>(2);
    if (projectDirectory != null)
    {
        searchPaths.Add(projectDirectory);
    }

    if (outputDirectory != null)
    {
        searchPaths.Add(outputDirectory);
    }

    return new WorkspaceMetadataFileReferenceResolver(
        metadataService,
        new RelativePathResolver(searchPaths.ToImmutable(), baseDirectory: projectDirectory));
}
// Implemented by derived project types: attempts to obtain the final (bin) output path,
// returning false when none is available (see its use in SetOutputPathAndRelatedData).
protected abstract bool TryGetOutputPathFromHierarchy(out string binOutputPath);
#if DEBUG
/// <summary>
/// Debug-only diagnostic flag; the base implementation always reports false. Presumably
/// overridden by VB projects to report the embedded-core option — confirm at the overrides.
/// </summary>
public virtual bool Debug_VBEmbeddedCoreOptionOn => false;
#endif
/// <summary>
/// Used for unit testing: don't crash the process if something bad happens.
/// </summary>
/// <remarks>
/// When true (the default), <see cref="FilterException"/> reports exceptions via
/// FatalError.Report before declining to catch them.
/// </remarks>
internal static bool CrashOnException = true;
/// <summary>
/// Exception filter for catch clauses: when <see cref="CrashOnException"/> is set (the default),
/// the exception is reported via FatalError.Report. Always returns false, so the exception is
/// never actually caught by the filtering clause.
/// </summary>
protected static bool FilterException(Exception e)
{
    if (!CrashOnException)
    {
        // Unit-test mode: let the exception propagate without reporting.
        return false;
    }

    FatalError.Report(e);
    return false;
}
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="XmlSchemaObjectTable.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <owner current="true" primary="true">[....]</owner>
//------------------------------------------------------------------------------
namespace System.Xml.Schema {
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
/// <include file='doc\XmlSchemaObjectTable.uex' path='docs/doc[@for="XmlSchemaObjectTable"]/*' />
public class XmlSchemaObjectTable {
// Two parallel stores kept in sync: 'table' gives O(1) lookup by qualified name, while
// 'entries' preserves insertion order for the Names/Values/enumerator views.
Dictionary<XmlQualifiedName, XmlSchemaObject> table = new Dictionary<XmlQualifiedName,XmlSchemaObject>();
List<XmlSchemaObjectEntry> entries = new List<XmlSchemaObjectEntry>();
internal XmlSchemaObjectTable() {
}
// Adds a new (name, value) pair; the name must not already be present (debug-asserted only).
internal void Add(XmlQualifiedName name, XmlSchemaObject value) {
Debug.Assert(!table.ContainsKey(name), "XmlSchemaObjectTable.Add: entry already exists");
table.Add(name, value);
entries.Add(new XmlSchemaObjectEntry(name, value));
}
// Adds the pair, or overwrites the existing value in place (the entry keeps its list position).
internal void Insert(XmlQualifiedName name, XmlSchemaObject value) {
XmlSchemaObject oldValue = null;
if (table.TryGetValue(name, out oldValue)) {
table[name] = value; //set new value
Debug.Assert(oldValue != null);
// Find the old object's slot by reference so the replacement keeps its position.
int matchedIndex = FindIndexByValue(oldValue);
Debug.Assert(matchedIndex >= 0);
//set new entry
Debug.Assert(entries[matchedIndex].qname == name);
entries[matchedIndex] = new XmlSchemaObjectEntry(name, value);
}
else {
Add(name, value);
}
}
// Overwrites an existing value in place; unlike Insert, does nothing if the name is absent.
internal void Replace(XmlQualifiedName name, XmlSchemaObject value) {
XmlSchemaObject oldValue;
if (table.TryGetValue(name, out oldValue)) {
table[name] = value; //set new value
Debug.Assert(oldValue != null);
int matchedIndex = FindIndexByValue(oldValue);
Debug.Assert(entries[matchedIndex].qname == name);
entries[matchedIndex] = new XmlSchemaObjectEntry(name, value);
}
}
// Empties both the lookup table and the ordered entry list.
internal void Clear() {
table.Clear();
entries.Clear();
}
// Removes the pair (if present) from both stores.
internal void Remove(XmlQualifiedName name) {
XmlSchemaObject value;
if (table.TryGetValue(name, out value)) {
table.Remove(name);
int matchedIndex = FindIndexByValue(value);
Debug.Assert(matchedIndex >= 0);
Debug.Assert(entries[matchedIndex].qname == name);
entries.RemoveAt(matchedIndex);
}
}
// Linear scan for the entry holding exactly this object (reference equality); -1 if not found.
private int FindIndexByValue(XmlSchemaObject xso) {
int index;
for(index = 0; index < entries.Count; index++) {
if((object)entries[index].xso == (object)xso) {
return index;
}
}
return -1;
}
/// <include file='doc\XmlSchemaObjectTable.uex' path='docs/doc[@for="XmlSchemaObjectTable.Count"]/*' />
public int Count {
get {
// Invariant: both stores always hold the same number of items.
Debug.Assert(table.Count == entries.Count);
return table.Count;
}
}
/// <include file='doc\XmlSchemaObjectTable.uex' path='docs/doc[@for="XmlSchemaObjectTable.Contains"]/*' />
public bool Contains(XmlQualifiedName name) {
return table.ContainsKey(name);
}
/// <include file='doc\XmlSchemaObjectTable.uex' path='docs/doc[@for="XmlSchemaObjectTable.this"]/*' />
// Indexer returns null (rather than throwing) when the name is not present.
public XmlSchemaObject this[XmlQualifiedName name] {
get {
XmlSchemaObject value;
if (table.TryGetValue(name, out value)) {
return value;
}
return null;
}
}
/// <include file='doc\XmlSchemaObjectTable.uex' path='docs/doc[@for="XmlSchemaObjectTable.Names"]/*' />
// Snapshot-sized view over the qualified names, in insertion order.
public ICollection Names {
get {
return new NamesCollection(entries, table.Count);
}
}
/// <include file='doc\XmlSchemaObjectTable.uex' path='docs/doc[@for="XmlSchemaObjectTable.Values"]/*' />
// Snapshot-sized view over the schema objects, in insertion order.
public ICollection Values {
get {
return new ValuesCollection(entries, table.Count);
}
}
/// <include file='doc\XmlSchemaObjectTable.uex' path='docs/doc[@for="XmlSchemaObjectTable.GetEnumerator"]/*' />
public IDictionaryEnumerator GetEnumerator() {
return new XSODictionaryEnumerator(this.entries, table.Count, EnumeratorType.DictionaryEntry);
}
// Selects what the enumerators expose as Current: keys, values, or DictionaryEntry pairs.
internal enum EnumeratorType {
Keys,
Values,
DictionaryEntry,
}
// A (qualified name, schema object) pair stored in the ordered entry list.
internal struct XmlSchemaObjectEntry {
internal XmlQualifiedName qname;
internal XmlSchemaObject xso;
public XmlSchemaObjectEntry(XmlQualifiedName name, XmlSchemaObject value) {
qname = name;
xso = value;
}
// Returns the stored object when (localName, ns) matches this entry's name; otherwise null.
public XmlSchemaObject IsMatch(string localName, string ns) {
if (localName == qname.Name && ns == qname.Namespace) {
return xso;
}
return null;
}
public void Reset() {
qname = null;
xso = null;
}
}
// ICollection over the first 'size' qualified names in the entry list.
internal class NamesCollection : ICollection {
private List<XmlSchemaObjectEntry> entries;
int size;
internal NamesCollection(List<XmlSchemaObjectEntry> entries, int size) {
this.entries = entries;
this.size = size;
}
public int Count {
get { return size; }
}
public Object SyncRoot {
get {
return ((ICollection)entries).SyncRoot;
}
}
public bool IsSynchronized {
get {
return ((ICollection)entries).IsSynchronized;
}
}
public void CopyTo(Array array, int arrayIndex) {
if (array == null)
throw new ArgumentNullException("array");
if (arrayIndex < 0)
throw new ArgumentOutOfRangeException("arrayIndex");
Debug.Assert(array.Length >= size, "array is not big enough to hold all the items in the ICollection");
for (int i = 0; i < size; i++) {
array.SetValue(entries[i].qname, arrayIndex++);
}
}
public IEnumerator GetEnumerator() {
return new XSOEnumerator(this.entries, this.size, EnumeratorType.Keys);
}
}
//ICollection for Values
internal class ValuesCollection : ICollection {
private List<XmlSchemaObjectEntry> entries;
int size;
internal ValuesCollection(List<XmlSchemaObjectEntry> entries, int size) {
this.entries = entries;
this.size = size;
}
public int Count {
get { return size; }
}
public Object SyncRoot {
get {
return ((ICollection)entries).SyncRoot;
}
}
public bool IsSynchronized {
get {
return ((ICollection)entries).IsSynchronized;
}
}
public void CopyTo(Array array, int arrayIndex) {
if (array == null)
throw new ArgumentNullException("array");
if (arrayIndex < 0)
throw new ArgumentOutOfRangeException("arrayIndex");
Debug.Assert(array.Length >= size, "array is not big enough to hold all the items in the ICollection");
for (int i = 0; i < size; i++) {
array.SetValue(entries[i].xso, arrayIndex++);
}
}
public IEnumerator GetEnumerator() {
return new XSOEnumerator(this.entries, this.size, EnumeratorType.Values);
}
}
// Enumerator over the first 'size' entries; what Current returns depends on enumType.
internal class XSOEnumerator : IEnumerator {
private List<XmlSchemaObjectEntry> entries;
private EnumeratorType enumType;
// currentIndex is -1 before the first MoveNext and >= size once enumeration is finished.
protected int currentIndex;
protected int size;
protected XmlQualifiedName currentKey;
protected XmlSchemaObject currentValue;
internal XSOEnumerator(List<XmlSchemaObjectEntry> entries, int size, EnumeratorType enumType) {
this.entries = entries;
this.size = size;
this.enumType = enumType;
currentIndex = -1;
}
public Object Current {
get {
if (currentIndex == -1) {
throw new InvalidOperationException(Res.GetString(Res.Sch_EnumNotStarted, string.Empty));
}
if (currentIndex >= size) {
throw new InvalidOperationException(Res.GetString(Res.Sch_EnumFinished, string.Empty));
}
switch(enumType) {
case EnumeratorType.Keys:
return currentKey;
case EnumeratorType.Values:
return currentValue;
case EnumeratorType.DictionaryEntry:
return new DictionaryEntry(currentKey, currentValue);
default:
break;
}
return null;
}
}
public bool MoveNext() {
if (currentIndex >= size - 1) {
// Past the end: clear the cached pair and report completion.
currentValue = null;
currentKey = null;
return false;
}
currentIndex++;
currentValue = entries[currentIndex].xso;
currentKey = entries[currentIndex].qname;
return true;
}
public void Reset() {
currentIndex = -1;
currentValue = null;
currentKey = null;
}
}
// Adds the IDictionaryEnumerator surface (Entry/Key/Value) on top of XSOEnumerator.
internal class XSODictionaryEnumerator : XSOEnumerator, IDictionaryEnumerator {
internal XSODictionaryEnumerator(List<XmlSchemaObjectEntry> entries, int size, EnumeratorType enumType) : base(entries, size, enumType) {
}
//IDictionaryEnumerator members
public DictionaryEntry Entry {
get {
if (currentIndex == -1) {
throw new InvalidOperationException(Res.GetString(Res.Sch_EnumNotStarted, string.Empty));
}
if (currentIndex >= size) {
throw new InvalidOperationException(Res.GetString(Res.Sch_EnumFinished, string.Empty));
}
return new DictionaryEntry(currentKey, currentValue);
}
}
public object Key {
get {
if (currentIndex == -1) {
throw new InvalidOperationException(Res.GetString(Res.Sch_EnumNotStarted, string.Empty));
}
if (currentIndex >= size) {
throw new InvalidOperationException(Res.GetString(Res.Sch_EnumFinished, string.Empty));
}
return currentKey;
}
}
public object Value {
get {
if (currentIndex == -1) {
throw new InvalidOperationException(Res.GetString(Res.Sch_EnumNotStarted, string.Empty));
}
if (currentIndex >= size) {
throw new InvalidOperationException(Res.GetString(Res.Sch_EnumFinished, string.Empty));
}
return currentValue;
}
}
}
}
}
| |
// ***********************************************************************
// Copyright (c) 2007 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Collections;
using System.Reflection;
using NUnit.Framework.Api;
using NUnit.Framework.Extensibility;
using NUnit.Framework.Internal;
namespace NUnit.Framework.Builders
{
/// <summary>
/// Built-in SuiteBuilder for NUnit TestFixture
/// </summary>
public class NUnitTestFixtureBuilder : ISuiteBuilder
{
#region Static Fields
static readonly string NO_TYPE_ARGS_MSG =
"Fixture type contains generic parameters. You must either provide " +
"Type arguments or specify constructor arguments that allow NUnit " +
"to deduce the Type arguments.";
#endregion
#region Instance Fields
/// <summary>
/// The NUnitTestFixture being constructed;
/// </summary>
private TestFixture fixture;
#if NUNITLITE || true
private Extensibility.ITestCaseBuilder2 testBuilder = new NUnitTestCaseBuilder();
#else
private Extensibility.ITestCaseBuilder2 testBuilder = CoreExtensions.Host.TestBuilders;
private Extensibility.ITestDecorator testDecorators = CoreExtensions.Host.TestDecorators;
#endif
#endregion
#region ISuiteBuilder Methods
/// <summary>
/// Checks to see if the fixture type has the TestFixtureAttribute
/// </summary>
/// <param name="type">The fixture type to check</param>
/// <returns>True if the fixture can be built, false if not</returns>
public bool CanBuildFrom(Type type)
{
// Reject abstract types, but let static classes (abstract + sealed) through.
if ( type.IsAbstract && !type.IsSealed )
return false;
if (type.IsDefined(typeof(TestFixtureAttribute), true))
return true;
#if true
// Generics must have a TestFixtureAttribute
if (type.IsGenericTypeDefinition)
return false;
#endif
// Without [TestFixture], accept the type only if it contains test-marked methods.
#if NUNITLITE || true
return Reflect.HasMethodWithAttribute(type, typeof(NUnit.Framework.TestAttribute), true) ||
Reflect.HasMethodWithAttribute(type, typeof(NUnit.Framework.TestCaseAttribute), true) ||
Reflect.HasMethodWithAttribute(type, typeof(NUnit.Framework.TestCaseSourceAttribute), true);
#else
return Reflect.HasMethodWithAttribute(type, typeof(NUnit.Framework.TestAttribute), true) ||
Reflect.HasMethodWithAttribute(type, typeof(NUnit.Framework.TestCaseAttribute), true) ||
Reflect.HasMethodWithAttribute(type, typeof(NUnit.Framework.TestCaseSourceAttribute), true) ||
Reflect.HasMethodWithAttribute(type, typeof(NUnit.Framework.TheoryAttribute), true);
#endif
}
/// <summary>
/// Build a TestSuite from type provided.
/// </summary>
/// <param name="type"></param>
/// <returns></returns>
public Test BuildFrom(Type type)
{
TestFixtureAttribute[] attrs = GetTestFixtureAttributes(type);
#if true
if (type.IsGenericType)
return BuildMultipleFixtures(type, attrs);
#endif
// No attribute -> plain fixture; one attribute without args -> single fixture;
// otherwise build a parameterized suite containing one fixture per attribute.
switch (attrs.Length)
{
case 0:
return BuildSingleFixture(type, null);
case 1:
object[] args = (object[])attrs[0].Arguments;
return args == null || args.Length == 0
? BuildSingleFixture(type, attrs[0])
: BuildMultipleFixtures(type, attrs);
default:
return BuildMultipleFixtures(type, attrs);
}
}
#endregion
#region Helper Methods
// Wraps one fixture per attribute in a ParameterizedFixtureSuite; with no attributes at all
// (generic type lacking type args) the suite is marked not-runnable.
private Test BuildMultipleFixtures(Type type, TestFixtureAttribute[] attrs)
{
TestSuite suite = new ParameterizedFixtureSuite(type);
if (attrs.Length > 0)
{
foreach (TestFixtureAttribute attr in attrs)
suite.Add(BuildSingleFixture(type, attr));
}
else
{
suite.RunState = RunState.NotRunnable;
suite.Properties.Set(PropertyNames.SkipReason, NO_TYPE_ARGS_MSG);
}
return suite;
}
// Builds one TestFixture for the type, closing generic parameters from the attribute's
// TypeArgs (or deducing them from constructor arguments) when needed.
private Test BuildSingleFixture(Type type, TestFixtureAttribute attr)
{
object[] arguments = null;
if (attr != null)
{
arguments = (object[])attr.Arguments;
#if true
if (type.ContainsGenericParameters)
{
Type[] typeArgs = (Type[])attr.TypeArgs;
if( typeArgs.Length > 0 ||
TypeHelper.CanDeduceTypeArgsFromArgs(type, arguments, ref typeArgs))
{
type = TypeHelper.MakeGenericType(type, typeArgs);
}
}
#endif
}
this.fixture = new TestFixture(type, arguments);
CheckTestFixtureIsValid(fixture);
fixture.ApplyCommonAttributes(type);
// An [Ignore] on the attribute downgrades a runnable fixture to Ignored with its reason.
if (fixture.RunState == RunState.Runnable && attr != null)
{
if (attr.Ignore)
{
fixture.RunState = RunState.Ignored;
fixture.Properties.Set(PropertyNames.SkipReason, attr.IgnoreReason);
}
}
AddTestCases(type);
return this.fixture;
}
/// <summary>
/// Method to add test cases to the newly constructed fixture.
/// </summary>
/// <param name="fixtureType"></param>
private void AddTestCases( Type fixtureType )
{
IList methods = fixtureType.GetMethods(
BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static );
foreach(MethodInfo method in methods)
{
Test test = BuildTestCase(method, this.fixture);
if(test != null)
{
this.fixture.Add( test );
}
}
}
/// <summary>
/// Method to create a test case from a MethodInfo and add
/// it to the fixture being built. It first checks to see if
/// any global TestCaseBuilder addin wants to build the
/// test case. If not, it uses the internal builder
/// collection maintained by this fixture builder. After
/// building the test case, it applies any decorators
/// that have been installed.
///
/// The default implementation has no test case builders.
/// Derived classes should add builders to the collection
/// in their constructor.
/// </summary>
/// <param name="method">The MethodInfo for which a test is to be created</param>
/// <param name="suite">The test suite being built.</param>
/// <returns>A newly constructed Test</returns>
private Test BuildTestCase( MethodInfo method, TestSuite suite )
{
#if NUNITLITE || true
return testBuilder.CanBuildFrom(method, suite)
? testBuilder.BuildFrom(method, suite)
: null;
#else
Test test = testBuilder.BuildFrom( method, suite );
if ( test != null )
test = testDecorators.Decorate( test, method );
return test;
#endif
}
// Marks the fixture not-runnable when the type is invalid or (for non-static classes)
// when no constructor matches the supplied argument types.
private void CheckTestFixtureIsValid(TestFixture fixture)
{
Type fixtureType = fixture.FixtureType;
string reason = null;
if (fixture.RunState == RunState.NotRunnable)
return;
if (!IsValidFixtureType(fixtureType, ref reason))
{
fixture.RunState = RunState.NotRunnable;
fixture.Properties.Set(PropertyNames.SkipReason, reason);
}
else if( !IsStaticClass( fixtureType ) )
{
object[] args = fixture.arguments;
Type[] argTypes;
// Note: This could be done more simply using
// Type.EmptyTypes and Type.GetTypeArray() but
// they don't exist in all runtimes we support.
if (args == null)
argTypes = new Type[0];
else
{
argTypes = new Type[args.Length];
int index = 0;
foreach (object arg in args)
argTypes[index++] = arg.GetType();
}
ConstructorInfo ctor = fixtureType.GetConstructor(argTypes);
if (ctor == null)
{
fixture.RunState = RunState.NotRunnable;
fixture.Properties.Set(PropertyNames.SkipReason, "No suitable constructor was found");
}
}
}
// C# static classes are compiled as abstract + sealed.
private static bool IsStaticClass(Type type)
{
return type.IsAbstract && type.IsSealed;
}
/// <summary>
/// Check that the fixture type is valid. This method ensures that
/// the type is not abstract and that there is no more than one of
/// each setup or teardown method and that their signatures are correct.
/// </summary>
/// <param name="fixtureType">The type of the fixture to check</param>
/// <param name="reason">A message indicating why the fixture is invalid</param>
/// <returns>True if the fixture is valid, false if not</returns>
private bool IsValidFixtureType(Type fixtureType, ref string reason)
{
#if true
if ( fixtureType.ContainsGenericParameters )
{
reason = NO_TYPE_ARGS_MSG;
return false;
}
#endif
return true;
}
/// <summary>
/// Get TestFixtureAttributes following a somewhat obscure
/// set of rules to eliminate spurious duplication of fixtures.
/// 1. If there are any attributes with args, they are the only
/// ones returned and those without args are ignored.
/// 2. No more than one attribute without args is ever returned.
/// </summary>
private TestFixtureAttribute[] GetTestFixtureAttributes(Type type)
{
TestFixtureAttribute[] attrs =
(TestFixtureAttribute[])type.GetCustomAttributes(typeof(TestFixtureAttribute), true);
// Just return - no possibility of duplication
if (attrs.Length <= 1)
return attrs;
int withArgs = 0;
bool[] hasArgs = new bool[attrs.Length];
// Count and record those attrs with arguments
for (int i = 0; i < attrs.Length; i++)
{
TestFixtureAttribute attr = attrs[i];
if (attr.Arguments.Length > 0 || attr.TypeArgs.Length > 0)
{
withArgs++;
hasArgs[i] = true;
}
}
// If all attributes have args, just return them
if (withArgs == attrs.Length)
return attrs;
// If all attributes are without args, just return the first found
if (withArgs == 0)
return new TestFixtureAttribute[] { attrs[0] };
// Some of each type, so extract those with args
int count = 0;
TestFixtureAttribute[] result = new TestFixtureAttribute[withArgs];
for (int i = 0; i < attrs.Length; i++)
if (hasArgs[i])
result[count++] = attrs[i];
return result;
}
#endregion
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Globalization;
using Newtonsoft.Json.Utilities;
namespace Newtonsoft.Json.Linq
{
// Parses a simple JSON path expression (e.g. "foo.bar[0]") into parts and evaluates it
// against a JToken tree. Supports property access via '.' and numeric indexers via [n] or (n).
internal class JPath
{
// The raw path text being parsed.
private readonly string _expression;
// Parsed components: strings for property names, boxed ints for array/constructor indexes.
public List<object> Parts { get; private set; }
// Cursor into _expression shared by the parsing methods.
private int _currentIndex;
public JPath(string expression)
{
ValidationUtils.ArgumentNotNull(expression, "expression");
_expression = expression;
Parts = new List<object>();
ParseMain();
}
// Scans the expression left to right, splitting property names on '.' and delegating
// bracket/parenthesis indexers to ParseIndexer. Throws JsonException on malformed input.
private void ParseMain()
{
int currentPartStartIndex = _currentIndex;
// True immediately after an indexer; the next char must be '.' or another indexer.
bool followingIndexer = false;
while (_currentIndex < _expression.Length)
{
char currentChar = _expression[_currentIndex];
switch (currentChar)
{
case '[':
case '(':
// Flush any property name accumulated before the indexer.
if (_currentIndex > currentPartStartIndex)
{
string member = _expression.Substring(currentPartStartIndex, _currentIndex - currentPartStartIndex);
Parts.Add(member);
}
ParseIndexer(currentChar);
currentPartStartIndex = _currentIndex + 1;
followingIndexer = true;
break;
case ']':
case ')':
// A closing bracket outside an indexer is invalid.
throw new JsonException("Unexpected character while parsing path: " + currentChar);
case '.':
// Flush the property name ended by this dot (empty segments add nothing).
if (_currentIndex > currentPartStartIndex)
{
string member = _expression.Substring(currentPartStartIndex, _currentIndex - currentPartStartIndex);
Parts.Add(member);
}
currentPartStartIndex = _currentIndex + 1;
followingIndexer = false;
break;
default:
if (followingIndexer)
throw new JsonException("Unexpected character following indexer: " + currentChar);
break;
}
_currentIndex++;
}
// Flush a trailing property name, if any.
if (_currentIndex > currentPartStartIndex)
{
string member = _expression.Substring(currentPartStartIndex, _currentIndex - currentPartStartIndex);
Parts.Add(member);
}
}
// Parses the digits of one indexer; on entry _currentIndex is at the opening char, on exit
// it is at the matching close char. Adds the parsed int to Parts.
private void ParseIndexer(char indexerOpenChar)
{
_currentIndex++;
char indexerCloseChar = (indexerOpenChar == '[') ? ']' : ')';
int indexerStart = _currentIndex;
int indexerLength = 0;
bool indexerClosed = false;
while (_currentIndex < _expression.Length)
{
char currentCharacter = _expression[_currentIndex];
if (char.IsDigit(currentCharacter))
{
indexerLength++;
}
else if (currentCharacter == indexerCloseChar)
{
indexerClosed = true;
break;
}
else
{
// Only digits are allowed between the brackets.
throw new JsonException("Unexpected character while parsing path indexer: " + currentCharacter);
}
_currentIndex++;
}
if (!indexerClosed)
throw new JsonException("Path ended with open indexer. Expected " + indexerCloseChar);
if (indexerLength == 0)
throw new JsonException("Empty path indexer.");
string indexer = _expression.Substring(indexerStart, indexerLength);
Parts.Add(Convert.ToInt32(indexer, CultureInfo.InvariantCulture));
}
// Walks the parsed parts down from 'root'. Returns the matched token, or null when there is
// no match and errorWhenNoMatch is false; otherwise throws JsonException describing the miss.
internal JToken Evaluate(JToken root, bool errorWhenNoMatch)
{
JToken current = root;
foreach (object part in Parts)
{
string propertyName = part as string;
if (propertyName != null)
{
// Property step: only valid on a JObject.
JObject o = current as JObject;
if (o != null)
{
current = o[propertyName];
if (current == null && errorWhenNoMatch)
throw new JsonException("Property '{0}' does not exist on JObject.".FormatWith(CultureInfo.InvariantCulture, propertyName));
}
else
{
if (errorWhenNoMatch)
throw new JsonException("Property '{0}' not valid on {1}.".FormatWith(CultureInfo.InvariantCulture, propertyName, current.GetType().Name));
return null;
}
}
else
{
// Index step: valid on a JArray or a JConstructor, and must be in bounds.
int index = (int)part;
JArray a = current as JArray;
JConstructor c = current as JConstructor;
if (a != null)
{
if (a.Count <= index)
{
if (errorWhenNoMatch)
throw new JsonException("Index {0} outside the bounds of JArray.".FormatWith(CultureInfo.InvariantCulture, index));
return null;
}
current = a[index];
}
else if (c != null)
{
if (c.Count <= index)
{
if (errorWhenNoMatch)
throw new JsonException("Index {0} outside the bounds of JConstructor.".FormatWith(CultureInfo.InvariantCulture, index));
return null;
}
current = c[index];
}
else
{
if (errorWhenNoMatch)
throw new JsonException("Index {0} not valid on {1}.".FormatWith(CultureInfo.InvariantCulture, index, current.GetType().Name));
return null;
}
}
}
return current;
}
}
}
| |
using UnityEngine;
// Signature shared by every noise sampler in this library: evaluate the noise
// (value plus analytic derivative) at a point scaled by the given frequency.
public delegate NoiseSample NoiseMethod (Vector3 point, float frequency);
// Noise families provided by the Noise class.
public enum NoiseMethodType {
Perlin,
Value
}
// Value and Perlin (gradient) noise in 1/2/3 dimensions, each returning a
// NoiseSample holding the noise value and its analytic derivative.
public static class Noise {

    public static NoiseMethod[] valueMethods = {
        Value1D,
        Value2D,
        Value3D
    };

    public static NoiseMethod[] perlinMethods = {
        Perlin1D,
        Perlin2D,
        Perlin3D
    };

    // NOTE(review): NoiseMethodType declares Perlin before Value, yet this
    // table lists valueMethods first, so methods[(int)type] would hand back the
    // opposite family — confirm how callers index this before relying on it.
    public static NoiseMethod[][] methods = {
        valueMethods,
        perlinMethods
    };

    // Ken Perlin's permutation table, duplicated so sums of two hashed indices
    // can be used without a second masking step.
    private static int[] hash = {
        151,160,137, 91, 90, 15,131, 13,201, 95, 96, 53,194,233, 7,225,
        140, 36,103, 30, 69,142, 8, 99, 37,240, 21, 10, 23,190, 6,148,
        247,120,234, 75, 0, 26,197, 62, 94,252,219,203,117, 35, 11, 32,
        57,177, 33, 88,237,149, 56, 87,174, 20,125,136,171,168, 68,175,
        74,165, 71,134,139, 48, 27,166, 77,146,158,231, 83,111,229,122,
        60,211,133,230,220,105, 92, 41, 55, 46,245, 40,244,102,143, 54,
        65, 25, 63,161, 1,216, 80, 73,209, 76,132,187,208, 89, 18,169,
        200,196,135,130,116,188,159, 86,164,100,109,198,173,186, 3, 64,
        52,217,226,250,124,123, 5,202, 38,147,118,126,255, 82, 85,212,
        207,206, 59,227, 47, 16, 58, 17,182,189, 28, 42,223,183,170,213,
        119,248,152, 2, 44,154,163, 70,221,153,101,155,167, 43,172, 9,
        129, 22, 39,253, 19, 98,108,110, 79,113,224,232,178,185,112,104,
        218,246, 97,228,251, 34,242,193,238,210,144, 12,191,179,162,241,
        81, 51,145,235,249, 14,239,107, 49,192,214, 31,181,199,106,157,
        184, 84,204,176,115,121, 50, 45,127, 4,150,254,138,236,205, 93,
        222,114, 67, 29, 24, 72,243,141,128,195, 78, 66,215, 61,156,180,
        151,160,137, 91, 90, 15,131, 13,201, 95, 96, 53,194,233, 7,225,
        140, 36,103, 30, 69,142, 8, 99, 37,240, 21, 10, 23,190, 6,148,
        247,120,234, 75, 0, 26,197, 62, 94,252,219,203,117, 35, 11, 32,
        57,177, 33, 88,237,149, 56, 87,174, 20,125,136,171,168, 68,175,
        74,165, 71,134,139, 48, 27,166, 77,146,158,231, 83,111,229,122,
        60,211,133,230,220,105, 92, 41, 55, 46,245, 40,244,102,143, 54,
        65, 25, 63,161, 1,216, 80, 73,209, 76,132,187,208, 89, 18,169,
        200,196,135,130,116,188,159, 86,164,100,109,198,173,186, 3, 64,
        52,217,226,250,124,123, 5,202, 38,147,118,126,255, 82, 85,212,
        207,206, 59,227, 47, 16, 58, 17,182,189, 28, 42,223,183,170,213,
        119,248,152, 2, 44,154,163, 70,221,153,101,155,167, 43,172, 9,
        129, 22, 39,253, 19, 98,108,110, 79,113,224,232,178,185,112,104,
        218,246, 97,228,251, 34,242,193,238,210,144, 12,191,179,162,241,
        81, 51,145,235,249, 14,239,107, 49,192,214, 31,181,199,106,157,
        184, 84,204,176,115,121, 50, 45,127, 4,150,254,138,236,205, 93,
        222,114, 67, 29, 24, 72,243,141,128,195, 78, 66,215, 61,156,180
    };

    private const int hashMask = 255;

    private static float[] gradients1D = {
        1f, -1f
    };

    private const int gradientsMask1D = 1;

    private static Vector2[] gradients2D = {
        new Vector2( 1f, 0f),
        new Vector2(-1f, 0f),
        new Vector2( 0f, 1f),
        new Vector2( 0f,-1f),
        new Vector2( 1f, 1f).normalized,
        new Vector2(-1f, 1f).normalized,
        new Vector2( 1f,-1f).normalized,
        new Vector2(-1f,-1f).normalized
    };

    private const int gradientsMask2D = 7;

    // Twelve edge gradients padded to sixteen entries so a power-of-two mask works.
    private static Vector3[] gradients3D = {
        new Vector3( 1f, 1f, 0f),
        new Vector3(-1f, 1f, 0f),
        new Vector3( 1f,-1f, 0f),
        new Vector3(-1f,-1f, 0f),
        new Vector3( 1f, 0f, 1f),
        new Vector3(-1f, 0f, 1f),
        new Vector3( 1f, 0f,-1f),
        new Vector3(-1f, 0f,-1f),
        new Vector3( 0f, 1f, 1f),
        new Vector3( 0f,-1f, 1f),
        new Vector3( 0f, 1f,-1f),
        new Vector3( 0f,-1f,-1f),
        new Vector3( 1f, 1f, 0f),
        new Vector3(-1f, 1f, 0f),
        new Vector3( 0f,-1f, 1f),
        new Vector3( 0f,-1f,-1f)
    };

    private const int gradientsMask3D = 15;

    private static float Dot (Vector2 g, float x, float y)
    {
        return g.x * x + g.y * y;
    }

    private static float Dot (Vector3 g, float x, float y, float z)
    {
        return g.x * x + g.y * y + g.z * z;
    }

    // Quintic smoothstep 6t^5 - 15t^4 + 10t^3 (zero 1st/2nd derivative at 0 and 1).
    private static float Smooth (float t)
    {
        return t * t * t * (t * (t * 6f - 15f) + 10f);
    }

    // First derivative of Smooth: 30t^4 - 60t^3 + 30t^2.
    private static float SmoothDerivative (float t)
    {
        return 30f * t * t * (t * (t - 2f) + 1f);
    }

    // Normalization factor applied to 2D Perlin samples.
    private static float sqr2 = Mathf.Sqrt(2f);

    // 1D value noise with analytic derivative; result scaled into [0, 1].
    public static NoiseSample Value1D (Vector3 point, float frequency) {
        point *= frequency;
        int i0 = Mathf.FloorToInt(point.x);
        float t = point.x - i0;
        i0 &= hashMask;
        int i1 = i0 + 1;

        int h0 = hash[i0];
        int h1 = hash[i1];

        float dt = SmoothDerivative(t);
        t = Smooth(t);

        float a = h0;
        float b = h1 - h0;

        NoiseSample sample;
        sample.value = a + b * t;
        sample.derivative.x = b * dt;
        sample.derivative.y = 0f;
        sample.derivative.z = 0f;
        // Chain rule: the point was scaled by frequency, so the derivative is too.
        sample.derivative *= frequency;
        return sample * (1f / hashMask);
    }

    // 2D value noise with analytic derivative; result scaled into [0, 1].
    public static NoiseSample Value2D (Vector3 point, float frequency) {
        point *= frequency;
        int ix0 = Mathf.FloorToInt(point.x);
        int iy0 = Mathf.FloorToInt(point.y);
        float tx = point.x - ix0;
        float ty = point.y - iy0;
        ix0 &= hashMask;
        iy0 &= hashMask;
        int ix1 = ix0 + 1;
        int iy1 = iy0 + 1;

        int h0 = hash[ix0];
        int h1 = hash[ix1];
        int h00 = hash[h0 + iy0];
        int h10 = hash[h1 + iy0];
        int h01 = hash[h0 + iy1];
        int h11 = hash[h1 + iy1];

        float dtx = SmoothDerivative(tx);
        float dty = SmoothDerivative(ty);
        tx = Smooth(tx);
        ty = Smooth(ty);

        // Bilinear interpolation rewritten as a + b*tx + (c + d*tx)*ty.
        float a = h00;
        float b = h10 - h00;
        float c = h01 - h00;
        float d = h11 - h01 - h10 + h00;

        NoiseSample sample;
        sample.value = a + b * tx + (c + d * tx) * ty;
        sample.derivative.x = (b + d * ty) * dtx;
        sample.derivative.y = (c + d * tx) * dty;
        sample.derivative.z = 0f;
        sample.derivative *= frequency;
        return sample * (1f / hashMask);
    }

    // 3D value noise with analytic derivative; result scaled into [0, 1].
    public static NoiseSample Value3D (Vector3 point, float frequency) {
        point *= frequency;
        int ix0 = Mathf.FloorToInt(point.x);
        int iy0 = Mathf.FloorToInt(point.y);
        int iz0 = Mathf.FloorToInt(point.z);
        float tx = point.x - ix0;
        float ty = point.y - iy0;
        float tz = point.z - iz0;
        ix0 &= hashMask;
        iy0 &= hashMask;
        iz0 &= hashMask;
        int ix1 = ix0 + 1;
        int iy1 = iy0 + 1;
        int iz1 = iz0 + 1;

        int h0 = hash[ix0];
        int h1 = hash[ix1];
        int h00 = hash[h0 + iy0];
        int h10 = hash[h1 + iy0];
        int h01 = hash[h0 + iy1];
        int h11 = hash[h1 + iy1];
        int h000 = hash[h00 + iz0];
        int h100 = hash[h10 + iz0];
        int h010 = hash[h01 + iz0];
        int h110 = hash[h11 + iz0];
        int h001 = hash[h00 + iz1];
        int h101 = hash[h10 + iz1];
        int h011 = hash[h01 + iz1];
        int h111 = hash[h11 + iz1];

        float dtx = SmoothDerivative(tx);
        float dty = SmoothDerivative(ty);
        float dtz = SmoothDerivative(tz);
        tx = Smooth(tx);
        ty = Smooth(ty);
        tz = Smooth(tz);

        // Trilinear interpolation rewritten as
        // a + b*tx + (c + e*tx)*ty + (d + f*tx + (g + h*tx)*ty)*tz.
        float a = h000;
        float b = h100 - h000;
        float c = h010 - h000;
        float d = h001 - h000;
        float e = h110 - h010 - h100 + h000;
        float f = h101 - h001 - h100 + h000;
        float g = h011 - h001 - h100 + h000;
        float h = h111 - h011 - h101 + h001 - h110 + h010 + h100 - h000;

        NoiseSample sample;
        sample.value = a + b * tx + (c + e * tx) * ty + (d + f * tx + (g + h * tx) * ty) * tz;
        sample.derivative.x = (b + e * ty + (f + h * ty) * tz) * dtx;
        // FIX: the ty partial uses coefficient e, not f (it was f, which mixed
        // in the xz cross term and produced a wrong y derivative).
        sample.derivative.y = (c + e * tx + (g + h * tx) * tz) * dty;
        sample.derivative.z = (d + f * tx + (g + h * tx) * ty) * dtz;
        sample.derivative *= frequency;
        return sample * (1f / hashMask);
    }

    // 1D Perlin (gradient) noise with analytic derivative, roughly in [-1, 1].
    public static NoiseSample Perlin1D (Vector3 point, float frequency) {
        point *= frequency;
        int i0 = Mathf.FloorToInt(point.x);
        float t0 = point.x - i0;
        float t1 = t0 - 1f;
        i0 &= hashMask;
        int i1 = i0 + 1;

        float g0 = gradients1D[hash[i0] & gradientsMask1D];
        float g1 = gradients1D[hash[i1] & gradientsMask1D];

        float v0 = g0 * t0;
        float v1 = g1 * t1;

        float dt = SmoothDerivative(t0);
        float t = Smooth(t0);

        float a = v0;
        float b = v1 - v0;
        // The lattice values themselves vary with x, so their derivatives
        // (the gradients) contribute alongside the falloff term.
        float da = g0;
        float db = g1 - g0;

        NoiseSample sample;
        sample.value = a + b * t;
        sample.derivative.x = da + db * t + b * dt;
        sample.derivative.y = 0f;
        sample.derivative.z = 0f;
        sample.derivative *= frequency;
        return sample * 2f;
    }

    // 2D Perlin (gradient) noise with analytic derivative, roughly in [-1, 1].
    public static NoiseSample Perlin2D (Vector3 point, float frequency) {
        point *= frequency;
        int ix0 = Mathf.FloorToInt(point.x);
        int iy0 = Mathf.FloorToInt(point.y);
        float tx0 = point.x - ix0;
        float ty0 = point.y - iy0;
        float tx1 = tx0 - 1f;
        float ty1 = ty0 - 1f;
        ix0 &= hashMask;
        iy0 &= hashMask;
        int ix1 = ix0 + 1;
        int iy1 = iy0 + 1;

        int h0 = hash[ix0];
        int h1 = hash[ix1];
        Vector2 g00 = gradients2D[hash[h0 + iy0] & gradientsMask2D];
        Vector2 g10 = gradients2D[hash[h1 + iy0] & gradientsMask2D];
        Vector2 g01 = gradients2D[hash[h0 + iy1] & gradientsMask2D];
        Vector2 g11 = gradients2D[hash[h1 + iy1] & gradientsMask2D];

        float v00 = Dot(g00, tx0, ty0);
        float v10 = Dot(g10, tx1, ty0);
        float v01 = Dot(g01, tx0, ty1);
        float v11 = Dot(g11, tx1, ty1);

        float dtx = SmoothDerivative(tx0);
        // FIX: was SmoothDerivative(tx0) — the y falloff derivative must use ty0.
        float dty = SmoothDerivative(ty0);
        float tx = Smooth(tx0);
        float ty = Smooth(ty0);

        float a = v00;
        float b = v10 - v00;
        float c = v01 - v00;
        float d = v11 - v01 - v10 + v00;
        Vector2 da = g00;
        Vector2 db = g10 - g00;
        Vector2 dc = g01 - g00;
        Vector2 dd = g11 - g01 - g10 + g00;

        NoiseSample sample;
        sample.value = a + b * tx + (c + d * tx) * ty;
        // Derivative = gradient contribution plus smoothstep falloff contribution.
        sample.derivative = da + db * tx + (dc + dd * tx) * ty;
        // FIX: accumulate with += — plain assignment discarded the gradient
        // contribution computed on the previous line.
        sample.derivative.x += (b + d * ty) * dtx;
        sample.derivative.y += (c + d * tx) * dty;
        sample.derivative.z = 0f;
        sample.derivative *= frequency;
        return sample * sqr2;
    }

    // 3D Perlin (gradient) noise with analytic derivative, roughly in [-1, 1].
    public static NoiseSample Perlin3D (Vector3 point, float frequency) {
        point *= frequency;
        int ix0 = Mathf.FloorToInt(point.x);
        int iy0 = Mathf.FloorToInt(point.y);
        int iz0 = Mathf.FloorToInt(point.z);
        float tx0 = point.x - ix0;
        float ty0 = point.y - iy0;
        float tz0 = point.z - iz0;
        float tx1 = tx0 - 1f;
        float ty1 = ty0 - 1f;
        float tz1 = tz0 - 1f;
        ix0 &= hashMask;
        iy0 &= hashMask;
        iz0 &= hashMask;
        int ix1 = ix0 + 1;
        int iy1 = iy0 + 1;
        int iz1 = iz0 + 1;

        int h0 = hash[ix0];
        int h1 = hash[ix1];
        int h00 = hash[h0 + iy0];
        int h10 = hash[h1 + iy0];
        int h01 = hash[h0 + iy1];
        int h11 = hash[h1 + iy1];
        Vector3 g000 = gradients3D[hash[h00 + iz0] & gradientsMask3D];
        Vector3 g100 = gradients3D[hash[h10 + iz0] & gradientsMask3D];
        Vector3 g010 = gradients3D[hash[h01 + iz0] & gradientsMask3D];
        Vector3 g110 = gradients3D[hash[h11 + iz0] & gradientsMask3D];
        Vector3 g001 = gradients3D[hash[h00 + iz1] & gradientsMask3D];
        Vector3 g101 = gradients3D[hash[h10 + iz1] & gradientsMask3D];
        Vector3 g011 = gradients3D[hash[h01 + iz1] & gradientsMask3D];
        Vector3 g111 = gradients3D[hash[h11 + iz1] & gradientsMask3D];

        float v000 = Dot(g000, tx0, ty0, tz0);
        float v100 = Dot(g100, tx1, ty0, tz0);
        float v010 = Dot(g010, tx0, ty1, tz0);
        float v110 = Dot(g110, tx1, ty1, tz0);
        float v001 = Dot(g001, tx0, ty0, tz1);
        float v101 = Dot(g101, tx1, ty0, tz1);
        float v011 = Dot(g011, tx0, ty1, tz1);
        float v111 = Dot(g111, tx1, ty1, tz1);

        float dtx = SmoothDerivative(tx0);
        float dty = SmoothDerivative(ty0);
        float dtz = SmoothDerivative(tz0);
        float tx = Smooth(tx0);
        float ty = Smooth(ty0);
        float tz = Smooth(tz0);

        float a = v000;
        float b = v100 - v000;
        float c = v010 - v000;
        float d = v001 - v000;
        float e = v110 - v010 - v100 + v000;
        float f = v101 - v001 - v100 + v000;
        float g = v011 - v001 - v100 + v000;
        float h = v111 - v011 - v101 + v001 - v110 + v010 + v100 - v000;
        // FIX: these deltas were declared as Vector2, which silently truncated
        // the z component of the 3D gradients via Unity's implicit conversion.
        Vector3 da = g000;
        Vector3 db = g100 - g000;
        Vector3 dc = g010 - g000;
        Vector3 dd = g001 - g000;
        Vector3 de = g110 - g010 - g100 + g000;
        Vector3 df = g101 - g001 - g100 + g000;
        Vector3 dg = g011 - g001 - g100 + g000;
        Vector3 dh = g111 - g011 - g101 + g001 - g110 + g010 + g100 - g000;

        NoiseSample sample;
        sample.value = a + b * tx + (c + e * tx) * ty + (d + f * tx + (g + h * tx) * ty) * tz;
        // Derivative = gradient contribution plus smoothstep falloff contribution.
        sample.derivative = da + db * tx + (dc + de * tx) * ty + (dd + df * tx + (dg + dh * tx) * ty) * tz;
        // FIX: accumulate with += — plain assignment discarded the gradient
        // contribution computed on the previous line.
        sample.derivative.x += (b + e * ty + (f + h * ty) * tz) * dtx;
        sample.derivative.y += (c + e * tx + (g + h * tx) * tz) * dty;
        sample.derivative.z += (d + f * tx + (g + h * tx) * ty) * dtz;
        sample.derivative *= frequency;
        return sample;
    }

    // Sums `octaves` samples, multiplying frequency by lacunarity and amplitude
    // by persistence each octave, then renormalizes by the total amplitude.
    public static NoiseSample Sum (
        NoiseMethod method, Vector3 point,
        float frequency, int octaves,
        float lacunarity, float persistence)
    {
        NoiseSample sum = method(point, frequency);
        float amplitude = 1f;
        float range = 1f;
        for (int o = 1; o < octaves; o++) {
            frequency *= lacunarity;
            amplitude *= persistence;
            range += amplitude;
            sum += method(point, frequency) * amplitude;
        }
        return sum * (1f / range);
    }
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
using ASC.Common.Logging;
using ASC.Data.Backup.Tasks.Data;
namespace ASC.Data.Backup.Tasks.Modules
{
// Backup/restore specifics for the Files module: table list, cross-module
// relations, and value remapping rules applied while transferring rows.
class FilesModuleSpecifics : ModuleSpecificsBase
{
    // Matches strings made up exclusively of decimal digits (numeric entity ids).
    private static readonly Regex RegexIsInteger = new Regex(@"^\d+$", RegexOptions.Compiled);

    // Physical file storage layout: folder_<bucket>/file_<id>/v<version>/<name>.
    // Hoisted so the compiled regex is built once instead of per call.
    private static readonly Regex RegexFilePath = new Regex(@"^folder_\d+/file_(?'fileId'\d+)/(?'versionExtension'v\d+/[\.\w]+)$", RegexOptions.Compiled);

    // Third-party mapping ids embed the account id after a provider prefix
    // ("sbox-123", "dropbox-123", ...); the digits must be remapped on restore.
    private static readonly Regex RegexThirdPartyAccountId = new Regex(@"(?<=(?:sbox-|box-|dropbox-|spoint-|drive-|onedrive-))\d+", RegexOptions.Compiled);

    // Known right_node prefixes in files_bunch_objects, used to route rows to
    // the owning module (projects, CRM, personal/trash folders).
    private const string BunchRightNodeStartProject = "projects/project/";
    private const string BunchRightNodeStartCrmOpportunity = "crm/opportunity/";
    private const string BunchRightNodeStartMy = "files/my/";
    private const string BunchRightNodeStartTrash = "files/trash/";

    private readonly TableInfo[] _tables = new[]
    {
        new TableInfo("files_bunch_objects", "tenant_id"),
        new TableInfo("files_file", "tenant_id", "id", IdType.Integer)
        {
            UserIDColumns = new[] {"create_by", "modified_by"},
            DateColumns = new Dictionary<string, bool> {{"create_on", false}, {"modified_on", false}}
        },
        new TableInfo("files_folder", "tenant_id", "id")
        {
            UserIDColumns = new[] {"create_by", "modified_by"},
            DateColumns = new Dictionary<string, bool> {{"create_on", false}, {"modified_on", false}}
        },
        new TableInfo("files_folder_tree"),
        new TableInfo("files_security", "tenant_id") {UserIDColumns = new[] {"owner"}},
        new TableInfo("files_thirdparty_account", "tenant_id", "id")
        {
            UserIDColumns = new[] {"user_id"},
            DateColumns = new Dictionary<string, bool> {{"create_on", false}}
        },
        new TableInfo("files_thirdparty_id_mapping", "tenant_id")
    };

    private readonly RelationInfo[] _tableRelations = new[]
    {
        new RelationInfo("core_user", "id", "files_bunch_objects", "right_node", typeof(TenantsModuleSpecifics),
            x =>
            {
                var rightNode = Convert.ToString(x["right_node"]);
                return rightNode.StartsWith(BunchRightNodeStartMy) || rightNode.StartsWith(BunchRightNodeStartTrash);
            }),
        new RelationInfo("core_user", "id", "files_security", "subject", typeof(TenantsModuleSpecifics)),
        new RelationInfo("core_group", "id", "files_security", "subject", typeof(TenantsModuleSpecifics)),
        new RelationInfo("crm_deal", "id", "files_bunch_objects", "right_node", typeof(CrmModuleSpecifics),
            x => Convert.ToString(x["right_node"]).StartsWith(BunchRightNodeStartCrmOpportunity)),
        new RelationInfo("projects_projects", "id", "files_bunch_objects", "right_node", typeof(ProjectsModuleSpecifics),
            x => Convert.ToString(x["right_node"]).StartsWith(BunchRightNodeStartProject, StringComparison.InvariantCultureIgnoreCase)),
        new RelationInfo("files_folder", "id", "files_bunch_objects", "left_node"),
        new RelationInfo("files_folder", "id", "files_file", "folder_id"),
        new RelationInfo("files_folder", "id", "files_folder", "parent_id"),
        new RelationInfo("files_folder", "id", "files_folder_tree", "folder_id"),
        new RelationInfo("files_folder", "id", "files_folder_tree", "parent_id"),
        new RelationInfo("files_file", "id", "files_security", "entry_id",
            x => Convert.ToInt32(x["entry_type"]) == 2 && RegexIsInteger.IsMatch(Convert.ToString(x["entry_id"]))),
        new RelationInfo("files_folder", "id", "files_security", "entry_id",
            x => Convert.ToInt32(x["entry_type"]) == 1 && RegexIsInteger.IsMatch(Convert.ToString(x["entry_id"]))),
        new RelationInfo("files_thirdparty_id_mapping", "hash_id", "files_security", "entry_id",
            x => !RegexIsInteger.IsMatch(Convert.ToString(x["entry_id"]))),
        new RelationInfo("files_thirdparty_account", "id", "files_thirdparty_id_mapping", "id"),
        new RelationInfo("files_thirdparty_account", "id", "files_thirdparty_id_mapping", "hash_id")
    };

    public override ModuleName ModuleName
    {
        get { return ModuleName.Files; }
    }

    public override IEnumerable<TableInfo> Tables
    {
        get { return _tables; }
    }

    public override IEnumerable<RelationInfo> TableRelations
    {
        get { return _tableRelations; }
    }

    // Rewrites a stored file path so it points at the remapped file id.
    // During a dump (dump == true) unmapped ids are kept as-is.
    public override bool TryAdjustFilePath(bool dump, ColumnMapper columnMapper, ref string filePath)
    {
        var match = RegexFilePath.Match(filePath.Replace('\\', '/'));
        if (!match.Success)
        {
            return false;
        }
        var fileId = columnMapper.GetMapping("files_file", "id", match.Groups["fileId"].Value);
        if (fileId == null)
        {
            if (!dump)
            {
                return false;
            }
            fileId = match.Groups["fileId"].Value;
        }
        // The folder bucket is derived from the file id: ids 0..999 land in
        // folder_1000, 1000..1999 in folder_2000, and so on.
        filePath = string.Format("folder_{0}/file_{1}/{2}", (Convert.ToInt32(fileId) / 1000 + 1) * 1000, fileId, match.Groups["versionExtension"].Value);
        return true;
    }

    protected override string GetSelectCommandConditionText(int tenantId, TableInfo table)
    {
        // files_folder_tree has no tenant column; filter through its folder.
        if (table.Name == "files_folder_tree")
        {
            return "inner join files_folder as t1 on t1.id = t.folder_id where t1.tenant_id = " + tenantId;
        }
        if (table.Name == "files_file")
        {
            // do not backup previous backup files
            return "where not exists(select 1 from backup_backup b where b.tenant_id = t.tenant_id and b.storage_path = t.id) and t.tenant_id = " + tenantId;
        }
        return base.GetSelectCommandConditionText(tenantId, table);
    }

    // Rebuilds files_thirdparty_id_mapping rows: the embedded account id is
    // remapped and the hash_id recomputed as the MD5 of the new id.
    protected override bool TryPrepareRow(bool dump, DbConnection connection, ColumnMapper columnMapper, TableInfo table, DataRowInfo row, out Dictionary<string, object> preparedRow)
    {
        if (row.TableName == "files_thirdparty_id_mapping")
        {
            //todo: think...
            preparedRow = new Dictionary<string, object>();

            object accountId = null;
            var mappedId = RegexThirdPartyAccountId.Replace(row[1].ToString(), match =>
                {
                    accountId = columnMapper.GetMapping("files_thirdparty_account", "id", match.Value);
                    return Convert.ToString(accountId);
                });

            // Skip the row when the referenced account was not restored.
            if (accountId == null)
                return false;

            // FIX: MD5 is IDisposable — dispose it instead of leaking the instance.
            byte[] hashBytes;
            using (var md5 = MD5.Create())
            {
                hashBytes = md5.ComputeHash(Encoding.UTF8.GetBytes(mappedId));
            }
            var hashedId = BitConverter.ToString(hashBytes).Replace("-", "").ToLower();

            preparedRow.Add("hash_id", hashedId);
            preparedRow.Add("id", mappedId);
            preparedRow.Add("tenant_id", columnMapper.GetTenantMapping());

            columnMapper.SetMapping("files_thirdparty_id_mapping", "hash_id", row["hash_id"], hashedId);
            return true;
        }
        return base.TryPrepareRow(dump, connection, columnMapper, table, row, out preparedRow);
    }

    // files_security.subject may reference either a user or a group, so both
    // relations are probed; system/empty subjects pass through unchanged.
    protected override bool TryPrepareValue(bool dump, DbConnection connection, ColumnMapper columnMapper, TableInfo table, string columnName, IEnumerable<RelationInfo> relations, ref object value)
    {
        var relationList = relations.ToList();
        if (relationList.All(x => x.ChildTable == "files_security" && x.ChildColumn == "subject"))
        {
            //note: value could be ShareForEveryoneID and in that case result should be always false
            var strVal = Convert.ToString(value);
            if (Helpers.IsEmptyOrSystemUser(strVal) || Helpers.IsEmptyOrSystemGroup(strVal))
                return true;
            foreach (var relation in relationList)
            {
                var mapping = columnMapper.GetMapping(relation.ParentTable, relation.ParentColumn, value);
                if (mapping != null)
                {
                    value = mapping;
                    return true;
                }
            }
            return false;
        }
        return base.TryPrepareValue(dump, connection, columnMapper, table, columnName, relationList, ref value);
    }

    // Remaps the entity id embedded after the prefix of a bunch right_node
    // ("projects/project/<id>" etc.), keeping the prefix itself intact.
    protected override bool TryPrepareValue(DbConnection connection, ColumnMapper columnMapper, RelationInfo relation, ref object value)
    {
        if (relation.ChildTable == "files_bunch_objects" && relation.ChildColumn == "right_node")
        {
            var strValue = Convert.ToString(value);

            string start = GetStart(strValue);
            if (start == null)
                return false;

            var entityId = columnMapper.GetMapping(relation.ParentTable, relation.ParentColumn, strValue.Substring(start.Length));
            if (entityId == null)
                return false;

            value = strValue.Substring(0, start.Length) + entityId;
            return true;
        }
        return base.TryPrepareValue(connection, columnMapper, relation, ref value);
    }

    protected override bool TryPrepareValue(DbConnection connection, ColumnMapper columnMapper, TableInfo table, string columnName, ref object value)
    {
        // Third-party credentials are stored hashed; re-hash them on transfer.
        if (table.Name == "files_thirdparty_account" && (columnName == "password" || columnName == "token") && value != null)
        {
            try
            {
                value = Helpers.CreateHash(value as string); // save original hash
            }
            catch (Exception err)
            {
                LogManager.GetLogger("ASC").ErrorFormat("Can not prepare value {0}: {1}", value, err);
                value = null;
            }
            return true;
        }
        // Folder author columns are remapped best-effort: report success even
        // when the base implementation could not map the user.
        if (table.Name == "files_folder" && (columnName == "create_by" || columnName == "modified_by"))
        {
            base.TryPrepareValue(connection, columnMapper, table, columnName, ref value);
            return true;
        }
        return base.TryPrepareValue(connection, columnMapper, table, columnName, ref value);
    }

    // Re-hashes password/token columns of third-party accounts before writing
    // the backup; rows whose credentials cannot be hashed are dropped.
    public override void PrepareData(DataTable data)
    {
        if (data.TableName == "files_thirdparty_account")
        {
            var providerColumn = data.Columns.Cast<DataColumn>().Single(c => c.ColumnName == "provider");
            var pwdColumn = data.Columns.Cast<DataColumn>().Single(c => c.ColumnName == "password");
            var tokenColumn = data.Columns.Cast<DataColumn>().Single(c => c.ColumnName == "token");
            for (var i = 0; i < data.Rows.Count; i++)
            {
                var row = data.Rows[i];
                try
                {
                    row[pwdColumn] = Helpers.CreateHash2(row[pwdColumn] as string);
                    row[tokenColumn] = Helpers.CreateHash2(row[tokenColumn] as string);
                }
                catch (Exception ex)
                {
                    LogManager.GetLogger("ASC").ErrorFormat("Can not prepare data {0}: {1}", row[providerColumn] as string, ex);
                    data.Rows.Remove(row);
                    i--; // removal shifted the remaining rows left
                }
            }
        }
    }

    // Returns the known right_node prefix that `value` starts with, or null.
    private static string GetStart(string value)
    {
        var allStarts = new[] { BunchRightNodeStartProject, BunchRightNodeStartMy, BunchRightNodeStartTrash, BunchRightNodeStartCrmOpportunity };
        return allStarts.FirstOrDefault(value.StartsWith);
    }
}
// Backup/restore specifics for the Files tag tables (files_tag/files_tag_link).
class FilesModuleSpecifics2 : ModuleSpecificsBase
{
    // Matches strings made up exclusively of decimal digits (numeric entity ids).
    private static readonly Regex RegexIsInteger = new Regex(@"^\d+$", RegexOptions.Compiled);

    // Prefixes that files_tag names use to reference entities of other modules.
    private const string TagStartMessage = "Message";
    private const string TagStartTask = "Task";
    private const string TagStartProject = "Project";
    private const string TagStartRelationshipEvent = "RelationshipEvent_";

    private readonly TableInfo[] _tables = new[]
    {
        new TableInfo("files_tag", "tenant_id", "id") {UserIDColumns = new[] {"owner"}},
        new TableInfo("files_tag_link", "tenant_id")
        {
            UserIDColumns = new[] {"create_by"},
            DateColumns = new Dictionary<string, bool> {{"create_on", false}}
        },
    };

    private readonly RelationInfo[] _relations = new[]
    {
        new RelationInfo("projects_messages", "id", "files_tag", "name", typeof(ProjectsModuleSpecifics),
            x => Convert.ToString(x["name"]).StartsWith(TagStartMessage, StringComparison.InvariantCultureIgnoreCase)),
        new RelationInfo("projects_tasks", "id", "files_tag", "name", typeof(ProjectsModuleSpecifics),
            x => Convert.ToString(x["name"]).StartsWith(TagStartTask, StringComparison.InvariantCultureIgnoreCase)),
        new RelationInfo("projects_projects", "id", "files_tag", "name", typeof(ProjectsModuleSpecifics),
            x => Convert.ToString(x["name"]).StartsWith(TagStartProject, StringComparison.InvariantCultureIgnoreCase)),
        new RelationInfo("crm_relationship_event", "id", "files_tag", "name", typeof(CrmModuleSpecifics2),
            x => Convert.ToString(x["name"]).StartsWith(TagStartRelationshipEvent, StringComparison.InvariantCultureIgnoreCase)),
        new RelationInfo("files_tag", "id", "files_tag_link", "tag_id", typeof(FilesModuleSpecifics)),
        new RelationInfo("files_file", "id", "files_tag_link", "entry_id", typeof(FilesModuleSpecifics),
            x => Convert.ToInt32(x["entry_type"]) == 2 && RegexIsInteger.IsMatch(Convert.ToString(x["entry_id"]))),
        new RelationInfo("files_folder", "id", "files_tag_link", "entry_id", typeof(FilesModuleSpecifics),
            x => Convert.ToInt32(x["entry_type"]) == 1 && RegexIsInteger.IsMatch(Convert.ToString(x["entry_id"]))),
        new RelationInfo("files_thirdparty_id_mapping", "hash_id", "files_tag_link", "entry_id", typeof(FilesModuleSpecifics),
            x => !RegexIsInteger.IsMatch(Convert.ToString(x["entry_id"]))),
    };

    public override ModuleName ModuleName
    {
        get { return ModuleName.Files2; }
    }

    public override IEnumerable<TableInfo> Tables
    {
        get { return _tables; }
    }

    public override IEnumerable<RelationInfo> TableRelations
    {
        get { return _relations; }
    }

    // Remaps the entity id embedded after the prefix of a files_tag name
    // ("Task<id>", "Project<id>", ...), keeping the prefix itself intact.
    protected override bool TryPrepareValue(DbConnection connection, ColumnMapper columnMapper, RelationInfo relation, ref object value)
    {
        if (relation.ChildTable != "files_tag" || relation.ChildColumn != "name")
        {
            return base.TryPrepareValue(connection, columnMapper, relation, ref value);
        }

        string tagName = Convert.ToString(value);
        string prefix = GetStart(tagName);
        if (prefix == null)
        {
            return false;
        }

        object mappedId = columnMapper.GetMapping(relation.ParentTable, relation.ParentColumn, tagName.Substring(prefix.Length));
        if (mappedId == null)
        {
            return false;
        }

        value = tagName.Substring(0, prefix.Length) + mappedId;
        return true;
    }

    // Returns the known tag-name prefix that `value` starts with, or null.
    private static string GetStart(string value)
    {
        string[] candidates = { TagStartMessage, TagStartTask, TagStartRelationshipEvent, TagStartProject };
        foreach (string candidate in candidates)
        {
            if (value.StartsWith(candidate))
            {
                return candidate;
            }
        }
        return null;
    }
}
}
| |
// This code is part of the Fungus library (http://fungusgames.com) maintained by Chris Gregan (http://twitter.com/gofungus).
// It is released for free under the MIT open source license (https://github.com/snozbot/fungus/blob/master/LICENSE)
// Copyright (c) 2012-2013 Rotorz Limited. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
using UnityEngine;
using UnityEditor;
using System;
using Rotorz.ReorderableList;
namespace Fungus.EditorUtils
{
public class CommandListAdaptor : IReorderableListAdaptor {
// Serialized array of Command object references that this adaptor exposes.
protected SerializedProperty _arrayProperty;
// When greater than zero, every row uses this fixed height.
public float fixedItemHeight;
// Rect assigned by the owner for layout purposes.
public Rect nodeRect = new Rect();
// Indexed access to the underlying serialized array elements.
public SerializedProperty this[int index] {
get { return _arrayProperty.GetArrayElementAtIndex(index); }
}
// The serialized array property this adaptor wraps.
public SerializedProperty arrayProperty {
get { return _arrayProperty; }
}
// Wraps a serialized Command array for display in a reorderable list.
// fixedItemHeight of 0 means each row computes its own height.
public CommandListAdaptor(SerializedProperty arrayProperty, float fixedItemHeight) {
    // FIX: the single-string ArgumentNullException overload treats its argument
    // as the parameter name, not the message — use the (paramName, message) form.
    if (arrayProperty == null)
        throw new ArgumentNullException("arrayProperty", "Array property was null.");
    if (!arrayProperty.isArray)
        throw new InvalidOperationException("Specified serialized property is not an array."); // FIX: "propery" typo
    this._arrayProperty = arrayProperty;
    this.fixedItemHeight = fixedItemHeight;
}

// Convenience overload: rows compute their own height.
public CommandListAdaptor(SerializedProperty arrayProperty) : this(arrayProperty, 0f) {
}
// Number of commands in the wrapped serialized array.
public int Count {
get { return _arrayProperty.arraySize; }
}
// Every row may be reordered by dragging.
public virtual bool CanDrag(int index) {
return true;
}
// Every row may be removed.
public virtual bool CanRemove(int index) {
return true;
}
// Appends a freshly created command to the end of the serialized list.
public void Add() {
    // Create the command component first; bail out if creation failed.
    Command createdCommand = AddNewCommand();
    if (createdCommand == null) {
        return;
    }
    // Grow the array by one slot and point the new slot at the command.
    int appendIndex = _arrayProperty.arraySize;
    _arrayProperty.arraySize = appendIndex + 1;
    _arrayProperty.GetArrayElementAtIndex(appendIndex).objectReferenceValue = createdCommand;
}
// Inserts a freshly created command at the given position in the list.
public void Insert(int index) {
    // Create the command component first; bail out if creation failed.
    Command createdCommand = AddNewCommand();
    if (createdCommand == null) {
        return;
    }
    // Open a slot at the requested position and reference the new command.
    _arrayProperty.InsertArrayElementAtIndex(index);
    _arrayProperty.GetArrayElementAtIndex(index).objectReferenceValue = createdCommand;
}
// Creates the command component referenced by Add()/Insert().
// Returns null when there is no active flowchart or no selected block.
Command AddNewCommand()
{
Flowchart flowchart = FlowchartWindow.GetFlowchart();
if (flowchart == null)
{
return null;
}
var block = flowchart.SelectedBlock;
if (block == null)
{
return null;
}
// NOTE(review): the new command is always added as a Comment component —
// presumably a placeholder the user converts afterwards; confirm against
// the command selection UI.
var newCommand = Undo.AddComponent<Comment>(block.gameObject) as Command;
newCommand.ItemId = flowchart.NextItemId();
// Make the freshly added command the sole selection.
flowchart.ClearSelectedCommands();
flowchart.AddSelectedCommand(newCommand);
return newCommand;
}
// Clones the command at `index` and inserts the copy at the same position.
public void Duplicate(int index) {
    Command sourceCommand = _arrayProperty.GetArrayElementAtIndex(index).objectReferenceValue as Command;

    // Attach a fresh component of the same concrete type to the owning block.
    var ownerBlock = sourceCommand.GetComponent<Block>();
    System.Type commandType = sourceCommand.GetType();
    Command copy = Undo.AddComponent(ownerBlock.gameObject, commandType) as Command;
    copy.ItemId = copy.GetFlowchart().NextItemId();

    // Shallow-copy every public field from the source onto the copy.
    foreach (System.Reflection.FieldInfo field in commandType.GetFields())
    {
        field.SetValue(copy, field.GetValue(sourceCommand));
    }

    // Open a slot at the duplicated position and reference the copy there.
    _arrayProperty.InsertArrayElementAtIndex(index);
    _arrayProperty.GetArrayElementAtIndex(index).objectReferenceValue = copy;
}
// Removes the list entry at `index` and destroys its command component.
public void Remove(int index) {
// Remove the Fungus Command component
Command command = _arrayProperty.GetArrayElementAtIndex(index).objectReferenceValue as Command;
if (command != null)
{
Undo.DestroyObjectImmediate(command);
}
// NOTE(review): the reference is nulled before deleting — presumably because
// DeleteArrayElementAtIndex only clears (rather than removes) an element
// that still holds an object reference; confirm against the Unity docs.
_arrayProperty.GetArrayElementAtIndex(index).objectReferenceValue = null;
_arrayProperty.DeleteArrayElementAtIndex(index);
}
// Moves the entry at sourceIndex so it ends up at destIndex.
public void Move(int sourceIndex, int destIndex) {
// Compensate for the gap left by the source element when moving downward.
if (destIndex > sourceIndex)
--destIndex;
_arrayProperty.MoveArrayElement(sourceIndex, destIndex);
}
// Empties the list. Remove(0) both destroys the command component and
// shrinks the array, so removing the head repeatedly drains everything.
public void Clear() {
while (Count > 0)
{
Remove(0);
}
}
// No per-draw setup is required by this adaptor.
public void BeginGUI()
{}
// No per-draw teardown is required by this adaptor.
public void EndGUI()
{}
// Row backgrounds are left to the default list control.
public void DrawItemBackground(Rect position, int index) {
}
// Draws one command row of the command list: indent boxes, the colored command
// name label, an executing icon, the summary text and an error icon, and
// handles left-click selection (plain click, ctrl/cmd toggle, shift range).
// NOTE(review): selection mutations are deferred via BlockEditor.actionList
// delegates — presumably applied after GUI layout to avoid modifying the
// selection mid-repaint; confirm against BlockEditor.
public void DrawItem(Rect position, int index)
{
Command command = this[index].objectReferenceValue as Command;
if (command == null)
{
return;
}
CommandInfoAttribute commandInfoAttr = CommandEditor.GetCommandInfo(command.GetType());
if (commandInfoAttr == null)
{
return;
}
var flowchart = (Flowchart)command.GetFlowchart();
if (flowchart == null)
{
return;
}
bool isComment = command.GetType() == typeof(Comment);
bool isLabel = (command.GetType() == typeof(Label));
bool error = false;
// Flatten the summary to a single line; a "Error:" prefix marks a bad command.
string summary = command.GetSummary();
if (summary == null)
{
summary = "";
}
else
{
summary = summary.Replace("\n", "").Replace("\r", "");
}
if (summary.StartsWith("Error:"))
{
error = true;
}
// Comments/labels render bold; normal command summaries render italic.
if (isComment || isLabel)
{
summary = "<b> " + summary + "</b>";
}
else
{
summary = "<i>" + summary + "</i>";
}
bool commandIsSelected = false;
foreach (Command selectedCommand in flowchart.SelectedCommands)
{
if (selectedCommand == command)
{
commandIsSelected = true;
break;
}
}
string commandName = commandInfoAttr.CommandName;
GUIStyle commandLabelStyle = new GUIStyle(GUI.skin.box);
commandLabelStyle.normal.background = FungusEditorResources.texCommandBackground;
int borderSize = 5;
commandLabelStyle.border.top = borderSize;
commandLabelStyle.border.bottom = borderSize;
commandLabelStyle.border.left = borderSize;
commandLabelStyle.border.right = borderSize;
commandLabelStyle.alignment = TextAnchor.MiddleLeft;
commandLabelStyle.richText = true;
commandLabelStyle.fontSize = 11;
commandLabelStyle.padding.top -= 1;
// Draw one grey box per indent level to the left of the command label.
float indentSize = 20;
for (int i = 0; i < command.IndentLevel; ++i)
{
Rect indentRect = position;
indentRect.x += i * indentSize - 21;
indentRect.width = indentSize + 1;
indentRect.y -= 2;
indentRect.height += 5;
GUI.backgroundColor = new Color(0.5f, 0.5f, 0.5f, 1f);
GUI.Box(indentRect, "", commandLabelStyle);
}
float commandNameWidth = Mathf.Max(commandLabelStyle.CalcSize(new GUIContent(commandName)).x, 90f);
float indentWidth = command.IndentLevel * indentSize;
Rect commandLabelRect = position;
commandLabelRect.x += indentWidth - 21;
commandLabelRect.y -= 2;
commandLabelRect.width -= (indentSize * command.IndentLevel - 22);
commandLabelRect.height += 5;
// There's a weird incompatibility between the Reorderable list control used for the command list and
// the UnityEvent list control used in some commands. In play mode, if you click on the reordering grabber
// for a command in the list it causes the UnityEvent list to spew null exception errors.
// The workaround for now is to hide the reordering grabber from mouse clicks by extending the command
// selection rectangle to cover it. We are planning to totally replace the command list display system.
Rect clickRect = position;
clickRect.x -= 20;
clickRect.width += 20;
// Select command via left click
if (Event.current.type == EventType.MouseDown &&
Event.current.button == 0 &&
clickRect.Contains(Event.current.mousePosition))
{
if (flowchart.SelectedCommands.Contains(command) && Event.current.button == 0)
{
// Left click on already selected command
// Command key and shift key is not pressed
if (!EditorGUI.actionKey && !Event.current.shift)
{
BlockEditor.actionList.Add ( delegate {
flowchart.SelectedCommands.Remove(command);
flowchart.ClearSelectedCommands();
});
}
// Command key pressed: toggle this command out of the selection.
if (EditorGUI.actionKey)
{
BlockEditor.actionList.Add ( delegate {
flowchart.SelectedCommands.Remove(command);
});
Event.current.Use();
}
}
else
{
bool shift = Event.current.shift;
// Left click and no command key
if (!shift && !EditorGUI.actionKey && Event.current.button == 0)
{
BlockEditor.actionList.Add ( delegate {
flowchart.ClearSelectedCommands();
});
Event.current.Use();
}
BlockEditor.actionList.Add ( delegate {
flowchart.AddSelectedCommand(command);
});
// Find first and last selected commands
int firstSelectedIndex = -1;
int lastSelectedIndex = -1;
if (flowchart.SelectedCommands.Count > 0)
{
if ( flowchart.SelectedBlock != null)
{
for (int i = 0; i < flowchart.SelectedBlock.CommandList.Count; i++)
{
Command commandInBlock = flowchart.SelectedBlock.CommandList[i];
foreach (Command selectedCommand in flowchart.SelectedCommands)
{
if (commandInBlock == selectedCommand)
{
firstSelectedIndex = i;
break;
}
}
}
for (int i = flowchart.SelectedBlock.CommandList.Count - 1; i >=0; i--)
{
Command commandInBlock = flowchart.SelectedBlock.CommandList[i];
foreach (Command selectedCommand in flowchart.SelectedCommands)
{
if (commandInBlock == selectedCommand)
{
lastSelectedIndex = i;
break;
}
}
}
}
}
// Shift-click: extend the selection range to include the clicked command.
if (shift)
{
int currentIndex = command.CommandIndex;
if (firstSelectedIndex == -1 ||
lastSelectedIndex == -1)
{
// No selected command found - select entire list
firstSelectedIndex = 0;
lastSelectedIndex = currentIndex;
}
else
{
if (currentIndex < firstSelectedIndex)
{
firstSelectedIndex = currentIndex;
}
if (currentIndex > lastSelectedIndex)
{
lastSelectedIndex = currentIndex;
}
}
for (int i = Math.Min(firstSelectedIndex, lastSelectedIndex); i < Math.Max(firstSelectedIndex, lastSelectedIndex); ++i)
{
var selectedCommand = flowchart.SelectedBlock.CommandList[i];
BlockEditor.actionList.Add ( delegate {
flowchart.AddSelectedCommand(selectedCommand);
});
}
}
Event.current.Use();
}
GUIUtility.keyboardControl = 0; // Fix for textarea not refeshing (change focus)
}
// Pick the label tint: selected wins, then disabled, then the flowchart's
// per-command color (errors currently get no special tint — see TODO below).
Color commandLabelColor = Color.white;
if (flowchart.ColorCommands)
{
commandLabelColor = command.GetButtonColor();
}
if (commandIsSelected)
{
commandLabelColor = Color.green;
}
else if (!command.enabled)
{
commandLabelColor = Color.grey;
}
else if (error)
{
// TODO: Show warning icon
}
GUI.backgroundColor = commandLabelColor;
if (isComment)
{
GUI.Label(commandLabelRect, "", commandLabelStyle);
}
else
{
string commandNameLabel;
if (flowchart.ShowLineNumbers)
{
commandNameLabel = command.CommandIndex.ToString() + ": " + commandName;
}
else
{
commandNameLabel = commandName;
}
GUI.Label(commandLabelRect, commandNameLabel, commandLabelStyle);
}
// Fade out the "executing" play icon over FungusConstants.ExecutingIconFadeTime.
if (command.ExecutingIconTimer > Time.realtimeSinceStartup)
{
Rect iconRect = new Rect(commandLabelRect);
iconRect.x += iconRect.width - commandLabelRect.width - 20;
iconRect.width = 20;
iconRect.height = 20;
Color storeColor = GUI.color;
float alpha = (command.ExecutingIconTimer - Time.realtimeSinceStartup) / FungusConstants.ExecutingIconFadeTime;
alpha = Mathf.Clamp01(alpha);
GUI.color = new Color(1f, 1f, 1f, alpha);
GUI.Label(iconRect, FungusEditorResources.texPlaySmall, new GUIStyle());
GUI.color = storeColor;
}
// Summary text sits to the right of the command name (or fills the row for comments).
Rect summaryRect = new Rect(commandLabelRect);
if (isComment)
{
summaryRect.x += 5;
}
else
{
summaryRect.x += commandNameWidth + 5;
summaryRect.width -= commandNameWidth + 5;
}
GUIStyle summaryStyle = new GUIStyle();
summaryStyle.fontSize = 10;
summaryStyle.padding.top += 5;
summaryStyle.richText = true;
summaryStyle.wordWrap = false;
summaryStyle.clipping = TextClipping.Clip;
commandLabelStyle.alignment = TextAnchor.MiddleLeft;
GUI.Label(summaryRect, summary, summaryStyle);
// Draw the editor skin's error badge at the right edge of the summary.
if (error)
{
GUISkin editorSkin = EditorGUIUtility.GetBuiltinSkin(EditorSkin.Inspector);
Rect errorRect = new Rect(summaryRect);
errorRect.x += errorRect.width - 20;
errorRect.y += 2;
errorRect.width = 20;
GUI.Label(errorRect, editorSkin.GetStyle("CN EntryError").normal.background);
summaryRect.width -= 20;
}
// Restore the default background color for subsequent GUI drawing.
GUI.backgroundColor = Color.white;
}
/// <summary>
/// Height of a list row: the fixed item height when one is configured,
/// otherwise the height Unity computes for the serialized property.
/// </summary>
/// <param name="index">Index of the list element.</param>
public virtual float GetItemHeight(int index) {
    if (fixedItemHeight != 0f)
    {
        return fixedItemHeight;
    }
    return EditorGUI.GetPropertyHeight(this[index], GUIContent.none, false);
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Management.Authorization;
using Microsoft.Azure.Management.Authorization.Models;
namespace Microsoft.Azure.Management.Authorization
{
// NOTE: Tool-generated extension class. Each synchronous method is a blocking
// wrapper that schedules the corresponding *Async call on the thread pool and
// waits for its result via Unwrap().GetAwaiter().GetResult(); prefer the
// *Async overloads from asynchronous callers to avoid blocking a thread.
public static partial class RoleDefinitionOperationsExtensions
{
/// <summary>
/// Creates or updates a role definition.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='roleDefinitionId'>
/// Required. Role definition id.
/// </param>
/// <param name='scope'>
/// Required. Scope
/// </param>
/// <param name='parameters'>
/// Required. Role definition.
/// </param>
/// <returns>
/// Role definition create or update operation result.
/// </returns>
public static RoleDefinitionCreateOrUpdateResult CreateOrUpdate(this IRoleDefinitionOperations operations, Guid roleDefinitionId, string scope, RoleDefinitionCreateOrUpdateParameters parameters)
{
// Blocking wrapper over CreateOrUpdateAsync.
return Task.Factory.StartNew((object s) =>
{
return ((IRoleDefinitionOperations)s).CreateOrUpdateAsync(roleDefinitionId, scope, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates a role definition.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='roleDefinitionId'>
/// Required. Role definition id.
/// </param>
/// <param name='scope'>
/// Required. Scope
/// </param>
/// <param name='parameters'>
/// Required. Role definition.
/// </param>
/// <returns>
/// Role definition create or update operation result.
/// </returns>
public static Task<RoleDefinitionCreateOrUpdateResult> CreateOrUpdateAsync(this IRoleDefinitionOperations operations, Guid roleDefinitionId, string scope, RoleDefinitionCreateOrUpdateParameters parameters)
{
// Convenience overload that supplies CancellationToken.None.
return operations.CreateOrUpdateAsync(roleDefinitionId, scope, parameters, CancellationToken.None);
}
/// <summary>
/// Deletes the role definition.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='roleDefinitionId'>
/// Required. Role definition id.
/// </param>
/// <param name='scope'>
/// Required. Scope
/// </param>
/// <returns>
/// Role definition delete operation result.
/// </returns>
public static RoleDefinitionDeleteResult Delete(this IRoleDefinitionOperations operations, Guid roleDefinitionId, string scope)
{
// Blocking wrapper over DeleteAsync.
return Task.Factory.StartNew((object s) =>
{
return ((IRoleDefinitionOperations)s).DeleteAsync(roleDefinitionId, scope);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the role definition.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='roleDefinitionId'>
/// Required. Role definition id.
/// </param>
/// <param name='scope'>
/// Required. Scope
/// </param>
/// <returns>
/// Role definition delete operation result.
/// </returns>
public static Task<RoleDefinitionDeleteResult> DeleteAsync(this IRoleDefinitionOperations operations, Guid roleDefinitionId, string scope)
{
// Convenience overload that supplies CancellationToken.None.
return operations.DeleteAsync(roleDefinitionId, scope, CancellationToken.None);
}
/// <summary>
/// Get role definition by name (GUID).
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='roleDefinitionId'>
/// Required. Role definition Id
/// </param>
/// <param name='scope'>
/// Required. Scope
/// </param>
/// <returns>
/// Role definition get operation result.
/// </returns>
public static RoleDefinitionGetResult Get(this IRoleDefinitionOperations operations, Guid roleDefinitionId, string scope)
{
// Blocking wrapper over GetAsync.
return Task.Factory.StartNew((object s) =>
{
return ((IRoleDefinitionOperations)s).GetAsync(roleDefinitionId, scope);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get role definition by name (GUID).
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='roleDefinitionId'>
/// Required. Role definition Id
/// </param>
/// <param name='scope'>
/// Required. Scope
/// </param>
/// <returns>
/// Role definition get operation result.
/// </returns>
public static Task<RoleDefinitionGetResult> GetAsync(this IRoleDefinitionOperations operations, Guid roleDefinitionId, string scope)
{
// Convenience overload that supplies CancellationToken.None.
return operations.GetAsync(roleDefinitionId, scope, CancellationToken.None);
}
/// <summary>
/// Get role definition by name (GUID).
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='roleDefinitionId'>
/// Required. Fully qualified role definition Id
/// </param>
/// <returns>
/// Role definition get operation result.
/// </returns>
public static RoleDefinitionGetResult GetById(this IRoleDefinitionOperations operations, string roleDefinitionId)
{
// Blocking wrapper over GetByIdAsync.
return Task.Factory.StartNew((object s) =>
{
return ((IRoleDefinitionOperations)s).GetByIdAsync(roleDefinitionId);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get role definition by name (GUID).
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='roleDefinitionId'>
/// Required. Fully qualified role definition Id
/// </param>
/// <returns>
/// Role definition get operation result.
/// </returns>
public static Task<RoleDefinitionGetResult> GetByIdAsync(this IRoleDefinitionOperations operations, string roleDefinitionId)
{
// Convenience overload that supplies CancellationToken.None.
return operations.GetByIdAsync(roleDefinitionId, CancellationToken.None);
}
/// <summary>
/// Get all role definitions that are applicable at scope and above.
/// Use atScopeAndBelow filter to search below the given scope as well
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='scope'>
/// Required. Scope
/// </param>
/// <param name='parameters'>
/// Optional. List role definitions filters.
/// </param>
/// <returns>
/// Role definition list operation result.
/// </returns>
public static RoleDefinitionListResult List(this IRoleDefinitionOperations operations, string scope, ListDefinitionFilterParameters parameters)
{
// Blocking wrapper over ListAsync.
return Task.Factory.StartNew((object s) =>
{
return ((IRoleDefinitionOperations)s).ListAsync(scope, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get all role definitions that are applicable at scope and above.
/// Use atScopeAndBelow filter to search below the given scope as well
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Authorization.IRoleDefinitionOperations.
/// </param>
/// <param name='scope'>
/// Required. Scope
/// </param>
/// <param name='parameters'>
/// Optional. List role definitions filters.
/// </param>
/// <returns>
/// Role definition list operation result.
/// </returns>
public static Task<RoleDefinitionListResult> ListAsync(this IRoleDefinitionOperations operations, string scope, ListDefinitionFilterParameters parameters)
{
// Convenience overload that supplies CancellationToken.None.
return operations.ListAsync(scope, parameters, CancellationToken.None);
}
}
}
| |
#region Apache Notice
/*****************************************************************************
*
* Castle.Igloo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
********************************************************************************/
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Configuration;
using Castle.Core;
using Castle.Igloo.LifestyleManager;
using Castle.Igloo.Navigation;
using Castle.Igloo.Util;
using Castle.MicroKernel;
namespace Castle.Igloo.Scopes.Web
{
/// <summary>
/// <see cref="IPageScope"/> implementation that scopes a single component model to the lifecycle of an aspx page.
/// Values are stored in the underlying session scope under keys of the form
/// "page.{CurrentView}.{name}", so each view gets its own namespace of entries.
/// </summary>
public sealed class WebPageScope : IPageScope
{
    /// <summary>Key prefix used for all page-scoped entries in the session scope.</summary>
    public const string PAGE_SCOPE_SUFFIX = "page.";

    // Both dependencies are set only in the constructor.
    private readonly ISessionScope _sessionScope;
    private readonly NavigationState _navigationState;

    /// <summary>
    /// Initializes a new instance of the <see cref="WebPageScope"/> class.
    /// </summary>
    /// <param name="sessionContest">The session scope backing this page scope.</param>
    /// <param name="navigationState">State of the navigation.</param>
    public WebPageScope(ISessionScope sessionContest,
        NavigationState navigationState)
    {
        AssertUtils.ArgumentNotNull(sessionContest, "sessionContest");
        AssertUtils.ArgumentNotNull(navigationState, "navigationState");
        _sessionScope = sessionContest;
        _navigationState = navigationState;
    }

    /// <summary>
    /// Gets the navigation context.
    /// </summary>
    /// <value>The navigation context.</value>
    private NavigationState NavigationState
    {
        get { return _navigationState; }
    }

    #region IPageContext Members

    /// <summary>
    /// Gets a value indicating whether this context is active
    /// (i.e. an HTTP context is available on the current thread).
    /// </summary>
    /// <value><c>true</c> if this instance is active; otherwise, <c>false</c>.</value>
    public bool IsActive
    {
        get { return WebUtil.GetCurrentHttpContext() != null; }
    }

    /// <summary>
    /// Gets or sets the <see cref="Object"/> with the specified name,
    /// keyed under the current view's page-scope namespace.
    /// </summary>
    /// <value></value>
    public object this[string name]
    {
        get
        {
            TraceUtil.Log("Get from page scope : " + name);
            return _sessionScope[PAGE_SCOPE_SUFFIX + NavigationState.CurrentView + "." + name];
        }
        set
        {
            TraceUtil.Log("Set to page scope : " + name);
            _sessionScope[PAGE_SCOPE_SUFFIX + NavigationState.CurrentView + "." + name] = value;
        }
    }

    /// <summary>
    /// Removes the element with the specified name from the IScope object.
    /// </summary>
    /// <param name="name">The name of the element to remove.</param>
    public void Remove(string name)
    {
        TraceUtil.Log("Remove from page scope : " + name);
        _sessionScope.Remove(PAGE_SCOPE_SUFFIX + NavigationState.CurrentView + "." + name);
    }

    /// <summary>
    /// Determines whether the IDictionary object contains an element with the specified name.
    /// </summary>
    /// <param name="name">The name to locate in the IScope object.</param>
    /// <returns></returns>
    public bool Contains(string name)
    {
        return _sessionScope.Contains(PAGE_SCOPE_SUFFIX + NavigationState.CurrentView + "." + name);
    }

    /// <summary>
    /// Gets all the object names contained in the IScope object for the current view.
    /// </summary>
    /// <value>The names.</value>
    public ICollection Names
    {
        get
        {
            StringCollection names = new StringCollection();
            foreach (string name in _sessionScope.Names)
            {
                // NOTE(review): the prefix has no trailing '.', so a view named
                // "FooBar" would also match entries of a view named "Foo" — confirm
                // whether view names can be prefixes of one another.
                if (name.StartsWith(PAGE_SCOPE_SUFFIX + NavigationState.CurrentView))
                {
                    names.Add(name);
                }
            }
            return names;
        }
    }

    /// <summary>
    /// Removes all the elements belonging to the previous view from the IScope object.
    /// </summary>
    public void Flush()
    {
        TraceUtil.Log("Flush page scope.");
        // Collect matching keys first: removing while enumerating would
        // invalidate the enumerator.
        IEnumerator enumerator = _sessionScope.Names.GetEnumerator();
        IList<string> toRemove = new List<string>();
        while (enumerator.MoveNext())
        {
            string name = (string)enumerator.Current;
            if (name.StartsWith(PAGE_SCOPE_SUFFIX + NavigationState.PreviousView))
            {
                toRemove.Add(name);
            }
        }
        foreach (string name in toRemove)
        {
            TraceUtil.Log("Remove from page scope : " + name);
            _sessionScope.Remove(name);
        }
    }

    /// <summary>
    /// Registers for eviction. Page-scoped components need no explicit
    /// eviction handling, so this is a no-op.
    /// </summary>
    /// <param name="manager">The manager.</param>
    /// <param name="model">The ComponentModel.</param>
    /// <param name="instance">The instance.</param>
    public void RegisterForEviction(ILifestyleManager manager, ComponentModel model, object instance)
    {
    }

    /// <summary>
    /// Checks that the <see cref="ScopeLifestyleModule"/> http module has been
    /// registered, and throws a configuration error otherwise.
    /// </summary>
    public void CheckInitialisation()
    {
        if (!ScopeLifestyleModule.Initialized)
        {
            string message = "Looks like you forgot to register the http module " +
                typeof(ScopeLifestyleModule).FullName +
                "\r\nAdd '<add name=\"ScopeLifestyleModule\" type=\"Castle.Igloo.LifestyleManager.ScopeLifestyleModule, Castle.Igloo\" />' " +
                "to the <httpModules> section on your web.config";
            throw new ConfigurationErrorsException(message);
        }
    }

    /// <summary>
    /// Gets the type of the scope.
    /// </summary>
    /// <value>The type of the scope.</value>
    public string ScopeType
    {
        get { return Igloo.ScopeType.Page; }
    }

    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Orleans.CodeGenerator.Compatibility;
using Orleans.CodeGenerator.Model;
using Orleans.CodeGenerator.Utilities;
using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory;
namespace Orleans.CodeGenerator.Generators
{
/// <summary>
/// Generates GrainReference implementations for grains.
/// </summary>
internal class GrainReferenceGenerator
{
private readonly CodeGeneratorOptions options;
private readonly WellKnownTypes wellKnownTypes;
/// <summary>
/// Initializes a new instance of the <see cref="GrainReferenceGenerator"/> class.
/// </summary>
/// <param name="options">Code generation options (e.g. whether to emit DebuggerStepThrough).</param>
/// <param name="wellKnownTypes">Resolved well-known type symbols used throughout generation.</param>
public GrainReferenceGenerator(CodeGeneratorOptions options, WellKnownTypes wellKnownTypes)
{
this.options = options;
this.wellKnownTypes = wellKnownTypes;
}
/// <summary>
/// Returns the name of the generated GrainReference class for the provided type:
/// the tool name, the type's suitable class name, and the "Reference" suffix.
/// </summary>
internal static string GetGeneratedClassName(INamedTypeSymbol type)
{
    var suitableName = type.GetSuitableClassName();
    return $"{CodeGenerator.ToolName}{suitableName}Reference";
}
/// <summary>
/// Generates the GrainReference implementation class for the provided grain
/// interface: base types, constructors, id/version/name properties, invoke
/// methods, the marker attributes, and (when applicable) generic type
/// parameters and the DebuggerStepThrough attribute.
/// </summary>
internal TypeDeclarationSyntax GenerateClass(GrainInterfaceDescription description)
{
var generatedTypeName = description.ReferenceTypeName;
var grainType = description.Type;
// Mirror the grain interface's hierarchy type parameters on the generated class.
var genericTypes = grainType.GetHierarchyTypeParameters()
.Select(_ => TypeParameter(_.ToString()))
.ToArray();
// Create the special marker attribute.
var grainTypeArgument = TypeOfExpression(grainType.WithoutTypeParameters().ToTypeSyntax());
var attributes = AttributeList()
.AddAttributes(
GeneratedCodeAttributeGenerator.GetGeneratedCodeAttributeSyntax(wellKnownTypes),
Attribute(wellKnownTypes.SerializableAttribute.ToNameSyntax()),
Attribute(wellKnownTypes.ExcludeFromCodeCoverageAttribute.ToNameSyntax()),
Attribute(wellKnownTypes.GrainReferenceAttribute.ToNameSyntax())
.AddArgumentListArguments(AttributeArgument(grainTypeArgument)));
// The generated class derives from GrainReference and implements the grain interface.
var classDeclaration =
ClassDeclaration(generatedTypeName)
.AddModifiers(Token(SyntaxKind.InternalKeyword))
.AddBaseListTypes(
SimpleBaseType(wellKnownTypes.GrainReference.ToTypeSyntax()),
SimpleBaseType(grainType.ToTypeSyntax()))
.AddConstraintClauses(grainType.GetTypeConstraintSyntax())
.AddMembers(GenerateConstructors(generatedTypeName))
.AddMembers(
GrainInterfaceCommon.GenerateInterfaceIdProperty(this.wellKnownTypes, description).AddModifiers(Token(SyntaxKind.OverrideKeyword)),
GrainInterfaceCommon.GenerateInterfaceVersionProperty(this.wellKnownTypes, description).AddModifiers(Token(SyntaxKind.OverrideKeyword)),
GenerateInterfaceNameProperty(description),
GenerateGetMethodNameMethod(description))
.AddMembers(GenerateInvokeMethods(description))
.AddAttributeLists(attributes);
if (genericTypes.Length > 0)
{
classDeclaration = classDeclaration.AddTypeParameterListParameters(genericTypes);
}
// Optionally mark the generated code so debuggers step over it.
if (this.options.DebuggerStepThrough)
{
var debuggerStepThroughAttribute = Attribute(this.wellKnownTypes.DebuggerStepThroughAttribute.ToNameSyntax());
classDeclaration = classDeclaration.AddAttributeLists(AttributeList().AddAttributes(debuggerStepThroughAttribute));
}
return classDeclaration;
}
/// <summary>
/// Generates one constructor per accessible GrainReference base constructor,
/// each with an empty body that simply forwards its arguments to base.
/// </summary>
private MemberDeclarationSyntax[] GenerateConstructors(string className)
{
    var generated = new List<MemberDeclarationSyntax>();
    foreach (var baseCtor in wellKnownTypes.GrainReference.Constructors)
    {
        // Private base constructors cannot be chained to from a derived class.
        if (baseCtor.DeclaredAccessibility == Accessibility.Private)
        {
            continue;
        }
        // Forward each parameter straight through to the base constructor.
        var forwardedArgs = baseCtor.Parameters
            .Select(p => Argument(p.Name.ToIdentifierName()))
            .ToArray();
        var ctorDeclaration =
            baseCtor.GetConstructorDeclarationSyntax(className)
                .WithInitializer(
                    ConstructorInitializer(SyntaxKind.BaseConstructorInitializer)
                        .AddArgumentListArguments(forwardedArgs))
                .AddBodyStatements();
        generated.Add(ctorDeclaration);
    }
    return generated.ToArray();
}
/// <summary>
/// Generates one explicit-interface method implementation per grain interface
/// method. Each generated body packs the arguments into an object array and
/// forwards to GrainReference.InvokeMethodAsync{T} (or InvokeOneWayMethod for
/// void/[OneWay] methods), passing any InvokeMethodOptions derived from the
/// method's attributes.
/// </summary>
private MemberDeclarationSyntax[] GenerateInvokeMethods(GrainInterfaceDescription description)
{
var baseReference = BaseExpression();
var methods = description.Methods;
var members = new List<MemberDeclarationSyntax>();
foreach (var methodDescription in methods)
{
var method = methodDescription.Method;
var methodIdArgument = Argument(methodDescription.MethodId.ToHexLiteral());
// Construct a new object array from all method arguments.
var parameters = method.Parameters.Select((p, i) => (p, GetSanitizedName(p, i))).ToList<(IParameterSymbol Symbol, string Name)>();
var body = new List<StatementSyntax>();
// Emit observer-parameter validation calls before the invocation.
foreach (var parameter in parameters)
{
if (parameter.Symbol.Type.HasInterface(wellKnownTypes.IGrainObserver))
{
body.Add(
ExpressionStatement(
InvocationExpression(wellKnownTypes.GrainFactoryBase.ToNameSyntax().Member("CheckGrainObserverParamInternal"))
.AddArgumentListArguments(Argument(parameter.Name.ToIdentifierName()))));
}
}
// Get the parameters argument value.
var objectArrayType = wellKnownTypes.Object.ToTypeSyntax().GetArrayTypeSyntax();
ExpressionSyntax args;
if (method.IsGenericMethod)
{
// Create an arguments array which includes the method's type parameters followed by the method's parameter list.
var allParameters = new List<ExpressionSyntax>();
foreach (var typeParameter in method.TypeParameters)
{
allParameters.Add(TypeOfExpression(typeParameter.ToTypeSyntax()));
}
allParameters.AddRange(parameters.Select(p => GetParameterForInvocation(p.Symbol, p.Name)));
args =
ArrayCreationExpression(objectArrayType)
.WithInitializer(
InitializerExpression(SyntaxKind.ArrayInitializerExpression)
.AddExpressions(allParameters.ToArray()));
}
else if (parameters.Count == 0)
{
// Parameterless methods pass null instead of an empty array.
args = LiteralExpression(SyntaxKind.NullLiteralExpression);
}
else
{
args =
ArrayCreationExpression(objectArrayType)
.WithInitializer(
InitializerExpression(SyntaxKind.ArrayInitializerExpression)
.AddExpressions(parameters.Select((p => GetParameterForInvocation(p.Symbol, p.Name))).ToArray()));
}
var options = GetInvokeOptions(method);
// Construct the invocation call.
bool asyncMethod;
var isOneWayTask = method.HasAttribute(wellKnownTypes.OneWayAttribute);
if (method.ReturnsVoid || isOneWayTask)
{
// One-way methods are never marked async.
asyncMethod = false;
var invocation = InvocationExpression(baseReference.Member("InvokeOneWayMethod"))
.AddArgumentListArguments(methodIdArgument)
.AddArgumentListArguments(Argument(args));
if (options != null)
{
invocation = invocation.AddArgumentListArguments(options);
}
body.Add(ExpressionStatement(invocation));
// A [OneWay] method must still return an already-completed Task/ValueTask.
if (isOneWayTask)
{
if (SymbolEqualityComparer.Default.Equals(wellKnownTypes.Task, method.ReturnType))
{
var done = wellKnownTypes.Task.ToNameSyntax().Member((object _) => Task.CompletedTask);
body.Add(ReturnStatement(done));
}
else if (wellKnownTypes.ValueTask is WellKnownTypes.Some valueTask
&& SymbolEqualityComparer.Default.Equals(valueTask.Value, method.ReturnType))
{
body.Add(ReturnStatement(LiteralExpression(SyntaxKind.DefaultLiteralExpression)));
}
else
{
throw new CodeGenerationException(
$"Method {method} is marked with [{wellKnownTypes.OneWayAttribute.Name}], " +
$"but has a return type which is not assignable from {typeof(Task)} or {typeof(ValueTask)}");
}
}
}
else if (method.ReturnType is INamedTypeSymbol methodReturnType)
{
// If the method doesn't return a Task type (eg, it returns ValueTask<T>), then we must make an async method and await the invocation result.
var isTaskMethod = SymbolEqualityComparer.Default.Equals(wellKnownTypes.Task, methodReturnType)
|| methodReturnType.IsGenericType && SymbolEqualityComparer.Default.Equals(wellKnownTypes.Task_1, methodReturnType.ConstructedFrom);
asyncMethod = !isTaskMethod;
var returnType = methodReturnType.IsGenericType
? methodReturnType.TypeArguments[0]
: wellKnownTypes.Object;
var invokeMethodAsync = "InvokeMethodAsync".ToGenericName().AddTypeArgumentListArguments(returnType.ToTypeSyntax());
var invocation =
InvocationExpression(MemberAccessExpression(SyntaxKind.SimpleMemberAccessExpression,
baseReference,
invokeMethodAsync))
.AddArgumentListArguments(methodIdArgument)
.AddArgumentListArguments(Argument(args));
if (options != null)
{
invocation = invocation.AddArgumentListArguments(options);
}
var methodResult = asyncMethod ? AwaitExpression(invocation) : (ExpressionSyntax)invocation;
// Non-generic ValueTask methods just await; everything else returns the result.
if (this.wellKnownTypes.ValueTask is WellKnownTypes.Some valueTask
&& SymbolEqualityComparer.Default.Equals(valueTask.Value, methodReturnType))
{
body.Add(ExpressionStatement(methodResult));
}
else
{
body.Add(ReturnStatement(methodResult));
}
}
else throw new NotSupportedException($"Method {method} has unsupported return type, {method.ReturnType}.");
var paramDeclaration = method.Parameters.Select((p, i) => Parameter(GetSanitizedName(p, i).ToIdentifier()).WithType(p.Type.ToTypeSyntax()));
var methodDeclaration = method.GetDeclarationSyntax()
.WithParameterList(ParameterList().AddParameters(paramDeclaration.ToArray()))
.WithModifiers(TokenList())
.WithExplicitInterfaceSpecifier(ExplicitInterfaceSpecifier(method.ContainingType.ToNameSyntax()))
.AddBodyStatements(body.ToArray())
// Since explicit implementation is used, constraints must not be specified.
.WithConstraintClauses(new SyntaxList<TypeParameterConstraintClauseSyntax>());
if (asyncMethod) methodDeclaration = methodDeclaration.AddModifiers(Token(SyntaxKind.AsyncKeyword));
members.Add(methodDeclaration);
}
return members.ToArray();
// Builds the expression passed for a single argument in the invocation array.
ExpressionSyntax GetParameterForInvocation(IParameterSymbol arg, string name)
{
var identifier = name.ToIdentifierName();
// Addressable arguments must be converted to references before passing.
if (arg.Type.HasInterface(wellKnownTypes.IAddressable)
&& arg.Type.TypeKind == TypeKind.Interface)
{
return
ConditionalExpression(
BinaryExpression(SyntaxKind.IsExpression, identifier, wellKnownTypes.Grain.ToTypeSyntax()),
InvocationExpression(identifier.Member("AsReference".ToGenericName().AddTypeArgumentListArguments(arg.Type.ToTypeSyntax()))),
identifier);
}
return identifier;
}
// Produces a unique, non-empty parameter name by appending the position index.
static string GetSanitizedName(IParameterSymbol parameter, int index)
{
var parameterName = string.IsNullOrWhiteSpace(parameter.Name) ? "arg" : parameter.Name;
return string.Format(CultureInfo.InvariantCulture, "{0}{1:G}", parameterName, index);
}
}
/// <summary>
/// Returns syntax for the options argument to GrainReference.InvokeMethodAsync{T} and GrainReference.InvokeOneWayMethod.
/// Flags are derived from the method's [ReadOnly], [Unordered], [AlwaysInterleave]
/// and [Transaction] attributes and combined with bitwise-or; returns null when
/// the method carries none of these attributes.
/// </summary>
private ArgumentSyntax GetInvokeOptions(IMethodSymbol method)
{
var options = new List<ExpressionSyntax>();
var imo = wellKnownTypes.InvokeMethodOptions.ToNameSyntax();
if (method.HasAttribute(wellKnownTypes.ReadOnlyAttribute))
{
options.Add(imo.Member("ReadOnly"));
}
if (method.HasAttribute(wellKnownTypes.UnorderedAttribute))
{
options.Add(imo.Member("Unordered"));
}
if (method.HasAttribute(wellKnownTypes.AlwaysInterleaveAttribute))
{
options.Add(imo.Member("AlwaysInterleave"));
}
if (method.GetAttribute(wellKnownTypes.TransactionAttribute, out var attr))
{
// Map the attribute's integer constructor argument back to the
// TransactionOption enum member name, then to the matching flag.
var enumType = wellKnownTypes.TransactionOption;
var txRequirement = (int)attr.ConstructorArguments.First().Value;
var values = enumType.GetMembers().OfType<IFieldSymbol>().ToList();
var mapping = values.ToDictionary(m => (int)m.ConstantValue, m => m.Name);
if (!mapping.TryGetValue(txRequirement, out var value))
{
throw new NotSupportedException(
$"Transaction requirement {txRequirement} on method {method} was not understood."
+ $" Known values: {string.Join(", ", mapping.Select(kv => $"{kv.Key} ({kv.Value})"))}");
}
switch (value)
{
case "Suppress":
options.Add(imo.Member("TransactionSuppress"));
break;
case "CreateOrJoin":
options.Add(imo.Member("TransactionCreateOrJoin"));
break;
case "Create":
options.Add(imo.Member("TransactionCreate"));
break;
case "Join":
options.Add(imo.Member("TransactionJoin"));
break;
case "Supported":
options.Add(imo.Member("TransactionSupported"));
break;
case "NotAllowed":
options.Add(imo.Member("TransactionNotAllowed"));
break;
default:
throw new NotSupportedException($"Transaction requirement {value} on method {method} was not understood.");
}
}
// Combine all collected flags with bitwise-or (a single flag needs no combining).
ExpressionSyntax allOptions;
if (options.Count <= 1)
{
allOptions = options.FirstOrDefault();
}
else
{
allOptions =
options.Aggregate((a, b) => BinaryExpression(SyntaxKind.BitwiseOrExpression, a, b));
}
if (allOptions == null)
{
return null;
}
// Pass as a named argument: options: <flags>.
return Argument(NameColon("options"), Token(SyntaxKind.None), allOptions);
}
/// <summary>
/// Generates the <c>public override string InterfaceName</c> expression-bodied property,
/// which returns the grain interface type's name as a string literal.
/// </summary>
private MemberDeclarationSyntax GenerateInterfaceNameProperty(GrainInterfaceDescription description)
{
    var interfaceNameLiteral = description.Type.Name.ToLiteralExpression();
    var property = PropertyDeclaration(wellKnownTypes.String.ToTypeSyntax(), "InterfaceName")
        .AddModifiers(Token(SyntaxKind.PublicKeyword), Token(SyntaxKind.OverrideKeyword))
        .WithExpressionBody(ArrowExpressionClause(interfaceNameLiteral))
        .WithSemicolonToken(Token(SyntaxKind.SemicolonToken));
    return property;
}
/// <summary>
/// Generates the override of GrainReference.GetMethodName: a switch over the interface id whose
/// cases contain a nested switch over the method id, returning the method name or invoking the
/// generated Throw*NotImplemented helper functions for unknown ids.
/// </summary>
private MethodDeclarationSyntax GenerateGetMethodNameMethod(GrainInterfaceDescription description)
{
    var method = wellKnownTypes.GrainReference.Method("GetMethodName");
    var methodDeclaration = method.GetDeclarationSyntax().AddModifiers(Token(SyntaxKind.OverrideKeyword));
    var parameters = method.Parameters;
    var interfaceIdArgument = parameters[0].Name.ToIdentifierName();
    var methodIdArgument = parameters[1].Name.ToIdentifierName();
    // Call emitted in each per-interface default case when the method id is unknown.
    var callThrowMethodNotImplemented = InvocationExpression(IdentifierName("ThrowMethodNotImplemented"))
        .WithArgumentList(ArgumentList(SeparatedList(new[]
        {
            Argument(interfaceIdArgument),
            Argument(methodIdArgument)
        })));
    // This method is used directly after its declaration to create blocks for each interface id, comprising
    // primarily of a nested switch statement for each of the methods in the given interface.
    // (interfaceType is unused but required by the callback signature.)
    BlockSyntax ComposeInterfaceBlock(INamedTypeSymbol interfaceType, SwitchStatementSyntax methodSwitch)
    {
        return Block(methodSwitch.AddSections(SwitchSection()
            .AddLabels(DefaultSwitchLabel())
            .AddStatements(
                ExpressionStatement(callThrowMethodNotImplemented),
                ReturnStatement(LiteralExpression(SyntaxKind.NullLiteralExpression)))));
    }
    var interfaceCases = GrainInterfaceCommon.GenerateGrainInterfaceAndMethodSwitch(
        wellKnownTypes,
        description.Type,
        methodIdArgument,
        methodType => new StatementSyntax[] { ReturnStatement(methodType.Name.ToLiteralExpression()) },
        ComposeInterfaceBlock);
    // Generate the default case, which will throw a NotImplementedException.
    var callThrowInterfaceNotImplemented = InvocationExpression(IdentifierName("ThrowInterfaceNotImplemented"))
        .WithArgumentList(ArgumentList(SingletonSeparatedList(Argument(interfaceIdArgument))));
    var defaultCase = SwitchSection()
        .AddLabels(DefaultSwitchLabel())
        .AddStatements(
            ExpressionStatement(callThrowInterfaceNotImplemented),
            ReturnStatement(LiteralExpression(SyntaxKind.NullLiteralExpression)));
    // FIX: these two locals previously had their names crossed with the factory each was assigned
    // from (throwInterfaceNotImplemented held the *method*-not-implemented helper and vice versa).
    // Both helpers were still emitted, so the generated output is unchanged; the names now match.
    var throwMethodNotImplemented = GrainInterfaceCommon.GenerateMethodNotImplementedFunction(wellKnownTypes);
    var throwInterfaceNotImplemented = GrainInterfaceCommon.GenerateInterfaceNotImplementedFunction(wellKnownTypes);
    var interfaceIdSwitch =
        SwitchStatement(interfaceIdArgument).AddSections(interfaceCases.ToArray()).AddSections(defaultCase);
    // Statement order preserved from the original: switch, method-not-implemented helper,
    // interface-not-implemented helper.
    return methodDeclaration.AddBodyStatements(interfaceIdSwitch, throwMethodNotImplemented, throwInterfaceNotImplemented);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Web;
using System.Web.Hosting;
using System.Web.Mvc;
namespace Orchard.UI.Resources {
/// <summary>
/// Describes a single named resource (script, stylesheet, ...) declared by a resource manifest:
/// its URL variants (normal/debug/CDN), physical paths, supported cultures, dependencies,
/// and the HTML tag used to render it.
/// </summary>
public class ResourceDefinition {
    // Used to strip a query string before mapping a URL to a physical path.
    private static readonly char[] _queryStringChars = new [] { '?' };
    // Resource type -> HTML tag name; unknown types fall back to "meta" in the constructor.
    private static readonly Dictionary<string, string> _resourceTypeTagNames = new Dictionary<string, string> {
        { "script", "script" },
        { "stylesheet", "link" }
    };
    // Tag name -> attribute that carries the resource URL.
    private static readonly Dictionary<string, string> _filePathAttributes = new Dictionary<string, string> {
        { "script", "src" },
        { "link", "href" }
    };
    // Default attributes emitted for each resource type.
    private static readonly Dictionary<string, Dictionary<string, string>> _resourceAttributes = new Dictionary<string, Dictionary<string, string>> {
        { "script", new Dictionary<string, string> { {"type", "text/javascript"} } },
        { "stylesheet", new Dictionary<string, string> { {"type", "text/css"}, {"rel", "stylesheet"} } }
    };
    // <script> requires a closing tag; <link> is rendered self-closing.
    private static readonly Dictionary<string, TagRenderMode> _fileTagRenderModes = new Dictionary<string, TagRenderMode> {
        { "script", TagRenderMode.Normal },
        { "link", TagRenderMode.SelfClosing }
    };
    // Conventional sub-directory per resource type, appended to the manifest/view base path.
    private static readonly Dictionary<string, string> _resourceTypeDirectories = new Dictionary<string, string> {
        {"script", "scripts/"},
        {"stylesheet", "styles/"}
    };
    private string _basePath;
    private string _physicalPath;
    private string _physicalPathDebug;
    // Initializes the tag builder, render mode, default attributes, and URL attribute name
    // from the static lookup tables above, based on the resource type.
    public ResourceDefinition(ResourceManifest manifest, string type, string name) {
        Manifest = manifest;
        Type = type;
        Name = name;
        TagBuilder = new TagBuilder(_resourceTypeTagNames.ContainsKey(type) ? _resourceTypeTagNames[type] : "meta");
        TagRenderMode = _fileTagRenderModes.ContainsKey(TagBuilder.TagName) ? _fileTagRenderModes[TagBuilder.TagName] : TagRenderMode.Normal;
        Dictionary<string, string> attributes;
        if (_resourceAttributes.TryGetValue(type, out attributes)) {
            foreach (var pair in attributes) {
                TagBuilder.Attributes[pair.Key] = pair.Value;
            }
        }
        FilePathAttributeName = _filePathAttributes.ContainsKey(TagBuilder.TagName) ? _filePathAttributes[TagBuilder.TagName] : null;
    }
    // Derives a base path from a view path: everything up to (and including the slash before)
    // the "/Views" segment, plus the conventional resource sub-directory.
    // Returns null when the view path is empty or contains no "/Views" segment.
    internal static string GetBasePathFromViewPath(string resourceType, string viewPath) {
        if (String.IsNullOrEmpty(viewPath)) {
            return null;
        }
        string basePath = null;
        var viewsPartIndex = viewPath.IndexOf("/Views", StringComparison.OrdinalIgnoreCase);
        if (viewsPartIndex >= 0) {
            basePath = viewPath.Substring(0, viewsPartIndex + 1) + GetResourcePath(resourceType);
        }
        return basePath;
    }
    // Returns the conventional sub-directory for the resource type ("scripts/", "styles/"),
    // or the empty string for unknown types.
    internal static string GetResourcePath(string resourceType) {
        string path;
        _resourceTypeDirectories.TryGetValue(resourceType, out path);
        return path ?? "";
    }
    // Returns the first non-empty string, or null when all are null/empty.
    private static string Coalesce(params string[] strings) {
        foreach (var str in strings) {
            if (!String.IsNullOrEmpty(str)) {
                return str;
            }
        }
        return null;
    }
    public IResourceManifest Manifest { get; private set; }
    public string TagName {
        get { return TagBuilder.TagName; }
    }
    public TagRenderMode TagRenderMode { get; private set; }
    public string Name { get; private set; }
    public string Type { get; private set; }
    public string Version { get; private set; }
    public string Url { get; private set; }
    public string UrlDebug { get; private set; }
    public string UrlCdn { get; private set; }
    public string UrlCdnDebug { get; private set; }
    // Explicit base path set via SetBasePath wins; otherwise the manifest's base path
    // plus the conventional resource sub-directory. Never returns null.
    public string BasePath {
        get {
            if (!String.IsNullOrEmpty(_basePath)) {
                return _basePath;
            }
            var basePath = Manifest.BasePath;
            if (!String.IsNullOrEmpty(basePath)) {
                basePath += GetResourcePath(Type);
            }
            return basePath ?? "";
        }
    }
    // Explicit physical path wins; otherwise mapped from Url (may be null).
    public string PhysicalPath {
        get {
            if (!String.IsNullOrEmpty(_physicalPath)) {
                return _physicalPath;
            }
            return GetPhysicalPath(Url);
        }
    }
    // Explicit debug physical path wins; otherwise mapped from UrlDebug (may be null).
    public string PhysicalPathDebug {
        get {
            if (!String.IsNullOrEmpty(_physicalPathDebug)) {
                return _physicalPathDebug;
            }
            return GetPhysicalPath(UrlDebug);
        }
    }
    public string[] Cultures { get; private set; }
    public IEnumerable<string> Dependencies { get; private set; }
    public string FilePathAttributeName { get; private set; }
    public TagBuilder TagBuilder { get; private set; }
    // Merges an attribute into the rendered tag without overwriting an existing value.
    public ResourceDefinition AddAttribute(string name, string value) {
        TagBuilder.MergeAttribute(name, value);
        return this;
    }
    // Merges an attribute into the rendered tag, overwriting any existing value.
    public ResourceDefinition SetAttribute(string name, string value) {
        TagBuilder.MergeAttribute(name, value, true);
        return this;
    }
    public ResourceDefinition SetBasePath(string virtualPath) {
        _basePath = virtualPath;
        return this;
    }
    public ResourceDefinition SetUrl(string url) {
        return SetUrl(url, null);
    }
    // Sets the resource URL and, optionally, a separate debug-mode URL.
    public ResourceDefinition SetUrl(string url, string urlDebug) {
        if (String.IsNullOrEmpty(url)) {
            throw new ArgumentNullException("url");
        }
        Url = url;
        if (urlDebug != null) {
            UrlDebug = urlDebug;
        }
        return this;
    }
    public ResourceDefinition SetCdn(string cdnUrl) {
        return SetCdn(cdnUrl, null);
    }
    // Sets the CDN URL and, optionally, a separate debug-mode CDN URL.
    public ResourceDefinition SetCdn(string cdnUrl, string cdnUrlDebug) {
        if (string.IsNullOrWhiteSpace(cdnUrl)) throw new ArgumentNullException("cdnUrl");
        UrlCdn = cdnUrl;
        if (!string.IsNullOrWhiteSpace(cdnUrlDebug)) UrlCdnDebug = cdnUrlDebug;
        return this;
    }
    public ResourceDefinition SetPhysicalPath(string physicalPath) {
        return SetPhysicalPath(physicalPath, null);
    }
    // Overrides the physical path(s) used for file-hash computation instead of mapping from URLs.
    public ResourceDefinition SetPhysicalPath(string physicalPath, string physicalPathDebug) {
        if (String.IsNullOrEmpty(physicalPath)) {
            throw new ArgumentNullException("physicalPath");
        }
        _physicalPath = physicalPath;
        if (physicalPathDebug != null) {
            _physicalPathDebug = physicalPathDebug;
        }
        return this;
    }
    /// <summary>
    /// Sets the version of the resource.
    /// </summary>
    /// <param name="version">The version to set, in the form of <code>major.minor[.build[.revision]]</code></param>
    public ResourceDefinition SetVersion(string version) {
        Version = version;
        return this;
    }
    public ResourceDefinition SetCultures(params string[] cultures) {
        Cultures = cultures;
        return this;
    }
    public ResourceDefinition SetDependencies(params string[] dependencies) {
        Dependencies = dependencies;
        return this;
    }
    // Resolves the URL to render for this resource, honoring debug/CDN settings, culture,
    // the base path, the application path, and (optionally) a file-hash query-string value.
    // Returns null when no URL variant is available.
    public string ResolveUrl(RequireSettings settings, string applicationPath, IResourceFileHashProvider resourceFileHashProvider) {
        string url;
        string physicalPath = null;
        // Url priority: pick the first non-empty variant, preferring debug and/or CDN
        // variants according to the current settings.
        if (settings.DebugMode) {
            url = settings.CdnMode
                ? Coalesce(UrlCdnDebug, UrlDebug, UrlCdn, Url)
                : Coalesce(UrlDebug, Url, UrlCdnDebug, UrlCdn);
        }
        else {
            url = settings.CdnMode
                ? Coalesce(UrlCdn, Url, UrlCdnDebug, UrlDebug)
                : Coalesce(Url, UrlDebug, UrlCdn, UrlCdnDebug);
        }
        // Only local (non-CDN) URLs get a physical path, needed for the file-hash step below.
        if (url == UrlDebug) {
            physicalPath = PhysicalPathDebug;
        }
        else if (url == Url) {
            physicalPath = PhysicalPath;
        }
        if (String.IsNullOrEmpty(url)) {
            return null;
        }
        // Insert the nearest matching culture name before the file extension, e.g. foo.fr-FR.js.
        if (!String.IsNullOrEmpty(settings.Culture)) {
            string nearestCulture = FindNearestCulture(settings.Culture);
            if (!String.IsNullOrEmpty(nearestCulture)) {
                url = Path.ChangeExtension(url, nearestCulture + Path.GetExtension(url));
            }
        }
        if (!Uri.IsWellFormedUriString(url, UriKind.Absolute) && !VirtualPathUtility.IsAbsolute(url) && !VirtualPathUtility.IsAppRelative(url) && !String.IsNullOrEmpty(BasePath)) {
            // relative urls are relative to the base path of the module that defined the manifest
            url = VirtualPathUtility.Combine(BasePath, url);
        }
        if (VirtualPathUtility.IsAppRelative(url)) {
            url = applicationPath != null
                ? VirtualPathUtility.ToAbsolute(url, applicationPath)
                : VirtualPathUtility.ToAbsolute(url);
        }
        // Cache-busting: append a content hash when the resource exists on disk.
        if (settings.FileHashMode && !String.IsNullOrEmpty(physicalPath) && File.Exists(physicalPath)) {
            url = AddQueryStringValue(url, "fileHash", resourceFileHashProvider.GetResourceFileHash(physicalPath));
        }
        return url;
    }
    // Finds the best supported culture for the requested one: exact match first,
    // then recursively the parent culture chain; null when nothing matches.
    private string FindNearestCulture(string culture) {
        // go for an exact match
        if (Cultures == null) {
            return null;
        }
        int selectedIndex = Array.IndexOf(Cultures, culture);
        if (selectedIndex != -1) {
            return Cultures[selectedIndex];
        }
        // try parent culture if any
        var cultureInfo = CultureInfo.GetCultureInfo(culture);
        if (cultureInfo.Parent.Name != culture) {
            var selectedCulture = FindNearestCulture(cultureInfo.Parent.Name);
            if (selectedCulture != null) {
                return selectedCulture;
            }
        }
        return null;
    }
    // Two definitions are equal when Name, Type and Version all match (ordinal).
    public override bool Equals(object obj) {
        if (obj == null || obj.GetType() != GetType()) {
            return false;
        }
        var that = (ResourceDefinition)obj;
        return string.Equals(that.Name, Name, StringComparison.Ordinal) &&
               string.Equals(that.Type, Type, StringComparison.Ordinal) &&
               string.Equals(that.Version, Version, StringComparison.Ordinal);
    }
    // Intentionally omits Version: equal instances still hash equally, at the cost of
    // collisions between different versions of the same resource.
    public override int GetHashCode() {
        return (Name ?? "").GetHashCode() ^ (Type ?? "").GetHashCode();
    }
    // Maps a local (non-absolute-URI, non-protocol-relative) URL to a physical file path,
    // stripping any query string; null for external or unmappable URLs.
    private string GetPhysicalPath(string url) {
        if (!String.IsNullOrEmpty(url) && !Uri.IsWellFormedUriString(url, UriKind.Absolute) && !url.StartsWith("//")) {
            if (VirtualPathUtility.IsAbsolute(url) || VirtualPathUtility.IsAppRelative(url)) {
                return HostingEnvironment.MapPath(url.Split(_queryStringChars)[0]);
            }
            if (!String.IsNullOrEmpty(BasePath)) {
                return HostingEnvironment.MapPath(VirtualPathUtility.Combine(BasePath, url.Split(_queryStringChars)[0]));
            }
        }
        return null;
    }
    // Appends name=value (value URL-encoded) to the URL, using '?', '&', or nothing
    // depending on the existing query string.
    private string AddQueryStringValue(string url, string name, string value) {
        if (String.IsNullOrEmpty(url)) {
            return null;
        }
        var encodedValue = HttpUtility.UrlEncode(value);
        if (url.Contains("?")) {
            if (url.EndsWith("&")) {
                return String.Format("{0}{1}={2}", url, name, encodedValue);
            }
            else {
                return String.Format("{0}&{1}={2}", url, name, encodedValue);
            }
        }
        else {
            return String.Format("{0}?{1}={2}", url, name, encodedValue);
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.Versioning;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using NuGet.Test.Mocks;
namespace NuGet.Test {
[TestClass]
public class ProjectManagerTest {
// Verifies that AddPackageReference validates its packageId argument.
[TestMethod]
public void AddingPackageReferenceNullOrEmptyPackageIdThrows() {
    // Arrange: a project manager over empty repositories.
    var manager = CreateProjectManager();

    // Act & Assert: both null and empty ids must be rejected, naming the "packageId" parameter.
    ExceptionAssert.ThrowsArgNullOrEmpty(() => manager.AddPackageReference((string)null), "packageId");
    ExceptionAssert.ThrowsArgNullOrEmpty(() => manager.AddPackageReference(String.Empty), "packageId");
}
// Verifies that referencing a package id absent from the source repository fails loudly.
[TestMethod]
public void AddingUnknownPackageReferenceThrows() {
    // Arrange: no packages exist anywhere.
    var manager = CreateProjectManager();

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(
        () => manager.AddPackageReference("unknown"),
        "Unable to find package 'unknown'.");
}
// Verifies that even when copying a content file into the project throws, the package
// reference has already been recorded in the local repository.
// NOTE(review): this pins the (perhaps surprising) behavior that the reference is added
// before file copying completes — confirm this ordering is intended before changing it.
[TestMethod]
public void AddingPackageReferenceThrowsExceptionPackageReferenceIsAdded() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    // Simulate a locked/unwritable project file system for the package's single content file.
    projectSystem.Setup(m => m.AddFile("file", It.IsAny<Stream>())).Throws<UnauthorizedAccessException>();
    projectSystem.Setup(m => m.Root).Returns("FakeRoot");
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem.Object), projectSystem.Object, new MockPackageRepository());
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "file" });
    sourceRepository.AddPackage(packageA);
    // Act
    ExceptionAssert.Throws<UnauthorizedAccessException>(() => projectManager.AddPackageReference("A"));
    // Assert
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageA));
}
// Verifies that a content file with the ".pp" (preprocessor) extension is transformed on
// install: the processed output is written without the ".pp" suffix.
[TestMethod]
public void AddingPackageReferenceAddsPreprocessedFileToTargetPathWithRemovedExtension() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { @"foo\bar\file.pp" });
    sourceRepository.AddPackage(packageA);
    // Act
    projectManager.AddPackageReference("A");
    // Assert
    Assert.IsFalse(projectSystem.FileExists(@"foo\bar\file.pp"));
    Assert.IsTrue(projectSystem.FileExists(@"foo\bar\file"));
}
// Verifies that downgrading is rejected: when A 2.0 is already installed in the project,
// requesting A 1.0 must throw rather than silently replace the newer version.
[TestMethod]
public void AddPackageReferenceWhenNewVersionOfPackageAlreadyReferencedThrows() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B")
        }, content: new[] { "foo" });
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B")
        }, content: new[] { "foo" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "foo" });
    // A 2.0 (and its dependency B 1.0) are already installed in the project.
    projectManager.LocalRepository.AddPackage(packageA20);
    projectManager.LocalRepository.AddPackage(packageB10);
    // FIX: each package was previously added to the source repository twice (A20 and B10
    // appeared in duplicate); a single registration per package is sufficient.
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.AddPackageReference("A", Version.Parse("1.0")), @"Already referencing a newer version of 'A'.");
}
// Verifies that removing a reference to a package that was never installed fails loudly.
[TestMethod]
public void RemovingUnknownPackageReferenceThrows() {
    // Arrange: nothing installed.
    var manager = CreateProjectManager();

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(
        () => manager.RemovePackageReference("foo"),
        "Unable to find package 'foo'.");
}
// Verifies that removing package A succeeds even though another installed package (B) carries
// assembly references targeting a framework that was never copied into this project.
[TestMethod]
public void RemovingPackageReferenceWithOtherProjectWithReferencesThatWereNotCopiedToProject() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var packageA = PackageUtility.CreatePackage("A", "1.0", content: new[] { "a.file" });
    // B has only an assembly reference for an unsupported framework ("SP" 40.0), so nothing
    // from B is actually present in the project system.
    var packageB = PackageUtility.CreatePackage("B", "1.0",
        content: null,
        assemblyReferences: new[] { PackageUtility.CreateAssemblyReference("foo.dll", new FrameworkName("SP", new Version("40.0"))) },
        tools: null,
        dependencies: null,
        rating: null,
        description: null);
    projectManager.LocalRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageA);
    projectManager.LocalRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageB);
    // Act
    projectManager.RemovePackageReference("A");
    // Assert
    Assert.IsFalse(projectManager.LocalRepository.Exists(packageA));
}
// Verifies that RemovePackageReference validates its packageId argument.
[TestMethod]
public void RemovingUnknownPackageReferenceNullOrEmptyPackageIdThrows() {
    // Arrange
    var manager = CreateProjectManager();

    // Act & Assert: both null and empty ids must be rejected, naming the "packageId" parameter.
    ExceptionAssert.ThrowsArgNullOrEmpty(() => manager.RemovePackageReference((string)null), "packageId");
    ExceptionAssert.ThrowsArgNullOrEmpty(() => manager.RemovePackageReference(String.Empty), "packageId");
}
// Verifies the simple uninstall path: a package with no dependents is removed
// from the project's local repository.
[TestMethod]
public void RemovingPackageReferenceWithNoDependents() {
    // Arrange
    var source = new MockPackageRepository();
    var fileSystem = new MockProjectSystem();
    var manager = new ProjectManager(source, new DefaultPackagePathResolver(fileSystem), fileSystem, new MockPackageRepository());
    var installedPackage = PackageUtility.CreatePackage("foo", "1.2.33", content: new[] { "file1" });
    manager.LocalRepository.AddPackage(installedPackage);
    source.AddPackage(installedPackage);

    // Act
    manager.RemovePackageReference("foo");

    // Assert
    Assert.IsFalse(manager.LocalRepository.Exists(installedPackage));
}
// Verifies that installing a package adds its content files and assembly references to the
// project system and records the package in the local repository (tool files are not added).
[TestMethod]
public void AddPackageReferenceAddsContentAndReferencesProjectSystem() {
    // Arrange
    var projectSystem = new MockProjectSystem();
    var localRepository = new MockPackageRepository();
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, localRepository);
    var packageA = PackageUtility.CreatePackage("A", "1.0",
        new[] { "contentFile" },
        new[] { "reference.dll" },
        new[] { "tool" });
    mockRepository.AddPackage(packageA);
    // Act
    projectManager.AddPackageReference("A");
    // Assert
    Assert.AreEqual(1, projectSystem.Paths.Count);
    Assert.AreEqual(1, projectSystem.References.Count);
    Assert.IsTrue(projectSystem.References.ContainsKey(@"reference.dll"));
    Assert.IsTrue(projectSystem.FileExists(@"contentFile"));
    Assert.IsTrue(localRepository.Exists("A"));
}
// Verifies last-writer-wins semantics for assembly references: when two packages ship a
// reference with the same file name, the most recently installed package's copy is used.
[TestMethod]
public void AddPackageReferenceAddingPackageWithDuplicateReferenceOverwritesReference() {
    // Arrange
    var projectSystem = new MockProjectSystem();
    var localRepository = new MockPackageRepository();
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, localRepository);
    var packageA = PackageUtility.CreatePackage("A", "1.0",
        assemblyReferences: new[] { "reference.dll" });
    var packageB = PackageUtility.CreatePackage("B", "1.0",
        assemblyReferences: new[] { "reference.dll" });
    mockRepository.AddPackage(packageA);
    mockRepository.AddPackage(packageB);
    // Act: B is installed after A, so B's copy of reference.dll should win.
    projectManager.AddPackageReference("A");
    projectManager.AddPackageReference("B");
    // Assert
    Assert.AreEqual(0, projectSystem.Paths.Count);
    Assert.AreEqual(1, projectSystem.References.Count);
    Assert.IsTrue(projectSystem.References.ContainsKey(@"reference.dll"));
    Assert.IsTrue(projectSystem.References.ContainsValue(@"B.1.0\reference.dll"));
    Assert.IsTrue(localRepository.Exists("A"));
    Assert.IsTrue(localRepository.Exists("B"));
}
// Verifies that PackageReferenceAdding/PackageReferenceAdded fire during install with the
// expected install path and package instance. Assertions live inside the event handlers;
// NOTE(review): if the events never fire, the handler asserts never run and the test still
// passes — the event wiring itself is what is being exercised here.
[TestMethod]
public void AddPackageReferenceRaisesOnBeforeInstallAndOnAfterInstall() {
    // Arrange
    var projectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var packageA = PackageUtility.CreatePackage("A", "1.0",
        new[] { "contentFile" },
        new[] { "reference.dll" },
        new[] { "tool" });
    projectManager.PackageReferenceAdding += (sender, e) => {
        // Assert
        Assert.AreEqual(e.InstallPath, @"C:\MockFileSystem\A.1.0");
        Assert.AreSame(e.Package, packageA);
    };
    projectManager.PackageReferenceAdded += (sender, e) => {
        // Assert
        Assert.AreEqual(e.InstallPath, @"C:\MockFileSystem\A.1.0");
        Assert.AreSame(e.Package, packageA);
    };
    mockRepository.AddPackage(packageA);
    // Act
    projectManager.AddPackageReference("A");
}
// Verifies that PackageReferenceRemoving/PackageReferenceRemoved fire during uninstall with the
// expected install path and package instance. Assertions live inside the event handlers;
// NOTE(review): if the events never fire, the handler asserts never run and the test still passes.
[TestMethod]
public void RemovePackageReferenceRaisesOnBeforeUninstallAndOnAfterUninstall() {
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
        new[] { @"sub\file1", @"sub\file2" });
    projectManager.PackageReferenceRemoving += (sender, e) => {
        // Assert
        Assert.AreEqual(e.InstallPath, @"C:\MockFileSystem\A.1.0");
        Assert.AreSame(e.Package, packageA);
    };
    projectManager.PackageReferenceRemoved += (sender, e) => {
        // Assert
        Assert.AreEqual(e.InstallPath, @"C:\MockFileSystem\A.1.0");
        Assert.AreSame(e.Package, packageA);
    };
    mockRepository.AddPackage(packageA);
    // Install first so there is a reference to remove.
    projectManager.AddPackageReference("A");
    // Act
    projectManager.RemovePackageReference("A");
}
// Verifies that uninstalling A deletes only files unique to A; a content file shared with a
// still-installed package (B) must be left in the project.
[TestMethod]
public void RemovePackageReferenceExcludesFileIfAnotherPackageUsesThem() {
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
        new[] { "fileA", "commonFile" });
    IPackage packageB = PackageUtility.CreatePackage("B", "1.0",
        new[] { "fileB", "commonFile" });
    mockRepository.AddPackage(packageA);
    mockRepository.AddPackage(packageB);
    projectManager.AddPackageReference("A");
    projectManager.AddPackageReference("B");
    // Act
    projectManager.RemovePackageReference("A");
    // Assert
    Assert.IsTrue(mockProjectSystem.Deleted.Contains(@"fileA"));
    Assert.IsTrue(mockProjectSystem.FileExists(@"commonFile"));
}
// Verifies that content files the project system reports as unsupported are skipped during
// install while supported files are still added and the package is recorded as installed.
[TestMethod]
public void AddPackageWithUnsupportedFilesSkipsUnsupportedFiles() {
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    // The project system rejects the file named "unsupported".
    projectSystem.Setup(m => m.IsSupportedFile("unsupported")).Returns(false);
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem.Object), projectSystem.Object, localRepository);
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "a", "b", "unsupported" });
    sourceRepository.AddPackage(packageA);
    // Act
    projectManager.AddPackageReference("A");
    // Assert
    Assert.AreEqual(2, projectSystem.Object.Paths.Count);
    Assert.IsTrue(projectSystem.Object.FileExists("a"));
    Assert.IsTrue(projectSystem.Object.FileExists("b"));
    Assert.IsTrue(localRepository.Exists("A"));
    Assert.IsFalse(projectSystem.Object.FileExists("unsupported"));
}
// Verifies that a ".pp" transform file whose target ("unsupported") the project system rejects
// is skipped: neither the transform source nor its processed output appears in the project.
[TestMethod]
public void AddPackageWithUnsupportedTransformFileSkipsUnsupportedFile() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var localRepository = new MockPackageRepository();
    var projectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    projectSystem.Setup(m => m.IsSupportedFile("unsupported")).Returns(false);
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem.Object), projectSystem.Object, localRepository);
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "a", "b", "unsupported.pp" });
    sourceRepository.AddPackage(packageA);
    // Act
    projectManager.AddPackageReference("A");
    // Assert
    Assert.AreEqual(2, projectSystem.Object.Paths.Count);
    Assert.IsTrue(projectSystem.Object.FileExists("a"));
    Assert.IsTrue(projectSystem.Object.FileExists("b"));
    Assert.IsTrue(localRepository.Exists("A"));
    Assert.IsFalse(projectSystem.Object.FileExists("unsupported"));
}
// Verifies that installing a package with a web.config.transform merges the transform's
// XML (the configSections element) into the project's existing web.config.
// The verbatim-string fixtures below are byte-sensitive — do not reformat them.
[TestMethod]
public void AddPackageWithTransformFile() {
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    mockProjectSystem.AddFile("web.config",
@"<configuration>
<system.web>
<compilation debug=""true"" targetFramework=""4.0"" />
</system.web>
</configuration>
".AsStream());
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
    var package = new Mock<IPackage>();
    package.Setup(m => m.Id).Returns("A");
    package.Setup(m => m.Version).Returns(new Version("1.0"));
    var file = new Mock<IPackageFile>();
    file.Setup(m => m.Path).Returns(@"content\web.config.transform");
    file.Setup(m => m.GetStream()).Returns(() =>
@"<configuration>
<configSections>
<add a=""n"" />
</configSections>
</configuration>
".AsStream());
    package.Setup(m => m.GetFiles()).Returns(new[] { file.Object });
    mockRepository.AddPackage(package.Object);
    // Act
    projectManager.AddPackageReference("A");
    // Assert
    Assert.AreEqual(@"<?xml version=""1.0"" encoding=""utf-8""?>
<configuration>
<configSections>
<add a=""n"" />
</configSections>
<system.web>
<compilation debug=""true"" targetFramework=""4.0"" />
</system.web>
</configuration>", mockProjectSystem.OpenFile("web.config").ReadToEnd());
}
// Verifies that uninstalling a package with a web.config.transform reverts only the attributes
// the transform contributed (debug, targetFramework), leaving pre-existing ones (baz) intact.
// The verbatim-string fixtures below are byte-sensitive — do not reformat them.
[TestMethod]
public void RemovePackageWithTransformFile() {
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    mockProjectSystem.AddFile("web.config",
@"<configuration>
<system.web>
<compilation debug=""true"" targetFramework=""4.0"" baz=""test"" />
</system.web>
</configuration>
".AsStream());
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
    var package = new Mock<IPackage>();
    package.Setup(m => m.Id).Returns("A");
    package.Setup(m => m.Version).Returns(new Version("1.0"));
    var file = new Mock<IPackageFile>();
    file.Setup(m => m.Path).Returns(@"content\web.config.transform");
    file.Setup(m => m.GetStream()).Returns(() =>
@"<configuration>
<system.web>
<compilation debug=""true"" targetFramework=""4.0"" />
</system.web>
</configuration>
".AsStream());
    package.Setup(m => m.GetFiles()).Returns(new[] { file.Object });
    mockRepository.AddPackage(package.Object);
    projectManager.LocalRepository.AddPackage(package.Object);
    // Act
    projectManager.RemovePackageReference("A");
    // Assert
    Assert.AreEqual(@"<?xml version=""1.0"" encoding=""utf-8""?>
<configuration>
<system.web>
<compilation baz=""test"" />
</system.web>
</configuration>", mockProjectSystem.OpenFile("web.config").ReadToEnd());
}
// Verifies that uninstall is resilient: when reverting the web.config transform throws
// (simulated UnauthorizedAccessException), the remaining content files are still removed
// and the package is removed from the local repository.
[TestMethod]
public void RemovePackageWithTransformFileThatThrowsContinuesRemovingPackage() {
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    var localRepository = new MockPackageRepository();
    // Opening web.config fails, simulating a locked/inaccessible file.
    mockProjectSystem.AddFile("web.config", () => { throw new UnauthorizedAccessException(); });
    mockProjectSystem.AddFile("foo.txt");
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, localRepository);
    var package = new Mock<IPackage>();
    package.Setup(m => m.Id).Returns("A");
    package.Setup(m => m.Version).Returns(new Version("1.0"));
    var file = new Mock<IPackageFile>();
    var contentFile = new Mock<IPackageFile>();
    contentFile.Setup(m => m.Path).Returns(@"content\foo.txt");
    contentFile.Setup(m => m.GetStream()).Returns(new MemoryStream());
    file.Setup(m => m.Path).Returns(@"content\web.config.transform");
    file.Setup(m => m.GetStream()).Returns(() =>
@"<configuration>
<system.web>
<compilation debug=""true"" targetFramework=""4.0"" />
</system.web>
</configuration>
".AsStream());
    package.Setup(m => m.GetFiles()).Returns(new[] { file.Object, contentFile.Object });
    mockRepository.AddPackage(package.Object);
    projectManager.LocalRepository.AddPackage(package.Object);
    // Act
    projectManager.RemovePackageReference("A");
    // Assert
    Assert.IsFalse(mockProjectSystem.FileExists("foo.txt"));
    Assert.IsFalse(localRepository.Exists(package.Object));
}
// Verifies that when the project system reports web.config as unsupported, uninstalling a
// package with a web.config.transform leaves the project untouched (no file is created).
[TestMethod]
public void RemovePackageWithUnsupportedTransformFileDoesNothing() {
    // Arrange
    var mockProjectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    mockProjectSystem.Setup(m => m.IsSupportedFile("web.config")).Returns(false);
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem.Object, new MockPackageRepository());
    var package = new Mock<IPackage>();
    package.Setup(m => m.Id).Returns("A");
    package.Setup(m => m.Version).Returns(new Version("1.0"));
    var file = new Mock<IPackageFile>();
    file.Setup(m => m.Path).Returns(@"content\web.config.transform");
    file.Setup(m => m.GetStream()).Returns(() =>
@"<configuration>
<system.web>
<compilation debug=""true"" targetFramework=""4.0"" />
</system.web>
</configuration>
".AsStream());
    package.Setup(m => m.GetFiles()).Returns(new[] { file.Object });
    mockRepository.AddPackage(package.Object);
    projectManager.LocalRepository.AddPackage(package.Object);
    // Act
    projectManager.RemovePackageReference("A");
    // Assert
    Assert.IsFalse(mockProjectSystem.Object.FileExists("web.config"));
}
// Verifies that uninstall deletes the package's content files and then removes the
// now-empty directory ("sub") those files lived in.
[TestMethod]
public void RemovePackageRemovesDirectoriesAddedByPackageFilesIfEmpty() {
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
        new[] { @"sub\file1", @"sub\file2" });
    mockRepository.AddPackage(packageA);
    projectManager.AddPackageReference("A");
    // Act
    projectManager.RemovePackageReference("A");
    // Assert
    Assert.IsTrue(mockProjectSystem.Deleted.Contains(@"sub\file1"));
    Assert.IsTrue(mockProjectSystem.Deleted.Contains(@"sub\file2"));
    Assert.IsTrue(mockProjectSystem.Deleted.Contains("sub"));
}
[TestMethod]
public void AddPackageReferenceWhenOlderVersionOfPackageInstalledDoesAnUpgrade() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 pins B to exactly 1.0; A 2.0 pins B to exactly 2.0.
    var a10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]")
        },
        content: new[] { "foo" });
    var a20 = PackageUtility.CreatePackage("A", "2.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[2.0]")
        },
        content: new[] { "bar" });
    var b10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "foo" });
    var b20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "foo" });

    // A 1.0 + B 1.0 are already installed; the source also offers 2.0 of each.
    projectManager.LocalRepository.AddPackage(a10);
    projectManager.LocalRepository.AddPackage(b10);
    sourceRepository.AddPackage(a10);
    sourceRepository.AddPackage(a20);
    sourceRepository.AddPackage(b10);
    sourceRepository.AddPackage(b20);

    // Act
    projectManager.AddPackageReference("A");

    // Assert: both A and its dependency were upgraded in place.
    Assert.IsFalse(projectManager.LocalRepository.Exists(a10));
    Assert.IsFalse(projectManager.LocalRepository.Exists(b10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(a20));
    Assert.IsTrue(projectManager.LocalRepository.Exists(b20));
}
[TestMethod]
public void UpdatePackageNullOrEmptyPackageIdThrows() {
    // Arrange
    var projectManager = CreateProjectManager();

    // Act & Assert: both null and empty ids must be rejected up front.
    ExceptionAssert.ThrowsArgNullOrEmpty(() => projectManager.UpdatePackageReference(null), "packageId");
    ExceptionAssert.ThrowsArgNullOrEmpty(() => projectManager.UpdatePackageReference(String.Empty), "packageId");
}
[TestMethod]
public void UpdatePackageReferenceWithMixedDependenciesUpdatesPackageAndDependenciesIfUnused() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0, C 1.0]; everything is installed locally.
    IPackage packageA10 = PackageUtility.CreatePackage("A",
        "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B","[1.0]"),
            PackageDependency.CreateDependency("C","[1.0]")
        }, content: new[] { "A.file" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "B.fie" });
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", content: new[] { "C.file" });
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);

    // A 2.0 -> [B 1.0, C 2.0, D 1.0]
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]"),
            PackageDependency.CreateDependency("C", "[2.0]"),
            PackageDependency.CreateDependency("D", "[1.0]")
        }, content: new[] { "A.20.file" });
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", content: new[] { "C.20" });
    IPackage packageD10 = PackageUtility.CreatePackage("D", "1.0", content: new[] { "D.20" });
    // Register only the packages that are new to the source repository;
    // A 1.0, B 1.0 and C 1.0 were already added above (the original test
    // re-added them here redundantly).
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageC20);
    sourceRepository.AddPackage(packageD10);

    // Act
    projectManager.UpdatePackageReference("A");

    // Assert: A is upgraded, C follows A's new constraint, D is pulled in,
    // and B (still satisfying [1.0]) is left alone.
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageA20));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageB10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageC20));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageD10));
    Assert.IsFalse(projectManager.LocalRepository.Exists(packageA10));
    Assert.IsFalse(projectManager.LocalRepository.Exists(packageC10));
}
[TestMethod]
public void UpdatePackageReferenceIfPackageNotReferencedThrows() {
    // Arrange: nothing is installed in the project.
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // Act & Assert: updating an unreferenced package is an error.
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("A"), @"C:\MockFileSystem\ does not reference 'A'.");
}
[TestMethod]
public void UpdatePackageReferenceToOlderVersionThrows() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // Three versions of A are available and 2.0 is installed.
    // (The original comment claimed "A 1.0 -> [B 1.0]", but none of these
    // packages declares any dependency — it was a copy-paste leftover.)
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0");
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0");
    IPackage packageA30 = PackageUtility.CreatePackage("A", "3.0");
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageA30);
    projectManager.LocalRepository.AddPackage(packageA20);

    // Act & Assert: downgrading from 2.0 to 1.0 is rejected.
    // (new Version(...) used for consistency with the rest of the fixture.)
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("A", version: new Version("1.0")), @"Already referencing a newer version of 'A'.");
}
[TestMethod]
public void UpdatePackageReferenceWithUnresolvedDependencyThrows() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // Installed: A 1.0 -> [B 1.0], plus B 1.0 itself.
    var a10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]"),
        });
    var b10 = PackageUtility.CreatePackage("B", "1.0");
    projectManager.LocalRepository.AddPackage(a10);
    projectManager.LocalRepository.AddPackage(b10);
    sourceRepository.AddPackage(a10);
    sourceRepository.AddPackage(b10);

    // Available upgrade: A 2.0 -> [B 2.0], but B 2.0 exists nowhere.
    var a20 = PackageUtility.CreatePackage("A", "2.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[2.0]")
        });
    sourceRepository.AddPackage(a20);

    // Act & Assert: the missing dependency aborts the update.
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("A"), "Unable to resolve dependency 'B (= 2.0)'.");
}
[TestMethod]
public void UpdatePackageReferenceWithUpdateDependenciesSetToFalseIgnoresDependencies() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // Installed: A 1.0 -> [B 1.0].
    var a10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]"),
        }, content: new[] { "A.cs" });
    var b10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "B.fs.spp" });
    projectManager.LocalRepository.AddPackage(a10);
    projectManager.LocalRepository.AddPackage(b10);
    sourceRepository.AddPackage(a10);
    sourceRepository.AddPackage(b10);

    // Available: A 2.0 -> [B 2.0].
    var a20 = PackageUtility.CreatePackage("A", "2.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[2.0]"),
        }, content: new[] { "D.a" });
    var b20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "B.s" });
    sourceRepository.AddPackage(a20);
    sourceRepository.AddPackage(b20);

    // Act: update A but explicitly leave its dependencies alone.
    projectManager.UpdatePackageReference("A", version: null, updateDependencies: false);

    // Assert: A moved to 2.0 while B stayed at 1.0.
    Assert.IsTrue(projectManager.LocalRepository.Exists(a20));
    Assert.IsFalse(projectManager.LocalRepository.Exists(a10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(b10));
    Assert.IsFalse(projectManager.LocalRepository.Exists(b20));
}
[TestMethod]
public void UpdateDependencyDependentsHaveSatisfyableDependencies() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [C >= 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("C", "1.0")
        }, content: new[] { "A" });
    // B 1.0 depends on C with the plain version string "2.0"
    // (originally commented as "[C <= 2.0]" — TODO confirm against
    // PackageDependency.CreateDependency's version-spec semantics).
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("C", "2.0")
        }, content: new[] { "B" });
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", content: new[] { "C" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);

    // C 2.0 becomes available; it still satisfies both dependents' ranges.
    // (The original test re-added A 1.0, B 1.0 and C 1.0 to the source
    // repository here and carried a stale "A 2.0 -> [B 1.0, C 2.0, D 1.0]"
    // comment; both were copy-paste leftovers and have been removed.)
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", content: new[] { "C2" });
    sourceRepository.AddPackage(packageC20);

    // Act
    projectManager.UpdatePackageReference("C");

    // Assert: C is upgraded and its dependents remain installed.
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageA10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageB10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageC20));
    Assert.IsFalse(projectManager.LocalRepository.Exists(packageC10));
}
[TestMethod]
public void UpdatePackageReferenceWithSatisfyableDependencies() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0, C 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]"),
            PackageDependency.CreateDependency("C", "[1.0]")
        }, content: new[] { "file" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", new[] { "Bfile" });
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", new[] { "Cfile" });
    // G 1.0 -> [C (>= 1.0)] — a flexible range, so G tolerates a C upgrade.
    IPackage packageG10 = PackageUtility.CreatePackage("G", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("C", "1.0")
        }, content: new[] { "Gfile" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    projectManager.LocalRepository.AddPackage(packageG10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageG10);

    // A 2.0 -> [B 1.0, C 2.0, D 1.0]
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]"),
            PackageDependency.CreateDependency("C", "[2.0]"),
            PackageDependency.CreateDependency("D", "[1.0]")
        }, content: new[] { "A20file" });
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", new[] { "C20file" });
    IPackage packageD10 = PackageUtility.CreatePackage("D", "1.0", new[] { "D20file" });
    // Register only the packages that are new to the source repository;
    // A 1.0, B 1.0 and C 1.0 were already added above (the original test
    // re-added them here redundantly).
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageC20);
    sourceRepository.AddPackage(packageD10);

    // Act
    projectManager.UpdatePackageReference("A");

    // Assert: A and C are upgraded, D is added, B and G are untouched.
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageA20));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageB10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageC20));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageD10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageG10));
    Assert.IsFalse(projectManager.LocalRepository.Exists(packageC10));
    Assert.IsFalse(projectManager.LocalRepository.Exists(packageA10));
}
[TestMethod]
public void UpdatePackageReferenceWithDependenciesInUseThrowsConflictError() {
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0, C 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]"),
            PackageDependency.CreateDependency("C", "[1.0]")
        }, content: new[] { "afile" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "Bfile" });
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", content: new[] { "Cfile" });
    // G 1.0 -> [C 1.0] — G pins C to exactly 1.0, which causes the conflict.
    IPackage packageG10 = PackageUtility.CreatePackage("G", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("C", "[1.0]")
        }, content: new[] { "gfile" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    projectManager.LocalRepository.AddPackage(packageG10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageG10);

    // A 2.0 -> [B 1.0, C 2.0, D 1.0]
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]"),
            PackageDependency.CreateDependency("C", "[2.0]"),
            PackageDependency.CreateDependency("D", "[1.0]")
        }, content: new[] { "a20file" });
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", content: new[] { "cfile" });
    IPackage packageD10 = PackageUtility.CreatePackage("D", "1.0", content: new[] { "dfile" });
    // Register only the packages that are new to the source repository;
    // A 1.0, B 1.0 and C 1.0 were already added above (the original test
    // re-added them here redundantly).
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageC20);
    sourceRepository.AddPackage(packageD10);

    // Act & Assert: upgrading A needs C 2.0, but G still requires C 1.0.
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("A"), "Conflict occurred. 'C 1.0' referenced but requested 'C 2.0'. 'G 1.0' depends on 'C 1.0'.");
}
[TestMethod]
public void UpdatePackageReferenceFromRepositoryThrowsIfPackageHasDependents() {
    // Arrange: A 1.0 is installed and pins B to exactly 1.0.
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var a10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]")
        }, content: new[] { "afile" });
    var b10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "bfile" });
    var b20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "cfile" });
    projectManager.LocalRepository.AddPackage(a10);
    projectManager.LocalRepository.AddPackage(b10);
    sourceRepository.AddPackage(a10);
    sourceRepository.AddPackage(b10);
    sourceRepository.AddPackage(b20);

    // Act & Assert: B cannot move to 2.0 while A depends on B 1.0.
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("B"), "Conflict occurred. 'B 1.0' referenced but requested 'B 2.0'. 'A 1.0' depends on 'B 1.0'.");
}
[TestMethod]
public void UpdatePackageReferenceNoVersionSpecifiedShouldUpdateToLatest() {
    // Arrange: 1.0 is installed; 1.1, 2.0 and 3.5 are available upstream.
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var v10 = PackageUtility.CreatePackage("NetFramework", "1.0", content: new[] { "1.0f" });
    var v11 = PackageUtility.CreatePackage("NetFramework", "1.1", content: new[] { "1.1f" });
    var v20 = PackageUtility.CreatePackage("NetFramework", "2.0", content: new[] { "2.0f" });
    var v35 = PackageUtility.CreatePackage("NetFramework", "3.5", content: new[] { "3.5f" });
    projectManager.LocalRepository.AddPackage(v10);
    sourceRepository.AddPackage(v10);
    sourceRepository.AddPackage(v11);
    sourceRepository.AddPackage(v20);
    sourceRepository.AddPackage(v35);

    // Act: no version given, so the newest available version wins.
    projectManager.UpdatePackageReference("NetFramework");

    // Assert
    Assert.IsFalse(projectManager.LocalRepository.Exists(v10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(v35));
}
[TestMethod]
public void UpdatePackageReferenceVersionSpeciedShouldUpdateToSpecifiedVersion() {
    // Arrange: 1.0 installed; 1.1 and 2.0 available upstream.
    // (Note: "Specied" typo is kept — the test name is its public identity.)
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var v10 = PackageUtility.CreatePackage("NetFramework", "1.0", new[] { "file.dll" });
    var v11 = PackageUtility.CreatePackage("NetFramework", "1.1", new[] { "file.dll" });
    var v20 = PackageUtility.CreatePackage("NetFramework", "2.0", new[] { "file.dll" });
    projectManager.LocalRepository.AddPackage(v10);
    sourceRepository.AddPackage(v10);
    sourceRepository.AddPackage(v11);
    sourceRepository.AddPackage(v20);

    // Act: request 1.1 explicitly, even though 2.0 is newer.
    projectManager.UpdatePackageReference("NetFramework", new Version("1.1"));

    // Assert
    Assert.IsFalse(projectManager.LocalRepository.Exists(v10));
    Assert.IsTrue(projectManager.LocalRepository.Exists(v11));
}
[TestMethod]
public void RemovingPackageReferenceRemovesPackageButNotDependencies() {
    // Arrange: A (depending on B) and B are both installed.
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var packageA = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B")
        }, content: new[] { "A" });
    var packageB = PackageUtility.CreatePackage("B", "1.0", content: new[] { "B" });
    projectManager.LocalRepository.AddPackage(packageA);
    projectManager.LocalRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);

    // Act
    projectManager.RemovePackageReference("A");

    // Assert: only A is gone; its dependency stays behind.
    Assert.IsFalse(projectManager.LocalRepository.Exists(packageA));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageB));
}
[TestMethod]
public void RemovePackageReferenceOnlyRemovedAssembliesFromTheTargetFramework() {
    // Arrange: a .NET 2.0 project referencing a package that ships both
    // net20 and net40 assemblies.
    var net20 = new FrameworkName(".NETFramework", new Version("2.0"));
    var net40 = new FrameworkName(".NETFramework", new Version("4.0"));
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem(net20);
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var net20Reference = PackageUtility.CreateAssemblyReference("foo.dll", net20);
    var net40Reference = PackageUtility.CreateAssemblyReference("bar.dll", net40);
    var packageA = PackageUtility.CreatePackage("A", "1.0",
        content: null,
        assemblyReferences: new[] { net20Reference, net40Reference },
        tools: null,
        dependencies: null,
        rating: null,
        description: null);
    projectManager.LocalRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageA);
    projectManager.AddPackageReference("A");

    // Act
    projectManager.RemovePackageReference("A");

    // Assert: only the net20 assembly was part of the project, so it is
    // the only file removed.
    Assert.IsFalse(projectManager.LocalRepository.Exists(packageA));
    Assert.AreEqual(1, projectSystem.Deleted.Count);
    Assert.IsTrue(projectSystem.Deleted.Contains("foo.dll"));
}
[TestMethod]
public void ReAddingAPackageReferenceAfterRemovingADependencyShouldReReferenceAllDependencies() {
    // Arrange: A -> B -> C; A and B are installed locally but C exists
    // only in the source repository (as if it had been removed earlier).
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var packageA = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B")
        },
        content: new[] { "foo" });
    var packageB = PackageUtility.CreatePackage("B", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("C")
        },
        content: new[] { "bar" });
    var packageC = PackageUtility.CreatePackage("C", "1.0", content: new[] { "baz" });
    projectManager.LocalRepository.AddPackage(packageA);
    projectManager.LocalRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageC);

    // Act
    projectManager.AddPackageReference("A");

    // Assert: the whole dependency chain is referenced again.
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageA));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageB));
    Assert.IsTrue(projectManager.LocalRepository.Exists(packageC));
}
[TestMethod]
public void AddPackageReferenceWithAnyNonCompatibleReferenceThrowsAndPackageIsNotReferenced() {
    // Arrange: a net20 project and a package whose only assembly targets
    // a (fictional) net50 framework.
    var projectSystemMock = new Mock<MockProjectSystem>() { CallBase = true };
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystemMock.Object), projectSystemMock.Object, localRepository);
    // Setup is deliberately kept after the ProjectManager construction,
    // matching the original arrangement.
    projectSystemMock.Setup(m => m.TargetFramework).Returns(new FrameworkName(".NETFramework", new Version("2.0")));
    var packageMock = new Mock<IPackage>();
    packageMock.Setup(m => m.Id).Returns("A");
    packageMock.Setup(m => m.Version).Returns(new Version("1.0"));
    var assemblyReference = PackageUtility.CreateAssemblyReference("foo.dll", new FrameworkName(".NETFramework", new Version("5.0")));
    packageMock.Setup(m => m.AssemblyReferences).Returns(new[] { assemblyReference });
    sourceRepository.AddPackage(packageMock.Object);

    // Act & Assert: installation fails and nothing is left referenced.
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.AddPackageReference("A"), "Unable to find assembly references that are compatible with the target framework '.NETFramework,Version=v2.0'.");
    Assert.IsFalse(localRepository.Exists(packageMock.Object));
}
[TestMethod]
public void AddPackageReferenceWithAnyNonCompatibleFrameworkReferenceThrowsAndPackageIsNotReferenced() {
    // Arrange: a net20 project and a package whose only framework assembly
    // targets a (fictional) net50 framework.
    var projectSystemMock = new Mock<MockProjectSystem>() { CallBase = true };
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystemMock.Object), projectSystemMock.Object, localRepository);
    // Setup is deliberately kept after the ProjectManager construction,
    // matching the original arrangement.
    projectSystemMock.Setup(m => m.TargetFramework).Returns(new FrameworkName(".NETFramework", new Version("2.0")));
    var packageMock = new Mock<IPackage>();
    packageMock.Setup(m => m.Id).Returns("A");
    packageMock.Setup(m => m.Version).Returns(new Version("1.0"));
    var frameworkReference = new FrameworkAssemblyReference("System.Web", new[] { new FrameworkName(".NETFramework", new Version("5.0")) });
    packageMock.Setup(m => m.FrameworkAssemblies).Returns(new[] { frameworkReference });
    sourceRepository.AddPackage(packageMock.Object);

    // Act & Assert: installation fails and nothing is left referenced.
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.AddPackageReference("A"), "Unable to find framework assemblies that are compatible with the target framework '.NETFramework,Version=v2.0'.");
    Assert.IsFalse(localRepository.Exists(packageMock.Object));
}
// Builds a ProjectManager wired entirely to fresh in-memory mocks.
private ProjectManager CreateProjectManager() {
    var fileSystem = new MockProjectSystem();
    var pathResolver = new DefaultPackagePathResolver(fileSystem);
    return new ProjectManager(new MockPackageRepository(), pathResolver, fileSystem, new MockPackageRepository());
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: This class will encapsulate a byte and provide an
** Object representation of it.
**
**
===========================================================*/
using System;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Diagnostics.Contracts;
namespace System
{
// The Byte class extends the Value class and
// provides object representation of the byte primitive type.
//
[Serializable]
[System.Runtime.InteropServices.StructLayout(LayoutKind.Sequential)]
// Byte wraps the byte primitive (an unsigned 8-bit integer, 0..255) and
// supplies its comparison, parsing, formatting, and IConvertible plumbing.
[Serializable]
[System.Runtime.InteropServices.StructLayout(LayoutKind.Sequential)]
public struct Byte : IComparable, IFormattable, IConvertible
    , IComparable<Byte>, IEquatable<Byte>
{
    // Field name and position are load-bearing (serialization + sequential
    // layout) — do not rename.
    private byte m_value;

    // Largest representable value: 255.
    public const byte MaxValue = (byte)0xFF;

    // Smallest representable value: 0.
    public const byte MinValue = 0;

    // Compares this instance with an arbitrary object. Null sorts before
    // every Byte; a non-Byte argument is rejected with ArgumentException.
    public int CompareTo(Object value)
    {
        if (value == null)
        {
            return 1;
        }
        if (value is Byte)
        {
            // The difference of two bytes always fits in an int.
            return m_value - ((Byte)value).m_value;
        }
        throw new ArgumentException(Environment.GetResourceString("Arg_MustBeByte"));
    }

    // Typed comparison; the subtraction cannot overflow.
    public int CompareTo(Byte value)
    {
        return m_value - value;
    }

    // Value equality against an arbitrary object; false for non-Byte.
    public override bool Equals(Object obj)
    {
        return obj is Byte && m_value == ((Byte)obj).m_value;
    }

    [System.Runtime.Versioning.NonVersionable]
    public bool Equals(Byte obj)
    {
        return m_value == obj;
    }

    // A byte is its own best hash code.
    public override int GetHashCode()
    {
        return m_value;
    }

    [Pure]
    public static byte Parse(String s)
    {
        return Parse(s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo);
    }

    [Pure]
    public static byte Parse(String s, NumberStyles style)
    {
        NumberFormatInfo.ValidateParseStyleInteger(style);
        return Parse(s, style, NumberFormatInfo.CurrentInfo);
    }

    [Pure]
    public static byte Parse(String s, IFormatProvider provider)
    {
        return Parse(s, NumberStyles.Integer, NumberFormatInfo.GetInstance(provider));
    }

    // Parses an unsigned byte from a String in the given style, using the
    // NumberFormatInfo resolved from the provider (current culture if null).
    [Pure]
    public static byte Parse(String s, NumberStyles style, IFormatProvider provider)
    {
        NumberFormatInfo.ValidateParseStyleInteger(style);
        return Parse(s, style, NumberFormatInfo.GetInstance(provider));
    }

    // Shared parse core: parse as Int32, then range-check against 0..255.
    private static byte Parse(String s, NumberStyles style, NumberFormatInfo info)
    {
        int parsed = 0;
        try
        {
            parsed = Number.ParseInt32(s, style, info);
        }
        catch (OverflowException e)
        {
            // Re-surface the Int32 overflow with a Byte-specific message.
            throw new OverflowException(Environment.GetResourceString("Overflow_Byte"), e);
        }
        if (parsed < MinValue || parsed > MaxValue)
        {
            throw new OverflowException(Environment.GetResourceString("Overflow_Byte"));
        }
        return (byte)parsed;
    }

    public static bool TryParse(String s, out Byte result)
    {
        return TryParse(s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo, out result);
    }

    public static bool TryParse(String s, NumberStyles style, IFormatProvider provider, out Byte result)
    {
        NumberFormatInfo.ValidateParseStyleInteger(style);
        return TryParse(s, style, NumberFormatInfo.GetInstance(provider), out result);
    }

    // Non-throwing parse core; result is 0 whenever false is returned.
    private static bool TryParse(String s, NumberStyles style, NumberFormatInfo info, out Byte result)
    {
        result = 0;
        int parsed;
        if (!Number.TryParseInt32(s, style, info, out parsed))
        {
            return false;
        }
        if (parsed < MinValue || parsed > MaxValue)
        {
            return false;
        }
        result = (byte)parsed;
        return true;
    }

    [Pure]
    public override String ToString()
    {
        Contract.Ensures(Contract.Result<String>() != null);
        return Number.FormatInt32(m_value, null, NumberFormatInfo.CurrentInfo);
    }

    [Pure]
    public String ToString(String format)
    {
        Contract.Ensures(Contract.Result<String>() != null);
        return Number.FormatInt32(m_value, format, NumberFormatInfo.CurrentInfo);
    }

    [Pure]
    public String ToString(IFormatProvider provider)
    {
        Contract.Ensures(Contract.Result<String>() != null);
        return Number.FormatInt32(m_value, null, NumberFormatInfo.GetInstance(provider));
    }

    [Pure]
    public String ToString(String format, IFormatProvider provider)
    {
        Contract.Ensures(Contract.Result<String>() != null);
        return Number.FormatInt32(m_value, format, NumberFormatInfo.GetInstance(provider));
    }

    //
    // IConvertible implementation — each conversion simply forwards to the
    // corresponding Convert.ToXxx on the underlying byte.
    //
    [Pure]
    public TypeCode GetTypeCode()
    {
        return TypeCode.Byte;
    }

    /// <internalonly/>
    bool IConvertible.ToBoolean(IFormatProvider provider) => Convert.ToBoolean(m_value);

    /// <internalonly/>
    char IConvertible.ToChar(IFormatProvider provider) => Convert.ToChar(m_value);

    /// <internalonly/>
    sbyte IConvertible.ToSByte(IFormatProvider provider) => Convert.ToSByte(m_value);

    /// <internalonly/>
    byte IConvertible.ToByte(IFormatProvider provider) => m_value;

    /// <internalonly/>
    short IConvertible.ToInt16(IFormatProvider provider) => Convert.ToInt16(m_value);

    /// <internalonly/>
    ushort IConvertible.ToUInt16(IFormatProvider provider) => Convert.ToUInt16(m_value);

    /// <internalonly/>
    int IConvertible.ToInt32(IFormatProvider provider) => Convert.ToInt32(m_value);

    /// <internalonly/>
    uint IConvertible.ToUInt32(IFormatProvider provider) => Convert.ToUInt32(m_value);

    /// <internalonly/>
    long IConvertible.ToInt64(IFormatProvider provider) => Convert.ToInt64(m_value);

    /// <internalonly/>
    ulong IConvertible.ToUInt64(IFormatProvider provider) => Convert.ToUInt64(m_value);

    /// <internalonly/>
    float IConvertible.ToSingle(IFormatProvider provider) => Convert.ToSingle(m_value);

    /// <internalonly/>
    double IConvertible.ToDouble(IFormatProvider provider) => Convert.ToDouble(m_value);

    /// <internalonly/>
    Decimal IConvertible.ToDecimal(IFormatProvider provider) => Convert.ToDecimal(m_value);

    /// <internalonly/>
    DateTime IConvertible.ToDateTime(IFormatProvider provider)
    {
        // Byte -> DateTime is not a valid IConvertible conversion.
        throw new InvalidCastException(Environment.GetResourceString("InvalidCast_FromTo", "Byte", "DateTime"));
    }

    /// <internalonly/>
    Object IConvertible.ToType(Type type, IFormatProvider provider)
    {
        return Convert.DefaultToType((IConvertible)this, type, provider);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.IO.Compression.Tests
{
public class DeflateStreamTests
{
// Resolves a test fixture name to its path under the GZTestData folder.
static string gzTestFile(string fileName)
{
    return Path.Combine("GZTestData", fileName);
}
[Fact]
public void BaseStream1()
{
    // Compression mode: BaseStream must hand back the exact wrapped stream.
    var inner = new MemoryStream();
    var deflate = new DeflateStream(inner, CompressionMode.Compress);
    Assert.Same(deflate.BaseStream, inner);
    inner.Dispose();
}
[Fact]
public void BaseStream2()
{
var ms = new MemoryStream();
var zip = new DeflateStream(ms, CompressionMode.Decompress);
Assert.Same(zip.BaseStream, ms);
ms.Dispose();
}
        [Fact]
        public async Task ModifyBaseStream()
        {
            // The wrapped stream must remain directly usable (read and reposition)
            // while the DeflateStream wrapper is alive.
            var ms = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.txt.gz"));
            var newMs = StripHeaderAndFooter.Strip(ms);
            var zip = new DeflateStream(newMs, CompressionMode.Decompress);
            int size = 1024;
            byte[] bytes = new byte[size];
            zip.BaseStream.Read(bytes, 0, size); // This will throw if the underlying stream is not readable as expected
            zip.BaseStream.Position = 0;
            await zip.BaseStream.ReadAsync(bytes, 0, size);
        }
[Fact]
public void DecompressCanRead()
{
var ms = new MemoryStream();
var zip = new DeflateStream(ms, CompressionMode.Decompress);
Assert.True(zip.CanRead);
zip.Dispose();
Assert.False(zip.CanRead);
}
[Fact]
public void CompressCanWrite()
{
var ms = new MemoryStream();
var zip = new DeflateStream(ms, CompressionMode.Compress);
Assert.True(zip.CanWrite);
zip.Dispose();
Assert.False(zip.CanWrite);
}
        [Fact]
        public void CanDisposeBaseStream()
        {
            // Disposing the wrapped stream while the DeflateStream is still alive must not throw.
            var ms = new MemoryStream();
            var zip = new DeflateStream(ms, CompressionMode.Compress);
            ms.Dispose(); // This would throw if this was invalid
        }
[Fact]
public void CanDisposeDeflateStream()
{
var ms = new MemoryStream();
var zip = new DeflateStream(ms, CompressionMode.Compress);
zip.Dispose();
// Base Stream should be null after dispose
Assert.Null(zip.BaseStream);
zip.Dispose(); // Should be a no-op
}
        [Fact]
        public async Task CanReadBaseStreamAfterDispose()
        {
            // With leaveOpen: true, the base stream must stay usable after the wrapper is disposed.
            var ms = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.txt.gz"));
            var newMs = StripHeaderAndFooter.Strip(ms);
            var zip = new DeflateStream(newMs, CompressionMode.Decompress, leaveOpen: true);
            var baseStream = zip.BaseStream;
            zip.Dispose();
            int size = 1024;
            byte[] bytes = new byte[size];
            baseStream.Read(bytes, 0, size); // This will throw if the underlying stream is not readable as expected
            baseStream.Position = 0;
            await baseStream.ReadAsync(bytes, 0, size);
        }
        [Fact]
        public async Task DecompressFailsWithRealGzStream()
        {
            // DeflateStream expects a raw deflate payload; a complete .gz file (gzip
            // header/footer intact) is invalid input and must fail on the first read.
            string[] files = { gzTestFile("GZTestDocument.doc.gz"), gzTestFile("GZTestDocument.txt.gz") };
            foreach (string fileName in files)
            {
                var baseStream = await LocalMemoryStream.readAppFileAsync(fileName);
                var zip = new DeflateStream(baseStream, CompressionMode.Decompress);
                int _bufferSize = 2048;
                var bytes = new byte[_bufferSize];
                Assert.Throws<InvalidDataException>(() => { zip.Read(bytes, 0, _bufferSize); });
                zip.Dispose();
            }
        }
[Fact]
public void DisposedBaseStreamThrows()
{
var ms = new MemoryStream();
ms.Dispose();
Assert.Throws<ArgumentException>(() =>
{
var deflate = new DeflateStream(ms, CompressionMode.Decompress);
});
Assert.Throws<ArgumentException>(() =>
{
var deflate = new DeflateStream(ms, CompressionMode.Compress);
});
}
        [Fact]
        public void ReadOnlyStreamThrowsOnCompress()
        {
            // Compression requires a writable output stream.
            var ms = new LocalMemoryStream();
            ms.SetCanWrite(false);
            Assert.Throws<ArgumentException>(() =>
            {
                var gzip = new DeflateStream(ms, CompressionMode.Compress);
            });
        }
        [Fact]
        public void WriteOnlyStreamThrowsOnDecompress()
        {
            // Decompression requires a readable input stream.
            var ms = new LocalMemoryStream();
            ms.SetCanRead(false);
            Assert.Throws<ArgumentException>(() =>
            {
                var gzip = new DeflateStream(ms, CompressionMode.Decompress);
            });
        }
[Fact]
public void TestCtors()
{
CompressionLevel[] legalValues = new CompressionLevel[] { CompressionLevel.Optimal, CompressionLevel.Fastest, CompressionLevel.NoCompression };
foreach (CompressionLevel level in legalValues)
{
bool[] boolValues = new bool[] { true, false };
foreach (bool remainsOpen in boolValues)
{
TestCtor(level, remainsOpen);
}
}
}
        [Fact]
        public void TestLevelOptimial()
        {
            // NOTE(review): method name misspells "Optimal"; renaming would change the
            // publicly visible test identifier, so it is left as-is.
            TestCtor(CompressionLevel.Optimal);
        }
        [Fact]
        public void TestLevelNoCompression()
        {
            // Roundtrip with the NoCompression level and the default (null) leaveOpen.
            TestCtor(CompressionLevel.NoCompression);
        }
        [Fact]
        public void TestLevelFastest()
        {
            // Roundtrip with the Fastest level and the default (null) leaveOpen.
            TestCtor(CompressionLevel.Fastest);
        }
        // Compresses a short string with the given level (and optional leaveOpen flag),
        // verifies the base stream's disposal contract, then decompresses from the same
        // backing buffer and checks the roundtripped bytes plus zeroed padding.
        private static void TestCtor(CompressionLevel level, bool? leaveOpen = null)
        {
            //Create the DeflateStream
            int _bufferSize = 1024;
            var bytes = new byte[_bufferSize];
            var baseStream = new MemoryStream(bytes, writable: true);
            DeflateStream ds;
            if (leaveOpen == null)
            {
                // null means "use the two-argument constructor" (implicit leaveOpen).
                ds = new DeflateStream(baseStream, level);
            }
            else
            {
                ds = new DeflateStream(baseStream, level, leaveOpen ?? false);
            }
            //Write some data and Close the stream
            string strData = "Test Data";
            var encoding = Encoding.UTF8;
            byte[] data = encoding.GetBytes(strData);
            ds.Write(data, 0, data.Length);
            ds.Flush();
            ds.Dispose();
            if (leaveOpen != true)
            {
                //Check that Close has really closed the underlying stream
                Assert.Throws<ObjectDisposedException>(() => { baseStream.Write(bytes, 0, bytes.Length); });
            }
            //Read the data
            byte[] data2 = new byte[_bufferSize];
            baseStream = new MemoryStream(bytes, writable: false);
            ds = new DeflateStream(baseStream, CompressionMode.Decompress);
            int size = ds.Read(data2, 0, _bufferSize - 5);
            //Verify the data roundtripped
            // (the + 5 also checks a few bytes beyond what was read, which must stay zero)
            for (int i = 0; i < size + 5; i++)
            {
                if (i < data.Length)
                {
                    Assert.Equal(data[i], data2[i]);
                }
                else
                {
                    Assert.Equal(data2[i], (byte)0);
                }
            }
        }
        [Fact]
        public void CtorArgumentValidation()
        {
            // Null streams, undefined CompressionMode values, and a non-writable target
            // for compression must all be rejected by the constructors.
            Assert.Throws<ArgumentNullException>(() => new DeflateStream(null, CompressionLevel.Fastest));
            Assert.Throws<ArgumentNullException>(() => new DeflateStream(null, CompressionMode.Decompress));
            Assert.Throws<ArgumentNullException>(() => new DeflateStream(null, CompressionMode.Compress));
            Assert.Throws<ArgumentNullException>(() => new DeflateStream(null, CompressionLevel.Fastest, true));
            Assert.Throws<ArgumentNullException>(() => new DeflateStream(null, CompressionMode.Decompress, false));
            Assert.Throws<ArgumentNullException>(() => new DeflateStream(null, CompressionMode.Compress, true));
            Assert.Throws<ArgumentException>(() => new DeflateStream(new MemoryStream(), (CompressionMode)42));
            Assert.Throws<ArgumentException>(() => new DeflateStream(new MemoryStream(), (CompressionMode)43, true));
            Assert.Throws<ArgumentException>(() => new DeflateStream(new MemoryStream(new byte[1], writable: false), CompressionLevel.Optimal));
        }
[Fact]
public async Task Flush()
{
var ms = new MemoryStream();
var ds = new DeflateStream(ms, CompressionMode.Compress);
ds.Flush();
await ds.FlushAsync();
}
[Fact]
public void DoubleFlush()
{
var ms = new MemoryStream();
var ds = new DeflateStream(ms, CompressionMode.Compress);
ds.Flush();
ds.Flush();
}
[Fact]
public void DoubleDispose()
{
var ms = new MemoryStream();
var ds = new DeflateStream(ms, CompressionMode.Compress);
ds.Dispose();
ds.Dispose();
}
[Fact]
public void FlushThenDispose()
{
var ms = new MemoryStream();
var ds = new DeflateStream(ms, CompressionMode.Compress);
ds.Flush();
ds.Dispose();
}
[Fact]
public void FlushFailsAfterDispose()
{
var ms = new MemoryStream();
var ds = new DeflateStream(ms, CompressionMode.Compress);
ds.Dispose();
Assert.Throws<ObjectDisposedException>(() => { ds.Flush(); });
}
[Fact]
public async Task FlushAsyncFailsAfterDispose()
{
var ms = new MemoryStream();
var ds = new DeflateStream(ms, CompressionMode.Compress);
ds.Dispose();
await Assert.ThrowsAsync<ObjectDisposedException>(async () =>
{
await ds.FlushAsync();
});
}
[Fact]
public void TestSeekMethodsDecompress()
{
var ms = new MemoryStream();
var zip = new DeflateStream(ms, CompressionMode.Decompress);
Assert.False(zip.CanSeek, "CanSeek should be false");
Assert.Throws<NotSupportedException>(delegate { long value = zip.Length; });
Assert.Throws<NotSupportedException>(delegate { long value = zip.Position; });
Assert.Throws<NotSupportedException>(delegate { zip.Position = 100L; });
Assert.Throws<NotSupportedException>(delegate { zip.SetLength(100L); });
Assert.Throws<NotSupportedException>(delegate { zip.Seek(100L, SeekOrigin.Begin); });
}
[Fact]
public void TestSeekMethodsCompress()
{
var ms = new MemoryStream();
var zip = new DeflateStream(ms, CompressionMode.Compress);
Assert.False(zip.CanSeek, "CanSeek should be false");
Assert.Throws<NotSupportedException>(delegate { long value = zip.Length; });
Assert.Throws<NotSupportedException>(delegate { long value = zip.Position; });
Assert.Throws<NotSupportedException>(delegate { zip.Position = 100L; });
Assert.Throws<NotSupportedException>(delegate { zip.SetLength(100L); });
Assert.Throws<NotSupportedException>(delegate { zip.Seek(100L, SeekOrigin.Begin); });
}
        [Fact]
        public void ReadWriteArgumentValidation()
        {
            // Compress mode: Write validates its arguments; Read is an invalid operation.
            using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Compress))
            {
                Assert.Throws<ArgumentNullException>(() => ds.Write(null, 0, 0));
                Assert.Throws<ArgumentOutOfRangeException>(() => ds.Write(new byte[1], -1, 0));
                Assert.Throws<ArgumentOutOfRangeException>(() => ds.Write(new byte[1], 0, -1));
                Assert.Throws<ArgumentException>(() => ds.Write(new byte[1], 0, 2));
                Assert.Throws<ArgumentException>(() => ds.Write(new byte[1], 1, 1));
                Assert.Throws<InvalidOperationException>(() => ds.Read(new byte[1], 0, 1));
                ds.Write(new byte[1], 0, 0);
            }
            // WriteAsync performs the same argument validation synchronously, so the
            // exceptions surface without awaiting the returned task.
            using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Compress))
            {
                Assert.Throws<ArgumentNullException>(() => { ds.WriteAsync(null, 0, 0); });
                Assert.Throws<ArgumentOutOfRangeException>(() => { ds.WriteAsync(new byte[1], -1, 0); });
                Assert.Throws<ArgumentOutOfRangeException>(() => { ds.WriteAsync(new byte[1], 0, -1); });
                Assert.Throws<ArgumentException>(() => { ds.WriteAsync(new byte[1], 0, 2); });
                Assert.Throws<ArgumentException>(() => { ds.WriteAsync(new byte[1], 1, 1); });
                Assert.Throws<InvalidOperationException>(() => { ds.Read(new byte[1], 0, 1); });
            }
            // Decompress mode: Read validates its arguments; Write is an invalid
            // operation; a zero-length read is a no-op that leaves the buffer untouched.
            using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Decompress))
            {
                Assert.Throws<ArgumentNullException>(() => ds.Read(null, 0, 0));
                Assert.Throws<ArgumentOutOfRangeException>(() => ds.Read(new byte[1], -1, 0));
                Assert.Throws<ArgumentOutOfRangeException>(() => ds.Read(new byte[1], 0, -1));
                Assert.Throws<ArgumentException>(() => ds.Read(new byte[1], 0, 2));
                Assert.Throws<ArgumentException>(() => ds.Read(new byte[1], 1, 1));
                Assert.Throws<InvalidOperationException>(() => ds.Write(new byte[1], 0, 1));
                var data = new byte[1] { 42 };
                Assert.Equal(0, ds.Read(data, 0, 0));
                Assert.Equal(42, data[0]);
            }
            using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Decompress))
            {
                Assert.Throws<ArgumentNullException>(() => { ds.ReadAsync(null, 0, 0); });
                Assert.Throws<ArgumentOutOfRangeException>(() => { ds.ReadAsync(new byte[1], -1, 0); });
                Assert.Throws<ArgumentOutOfRangeException>(() => { ds.ReadAsync(new byte[1], 0, -1); });
                Assert.Throws<ArgumentException>(() => { ds.ReadAsync(new byte[1], 0, 2); });
                Assert.Throws<ArgumentException>(() => { ds.ReadAsync(new byte[1], 1, 1); });
                Assert.Throws<InvalidOperationException>(() => { ds.Write(new byte[1], 0, 1); });
            }
        }
        [Fact]
        public void CopyToAsyncArgumentValidation()
        {
            using (DeflateStream ds = new DeflateStream(new MemoryStream(), CompressionMode.Decompress))
            {
                // CopyToAsync validates its arguments synchronously, so Assert.Throws
                // (rather than ThrowsAsync) observes the exceptions.
                AssertExtensions.Throws<ArgumentNullException>("destination", () => { ds.CopyToAsync(null); });
                AssertExtensions.Throws<ArgumentOutOfRangeException>("bufferSize", () => { ds.CopyToAsync(new MemoryStream(), 0); });
                Assert.Throws<NotSupportedException>(() => { ds.CopyToAsync(new MemoryStream(new byte[1], writable: false)); });
                ds.Dispose();
                Assert.Throws<ObjectDisposedException>(() => { ds.CopyToAsync(new MemoryStream()); });
            }
            using (DeflateStream ds = new DeflateStream(new MemoryStream(), CompressionMode.Compress))
            {
                // A compress-mode stream is not readable, so copying from it is unsupported.
                Assert.Throws<NotSupportedException>(() => { ds.CopyToAsync(new MemoryStream()); });
            }
        }
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public void Precancellation()
        {
            // An already-signaled CancellationToken must produce canceled tasks
            // immediately, without performing any I/O.
            var ms = new MemoryStream();
            using (DeflateStream ds = new DeflateStream(ms, CompressionMode.Compress, leaveOpen: true))
            {
                Assert.True(ds.WriteAsync(new byte[1], 0, 1, new CancellationToken(true)).IsCanceled);
                Assert.True(ds.FlushAsync(new CancellationToken(true)).IsCanceled);
            }
            using (DeflateStream ds = new DeflateStream(ms, CompressionMode.Decompress, leaveOpen: true))
            {
                Assert.True(ds.ReadAsync(new byte[1], 0, 1, new CancellationToken(true)).IsCanceled);
            }
        }
        [Fact]
        public async Task RoundtripCompressDecompress()
        {
            // Inner-loop smoke coverage of the [Theory] overload below: one small sync
            // deflate case and one larger async gzip case.
            await RoundtripCompressDecompress(useAsync: false, useGzip: false, chunkSize: 1, totalSize: 10, level: CompressionLevel.Fastest);
            await RoundtripCompressDecompress(useAsync: true, useGzip: true, chunkSize: 1024, totalSize: 8192, level: CompressionLevel.Optimal);
        }
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public async Task RoundTripWithFlush()
        {
            // Inner-loop smoke coverage of the [Theory] overload below.
            await RoundTripWithFlush(useAsync: false, useGzip: false, chunkSize: 1, totalSize: 10, level: CompressionLevel.Fastest);
            await RoundTripWithFlush(useAsync: true, useGzip: true, chunkSize: 1024, totalSize: 8192, level: CompressionLevel.Optimal);
        }
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public async Task WriteAfterFlushing()
        {
            // Inner-loop smoke coverage of the [Theory] overload below.
            await WriteAfterFlushing(useAsync: false, useGzip: false, chunkSize: 1, totalSize: 10, level: CompressionLevel.Fastest);
            await WriteAfterFlushing(useAsync: true, useGzip: true, chunkSize: 1024, totalSize: 8192, level: CompressionLevel.Optimal);
        }
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public async Task FlushBeforeFirstWrites()
        {
            // Inner-loop smoke coverage of the [Theory] overload below.
            await FlushBeforeFirstWrites(useAsync: false, useGzip: false, chunkSize: 1, totalSize: 10, level: CompressionLevel.Fastest);
            await FlushBeforeFirstWrites(useAsync: true, useGzip: true, chunkSize: 1024, totalSize: 8192, level: CompressionLevel.Optimal);
        }
        // Cartesian product of sync/async x gzip/deflate x compression level, with write
        // sizes chosen to hit single-byte writes, internal-buffer overflow, and one
        // large single write.
        public static IEnumerable<object[]> RoundtripCompressDecompressOuterData
        {
            get
            {
                foreach (bool useAsync in new[] { true, false }) // whether to use Read/Write or ReadAsync/WriteAsync
                {
                    foreach (bool useGzip in new[] { true, false }) // whether to add on gzip headers/footers
                    {
                        foreach (var level in new[] { CompressionLevel.Fastest, CompressionLevel.Optimal, CompressionLevel.NoCompression }) // compression level
                        {
                            yield return new object[] { useAsync, useGzip, 1, 5, level }; // smallest possible writes
                            yield return new object[] { useAsync, useGzip, 1023, 1023 * 10, level }; // overflowing internal buffer
                            yield return new object[] { useAsync, useGzip, 1024 * 1024, 1024 * 1024, level }; // large single write
                        }
                    }
                }
            }
        }
[OuterLoop]
[Theory]
[MemberData(nameof(RoundtripCompressDecompressOuterData))]
public async Task RoundtripCompressDecompress(bool useAsync, bool useGzip, int chunkSize, int totalSize, CompressionLevel level)
{
byte[] data = new byte[totalSize];
new Random(42).NextBytes(data);
var compressed = new MemoryStream();
using (var compressor = useGzip ? (Stream)new GZipStream(compressed, level, true) : new DeflateStream(compressed, level, true))
{
for (int i = 0; i < data.Length; i += chunkSize) // not using CopyTo{Async} due to optimizations in MemoryStream's implementation that avoid what we're trying to test
{
switch (useAsync)
{
case true: await compressor.WriteAsync(data, i, chunkSize); break;
case false: compressor.Write(data, i, chunkSize); break;
}
}
}
compressed.Position = 0;
await ValidateCompressedData(useAsync, useGzip, chunkSize, compressed, data);
compressed.Dispose();
}
        [OuterLoop]
        [Theory]
        [MemberData(nameof(RoundtripCompressDecompressOuterData))]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public async Task RoundTripWithFlush(bool useAsync, bool useGzip, int chunkSize, int totalSize, CompressionLevel level)
        {
            // Same roundtrip as RoundtripCompressDecompress, but validates after a
            // Flush/FlushAsync (before the compressor is disposed), so the data seen
            // was emitted by flushing alone.
            byte[] data = new byte[totalSize];
            new Random(42).NextBytes(data);
            using (var compressed = new MemoryStream())
            using (var compressor = useGzip ? (Stream)new GZipStream(compressed, level, true) : new DeflateStream(compressed, level, true))
            {
                for (int i = 0; i < data.Length; i += chunkSize) // not using CopyTo{Async} due to optimizations in MemoryStream's implementation that avoid what we're trying to test
                {
                    switch (useAsync)
                    {
                        case true: await compressor.WriteAsync(data, i, chunkSize); break;
                        case false: compressor.Write(data, i, chunkSize); break;
                    }
                }
                switch (useAsync)
                {
                    case true: await compressor.FlushAsync(); break;
                    case false: compressor.Flush(); break;
                }
                compressed.Position = 0;
                await ValidateCompressedData(useAsync, useGzip, chunkSize, compressed, data);
            }
        }
        [OuterLoop]
        [Theory]
        [MemberData(nameof(RoundtripCompressDecompressOuterData))]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public async Task WriteAfterFlushing(bool useAsync, bool useGzip, int chunkSize, int totalSize, CompressionLevel level)
        {
            // After each chunk + flush, the compressed bytes emitted so far must
            // decompress to exactly the data written so far.
            byte[] data = new byte[totalSize];
            List<byte> expected = new List<byte>();
            new Random(42).NextBytes(data);
            using (var compressed = new MemoryStream())
            using (var compressor = useGzip ? (Stream)new GZipStream(compressed, level, true) : new DeflateStream(compressed, level, true))
            {
                for (int i = 0; i < data.Length; i += chunkSize) // not using CopyTo{Async} due to optimizations in MemoryStream's implementation that avoid what we're trying to test
                {
                    switch (useAsync)
                    {
                        case true: await compressor.WriteAsync(data, i, chunkSize); break;
                        case false: compressor.Write(data, i, chunkSize); break;
                    }
                    // Track the plaintext written so far as the expected decompressed output.
                    for (int j = i; j < i + chunkSize; j++)
                        expected.Insert(j, data[j]);
                    switch (useAsync)
                    {
                        case true: await compressor.FlushAsync(); break;
                        case false: compressor.Flush(); break;
                    }
                    // Snapshot the output so far; validation must not disturb `compressed`.
                    MemoryStream partiallyCompressed = new MemoryStream(compressed.ToArray());
                    partiallyCompressed.Position = 0;
                    await ValidateCompressedData(useAsync, useGzip, chunkSize, partiallyCompressed, expected.ToArray());
                }
            }
        }
        [OuterLoop]
        [Theory]
        [MemberData(nameof(RoundtripCompressDecompressOuterData))]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public async Task FlushBeforeFirstWrites(bool useAsync, bool useGzip, int chunkSize, int totalSize, CompressionLevel level)
        {
            // Flushing a compressor before any data has been written must not corrupt
            // the stream produced by the subsequent writes.
            byte[] data = new byte[totalSize];
            new Random(42).NextBytes(data);
            using (var compressed = new MemoryStream())
            using (var compressor = useGzip ? (Stream)new GZipStream(compressed, level, true) : new DeflateStream(compressed, level, true))
            {
                switch (useAsync)
                {
                    case true: await compressor.FlushAsync(); break;
                    case false: compressor.Flush(); break;
                }
                for (int i = 0; i < data.Length; i += chunkSize) // not using CopyTo{Async} due to optimizations in MemoryStream's implementation that avoid what we're trying to test
                {
                    switch (useAsync)
                    {
                        case true: await compressor.WriteAsync(data, i, chunkSize); break;
                        case false: compressor.Write(data, i, chunkSize); break;
                    }
                }
                switch (useAsync)
                {
                    case true: await compressor.FlushAsync(); break;
                    case false: compressor.Flush(); break;
                }
                compressed.Position = 0;
                await ValidateCompressedData(useAsync, useGzip, chunkSize, compressed, data);
            }
        }
/// <summary>
/// Given a MemoryStream of compressed data and a byte array of desired output, decompresses
/// the stream and validates that it is equal to the expected array.
/// </summary>
private async Task ValidateCompressedData(bool useAsync, bool useGzip, int chunkSize, MemoryStream compressed, byte[] expected)
{
using (MemoryStream decompressed = new MemoryStream())
using (Stream decompressor = useGzip ? (Stream)new GZipStream(compressed, CompressionMode.Decompress, true) : new DeflateStream(compressed, CompressionMode.Decompress, true))
{
if (useAsync)
decompressor.CopyTo(decompressed, chunkSize);
else
await decompressor.CopyToAsync(decompressed, chunkSize, CancellationToken.None);
Assert.Equal<byte>(expected, decompressed.ToArray());
}
}
[Fact]
public void SequentialReadsOnMemoryStream_Return_SameBytes()
{
byte[] data = new byte[1024 * 10];
new Random(42).NextBytes(data);
var compressed = new MemoryStream();
using (var compressor = new DeflateStream(compressed, CompressionMode.Compress, true))
{
for (int i = 0; i < data.Length; i += 1024)
{
compressor.Write(data, i, 1024);
}
}
compressed.Position = 0;
using (var decompressor = new DeflateStream(compressed, CompressionMode.Decompress, true))
{
int i, j;
byte[] array = new byte[100];
byte[] array2 = new byte[100];
// only read in the first 100 bytes
decompressor.Read(array, 0, array.Length);
for (i = 0; i < array.Length; i++)
Assert.Equal(data[i], array[i]);
// read in the next 100 bytes and make sure nothing is missing
decompressor.Read(array2, 0, array2.Length);
for (j = 0; j < array2.Length; j++)
Assert.Equal(data[j], array[j]);
}
}
[Fact]
public void Roundtrip_Write_ReadByte()
{
byte[] data = new byte[1024 * 10];
new Random(42).NextBytes(data);
var compressed = new MemoryStream();
using (var compressor = new DeflateStream(compressed, CompressionMode.Compress, true))
{
compressor.Write(data, 0, data.Length);
}
compressed.Position = 0;
using (var decompressor = new DeflateStream(compressed, CompressionMode.Decompress, true))
{
for (int i = 0; i < data.Length; i++)
Assert.Equal(data[i], decompressor.ReadByte());
}
}
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public async Task WrapNullReturningTasksStream()
        {
            // A wrapped stream whose async methods return null tasks must surface
            // InvalidOperationException rather than a NullReferenceException.
            using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnNullTasks), CompressionMode.Decompress))
                await Assert.ThrowsAsync<InvalidOperationException>(() => ds.ReadAsync(new byte[1024], 0, 1024));
        }
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Full Framework Flush is a no-op.")]
        public async Task WrapStreamReturningBadReadValues()
        {
            // A wrapped stream reporting more bytes than the buffer holds must yield
            // InvalidDataException; a negative count is treated as end-of-stream (0 bytes).
            using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooLargeCounts), CompressionMode.Decompress))
                Assert.Throws<InvalidDataException>(() => ds.Read(new byte[1024], 0, 1024));
            using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooLargeCounts), CompressionMode.Decompress))
                await Assert.ThrowsAsync<InvalidDataException>(() => ds.ReadAsync(new byte[1024], 0, 1024));
            using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooSmallCounts), CompressionMode.Decompress))
                Assert.Equal(0, ds.Read(new byte[1024], 0, 1024));
            using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooSmallCounts), CompressionMode.Decompress))
                Assert.Equal(0, await ds.ReadAsync(new byte[1024], 0, 1024));
        }
        // Produces (expected bytes, copy buffer size, compressed source stream) triples.
        // Sources include an in-memory stream plus file streams opened for sync and
        // async I/O; temp files use DeleteOnClose so disposal cleans them up.
        public static IEnumerable<object[]> CopyToAsync_Roundtrip_OutputMatchesInput_MemberData()
        {
            var rand = new Random();
            foreach (int dataSize in new[] { 1, 1024, 4095, 1024 * 1024 })
            {
                var data = new byte[dataSize];
                rand.NextBytes(data);
                var compressed = new MemoryStream();
                using (var ds = new DeflateStream(compressed, CompressionMode.Compress, leaveOpen: true))
                {
                    ds.Write(data, 0, data.Length);
                }
                byte[] compressedData = compressed.ToArray();
                foreach (int copyBufferSize in new[] { 1, 4096, 80 * 1024 })
                {
                    // Memory source
                    var m = new MemoryStream(compressedData, writable: false);
                    yield return new object[] { data, copyBufferSize, m };
                    // File sources, sync and async
                    foreach (bool useAsync in new[] { true, false })
                    {
                        string path = Path.GetTempFileName();
                        File.WriteAllBytes(path, compressedData);
                        FileOptions options = FileOptions.DeleteOnClose;
                        if (useAsync) options |= FileOptions.Asynchronous;
                        yield return new object[] { data, copyBufferSize, new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, 0x1000, options) };
                    }
                }
            }
        }
[Theory]
[MemberData(nameof(CopyToAsync_Roundtrip_OutputMatchesInput_MemberData))]
public async Task CopyToAsync_Roundtrip_OutputMatchesInput(byte[] expectedDecrypted, int copyBufferSize, Stream source)
{
var m = new MemoryStream();
using (DeflateStream ds = new DeflateStream(source, CompressionMode.Decompress))
{
await ds.CopyToAsync(m);
}
Assert.Equal(expectedDecrypted, m.ToArray());
}
        // Test double for a misbehaving wrapped stream: depending on Mode it returns
        // null tasks from the async methods or bogus counts from Read.
        private sealed class BadWrappedStream : Stream
        {
            public enum Mode
            {
                Default,
                ReturnNullTasks,
                ReturnTooSmallCounts,
                ReturnTooLargeCounts,
            }
            private readonly Mode _mode;
            public BadWrappedStream(Mode mode) { _mode = mode; }
            public override int Read(byte[] buffer, int offset, int count)
            {
                switch (_mode)
                {
                    // -1 is an illegal return value for Stream.Read.
                    case Mode.ReturnTooSmallCounts: return -1;
                    // Claims more bytes than the buffer can hold.
                    case Mode.ReturnTooLargeCounts: return buffer.Length + 1;
                    default: return 0;
                }
            }
            public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
            {
                return _mode == Mode.ReturnNullTasks ?
                    null :
                    base.ReadAsync(buffer, offset, count, cancellationToken);
            }
            public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
            {
                return _mode == Mode.ReturnNullTasks ?
                    null :
                    base.WriteAsync(buffer, offset, count, cancellationToken);
            }
            // Writes are swallowed; only read-path misbehavior is modeled.
            public override void Write(byte[] buffer, int offset, int count) { }
            public override void Flush() { }
            public override bool CanRead { get { return true; } }
            public override bool CanSeek { get { return false; } }
            public override bool CanWrite { get { return true; } }
            public override long Length { get { throw new NotSupportedException(); } }
            public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
            public override long Seek(long offset, SeekOrigin origin) { throw new NotSupportedException(); }
            public override void SetLength(long value) { throw new NotSupportedException(); }
        }
}
    // MemoryStream whose async reads/writes block on a manually-signaled event,
    // letting tests control exactly when I/O completes. ReadHit/WriteHit record that
    // the async paths (rather than the sync ones) were actually taken.
    public class ManualSyncMemoryStream : MemoryStream
    {
        private bool isSync;
        public ManualResetEventSlim manualResetEvent = new ManualResetEventSlim(initialState: false);
        public bool ReadHit = false;  // For validation of the async methods we want to ensure they correctly delegate the async
        public bool WriteHit = false;  // methods of the underlying stream. This bool acts as a toggle to check that they're being used.
        public static async Task<ManualSyncMemoryStream> GetStreamFromFileAsync(string testFile, bool sync = false, bool strip = false)
        {
            // Copies the test file's bytes into a new instance, optionally removing the
            // gzip header/footer first; the returned stream is rewound to position 0.
            var baseStream = await StreamHelpers.CreateTempCopyStream(testFile);
            if (strip)
            {
                baseStream = StripHeaderAndFooter.Strip(baseStream);
            }
            var ms = new ManualSyncMemoryStream(sync);
            await baseStream.CopyToAsync(ms);
            ms.Position = 0;
            return ms;
        }
        public ManualSyncMemoryStream(bool sync = false) : base()
        {
            isSync = sync;
        }
        // NOTE(review): TaskToApm is a shared test helper assumed to adapt Task-based
        // async methods to the Begin/End APM pattern.
        public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state) => TaskToApm.Begin(ReadAsync(buffer, offset, count), callback, state);
        public override int EndRead(IAsyncResult asyncResult) => TaskToApm.End<int>(asyncResult);
        public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state) => TaskToApm.Begin(WriteAsync(buffer, offset, count), callback, state);
        public override void EndWrite(IAsyncResult asyncResult) => TaskToApm.End(asyncResult);
        public override async Task<int> ReadAsync(byte[] array, int offset, int count, CancellationToken cancellationToken)
        {
            ReadHit = true;
            // In sync mode, block the caller directly; otherwise wait on a pool thread.
            if (isSync)
            {
                manualResetEvent.Wait(cancellationToken);
            }
            else
            {
                await Task.Run(() => manualResetEvent.Wait(cancellationToken));
            }
            return await base.ReadAsync(array, offset, count, cancellationToken);
        }
        public override async Task WriteAsync(byte[] array, int offset, int count, CancellationToken cancellationToken)
        {
            WriteHit = true;
            if (isSync)
            {
                manualResetEvent.Wait(cancellationToken);
            }
            else
            {
                await Task.Run(() => manualResetEvent.Wait(cancellationToken));
            }
            await base.WriteAsync(array, offset, count, cancellationToken);
        }
    }
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Compute.V1.Snippets
{
using Google.Api.Gax;
using System;
using System.Linq;
using System.Threading.Tasks;
/// <summary>Generated snippets.</summary>
public sealed class AllGeneratedRegionDiskTypesClientSnippets
{
        /// <summary>Snippet for Get</summary>
        public void GetRequestObject()
        {
            // NOTE(review): generated sample ("DO NOT EDIT") — the empty strings are
            // placeholders for real disk-type/region/project identifiers.
            // Snippet: Get(GetRegionDiskTypeRequest, CallSettings)
            // Create client
            RegionDiskTypesClient regionDiskTypesClient = RegionDiskTypesClient.Create();
            // Initialize request argument(s)
            GetRegionDiskTypeRequest request = new GetRegionDiskTypeRequest
            {
                DiskType = "",
                Region = "",
                Project = "",
            };
            // Make the request
            DiskType response = regionDiskTypesClient.Get(request);
            // End snippet
        }
        /// <summary>Snippet for GetAsync</summary>
        public async Task GetRequestObjectAsync()
        {
            // NOTE(review): generated sample ("DO NOT EDIT") — the empty strings are
            // placeholders for real disk-type/region/project identifiers.
            // Snippet: GetAsync(GetRegionDiskTypeRequest, CallSettings)
            // Additional: GetAsync(GetRegionDiskTypeRequest, CancellationToken)
            // Create client
            RegionDiskTypesClient regionDiskTypesClient = await RegionDiskTypesClient.CreateAsync();
            // Initialize request argument(s)
            GetRegionDiskTypeRequest request = new GetRegionDiskTypeRequest
            {
                DiskType = "",
                Region = "",
                Project = "",
            };
            // Make the request
            DiskType response = await regionDiskTypesClient.GetAsync(request);
            // End snippet
        }
        /// <summary>Snippet for Get</summary>
        public void Get()
        {
            // NOTE(review): generated sample ("DO NOT EDIT") — the empty strings are
            // placeholders for real project/region/disk-type identifiers.
            // Snippet: Get(string, string, string, CallSettings)
            // Create client
            RegionDiskTypesClient regionDiskTypesClient = RegionDiskTypesClient.Create();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string diskType = "";
            // Make the request
            DiskType response = regionDiskTypesClient.Get(project, region, diskType);
            // End snippet
        }
        /// <summary>Snippet for GetAsync</summary>
        public async Task GetAsync()
        {
            // NOTE(review): generated sample ("DO NOT EDIT") — the empty strings are
            // placeholders for real project/region/disk-type identifiers.
            // Snippet: GetAsync(string, string, string, CallSettings)
            // Additional: GetAsync(string, string, string, CancellationToken)
            // Create client
            RegionDiskTypesClient regionDiskTypesClient = await RegionDiskTypesClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string region = "";
            string diskType = "";
            // Make the request
            DiskType response = await regionDiskTypesClient.GetAsync(project, region, diskType);
            // End snippet
        }
        /// <summary>Snippet for List</summary>
        public void ListRequestObject()
        {
            // NOTE(review): generated sample ("DO NOT EDIT") — field values are
            // placeholders; the three iteration styles below are alternatives.
            // Snippet: List(ListRegionDiskTypesRequest, CallSettings)
            // Create client
            RegionDiskTypesClient regionDiskTypesClient = RegionDiskTypesClient.Create();
            // Initialize request argument(s)
            ListRegionDiskTypesRequest request = new ListRegionDiskTypesRequest
            {
                Region = "",
                OrderBy = "",
                Project = "",
                Filter = "",
                ReturnPartialSuccess = false,
            };
            // Make the request
            PagedEnumerable<RegionDiskTypeList, DiskType> response = regionDiskTypesClient.List(request);
            // Iterate over all response items, lazily performing RPCs as required
            foreach (DiskType item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (RegionDiskTypeList page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (DiskType item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<DiskType> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (DiskType item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }
        /// <summary>Snippet for ListAsync</summary>
        public async Task ListRequestObjectAsync()
        {
            // NOTE(review): generated sample ("DO NOT EDIT") — field values are
            // placeholders; the three iteration styles below are alternatives.
            // Snippet: ListAsync(ListRegionDiskTypesRequest, CallSettings)
            // Create client
            RegionDiskTypesClient regionDiskTypesClient = await RegionDiskTypesClient.CreateAsync();
            // Initialize request argument(s)
            ListRegionDiskTypesRequest request = new ListRegionDiskTypesRequest
            {
                Region = "",
                OrderBy = "",
                Project = "",
                Filter = "",
                ReturnPartialSuccess = false,
            };
            // Make the request
            PagedAsyncEnumerable<RegionDiskTypeList, DiskType> response = regionDiskTypesClient.ListAsync(request);
            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((DiskType item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });
            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((RegionDiskTypeList page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (DiskType item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<DiskType> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (DiskType item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }
/// <summary>Snippet for List</summary>
public void List()
{
    // Snippet: List(string, string, string, int?, CallSettings)
    // Create the client that will issue the RPCs.
    RegionDiskTypesClient client = RegionDiskTypesClient.Create();
    // Request parameters passed individually rather than via a request object.
    string project = "";
    string region = "";
    // Start the call; enumeration below performs the RPCs lazily.
    PagedEnumerable<RegionDiskTypeList, DiskType> results = client.List(project, region);
    // Option 1: flatten all pages into a single item sequence.
    foreach (DiskType item in results)
    {
        Console.WriteLine(item);
    }
    // Option 2: walk the raw server responses, one RPC per page.
    foreach (RegionDiskTypeList page in results.AsRawResponses())
    {
        Console.WriteLine("A page of results:");
        foreach (DiskType item in page)
        {
            Console.WriteLine(item);
        }
    }
    // Option 3: fetch a single page of a requested size (the final page may be shorter).
    int pageSize = 10;
    Page<DiskType> singlePage = results.ReadPage(pageSize);
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (DiskType item in singlePage)
    {
        Console.WriteLine(item);
    }
    // Keep the token so the next page can be requested later.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
/// <summary>Snippet for ListAsync</summary>
public async Task ListAsync()
{
    // Snippet: ListAsync(string, string, string, int?, CallSettings)
    // Create the client that will issue the RPCs.
    RegionDiskTypesClient client = await RegionDiskTypesClient.CreateAsync();
    // Request parameters passed individually rather than via a request object.
    string project = "";
    string region = "";
    // Start the call; enumeration below performs the RPCs lazily.
    PagedAsyncEnumerable<RegionDiskTypeList, DiskType> results = client.ListAsync(project, region);
    // Option 1: flatten all pages into a single item sequence.
    await results.ForEachAsync((DiskType item) =>
    {
        Console.WriteLine(item);
    });
    // Option 2: walk the raw server responses, one RPC per page.
    await results.AsRawResponses().ForEachAsync((RegionDiskTypeList page) =>
    {
        Console.WriteLine("A page of results:");
        foreach (DiskType item in page)
        {
            Console.WriteLine(item);
        }
    });
    // Option 3: fetch a single page of a requested size (the final page may be shorter).
    int pageSize = 10;
    Page<DiskType> singlePage = await results.ReadPageAsync(pageSize);
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (DiskType item in singlePage)
    {
        Console.WriteLine(item);
    }
    // Keep the token so the next page can be requested later.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
}
}
| |
// ***********************************************************************
// Copyright (c) 2010 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using NUnit.Framework.Interfaces;
using NUnit.TestUtilities;
namespace NUnit.Framework.Internal
{
/// <summary>
/// Base fixture for TestResult tests. SetUp builds a dummy test case and a
/// dummy suite with known properties; each concrete subclass simulates one
/// particular run outcome via <see cref="SimulateTestRun"/> and then asserts
/// on the resulting state and XML representation.
/// </summary>
[TestFixture]
public abstract class TestResultTests
{
// Result of the single dummy test case.
protected TestResult testResult;
// Result of the suite containing the dummy test case.
protected TestResult suiteResult;
protected TestMethod test;
// Timing values applied by subclasses that simulate a timed run.
protected double expectedDuration;
protected DateTime expectedStart;
protected DateTime expectedEnd;
[SetUp]
public void SetUp()
{
expectedDuration = 0.125;
// Fixed UTC instant so the XML start-time/end-time attributes are deterministic.
expectedStart = new DateTime(1968, 4, 8, 15, 05, 30, 250, DateTimeKind.Utc);
expectedEnd = expectedStart.AddSeconds(expectedDuration);
// The method name must match DummySuite.DummyMethod below (looked up via reflection).
test = new TestMethod(new MethodWrapper(typeof(DummySuite), "DummyMethod"));
test.Properties.Set(PropertyNames.Description, "Test description");
test.Properties.Add(PropertyNames.Category, "Dubious");
test.Properties.Set("Priority", "low");
testResult = test.MakeTestResult();
TestSuite suite = new TestSuite(typeof(DummySuite));
suite.Properties.Set(PropertyNames.Description, "Suite description");
suite.Properties.Add(PropertyNames.Category, "Fast");
suite.Properties.Add("Value", 3);
suiteResult = suite.MakeTestResult();
// Subclasses set results and attach children here.
SimulateTestRun();
}
[Test]
public void TestResultBasicInfo()
{
Assert.AreEqual("DummyMethod", testResult.Name);
Assert.AreEqual("NUnit.Framework.Internal.TestResultTests+DummySuite.DummyMethod", testResult.FullName);
}
[Test]
public void SuiteResultBasicInfo()
{
Assert.AreEqual("TestResultTests+DummySuite", suiteResult.Name);
Assert.AreEqual("NUnit.Framework.Internal.TestResultTests+DummySuite", suiteResult.FullName);
}
[Test]
public void TestResultXmlNodeBasicInfo()
{
// Name, full name, and the three properties set in SetUp must round-trip to XML.
TNode testNode = testResult.ToXml(true);
Assert.NotNull(testNode.Attributes["id"]);
Assert.AreEqual("test-case", testNode.Name);
Assert.AreEqual("DummyMethod", testNode.Attributes["name"]);
Assert.AreEqual("NUnit.Framework.Internal.TestResultTests+DummySuite.DummyMethod", testNode.Attributes["fullname"]);
Assert.AreEqual("Test description", testNode.SelectSingleNode("properties/property[@name='Description']").Attributes["value"]);
Assert.AreEqual("Dubious", testNode.SelectSingleNode("properties/property[@name='Category']").Attributes["value"]);
Assert.AreEqual("low", testNode.SelectSingleNode("properties/property[@name='Priority']").Attributes["value"]);
Assert.AreEqual(0, testNode.SelectNodes("test-case").Count);
}
[Test]
public void SuiteResultXmlNodeBasicInfo()
{
TNode suiteNode = suiteResult.ToXml(true);
Assert.NotNull(suiteNode.Attributes["id"]);
Assert.AreEqual("test-suite", suiteNode.Name);
Assert.AreEqual("TestResultTests+DummySuite", suiteNode.Attributes["name"]);
Assert.AreEqual("NUnit.Framework.Internal.TestResultTests+DummySuite", suiteNode.Attributes["fullname"]);
Assert.AreEqual("Suite description", suiteNode.SelectSingleNode("properties/property[@name='Description']").Attributes["value"]);
Assert.AreEqual("Fast", suiteNode.SelectSingleNode("properties/property[@name='Category']").Attributes["value"]);
Assert.AreEqual("3", suiteNode.SelectSingleNode("properties/property[@name='Value']").Attributes["value"]);
}
// Result state this fixture simulates; null means "not applicable" and some
// shared tests (e.g. character escaping) skip themselves.
protected virtual ResultState ResultState
{
get { return null; }
}
// XML element under which the message/stack-trace appear ("reason" or "failure").
protected virtual string ReasonNodeName
{
get { return "reason"; }
}
// Implemented by subclasses to set results on testResult/suiteResult.
protected abstract void SimulateTestRun();
// Target type for the reflection lookups above; names are load-bearing.
public class DummySuite
{
public void DummyMethod() { }
}
}
// Covers results that were never explicitly set: both the test case and its
// suite should report Inconclusive with all counters at zero.
public class DefaultResultTests : TestResultTests
{
    protected override void SimulateTestRun()
    {
        // No result is set on the test; it is simply attached to the suite.
        suiteResult.AddResult(testResult);
    }

    [Test]
    public void TestResultIsInconclusive()
    {
        Assert.That(testResult.ResultState, Is.EqualTo(ResultState.Inconclusive));
        Assert.That(testResult.ResultState.Status, Is.EqualTo(TestStatus.Inconclusive));
        Assert.That(testResult.ResultState.Label, Is.Empty);
        Assert.That(testResult.Duration, Is.EqualTo(0d));
    }

    [Test]
    public void SuiteResultIsInconclusive()
    {
        Assert.That(suiteResult.ResultState, Is.EqualTo(ResultState.Inconclusive));
        Assert.That(suiteResult.AssertCount, Is.EqualTo(0));
    }

    [Test]
    public void TestResultXmlNodeIsInconclusive()
    {
        TNode testNode = testResult.ToXml(true);
        Assert.That(testNode.Attributes["result"], Is.EqualTo("Inconclusive"));
    }

    [Test]
    public void SuiteResultXmlNodeIsInconclusive()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.Attributes["result"], Is.EqualTo("Inconclusive"));
        Assert.That(suiteNode.Attributes["passed"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["failed"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["skipped"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["inconclusive"], Is.EqualTo("1"));
        Assert.That(suiteNode.Attributes["asserts"], Is.EqualTo("0"));
    }

    [Test]
    public void SuiteResultXmlNodeHasOneChildTest()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.SelectNodes("test-case").Count, Is.EqualTo(1));
    }
}
// Covers a passing test: result state, timing, assert counts, and the XML
// representation of both the test-case and its enclosing suite.
public class SuccessResultTests : TestResultTests
{
    protected override ResultState ResultState
    {
        get { return ResultState.Success; }
    }

    protected override void SimulateTestRun()
    {
        // Mark the test as passed and stamp identical timing on test and suite.
        testResult.SetResult(ResultState.Success, "Test passed!");
        testResult.StartTime = expectedStart;
        testResult.EndTime = expectedEnd;
        testResult.Duration = expectedDuration;
        testResult.AssertCount = 2;
        suiteResult.StartTime = expectedStart;
        suiteResult.EndTime = expectedEnd;
        suiteResult.Duration = expectedDuration;
        suiteResult.AddResult(testResult);
    }

    [Test]
    public void TestResultIsSuccess()
    {
        Assert.That(testResult.ResultState == ResultState.Success, Is.True);
        Assert.That(testResult.ResultState.Status, Is.EqualTo(TestStatus.Passed));
        Assert.That(testResult.ResultState.Label, Is.Empty);
        Assert.That(testResult.Message, Is.EqualTo("Test passed!"));
        Assert.That(testResult.StartTime, Is.EqualTo(expectedStart));
        Assert.That(testResult.EndTime, Is.EqualTo(expectedEnd));
        Assert.That(testResult.Duration, Is.EqualTo(expectedDuration));
    }

    [Test]
    public void SuiteResultIsSuccess()
    {
        Assert.That(suiteResult.ResultState == ResultState.Success, Is.True);
        Assert.That(suiteResult.ResultState.Status, Is.EqualTo(TestStatus.Passed));
        Assert.That(suiteResult.ResultState.Label, Is.Empty);
        Assert.That(suiteResult.PassCount, Is.EqualTo(1));
        Assert.That(suiteResult.FailCount, Is.EqualTo(0));
        Assert.That(suiteResult.SkipCount, Is.EqualTo(0));
        Assert.That(suiteResult.InconclusiveCount, Is.EqualTo(0));
        Assert.That(suiteResult.AssertCount, Is.EqualTo(2));
    }

    [Test]
    public void TestResultXmlNodeIsSuccess()
    {
        TNode testNode = testResult.ToXml(true);
        Assert.That(testNode.Attributes["result"], Is.EqualTo("Passed"));
        Assert.That(testNode.Attributes["label"], Is.Null);
        Assert.That(testNode.Attributes["site"], Is.Null);
        Assert.That(testNode.Attributes["start-time"], Is.EqualTo("1968-04-08 15:05:30Z"));
        Assert.That(testNode.Attributes["end-time"], Is.EqualTo("1968-04-08 15:05:30Z"));
        Assert.That(testNode.Attributes["duration"], Is.EqualTo("0.125000"));
        Assert.That(testNode.Attributes["asserts"], Is.EqualTo("2"));
        // A passing test with a message gets a <reason> element but no stack trace.
        TNode reason = testNode.SelectSingleNode("reason");
        Assert.That(reason, Is.Not.Null);
        Assert.That(reason.SelectSingleNode("message"), Is.Not.Null);
        Assert.That(reason.SelectSingleNode("message").Value, Is.EqualTo("Test passed!"));
        Assert.That(reason.SelectSingleNode("stack-trace"), Is.Null);
    }

    [Test]
    public void SuiteResultXmlNodeIsSuccess()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.Attributes["result"], Is.EqualTo("Passed"));
        Assert.That(suiteNode.Attributes["label"], Is.Null);
        Assert.That(suiteNode.Attributes["site"], Is.Null);
        Assert.That(suiteNode.Attributes["start-time"], Is.EqualTo("1968-04-08 15:05:30Z"));
        Assert.That(suiteNode.Attributes["end-time"], Is.EqualTo("1968-04-08 15:05:30Z"));
        Assert.That(suiteNode.Attributes["duration"], Is.EqualTo("0.125000"));
        Assert.That(suiteNode.Attributes["passed"], Is.EqualTo("1"));
        Assert.That(suiteNode.Attributes["failed"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["skipped"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["inconclusive"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["asserts"], Is.EqualTo("2"));
    }

    [Test]
    public void SuiteResultXmlNodeHasOneChildTest()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.SelectNodes("test-case").Count, Is.EqualTo(1));
    }
}
// Covers an ignored test: skipped status with the "Ignored" label, and the
// suite inheriting a skipped child.
public class IgnoredResultTests : TestResultTests
{
    protected override ResultState ResultState
    {
        get { return ResultState.Ignored; }
    }

    protected override void SimulateTestRun()
    {
        testResult.SetResult(ResultState.Ignored, "because");
        suiteResult.AddResult(testResult);
    }

    [Test]
    public void TestResultIsIgnored()
    {
        Assert.That(testResult.ResultState, Is.EqualTo(ResultState.Ignored));
        Assert.That(testResult.ResultState.Status, Is.EqualTo(TestStatus.Skipped));
        Assert.That(testResult.ResultState.Label, Is.EqualTo("Ignored"));
        Assert.That(testResult.Message, Is.EqualTo("because"));
    }

    [Test]
    public void SuiteResultIsIgnored()
    {
        Assert.That(suiteResult.ResultState, Is.EqualTo(ResultState.Ignored));
        Assert.That(suiteResult.ResultState.Status, Is.EqualTo(TestStatus.Skipped));
        Assert.That(suiteResult.Message, Is.EqualTo(TestResult.CHILD_IGNORE_MESSAGE));
        Assert.That(suiteResult.PassCount, Is.EqualTo(0));
        Assert.That(suiteResult.FailCount, Is.EqualTo(0));
        Assert.That(suiteResult.SkipCount, Is.EqualTo(1));
        Assert.That(suiteResult.InconclusiveCount, Is.EqualTo(0));
        Assert.That(suiteResult.AssertCount, Is.EqualTo(0));
    }

    [Test]
    public void TestResultXmlNodeIsIgnored()
    {
        TNode testNode = testResult.ToXml(true);
        Assert.That(testNode.Attributes["result"], Is.EqualTo("Skipped"));
        Assert.That(testNode.Attributes["label"], Is.EqualTo("Ignored"));
        Assert.That(testNode.Attributes["site"], Is.Null);
        TNode reason = testNode.SelectSingleNode("reason");
        Assert.That(reason, Is.Not.Null);
        Assert.That(reason.SelectSingleNode("message"), Is.Not.Null);
        Assert.That(reason.SelectSingleNode("message").Value, Is.EqualTo("because"));
        Assert.That(reason.SelectSingleNode("stack-trace"), Is.Null);
    }

    [Test]
    public void SuiteResultXmlNodeIsIgnored()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.Attributes["result"], Is.EqualTo("Skipped"));
        Assert.That(suiteNode.Attributes["label"], Is.EqualTo("Ignored"));
        Assert.That(suiteNode.Attributes["site"], Is.Null);
        Assert.That(suiteNode.Attributes["passed"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["failed"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["skipped"], Is.EqualTo("1"));
        Assert.That(suiteNode.Attributes["inconclusive"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["asserts"], Is.EqualTo("0"));
    }

    [Test]
    public void SuiteResultXmlNodeHasOneChildTest()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.SelectNodes("test-case").Count, Is.EqualTo(1));
    }
}
// Covers a non-runnable test: reported as Failed with the "Invalid" label,
// and the suite failing at the Child site.
public class NotRunnableResultTests : TestResultTests
{
    protected override void SimulateTestRun()
    {
        testResult.SetResult(ResultState.NotRunnable, "bad test");
        suiteResult.AddResult(testResult);
    }

    [Test]
    public void TestResultIsNotRunnable()
    {
        Assert.That(testResult.ResultState, Is.EqualTo(ResultState.NotRunnable));
        Assert.That(testResult.ResultState.Status, Is.EqualTo(TestStatus.Failed));
        Assert.That(testResult.ResultState.Label, Is.EqualTo("Invalid"));
        Assert.That(testResult.Message, Is.EqualTo("bad test"));
    }

    [Test]
    public void SuiteResultIsFailure()
    {
        Assert.That(suiteResult.ResultState, Is.EqualTo(ResultState.ChildFailure));
        Assert.That(suiteResult.ResultState.Status, Is.EqualTo(TestStatus.Failed));
        Assert.That(suiteResult.Message, Is.EqualTo(TestResult.CHILD_ERRORS_MESSAGE));
        Assert.That(suiteResult.ResultState.Site, Is.EqualTo(FailureSite.Child));
        Assert.That(suiteResult.PassCount, Is.EqualTo(0));
        Assert.That(suiteResult.FailCount, Is.EqualTo(1));
        Assert.That(suiteResult.SkipCount, Is.EqualTo(0));
        Assert.That(suiteResult.InconclusiveCount, Is.EqualTo(0));
        Assert.That(suiteResult.AssertCount, Is.EqualTo(0));
    }

    [Test]
    public void TestResultXmlNodeIsNotRunnable()
    {
        TNode testNode = testResult.ToXml(true);
        Assert.That(testNode.Attributes["result"], Is.EqualTo("Failed"));
        Assert.That(testNode.Attributes["label"], Is.EqualTo("Invalid"));
        Assert.That(testNode.Attributes["site"], Is.Null);
        // A not-runnable test produces a <failure> element with no stack trace.
        TNode failure = testNode.SelectSingleNode("failure");
        Assert.That(failure, Is.Not.Null);
        Assert.That(failure.SelectSingleNode("message"), Is.Not.Null);
        Assert.That(failure.SelectSingleNode("message").Value, Is.EqualTo("bad test"));
        Assert.That(failure.SelectSingleNode("stack-trace"), Is.Null);
    }

    [Test]
    public void SuiteResultXmlNodeIsFailure()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.Attributes["result"], Is.EqualTo("Failed"));
        Assert.That(suiteNode.Attributes["label"], Is.Null);
        Assert.That(suiteNode.Attributes["site"], Is.EqualTo("Child"));
        Assert.That(suiteNode.Attributes["passed"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["failed"], Is.EqualTo("1"));
        Assert.That(suiteNode.Attributes["skipped"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["inconclusive"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["asserts"], Is.EqualTo("0"));
    }

    [Test]
    public void SuiteResultXmlNodeHasOneChildTest()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.SelectNodes("test-case").Count, Is.EqualTo(1));
    }
}
// Covers an explicitly failed test (message + stack trace): result state,
// timing, the XML representation, and escaping of invalid XML characters.
public class FailedResultTests : TestResultTests
{
    protected override ResultState ResultState
    {
        get { return ResultState.Failure; }
    }

    // Failures are reported under a <failure> element rather than <reason>.
    protected override string ReasonNodeName
    {
        get { return "failure"; }
    }

    protected override void SimulateTestRun()
    {
        // Fail the test with both a message and a stack trace, and stamp
        // identical timing on the test and its suite.
        testResult.SetResult(ResultState.Failure, "message", "stack trace");
        testResult.StartTime = expectedStart;
        testResult.EndTime = expectedEnd;
        testResult.Duration = expectedDuration;
        suiteResult.StartTime = expectedStart;
        suiteResult.EndTime = expectedEnd;
        suiteResult.Duration = expectedDuration;
        testResult.AssertCount = 3;
        suiteResult.AddResult(testResult);
    }

    [Test]
    public void TestResultIsFailure()
    {
        Assert.AreEqual(ResultState.Failure, testResult.ResultState);
        Assert.AreEqual(TestStatus.Failed, testResult.ResultState.Status);
        Assert.AreEqual("message", testResult.Message);
        Assert.AreEqual("stack trace", testResult.StackTrace);
        Assert.AreEqual(expectedStart, testResult.StartTime);
        Assert.AreEqual(expectedEnd, testResult.EndTime);
        Assert.AreEqual(expectedDuration, testResult.Duration);
    }

    [Test]
    public void SuiteResultIsFailure()
    {
        // The suite fails at the Child site; the child's stack trace does not
        // propagate to the suite.
        Assert.AreEqual(ResultState.ChildFailure, suiteResult.ResultState);
        Assert.AreEqual(TestStatus.Failed, suiteResult.ResultState.Status);
        Assert.AreEqual(TestResult.CHILD_ERRORS_MESSAGE, suiteResult.Message);
        Assert.That(suiteResult.ResultState.Site, Is.EqualTo(FailureSite.Child));
        Assert.Null(suiteResult.StackTrace);
        Assert.AreEqual(0, suiteResult.PassCount);
        Assert.AreEqual(1, suiteResult.FailCount);
        Assert.AreEqual(0, suiteResult.SkipCount);
        Assert.AreEqual(0, suiteResult.InconclusiveCount);
        Assert.AreEqual(3, suiteResult.AssertCount);
    }

    [Test]
    public void TestResultXmlNodeIsFailure()
    {
        TNode testNode = testResult.ToXml(true);
        Assert.AreEqual("Failed", testNode.Attributes["result"]);
        Assert.AreEqual(null, testNode.Attributes["label"]);
        Assert.AreEqual(null, testNode.Attributes["site"]);
        Assert.AreEqual("1968-04-08 15:05:30Z", testNode.Attributes["start-time"]);
        Assert.AreEqual("1968-04-08 15:05:30Z", testNode.Attributes["end-time"]);
        Assert.AreEqual("0.125000", testNode.Attributes["duration"]);
        TNode failureNode = testNode.SelectSingleNode("failure");
        Assert.NotNull(failureNode, "No <failure> element found");
        TNode messageNode = failureNode.SelectSingleNode("message");
        Assert.NotNull(messageNode, "No <message> element found");
        Assert.AreEqual("message", messageNode.Value);
        TNode stacktraceNode = failureNode.SelectSingleNode("stack-trace");
        Assert.NotNull(stacktraceNode, "No <stack-trace> element found");
        Assert.AreEqual("stack trace", stacktraceNode.Value);
    }

    [Test]
    public void TestResultXmlNodeEscapesInvalidXmlCharacters()
    {
        if (ResultState == null)
            Assert.Ignore("Test ignored because ResultState is not set");
        // Characters that are invalid in XML must be escaped as \uXXXX;
        // valid characters (including a surrogate pair) pass through unchanged.
        testResult.SetResult(ResultState, "Invalid Characters: \u0001\u0008\u000b\u001f\ud800; Valid Characters: \u0009\u000a\u000d\u0020\ufffd\ud800\udc00");
        TNode testNode = testResult.ToXml(true);
        TNode reasonNode = testNode.SelectSingleNode(ReasonNodeName);
        Assert.That(reasonNode, Is.Not.Null, "No <{0}> element found", ReasonNodeName);
        TNode messageNode = reasonNode.SelectSingleNode("message");
        Assert.That(messageNode, Is.Not.Null, "No <message> element found");
        Assert.That(messageNode.Value, Is.EqualTo("Invalid Characters: \\u0001\\u0008\\u000b\\u001f\\ud800; Valid Characters: \u0009\u000a\u000d\u0020\ufffd\ud800\udc00"));
    }

    [Test]
    public void SuiteResultXmlNodeIsFailure()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.AreEqual("Failed", suiteNode.Attributes["result"]);
        Assert.AreEqual(null, suiteNode.Attributes["label"]);
        Assert.AreEqual("Child", suiteNode.Attributes["site"]);
        Assert.AreEqual("1968-04-08 15:05:30Z", suiteNode.Attributes["start-time"]);
        Assert.AreEqual("1968-04-08 15:05:30Z", suiteNode.Attributes["end-time"]);
        Assert.AreEqual("0.125000", suiteNode.Attributes["duration"]);
        TNode failureNode = suiteNode.SelectSingleNode("failure");
        Assert.NotNull(failureNode, "No <failure> element found");
        TNode messageNode = failureNode.SelectSingleNode("message");
        Assert.NotNull(messageNode, "No <message> element found");
        Assert.AreEqual(TestResult.CHILD_ERRORS_MESSAGE, messageNode.Value);
        // BUG FIX: the element is named "stack-trace"; the previous lookup for
        // "stacktrace" could never match, making this assertion vacuous.
        TNode stacktraceNode = failureNode.SelectSingleNode("stack-trace");
        Assert.Null(stacktraceNode, "Unexpected <stack-trace> element found");
        Assert.AreEqual("0", suiteNode.Attributes["passed"]);
        Assert.AreEqual("1", suiteNode.Attributes["failed"]);
        Assert.AreEqual("0", suiteNode.Attributes["skipped"]);
        Assert.AreEqual("0", suiteNode.Attributes["inconclusive"]);
        Assert.AreEqual("3", suiteNode.Attributes["asserts"]);
    }

    [Test]
    public void SuiteResultXmlNodeHasOneChildTest()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.AreEqual(1, suiteNode.SelectNodes("test-case").Count);
    }
}
// Covers an explicitly inconclusive test: empty label, message propagation,
// and the suite staying inconclusive with no message of its own.
public class InconclusiveResultTests : TestResultTests
{
    protected override ResultState ResultState
    {
        get { return ResultState.Inconclusive; }
    }

    protected override void SimulateTestRun()
    {
        testResult.SetResult(ResultState.Inconclusive, "because");
        suiteResult.AddResult(testResult);
    }

    [Test]
    public void TestResultIsInconclusive()
    {
        Assert.That(testResult.ResultState, Is.EqualTo(ResultState.Inconclusive));
        Assert.That(testResult.ResultState.Status, Is.EqualTo(TestStatus.Inconclusive));
        Assert.That(testResult.ResultState.Label, Is.Empty);
        Assert.That(testResult.Message, Is.EqualTo("because"));
    }

    [Test]
    public void SuiteResultIsInconclusive()
    {
        Assert.That(suiteResult.ResultState, Is.EqualTo(ResultState.Inconclusive));
        Assert.That(suiteResult.ResultState.Status, Is.EqualTo(TestStatus.Inconclusive));
        Assert.That(suiteResult.Message, Is.Null);
        Assert.That(suiteResult.PassCount, Is.EqualTo(0));
        Assert.That(suiteResult.FailCount, Is.EqualTo(0));
        Assert.That(suiteResult.SkipCount, Is.EqualTo(0));
        Assert.That(suiteResult.InconclusiveCount, Is.EqualTo(1));
        Assert.That(suiteResult.AssertCount, Is.EqualTo(0));
    }

    [Test]
    public void TestResultXmlNodeIsInconclusive()
    {
        TNode testNode = testResult.ToXml(true);
        Assert.That(testNode.Attributes["result"], Is.EqualTo("Inconclusive"));
        Assert.That(testNode.Attributes["label"], Is.Null);
        Assert.That(testNode.Attributes["site"], Is.Null);
        TNode reason = testNode.SelectSingleNode("reason");
        Assert.That(reason, Is.Not.Null);
        Assert.That(reason.SelectSingleNode("message"), Is.Not.Null);
        Assert.That(reason.SelectSingleNode("message").Value, Is.EqualTo("because"));
        Assert.That(reason.SelectSingleNode("stack-trace"), Is.Null);
    }

    [Test]
    public void SuiteResultXmlNodeIsInconclusive()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.Attributes["result"], Is.EqualTo("Inconclusive"));
        Assert.That(suiteNode.Attributes["label"], Is.Null);
        Assert.That(suiteNode.Attributes["passed"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["failed"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["skipped"], Is.EqualTo("0"));
        Assert.That(suiteNode.Attributes["inconclusive"], Is.EqualTo("1"));
        Assert.That(suiteNode.Attributes["asserts"], Is.EqualTo("0"));
    }

    [Test]
    public void SuiteResultXmlNodeHasOneChildTest()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.That(suiteNode.SelectNodes("test-case").Count, Is.EqualTo(1));
    }
}
// Covers a suite with mixed child outcomes (2 passed, 1 failed, 1 inconclusive):
// aggregated counts, overall failure state, and the suite's XML node.
public class MixedResultTests : TestResultTests
{
    protected override void SimulateTestRun()
    {
        // The same testResult is reused: SetResult overwrites its state before
        // each AddResult, so the suite accumulates four distinct child results.
        testResult.SetResult(ResultState.Success);
        testResult.AssertCount = 2;
        suiteResult.AddResult(testResult);
        testResult.SetResult(ResultState.Failure, "message", "stack trace");
        testResult.AssertCount = 1;
        suiteResult.AddResult(testResult);
        testResult.SetResult(ResultState.Success);
        testResult.AssertCount = 3;
        suiteResult.AddResult(testResult);
        testResult.SetResult(ResultState.Inconclusive, "inconclusive reason", "stacktrace");
        testResult.AssertCount = 0;
        suiteResult.AddResult(testResult);
    }

    [Test]
    public void SuiteResultIsFailure()
    {
        Assert.AreEqual(ResultState.ChildFailure, suiteResult.ResultState);
        Assert.AreEqual(TestStatus.Failed, suiteResult.ResultState.Status);
        Assert.AreEqual(TestResult.CHILD_ERRORS_MESSAGE, suiteResult.Message);
        Assert.That(suiteResult.ResultState.Site, Is.EqualTo(FailureSite.Child));
        Assert.Null(suiteResult.StackTrace, "There should be no stacktrace");
        Assert.AreEqual(2, suiteResult.PassCount);
        Assert.AreEqual(1, suiteResult.FailCount);
        Assert.AreEqual(0, suiteResult.SkipCount);
        Assert.AreEqual(1, suiteResult.InconclusiveCount);
        Assert.AreEqual(6, suiteResult.AssertCount);
    }

    [Test]
    public void SuiteResultXmlNodeIsFailure()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.AreEqual("Failed", suiteNode.Attributes["result"]);
        TNode failureNode = suiteNode.SelectSingleNode("failure");
        Assert.NotNull(failureNode, "No failure element found");
        TNode messageNode = failureNode.SelectSingleNode("message");
        Assert.NotNull(messageNode, "No message element found");
        Assert.AreEqual(TestResult.CHILD_ERRORS_MESSAGE, messageNode.Value);
        // BUG FIX: the element is named "stack-trace"; the previous lookup for
        // "stacktrace" could never match, making this assertion vacuous.
        TNode stacktraceNode = failureNode.SelectSingleNode("stack-trace");
        Assert.Null(stacktraceNode, "There should be no stacktrace");
        Assert.AreEqual("2", suiteNode.Attributes["passed"]);
        Assert.AreEqual("1", suiteNode.Attributes["failed"]);
        Assert.AreEqual("0", suiteNode.Attributes["skipped"]);
        Assert.AreEqual("1", suiteNode.Attributes["inconclusive"]);
        Assert.AreEqual("6", suiteNode.Attributes["asserts"]);
    }

    [Test]
    public void SuiteResultXmlNodeHasFourChildTests()
    {
        TNode suiteNode = suiteResult.ToXml(true);
        Assert.AreEqual(4, suiteNode.SelectNodes("test-case").Count);
    }
}
// Durations reported below TestResult.MIN_DURATION must be clamped up to the
// minimum, for both the test case and its suite.
public class MinimumDurationResultTests : TestResultTests
{
    protected override ResultState ResultState
    {
        get { return ResultState.Success; }
    }

    protected override void SimulateTestRun()
    {
        // Override the duration from the base SetUp with a value just below
        // the minimum the framework is allowed to report.
        expectedDuration = TestResult.MIN_DURATION - 0.0000001d;
        expectedEnd = expectedStart.AddSeconds(expectedDuration);
        testResult.SetResult(ResultState.Success, "Test passed!");
        testResult.StartTime = expectedStart;
        testResult.EndTime = expectedEnd;
        testResult.Duration = expectedDuration;
        testResult.AssertCount = 2;
        suiteResult.StartTime = expectedStart;
        suiteResult.EndTime = expectedEnd;
        suiteResult.Duration = expectedDuration;
        suiteResult.AddResult(testResult);
    }

    [Test]
    public void TestResultHasMinimumDuration()
    {
        Assert.AreEqual(TestResult.MIN_DURATION, testResult.Duration);
        Assert.AreEqual(TestResult.MIN_DURATION, suiteResult.Duration);
    }
}
}
| |
/*
* DocuSign REST API
*
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2
* Contact: devcenter@docusign.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
namespace DocuSign.eSign.Model
{
/// <summary>
/// Contains information about a preview billing plan.
/// </summary>
[DataContract]
public partial class BillingPlanPreview : IEquatable<BillingPlanPreview>, IValidatableObject
{
public BillingPlanPreview()
{
// Empty Constructor
}
/// <summary>
/// Initializes a new instance of the <see cref="BillingPlanPreview" /> class.
/// </summary>
/// <param name="CurrencyCode">Specifies the ISO currency code for the account..</param>
/// <param name="Invoice">Invoice.</param>
/// <param name="IsProrated">.</param>
/// <param name="SubtotalAmount">.</param>
/// <param name="TaxAmount">.</param>
/// <param name="TotalAmount">.</param>
public BillingPlanPreview(string CurrencyCode = default(string), BillingInvoice Invoice = default(BillingInvoice), string IsProrated = default(string), string SubtotalAmount = default(string), string TaxAmount = default(string), string TotalAmount = default(string))
{
this.CurrencyCode = CurrencyCode;
this.Invoice = Invoice;
this.IsProrated = IsProrated;
this.SubtotalAmount = SubtotalAmount;
this.TaxAmount = TaxAmount;
this.TotalAmount = TotalAmount;
}
/// <summary>
/// Specifies the ISO currency code for the account.
/// </summary>
/// <value>Specifies the ISO currency code for the account.</value>
[DataMember(Name="currencyCode", EmitDefaultValue=false)]
public string CurrencyCode { get; set; }
/// <summary>
/// Gets or Sets Invoice
/// </summary>
[DataMember(Name="invoice", EmitDefaultValue=false)]
public BillingInvoice Invoice { get; set; }
/// <summary>
///
/// </summary>
/// <value></value>
[DataMember(Name="isProrated", EmitDefaultValue=false)]
public string IsProrated { get; set; }
/// <summary>
///
/// </summary>
/// <value></value>
[DataMember(Name="subtotalAmount", EmitDefaultValue=false)]
public string SubtotalAmount { get; set; }
/// <summary>
///
/// </summary>
/// <value></value>
[DataMember(Name="taxAmount", EmitDefaultValue=false)]
public string TaxAmount { get; set; }
/// <summary>
///
/// </summary>
/// <value></value>
[DataMember(Name="totalAmount", EmitDefaultValue=false)]
public string TotalAmount { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class BillingPlanPreview {\n");
sb.Append(" CurrencyCode: ").Append(CurrencyCode).Append("\n");
sb.Append(" Invoice: ").Append(Invoice).Append("\n");
sb.Append(" IsProrated: ").Append(IsProrated).Append("\n");
sb.Append(" SubtotalAmount: ").Append(SubtotalAmount).Append("\n");
sb.Append(" TaxAmount: ").Append(TaxAmount).Append("\n");
sb.Append(" TotalAmount: ").Append(TotalAmount).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
// credit: http://stackoverflow.com/a/10454552/677735
return this.Equals(obj as BillingPlanPreview);
}
/// <summary>
/// Returns true if BillingPlanPreview instances are equal, i.e. every
/// serialized member compares equal (null members match null members).
/// </summary>
/// <param name="other">Instance of BillingPlanPreview to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(BillingPlanPreview other)
{
    // A null instance never equals a live one.
    if (other == null)
    {
        return false;
    }

    // object.Equals(a, b) is null-safe and equivalent to the generated
    // "a == b || a != null && a.Equals(b)" pattern for each member.
    return object.Equals(this.CurrencyCode, other.CurrencyCode)
        && object.Equals(this.Invoice, other.Invoice)
        && object.Equals(this.IsProrated, other.IsProrated)
        && object.Equals(this.SubtotalAmount, other.SubtotalAmount)
        && object.Equals(this.TaxAmount, other.TaxAmount)
        && object.Equals(this.TotalAmount, other.TotalAmount);
}
/// <summary>
/// Gets the hash code: prime-seeded accumulation over the non-null members,
/// with integer overflow allowed to wrap.
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
    int hash = 41;
    // Folds one member into the running hash; null members are skipped,
    // exactly as in the generated per-field if-chain this replaces.
    void Accumulate(object component)
    {
        if (component != null)
        {
            unchecked // Overflow is fine, just wrap
            {
                hash = hash * 59 + component.GetHashCode();
            }
        }
    }
    Accumulate(this.CurrencyCode);
    Accumulate(this.Invoice);
    Accumulate(this.IsProrated);
    Accumulate(this.SubtotalAmount);
    Accumulate(this.TaxAmount);
    Accumulate(this.TotalAmount);
    return hash;
}
/// <summary>
/// IValidatableObject hook; this model defines no custom validation rules,
/// so an empty result sequence is returned.
/// </summary>
/// <param name="validationContext">Validation context (unused)</param>
/// <returns>An empty sequence of validation results</returns>
public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
{
    return new ValidationResult[0];
}
}
}
| |
//
// Copyright (C) DataStax Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections.Generic;
using System.Linq;
using Cassandra.Data.Linq;
using Cassandra.IntegrationTests.SimulacronAPI;
using Cassandra.IntegrationTests.TestBase;
using NUnit.Framework;
#pragma warning disable 618
namespace Cassandra.IntegrationTests.Linq.Structures
{
/// <summary>
/// Test entity covering a wide range of CQL-mappable .NET types, plus helpers
/// for creating randomized rows, (re)creating the backing table, and asserting
/// row equality in integration tests.
/// </summary>
[AllowFiltering]
[Table(ManyDataTypesEntity.TableName)]
public class ManyDataTypesEntity
{
    public const string TableName = "ManyDataTypesEntity";
    public const int DefaultListLength = 5;

    [PartitionKey]
    public string StringType { get; set; }
    public Guid GuidType { get; set; }
    public DateTime DateTimeType { get; set; }
    public DateTimeOffset DateTimeOffsetType { get; set; }
    public bool BooleanType { get; set; }
    public Decimal DecimalType { get; set; }
    public double DoubleType { get; set; }
    public float FloatType { get; set; }
    public int? NullableIntType { get; set; }
    public int IntType { get; set; }
    public Int64 Int64Type { get; set; }
    //public TimeUuid TimeUuidType { get; set; }
    //public TimeUuid? NullableTimeUuidType { get; set; }
    public Dictionary<string, long> DictionaryStringLongType { get; set; }
    public Dictionary<string, string> DictionaryStringStringType { get; set; }
    public List<Guid> ListOfGuidsType { get; set; }
    public List<string> ListOfStringsType { get; set; }

    /// <summary>
    /// Builds an instance with randomized values for the string/collection
    /// members and fixed sample values for the numeric members.
    /// NullableIntType is deliberately left null.
    /// </summary>
    public static ManyDataTypesEntity GetRandomInstance()
    {
        Dictionary<string, long> dictionaryStringLong = new Dictionary<string, long>() { { "key_" + Randomm.RandomAlphaNum(10), (long)1234321 } };
        Dictionary<string, string> dictionaryStringString = new Dictionary<string, string>() { { "key_" + Randomm.RandomAlphaNum(10), "value_" + Randomm.RandomAlphaNum(10) } };
        List<Guid> listOfGuidsType = new List<Guid>() { Guid.NewGuid(), Guid.NewGuid() };
        List<string> listOfStringsType = new List<string>() { Randomm.RandomAlphaNum(20), Randomm.RandomAlphaNum(12), "" };
        ManyDataTypesEntity randomRow = new ManyDataTypesEntity
        {
            StringType = "StringType_val_" + Randomm.RandomAlphaNum(10),
            GuidType = Guid.NewGuid(),
            DateTimeType = DateTime.Now.ToUniversalTime(),
            DateTimeOffsetType = new DateTimeOffset(),
            BooleanType = false,
            DecimalType = (decimal)98765432.0,
            DoubleType = (double)9876543,
            FloatType = (float)987654,
            NullableIntType = null,
            IntType = 98765,
            Int64Type = (Int64)9876,
            //TimeUuidType = TimeUuid.NewId(),
            //NullableTimeUuidType = null,
            DictionaryStringLongType = dictionaryStringLong,
            DictionaryStringStringType = dictionaryStringString,
            ListOfGuidsType = listOfGuidsType,
            ListOfStringsType = listOfStringsType,
        };
        return randomRow;
    }

    /// <summary>
    /// Asserts that the mapped properties of <paramref name="actualRow"/> match this row.
    /// NOTE(review): NullableIntType is not compared here - confirm whether that
    /// omission is intentional before tightening it, since ListContains relies on
    /// this method throwing only for genuine mismatches.
    /// </summary>
    public void AssertEquals(ManyDataTypesEntity actualRow)
    {
        Assert.AreEqual(StringType, actualRow.StringType);
        Assert.AreEqual(GuidType, actualRow.GuidType);
        Assert.AreEqual(DateTimeType.ToString(), actualRow.DateTimeType.ToString()); // 'ToString' rounds to the nearest second
        Assert.AreEqual(DateTimeOffsetType.ToString(), actualRow.DateTimeOffsetType.ToString());
        Assert.AreEqual(BooleanType, actualRow.BooleanType);
        Assert.AreEqual(DecimalType, actualRow.DecimalType);
        Assert.AreEqual(DoubleType, actualRow.DoubleType);
        Assert.AreEqual(FloatType, actualRow.FloatType);
        Assert.AreEqual(IntType, actualRow.IntType);
        Assert.AreEqual(Int64Type, actualRow.Int64Type);
        //Assert.AreEqual(TimeUuidType, actualRow.TimeUuidType);
        //Assert.AreEqual(NullableTimeUuidType, actualRow.NullableTimeUuidType);
        Assert.AreEqual(DictionaryStringLongType, actualRow.DictionaryStringLongType);
        Assert.AreEqual(DictionaryStringStringType, actualRow.DictionaryStringStringType);
        Assert.AreEqual(ListOfGuidsType, actualRow.ListOfGuidsType);
        Assert.AreEqual(ListOfStringsType, actualRow.ListOfStringsType);
    }

    /// <summary>
    /// Returns a list of <see cref="DefaultListLength"/> randomized rows.
    /// </summary>
    public static List<ManyDataTypesEntity> GetDefaultAllDataTypesList()
    {
        List<ManyDataTypesEntity> movieList = new List<ManyDataTypesEntity>();
        for (int i = 0; i < DefaultListLength; i++)
        {
            movieList.Add(GetRandomInstance());
        }
        return movieList;
    }

    /// <summary>
    /// Creates the table for this entity and inserts a default set of random
    /// rows; returns the rows that were inserted.
    /// </summary>
    public static List<ManyDataTypesEntity> SetupDefaultTable(ISession session)
    {
        // drop table if exists, re-create
        var table = session.GetTable<ManyDataTypesEntity>();
        table.Create();
        List<ManyDataTypesEntity> allDataTypesRandomList = GetDefaultAllDataTypesList();
        //Insert some data
        foreach (var allDataTypesEntity in allDataTypesRandomList)
            table.Insert(allDataTypesEntity).Execute();
        return allDataTypesRandomList;
    }

    /// <summary>
    /// Returns true when <paramref name="actualEntity"/> matches (per AssertEquals)
    /// any entity in <paramref name="expectedEntities"/>. Uses the assertion
    /// machinery for comparison, swallowing AssertionException on mismatch.
    /// </summary>
    public static bool ListContains(List<ManyDataTypesEntity> expectedEntities, ManyDataTypesEntity actualEntity)
    {
        foreach (var expectedEntity in expectedEntities)
        {
            try
            {
                expectedEntity.AssertEquals(actualEntity);
                return true;
            }
            catch (AssertionException) { }
        }
        return false;
    }

    /// <summary>
    /// Asserts that <paramref name="actualEntity"/> is present in the expected list.
    /// </summary>
    public static void AssertListContains(List<ManyDataTypesEntity> expectedEntities, ManyDataTypesEntity actualEntity)
    {
        Assert.IsTrue(ListContains(expectedEntities, actualEntity));
    }

    /// <summary>
    /// Asserts both lists have the same length and that every expected entity
    /// appears in the actual list (order-insensitive).
    /// </summary>
    public static void AssertListEqualsList(List<ManyDataTypesEntity> expectedEntities, List<ManyDataTypesEntity> actualEntities)
    {
        Assert.AreEqual(expectedEntities.Count, actualEntities.Count);
        foreach (var expectedEntity in expectedEntities)
            Assert.IsTrue(ListContains(actualEntities, expectedEntity));
    }

    // Column name -> accessor for reading that column's value off an instance.
    private static readonly IDictionary<string, Func<ManyDataTypesEntity, object>> ColumnMappings =
        new Dictionary<string, Func<ManyDataTypesEntity, object>>
        {
            { "BooleanType", entity => entity.BooleanType },
            { "DateTimeOffsetType", entity => entity.DateTimeOffsetType },
            { "DateTimeType", entity => entity.DateTimeType },
            { "DecimalType", entity => entity.DecimalType },
            { "DictionaryStringLongType", entity => entity.DictionaryStringLongType },
            { "DictionaryStringStringType", entity => entity.DictionaryStringStringType },
            { "DoubleType", entity => entity.DoubleType },
            { "FloatType", entity => entity.FloatType },
            { "GuidType", entity => entity.GuidType },
            { "Int64Type", entity => entity.Int64Type },
            { "IntType", entity => entity.IntType },
            { "ListOfGuidsType", entity => entity.ListOfGuidsType },
            { "ListOfStringsType", entity => entity.ListOfStringsType },
            { "NullableIntType", entity => entity.NullableIntType },
            { "StringType", entity => entity.StringType }
        };

    // Column name -> simulacron DataType for that column.
    private static readonly IDictionary<string, DataType> ColumnsToTypes =
        new Dictionary<string, DataType>
        {
            { "BooleanType", DataType.GetDataType(typeof(bool)) },
            { "DateTimeOffsetType", DataType.GetDataType(typeof(DateTimeOffset)) },
            { "DateTimeType", DataType.GetDataType(typeof(DateTime)) },
            { "DecimalType", DataType.GetDataType(typeof(decimal)) },
            { "DictionaryStringLongType", DataType.GetDataType(typeof(Dictionary<string, long>)) },
            { "DictionaryStringStringType", DataType.GetDataType(typeof(Dictionary<string, string>)) },
            { "DoubleType", DataType.GetDataType(typeof(double)) },
            { "FloatType", DataType.GetDataType(typeof(float)) },
            { "GuidType", DataType.GetDataType(typeof(Guid)) },
            { "Int64Type", DataType.GetDataType(typeof(long)) },
            { "IntType", DataType.GetDataType(typeof(int)) },
            { "ListOfGuidsType", DataType.GetDataType(typeof(List<Guid>)) },
            { "ListOfStringsType", DataType.GetDataType(typeof(List<string>)) },
            { "NullableIntType", DataType.GetDataType(typeof(int?)) },
            { "StringType", DataType.GetDataType(typeof(string)) }
        };

    /// <summary>
    /// Returns the mapped column names.
    /// </summary>
    public static string[] GetColumns()
    {
        return ManyDataTypesEntity.ColumnMappings.Keys.ToArray();
    }

    /// <summary>
    /// Returns this instance's values in the same order as <see cref="GetColumns"/>.
    /// </summary>
    public object[] GetColumnValues()
    {
        return ManyDataTypesEntity.ColumnMappings.Values.Select(c => c.Invoke(this)).ToArray();
    }

    /// <summary>
    /// Returns (column name, DataType) pairs for every mapped column.
    /// </summary>
    public static (string, DataType)[] GetColumnsWithTypes()
    {
        // Fix: the previous implementation zipped ColumnMappings.Keys against
        // ColumnsToTypes and relied on both dictionaries enumerating in the same
        // order. Dictionary enumeration order is unspecified; look each type up
        // by key instead so the pairing is correct by construction.
        return ManyDataTypesEntity.ColumnMappings.Keys
            .Select(key => (key, ManyDataTypesEntity.ColumnsToTypes[key]))
            .ToArray();
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Reflection.Internal;
using System.Text;
using TestUtilities;
using Xunit;
namespace System.Reflection.Metadata.Tests
{
// Exercises BlobReader and the internal MemoryBlock it wraps: constructor
// argument validation, bounds-checked primitive reads, UTF8/UTF16 decoding,
// and reference/compressed-integer peeking.
//
// Fix applied throughout: several Assert.Equal calls passed arguments in
// (actual, expected) order; xUnit's contract is Assert.Equal(expected, actual)
// and the reversed order produces misleading failure messages. No assertion
// values or ordering were changed.
public class BlobReaderTests
{
    [Fact]
    public unsafe void PublicBlobReaderCtorValidatesArgs()
    {
        byte* bufferPtrForLambda;
        byte[] buffer = new byte[4] { 0, 1, 0, 2 };
        fixed (byte* bufferPtr = buffer)
        {
            // Copy the pointer out of the fixed local: a lambda cannot capture
            // a fixed-statement variable directly.
            bufferPtrForLambda = bufferPtr;
            Assert.Throws<ArgumentOutOfRangeException>(() => new BlobReader(bufferPtrForLambda, -1));
        }
        Assert.Throws<ArgumentNullException>(() => new BlobReader(null, 1));
        Assert.Equal(0, new BlobReader(null, 0).Length); // this is valid
        Assert.Throws<BadImageFormatException>(() => new BlobReader(null, 0).ReadByte()); // but can't read anything non-empty from it...
        Assert.Same(String.Empty, new BlobReader(null, 0).ReadUtf8NullTerminated()); // can read empty string.
    }

    [Fact]
    public unsafe void ReadFromMemoryReader()
    {
        // Each failed read must leave Offset unchanged; each successful read
        // advances it. The sequence below is order-dependent on reader state.
        byte[] buffer = new byte[4] { 0, 1, 0, 2 };
        fixed (byte* bufferPtr = buffer)
        {
            var reader = new BlobReader(new MemoryBlock(bufferPtr, buffer.Length));
            Assert.False(reader.SeekOffset(-1));
            Assert.False(reader.SeekOffset(Int32.MaxValue));
            Assert.False(reader.SeekOffset(Int32.MinValue));
            Assert.False(reader.SeekOffset(buffer.Length));
            Assert.True(reader.SeekOffset(buffer.Length - 1));
            Assert.True(reader.SeekOffset(0));
            Assert.Equal(0, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadUInt64());
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(1));
            Assert.Equal(1, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadDouble());
            Assert.Equal(1, reader.Offset);
            Assert.True(reader.SeekOffset(2));
            Assert.Equal(2, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadUInt32());
            Assert.Equal((ushort)0x0200, reader.ReadUInt16());
            Assert.Equal(4, reader.Offset);
            Assert.True(reader.SeekOffset(2));
            Assert.Equal(2, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadSingle());
            Assert.Equal(2, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Equal(9.404242E-38F, reader.ReadSingle());
            Assert.Equal(4, reader.Offset);
            Assert.True(reader.SeekOffset(3));
            Assert.Equal(3, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadUInt16());
            Assert.Equal((byte)0x02, reader.ReadByte());
            Assert.Equal(4, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Equal("\u0000\u0001\u0000\u0002", reader.ReadUTF8(4));
            Assert.Equal(4, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.ReadUTF8(5));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.ReadUTF8(-1));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Equal("\u0100\u0200", reader.ReadUTF16(4));
            Assert.Equal(4, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.ReadUTF16(5));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.ReadUTF16(-1));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.ReadUTF16(6));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            AssertEx.Equal(buffer, reader.ReadBytes(4));
            Assert.Equal(4, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Same(String.Empty, reader.ReadUtf8NullTerminated());
            Assert.Equal(1, reader.Offset);
            Assert.True(reader.SeekOffset(1));
            Assert.Equal("\u0001", reader.ReadUtf8NullTerminated());
            Assert.Equal(3, reader.Offset);
            Assert.True(reader.SeekOffset(3));
            Assert.Equal("\u0002", reader.ReadUtf8NullTerminated());
            Assert.Equal(4, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Same(String.Empty, reader.ReadUtf8NullTerminated());
            Assert.Equal(1, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.ReadBytes(5));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.ReadBytes(Int32.MinValue));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.GetMemoryBlockAt(-1, 1));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Throws<BadImageFormatException>(() => reader.GetMemoryBlockAt(1, -1));
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(0));
            Assert.Equal(3, reader.GetMemoryBlockAt(1, 3).Length);
            Assert.Equal(0, reader.Offset);
            Assert.True(reader.SeekOffset(3));
            reader.ReadByte();
            Assert.Equal(4, reader.Offset);
            Assert.Equal(4, reader.Offset);
            Assert.Equal(0, reader.ReadBytes(0).Length);
            Assert.Equal(4, reader.Offset);
            // At the end of the blob: Try-style reads report failure, lenient
            // readers return sentinel values, and strict readers throw.
            int value;
            Assert.False(reader.TryReadCompressedInteger(out value));
            Assert.Equal(BlobReader.InvalidCompressedInteger, value);
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadCompressedInteger());
            Assert.Equal(4, reader.Offset);
            Assert.Equal(SerializationTypeCode.Invalid, reader.ReadSerializationTypeCode());
            Assert.Equal(4, reader.Offset);
            Assert.Equal(SignatureTypeCode.Invalid, reader.ReadSignatureTypeCode());
            Assert.Equal(4, reader.Offset);
            Assert.Equal(default(EntityHandle), reader.ReadTypeHandle());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadBoolean());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadByte());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadSByte());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadUInt32());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadInt32());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadUInt64());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadInt64());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadSingle());
            Assert.Equal(4, reader.Offset);
            Assert.Throws<BadImageFormatException>(() => reader.ReadDouble());
            Assert.Equal(4, reader.Offset);
        }

        byte[] buffer2 = new byte[8] { 1, 2, 3, 4, 5, 6, 7, 8 };
        fixed (byte* bufferPtr2 = buffer2)
        {
            var reader = new BlobReader(new MemoryBlock(bufferPtr2, buffer2.Length));
            Assert.Equal(0, reader.Offset); // fixed arg order: (expected, actual)
            Assert.Equal(0x0807060504030201UL, reader.ReadUInt64());
            Assert.Equal(8, reader.Offset);
            reader.Reset();
            Assert.Equal(0, reader.Offset);
            Assert.Equal(0x0807060504030201L, reader.ReadInt64());
            reader.Reset();
            Assert.Equal(0, reader.Offset);
            Assert.Equal(BitConverter.ToDouble(buffer2, 0), reader.ReadDouble());
        }
    }

    [Fact]
    public unsafe void ValidatePeekReferenceSize()
    {
        byte[] buffer = new byte[8] { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01 };
        fixed (byte* bufferPtr = buffer)
        {
            var block = new MemoryBlock(bufferPtr, buffer.Length);
            // small ref size always fits in 16 bits
            Assert.Equal(0xFFFF, block.PeekReference(0, smallRefSize: true));
            Assert.Equal(0xFFFF, block.PeekReference(4, smallRefSize: true));
            Assert.Equal(0xFFFFU, block.PeekTaggedReference(0, smallRefSize: true));
            Assert.Equal(0xFFFFU, block.PeekTaggedReference(4, smallRefSize: true));
            Assert.Equal(0x01FFU, block.PeekTaggedReference(6, smallRefSize: true));
            // large ref size throws on > RIDMask when tagged variant is not used.
            AssertEx.Throws<BadImageFormatException>(() => block.PeekReference(0, smallRefSize: false), MetadataResources.RowIdOrHeapOffsetTooLarge);
            AssertEx.Throws<BadImageFormatException>(() => block.PeekReference(4, smallRefSize: false), MetadataResources.RowIdOrHeapOffsetTooLarge);
            // large ref size does not throw when Tagged variant is used.
            Assert.Equal(0xFFFFFFFFU, block.PeekTaggedReference(0, smallRefSize: false));
            Assert.Equal(0x01FFFFFFU, block.PeekTaggedReference(4, smallRefSize: false));
            // bounds check applies in all cases
            AssertEx.Throws<BadImageFormatException>(() => block.PeekReference(7, smallRefSize: true), MetadataResources.OutOfBoundsRead);
            AssertEx.Throws<BadImageFormatException>(() => block.PeekReference(5, smallRefSize: false), MetadataResources.OutOfBoundsRead);
        }
    }

    [Fact]
    public unsafe void ReadFromMemoryBlock()
    {
        byte[] buffer = new byte[4] { 0, 1, 0, 2 };
        fixed (byte* bufferPtr = buffer)
        {
            var block = new MemoryBlock(bufferPtr, buffer.Length);
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt32(Int32.MaxValue));
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt32(-1));
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt32(Int32.MinValue));
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt32(4));
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt32(1));
            Assert.Equal(0x02000100U, block.PeekUInt32(0));
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt16(Int32.MaxValue));
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt16(-1));
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt16(Int32.MinValue));
            Assert.Throws<BadImageFormatException>(() => block.PeekUInt16(4));
            Assert.Equal(0x0200, block.PeekUInt16(2));
            int bytesRead;
            MetadataStringDecoder stringDecoder = MetadataStringDecoder.DefaultUTF8;
            Assert.Throws<BadImageFormatException>(() => block.PeekUtf8NullTerminated(Int32.MaxValue, null, stringDecoder, out bytesRead));
            Assert.Throws<BadImageFormatException>(() => block.PeekUtf8NullTerminated(-1, null, stringDecoder, out bytesRead));
            Assert.Throws<BadImageFormatException>(() => block.PeekUtf8NullTerminated(Int32.MinValue, null, stringDecoder, out bytesRead));
            Assert.Throws<BadImageFormatException>(() => block.PeekUtf8NullTerminated(5, null, stringDecoder, out bytesRead));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(-1, 1));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(1, -1));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(0, -1));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(-1, 0));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(-Int32.MaxValue, Int32.MaxValue));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(Int32.MaxValue, -Int32.MaxValue));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(Int32.MaxValue, Int32.MaxValue));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(block.Length, -1));
            Assert.Throws<BadImageFormatException>(() => block.GetMemoryBlockAt(-1, block.Length));
            Assert.Equal("\u0001", block.PeekUtf8NullTerminated(1, null, stringDecoder, out bytesRead));
            Assert.Equal(2, bytesRead); // fixed arg order: (expected, actual)
            Assert.Equal("\u0002", block.PeekUtf8NullTerminated(3, null, stringDecoder, out bytesRead));
            Assert.Equal(1, bytesRead);
            Assert.Equal("", block.PeekUtf8NullTerminated(4, null, stringDecoder, out bytesRead));
            Assert.Equal(0, bytesRead);
            byte[] helloPrefix = Encoding.UTF8.GetBytes("Hello");
            Assert.Equal("Hello\u0001", block.PeekUtf8NullTerminated(1, helloPrefix, stringDecoder, out bytesRead));
            Assert.Equal(2, bytesRead);
            Assert.Equal("Hello\u0002", block.PeekUtf8NullTerminated(3, helloPrefix, stringDecoder, out bytesRead));
            Assert.Equal(1, bytesRead);
            Assert.Equal("Hello", block.PeekUtf8NullTerminated(4, helloPrefix, stringDecoder, out bytesRead));
            Assert.Equal(0, bytesRead);
        }
    }
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Management.ExpressRoute;
using Microsoft.WindowsAzure.Management.ExpressRoute.Models;
namespace Microsoft.WindowsAzure
{
/// <summary>
/// The Express Route API provides programmatic access to the functionality
/// needed by the customer to set up Dedicated Circuits and Dedicated
/// Circuit Links. The Express Route Customer API is a REST API. All API
/// operations are performed over SSL and mutually authenticated using
/// X.509 v3 certificates. (see
/// http://msdn.microsoft.com/en-us/library/windowsazure/ee460799.aspx for
/// more information)
/// </summary>
public static partial class CrossConnectionOperationsExtensions
{
/// <summary>
/// The New Cross Connection operation provisions a cross connection
/// for as dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. Service key of the dedicated circuit.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static ExpressRouteOperationResponse BeginNew(this ICrossConnectionOperations operations, string serviceKey)
{
return Task.Factory.StartNew((object s) =>
{
return ((ICrossConnectionOperations)s).BeginNewAsync(serviceKey);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The New Cross Connection operation provisions a cross connection
/// for as dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. Service key of the dedicated circuit.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<ExpressRouteOperationResponse> BeginNewAsync(this ICrossConnectionOperations operations, string serviceKey)
{
return operations.BeginNewAsync(serviceKey, CancellationToken.None);
}
/// <summary>
/// The Update Cross Connection operation updates an existing cross
/// connection.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. The service key representing the relationship between
/// Azure and the customer.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Update CrossConnection
/// operation.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static ExpressRouteOperationResponse BeginUpdate(this ICrossConnectionOperations operations, string serviceKey, CrossConnectionUpdateParameters parameters)
{
return Task.Factory.StartNew((object s) =>
{
return ((ICrossConnectionOperations)s).BeginUpdateAsync(serviceKey, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The Update Cross Connection operation updates an existing cross
/// connection.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. The service key representing the relationship between
/// Azure and the customer.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Update CrossConnection
/// operation.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<ExpressRouteOperationResponse> BeginUpdateAsync(this ICrossConnectionOperations operations, string serviceKey, CrossConnectionUpdateParameters parameters)
{
return operations.BeginUpdateAsync(serviceKey, parameters, CancellationToken.None);
}
/// <summary>
/// The Get Cross Connection operation retrieves the Cross Connection
/// information for the Dedicated Circuit with the specified service
/// key.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. The servicee key representing the dedicated circuit.
/// </param>
/// <returns>
/// The Get Cross Connection Operation Response.
/// </returns>
public static CrossConnectionGetResponse Get(this ICrossConnectionOperations operations, string serviceKey)
{
return Task.Factory.StartNew((object s) =>
{
return ((ICrossConnectionOperations)s).GetAsync(serviceKey);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The Get Cross Connection operation retrieves the Cross Connection
/// information for the Dedicated Circuit with the specified service
/// key.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. The servicee key representing the dedicated circuit.
/// </param>
/// <returns>
/// The Get Cross Connection Operation Response.
/// </returns>
public static Task<CrossConnectionGetResponse> GetAsync(this ICrossConnectionOperations operations, string serviceKey)
{
return operations.GetAsync(serviceKey, CancellationToken.None);
}
/// <summary>
/// The List Cross Connection operation retrieves a list of cross
/// connections owned by the provider.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <returns>
/// The List Cross Connection operation response.
/// </returns>
public static CrossConnectionListResponse List(this ICrossConnectionOperations operations)
{
return Task.Factory.StartNew((object s) =>
{
return ((ICrossConnectionOperations)s).ListAsync();
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The List Cross Connection operation retrieves a list of cross
/// connections owned by the provider.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <returns>
/// The List Cross Connection operation response.
/// </returns>
public static Task<CrossConnectionListResponse> ListAsync(this ICrossConnectionOperations operations)
{
return operations.ListAsync(CancellationToken.None);
}
/// <summary>
/// The New Cross Connection operation provisions a cross connection
/// for the given azure circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. Service key of the dedicated circuit.
/// </param>
/// <returns>
/// The Get Cross Connection Operation Response.
/// </returns>
public static CrossConnectionGetResponse New(this ICrossConnectionOperations operations, string serviceKey)
{
return Task.Factory.StartNew((object s) =>
{
return ((ICrossConnectionOperations)s).NewAsync(serviceKey);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The New Cross Connection operation provisions a cross connection
/// for the given azure circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. Service key of the dedicated circuit.
/// </param>
/// <returns>
/// The Get Cross Connection Operation Response.
/// </returns>
public static Task<CrossConnectionGetResponse> NewAsync(this ICrossConnectionOperations operations, string serviceKey)
{
return operations.NewAsync(serviceKey, CancellationToken.None);
}
/// <summary>
/// The Update Cross Connection operation updates an existing cross
/// connection.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. The service key representing the relationship between
/// Azure and the customer.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Update Bgp Peering operation.
/// </param>
/// <returns>
/// The Get Cross Connection Operation Response.
/// </returns>
public static CrossConnectionGetResponse Update(this ICrossConnectionOperations operations, string serviceKey, CrossConnectionUpdateParameters parameters)
{
return Task.Factory.StartNew((object s) =>
{
return ((ICrossConnectionOperations)s).UpdateAsync(serviceKey, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The Update Cross Connection operation updates an existing cross
/// connection.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.ICrossConnectionOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. The service key representing the relationship between
/// Azure and the customer.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Update Bgp Peering operation.
/// </param>
/// <returns>
/// The Get Cross Connection Operation Response.
/// </returns>
public static Task<CrossConnectionGetResponse> UpdateAsync(this ICrossConnectionOperations operations, string serviceKey, CrossConnectionUpdateParameters parameters)
{
    // Convenience overload: forward to the cancellable variant with no
    // cancellation requested.
    Task<CrossConnectionGetResponse> pending = operations.UpdateAsync(serviceKey, parameters, CancellationToken.None);
    return pending;
}
}
}
| |
/* New BSD License
-------------------------------------------------------------------------------
Copyright (c) 2006-2012, EntitySpaces, LLC
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the EntitySpaces, LLC nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL EntitySpaces, LLC BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-------------------------------------------------------------------------------
*/
using System;
using System.Data;
using System.Data.OleDb;
using Tiraggo.DynamicQuery;
using Tiraggo.Interfaces;
using System.Threading;
using System.Diagnostics;
using System.Collections.Generic;
namespace Tiraggo.MSAccessProvider
{
public class DataProvider : IDataProvider
{
public DataProvider()
{
}
#region esTraceArguments
// One profiling packet: captures the command text, parameters, thread/context
// information and timing for a single traced execution, and reports itself to
// the static trace handler when disposed.
private sealed class esTraceArguments : Tiraggo.Interfaces.ITraceArguments, IDisposable
{
    // Process-wide, thread-safe sequence number so subscribers can order packets.
    static private long packetOrder = 0;

    // Snapshot of a single command parameter (name/direction/type plus its
    // before/after values) for trace output.
    private sealed class esTraceParameter : ITraceParameter
    {
        public string Name { get; set; }
        public string Direction { get; set; }
        public string ParamType { get; set; }
        public string BeforeValue { get; set; }
        public string AfterValue { get; set; }
    }

    public esTraceArguments()
    {
    }

    // Trace entry for a save operation. Besides the command itself, the save
    // packet is consulted so that parameters whose Value is still null/DBNull
    // can be displayed with the entity value they are bound to via SourceColumn.
    public esTraceArguments(tgDataRequest request, IDbCommand cmd, tgEntitySavePacket packet, string action, string callStack)
    {
        PacketOrder = Interlocked.Increment(ref esTraceArguments.packetOrder);

        this.command = cmd;

        TraceChannel = DataProvider.sTraceChannel;
        Syntax = "ACCESS";
        Request = request;
        ThreadId = Thread.CurrentThread.ManagedThreadId;
        Action = action;
        CallStack = callStack;
        SqlCommand = cmd;
        ApplicationName = System.IO.Path.GetFileName(System.Reflection.Assembly.GetExecutingAssembly().Location);

        IDataParameterCollection parameters = cmd.Parameters;

        if (parameters.Count > 0)
        {
            Parameters = new List<ITraceParameter>(parameters.Count);

            for (int i = 0; i < parameters.Count; i++)
            {
                // NOTE(review): assumes every parameter is an OleDbParameter; the
                // 'as' cast would yield null (and throw just below) otherwise.
                OleDbParameter param = parameters[i] as OleDbParameter;

                esTraceParameter p = new esTraceParameter()
                {
                    Name = param.ParameterName,
                    Direction = param.Direction.ToString(),
                    ParamType = param.OleDbType.ToString().ToUpper(),
                    BeforeValue = param.Value != null && param.Value != DBNull.Value ? Convert.ToString(param.Value) : "null"
                };

                try
                {
                    // Let's make it look like we're using parameters for the profiler:
                    // when the parameter carries no value, show the packet's value
                    // for the bound source column instead.
                    if (param.Value == null || param.Value == DBNull.Value)
                    {
                        if (param.SourceVersion == DataRowVersion.Current)
                        {
                            object o = packet.CurrentValues[param.SourceColumn];
                            if (o != null && o != DBNull.Value)
                            {
                                p.BeforeValue = Convert.ToString(o);
                            }
                        }
                        else if (param.SourceVersion == DataRowVersion.Original)
                        {
                            object o = packet.OriginalValues[param.SourceColumn];
                            if (o != null && o != DBNull.Value)
                            {
                                p.BeforeValue = Convert.ToString(o);
                            }
                        }
                    }
                }
                // Best effort only — value lookup must never break tracing.
                catch { }

                this.Parameters.Add(p);
            }
        }

        // Timing starts at construction and stops in Dispose.
        stopwatch = Stopwatch.StartNew();
    }

    // Trace entry for a plain (non-save) command: same capture as above, but
    // without the save-packet fallback for parameter values.
    public esTraceArguments(tgDataRequest request, IDbCommand cmd, string action, string callStack)
    {
        PacketOrder = Interlocked.Increment(ref esTraceArguments.packetOrder);

        this.command = cmd;

        TraceChannel = DataProvider.sTraceChannel;
        Syntax = "ACCESS";
        Request = request;
        ThreadId = Thread.CurrentThread.ManagedThreadId;
        Action = action;
        CallStack = callStack;
        SqlCommand = cmd;
        ApplicationName = System.IO.Path.GetFileName(System.Reflection.Assembly.GetExecutingAssembly().Location);

        IDataParameterCollection parameters = cmd.Parameters;

        if (parameters.Count > 0)
        {
            Parameters = new List<ITraceParameter>(parameters.Count);

            for (int i = 0; i < parameters.Count; i++)
            {
                // NOTE(review): same OleDbParameter assumption as the save-packet
                // constructor above.
                OleDbParameter param = parameters[i] as OleDbParameter;

                esTraceParameter p = new esTraceParameter()
                {
                    Name = param.ParameterName,
                    Direction = param.Direction.ToString(),
                    ParamType = param.OleDbType.ToString().ToUpper(),
                    BeforeValue = param.Value != null && param.Value != DBNull.Value ? Convert.ToString(param.Value) : "null"
                };

                this.Parameters.Add(p);
            }
        }

        stopwatch = Stopwatch.StartNew();
    }

    // Temporary variable: the live command, kept so Dispose can read output
    // parameter values after execution has completed.
    private IDbCommand command;

    public long PacketOrder { get; set; }
    public string Syntax { get; set; }
    public tgDataRequest Request { get; set; }
    public int ThreadId { get; set; }
    public string Action { get; set; }
    public string CallStack { get; set; }
    public IDbCommand SqlCommand { get; set; }
    public string ApplicationName { get; set; }
    public string TraceChannel { get; set; }
    public long Duration { get; set; }
    public long Ticks { get; set; }
    public string Exception { get; set; }
    public List<ITraceParameter> Parameters { get; set; }

    private Stopwatch stopwatch;

    // Stops timing, gathers output parameter values, and hands the completed
    // packet to the subscribed trace handler(s).
    void IDisposable.Dispose()
    {
        stopwatch.Stop();
        Duration = stopwatch.ElapsedMilliseconds;
        Ticks = stopwatch.ElapsedTicks;

        // Gather Output Parameters
        if (this.Parameters != null && this.Parameters.Count > 0)
        {
            IDataParameterCollection parameters = command.Parameters;

            for (int i = 0; i < this.Parameters.Count; i++)
            {
                ITraceParameter esParam = this.Parameters[i];
                IDbDataParameter param = parameters[esParam.Name] as IDbDataParameter;

                if (param.Direction == ParameterDirection.InputOutput || param.Direction == ParameterDirection.Output)
                {
                    esParam.AfterValue = param.Value != null ? Convert.ToString(param.Value) : "null";
                }
            }
        }

        // NOTE(review): assumes a handler is still subscribed. Instances are only
        // created at call sites that first check sTraceHandler != null, but an
        // unsubscribe between construction and dispose would make this throw.
        DataProvider.sTraceHandler(this);
    }
}
#endregion
#region Profiling Logic
/// <summary>
/// The EventHandler used to decouple the profiling code from the core
/// assemblies. Subscribers receive one trace packet per executed command.
/// </summary>
event TraceEventHandler IDataProvider.TraceHandler
{
    add { DataProvider.sTraceHandler += value; }
    remove { DataProvider.sTraceHandler -= value; }
}

// Backing multicast delegate shared by every DataProvider instance.
static private event TraceEventHandler sTraceHandler;

/// <summary>
/// Returns true if this Provider is currently being profiled.
/// </summary>
bool IDataProvider.IsTracing
{
    get
    {
        // A non-null handler means at least one profiler is subscribed.
        return sTraceHandler != null;
    }
}

/// <summary>
/// Used to set the Channel this provider is to use during Profiling.
/// </summary>
string IDataProvider.TraceChannel
{
    get { return DataProvider.sTraceChannel; }
    set { DataProvider.sTraceChannel = value; }
}

static private string sTraceChannel = "Channel1";
#endregion
/// <summary>
/// This method acts as a delegate for tgTransactionScope
/// </summary>
/// <returns></returns>
static private IDbConnection CreateIDbConnectionDelegate()
{
    // Factory handed to tgTransactionScope whenever it needs a fresh connection.
    OleDbConnection connection = new OleDbConnection();
    return connection;
}
// Closes the command's connection if one was opened; safe to call with a null
// or connection-less command.
static private void CleanupCommand(OleDbCommand cmd)
{
    if (cmd == null || cmd.Connection == null) return;

    if (cmd.Connection.State == ConnectionState.Open)
    {
        cmd.Connection.Close();
    }
}
#region IDataProvider Members
// Dispatches a load request to the appropriate loader based on the query type.
// Any failure is captured on the response rather than thrown.
tgDataResponse IDataProvider.esLoadDataTable(tgDataRequest request)
{
    tgDataResponse response = new tgDataResponse();

    try
    {
        if (request.QueryType == tgQueryType.StoredProcedure)
        {
            response.Table = LoadDataTableFromStoredProcedure(request);
        }
        else if (request.QueryType == tgQueryType.Text)
        {
            response.Table = LoadDataTableFromText(request);
        }
        else if (request.QueryType == tgQueryType.DynamicQuery)
        {
            response = new tgDataResponse();
            OleDbCommand dynamicCommand = QueryBuilder.PrepareCommand(request);
            LoadDataTableFromDynamicQuery(request, response, dynamicCommand);
        }
        else if (request.QueryType == tgQueryType.DynamicQueryParseOnly)
        {
            // Parse-only: surface the generated SQL without executing it.
            response = new tgDataResponse();
            OleDbCommand parseCommand = QueryBuilder.PrepareCommand(request);
            response.LastQuery = parseCommand.CommandText;
        }
        else if (request.QueryType == tgQueryType.ManyToMany)
        {
            response.Table = LoadManyToMany(request);
        }
    }
    catch (Exception ex)
    {
        response.Exception = ex;
    }

    return response;
}
// Persists a save request, routing to stored-procedure or dynamic SQL
// persistence, and translates provider concurrency failures into the
// framework's typed exceptions.
tgDataResponse IDataProvider.esSaveDataTable(tgDataRequest request)
{
    tgDataResponse response = new tgDataResponse();

    try
    {
        if (request.SqlAccessType == tgSqlAccessType.StoredProcedure)
        {
            // Stored procedure persistence (the helpers below throw
            // NotImplementedException for this provider).
            if (request.CollectionSavePacket != null)
            {
                SaveStoredProcCollection(request);
            }
            else
            {
                SaveStoredProcEntity(request);
            }
        }
        else if (request.EntitySavePacket.CurrentValues == null)
        {
            // No single-entity payload present: persist the whole collection.
            SaveDynamicCollection(request);
        }
        else
        {
            SaveDynamicEntity(request);
        }
    }
    catch (OleDbException ex)
    {
        // Prefer the framework's concurrency exception when the OLE DB error
        // is recognized as one.
        tgException es = Shared.CheckForConcurrencyException(ex);
        if (es != null)
        {
            response.Exception = es;
        }
        else
        {
            response.Exception = ex;
        }
    }
    catch (DBConcurrencyException dbex)
    {
        response.Exception = new tgConcurrencyException("Error in MSAccessProvider.esSaveDataTable", dbex);
    }

    response.Table = request.Table;
    return response;
}
// Executes a non-query command (INSERT/UPDATE/DELETE/DDL) and records the
// affected row count on the response. Errors are captured, not thrown.
tgDataResponse IDataProvider.ExecuteNonQuery(tgDataRequest request)
{
    tgDataResponse response = new tgDataResponse();
    OleDbCommand command = null;

    try
    {
        command = new OleDbCommand();
        command.CommandText = request.QueryText;

        if (request.CommandTimeout != null)
        {
            command.CommandTimeout = request.CommandTimeout.Value;
        }
        if (request.Parameters != null)
        {
            AddParameters(command, request);
        }

        // Map the request's query type onto the ADO.NET command type.
        if (request.QueryType == tgQueryType.TableDirect)
        {
            command.CommandType = CommandType.TableDirect;
        }
        else if (request.QueryType == tgQueryType.StoredProcedure)
        {
            command.CommandType = CommandType.StoredProcedure;
        }
        else if (request.QueryType == tgQueryType.Text)
        {
            command.CommandType = CommandType.Text;
        }

        try
        {
            tgTransactionScope.Enlist(command, request.ConnectionString, CreateIDbConnectionDelegate);

            if (sTraceHandler != null)
            {
                // Profiling is active: wrap execution so timing and any failure
                // reach the trace handler.
                using (esTraceArguments trace = new esTraceArguments(request, command, "ExecuteNonQuery", System.Environment.StackTrace))
                {
                    try
                    {
                        response.RowsEffected = command.ExecuteNonQuery();
                    }
                    catch (Exception ex)
                    {
                        trace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            {
                response.RowsEffected = command.ExecuteNonQuery();
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(command);
        }
    }
    catch (Exception ex)
    {
        CleanupCommand(command);
        response.Exception = ex;
    }

    return response;
}
// Opens a connection and returns a live data reader on the response. The
// reader owns the connection (CommandBehavior.CloseConnection), so disposing
// the reader releases it.
tgDataResponse IDataProvider.ExecuteReader(tgDataRequest request)
{
    tgDataResponse response = new tgDataResponse();
    OleDbCommand command = null;

    try
    {
        command = new OleDbCommand();
        command.CommandText = request.QueryText;

        if (request.CommandTimeout != null)
        {
            command.CommandTimeout = request.CommandTimeout.Value;
        }
        if (request.Parameters != null)
        {
            AddParameters(command, request);
        }

        if (request.QueryType == tgQueryType.TableDirect)
        {
            command.CommandType = CommandType.TableDirect;
        }
        else if (request.QueryType == tgQueryType.StoredProcedure)
        {
            command.CommandType = CommandType.StoredProcedure;
        }
        else if (request.QueryType == tgQueryType.Text)
        {
            command.CommandType = CommandType.Text;
        }
        else if (request.QueryType == tgQueryType.DynamicQuery)
        {
            // Dynamic queries arrive fully prepared; the command built above
            // is discarded and replaced.
            command = QueryBuilder.PrepareCommand(request);
        }

        command.Connection = new OleDbConnection(request.ConnectionString);
        command.Connection.Open();

        if (sTraceHandler != null)
        {
            using (esTraceArguments trace = new esTraceArguments(request, command, "ExecuteReader", System.Environment.StackTrace))
            {
                try
                {
                    response.DataReader = command.ExecuteReader(CommandBehavior.CloseConnection);
                }
                catch (Exception ex)
                {
                    trace.Exception = ex.Message;
                    throw;
                }
            }
        }
        else
        {
            response.DataReader = command.ExecuteReader(CommandBehavior.CloseConnection);
        }
    }
    catch (Exception ex)
    {
        CleanupCommand(command);
        response.Exception = ex;
    }

    return response;
}
// Executes a command and records the first column of the first row of its
// result on the response. Errors are captured, not thrown.
tgDataResponse IDataProvider.ExecuteScalar(tgDataRequest request)
{
    tgDataResponse response = new tgDataResponse();
    OleDbCommand command = null;

    try
    {
        command = new OleDbCommand();
        command.CommandText = request.QueryText;

        if (request.CommandTimeout != null)
        {
            command.CommandTimeout = request.CommandTimeout.Value;
        }
        if (request.Parameters != null)
        {
            AddParameters(command, request);
        }

        if (request.QueryType == tgQueryType.TableDirect)
        {
            command.CommandType = CommandType.TableDirect;
        }
        else if (request.QueryType == tgQueryType.StoredProcedure)
        {
            command.CommandType = CommandType.StoredProcedure;
        }
        else if (request.QueryType == tgQueryType.Text)
        {
            command.CommandType = CommandType.Text;
        }
        else if (request.QueryType == tgQueryType.DynamicQuery)
        {
            // Dynamic queries arrive fully prepared; replace the command built above.
            command = QueryBuilder.PrepareCommand(request);
        }

        try
        {
            tgTransactionScope.Enlist(command, request.ConnectionString, CreateIDbConnectionDelegate);

            if (sTraceHandler != null)
            {
                using (esTraceArguments trace = new esTraceArguments(request, command, "ExecuteScalar", System.Environment.StackTrace))
                {
                    try
                    {
                        response.Scalar = command.ExecuteScalar();
                    }
                    catch (Exception ex)
                    {
                        trace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            {
                response.Scalar = command.ExecuteScalar();
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(command);
        }
    }
    catch (Exception ex)
    {
        CleanupCommand(command);
        response.Exception = ex;
    }

    return response;
}
// Fills a DataSet from either a stored procedure or a text query; any other
// query type leaves the response's DataSet unset.
tgDataResponse IDataProvider.FillDataSet(tgDataRequest request)
{
    tgDataResponse response = new tgDataResponse();

    try
    {
        if (request.QueryType == tgQueryType.StoredProcedure)
        {
            response.DataSet = LoadDataSetFromStoredProcedure(request);
        }
        else if (request.QueryType == tgQueryType.Text)
        {
            response.DataSet = LoadDataSetFromText(request);
        }
    }
    catch (Exception ex)
    {
        response.Exception = ex;
    }

    return response;
}
// Fills a DataTable from either a stored procedure or a text query; any other
// query type leaves the response's Table unset.
tgDataResponse IDataProvider.FillDataTable(tgDataRequest request)
{
    tgDataResponse response = new tgDataResponse();

    try
    {
        if (request.QueryType == tgQueryType.StoredProcedure)
        {
            response.Table = LoadDataTableFromStoredProcedure(request);
        }
        else if (request.QueryType == tgQueryType.Text)
        {
            response.Table = LoadDataTableFromText(request);
        }
    }
    catch (Exception ex)
    {
        response.Exception = ex;
    }

    return response;
}
#endregion
// Fills a DataSet by executing a stored procedure named by the request.
// Throws on failure after closing any connection the command had opened.
static private DataSet LoadDataSetFromStoredProcedure(tgDataRequest request)
{
    DataSet dataSet = new DataSet();
    OleDbCommand command = null;

    try
    {
        command = new OleDbCommand();
        command.CommandText = request.QueryText;
        command.CommandType = CommandType.StoredProcedure;

        if (request.CommandTimeout != null)
        {
            command.CommandTimeout = request.CommandTimeout.Value;
        }

        if (request.Parameters != null)
        {
            // Bind each request parameter using the provider's parameter delimiter.
            foreach (tgParameter param in request.Parameters)
            {
                command.Parameters.AddWithValue(Delimiters.Param + param.Name, param.Value);
            }
        }

        OleDbDataAdapter adapter = new OleDbDataAdapter();
        adapter.SelectCommand = command;

        try
        {
            tgTransactionScope.Enlist(adapter.SelectCommand, request.ConnectionString, CreateIDbConnectionDelegate);

            if (sTraceHandler != null)
            {
                using (esTraceArguments trace = new esTraceArguments(request, command, "LoadFromStoredProcedure", System.Environment.StackTrace))
                {
                    try
                    {
                        adapter.Fill(dataSet);
                    }
                    catch (Exception ex)
                    {
                        trace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            {
                adapter.Fill(dataSet);
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(adapter.SelectCommand);
        }
    }
    catch (Exception)
    {
        // Close the (possibly open) connection before propagating.
        CleanupCommand(command);
        throw;
    }

    return dataSet;
}
// Fills a DataSet by executing the request's SQL text. Throws on failure
// after closing any connection the command had opened.
static private DataSet LoadDataSetFromText(tgDataRequest request)
{
    DataSet dataSet = new DataSet();
    OleDbCommand command = null;

    try
    {
        command = new OleDbCommand();
        command.CommandType = CommandType.Text;
        command.CommandText = request.QueryText;

        if (request.CommandTimeout != null)
        {
            command.CommandTimeout = request.CommandTimeout.Value;
        }
        if (request.Parameters != null)
        {
            AddParameters(command, request);
        }

        OleDbDataAdapter adapter = new OleDbDataAdapter();
        adapter.SelectCommand = command;

        try
        {
            tgTransactionScope.Enlist(adapter.SelectCommand, request.ConnectionString, CreateIDbConnectionDelegate);

            if (sTraceHandler != null)
            {
                using (esTraceArguments trace = new esTraceArguments(request, command, "LoadDataSetFromText", System.Environment.StackTrace))
                {
                    try
                    {
                        adapter.Fill(dataSet);
                    }
                    catch (Exception ex)
                    {
                        trace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            {
                adapter.Fill(dataSet);
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(adapter.SelectCommand);
        }
    }
    catch (Exception)
    {
        CleanupCommand(command);
        throw;
    }

    return dataSet;
}
// Loads the "many" side of a many-to-many relationship. QueryText is an
// encoded description — "table1,table2|column1,column2|fkColumn" — from which
// the JOIN SQL is composed by hand.
static private DataTable LoadManyToMany(tgDataRequest request)
{
    DataTable dataTable = null;
    OleDbCommand cmd = null;

    try
    {
        dataTable = new DataTable(request.ProviderMetadata.Destination);

        cmd = new OleDbCommand();
        cmd.CommandType = CommandType.Text;
        if (request.CommandTimeout != null) cmd.CommandTimeout = request.CommandTimeout.Value;

        string mmQuery = request.QueryText;

        string[] sections = mmQuery.Split('|');
        string[] tables = sections[0].Split(',');
        string[] columns = sections[1].Split(',');

        // We build the query by hand; we don't use Delimiters to avoid tons of
        // extra concatenation. NOTE(review): table/column names are concatenated
        // directly into the SQL — presumably safe because they come from the
        // provider's generated metadata rather than user input (the WHERE value
        // itself is parameterized); confirm against callers.
        string sql = "SELECT * FROM [" + tables[0] + "] LEFT JOIN [" + tables[1] + "] ON [" + tables[0] + "].[" + columns[0] + "] = [";
        sql += tables[1] + "].[" + columns[1] + "] WHERE [" + tables[1] + "].[" + sections[2] + "] = @";

        if (request.Parameters != null)
        {
            // Appends each parameter name to complete the "@" placeholder started
            // above (in practice presumably a single foreign-key parameter —
            // TODO confirm with callers).
            foreach (tgParameter esParam in request.Parameters)
            {
                sql += esParam.Name;
            }

            AddParameters(cmd, request);
        }

        OleDbDataAdapter da = new OleDbDataAdapter();
        cmd.CommandText = sql;
        da.SelectCommand = cmd;

        try
        {
            tgTransactionScope.Enlist(da.SelectCommand, request.ConnectionString, CreateIDbConnectionDelegate);

            #region Profiling
            if (sTraceHandler != null)
            {
                using (esTraceArguments esTrace = new esTraceArguments(request, cmd, "LoadManyToMany", System.Environment.StackTrace))
                {
                    try
                    {
                        da.Fill(dataTable);
                    }
                    catch (Exception ex)
                    {
                        esTrace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            #endregion
            {
                da.Fill(dataTable);
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(da.SelectCommand);
        }
    }
    catch (Exception)
    {
        // Close any open connection before letting the caller handle the error.
        CleanupCommand(cmd);
        throw;
    }
    finally
    {
    }

    return dataTable;
}
// Fills a DataTable (named after the request's destination) by executing a
// stored procedure. Throws on failure after closing any open connection.
static private DataTable LoadDataTableFromStoredProcedure(tgDataRequest request)
{
    DataTable dataTable = new DataTable(request.ProviderMetadata.Destination);
    OleDbCommand command = null;

    try
    {
        command = new OleDbCommand();
        command.CommandText = request.QueryText;
        command.CommandType = CommandType.StoredProcedure;

        if (request.CommandTimeout != null)
        {
            command.CommandTimeout = request.CommandTimeout.Value;
        }

        if (request.Parameters != null)
        {
            // Bind each request parameter using the provider's parameter delimiter.
            foreach (tgParameter param in request.Parameters)
            {
                command.Parameters.AddWithValue(Delimiters.Param + param.Name, param.Value);
            }
        }

        OleDbDataAdapter adapter = new OleDbDataAdapter();
        adapter.SelectCommand = command;

        try
        {
            tgTransactionScope.Enlist(adapter.SelectCommand, request.ConnectionString, CreateIDbConnectionDelegate);

            if (sTraceHandler != null)
            {
                using (esTraceArguments trace = new esTraceArguments(request, command, "LoadFromStoredProcedure", System.Environment.StackTrace))
                {
                    try
                    {
                        adapter.Fill(dataTable);
                    }
                    catch (Exception ex)
                    {
                        trace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            {
                adapter.Fill(dataTable);
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(adapter.SelectCommand);
        }
    }
    catch (Exception)
    {
        CleanupCommand(command);
        throw;
    }

    return dataTable;
}
// Fills a DataTable (named after the request's destination) by executing the
// request's SQL text. Throws on failure after closing any open connection.
static private DataTable LoadDataTableFromText(tgDataRequest request)
{
    DataTable dataTable = new DataTable(request.ProviderMetadata.Destination);
    OleDbCommand command = null;

    try
    {
        command = new OleDbCommand();
        command.CommandType = CommandType.Text;
        command.CommandText = request.QueryText;

        if (request.CommandTimeout != null)
        {
            command.CommandTimeout = request.CommandTimeout.Value;
        }
        if (request.Parameters != null)
        {
            AddParameters(command, request);
        }

        OleDbDataAdapter adapter = new OleDbDataAdapter();
        adapter.SelectCommand = command;

        try
        {
            tgTransactionScope.Enlist(adapter.SelectCommand, request.ConnectionString, CreateIDbConnectionDelegate);

            if (sTraceHandler != null)
            {
                using (esTraceArguments trace = new esTraceArguments(request, command, "LoadFromText", System.Environment.StackTrace))
                {
                    try
                    {
                        adapter.Fill(dataTable);
                    }
                    catch (Exception ex)
                    {
                        trace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            {
                adapter.Fill(dataTable);
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(adapter.SelectCommand);
        }
    }
    catch (Exception)
    {
        CleanupCommand(command);
        throw;
    }

    return dataTable;
}
// Executes a command produced by the Dynamic Query API and publishes both the
// generated SQL text and the resulting rows on the response.
static private void LoadDataTableFromDynamicQuery(tgDataRequest request, tgDataResponse response, OleDbCommand cmd)
{
    try
    {
        // Record the generated SQL so callers can inspect the last query.
        response.LastQuery = cmd.CommandText;

        if (request.CommandTimeout != null)
        {
            cmd.CommandTimeout = request.CommandTimeout.Value;
        }

        DataTable dataTable = new DataTable(request.ProviderMetadata.Destination);

        OleDbDataAdapter adapter = new OleDbDataAdapter();
        adapter.SelectCommand = cmd;

        try
        {
            tgTransactionScope.Enlist(adapter.SelectCommand, request.ConnectionString, CreateIDbConnectionDelegate);

            if (sTraceHandler != null)
            {
                using (esTraceArguments trace = new esTraceArguments(request, cmd, "LoadFromDynamicQuery", System.Environment.StackTrace))
                {
                    try
                    {
                        adapter.Fill(dataTable);
                    }
                    catch (Exception ex)
                    {
                        trace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            {
                adapter.Fill(dataTable);
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(adapter.SelectCommand);
        }

        response.Table = dataTable;
    }
    catch (Exception)
    {
        CleanupCommand(cmd);
        throw;
    }
}
// Collection persistence via stored procedures is not supported by the
// MS Access provider; callers must use dynamic (SQL text) persistence.
static private DataTable SaveStoredProcCollection(tgDataRequest request)
{
    throw new NotImplementedException("Stored Procedures not supported");
}
// Single-entity persistence via stored procedures is not supported by the
// MS Access provider; callers must use dynamic (SQL text) persistence.
static private DataTable SaveStoredProcEntity(tgDataRequest request)
{
    throw new NotImplementedException("Stored Procedures not supported");
}
// Routes a collection save to the delete batch or the insert/update batch.
static private DataTable SaveDynamicCollection(tgDataRequest request)
{
    //============================================================================
    // Packets in a request are homogeneous: we do all deletes at once, so if the
    // first packet is a delete they all are; otherwise the batch holds only
    // inserts and updates.
    //============================================================================
    tgEntitySavePacket firstPacket = request.CollectionSavePacket[0];

    if (firstPacket.RowState == tgDataRowState.Deleted)
    {
        return SaveDynamicCollection_Deletes(request);
    }

    return SaveDynamicCollection_InsertsUpdates(request);
}
// Applies every Added/Modified packet in the request, one row at a time,
// through a single OleDbDataAdapter inside one tgTransactionScope.
static private DataTable SaveDynamicCollection_InsertsUpdates(tgDataRequest request)
{
    DataTable dataTable = CreateDataTable(request);

    using (tgTransactionScope scope = new tgTransactionScope())
    {
        using (OleDbDataAdapter da = new OleDbDataAdapter())
        {
            // Row state is managed manually below; automatic AcceptChanges would
            // interfere with the per-packet row-state setup.
            da.AcceptChangesDuringUpdate = false;
            da.ContinueUpdateOnError = request.ContinueUpdateOnError;

            OleDbCommand cmd = null;

            if (!request.IgnoreComputedColumns)
            {
                // Pull identity/default values back into the packet after each write.
                da.RowUpdated += new OleDbRowUpdatedEventHandler(OnRowUpdated);
            }

            foreach (tgEntitySavePacket packet in request.CollectionSavePacket)
            {
                // Deletes are handled by SaveDynamicCollection_Deletes; skip anything
                // that is not an insert or an update.
                if (packet.RowState != tgDataRowState.Added && packet.RowState != tgDataRowState.Modified) continue;

                DataRow row = dataTable.NewRow();
                dataTable.Rows.Add(row);

                switch (packet.RowState)
                {
                    case tgDataRowState.Added:
                        cmd = da.InsertCommand = Shared.BuildDynamicInsertCommand(request, packet);
                        SetModifiedValues(request, packet, row);
                        break;

                    case tgDataRowState.Modified:
                        cmd = da.UpdateCommand = Shared.BuildDynamicUpdateCommand(request, packet);
                        SetOriginalValues(request, packet, row, false);
                        SetModifiedValues(request, packet, row);
                        // Present the row as "clean, then modified" so the adapter
                        // issues an UPDATE carrying both value versions.
                        row.AcceptChanges();
                        row.SetModified();
                        break;
                }

                // Stash context where OnRowUpdated can reach it (via the table's
                // extended properties).
                request.Properties["tgDataRequest"] = request;
                request.Properties["esEntityData"] = packet;
                dataTable.ExtendedProperties["props"] = request.Properties;

                DataRow[] singleRow = new DataRow[1];
                singleRow[0] = row;

                try
                {
                    tgTransactionScope.Enlist(cmd, request.ConnectionString, CreateIDbConnectionDelegate);

                    #region Profiling
                    if (sTraceHandler != null)
                    {
                        using (esTraceArguments esTrace = new esTraceArguments(request, cmd, packet, "SaveCollectionDynamic", System.Environment.StackTrace))
                        {
                            try
                            {
                                da.Update(singleRow);
                            }
                            catch (Exception ex)
                            {
                                esTrace.Exception = ex.Message;
                                throw;
                            }
                        }
                    }
                    else
                    #endregion
                    {
                        da.Update(singleRow);
                    }

                    if (row.HasErrors)
                    {
                        request.FireOnError(packet, row.RowError);
                    }
                }
                finally
                {
                    tgTransactionScope.DeEnlist(cmd);
                    // Rows cannot be reused across updates; start fresh per packet.
                    dataTable.Rows.Clear();
                }

                // Copy any output/input-output parameter values back into the
                // packet's current values.
                if (!row.HasErrors && packet.RowState != tgDataRowState.Deleted && cmd.Parameters != null)
                {
                    foreach (OleDbParameter param in cmd.Parameters)
                    {
                        switch (param.Direction)
                        {
                            case ParameterDirection.Output:
                            case ParameterDirection.InputOutput:
                                packet.CurrentValues[param.SourceColumn] = param.Value;
                                break;
                        }
                    }
                }
            }
        }

        scope.Complete();
    }

    return dataTable;
}
// Deletes every packet in the request through a single adapter/command pair
// inside one tgTransactionScope. Only reached when the first packet is a
// delete (the whole batch is then deletes — see SaveDynamicCollection).
static private DataTable SaveDynamicCollection_Deletes(tgDataRequest request)
{
    OleDbCommand cmd = null;

    DataTable dataTable = CreateDataTable(request);

    using (tgTransactionScope scope = new tgTransactionScope())
    {
        using (OleDbDataAdapter da = new OleDbDataAdapter())
        {
            // Row state is managed manually below.
            da.AcceptChangesDuringUpdate = false;
            da.ContinueUpdateOnError = request.ContinueUpdateOnError;

            try
            {
                // One DELETE command serves the whole batch; it is built from the
                // first packet's modified columns.
                cmd = da.DeleteCommand = Shared.BuildDynamicDeleteCommand(request, request.CollectionSavePacket[0].ModifiedColumns);
                tgTransactionScope.Enlist(cmd, request.ConnectionString, CreateIDbConnectionDelegate);

                DataRow[] singleRow = new DataRow[1];

                // Delete each record
                foreach (tgEntitySavePacket packet in request.CollectionSavePacket)
                {
                    DataRow row = dataTable.NewRow();
                    dataTable.Rows.Add(row);

                    // Seed the row with original key/concurrency values, then mark
                    // it deleted so the adapter issues a DELETE for it.
                    SetOriginalValues(request, packet, row, true);
                    row.AcceptChanges();
                    row.Delete();

                    singleRow[0] = row;

                    #region Profiling
                    if (sTraceHandler != null)
                    {
                        using (esTraceArguments esTrace = new esTraceArguments(request, cmd, packet, "SaveCollectionDynamic", System.Environment.StackTrace))
                        {
                            try
                            {
                                da.Update(singleRow);
                            }
                            catch (Exception ex)
                            {
                                esTrace.Exception = ex.Message;
                                throw;
                            }
                        }
                    }
                    else
                    #endregion
                    {
                        da.Update(singleRow);
                    }

                    if (row.HasErrors)
                    {
                        request.FireOnError(packet, row.RowError);
                    }

                    dataTable.Rows.Clear(); // ADO.NET won't let us reuse the same DataRow
                }
            }
            finally
            {
                tgTransactionScope.DeEnlist(cmd);
            }
        }

        scope.Complete();
    }

    return request.Table;
}
// Persists a single entity (insert, update or delete) through a one-row
// DataTable and an OleDbDataAdapter, then copies any output parameter values
// back into the save packet.
static private DataTable SaveDynamicEntity(tgDataRequest request)
{
    bool needToDelete = request.EntitySavePacket.RowState == tgDataRowState.Deleted;

    DataTable dataTable = CreateDataTable(request);

    using (OleDbDataAdapter da = new OleDbDataAdapter())
    {
        // Row state is managed manually below.
        da.AcceptChangesDuringUpdate = false;

        DataRow row = dataTable.NewRow();
        dataTable.Rows.Add(row);

        OleDbCommand cmd = null;

        switch (request.EntitySavePacket.RowState)
        {
            case tgDataRowState.Added:
                cmd = da.InsertCommand = Shared.BuildDynamicInsertCommand(request, request.EntitySavePacket);
                SetModifiedValues(request, request.EntitySavePacket, row);
                break;

            case tgDataRowState.Modified:
                cmd = da.UpdateCommand = Shared.BuildDynamicUpdateCommand(request, request.EntitySavePacket);
                SetOriginalValues(request, request.EntitySavePacket, row, false);
                SetModifiedValues(request, request.EntitySavePacket, row);
                // Present the row as "clean, then modified" so the adapter issues
                // an UPDATE carrying both original and current value versions.
                row.AcceptChanges();
                row.SetModified();
                break;

            case tgDataRowState.Deleted:
                cmd = da.DeleteCommand = Shared.BuildDynamicDeleteCommand(request, null);
                SetOriginalValues(request, request.EntitySavePacket, row, true);
                row.AcceptChanges();
                row.Delete();
                break;
        }

        if (!needToDelete && request.Properties != null)
        {
            // Stash context where OnRowUpdated can reach it (via the table's
            // extended properties). Deletes skip the fixup entirely.
            request.Properties["tgDataRequest"] = request;
            request.Properties["esEntityData"] = request.EntitySavePacket;
            dataTable.ExtendedProperties["props"] = request.Properties;
        }

        DataRow[] singleRow = new DataRow[1];
        singleRow[0] = row;

        if (!request.IgnoreComputedColumns)
        {
            // Pull identity/default values back into the packet after the write.
            da.RowUpdated += new OleDbRowUpdatedEventHandler(OnRowUpdated);
        }

        try
        {
            tgTransactionScope.Enlist(cmd, request.ConnectionString, CreateIDbConnectionDelegate);

            #region Profiling
            if (sTraceHandler != null)
            {
                using (esTraceArguments esTrace = new esTraceArguments(request, cmd, request.EntitySavePacket, "SaveEntityDynamic", System.Environment.StackTrace))
                {
                    try
                    {
                        da.Update(singleRow);
                    }
                    catch (Exception ex)
                    {
                        esTrace.Exception = ex.Message;
                        throw;
                    }
                }
            }
            else
            #endregion
            {
                da.Update(singleRow);
            }
        }
        finally
        {
            tgTransactionScope.DeEnlist(cmd);
        }

        // Copy any output/input-output parameter values back into the entity's
        // current values.
        if (request.EntitySavePacket.RowState != tgDataRowState.Deleted && cmd.Parameters != null)
        {
            foreach (OleDbParameter param in cmd.Parameters)
            {
                switch (param.Direction)
                {
                    case ParameterDirection.Output:
                    case ParameterDirection.InputOutput:
                        request.EntitySavePacket.CurrentValues[param.SourceColumn] = param.Value;
                        break;
                }
            }
        }
    }

    return dataTable;
}
// Builds an empty DataTable whose schema mirrors either every column in the
// request's metadata or, when present, only the selected columns.
static private DataTable CreateDataTable(tgDataRequest request)
{
    DataTable table = new DataTable();
    tgColumnMetadataCollection metadata = request.Columns;

    if (request.SelectedColumns == null)
    {
        for (int index = 0; index < metadata.Count; index++)
        {
            tgColumnMetadata column = metadata[index];
            table.Columns.Add(new DataColumn(column.Name, column.Type));
        }
    }
    else
    {
        foreach (string name in request.SelectedColumns.Keys)
        {
            table.Columns.Add(new DataColumn(name, metadata[name].Type));
        }
    }

    return table;
}
// Copies the packet's original values into the row, optionally restricted to
// primary key and concurrency columns (used when building DELETE rows).
static void SetOriginalValues(tgDataRequest request, tgEntitySavePacket packet, DataRow row, bool primaryKeysAndConcurrencyOnly)
{
    foreach (tgColumnMetadata col in request.Columns)
    {
        if (primaryKeysAndConcurrencyOnly)
        {
            // Skip anything that is neither a key nor a concurrency column.
            bool relevant = col.IsInPrimaryKey || col.IsConcurrency || col.IsTiraggoConcurrency;
            if (!relevant) continue;
        }

        string columnName = col.Name;

        if (packet.OriginalValues.ContainsKey(columnName))
        {
            row[columnName] = packet.OriginalValues[columnName];
        }
    }
}
// Copies only the columns the entity actually modified into the row, and only
// when the request's metadata knows about them.
static void SetModifiedValues(tgDataRequest request, tgEntitySavePacket packet, DataRow row)
{
    foreach (string column in packet.ModifiedColumns)
    {
        if (request.Columns.FindByColumnName(column) == null) continue;

        row[column] = packet.CurrentValues[column];
    }
}
// Adapter callback fired after each statement. Pulls @@IDENTITY values,
// EntitySpaces concurrency counters, and column defaults back into the
// in-flight save packet. The outer catch swallows everything on purpose:
// a post-save fixup must never abort the save itself.
protected static void OnRowUpdated(object sender, OleDbRowUpdatedEventArgs e)
{
    try
    {
        PropertyCollection props = e.Row.Table.ExtendedProperties;
        if (props.ContainsKey("props"))
        {
            // The save routines stash the request's property bag under "props".
            props = (PropertyCollection)props["props"];
        }

        if (e.Status == UpdateStatus.Continue && (e.StatementType == StatementType.Insert || e.StatementType == StatementType.Update))
        {
            tgDataRequest request = props["tgDataRequest"] as tgDataRequest;
            tgEntitySavePacket packet = (tgEntitySavePacket)props["esEntityData"];

            if (e.StatementType == StatementType.Insert)
            {
                if (props.Contains("AutoInc"))
                {
                    string autoInc = props["AutoInc"] as string;

                    // Fetch the new identity via SELECT @@IDENTITY on the same
                    // connection/transaction as the insert.
                    OleDbCommand cmd = new OleDbCommand();
                    cmd.Connection = e.Command.Connection;
                    cmd.Transaction = e.Command.Transaction;

                    cmd.CommandText = "SELECT @@IDENTITY";
                    cmd.CommandType = CommandType.Text;

                    object o = null;
                    #region Profiling
                    if (sTraceHandler != null)
                    {
                        using (esTraceArguments esTrace = new esTraceArguments(request, cmd, "OnRowUpdated", System.Environment.StackTrace))
                        {
                            try
                            {
                                o = cmd.ExecuteScalar();
                            }
                            catch (Exception ex)
                            {
                                esTrace.Exception = ex.Message;
                                throw;
                            }
                        }
                    }
                    else
                    #endregion
                    {
                        o = cmd.ExecuteScalar();
                    }

                    if (o != null)
                    {
                        packet.CurrentValues[autoInc] = o;
                        e.Row[autoInc] = o;
                    }
                }

                if (props.Contains("EntitySpacesConcurrency"))
                {
                    // Fresh rows start their logical concurrency counter at 1.
                    string esConcurrencyColumn = props["EntitySpacesConcurrency"] as string;
                    packet.CurrentValues[esConcurrencyColumn] = 1;
                }
            }

            //-------------------------------------------------------------------------------------------------
            // Fetch any defaults: no output parameters are available here, so we
            // re-select the row the hard way. (This comment originally referenced
            // SQLite — the same technique is used by that provider.)
            //-------------------------------------------------------------------------------------------------
            if (props.Contains("Defaults"))
            {
                // Build the Where parameter and parameters
                OleDbCommand cmd = new OleDbCommand();
                cmd.Connection = e.Command.Connection;
                cmd.Transaction = e.Command.Transaction;

                string select = (string)props["Defaults"];

                string[] whereParameters = ((string)props["Where"]).Split(',');

                string comma = String.Empty;
                string where = String.Empty;

                int i = 1;
                foreach (string parameter in whereParameters)
                {
                    // Key values come from the row just written; the identifiers come
                    // from provider metadata, the values are parameterized.
                    OleDbParameter p = new OleDbParameter("@p" + i++.ToString(), e.Row[parameter]);
                    cmd.Parameters.Add(p);
                    where += comma + "[" + parameter + "] = " + p.ParameterName;
                    comma = " AND ";
                }

                // Okay, now we can execute the sql and get any values that have defaults that were
                // null at the time of the insert and/or our timestamp
                cmd.CommandText = "SELECT " + select + " FROM [" + request.ProviderMetadata.Source + "] WHERE " + where + ";";

                OleDbDataReader rdr = null;

                try
                {
                    #region Profiling
                    if (sTraceHandler != null)
                    {
                        using (esTraceArguments esTrace = new esTraceArguments(request, cmd, "OnRowUpdated", System.Environment.StackTrace))
                        {
                            try
                            {
                                rdr = cmd.ExecuteReader(CommandBehavior.SingleResult);
                            }
                            catch (Exception ex)
                            {
                                esTrace.Exception = ex.Message;
                                throw;
                            }
                        }
                    }
                    else
                    #endregion
                    {
                        rdr = cmd.ExecuteReader(CommandBehavior.SingleResult);
                    }

                    if (rdr.Read())
                    {
                        // The SELECT list order matches the default-column list, so
                        // values are read back positionally.
                        select = select.Replace("[", String.Empty).Replace("]", String.Empty);
                        string[] selectCols = select.Split(',');

                        for (int k = 0; k < selectCols.Length; k++)
                        {
                            packet.CurrentValues[selectCols[k]] = rdr.GetValue(k);
                        }
                    }
                }
                finally
                {
                    // Make sure we close the reader no matter what
                    if (rdr != null) rdr.Close();
                }
            }

            if (e.StatementType == StatementType.Update)
            {
                // NOTE(review): assumes "EntitySpacesConcurrency" is always present
                // for updates; if absent, colName is null and the row indexer throws
                // (silently swallowed by the outer catch). Confirm with callers.
                string colName = props["EntitySpacesConcurrency"] as string;

                // Increment the logical concurrency counter based on its runtime type.
                object o = e.Row[colName];

                switch (Type.GetTypeCode(o.GetType()))
                {
                    case TypeCode.Int16: packet.CurrentValues[colName] = ((System.Int16)o) + 1; break;
                    case TypeCode.Int32: packet.CurrentValues[colName] = ((System.Int32)o) + 1; break;
                    case TypeCode.Int64: packet.CurrentValues[colName] = ((System.Int64)o) + 1; break;
                    case TypeCode.UInt16: packet.CurrentValues[colName] = ((System.UInt16)o) + 1; break;
                    case TypeCode.UInt32: packet.CurrentValues[colName] = ((System.UInt32)o) + 1; break;
                    case TypeCode.UInt64: packet.CurrentValues[colName] = ((System.UInt64)o) + 1; break;
                }
            }
        }
    }
    // Deliberate swallow — see the summary comment above.
    catch { }
}
// If it's an Insert we fetch the @@Identity value and stuff it in the proper column.
// Jet/ACE OLE DB has no output parameters, so the identity has to be read with a
// follow-up "SELECT @@IDENTITY" on the same connection/transaction.
protected static void OnRowUpdated1(object sender, OleDbRowUpdatedEventArgs e)
{
    try
    {
        // Deletes need no post-processing; rows whose update was skipped or errored are ignored.
        if(e.StatementType == StatementType.Delete || e.Status != UpdateStatus.Continue) return;

        PropertyCollection props = e.Row.Table.ExtendedProperties;

        if (e.StatementType == StatementType.Insert)
        {
            // Save packet stashed on the DataTable by the caller; receives the fetched values.
            tgEntitySavePacket packet = (tgEntitySavePacket)props["esEntityData"];

            if (e.Row.Table.ExtendedProperties.Contains("AutoInc"))
            {
                tgDataRequest request = props["tgDataRequest"] as tgDataRequest;
                string autoInc = props["AutoInc"] as string;

                // Reuse the update's connection/transaction so @@IDENTITY reflects our insert.
                OleDbCommand cmd = new OleDbCommand();
                cmd.Connection = e.Command.Connection;
                cmd.Transaction = e.Command.Transaction;
                cmd.CommandText = "SELECT @@IDENTITY";
                cmd.CommandType = CommandType.Text;

                object o = null;
                #region Profiling
                if (sTraceHandler != null)
                {
                    // Traced execution: record the failure message for the profiler, then rethrow.
                    using (esTraceArguments esTrace = new esTraceArguments(request, cmd, "OnRowUpdated", System.Environment.StackTrace))
                    {
                        try
                        {
                            o = cmd.ExecuteScalar();
                        }
                        catch (Exception ex)
                        {
                            esTrace.Exception = ex.Message;
                            throw;
                        }
                    }
                }
                else
                #endregion
                {
                    o = cmd.ExecuteScalar();
                }

                if (o != null)
                {
                    // NOTE(review): only the save packet is updated here; the sibling handler
                    // above also writes the value back into e.Row[autoInc] — confirm which
                    // behavior is intended for this variant.
                    packet.CurrentValues[autoInc] = o;
                }
            }

            if (props.Contains("EntitySpacesConcurrency"))
            {
                // Freshly inserted rows always start with a concurrency stamp of 1.
                string colName = props["EntitySpacesConcurrency"] as string;
                packet.CurrentValues[colName] = 1;
            }
        }
        else if (e.StatementType == StatementType.Update)
        {
            if (props.Contains("EntitySpacesConcurrency"))
            {
                // Bump the integral concurrency column by one, whatever its integer width.
                tgEntitySavePacket packet = (tgEntitySavePacket)props["esEntityData"];
                string colName = props["EntitySpacesConcurrency"] as string;
                object o = e.Row[colName];

                switch (Type.GetTypeCode(o.GetType()))
                {
                    case TypeCode.Int16: packet.CurrentValues[colName] = ((System.Int16)o) + 1; break;
                    case TypeCode.Int32: packet.CurrentValues[colName] = ((System.Int32)o) + 1; break;
                    case TypeCode.Int64: packet.CurrentValues[colName] = ((System.Int64)o) + 1; break;
                    case TypeCode.UInt16: packet.CurrentValues[colName] = ((System.UInt16)o) + 1; break;
                    case TypeCode.UInt32: packet.CurrentValues[colName] = ((System.UInt32)o) + 1; break;
                    case TypeCode.UInt64: packet.CurrentValues[colName] = ((System.UInt64)o) + 1; break;
                }

                e.Row.AcceptChanges();
            }
        }
    }
    // Best-effort by design: post-save bookkeeping must never fail the save itself.
    // NOTE(review): this also silently hides genuine errors (e.g. a failed @@IDENTITY query).
    catch { }
}
// Copies the request's parameters onto the command. When the query is raw text
// containing "{0}"-style placeholders, each "{i}" token is first rewritten to a
// provider-style name ("p" + i with the provider's parameter delimiter).
// Note: OLE DB binds parameters by position, so the names used in the rewritten
// text and the names on the added parameters do not need to match.
static private void AddParameters(OleDbCommand cmd, tgDataRequest request)
{
    bool hasPlaceholders =
        request.QueryType == tgQueryType.Text &&
        request.QueryText != null &&
        request.QueryText.Contains("{0}");

    if (!hasPlaceholders)
    {
        // Simple case: just bind every parameter in order.
        foreach (tgParameter p in request.Parameters)
        {
            cmd.Parameters.AddWithValue(Delimiters.Param + p.Name, p.Value);
        }
        return;
    }

    int index = 0;
    foreach (tgParameter p in request.Parameters)
    {
        string slot = index.ToString();
        // Rewrite "{i}" -> e.g. "@pI" in the query text, then bind positionally.
        request.QueryText = request.QueryText.Replace("{" + slot + "}", Delimiters.Param + "p" + slot);
        index++;
        cmd.Parameters.AddWithValue(Delimiters.Param + p.Name, p.Value);
    }
}
}
}
| |
using System;
using Android.Content;
using Android.Widget;
using Android.Util;
using Android.Graphics;
using Android.Views;
using System.Collections;
using System.Collections.Generic;
using PrintBot.Domain.Models;
using PrintBot.Infrastructure.ViewModels;
using System.Threading.Tasks;
namespace PrintBot.Droid
{
// A draggable editor widget representing one hardware module on the board editor.
// Builds its entire child-view hierarchy in code (no XML layout): a name field,
// a type-cycling button, pin buttons (plus one fixed GND pin), a save button and
// a delete button. Sizes are derived from the main activity's global scale factor.
public class BordEditor_Modul : RelativeLayout
{
    // Connectable (non-GND) pin buttons of this module.
    public List<ModulButton> modulPins = new List<ModulButton>();
    // Delete button; its Click handler is expected to be wired by the owner.
    public Button selfDestrucktion;
    // Editable, user-visible module name.
    public EditText enterModulName;
    Button changeTypeBtn;
    // Currently selected module type; one of "LED", "Motor", "Custom", "Sensor".
    public String CurrentModulType { get; set; }
    private int _modulTypeIndex = 0;
    private ModuleSetupViewModel _vm;
    string _name;

    // Creates an empty module with the given number of connectable pins.
    public BordEditor_Modul(Context context, int pinCount) : base(context, null, 0)
    {
        Init(context, pinCount);
        _vm = ServiceLocator.Current.ModuleSetupViewModel;
    }

    // Creates an editor pre-populated from an already persisted (physical) module.
    public BordEditor_Modul(Context context, BordEditor_ModulPhysical modul) : base(context, null, 0)
    {
        Init(context, modul.PinList.Count);
        _vm = ServiceLocator.Current.ModuleSetupViewModel;
        enterModulName.Text = modul.Name;
    }

    // public Modul_2Pins(Context context, IAttributeSet attrs) : base(context, attrs) { Init(context); }
    // public Modul_2Pins(Context context, IAttributeSet attrs, int defStyle) : base(context, attrs, defStyle) { Init(context); }

    // Builds and positions all child views. The AddView call order determines
    // z-order, so do not reorder the construction steps.
    private void Init(Context context, int pinCount)
    {
        var scale = (int) PrintBot.Droid.Activities.BordEditor_MainActivity._scaleFactor;
        var enterTexHeiht = 50 * scale;
        var buttonsize = 25 * scale;
        var modulMinHeight = 55 * scale + 4 * buttonsize;
        pinCount += 1; // on extra for gnd
        int modulHeight = modulMinHeight + (30 * pinCount * scale);
        int modulWidth = 100 * scale;
        string[] modulTypes = new string[] { "LED","Motor","Custom", "Sensor"};
        // NOTE(review): typeButtonWidth is computed but never used below.
        int typeButtonWidth = modulWidth / modulTypes.Length;
        this.LayoutParameters = new LayoutParams(modulWidth, modulHeight);
        this.SetBackgroundColor(Color.PowderBlue);

        // Module name field: pushes every edit straight into _name via SetModulName.
        enterModulName = new EditText(context);
        enterModulName.LayoutParameters = new LayoutParams(LayoutParams.MatchParent, enterTexHeiht);
        enterModulName.TextSize = 5.5f * scale;
        enterModulName.SetTextColor( Color.Black);
        this.AddView(enterModulName);
        enterModulName.AfterTextChanged += delegate { SetModulName(enterModulName.Text); };

        // Type button: each click cycles through modulTypes and updates the label.
        changeTypeBtn = new Button(context);
        changeTypeBtn.LayoutParameters = new LayoutParams(LayoutParams.MatchParent, 2*buttonsize);
        changeTypeBtn.TranslationY = enterTexHeiht;
        changeTypeBtn.TextSize = 5.5f * scale;
        changeTypeBtn.Gravity = GravityFlags.Left;
        changeTypeBtn.Text = modulTypes[0];
        changeTypeBtn.SetBackgroundColor(Color.Gray);
        this.AddView(changeTypeBtn);
        changeTypeBtn.Click += delegate
        {
            _modulTypeIndex += 1;
            _modulTypeIndex = _modulTypeIndex % modulTypes.Length;
            CurrentModulType = modulTypes[_modulTypeIndex];
            changeTypeBtn.Text = CurrentModulType;
        };

        // Delete button (blue, bottom row); click behavior is attached externally.
        selfDestrucktion = new Button(context);
        selfDestrucktion.LayoutParameters = new LayoutParams(LayoutParams.MatchParent, buttonsize);
        selfDestrucktion.TranslationX = 0;
        selfDestrucktion.TranslationY = modulHeight - buttonsize;
        selfDestrucktion.SetBackgroundColor(Color.Blue);
        this.AddView(selfDestrucktion);

        // Save button (pink, above delete): serializes this module via the view model.
        Button save = new Button(context);
        save.LayoutParameters = new LayoutParams(LayoutParams.MatchParent, buttonsize);
        save.TranslationX = 0;
        save.TranslationY = modulHeight - 2*buttonsize;
        save.SetBackgroundColor(Color.Pink);
        this.AddView(save);
        save.Click += async delegate { await createPhysicalModul(); };

        // Pin buttons, stacked vertically; the last one is the fixed, non-clickable GND pin.
        int yOffset =enterTexHeiht + 2*buttonsize;
        for (int i = 0; i < pinCount; i++)
        {
            var pin = new ModulButton(context, this);
            pin.LayoutParameters = new LayoutParams(2* buttonsize, buttonsize);
            pin.TranslationX = 2* buttonsize;
            pin.TranslationY = yOffset;
            if (i == pinCount - 1)
            {
                pin.SetBackgroundColor(Color.Black);
                pin.Clickable = false;
                pin.Text = "GND";
                pin.SetTextColor(Color.White);
            }
            else
            {
                // Only connectable pins are tracked in modulPins.
                pin.SetBackgroundColor(Color.White);
                modulPins.Add(pin);
            }
            this.AddView(pin);
            yOffset += 30 * scale;
        }
    }

    // Sets the module type programmatically and keeps the type button's label in sync.
    public void SetModulTyp( string s)
    {
        this.CurrentModulType = s;
        changeTypeBtn.Text = s;
    }

    // Test hook: logs which board pin a device got attached to.
    public void AttachSimulator(int pinNr)
    {
        // just testing
        Console.WriteLine("Device is attached to Pin" + pinNr);
    }

    // Stores the (edited) module name and logs it.
    public void SetModulName(String newName)
    {
        this._name = newName;
        Console.WriteLine("Modul name : " + _name);
    }

    // A pin button belonging to a module. Reports its coordinates as the button's
    // CENTER in board coordinates (parent-module offset included) so connection
    // lines can be drawn between pins.
    public class ModulButton : Button
    {
        private BordEditor_BordPin _connectePin;
        private BordEditor_Modul _modul;

        // The board pin this module pin is currently connected to (null/unset if none).
        internal BordEditor_BordPin ConnectePin
        {
            get
            {
                return _connectePin;
            }
            set
            {
                _connectePin = value;
            }
        }

        // The module this pin belongs to; used to translate into board coordinates.
        public BordEditor_Modul Modul
        {
            get
            {
                return _modul;
            }
            set
            {
                _modul = value;
            }
        }

        public ModulButton(Context context, BordEditor_Modul parent) : base(context, null, 0) { Init(parent); }
        public ModulButton(Context context, IAttributeSet attrs) : base(context, attrs) { }

        // Restores the unconnected look (white background, empty label).
        public void reset()
        {
            this.SetBackgroundColor(Color.White);
            this.Text = "";
        }

        private void Init(BordEditor_Modul rel)
        {
            this.Modul = rel;
        }

        // Center-x in board coordinates (own center + parent module offset).
        public override float GetX()
        {
            return base.GetX() + Width / 2 + Modul.GetX();
        }

        // Center-y in board coordinates (own center + parent module offset).
        public override float GetY()
        {
            return base.GetY() + Height / 2 + Modul.GetY();
        }
    }

    // True when the module has been dragged past the right edge of the screen.
    // NOTE(review): compares a pixel X position against a dp width — confirm the
    // mixed units are intended.
    private bool isModulRightFromBord()
    {
        var metrics = Resources.DisplayMetrics;
        var widthInDp = ConvertPixelsToDp(metrics.WidthPixels);
        return this.GetX() > widthInDp;
    }

    // Converts raw pixels to density-independent pixels using the display density.
    private int ConvertPixelsToDp(float pixelValue)
    {
        var dp = (int)((pixelValue) / Resources.DisplayMetrics.Density);
        return dp;
    }

    // Moves the pin buttons to the module's left edge when the module sits to the
    // right of the board, so connection lines stay short.
    public void SwitchPinSideIfNeeded()
    {
        //Console.WriteLine(this.isModulRightFromBord());
        if (this.isModulRightFromBord())
        {
            foreach (ModulButton pin in modulPins)
            {
                pin.TranslationX = 0;
            }
        }
    }

    // Snapshots this editor widget into a serializable BordEditor_ModulPhysical
    // and persists it through the view model. Unconnected pins (where reading
    // ConnectePin members throws) are saved as empty placeholder pins.
    public async Task createPhysicalModul()
    {
        // Save modul by serializertion
        BordEditor_ModulPhysical tmp = new BordEditor_ModulPhysical();
        tmp.Name = this.enterModulName.Text;
        tmp.ModulType = this.CurrentModulType;
        foreach (ModulButton pin in this.modulPins)
        {
            try
            {
                var conectPin = pin.ConnectePin; // get conected bord pin
                var tmpPin = new BordEditor_PysicalPin(conectPin.Type, conectPin.Nr); // create new physical Pin
                tmp.PinList.Add(tmpPin); // add physical p to PhysicalModul
            }
            catch
            {
                // Pin not connected: persist an empty placeholder to keep pin order stable.
                var tmpPin = new BordEditor_PysicalPin(); // create new empty physical Pin
                tmp.PinList.Add(tmpPin); // add empty physical p to PhysicalModul
            }
        }
        await _vm.SaveModuleAsync(tmp);
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.Utilities;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.IntroduceVariable
{
internal partial class AbstractIntroduceVariableService<TService, TExpressionSyntax, TTypeSyntax, TTypeDeclarationSyntax, TQueryExpressionSyntax>
{
// Immutable-after-initialization analysis result for an "introduce variable"
// operation: locates the expression under the user's selection and classifies
// the syntactic context the new variable can be generated into.
private partial class State
{
    public SemanticDocument Document { get; }
    // The expression the variable will be introduced for; set in TryInitialize.
    public TExpressionSyntax Expression { get; private set; }

    // Context flags: at most one is set to true by TryInitialize, recording
    // where generation should happen (attribute, block, ctor-initializer, ...).
    public bool InAttributeContext { get; private set; }
    public bool InBlockContext { get; private set; }
    public bool InConstructorInitializerContext { get; private set; }
    public bool InFieldContext { get; private set; }
    public bool InParameterContext { get; private set; }
    public bool InQueryContext { get; private set; }
    public bool InExpressionBodiedMemberContext { get; private set; }
    // True when the expression has a compile-time constant value.
    public bool IsConstant { get; private set; }

    // Lazily created in GetSemanticMap.
    private SemanticMap _semanticMap;
    private readonly TService _service;

    public State(TService service, SemanticDocument document)
    {
        _service = service;
        this.Document = document;
    }

    // Factory: returns a fully initialized State, or null when the selection
    // does not admit introducing a variable.
    public static State Generate(
        TService service,
        SemanticDocument document,
        TextSpan textSpan,
        CancellationToken cancellationToken)
    {
        var state = new State(service, document);
        if (!state.TryInitialize(textSpan, cancellationToken))
        {
            return null;
        }

        return state;
    }

    // Performs all validation and context classification. Returns false (and
    // leaves the state unusable) on any disqualifying condition.
    private bool TryInitialize(
        TextSpan textSpan,
        CancellationToken cancellationToken)
    {
        if (cancellationToken.IsCancellationRequested)
        {
            return false;
        }

        var tree = this.Document.SyntaxTree;
        // NOTE(review): syntaxFacts is resolved but never used in this method.
        var syntaxFacts = this.Document.Project.LanguageServices.GetService<ISyntaxFactsService>();

        this.Expression = this.GetExpressionUnderSpan(tree, textSpan, cancellationToken);
        if (this.Expression == null)
        {
            return false;
        }

        // Find the named type the expression is declared in; interfaces cannot
        // host the generated member.
        var containingType = this.Expression.AncestorsAndSelf()
            .Select(n => this.Document.SemanticModel.GetDeclaredSymbol(n, cancellationToken))
            .OfType<INamedTypeSymbol>()
            .FirstOrDefault();

#if SCRIPTING
        // In interactive/script code top-level expressions live in the script class.
        containingType = containingType ?? this.Document.SemanticModel.Compilation.ScriptClass;
#endif

        if (containingType == null || containingType.TypeKind == TypeKind.Interface)
        {
            return false;
        }

        if (!CanIntroduceVariable(cancellationToken))
        {
            return false;
        }

        this.IsConstant = this.Document.SemanticModel.GetConstantValue(this.Expression, cancellationToken).HasValue;

        // Note: the ordering of these clauses are important. They go, generally, from
        // innermost to outermost order.
        if (IsInQueryContext(cancellationToken))
        {
            if (CanGenerateInto<TQueryExpressionSyntax>(cancellationToken))
            {
                this.InQueryContext = true;
                return true;
            }

            return false;
        }

        if (IsInConstructorInitializerContext(cancellationToken))
        {
            if (CanGenerateInto<TTypeDeclarationSyntax>(cancellationToken))
            {
                this.InConstructorInitializerContext = true;
                return true;
            }

            return false;
        }

        var enclosingBlocks = _service.GetContainingExecutableBlocks(this.Expression);
        if (enclosingBlocks.Any())
        {
            // If we're inside a block, then don't even try the other options (like field,
            // constructor initializer, etc.).  This is desirable behavior.  If we're in a
            // block in a field, then we're in a lambda, and we want to offer to generate
            // a local, and not a field.
            if (IsInBlockContext(cancellationToken))
            {
                this.InBlockContext = true;
                return true;
            }

            return false;
        }

        // The ordering of checks is important here. If we are inside a block within an Expression
        // bodied member, we should treat it as if we are in block context.
        // For example, in such a scenario we should generate inside the block, instead of rewriting
        // a concise expression bodied member to its equivalent that has a body with a block.
        // For this reason, block should precede expression bodied member check.
        if (_service.IsInExpressionBodiedMember(this.Expression))
        {
            if (CanGenerateInto<TTypeDeclarationSyntax>(cancellationToken))
            {
                this.InExpressionBodiedMemberContext = true;
                return true;
            }

            return false;
        }

        if (CanGenerateInto<TTypeDeclarationSyntax>(cancellationToken))
        {
            if (IsInParameterContext(cancellationToken))
            {
                this.InParameterContext = true;
                return true;
            }
            else if (IsInFieldContext(cancellationToken))
            {
                this.InFieldContext = true;
                return true;
            }
            else if (IsInAttributeContext(cancellationToken))
            {
                this.InAttributeContext = true;
                return true;
            }
        }

        return false;
    }

    // Caches and returns the semantic map for the located expression.
    public SemanticMap GetSemanticMap(CancellationToken cancellationToken)
    {
        _semanticMap = _semanticMap ?? this.Document.SemanticModel.GetSemanticMap(this.Expression, cancellationToken);
        return _semanticMap;
    }

    // Maps the user's text selection to the single expression it covers: the
    // innermost expression that is an ancestor of both the start and end tokens
    // and whose span is compatible with the selection. Returns null otherwise.
    private TExpressionSyntax GetExpressionUnderSpan(SyntaxTree tree, TextSpan textSpan, CancellationToken cancellationToken)
    {
        var root = tree.GetRoot(cancellationToken);
        var startToken = root.FindToken(textSpan.Start);
        var stopToken = root.FindToken(textSpan.End);

        // If the selection ends before the found token starts, the real stop
        // token is the previous one.
        if (textSpan.End <= stopToken.SpanStart)
        {
            stopToken = stopToken.GetPreviousToken(includeSkipped: true);
        }

        if (startToken.RawKind == 0 || stopToken.RawKind == 0)
        {
            return null;
        }

        var containingExpressions1 = startToken.GetAncestors<TExpressionSyntax>().ToList();
        var containingExpressions2 = stopToken.GetAncestors<TExpressionSyntax>().ToList();

        var commonExpression = containingExpressions1.FirstOrDefault(containingExpressions2.Contains);
        if (commonExpression == null)
        {
            return null;
        }

        // Selection start must fall in the expression's leading trivia or at its start...
        if (!(textSpan.Start >= commonExpression.FullSpan.Start &&
            textSpan.Start <= commonExpression.SpanStart))
        {
            return null;
        }

        // ...and the selection end must fall at the expression's end or in its trailing trivia.
        if (!(textSpan.End >= commonExpression.Span.End &&
            textSpan.End <= commonExpression.FullSpan.End))
        {
            return null;
        }

        return commonExpression;
    }

    // True when the expression is a legal target: service-approved, not a bare
    // type reference, and replaceable by an RValue.
    private bool CanIntroduceVariable(
        CancellationToken cancellationToken)
    {
        if (!_service.CanIntroduceVariableFor(this.Expression))
        {
            return false;
        }

        if (this.Expression is TTypeSyntax)
        {
            return false;
        }

        // Even though we're creating a variable, we still ask if we can be replaced with an
        // RValue and not an LValue.  This is because introduction of a local adds a *new* LValue
        // location, and we want to ensure that any writes will still happen to the *original*
        // LValue location.  i.e. if you have: "a[1] = b" then you don't want to change that to
        // "var c = a[1]; c = b", as that write is no longer happening into the right LValue.
        //
        // In essense, this says "i can be replaced with an expression as long as i'm not being
        // written to".
        var semanticFacts = this.Document.Project.LanguageServices.GetService<ISemanticFactsService>();
        return semanticFacts.CanReplaceWithRValue(this.Document.SemanticModel, this.Expression, cancellationToken);
    }

    // True when the expression has an ancestor of the requested syntax kind that
    // is not in hidden (e.g. #line hidden) code.
    private bool CanGenerateInto<TSyntax>(CancellationToken cancellationToken)
        where TSyntax : SyntaxNode
    {
#if SCRIPTING
        if (this.Document.SemanticModel.Compilation.ScriptClass != null)
        {
            return true;
        }
#endif

        var syntax = this.Expression.GetAncestor<TSyntax>();
        return syntax != null && !syntax.OverlapsHiddenPosition(cancellationToken);
    }

    // True inside a type declaration, or (scripting builds only) in any
    // non-regular compilation unit.
    private bool IsInTypeDeclarationOrValidCompilationUnit()
    {
        if (this.Expression.GetAncestorOrThis<TTypeDeclarationSyntax>() != null)
        {
            return true;
        }

#if SCRIPTING
        // If we're interactive/script, we can generate into the compilation unit.
        if (this.Document.Document.SourceCodeKind != SourceCodeKind.Regular)
        {
            return true;
        }
#endif

        return false;
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics.Contracts;
using System.Globalization;
using System.Runtime.InteropServices;
namespace System
{
// Implements the Decimal data type. The Decimal data type can
// represent values ranging from -79,228,162,514,264,337,593,543,950,335 to
// 79,228,162,514,264,337,593,543,950,335 with 28 significant digits. The
// Decimal data type is ideally suited to financial calculations that
// require a large number of significant digits and no round-off errors.
//
// The finite set of values of type Decimal are of the form m
// / 10e, where m is an integer such that
// -296 <; m <; 296, and e is an integer
// between 0 and 28 inclusive.
//
// Contrary to the float and double data types, decimal
// fractional numbers such as 0.1 can be represented exactly in the
// Decimal representation. In the float and double
// representations, such numbers are often infinite fractions, making those
// representations more prone to round-off errors.
//
// The Decimal class implements widening conversions from the
// ubyte, char, short, int, and long types
// to Decimal. These widening conversions never loose any information
// and never throw exceptions. The Decimal class also implements
// narrowing conversions from Decimal to ubyte, char,
// short, int, and long. These narrowing conversions round
// the Decimal value towards zero to the nearest integer, and then
// converts that integer to the destination type. An OverflowException
// is thrown if the result is not within the range of the destination type.
//
// The Decimal class provides a widening conversion from
// Currency to Decimal. This widening conversion never loses any
// information and never throws exceptions. The Currency class provides
// a narrowing conversion from Decimal to Currency. This
// narrowing conversion rounds the Decimal to four decimals and then
// converts that number to a Currency. An OverflowException
// is thrown if the result is not within the range of the Currency type.
//
// The Decimal class provides narrowing conversions to and from the
// float and double types. A conversion from Decimal to
// float or double may loose precision, but will not loose
// information about the overall magnitude of the numeric value, and will never
// throw an exception. A conversion from float or double to
// Decimal throws an OverflowException if the value is not within
// the range of the Decimal type.
[StructLayout(LayoutKind.Sequential)]
public partial struct Decimal : IFormattable, IComparable, IComparable<Decimal>, IEquatable<Decimal>, IConvertible
{
// Sign mask for the flags field. A value of zero in this bit indicates a
// positive Decimal value, and a value of one in this bit indicates a
// negative Decimal value.
//
// Look at OleAut's DECIMAL_NEG constant to check for negative values
// in native code.
private const uint SignMask = 0x80000000;
// Scale mask for the flags field. This byte in the flags field contains
// the power of 10 to divide the Decimal value by. The scale byte must
// contain a value between 0 and 28 inclusive.
private const uint ScaleMask = 0x00FF0000;
// Number of bits scale is shifted by.
private const int ScaleShift = 16;
// Constant representing the Decimal value 0.
public const Decimal Zero = 0m;
// Constant representing the Decimal value 1.
public const Decimal One = 1m;
// Constant representing the Decimal value -1.
public const Decimal MinusOne = -1m;
// Constant representing the largest possible Decimal value. The value of
// this constant is 79,228,162,514,264,337,593,543,950,335.
public const Decimal MaxValue = 79228162514264337593543950335m;
// Constant representing the smallest possible Decimal value. The value of
// this constant is -79,228,162,514,264,337,593,543,950,335.
public const Decimal MinValue = -79228162514264337593543950335m;
// The lo, mid, hi, and flags fields contain the representation of the
// Decimal value. The lo, mid, and hi fields contain the 96-bit integer
// part of the Decimal. Bits 0-15 (the lower word) of the flags field are
// unused and must be zero; bits 16-23 contain must contain a value between
// 0 and 28, indicating the power of 10 to divide the 96-bit integer part
// by to produce the Decimal value; bits 24-30 are unused and must be zero;
// and finally bit 31 indicates the sign of the Decimal value, 0 meaning
// positive and 1 meaning negative.
//
// NOTE: Do not change the order in which these fields are declared. The
// native methods in this class rely on this particular order.
private uint _flags;
private uint _hi;
private uint _lo;
private uint _mid;
// Constructs a zero Decimal.
//public Decimal() {
// lo = 0;
// mid = 0;
// hi = 0;
// flags = 0;
//}
// Constructs a Decimal from an integer value. The magnitude goes into the low
// 32 bits of the 96-bit integer part; the sign is recorded in the flags word.
//
public Decimal(int value)
{
    //  JIT today can't inline methods that contains "starg" opcode.
    //  For more details, see DevDiv Bugs 81184: x86 JIT CQ: Removing the inline striction of "starg".
    int value_copy = value;
    if (value_copy >= 0)
    {
        _flags = 0;
    }
    else
    {
        _flags = SignMask;
        // For int.MinValue the unchecked negation wraps, but the subsequent
        // cast to uint still yields the correct magnitude (0x80000000).
        value_copy = -value_copy;
    }
    _lo = (uint)value_copy;
    _mid = 0;
    _hi = 0;
}
// Constructs a Decimal from an unsigned integer value. Always non-negative,
// scale 0; the value occupies only the low 32 bits.
//
[CLSCompliant(false)]
public Decimal(uint value)
{
    _flags = 0;
    _lo = value;
    _mid = 0;
    _hi = 0;
}
// Constructs a Decimal from a long value. The 64-bit magnitude is split across
// the lo and mid words; the sign is recorded in the flags word.
//
public Decimal(long value)
{
    //  JIT today can't inline methods that contains "starg" opcode.
    //  For more details, see DevDiv Bugs 81184: x86 JIT CQ: Removing the inline striction of "starg".
    long value_copy = value;
    if (value_copy >= 0)
    {
        _flags = 0;
    }
    else
    {
        _flags = SignMask;
        // For long.MinValue the unchecked negation wraps, but the uint casts
        // below still extract the correct 64-bit magnitude.
        value_copy = -value_copy;
    }
    _lo = (uint)value_copy;
    _mid = (uint)(value_copy >> 32);
    _hi = 0;
}
// Constructs a Decimal from an unsigned long value. Always non-negative,
// scale 0; the 64-bit magnitude is split across the lo and mid words.
//
[CLSCompliant(false)]
public Decimal(ulong value)
{
    _flags = 0;
    _lo = (uint)value;
    _mid = (uint)(value >> 32);
    _hi = 0;
}
// Constructs a Decimal from a float value. Per the type-level contract above,
// the conversion throws OverflowException when the value is outside the
// representable Decimal range.
//
public Decimal(float value)
{
    DecCalc.VarDecFromR4(value, out this);
}
// Constructs a Decimal from a double value. Per the type-level contract above,
// the conversion throws OverflowException when the value is outside the
// representable Decimal range.
//
public Decimal(double value)
{
    DecCalc.VarDecFromR8(value, out this);
}
// Constructs a Decimal from an integer array containing a binary
// representation. The bits argument must be a non-null integer
// array with four elements. bits[0], bits[1], and
// bits[2] contain the low, middle, and high 32 bits of the 96-bit
// integer part of the Decimal. bits[3] contains the scale factor
// and sign of the Decimal: bits 0-15 (the lower word) are unused and must
// be zero; bits 16-23 must contain a value between 0 and 28, indicating
// the power of 10 to divide the 96-bit integer part by to produce the
// Decimal value; bits 24-30 are unused and must be zero; and finally bit
// 31 indicates the sign of the Decimal value, 0 meaning positive and 1
// meaning negative.
//
// Note that there are several possible binary representations for the
// same numeric value. For example, the value 1 can be represented as {1,
// 0, 0, 0} (integer value 1 with a scale factor of 0) and equally well as
// {1000, 0, 0, 0x30000} (integer value 1000 with a scale factor of 3).
// The possible binary representations of a particular value are all
// equally valid, and all are numerically equivalent.
//
// Validation (null check, length, flags layout) is delegated to SetBits,
// which throws for any malformed representation.
public Decimal(int[] bits)
{
    // Definite-assignment: all fields must be assigned before calling a method.
    _lo = 0;
    _mid = 0;
    _hi = 0;
    _flags = 0;
    SetBits(bits);
}
// Validates and installs a 4-element binary representation (see the
// Decimal(int[]) constructor for the exact layout). Throws
// ArgumentNullException for a null array and ArgumentException for any
// malformed representation (wrong length, stray flag bits, scale > 28).
private void SetBits(int[] bits)
{
    if (bits == null)
        throw new ArgumentNullException(nameof(bits));
    Contract.EndContractBlock();

    if (bits.Length == 4)
    {
        uint f = (uint)bits[3];
        // Flags are valid when only sign/scale bits are set and the scale
        // byte (bits 16-23) does not exceed 28.
        if ((f & ~(SignMask | ScaleMask)) == 0 && (f & ScaleMask) <= (28 << 16))
        {
            _lo = (uint)bits[0];
            _mid = (uint)bits[1];
            _hi = (uint)bits[2];
            _flags = f;
            return;
        }
    }
    throw new ArgumentException(SR.Arg_DecBitCtor);
}
// Constructs a Decimal from its constituent parts: lo/mid/hi form the 96-bit
// integer magnitude, isNegative selects the sign bit, and scale is the power
// of ten (0-28) the magnitude is divided by.
//
// Throws ArgumentOutOfRangeException when scale exceeds 28.
public Decimal(int lo, int mid, int hi, bool isNegative, byte scale)
{
    if (scale > 28)
        throw new ArgumentOutOfRangeException(nameof(scale), SR.ArgumentOutOfRange_DecimalScale);
    Contract.EndContractBlock();
    _lo = (uint)lo;
    _mid = (uint)mid;
    _hi = (uint)hi;
    // Use the named ScaleShift constant instead of the magic literal 16 so the
    // flags-word layout is defined in exactly one place (ScaleShift == 16, so
    // behavior is unchanged).
    _flags = ((uint)scale) << ScaleShift;
    if (isNegative)
        _flags |= SignMask;
}
// Constructs a Decimal from its constituent parts, taking the raw flags word
// directly. Throws ArgumentException when the flags word has any bit set
// outside the sign/scale masks or a scale byte greater than 28.
private Decimal(int lo, int mid, int hi, int flags)
{
    if ((flags & ~(SignMask | ScaleMask)) == 0 && (flags & ScaleMask) <= (28 << 16))
    {
        _lo = (uint)lo;
        _mid = (uint)mid;
        _hi = (uint)hi;
        _flags = (uint)flags;
        return;
    }
    throw new ArgumentException(SR.Arg_DecBitCtor);
}
// Returns the absolute value of the given Decimal. If d is positive, the
// result is d. If d is negative, the result is -d.
//
internal static Decimal Abs(Decimal d)
{
    // Clearing the sign bit of the flags word is sufficient; the 96-bit
    // magnitude and the scale are left untouched.
    int positiveFlags = (int)(d._flags & ~SignMask);
    return new Decimal((int)d._lo, (int)d._mid, (int)d._hi, positiveFlags);
}
// Adds two Decimal values and returns the sum.
//
public static Decimal Add(Decimal d1, Decimal d2)
{
    // VarDecAdd accumulates into its first argument; work on a local copy so
    // intent is explicit (d1 is already a by-value copy of the caller's value).
    Decimal sum = d1;
    DecCalc.VarDecAdd(ref sum, ref d2);
    return sum;
}
// Rounds a Decimal to an integer value. The Decimal argument is rounded
// towards positive infinity.
public static Decimal Ceiling(Decimal d)
{
    // ceil(x) == -floor(-x): reuse Floor's rounding toward negative infinity.
    Decimal negated = -d;
    return -Decimal.Floor(negated);
}
// Compares two Decimal values, returning an integer that indicates their
// relationship: negative when d1 < d2, zero when equal, positive when d1 > d2.
//
public static int Compare(Decimal d1, Decimal d2)
{
    return DecCalc.VarDecCmp(ref d1, ref d2);
}
// Compares this object to another object, returning an integer that
// indicates the relationship.
// Returns a value less than zero if this object is less than value;
// null is considered to be less than any instance.
// If object is not of type Decimal, this method throws an ArgumentException.
//
int IComparable.CompareTo(Object value)
{
    if (value == null)
        return 1;
    if (!(value is Decimal))
        throw new ArgumentException(SR.Arg_MustBeDecimal);

    Decimal other = (Decimal)value;
    return DecCalc.VarDecCmp(ref this, ref other);
}
// Strongly-typed comparison: negative / zero / positive as this value is
// less than / equal to / greater than the argument.
public int CompareTo(Decimal value)
{
    int relation = DecCalc.VarDecCmp(ref this, ref value);
    return relation;
}
// Divides two Decimal values and returns the quotient.
//
public static Decimal Divide(Decimal d1, Decimal d2)
{
    // VarDecDiv leaves the quotient in its first argument; use a local copy
    // to make that explicit.
    Decimal quotient = d1;
    DecCalc.VarDecDiv(ref quotient, ref d2);
    return quotient;
}
// Checks if this Decimal is equal to a given object. Returns true only when
// the object is a boxed Decimal that is numerically equal to this value
// (different binary representations of the same number compare equal).
//
public override bool Equals(Object value)
{
    // Guard clause: anything that is not a boxed Decimal is unequal.
    if (!(value is Decimal))
    {
        return false;
    }

    Decimal other = (Decimal)value;
    return DecCalc.VarDecCmp(ref this, ref other) == 0;
}
// Strongly-typed numeric equality (IEquatable<Decimal>).
public bool Equals(Decimal value)
{
    int relation = DecCalc.VarDecCmp(ref this, ref value);
    return relation == 0;
}
// Returns the hash code for this Decimal. Hashes the value via its double
// representation so that numerically equal decimals with different internal
// representations hash identically (required because Equals compares
// numerically).
//
public unsafe override int GetHashCode()
{
    double dbl = DecCalc.VarR8FromDec(ref this);
    if (dbl == 0.0)
        // Ensure 0 and -0 have the same hash code
        return 0;

    // conversion to double is lossy and produces rounding errors so we mask off the lowest 4 bits
    //
    // For example these two numerically equal decimals with different internal representations produce
    // slightly different results when converted to double:
    //
    // decimal a = new decimal(new int[] { 0x76969696, 0x2fdd49fa, 0x409783ff, 0x00160000 });
    //                     => (decimal)1999021.176470588235294117647000000000 => (double)1999021.176470588
    // decimal b = new decimal(new int[] { 0x3f0f0f0f, 0x1e62edcc, 0x06758d33, 0x00150000 });
    //                     => (decimal)1999021.176470588235294117647000000000 => (double)1999021.1764705882
    //
    // XOR the masked low word of the double's bits with its high word.
    return (int)(((((uint*)&dbl)[0]) & 0xFFFFFFF0) ^ ((uint*)&dbl)[1]);
}
// Compares two Decimal values for numeric equality. Returns true if the two
// Decimal values are equal, or false if they are not equal.
//
public static bool Equals(Decimal d1, Decimal d2)
{
    int relation = DecCalc.VarDecCmp(ref d1, ref d2);
    return relation == 0;
}
// Rounds a Decimal to an integer value. The Decimal argument is rounded
// towards negative infinity.
//
public static Decimal Floor(Decimal d)
{
    // VarDecInt floors its argument in place; d is a by-value copy, so the
    // caller's value is untouched.
    Decimal floored = d;
    DecCalc.VarDecInt(ref floored);
    return floored;
}
// Converts this Decimal to a string. The resulting string consists of an
// optional minus sign ("-") followed to a sequence of digits ("0" - "9"),
// optionally followed by a decimal point (".") and another sequence of
// digits. Uses the default format and the current culture (null provider).
//
public override String ToString()
{
    Contract.Ensures(Contract.Result<String>() != null);
    return FormatProvider.FormatDecimal(this, null, null);
}
// Converts this Decimal to a string using the given numeric format string
// and the current culture (null provider).
public String ToString(String format)
{
    Contract.Ensures(Contract.Result<String>() != null);
    return FormatProvider.FormatDecimal(this, format, null);
}
// Converts this Decimal to a string using the default format and the given
// culture-specific format provider.
public String ToString(IFormatProvider provider)
{
    Contract.Ensures(Contract.Result<String>() != null);
    return FormatProvider.FormatDecimal(this, null, provider);
}
// Converts this Decimal to a string using both an explicit format string and
// a culture-specific format provider.
public String ToString(String format, IFormatProvider provider)
{
    Contract.Ensures(Contract.Result<String>() != null);
    return FormatProvider.FormatDecimal(this, format, provider);
}
// Converts a string to a Decimal. The string must consist of an optional
// minus sign ("-") followed by a sequence of digits ("0" - "9"). The
// sequence of digits may optionally contain a single decimal point (".")
// character. Leading and trailing whitespace characters are allowed.
// Parse also allows a currency symbol, a trailing negative sign, and
// parentheses in the number.
//
// Uses NumberStyles.Number and the current culture (null provider).
public static Decimal Parse(String s)
{
    return FormatProvider.ParseDecimal(s, NumberStyles.Number, null);
}
// Mask of every NumberStyles flag that is NOT meaningful for decimal parsing;
// ValidateParseStyleFloatingPoint rejects any style containing one of these
// bits (hex is additionally rejected there despite appearing in this mask).
internal const NumberStyles InvalidNumberStyles = ~(NumberStyles.AllowLeadingWhite | NumberStyles.AllowTrailingWhite
                                                   | NumberStyles.AllowLeadingSign | NumberStyles.AllowTrailingSign
                                                   | NumberStyles.AllowParentheses | NumberStyles.AllowDecimalPoint
                                                   | NumberStyles.AllowThousands | NumberStyles.AllowExponent
                                                   | NumberStyles.AllowCurrencySymbol | NumberStyles.AllowHexSpecifier);
// Validates a NumberStyles value for floating-point parsing. Throws
// ArgumentException when the style contains undefined flags or requests
// hex parsing (not supported for Decimal).
internal static void ValidateParseStyleFloatingPoint(NumberStyles style)
{
    // Check for undefined flags
    if ((style & InvalidNumberStyles) != 0)
    {
        throw new ArgumentException(SR.Argument_InvalidNumberStyles, nameof(style));
    }
    Contract.EndContractBlock();
    if ((style & NumberStyles.AllowHexSpecifier) != 0)
    { // Check for hex number
        throw new ArgumentException(SR.Arg_HexStyleNotSupported);
    }
}
// Parses with an explicit NumberStyles (validated first) and the current
// culture (null provider).
public static Decimal Parse(String s, NumberStyles style)
{
    ValidateParseStyleFloatingPoint(style);
    return FormatProvider.ParseDecimal(s, style, null);
}
// Parses <s> with NumberStyles.Number using culture data from <provider>.
public static Decimal Parse(String s, IFormatProvider provider)
{
return FormatProvider.ParseDecimal(s, NumberStyles.Number, provider);
}
// Parses <s> with explicit styles and an explicit format provider.
public static Decimal Parse(String s, NumberStyles style, IFormatProvider provider)
{
ValidateParseStyleFloatingPoint(style);
return FormatProvider.ParseDecimal(s, style, provider);
}
// Non-throwing parse with NumberStyles.Number and the current culture;
// returns false (result = 0) on failure.
public static Boolean TryParse(String s, out Decimal result)
{
return FormatProvider.TryParseDecimal(s, NumberStyles.Number, null, out result);
}
// Non-throwing parse with explicit styles/provider. Note: style validation
// still throws ArgumentException for invalid flags; only parse failures
// are reported via the false return value.
public static Boolean TryParse(String s, NumberStyles style, IFormatProvider provider, out Decimal result)
{
ValidateParseStyleFloatingPoint(style);
return FormatProvider.TryParseDecimal(s, style, provider, out result);
}
// Returns a binary representation of a Decimal. The return value is an
// integer array with four elements. Elements 0, 1, and 2 contain the low,
// middle, and high 32 bits of the 96-bit integer part of the Decimal.
// Element 3 contains the scale factor and sign of the Decimal: bits 0-15
// (the lower word) are unused; bits 16-23 contain a value between 0 and
// 28, indicating the power of 10 to divide the 96-bit integer part by to
// produce the Decimal value; bits 24-30 are unused; and finally bit 31
// indicates the sign of the Decimal value, 0 meaning positive and 1
// meaning negative.
//
// Returns the raw representation: [lo32, mid32, hi32, flags], where flags
// holds the scale (bits 16-23) and sign (bit 31). See the comment above.
public static int[] GetBits(Decimal d)
{
return new int[] { (int)d._lo, (int)d._mid, (int)d._hi, (int)d._flags };
}
// Returns the larger of two Decimal values.
//
// Returns d1 when the two values compare equal, matching Math.Max semantics.
internal static Decimal Max(Decimal d1, Decimal d2)
{
return Compare(d1, d2) >= 0 ? d1 : d2;
}
// Returns the smaller of two Decimal values.
//
// Returns d2 when the two values compare equal, mirroring Max above.
internal static Decimal Min(Decimal d1, Decimal d2)
{
return Compare(d1, d2) < 0 ? d1 : d2;
}
// Computes the remainder of d1 / d2; delegates to the native-style
// decimal calculator. DecCalc is expected to throw on division by zero
// (NOTE(review): not verifiable from this file — confirm in DecCalc).
public static Decimal Remainder(Decimal d1, Decimal d2)
{
return DecCalc.VarDecMod(ref d1, ref d2);
}
// Multiplies two Decimal values.
//
// Multiplies two Decimal values via DecCalc; the product is written into
// the out parameter rather than returned.
public static Decimal Multiply(Decimal d1, Decimal d2)
{
Decimal decRes;
DecCalc.VarDecMul(ref d1, ref d2, out decRes);
return decRes;
}
// Returns the negated value of the given Decimal. If d is non-zero,
// the result is -d. If d is zero, the result is zero.
//
// Negation flips only the sign bit in the flags word; note that this means
// Negate(0m) yields a "negative zero" bit pattern, which still compares
// equal to zero.
public static Decimal Negate(Decimal d)
{
return new Decimal((int)d._lo, (int)d._mid, (int)d._hi, (int)(d._flags ^ SignMask));
}
// Rounds a Decimal value to a given number of decimal places. The value
// given by d is rounded to the number of decimal places given by
// decimals. The decimals argument must be an integer between
// 0 and 28 inclusive.
//
// By default a mid-point value is rounded to the nearest even number. If the mode is
// passed in, it can also round away from zero.
// Rounds a Decimal value to a given number of decimal places. The value
// given by d is rounded to the number of decimal places given by
// decimals, using banker's rounding (midpoint-to-even). The decimals
// argument must be an integer between 0 and 28 inclusive.
//
// Throws ArgumentOutOfRangeException when decimals is outside [0, 28].
internal static Decimal Round(Decimal d, int decimals)
{
    // Validate the argument before doing any work.
    if (decimals < 0 || decimals > 28)
        throw new ArgumentOutOfRangeException(nameof(decimals), SR.ArgumentOutOfRange_DecimalRound);
    Decimal result = new Decimal();
    DecCalc.VarDecRound(ref d, decimals, ref result);
    // Return the rounded value directly instead of copying it back
    // through the (by-value) parameter first.
    return result;
}
// Rounds d to <decimals> places using the requested midpoint strategy:
// ToEven -> banker's rounding via VarDecRound; AwayFromZero -> dedicated
// in-place rounding helper. Only those two modes are accepted.
internal static Decimal Round(Decimal d, int decimals, MidpointRounding mode)
{
if (decimals < 0 || decimals > 28)
throw new ArgumentOutOfRangeException(nameof(decimals), SR.ArgumentOutOfRange_DecimalRound);
if (mode < MidpointRounding.ToEven || mode > MidpointRounding.AwayFromZero)
throw new ArgumentException(SR.Format(SR.Argument_InvalidEnumValue, mode, "MidpointRounding"), nameof(mode));
Contract.EndContractBlock();
if (mode == MidpointRounding.ToEven)
{
Decimal result = new Decimal();
DecCalc.VarDecRound(ref d, decimals, ref result);
d = result;
}
else
{
// AwayFromZero rounds d in place.
DecCalc.InternalRoundFromZero(ref d, decimals);
}
return d;
}
// Subtracts two Decimal values.
//
// Subtracts d2 from d1. VarDecSub writes the difference into d1
// (a by-value copy), which is then returned.
public static Decimal Subtract(Decimal d1, Decimal d2)
{
DecCalc.VarDecSub(ref d1, ref d2);
return d1;
}
// Converts a Decimal to an unsigned byte. The Decimal value is rounded
// towards zero to the nearest integer value, and the result of this
// operation is returned as a byte.
//
// Converts a Decimal to an unsigned byte. The Decimal value is rounded
// towards zero to the nearest integer value, and the result of this
// operation is returned as a byte.
//
// Throws OverflowException (with a Byte-specific message) when the value
// does not fit in [Byte.MinValue, Byte.MaxValue].
public static byte ToByte(Decimal value)
{
    uint temp;
    try
    {
        temp = ToUInt32(value);
    }
    catch (OverflowException e)
    {
        // Re-wrap so callers see a Byte-specific overflow message,
        // preserving the original exception as the inner cause.
        throw new OverflowException(SR.Overflow_Byte, e);
    }
    // temp is unsigned, so only the upper bound can actually be exceeded;
    // the former "temp < Byte.MinValue" comparison was always false (CS0652).
    if (temp > Byte.MaxValue) throw new OverflowException(SR.Overflow_Byte);
    return (byte)temp;
}
// Converts a Decimal to a signed byte. The Decimal value is rounded
// towards zero to the nearest integer value, and the result of this
// operation is returned as a byte.
//
[CLSCompliant(false)]
// Converts a Decimal to a signed byte by first truncating to Int32 and then
// range-checking against SByte bounds; overflow is re-wrapped with an
// SByte-specific message, preserving the original exception as inner cause.
public static sbyte ToSByte(Decimal value)
{
int temp;
try
{
temp = ToInt32(value);
}
catch (OverflowException e)
{
throw new OverflowException(SR.Overflow_SByte, e);
}
if (temp < SByte.MinValue || temp > SByte.MaxValue) throw new OverflowException(SR.Overflow_SByte);
return (sbyte)temp;
}
// Converts a Decimal to a short. The Decimal value is
// rounded towards zero to the nearest integer value, and the result of
// this operation is returned as a short.
//
// Converts a Decimal to a short via ToInt32 plus an Int16 range check;
// overflow is re-wrapped with an Int16-specific message.
public static short ToInt16(Decimal value)
{
int temp;
try
{
temp = ToInt32(value);
}
catch (OverflowException e)
{
throw new OverflowException(SR.Overflow_Int16, e);
}
if (temp < Int16.MinValue || temp > Int16.MaxValue) throw new OverflowException(SR.Overflow_Int16);
return (short)temp;
}
// Converts a Decimal to a double. Since a double has fewer significant
// digits than a Decimal, this operation may produce round-off errors.
//
// Lossy conversion to double (double has fewer significant digits than
// Decimal); never throws.
public static double ToDouble(Decimal d)
{
return DecCalc.VarR8FromDec(ref d);
}
// Converts a Decimal to an integer. The Decimal value is rounded towards
// zero to the nearest integer value, and the result of this operation is
// returned as an integer.
//
// Truncates d toward zero, then checks that the 96-bit magnitude fits in
// 32 bits. The negative path relies on two's complement: for
// d == Int32.MinValue, i == unchecked(-i) and the "i <= 0" test still
// accepts it, so the full Int32 range is representable.
public static int ToInt32(Decimal d)
{
if (d.Scale != 0) DecCalc.VarDecFix(ref d);
if (d._hi == 0 && d._mid == 0)
{
int i = (int)d._lo;
if (!d.Sign)
{
if (i >= 0) return i;
}
else
{
i = -i;
if (i <= 0) return i;
}
}
throw new OverflowException(SR.Overflow_Int32);
}
// Converts a Decimal to a long. The Decimal value is rounded towards zero
// to the nearest integer value, and the result of this operation is
// returned as a long.
//
// Truncates d toward zero, then checks that the magnitude fits in 64 bits.
// The lo/mid words are combined into a long; as in ToInt32, the negation
// trick ("l <= 0" after l = -l) lets Int64.MinValue through correctly.
public static long ToInt64(Decimal d)
{
if (d.Scale != 0) DecCalc.VarDecFix(ref d);
if (d._hi == 0)
{
long l = d._lo | (long)(int)d._mid << 32;
if (!d.Sign)
{
if (l >= 0) return l;
}
else
{
l = -l;
if (l <= 0) return l;
}
}
throw new OverflowException(SR.Overflow_Int64);
}
// Converts a Decimal to a ushort. The Decimal
// value is rounded towards zero to the nearest integer value, and the
// result of this operation is returned as a ushort.
//
[CLSCompliant(false)]
// Converts a Decimal to a ushort. The Decimal value is rounded towards
// zero to the nearest integer value; values outside the UInt16 range
// throw OverflowException with a UInt16-specific message.
public static ushort ToUInt16(Decimal value)
{
    uint temp;
    try
    {
        temp = ToUInt32(value);
    }
    catch (OverflowException e)
    {
        // Re-wrap with a UInt16-specific message, keeping the cause.
        throw new OverflowException(SR.Overflow_UInt16, e);
    }
    // temp is unsigned, so only the upper bound can be exceeded; the former
    // "temp < UInt16.MinValue" comparison was always false (CS0652).
    if (temp > UInt16.MaxValue) throw new OverflowException(SR.Overflow_UInt16);
    return (ushort)temp;
}
// Converts a Decimal to an unsigned integer. The Decimal
// value is rounded towards zero to the nearest integer value, and the
// result of this operation is returned as an unsigned integer.
//
[CLSCompliant(false)]
// Truncates d toward zero and returns the low 32 bits when the magnitude
// fits; a set sign bit is only tolerated for zero (negative zero).
public static uint ToUInt32(Decimal d)
{
if (d.Scale != 0) DecCalc.VarDecFix(ref d);
if (d._hi == 0 && d._mid == 0)
{
if (!d.Sign || d._lo == 0)
return d._lo;
}
throw new OverflowException(SR.Overflow_UInt32);
}
// Converts a Decimal to an unsigned long. The Decimal
// value is rounded towards zero to the nearest integer value, and the
// result of this operation is returned as an unsigned long.
//
[CLSCompliant(false)]
// Truncates d toward zero and combines the lo/mid words into a ulong when
// the magnitude fits in 64 bits; a set sign bit is only tolerated for zero.
public static ulong ToUInt64(Decimal d)
{
if (d.Scale != 0) DecCalc.VarDecFix(ref d);
if (d._hi == 0)
{
ulong l = (ulong)d._lo | ((ulong)d._mid << 32);
if (!d.Sign || l == 0)
return l;
}
throw new OverflowException(SR.Overflow_UInt64);
}
// Converts a Decimal to a float. Since a float has fewer significant
// digits than a Decimal, this operation may produce round-off errors.
//
// Lossy conversion to float; never throws.
public static float ToSingle(Decimal d)
{
return DecCalc.VarR4FromDec(ref d);
}
// Truncates a Decimal to an integer value. The Decimal argument is rounded
// towards zero to the nearest integer value, corresponding to removing all
// digits after the decimal point.
//
// Drops all fractional digits (rounds toward zero) in place on the local
// copy and returns it.
public static Decimal Truncate(Decimal d)
{
DecCalc.VarDecFix(ref d);
return d;
}
// ---- Widening conversions to Decimal (implicit, never throw) ----
public static implicit operator Decimal(byte value)
{
return new Decimal(value);
}
[CLSCompliant(false)]
public static implicit operator Decimal(sbyte value)
{
return new Decimal(value);
}
public static implicit operator Decimal(short value)
{
return new Decimal(value);
}
[CLSCompliant(false)]
public static implicit operator Decimal(ushort value)
{
return new Decimal(value);
}
public static implicit operator Decimal(char value)
{
return new Decimal(value);
}
public static implicit operator Decimal(int value)
{
return new Decimal(value);
}
[CLSCompliant(false)]
public static implicit operator Decimal(uint value)
{
return new Decimal(value);
}
public static implicit operator Decimal(long value)
{
return new Decimal(value);
}
[CLSCompliant(false)]
public static implicit operator Decimal(ulong value)
{
return new Decimal(value);
}
// ---- Narrowing conversions from floating point (explicit; may throw
// OverflowException for values outside the Decimal range or NaN) ----
public static explicit operator Decimal(float value)
{
return new Decimal(value);
}
public static explicit operator Decimal(double value)
{
return new Decimal(value);
}
// ---- Narrowing conversions from Decimal (explicit; integral targets
// truncate toward zero and may throw OverflowException) ----
public static explicit operator byte(Decimal value)
{
return ToByte(value);
}
[CLSCompliant(false)]
public static explicit operator sbyte(Decimal value)
{
return ToSByte(value);
}
public static explicit operator char(Decimal value)
{
// char has no dedicated ToChar helper; reuse the UInt16 conversion and
// re-wrap any overflow with a Char-specific message.
UInt16 temp;
try
{
temp = ToUInt16(value);
}
catch (OverflowException e)
{
throw new OverflowException(SR.Overflow_Char, e);
}
return (char)temp;
}
public static explicit operator short(Decimal value)
{
return ToInt16(value);
}
[CLSCompliant(false)]
public static explicit operator ushort(Decimal value)
{
return ToUInt16(value);
}
public static explicit operator int(Decimal value)
{
return ToInt32(value);
}
[CLSCompliant(false)]
public static explicit operator uint(Decimal value)
{
return ToUInt32(value);
}
public static explicit operator long(Decimal value)
{
return ToInt64(value);
}
[CLSCompliant(false)]
public static explicit operator ulong(Decimal value)
{
return ToUInt64(value);
}
// Floating-point targets are lossy but never throw.
public static explicit operator float(Decimal value)
{
return ToSingle(value);
}
public static explicit operator double(Decimal value)
{
return ToDouble(value);
}
// ---- Arithmetic operators: thin wrappers over the named static methods.
// Add/Subtract/Multiply/Divide may throw OverflowException; Divide and
// operator % may throw DivideByZeroException (see DecCalc). ----
public static Decimal operator +(Decimal d)
{
return d;
}
public static Decimal operator -(Decimal d)
{
return Negate(d);
}
// Increment/decrement add or subtract exactly one; they can overflow at
// the extremes of the Decimal range.
public static Decimal operator ++(Decimal d)
{
return Add(d, One);
}
public static Decimal operator --(Decimal d)
{
return Subtract(d, One);
}
public static Decimal operator +(Decimal d1, Decimal d2)
{
return Add(d1, d2);
}
public static Decimal operator -(Decimal d1, Decimal d2)
{
return Subtract(d1, d2);
}
public static Decimal operator *(Decimal d1, Decimal d2)
{
return Multiply(d1, d2);
}
public static Decimal operator /(Decimal d1, Decimal d2)
{
return Divide(d1, d2);
}
public static Decimal operator %(Decimal d1, Decimal d2)
{
return Remainder(d1, d2);
}
// ---- Comparison operators: all six map onto DecCalc.VarDecCmp's
// three-way result, so numerically equal values with different scales
// (e.g. 1.0m and 1.00m) compare equal. ----
public static bool operator ==(Decimal d1, Decimal d2)
{
return DecCalc.VarDecCmp(ref d1, ref d2) == 0;
}
public static bool operator !=(Decimal d1, Decimal d2)
{
return DecCalc.VarDecCmp(ref d1, ref d2) != 0;
}
public static bool operator <(Decimal d1, Decimal d2)
{
return DecCalc.VarDecCmp(ref d1, ref d2) < 0;
}
public static bool operator <=(Decimal d1, Decimal d2)
{
return DecCalc.VarDecCmp(ref d1, ref d2) <= 0;
}
public static bool operator >(Decimal d1, Decimal d2)
{
return DecCalc.VarDecCmp(ref d1, ref d2) > 0;
}
public static bool operator >=(Decimal d1, Decimal d2)
{
return DecCalc.VarDecCmp(ref d1, ref d2) >= 0;
}
//
// IConvertible implementation
//
// Explicit IConvertible implementation: numeric targets delegate to
// Convert.* (which may throw OverflowException); Char and DateTime are
// unsupported and always throw InvalidCastException. The provider
// argument is ignored by every member.
TypeCode IConvertible.GetTypeCode()
{
return TypeCode.Decimal;
}
/// <internalonly/>
bool IConvertible.ToBoolean(IFormatProvider provider)
{
return Convert.ToBoolean(this);
}
/// <internalonly/>
char IConvertible.ToChar(IFormatProvider provider)
{
throw new InvalidCastException(String.Format(SR.InvalidCast_FromTo, "Decimal", "Char"));
}
/// <internalonly/>
sbyte IConvertible.ToSByte(IFormatProvider provider)
{
return Convert.ToSByte(this);
}
/// <internalonly/>
byte IConvertible.ToByte(IFormatProvider provider)
{
return Convert.ToByte(this);
}
/// <internalonly/>
short IConvertible.ToInt16(IFormatProvider provider)
{
return Convert.ToInt16(this);
}
/// <internalonly/>
ushort IConvertible.ToUInt16(IFormatProvider provider)
{
return Convert.ToUInt16(this);
}
/// <internalonly/>
int IConvertible.ToInt32(IFormatProvider provider)
{
return Convert.ToInt32(this);
}
/// <internalonly/>
uint IConvertible.ToUInt32(IFormatProvider provider)
{
return Convert.ToUInt32(this);
}
/// <internalonly/>
long IConvertible.ToInt64(IFormatProvider provider)
{
return Convert.ToInt64(this);
}
/// <internalonly/>
ulong IConvertible.ToUInt64(IFormatProvider provider)
{
return Convert.ToUInt64(this);
}
/// <internalonly/>
float IConvertible.ToSingle(IFormatProvider provider)
{
return Convert.ToSingle(this);
}
/// <internalonly/>
double IConvertible.ToDouble(IFormatProvider provider)
{
return Convert.ToDouble(this);
}
/// <internalonly/>
Decimal IConvertible.ToDecimal(IFormatProvider provider)
{
return this;
}
/// <internalonly/>
DateTime IConvertible.ToDateTime(IFormatProvider provider)
{
throw new InvalidCastException(String.Format(SR.InvalidCast_FromTo, "Decimal", "DateTime"));
}
/// <internalonly/>
Object IConvertible.ToType(Type type, IFormatProvider provider)
{
return Convert.DefaultToType((IConvertible)this, type, provider);
}
}
}
| |
using System;
using Xunit;
using Xunit.Abstractions;
namespace dexih.functions.ml.tests
{
// xUnit tests for the project's RegressionAnalysis trainers. Each test feeds
// the same synthetic linear series (y = 2x for x = 1..9), builds a model with
// one trainer, and predicts y-hat for x = 5 (expected near 5 when asserted).
// NOTE(review): class name "regression" is lowercase — PascalCase would match
// .NET convention, but renaming would change the public type name.
public class regression
{
// xUnit-provided sink for writing prediction values to the test log.
private readonly ITestOutputHelper _output;
public regression(ITestOutputHelper output)
{
_output = output;
}
[Fact]
public void RegressionLbfgsPoisson()
{
var regression = new RegressionAnalysis();
// create data that easily clusters into two areas.
var labels = new[] {"x", "y"};
var hotEncode = new[] {EEncoding.None, EEncoding.None};
// predict a simple linear line
for (var i = 1; i < 10; i++)
{
regression.RegressionLbfgsPoissonTrain(i, labels, new object[] {i, 2 * i}, hotEncode);
}
var model = regression.RegressionLbfgsPoissonTrainResult();
Assert.NotNull(model);
// check the clusters
var prediction1 = regression.RegressionPredict(model, labels, new object[] {5, 10});
_output.WriteLine($"Prediction: {prediction1}");
// Poisson regression should recover x ≈ 5 within a loose tolerance.
Assert.True(prediction1 > 4 && prediction1 < 6);
}
[Fact]
public void RegressionGam()
{
var regression = new RegressionAnalysis();
// create data that easily clusters into two areas.
var labels = new[] {"x", "y"};
var hotEncode = new[] {EEncoding.None, EEncoding.None};
// predict a simple linear line
for (var i = 1; i < 10; i++)
{
regression.RegressionGamTrain(i, labels, new object[] {i, 2 * i}, hotEncode);
}
var model = regression.RegressionGamTrainResult();
Assert.NotNull(model);
// check the clusters
var prediction1 = regression.RegressionPredict(model, labels, new object[] {5, 10});
_output.WriteLine($"Prediction: {prediction1}");
// NOTE(review): range assertion disabled — GAM appears too inaccurate on
// this tiny sample; the test only verifies training/prediction runs.
// Assert.True(prediction1 > 4 && prediction1 < 6);
}
//TODO Missing Library Issue.
// [Fact]
// public void RegressionOls()
// {
// var regression = new RegressionAnalysis();
//
// // create data that easily clusters into two areas.
// var labels = new[] {"x", "y"};
// var hotEncode = new[] {EEncoding.None, EEncoding.None};
//
// // predict a simple linear line
// for (var i = 1; i < 10; i++)
// {
// regression.RegressionOlsTrain(i, labels, new object[] {i, 2 * i}, hotEncode);
// }
//
// var model = regression.RegressionOlsTrainResult();
//
// Assert.NotNull(model);
//
// // check the clusters
// var prediction1 = regression.RegressionPredict(model, labels, new object[] {5, 10});
//
// _output.WriteLine($"Prediction: {prediction1}");
// Assert.True(prediction1 > 4 && prediction1 < 6);
//
// }
[Fact]
public void RegressionFastForest()
{
var regression = new RegressionAnalysis();
// create data that easily clusters into two areas.
var labels = new[] {"x", "y"};
var hotEncode = new[] {EEncoding.None, EEncoding.None};
// predict a simple linear line
for (var i = 1; i < 10; i++)
{
regression.RegressionFastForestTrain(i, labels, new object[] {i, 2 * i}, hotEncode);
}
var model = regression.RegressionFastForestTrainResult();
Assert.NotNull(model);
// check the clusters
var prediction1 = regression.RegressionPredict(model, labels, new object[] {5, 10});
_output.WriteLine($"Prediction: {prediction1}");
// NOTE(review): range assertion disabled; smoke test only.
// Assert.True(prediction1 > 4 && prediction1 < 6);
}
[Fact]
public void RegressionSdca()
{
var regression = new RegressionAnalysis();
// create data that easily clusters into two areas.
var labels = new[] {"x", "y"};
var hotEncode = new[] {EEncoding.None, EEncoding.None};
// predict a simple linear line
for (var i = 1; i < 10; i++)
{
regression.RegressionSdcaTrain(i, labels, new object[] {i, 2 * i}, hotEncode);
}
// Cap iterations to keep the test fast and deterministic enough.
var model = regression.RegressionSdcaTrainResult(maximumNumberOfIterations:5);
Assert.NotNull(model);
// check the clusters
var prediction1 = regression.RegressionPredict(model, labels, new object[] {5, 10});
_output.WriteLine($"Prediction: {prediction1}");
// Only assert positivity: with 5 iterations SDCA may not converge to 5.
Assert.True(prediction1 > 0);
}
[Fact]
public void RegressionFastTree()
{
var regression = new RegressionAnalysis();
// create data that easily clusters into two areas.
var labels = new[] {"x", "y"};
var hotEncode = new[] {EEncoding.None, EEncoding.None};
// predict a simple linear line
for (var i = 1; i < 10; i++)
{
regression.RegressionFastTreeTrain(i, labels, new object[] {i, 2 * i}, hotEncode);
}
var model = regression.RegressionFastTreeTrainResult();
Assert.NotNull(model);
// check the clusters
var prediction1 = regression.RegressionPredict(model, labels, new object[] {5, 10});
_output.WriteLine($"Prediction: {prediction1}");
// NOTE(review): range assertion disabled; smoke test only.
// Assert.True(prediction1 > 4 && prediction1 < 6);
}
[Fact]
public void RegressionOnlineGradientDescent()
{
var regression = new RegressionAnalysis();
// create data that easily clusters into two areas.
var labels = new[] {"x", "y"};
var hotEncode = new[] {EEncoding.None, EEncoding.None};
// predict a simple linear line
for (var i = 1; i < 10; i++)
{
regression.RegressionOnlineGradientDescentTrain(i, labels, new object[] {i, 2 * i}, hotEncode);
}
var model = regression.RegressionOnlineGradientDescentTrainResult();
Assert.NotNull(model);
// check the clusters
var prediction1 = regression.RegressionPredict(model, labels, new object[] {5, 10});
_output.WriteLine($"Prediction: {prediction1}");
// NOTE(review): range assertion disabled; smoke test only.
// Assert.True(prediction1 > 4 && prediction1 < 6);
}
[Fact]
public void RegressionFastTreeTweedie()
{
var regression = new RegressionAnalysis();
// create data that easily clusters into two areas.
var labels = new[] {"x", "y"};
var hotEncode = new[] {EEncoding.None, EEncoding.None};
// predict a simple linear line
for (var i = 1; i < 10; i++)
{
regression.RegressionFastTreeTweedieTrain(i, labels, new object[] {i, 2 * i}, hotEncode);
}
var model = regression.RegressionFastTreeTweedieTrainResult();
Assert.NotNull(model);
// check the clusters
var prediction1 = regression.RegressionPredict(model, labels, new object[] {5, 10});
_output.WriteLine($"Prediction: {prediction1}");
// NOTE(review): range assertion disabled; smoke test only.
// Assert.True(prediction1 > 4 && prediction1 < 6);
}
[Fact]
public void RegressionBest()
{
var regression = new RegressionAnalysis();
// create data that easily clusters into two areas.
var labels = new[] {"x", "y"};
var hotEncode = new[] {EEncoding.None, EEncoding.None};
// predict a simple linear line
// Uses a larger sample (49 points) so the AutoML-style "best" experiment
// has enough data to pick an accurate trainer.
for (var i = 1; i < 50; i++)
{
regression.RegressionExperimentBest(i, labels, new object[] {i, 2 * i}, hotEncode);
}
// 60 presumably is the experiment time budget in seconds — TODO confirm.
var result = regression.RegressionExperimentBestResult(60);
Assert.NotNull(result.Model);
// check the clusters
var prediction1 = regression.RegressionPredict(result.Model, labels, new object[] {5, 10});
_output.WriteLine($"Prediction: {prediction1}");
Assert.True(prediction1 > 4 && prediction1 < 6);
}
}
}
| |
//
// DapService.cs
//
// Authors:
// Gabriel Burt <gburt@novell.com>
// Aaron Bockover <abockover@novell.com>
// Ruben Vermeersch <ruben@savanne.be>
//
// Copyright (C) 2007-2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Linq;
using Mono.Unix;
using Mono.Addins;
using Hyena;
using Banshee.Base;
using Banshee.Kernel;
using Banshee.ServiceStack;
using Banshee.Sources;
using Banshee.Collection;
using Banshee.Collection.Database;
using Banshee.Hardware;
namespace Banshee.Dap
{
// Service that discovers Digital Audio Player (DAP) devices via the hardware
// manager, matches them against addin-provided DapSource types, and maps /
// unmaps them as Banshee sources. All shared state (sources,
// supported_dap_types, unhandled_device_commands, initialized) is guarded by
// the private "sync" lock, except OnDeviceCommand which locks "this" —
// NOTE(review): locking on two different objects for related state looks
// inconsistent; confirm intent before touching.
public class DapService : IExtensionService, IDelayedInitializeService, IDisposable
{
// Mapped DAP sources keyed by hardware device Uuid.
private Dictionary<string, DapSource> sources;
// Device commands (e.g. --device-activate) queued until a matching DAP appears.
private List<DeviceCommand> unhandled_device_commands;
// Addin-registered DapSource types, sorted by descending priority.
private List<DapPriorityNode> supported_dap_types;
private bool initialized;
private object sync = new object ();
// Intentionally empty: real setup is deferred to DelayedInitialize.
public void Initialize ()
{
}
public void DelayedInitialize ()
{
// This group source gives us a separator for DAPs in the source view.
SourceManager.GroupSource dap_group = new SourceManager.GroupSource (Catalog.GetString ("Devices"), 400);
ThreadAssist.ProxyToMain (delegate {
ServiceManager.SourceManager.AddSource (dap_group);
});
lock (sync) {
// Bail if already initialized or there is no hardware backend at all.
if (initialized || ServiceManager.HardwareManager == null)
return;
sources = new Dictionary<string, DapSource> ();
supported_dap_types = new List<DapPriorityNode> ();
// Registering the handler also fires it for already-loaded extension nodes.
AddinManager.AddExtensionNodeHandler ("/Banshee/Dap/DeviceClass", OnExtensionChanged);
ServiceManager.HardwareManager.DeviceAdded += OnHardwareDeviceAdded;
ServiceManager.HardwareManager.DeviceRemoved += OnHardwareDeviceRemoved;
ServiceManager.HardwareManager.DeviceCommand += OnDeviceCommand;
ServiceManager.SourceManager.SourceRemoved += OnSourceRemoved;
initialized = true;
// Now that we've loaded all the enabled DAP providers, load the devices
foreach (IDevice device in ServiceManager.HardwareManager.GetAllDevices ()) {
MapDevice (device);
}
}
}
// Reacts to DAP-support addins being enabled/disabled at runtime.
private void OnExtensionChanged (object o, ExtensionNodeEventArgs args)
{
lock (sync) {
var node = (DapPriorityNode)args.ExtensionNode;
// Only DapSource subclasses are acceptable providers.
if (!node.Type.IsSubclassOf (typeof (DapSource)))
return;
if (args.Change == ExtensionChange.Add) {
Log.DebugFormat ("Dap support extension loaded: {0}", node.Addin.Id);
supported_dap_types.Add (node);
// Highest priority first, so FindDeviceSource tries the best match first.
supported_dap_types.Sort ((left, right) => right.Priority.CompareTo (left.Priority));
if (initialized) {
// See if any existing devices are handled by this new DAP support
foreach (IDevice device in ServiceManager.HardwareManager.GetAllDevices ()) {
MapDevice (device);
}
}
} else if (args.Change == ExtensionChange.Remove) {
supported_dap_types.Remove ((DapPriorityNode) args.ExtensionNode);
// Unmap every source created by the addin being removed. Collect
// first to avoid mutating sources while enumerating it.
Queue<DapSource> to_remove = new Queue<DapSource> ();
foreach (DapSource source in sources.Values) {
if (source.AddinId == node.Addin.Id) {
to_remove.Enqueue (source);
}
}
while (to_remove.Count > 0) {
UnmapDevice (to_remove.Dequeue ().Device.Uuid);
}
}
}
}
public void Dispose ()
{
// Cancel any pending device-mapping jobs before tearing down state.
Scheduler.Unschedule (typeof(MapDeviceJob));
lock (sync) {
if (!initialized)
return;
AddinManager.RemoveExtensionNodeHandler ("/Banshee/Dap/DeviceClass", OnExtensionChanged);
ServiceManager.HardwareManager.DeviceAdded -= OnHardwareDeviceAdded;
ServiceManager.HardwareManager.DeviceRemoved -= OnHardwareDeviceRemoved;
ServiceManager.HardwareManager.DeviceCommand -= OnDeviceCommand;
ServiceManager.SourceManager.SourceRemoved -= OnSourceRemoved;
// Copy values: UnmapDevice mutates the sources dictionary.
List<DapSource> dap_sources = new List<DapSource> (sources.Values);
foreach (DapSource source in dap_sources) {
UnmapDevice (source.Device.Uuid);
}
sources.Clear ();
sources = null;
supported_dap_types.Clear ();
supported_dap_types = null;
initialized = false;
}
}
// Tries each registered DAP type in priority order until one accepts the
// device; returns null when none does. InvalidDeviceException is the
// expected "not mine" signal and is swallowed silently.
private DapSource FindDeviceSource (IDevice device)
{
foreach (TypeExtensionNode node in supported_dap_types) {
try {
DapSource source = (DapSource)node.CreateInstance ();
source.DeviceInitialize (device);
source.LoadDeviceContents ();
source.AddinId = node.Addin.Id;
return source;
} catch (InvalidDeviceException) {
} catch (InvalidCastException e) {
Log.Exception ("Extension is not a DapSource as required", e);
} catch (Exception e) {
Log.Exception (e);
}
}
return null;
}
// Mapping is done on the scheduler thread to keep device probing off the UI.
private void MapDevice (IDevice device)
{
Scheduler.Schedule (new MapDeviceJob (this, device));
}
// Background job that probes one device and, on success, registers the
// resulting DapSource with the source manager on the main thread.
private class MapDeviceJob : IJob
{
IDevice device;
DapService service;
public MapDeviceJob (DapService service, IDevice device)
{
this.device = device;
this.service = service;
}
public string Uuid {
get { return device.Uuid; }
}
public void Run ()
{
DapSource source = null;
lock (service.sync) {
try {
// Skip devices that are already mapped.
if (service.sources.ContainsKey (device.Uuid)) {
return;
}
// CD drives / disc volumes are handled elsewhere, not as DAPs.
if (device is ICdromDevice || device is IDiscVolume) {
return;
}
if (device is IVolume && (device as IVolume).ShouldIgnore) {
return;
}
// Require either media capabilities or a block device/volume.
if (device.MediaCapabilities == null && !(device is IBlockDevice) && !(device is IVolume)) {
return;
}
source = service.FindDeviceSource (device);
if (source != null) {
Log.DebugFormat ("Found DAP support ({0}) for device {1} and Uuid {2}", source.GetType ().FullName,
source.Name, device.Uuid);
service.sources.Add (device.Uuid, source);
}
} catch (Exception e) {
Log.Exception (e);
}
}
if (source != null) {
ThreadAssist.ProxyToMain (delegate {
ServiceManager.SourceManager.AddSource (source);
source.NotifyUser ();
// If there are any queued device commands, see if they are to be
// handled by this new DAP (e.g. --device-activate=file:///media/disk)
try {
if (service.unhandled_device_commands != null) {
foreach (DeviceCommand command in service.unhandled_device_commands) {
if (source.CanHandleDeviceCommand (command)) {
service.HandleDeviceCommand (source, command.Action);
// Removing inside the foreach is safe only because
// we break out of the loop immediately afterwards.
service.unhandled_device_commands.Remove (command);
if (service.unhandled_device_commands.Count == 0) {
service.unhandled_device_commands = null;
}
break;
}
}
}
} catch (Exception e) {
Log.Exception (e);
}
});
}
}
}
// Removes the source for <uuid> (if any) from the map, disposes it, and
// detaches it from the source manager on the main thread. Disposal happens
// outside the lock to avoid holding "sync" during source teardown.
internal void UnmapDevice (string uuid)
{
DapSource source = null;
lock (sync) {
if (sources.ContainsKey (uuid)) {
Log.DebugFormat ("Unmapping DAP source ({0})", uuid);
source = sources[uuid];
sources.Remove (uuid);
}
}
if (source != null) {
try {
source.Dispose ();
ThreadAssist.ProxyToMain (delegate {
ServiceManager.SourceManager.RemoveSource (source);
});
} catch (Exception e) {
Log.Exception (e);
}
}
}
// Keeps the map consistent when a DAP source is removed from the UI side.
private void OnSourceRemoved (SourceEventArgs args)
{
DapSource dap_source = args.Source as DapSource;
if (dap_source != null) {
UnmapDevice (dap_source.Device.Uuid);
}
}
private void OnHardwareDeviceAdded (object o, DeviceAddedArgs args)
{
MapDevice (args.Device);
}
private void OnHardwareDeviceRemoved (object o, DeviceRemovedArgs args)
{
UnmapDevice (args.DeviceUuid);
}
#region DeviceCommand Handling
// Currently only the Activate action is honored: it focuses the source.
private void HandleDeviceCommand (DapSource source, DeviceCommandAction action)
{
if ((action & DeviceCommandAction.Activate) != 0) {
ServiceManager.SourceManager.SetActiveSource (source);
}
}
private void OnDeviceCommand (object o, DeviceCommand command)
{
// NOTE(review): locks "this" while other members lock "sync"; a private
// lock object would be the conventional choice — confirm before changing.
lock (this) {
// Check to see if we have an already mapped disc volume that should
// handle this incoming command; if not, queue it for later devices
foreach (DapSource source in sources.Values) {
if (source.CanHandleDeviceCommand (command)) {
HandleDeviceCommand (source, command.Action);
return;
}
}
if (unhandled_device_commands == null) {
unhandled_device_commands = new List<DeviceCommand> ();
}
unhandled_device_commands.Add (command);
}
}
#endregion
string IService.ServiceName {
get { return "DapService"; }
}
}
}
| |
//
// System.Net.Mime.ContentType.cs
//
// Authors:
// Tim Coleman (tim@timcoleman.com)
// John Luke (john.luke@gmail.com)
//
// Copyright (C) Tim Coleman, 2004
// Copyright (C) John Luke, 2005
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Text;
namespace SendGrid.Net.Mime
{
public class ContentType
{
#region Fields
static Encoding utf8unmarked;
string mediaType;
Dictionary<string, string> parameters = new Dictionary<string, string>();
#endregion // Fields
#region Constructors
public ContentType()
{
mediaType = "application/octet-stream";
}
public ContentType(string contentType)
{
if (contentType == null)
throw new ArgumentNullException("contentType");
if (contentType.Length == 0)
throw new ArgumentException("contentType");
string[] split = contentType.Split(';');
this.MediaType = split[0].Trim();
for (int i = 1; i < split.Length; i++)
Parse(split[i].Trim());
}
// parse key=value pairs like:
// "charset=us-ascii"
static char[] eq = new char[] { '=' };
void Parse(string pair)
{
if (String.IsNullOrEmpty(pair))
return;
string[] split = pair.Split(eq, StringSplitOptions.RemoveEmptyEntries);
string key = split[0].Trim();
string val = (split.Length > 1) ? split[1].Trim() : "";
int l = val.Length;
if (l >= 2 && val[0] == '"' && val[l - 1] == '"')
val = val.Substring(1, l - 2);
parameters[key] = val;
}
#endregion // Constructors
#region Properties
static Encoding UTF8Unmarked
{
get
{
if (utf8unmarked == null)
utf8unmarked = new UTF8Encoding(false);
return utf8unmarked;
}
}
public string Boundary
{
get { return parameters["boundary"]; }
set { parameters["boundary"] = value; }
}
public string CharSet
{
get { return parameters["charset"]; }
set { parameters["charset"] = value; }
}
public string MediaType
{
get { return mediaType; }
set
{
if (value == null)
throw new ArgumentNullException();
if (value.Length < 1)
throw new ArgumentException();
if (value.IndexOf('/') < 1)
throw new FormatException();
if (value.IndexOf(';') != -1)
throw new FormatException();
mediaType = value;
}
}
public string Name
{
get { return parameters["name"]; }
set { parameters["name"] = value; }
}
public Dictionary<string, string> Parameters
{
get { return parameters; }
}
#endregion // Properties
#region Methods
public override bool Equals(object obj)
{
return Equals(obj as ContentType);
}
bool Equals(ContentType other)
{
return other != null && ToString() == other.ToString();
}
public override int GetHashCode()
{
return ToString().GetHashCode();
}
// Serializes the content type back into header form:
//   mediatype; key1=value1; key2=value2
// Parameter values containing especial characters are quoted, and values with
// non-ASCII characters are RFC 2047 Q-encoded using the declared charset
// (falling back to UTF-8 when no charset parameter is present).
public override string ToString()
{
    var sb = new StringBuilder();
    // Fix: look the charset up directly rather than through the CharSet
    // property — the Dictionary indexer used there throws KeyNotFoundException
    // whenever no charset parameter is set (e.g. on a default instance), which
    // made ToString() crash instead of falling back to UTF-8.
    string charset;
    Encoding enc = parameters.TryGetValue("charset", out charset) && !String.IsNullOrEmpty(charset)
        ? Encoding.GetEncoding(charset)
        : Encoding.UTF8;
    sb.Append(MediaType);
    foreach (var pair in parameters)
    {
        // Skip parameters whose value is null or empty (pair.Value is already
        // a string; the original's ".ToString()" on it was redundant).
        if (!String.IsNullOrEmpty(pair.Value))
        {
            sb.Append("; ");
            sb.Append(pair.Key);
            sb.Append("=");
            sb.Append(WrapIfEspecialsExist(EncodeSubjectRFC2047(pair.Value, enc)));
        }
    }
    return sb.ToString();
}
// see RFC 2047
// Characters that force a parameter value to be wrapped in double quotes.
// Fix: the original array listed '<' and '>' twice; duplicates removed
// (IndexOfAny behavior is unchanged).
// NOTE(review): '.' is included although RFC 2045 does not list it as a
// tspecial; kept as-is to preserve behavior.
static readonly char[] especials = { '(', ')', '<', '>', '@', ',', ';', ':', '/', '[', ']', '?', '.', '=' };
// Escapes embedded double quotes, then wraps the whole value in quotes when
// it contains any especial character.
static string WrapIfEspecialsExist(string s)
{
    s = s.Replace("\"", "\\\"");
    if (s.IndexOfAny(especials) >= 0)
        return '"' + s + '"';
    else
        return s;
}
// Returns the unmarked UTF-8 encoding when the string contains any
// non-ASCII character, or null when plain ASCII suffices.
internal static Encoding GuessEncoding(string s)
{
    foreach (char c in s)
    {
        if (c >= '\u0080')
            return UTF8Unmarked;
    }
    return null;
}
// Picks a MIME transfer encoding for the given text encoding: UTF-8 and
// UTF-16 payloads go out as Base64, everything else as quoted-printable.
// (A SevenBit branch for ASCII was present but commented out in the original
// and remains deliberately disabled.)
internal static TransferEncoding GuessTransferEncoding(Encoding enc)
{
    bool wide = Encoding.UTF8.Equals(enc) || Encoding.Unicode.Equals(enc);
    return wide ? TransferEncoding.Base64 : TransferEncoding.QuotedPrintable;
}
// Hex digits used by the Q-encoder below.
static char[] hex = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
// Q-encodes a byte sequence per RFC 2047: printable ASCII in 0x21..0x7E
// passes through unchanged, while everything else (including space) plus the
// special characters '?', '=' and '_' is emitted as an "=XX" hex escape.
internal static string To2047(byte[] bytes)
{
    var builder = new StringBuilder();
    for (int idx = 0; idx < bytes.Length; idx++)
    {
        byte b = bytes[idx];
        bool escape = b < 0x21 || b > 0x7E || b == '?' || b == '=' || b == '_';
        if (escape)
        {
            builder.Append('=');
            builder.Append(hex[(b >> 4) & 0x0F]);
            builder.Append(hex[b & 0x0F]);
        }
        else
        {
            builder.Append((char)b);
        }
    }
    return builder.ToString();
}
// Applies RFC 2047 encoded-word formatting ("=?charset?Q?...?=") when the
// string contains any non-ASCII character; pure-ASCII strings (and null)
// are returned unchanged. The whole string is encoded as a single word.
internal static string EncodeSubjectRFC2047(string s, Encoding enc)
{
    if (s == null)
        return s;
    foreach (char c in s)
    {
        if (c >= '\u0080')
        {
            string quoted = To2047(enc.GetBytes(s));
            return "=?" + enc.WebName + "?Q?" + quoted + "?=";
        }
    }
    return s;
}
#endregion // Methods
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.ObjectModel;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.ExceptionServices;
using Microsoft.CodeAnalysis.Collections;
using Microsoft.VisualStudio.Debugger;
using Microsoft.VisualStudio.Debugger.Clr;
using Microsoft.VisualStudio.Debugger.ComponentInterfaces;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Xunit;
using Roslyn.Test.Utilities;
using System.Collections;
namespace Microsoft.CodeAnalysis.ExpressionEvaluator
{
// Shared infrastructure for expression-evaluator result-provider unit tests.
// Wraps an IDkmClrResultProvider/IDkmClrFormatter pair and provides helpers to
// build DkmClrValues, format them, enumerate children via DkmWorkList, and
// compare actual DkmEvaluationResults against expected ones.
public abstract class ResultProviderTestBase
{
    private readonly IDkmClrFormatter _formatter;
    private readonly IDkmClrResultProvider _resultProvider;
    // Inspection context used whenever a test does not supply its own.
    internal readonly DkmInspectionContext DefaultInspectionContext;
    // Message of the non-public DynamicDebugViewEmptyException, fetched once
    // by reflection in the static constructor below.
    protected static readonly string DynamicDebugViewEmptyMessage;
    static ResultProviderTestBase()
    {
        var exceptionType = typeof(Microsoft.CSharp.RuntimeBinder.RuntimeBinderException).Assembly.GetType(
            "Microsoft.CSharp.RuntimeBinder.DynamicMetaObjectProviderDebugView+DynamicDebugViewEmptyException");
        var emptyProperty = exceptionType.GetProperty("Empty");
        DynamicDebugViewEmptyMessage = (string)emptyProperty.GetValue(exceptionType.Instantiate());
    }
    protected ResultProviderTestBase(ResultProvider resultProvider, DkmInspectionContext defaultInspectionContext)
    {
        _formatter = resultProvider.Formatter;
        _resultProvider = resultProvider;
        this.DefaultInspectionContext = defaultInspectionContext;
        // We never want to swallow Exceptions (generate a non-fatal Watson) when running tests.
        ExpressionEvaluatorFatalError.IsFailFastEnabled = true;
    }
    // Wraps a CLR object in a DkmClrValue; when no type is given, the value's
    // runtime type is used.
    internal DkmClrValue CreateDkmClrValue(
        object value,
        Type type = null,
        string alias = null,
        DkmEvaluationResultFlags evalFlags = DkmEvaluationResultFlags.None,
        DkmClrValueFlags valueFlags = DkmClrValueFlags.None)
    {
        if (type == null)
        {
            type = value.GetType();
        }
        return new DkmClrValue(
            value,
            DkmClrValue.GetHostObjectValue((TypeImpl)type, value),
            new DkmClrType((TypeImpl)type),
            alias,
            _formatter,
            evalFlags,
            valueFlags);
    }
    // Overload taking an explicit DkmClrType (and optional native COM pointer).
    internal DkmClrValue CreateDkmClrValue(
        object value,
        DkmClrType type,
        string alias = null,
        DkmEvaluationResultFlags evalFlags = DkmEvaluationResultFlags.None,
        DkmClrValueFlags valueFlags = DkmClrValueFlags.None,
        ulong nativeComPointer = 0)
    {
        return new DkmClrValue(
            value,
            DkmClrValue.GetHostObjectValue(type.GetLmrType(), value),
            type,
            alias,
            _formatter,
            evalFlags,
            valueFlags,
            nativeComPointer: nativeComPointer);
    }
    // Builds an error-flagged DkmClrValue whose host object is the error message.
    internal DkmClrValue CreateErrorValue(
        DkmClrType type,
        string message)
    {
        return new DkmClrValue(
            value: null,
            hostObjectValue: message,
            type: type,
            alias: null,
            formatter: _formatter,
            evalFlags: DkmEvaluationResultFlags.None,
            valueFlags: DkmClrValueFlags.Error);
    }
    #region Formatter Tests
    // Formats a null value typed as T.
    internal string FormatNull<T>(bool useHexadecimal = false)
    {
        return FormatValue(null, typeof(T), useHexadecimal);
    }
    // Formats a value using its runtime type.
    internal string FormatValue(object value, bool useHexadecimal = false)
    {
        return FormatValue(value, value.GetType(), useHexadecimal);
    }
    // Formats a value of the given type, in decimal or hexadecimal radix.
    internal string FormatValue(object value, Type type, bool useHexadecimal = false)
    {
        var clrValue = CreateDkmClrValue(value, type);
        var inspectionContext = CreateDkmInspectionContext(_formatter, DkmEvaluationFlags.None, radix: useHexadecimal ? 16u : 10u);
        return clrValue.GetValueString(inspectionContext, Formatter.NoFormatSpecifiers);
    }
    // True when the value has a "raw string" form (e.g. string/char values).
    internal bool HasUnderlyingString(object value)
    {
        return HasUnderlyingString(value, value.GetType());
    }
    internal bool HasUnderlyingString(object value, Type type)
    {
        var clrValue = GetValueForUnderlyingString(value, type);
        return clrValue.HasUnderlyingString(DefaultInspectionContext);
    }
    // Returns the raw (unquoted/unescaped) string form of the value.
    internal string GetUnderlyingString(object value)
    {
        var clrValue = GetValueForUnderlyingString(value, value.GetType());
        return clrValue.GetUnderlyingString(DefaultInspectionContext);
    }
    // Wraps the value with the RawString flag set, as the two helpers above need.
    internal DkmClrValue GetValueForUnderlyingString(object value, Type type)
    {
        return CreateDkmClrValue(
            value,
            type,
            evalFlags: DkmEvaluationResultFlags.RawString);
    }
    #endregion
    #region ResultProvider Tests
    // Instance helper: builds an inspection context using this test's formatter.
    internal DkmInspectionContext CreateDkmInspectionContext(
        DkmEvaluationFlags flags = DkmEvaluationFlags.None,
        uint radix = 10,
        DkmRuntimeInstance runtimeInstance = null)
    {
        return CreateDkmInspectionContext(_formatter, flags, radix, runtimeInstance);
    }
    internal static DkmInspectionContext CreateDkmInspectionContext(
        IDkmClrFormatter formatter,
        DkmEvaluationFlags flags,
        uint radix,
        DkmRuntimeInstance runtimeInstance = null)
    {
        return new DkmInspectionContext(formatter, flags, radix, runtimeInstance);
    }
    // Formats a top-level evaluation result for the given value.
    internal DkmEvaluationResult FormatResult(string name, DkmClrValue value, DkmClrType declaredType = null, DkmInspectionContext inspectionContext = null)
    {
        return FormatResult(name, name, value, declaredType, inspectionContext: inspectionContext);
    }
    // Runs the result provider synchronously (via a DkmWorkList) and returns
    // the completed evaluation result.
    // NOTE(review): the fullName parameter is not forwarded — resultFullName is
    // always passed as null; confirm whether that is intended.
    internal DkmEvaluationResult FormatResult(string name, string fullName, DkmClrValue value, DkmClrType declaredType = null, bool[] declaredTypeInfo = null, DkmInspectionContext inspectionContext = null)
    {
        DkmEvaluationResult evaluationResult = null;
        var workList = new DkmWorkList();
        _resultProvider.GetResult(
            value,
            workList,
            declaredType: declaredType ?? value.Type,
            customTypeInfo: DynamicFlagsCustomTypeInfo.Create(declaredTypeInfo).GetCustomTypeInfo(),
            inspectionContext: inspectionContext ?? DefaultInspectionContext,
            formatSpecifiers: Formatter.NoFormatSpecifiers,
            resultName: name,
            resultFullName: null,
            completionRoutine: asyncResult => evaluationResult = asyncResult.Result);
        workList.Execute();
        return evaluationResult;
    }
    // Retrieves ALL children of a result, exercising the paging API with
    // several initial-request sizes and increasing batch sizes along the way.
    internal DkmEvaluationResult[] GetChildren(DkmEvaluationResult evalResult, DkmInspectionContext inspectionContext = null)
    {
        DkmEvaluationResultEnumContext enumContext;
        var builder = ArrayBuilder<DkmEvaluationResult>.GetInstance();
        // Request 0-3 children.
        int size;
        DkmEvaluationResult[] items;
        for (size = 0; size < 3; size++)
        {
            items = GetChildren(evalResult, size, inspectionContext, out enumContext);
            var totalChildCount = enumContext.Count;
            Assert.InRange(totalChildCount, 0, int.MaxValue);
            var expectedSize = (size < totalChildCount) ? size : totalChildCount;
            Assert.Equal(expectedSize, items.Length);
        }
        // Request items (increasing the size of the request with each iteration).
        size = 1;
        items = GetChildren(evalResult, size, inspectionContext, out enumContext);
        while (items.Length > 0)
        {
            builder.AddRange(items);
            Assert.True(builder.Count <= enumContext.Count);
            int offset = builder.Count;
            // Request 0 items.
            items = GetItems(enumContext, offset, 0);
            Assert.Equal(items.Length, 0);
            // Request >0 items.
            size++;
            items = GetItems(enumContext, offset, size);
        }
        Assert.Equal(builder.Count, enumContext.Count);
        return builder.ToArrayAndFree();
    }
    // Single GetChildren call; rethrows (with original stack trace) any
    // exception captured during the async completion.
    internal DkmEvaluationResult[] GetChildren(DkmEvaluationResult evalResult, int initialRequestSize, DkmInspectionContext inspectionContext, out DkmEvaluationResultEnumContext enumContext)
    {
        DkmGetChildrenAsyncResult getChildrenResult = default(DkmGetChildrenAsyncResult);
        var workList = new DkmWorkList();
        _resultProvider.GetChildren(evalResult, workList, initialRequestSize, inspectionContext ?? DefaultInspectionContext, r => { getChildrenResult = r; });
        workList.Execute();
        var exception = getChildrenResult.Exception;
        if (exception != null)
        {
            ExceptionDispatchInfo.Capture(exception).Throw();
        }
        enumContext = getChildrenResult.EnumContext;
        return getChildrenResult.InitialChildren;
    }
    // Fetches a page of child items from an enumeration context.
    internal DkmEvaluationResult[] GetItems(DkmEvaluationResultEnumContext enumContext, int startIndex, int count)
    {
        DkmEvaluationEnumAsyncResult getItemsResult = default(DkmEvaluationEnumAsyncResult);
        var workList = new DkmWorkList();
        _resultProvider.GetItems(enumContext, workList, startIndex, count, r => { getItemsResult = r; });
        workList.Execute();
        var exception = getItemsResult.Exception;
        if (exception != null)
        {
            ExceptionDispatchInfo.Capture(exception).Throw();
        }
        return getItemsResult.Items;
    }
    // Sentinels meaning "don't check this field" in Verify below.
    private const DkmEvaluationResultCategory UnspecifiedCategory = (DkmEvaluationResultCategory)(-1);
    private const DkmEvaluationResultAccessType UnspecifiedAccessType = (DkmEvaluationResultAccessType)(-1);
    // Factory for an expected successful evaluation result.
    internal static DkmEvaluationResult EvalResult(
        string name,
        string value,
        string type,
        string fullName,
        DkmEvaluationResultFlags flags = DkmEvaluationResultFlags.None,
        DkmEvaluationResultCategory category = UnspecifiedCategory,
        DkmEvaluationResultAccessType access = UnspecifiedAccessType,
        string editableValue = null,
        DkmCustomUIVisualizerInfo[] customUIVisualizerInfo = null)
    {
        return DkmSuccessEvaluationResult.Create(
            null,
            null,
            name,
            fullName,
            flags,
            value,
            editableValue,
            type,
            category,
            access,
            default(DkmEvaluationResultStorageType),
            default(DkmEvaluationResultTypeModifierFlags),
            null,
            (customUIVisualizerInfo != null) ? new ReadOnlyCollection<DkmCustomUIVisualizerInfo>(customUIVisualizerInfo) : null,
            null,
            null);
    }
    // Factory for an expected intermediate (cross-language) evaluation result.
    internal static DkmIntermediateEvaluationResult EvalIntermediateResult(
        string name,
        string fullName,
        string expression,
        DkmLanguage language)
    {
        return DkmIntermediateEvaluationResult.Create(
            InspectionContext: null,
            StackFrame: null,
            Name: name,
            FullName: fullName,
            Expression: expression,
            IntermediateLanguage: language,
            TargetRuntime: null,
            DataItem: null);
    }
    // Factory for an expected failed evaluation result.
    internal static DkmEvaluationResult EvalFailedResult(
        string name,
        string message,
        string type = null,
        string fullName = null,
        DkmEvaluationResultFlags flags = DkmEvaluationResultFlags.None)
    {
        return DkmFailedEvaluationResult.Create(
            null,
            null,
            name,
            fullName,
            message,
            flags,
            type,
            null);
    }
    // Verifies a list of actual results against expected ones; on any failure
    // dumps the full actual list to the console before rethrowing.
    internal static void Verify(IReadOnlyList<DkmEvaluationResult> actual, params DkmEvaluationResult[] expected)
    {
        try
        {
            int n = actual.Count;
            Assert.Equal(expected.Length, n);
            for (int i = 0; i < n; i++)
            {
                Verify(actual[i], expected[i]);
            }
        }
        catch
        {
            foreach (var result in actual)
            {
                Console.WriteLine("{0}, ", ToString(result));
            }
            throw;
        }
    }
    // Dispatches to the ToString overload matching the result's concrete type.
    private static string ToString(DkmEvaluationResult result)
    {
        var success = result as DkmSuccessEvaluationResult;
        if (success != null) return ToString(success);
        var intermediate = result as DkmIntermediateEvaluationResult;
        if (intermediate != null) return ToString(intermediate);
        return ToString((DkmFailedEvaluationResult)result);
    }
    // Renders a success result as an "EvalResult(...)" expression, mirroring
    // the factory call that would reproduce it (optional fields only when set).
    private static string ToString(DkmSuccessEvaluationResult result)
    {
        var pooledBuilder = PooledStringBuilder.GetInstance();
        var builder = pooledBuilder.Builder;
        builder.Append("EvalResult(");
        builder.Append(Quote(result.Name));
        builder.Append(", ");
        builder.Append((result.Value == null) ? "null" : Quote(Escape(result.Value)));
        builder.Append(", ");
        builder.Append(Quote(result.Type));
        builder.Append(", ");
        builder.Append((result.FullName != null) ? Quote(Escape(result.FullName)) : "null");
        if (result.Flags != DkmEvaluationResultFlags.None)
        {
            builder.Append(", ");
            builder.Append(FormatEnumValue(result.Flags));
        }
        if (result.Category != DkmEvaluationResultCategory.Other)
        {
            builder.Append(", ");
            builder.Append(FormatEnumValue(result.Category));
        }
        if (result.Access != DkmEvaluationResultAccessType.None)
        {
            builder.Append(", ");
            builder.Append(FormatEnumValue(result.Access));
        }
        if (result.EditableValue != null)
        {
            builder.Append(", ");
            builder.Append(Quote(result.EditableValue));
        }
        builder.Append(")");
        return pooledBuilder.ToStringAndFree();
    }
    // Renders an intermediate result as "IntermediateEvalResult(...)".
    private static string ToString(DkmIntermediateEvaluationResult result)
    {
        var pooledBuilder = PooledStringBuilder.GetInstance();
        var builder = pooledBuilder.Builder;
        builder.Append("IntermediateEvalResult(");
        builder.Append(Quote(result.Name));
        builder.Append(", ");
        builder.Append(Quote(result.Expression));
        if (result.Type != null)
        {
            builder.Append(", ");
            builder.Append(Quote(result.Type));
        }
        if (result.FullName != null)
        {
            builder.Append(", ");
            builder.Append(Quote(Escape(result.FullName)));
        }
        if (result.Flags != DkmEvaluationResultFlags.None)
        {
            builder.Append(", ");
            builder.Append(FormatEnumValue(result.Flags));
        }
        builder.Append(")");
        return pooledBuilder.ToStringAndFree();
    }
    // Renders a failed result as "EvalFailedResult(...)".
    private static string ToString(DkmFailedEvaluationResult result)
    {
        var pooledBuilder = PooledStringBuilder.GetInstance();
        var builder = pooledBuilder.Builder;
        builder.Append("EvalFailedResult(");
        builder.Append(Quote(result.Name));
        builder.Append(", ");
        builder.Append(Quote(result.ErrorMessage));
        if (result.Type != null)
        {
            builder.Append(", ");
            builder.Append(Quote(result.Type));
        }
        if (result.FullName != null)
        {
            builder.Append(", ");
            builder.Append(Quote(Escape(result.FullName)));
        }
        if (result.Flags != DkmEvaluationResultFlags.None)
        {
            builder.Append(", ");
            builder.Append(FormatEnumValue(result.Flags));
        }
        builder.Append(")");
        return pooledBuilder.ToStringAndFree();
    }
    // Backslash-escapes embedded double quotes for display.
    private static string Escape(string str)
    {
        return str.Replace("\"", "\\\"");
    }
    // Wraps a string in double quotes for display.
    private static string Quote(string str)
    {
        return '"' + str + '"';
    }
    // Formats an enum value (possibly [Flags]) as "Type.A | Type.B".
    private static string FormatEnumValue(Enum e)
    {
        var parts = e.ToString().Split(new[] { ", " }, StringSplitOptions.RemoveEmptyEntries);
        var enumTypeName = e.GetType().Name;
        return string.Join(" | ", parts.Select(p => enumTypeName + "." + p));
    }
    // Verifies a single result against an expected one. Category/Access are
    // only checked when the expected value is not the Unspecified sentinel.
    internal static void Verify(DkmEvaluationResult actual, DkmEvaluationResult expected)
    {
        Assert.Equal(expected.Name, actual.Name);
        Assert.Equal(expected.FullName, actual.FullName);
        var expectedSuccess = expected as DkmSuccessEvaluationResult;
        var expectedIntermediate = expected as DkmIntermediateEvaluationResult;
        if (expectedSuccess != null)
        {
            var actualSuccess = (DkmSuccessEvaluationResult)actual;
            Assert.Equal(expectedSuccess.Value, actualSuccess.Value);
            Assert.Equal(expectedSuccess.Type, actualSuccess.Type);
            Assert.Equal(expectedSuccess.Flags, actualSuccess.Flags);
            if (expectedSuccess.Category != UnspecifiedCategory)
            {
                Assert.Equal(expectedSuccess.Category, actualSuccess.Category);
            }
            if (expectedSuccess.Access != UnspecifiedAccessType)
            {
                Assert.Equal(expectedSuccess.Access, actualSuccess.Access);
            }
            Assert.Equal(expectedSuccess.EditableValue, actualSuccess.EditableValue);
            Assert.True(
                (expectedSuccess.CustomUIVisualizers == actualSuccess.CustomUIVisualizers) ||
                (expectedSuccess.CustomUIVisualizers != null && actualSuccess.CustomUIVisualizers != null &&
                expectedSuccess.CustomUIVisualizers.SequenceEqual(actualSuccess.CustomUIVisualizers, CustomUIVisualizerInfoComparer.Instance)));
        }
        else if (expectedIntermediate != null)
        {
            var actualIntermediate = (DkmIntermediateEvaluationResult)actual;
            Assert.Equal(expectedIntermediate.Expression, actualIntermediate.Expression);
            Assert.Equal(expectedIntermediate.IntermediateLanguage.Id.LanguageId, actualIntermediate.IntermediateLanguage.Id.LanguageId);
            Assert.Equal(expectedIntermediate.IntermediateLanguage.Id.VendorId, actualIntermediate.IntermediateLanguage.Id.VendorId);
        }
        else
        {
            var actualFailed = (DkmFailedEvaluationResult)actual;
            var expectedFailed = (DkmFailedEvaluationResult)expected;
            Assert.Equal(expectedFailed.ErrorMessage, actualFailed.ErrorMessage);
            Assert.Equal(expectedFailed.Type, actualFailed.Type);
            Assert.Equal(expectedFailed.Flags, actualFailed.Flags);
        }
    }
    #endregion
    // Field-by-field equality for DkmCustomUIVisualizerInfo, used only by
    // Verify above; GetHashCode is intentionally unimplemented.
    private sealed class CustomUIVisualizerInfoComparer : IEqualityComparer<DkmCustomUIVisualizerInfo>
    {
        internal static readonly CustomUIVisualizerInfoComparer Instance = new CustomUIVisualizerInfoComparer();
        bool IEqualityComparer<DkmCustomUIVisualizerInfo>.Equals(DkmCustomUIVisualizerInfo x, DkmCustomUIVisualizerInfo y)
        {
            return x == y ||
                (x != null && y != null &&
                x.Id == y.Id &&
                x.MenuName == y.MenuName &&
                x.Description == y.Description &&
                x.Metric == y.Metric &&
                x.UISideVisualizerTypeName == y.UISideVisualizerTypeName &&
                x.UISideVisualizerAssemblyName == y.UISideVisualizerAssemblyName &&
                x.UISideVisualizerAssemblyLocation == y.UISideVisualizerAssemblyLocation &&
                x.DebuggeeSideVisualizerTypeName == y.DebuggeeSideVisualizerTypeName &&
                x.DebuggeeSideVisualizerAssemblyName == y.DebuggeeSideVisualizerAssemblyName);
        }
        int IEqualityComparer<DkmCustomUIVisualizerInfo>.GetHashCode(DkmCustomUIVisualizerInfo obj)
        {
            throw new NotImplementedException();
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using DeleteLine.Properties;
namespace DeleteLine
{
/// <summary>
/// Class of the main program.
/// </summary>
public static class Program
{
/// <summary>
/// Entry point of the main program.
/// </summary>
/// <param name="arguments">
/// All the arguments separated by a white space.
/// </param>
private static void Main(string[] arguments)
{
Action<string> display = Console.WriteLine;
var argumentDictionary = new Dictionary<string, string>
{
// Initialization of the dictionary with default values
{"filename", string.Empty},
{"outputfilename", string.Empty},
{"separator", ";" },
{"hasheader", "false" },
{"hasfooter", "false"},
{"deleteheader", "false"},
{"deletefooter", "false"},
{"deletefirstcolumn", "false"},
{"samename", "false"},
{"newname", string.Empty},
{"log", "false"},
{"removeemptylines", "true"},
{"countlines", "false"},
{"verifyheaderandfooter", "false"}
};
// the variable numberOfInitialDictionaryItems is used for the log to list all non-standard arguments passed in.
int numberOfInitialDictionaryItems = argumentDictionary.Count;
var fileContent = new List<string>();
var fileTransformed = new List<string>();
int numberOfLineInfile = 0;
bool hasExtraArguments = false;
bool fileHasHeader = false;
bool fileHasFooter = false;
string datedLogFileName = string.Empty;
byte returnCode = 1;
Stopwatch chrono = new Stopwatch();
if (arguments.Length == 0 || arguments[0].ToLower().Contains("help") || arguments[0].Contains("?"))
{
Usage();
return;
}
chrono.Start();
// Saving application version number for easy config file reading.
Settings.Default.ApplicationVersionNumber = GetAssemblyVersion();
Settings.Default.Save();
// We remove Windows forbidden characters from return code file name
if (Settings.Default.ReturnCodeFileName != RemoveWindowsForbiddenCharacters(Settings.Default.ReturnCodeFileName))
{
Settings.Default.ReturnCodeFileName = RemoveWindowsForbiddenCharacters(Settings.Default.ReturnCodeFileName.Trim());
Settings.Default.Save();
}
// We ensure the return-code file name has a default value, then delete any previous return-code file
if (Settings.Default.ReturnCodeFileName.Trim() == string.Empty)
{
Settings.Default.ReturnCodeFileName = "ReturnCode.txt";
Settings.Default.Save();
}
try
{
if (File.Exists(Settings.Default.ReturnCodeFileName))
{
File.Delete(Settings.Default.ReturnCodeFileName);
}
}
catch (DirectoryNotFoundException directoryNotFoundException)
{
Console.WriteLine("There was an error while trying to delete previous returncode.txt file.");
Console.WriteLine($"The exception was {directoryNotFoundException.Message}");
}
catch (DriveNotFoundException driveNotFoundException)
{
Console.WriteLine("There was an error while trying to delete previous returncode.txt file.");
Console.WriteLine($"The exception was {driveNotFoundException.Message}");
}
catch (FileNotFoundException fileNotFoundException)
{
Console.WriteLine("There was an error while trying to delete previous returncode.txt file.");
Console.WriteLine($"The exception was {fileNotFoundException.Message}");
}
catch (PathTooLongException pathTooLongException)
{
Console.WriteLine("There was an error while trying to delete previous returncode.txt file.");
Console.WriteLine($"The exception was {pathTooLongException.Message}");
}
catch (IOException ioException)
{
Console.WriteLine("There was an error while trying to delete previous returncode.txt file.");
Console.WriteLine($"The exception was {ioException.Message}");
}
catch (UnauthorizedAccessException unauthorizedAccessException)
{
Console.WriteLine("There was an error while trying to delete previous returncode.txt file.");
Console.WriteLine($"The exception was {unauthorizedAccessException.Message}");
}
catch (Exception exception)
{
Console.WriteLine("There was an error while trying to delete previous returncode.txt file.");
Console.WriteLine($"The exception was {exception.Message}");
}
// we split arguments into the dictionary
foreach (string argument in arguments)
{
string argumentKey = string.Empty;
string argumentValue = string.Empty;
if (argument.IndexOf(':') != -1)
{
argumentKey = argument.Substring(1, argument.IndexOf(':') - 1).ToLower();
argumentValue = argument.Substring(argument.IndexOf(':') + 1,
argument.Length - (argument.IndexOf(':') + 1));
}
else
{
// If we have an argument without the colon sign (:) then we add it to the dictionary as extra argument not taken into account
argumentKey = argument;
argumentValue = $"The argument passed in ({argumentKey}) does not have any value. The colon sign (:) is missing.";
}
if (argumentDictionary.ContainsKey(argumentKey))
{
// set the value of the argument
argumentDictionary[argumentKey] = argumentValue;
}
else
{
// we add any other or new argument into the dictionary to look at them in the log
argumentDictionary.Add(argumentKey, argumentValue);
hasExtraArguments = true;
}
}
// If countlines is true, force removeemptylines to true
if (argumentDictionary["countlines"] == "true")
{
argumentDictionary["removeemptylines"] = "true";
}
// check that the filename doesn't contain any Windows forbidden characters and trim all space characters at the start of the name.
argumentDictionary["filename"] = RemoveWindowsForbiddenCharacters(argumentDictionary["filename"]).TrimStart();
// check that the output file name doesn't contain any Windows forbidden characters and trim all space characters at the start of the name.
argumentDictionary["outputfilename"] = RemoveWindowsForbiddenCharacters(argumentDictionary["outputfilename"]).TrimStart();
// if log file name is empty in XML file then we define it with a default value like "Log"
if (Settings.Default.LogFileName.Trim() == string.Empty)
{
Settings.Default.LogFileName = "Log.txt";
Settings.Default.Save();
datedLogFileName = AddDateToFileName(Settings.Default.LogFileName);
}
else
{
// we remove Windows forbidden characters from the log file name
Settings.Default.LogFileName = RemoveWindowsForbiddenCharacters(Settings.Default.LogFileName).TrimStart();
Settings.Default.Save();
// we leave the name of the log file name as the user wants including the path with UNC possible
datedLogFileName = Settings.Default.LogFileName;
}
// if Company name is empty in XML file then we define it with a default value like "Company name"
if (Settings.Default.CompanyName.Trim() == string.Empty)
{
Settings.Default.CompanyName = "Company name";
Settings.Default.Save();
}
if (argumentDictionary["filename"].Trim() == string.Empty)
{
Usage();
return;
}
// Add version of the program at the beginning of the log
Log(datedLogFileName, argumentDictionary["log"], $"{Assembly.GetExecutingAssembly().GetName().Name} is in version {GetAssemblyVersion()}.");
// We log all arguments passed in.
foreach (KeyValuePair<string, string> keyValuePair in argumentDictionary)
{
if (argumentDictionary["log"] == "true")
{
Log(datedLogFileName, argumentDictionary["log"], $"Argument requested: {keyValuePair.Key}");
Log(datedLogFileName, argumentDictionary["log"], $"Value of the argument: {keyValuePair.Value}");
}
}
// We log extra arguments passed in.
if (hasExtraArguments && argumentDictionary["log"] == "true")
{
Log(datedLogFileName, argumentDictionary["log"], "Here are a list of argument passed in but not understood and thus not used (for debug purpose only):");
for (int i = numberOfInitialDictionaryItems; i <= argumentDictionary.Count - 1; i++)
{
Log(datedLogFileName, argumentDictionary["log"], $"Extra argument requested: {argumentDictionary.Keys.ElementAt(i)}");
Log(datedLogFileName, argumentDictionary["log"], $"Value of the extra argument: {argumentDictionary.Values.ElementAt(i)}");
}
}
// reading of the CSV file
try
{
if (argumentDictionary["filename"].Trim() != string.Empty)
{
//string FilePath = argumentDictionary["inputFilePath"] + argumentDictionary["filename"];
if (File.Exists(argumentDictionary["filename"]))
{
using (StreamReader sr = new StreamReader(argumentDictionary["filename"]))
{
while (!sr.EndOfStream)
{
string tmpLine = sr.ReadLine();
if (tmpLine != null && tmpLine.StartsWith("0;"))
{
fileHasHeader = true;
}
if (tmpLine != null && tmpLine.StartsWith("9;"))
{
fileHasFooter = true;
bool parseLastLineTointOk = int.TryParse(tmpLine.Substring(2, tmpLine.Length - 2).TrimEnd(argumentDictionary["separator"][0]), NumberStyles.Any, CultureInfo.InvariantCulture, out numberOfLineInfile);
if (!parseLastLineTointOk)
{
const string tmpErrorMessage = "There was an error while parsing the last line of the file to an integer to know the number of lines in the file.";
Log(datedLogFileName, argumentDictionary["log"], $"{tmpErrorMessage}");
Console.WriteLine($"{tmpErrorMessage}"); // if no log then display error message in console
}
}
if (tmpLine != null)
{
if (argumentDictionary["removeemptylines"] == "false")
{
fileContent.Add(tmpLine);
}
else if (argumentDictionary["removeemptylines"] == "true" && tmpLine.Trim() != string.Empty)
{
fileContent.Add(tmpLine);
}
}
}
}
Log(datedLogFileName, argumentDictionary["log"], "The file has been read correctly.");
if (argumentDictionary["countlines"] == "true")
{
Log(datedLogFileName, argumentDictionary["log"], $"The footer of the file states {numberOfLineInfile} line{Plural(numberOfLineInfile)}.");
}
}
else
{
Log(datedLogFileName, argumentDictionary["log"], $"the filename: {argumentDictionary["filename"]} could be read because it doesn't exist.");
}
}
else
{
Log(datedLogFileName, argumentDictionary["log"], $"the filename: {argumentDictionary["filename"]} is empty, it cannot be read.");
}
}
catch (Exception exception)
{
Log(datedLogFileName, argumentDictionary["log"], $"There was an error while processing the file {exception}.");
Console.WriteLine($"There was an error while processing the file {exception}");
}
if (fileContent.Count != 0)
{
if (argumentDictionary["deleteheader"] == "true" && argumentDictionary["hasheader"] == "true" && fileHasHeader)
{
Log(datedLogFileName, argumentDictionary["log"], $"Header (which is the first line) has been removed, it was: {fileContent[0]}");
fileContent.RemoveAt(0);
}
if (argumentDictionary["deletefooter"] == "true" && argumentDictionary["hasfooter"] == "true" && fileContent.Count != 0 && fileHasFooter)
{
if (argumentDictionary["countlines"] == "true")
{
Log(datedLogFileName, argumentDictionary["log"], $"{numberOfLineInfile} line{Plural(numberOfLineInfile)} stated in footer.");
Log(datedLogFileName, argumentDictionary["log"], $"Footer (which is the last line) has been removed, it was: {fileContent[fileContent.Count - 1]}");
}
Log(datedLogFileName, argumentDictionary["log"], $"The file has {fileContent.Count - 1} line{Plural(fileContent.Count)}.");
fileContent.RemoveAt(fileContent.Count - 1);
}
if (argumentDictionary["deletefirstcolumn"] == "true" && fileContent.Count != 0)
{
Log(datedLogFileName, argumentDictionary["log"], "The first column has been deleted.");
fileTransformed = new List<string>();
foreach (string line in fileContent)
{
fileTransformed.Add(line.Substring(line.IndexOf(argumentDictionary["separator"], StringComparison.InvariantCulture) + 1, line.Length - line.IndexOf(argumentDictionary["separator"], StringComparison.InvariantCulture) - 1));
}
fileContent = fileTransformed;
}
// We free up memory
fileTransformed = null;
//We check integrity of the file i.e. number of line stated equals to the number of line written
if (fileContent.Count == numberOfLineInfile && argumentDictionary["countlines"] == "true")
{
Log(datedLogFileName, argumentDictionary["log"], $"The file has the same number of lines as stated in the last line which is {numberOfLineInfile} line{Plural(numberOfLineInfile)}.");
returnCode = Settings.Default.ReturnCodeOK;
}
else if (fileContent.Count != numberOfLineInfile && argumentDictionary["countlines"] == "true")
{
Log(datedLogFileName, argumentDictionary["log"], $"The file has not the same number of lines {fileContent.Count} as stated in the last line which is {numberOfLineInfile} line{Plural(numberOfLineInfile)}.");
returnCode = Settings.Default.ReturnCodeKO;
}
if (argumentDictionary["countlines"] == "false")
{
returnCode = Settings.Default.ReturnCodeOK;
}
// If the user wants a different name for the transformed file
if (argumentDictionary["samename"] == "true" && argumentDictionary["filename"] != string.Empty)
{
try
{
File.Delete(argumentDictionary["filename"]);
using (StreamWriter sw = new StreamWriter(argumentDictionary["filename"], true))
{
foreach (string line in fileContent)
{
if (argumentDictionary["removeemptylines"] == "true" && line.Trim() != string.Empty)
{
sw.WriteLine(line);
}
}
}
Log(datedLogFileName, argumentDictionary["log"], $"The transformed file has been written correctly:{argumentDictionary["filename"]}.");
}
catch (Exception exception)
{
Log(datedLogFileName, argumentDictionary["log"], $"The filename {argumentDictionary["filename"]} cannot be written.");
Log(datedLogFileName, argumentDictionary["log"], $"The exception is: {exception}");
}
}
if (argumentDictionary["samename"] == "false" && argumentDictionary["outputfilename"] != string.Empty)
{
try
{
using (StreamWriter sw = new StreamWriter(argumentDictionary["outputfilename"]))
{
foreach (string line in fileContent)
{
if (argumentDictionary["removeemptylines"] == "true" && line.Trim() != string.Empty)
{
sw.WriteLine(line);
}
}
}
Log(datedLogFileName, argumentDictionary["log"], $"The transformed file has been written correctly with the new name {argumentDictionary["outputfilename"]}.");
}
catch (Exception exception)
{
Log(datedLogFileName, argumentDictionary["log"], $"The filename: {argumentDictionary["outputfilename"]} cannot be written.");
Log(datedLogFileName, argumentDictionary["log"], $"The exception is: {exception}");
}
}
else
{
if (argumentDictionary["newname"] != string.Empty)
{
try
{
using (StreamWriter sw = new StreamWriter(argumentDictionary["newname"]))
{
foreach (string line in fileContent)
{
if (argumentDictionary["removeemptylines"] == "true" && line.Trim() != string.Empty)
{
sw.WriteLine(line);
}
}
}
Log(datedLogFileName, argumentDictionary["log"], $"The transformed file has been written correctly with the new name {argumentDictionary["newname"]}.");
}
catch (Exception exception)
{
Log(datedLogFileName, argumentDictionary["log"], $"The filename: {argumentDictionary["newname"]} cannot be written.");
Log(datedLogFileName, argumentDictionary["log"], $"The exception is: {exception}");
}
}
}
if (argumentDictionary["deleteheader"] == "true")
{
Log(datedLogFileName, argumentDictionary["log"], $"The header (first line starts with 0;) was {Negative(fileHasHeader)}found in the file.");
}
if (argumentDictionary["deletefooter"] == "true")
{
Log(datedLogFileName, argumentDictionary["log"], $"The footer (last line starts with 9;) was {Negative(fileHasFooter)}found in the file.");
}
}
else
{
// file content is empty
Log(datedLogFileName, argumentDictionary["log"], "The file cannot be processed because it is empty.");
}
// Managing return code if header or footer were not found
if (!fileHasHeader && argumentDictionary["countlines"] == "true" && returnCode != 0)
{
returnCode = Settings.Default.ReturnCodeHeaderMissing;
}
if (!fileHasFooter && argumentDictionary["countlines"] == "true" && returnCode != 0)
{
returnCode = Settings.Default.ReturnCodeFooterMissing;
}
if (argumentDictionary["countlines"] == "true")
{
// Managing return code : we write a file with the return code which will be read by the DOS script to import SQL tables into a database.
string returnCodeFileName = string.Empty;
if (Settings.Default.ReturnCodeFileName.Trim() == string.Empty)
{
Settings.Default.ReturnCodeFileName = "ReturnCode.txt";
Settings.Default.Save();
}
else
{
Settings.Default.ReturnCodeFileName = RemoveWindowsForbiddenCharacters(Settings.Default.ReturnCodeFileName);
Settings.Default.Save();
}
returnCodeFileName = Settings.Default.ReturnCodeFileName;
try
{
if (File.Exists(returnCodeFileName))
{
File.Delete(returnCodeFileName);
}
StreamWriter sw = new StreamWriter(returnCodeFileName, false);
sw.WriteLine(returnCode);
sw.Close();
Log(datedLogFileName, argumentDictionary["log"], $"The return code has been written into the file {returnCodeFileName}, the return code is {returnCode}.");
}
catch (UnauthorizedAccessException unauthorizedAccessException)
{
Log(datedLogFileName, argumentDictionary["log"], $"There was an error while writing the return code file: {returnCodeFileName}. The exception is: {unauthorizedAccessException}");
Console.WriteLine($"There was an error while writing the return code file: {returnCodeFileName}. The exception is:{unauthorizedAccessException}");
}
catch (IOException ioException)
{
Log(datedLogFileName, argumentDictionary["log"], $"There was an error while writing the return code file: {returnCodeFileName}. The exception is: {ioException}");
Console.WriteLine($"There was an error while writing the return code file: {returnCodeFileName}. The exception is:{ioException}");
}
catch (Exception exception)
{
Log(datedLogFileName, argumentDictionary["log"], $"There was an error while writing the return code file: {returnCodeFileName}. The exception is: {exception}");
Console.WriteLine($"There was an error while writing the return code file: {returnCodeFileName}. The exception is:{exception}");
}
}
chrono.Stop();
TimeSpan tickTimeSpan = chrono.Elapsed;
Log(datedLogFileName, argumentDictionary["log"], $"This program took {chrono.ElapsedMilliseconds} milliseconds which is {ConvertToTimeString(tickTimeSpan)}.");
Log(datedLogFileName, argumentDictionary["log"], "END OF LOG.");
Log(datedLogFileName, argumentDictionary["log"], "-----------");
}
/// <summary>
/// Convert a time span to a "days hours minutes seconds milliseconds" string
/// (French unit names, as displayed to the user).
/// </summary>
/// <param name="ts">The time span to format.</param>
/// <param name="removeZeroArgument">When true (the default), components whose value is zero are omitted.</param>
/// <returns>Returns a string with the number of days, hours, minutes, seconds and milliseconds, trailing whitespace trimmed.</returns>
public static string ConvertToTimeString(TimeSpan ts, bool removeZeroArgument = true)
{
    string text = string.Empty;
    if (ts.Days != 0 || !removeZeroArgument)
    {
        text += $"{ts.Days} jour{Plural(ts.Days)} ";
    }
    if (ts.Hours != 0 || !removeZeroArgument)
    {
        text += $"{ts.Hours} heure{Plural(ts.Hours)} ";
    }
    if (ts.Minutes != 0 || !removeZeroArgument)
    {
        text += $"{ts.Minutes} minute{Plural(ts.Minutes)} ";
    }
    if (ts.Seconds != 0 || !removeZeroArgument)
    {
        text += $"{ts.Seconds} seconde{Plural(ts.Seconds)} ";
    }
    if (ts.Milliseconds != 0 || !removeZeroArgument)
    {
        // The last component carries no trailing space; the TrimEnd below
        // handles the case where it is omitted.
        text += $"{ts.Milliseconds} milliseconde{Plural(ts.Milliseconds)}";
    }
    return text.TrimEnd();
}
/// <summary>
/// Add an 's' if the number is greater than 1.
/// </summary>
/// <param name="number">The number to inspect.</param>
/// <returns>Returns "s" if the number is greater than one; otherwise returns an empty string.</returns>
public static string Plural(int number)
{
    if (number > 1)
    {
        return "s";
    }
    return string.Empty;
}
/// <summary>
/// Returns the string "not " (or nothing) according to the boolean value passed in,
/// for splicing into log sentences such as "... was not found ...".
/// </summary>
/// <param name="booleanValue">True when the thing being reported was found/present.</param>
/// <returns>An empty string when <paramref name="booleanValue"/> is true; otherwise "not ".</returns>
public static string Negative(bool booleanValue)
{
    if (booleanValue)
    {
        return string.Empty;
    }
    return "not ";
}
/// <summary>
/// Remove all Windows forbidden characters for a Windows path.
/// </summary>
/// <param name="filename">The initial string to be processed.</param>
/// <returns>A string without Windows forbidden characters.</returns>
private static string RemoveWindowsForbiddenCharacters(string filename)
{
    string result = filename;
    // Characters Windows forbids in file names: \ / : * ? " < > |
    // BUG FIX: the previous list contained the single two-character entry
    // "\":" (an escaped quote followed by a colon), so a lone backslash or a
    // lone colon was never stripped. They are now separate entries.
    string[] forbiddenWindowsFilenameCharacters = { "\\", "/", ":", "*", "?", "\"", "<", ">", "|" };
    foreach (var item in forbiddenWindowsFilenameCharacters)
    {
        result = result.Replace(item, string.Empty);
    }
    return result;
}
/// <summary>
/// Add the current date to the file name, just before its extension.
/// </summary>
/// <param name="fileName">The name of the file.</param>
/// <returns>A string with the date inserted before the file extension, or an empty string for an empty input.</returns>
public static string AddDateToFileName(string fileName)
{
    if (fileName == string.Empty) return string.Empty;
    // We insert a date before the extension of the filename.
    // Don't use Path.GetFileNameWithoutExtension(fileName) because of UNC paths.
    // BUG FIX: the previous code used IndexOf('.') which (a) threw
    // ArgumentOutOfRangeException for names with no dot at all and (b) cut
    // multi-dot names ("my.file.txt") at the FIRST dot instead of at the
    // extension separator. Use LastIndexOf and handle the dot-less case.
    int extensionSeparatorIndex = fileName.LastIndexOf('.');
    string nameWithoutExtension = extensionSeparatorIndex >= 0
        ? fileName.Substring(0, extensionSeparatorIndex)
        : fileName;
    string extension = extensionSeparatorIndex >= 0
        ? Path.GetExtension(fileName)
        : string.Empty;
    // NOTE(review): ToShortDateString is culture-dependent, so the inserted
    // date format varies with the machine's locale — confirm this is intended.
    string datePart = DateTime.Now.ToShortDateString().Replace('/', '-');
    return $"{nameWithoutExtension}_{datePart}{extension}";
}
/// <summary>
/// Get assembly version.
/// </summary>
/// <returns>A string with the four file-version parts of the executing assembly: major.minor.build.private.</returns>
private static string GetAssemblyVersion()
{
    FileVersionInfo versionInfo = FileVersionInfo.GetVersionInfo(Assembly.GetExecutingAssembly().Location);
    return string.Join(".", versionInfo.FileMajorPart, versionInfo.FileMinorPart, versionInfo.FileBuildPart, versionInfo.FilePrivatePart);
}
/// <summary>
/// Append a time-stamped message to the log file, when logging is enabled.
/// </summary>
/// <param name="filename">The name of the log file; nothing is written when blank.</param>
/// <param name="logging">Logging is performed only when this is "true" (case-insensitive).</param>
/// <param name="message">The message to be logged.</param>
private static void Log(string filename, string logging, string message)
{
    if (logging.ToLower() != "true") return;
    if (filename.Trim() == string.Empty) return;
    try
    {
        // FIX: wrap the writer in a using block so the file handle is released
        // even if WriteLine throws (the previous code only called Close on the
        // success path).
        using (StreamWriter sw = new StreamWriter(filename, true))
        {
            sw.WriteLine($"{DateTime.Now} - {message}");
        }
    }
    catch (Exception exception)
    {
        // FIX: report which file failed instead of the literal "(unknown)".
        Console.WriteLine($"There was an error while writing the file: {filename}. The exception is:{exception}");
    }
}
/// <summary>Get a file name safer.</summary>
/// <param name="fromString">The path to be checked for safe characters.</param>
/// <returns>A safe file name with every invalid character replaced by an underscore.</returns>
private static string GetSafeFileName(string fromString)
{
    // Invalid characters are platform-defined (Path.GetInvalidFileNameChars).
    char[] invalidChars = Path.GetInvalidFileNameChars();
    char[] output = fromString.ToCharArray();
    for (int i = 0; i < output.Length; i++)
    {
        if (Array.IndexOf(invalidChars, output[i]) >= 0)
        {
            output[i] = '_';
        }
    }
    return new string(output);
}
/// <summary>
/// If the user requests help or gives no argument, then we display the help section.
/// </summary>
private static void Usage()
{
    // All help text goes to the console through this single delegate.
    Action<string> display = Console.WriteLine;
    display(string.Empty);
    display($"DeleteLine is a console application written by Freddy Juhel for {Settings.Default.CompanyName}.");
    display($"DeleteLine.exe is in version {GetAssemblyVersion()}");
    display("DeleteLine needs Microsoft .NET framework 3.5 to run, if you don't have it, download it from microsoft.com.");
    display($"Copyrighted (c) 2017 by {Settings.Default.CompanyName}, all rights reserved.");
    display(string.Empty);
    display("Usage of this program:");
    display(string.Empty);
    display("List of arguments:");
    display(string.Empty);
    display("/help (this help)");
    display("/? (this help)");
    display(string.Empty);
    display(
      "You can write argument name (not its value) in uppercase or lowercase or a mixed of them (case insensitive)");
    display("/filename is the same as /FileName or /fileName or /FILENAME");
    display(string.Empty);
    display("/fileName:<name of the file to be processed>");
    // FIX: the switch is a single word; the help previously showed
    // "/output file name:" with spaces, which is not a valid switch.
    display("/outputFileName:<name of the file to be written after being processed>");
    display("/separator:<the CSV separator> semicolon (;) is the default separator");
    display("/hasHeader:<true or false> false by default");
    display("/hasFooter:<true or false> false by default");
    display("/deleteHeader:<true or false> false by default");
    display("/deleteFooter:<true or false> false by default");
    display("/deleteFirstColumn:<true or false> true by default");
    display("/sameName:<true or false> true by default");
    display("/newName:<new name of the file which has been processed>");
    display("/log:<true or false> false by default");
    display("/removeemptylines:<true or false> true by default");
    // FIX: the two lines below were missing the leading '/' that every other
    // switch in this help carries.
    display("/countLines:<true or false> false by default");
    display("/verifyHeaderAndFooter:<true or false> false by default");
    display(string.Empty);
    display("Examples:");
    display(string.Empty);
    display("DeleteLine /filename:MyCSVFile.txt /separator:, /hasheader:true /hasfooter:true /deleteheader:true /deletefooter:true /deletefirstcolumn:true /log:true");
    display(string.Empty);
    display("DeleteLine /help (this help)");
    display("DeleteLine /? (this help)");
    display(string.Empty);
}
}
}
| |
//-----------------------------------------------------------------------------
// Filename: SIPTCPChannel.cs
//
// Description: SIP transport for TCP.
//
// History:
// 19 Apr 2008 Aaron Clauson Created.
//
// License:
// This software is licensed under the BSD License http://www.opensource.org/licenses/bsd-license.php
//
// Copyright (c) 2006-2009 Aaron Clauson (aaron@sipsorcery.com), SIP Sorcery PTY LTD, Hobart, Australia (www.sipsorcery.com)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of SIP Sorcery PTY LTD.
// nor the names of its contributors may be used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
// BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
// OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//-----------------------------------------------------------------------------
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using SIPSorcery.Sys;
using log4net;
#if UNITTEST
using NUnit.Framework;
#endif
namespace SIPSorcery.SIP
{
/// <summary>
/// SIP transport channel over TCP. Accepts inbound connections on a listener
/// thread, establishes outbound connections on demand from Send, and tracks
/// per-socket connection failures so repeatedly unreachable remote sockets are
/// backed off for a fixed interval.
/// </summary>
public class SIPTCPChannel : SIPChannel
{
    private const string ACCEPT_THREAD_NAME = "siptcp-";
    private const string PRUNE_THREAD_NAME = "siptcpprune-";
    private const int MAX_TCP_CONNECTIONS = 1000; // Maximum number of connections for the TCP listener.
    //private const int MAX_TCP_CONNECTIONS_PER_IPADDRESS = 10; // Maximum number of connections allowed for a single remote IP address.
    private const int CONNECTION_ATTEMPTS_ALLOWED = 3; // The number of failed connection attempts permitted before classifying a remote socket as failed.
    private const int FAILED_CONNECTION_DONTUSE_INTERVAL = 300; // If a socket cannot be connected to don't try and reconnect to it for this interval (seconds).
    private static int MaxSIPTCPMessageSize = SIPConstants.SIP_MAXIMUM_RECEIVE_LENGTH;
    private TcpListener m_tcpServerListener;
    // Established connections keyed by remote end point string.
    // NOTE(review): AcceptConnections/SIPTCPSocketDisconnected guard this map
    // with a lock but Send and EndConnect read/mutate it without one — confirm
    // whether that race is acceptable here.
    private Dictionary<string, SIPConnection> m_connectedSockets = new Dictionary<string, SIPConnection>();
    private List<string> m_connectingSockets = new List<string>(); // List of sockets that are in the process of being connected to. Need to avoid SIP re-transmits initiating multiple connect attempts.
    private Dictionary<string, int> m_connectionFailureStrikes = new Dictionary<string, int>(); // Tracks the number of connection attempts made to a remote socket, three strikes and it's out.
    private Dictionary<string, DateTime> m_connectionFailures = new Dictionary<string, DateTime>(); // Tracks sockets that have had a connection failure on them to avoid endless re-connect attempts.
    /// <summary>
    /// Creates the channel, registers the local socket with LocalTCPSockets and
    /// starts the listener and prune threads.
    /// </summary>
    /// <param name="endPoint">The local IP end point to listen on.</param>
    public SIPTCPChannel(IPEndPoint endPoint)
    {
        m_localSIPEndPoint = new SIPEndPoint(SIPProtocolsEnum.tcp, endPoint);
        LocalTCPSockets.Add(endPoint.ToString());
        m_isReliable = true;
        Initialise();
    }
    /// <summary>
    /// Starts the TCP listener and queues the accept and prune worker threads.
    /// </summary>
    private void Initialise()
    {
        try
        {
            m_tcpServerListener = new TcpListener(m_localSIPEndPoint.GetIPEndPoint());
            m_tcpServerListener.Server.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
            m_tcpServerListener.Start(MAX_TCP_CONNECTIONS);
            ThreadPool.QueueUserWorkItem(delegate { AcceptConnections(ACCEPT_THREAD_NAME + m_localSIPEndPoint.Port); });
            ThreadPool.QueueUserWorkItem(delegate { PruneConnections(PRUNE_THREAD_NAME + m_localSIPEndPoint.Port); });
            logger.Debug("SIP TCP Channel listener created " + m_localSIPEndPoint.GetIPEndPoint() + ".");
        }
        catch (Exception excp)
        {
            logger.Error("Exception SIPTCPChannel Initialise. " + excp.Message);
            // FIX: rethrow with "throw;" — "throw excp;" reset the stack trace.
            throw;
        }
    }
    /// <summary>
    /// Blocking accept loop; runs on its own thread until the channel is closed.
    /// Each accepted client is wrapped in a SIPConnection, registered and a
    /// BeginRead started on its stream.
    /// </summary>
    /// <param name="threadName">Name assigned to the accept thread for diagnostics.</param>
    private void AcceptConnections(string threadName)
    {
        try
        {
            Thread.CurrentThread.Name = threadName;
            logger.Debug("SIPTCPChannel socket on " + m_localSIPEndPoint + " accept connections thread started.");
            while (!Closed)
            {
                try
                {
                    TcpClient tcpClient = m_tcpServerListener.AcceptTcpClient();
                    tcpClient.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
                    IPEndPoint remoteEndPoint = (IPEndPoint)tcpClient.Client.RemoteEndPoint;
                    logger.Debug("SIP TCP Channel connection accepted from " + remoteEndPoint + ".");
                    SIPConnection sipTCPConnection = new SIPConnection(this, tcpClient.GetStream(), remoteEndPoint, SIPProtocolsEnum.tcp, SIPConnectionsEnum.Listener);
                    lock (m_connectedSockets)
                    {
                        m_connectedSockets.Add(remoteEndPoint.ToString(), sipTCPConnection);
                    }
                    sipTCPConnection.SIPSocketDisconnected += SIPTCPSocketDisconnected;
                    sipTCPConnection.SIPMessageReceived += SIPTCPMessageReceived;
                    sipTCPConnection.SIPStream.BeginRead(sipTCPConnection.SocketBuffer, 0, MaxSIPTCPMessageSize, new AsyncCallback(ReceiveCallback), sipTCPConnection);
                }
                catch (Exception acceptExcp)
                {
                    // This exception gets thrown if the remote end disconnects during the socket accept.
                    logger.Warn("Exception SIPTCPChannel accepting socket (" + acceptExcp.GetType() + "). " + acceptExcp.Message);
                }
            }
            logger.Debug("SIPTCPChannel socket on " + m_localSIPEndPoint + " listening halted.");
        }
        catch (Exception excp)
        {
            logger.Error("Exception SIPTCPChannel Listen. " + excp.Message);
            //throw excp;
        }
    }
    /// <summary>
    /// Completion callback for the async reads started on a connection's
    /// stream. Re-arms the read as long as the connection reports the read
    /// completed cleanly; any non-socket failure disconnects the socket.
    /// </summary>
    /// <param name="ar">Async result carrying the SIPConnection as state.</param>
    public void ReceiveCallback(IAsyncResult ar)
    {
        SIPConnection sipTCPConnection = (SIPConnection)ar.AsyncState;
        try
        {
            int bytesRead = sipTCPConnection.SIPStream.EndRead(ar);
            if (sipTCPConnection.SocketReadCompleted(bytesRead))
            {
                // Continue reading into the unconsumed tail of the buffer.
                sipTCPConnection.SIPStream.BeginRead(sipTCPConnection.SocketBuffer, sipTCPConnection.SocketBufferEndPosition, MaxSIPTCPMessageSize - sipTCPConnection.SocketBufferEndPosition, new AsyncCallback(ReceiveCallback), sipTCPConnection);
            }
        }
        catch (SocketException) // Occurs if the remote end gets disconnected.
        { }
        catch (Exception excp)
        {
            logger.Warn("Exception SIPTCPChannel ReceiveCallback. " + excp.Message);
            SIPTCPSocketDisconnected(sipTCPConnection.RemoteEndPoint);
        }
    }
    /// <summary>
    /// True when an established connection to the remote end point exists.
    /// </summary>
    public override bool IsConnectionEstablished(IPEndPoint remoteEndPoint)
    {
        lock (m_connectedSockets)
        {
            return m_connectedSockets.ContainsKey(remoteEndPoint.ToString());
        }
    }
    /// <summary>
    /// Exposes the live connection map to the base class (used for pruning).
    /// </summary>
    protected override Dictionary<string, SIPConnection> GetConnectionsList()
    {
        return m_connectedSockets;
    }
    /// <summary>
    /// Removes a disconnected remote socket from the connection map.
    /// </summary>
    private void SIPTCPSocketDisconnected(IPEndPoint remoteEndPoint)
    {
        try
        {
            logger.Debug("TCP socket from " + remoteEndPoint + " disconnected.");
            lock (m_connectedSockets)
            {
                if (m_connectedSockets.ContainsKey(remoteEndPoint.ToString()))
                {
                    m_connectedSockets.Remove(remoteEndPoint.ToString());
                }
            }
        }
        catch (Exception excp)
        {
            logger.Error("Exception SIPTCPClientDisconnected. " + excp.Message);
        }
    }
    /// <summary>
    /// Raised when a SIP message arrives on any connection. Receiving traffic
    /// from a remote socket clears its failure bookkeeping before forwarding
    /// the message to subscribers.
    /// </summary>
    private void SIPTCPMessageReceived(SIPChannel channel, SIPEndPoint remoteEndPoint, byte[] buffer)
    {
        if (m_connectionFailures.ContainsKey(remoteEndPoint.GetIPEndPoint().ToString()))
        {
            m_connectionFailures.Remove(remoteEndPoint.GetIPEndPoint().ToString());
        }
        if (m_connectionFailureStrikes.ContainsKey(remoteEndPoint.GetIPEndPoint().ToString()))
        {
            m_connectionFailureStrikes.Remove(remoteEndPoint.GetIPEndPoint().ToString());
        }
        if (SIPMessageReceived != null)
        {
            SIPMessageReceived(channel, remoteEndPoint, buffer);
        }
    }
    /// <summary>
    /// Sends a string message as UTF-8 bytes to the destination end point.
    /// </summary>
    public override void Send(IPEndPoint destinationEndPoint, string message)
    {
        byte[] messageBuffer = Encoding.UTF8.GetBytes(message);
        Send(destinationEndPoint, messageBuffer);
    }
    /// <summary>
    /// Sends a buffer to the destination, reusing an existing connection when
    /// one is available and otherwise initiating an async connect (unless the
    /// destination has recently failed too many times).
    /// </summary>
    /// <exception cref="ApplicationException">
    /// Thrown for a null buffer, a destination that is one of this host's own
    /// SIP TCP sockets, or a destination on the failed-connection blacklist.
    /// </exception>
    public override void Send(IPEndPoint dstEndPoint, byte[] buffer)
    {
        try
        {
            if (buffer == null)
            {
                throw new ApplicationException("An empty buffer was specified to Send in SIPTCPChannel.");
            }
            else if (LocalTCPSockets.Contains(dstEndPoint.ToString()))
            {
                logger.Error("SIPTCPChannel blocked Send to " + dstEndPoint.ToString() + " as it was identified as a locally hosted TCP socket.\r\n" + Encoding.UTF8.GetString(buffer));
                throw new ApplicationException("A Send call was made in SIPTCPChannel to send to another local TCP socket.");
            }
            else
            {
                bool sent = false;
                // Lookup a client socket that is connected to the destination.
                if (m_connectedSockets.ContainsKey(dstEndPoint.ToString()))
                {
                    SIPConnection sipTCPClient = m_connectedSockets[dstEndPoint.ToString()];
                    try
                    {
                        sipTCPClient.SIPStream.BeginWrite(buffer, 0, buffer.Length, new AsyncCallback(EndSend), sipTCPClient);
                        sent = true;
                        sipTCPClient.LastTransmission = DateTime.Now;
                    }
                    catch (SocketException)
                    {
                        logger.Warn("Could not send to TCP socket " + dstEndPoint + ", closing and removing.");
                        sipTCPClient.SIPStream.Close();
                        m_connectedSockets.Remove(dstEndPoint.ToString());
                    }
                }
                if (!sent)
                {
                    // Expire a recorded failure once it is older than the back-off interval.
                    if (m_connectionFailures.ContainsKey(dstEndPoint.ToString()) && m_connectionFailures[dstEndPoint.ToString()] < DateTime.Now.AddSeconds(FAILED_CONNECTION_DONTUSE_INTERVAL * -1))
                    {
                        m_connectionFailures.Remove(dstEndPoint.ToString());
                    }
                    if (m_connectionFailures.ContainsKey(dstEndPoint.ToString()))
                    {
                        throw new ApplicationException("TCP connection attempt to " + dstEndPoint.ToString() + " was not attempted, too many failures.");
                    }
                    else if (!m_connectingSockets.Contains(dstEndPoint.ToString()))
                    {
                        logger.Debug("Attempting to establish TCP connection to " + dstEndPoint + ".");
                        TcpClient tcpClient = new TcpClient();
                        tcpClient.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
                        tcpClient.Client.Bind(m_localSIPEndPoint.GetIPEndPoint());
                        m_connectingSockets.Add(dstEndPoint.ToString());
                        tcpClient.BeginConnect(dstEndPoint.Address, dstEndPoint.Port, EndConnect, new object[] { tcpClient, dstEndPoint, buffer });
                    }
                    else
                    {
                        // A connection attempt to this destination is already in
                        // progress; the message is dropped (SIP re-transmits cover it).
                    }
                }
            }
        }
        catch (ApplicationException appExcp)
        {
            logger.Warn("ApplicationException SIPTCPChannel Send (sendto=>" + dstEndPoint + "). " + appExcp.Message);
            throw;
        }
        catch (Exception excp)
        {
            logger.Error("Exception (" + excp.GetType().ToString() + ") SIPTCPChannel Send (sendto=>" + dstEndPoint + "). " + excp.Message);
            throw;
        }
    }
    /// <summary>
    /// Completion callback for async writes; completes the write and swallows
    /// (but logs) any failure.
    /// </summary>
    private void EndSend(IAsyncResult ar)
    {
        try
        {
            SIPConnection sipTCPConnection = (SIPConnection)ar.AsyncState;
            sipTCPConnection.SIPStream.EndWrite(ar);
        }
        catch (Exception excp)
        {
            logger.Error("Exception EndSend. " + excp.Message);
        }
    }
    /// <summary>
    /// TLS-style sends are not supported on the plain TCP channel.
    /// </summary>
    /// <exception cref="ApplicationException">Always thrown.</exception>
    public override void Send(IPEndPoint dstEndPoint, byte[] buffer, string serverCertificateName)
    {
        throw new ApplicationException("This Send method is not available in the SIP TCP channel, please use an alternative overload.");
    }
    /// <summary>
    /// Completion callback for outbound connection attempts. On success the
    /// connection is registered, a read is armed and the pending buffer is
    /// written; on failure a strike is recorded against the destination and
    /// after CONNECTION_ATTEMPTS_ALLOWED strikes it is blacklisted.
    /// </summary>
    private void EndConnect(IAsyncResult ar)
    {
        bool connected = false;
        IPEndPoint dstEndPoint = null;
        try
        {
            object[] stateObj = (object[])ar.AsyncState;
            TcpClient tcpClient = (TcpClient)stateObj[0];
            dstEndPoint = (IPEndPoint)stateObj[1];
            byte[] buffer = (byte[])stateObj[2];
            m_connectingSockets.Remove(dstEndPoint.ToString());
            tcpClient.EndConnect(ar);
            if (tcpClient != null && tcpClient.Connected)
            {
                logger.Debug("Established TCP connection to " + dstEndPoint + ".");
                connected = true;
                m_connectionFailureStrikes.Remove(dstEndPoint.ToString());
                m_connectionFailures.Remove(dstEndPoint.ToString());
                SIPConnection callerConnection = new SIPConnection(this, tcpClient.GetStream(), dstEndPoint, SIPProtocolsEnum.tcp, SIPConnectionsEnum.Caller);
                m_connectedSockets.Add(dstEndPoint.ToString(), callerConnection);
                callerConnection.SIPSocketDisconnected += SIPTCPSocketDisconnected;
                callerConnection.SIPMessageReceived += SIPTCPMessageReceived;
                callerConnection.SIPStream.BeginRead(callerConnection.SocketBuffer, 0, MaxSIPTCPMessageSize, new AsyncCallback(ReceiveCallback), callerConnection);
                // Flush the message that triggered this connection attempt.
                callerConnection.SIPStream.BeginWrite(buffer, 0, buffer.Length, EndSend, callerConnection);
            }
            else
            {
                logger.Warn("Could not establish TCP connection to " + dstEndPoint + ".");
            }
        }
        catch (SocketException sockExcp)
        {
            logger.Warn("SocketException SIPTCPChannel EndConnect. " + sockExcp.Message);
        }
        catch (Exception excp)
        {
            logger.Error("Exception SIPTCPChannel EndConnect (" + excp.GetType() + "). " + excp.Message);
        }
        finally
        {
            if (!connected && dstEndPoint != null)
            {
                // Record a strike against the destination; three strikes moves it
                // onto the failed-connection blacklist for the back-off interval.
                if (m_connectionFailureStrikes.ContainsKey(dstEndPoint.ToString()))
                {
                    m_connectionFailureStrikes[dstEndPoint.ToString()] = m_connectionFailureStrikes[dstEndPoint.ToString()] + 1;
                }
                else
                {
                    m_connectionFailureStrikes.Add(dstEndPoint.ToString(), 1);
                }
                if (m_connectionFailureStrikes[dstEndPoint.ToString()] >= CONNECTION_ATTEMPTS_ALLOWED)
                {
                    if (!m_connectionFailures.ContainsKey(dstEndPoint.ToString()))
                    {
                        m_connectionFailures.Add(dstEndPoint.ToString(), DateTime.Now);
                    }
                    m_connectionFailureStrikes.Remove(dstEndPoint.ToString());
                }
            }
        }
    }
    /// <summary>
    /// Stops the listener and closes every established connection's stream.
    /// </summary>
    public override void Close()
    {
        logger.Debug("Closing SIP TCP Channel " + SIPChannelEndPoint + ".");
        Closed = true;
        try
        {
            m_tcpServerListener.Stop();
        }
        catch (Exception listenerCloseExcp)
        {
            logger.Warn("Exception SIPTCPChannel Close (shutting down listener). " + listenerCloseExcp.Message);
        }
        foreach (SIPConnection tcpConnection in m_connectedSockets.Values)
        {
            try
            {
                tcpConnection.SIPStream.Close();
            }
            catch (Exception connectionCloseExcp)
            {
                logger.Warn("Exception SIPTCPChannel Close (shutting down connection to " + tcpConnection.RemoteEndPoint + "). " + connectionCloseExcp.Message);
            }
        }
    }
    // NOTE(review): private and never invoked from the visible code; the class
    // does not implement IDisposable, so this only delegates to Close.
    private void Dispose(bool disposing)
    {
        try
        {
            this.Close();
        }
        catch (Exception excp)
        {
            logger.Error("Exception Disposing SIPTCPChannel. " + excp.Message);
        }
    }
}
}
| |
using EventCounterCollector.Tests;
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.Channel;
using Microsoft.ApplicationInsights.DataContracts;
using Microsoft.ApplicationInsights.Extensibility;
using Microsoft.ApplicationInsights.Extensibility.EventCounterCollector;
using Microsoft.ApplicationInsights.Extensibility.EventCounterCollector.Implementation;
using Microsoft.ApplicationInsights.Extensibility.Implementation;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading.Tasks;
namespace EventCounterCollector.Tests
{
[TestClass]
public class EventCounterCollectionModuleTests
{
    // Event source / counter names; these must match the names the
    // TestEventCounter helper publishes under.
    private string TestEventCounterSourceName = "Microsoft-ApplicationInsights-Extensibility-EventCounterCollector.Tests.TestEventCounter";
    private string TestEventCounterName1 = "mycountername1";

    /// <summary>
    /// Initializing the module with no counters configured should emit the
    /// "no counter configured" diagnostic event.
    /// </summary>
    [TestMethod]
    [TestCategory("EventCounter")]
    public void WarnsIfNoCountersConfigured()
    {
        using (var eventListener = new EventCounterCollectorDiagnosticListener())
        using (var module = new EventCounterCollectionModule())
        {
            ConcurrentQueue<ITelemetry> itemsReceived = new ConcurrentQueue<ITelemetry>();
            module.Initialize(GetTestTelemetryConfiguration(itemsReceived));
            Assert.IsTrue(CheckEventReceived(eventListener.EventsReceived, nameof(EventCounterCollectorEventSource.ModuleIsBeingInitializedEvent)));
            Assert.IsTrue(CheckEventReceived(eventListener.EventsReceived, nameof(EventCounterCollectorEventSource.EventCounterCollectorNoCounterConfigured)));
        }
    }

    /// <summary>
    /// Events fired for a counter that is NOT in the configured list should be
    /// ignored, and the corresponding diagnostic event emitted.
    /// </summary>
    [TestMethod]
    [TestCategory("EventCounter")]
    public void IgnoresUnconfiguredEventCounter()
    {
        // ARRANGE
        const int refreshTimeInSecs = 1;
        ConcurrentQueue<ITelemetry> itemsReceived = new ConcurrentQueue<ITelemetry>();
        using (var eventListener = new EventCounterCollectorDiagnosticListener())
        using (var module = new EventCounterCollectionModule(refreshTimeInSecs))
        {
            // Only counter 'mycountername1' is configured for collection.
            module.Counters.Add(new EventCounterCollectionRequest() { EventSourceName = this.TestEventCounterSourceName, EventCounterName = this.TestEventCounterName1 });
            module.Initialize(GetTestTelemetryConfiguration(itemsReceived));
            // ACT
            // These will fire counters 'mycountername2' which is not in the configured list.
            TestEventCounter.Log.SampleCounter2(1500);
            TestEventCounter.Log.SampleCounter2(400);
            // Wait at least for refresh time.
            Task.Delay(((int)refreshTimeInSecs * 1000) + 500).Wait();
            // VALIDATE
            Assert.IsTrue(CheckEventReceived(eventListener.EventsReceived, nameof(EventCounterCollectorEventSource.IgnoreEventWrittenAsCounterNotInConfiguredList)));
        }
    }

    /// <summary>
    /// A configured counter should be aggregated over the refresh interval and
    /// reported as a single MetricTelemetry; once publishing stops, subsequent
    /// intervals report zero.
    /// </summary>
    [TestMethod]
    [TestCategory("EventCounter")]
    public void ValidateSingleEventCounterCollection()
    {
        // ARRANGE
        const int refreshTimeInSecs = 1;
        ConcurrentQueue<ITelemetry> itemsReceived = new ConcurrentQueue<ITelemetry>();
        // Default naming: "<event source name>|<counter name>", empty namespace.
        string expectedName = this.TestEventCounterSourceName + "|" + this.TestEventCounterName1;
        string expectedMetricNamespace = String.Empty;
        // Integer division, but (1000+1500+1500+400)/4 == 1100 exactly.
        double expectedMetricValue = (1000 + 1500 + 1500 + 400) / 4;
        int expectedMetricCount = 4;
        using (var module = new EventCounterCollectionModule(refreshTimeInSecs))
        {
            module.Counters.Add(new EventCounterCollectionRequest() {EventSourceName = this.TestEventCounterSourceName, EventCounterName = this.TestEventCounterName1 });
            module.Initialize(GetTestTelemetryConfiguration(itemsReceived));
            // ACT
            // Making 4 calls with 1000, 1500, 1500, 400 value, leading to an average of 1100.
            TestEventCounter.Log.SampleCounter1(1000);
            TestEventCounter.Log.SampleCounter1(1500);
            TestEventCounter.Log.SampleCounter1(1500);
            TestEventCounter.Log.SampleCounter1(400);
            // Wait at least for refresh time.
            Task.Delay(((int) refreshTimeInSecs * 1000) + 500).Wait();
            PrintTelemetryItems(itemsReceived);
            // VALIDATE
            ValidateTelemetry(itemsReceived, expectedName, expectedMetricNamespace, expectedMetricValue, expectedMetricCount);
            // Wait another refresh interval to receive more events, but with zero as counter values.
            // as nobody is publishing events.
            Task.Delay(((int)refreshTimeInSecs * 1000)).Wait();
            Assert.IsTrue(itemsReceived.Count >= 1);
            PrintTelemetryItems(itemsReceived);
            ValidateTelemetry(itemsReceived, expectedName, expectedMetricNamespace, 0.0, 0);
        }
    }

    /// <summary>
    /// When UseEventSourceNameAsMetricsNamespace is set, the metric name is the
    /// bare counter name and the namespace is the event source name.
    /// </summary>
    [TestMethod]
    [TestCategory("EventCounter")]
    public void ValidateConfiguredNamingOptions()
    {
        // ARRANGE
        const int refreshTimeInSecs = 1;
        ConcurrentQueue<ITelemetry> itemsReceived = new ConcurrentQueue<ITelemetry>();
        string expectedName = this.TestEventCounterName1;
        string expectedMetricNamespace = this.TestEventCounterSourceName;
        double expectedMetricValue = 1000;
        int expectedMetricCount = 1;
        using (var module = new EventCounterCollectionModule(refreshTimeInSecs))
        {
            module.UseEventSourceNameAsMetricsNamespace = true;
            module.Counters.Add(new EventCounterCollectionRequest() { EventSourceName = this.TestEventCounterSourceName, EventCounterName = this.TestEventCounterName1 });
            module.Initialize(GetTestTelemetryConfiguration(itemsReceived));
            // ACT
            // Making a call with 1000
            TestEventCounter.Log.SampleCounter1(1000);
            // Wait at least for refresh time.
            Task.Delay(((int)refreshTimeInSecs * 1000) + 500).Wait();
            PrintTelemetryItems(itemsReceived);
            // VALIDATE
            ValidateTelemetry(itemsReceived, expectedName, expectedMetricNamespace, expectedMetricValue, expectedMetricCount);
        }
    }

    /// <summary>
    /// Drains the queue and checks the aggregated sum/count plus the name,
    /// namespace and SDK version of every dequeued metric item.
    /// </summary>
    /// <param name="metricTelemetries">Queue of received telemetry; emptied by this call.</param>
    /// <param name="expectedName">Expected metric name on every item.</param>
    /// <param name="expectedMetricNamespace">Expected metric namespace on every item.</param>
    /// <param name="expectedSum">Expected total of all item sums.</param>
    /// <param name="expectedCount">Expected total of all item counts.</param>
    private void ValidateTelemetry(ConcurrentQueue<ITelemetry> metricTelemetries, string expectedName, string expectedMetricNamespace, double expectedSum, double expectedCount)
    {
        double sum = 0.0;
        int count = 0;
        while (metricTelemetries.TryDequeue(out ITelemetry telemetry))
        {
            // NOTE(review): assumes every queued item is a MetricTelemetry; a
            // non-metric item would make the next line throw on null.
            var metricTelemetry = telemetry as MetricTelemetry;
            count = count + metricTelemetry.Count.Value;
            if (!double.IsNaN(metricTelemetry.Sum)) // TODO: WHY IS SUM NaN ?
            {
                sum += metricTelemetry.Sum;
            }
            Assert.IsTrue(metricTelemetry.Context.GetInternalContext().SdkVersion.StartsWith("evtc"));
            Assert.AreEqual(expectedName, metricTelemetry.Name);
            Assert.AreEqual(expectedMetricNamespace, metricTelemetry.MetricNamespace);
            Assert.IsFalse((telemetry as ISupportProperties).Properties.ContainsKey("CustomPerfCounter"));
        }
        Assert.AreEqual(expectedSum, sum);
        Assert.AreEqual(expectedCount, count);
    }

    /// <summary>
    /// Dumps every received metric item to the test trace for diagnostics;
    /// does not modify the queue.
    /// </summary>
    private void PrintTelemetryItems(ConcurrentQueue<ITelemetry> telemetry)
    {
        Trace.WriteLine("Received count:" + telemetry.Count);
        foreach (var item in telemetry)
        {
            if (item is MetricTelemetry metric)
            {
                Trace.WriteLine("Metric.Name:" + metric.Name);
                Trace.WriteLine("Metric.MetricNamespace:" + metric.MetricNamespace);
                Trace.WriteLine("Metric.Sum:" + metric.Sum);
                Trace.WriteLine("Metric.Count:" + metric.Count);
                Trace.WriteLine("Metric.Timestamp:" + metric.Timestamp);
                Trace.WriteLine("Metric.Sdk:" + metric.Context.GetInternalContext().SdkVersion);
                foreach (var prop in metric.Properties)
                {
                    Trace.WriteLine("Metric. Prop:" + "Key:" + prop.Key + "Value:" + prop.Value);
                }
            }
            Trace.WriteLine("======================================");
        }
    }

    /// <summary>
    /// True when the diagnostic listener recorded the given event name.
    /// </summary>
    private bool CheckEventReceived(ConcurrentQueue<string> allEvents, string expectedEvent)
    {
        bool found = false;
        foreach(var evt in allEvents)
        {
            if(evt.Equals(expectedEvent))
            {
                found = true;
                break;
            }
        }
        return found;
    }

    /// <summary>
    /// Builds a TelemetryConfiguration whose channel routes all telemetry into
    /// the supplied queue so tests can inspect what was sent.
    /// </summary>
    private TelemetryConfiguration GetTestTelemetryConfiguration(ConcurrentQueue<ITelemetry> itemsReceived)
    {
        var configuration = new TelemetryConfiguration();
        configuration.InstrumentationKey = "testkey";
        configuration.TelemetryChannel = new TestChannel(itemsReceived);
        return configuration;
    }
}
}
| |
// leveldb-sharp
//
// Copyright (c) 2011 The LevelDB Authors
// Copyright (c) 2012-2013, Mirco Bauer <meebey@meebey.net>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
using System;
using System.Text;
using System.Runtime.InteropServices;
namespace LevelDB
{
/// <summary>
/// Native method P/Invoke declarations for LevelDB
/// </summary>
public static class Native
{
public static void CheckError(string error)
{
if (String.IsNullOrEmpty(error)) {
return;
}
throw new ApplicationException(error);
}
public static void CheckError(IntPtr error)
{
if (error == IntPtr.Zero) {
return;
}
CheckError(GetAndReleaseString(error));
}
public static UIntPtr GetStringLength(string value)
{
if (value == null || value.Length == 0) {
return UIntPtr.Zero;
}
return new UIntPtr((uint) Encoding.UTF8.GetByteCount(value));
}
public static string GetAndReleaseString(IntPtr ptr)
{
if (ptr == IntPtr.Zero) {
return null;
}
var str = Marshal.PtrToStringAnsi(ptr);
leveldb_free(ptr);
return str;
}
        #region DB operations
        #region leveldb_open
        // Opens the database at <name>; returns a leveldb_t* handle. On failure the
        // out parameter receives a malloc()-ed error message pointer.
        // extern leveldb_t* leveldb_open(const leveldb_options_t* options, const char* name, char** errptr);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_open(IntPtr options, string name, out IntPtr error);
        // Convenience overload: converts the native error pointer to a managed string.
        public static IntPtr leveldb_open(IntPtr options, string name, out string error)
        {
            IntPtr errorPtr;
            var db = leveldb_open(options, name, out errorPtr);
            error = GetAndReleaseString(errorPtr);
            return db;
        }
        // Convenience overload: throws ApplicationException on failure.
        public static IntPtr leveldb_open(IntPtr options, string name)
        {
            string error;
            var db = leveldb_open(options, name, out error);
            CheckError(error);
            return db;
        }
        #endregion
        // Closes the database handle; the handle must not be used afterwards.
        // extern void leveldb_close(leveldb_t* db);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_close(IntPtr db);
        #region leveldb_put
        // Stores a key/value pair; keylen/vallen are byte lengths (size_t).
        // extern void leveldb_put(leveldb_t* db, const leveldb_writeoptions_t* options, const char* key, size_t keylen, const char* val, size_t vallen, char** errptr);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_put(IntPtr db,
                                              IntPtr writeOptions,
                                              string key,
                                              UIntPtr keyLength,
                                              string value,
                                              UIntPtr valueLength,
                                              out IntPtr error);
        // Convenience overload: converts the native error pointer to a managed string.
        public static void leveldb_put(IntPtr db,
                                       IntPtr writeOptions,
                                       string key,
                                       UIntPtr keyLength,
                                       string value,
                                       UIntPtr valueLength,
                                       out string error)
        {
            IntPtr errorPtr;
            leveldb_put(db, writeOptions, key, keyLength, value, valueLength,
                        out errorPtr);
            error = GetAndReleaseString(errorPtr);
        }
        // Convenience overload: computes UTF-8 lengths and throws on failure.
        public static void leveldb_put(IntPtr db,
                                       IntPtr writeOptions,
                                       string key,
                                       string value)
        {
            string error;
            var keyLength = GetStringLength(key);
            var valueLength = GetStringLength(value);
            Native.leveldb_put(db, writeOptions,
                               key, keyLength,
                               value, valueLength, out error);
            CheckError(error);
        }
        #endregion
        #region leveldb_delete
        // Removes the entry for key, if any.
        // extern void leveldb_delete(leveldb_t* db, const leveldb_writeoptions_t* options, const char* key, size_t keylen, char** errptr);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_delete(IntPtr db, IntPtr writeOptions, string key, UIntPtr keylen, out IntPtr error);
        // Convenience overload: converts the native error pointer to a managed string.
        public static void leveldb_delete(IntPtr db, IntPtr writeOptions, string key, UIntPtr keylen, out string error)
        {
            IntPtr errorPtr;
            leveldb_delete(db, writeOptions, key, keylen, out errorPtr);
            error = GetAndReleaseString(errorPtr);
        }
        // Convenience overload: computes the UTF-8 key length and throws on failure.
        public static void leveldb_delete(IntPtr db, IntPtr writeOptions, string key)
        {
            string error;
            var keyLength = GetStringLength(key);
            leveldb_delete(db, writeOptions, key, keyLength, out error);
            CheckError(error);
        }
        #endregion
        #region leveldb_write
        // Applies the operations recorded in a write batch to the database.
        // extern void leveldb_write(leveldb_t* db, const leveldb_writeoptions_t* options, leveldb_writebatch_t* batch, char** errptr);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_write(IntPtr db, IntPtr writeOptions, IntPtr writeBatch, out IntPtr error);
        // Convenience overload: converts the native error pointer to a managed string.
        public static void leveldb_write(IntPtr db, IntPtr writeOptions, IntPtr writeBatch, out string error)
        {
            IntPtr errorPtr;
            leveldb_write(db, writeOptions, writeBatch, out errorPtr);
            error = GetAndReleaseString(errorPtr);
        }
        // Convenience overload: throws on failure.
        public static void leveldb_write(IntPtr db, IntPtr writeOptions, IntPtr writeBatch)
        {
            string error;
            leveldb_write(db, writeOptions, writeBatch, out error);
            CheckError(error);
        }
        #endregion
        #region leveldb_get
        // Looks up a key; returns a malloc()-ed value buffer (caller frees via
        // leveldb_free) and its length, or NULL when the key is absent.
        // extern char* leveldb_get(leveldb_t* db, const leveldb_readoptions_t* options, const char* key, size_t keylen, size_t* vallen, char** errptr);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_get(IntPtr db,
                                                IntPtr readOptions,
                                                string key,
                                                UIntPtr keyLength,
                                                out UIntPtr valueLength,
                                                out IntPtr error);
        // Convenience overload: converts the native error pointer to a managed string.
        public static IntPtr leveldb_get(IntPtr db,
                                         IntPtr readOptions,
                                         string key,
                                         UIntPtr keyLength,
                                         out UIntPtr valueLength,
                                         out string error)
        {
            IntPtr errorPtr;
            var valuePtr = leveldb_get(db, readOptions, key, keyLength,
                                       out valueLength, out errorPtr);
            error = GetAndReleaseString(errorPtr);
            return valuePtr;
        }
        // Convenience overload: returns the value as a managed string, or null when
        // the key is absent (null pointer or zero-length result). Throws on failure.
        public static string leveldb_get(IntPtr db,
                                         IntPtr readOptions,
                                         string key)
        {
            UIntPtr valueLength;
            string error;
            var keyLength = GetStringLength(key);
            var valuePtr = leveldb_get(db, readOptions, key, keyLength,
                                       out valueLength, out error);
            CheckError(error);
            if (valuePtr == IntPtr.Zero || valueLength == UIntPtr.Zero) {
                return null;
            }
            // NOTE(review): key length is computed as UTF-8 bytes, but the value is
            // decoded with PtrToStringAnsi; non-ASCII values may be mangled — confirm.
            var value = Marshal.PtrToStringAnsi(valuePtr, (int) valueLength);
            leveldb_free(valuePtr);
            return value;
        }
        #endregion
        // Creates an iterator over the database; destroy with leveldb_iter_destroy.
        // extern leveldb_iterator_t* leveldb_create_iterator(leveldb_t* db, const leveldb_readoptions_t* options);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_create_iterator(IntPtr db, IntPtr readOptions);
        // Creates a snapshot handle; release with leveldb_release_snapshot.
        // extern const leveldb_snapshot_t* leveldb_create_snapshot(leveldb_t* db);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_create_snapshot(IntPtr db);
        // extern void leveldb_release_snapshot(leveldb_t* db, const leveldb_snapshot_t* snapshot);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_release_snapshot(IntPtr db, IntPtr snapshot);
        /// <summary>
        /// Returns NULL if property name is unknown.
        /// Else returns a pointer to a malloc()-ed null-terminated value.
        /// </summary>
        // extern char* leveldb_property_value(leveldb_t* db, const char* propname);
        [DllImport("leveldb", EntryPoint="leveldb_property_value", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_property_value_native(IntPtr db, string propname);
        // Managed wrapper: marshals the property value, frees the native buffer,
        // and returns null when the property name is unknown.
        public static string leveldb_property_value(IntPtr db, string propname)
        {
            var valuePtr = leveldb_property_value_native(db, propname);
            if (valuePtr == IntPtr.Zero) {
                return null;
            }
            var value = Marshal.PtrToStringAnsi(valuePtr);
            leveldb_free(valuePtr);
            return value;
        }
        // Not yet wrapped:
        // extern void leveldb_approximate_sizes(
        //     leveldb_t* db, int num_ranges,
        //     const char* const* range_start_key,
        //     const size_t* range_start_key_len,
        //     const char* const* range_limit_key,
        //     const size_t* range_limit_key_len,
        //     uint64_t* sizes);
        /// <summary>
        /// Compact the underlying storage for the key range [startKey,limitKey].
        /// In particular, deleted and overwritten versions are discarded,
        /// and the data is rearranged to reduce the cost of operations
        /// needed to access the data. This operation should typically only
        /// be invoked by users who understand the underlying implementation.
        ///
        /// startKey==null is treated as a key before all keys in the database.
        /// limitKey==null is treated as a key after all keys in the database.
        /// Therefore the following call will compact the entire database:
        /// leveldb_compact_range(db, null, null);
        /// </summary>
        // extern void leveldb_compact_range(leveldb_t* db,
        //                                   const char* start_key, size_t start_key_len,
        //                                   const char* limit_key, size_t limit_key_len);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_compact_range(IntPtr db,
                                                        string startKey,
                                                        UIntPtr startKeyLen,
                                                        string limitKey,
                                                        UIntPtr limitKeyLen);
        // Convenience overload: computes UTF-8 key lengths (zero for null keys).
        public static void leveldb_compact_range(IntPtr db,
                                                 string startKey,
                                                 string limitKey)
        {
            leveldb_compact_range(db,
                                  startKey, GetStringLength(startKey),
                                  limitKey, GetStringLength(limitKey));
        }
        #endregion
        #region Management operations
        #region leveldb_destroy_db
        // Destroys the database files at the given path.
        // extern void leveldb_destroy_db(const leveldb_options_t* options, const char* name, char** errptr);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_destroy_db(IntPtr options, string path, out IntPtr error);
        // Convenience overload: converts the native error pointer to a managed string.
        public static void leveldb_destroy_db(IntPtr options, string path, out string error)
        {
            IntPtr errorPtr;
            leveldb_destroy_db(options, path, out errorPtr);
            error = GetAndReleaseString(errorPtr);
        }
        // Convenience overload: throws on failure.
        public static void leveldb_destroy_db(IntPtr options, string path)
        {
            string error;
            leveldb_destroy_db(options, path, out error);
            CheckError(error);
        }
        #endregion
        #region leveldb_repair_db
        // Attempts to repair a corrupted database at the given path.
        // extern void leveldb_repair_db(const leveldb_options_t* options, const char* name, char** errptr);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_repair_db(IntPtr options, string path, out IntPtr error);
        // Convenience overload: converts the native error pointer to a managed string.
        public static void leveldb_repair_db(IntPtr options, string path, out string error)
        {
            IntPtr errorPtr;
            leveldb_repair_db(options, path, out errorPtr);
            error = GetAndReleaseString(errorPtr);
        }
        // Convenience overload: throws on failure.
        public static void leveldb_repair_db(IntPtr options, string path)
        {
            string error;
            leveldb_repair_db(options, path, out error);
            CheckError(error);
        }
        #endregion
        #endregion
        #region Write batch
        // Creates an empty write batch; destroy with leveldb_writebatch_destroy.
        // extern leveldb_writebatch_t* leveldb_writebatch_create();
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_writebatch_create();
        // extern void leveldb_writebatch_destroy(leveldb_writebatch_t*);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_writebatch_destroy(IntPtr writeBatch);
        // extern void leveldb_writebatch_clear(leveldb_writebatch_t*);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_writebatch_clear(IntPtr writeBatch);
        // Records a put operation in the batch.
        // extern void leveldb_writebatch_put(leveldb_writebatch_t*, const char* key, size_t klen, const char* val, size_t vlen);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_writebatch_put(IntPtr writeBatch,
                                                         string key,
                                                         UIntPtr keyLength,
                                                         string value,
                                                         UIntPtr valueLength);
        // Convenience overload: computes UTF-8 byte lengths.
        public static void leveldb_writebatch_put(IntPtr writeBatch,
                                                  string key,
                                                  string value)
        {
            var keyLength = GetStringLength(key);
            var valueLength = GetStringLength(value);
            Native.leveldb_writebatch_put(writeBatch,
                                          key, keyLength,
                                          value, valueLength);
        }
        // Records a delete operation in the batch.
        // extern void leveldb_writebatch_delete(leveldb_writebatch_t*, const char* key, size_t klen);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_writebatch_delete(IntPtr writeBatch, string key, UIntPtr keylen);
        // Convenience overload: computes the UTF-8 key length.
        public static void leveldb_writebatch_delete(IntPtr writeBatch, string key)
        {
            var keyLength = GetStringLength(key);
            leveldb_writebatch_delete(writeBatch, key, keyLength);
        }
        // TODO:
        // extern void leveldb_writebatch_iterate(leveldb_writebatch_t*, void* state, void (*put)(void*, const char* k, size_t klen, const char* v, size_t vlen), void (*deleted)(void*, const char* k, size_t klen));
        #endregion
#region Options
// extern leveldb_options_t* leveldb_options_create();
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr leveldb_options_create();
// extern void leveldb_options_destroy(leveldb_options_t*);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_options_destroy(IntPtr options);
// extern void leveldb_options_set_comparator(leveldb_options_t*, leveldb_comparator_t*);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_options_set_comparator(IntPtr options, IntPtr comparator);
/// <summary>
/// If true, the database will be created if it is missing.
/// Default: false
/// </summary>
// extern void leveldb_options_set_create_if_missing(leveldb_options_t*, unsigned char);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_options_set_create_if_missing(IntPtr options, bool value);
/// <summary>
/// If true, an error is raised if the database already exists.
/// Default: false
/// </summary>
// extern void leveldb_options_set_error_if_exists(leveldb_options_t*, unsigned char);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_options_set_error_if_exists(IntPtr options, bool value);
/// <summary>
/// If true, the implementation will do aggressive checking of the
/// data it is processing and will stop early if it detects any
/// errors. This may have unforeseen ramifications: for example, a
/// corruption of one DB entry may cause a large number of entries to
/// become unreadable or for the entire DB to become unopenable.
/// Default: false
/// </summary>
// extern void leveldb_options_set_paranoid_checks(leveldb_options_t*, unsigned char);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_options_set_paranoid_checks(IntPtr options, bool value);
/// <summary>
/// Number of open files that can be used by the DB. You may need to
/// increase this if your database has a large working set (budget
/// one open file per 2MB of working set).
/// Default: 1000
/// </summary>
// extern void leveldb_options_set_max_open_files(leveldb_options_t*, int);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_options_set_max_open_files(IntPtr options, int value);
/// <summary>
/// Each block is individually compressed before being written to
/// persistent storage. Compression is on by default since the default
/// compression method is very fast, and is automatically disabled for
/// uncompressible data. In rare cases, applications may want to
/// disable compression entirely, but should only do so if benchmarks
/// show a performance improvement.
/// Default: 1 (SnappyCompression)
/// </summary>
/// <seealso cref="T:CompressionType"/>
// extern void leveldb_options_set_compression(leveldb_options_t*, int);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_options_set_compression(IntPtr options, int value);
/// <summary>
/// Control over blocks (user data is stored in a set of blocks, and
/// a block is the unit of reading from disk).
///
/// If non-NULL, use the specified cache for blocks.
/// If NULL, leveldb will automatically create and use an 8MB internal cache.
/// Default: NULL
/// </summary>
// extern void leveldb_options_set_cache(leveldb_options_t*, leveldb_cache_t*);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_options_set_cache(IntPtr options, IntPtr cache);
public static void leveldb_options_set_cache_size(IntPtr options, int capacity)
{
var cache = leveldb_cache_create_lru((UIntPtr) capacity);
leveldb_options_set_cache(options, cache);
}
        /// <summary>
        /// Approximate size of user data packed per block. Note that the
        /// block size specified here corresponds to uncompressed data. The
        /// actual size of the unit read from disk may be smaller if
        /// compression is enabled. This parameter can be changed dynamically.
        ///
        /// Default: 4K
        /// </summary>
        // extern void leveldb_options_set_block_size(leveldb_options_t*, size_t);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_options_set_block_size(IntPtr options, UIntPtr size);
        // Convenience overload taking an int (converted to size_t).
        public static void leveldb_options_set_block_size(IntPtr options, int size)
        {
            leveldb_options_set_block_size(options, (UIntPtr) size);
        }
        /// <summary>
        /// Amount of data to build up in memory (backed by an unsorted log
        /// on disk) before converting to a sorted on-disk file.
        ///
        /// Larger values increase performance, especially during bulk loads.
        /// Up to two write buffers may be held in memory at the same time,
        /// so you may wish to adjust this parameter to control memory usage.
        /// Also, a larger write buffer will result in a longer recovery time
        /// the next time the database is opened.
        ///
        /// Default: 4MB
        /// </summary>
        // extern void leveldb_options_set_write_buffer_size(leveldb_options_t*, size_t);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_options_set_write_buffer_size(IntPtr options, UIntPtr size);
        // Convenience overload taking an int (converted to size_t).
        public static void leveldb_options_set_write_buffer_size(IntPtr options, int size)
        {
            leveldb_options_set_write_buffer_size(options, (UIntPtr) size);
        }
        /// <summary>
        /// Number of keys between restart points for delta encoding of keys.
        /// This parameter can be changed dynamically. Most clients should
        /// leave this parameter alone.
        /// Default: 16
        /// </summary>
        // extern void leveldb_options_set_block_restart_interval(leveldb_options_t*, int);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_options_set_block_restart_interval(IntPtr options, int interval);
        #endregion
#region Read Options
// extern leveldb_readoptions_t* leveldb_readoptions_create();
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr leveldb_readoptions_create();
// extern void leveldb_readoptions_destroy(leveldb_readoptions_t*);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_readoptions_destroy(IntPtr readOptions);
// extern void leveldb_readoptions_set_verify_checksums(leveldb_readoptions_t*, unsigned char);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_readoptions_set_verify_checksums(IntPtr readOptions, bool value);
// extern void leveldb_readoptions_set_fill_cache(leveldb_readoptions_t*, unsigned char);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_readoptions_set_fill_cache(IntPtr readOptions, bool value);
// extern void leveldb_readoptions_set_snapshot(leveldb_readoptions_t*, const leveldb_snapshot_t*);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_readoptions_set_snapshot(IntPtr readOptions, IntPtr snapshot);
#endregion
#region Write Options
// extern leveldb_writeoptions_t* leveldb_writeoptions_create();
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr leveldb_writeoptions_create();
// extern void leveldb_writeoptions_destroy(leveldb_writeoptions_t*);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_writeoptions_destroy(IntPtr writeOptions);
// extern void leveldb_writeoptions_set_sync(leveldb_writeoptions_t*, unsigned char);
[DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
public static extern void leveldb_writeoptions_set_sync(IntPtr writeOptions, bool value);
#endregion
        #region Iterator
        // extern void leveldb_iter_seek_to_first(leveldb_iterator_t*);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_iter_seek_to_first(IntPtr iter);
        // extern void leveldb_iter_seek_to_last(leveldb_iterator_t*);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_iter_seek_to_last(IntPtr iter);
        // extern void leveldb_iter_seek(leveldb_iterator_t*, const char* k, size_t klen);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_iter_seek(IntPtr iter, string key, UIntPtr keyLength);
        // Convenience overload: computes the UTF-8 key length.
        public static void leveldb_iter_seek(IntPtr iter, string key)
        {
            var keyLength = GetStringLength(key);
            leveldb_iter_seek(iter, key, keyLength);
        }
        // extern unsigned char leveldb_iter_valid(const leveldb_iterator_t*);
        [DllImport("leveldb", EntryPoint="leveldb_iter_valid", CallingConvention = CallingConvention.Cdecl)]
        public static extern byte leveldb_iter_valid_native(IntPtr iter);
        // Managed wrapper: converts the native unsigned char to a bool.
        public static bool leveldb_iter_valid(IntPtr iter)
        {
            return leveldb_iter_valid_native(iter) != 0;
        }
        // extern void leveldb_iter_prev(leveldb_iterator_t*);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_iter_prev(IntPtr iter);
        // extern void leveldb_iter_next(leveldb_iterator_t*);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_iter_next(IntPtr iter);
        // extern const char* leveldb_iter_key(const leveldb_iterator_t*, size_t* klen);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_iter_key(IntPtr iter, out UIntPtr keyLength);
        // Managed wrapper: returns the current key as a string, or null when the
        // iterator yields no key. The native buffer is owned by the iterator and
        // is deliberately not freed here.
        public static string leveldb_iter_key(IntPtr iter)
        {
            UIntPtr keyLength;
            var keyPtr = leveldb_iter_key(iter, out keyLength);
            if (keyPtr == IntPtr.Zero || keyLength == UIntPtr.Zero) {
                return null;
            }
            var key = Marshal.PtrToStringAnsi(keyPtr, (int) keyLength);
            return key;
        }
        // extern const char* leveldb_iter_value(const leveldb_iterator_t*, size_t* vlen);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_iter_value(IntPtr iter, out UIntPtr valueLength);
        // Managed wrapper: returns the current value as a string, or null when the
        // iterator yields no value. The native buffer is owned by the iterator.
        public static string leveldb_iter_value(IntPtr iter)
        {
            UIntPtr valueLength;
            var valuePtr = leveldb_iter_value(iter, out valueLength);
            if (valuePtr == IntPtr.Zero || valueLength == UIntPtr.Zero) {
                return null;
            }
            var value = Marshal.PtrToStringAnsi(valuePtr, (int) valueLength);
            return value;
        }
        // extern void leveldb_iter_destroy(leveldb_iterator_t*);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_iter_destroy(IntPtr iter);
        // TODO:
        // extern void leveldb_iter_get_error(const leveldb_iterator_t*, char** errptr);
        #endregion
        #region Cache
        // Creates an LRU block cache with the given capacity in bytes.
        // extern leveldb_cache_t* leveldb_cache_create_lru(size_t capacity);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr leveldb_cache_create_lru(UIntPtr capacity);
        // extern void leveldb_cache_destroy(leveldb_cache_t* cache);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_cache_destroy(IntPtr cache);
        #endregion
        #region Env
        // TODO:
        // extern leveldb_env_t* leveldb_create_default_env();
        // extern void leveldb_env_destroy(leveldb_env_t*);
        #endregion
        #region Utility
        /// <summary>
        /// Calls free(ptr).
        /// REQUIRES: ptr was malloc()-ed and returned by one of the routines
        /// in this file. Note that in certain cases (typically on Windows),
        /// you may need to call this routine instead of free(ptr) to dispose
        /// of malloc()-ed memory returned by this library.
        /// </summary>
        // extern void leveldb_free(void* ptr);
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern void leveldb_free(IntPtr ptr);
        /// <summary>
        /// Return the major version number for this release.
        /// </summary>
        // extern int leveldb_major_version();
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern int leveldb_major_version();
        /// <summary>
        /// Return the minor version number for this release.
        /// </summary>
        // extern int leveldb_minor_version();
        [DllImport("leveldb", CallingConvention = CallingConvention.Cdecl)]
        public static extern int leveldb_minor_version();
        #endregion
public static void Dump(IntPtr db)
{
var options = Native.leveldb_readoptions_create();
IntPtr iter = Native.leveldb_create_iterator(db, options);
for (Native.leveldb_iter_seek_to_first(iter);
Native.leveldb_iter_valid(iter);
Native.leveldb_iter_next(iter)) {
string key = Native.leveldb_iter_key(iter);
string value = Native.leveldb_iter_value(iter);
Console.WriteLine("'{0}' => '{1}'", key, value);
}
Native.leveldb_iter_destroy(iter);
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.FindSymbols;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Rename;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.RenameTracking
{
internal sealed partial class RenameTrackingTaggerProvider
{
        // Outcome of the background check on the token the edit touched.
        internal enum TriggerIdentifierKind
        {
            NotRenamable,           // the token is not an identifier we can rename
            RenamableDeclaration,   // the token declares the symbol under edit
            RenamableReference,     // the token references a symbol declared elsewhere
        }
/// <summary>
/// Determines whether the original token was a renameable identifier on a background thread
/// </summary>
private class TrackingSession : ForegroundThreadAffinitizedObject
{
            // Shared pre-completed result for sessions that start on an empty span.
            private static readonly Task<TriggerIdentifierKind> s_notRenamableTask = Task.FromResult(TriggerIdentifierKind.NotRenamable);

            // Completes with whether the original token is a renameable identifier.
            private readonly Task<TriggerIdentifierKind> _isRenamableIdentifierTask;
            private readonly CancellationTokenSource _cancellationTokenSource;
            private readonly CancellationToken _cancellationToken;
            private readonly IAsynchronousOperationListener _asyncListener;

            // Completes with whether the edited identifier binds; starts as False
            // until CheckNewIdentifier kicks off the check.
            private Task<bool> _newIdentifierBindsTask = SpecializedTasks.False;

            // Text of the identifier at the moment the session began.
            private readonly string _originalName;
            public string OriginalName { get { return _originalName; } }

            // Span tracking the identifier across snapshot changes (edge-inclusive).
            private readonly ITrackingSpan _trackingSpan;
            public ITrackingSpan TrackingSpan { get { return _trackingSpan; } }

            // Set when the reference comes from a nameof() member group, in which
            // case the rename must include overloads.
            private bool _forceRenameOverloads;
            public bool ForceRenameOverloads { get { return _forceRenameOverloads; } }
            /// <summary>
            /// Starts a tracking session for the word covered by <paramref name="snapshotSpan"/>.
            /// Must be called on the foreground thread. When the span is non-empty a
            /// background task determines whether the word is a renameable identifier
            /// and notifies the state machine on completion.
            /// </summary>
            public TrackingSession(StateMachine stateMachine, SnapshotSpan snapshotSpan, IAsynchronousOperationListener asyncListener)
            {
                AssertIsForeground();
                _asyncListener = asyncListener;
                _trackingSpan = snapshotSpan.Snapshot.CreateTrackingSpan(snapshotSpan.Span, SpanTrackingMode.EdgeInclusive);
                _cancellationTokenSource = new CancellationTokenSource();
                _cancellationToken = _cancellationTokenSource.Token;
                if (snapshotSpan.Length > 0)
                {
                    // If the snapshotSpan is nonempty, then the session began with a change that
                    // was touching a word. Asynchronously determine whether that word was a
                    // renameable identifier. If it is, alert the state machine so it can trigger
                    // tagging.
                    _originalName = snapshotSpan.GetText();
                    _isRenamableIdentifierTask = Task.Factory.SafeStartNewFromAsync(
                        () => DetermineIfRenamableIdentifierAsync(snapshotSpan, initialCheck: true),
                        _cancellationToken,
                        TaskScheduler.Default);
                    var asyncToken = _asyncListener.BeginAsyncOperation(GetType().Name + ".UpdateTrackingSessionAfterIsRenamableIdentifierTask");
                    // On successful completion, hop back to the foreground scheduler to
                    // let the state machine react.
                    _isRenamableIdentifierTask.SafeContinueWith(
                        t => stateMachine.UpdateTrackingSessionIfRenamable(),
                        _cancellationToken,
                        TaskContinuationOptions.OnlyOnRanToCompletion,
                        ForegroundTaskScheduler).CompletesAsyncOperation(asyncToken);
                    QueueUpdateToStateMachine(stateMachine, _isRenamableIdentifierTask);
                }
                else
                {
                    // If the snapshotSpan is empty, that means text was added in a location that is
                    // not touching an existing word, which happens a fair amount when writing new
                    // code. In this case we already know that the user is not renaming an
                    // identifier.
                    _isRenamableIdentifierTask = s_notRenamableTask;
                }
            }
            /// <summary>
            /// After <paramref name="task"/> completes successfully, notifies the state
            /// machine on the foreground thread if the session's token turned out to be
            /// renamable. Reading _isRenamableIdentifierTask.Result here is safe because
            /// every task passed in is either that task itself or a continuation of it.
            /// </summary>
            private void QueueUpdateToStateMachine(StateMachine stateMachine, Task task)
            {
                var asyncToken = _asyncListener.BeginAsyncOperation($"{GetType().Name}.{nameof(QueueUpdateToStateMachine)}");

                task.SafeContinueWith(t =>
                   {
                       AssertIsForeground();
                       if (_isRenamableIdentifierTask.Result != TriggerIdentifierKind.NotRenamable)
                       {
                           stateMachine.OnTrackingSessionUpdated(this);
                       }
                   },
                   _cancellationToken,
                   TaskContinuationOptions.OnlyOnRanToCompletion,
                   ForegroundTaskScheduler).CompletesAsyncOperation(asyncToken);
            }
            /// <summary>
            /// Kicks off a background check of whether the edited identifier text still
            /// binds as a renamable reference, chained onto the original renamability
            /// check. The result lands in _newIdentifierBindsTask and the state machine
            /// is notified when it completes.
            /// </summary>
            internal void CheckNewIdentifier(StateMachine stateMachine, ITextSnapshot snapshot)
            {
                AssertIsForeground();

                _newIdentifierBindsTask = _isRenamableIdentifierTask.SafeContinueWithFromAsync(
                    async t => t.Result != TriggerIdentifierKind.NotRenamable &&
                       TriggerIdentifierKind.RenamableReference ==
                           await DetermineIfRenamableIdentifierAsync(
                               TrackingSpan.GetSpan(snapshot),
                               initialCheck: false).ConfigureAwait(false),
                    _cancellationToken,
                    TaskContinuationOptions.OnlyOnRanToCompletion,
                    TaskScheduler.Default);

                QueueUpdateToStateMachine(stateMachine, _newIdentifierBindsTask);
            }
            /// <summary>
            /// Non-blocking check (waitForResult: false) of whether the identifier is
            /// already known to be renamable.
            /// </summary>
            internal bool IsDefinitelyRenamableIdentifier()
            {
                // This needs to be able to run on a background thread for the CodeFix
                return IsRenamableIdentifier(_isRenamableIdentifierTask, waitForResult: false, cancellationToken: CancellationToken.None);
            }
            /// <summary>
            /// Cancels the session's background work via its cancellation token.
            /// Foreground thread only.
            /// </summary>
            public void Cancel()
            {
                AssertIsForeground();
                _cancellationTokenSource.Cancel();
            }
            /// <summary>
            /// Background classification of the token under <paramref name="snapshotSpan"/>:
            /// returns NotRenamable unless the token is a valid identifier that resolves
            /// to symbols via the semantic model. A nameof() member group sets
            /// _forceRenameOverloads as a side effect.
            /// </summary>
            private async Task<TriggerIdentifierKind> DetermineIfRenamableIdentifierAsync(SnapshotSpan snapshotSpan, bool initialCheck)
            {
                AssertIsBackground();
                var document = snapshotSpan.Snapshot.GetOpenDocumentInCurrentContextWithChanges();
                if (document != null)
                {
                    var syntaxFactsService = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
                    var syntaxTree = await document.GetSyntaxTreeAsync(_cancellationToken).ConfigureAwait(false);
                    var token = await syntaxTree.GetTouchingWordAsync(snapshotSpan.Start.Position, syntaxFactsService, _cancellationToken).ConfigureAwait(false);

                    // The OriginalName is determined with a simple textual check, so for a
                    // statement such as "Dim [x = 1" the textual check will return a name of "[x".
                    // The token found for "[x" is an identifier token, but only due to error
                    // recovery (the "[x" is actually in the trailing trivia). If the OriginalName
                    // found through the textual check has a different length than the span of the
                    // touching word, then we cannot perform a rename.
                    if (initialCheck && token.Span.Length != this.OriginalName.Length)
                    {
                        return TriggerIdentifierKind.NotRenamable;
                    }

                    var languageHeuristicsService = document.Project.LanguageServices.GetService<IRenameTrackingLanguageHeuristicsService>();
                    if (syntaxFactsService.IsIdentifier(token) && languageHeuristicsService.IsIdentifierValidForRenameTracking(token.Text))
                    {
                        var semanticModel = await document.GetSemanticModelForNodeAsync(token.Parent, _cancellationToken).ConfigureAwait(false);
                        var semanticFacts = document.GetLanguageService<ISemanticFactsService>();

                        var renameSymbolInfo = RenameUtilities.GetTokenRenameInfo(semanticFacts, semanticModel, token, _cancellationToken);
                        if (!renameSymbolInfo.HasSymbols)
                        {
                            return TriggerIdentifierKind.NotRenamable;
                        }

                        if (renameSymbolInfo.IsMemberGroup)
                        {
                            // This is a reference from a nameof expression. Allow the rename but set the RenameOverloads option
                            _forceRenameOverloads = true;

                            return await DetermineIfRenamableSymbolsAsync(renameSymbolInfo.Symbols, document, token).ConfigureAwait(false);
                        }
                        else
                        {
                            return await DetermineIfRenamableSymbolAsync(renameSymbolInfo.Symbols.Single(), document, token).ConfigureAwait(false);
                        }
                    }
                }

                return TriggerIdentifierKind.NotRenamable;
            }
// Classifies a member group (e.g. from a nameof expression): every symbol must
// be fully defined in source for the group to be renamable; any metadata
// location disqualifies the whole group.
private async Task<TriggerIdentifierKind> DetermineIfRenamableSymbolsAsync(IEnumerable<ISymbol> symbols, Document document, SyntaxToken token)
{
    foreach (var candidate in symbols)
    {
        // Prefer the source definition when one exists; otherwise fall back to
        // the symbol as resolved.
        var definition = await SymbolFinder.FindSourceDefinitionAsync(candidate, document.Project.Solution, _cancellationToken).ConfigureAwait(false) ?? candidate;

        // A single non-source location (e.g. metadata) blocks the rename.
        if (definition.Locations.Any(loc => !loc.IsInSource))
        {
            return TriggerIdentifierKind.NotRenamable;
        }
    }

    return TriggerIdentifierKind.RenamableReference;
}
// Classifies a single symbol: not renamable when any location lives outside
// source; otherwise distinguishes renaming at the declaration site from
// renaming a reference by comparing against the token's location.
private async Task<TriggerIdentifierKind> DetermineIfRenamableSymbolAsync(ISymbol symbol, Document document, SyntaxToken token)
{
    // Prefer the source definition when one exists; otherwise fall back to the
    // symbol as resolved.
    var definition = await SymbolFinder.FindSourceDefinitionAsync(symbol, document.Project.Solution, _cancellationToken).ConfigureAwait(false) ?? symbol;

    if (definition.Locations.Any(loc => !loc.IsInSource))
    {
        return TriggerIdentifierKind.NotRenamable;
    }

    if (definition.Locations.Any(loc => loc == token.GetLocation()))
    {
        return TriggerIdentifierKind.RenamableDeclaration;
    }

    return TriggerIdentifierKind.RenamableReference;
}
// Decides whether a rename can currently be offered for this tracking session.
// isSmartTagCheck: true when probing whether to show the rename smart tag;
// explicit invocations pass false and are allowed in more cases.
// waitForResult/cancellationToken control whether we block on the background
// renamability computation.
internal bool CanInvokeRename(
ISyntaxFactsService syntaxFactsService,
IRenameTrackingLanguageHeuristicsService languageHeuristicsService,
bool isSmartTagCheck,
bool waitForResult,
CancellationToken cancellationToken)
{
if (IsRenamableIdentifier(_isRenamableIdentifierTask, waitForResult, cancellationToken))
{
var isRenamingDeclaration = _isRenamableIdentifierTask.Result == TriggerIdentifierKind.RenamableDeclaration;
var newName = TrackingSpan.GetText(TrackingSpan.TextBuffer.CurrentSnapshot);
// Declarations always compare case-sensitively; references follow the
// language's case sensitivity.
var comparison = isRenamingDeclaration || syntaxFactsService.IsCaseSensitive ? StringComparison.Ordinal : StringComparison.OrdinalIgnoreCase;
// The new text must actually differ and must itself be a valid,
// trackable identifier.
if (!string.Equals(OriginalName, newName, comparison) &&
syntaxFactsService.IsValidIdentifier(newName) &&
languageHeuristicsService.IsIdentifierValidForRenameTracking(newName))
{
// At this point, we want to allow renaming if the user invoked Ctrl+. explicitly, but we
// want to avoid showing a smart tag if we're renaming a reference that binds to an existing
// symbol.
if (!isSmartTagCheck || isRenamingDeclaration || !NewIdentifierDefinitelyBindsToReference())
{
return true;
}
}
}
return false;
}
// True only when the background binding check finished successfully AND
// concluded that the new identifier binds to an existing symbol. An
// unfinished, canceled, or faulted task counts as "not definitely".
private bool NewIdentifierDefinitelyBindsToReference()
{
return _newIdentifierBindsTask.Status == TaskStatus.RanToCompletion && _newIdentifierBindsTask.Result;
}
}
}
}
| |
// ---------------------------------------------------------------------------
// <copyright file="OrderByCollection.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// ---------------------------------------------------------------------------
//-----------------------------------------------------------------------
// <summary>Defines the OrderByCollection class.</summary>
//-----------------------------------------------------------------------
namespace Microsoft.Exchange.WebServices.Data
{
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using PropertyDefinitionSortDirectionPair = System.Collections.Generic.KeyValuePair<PropertyDefinitionBase, SortDirection>;
/// <summary>
/// Represents an ordered collection of property definitions qualified with a sort direction.
/// </summary>
/// <summary>
/// Represents an ordered collection of property definitions qualified with a sort direction.
/// </summary>
public sealed class OrderByCollection : IEnumerable<PropertyDefinitionSortDirectionPair>, IJsonSerializable
{
    // Ordered backing list; Key is the property definition, Value its sort direction.
    private List<PropertyDefinitionSortDirectionPair> propDefSortOrderPairList;

    /// <summary>
    /// Initializes a new instance of the <see cref="OrderByCollection"/> class.
    /// </summary>
    internal OrderByCollection()
    {
        this.propDefSortOrderPairList = new List<PropertyDefinitionSortDirectionPair>();
    }

    /// <summary>
    /// Adds the specified property definition / sort direction pair to the collection.
    /// </summary>
    /// <param name="propertyDefinition">The property definition.</param>
    /// <param name="sortDirection">The sort direction.</param>
    /// <exception cref="ServiceLocalException">Thrown when the property definition is already present.</exception>
    public void Add(PropertyDefinitionBase propertyDefinition, SortDirection sortDirection)
    {
        if (this.Contains(propertyDefinition))
        {
            throw new ServiceLocalException(string.Format(Strings.PropertyAlreadyExistsInOrderByCollection, propertyDefinition.GetPrintableName()));
        }
        this.propDefSortOrderPairList.Add(new PropertyDefinitionSortDirectionPair(propertyDefinition, sortDirection));
    }

    /// <summary>
    /// Removes all elements from the collection.
    /// </summary>
    public void Clear()
    {
        this.propDefSortOrderPairList.Clear();
    }

    /// <summary>
    /// Determines whether the collection contains the specified property definition.
    /// </summary>
    /// <param name="propertyDefinition">The property definition.</param>
    /// <returns>True if the collection contains the specified property definition; otherwise, false.</returns>
    internal bool Contains(PropertyDefinitionBase propertyDefinition)
    {
        return this.propDefSortOrderPairList.Exists((pair) => pair.Key.Equals(propertyDefinition));
    }

    /// <summary>
    /// Gets the number of elements contained in the collection.
    /// </summary>
    public int Count
    {
        get { return this.propDefSortOrderPairList.Count; }
    }

    /// <summary>
    /// Removes the specified property definition from the collection.
    /// </summary>
    /// <param name="propertyDefinition">The property definition.</param>
    /// <returns>True if the property definition is successfully removed; otherwise, false</returns>
    public bool Remove(PropertyDefinitionBase propertyDefinition)
    {
        int count = this.propDefSortOrderPairList.RemoveAll((pair) => pair.Key.Equals(propertyDefinition));
        return count > 0;
    }

    /// <summary>
    /// Removes the element at the specified index from the collection.
    /// </summary>
    /// <param name="index">The index.</param>
    /// <exception cref="System.ArgumentOutOfRangeException">
    /// Index is less than 0 or index is equal to or greater than Count.
    /// </exception>
    public void RemoveAt(int index)
    {
        this.propDefSortOrderPairList.RemoveAt(index);
    }

    /// <summary>
    /// Tries to get the value for a property definition in the collection.
    /// </summary>
    /// <param name="propertyDefinition">The property definition.</param>
    /// <param name="sortDirection">The sort direction.</param>
    /// <returns>True if collection contains property definition, otherwise false.</returns>
    public bool TryGetValue(PropertyDefinitionBase propertyDefinition, out SortDirection sortDirection)
    {
        foreach (PropertyDefinitionSortDirectionPair pair in this.propDefSortOrderPairList)
        {
            // BUG FIX: compare against the pair's Key (the property definition).
            // The previous code compared pair.Value (a SortDirection) against the
            // property definition, which can never be equal, so this lookup
            // always failed. Contains/Remove already match on pair.Key.
            if (pair.Key.Equals(propertyDefinition))
            {
                sortDirection = pair.Value;
                return true;
            }
        }
        sortDirection = SortDirection.Ascending; // out parameter has to be set to some value.
        return false;
    }

    /// <summary>
    /// Writes to XML.
    /// </summary>
    /// <param name="writer">The writer.</param>
    /// <param name="xmlElementName">Name of the XML element.</param>
    internal void WriteToXml(EwsServiceXmlWriter writer, string xmlElementName)
    {
        // Nothing is emitted for an empty collection.
        if (this.Count > 0)
        {
            writer.WriteStartElement(XmlNamespace.Messages, xmlElementName);
            foreach (PropertyDefinitionSortDirectionPair keyValuePair in this)
            {
                writer.WriteStartElement(XmlNamespace.Types, XmlElementNames.FieldOrder);
                writer.WriteAttributeValue(XmlAttributeNames.Order, keyValuePair.Value);
                keyValuePair.Key.WriteToXml(writer);
                writer.WriteEndElement(); // FieldOrder
            }
            writer.WriteEndElement();
        }
    }

    /// <summary>
    /// Creates a JSON representation of this object.
    /// </summary>
    /// <param name="service">The service.</param>
    /// <returns>
    /// A Json value (either a JsonObject, an array of Json values, or a Json primitive),
    /// or null when the collection is empty.
    /// </returns>
    object IJsonSerializable.ToJson(ExchangeService service)
    {
        if (this.Count > 0)
        {
            List<object> sortRestrictions = new List<object>();
            foreach (PropertyDefinitionSortDirectionPair keyValuePair in this)
            {
                JsonObject jsonSortRestriction = new JsonObject();
                jsonSortRestriction.Add(XmlAttributeNames.Order, keyValuePair.Value);
                jsonSortRestriction.Add("Path", ((IJsonSerializable)keyValuePair.Key).ToJson(service));
                sortRestrictions.Add(jsonSortRestriction);
            }
            return sortRestrictions.ToArray();
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Gets the element at the specified index from the collection.
    /// </summary>
    /// <param name="index">Index.</param>
    public PropertyDefinitionSortDirectionPair this[int index]
    {
        get { return this.propDefSortOrderPairList[index]; }
    }

    #region IEnumerable<KeyValuePair<PropertyDefinitionBase,SortDirection>> Members
    /// <summary>
    /// Returns an enumerator that iterates through the collection.
    /// </summary>
    /// <returns>
    /// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that can be used to iterate through the collection.
    /// </returns>
    public IEnumerator<KeyValuePair<PropertyDefinitionBase, SortDirection>> GetEnumerator()
    {
        return this.propDefSortOrderPairList.GetEnumerator();
    }
    #endregion

    #region IEnumerable Members
    /// <summary>
    /// Returns an enumerator that iterates through a collection.
    /// </summary>
    /// <returns>
    /// An <see cref="T:System.Collections.IEnumerator"/> object that can be used to iterate through the collection.
    /// </returns>
    IEnumerator IEnumerable.GetEnumerator()
    {
        return this.propDefSortOrderPairList.GetEnumerator();
    }
    #endregion
}
}
| |
using TXTextControl.ReportingCloud;
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Collections.Generic;
using System.IO;
namespace TXTextControl.ReportingCloud.Tests
{
[TestClass()]
public class ReportingCloudUnitTest
{
    // Empty credentials: fill in before running against the live service.
    string sUsername = "";
    string sPassword = "";
    Uri uriBasePath = new Uri("https://api.reporting.cloud/");

    /// <summary>
    /// Creates a ReportingCloud client, runs the given test action against it
    /// and converts any exception into an MSTest failure carrying the message.
    /// Centralizes the try/catch boilerplate previously repeated in every test.
    /// </summary>
    /// <param name="test">The test body to execute.</param>
    private void RunTest(Action<ReportingCloud> test)
    {
        try
        {
            test(new ReportingCloud(sUsername, sPassword, uriBasePath));
        }
        catch (Exception exc)
        {
            Assert.Fail(exc.Message);
        }
    }

    /// <summary>
    /// Uploads the given local document under a unique temporary template name.
    /// </summary>
    /// <param name="rc">The client to upload with.</param>
    /// <param name="sLocalPath">Path of the local test document.</param>
    /// <returns>The generated template name; callers should delete it when done.</returns>
    private static string UploadTempTemplate(ReportingCloud rc, string sLocalPath)
    {
        byte[] bDocument = File.ReadAllBytes(sLocalPath);
        string sTempFilename = "test" + Guid.NewGuid().ToString() + ".tx";
        rc.UploadTemplate(sTempFilename, bDocument);
        return sTempFilename;
    }

    [TestMethod()]
    public void ReportingCloudTest()
    {
        // The constructor itself must not throw for well-formed arguments.
        RunTest(rc => { });
    }

    [TestMethod()]
    public void ReportingCloudAPIKeyTest()
    {
        RunTest(rc =>
        {
            string sAPIKey;
            bool bKeyCreated = false;
            // create a new key, if no keys exist
            if (rc.GetAccountAPIKeys().Count == 0)
            {
                sAPIKey = rc.CreateAccountAPIKey();
                bKeyCreated = true;
            }
            else
            {
                sAPIKey = rc.GetAccountAPIKeys()[0].Key;
            }
            // create new instance authenticated with the API key only
            ReportingCloud rc2 = new ReportingCloud(sAPIKey, uriBasePath);
            // check account settings
            var accountSettings = rc2.GetAccountSettings();
            Assert.IsFalse(accountSettings.MaxDocuments == 0);
            // remove the key only if this test created it
            if (bKeyCreated)
            {
                rc.DeleteAccountAPIKey(sAPIKey);
            }
        });
    }

    [TestMethod()]
    public void GetAccountAPIKeysTest()
    {
        RunTest(rc =>
        {
            // create a new key
            string sAPIKey = rc.CreateAccountAPIKey();
            // get all keys and check that at least 1 key is in the list
            List<APIKey> lAPIKeys = rc.GetAccountAPIKeys();
            Assert.IsFalse(lAPIKeys.Count == 0);
            // clean up
            rc.DeleteAccountAPIKey(sAPIKey);
        });
    }

    [TestMethod()]
    public void ShareDocumentTest()
    {
        RunTest(rc =>
        {
            List<Template> lTemplates = rc.ListTemplates();
            string sSharedHash = rc.ShareDocument(lTemplates[0].TemplateName);
            // a non-empty hash means the document was shared
            Assert.IsFalse(sSharedHash.Length == 0);
        });
    }

    [TestMethod()]
    public void ProofingCheckTest()
    {
        RunTest(rc =>
        {
            List<IncorrectWord> incorrectWords = rc.CheckText("Thiss%20is%20a%20sample%20text", rc.GetAvailableDictionaries()[0]);
            // "Thiss" should be reported as incorrect
            Assert.IsFalse(incorrectWords.Count == 0);
        });
    }

    [TestMethod()]
    public void AvailableDictionariesTest()
    {
        RunTest(rc =>
        {
            string[] saDictionaries = rc.GetAvailableDictionaries();
            Assert.IsFalse(saDictionaries.Length == 0);
        });
    }

    [TestMethod()]
    public void GetSuggestionsTest()
    {
        RunTest(rc =>
        {
            string[] saSuggestions = rc.GetSuggestions("dooper", rc.GetAvailableDictionaries()[0], 10);
            Assert.IsFalse(saSuggestions.Length == 0);
        });
    }

    [TestMethod()]
    public void GetTemplateInfoTest()
    {
        RunTest(rc =>
        {
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            // get template information
            TemplateInfo templateInfo = rc.GetTemplateInfo(sTempFilename);
            Assert.IsFalse(templateInfo.TemplateName == "");
            // delete temp file
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void GetTemplateThumbnailsTest()
    {
        RunTest(rc =>
        {
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            // create thumbnails and check that images are created
            List<string> images = rc.GetTemplateThumbnails(sTempFilename, 20, 1, 1, ImageFormat.PNG);
            Assert.IsFalse(images.Count == 0);
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void FindAndReplaceDocumentTest()
    {
        RunTest(rc =>
        {
            string sTempFilename = UploadTempTemplate(rc, "documents/replace_template.tx");
            // create a new FindAndReplaceBody object
            FindAndReplaceBody body = new FindAndReplaceBody();
            body.FindAndReplaceData = new List<string[]>()
            {
                new string[] { "%%TextToReplace%%", "ReplacedString" },
                new string[] { "%%SecondTextToReplace%%", "ReplacedString2" }
            };
            // merge the document and check whether the HTML contains the test string
            byte[] results = rc.FindAndReplaceDocument(body, sTempFilename, ReturnFormat.HTML);
            string bHtmlDocument = System.Text.Encoding.UTF8.GetString(results);
            Assert.IsTrue(bHtmlDocument.Contains("ReplacedString"));
            // delete the template
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void MergeDocumentTest()
    {
        RunTest(rc =>
        {
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            // create dummy merge data
            Invoice invoice = new Invoice();
            invoice.yourcompany_companyname = "Text Control, LLC";
            invoice.invoice_no = "Test_R667663";
            invoice.billto_name = "<html><strong>Test</strong> <em>Company</em></html>";
            // create a new MergeBody object
            MergeBody body = new MergeBody();
            body.MergeData = invoice;
            MergeSettings settings = new MergeSettings();
            settings.Author = "Text Control GmbH";
            settings.MergeHtml = true;
            settings.Culture = "de-DE"; //new System.Globalization.CultureInfo("de-DE");
            body.MergeSettings = settings;
            // merge the document and check whether the HTML contains the test string
            List<byte[]> results = rc.MergeDocument(body, sTempFilename, ReturnFormat.HTML);
            string bHtmlDocument = System.Text.Encoding.UTF8.GetString(results[0]);
            Assert.IsTrue(bHtmlDocument.Contains("Test_R667663"));
            // delete the template
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void AppendDocumentTest()
    {
        RunTest(rc =>
        {
            // create a new AppendBody object with two source documents
            AppendBody body = new AppendBody();
            body.Documents.Add(new AppendDocument()
            {
                Document = File.ReadAllBytes("documents/sample_docx.docx"),
                DocumentDivider = DocumentDivider.None
            });
            body.Documents.Add(new AppendDocument()
            {
                Document = File.ReadAllBytes("documents/invoice.tx"),
                DocumentDivider = DocumentDivider.NewSection
            });
            DocumentSettings settings = new DocumentSettings();
            settings.Author = "Text Control GmbH";
            body.DocumentSettings = settings;
            // append the documents; test mode injects a well-known title
            byte[] results = rc.AppendDocument(body, ReturnFormat.HTML, true);
            string bHtmlDocument = System.Text.Encoding.UTF8.GetString(results);
            Assert.IsTrue(bHtmlDocument.Contains("<title>ReportingCloud Test Mode</title>"));
        });
    }

    [TestMethod()]
    public void UploadTemplateTest()
    {
        RunTest(rc =>
        {
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            // template exists?
            Assert.IsTrue(rc.TemplateExists(sTempFilename), "Template doesn't exist");
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void ConvertDocumentTest()
    {
        RunTest(rc =>
        {
            byte[] bDocument = File.ReadAllBytes("documents/invoice.tx");
            byte[] bHtml = rc.ConvertDocument(bDocument, ReturnFormat.HTML);
            Assert.IsTrue(System.Text.Encoding.UTF8.GetString(bHtml).Contains("INVOICE"));
        });
    }

    [TestMethod()]
    public void GetAccountSettingsTest()
    {
        RunTest(rc =>
        {
            AccountSettings settings = rc.GetAccountSettings();
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            // the template count must have gone up by exactly one
            Assert.AreEqual(settings.UploadedTemplates + 1, rc.GetTemplateCount());
            // delete temp document
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void GetTemplateCountTest()
    {
        RunTest(rc =>
        {
            // store current template number
            int iTemplateCount = rc.GetTemplateCount();
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            // check, if the count went up
            Assert.AreEqual(iTemplateCount + 1, rc.GetTemplateCount());
            // delete temp document
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void GetTemplatePageCountTest()
    {
        RunTest(rc =>
        {
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            // the invoice sample is a single-page document
            Assert.AreEqual(1, rc.GetTemplatePageCount(sTempFilename));
            // delete temp document
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void DeleteTemplateTest()
    {
        RunTest(rc =>
        {
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            if (rc.TemplateExists(sTempFilename))
            {
                // delete template and verify it is gone
                rc.DeleteTemplate(sTempFilename);
                Assert.IsFalse(rc.TemplateExists(sTempFilename), "Template is not deleted.");
            }
        });
    }

    [TestMethod()]
    public void DownloadTemplateTest()
    {
        RunTest(rc =>
        {
            string sTempFilename = UploadTempTemplate(rc, "documents/invoice.tx");
            // download document and verify something came back
            byte[] bTemplate = rc.DownloadTemplate(sTempFilename);
            Assert.IsNotNull(bTemplate);
            rc.DeleteTemplate(sTempFilename);
        });
    }

    [TestMethod()]
    public void ListTemplatesTest()
    {
        // listing must simply succeed without throwing
        RunTest(rc => rc.ListTemplates());
    }

    [TestMethod()]
    public void ListFonts()
    {
        RunTest(rc =>
        {
            string[] fonts = rc.ListFonts();
            foreach (string font in fonts)
            {
                Console.WriteLine(font);
            }
        });
    }
}
// Simple merge-data POCO used by MergeDocumentTest. The lowercase property
// names are intentional — presumably they must match the merge field names in
// documents/invoice.tx (verify against the template before renaming).
public class Invoice
{
public string yourcompany_companyname { get; set; }
public string invoice_no { get; set; }
public string billto_name { get; set; }
}
}
| |
using System;
using System.Collections.Generic;
using Content.Client.Stylesheets;
using Content.Shared.AI;
using Robust.Client.Graphics;
using Robust.Client.UserInterface;
using Robust.Client.UserInterface.Controls;
using Robust.Shared.GameObjects;
using Robust.Shared.IoC;
using Robust.Shared.Maths;
using static Robust.Client.UserInterface.Controls.BoxContainer;
namespace Content.Client.AI
{
#if DEBUG
// Client-side debug overlay: renders floating tooltip panels above AI entities
// showing utility-AI decisions and pathfinding statistics received from the
// server as network debug messages. Compiled only in DEBUG builds.
public sealed class ClientAiDebugSystem : EntitySystem
{
[Dependency] private readonly IEyeManager _eyeManager = default!;
// Bitmask of the currently enabled tooltip categories.
private AiDebugMode _tooltips = AiDebugMode.None;
// One tooltip panel per tracked entity, parented to the UI state root.
private readonly Dictionary<EntityUid, PanelContainer> _aiBoxes = new();
public override void Update(float frameTime)
{
base.Update(frameTime);
// Nothing enabled: tear down any panels still alive and bail out.
if (_tooltips == 0)
{
if (_aiBoxes.Count > 0)
{
foreach (var (_, panel) in _aiBoxes)
{
panel.Dispose();
}
_aiBoxes.Clear();
}
return;
}
var deletedEntities = new List<EntityUid>(0);
foreach (var (entity, panel) in _aiBoxes)
{
if (Deleted(entity))
{
// Can't remove while enumerating; collect and remove below.
deletedEntities.Add(entity);
continue;
}
// Hide panels for entities outside the current viewport.
if (!_eyeManager.GetWorldViewport().Contains(EntityManager.GetComponent<TransformComponent>(entity).WorldPosition))
{
panel.Visible = false;
continue;
}
// Center the panel horizontally and place it 50 px above the entity on screen.
var (x, y) = _eyeManager.CoordinatesToScreen(EntityManager.GetComponent<TransformComponent>(entity).Coordinates).Position;
var offsetPosition = new Vector2(x - panel.Width / 2, y - panel.Height - 50f);
panel.Visible = true;
LayoutContainer.SetPosition(panel, offsetPosition);
}
foreach (var entity in deletedEntities)
{
_aiBoxes.Remove(entity);
}
}
public override void Initialize()
{
base.Initialize();
UpdatesOutsidePrediction = true;
// Server-sent debug payloads that drive the overlay.
SubscribeNetworkEvent<SharedAiDebug.UtilityAiDebugMessage>(HandleUtilityAiDebugMessage);
SubscribeNetworkEvent<SharedAiDebug.AStarRouteMessage>(HandleAStarRouteMessage);
SubscribeNetworkEvent<SharedAiDebug.JpsRouteMessage>(HandleJpsRouteMessage);
}
// Fills the first label (vBox child 0, see TryCreatePanel) with the
// utility-AI planning results for the entity.
private void HandleUtilityAiDebugMessage(SharedAiDebug.UtilityAiDebugMessage message)
{
if ((_tooltips & AiDebugMode.Thonk) != 0)
{
// I guess if it's out of range we don't know about it?
var entity = message.EntityUid;
TryCreatePanel(entity);
// Probably shouldn't access by index but it's a debugging tool so eh
var label = (Label) _aiBoxes[entity].GetChild(0).GetChild(0);
label.Text = $"Current Task: {message.FoundTask}\n" +
$"Task score: {message.ActionScore}\n" +
$"Planning time (ms): {message.PlanningTime * 1000:0.0000}\n" +
$"Considered {message.ConsideredTaskCount} tasks";
}
}
// Fills the second label (vBox child 1) with A* pathfinding statistics.
private void HandleAStarRouteMessage(SharedAiDebug.AStarRouteMessage message)
{
if ((_tooltips & AiDebugMode.Paths) != 0)
{
var entity = message.EntityUid;
TryCreatePanel(entity);
var label = (Label) _aiBoxes[entity].GetChild(0).GetChild(1);
label.Text = $"Pathfinding time (ms): {message.TimeTaken * 1000:0.0000}\n" +
$"Nodes traversed: {message.CameFrom.Count}\n" +
$"Nodes per ms: {message.CameFrom.Count / (message.TimeTaken * 1000)}";
}
}
// Fills the second label (vBox child 1) with JPS pathfinding statistics.
private void HandleJpsRouteMessage(SharedAiDebug.JpsRouteMessage message)
{
if ((_tooltips & AiDebugMode.Paths) != 0)
{
var entity = message.EntityUid;
TryCreatePanel(entity);
var label = (Label) _aiBoxes[entity].GetChild(0).GetChild(1);
label.Text = $"Pathfinding time (ms): {message.TimeTaken * 1000:0.0000}\n" +
$"Jump Nodes: {message.JumpNodes.Count}\n" +
$"Jump Nodes per ms: {message.JumpNodes.Count / (message.TimeTaken * 1000)}";
}
}
// Disposes every panel and switches all tooltip categories off.
public void Disable()
{
foreach (var tooltip in _aiBoxes.Values)
{
tooltip.Dispose();
}
_aiBoxes.Clear();
_tooltips = AiDebugMode.None;
}
private void EnableTooltip(AiDebugMode tooltip)
{
_tooltips |= tooltip;
}
private void DisableTooltip(AiDebugMode tooltip)
{
_tooltips &= ~tooltip;
}
// Flips a single tooltip category on or off.
public void ToggleTooltip(AiDebugMode tooltip)
{
if ((_tooltips & tooltip) != 0)
{
DisableTooltip(tooltip);
}
else
{
EnableTooltip(tooltip);
}
}
// Lazily creates the two-label tooltip panel for an entity.
// Returns true if a new panel was created, false if one already existed.
private bool TryCreatePanel(EntityUid entity)
{
if (!_aiBoxes.ContainsKey(entity))
{
var userInterfaceManager = IoCManager.Resolve<IUserInterfaceManager>();
// Child 0: utility-AI text; child 1: pathfinding text.
var actionLabel = new Label
{
MouseFilter = Control.MouseFilterMode.Ignore,
};
var pathfindingLabel = new Label
{
MouseFilter = Control.MouseFilterMode.Ignore,
};
var vBox = new BoxContainer()
{
Orientation = LayoutOrientation.Vertical,
SeparationOverride = 15,
Children = {actionLabel, pathfindingLabel},
};
// Panel is mouse-transparent and slightly translucent so it doesn't
// obstruct gameplay.
var panel = new PanelContainer
{
StyleClasses = { StyleNano.StyleClassTooltipPanel },
Children = {vBox},
MouseFilter = Control.MouseFilterMode.Ignore,
ModulateSelfOverride = Color.White.WithAlpha(0.75f),
};
userInterfaceManager.StateRoot.AddChild(panel);
_aiBoxes[entity] = panel;
return true;
}
return false;
}
}
// Flag bits selecting which debug tooltip categories are shown.
[Flags]
public enum AiDebugMode : byte
{
None = 0,
// NOTE(review): bit 0 (1 << 0) is unused — values start at 1 << 1; presumably
// historical. Confirm before adding a new member that fills the gap.
Paths = 1 << 1,
Thonk = 1 << 2,
}
#endif
}
| |
// <copyright file="Cookie.cs" company="WebDriver Committers">
// Copyright 2007-2011 WebDriver committers
// Copyright 2007-2011 Google Inc.
// Portions copyright 2011 Software Freedom Conservancy
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.Globalization;
namespace OpenQA.Selenium
{
/// <summary>
/// Represents a cookie in the browser.
/// </summary>
[Serializable]
public class Cookie
{
private string cookieName;
private string cookieValue;
private string cookiePath;
private string cookieDomain;
private DateTime? cookieExpiry;
/// <summary>
/// Initializes a new instance of the <see cref="Cookie"/> class with a specific name,
/// value, domain, path and expiration date.
/// </summary>
/// <param name="name">The name of the cookie.</param>
/// <param name="value">The value of the cookie.</param>
/// <param name="domain">The domain of the cookie; normalized via StripPort.</param>
/// <param name="path">The path of the cookie; defaults to "/" when null or empty.</param>
/// <param name="expiry">The expiration date of the cookie, or null for no expiry.</param>
/// <exception cref="ArgumentException">If the name is <see langword="null"/> or an empty string,
/// or if it contains a semi-colon.</exception>
/// <exception cref="ArgumentNullException">If the value is <see langword="null"/>.</exception>
public Cookie(string name, string value, string domain, string path, DateTime? expiry)
{
    // Guard clauses: reject invalid names and null values up front.
    if (string.IsNullOrEmpty(name))
    {
        throw new ArgumentException("Cookie name cannot be null or empty string", "name");
    }

    if (value == null)
    {
        throw new ArgumentNullException("value", "Cookie value cannot be null");
    }

    if (name.IndexOf(';') != -1)
    {
        throw new ArgumentException("Cookie names cannot contain a ';': " + name, "name");
    }

    this.cookieName = name;
    this.cookieValue = value;
    this.cookiePath = string.IsNullOrEmpty(path) ? "/" : path;
    this.cookieDomain = StripPort(domain);

    if (expiry != null)
    {
        this.cookieExpiry = expiry;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="Cookie"/> class with a specific name,
/// value, path and expiration date.
/// </summary>
/// <param name="name">The name of the cookie.</param>
/// <param name="value">The value of the cookie.</param>
/// <param name="path">The path of the cookie.</param>
/// <param name="expiry">The expiration date of the cookie.</param>
/// <exception cref="ArgumentException">If the name is <see langword="null"/> or an empty string,
/// or if it contains a semi-colon.</exception>
/// <exception cref="ArgumentNullException">If the value is <see langword="null"/>.</exception>
/// <remarks>The cookie's domain is left unset (null is passed through).</remarks>
public Cookie(string name, string value, string path, DateTime? expiry)
: this(name, value, null, path, expiry)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Cookie"/> class with a specific name,
/// value, and path.
/// </summary>
/// <param name="name">The name of the cookie.</param>
/// <param name="value">The value of the cookie.</param>
/// <param name="path">The path of the cookie.</param>
/// <exception cref="ArgumentException">If the name is <see langword="null"/> or an empty string,
/// or if it contains a semi-colon.</exception>
/// <exception cref="ArgumentNullException">If the value is <see langword="null"/>.</exception>
/// <remarks>The expiration date is left unset (no expiry).</remarks>
public Cookie(string name, string value, string path)
: this(name, value, path, null)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Cookie"/> class with a specific name and value.
/// </summary>
/// <param name="name">The name of the cookie.</param>
/// <param name="value">The value of the cookie.</param>
/// <exception cref="ArgumentException">If the name is <see langword="null"/> or an empty string,
/// or if it contains a semi-colon.</exception>
/// <exception cref="ArgumentNullException">If the value is <see langword="null"/>.</exception>
/// <remarks>The path defaults to "/" and no expiration date is set.</remarks>
public Cookie(string name, string value)
: this(name, value, "/", null)
{
}
/// <summary>
/// Gets the name of the cookie.
/// </summary>
public string Name
{
    get { return this.cookieName; }
}
/// <summary>
/// Gets the value of the cookie.
/// </summary>
public string Value
{
    get { return this.cookieValue; }
}
/// <summary>
/// Gets the domain of the cookie. Any ":port" suffix supplied at construction
/// time has already been stripped (see StripPort).
/// </summary>
public string Domain
{
    get { return this.cookieDomain; }
}
/// <summary>
/// Gets the path of the cookie; "/" when no path was supplied at construction.
/// </summary>
public virtual string Path
{
    get { return this.cookiePath; }
}
/// <summary>
/// Gets a value indicating whether the cookie is secure.
/// Always <see langword="false"/> in this implementation; virtual so derived
/// cookie types can report a real value.
/// </summary>
public virtual bool Secure
{
    get { return false; }
}
/// <summary>
/// Gets the expiration date of the cookie, or <see langword="null"/> when none was supplied.
/// </summary>
public DateTime? Expiry
{
    get { return this.cookieExpiry; }
}
/// <summary>
/// Creates and returns a string representation of the cookie, in the form
/// "name=value; expires=...; path=...; domain=..." with empty parts omitted.
/// </summary>
/// <returns>A string representation of the cookie.</returns>
public override string ToString()
{
    // NOTE(review): the expiry format uses "hh" (12-hour clock) with no AM/PM
    // designator, so afternoon times are rendered ambiguously — confirm whether
    // "HH" (24-hour) was intended before changing the emitted format.
    return this.cookieName + "=" + this.cookieValue
        + (this.cookieExpiry == null ? string.Empty : "; expires=" + this.cookieExpiry.Value.ToUniversalTime().ToString("ddd MM dd yyyy hh:mm:ss UTC", CultureInfo.InvariantCulture))
        + (string.IsNullOrEmpty(this.cookiePath) ? string.Empty : "; path=" + this.cookiePath)
        + (string.IsNullOrEmpty(this.cookieDomain) ? string.Empty : "; domain=" + this.cookieDomain);
    //// + (isSecure ? ";secure;" : "");
}
/// <summary>
/// Compares this cookie with another object for equality. Two cookies are
/// considered equal when their names and values match; the domain, path and
/// expiration date are deliberately ignored.
/// </summary>
/// <param name="obj">The object to compare with this cookie.</param>
/// <returns><see langword="true"/> when the objects are equal; otherwise <see langword="false"/>.</returns>
public override bool Equals(object obj)
{
    // The same reference is always equal to itself.
    if (this == obj)
    {
        return true;
    }

    Cookie other = obj as Cookie;
    if (other == null)
    {
        return false;
    }

    if (!this.cookieName.Equals(other.cookieName))
    {
        return false;
    }

    // Names match; equality now rests on the values, treating two nulls as equal.
    if (this.cookieValue != null)
    {
        return this.cookieValue.Equals(other.cookieValue);
    }

    return other.Value == null;
}
/// <summary>
/// Serves as a hash function for a particular type.
/// </summary>
/// <returns>A hash code for the current <see cref="System.Object">Object</see>.</returns>
/// <remarks>
/// Only the name contributes to the hash. This is consistent with
/// <see cref="Equals(object)"/>, which compares name and value: cookies that
/// compare equal always share a name and therefore share a hash code.
/// </remarks>
public override int GetHashCode()
{
    return this.cookieName.GetHashCode();
}
/// <summary>
/// Removes an optional ":port" suffix from a domain string.
/// </summary>
/// <param name="domain">The domain, possibly including a port.</param>
/// <returns>The domain with any port removed, or <see langword="null"/> when the
/// input is null or empty.</returns>
private static string StripPort(string domain)
{
    if (string.IsNullOrEmpty(domain))
    {
        return null;
    }

    int separatorIndex = domain.IndexOf(':');
    return separatorIndex < 0 ? domain : domain.Substring(0, separatorIndex);
}
}
}
| |
using System;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using FluentAssertions;
using Xunit;
using NSubstitute;
namespace Okanshi.Test
{
public class TimerTest
{
    // Stopwatch substitute so each test can dictate the elapsed time.
    private readonly IStopwatch fakeStopwatch = Substitute.For<IStopwatch>();

    // Timer under test, wired to the fake stopwatch factory.
    private readonly Timer sut;

    public TimerTest()
    {
        // Every test runs against a clean registry and a fresh timer.
        DefaultMonitorRegistry.Instance.Clear();
        sut = new Timer(MonitorConfig.Build("Test"), () => fakeStopwatch);
    }

    [Fact]
    public void Initial_max_value_is_zero()
    {
        sut.GetMax().Value.Should().Be(0);
    }

    [Fact]
    public void Initial_min_value_is_zero()
    {
        sut.GetMin().Value.Should().Be(0);
    }

    [Fact]
    public void Initial_count_value_is_zero()
    {
        sut.GetCount().Value.Should().Be(0);
    }

    [Fact]
    public void Initial_total_time_is_zero()
    {
        sut.GetTotalTime().Value.Should().Be(0);
    }

    [Fact]
    public void Initial_value_is_zero()
    {
        sut.GetValues().First().Value.Should().Be(0.0);
    }

    [Fact]
    public void Timing_a_call_sets_count()
    {
        sut.GetCount(); // baseline read, kept from the original test flow

        sut.Record(() => { });

        sut.GetCount().Value.Should().Be(1);
    }

    [Fact]
    public void Timing_a_call_sets_max()
    {
        sut.GetCount(); // baseline read, kept from the original test flow
        fakeStopwatch.Time(Arg.Any<Action>()).Returns(50);

        sut.Record(() => { });

        sut.GetMax().Value.Should().Be(50);
    }

    [Fact]
    public void Timing_a_call_sets_min()
    {
        sut.GetCount(); // baseline read, kept from the original test flow
        fakeStopwatch.Time(Arg.Any<Action>()).Returns(50);

        sut.Record(() => { });

        sut.GetMin().Value.Should().Be(50);
    }

    [Fact]
    public void Timing_a_call_sets_total_time()
    {
        sut.GetTotalTime(); // baseline read, kept from the original test flow
        fakeStopwatch.Time(Arg.Any<Action>()).Returns(50);

        sut.Record(() => { });

        sut.GetTotalTime().Value.Should().Be(50);
    }

    [Fact]
    public void Get_and_reset_resets_count()
    {
        sut.GetCount();
        sut.Record(() => { });

        sut.GetValuesAndReset().ToList();

        sut.GetCount().Value.Should().Be(0);
    }

    [Fact]
    public void Get_and_reset_resets_max()
    {
        sut.GetCount();
        sut.Record(() => { });

        sut.GetValuesAndReset();

        sut.GetMax().Value.Should().Be(0);
    }

    [Fact]
    public void Get_and_reset_resets_min()
    {
        sut.GetCount();
        sut.Record(() => { });

        sut.GetValuesAndReset();

        sut.GetMin().Value.Should().Be(0);
    }

    [Fact]
    public void Get_and_reset_resets_total_time()
    {
        sut.GetTotalTime();
        sut.Record(() => { });

        sut.GetValuesAndReset();

        sut.GetTotalTime().Value.Should().Be(0);
    }

    [Fact]
    public void Manual_timing_sets_count()
    {
        sut.GetCount();
        var manualTimer = sut.Start();
        fakeStopwatch.IsRunning.Returns(true);

        manualTimer.Stop();

        sut.GetCount().Value.Should().Be(1);
    }

    [Fact]
    public void Manual_timing_sets_max()
    {
        sut.GetCount();
        var manualTimer = sut.Start();
        fakeStopwatch.IsRunning.Returns(true);
        fakeStopwatch.ElapsedMilliseconds.Returns(50);

        manualTimer.Stop();

        sut.GetMax().Value.Should().Be(50);
    }

    [Fact]
    public void Manual_timing_sets_min()
    {
        sut.GetCount();
        var manualTimer = sut.Start();
        fakeStopwatch.IsRunning.Returns(true);
        fakeStopwatch.ElapsedMilliseconds.Returns(50);

        manualTimer.Stop();

        sut.GetMin().Value.Should().Be(50);
    }

    [Fact]
    public void Manual_timing_sets_total_time()
    {
        sut.GetTotalTime();
        var manualTimer = sut.Start();
        fakeStopwatch.IsRunning.Returns(true);
        fakeStopwatch.ElapsedMilliseconds.Returns(50);

        manualTimer.Stop();

        sut.GetTotalTime().Value.Should().Be(50);
    }

    [Fact]
    public void Manual_registration_updates_sets_max()
    {
        const long milliseconds = 1000;

        sut.Register(TimeSpan.FromMilliseconds(milliseconds));

        sut.GetMax().Value.Should().Be(milliseconds);
    }

    [Fact]
    public void Manual_registration_updates_sets_min()
    {
        const long milliseconds = 1000;

        sut.Register(TimeSpan.FromMilliseconds(milliseconds));

        sut.GetMin().Value.Should().Be(milliseconds);
    }

    [Fact]
    public void Manual_registration_sets_count()
    {
        const long milliseconds = 1000;

        sut.Register(TimeSpan.FromMilliseconds(milliseconds));

        sut.GetCount().Value.Should().Be(1);
    }

    [Fact]
    public void Manual_registration_with_long_sets_total_time()
    {
        const long milliseconds = 1000;

        sut.Register(TimeSpan.FromMilliseconds(milliseconds));

        sut.GetTotalTime().Value.Should().Be(milliseconds);
    }

    [Fact]
    public void Manual_registration_with_timespan_sets_total_time()
    {
        sut.Register(TimeSpan.FromSeconds(1));

        sut.GetTotalTime().Value.Should().Be(1000);
    }

    [Fact]
    public void Manual_registration_with_stopwatch_sets_total_time()
    {
        // A real stopwatch with a short real delay so a non-zero time is registered.
        var realStopwatch = Stopwatch.StartNew();
        Thread.Sleep(50);
        realStopwatch.Stop();

        sut.RegisterElapsed(realStopwatch);

        sut.GetTotalTime().Value.Should().Be(realStopwatch.ElapsedMilliseconds);
    }

    [Fact]
    public void Manual_registrations_accumulate_total_time()
    {
        const long milliseconds = 1000;

        sut.Register(TimeSpan.FromMilliseconds(milliseconds));
        sut.Register(TimeSpan.FromMilliseconds(milliseconds));

        sut.GetTotalTime().Value.Should().Be(2 * milliseconds);
    }

    [Fact]
    public void Values_are_correct()
    {
        var names = sut.GetValues().Select(measurement => measurement.Name);

        names.Should().BeEquivalentTo("value", "max", "min", "count", "totalTime");
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Linq.Expressions.Tests
{
public static class BinaryExclusiveOrTests
{
    #region Test methods

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckByteExclusiveOrTest(bool useInterpreter)
    {
        // Exercise every ordered pair of boundary values.
        byte[] values = { 0, 1, byte.MaxValue };
        foreach (byte left in values)
        {
            foreach (byte right in values)
            {
                VerifyByteExclusiveOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckSByteExclusiveOrTest(bool useInterpreter)
    {
        sbyte[] values = { 0, 1, -1, sbyte.MinValue, sbyte.MaxValue };
        foreach (sbyte left in values)
        {
            foreach (sbyte right in values)
            {
                VerifySByteExclusiveOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckUShortExclusiveOrTest(bool useInterpreter)
    {
        ushort[] values = { 0, 1, ushort.MaxValue };
        foreach (ushort left in values)
        {
            foreach (ushort right in values)
            {
                VerifyUShortExclusiveOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckShortExclusiveOrTest(bool useInterpreter)
    {
        short[] values = { 0, 1, -1, short.MinValue, short.MaxValue };
        foreach (short left in values)
        {
            foreach (short right in values)
            {
                VerifyShortExclusiveOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckUIntExclusiveOrTest(bool useInterpreter)
    {
        uint[] values = { 0, 1, uint.MaxValue };
        foreach (uint left in values)
        {
            foreach (uint right in values)
            {
                VerifyUIntExclusiveOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckIntExclusiveOrTest(bool useInterpreter)
    {
        int[] values = { 0, 1, -1, int.MinValue, int.MaxValue };
        foreach (int left in values)
        {
            foreach (int right in values)
            {
                VerifyIntExclusiveOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckULongExclusiveOrTest(bool useInterpreter)
    {
        ulong[] values = { 0, 1, ulong.MaxValue };
        foreach (ulong left in values)
        {
            foreach (ulong right in values)
            {
                VerifyULongExclusiveOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckLongExclusiveOrTest(bool useInterpreter)
    {
        long[] values = { 0, 1, -1, long.MinValue, long.MaxValue };
        foreach (long left in values)
        {
            foreach (long right in values)
            {
                VerifyLongExclusiveOr(left, right, useInterpreter);
            }
        }
    }

    #endregion

    #region Test verifiers

    // Each verifier builds "() => left ^ right" from constant operands,
    // compiles it (JIT or interpreter), and checks it against the language operator.

    private static void VerifyByteExclusiveOr(byte left, byte right, bool useInterpreter)
    {
        Expression<Func<byte>> lambda =
            Expression.Lambda<Func<byte>>(
                Expression.ExclusiveOr(
                    Expression.Constant(left, typeof(byte)),
                    Expression.Constant(right, typeof(byte))),
                Enumerable.Empty<ParameterExpression>());
        Func<byte> compiled = lambda.Compile(useInterpreter);
        Assert.Equal((byte)(left ^ right), compiled());
    }

    private static void VerifySByteExclusiveOr(sbyte left, sbyte right, bool useInterpreter)
    {
        Expression<Func<sbyte>> lambda =
            Expression.Lambda<Func<sbyte>>(
                Expression.ExclusiveOr(
                    Expression.Constant(left, typeof(sbyte)),
                    Expression.Constant(right, typeof(sbyte))),
                Enumerable.Empty<ParameterExpression>());
        Func<sbyte> compiled = lambda.Compile(useInterpreter);
        Assert.Equal((sbyte)(left ^ right), compiled());
    }

    private static void VerifyUShortExclusiveOr(ushort left, ushort right, bool useInterpreter)
    {
        Expression<Func<ushort>> lambda =
            Expression.Lambda<Func<ushort>>(
                Expression.ExclusiveOr(
                    Expression.Constant(left, typeof(ushort)),
                    Expression.Constant(right, typeof(ushort))),
                Enumerable.Empty<ParameterExpression>());
        Func<ushort> compiled = lambda.Compile(useInterpreter);
        Assert.Equal((ushort)(left ^ right), compiled());
    }

    private static void VerifyShortExclusiveOr(short left, short right, bool useInterpreter)
    {
        Expression<Func<short>> lambda =
            Expression.Lambda<Func<short>>(
                Expression.ExclusiveOr(
                    Expression.Constant(left, typeof(short)),
                    Expression.Constant(right, typeof(short))),
                Enumerable.Empty<ParameterExpression>());
        Func<short> compiled = lambda.Compile(useInterpreter);
        Assert.Equal((short)(left ^ right), compiled());
    }

    private static void VerifyUIntExclusiveOr(uint left, uint right, bool useInterpreter)
    {
        Expression<Func<uint>> lambda =
            Expression.Lambda<Func<uint>>(
                Expression.ExclusiveOr(
                    Expression.Constant(left, typeof(uint)),
                    Expression.Constant(right, typeof(uint))),
                Enumerable.Empty<ParameterExpression>());
        Func<uint> compiled = lambda.Compile(useInterpreter);
        Assert.Equal(left ^ right, compiled());
    }

    private static void VerifyIntExclusiveOr(int left, int right, bool useInterpreter)
    {
        Expression<Func<int>> lambda =
            Expression.Lambda<Func<int>>(
                Expression.ExclusiveOr(
                    Expression.Constant(left, typeof(int)),
                    Expression.Constant(right, typeof(int))),
                Enumerable.Empty<ParameterExpression>());
        Func<int> compiled = lambda.Compile(useInterpreter);
        Assert.Equal(left ^ right, compiled());
    }

    private static void VerifyULongExclusiveOr(ulong left, ulong right, bool useInterpreter)
    {
        Expression<Func<ulong>> lambda =
            Expression.Lambda<Func<ulong>>(
                Expression.ExclusiveOr(
                    Expression.Constant(left, typeof(ulong)),
                    Expression.Constant(right, typeof(ulong))),
                Enumerable.Empty<ParameterExpression>());
        Func<ulong> compiled = lambda.Compile(useInterpreter);
        Assert.Equal(left ^ right, compiled());
    }

    private static void VerifyLongExclusiveOr(long left, long right, bool useInterpreter)
    {
        Expression<Func<long>> lambda =
            Expression.Lambda<Func<long>>(
                Expression.ExclusiveOr(
                    Expression.Constant(left, typeof(long)),
                    Expression.Constant(right, typeof(long))),
                Enumerable.Empty<ParameterExpression>());
        Func<long> compiled = lambda.Compile(useInterpreter);
        Assert.Equal(left ^ right, compiled());
    }

    #endregion

    [Fact]
    public static void CannotReduce()
    {
        Expression node = Expression.ExclusiveOr(Expression.Constant(0), Expression.Constant(0));
        Assert.False(node.CanReduce);
        Assert.Same(node, node.Reduce());
        AssertExtensions.Throws<ArgumentException>(null, () => node.ReduceAndCheck());
    }

    [Fact]
    public static void ThrowsOnLeftNull()
    {
        AssertExtensions.Throws<ArgumentNullException>("left", () => Expression.ExclusiveOr(null, Expression.Constant("")));
    }

    [Fact]
    public static void ThrowsOnRightNull()
    {
        AssertExtensions.Throws<ArgumentNullException>("right", () => Expression.ExclusiveOr(Expression.Constant(""), null));
    }

    // Helper exposing a property that can be written but never read.
    private static class Unreadable<T>
    {
        public static T WriteOnly
        {
            set { }
        }
    }

    [Fact]
    public static void ThrowsOnLeftUnreadable()
    {
        Expression writeOnly = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
        AssertExtensions.Throws<ArgumentException>("left", () => Expression.ExclusiveOr(writeOnly, Expression.Constant(1)));
    }

    [Fact]
    public static void ThrowsOnRightUnreadable()
    {
        Expression writeOnly = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
        AssertExtensions.Throws<ArgumentException>("right", () => Expression.ExclusiveOr(Expression.Constant(1), writeOnly));
    }

    [Fact]
    public static void ToStringTest()
    {
        BinaryExpression expr = Expression.ExclusiveOr(Expression.Parameter(typeof(int), "a"), Expression.Parameter(typeof(int), "b"));
        Assert.Equal("(a ^ b)", expr.ToString());
        // NB: Unlike And and Or, there's no special case for bool and bool? here.
    }
}
}
| |
//
// MessageBarManager.cs
//
// Author:
// Prashant Cholachagudda <pvc@outlook.com>
//
// Copyright (c) 2013 Prashant Cholachagudda
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using MonoTouch.UIKit;
using System.Drawing;
using MonoTouch.Foundation;
using System.Collections.Generic;
using System.Threading;
namespace MessageBar
{
/// <summary>
/// Supplies the <see cref="MessageBarStyleSheet"/> used when drawing a message view.
/// </summary>
interface IStyleSheetProvider
{
    /// <summary>
    /// Stylesheet for message view.
    /// </summary>
    /// <returns>The style sheet for message view.</returns>
    /// <param name="messageView">Message view.</param>
    MessageBarStyleSheet StyleSheetForMessageView (MessageView messageView);
}
/// <summary>
/// Queues and displays message bars at the top of the key window, one at a time.
/// </summary>
public class MessageBarManager : NSObject, IStyleSheetProvider
{
    /// <summary>
    /// Gets the process-wide singleton instance of the manager.
    /// </summary>
    public static MessageBarManager SharedInstance {
        get { return instance ?? (instance = new MessageBarManager ()); }
    }

    MessageBarManager ()
    {
        messageBarQueue = new Queue<MessageView> ();
        MessageVisible = false;
        MessageBarOffset = 20;
        styleSheet = new MessageBarStyleSheet ();
    }

    // Vertical offset (in points) applied when sliding a message into view.
    float MessageBarOffset { get; set; }

    // True while a message view is on screen.
    bool MessageVisible { get; set; }

    // Pending message views, shown one at a time in FIFO order.
    Queue<MessageView> MessageBarQueue {
        get { return messageBarQueue; }
    }

    /// <summary>
    /// Gets or sets the style sheet. Assigning <see langword="null"/> is ignored.
    /// </summary>
    /// <value>The style sheet.</value>
    public MessageBarStyleSheet StyleSheet {
        get {
            return styleSheet;
        }
        set {
            if (value != null) {
                styleSheet = value;
            }
        }
    }

    UIView MessageWindowView {
        get {
            return GetMessageBarViewController ().View;
        }
    }

    /// <summary>
    /// Shows the message
    /// </summary>
    /// <param name="title">Messagebar title</param>
    /// <param name="description">Messagebar description</param>
    /// <param name="type">Message type</param>
    public void ShowMessage (string title, string description, MessageType type)
    {
        ShowMessage (title, description, type, null);
    }

    /// <summary>
    /// Shows the message
    /// </summary>
    /// <param name="title">Messagebar title</param>
    /// <param name="description">Messagebar description</param>
    /// <param name="type">Message type</param>
    /// <param name = "onDismiss">OnDismiss callback</param>
    public void ShowMessage (string title, string description, MessageType type, Action onDismiss)
    {
        var messageView = new MessageView (title, description, type);
        messageView.StylesheetProvider = this;
        messageView.OnDismiss = onDismiss;
        messageView.Hidden = true;

        MessageWindowView.AddSubview (messageView);
        MessageWindowView.BringSubviewToFront (messageView);
        MessageBarQueue.Enqueue (messageView);

        // Only kick off display when no message is currently on screen;
        // otherwise DismissMessage will pull the next one from the queue.
        if (!MessageVisible) {
            ShowNextMessage ();
        }
    }

    void ShowNextMessage ()
    {
        if (MessageBarQueue.Count == 0) {
            return;
        }

        MessageVisible = true;
        MessageView messageView = MessageBarQueue.Dequeue ();

        // BUG FIX: this null check previously ran only after the view had
        // already been dereferenced; it must precede the first use.
        if (messageView == null) {
            return;
        }

        // Start just above the window, then animate down into view.
        messageView.Frame = new RectangleF (0, -messageView.Height, messageView.Width, messageView.Height);
        messageView.Hidden = false;
        messageView.SetNeedsDisplay ();

        // A tap dismisses the message immediately.
        messageView.AddGestureRecognizer (new UITapGestureRecognizer (MessageTapped));

        UIView.Animate (DismissAnimationDuration,
            () =>
            messageView.Frame = new RectangleF (messageView.Frame.X,
                MessageBarOffset + messageView.Frame.Y + messageView.Height,
                messageView.Width, messageView.Height)
        );

        // BUG FIX: keep a reference to the dismiss timer in a field; a local
        // would be eligible for garbage collection before the delay elapses.
        dismissTimer = new Timer (DismissMessage, messageView, TimeSpan.FromSeconds (DisplayDelay),
            TimeSpan.FromMilliseconds (-1));
    }

    /// <summary>
    /// Hides all messages, both currently visible and queued.
    /// </summary>
    public void HideAll ()
    {
        // Remove every message view currently attached to the window.
        foreach (UIView subview in MessageWindowView.Subviews) {
            var messageView = subview as MessageView;
            if (messageView != null) {
                messageView.RemoveFromSuperview ();
            }
        }

        MessageVisible = false;
        MessageBarQueue.Clear ();
        CancelPreviousPerformRequest (this);
    }

    void MessageTapped (UIGestureRecognizer recognizer)
    {
        var view = recognizer.View as MessageView;
        if (view != null) {
            DismissMessage (view);
        }
    }

    // Timer callback; marshals the dismissal onto the main (UI) thread.
    void DismissMessage (object messageView)
    {
        var view = messageView as MessageView;
        if (view != null) {
            InvokeOnMainThread (() => DismissMessage (view));
        }
    }

    void DismissMessage (MessageView messageView)
    {
        // "Hit" guards against double dismissal (tap racing the timer).
        if (messageView != null && !messageView.Hit) {
            messageView.Hit = true;
            UIView.Animate (DismissAnimationDuration,
                delegate {
                    messageView.Frame = new RectangleF (
                        messageView.Frame.X,
                        - (messageView.Frame.Height - MessageBarOffset),
                        messageView.Frame.Width, messageView.Frame.Height);
                },
                delegate {
                    MessageVisible = false;
                    messageView.RemoveFromSuperview ();

                    var action = messageView.OnDismiss;
                    if (action != null) {
                        action ();
                    }

                    if (MessageBarQueue.Count > 0) {
                        ShowNextMessage ();
                    }
                }
            );
        }
    }

    // Lazily creates the transparent window that hosts the message views.
    MessageBarViewController GetMessageBarViewController ()
    {
        if (messageWindow == null) {
            messageWindow = new MessageWindow () {
                Frame = UIApplication.SharedApplication.KeyWindow.Frame,
                Hidden = false,
                WindowLevel = UIWindowLevel.Normal,
                BackgroundColor = UIColor.Clear,
                RootViewController = new MessageBarViewController ()
            };
        }

        return (MessageBarViewController) messageWindow.RootViewController;
    }

    MessageWindow messageWindow;
    // Holds the pending auto-dismiss timer so it is not garbage collected.
    Timer dismissTimer;
    const float DisplayDelay = 3.0f;
    const float DismissAnimationDuration = 0.25f;
    MessageBarStyleSheet styleSheet;
    readonly Queue<MessageView> messageBarQueue;
    static MessageBarManager instance;

    #region IStyleSheetProvider implementation

    public MessageBarStyleSheet StyleSheetForMessageView (MessageView messageView)
    {
        return StyleSheet;
    }

    #endregion
}
}
| |
//
// SourceView.cs
//
// Author:
// Aaron Bockover <abockover@novell.com>
//
// Copyright (C) 2005-2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Linq;
using System.Collections.Generic;
using Gtk;
using Cairo;
using Mono.Unix;
using Hyena;
using Hyena.Gui;
using Hyena.Gui.Theming;
using Hyena.Gui.Theatrics;
using Banshee.Configuration;
using Banshee.ServiceStack;
using Banshee.Sources;
using Banshee.Playlist;
using Banshee.Gui;
namespace Banshee.Sources.Gui
{
// Note: This is a partial class - the drag and drop code is split
// out into a separate file to make this class more manageable.
// See SourceView_DragAndDrop.cs for the DnD code.
public partial class SourceView : TreeView
{
private TreeViewColumn source_column;
private SourceRowRenderer source_renderer;
private CellRendererText header_renderer;
private Theme theme;
private Stage<TreeIter> notify_stage = new Stage<TreeIter> (2000);
private TreeIter highlight_iter = TreeIter.Zero;
private SourceModel store;
private int current_timeout = -1;
private bool editing_row = false;
private bool need_resort = false;
// Native-pointer constructor used by the Gtk# binding layer when wrapping an
// existing native widget instance.
protected SourceView (IntPtr ptr) : base (ptr) {}
// Builds the view: columns, backing SourceModel, drag-and-drop, event wiring,
// and a selection filter that keeps group headers unselectable.
public SourceView ()
{
    FixedHeightMode = false;
    BuildColumns ();

    // Attach row handlers before assigning Model so no events are missed.
    store = new SourceModel ();
    store.SourceRowInserted += OnSourceRowInserted;
    store.SourceRowRemoved += OnSourceRowRemoved;
    store.RowChanged += OnRowChanged;
    Model = store;

    EnableSearch = false;
    ShowExpanders = false;
    LevelIndentation = 6;

    ConfigureDragAndDrop ();
    store.Refresh ();
    ConnectEvents ();

    // Group headers (and rows with no source) are never selectable.
    Selection.SelectFunction = (selection, model, path, selected) => {
        Source source = store.GetSource (path);
        if (source == null || source is SourceManager.GroupSource) {
            return false;
        }
        return true;
    };

    ResetSelection ();
}
#region Setup Methods
// Creates the hidden expander column plus the single source column, which
// carries two renderers: a bold header renderer for group rows and the
// custom SourceRowRenderer for ordinary source rows.
private void BuildColumns ()
{
    // Hidden expander column
    TreeViewColumn col = new TreeViewColumn ();
    col.Visible = false;
    AppendColumn (col);
    ExpanderColumn = col;

    source_column = new TreeViewColumn ();
    source_column.Sizing = TreeViewColumnSizing.Autosize;

    uint xpad = 2;

    // Special renderer for header rows; hidden for normal source rows
    header_renderer = new CellRendererText () {
        Xpad = xpad,
        Ypad = 4,
        Ellipsize = Pango.EllipsizeMode.End,
        Weight = (int)Pango.Weight.Bold,
        Variant = Pango.Variant.SmallCaps
    };

    // Renderer for source rows; hidden for header rows
    source_renderer = new SourceRowRenderer ();
    source_renderer.Xpad = xpad;

    source_column.PackStart (header_renderer, true);
    source_column.SetCellDataFunc (header_renderer, new Gtk.CellLayoutDataFunc ((layout, cell, model, iter) => {
        if (model == null) {
            throw new ArgumentNullException ("model");
        }
        // be paranoid about the values returned from model.GetValue(), they may be null or have unexpected types, see bgo#683359
        var obj_type = model.GetValue (iter, (int)SourceModel.Columns.Type);
        if (obj_type == null || !(obj_type is SourceModel.EntryType)) {
            // Log and hide both renderers rather than crashing the draw path.
            var source = model.GetValue (iter, (int)SourceModel.Columns.Source) as Source;
            var source_name = source == null ? "some source" : String.Format ("source {0}", source.Name);
            Log.ErrorFormat (
                "SourceView of {0} could not render its source column because its type value returned {1} from the iter",
                source_name, obj_type == null ? "null" : String.Format ("an instance of {0}", obj_type.GetType ().FullName));
            header_renderer.Visible = false;
            source_renderer.Visible = false;
            return;
        }
        var type = (SourceModel.EntryType) obj_type;
        header_renderer.Visible = type == SourceModel.EntryType.Group;
        source_renderer.Visible = type == SourceModel.EntryType.Source;
        if (type == SourceModel.EntryType.Group) {
            var source = (Source) model.GetValue (iter, (int)SourceModel.Columns.Source);
            header_renderer.Visible = true;
            header_renderer.Text = source.Name;
        } else {
            header_renderer.Visible = false;
        }
    }));

    int width, height;
    Gtk.Icon.SizeLookup (IconSize.Menu, out width, out height);
    // NOTE(review): each of the next two pairs assigns a configured value and
    // then immediately overwrites it with a hard-coded one (RowHeight.Get() ->
    // icon height, RowPadding.Get() -> 2), so the configuration reads have no
    // effect. TODO: confirm which assignment of each pair is intended.
    source_renderer.RowHeight = RowHeight.Get ();
    source_renderer.RowHeight = height;
    source_renderer.Ypad = (uint)RowPadding.Get ();
    source_renderer.Ypad = 2;
    source_column.PackStart (source_renderer, true);
    source_column.SetCellDataFunc (source_renderer, new CellLayoutDataFunc (SourceRowRenderer.CellDataHandler));
    AppendColumn (source_column);

    HeadersVisible = false;
}
// Subscribes to SourceManager/playback events and the notification animation
// stage. All handlers marshal back to the main thread before touching Gtk.
private void ConnectEvents ()
{
    // Keep the selected row in sync with the globally active source.
    ServiceManager.SourceManager.ActiveSourceChanged += delegate (SourceEventArgs args) {
        ThreadAssist.ProxyToMain (ResetSelection);
    };

    // When a source changes, re-expand it if needed and flag a resort; the
    // actual resort is deferred to OnDrawn to avoid mutating the store mid-draw.
    ServiceManager.SourceManager.SourceUpdated += delegate (SourceEventArgs args) {
        ThreadAssist.ProxyToMain (delegate {
            lock (args.Source) {
                TreeIter iter = store.FindSource (args.Source);
                if (!TreeIter.Zero.Equals (iter)) {
                    if (args.Source.Expanded) {
                        Expand (args.Source);
                    }
                    need_resort = true;
                    QueueDraw ();
                }
            }
        });
    };

    ServiceManager.PlaybackController.NextSourceChanged += delegate {
        ThreadAssist.ProxyToMain (QueueDraw);
    };

    // Animation step for the notification flash: redraw the row's background
    // area each tick; returning false stops the actor once its iter is invalid.
    notify_stage.ActorStep += delegate (Actor<TreeIter> actor) {
        ThreadAssist.AssertInMainThread ();
        if (!store.IterIsValid (actor.Target)) {
            return false;
        }
        using (var path = store.GetPath (actor.Target) ) {
            Gdk.Rectangle rect = GetBackgroundArea (path, source_column);
            QueueDrawArea (rect.X, rect.Y, rect.Width, rect.Height);
        }
        return true;
    };

    ServiceManager.Get<InterfaceActionService> ().SourceActions["OpenSourceSwitcher"].Activated += delegate {
        // The switcher attaches itself to this view; the instance is
        // intentionally not retained here.
        new SourceSwitcherEntry (this);
    };
}
#endregion
#region Gtk.Widget Overrides
// Rebuilds the cached theme and refreshes the muted foreground color used for
// group header text whenever the widget's style changes.
protected override void OnStyleUpdated ()
{
    base.OnStyleUpdated ();

    theme = Hyena.Gui.Theming.ThemeEngine.CreateTheme (this);
    var light_text = Hyena.Gui.Theming.GtkTheme.GetCairoTextMidColor (this);
    header_renderer.Foreground = CairoExtensions.ColorGetHex (light_text, false);
}
// While scrolling the source view with the keyboard, we want to
// just skip group sources and jump to the next source in the view.
protected override bool OnKeyPressEvent (Gdk.EventKey press)
{
    TreeIter iter;
    bool movedCursor = false;

    // NOTE(review): the return value of GetSelected is not checked; when
    // nothing is selected, iter stays zero — confirm GetPath tolerates that.
    Selection.GetSelected (out iter);
    TreePath path = store.GetPath (iter);

    // Move the path to the next source in line as we need to check if it's a group
    IncrementPathForKeyPress (press, path);
    Source source = store.GetSource (path);

    // Keep advancing (and moving the cursor) past group rows until a real
    // source is reached or the path cannot move any further.
    while (source is SourceManager.GroupSource && IncrementPathForKeyPress (press, path)) {
        source = store.GetSource (path);
        SetCursor (path, source_column, false);
        movedCursor = true;
    }

    // Consume the event only when the cursor was moved here.
    return movedCursor ? true : base.OnKeyPressEvent (press);
}
// Mouse handling: left click selects/activates, clicks in the expander zone
// toggle expansion, right click opens the context menu, Ctrl+double-click
// activates the row.
protected override bool OnButtonPressEvent (Gdk.EventButton press)
{
    TreePath path;
    TreeViewColumn column;

    // A left click clears any lingering context-menu highlight first.
    if (press.Button == 1) {
        ResetHighlight ();
    }

    // If there is not a row at the click position let the base handler take care of the press
    if (!GetPathAtPos ((int)press.X, (int)press.Y, out path, out column)) {
        return base.OnButtonPressEvent (press);
    }

    Source source = store.GetSource (path);
    // Group headers are not interactive.
    if (source == null || source is SourceManager.GroupSource) {
        return false;
    }

    // From F-Spot's SaneTreeView class
    if (source_renderer.InExpander ((int)press.X)) {
        if (!source.Expanded) {
            ExpandRow (path, false);
        } else {
            CollapseRow (path);
        }

        // If the active source is a child of this source, and we are about to collapse it, switch
        // the active source to the parent.
        // NOTE(review): ActiveSource is dereferenced without a null check —
        // confirm it can never be null while the view is interactive.
        if (source == ServiceManager.SourceManager.ActiveSource.Parent && GetRowExpanded (path)) {
            ServiceManager.SourceManager.SetActiveSource (source);
        }

        return true;
    }

    // For Sources that can't be activated, when they're clicked just
    // expand or collapse them and return.
    if (press.Button == 1 && !source.CanActivate) {
        if (!source.Expanded) {
            ExpandRow (path, false);
        } else {
            CollapseRow (path);
        }
        return false;
    }

    // Right click: highlight the row and open the context menu.
    if (press.Button == 3) {
        TreeIter iter;
        if (Model.GetIter (out iter, path)) {
            HighlightIter (iter);
            OnPopupMenu ();
            return true;
        }
    }

    if (!source.CanActivate) {
        return false;
    }

    // Left click makes the clicked source the active one.
    if (press.Button == 1) {
        if (ServiceManager.SourceManager.ActiveSource != source) {
            ServiceManager.SourceManager.SetActiveSource (source);
        }
    }

    // With Ctrl held, a double left click activates the row; the press is
    // consumed either way.
    if ((press.State & Gdk.ModifierType.ControlMask) != 0) {
        if (press.Type == Gdk.EventType.TwoButtonPress && press.Button == 1) {
            ActivateRow (path, null);
        }
        return true;
    }

    return base.OnButtonPressEvent (press);
}
// Opens the shared source context menu; always reports the popup as handled.
protected override bool OnPopupMenu ()
{
    var action_service = ServiceManager.Get<InterfaceActionService> ();
    action_service.SourceActions["SourceContextMenuAction"].Activate ();
    return true;
}
// Performs any pending resort before drawing, then delegates the actual
// rendering to the base TreeView.
protected override bool OnDrawn (Cairo.Context cr)
{
    if (need_resort) {
        need_resort = false;

        // Resort the tree store. This is performed in an event handler
        // known not to conflict with gtk_tree_view_bin_expose() to prevent
        // errors about corrupting the TreeView's internal state.
        foreach (Source dsource in ServiceManager.SourceManager.Sources.ToArray ()) {
            TreeIter iter = store.FindSource (dsource);
            if (!TreeIter.Zero.Equals (iter) &&
                (int)store.GetValue (iter, (int)SourceModel.Columns.Order) != dsource.Order)
            {
                // Sync the Order column with the source's current order.
                store.SetValue (iter, (int)SourceModel.Columns.Order, dsource.Order);
            }
        }
        QueueDraw ();
    }

    base.OnDrawn (cr);
    return true;
}
// Translates an Up/Down (or keypad Up/Down) key press into a path move.
// Returns true when the path was advanced; any other key leaves it untouched.
private bool IncrementPathForKeyPress (Gdk.EventKey press, TreePath path)
{
    if (press.Key == Gdk.Key.Up || press.Key == Gdk.Key.KP_Up) {
        return path.Prev ();
    }

    if (press.Key == Gdk.Key.Down || press.Key == Gdk.Key.KP_Down) {
        path.Next ();
        return true;
    }

    return false;
}
#endregion
#region Gtk.TreeView Overrides
// Records the expanded state on the backing source when a row is expanded.
protected override void OnRowExpanded (TreeIter iter, TreePath path)
{
    base.OnRowExpanded (iter, path);

    Source expanded_source = store.GetSource (iter);
    if (expanded_source == null) {
        return;
    }
    expanded_source.Expanded = true;
}
protected override void OnRowCollapsed (TreeIter iter, TreePath path)
{
    base.OnRowCollapsed (iter, path);

    // Persist the collapsed state on the source itself.
    Source source = store.GetSource (iter);
    if (source == null) {
        return;
    }
    source.Expanded = false;
}
protected override void OnCursorChanged ()
{
    // Debounce: schedule at most one 200 ms timeout; OnCursorChangedTimeout
    // resets current_timeout to -1 when it fires.
    if (current_timeout >= 0) {
        return;
    }
    current_timeout = (int)GLib.Timeout.Add (200, OnCursorChangedTimeout);
}
private bool OnCursorChangedTimeout ()
{
    current_timeout = -1;

    ITreeModel model;
    TreeIter iter;
    if (!Selection.GetSelected (out model, out iter)) {
        return false;
    }

    var new_source = store.GetValue (iter, (int)SourceModel.Columns.Source) as Source;
    if (ServiceManager.SourceManager.ActiveSource != new_source) {
        ServiceManager.SourceManager.SetActiveSource (new_source);
        QueueDraw ();
    }

    // Always return false: this is a one-shot GLib timeout.
    return false;
}
#endregion
#region Add/Remove Sources / SourceManager interaction
private void OnSourceRowInserted (object o, SourceRowEventArgs args)
{
    args.Source.UserNotifyUpdated += OnSourceUserNotifyUpdated;

    // Honor auto-expansion on the parent row and on the inserted row itself.
    bool parent_auto_expands = args.Source.Parent != null && args.Source.Parent.AutoExpand == true;
    if (parent_auto_expands) {
        Expand (args.ParentIter);
    }

    if (args.Source.Expanded || args.Source.AutoExpand == true) {
        Expand (args.Iter);
    }

    UpdateView ();

    if (args.Source.Properties.Get<bool> ("NotifyWhenAdded")) {
        args.Source.NotifyUser ();
    }
}
private void OnSourceRowRemoved (object o, SourceRowEventArgs args)
{
    // Unsubscribe so the removed source cannot keep this view alive or
    // trigger notifications for a row that no longer exists.
    args.Source.UserNotifyUpdated -= OnSourceUserNotifyUpdated;
    UpdateView ();
}
private void OnRowChanged (object o, RowChangedArgs args)
{
    // Any model change may affect rendering; just repaint.
    QueueDraw ();
}
// Expands the row for the given source and records the expanded state on it.
internal new void Expand (Source src)
{
    // Guard against sources that are not (or no longer) in the model:
    // FindSource returns TreeIter.Zero in that case, and expanding an
    // invalid iter is unsafe. Other call sites in this class
    // (BeginRenameSource, OnSourceUserNotifyUpdated) perform the same check.
    TreeIter iter = store.FindSource (src);
    if (iter.Equals (TreeIter.Zero)) {
        return;
    }
    Expand (iter);
    src.Expanded = true;
}
private new void Expand (TreeIter iter)
{
    // GetPath allocates a native TreePath; make sure it is disposed once
    // the row has been expanded.
    var path = store.GetPath (iter);
    try {
        ExpandRow (path, true);
    } finally {
        path.Dispose ();
    }
}
private void OnSourceUserNotifyUpdated (object o, EventArgs args)
{
    // Notifications can arrive on any thread; marshal to the main (GUI)
    // thread before touching the model or the notify stage.
    ThreadAssist.ProxyToMain (delegate {
        TreeIter iter = store.FindSource ((Source)o);
        if (!iter.Equals (TreeIter.Zero)) {
            notify_stage.AddOrReset (iter);
        }
    });
}
#endregion
#region List/View Utility Methods
// Picks the expander column: if any top-level source has children, anchor
// the expander on the source column; otherwise use the first column.
// Returns true when a parent row was found.
private bool UpdateView ()
{
    int top_level_count = store.IterNChildren ();
    for (int i = 0; i < top_level_count; i++) {
        TreeIter iter;
        if (store.IterNthChild (out iter, i) && store.IterNChildren (iter) > 0) {
            ExpanderColumn = source_column;
            return true;
        }
    }

    ExpanderColumn = Columns[0];
    return false;
}
// Renames the source displayed at the given path to the supplied text.
internal void UpdateRow (TreePath path, string text)
{
    TreeIter iter;
    if (!store.GetIter (out iter, path)) {
        return;
    }

    // The column value is retrieved with an 'as' cast, so it can be null
    // (e.g. an empty/non-Source cell); the original code would then throw
    // a NullReferenceException on Rename.
    Source source = store.GetValue (iter, (int)SourceModel.Columns.Source) as Source;
    if (source == null) {
        return;
    }

    source.Rename (text);
}
public void BeginRenameSource (Source source)
{
    TreeIter iter = store.FindSource (source);
    if (TreeIter.Zero.Equals (iter)) {
        return;
    }

    // Temporarily make the cell editable, start the in-place edit via
    // SetCursor, then lock it again so ordinary clicks cannot rename.
    source_renderer.Editable = true;
    var path = store.GetPath (iter);
    try {
        SetCursor (path, source_column, true);
    } finally {
        path.Dispose ();
    }
    source_renderer.Editable = false;
}
private void ResetSelection ()
{
    // Re-select the row of the currently active source, if it is in the model.
    TreeIter iter = store.FindSource (ServiceManager.SourceManager.ActiveSource);
    if (iter.Equals (TreeIter.Zero)) {
        return;
    }
    Selection.SelectIter (iter);
}
public void HighlightIter (TreeIter iter)
{
    // Remember the row to highlight and repaint so the renderer shows it.
    highlight_iter = iter;
    QueueDraw ();
}
public void ResetHighlight ()
{
    // TreeIter.Zero marks "no highlight"; repaint to clear the old one.
    highlight_iter = TreeIter.Zero;
    QueueDraw ();
}
#endregion
#region Public Properties
// The source of the currently highlighted row, or null when nothing is
// highlighted (highlight_iter == TreeIter.Zero).
public Source HighlightedSource {
    get {
        bool has_highlight = !TreeIter.Zero.Equals (highlight_iter);
        return has_highlight
            ? store.GetValue (highlight_iter, (int)SourceModel.Columns.Source) as Source
            : null;
    }
}
// Whether a row is currently being edited in place; setting it repaints
// so the renderer can adjust to the edit state.
public bool EditingRow {
    get { return editing_row; }
    set {
        editing_row = value;
        QueueDraw ();
    }
}
#endregion
#region Internal Properties
// The iter of the currently highlighted row (TreeIter.Zero when none).
internal TreeIter HighlightedIter {
    get { return highlight_iter; }
}
// Theme used by the cell renderers for this view.
internal Theme Theme {
    get { return theme; }
}
// Animation stage driving the "user notify" pulse on source rows.
internal Stage<TreeIter> NotifyStage {
    get { return notify_stage; }
}
// Lazily-created placeholder source used when dragging tracks onto
// "New Playlist"; one instance is shared for the lifetime of the view.
internal Source NewPlaylistSource {
    get {
        if (new_playlist_source == null) {
            new_playlist_source = new PlaylistSource (Catalog.GetString ("New Playlist"),
                ServiceManager.SourceManager.MusicLibrary);
        }
        return new_playlist_source;
    }
}
#endregion
#region Property Schemas
// Persisted preference entries controlling SourceView row geometry.
private static SchemaEntry<int> RowHeight = new SchemaEntry<int> (
    "player_window", "source_view_row_height", 22, "The height of each source row in the SourceView. 22 is the default.", "");
private static SchemaEntry<int> RowPadding = new SchemaEntry<int> (
    "player_window", "source_view_row_padding", 5, "The padding between sources in the SourceView. 5 is the default.", "");
#endregion
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
using System;
using System.Threading;
namespace System.Text
{
[Serializable]
public abstract class EncoderFallback
{
    internal bool bIsMicrosoftBestFitFallback = false;

    // Lazily-created singletons for the two built-in fallbacks.
    private static EncoderFallback replacementFallback; // Default fallback, uses no best fit & "?"
    private static EncoderFallback exceptionFallback;

    // Private object for locking instead of locking on a public type for SQL reliability work.
    private static Object s_InternalSyncObject;
    private static Object InternalSyncObject
    {
        get
        {
            if (s_InternalSyncObject == null)
            {
                Object o = new Object();
                Interlocked.CompareExchange(ref s_InternalSyncObject, o, null);
            }
            return s_InternalSyncObject;
        }
    }

    // Shared fallback that substitutes for characters that cannot be encoded,
    // created on demand with double-checked locking.
    public static EncoderFallback ReplacementFallback
    {
        get
        {
            if (replacementFallback == null)
            {
                lock (InternalSyncObject)
                {
                    if (replacementFallback == null)
                    {
                        replacementFallback = new EncoderReplacementFallback();
                    }
                }
            }
            return replacementFallback;
        }
    }

    // Shared fallback that throws for characters that cannot be encoded,
    // created on demand with double-checked locking.
    public static EncoderFallback ExceptionFallback
    {
        get
        {
            if (exceptionFallback == null)
            {
                lock (InternalSyncObject)
                {
                    if (exceptionFallback == null)
                    {
                        exceptionFallback = new EncoderExceptionFallback();
                    }
                }
            }
            return exceptionFallback;
        }
    }

    // Creates the buffer that performs the actual fallback work for one
    // encoding operation. Most implementations will be:
    //   return new MyCustomEncoderFallbackBuffer(this);
    public abstract EncoderFallbackBuffer CreateFallbackBuffer();

    // Maximum number of characters that this instance of this fallback could return.
    public abstract int MaxCharCount { get; }
}
// Buffer object that performs the per-operation fallback work for an
// EncoderFallback: it is handed characters the encoding cannot represent
// and yields the substitute characters one at a time via GetNextChar.
public abstract class EncoderFallbackBuffer
{
    // Most implementations will probably need an implementation-specific constructor.

    // Public methods that cannot be overridden that let us do our fallback thing.
    // These wrap the internal methods so that we can check for people doing stuff that's too silly.
    public abstract bool Fallback( char charUnknown, int index );
    public abstract bool Fallback( char charUnknownHigh, char charUnknownLow, int index );
    // Get next substitute character; (char)0 means the buffer is exhausted.
    public abstract char GetNextChar();
    // Back up a character.
    public abstract bool MovePrevious();
    // How many chars left in this fallback?
    public abstract int Remaining { get; }
    // Clear the buffer by draining it; overrides may do this more cheaply.
    public virtual void Reset()
    {
        while(GetNextChar() != (char)0) ;
    }
    // Internal items to help us figure out what we're doing as far as error messages, etc.
    // These help us with our performance and messages internally.
    internal unsafe char* charStart = null;   // start of the input buffer being encoded
    internal unsafe char* charEnd;            // one past the end of the input buffer
    internal EncoderNLS encoder;              // encoder that owns this operation, may be null
    internal bool setEncoder;                 // true when called from GetBytes (may store state on encoder)
    internal bool bUsedEncoder;               // true once encoder.charLeftOver has been written
    internal bool bFallingBack = false;       // true while substitute chars are being emitted
    internal int iRecursionCount = 0;         // guards against fallbacks that themselves fall back
    private const int iMaxRecursion = 250;
    // Internal Reset.
    // For example, what if someone fails a conversion and wants to reset one of our fallback buffers?
    internal unsafe void InternalReset()
    {
        charStart = null;
        bFallingBack = false;
        iRecursionCount = 0;
        Reset();
    }
    // Set the above values.
    // This can't be part of the constructor because EncoderFallbacks would have to know how to implement these.
    internal unsafe void InternalInitialize( char* charStart, char* charEnd, EncoderNLS encoder, bool setEncoder )
    {
        this.charStart = charStart;
        this.charEnd = charEnd;
        this.encoder = encoder;
        this.setEncoder = setEncoder;
        this.bUsedEncoder = false;
        this.bFallingBack = false;
        this.iRecursionCount = 0;
    }
    // Pulls the next substitute character and tracks whether we are still
    // falling back; a 0 return ends the fallback and resets the recursion guard.
    internal char InternalGetNextChar()
    {
        char ch = GetNextChar();
        bFallingBack = (ch != 0);
        if(ch == 0) iRecursionCount = 0;
        return ch;
    }
    // Fallback the current character using the remaining buffer and encoder if necessary.
    // This can only be called by our encodings (others have to use the public fallback methods), so
    // we can use our EncoderNLS here too.
    // setEncoder is true if we're calling from a GetBytes method, false if we're calling from a GetByteCount.
    //
    // Note that this could also change the contents of this.encoder, which is the same
    // object that the caller is using, so the caller could mess up the encoder for us
    // if they aren't careful.
    internal unsafe virtual bool InternalFallback( char ch, ref char* chars )
    {
        // Shouldn't have null charStart
        BCLDebug.Assert( charStart != null,
            "[EncoderFallback.InternalFallbackBuffer]Fallback buffer is not initialized" );
        // Get our index, remember chars was preincremented to point at next char, so have to -1
        int index = (int)(chars - charStart) - 1;
        // See if it was a high surrogate
        if(Char.IsHighSurrogate( ch ))
        {
            // See if there's a low surrogate to go with it
            if(chars >= this.charEnd)
            {
                // Nothing left in input buffer
                // No input, return 0 if mustflush is false
                if(this.encoder != null && !this.encoder.MustFlush)
                {
                    // Done, nothing to fallback: stash the dangling high surrogate
                    // on the encoder so the next call can complete the pair.
                    if(this.setEncoder)
                    {
                        bUsedEncoder = true;
                        this.encoder.charLeftOver = ch;
                    }
                    bFallingBack = false;
                    return false;
                }
            }
            else
            {
                // Might have a low surrogate
                char cNext = *chars;
                if(Char.IsLowSurrogate( cNext ))
                {
                    // If already falling back then fail
                    if(bFallingBack && iRecursionCount++ > iMaxRecursion)
                        ThrowLastCharRecursive( Char.ConvertToUtf32( ch, cNext ) );
                    // Next is a surrogate, add it as surrogate pair, and increment chars
                    chars++;
                    bFallingBack = Fallback( ch, cNext, index );
                    return bFallingBack;
                }
                // Next isn't a low surrogate, just fallback the high surrogate
            }
        }
        // If already falling back then fail
        if(bFallingBack && iRecursionCount++ > iMaxRecursion)
            ThrowLastCharRecursive( (int)ch );
        // Fall back our char
        bFallingBack = Fallback( ch, index );
        return bFallingBack;
    }
    // Throws when a fallback's own output keeps falling back recursively.
    internal void ThrowLastCharRecursive( int charRecursive )
    {
        // Throw it, using our complete character
#if EXCEPTION_STRINGS
        throw new ArgumentException(
            Environment.GetResourceString( "Argument_RecursiveFallback",
                charRecursive ), "chars" );
#else
        throw new ArgumentException();
#endif
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Newtonsoft.Json.Linq;
using OpenIddict.Abstractions;
using OrchardCore.OpenId.Abstractions.Stores;
using OrchardCore.OpenId.YesSql.Indexes;
using OrchardCore.OpenId.YesSql.Models;
using YesSql;
namespace OrchardCore.OpenId.YesSql.Stores
{
/// <summary>
/// YesSql-backed store for OpenID Connect client applications, implementing the
/// OpenIddict application store contract. The LINQ-expression-based query
/// overloads are not supported by this provider and throw
/// <see cref="NotSupportedException"/>.
/// </summary>
public class OpenIdApplicationStore<TApplication> : IOpenIdApplicationStore<TApplication>
    where TApplication : OpenIdApplication, new()
{
    // YesSql session used for all persistence operations.
    private readonly ISession _session;
    public OpenIdApplicationStore(ISession session)
    {
        _session = session;
    }
    /// <inheritdoc/>
    public virtual async ValueTask<long> CountAsync(CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return await _session.Query<TApplication>().CountAsync();
    }
    /// <inheritdoc/>
    public virtual ValueTask<long> CountAsync<TResult>(Func<IQueryable<TApplication>, IQueryable<TResult>> query, CancellationToken cancellationToken)
        => throw new NotSupportedException();
    /// <inheritdoc/>
    public virtual async ValueTask CreateAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        cancellationToken.ThrowIfCancellationRequested();
        // Save registers the document; CommitAsync flushes it to the database.
        _session.Save(application);
        await _session.CommitAsync();
    }
    /// <inheritdoc/>
    public virtual async ValueTask DeleteAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        cancellationToken.ThrowIfCancellationRequested();
        _session.Delete(application);
        await _session.CommitAsync();
    }
    /// <inheritdoc/>
    public virtual async ValueTask<TApplication> FindByIdAsync(string identifier, CancellationToken cancellationToken)
    {
        if (string.IsNullOrEmpty(identifier))
        {
            throw new ArgumentException("The identifier cannot be null or empty.", nameof(identifier));
        }
        cancellationToken.ThrowIfCancellationRequested();
        return await _session.Query<TApplication, OpenIdApplicationIndex>(index => index.ApplicationId == identifier).FirstOrDefaultAsync();
    }
    /// <inheritdoc/>
    public virtual async ValueTask<TApplication> FindByClientIdAsync(string identifier, CancellationToken cancellationToken)
    {
        if (string.IsNullOrEmpty(identifier))
        {
            throw new ArgumentException("The identifier cannot be null or empty.", nameof(identifier));
        }
        cancellationToken.ThrowIfCancellationRequested();
        return await _session.Query<TApplication, OpenIdApplicationIndex>(index => index.ClientId == identifier).FirstOrDefaultAsync();
    }
    /// <inheritdoc/>
    public virtual async ValueTask<TApplication> FindByPhysicalIdAsync(string identifier, CancellationToken cancellationToken)
    {
        if (string.IsNullOrEmpty(identifier))
        {
            throw new ArgumentException("The identifier cannot be null or empty.", nameof(identifier));
        }
        cancellationToken.ThrowIfCancellationRequested();
        // The "physical" identifier is the numeric YesSql document id
        // (the counterpart of GetPhysicalIdAsync below).
        return await _session.GetAsync<TApplication>(int.Parse(identifier, CultureInfo.InvariantCulture));
    }
    /// <inheritdoc/>
    public virtual IAsyncEnumerable<TApplication> FindByPostLogoutRedirectUriAsync(string address, CancellationToken cancellationToken)
    {
        if (string.IsNullOrEmpty(address))
        {
            throw new ArgumentException("The address cannot be null or empty.", nameof(address));
        }
        cancellationToken.ThrowIfCancellationRequested();
        return _session.Query<TApplication, OpenIdAppByLogoutUriIndex>(
            index => index.LogoutRedirectUri == address).ToAsyncEnumerable();
    }
    /// <inheritdoc/>
    public virtual IAsyncEnumerable<TApplication> FindByRedirectUriAsync(string address, CancellationToken cancellationToken)
    {
        if (string.IsNullOrEmpty(address))
        {
            throw new ArgumentException("The address cannot be null or empty.", nameof(address));
        }
        cancellationToken.ThrowIfCancellationRequested();
        return _session.Query<TApplication, OpenIdAppByRedirectUriIndex>(
            index => index.RedirectUri == address).ToAsyncEnumerable();
    }
    /// <inheritdoc/>
    public virtual ValueTask<TResult> GetAsync<TState, TResult>(
        Func<IQueryable<TApplication>, TState, IQueryable<TResult>> query,
        TState state, CancellationToken cancellationToken)
        => throw new NotSupportedException();
    /// <inheritdoc/>
    public virtual ValueTask<string> GetClientIdAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<string>(application.ClientId);
    }
    /// <inheritdoc/>
    public virtual ValueTask<string> GetClientSecretAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<string>(application.ClientSecret);
    }
    /// <inheritdoc/>
    public virtual ValueTask<string> GetClientTypeAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<string>(application.Type);
    }
    /// <inheritdoc/>
    public virtual ValueTask<string> GetConsentTypeAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<string>(application.ConsentType);
    }
    /// <inheritdoc/>
    public virtual ValueTask<string> GetDisplayNameAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<string>(application.DisplayName);
    }
    /// <inheritdoc/>
    public virtual ValueTask<ImmutableDictionary<CultureInfo, string>> GetDisplayNamesAsync(
        TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        // Normalize a missing dictionary to an empty one so callers never see null.
        if (application.DisplayNames == null)
        {
            return new ValueTask<ImmutableDictionary<CultureInfo, string>>(ImmutableDictionary.Create<CultureInfo, string>());
        }
        return new ValueTask<ImmutableDictionary<CultureInfo, string>>(application.DisplayNames);
    }
    /// <inheritdoc/>
    public virtual ValueTask<string> GetIdAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<string>(application.ApplicationId);
    }
    /// <inheritdoc/>
    public virtual ValueTask<ImmutableArray<string>> GetPermissionsAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<ImmutableArray<string>>(application.Permissions);
    }
    /// <inheritdoc/>
    public virtual ValueTask<string> GetPhysicalIdAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        // The numeric YesSql document id, rendered culture-invariantly
        // (the counterpart of FindByPhysicalIdAsync above).
        return new ValueTask<string>(application.Id.ToString(CultureInfo.InvariantCulture));
    }
    /// <inheritdoc/>
    public virtual ValueTask<ImmutableArray<string>> GetPostLogoutRedirectUrisAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<ImmutableArray<string>>(application.PostLogoutRedirectUris);
    }
    /// <inheritdoc/>
    public virtual ValueTask<ImmutableDictionary<string, JsonElement>> GetPropertiesAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        if (application.Properties == null)
        {
            return new ValueTask<ImmutableDictionary<string, JsonElement>>(ImmutableDictionary.Create<string, JsonElement>());
        }
        // Properties are stored as a Newtonsoft JObject; round-trip through
        // its string form to produce System.Text.Json elements for callers.
        return new ValueTask<ImmutableDictionary<string, JsonElement>>(
            JsonSerializer.Deserialize<ImmutableDictionary<string, JsonElement>>(application.Properties.ToString()));
    }
    /// <inheritdoc/>
    public virtual ValueTask<ImmutableArray<string>> GetRedirectUrisAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<ImmutableArray<string>>(application.RedirectUris);
    }
    /// <inheritdoc/>
    public virtual ValueTask<ImmutableArray<string>> GetRequirementsAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<ImmutableArray<string>>(application.Requirements);
    }
    /// <inheritdoc/>
    public virtual ValueTask<TApplication> InstantiateAsync(CancellationToken cancellationToken)
        => new ValueTask<TApplication>(new TApplication { ApplicationId = Guid.NewGuid().ToString("n") });
    /// <inheritdoc/>
    public virtual IAsyncEnumerable<TApplication> ListAsync(int? count, int? offset, CancellationToken cancellationToken)
    {
        var query = _session.Query<TApplication>();
        if (offset.HasValue)
        {
            query = query.Skip(offset.Value);
        }
        if (count.HasValue)
        {
            query = query.Take(count.Value);
        }
        return query.ToAsyncEnumerable();
    }
    /// <inheritdoc/>
    public virtual IAsyncEnumerable<TResult> ListAsync<TState, TResult>(
        Func<IQueryable<TApplication>, TState, IQueryable<TResult>> query,
        TState state, CancellationToken cancellationToken)
        => throw new NotSupportedException();
    /// <inheritdoc/>
    public virtual ValueTask SetClientIdAsync(TApplication application,
        string identifier, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.ClientId = identifier;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetClientSecretAsync(TApplication application, string secret, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.ClientSecret = secret;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetClientTypeAsync(TApplication application, string type, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.Type = type;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetConsentTypeAsync(TApplication application, string type, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.ConsentType = type;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetDisplayNameAsync(TApplication application, string name, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.DisplayName = name;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetDisplayNamesAsync(TApplication application, ImmutableDictionary<CultureInfo, string> names, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.DisplayNames = names;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetPermissionsAsync(TApplication application, ImmutableArray<string> permissions, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.Permissions = permissions;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetPostLogoutRedirectUrisAsync(TApplication application,
        ImmutableArray<string> addresses, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.PostLogoutRedirectUris = addresses;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetPropertiesAsync(TApplication application, ImmutableDictionary<string, JsonElement> properties, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        if (properties == null || properties.IsEmpty)
        {
            application.Properties = null;
            return default;
        }
        // Serialize with System.Text.Json, then parse into the Newtonsoft
        // JObject shape the document model stores (inverse of GetPropertiesAsync).
        application.Properties = JObject.Parse(JsonSerializer.Serialize(properties, new JsonSerializerOptions
        {
            Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
            WriteIndented = false
        }));
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetRedirectUrisAsync(TApplication application,
        ImmutableArray<string> addresses, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.RedirectUris = addresses;
        return default;
    }
    /// <inheritdoc/>
    public virtual ValueTask SetRequirementsAsync(TApplication application,
        ImmutableArray<string> requirements, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.Requirements = requirements;
        return default;
    }
    /// <inheritdoc/>
    public virtual async ValueTask UpdateAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        cancellationToken.ThrowIfCancellationRequested();
        _session.Save(application, checkConcurrency: true);
        try
        {
            await _session.CommitAsync();
        }
        // Translate the YesSql concurrency failure into the OpenIddict
        // exception type callers of this store contract expect.
        catch (ConcurrencyException exception)
        {
            throw new OpenIddictExceptions.ConcurrencyException(new StringBuilder()
                .AppendLine("The application was concurrently updated and cannot be persisted in its current state.")
                .Append("Reload the application from the database and retry the operation.")
                .ToString(), exception);
        }
    }
    /// <inheritdoc/>
    public virtual ValueTask<ImmutableArray<string>> GetRolesAsync(TApplication application, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        return new ValueTask<ImmutableArray<string>>(application.Roles);
    }
    /// <inheritdoc/>
    public virtual IAsyncEnumerable<TApplication> ListInRoleAsync(string role, CancellationToken cancellationToken)
    {
        if (string.IsNullOrEmpty(role))
        {
            throw new ArgumentException("The role name cannot be null or empty.", nameof(role));
        }
        return _session.Query<TApplication, OpenIdAppByRoleNameIndex>(index => index.RoleName == role).ToAsyncEnumerable();
    }
    /// <inheritdoc/>
    public virtual ValueTask SetRolesAsync(TApplication application, ImmutableArray<string> roles, CancellationToken cancellationToken)
    {
        if (application == null)
        {
            throw new ArgumentNullException(nameof(application));
        }
        application.Roles = roles;
        return default;
    }
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvr = Google.Ads.GoogleAds.V9.Resources;
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Ads.GoogleAds.V9.Services
{
/// <summary>Settings for <see cref="LanguageConstantServiceClient"/> instances.</summary>
// NOTE(review): generated code ("DO NOT EDIT" header above) — any manual
// change here will be lost on the next regeneration.
public sealed partial class LanguageConstantServiceSettings : gaxgrpc::ServiceSettingsBase
{
    /// <summary>Get a new instance of the default <see cref="LanguageConstantServiceSettings"/>.</summary>
    /// <returns>A new instance of the default <see cref="LanguageConstantServiceSettings"/>.</returns>
    public static LanguageConstantServiceSettings GetDefault() => new LanguageConstantServiceSettings();
    /// <summary>
    /// Constructs a new <see cref="LanguageConstantServiceSettings"/> object with default settings.
    /// </summary>
    public LanguageConstantServiceSettings()
    {
    }
    private LanguageConstantServiceSettings(LanguageConstantServiceSettings existing) : base(existing)
    {
        gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
        GetLanguageConstantSettings = existing.GetLanguageConstantSettings;
        OnCopy(existing);
    }
    partial void OnCopy(LanguageConstantServiceSettings existing);
    /// <summary>
    /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
    /// <c>LanguageConstantServiceClient.GetLanguageConstant</c> and
    /// <c>LanguageConstantServiceClient.GetLanguageConstantAsync</c>.
    /// </summary>
    /// <remarks>
    /// <list type="bullet">
    /// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
    /// <item><description>Retry delay multiplier: 1.3</description></item>
    /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
    /// <item><description>Maximum attempts: Unlimited</description></item>
    /// <item>
    /// <description>
    /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
    /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
    /// </description>
    /// </item>
    /// <item><description>Timeout: 3600 seconds.</description></item>
    /// </list>
    /// </remarks>
    public gaxgrpc::CallSettings GetLanguageConstantSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
    /// <summary>Creates a deep clone of this object, with all the same property values.</summary>
    /// <returns>A deep clone of this <see cref="LanguageConstantServiceSettings"/> object.</returns>
    public LanguageConstantServiceSettings Clone() => new LanguageConstantServiceSettings(this);
}
/// <summary>
/// Builder class for <see cref="LanguageConstantServiceClient"/> to provide simple configuration of credentials,
/// endpoint etc.
/// </summary>
// NOTE(review): generated code ("DO NOT EDIT" header above) — any manual
// change here will be lost on the next regeneration.
internal sealed partial class LanguageConstantServiceClientBuilder : gaxgrpc::ClientBuilderBase<LanguageConstantServiceClient>
{
    /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
    public LanguageConstantServiceSettings Settings { get; set; }
    /// <summary>Creates a new builder with default settings.</summary>
    public LanguageConstantServiceClientBuilder()
    {
        UseJwtAccessWithScopes = LanguageConstantServiceClient.UseJwtAccessWithScopes;
    }
    partial void InterceptBuild(ref LanguageConstantServiceClient client);
    partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<LanguageConstantServiceClient> task);
    /// <summary>Builds the resulting client.</summary>
    public override LanguageConstantServiceClient Build()
    {
        LanguageConstantServiceClient client = null;
        InterceptBuild(ref client);
        return client ?? BuildImpl();
    }
    /// <summary>Builds the resulting client asynchronously.</summary>
    public override stt::Task<LanguageConstantServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
    {
        stt::Task<LanguageConstantServiceClient> task = null;
        InterceptBuildAsync(cancellationToken, ref task);
        return task ?? BuildAsyncImpl(cancellationToken);
    }
    private LanguageConstantServiceClient BuildImpl()
    {
        Validate();
        grpccore::CallInvoker callInvoker = CreateCallInvoker();
        return LanguageConstantServiceClient.Create(callInvoker, Settings);
    }
    private async stt::Task<LanguageConstantServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
    {
        Validate();
        grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
        return LanguageConstantServiceClient.Create(callInvoker, Settings);
    }
    /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
    protected override string GetDefaultEndpoint() => LanguageConstantServiceClient.DefaultEndpoint;
    /// <summary>
    /// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
    /// </summary>
    protected override scg::IReadOnlyList<string> GetDefaultScopes() => LanguageConstantServiceClient.DefaultScopes;
    /// <summary>Returns the channel pool to use when no other options are specified.</summary>
    protected override gaxgrpc::ChannelPool GetChannelPool() => LanguageConstantServiceClient.ChannelPool;
    /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary>
    protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
/// <summary>LanguageConstantService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service to fetch language constants.
/// </remarks>
// NOTE(review): this appears to be GAPIC auto-generated client code — prefer making
// changes via the generator or the partial-method hooks rather than hand-editing.
public abstract partial class LanguageConstantServiceClient
{
    /// <summary>
    /// The default endpoint for the LanguageConstantService service, which is a host of "googleads.googleapis.com"
    /// and a port of 443.
    /// </summary>
    public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443";

    /// <summary>The default LanguageConstantService scopes.</summary>
    /// <remarks>
    /// The default LanguageConstantService scopes are:
    /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list>
    /// </remarks>
    public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
    {
        "https://www.googleapis.com/auth/adwords",
    });

    // Shared channel pool backing Create()/CreateAsync(); channels created here are the
    // ones released by ShutdownDefaultChannelsAsync().
    internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);

    // Defaults to true; the MaybeUseJwtAccessWithScopes partial hook (implemented
    // elsewhere, if at all) can flip the value by writing to the ref parameter.
    internal static bool UseJwtAccessWithScopes
    {
        get
        {
            bool useJwtAccessWithScopes = true;
            MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
            return useJwtAccessWithScopes;
        }
    }

    static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);

    /// <summary>
    /// Asynchronously creates a <see cref="LanguageConstantServiceClient"/> using the default credentials, endpoint
    /// and settings. To specify custom credentials or other settings, use
    /// <see cref="LanguageConstantServiceClientBuilder"/>.
    /// </summary>
    /// <param name="cancellationToken">
    /// The <see cref="st::CancellationToken"/> to use while creating the client.
    /// </param>
    /// <returns>The task representing the created <see cref="LanguageConstantServiceClient"/>.</returns>
    public static stt::Task<LanguageConstantServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
        new LanguageConstantServiceClientBuilder().BuildAsync(cancellationToken);

    /// <summary>
    /// Synchronously creates a <see cref="LanguageConstantServiceClient"/> using the default credentials, endpoint
    /// and settings. To specify custom credentials or other settings, use
    /// <see cref="LanguageConstantServiceClientBuilder"/>.
    /// </summary>
    /// <returns>The created <see cref="LanguageConstantServiceClient"/>.</returns>
    public static LanguageConstantServiceClient Create() => new LanguageConstantServiceClientBuilder().Build();

    /// <summary>
    /// Creates a <see cref="LanguageConstantServiceClient"/> which uses the specified call invoker for remote
    /// operations.
    /// </summary>
    /// <param name="callInvoker">
    /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
    /// </param>
    /// <param name="settings">Optional <see cref="LanguageConstantServiceSettings"/>.</param>
    /// <returns>The created <see cref="LanguageConstantServiceClient"/>.</returns>
    internal static LanguageConstantServiceClient Create(grpccore::CallInvoker callInvoker, LanguageConstantServiceSettings settings = null)
    {
        gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
        grpcinter::Interceptor interceptor = settings?.Interceptor;
        // Wrap the invoker with the settings-provided interceptor (if any) before
        // handing it to the raw gRPC client.
        if (interceptor != null)
        {
            callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
        }
        LanguageConstantService.LanguageConstantServiceClient grpcClient = new LanguageConstantService.LanguageConstantServiceClient(callInvoker);
        return new LanguageConstantServiceClientImpl(grpcClient, settings);
    }

    /// <summary>
    /// Shuts down any channels automatically created by <see cref="Create()"/> and
    /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
    /// affected.
    /// </summary>
    /// <remarks>
    /// After calling this method, further calls to <see cref="Create()"/> and
    /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
    /// by another call to this method.
    /// </remarks>
    /// <returns>A task representing the asynchronous shutdown operation.</returns>
    public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();

    /// <summary>The underlying gRPC LanguageConstantService client</summary>
    public virtual LanguageConstantService.LanguageConstantServiceClient GrpcClient => throw new sys::NotImplementedException();

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::LanguageConstant GetLanguageConstant(GetLanguageConstantRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::LanguageConstant> GetLanguageConstantAsync(GetLanguageConstantRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::LanguageConstant> GetLanguageConstantAsync(GetLanguageConstantRequest request, st::CancellationToken cancellationToken) =>
        GetLanguageConstantAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the language constant to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::LanguageConstant GetLanguageConstant(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetLanguageConstant(new GetLanguageConstantRequest
        {
            ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
        }, callSettings);

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the language constant to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::LanguageConstant> GetLanguageConstantAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetLanguageConstantAsync(new GetLanguageConstantRequest
        {
            ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
        }, callSettings);

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the language constant to fetch.
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::LanguageConstant> GetLanguageConstantAsync(string resourceName, st::CancellationToken cancellationToken) =>
        GetLanguageConstantAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the language constant to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::LanguageConstant GetLanguageConstant(gagvr::LanguageConstantName resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetLanguageConstant(new GetLanguageConstantRequest
        {
            ResourceNameAsLanguageConstantName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
        }, callSettings);

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the language constant to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::LanguageConstant> GetLanguageConstantAsync(gagvr::LanguageConstantName resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetLanguageConstantAsync(new GetLanguageConstantRequest
        {
            ResourceNameAsLanguageConstantName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
        }, callSettings);

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the language constant to fetch.
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::LanguageConstant> GetLanguageConstantAsync(gagvr::LanguageConstantName resourceName, st::CancellationToken cancellationToken) =>
        GetLanguageConstantAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}
/// <summary>LanguageConstantService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service to fetch language constants.
/// </remarks>
// NOTE(review): appears to be GAPIC auto-generated code; customize behavior via the
// Modify_*/OnConstruction partial hooks rather than editing this class directly.
public sealed partial class LanguageConstantServiceClientImpl : LanguageConstantServiceClient
{
    // Sync/async call pair for the GetLanguageConstant RPC, built from the effective settings.
    private readonly gaxgrpc::ApiCall<GetLanguageConstantRequest, gagvr::LanguageConstant> _callGetLanguageConstant;

    /// <summary>
    /// Constructs a client wrapper for the LanguageConstantService service, with the specified gRPC client and
    /// settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">
    /// The base <see cref="LanguageConstantServiceSettings"/> used within this client.
    /// </param>
    public LanguageConstantServiceClientImpl(LanguageConstantService.LanguageConstantServiceClient grpcClient, LanguageConstantServiceSettings settings)
    {
        GrpcClient = grpcClient;
        // Null settings fall back to the defaults.
        LanguageConstantServiceSettings effectiveSettings = settings ?? LanguageConstantServiceSettings.GetDefault();
        gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
        // Route the resource_name field as the "resource_name" request routing header.
        _callGetLanguageConstant = clientHelper.BuildApiCall<GetLanguageConstantRequest, gagvr::LanguageConstant>(grpcClient.GetLanguageConstantAsync, grpcClient.GetLanguageConstant, effectiveSettings.GetLanguageConstantSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName);
        Modify_ApiCall(ref _callGetLanguageConstant);
        Modify_GetLanguageConstantApiCall(ref _callGetLanguageConstant);
        OnConstruction(grpcClient, effectiveSettings, clientHelper);
    }

    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;

    partial void Modify_GetLanguageConstantApiCall(ref gaxgrpc::ApiCall<GetLanguageConstantRequest, gagvr::LanguageConstant> call);

    partial void OnConstruction(LanguageConstantService.LanguageConstantServiceClient grpcClient, LanguageConstantServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);

    /// <summary>The underlying gRPC LanguageConstantService client</summary>
    public override LanguageConstantService.LanguageConstantServiceClient GrpcClient { get; }

    partial void Modify_GetLanguageConstantRequest(ref GetLanguageConstantRequest request, ref gaxgrpc::CallSettings settings);

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override gagvr::LanguageConstant GetLanguageConstant(GetLanguageConstantRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetLanguageConstantRequest(ref request, ref callSettings);
        return _callGetLanguageConstant.Sync(request, callSettings);
    }

    /// <summary>
    /// Returns the requested language constant.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<gagvr::LanguageConstant> GetLanguageConstantAsync(GetLanguageConstantRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetLanguageConstantRequest(ref request, ref callSettings);
        return _callGetLanguageConstant.Async(request, callSettings);
    }
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using NodaTime;
using NUnit.Framework;
using QuantConnect.Data;
using QuantConnect.Data.Market;
using QuantConnect.Orders;
using QuantConnect.Orders.Fees;
using QuantConnect.Securities;
using QuantConnect.Securities.Crypto;
using QuantConnect.Securities.Future;
namespace QuantConnect.Tests.Common.Securities
{
[TestFixture]
public class SecurityPortfolioModelTests
{
[Test]
public void LastTradeProfit_FlatToLong()
{
    // Open a long position from flat: buy 100 @ 100 with a 1 USD fee.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    var security = InitializeTest(time, out portfolio);

    var fillPrice = 100m;
    var fillQuantity = 100;
    var fee = new OrderFee(new CashAmount(1m, Currencies.USD));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, security.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // opening from flat closes nothing, so no trade profit is realized
    Assert.AreEqual(0, security.Holdings.LastTradeProfit);
}
[Test]
public void LastTradeProfit_FlatToShort()
{
    // Open a short position from flat: sell 100 @ 100 with a 1 USD fee.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    var security = InitializeTest(time, out portfolio);

    var fillPrice = 100m;
    var fillQuantity = -100;
    var fee = new OrderFee(new CashAmount(1m, Currencies.USD));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, security.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // opening from flat closes nothing, so no trade profit is realized
    Assert.AreEqual(0, security.Holdings.LastTradeProfit);
}
[Test]
public void LastTradeProfit_LongToLonger()
{
    var reference = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    var security = InitializeTest(reference, out portfolio);
    // start long 100 @ 50, then buy 100 more @ 100
    security.Holdings.SetHoldings(50m, 100);
    var fillPrice = 100m;
    var fillQuantity = 100;
    var orderFee = new OrderFee(new CashAmount(1m, Currencies.USD));
    var orderDirection = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    var fill = new OrderEvent(1, security.Symbol, reference, OrderStatus.Filled, orderDirection, fillPrice, fillQuantity, orderFee);
    portfolio.ProcessFill(fill);
    // zero because the fill only adds to the existing long position; LastTradeProfit
    // is only realized when part of a position is closed
    Assert.AreEqual(0, security.Holdings.LastTradeProfit);
}
[Test]
public void LastTradeProfit_LongToFlat()
{
    var reference = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    var security = InitializeTest(reference, out portfolio);
    // start long 100 @ 50, then sell the entire position @ 100
    security.Holdings.SetHoldings(50m, 100);
    var fillPrice = 100m;
    var fillQuantity = -security.Holdings.Quantity;
    var orderFee = new OrderFee(new CashAmount(1m, Currencies.USD));
    var orderDirection = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    var fill = new OrderEvent(1, security.Symbol, reference, OrderStatus.Filled, orderDirection, fillPrice, fillQuantity, orderFee);
    portfolio.ProcessFill(fill);
    // bought @50 and sold @100 on 100 shares = (-50*100)+(100*100) = 5000
    // (would be 4999 if the 1 unit fee were deducted, but the current
    // implementation doesn't back out fees)
    Assert.AreEqual(5000m, security.Holdings.LastTradeProfit);
}
[Test]
public void LastTradeProfit_LongToShort()
{
    var reference = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    var security = InitializeTest(reference, out portfolio);
    // start long 100 @ 50, then flip to short 100 by selling 200 @ 100
    security.Holdings.SetHoldings(50m, 100);
    var fillPrice = 100m;
    var fillQuantity = -2*security.Holdings.Quantity;
    var orderFee = new OrderFee(new CashAmount(1m, Currencies.USD));
    var orderDirection = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    var fill = new OrderEvent(1, security.Symbol, reference, OrderStatus.Filled, orderDirection, fillPrice, fillQuantity, orderFee);
    portfolio.ProcessFill(fill);
    // we can only take 'profit' on the closing part of the position, so we closed 100
    // shares and opened a new position for the second 100; only the first 100 go into
    // the calculation: bought @50 and sold @100 = (-50*100)+(100*100) = 5000
    // current implementation doesn't back out fees.
    Assert.AreEqual(5000m, security.Holdings.LastTradeProfit);
}
[Test]
public void LastTradeProfit_ShortToShorter()
{
    // Add to an existing short: short 100 @ 50, then sell 100 more @ 100.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    var security = InitializeTest(time, out portfolio);
    security.Holdings.SetHoldings(50m, -100);

    var fillPrice = 100m;
    var fillQuantity = -100;
    var fee = new OrderFee(new CashAmount(1m, Currencies.USD));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, security.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // increasing the short closes nothing, so no trade profit is realized
    Assert.AreEqual(0, security.Holdings.LastTradeProfit);
}
[TestCase("USD")]
[TestCase("ARG")]
public void LastTradeProfit_ShortToFlat(string accountCurrency)
{
    // Close an existing short: short 100 @ 50, buy back the full position @ 100.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    var security = InitializeTest(time, out portfolio, accountCurrency);
    security.Holdings.SetHoldings(50m, -100);

    var fillPrice = 100m;
    var fillQuantity = -security.Holdings.Quantity;
    var fee = new OrderFee(new CashAmount(1m, Currencies.USD));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, security.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // sold @50 and bought back @100 on 100 shares = (50*100)+(-100*100) = -5000
    // (the current implementation doesn't back out the fee)
    Assert.AreEqual(-5000m, security.Holdings.LastTradeProfit);
}
// FIX: this test was missing its [Test] attribute, so NUnit never discovered or ran it.
[Test]
public void LastTradeProfit_ShortToLong()
{
    var reference = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    var security = InitializeTest(reference, out portfolio);
    // start short 100 @ 50
    security.Holdings.SetHoldings(50m, -100);
    var fillPrice = 100m;
    var fillQuantity = -2*security.Holdings.Quantity; // flip from -100 to +100
    var orderFee = new OrderFee(new CashAmount(1m, Currencies.USD));
    var orderDirection = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    var fill = new OrderEvent(1, security.Symbol, reference, OrderStatus.Filled, orderDirection, fillPrice, fillQuantity, orderFee);
    portfolio.ProcessFill(fill);
    // we can only take 'profit' on the closing part of the position, so we closed 100
    // shares and opened a new position for the second 100; only the first 100 go into
    // the calculation: sold @50 and bought @100 = (50*100)+(-100*100) = -5000
    // current implementation doesn't back out fees.
    Assert.AreEqual(-5000m, security.Holdings.LastTradeProfit);
}
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyEquity_LongToFlat(string accountCurrency)
{
    // Equity denominated in EUR (conversion rate 10 into the account currency):
    // long 100 @ 50, then sell the whole position @ 100 with a 1 EUR fee.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(time, out portfolio, accountCurrency);
    var eurCash = new Cash("EUR", 0, 10);
    portfolio.CashBook.Add("EUR", eurCash);
    var equity = new Security(
        Symbols.AAPL,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        eurCash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    equity.Holdings.SetHoldings(50m, 100);
    portfolio.Securities.Add(equity);

    var fillPrice = 100m;
    var fillQuantity = -equity.Holdings.Quantity;
    var fee = new OrderFee(new CashAmount(1m, "EUR"));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, equity.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // fees convert into the account currency: 1 EUR * 10 rate
    Assert.AreEqual(10, equity.Holdings.TotalFees);
    // bought @50, sold @100: ((-50*100)+(100*100)) * 10 rate = 50000 (fees not backed out)
    Assert.AreEqual(50000m, equity.Holdings.LastTradeProfit);
    // EUR cash: +10000 from the sale, minus the 1 EUR fee
    Assert.AreEqual(9999, portfolio.CashBook["EUR"].Amount);
    Assert.AreEqual(0m, equity.Holdings.AveragePrice);
    Assert.AreEqual(0m, equity.Holdings.AbsoluteQuantity);
    Assert.AreEqual(0m, equity.Holdings.AbsoluteHoldingsCost);
    Assert.AreEqual(0m, equity.Holdings.AbsoluteHoldingsValue);
    Assert.AreEqual(0m, equity.Holdings.TotalCloseProfit());
}
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyEquity_ShortToFlat(string accountCurrency)
{
    // Equity denominated in EUR (conversion rate 10 into the account currency):
    // short 100 @ 50, then buy back the whole position @ 100 with a 1 EUR fee.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(time, out portfolio, accountCurrency);
    var eurCash = new Cash("EUR", 0, 10);
    portfolio.CashBook.Add("EUR", eurCash);
    var equity = new Security(
        Symbols.AAPL,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        eurCash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    equity.Holdings.SetHoldings(50m, -100);
    portfolio.Securities.Add(equity);

    var fillPrice = 100m;
    var fillQuantity = -equity.Holdings.Quantity;
    var fee = new OrderFee(new CashAmount(1m, "EUR"));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, equity.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // fees convert into the account currency: 1 EUR * 10 rate
    Assert.AreEqual(10, equity.Holdings.TotalFees);
    // sold @50, bought back @100: ((50*100)+(-100*100)) * 10 rate = -50000 (fees not backed out)
    Assert.AreEqual(-50000m, equity.Holdings.LastTradeProfit);
    // EUR cash: -10000 from the buy-back, minus the 1 EUR fee
    Assert.AreEqual(-10001, portfolio.CashBook["EUR"].Amount);
    Assert.AreEqual(0m, equity.Holdings.AveragePrice);
    Assert.AreEqual(0m, equity.Holdings.AbsoluteQuantity);
    Assert.AreEqual(0m, equity.Holdings.AbsoluteHoldingsCost);
    Assert.AreEqual(0m, equity.Holdings.AbsoluteHoldingsValue);
    Assert.AreEqual(0m, equity.Holdings.TotalCloseProfit());
}
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyEquity_FlatToShort(string accountCurrency)
{
    var reference = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(reference, out portfolio, accountCurrency);
    // EUR cash with a conversion rate of 10 into the account currency
    var cash = new Cash("EUR", 0, 10);
    portfolio.CashBook.Add("EUR", cash);
    var equity = new Security(
        Symbols.AAPL,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        cash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    portfolio.Securities.Add(equity);
    // open a short from flat: sell 100 @ 100 with a 1 EUR fee
    var fillPrice = 100m;
    var fillQuantity = -100;
    var orderFee = new OrderFee(new CashAmount(1m, "EUR"));
    var orderDirection = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    var fill = new OrderEvent(1, equity.Symbol, reference, OrderStatus.Filled, orderDirection, fillPrice, fillQuantity, orderFee);
    portfolio.ProcessFill(fill);
    // current implementation doesn't back out fees.
    Assert.AreEqual(10, equity.Holdings.TotalFees); // 1 * 10 (conversion rate to account currency)
    Assert.AreEqual(-10, equity.Holdings.NetProfit); // fees
    Assert.AreEqual(0m, equity.Holdings.LastTradeProfit);
    // sold @100 = (100*100) = 10000 - 1 fee
    Assert.AreEqual(9999, portfolio.CashBook["EUR"].Amount);
    Assert.AreEqual(100m, equity.Holdings.AveragePrice);
    Assert.AreEqual(100m, equity.Holdings.AbsoluteQuantity);
    equity.SetMarketPrice(new Tick(DateTime.UtcNow, equity.Symbol, 90, 90));
    // -100 quantity * 100 average price * 10 rate = 100000m
    Assert.AreEqual(100000m, equity.Holdings.AbsoluteHoldingsCost);
    // -100 quantity * 90 current price * 10 rate = 90000m
    Assert.AreEqual(90000m, equity.Holdings.AbsoluteHoldingsValue);
    // (90 current price - 100 average price) * -100 quantity * 10 rate - 1 fee = 9999m
    Assert.AreEqual(9999m, equity.Holdings.TotalCloseProfit());
}
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyEquity_FlatToLong(string accountCurrency)
{
    // Equity denominated in EUR (conversion rate 10 into the account currency):
    // open a long from flat by buying 100 @ 100 with a 1 EUR fee.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(time, out portfolio, accountCurrency);
    var eurCash = new Cash("EUR", 0, 10);
    portfolio.CashBook.Add("EUR", eurCash);
    var equity = new Security(
        Symbols.AAPL,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        eurCash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    portfolio.Securities.Add(equity);

    var fillPrice = 100m;
    var fillQuantity = 100;
    var fee = new OrderFee(new CashAmount(1m, "EUR"));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, equity.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // fees convert into the account currency: 1 EUR * 10 rate
    Assert.AreEqual(10, equity.Holdings.TotalFees);
    Assert.AreEqual(-10, equity.Holdings.NetProfit); // only fees so far
    Assert.AreEqual(0m, equity.Holdings.LastTradeProfit); // nothing was closed
    // EUR cash: -(100*100) = -10000, minus the 1 EUR fee
    Assert.AreEqual(-10001, portfolio.CashBook["EUR"].Amount);
    Assert.AreEqual(100m, equity.Holdings.AveragePrice);
    Assert.AreEqual(100m, equity.Holdings.AbsoluteQuantity);
    equity.SetMarketPrice(new Tick(DateTime.UtcNow, equity.Symbol, 110, 110));
    // 100 quantity * 100 average price * 10 rate = 100000m
    Assert.AreEqual(100000m, equity.Holdings.AbsoluteHoldingsCost);
    // 100 quantity * 110 current price * 10 rate = 110000m
    Assert.AreEqual(110000m, equity.Holdings.AbsoluteHoldingsValue);
    // (110 current price - 100 average price) * 100 quantity * 10 rate - 1 fee = 9999m
    Assert.AreEqual(9999m, equity.Holdings.TotalCloseProfit());
}
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyFuture_LongToFlat(string accountCurrency)
{
    // Future denominated in EUR (conversion rate 10 into the account currency):
    // long 100 @ 50, then sell the whole position @ 100 with a 1 EUR fee.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(time, out portfolio, accountCurrency);
    var eurCash = new Cash("EUR", 0, 10);
    portfolio.CashBook.Add("EUR", eurCash);
    var future = new Future(
        Symbols.Fut_SPY_Feb19_2016,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        eurCash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    future.Holdings.SetHoldings(50m, 100);
    portfolio.Securities.Add(future);

    var fillPrice = 100m;
    var fillQuantity = -future.Holdings.Quantity;
    var fee = new OrderFee(new CashAmount(1m, "EUR"));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, future.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // fees convert into the account currency: 1 EUR * 10 rate
    Assert.AreEqual(10, future.Holdings.TotalFees);
    // bought @50, sold @100: ((-50*100)+(100*100)) * 10 rate = 50000 (fees not backed out)
    Assert.AreEqual(50000m, future.Holdings.LastTradeProfit);
    Assert.AreEqual(49990m, future.Holdings.NetProfit); // LastTradeProfit - fees
    // EUR cash: (-50*100)+(100*100) = 5000, minus the 1 EUR fee
    Assert.AreEqual(4999, portfolio.CashBook["EUR"].Amount);
    Assert.AreEqual(0m, future.Holdings.AveragePrice);
    Assert.AreEqual(0m, future.Holdings.AbsoluteQuantity);
    Assert.AreEqual(0m, future.Holdings.AbsoluteHoldingsCost);
    Assert.AreEqual(0m, future.Holdings.AbsoluteHoldingsValue);
    Assert.AreEqual(0m, future.Holdings.TotalCloseProfit());
}
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyFuture_ShortToFlat(string accountCurrency)
{
    // Future denominated in EUR (conversion rate 10 into the account currency):
    // short 100 @ 50, then buy back the whole position @ 100 with a 1 EUR fee.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(time, out portfolio, accountCurrency);
    var eurCash = new Cash("EUR", 0, 10);
    portfolio.CashBook.Add("EUR", eurCash);
    var future = new Future(
        Symbols.Fut_SPY_Feb19_2016,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        eurCash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    future.Holdings.SetHoldings(50m, -100);
    portfolio.Securities.Add(future);

    var fillPrice = 100m;
    var fillQuantity = -future.Holdings.Quantity;
    var fee = new OrderFee(new CashAmount(1m, "EUR"));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, future.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // fees convert into the account currency: 1 EUR * 10 rate
    Assert.AreEqual(10, future.Holdings.TotalFees);
    // sold @50, bought back @100: ((50*100)+(-100*100)) * 10 rate = -50000 (fees not backed out)
    Assert.AreEqual(-50000m, future.Holdings.LastTradeProfit);
    Assert.AreEqual(-50010m, future.Holdings.NetProfit); // LastTradeProfit - fees
    // EUR cash: (50*100)+(-100*100) = -5000, minus the 1 EUR fee
    Assert.AreEqual(-5001, portfolio.CashBook["EUR"].Amount);
    Assert.AreEqual(0m, future.Holdings.AveragePrice);
    Assert.AreEqual(0m, future.Holdings.AbsoluteQuantity);
    Assert.AreEqual(0m, future.Holdings.AbsoluteHoldingsCost);
    Assert.AreEqual(0m, future.Holdings.AbsoluteHoldingsValue);
    Assert.AreEqual(0m, future.Holdings.TotalCloseProfit());
}
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyFuture_FlatToLong(string accountCurrency)
{
    // Future denominated in EUR (starting balance of 1 EUR, conversion rate 10):
    // open a long from flat by buying 100 @ 100 with a 1 EUR fee.
    var time = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(time, out portfolio, accountCurrency);
    var eurCash = new Cash("EUR", 1, 10);
    portfolio.CashBook.Add("EUR", eurCash);
    var future = new Future(
        Symbols.Fut_SPY_Feb19_2016,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        eurCash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    portfolio.Securities.Add(future);

    var fillPrice = 100m;
    var fillQuantity = 100;
    var fee = new OrderFee(new CashAmount(1m, "EUR"));
    var direction = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    portfolio.ProcessFill(new OrderEvent(1, future.Symbol, time, OrderStatus.Filled, direction, fillPrice, fillQuantity, fee));

    // fees convert into the account currency: 1 EUR * 10 rate
    Assert.AreEqual(10, future.Holdings.TotalFees);
    Assert.AreEqual(0m, future.Holdings.LastTradeProfit); // nothing was closed
    Assert.AreEqual(100m, future.Holdings.Quantity);
    Assert.AreEqual(100m, future.Holdings.AveragePrice);
    // started with 1 EUR, minus the 1 EUR fee
    Assert.AreEqual(0, portfolio.CashBook["EUR"].Amount);
    // 100 quantity * 100 average price * 10 rate = 100000m
    Assert.AreEqual(100000m, future.Holdings.AbsoluteHoldingsCost);
    future.SetMarketPrice(new Tick(DateTime.UtcNow, future.Symbol, 110, 110));
    // 100 quantity * 110 current price * 10 rate = 110000m
    Assert.AreEqual(110000m, future.Holdings.AbsoluteHoldingsValue);
    // (110 current price - 100 average price) * 100 quantity * 10 rate - 1.85 fee * 100 quantity = 9815m
    Assert.AreEqual(9815m, future.Holdings.TotalCloseProfit());
}
// Opens a short futures position in EUR (10:1 conversion to the account
// currency) from flat, then verifies fees, holdings cost/value and the
// (negative) close profit after the price moves against the short.
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyFuture_FlatToShort(string accountCurrency)
{
    var reference = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(reference, out portfolio, accountCurrency);
    // EUR starts with a balance of 1 and converts at 10:1 to the account currency.
    var cash = new Cash("EUR", 1, 10);
    portfolio.CashBook.Add("EUR", cash);
    var future = new Future(
        Symbols.Fut_SPY_Feb19_2016,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        cash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    portfolio.Securities.Add(future);
    var fillPrice = 100m;
    var fillQuantity = -100; // sell 100 contracts to open the short
    var orderFee = new OrderFee(new CashAmount(1m, "EUR"));
    var orderDirection = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    var fill = new OrderEvent(1, future.Symbol, reference, OrderStatus.Filled, orderDirection, fillPrice, fillQuantity, orderFee);
    portfolio.ProcessFill(fill);
    // current implementation doesn't back out fees.
    Assert.AreEqual(10, future.Holdings.TotalFees); // 1 EUR * 10 (conversion rate to account currency)
    Assert.AreEqual(0m, future.Holdings.LastTradeProfit);
    Assert.AreEqual(-100m, future.Holdings.Quantity);
    Assert.AreEqual(100m, future.Holdings.AveragePrice);
    // had 1 EUR - 1 fee
    Assert.AreEqual(0, portfolio.CashBook["EUR"].Amount);
    // 100 quantity * 100 average price * 10 rate = 100000m
    Assert.AreEqual(100000m, future.Holdings.AbsoluteHoldingsCost);
    future.SetMarketPrice(new Tick(DateTime.UtcNow, future.Symbol, 110, 110));
    // 100 quantity * 110 current price * 10 rate = 110000m
    Assert.AreEqual(110000m, future.Holdings.AbsoluteHoldingsValue);
    // (110 current price - 100 average price) * -100 quantity * 10 rate - 1.85 fee * 100 quantity = -10185m
    // (the price rose, so closing the short here loses money)
    Assert.AreEqual(-10185, future.Holdings.TotalCloseProfit());
}
// Sells an existing long BTCEUR position down to flat and verifies fees,
// realized trade profit and the resulting EUR cash balance.
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyCrypto_LongToFlat(string accountCurrency)
{
    var referenceTime = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(referenceTime, out portfolio, accountCurrency);

    // EUR converts at 10:1 and BTC at 1000:1 to the account currency.
    var eurCash = new Cash("EUR", 0, 10);
    portfolio.CashBook.Add("EUR", eurCash);
    portfolio.CashBook.Add("BTC", 0, 1000);
    var crypto = new Crypto(
        Symbols.BTCEUR,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        eurCash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    // Start long 100 units at an average price of 50 EUR.
    crypto.Holdings.SetHoldings(50m, 100);
    portfolio.Securities.Add(crypto);

    // Sell the entire position at 100 EUR with a 1 EUR fee.
    var price = 100m;
    var quantity = -crypto.Holdings.Quantity;
    var fee = new OrderFee(new CashAmount(1m, "EUR"));
    var direction = quantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    var orderEvent = new OrderEvent(1, crypto.Symbol, referenceTime, OrderStatus.Filled, direction, price, quantity, fee);
    portfolio.ProcessFill(orderEvent);

    // Current implementation doesn't back out fees.
    Assert.AreEqual(10, crypto.Holdings.TotalFees); // 1 EUR * 10 conversion rate
    // Bought @50 and sold @100 = (-50*100)+(100*100) = 5000 * 10 conversion rate
    Assert.AreEqual(50000m, crypto.Holdings.LastTradeProfit);
    // Sold @100 * 100 = 10000 EUR - 1 EUR fee
    Assert.AreEqual(9999, portfolio.CashBook["EUR"].Amount);
    Assert.AreEqual(0m, crypto.Holdings.AveragePrice);
    Assert.AreEqual(0m, crypto.Holdings.AbsoluteQuantity);
}
// Opens a long BTCEUR position from flat: the EUR balance is debited for the
// purchase cost plus fee, and the BTC balance receives the bought quantity.
[TestCase("USD")]
[TestCase("ARG")]
public void NonAccountCurrencyCrypto_FlatToLong(string accountCurrency)
{
    var reference = new DateTime(2016, 02, 16, 11, 53, 30);
    SecurityPortfolioManager portfolio;
    InitializeTest(reference, out portfolio, accountCurrency);
    // EUR converts at 10:1 and BTC at 1000:1 to the account currency.
    var cash = new Cash("EUR", 0, 10);
    portfolio.CashBook.Add("EUR", cash);
    portfolio.CashBook.Add("BTC", 0, 1000);
    var crypto = new Crypto(
        Symbols.BTCEUR,
        SecurityExchangeHours.AlwaysOpen(DateTimeZone.Utc),
        cash,
        SymbolProperties.GetDefault("EUR"),
        portfolio.CashBook
    );
    portfolio.Securities.Add(crypto);
    var fillPrice = 100m;
    var fillQuantity = 100;
    var orderFee = new OrderFee(new CashAmount(1m, "EUR"));
    var orderDirection = fillQuantity > 0 ? OrderDirection.Buy : OrderDirection.Sell;
    var fill = new OrderEvent(1, crypto.Symbol, reference, OrderStatus.Filled, orderDirection, fillPrice, fillQuantity, orderFee);
    portfolio.ProcessFill(fill);
    // current implementation doesn't back out fees.
    Assert.AreEqual(10, crypto.Holdings.TotalFees); // 1 EUR * 10 (conversion rate to account currency)
    Assert.AreEqual(0m, crypto.Holdings.LastTradeProfit);
    Assert.AreEqual(100m, crypto.Holdings.Quantity);
    Assert.AreEqual(100m, crypto.Holdings.AveragePrice);
    // paid 100 price * 100 quantity = 10000 EUR plus the 1 EUR fee, starting from 0 EUR
    Assert.AreEqual(-10001, portfolio.CashBook["EUR"].Amount);
    Assert.AreEqual(100, portfolio.CashBook["BTC"].Amount);
}
/// <summary>
/// Builds a portfolio seeded with one SPY security priced at 100, funds the
/// requested account currency with 100k, and zeroes the quote currency.
/// </summary>
private Security InitializeTest(DateTime reference,
    out SecurityPortfolioManager portfolio,
    string accountCurrency = "USD")
{
    var spy = new Security(
        SecurityExchangeHours.AlwaysOpen(TimeZones.NewYork),
        CreateTradeBarConfig(),
        new Cash(Currencies.USD, 0, 1m),
        SymbolProperties.GetDefault(Currencies.USD),
        ErrorCurrencyConverter.Instance
    );
    spy.SetMarketPrice(new Tick { Value = 100 });

    var securities = new SecurityManager(new TimeKeeper(reference));
    securities.Add(spy);
    portfolio = new SecurityPortfolioManager(securities, new SecurityTransactionManager(null, securities));
    portfolio.SetCash(accountCurrency, 100 * 1000m, 1m);

    // Sanity checks: flat holdings and the seeded cash is all present.
    Assert.AreEqual(0, spy.Holdings.Quantity);
    Assert.AreEqual(100 * 1000m, portfolio.CashBook[accountCurrency].Amount);

    // The quote currency starts with a zero balance.
    portfolio.SetCash(spy.QuoteCurrency.Symbol, 0, 1m);
    return spy;
}
// Minute-resolution SPY TradeBar subscription in the New York time zone,
// shared by InitializeTest.
private static SubscriptionDataConfig CreateTradeBarConfig()
{
    var config = new SubscriptionDataConfig(
        typeof(TradeBar),
        Symbols.SPY,
        Resolution.Minute,
        TimeZones.NewYork,
        TimeZones.NewYork,
        true,
        true,
        false);
    return config;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Data.Entity.Core.Objects;
using System.Data.Entity.Infrastructure;
using System.Data.SqlClient;
using System.Linq;
using KBVault.Dal.Entities;
namespace KBVault.Dal
{
/// <summary>
/// Entity Framework context for the KbVault database. <see cref="SaveChanges"/>
/// is overridden to write an audit row into the Activities table for every
/// tracked add/update/delete (except pure view-count bumps and KbUser changes).
/// </summary>
public partial class KbVaultContext : DbContext
{
    public KbVaultContext()
        : base("name=KbVaultContext")
    {
    }

    public virtual DbSet<Activities> Activities { get; set; }
    public virtual DbSet<Article> Articles { get; set; }
    public virtual DbSet<ArticleTag> ArticleTags { get; set; }
    public virtual DbSet<Attachment> Attachments { get; set; }
    public virtual DbSet<Category> Categories { get; set; }
    public virtual DbSet<Entities.KbUser> KbUsers { get; set; }
    public virtual DbSet<Settings> Settings { get; set; }
    public virtual DbSet<Tag> Tags { get; set; }

    /// <summary>
    /// Persists pending changes after inserting one audit row per tracked
    /// entity change. Relationship entries, single-property "Views" counter
    /// updates and any change to a <see cref="KbUser"/> are not audited.
    /// </summary>
    /// <returns>The result of the base <see cref="DbContext.SaveChanges"/>.</returns>
    public override int SaveChanges()
    {
        ChangeTracker.DetectChanges(); // Important! State entries below must reflect current values.
        ObjectContext ctx = ((IObjectContextAdapter)this).ObjectContext;
        List<ObjectStateEntry> objectStateEntryList =
            ctx.ObjectStateManager.GetObjectStateEntries(EntityState.Added
                                                         | EntityState.Modified
                                                         | EntityState.Deleted)
               .ToList();
        foreach (ObjectStateEntry entry in objectStateEntryList)
        {
            // Fix: test for relationship entries BEFORE calling any
            // property-level API — GetModifiedProperties is not valid on
            // relationship entries, but the original code probed it first.
            if (entry.IsRelationship)
            {
                continue;
            }

            // Materialize once instead of enumerating the modified-property
            // set twice (the original used FirstOrDefault() plus Count()).
            var modifiedProperties = entry.GetModifiedProperties().ToList();
            bool isUserViewAction = modifiedProperties.Count == 1 && modifiedProperties[0] == "Views";

            // NOTE(review): this skips ALL KbUser changes (adds and deletes
            // included), not just profile edits — confirm that is intended.
            bool isProfileUpdateAction = entry.Entity is KbUser;
            if (isUserViewAction || isProfileUpdateAction)
            {
                continue;
            }

            var operationDescription = string.Empty;
            var act = new Activities
            {
                ActivityDate = DateTime.Now
            };
            switch (entry.State)
            {
                case EntityState.Added:
                    operationDescription = "Added ";
                    break;
                case EntityState.Deleted:
                    operationDescription = "Deleted ";
                    break;
                case EntityState.Modified:
                    operationDescription = "Modified ";
                    break;
                default:
                    break;
            }

            // Describe the affected entity and attribute the action to its author.
            if (entry.Entity is Article)
            {
                var a = (Article)entry.Entity;
                operationDescription += " Article ";
                act.Information = "Title: " + a.Title + " Id:" + a.Id.ToString();
                act.UserId = a.Author;
            }
            else if (entry.Entity is Category)
            {
                var c = (Category)entry.Entity;
                operationDescription += " Category ";
                act.Information = "Name: " + c.Name + " Id:" + c.Id.ToString();
                act.UserId = c.Author;
            }
            else if (entry.Entity is Tag)
            {
                var t = (Tag)entry.Entity;
                operationDescription += " Tag ";
                act.Information = "Name: " + t.Name + " Id:" + t.Id.ToString();
                act.UserId = t.Author;
            }
            else if (entry.Entity is ArticleTag)
            {
                var at = (ArticleTag)entry.Entity;
                operationDescription += " ArticleTag ";
                act.Information = "ArticleId: " + at.ArticleId + " TagId:" + at.TagId.ToString();
                act.UserId = at.Author;
            }
            else if (entry.Entity is Attachment)
            {
                var a = (Attachment)entry.Entity;
                operationDescription += " Attachment ";
                act.Information = "ArticleId: " + a.ArticleId + " Id:" + a.Id.ToString();
                act.UserId = a.Author;
            }
            else if (entry.Entity is Settings)
            {
                var s = (Settings)entry.Entity;
                operationDescription += " Settings ";
                act.Information = "Settings updated";
                act.UserId = s.Author;
            }
            act.Operation = operationDescription;

            // Parameterized insert; the parameter names "1".."4" pair with the
            // @1..@4 placeholders in the SQL text.
            List<SqlParameter> procParams = new List<SqlParameter>
            {
                new SqlParameter("1", act.UserId),
                new SqlParameter("2", act.ActivityDate),
                new SqlParameter("3", act.Operation),
                new SqlParameter("4", act.Information)
            };
            this.Database.ExecuteSqlCommand(
                "Insert Into Activities(UserId,ActivityDate,Operation,Information) " +
                "Values(@1,@2,@3,@4 )",
                procParams.ToArray());
        }
        return base.SaveChanges();
    }

    /// <summary>
    /// Configures column types and entity relationships; cascade deletes are
    /// disabled everywhere except the self-referencing KbUser author link.
    /// </summary>
    protected override void OnModelCreating(DbModelBuilder modelBuilder)
    {
        modelBuilder.Entity<Article>()
            .Property(e => e.SefName)
            .IsUnicode(false);
        modelBuilder.Entity<Article>()
            .HasMany(e => e.ArticleTags)
            .WithRequired(e => e.Article)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Article>()
            .HasMany(e => e.Attachments)
            .WithRequired(e => e.Article)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Attachment>()
            .Property(e => e.MimeType)
            .IsUnicode(false);
        modelBuilder.Entity<Category>()
            .Property(e => e.SefName)
            .IsUnicode(false);
        modelBuilder.Entity<Category>()
            .HasMany(e => e.Articles)
            .WithRequired(e => e.Category)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Category>()
            .HasMany(e => e.ChildCategories)
            .WithOptional(e => e.ParentCategory)
            .HasForeignKey(e => e.Parent);
        modelBuilder.Entity<Entities.KbUser>()
            .HasMany(e => e.Activities)
            .WithRequired(e => e.KbUser)
            .HasForeignKey(e => e.UserId)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Entities.KbUser>()
            .HasMany(e => e.Article)
            .WithRequired(e => e.KbUser)
            .HasForeignKey(e => e.Author)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Entities.KbUser>()
            .HasMany(e => e.ArticleTag)
            .WithRequired(e => e.KbUser)
            .HasForeignKey(e => e.Author)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Entities.KbUser>()
            .HasMany(e => e.Attachment)
            .WithRequired(e => e.KbUser)
            .HasForeignKey(e => e.Author)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Entities.KbUser>()
            .HasMany(e => e.Category)
            .WithRequired(e => e.KbUser)
            .HasForeignKey(e => e.Author)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Entities.KbUser>()
            .HasMany(e => e.KbUser1)
            .WithRequired(e => e.KbUser2)
            .HasForeignKey(e => e.Author);
        modelBuilder.Entity<Entities.KbUser>()
            .HasMany(e => e.Settings)
            .WithRequired(e => e.KbUser)
            .HasForeignKey(e => e.Author)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Entities.KbUser>()
            .HasMany(e => e.Tag)
            .WithRequired(e => e.KbUser)
            .HasForeignKey(e => e.Author)
            .WillCascadeOnDelete(false);
        modelBuilder.Entity<Settings>()
            .Property(e => e.IndexFileExtensions)
            .IsUnicode(false);
        modelBuilder.Entity<Tag>()
            .HasMany(e => e.ArticleTag)
            .WithRequired(e => e.Tag)
            .WillCascadeOnDelete(false);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Provides a way for an app to not start an operation unless
** there's a reasonable chance there's enough memory
** available for the operation to succeed.
**
**
===========================================================*/
using System;
using System.IO;
using Microsoft.Win32;
using System.Runtime.InteropServices;
using System.Threading;
using System.Runtime.CompilerServices;
using System.Runtime.ConstrainedExecution;
using System.Security.Permissions;
using System.Runtime.Versioning;
using System.Diagnostics;
using System.Diagnostics.Contracts;
/*
This class allows an application to fail before starting certain
activities. The idea is to fail early instead of failing in the middle
of some long-running operation to increase the survivability of the
application and ensure you don't have to write tricky code to handle an
OOM anywhere in your app's code (which implies state corruption, meaning you
should unload the appdomain, if you have a transacted environment to ensure
rollback of individual transactions). This is an incomplete tool to attempt
hoisting all your OOM failures from anywhere in your worker methods to one
particular point where it is easier to handle an OOM failure, and you can
optionally choose to not start a workitem if it will likely fail. This does
not help the performance of your code directly (other than helping to avoid
AD unloads). The point is to avoid starting work if it is likely to fail.
The Enterprise Services team has used these memory gates effectively in the
unmanaged world for a decade.
In Whidbey, we will simply check to see if there is enough memory available
in the OS's page file & attempt to ensure there might be enough space free
within the process's address space (checking for address space fragmentation
as well). We will not commit or reserve any memory. To avoid race conditions with
other threads using MemoryFailPoints, we'll also keep track of a
process-wide amount of memory "reserved" via all currently-active
MemoryFailPoints. This has two problems:
1) This can account for memory twice. If a thread creates a
MemoryFailPoint for 100 MB then allocates 99 MB, we'll see 99 MB
less free memory and 100 MB less reserved memory. Yet, subtracting
off the 100 MB is necessary because the thread may not have started
allocating memory yet. Disposing of this class immediately after
front-loaded allocations have completed is a great idea.
2) This is still vulnerable to race conditions with other threads that don't use
MemoryFailPoints.
So this class is far from perfect. But it may be good enough to
meaningfully reduce the frequency of OutOfMemoryExceptions in managed apps.
In Orcas or later, we might allocate some memory from the OS and add it
to an allocation context for this thread. Obviously, at that point we need
some way of conveying when we release this block of memory. So, we
implemented IDisposable on this type in Whidbey and expect all users to call
this from within a using block to provide lexical scope for their memory
usage. The call to Dispose (implicit with the using block) will give us an
opportunity to release this memory, perhaps. We anticipate this will give
us the possibility of a more effective design in a future version.
In Orcas, we may also need to differentiate between allocations that would
go into the normal managed heap vs. the large object heap, or we should
consider checking for enough free space in both locations (with any
appropriate adjustments to ensure the memory is contiguous).
*/
namespace System.Runtime
{
/// <summary>
/// Memory gate: lets an application check, before starting a large operation,
/// whether enough page-file-backed memory and contiguous address space are
/// likely to be available for it to succeed. Construction throws
/// <see cref="InsufficientMemoryException"/> when the check fails; Dispose
/// releases the process-wide reservation bookkeeping. See the file header
/// comment for the overall design discussion.
/// </summary>
public sealed class MemoryFailPoint : CriticalFinalizerObject, IDisposable
{
    // Find the top section of user mode memory. Avoid the last 64K.
    // Windows reserves that block for the kernel, apparently, and doesn't
    // let us ask about that memory. But since we ask for memory in 1 MB
    // chunks, we don't have to special case this. Also, we need to
    // deal with 32 bit machines in 3 GB mode.
    // Using Win32's GetSystemInfo should handle all this for us.
    private static readonly ulong TopOfMemory;
    // Walking the address space is somewhat expensive, taking around half
    // a millisecond. Doing that per transaction limits us to a max of
    // ~2000 transactions/second. Instead, let's do this address space
    // walk once every 10 seconds, or when we will likely fail. This
    // amortization scheme can reduce the cost of a memory gate by about
    // a factor of 100.
    private static long hiddenLastKnownFreeAddressSpace = 0;
    private static long hiddenLastTimeCheckingAddressSpace = 0;
    private const int CheckThreshold = 10 * 1000; // 10 seconds
    // Volatile accessors over the cached free-address-space value above;
    // only the properties/Interlocked helper touch the "hidden" fields.
    private static long LastKnownFreeAddressSpace
    {
        get { return Volatile.Read(ref hiddenLastKnownFreeAddressSpace); }
        set { Volatile.Write(ref hiddenLastKnownFreeAddressSpace, value); }
    }
    private static long AddToLastKnownFreeAddressSpace(long addend)
    {
        return Interlocked.Add(ref hiddenLastKnownFreeAddressSpace, addend);
    }
    private static long LastTimeCheckingAddressSpace
    {
        get { return Volatile.Read(ref hiddenLastTimeCheckingAddressSpace); }
        set { Volatile.Write(ref hiddenLastTimeCheckingAddressSpace, value); }
    }
    // When allocating memory segment by segment, we've hit some cases
    // where there are only 22 MB of memory available on the machine,
    // we need 1 16 MB segment, and the OS does not succeed in giving us
    // that memory. Reasons for this could include:
    // 1) The GC does allocate memory when doing a collection.
    // 2) Another process on the machine could grab that memory.
    // 3) Some other part of the runtime might grab this memory.
    // If we build in a little padding, we can help protect
    // ourselves against some of these cases, and we want to err on the
    // conservative side with this class.
    private const int LowMemoryFudgeFactor = 16 << 20;
    // Round requested size to a 16MB multiple to have a better granularity
    // when checking for available memory.
    private const int MemoryCheckGranularity = 16;
    // Note: This may become dynamically tunable in the future.
    // Also note that we can have different segment sizes for the normal vs.
    // large object heap. We currently use the max of the two.
    private static readonly ulong GCSegmentSize;
    // For multi-threaded workers, we want to ensure that if two workers
    // use a MemoryFailPoint at the same time, and they both succeed, that
    // they don't trample over each other's memory. Keep a process-wide
    // count of "reserved" memory, and decrement this in Dispose and
    // in the critical finalizer. See
    // SharedStatics.MemoryFailPointReservedMemory
    private ulong _reservedMemory; // The size of this request (from user)
    private bool _mustSubtractReservation; // Did we add data to SharedStatics?
    static MemoryFailPoint()
    {
        GetMemorySettings(out GCSegmentSize, out TopOfMemory);
    }
    // We can remove this link demand in a future version - we will
    // have scenarios for this in partial trust in the future, but
    // we're doing this just to restrict this in case the code below
    // is somehow incorrect.
    /// <summary>
    /// Logically reserves <paramref name="sizeInMegabytes"/> MB, running up to
    /// three stages (GC, page-file growth probe, final re-check) before
    /// throwing <see cref="InsufficientMemoryException"/> on failure.
    /// </summary>
    /// <param name="sizeInMegabytes">Requested size; must be positive.</param>
    public MemoryFailPoint(int sizeInMegabytes)
    {
        // NOTE(review): the resource name says "NeedNonNegNum" but the guard
        // requires a strictly positive value — confirm the message matches.
        if (sizeInMegabytes <= 0)
            throw new ArgumentOutOfRangeException(nameof(sizeInMegabytes), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));
        Contract.EndContractBlock();
#if !FEATURE_PAL // Remove this when CheckForAvailableMemory is able to provide legitimate estimates
        ulong size = ((ulong)sizeInMegabytes) << 20;
        _reservedMemory = size;
        // Check to see that we both have enough memory on the system
        // and that we have enough room within the user section of the
        // process's address space. Also, we need to use the GC segment
        // size, not the amount of memory the user wants to allocate.
        // Consider correcting this to reflect free memory within the GC
        // heap, and to check both the normal & large object heaps.
        ulong segmentSize = (ulong) (Math.Ceiling((double)size / GCSegmentSize) * GCSegmentSize);
        if (segmentSize >= TopOfMemory)
            throw new InsufficientMemoryException(Environment.GetResourceString("InsufficientMemory_MemFailPoint_TooBig"));
        ulong requestedSizeRounded = (ulong)(Math.Ceiling((double)sizeInMegabytes / MemoryCheckGranularity) * MemoryCheckGranularity);
        //re-convert into bytes
        requestedSizeRounded <<= 20;
        ulong availPageFile = 0; // available VM (physical + page file)
        ulong totalAddressSpaceFree = 0; // non-contiguous free address space
        // Check for available memory, with 2 attempts at getting more
        // memory.
        // Stage 0: If we don't have enough, trigger a GC.
        // Stage 1: If we don't have enough, try growing the swap file.
        // Stage 2: Update memory state, then fail or leave loop.
        //
        // (In the future, we could consider adding another stage after
        // Stage 0 to run finalizers. However, before doing that make sure
        // that we could abort this constructor when we call
        // GC.WaitForPendingFinalizers, noting that this method uses a CER
        // so it can't be aborted, and we have a critical finalizer. It
        // would probably work, but do some thinking first.)
        for(int stage = 0; stage < 3; stage++) {
            CheckForAvailableMemory(out availPageFile, out totalAddressSpaceFree);
            // If we have enough room, then skip some stages.
            // Note that multiple threads can still lead to a race condition for our free chunk
            // of address space, which can't be easily solved.
            ulong reserved = SharedStatics.MemoryFailPointReservedMemory;
            ulong segPlusReserved = segmentSize + reserved;
            // Overflow check: the sum wrapped around if it is below either addend.
            bool overflow = segPlusReserved < segmentSize || segPlusReserved < reserved;
            bool needPageFile = availPageFile < (requestedSizeRounded + reserved + LowMemoryFudgeFactor) || overflow;
            bool needAddressSpace = totalAddressSpaceFree < segPlusReserved || overflow;
            // Ensure our cached amount of free address space is not stale.
            long now = Environment.TickCount; // Handle wraparound.
            if ((now > LastTimeCheckingAddressSpace + CheckThreshold || now < LastTimeCheckingAddressSpace) ||
                LastKnownFreeAddressSpace < (long) segmentSize) {
                CheckForFreeAddressSpace(segmentSize, false);
            }
            bool needContiguousVASpace = (ulong) LastKnownFreeAddressSpace < segmentSize;
            BCLDebug.Trace("MEMORYFAILPOINT", "MemoryFailPoint: Checking for {0} MB, for allocation size of {1} MB, stage {9}. Need page file? {2} Need Address Space? {3} Need Contiguous address space? {4} Avail page file: {5} MB Total free VA space: {6} MB Contiguous free address space (found): {7} MB Space reserved via process's MemoryFailPoints: {8} MB",
                segmentSize >> 20, sizeInMegabytes, needPageFile,
                needAddressSpace, needContiguousVASpace,
                availPageFile >> 20, totalAddressSpaceFree >> 20,
                LastKnownFreeAddressSpace >> 20, reserved, stage);
            if (!needPageFile && !needAddressSpace && !needContiguousVASpace)
                break;
            switch(stage) {
                case 0:
                    // The GC will release empty segments to the OS. This will
                    // relieve us from having to guess whether there's
                    // enough memory in either GC heap, and whether
                    // internal fragmentation will prevent those
                    // allocations from succeeding.
                    GC.Collect();
                    continue;
                case 1:
                    // Do this step if and only if the page file is too small.
                    if (!needPageFile)
                        continue;
                    // Attempt to grow the OS's page file. Note that we ignore
                    // any allocation routines from the host intentionally.
                    RuntimeHelpers.PrepareConstrainedRegions();
                    try {
                    }
                    finally {
                        // Commit-then-free a scratch block inside a CER so the
                        // OS grows the page file without us keeping the memory.
                        // This shouldn't overflow due to the if clauses above.
                        UIntPtr numBytes = new UIntPtr(segmentSize);
                        unsafe {
                            void * pMemory = Win32Native.VirtualAlloc(null, numBytes, Win32Native.MEM_COMMIT, Win32Native.PAGE_READWRITE);
                            if (pMemory != null) {
                                bool r = Win32Native.VirtualFree(pMemory, UIntPtr.Zero, Win32Native.MEM_RELEASE);
                                if (!r)
                                    __Error.WinIOError();
                            }
                        }
                    }
                    continue;
                case 2:
                    // The call to CheckForAvailableMemory above updated our
                    // state.
                    if (needPageFile || needAddressSpace) {
                        InsufficientMemoryException e = new InsufficientMemoryException(Environment.GetResourceString("InsufficientMemory_MemFailPoint"));
#if _DEBUG
                        e.Data["MemFailPointState"] = new MemoryFailPointState(sizeInMegabytes, segmentSize,
                            needPageFile, needAddressSpace, needContiguousVASpace,
                            availPageFile >> 20, totalAddressSpaceFree >> 20,
                            LastKnownFreeAddressSpace >> 20, reserved);
#endif
                        throw e;
                    }
                    if (needContiguousVASpace) {
                        InsufficientMemoryException e = new InsufficientMemoryException(Environment.GetResourceString("InsufficientMemory_MemFailPoint_VAFrag"));
#if _DEBUG
                        e.Data["MemFailPointState"] = new MemoryFailPointState(sizeInMegabytes, segmentSize,
                            needPageFile, needAddressSpace, needContiguousVASpace,
                            availPageFile >> 20, totalAddressSpaceFree >> 20,
                            LastKnownFreeAddressSpace >> 20, reserved);
#endif
                        throw e;
                    }
                    break;
                default:
                    Debug.Assert(false, "Fell through switch statement!");
                    break;
            }
        }
        // Success - we have enough room the last time we checked.
        // Now update our shared state in a somewhat atomic fashion
        // and handle a simple race condition with other MemoryFailPoint instances.
        AddToLastKnownFreeAddressSpace(-((long) size));
        if (LastKnownFreeAddressSpace < 0)
            CheckForFreeAddressSpace(segmentSize, true);
        RuntimeHelpers.PrepareConstrainedRegions();
        try {
        }
        finally {
            // Record the reservation inside a CER so a thread abort cannot
            // leave _mustSubtractReservation inconsistent with SharedStatics.
            SharedStatics.AddMemoryFailPointReservation((long) size);
            _mustSubtractReservation = true;
        }
#endif
    }
    // Queries the OS for available page-file-backed memory and total
    // (non-contiguous) free virtual address space for this process.
    private static void CheckForAvailableMemory(out ulong availPageFile, out ulong totalAddressSpaceFree)
    {
        bool r;
        Win32Native.MEMORYSTATUSEX memory = new Win32Native.MEMORYSTATUSEX();
        r = Win32Native.GlobalMemoryStatusEx(ref memory);
        if (!r)
            __Error.WinIOError();
        availPageFile = memory.availPageFile;
        totalAddressSpaceFree = memory.availVirtual;
        //Console.WriteLine("Memory gate: Mem load: {0}% Available memory (physical + page file): {1} MB Total free address space: {2} MB GC Heap: {3} MB", memory.memoryLoad, memory.availPageFile >> 20, memory.availVirtual >> 20, GC.GetTotalMemory(true) >> 20);
    }
    // Based on the shouldThrow parameter, this will throw an exception, or
    // returns whether there is enough space. In all cases, we update
    // our last known free address space, hopefully avoiding needing to
    // probe again.
    private static unsafe bool CheckForFreeAddressSpace(ulong size, bool shouldThrow)
    {
        // Start walking the address space at 0. VirtualAlloc may wrap
        // around the address space. We don't need to find the exact
        // pages that VirtualAlloc would return - we just need to
        // know whether VirtualAlloc could succeed.
        ulong freeSpaceAfterGCHeap = MemFreeAfterAddress(null, size);
        BCLDebug.Trace("MEMORYFAILPOINT", "MemoryFailPoint: Checked for free VA space. Found enough? {0} Asked for: {1} Found: {2}", (freeSpaceAfterGCHeap >= size), size, freeSpaceAfterGCHeap);
        // We may set these without taking a lock - I don't believe
        // this will hurt, as long as we never increment this number in
        // the Dispose method. If we do an extra bit of checking every
        // once in a while, but we avoid taking a lock, we may win.
        LastKnownFreeAddressSpace = (long) freeSpaceAfterGCHeap;
        LastTimeCheckingAddressSpace = Environment.TickCount;
        if (freeSpaceAfterGCHeap < size && shouldThrow)
            throw new InsufficientMemoryException(Environment.GetResourceString("InsufficientMemory_MemFailPoint_VAFrag"));
        return freeSpaceAfterGCHeap >= size;
    }
    // Returns the amount of consecutive free memory available in a block
    // of pages. If we didn't have enough address space, we still return
    // a positive value < size, to help potentially avoid the overhead of
    // this check if we use a MemoryFailPoint with a smaller size next.
    private static unsafe ulong MemFreeAfterAddress(void * address, ulong size)
    {
        if (size >= TopOfMemory)
            return 0;
        ulong largestFreeRegion = 0;
        Win32Native.MEMORY_BASIC_INFORMATION memInfo = new Win32Native.MEMORY_BASIC_INFORMATION();
        UIntPtr sizeOfMemInfo = (UIntPtr) Marshal.SizeOf(memInfo);
        // Walk region by region via VirtualQuery until a free region of the
        // requested size is found or the user-mode address limit is reached.
        while (((ulong)address) + size < TopOfMemory) {
            UIntPtr r = Win32Native.VirtualQuery(address, ref memInfo, sizeOfMemInfo);
            if (r == UIntPtr.Zero)
                __Error.WinIOError();
            ulong regionSize = memInfo.RegionSize.ToUInt64();
            if (memInfo.State == Win32Native.MEM_FREE) {
                if (regionSize >= size)
                    return regionSize;
                else
                    largestFreeRegion = Math.Max(largestFreeRegion, regionSize);
            }
            address = (void *) ((ulong) address + regionSize);
        }
        return largestFreeRegion;
    }
    // Implemented in the runtime: returns the max GC segment size and the
    // top of user-mode memory.
    [MethodImpl(MethodImplOptions.InternalCall)]
    private static extern void GetMemorySettings(out ulong maxGCSegmentSize, out ulong topOfMemory);
    // Critical finalizer: releases the reservation if Dispose was never called.
    ~MemoryFailPoint()
    {
        Dispose(false);
    }
    // Applications must call Dispose, which conceptually "releases" the
    // memory that was "reserved" by the MemoryFailPoint. This affects a
    // global count of reserved memory in this version (helping to throttle
    // future MemoryFailPoints) in this version. We may in the
    // future create an allocation context and release it in the Dispose
    // method. While the finalizer will eventually free this block of
    // memory, apps will help their performance greatly by calling Dispose.
    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
    private void Dispose(bool disposing)
    {
        // This is just bookkeeping to ensure multiple threads can really
        // get enough memory, and this does not actually reserve memory
        // within the GC heap.
        if (_mustSubtractReservation) {
            RuntimeHelpers.PrepareConstrainedRegions();
            try {
            }
            finally {
                SharedStatics.AddMemoryFailPointReservation(-((long)_reservedMemory));
                _mustSubtractReservation = false;
            }
        }
        /*
        // Prototype performance
        // Let's pretend that we returned at least some free memory to
        // the GC heap. We don't know this is true - the objects could
        // have a longer lifetime, and the memory could be elsewhere in the
        // GC heap. Additionally, we subtracted off the segment size, not
        // this size. That's ok - we don't mind if this slowly degrades
        // and requires us to refresh the value a little bit sooner.
        // But releasing the memory here should help us avoid probing for
        // free address space excessively with large workItem sizes.
        Interlocked.Add(ref LastKnownFreeAddressSpace, _reservedMemory);
        */
    }
#if _DEBUG
[Serializable]
internal sealed class MemoryFailPointState
{
    // Snapshot of the memory-gate inputs at the moment a MemoryFailPoint
    // check failed; attached to the InsufficientMemoryException for debugging.
    private ulong _segmentSize;
    private int _allocationSizeInMB;
    private bool _needPageFile;
    private bool _needAddressSpace;
    private bool _needContiguousVASpace;
    private ulong _availPageFile;
    private ulong _totalFreeAddressSpace;
    private long _lastKnownFreeAddressSpace;
    private ulong _reservedMem;
    private String _stackTrace; // Where did we fail, for additional debugging.

    internal MemoryFailPointState(int allocationSizeInMB, ulong segmentSize, bool needPageFile, bool needAddressSpace, bool needContiguousVASpace, ulong availPageFile, ulong totalFreeAddressSpace, long lastKnownFreeAddressSpace, ulong reservedMem)
    {
        _segmentSize = segmentSize;
        _allocationSizeInMB = allocationSizeInMB;
        _needPageFile = needPageFile;
        _needAddressSpace = needAddressSpace;
        _needContiguousVASpace = needContiguousVASpace;
        _availPageFile = availPageFile;
        _totalFreeAddressSpace = totalFreeAddressSpace;
        _lastKnownFreeAddressSpace = lastKnownFreeAddressSpace;
        _reservedMem = reservedMem;
        _stackTrace = CaptureStackTrace();
    }

    // Best-effort stack capture: StackTrace needs permission and allocates,
    // so both failure modes fall back to a short marker string.
    private static String CaptureStackTrace()
    {
        try
        {
            return Environment.StackTrace;
        }
        catch (System.Security.SecurityException)
        {
            return "no permission";
        }
        catch (OutOfMemoryException)
        {
            return "out of memory";
        }
    }

    public override String ToString()
    {
        return String.Format(System.Globalization.CultureInfo.InvariantCulture, "MemoryFailPoint detected insufficient memory to guarantee an operation could complete. Checked for {0} MB, for allocation size of {1} MB. Need page file? {2} Need Address Space? {3} Need Contiguous address space? {4} Avail page file: {5} MB Total free VA space: {6} MB Contiguous free address space (found): {7} MB Space reserved by process's MemoryFailPoints: {8} MB",
            _segmentSize >> 20, _allocationSizeInMB, _needPageFile,
            _needAddressSpace, _needContiguousVASpace,
            _availPageFile >> 20, _totalFreeAddressSpace >> 20,
            _lastKnownFreeAddressSpace >> 20, _reservedMem);
    }

    public String StackTrace
    {
        get { return _stackTrace; }
    }
}
#endif
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using LibGit2Sharp.Core;
using LibGit2Sharp.Core.Handles;
namespace LibGit2Sharp
{
/// <summary>
/// Provides methods to directly work against the Git object database
/// without involving the index nor the working directory.
/// </summary>
/// <summary>
/// Provides methods to directly work against the Git object database
/// without involving the index nor the working directory.
/// </summary>
public class ObjectDatabase : IEnumerable<GitObject>
{
    private readonly Repository repo;
    private readonly ObjectDatabaseSafeHandle handle;

    /// <summary>
    /// Needed for mocking purposes.
    /// </summary>
    protected ObjectDatabase()
    { }

    internal ObjectDatabase(Repository repo)
    {
        this.repo = repo;
        handle = Proxy.git_repository_odb(repo.Handle);
        // The repository owns the native handle; registering it for cleanup
        // ensures it is released when the repository is disposed.
        repo.RegisterForCleanup(handle);
    }

    #region Implementation of IEnumerable

    /// <summary>
    /// Returns an enumerator that iterates through the collection.
    /// </summary>
    /// <returns>An <see cref="IEnumerator{T}"/> object that can be used to iterate through the collection.</returns>
    public virtual IEnumerator<GitObject> GetEnumerator()
    {
        // Materialize all object ids first, then lazily look each one up as
        // the enumerator is advanced.
        ICollection<GitOid> oids = Proxy.git_odb_foreach(handle,
            ptr => ptr.MarshalAs<GitOid>());
        return oids
            .Select(gitOid => repo.Lookup<GitObject>(new ObjectId(gitOid)))
            .GetEnumerator();
    }

    /// <summary>
    /// Returns an enumerator that iterates through the collection.
    /// </summary>
    /// <returns>An <see cref="IEnumerator"/> object that can be used to iterate through the collection.</returns>
    IEnumerator IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }

    #endregion

    /// <summary>
    /// Determines if the given object can be found in the object database.
    /// </summary>
    /// <param name="objectId">Identifier of the object being searched for.</param>
    /// <returns>True if the object has been found; false otherwise.</returns>
    public virtual bool Contains(ObjectId objectId)
    {
        Ensure.ArgumentNotNull(objectId, "objectId");
        return Proxy.git_odb_exists(handle, objectId);
    }

    /// <summary>
    /// Retrieves the header of a GitObject from the object database. The header contains the Size
    /// and Type of the object. Note that most backends do not support reading only the header
    /// of an object, so the whole object will be read and then size would be returned.
    /// </summary>
    /// <param name="objectId">Object Id of the queried object</param>
    /// <returns>GitObjectMetadata object instance containing object header information</returns>
    public virtual GitObjectMetadata RetrieveObjectMetadata(ObjectId objectId)
    {
        Ensure.ArgumentNotNull(objectId, "objectId");
        return Proxy.git_odb_read_header(handle, objectId);
    }

    /// <summary>
    /// Inserts a <see cref="Blob"/> into the object database, created from the content of a file.
    /// </summary>
    /// <param name="path">Path to the file to create the blob from. A relative path is allowed to
    /// be passed if the <see cref="Repository"/> is a standard, non-bare, repository. The path
    /// will then be considered as a path relative to the root of the working directory.</param>
    /// <returns>The created <see cref="Blob"/>.</returns>
    public virtual Blob CreateBlob(string path)
    {
        Ensure.ArgumentNotNullOrEmptyString(path, "path");

        // A relative path only makes sense against a working directory,
        // which a bare repository does not have.
        if (repo.Info.IsBare && !Path.IsPathRooted(path))
        {
            throw new InvalidOperationException(
                string.Format(CultureInfo.InvariantCulture,
                    "Cannot create a blob in a bare repository from a relative path ('{0}').", path));
        }

        ObjectId id = Path.IsPathRooted(path)
            ? Proxy.git_blob_create_fromdisk(repo.Handle, path)
            : Proxy.git_blob_create_fromfile(repo.Handle, path);

        return repo.Lookup<Blob>(id);
    }

    /// <summary>
    /// Adds the provided backend to the object database with the specified priority.
    /// <para>
    /// If the provided backend implements <see cref="IDisposable"/>, the <see cref="IDisposable.Dispose"/>
    /// method will be honored and invoked upon the disposal of the repository.
    /// </para>
    /// </summary>
    /// <param name="backend">The backend to add</param>
    /// <param name="priority">The priority at which libgit2 should consult this backend (higher values are consulted first)</param>
    public virtual void AddBackend(OdbBackend backend, int priority)
    {
        Ensure.ArgumentNotNull(backend, "backend");
        Ensure.ArgumentConformsTo(priority, s => s > 0, "priority");

        Proxy.git_odb_add_backend(handle, backend.GitOdbBackendPointer, priority);
    }

    // Feeds chunks of a managed Stream to libgit2's chunked blob-creation
    // callback, optionally capping the total number of bytes consumed.
    private class Processor
    {
        private readonly Stream stream;
        private readonly int? numberOfBytesToConsume;
        private int totalNumberOfReadBytes;

        public Processor(Stream stream, int? numberOfBytesToConsume)
        {
            this.stream = stream;
            this.numberOfBytesToConsume = numberOfBytesToConsume;
        }

        // Native callback: fill 'content' with up to 'max_length' bytes.
        // Returns the number of bytes written, 0 when done, or a negative
        // GitErrorCode when the stream ends before the requested byte
        // count has been consumed.
        public int Provider(IntPtr content, int max_length, IntPtr data)
        {
            var local = new byte[max_length];
            int bytesToRead = max_length;

            if (numberOfBytesToConsume.HasValue)
            {
                int totalRemainingBytesToRead = numberOfBytesToConsume.Value - totalNumberOfReadBytes;
                if (totalRemainingBytesToRead < max_length)
                {
                    bytesToRead = totalRemainingBytesToRead;
                }
            }

            if (bytesToRead == 0)
            {
                return 0;
            }

            int numberOfReadBytes = stream.Read(local, 0, bytesToRead);

            // A premature end of stream while a byte count was requested is
            // surfaced to libgit2 as a user error.
            if (numberOfBytesToConsume.HasValue
                && numberOfReadBytes == 0)
            {
                return (int)GitErrorCode.User;
            }

            totalNumberOfReadBytes += numberOfReadBytes;
            Marshal.Copy(local, 0, content, numberOfReadBytes);

            return numberOfReadBytes;
        }
    }

    /// <summary>
    /// Inserts a <see cref="Blob"/> into the object database, created from the content of a stream.
    /// <para>Optionally, git filters will be applied to the content before storing it.</para>
    /// </summary>
    /// <param name="stream">The stream from which will be read the content of the blob to be created.</param>
    /// <param name="hintpath">The hintpath is used to determine what git filters should be applied to the object before it can be placed to the object database.</param>
    /// <param name="numberOfBytesToConsume">The number of bytes to consume from the stream.</param>
    /// <returns>The created <see cref="Blob"/>.</returns>
    public virtual Blob CreateBlob(Stream stream, string hintpath = null, int? numberOfBytesToConsume = null)
    {
        Ensure.ArgumentNotNull(stream, "stream");

        // there's no need to buffer the file for filtering, so simply use a stream
        if (hintpath == null && numberOfBytesToConsume.HasValue)
        {
            return CreateBlob(stream, numberOfBytesToConsume.Value);
        }

        if (!stream.CanRead)
        {
            throw new ArgumentException("The stream cannot be read from.", "stream");
        }

        var proc = new Processor(stream, numberOfBytesToConsume);
        ObjectId id = Proxy.git_blob_create_fromchunks(repo.Handle, hintpath, proc.Provider);

        return repo.Lookup<Blob>(id);
    }

    /// <summary>
    /// Inserts a <see cref="Blob"/> into the object database created from the content of the stream.
    /// </summary>
    /// <param name="stream">The stream from which will be read the content of the blob to be created.</param>
    /// <param name="numberOfBytesToConsume">Number of bytes to consume from the stream.</param>
    /// <returns>The created <see cref="Blob"/>.</returns>
    public virtual Blob CreateBlob(Stream stream, int numberOfBytesToConsume)
    {
        Ensure.ArgumentNotNull(stream, "stream");

        if (!stream.CanRead)
        {
            throw new ArgumentException("The stream cannot be read from.", "stream");
        }

        // Stream the content into an ODB write-stream in 4 KB chunks; the
        // blob id is only produced once the write is finalized.
        using (var odbStream = Proxy.git_odb_open_wstream(handle, (UIntPtr)numberOfBytesToConsume, GitObjectType.Blob))
        {
            var buffer = new byte[4*1024];
            int totalRead = 0;
            while (totalRead < numberOfBytesToConsume)
            {
                var left = numberOfBytesToConsume - totalRead;
                var toRead = left < buffer.Length ? left : buffer.Length;
                var read = stream.Read(buffer, 0, toRead);
                if (read == 0)
                {
                    throw new EndOfStreamException("The stream ended unexpectedly");
                }
                Proxy.git_odb_stream_write(odbStream, buffer, read);
                totalRead += read;
            }
            var id = Proxy.git_odb_stream_finalize_write(odbStream);
            return repo.Lookup<Blob>(id);
        }
    }

    /// <summary>
    /// Inserts a <see cref="Tree"/> into the object database, created from a <see cref="TreeDefinition"/>.
    /// </summary>
    /// <param name="treeDefinition">The <see cref="TreeDefinition"/>.</param>
    /// <returns>The created <see cref="Tree"/>.</returns>
    public virtual Tree CreateTree(TreeDefinition treeDefinition)
    {
        Ensure.ArgumentNotNull(treeDefinition, "treeDefinition");

        return treeDefinition.Build(repo);
    }

    /// <summary>
    /// Inserts a <see cref="Tree"/> into the object database, created from the <see cref="Index"/>.
    /// <para>
    /// It recursively creates tree objects for each of the subtrees stored in the index, but only returns the root tree.
    /// </para>
    /// <para>
    /// The index must be fully merged.
    /// </para>
    /// </summary>
    /// <param name="index">The <see cref="Index"/>.</param>
    /// <returns>The created <see cref="Tree"/>. This can be used e.g. to create a <see cref="Commit"/>.</returns>
    public virtual Tree CreateTree(Index index)
    {
        Ensure.ArgumentNotNull(index, "index");

        var treeId = Proxy.git_tree_create_fromindex(index);
        return this.repo.Lookup<Tree>(treeId);
    }

    /// <summary>
    /// Inserts a <see cref="Commit"/> into the object database, referencing an existing <see cref="Tree"/>.
    /// <para>
    /// Prettifing the message includes:
    /// * Removing empty lines from the beginning and end.
    /// * Removing trailing spaces from every line.
    /// * Turning multiple consecutive empty lines between paragraphs into just one empty line.
    /// * Ensuring the commit message ends with a newline.
    /// * Removing every line starting with "#".
    /// </para>
    /// </summary>
    /// <param name="author">The <see cref="Signature"/> of who made the change.</param>
    /// <param name="committer">The <see cref="Signature"/> of who added the change to the repository.</param>
    /// <param name="message">The description of why a change was made to the repository.</param>
    /// <param name="tree">The <see cref="Tree"/> of the <see cref="Commit"/> to be created.</param>
    /// <param name="parents">The parents of the <see cref="Commit"/> to be created.</param>
    /// <param name="prettifyMessage">True to prettify the message, or false to leave it as is.</param>
    /// <param name="commentChar">Character that lines start with to be stripped if prettifyMessage is true.</param>
    /// <returns>The created <see cref="Commit"/>.</returns>
    public virtual Commit CreateCommit(Signature author, Signature committer, string message, Tree tree, IEnumerable<Commit> parents, bool prettifyMessage, char? commentChar = null)
    {
        Ensure.ArgumentNotNull(message, "message");
        Ensure.ArgumentDoesNotContainZeroByte(message, "message");
        Ensure.ArgumentNotNull(author, "author");
        Ensure.ArgumentNotNull(committer, "committer");
        Ensure.ArgumentNotNull(tree, "tree");
        Ensure.ArgumentNotNull(parents, "parents");

        if (prettifyMessage)
        {
            message = Proxy.git_message_prettify(message, commentChar);
        }
        GitOid[] parentIds = parents.Select(p => p.Id.Oid).ToArray();

        ObjectId commitId = Proxy.git_commit_create(repo.Handle, null, author, committer, message, tree, parentIds);

        return repo.Lookup<Commit>(commitId);
    }

    /// <summary>
    /// Inserts a <see cref="TagAnnotation"/> into the object database, pointing to a specific <see cref="GitObject"/>.
    /// </summary>
    /// <param name="name">The name.</param>
    /// <param name="target">The <see cref="GitObject"/> being pointed at.</param>
    /// <param name="tagger">The tagger.</param>
    /// <param name="message">The message.</param>
    /// <returns>The created <see cref="TagAnnotation"/>.</returns>
    public virtual TagAnnotation CreateTagAnnotation(string name, GitObject target, Signature tagger, string message)
    {
        Ensure.ArgumentNotNullOrEmptyString(name, "name");
        Ensure.ArgumentNotNull(message, "message");
        Ensure.ArgumentNotNull(target, "target");
        Ensure.ArgumentNotNull(tagger, "tagger");
        Ensure.ArgumentDoesNotContainZeroByte(name, "name");
        Ensure.ArgumentDoesNotContainZeroByte(message, "message");

        string prettifiedMessage = Proxy.git_message_prettify(message, null);

        ObjectId tagId = Proxy.git_tag_annotation_create(repo.Handle, name, target, tagger, prettifiedMessage);

        return repo.Lookup<TagAnnotation>(tagId);
    }

    /// <summary>
    /// Archive the given commit.
    /// </summary>
    /// <param name="commit">The commit.</param>
    /// <param name="archiver">The archiver to use.</param>
    public virtual void Archive(Commit commit, ArchiverBase archiver)
    {
        Ensure.ArgumentNotNull(commit, "commit");
        Ensure.ArgumentNotNull(archiver, "archiver");

        archiver.OrchestrateArchiving(commit.Tree, commit.Id, commit.Committer.When);
    }

    /// <summary>
    /// Archive the given tree.
    /// </summary>
    /// <param name="tree">The tree.</param>
    /// <param name="archiver">The archiver to use.</param>
    public virtual void Archive(Tree tree, ArchiverBase archiver)
    {
        Ensure.ArgumentNotNull(tree, "tree");
        Ensure.ArgumentNotNull(archiver, "archiver");

        // A bare tree has no associated commit id nor timestamp.
        archiver.OrchestrateArchiving(tree, null, DateTimeOffset.UtcNow);
    }

    /// <summary>
    /// Returns the merge base (best common ancestor) of the given commits
    /// and the distance between each of these commits and this base.
    /// </summary>
    /// <param name="one">The <see cref="Commit"/> being used as a reference.</param>
    /// <param name="another">The <see cref="Commit"/> being compared against <paramref name="one"/>.</param>
    /// <returns>A instance of <see cref="HistoryDivergence"/>.</returns>
    public virtual HistoryDivergence CalculateHistoryDivergence(Commit one, Commit another)
    {
        Ensure.ArgumentNotNull(one, "one");
        Ensure.ArgumentNotNull(another, "another");

        return new HistoryDivergence(repo, one, another);
    }

    /// <summary>
    /// Calculates the current shortest abbreviated <see cref="ObjectId"/>
    /// string representation for a <see cref="GitObject"/>.
    /// </summary>
    /// <param name="gitObject">The <see cref="GitObject"/> which identifier should be shortened.</param>
    /// <param name="minLength">Minimum length of the shortened representation.</param>
    /// <returns>A short string representation of the <see cref="ObjectId"/>.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="gitObject"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="minLength"/> is outside (0, ObjectId.HexSize].</exception>
    public virtual string ShortenObjectId(GitObject gitObject, int? minLength = null)
    {
        // Guard added for consistency with the other public entry points of
        // this class; gitObject.Id is dereferenced below.
        Ensure.ArgumentNotNull(gitObject, "gitObject");

        if (minLength.HasValue && (minLength <= 0 || minLength > ObjectId.HexSize))
        {
            throw new ArgumentOutOfRangeException("minLength", minLength,
                string.Format(CultureInfo.InvariantCulture,
                    "Expected value should be greater than zero and less than or equal to {0}.", ObjectId.HexSize));
        }

        string shortSha = Proxy.git_object_short_id(repo.Handle, gitObject.Id);

        // libgit2 may already return a representation at least as long as
        // the requested minimum; otherwise pad from the full SHA.
        if (minLength == null || (minLength <= shortSha.Length))
        {
            return shortSha;
        }

        return gitObject.Sha.Substring(0, minLength.Value);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using EnvDTE;
using Thinktecture.Tools.Web.Services.CodeGeneration;
using VsWebSite;
using VSLangProj80;
using System.Diagnostics;
using System.IO;
namespace Thinktecture.Tools.Web.Services.ContractFirst.VsObjectWrappers
{
// Thin wrapper over an EnvDTE.Project that normalizes access to project
// properties, language detection, file/reference addition, and web-site
// specific operations.
internal class VisualStudioProject
{
    #region Private Fields
    // The wrapped DTE project.
    private readonly Project project;
    // Well-known VS project-kind GUIDs (see VSLangProj.PrjKind).
    private const string prjKindVBProject = "{F184B08F-C81C-45F6-A57F-5ABD9991F28F}";
    private const string prjKindCSharpProject = "{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}";
    #endregion

    #region Public Constructors
    public VisualStudioProject(Project project)
    {
        this.project = project;
    }
    #endregion

    #region Public Properties
    // Display name of the project.
    public string ProjectName
    {
        get { return project.Name; }
    }

    // Full path of the project file.
    public string ProjectFileName
    {
        get { return project.FileName; }
    }

    // True when the underlying project is a web site project.
    public bool IsWebProject
    {
        get { return project.Object is VSWebSite; }
    }

    // Code language of the project, detected differently for web sites
    // and for class-library/executable projects.
    public CodeLanguage ProjectLanguage
    {
        get
        {
            return (IsWebProject) ? GetWebProjectLanguage() : GetProjectLanguage();
        }
    }

    // Default namespace of the project; empty string when unavailable.
    public string AssemblyNamespace
    {
        get
        {
            return GetProjectProperty("DefaultNamespace");
        }
    }

    // Root directory of the project; empty string when unavailable.
    public string ProjectDirectory
    {
        get
        {
            return GetProjectProperty("FullPath");
        }
    }

    private string AssemblyName
    {
        get
        {
            return GetProjectProperty("AssemblyName");
        }
    }
    #endregion

    #region Public Methods
    // Adds an existing file on disk to the project and refreshes the view.
    public void AddFile(string file)
    {
        // Fixed typo in the assertion message ("Attemp").
        Debug.Assert(File.Exists(file), "Attempt to add a non-existing file.");
        project.ProjectItems.AddFromFile(file);
        Refresh();
    }

    // Creates a sub-folder under App_Code of a web site project and returns
    // its full path combined with 'name'.
    public string AddCodeFolderToWebProject(string name)
    {
        // BUG FIX: the original message ("Cannot add code folders to a web
        // project.") contradicted the asserted condition, which requires the
        // project TO BE a web project.
        Debug.Assert(IsWebProject, "Code folders can only be added to a web project.");
        VSWebSite website = (VSWebSite)project.Object;
        string relativePath = string.Format(@"App_Code/{0}", name);
        website.CodeFolders.Add(relativePath);
        // NOTE(review): this always reads the FIRST code folder (the
        // collection is 1-based), not necessarily the folder just added -
        // confirm behavior when multiple code folders exist.
        CodeFolder appCodeFolder = website.CodeFolders.Item(1);
        return Path.Combine(appCodeFolder.ProjectItem.get_FileNames(0), name);
    }

    // Adds an assembly reference: from the GAC for web sites, via the
    // VSProject2 reference collection otherwise.
    public void AddReference(string assembly)
    {
        if (IsWebProject)
        {
            VSWebSite website = this.project.Object as VSWebSite;
            website.References.AddFromGAC(assembly);
        }
        else
        {
            VSProject2 prj = this.project.Object as VSProject2;
            prj.References.Add(assembly);
        }
    }

    // Maps 'fileName' to a destination code filename, swapping the
    // extension to match the project language (".vb" or ".cs").
    public string GetDefaultDestinationFilename(string fileName)
    {
        string baseFileName = Path.GetFileNameWithoutExtension(fileName);
        string extension = (ProjectLanguage == CodeLanguage.VisualBasic) ? "vb" : "cs";
        return Path.ChangeExtension(baseFileName, extension);
    }
    #endregion

    #region Private Methods
    // Reads a named project property; returns string.Empty on any failure
    // (deliberate best-effort - DTE property access throws for missing keys).
    private string GetProjectProperty(string key)
    {
        try
        {
            return (string)project.Properties.Item(key).Value;
        }
        catch (Exception)
        {
            return string.Empty;
        }
    }

    // Language detection for web site projects, based on the
    // CurrentWebSiteLanguage property; defaults to C#.
    private CodeLanguage GetWebProjectLanguage()
    {
        string language = GetProjectProperty("CurrentWebSiteLanguage");
        switch (language)
        {
            case "Visual Basic":
                return CodeLanguage.VisualBasic;
            case "Visual C#":
                return CodeLanguage.CSharp;
            default:
                return CodeLanguage.CSharp;
        }
    }

    // Language detection for regular projects, based on the project-kind
    // GUID; defaults to C#.
    private CodeLanguage GetProjectLanguage()
    {
        switch (project.Kind)
        {
            case prjKindVBProject:
                return CodeLanguage.VisualBasic;
            case prjKindCSharpProject:
                return CodeLanguage.CSharp;
            default:
                return CodeLanguage.CSharp;
        }
    }

    // Refreshes the project view; returns true when a refresh was issued.
    private bool Refresh()
    {
        if (IsWebProject)
        {
            VSWebSite website = this.project.Object as VSWebSite;
            if (website != null)
            {
                website.Refresh();
                return true;
            }
        }
        else
        {
            // Renamed local (was 'project') to stop shadowing the field.
            VSProject2 vsProject = this.project.Object as VSProject2;
            if (vsProject != null)
            {
                vsProject.Refresh();
                return true;
            }
        }
        return false;
    }
    #endregion
}
}
| |
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace System.IdentityModel.Tokens
{
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Principal;
using System.IdentityModel.Diagnostics;
// Receiver-side security token wrapping a Kerberos AP-REQ blob. The blob is
// authenticated via SSPI AcceptSecurityContext, which yields the Windows
// identity, token lifetime and session key exposed by this type.
public class KerberosReceiverSecurityToken : WindowsSecurityToken
{
    string id;
    byte[] request; // raw AP-REQ blob received from the client
    SymmetricSecurityKey symmetricSecurityKey = null; // session key, populated by Initialize
    ReadOnlyCollection<SecurityKey> securityKeys = null; // lazily-built single-key collection
    bool isAuthenticated = false;
    string valueTypeUri = null;
    ChannelBinding channelBinding;
    ExtendedProtectionPolicy extendedProtectionPolicy;

    public KerberosReceiverSecurityToken(byte[] request)
        : this(request, SecurityUniqueId.Create().Value)
    { }

    public KerberosReceiverSecurityToken(byte[] request, string id)
        : this(request, id, true, null)
    {
    }

    public KerberosReceiverSecurityToken(byte[] request, string id, string valueTypeUri)
        : this(request, id, true, valueTypeUri)
    {
    }

    internal KerberosReceiverSecurityToken( byte[] request, string id, bool doAuthenticate, string valueTypeUri )
        : this(request, id, doAuthenticate, valueTypeUri, null, null)
    { }

    // Master constructor. When doAuthenticate is false, authentication is
    // deferred until a property that needs it is first accessed.
    internal KerberosReceiverSecurityToken(
        byte[] request,
        string id,
        bool doAuthenticate,
        string valueTypeUri,
        ChannelBinding channelBinding,
        ExtendedProtectionPolicy extendedProtectionPolicy )
    {
        if (request == null)
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("request"));
        if (id == null)
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("id"));

        this.id = id;
        this.request = request;
        this.valueTypeUri = valueTypeUri;
        this.channelBinding = channelBinding;
        this.extendedProtectionPolicy = extendedProtectionPolicy;

        if (doAuthenticate)
        {
            Initialize(null, channelBinding, extendedProtectionPolicy);
        }
    }

    public override ReadOnlyCollection<SecurityKey> SecurityKeys
    {
        get
        {
            // Lazily wrap the single session key; SecurityKey triggers
            // authentication if it has not happened yet.
            if (this.securityKeys == null)
            {
                List<SecurityKey> temp = new List<SecurityKey>(1);
                temp.Add(this.SecurityKey);
                this.securityKeys = temp.AsReadOnly();
            }
            return this.securityKeys;
        }
    }

    public SymmetricSecurityKey SecurityKey
    {
        get
        {
            if (!this.isAuthenticated)
            {
                Initialize(null, this.channelBinding, this.extendedProtectionPolicy);
            }
            return this.symmetricSecurityKey;
        }
    }

    public override DateTime ValidFrom
    {
        get
        {
            if (!this.isAuthenticated)
            {
                Initialize(null, this.channelBinding, this.extendedProtectionPolicy);
            }
            return base.ValidFrom;
        }
    }

    public override DateTime ValidTo
    {
        get
        {
            if (!this.isAuthenticated)
            {
                Initialize(null, this.channelBinding, this.extendedProtectionPolicy);
            }
            return base.ValidTo;
        }
    }

    public override WindowsIdentity WindowsIdentity
    {
        get
        {
            ThrowIfDisposed();
            if (!this.isAuthenticated)
            {
                Initialize(null, this.channelBinding, this.extendedProtectionPolicy);
            }
            return base.WindowsIdentity;
        }
    }

    /// <summary>
    /// The Uri that defines the ValueType of the kerberos blob.
    /// </summary>
    public string ValueTypeUri
    {
        get
        {
            return valueTypeUri;
        }
    }

    // Returns a defensive copy of the AP-REQ blob.
    public byte[] GetRequest()
    {
        return SecurityUtils.CloneBuffer(this.request);
    }

    // This internal API is not thread-safe. It is acceptable since ..
    // 1) From public OM, Initialize happens at ctor time.
    // 2) From internal OM (Sfx), Initialize happens right after ctor (single thread env).
    //    i.e. ReadToken and then AuthenticateToken.
    internal void Initialize( SafeFreeCredentials credentialsHandle, ChannelBinding channelBinding, ExtendedProtectionPolicy extendedProtectionPolicy )
    {
        if (this.isAuthenticated)
        {
            return;
        }
        bool ownCredentialsHandle = false;
        SafeDeleteContext securityContext = null;
        SafeCloseHandle tokenHandle = null;
#if RECOMPUTEGSS
        int tokenSize = DEREncoding.TokenSize(this.request.Length);
        byte[] rawRequest = new byte[tokenSize];
        int offset = 0;
        int len = this.request.Length;
        DEREncoding.MakeTokenHeader(this.request.Length, rawRequest, ref offset, ref len);
        System.Buffer.BlockCopy(this.request, 0, rawRequest, offset, this.request.Length);
#else
        byte[] rawRequest = this.request;
#endif

        try
        {
            if (credentialsHandle == null)
            {
                credentialsHandle = SspiWrapper.AcquireDefaultCredential("Kerberos", CredentialUse.Inbound);
                ownCredentialsHandle = true;
            }

            // FIX: SspiContextFlags.Confidentiality was OR'ed in twice in the
            // original; the duplicate is removed (bitwise OR is idempotent,
            // so the resulting flag value is unchanged).
            SspiContextFlags fContextReq = SspiContextFlags.AllocateMemory
                | SspiContextFlags.Confidentiality
                | SspiContextFlags.ReplayDetect
                | SspiContextFlags.SequenceDetect;

            // Extended protection: reject when a channel binding is mandated
            // but absent; relax the flags for "when supported" and trusted
            // proxy scenarios.
            ExtendedProtectionPolicyHelper policyHelper = new ExtendedProtectionPolicyHelper(channelBinding, extendedProtectionPolicy);

            if (policyHelper.PolicyEnforcement == PolicyEnforcement.Always && policyHelper.ChannelBinding == null && policyHelper.ProtectionScenario != ProtectionScenario.TrustedProxy)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SecurityTokenException(SR.GetString(SR.SecurityChannelBindingMissing)));
            }

            if (policyHelper.PolicyEnforcement == PolicyEnforcement.WhenSupported)
            {
                fContextReq |= SspiContextFlags.ChannelBindingAllowMissingBindings;
            }

            if (policyHelper.ProtectionScenario == ProtectionScenario.TrustedProxy)
            {
                fContextReq |= SspiContextFlags.ChannelBindingProxyBindings;
            }

            SspiContextFlags contextFlags = SspiContextFlags.Zero;
            SecurityBuffer outSecurityBuffer = new SecurityBuffer(0, BufferType.Token);

            List<SecurityBuffer> list = new List<SecurityBuffer>(2);
            list.Add(new SecurityBuffer(rawRequest, BufferType.Token));

            if (policyHelper.ShouldAddChannelBindingToASC())
            {
                list.Add(new SecurityBuffer(policyHelper.ChannelBinding));
            }

            SecurityBuffer[] inSecurityBuffer = null;
            if (list.Count > 0)
            {
                inSecurityBuffer = list.ToArray();
            }

            int statusCode = SspiWrapper.AcceptSecurityContext(credentialsHandle,
                ref securityContext,
                fContextReq,
                Endianness.Native,
                inSecurityBuffer,
                outSecurityBuffer,
                ref contextFlags);

            if (DiagnosticUtility.ShouldTraceInformation)
            {
                SecurityTraceRecordHelper.TraceChannelBindingInformation(policyHelper, true, channelBinding);
            }

            if (statusCode != (int)SecurityStatus.OK)
            {
                // Kerberos AP-REQ processing must complete in a single leg;
                // ContinueNeeded therefore indicates an unsupported exchange.
                if (statusCode == (int)SecurityStatus.ContinueNeeded)
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                        new SecurityTokenException(SR.GetString(SR.KerberosMultilegsNotSupported), new Win32Exception(statusCode)));
                }
                else if (statusCode == (int)SecurityStatus.OutOfMemory)
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                        new SecurityTokenException(SR.GetString(SR.KerberosApReqInvalidOrOutOfMemory), new Win32Exception(statusCode)));
                }
                else
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                        new SecurityTokenException(SR.GetString(SR.FailAcceptSecurityContext), new Win32Exception(statusCode)));
                }
            }

            // Expiration
            LifeSpan lifeSpan = (LifeSpan)SspiWrapper.QueryContextAttributes(securityContext, ContextAttribute.Lifespan);
            DateTime effectiveTime = lifeSpan.EffectiveTimeUtc;
            DateTime expirationTime = lifeSpan.ExpiryTimeUtc;

            // SessionKey
            SecuritySessionKeyClass sessionKey = (SecuritySessionKeyClass)SspiWrapper.QueryContextAttributes(securityContext, ContextAttribute.SessionKey);
            this.symmetricSecurityKey = new InMemorySymmetricSecurityKey(sessionKey.SessionKey);

            // WindowsSecurityToken
            statusCode = SspiWrapper.QuerySecurityContextToken(securityContext, out tokenHandle);
            if (statusCode != (int)SecurityStatus.OK)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new Win32Exception(statusCode));
            }
            WindowsIdentity windowsIdentity = new WindowsIdentity( tokenHandle.DangerousGetHandle(), SecurityUtils.AuthTypeKerberos);
            Initialize(this.id, SecurityUtils.AuthTypeKerberos, effectiveTime, expirationTime, windowsIdentity, false);

            // Authenticated
            this.isAuthenticated = true;
        }
        finally
        {
            // Release native handles; the credentials handle is closed only
            // when this method acquired it (not when supplied by the caller).
            if (tokenHandle != null)
                tokenHandle.Close();

            if (securityContext != null)
                securityContext.Close();

            if (ownCredentialsHandle && credentialsHandle != null)
                credentialsHandle.Close();
        }
    }

    public override bool CanCreateKeyIdentifierClause<T>()
    {
        if (typeof(T) == typeof(KerberosTicketHashKeyIdentifierClause))
            return true;

        return base.CanCreateKeyIdentifierClause<T>();
    }

    public override T CreateKeyIdentifierClause<T>()
    {
        // Identify the token by a hash of the AP-REQ blob.
        if (typeof(T) == typeof(KerberosTicketHashKeyIdentifierClause))
            return new KerberosTicketHashKeyIdentifierClause(CryptoHelper.ComputeHash(this.request), false, null, 0) as T;

        return base.CreateKeyIdentifierClause<T>();
    }

    public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause)
    {
        KerberosTicketHashKeyIdentifierClause kerbKeyIdentifierClause = keyIdentifierClause as KerberosTicketHashKeyIdentifierClause;
        if (kerbKeyIdentifierClause != null)
            return kerbKeyIdentifierClause.Matches(CryptoHelper.ComputeHash(this.request));

        return base.MatchesKeyIdentifierClause(keyIdentifierClause);
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
using System;
using System.Collections;
using System.Collections.Specialized;
using GenStrings;
namespace System.Collections.Specialized.Tests
{
public class SetStrStrTests
{
public const int MAX_LEN = 50; // max length of random strings
[Fact]
public void Test01()
{
IntlStrings intl;
NameValueCollection nvc;
// simple string values
string[] values =
{
"",
" ",
"a",
"aA",
"text",
" SPaces",
"1",
"$%^#",
"2222222222222222222222222",
System.DateTime.Today.ToString(),
Int32.MaxValue.ToString()
};
// keys for simple string values
string[] keys =
{
"zero",
"oNe",
" ",
"",
"aa",
"1",
System.DateTime.Today.ToString(),
"$%^#",
Int32.MaxValue.ToString(),
" spaces",
"2222222222222222222222222"
};
int cnt = 0; // Count
// initialize IntStrings
intl = new IntlStrings();
// [] NameValueCollection is constructed as expected
//-----------------------------------------------------------------
nvc = new NameValueCollection();
// [] Set() - new simple strings
//
for (int i = 0; i < values.Length; i++)
{
cnt = nvc.Count;
nvc.Set(keys[i], values[i]);
if (nvc.Count != cnt + 1)
{
Assert.False(true, string.Format("Error, count is {1} instead of {2}", i, nvc.Count, cnt + 1));
}
// verify that collection contains newly added item
//
if (Array.IndexOf(nvc.AllKeys, keys[i]) < 0)
{
Assert.False(true, string.Format("Error, collection doesn't contain key of new item", i));
}
// access the item
//
if (String.Compare(nvc[keys[i]], values[i]) != 0)
{
Assert.False(true, string.Format("Error, returned item \"{1}\" instead of \"{2}\"", i, nvc[keys[i]], values[i]));
}
}
//
// Intl strings
// [] Set() - new Intl strings
//
int len = values.Length;
string[] intlValues = new string[len * 2];
// fill array with unique strings
//
for (int i = 0; i < len * 2; i++)
{
string val = intl.GetRandomString(MAX_LEN);
while (Array.IndexOf(intlValues, val) != -1)
val = intl.GetRandomString(MAX_LEN);
intlValues[i] = val;
}
Boolean caseInsensitive = false;
for (int i = 0; i < len * 2; i++)
{
if (intlValues[i].Length != 0 && intlValues[i].ToLowerInvariant() == intlValues[i].ToUpperInvariant())
caseInsensitive = true;
}
//
// will use first half of array as values and second half as keys
//
for (int i = 0; i < len; i++)
{
cnt = nvc.Count;
nvc.Set(intlValues[i + len], intlValues[i]);
if (nvc.Count != cnt + 1)
{
Assert.False(true, string.Format("Error, count is {1} instead of {2}", i, nvc.Count, cnt + 1));
}
// verify that collection contains newly added item
//
if (Array.IndexOf(nvc.AllKeys, intlValues[i + len]) < 0)
{
Assert.False(true, string.Format("Error, collection doesn't contain key of new item", i));
}
// access the item
//
if (String.Compare(nvc[intlValues[i + len]], intlValues[i]) != 0)
{
Assert.False(true, string.Format("Error, returned item \"{1}\" instead of \"{2}\"", i, nvc[intlValues[i + len]], intlValues[i]));
}
}
//
// [] Case sensitivity
// Casing doesn't change ( keya are not converted to lower!)
//
string[] intlValuesLower = new string[len * 2];
// fill array with unique strings
//
for (int i = 0; i < len * 2; i++)
{
intlValues[i] = intlValues[i].ToUpperInvariant();
}
for (int i = 0; i < len * 2; i++)
{
intlValuesLower[i] = intlValues[i].ToLowerInvariant();
}
nvc.Clear();
//
// will use first half of array as values and second half as keys
//
for (int i = 0; i < len; i++)
{
cnt = nvc.Count;
// add uppercase items
nvc.Set(intlValues[i + len], intlValues[i]);
if (nvc.Count != cnt + 1)
{
Assert.False(true, string.Format("Error, count is {1} instead of {2}", i, nvc.Count, cnt + 1));
}
// verify that collection contains newly added uppercase item
//
if (Array.IndexOf(nvc.AllKeys, intlValues[i + len]) < 0)
{
Assert.False(true, string.Format("Error, collection doesn't contain key of new item", i));
}
// access the item
//
if (String.Compare(nvc[intlValues[i + len]], intlValues[i]) != 0)
{
Assert.False(true, string.Format("Error, returned item \"{1}\" instead of \"{2}\"", i, nvc[intlValues[i + len]], intlValues[i]));
}
// verify that collection doesn't contains lowercase item
//
if (!caseInsensitive && String.Compare(nvc[intlValuesLower[i + len]], intlValuesLower[i]) == 0)
{
Assert.False(true, string.Format("Error, returned item \"{1}\" is lowercase after adding uppercase", i, nvc[intlValuesLower[i + len]]));
}
// key is not converted to lower
if (!caseInsensitive && Array.IndexOf(nvc.AllKeys, intlValuesLower[i + len]) >= 0)
{
Assert.False(true, string.Format("Error, key was converted to lower", i));
}
// but search among keys is case-insensitive
if (String.Compare(nvc[intlValuesLower[i + len]], intlValues[i]) != 0)
{
Assert.False(true, string.Format("Error, could not find item using differently cased key", i));
}
}
//
// [] Set multiple values with the same key
//
nvc.Clear();
len = values.Length;
string k = "keykey";
for (int i = 0; i < len; i++)
{
nvc.Set(k, "Value" + i);
// should replace previous value
if (nvc.Count != 1)
{
Assert.False(true, string.Format("Error, count is {0} instead of 1", nvc.Count, i));
}
if (String.Compare(nvc[k], "Value" + i) != 0)
{
Assert.False(true, string.Format("Error, didn't replace value", i));
}
}
if (nvc.AllKeys.Length != 1)
{
Assert.False(true, "Error, should contain only 1 key");
}
// verify that collection contains newly added item
//
if (Array.IndexOf(nvc.AllKeys, k) < 0)
{
Assert.False(true, "Error, collection doesn't contain key of new item");
}
// access the item
//
string[] vals = nvc.GetValues(k);
if (vals.Length != 1)
{
Assert.False(true, string.Format("Error, number of values at given key is {0} instead of {1}", vals.Length, 1));
}
if (Array.IndexOf(vals, "Value" + (len - 1).ToString()) < 0)
{
Assert.False(true, string.Format("Error, value is not {0}", "Value" + (len - 1)));
}
//
// [] Set(string, null)
//
k = "kk";
nvc.Remove(k); // make sure there is no such item already
cnt = nvc.Count;
nvc.Set(k, null);
if (nvc.Count != cnt + 1)
{
Assert.False(true, string.Format("Error, count is {0} instead of {1}", nvc.Count, cnt + 1));
}
if (Array.IndexOf(nvc.AllKeys, k) < 0)
{
Assert.False(true, "Error, collection doesn't contain key of new item");
}
// verify that collection contains null
//
if (nvc[k] != null)
{
Assert.False(true, "Error, returned non-null on place of null");
}
nvc.Remove(k); // make sure there is no such item already
nvc.Add(k, "kItem");
cnt = nvc.Count;
nvc.Set(k, null);
if (nvc.Count != cnt)
{
Assert.False(true, string.Format("Error, count has changed: {0} instead of {1}", nvc.Count, cnt));
}
if (Array.IndexOf(nvc.AllKeys, k) < 0)
{
Assert.False(true, "Error, collection doesn't contain key of new item");
}
// verify that item at k-key was replaced with null
//
if (nvc[k] != null)
{
Assert.False(true, "Error, non-null was not replaced with null");
}
//
// Set item with null key - no NullReferenceException expected
// [] Set(null, string)
//
nvc.Remove(null);
cnt = nvc.Count;
nvc.Set(null, "item");
if (nvc.Count != cnt + 1)
{
Assert.False(true, string.Format("Error, count is {0} instead of {1}", nvc.Count, cnt + 1));
}
if (Array.IndexOf(nvc.AllKeys, null) < 0)
{
Assert.False(true, "Error, collection doesn't contain null key ");
}
// verify that collection contains null
//
if (nvc[null] != "item")
{
Assert.False(true, "Error, returned wrong value at null key");
}
// replace item with null key
cnt = nvc.Count;
nvc.Set(null, "newItem");
if (nvc.Count != cnt)
{
Assert.False(true, string.Format("Error, count has changed: {0} instead of {1}", nvc.Count, cnt));
}
if (Array.IndexOf(nvc.AllKeys, null) < 0)
{
Assert.False(true, "Error, collection doesn't contain null key ");
}
// verify that item with null key was replaced
//
if (nvc[null] != "newItem")
{
Assert.False(true, "Error, didn't replace value at null key");
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using OpenSSL;
namespace sandbox
{
class Naming
{
    // In-memory hierarchical registry of certificate authorities. Dotted
    // names are read right-to-left toward the root, e.g. "frank.coco.com."
    // ends at the root authority ".".
    class Node
    {
        public Node parent;
        public Dictionary<string, Node> children = new Dictionary<string, Node>();
        public Authority auth;
    }

    private static Node root;

    static Naming()
    {
        root = new Node();
        // The root is its own parent so upward traversal terminates.
        root.parent = root;
    }

    // Publishes <auth> at <path>. The authority's certificate must verify
    // against the public key of the authority already published at the
    // parent node, otherwise InvalidOperationException is thrown.
    public static void Publish(string path, Authority auth)
    {
        Node node = root;
        if (path == ".")
        {
            node.auth = auth;
            return;
        }
        string[] parts = path.Split('.');
        // Walk from the root down to the parent of the entry being
        // published; parts[0] is the new name itself and is deliberately
        // skipped (the loop stops at i > 0).
        for (int i = parts.Length - 1; i > 0; --i)
        {
            if (parts[i] == "")
                continue;
            node = node.children[parts[i]];
        }
        if (!auth.Certificate.Verify(node.auth.Certificate.PublicKey))
            throw new InvalidOperationException("This cert is not authorized to be published here");
        string name = auth.Certificate.Subject.Common;
        Node newNode = new Node();
        newNode.parent = node;
        // BUG FIX: link the new node into its *parent's* children. The
        // original code did newNode.children.Add(name, newNode), which made
        // the node a child of itself and left it unreachable from the tree,
        // so Lookup could never find any published authority.
        node.children.Add(name, newNode);
        newNode.auth = auth;
    }

    // Resolves a dotted path to the Authority stored at the node reached by
    // walking the path from the root.
    public static Authority Lookup(string path)
    {
        if (path == ".")
            return root.auth;
        Node node = root;
        string[] parts = path.Split('.');
        // NOTE(review): like Publish, this skips parts[0], so for a full
        // name it stops at the *parent* of the named node - confirm this is
        // the intended resolution semantics.
        for (int i = parts.Length - 1; i > 0; --i)
        {
            if (parts[i] == "")
                continue;
            node = node.children[parts[i]];
        }
        return node.auth;
    }
}
// A certificate authority in the toy naming hierarchy. A freshly
// constructed Authority has only a name and a DSA key pair; it becomes a
// usable CA once Promote() installs a certificate issued by its parent.
class Authority
{
    // Self-signed root authority for ".", built once at type load.
    public static Authority Root = MakeRoot();
    private SimpleSerialNumber serial = new SimpleSerialNumber();
    private CryptoKey key;
    private X509CertificateAuthority ca;
    private string name;

    // Unpromoted authority: generates a 512-bit DSA key pair.
    // NOTE(review): this.ca stays null until Promote() runs, so the
    // Certificate and Key getters throw NullReferenceException before then.
    public Authority(string name)
    {
        DSA dsa = new DSA(new DSAParameters(512));
        this.key = new CryptoKey(dsa);
        this.name = name;
    }

    // Fully formed authority built from an existing certificate and key;
    // takes its name from the certificate's subject common name.
    public Authority(X509Certificate cert, CryptoKey key)
    {
        this.key = key;
        this.ca = new X509CertificateAuthority(cert, key, this.serial);
        this.name = cert.Subject.Common;
    }

    // Builds the self-signed root certificate "CN=." valid for one year.
    private static Authority MakeRoot()
    {
        DSA dsa = new DSA(new DSAParameters(512));
        CryptoKey key = new CryptoKey(dsa);
        X509Name subject = new X509Name("CN=.");
        X509Certificate cert = new X509Certificate(
            0,
            subject,
            subject,
            key,
            TimeSpan.FromDays(365));
        cert.Sign(key, MessageDigest.DSS1);
        return new Authority(cert, key);
    }

    // The CA's certificate (valid only after promotion - see ctor note).
    public X509Certificate Certificate
    {
        get { return this.ca.Certificate; }
    }

    // The CA's signing key (valid only after promotion).
    public CryptoKey Key
    {
        get { return this.ca.Key; }
    }

    // Dotted name up to the root, e.g. "frank.coco.com."; the root itself
    // is just ".".
    public string FullName
    {
        get
        {
            if (this.name == ".")
                return this.name;
            return this.Name + "." + this.Parent;
        }
    }

    public string Name
    {
        get
        {
            return this.name;
        }
    }

    // Full name of the issuing (parent) authority; null before promotion,
    // "" when issued directly by the root.
    public string Parent
    {
        get
        {
            if (this.ca == null)
                return null;
            string issuer = this.ca.Certificate.Issuer.Common;
            if(issuer == ".")
                return "";
            return GetFullName(issuer);
        }
    }

    // Resolves an issuer common name to its full dotted name by walking the
    // issuer chain through Naming.Lookup.
    // NOTE(review): there is no base case inside this method for the root
    // issuer "." - TODO confirm the recursion actually terminates.
    private string GetFullName(string issuer)
    {
        Authority parent = Naming.Lookup(issuer);
        string next = parent.Certificate.Issuer.Common;
        return parent.Certificate.Subject.Common + "." + GetFullName(next);
    }

    // A fresh certificate signing request for this authority's name,
    // self-signed with its own key.
    public X509Request Request
    {
        get
        {
            X509Name subject = new X509Name();
            subject.Common = this.Name;
            X509Request request = new X509Request(0, subject, this.key);
            request.Sign(this.key, MessageDigest.DSS1);
            return request;
        }
    }

    // Issues a one-year certificate for the given request under this CA.
    public X509Certificate Authorize(X509Request request)
    {
        return this.ca.ProcessRequest(request, TimeSpan.FromDays(365));
    }

    // Installs a certificate (typically obtained from the parent's
    // Authorize) and turns this authority into a working CA.
    // NOTE(review): the result of cert.Verify(this.key) is ignored here -
    // if Verify returns false instead of throwing in this OpenSSL binding,
    // an unverified certificate is silently accepted; confirm.
    public void Promote(X509Certificate cert)
    {
        cert.Verify(this.key);
        this.ca = new X509CertificateAuthority(cert, this.key, this.serial);
    }
}
// Ad-hoc driver exercising the naming/authority sandbox.
class Program
{
    // Builds and publishes the chain root -> com -> coco -> frank, each
    // authority promoted with a certificate signed by its parent, then
    // prints frank's certificate.
    static void Authorities()
    {
        Authority root = Authority.Root;
        Authority com = new Authority("com");
        Authority coco = new Authority("coco");
        Authority frank = new Authority("frank");
        Naming.Publish(root.FullName, root);
        com.Promote(root.Authorize(com.Request));
        Naming.Publish(com.FullName, com);
        coco.Promote(com.Authorize(coco.Request));
        Naming.Publish(coco.FullName, coco);
        frank.Promote(coco.Authorize(frank.Request));
        Naming.Publish(frank.FullName, frank);
        Console.WriteLine(frank.Certificate);
    }

    static void Main(string[] args)
    {
        Authorities();
        return;
        // NOTE: everything below the return above is unreachable demo code
        // (earlier experiments with the raw OpenSSL wrapper: self-signed CA,
        // request processing, DH key exchange and DSS1 signing).
        SimpleSerialNumber seq = new SimpleSerialNumber();
        X509CertificateAuthority ca = X509CertificateAuthority.SelfSigned(
            seq,
            new X509Name("CN=."),
            TimeSpan.FromDays(10)
        );
        Console.WriteLine(ca.Certificate);
        DSA dsa = new DSA(new DSAParameters(512));
        CryptoKey key = new CryptoKey(dsa);
        X509Request req = new X509Request(0, new X509Name("CN=com."), key);
        req.Sign(key, MessageDigest.DSS1);
        X509Certificate cert = ca.ProcessRequest(req, TimeSpan.FromDays(10));
        Console.WriteLine(cert);
        Console.WriteLine("CA Verified: " + cert.Verify(ca.Key));
        Console.WriteLine("Self Verified: " + cert.Verify(key));
        SimpleSerialNumber serial2 = new SimpleSerialNumber();
        X509CertificateAuthority caSelf = new X509CertificateAuthority(
            cert,
            key,
            serial2);
        X509Request req2 = cert.CreateRequest(key, MessageDigest.DSS1);
        X509Name subject = req2.Subject;
        Console.WriteLine("Request1: " + req);
        Console.WriteLine("Request2: " + req2);
        X509Certificate cert2 = caSelf.ProcessRequest(req2, TimeSpan.FromDays(10));
        Console.WriteLine("Cert2: " + cert2);
        DH dh = new DH(128, 5);
        MessageDigestContext mdc = new MessageDigestContext(MessageDigest.DSS1);
        byte[] msg = dh.PublicKey;
        byte[] sig = mdc.Sign(msg, key);
        Console.WriteLine(dh);
        Console.WriteLine("DH P : " + BitConverter.ToString(dh.P));
        Console.WriteLine("DH G : " + BitConverter.ToString(dh.G));
        Console.WriteLine("DH Secret Key: " + BitConverter.ToString(dh.PrivateKey));
        Console.WriteLine("DH Public Key: " + BitConverter.ToString(msg));
        Console.WriteLine("DH Signature : " + BitConverter.ToString(sig));
        Console.WriteLine(mdc.Verify(msg, sig, key));
    }
}
| |
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text;
#if !READ_ONLY
namespace SquabPie.Mono.Cecil.Pdb {
// COM interop declaration of the CLR's unmanaged IMetaDataEmit interface.
// Because this is a [ComImport] IUnknown interface, member order defines
// the native vtable layout and must never be changed.
[ComImport, InterfaceType (ComInterfaceType.InterfaceIsIUnknown), Guid ("BA3FEE4C-ECB9-4e41-83B7-183FA41CD859")]
interface IMetaDataEmit {
	void SetModuleProps (string szName);
	void Save (string szFile, uint dwSaveFlags);
	void SaveToStream (IntPtr pIStream, uint dwSaveFlags);
	uint GetSaveSize (uint fSave);
	uint DefineTypeDef (IntPtr szTypeDef, uint dwTypeDefFlags, uint tkExtends, IntPtr rtkImplements);
	uint DefineNestedType (IntPtr szTypeDef, uint dwTypeDefFlags, uint tkExtends, IntPtr rtkImplements, uint tdEncloser);
	void SetHandler ([MarshalAs (UnmanagedType.IUnknown), In]object pUnk);
	uint DefineMethod (uint td, IntPtr zName, uint dwMethodFlags, IntPtr pvSigBlob, uint cbSigBlob, uint ulCodeRVA, uint dwImplFlags);
	void DefineMethodImpl (uint td, uint tkBody, uint tkDecl);
	uint DefineTypeRefByName (uint tkResolutionScope, IntPtr szName);
	uint DefineImportType (IntPtr pAssemImport, IntPtr pbHashValue, uint cbHashValue, IMetaDataImport pImport,
		uint tdImport, IntPtr pAssemEmit);
	uint DefineMemberRef (uint tkImport, string szName, IntPtr pvSigBlob, uint cbSigBlob);
	uint DefineImportMember (IntPtr pAssemImport, IntPtr /* void* */ pbHashValue, uint cbHashValue,
		IMetaDataImport pImport, uint mbMember, IntPtr pAssemEmit, uint tkParent);
	uint DefineEvent (uint td, string szEvent, uint dwEventFlags, uint tkEventType, uint mdAddOn, uint mdRemoveOn, uint mdFire, IntPtr /* uint* */ rmdOtherMethods);
	void SetClassLayout (uint td, uint dwPackSize, IntPtr /*COR_FIELD_OFFSET**/ rFieldOffsets, uint ulClassSize);
	void DeleteClassLayout (uint td);
	void SetFieldMarshal (uint tk, IntPtr /* byte* */ pvNativeType, uint cbNativeType);
	void DeleteFieldMarshal (uint tk);
	uint DefinePermissionSet (uint tk, uint dwAction, IntPtr /* void* */ pvPermission, uint cbPermission);
	void SetRVA (uint md, uint ulRVA);
	uint GetTokenFromSig (IntPtr /* byte* */ pvSig, uint cbSig);
	uint DefineModuleRef (string szName);
	void SetParent (uint mr, uint tk);
	uint GetTokenFromTypeSpec (IntPtr /* byte* */ pvSig, uint cbSig);
	void SaveToMemory (IntPtr /* void* */ pbData, uint cbData);
	uint DefineUserString (string szString, uint cchString);
	void DeleteToken (uint tkObj);
	void SetMethodProps (uint md, uint dwMethodFlags, uint ulCodeRVA, uint dwImplFlags);
	void SetTypeDefProps (uint td, uint dwTypeDefFlags, uint tkExtends, IntPtr /* uint* */ rtkImplements);
	void SetEventProps (uint ev, uint dwEventFlags, uint tkEventType, uint mdAddOn, uint mdRemoveOn, uint mdFire, IntPtr /* uint* */ rmdOtherMethods);
	uint SetPermissionSetProps (uint tk, uint dwAction, IntPtr /* void* */ pvPermission, uint cbPermission);
	void DefinePinvokeMap (uint tk, uint dwMappingFlags, string szImportName, uint mrImportDLL);
	void SetPinvokeMap (uint tk, uint dwMappingFlags, string szImportName, uint mrImportDLL);
	void DeletePinvokeMap (uint tk);
	uint DefineCustomAttribute (uint tkObj, uint tkType, IntPtr /* void* */ pCustomAttribute, uint cbCustomAttribute);
	void SetCustomAttributeValue (uint pcv, IntPtr /* void* */ pCustomAttribute, uint cbCustomAttribute);
	uint DefineField (uint td, string szName, uint dwFieldFlags, IntPtr /* byte* */ pvSigBlob, uint cbSigBlob, uint dwCPlusTypeFlag, IntPtr /* void* */ pValue, uint cchValue);
	uint DefineProperty (uint td, string szProperty, uint dwPropFlags, IntPtr /* byte* */ pvSig, uint cbSig, uint dwCPlusTypeFlag,
		IntPtr /* void* */ pValue, uint cchValue, uint mdSetter, uint mdGetter, IntPtr /* uint* */ rmdOtherMethods);
	uint DefineParam (uint md, uint ulParamSeq, string szName, uint dwParamFlags, uint dwCPlusTypeFlag, IntPtr /* void* */ pValue, uint cchValue);
	void SetFieldProps (uint fd, uint dwFieldFlags, uint dwCPlusTypeFlag, IntPtr /* void* */ pValue, uint cchValue);
	void SetPropertyProps (uint pr, uint dwPropFlags, uint dwCPlusTypeFlag, IntPtr /* void* */ pValue, uint cchValue, uint mdSetter, uint mdGetter, IntPtr /* uint* */ rmdOtherMethods);
	void SetParamProps (uint pd, string szName, uint dwParamFlags, uint dwCPlusTypeFlag, IntPtr /* void* */ pValue, uint cchValue);
	uint DefineSecurityAttributeSet (uint tkObj, IntPtr rSecAttrs, uint cSecAttrs);
	void ApplyEditAndContinue ([MarshalAs (UnmanagedType.IUnknown)]object pImport);
	uint TranslateSigWithScope (IntPtr pAssemImport, IntPtr /* void* */ pbHashValue, uint cbHashValue,
		IMetaDataImport import, IntPtr /* byte* */ pbSigBlob, uint cbSigBlob, IntPtr pAssemEmit, IMetaDataEmit emit, IntPtr /* byte* */ pvTranslatedSig, uint cbTranslatedSigMax);
	void SetMethodImplFlags (uint md, uint dwImplFlags);
	void SetFieldRVA (uint fd, uint ulRVA);
	void Merge (IMetaDataImport pImport, IntPtr pHostMapToken, [MarshalAs (UnmanagedType.IUnknown)]object pHandler);
	void MergeEnd ();
}
// COM interop declaration of the CLR's unmanaged IMetaDataImport interface.
// As a [ComImport] IUnknown interface, member order defines the native
// vtable layout and must never be changed.
[ComImport, InterfaceType (ComInterfaceType.InterfaceIsIUnknown), Guid ("7DAC8207-D3AE-4c75-9B67-92801A497D44")]
interface IMetaDataImport {
	[PreserveSig]
	void CloseEnum (uint hEnum);
	uint CountEnum (uint hEnum);
	void ResetEnum (uint hEnum, uint ulPos);
	uint EnumTypeDefs (ref uint phEnum, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 2)] uint [] rTypeDefs, uint cMax);
	uint EnumInterfaceImpls (ref uint phEnum, uint td, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] uint [] rImpls, uint cMax);
	uint EnumTypeRefs (ref uint phEnum, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 2)] uint [] rTypeRefs, uint cMax);
	uint FindTypeDefByName (string szTypeDef, uint tkEnclosingClass);
	Guid GetScopeProps (StringBuilder szName, uint cchName, out uint pchName);
	uint GetModuleFromScope ();
	uint GetTypeDefProps (uint td, IntPtr szTypeDef, uint cchTypeDef, out uint pchTypeDef, IntPtr pdwTypeDefFlags);
	uint GetInterfaceImplProps (uint iiImpl, out uint pClass);
	uint GetTypeRefProps (uint tr, out uint ptkResolutionScope, StringBuilder szName, uint cchName);
	uint ResolveTypeRef (uint tr, [In] ref Guid riid, [MarshalAs (UnmanagedType.Interface)] out object ppIScope);
	uint EnumMembers (ref uint phEnum, uint cl, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] uint [] rMembers, uint cMax);
	uint EnumMembersWithName (ref uint phEnum, uint cl, string szName, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 4)] uint [] rMembers, uint cMax);
	uint EnumMethods (ref uint phEnum, uint cl, IntPtr /* uint* */ rMethods, uint cMax);
	uint EnumMethodsWithName (ref uint phEnum, uint cl, string szName, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 4)] uint [] rMethods, uint cMax);
	uint EnumFields (ref uint phEnum, uint cl, IntPtr /* uint* */ rFields, uint cMax);
	uint EnumFieldsWithName (ref uint phEnum, uint cl, string szName, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 4)] uint [] rFields, uint cMax);
	uint EnumParams (ref uint phEnum, uint mb, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] uint [] rParams, uint cMax);
	uint EnumMemberRefs (ref uint phEnum, uint tkParent, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] uint [] rMemberRefs, uint cMax);
	uint EnumMethodImpls (ref uint phEnum, uint td, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 4)] uint [] rMethodBody,
		[MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 4)] uint [] rMethodDecl, uint cMax);
	uint EnumPermissionSets (ref uint phEnum, uint tk, uint dwActions, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 4)] uint [] rPermission,
		uint cMax);
	uint FindMember (uint td, string szName, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] byte [] pvSigBlob, uint cbSigBlob);
	uint FindMethod (uint td, string szName, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] byte [] pvSigBlob, uint cbSigBlob);
	uint FindField (uint td, string szName, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] byte [] pvSigBlob, uint cbSigBlob);
	uint FindMemberRef (uint td, string szName, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] byte [] pvSigBlob, uint cbSigBlob);
	uint GetMethodProps (uint mb, out uint pClass, IntPtr szMethod, uint cchMethod, out uint pchMethod, IntPtr pdwAttr, IntPtr ppvSigBlob, IntPtr pcbSigBlob, IntPtr pulCodeRVA);
	uint GetMemberRefProps (uint mr, ref uint ptk, StringBuilder szMember, uint cchMember, out uint pchMember, out IntPtr /* byte* */ ppvSigBlob);
	uint EnumProperties (ref uint phEnum, uint td, IntPtr /* uint* */ rProperties, uint cMax);
	uint EnumEvents (ref uint phEnum, uint td, IntPtr /* uint* */ rEvents, uint cMax);
	uint GetEventProps (uint ev, out uint pClass, StringBuilder szEvent, uint cchEvent, out uint pchEvent, out uint pdwEventFlags,
		out uint ptkEventType, out uint pmdAddOn, out uint pmdRemoveOn, out uint pmdFire,
		[MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 11)] uint [] rmdOtherMethod, uint cMax);
	uint EnumMethodSemantics (ref uint phEnum, uint mb, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] uint [] rEventProp, uint cMax);
	uint GetMethodSemantics (uint mb, uint tkEventProp);
	uint GetClassLayout (uint td, out uint pdwPackSize, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 3)] IntPtr /*COR_FIELD_OFFSET **/ rFieldOffset, uint cMax, out uint pcFieldOffset);
	uint GetFieldMarshal (uint tk, out IntPtr /* byte* */ ppvNativeType);
	uint GetRVA (uint tk, out uint pulCodeRVA);
	uint GetPermissionSetProps (uint pm, out uint pdwAction, out IntPtr /* void* */ ppvPermission);
	uint GetSigFromToken (uint mdSig, out IntPtr /* byte* */ ppvSig);
	uint GetModuleRefProps (uint mur, StringBuilder szName, uint cchName);
	uint EnumModuleRefs (ref uint phEnum, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 2)] uint [] rModuleRefs, uint cmax);
	uint GetTypeSpecFromToken (uint typespec, out IntPtr /* byte* */ ppvSig);
	uint GetNameFromToken (uint tk);
	uint EnumUnresolvedMethods (ref uint phEnum, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 2)] uint [] rMethods, uint cMax);
	uint GetUserString (uint stk, StringBuilder szString, uint cchString);
	uint GetPinvokeMap (uint tk, out uint pdwMappingFlags, StringBuilder szImportName, uint cchImportName, out uint pchImportName);
	uint EnumSignatures (ref uint phEnum, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 2)] uint [] rSignatures, uint cmax);
	uint EnumTypeSpecs (ref uint phEnum, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 2)] uint [] rTypeSpecs, uint cmax);
	uint EnumUserStrings (ref uint phEnum, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 2)] uint [] rStrings, uint cmax);
	[PreserveSig]
	int GetParamForMethodIndex (uint md, uint ulParamSeq, out uint pParam);
	uint EnumCustomAttributes (ref uint phEnum, uint tk, uint tkType, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 4)] uint [] rCustomAttributes, uint cMax);
	uint GetCustomAttributeProps (uint cv, out uint ptkObj, out uint ptkType, out IntPtr /* void* */ ppBlob);
	uint FindTypeRef (uint tkResolutionScope, string szName);
	uint GetMemberProps (uint mb, out uint pClass, StringBuilder szMember, uint cchMember, out uint pchMember, out uint pdwAttr,
		out IntPtr /* byte* */ ppvSigBlob, out uint pcbSigBlob, out uint pulCodeRVA, out uint pdwImplFlags, out uint pdwCPlusTypeFlag, out IntPtr /* void* */ ppValue);
	uint GetFieldProps (uint mb, out uint pClass, StringBuilder szField, uint cchField, out uint pchField, out uint pdwAttr,
		out IntPtr /* byte* */ ppvSigBlob, out uint pcbSigBlob, out uint pdwCPlusTypeFlag, out IntPtr /* void* */ ppValue);
	uint GetPropertyProps (uint prop, out uint pClass, StringBuilder szProperty, uint cchProperty, out uint pchProperty, out uint pdwPropFlags,
		out IntPtr /* byte* */ ppvSig, out uint pbSig, out uint pdwCPlusTypeFlag, out IntPtr /* void* */ ppDefaultValue, out uint pcchDefaultValue, out uint pmdSetter,
		out uint pmdGetter, [MarshalAs (UnmanagedType.LPArray, SizeParamIndex = 14)] uint [] rmdOtherMethod, uint cMax);
	uint GetParamProps (uint tk, out uint pmd, out uint pulSequence, StringBuilder szName, uint cchName, out uint pchName,
		out uint pdwAttr, out uint pdwCPlusTypeFlag, out IntPtr /* void* */ ppValue);
	uint GetCustomAttributeByName (uint tkObj, string szName, out IntPtr /* void* */ ppData);
	[PreserveSig]
	[return: MarshalAs (UnmanagedType.Bool)]
	bool IsValidToken (uint tk);
	uint GetNestedClassProps (uint tdNestedClass);
	uint GetNativeCallConvFromSig (IntPtr /* void* */ pvSig, uint cbSig);
	int IsGlobal (uint pd);
}
class ModuleMetadata : IMetaDataEmit, IMetaDataImport {
readonly ModuleDefinition module;
Dictionary<uint, TypeDefinition> types;
Dictionary<uint, MethodDefinition> methods;
// Wraps a Cecil module so native consumers can query it through the
// IMetaDataEmit/IMetaDataImport COM interfaces. Token maps are built
// lazily on first lookup.
public ModuleMetadata (ModuleDefinition module)
{
	this.module = module;
}
// Looks up a type definition by raw metadata token, building the
// token -> definition maps on first use.
bool TryGetType (uint token, out TypeDefinition type)
{
	if (types == null)
		InitializeMetadata (module);
	return types.TryGetValue (token, out type);
}
// Looks up a method definition by raw metadata token, building the
// token -> definition maps on first use.
bool TryGetMethod (uint token, out MethodDefinition method)
{
	if (methods == null)
		InitializeMetadata (module);
	return methods.TryGetValue (token, out method);
}
// Populates the token -> definition lookup tables for every type in the
// module (and, through InitializeMethods, every method of each type).
// Invoked lazily by the first TryGetType/TryGetMethod call.
void InitializeMetadata (ModuleDefinition module)
{
	types = new Dictionary<uint, TypeDefinition> ();
	methods = new Dictionary<uint, MethodDefinition> ();

	foreach (var definition in module.GetTypes ()) {
		var token = definition.MetadataToken.ToUInt32 ();
		types.Add (token, definition);
		InitializeMethods (definition);
	}
}
// Indexes every method of the given type by its metadata token.
void InitializeMethods (TypeDefinition type)
{
	foreach (var method in type.Methods)
		methods.Add (method.MetadataToken.ToUInt32 (), method);
}
// ---- IMetaDataEmit implementation ----
// This wrapper only exposes metadata for reading; the emit-side surface
// is stubbed out and presumably never invoked by the native consumer
// (any call would throw NotImplementedException immediately).
public void SetModuleProps (string szName)
{
	throw new NotImplementedException ();
}

public void Save (string szFile, uint dwSaveFlags)
{
	throw new NotImplementedException ();
}

public void SaveToStream (IntPtr pIStream, uint dwSaveFlags)
{
	throw new NotImplementedException ();
}

public uint GetSaveSize (uint fSave)
{
	throw new NotImplementedException ();
}

public uint DefineTypeDef (IntPtr szTypeDef, uint dwTypeDefFlags, uint tkExtends, IntPtr rtkImplements)
{
	throw new NotImplementedException ();
}

public uint DefineNestedType (IntPtr szTypeDef, uint dwTypeDefFlags, uint tkExtends, IntPtr rtkImplements, uint tdEncloser)
{
	throw new NotImplementedException ();
}

public void SetHandler (object pUnk)
{
	throw new NotImplementedException ();
}

public uint DefineMethod (uint td, IntPtr zName, uint dwMethodFlags, IntPtr pvSigBlob, uint cbSigBlob, uint ulCodeRVA, uint dwImplFlags)
{
	throw new NotImplementedException ();
}

public void DefineMethodImpl (uint td, uint tkBody, uint tkDecl)
{
	throw new NotImplementedException ();
}

public uint DefineTypeRefByName (uint tkResolutionScope, IntPtr szName)
{
	throw new NotImplementedException ();
}

public uint DefineImportType (IntPtr pAssemImport, IntPtr pbHashValue, uint cbHashValue, IMetaDataImport pImport, uint tdImport, IntPtr pAssemEmit)
{
	throw new NotImplementedException ();
}

public uint DefineMemberRef (uint tkImport, string szName, IntPtr pvSigBlob, uint cbSigBlob)
{
	throw new NotImplementedException ();
}

public uint DefineImportMember (IntPtr pAssemImport, IntPtr pbHashValue, uint cbHashValue, IMetaDataImport pImport, uint mbMember, IntPtr pAssemEmit, uint tkParent)
{
	throw new NotImplementedException ();
}

public uint DefineEvent (uint td, string szEvent, uint dwEventFlags, uint tkEventType, uint mdAddOn, uint mdRemoveOn, uint mdFire, IntPtr rmdOtherMethods)
{
	throw new NotImplementedException ();
}

public void SetClassLayout (uint td, uint dwPackSize, IntPtr rFieldOffsets, uint ulClassSize)
{
	throw new NotImplementedException ();
}

public void DeleteClassLayout (uint td)
{
	throw new NotImplementedException ();
}

public void SetFieldMarshal (uint tk, IntPtr pvNativeType, uint cbNativeType)
{
	throw new NotImplementedException ();
}

public void DeleteFieldMarshal (uint tk)
{
	throw new NotImplementedException ();
}

public uint DefinePermissionSet (uint tk, uint dwAction, IntPtr pvPermission, uint cbPermission)
{
	throw new NotImplementedException ();
}

public void SetRVA (uint md, uint ulRVA)
{
	throw new NotImplementedException ();
}

public uint GetTokenFromSig (IntPtr pvSig, uint cbSig)
{
	throw new NotImplementedException ();
}

public uint DefineModuleRef (string szName)
{
	throw new NotImplementedException ();
}

public void SetParent (uint mr, uint tk)
{
	throw new NotImplementedException ();
}

public uint GetTokenFromTypeSpec (IntPtr pvSig, uint cbSig)
{
	throw new NotImplementedException ();
}

public void SaveToMemory (IntPtr pbData, uint cbData)
{
	throw new NotImplementedException ();
}

public uint DefineUserString (string szString, uint cchString)
{
	throw new NotImplementedException ();
}

public void DeleteToken (uint tkObj)
{
	throw new NotImplementedException ();
}

public void SetMethodProps (uint md, uint dwMethodFlags, uint ulCodeRVA, uint dwImplFlags)
{
	throw new NotImplementedException ();
}

public void SetTypeDefProps (uint td, uint dwTypeDefFlags, uint tkExtends, IntPtr rtkImplements)
{
	throw new NotImplementedException ();
}

public void SetEventProps (uint ev, uint dwEventFlags, uint tkEventType, uint mdAddOn, uint mdRemoveOn, uint mdFire, IntPtr rmdOtherMethods)
{
	throw new NotImplementedException ();
}

public uint SetPermissionSetProps (uint tk, uint dwAction, IntPtr pvPermission, uint cbPermission)
{
	throw new NotImplementedException ();
}

public void DefinePinvokeMap (uint tk, uint dwMappingFlags, string szImportName, uint mrImportDLL)
{
	throw new NotImplementedException ();
}

public void SetPinvokeMap (uint tk, uint dwMappingFlags, string szImportName, uint mrImportDLL)
{
	throw new NotImplementedException ();
}

public void DeletePinvokeMap (uint tk)
{
	throw new NotImplementedException ();
}

public uint DefineCustomAttribute (uint tkObj, uint tkType, IntPtr pCustomAttribute, uint cbCustomAttribute)
{
	throw new NotImplementedException ();
}

public void SetCustomAttributeValue (uint pcv, IntPtr pCustomAttribute, uint cbCustomAttribute)
{
	throw new NotImplementedException ();
}

public uint DefineField (uint td, string szName, uint dwFieldFlags, IntPtr pvSigBlob, uint cbSigBlob, uint dwCPlusTypeFlag, IntPtr pValue, uint cchValue)
{
	throw new NotImplementedException ();
}

public uint DefineProperty (uint td, string szProperty, uint dwPropFlags, IntPtr pvSig, uint cbSig, uint dwCPlusTypeFlag, IntPtr pValue, uint cchValue, uint mdSetter, uint mdGetter, IntPtr rmdOtherMethods)
{
	throw new NotImplementedException ();
}

public uint DefineParam (uint md, uint ulParamSeq, string szName, uint dwParamFlags, uint dwCPlusTypeFlag, IntPtr pValue, uint cchValue)
{
	throw new NotImplementedException ();
}

public void SetFieldProps (uint fd, uint dwFieldFlags, uint dwCPlusTypeFlag, IntPtr pValue, uint cchValue)
{
	throw new NotImplementedException ();
}

public void SetPropertyProps (uint pr, uint dwPropFlags, uint dwCPlusTypeFlag, IntPtr pValue, uint cchValue, uint mdSetter, uint mdGetter, IntPtr rmdOtherMethods)
{
	throw new NotImplementedException ();
}

public void SetParamProps (uint pd, string szName, uint dwParamFlags, uint dwCPlusTypeFlag, IntPtr pValue, uint cchValue)
{
	throw new NotImplementedException ();
}

public uint DefineSecurityAttributeSet (uint tkObj, IntPtr rSecAttrs, uint cSecAttrs)
{
	throw new NotImplementedException ();
}

public void ApplyEditAndContinue (object pImport)
{
	throw new NotImplementedException ();
}

public uint TranslateSigWithScope (IntPtr pAssemImport, IntPtr pbHashValue, uint cbHashValue, IMetaDataImport import, IntPtr pbSigBlob, uint cbSigBlob, IntPtr pAssemEmit, IMetaDataEmit emit, IntPtr pvTranslatedSig, uint cbTranslatedSigMax)
{
	throw new NotImplementedException ();
}

public void SetMethodImplFlags (uint md, uint dwImplFlags)
{
	throw new NotImplementedException ();
}

public void SetFieldRVA (uint fd, uint ulRVA)
{
	throw new NotImplementedException ();
}

public void Merge (IMetaDataImport pImport, IntPtr pHostMapToken, object pHandler)
{
	throw new NotImplementedException ();
}

public void MergeEnd ()
{
	throw new NotImplementedException ();
}
// ---- IMetaDataImport implementation ----
// Only the members the consumer actually calls are implemented below
// (GetTypeDefProps, GetMethodProps); the rest are stubs.
public void CloseEnum (uint hEnum)
{
	throw new NotImplementedException ();
}

public uint CountEnum (uint hEnum)
{
	throw new NotImplementedException ();
}

public void ResetEnum (uint hEnum, uint ulPos)
{
	throw new NotImplementedException ();
}

public uint EnumTypeDefs (ref uint phEnum, uint[] rTypeDefs, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumInterfaceImpls (ref uint phEnum, uint td, uint[] rImpls, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumTypeRefs (ref uint phEnum, uint[] rTypeRefs, uint cMax)
{
	throw new NotImplementedException ();
}

public uint FindTypeDefByName (string szTypeDef, uint tkEnclosingClass)
{
	throw new NotImplementedException ();
}

public Guid GetScopeProps (StringBuilder szName, uint cchName, out uint pchName)
{
	throw new NotImplementedException ();
}

public uint GetModuleFromScope ()
{
	throw new NotImplementedException ();
}
// IMetaDataImport.GetTypeDefProps: writes the type's name into the
// caller-supplied buffer, stores its attribute flags through
// pdwTypeDefFlags, and returns the base type's token (0 when the token
// is unknown or the type has no base type).
public uint GetTypeDefProps (uint td, IntPtr szTypeDef, uint cchTypeDef, out uint pchTypeDef, IntPtr pdwTypeDefFlags)
{
	TypeDefinition type;
	if (!TryGetType (td, out type)) {
		// Unknown token: report an empty (terminator-only) name.
		Marshal.WriteInt16 (szTypeDef, 0);
		pchTypeDef = 1;
		return 0;
	}
	// Nested types are reported by simple name, top-level types by full name.
	WriteString (type.IsNested ? type.Name : type.FullName, szTypeDef, cchTypeDef, out pchTypeDef);
	WriteIntPtr (pdwTypeDefFlags, (uint) type.Attributes);
	return type.BaseType != null ? type.BaseType.MetadataToken.ToUInt32 () : 0;
}
// Writes a 32-bit value through an optional out-pointer. COM callers may
// pass a null pointer when they do not want the value back, so a zero
// pointer is silently ignored.
static void WriteIntPtr (IntPtr ptr, uint value)
{
	if (ptr != IntPtr.Zero)
		Marshal.WriteInt32 (ptr, (int) value);
}
// Copies <str> into a caller-supplied UTF-16 buffer of <bufferSize>
// characters, always null-terminating, truncating when the string does
// not fit. <chars> receives the number of characters written including
// the terminator (0 when nothing could be written).
static void WriteString (string str, IntPtr buffer, uint bufferSize, out uint chars)
{
	// BUG FIX: the original computed bufferSize - 1 unconditionally, which
	// underflows to uint.MaxValue for a zero-length buffer and would write
	// far past the buffer. A zero-length buffer cannot hold even the
	// terminator, so write nothing.
	if (bufferSize == 0) {
		chars = 0;
		return;
	}
	// Reserve one slot for the null terminator; truncate if necessary.
	var length = str.Length + 1 >= bufferSize ? bufferSize - 1 : (uint) str.Length;
	chars = length + 1;
	var offset = 0;
	for (int i = 0; i < length; i++) {
		Marshal.WriteInt16 (buffer, offset, str [i]);
		offset += 2;
	}
	Marshal.WriteInt16 (buffer, offset, 0);
}
// Remaining IMetaDataImport member-lookup/enumeration stubs - not needed
// by the consumer, so they all throw.
public uint GetInterfaceImplProps (uint iiImpl, out uint pClass)
{
	throw new NotImplementedException ();
}

public uint GetTypeRefProps (uint tr, out uint ptkResolutionScope, StringBuilder szName, uint cchName)
{
	throw new NotImplementedException ();
}

public uint ResolveTypeRef (uint tr, ref Guid riid, out object ppIScope)
{
	throw new NotImplementedException ();
}

public uint EnumMembers (ref uint phEnum, uint cl, uint[] rMembers, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumMembersWithName (ref uint phEnum, uint cl, string szName, uint[] rMembers, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumMethods (ref uint phEnum, uint cl, IntPtr rMethods, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumMethodsWithName (ref uint phEnum, uint cl, string szName, uint[] rMethods, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumFields (ref uint phEnum, uint cl, IntPtr rFields, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumFieldsWithName (ref uint phEnum, uint cl, string szName, uint[] rFields, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumParams (ref uint phEnum, uint mb, uint[] rParams, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumMemberRefs (ref uint phEnum, uint tkParent, uint[] rMemberRefs, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumMethodImpls (ref uint phEnum, uint td, uint[] rMethodBody, uint[] rMethodDecl, uint cMax)
{
	throw new NotImplementedException ();
}

public uint EnumPermissionSets (ref uint phEnum, uint tk, uint dwActions, uint[] rPermission, uint cMax)
{
	throw new NotImplementedException ();
}

public uint FindMember (uint td, string szName, byte[] pvSigBlob, uint cbSigBlob)
{
	throw new NotImplementedException ();
}

public uint FindMethod (uint td, string szName, byte[] pvSigBlob, uint cbSigBlob)
{
	throw new NotImplementedException ();
}

public uint FindField (uint td, string szName, byte[] pvSigBlob, uint cbSigBlob)
{
	throw new NotImplementedException ();
}

public uint FindMemberRef (uint td, string szName, byte[] pvSigBlob, uint cbSigBlob)
{
	throw new NotImplementedException ();
}
// Reports declaring type, name, attribute flags and code RVA for the method
// token mb, COM style: the name is copied into the caller-supplied szMethod
// buffer (pchMethod receives the character count written, including the NUL
// terminator), flags/RVA go into pdwAttr/pulCodeRVA when non-null.
// Returns the method's implementation flags, or 0 (with pClass = 0 and an
// empty name) when the token cannot be resolved.
// NOTE(review): ppvSigBlob/pcbSigBlob are accepted but never filled in —
// presumably the consumer does not read them; confirm against the caller.
public uint GetMethodProps (uint mb, out uint pClass, IntPtr szMethod, uint cchMethod, out uint pchMethod, IntPtr pdwAttr, IntPtr ppvSigBlob, IntPtr pcbSigBlob, IntPtr pulCodeRVA)
{
	MethodDefinition method;
	if (!TryGetMethod (mb, out method)) {
		// Unknown token: report an empty name (just the NUL terminator).
		Marshal.WriteInt16 (szMethod, 0);
		pchMethod = 1;
		pClass = 0;
		return 0;
	}
	pClass = method.DeclaringType.MetadataToken.ToUInt32 ();
	WriteString (method.Name, szMethod, cchMethod, out pchMethod);
	WriteIntPtr (pdwAttr, (uint) method.Attributes);
	WriteIntPtr (pulCodeRVA, (uint) method.RVA);
	return (uint) method.ImplAttributes;
}
// Member/property/event/signature inspection portion of the metadata-import
// interface: unsupported by this implementation, present only to satisfy the
// interface contract.
public uint GetMemberRefProps (uint mr, ref uint ptk, StringBuilder szMember, uint cchMember, out uint pchMember, out IntPtr ppvSigBlob)
{
	throw new NotImplementedException ();
}
public uint EnumProperties (ref uint phEnum, uint td, IntPtr rProperties, uint cMax)
{
	throw new NotImplementedException ();
}
public uint EnumEvents (ref uint phEnum, uint td, IntPtr rEvents, uint cMax)
{
	throw new NotImplementedException ();
}
public uint GetEventProps (uint ev, out uint pClass, StringBuilder szEvent, uint cchEvent, out uint pchEvent, out uint pdwEventFlags, out uint ptkEventType, out uint pmdAddOn, out uint pmdRemoveOn, out uint pmdFire, uint[] rmdOtherMethod, uint cMax)
{
	throw new NotImplementedException ();
}
public uint EnumMethodSemantics (ref uint phEnum, uint mb, uint[] rEventProp, uint cMax)
{
	throw new NotImplementedException ();
}
public uint GetMethodSemantics (uint mb, uint tkEventProp)
{
	throw new NotImplementedException ();
}
public uint GetClassLayout (uint td, out uint pdwPackSize, IntPtr rFieldOffset, uint cMax, out uint pcFieldOffset)
{
	throw new NotImplementedException ();
}
public uint GetFieldMarshal (uint tk, out IntPtr ppvNativeType)
{
	throw new NotImplementedException ();
}
public uint GetRVA (uint tk, out uint pulCodeRVA)
{
	throw new NotImplementedException ();
}
public uint GetPermissionSetProps (uint pm, out uint pdwAction, out IntPtr ppvPermission)
{
	throw new NotImplementedException ();
}
public uint GetSigFromToken (uint mdSig, out IntPtr ppvSig)
{
	throw new NotImplementedException ();
}
public uint GetModuleRefProps (uint mur, StringBuilder szName, uint cchName)
{
	throw new NotImplementedException ();
}
public uint EnumModuleRefs (ref uint phEnum, uint[] rModuleRefs, uint cmax)
{
	throw new NotImplementedException ();
}
public uint GetTypeSpecFromToken (uint typespec, out IntPtr ppvSig)
{
	throw new NotImplementedException ();
}
public uint GetNameFromToken (uint tk)
{
	throw new NotImplementedException ();
}
public uint EnumUnresolvedMethods (ref uint phEnum, uint[] rMethods, uint cMax)
{
	throw new NotImplementedException ();
}
public uint GetUserString (uint stk, StringBuilder szString, uint cchString)
{
	throw new NotImplementedException ();
}
public uint GetPinvokeMap (uint tk, out uint pdwMappingFlags, StringBuilder szImportName, uint cchImportName, out uint pchImportName)
{
	throw new NotImplementedException ();
}
public uint EnumSignatures (ref uint phEnum, uint[] rSignatures, uint cmax)
{
	throw new NotImplementedException ();
}
public uint EnumTypeSpecs (ref uint phEnum, uint[] rTypeSpecs, uint cmax)
{
	throw new NotImplementedException ();
}
public uint EnumUserStrings (ref uint phEnum, uint[] rStrings, uint cmax)
{
	throw new NotImplementedException ();
}
public int GetParamForMethodIndex (uint md, uint ulParamSeq, out uint pParam)
{
	throw new NotImplementedException ();
}
public uint EnumCustomAttributes (ref uint phEnum, uint tk, uint tkType, uint[] rCustomAttributes, uint cMax)
{
	throw new NotImplementedException ();
}
public uint GetCustomAttributeProps (uint cv, out uint ptkObj, out uint ptkType, out IntPtr ppBlob)
{
	throw new NotImplementedException ();
}
public uint FindTypeRef (uint tkResolutionScope, string szName)
{
	throw new NotImplementedException ();
}
public uint GetMemberProps (uint mb, out uint pClass, StringBuilder szMember, uint cchMember, out uint pchMember, out uint pdwAttr, out IntPtr ppvSigBlob, out uint pcbSigBlob, out uint pulCodeRVA, out uint pdwImplFlags, out uint pdwCPlusTypeFlag, out IntPtr ppValue)
{
	throw new NotImplementedException ();
}
public uint GetFieldProps (uint mb, out uint pClass, StringBuilder szField, uint cchField, out uint pchField, out uint pdwAttr, out IntPtr ppvSigBlob, out uint pcbSigBlob, out uint pdwCPlusTypeFlag, out IntPtr ppValue)
{
	throw new NotImplementedException ();
}
public uint GetPropertyProps (uint prop, out uint pClass, StringBuilder szProperty, uint cchProperty, out uint pchProperty, out uint pdwPropFlags, out IntPtr ppvSig, out uint pbSig, out uint pdwCPlusTypeFlag, out IntPtr ppDefaultValue, out uint pcchDefaultValue, out uint pmdSetter, out uint pmdGetter, uint[] rmdOtherMethod, uint cMax)
{
	throw new NotImplementedException ();
}
public uint GetParamProps (uint tk, out uint pmd, out uint pulSequence, StringBuilder szName, uint cchName, out uint pchName, out uint pdwAttr, out uint pdwCPlusTypeFlag, out IntPtr ppValue)
{
	throw new NotImplementedException ();
}
public uint GetCustomAttributeByName (uint tkObj, string szName, out IntPtr ppData)
{
	throw new NotImplementedException ();
}
public bool IsValidToken (uint tk)
{
	throw new NotImplementedException ();
}
// Resolves the declaring (enclosing) type of a nested type definition.
// Returns the enclosing type's metadata token, or 0 when the token cannot
// be resolved or the type is not nested.
public uint GetNestedClassProps (uint tdNestedClass)
{
	TypeDefinition type;
	if (TryGetType (tdNestedClass, out type) && type.IsNested)
		return type.DeclaringType.MetadataToken.ToUInt32 ();
	return 0;
}
// Remaining unsupported members of the metadata-import interface.
public uint GetNativeCallConvFromSig (IntPtr pvSig, uint cbSig)
{
	throw new NotImplementedException ();
}
public int IsGlobal (uint pd)
{
	throw new NotImplementedException ();
}
}
}
#endif
| |
using UnityEngine;
using UnityEditor;
using System.Collections;
// Serializable pan/zoom state for Unity editor windows (curve editors,
// timelines, profilers). Maintains the affine mapping between "drawing"
// space (curve/time values) and view pixels via m_Scale/m_Translation, and
// handles the IMGUI mouse and scroll-wheel events that pan and zoom it.
[System.Serializable]
public class ZoomArea
{
// Global state
// Mouse-down anchor for zoom drags, in drawing (transformed) space.
// The sentinel (-1000000, -1000000) means "no drag in progress".
private static Vector2 m_MouseDownPosition = new Vector2(-1000000, -1000000); // in transformed space
// Hash used to derive stable IMGUI control ids for this widget type.
private static int zoomableAreaHash = "ZoomableArea".GetHashCode();
// Range lock settings
// When locked, the corresponding axis ignores pan and zoom input.
[SerializeField]
private bool m_HRangeLocked;
[SerializeField]
private bool m_VRangeLocked;
public bool hRangeLocked { get { return m_HRangeLocked; } set { m_HRangeLocked = value; } }
public bool vRangeLocked { get { return m_VRangeLocked; } set { m_VRangeLocked = value; } }
// Base (preferred) visible range per axis, in drawing-space units.
[SerializeField]
private float m_HBaseRangeMin = 0;
[SerializeField]
private float m_HBaseRangeMax = 1;
[SerializeField]
private float m_VBaseRangeMin = 0;
[SerializeField]
private float m_VBaseRangeMax = 1;
public float hBaseRangeMin { get { return m_HBaseRangeMin; } set { m_HBaseRangeMin = value; } }
public float hBaseRangeMax { get { return m_HBaseRangeMax; } set { m_HBaseRangeMax = value; } }
public float vBaseRangeMin { get { return m_VBaseRangeMin; } set { m_VBaseRangeMin = value; } }
public float vBaseRangeMax { get { return m_VBaseRangeMax; } set { m_VBaseRangeMax = value; } }
// Whether the user may scroll/zoom past each end of the base range.
[SerializeField]
private bool m_HAllowExceedBaseRangeMin = true;
[SerializeField]
private bool m_HAllowExceedBaseRangeMax = true;
[SerializeField]
private bool m_VAllowExceedBaseRangeMin = true;
[SerializeField]
private bool m_VAllowExceedBaseRangeMax = true;
public bool hAllowExceedBaseRangeMin { get { return m_HAllowExceedBaseRangeMin; } set { m_HAllowExceedBaseRangeMin = value; } }
public bool hAllowExceedBaseRangeMax { get { return m_HAllowExceedBaseRangeMax; } set { m_HAllowExceedBaseRangeMax = value; } }
public bool vAllowExceedBaseRangeMin { get { return m_VAllowExceedBaseRangeMin; } set { m_VAllowExceedBaseRangeMin = value; } }
public bool vAllowExceedBaseRangeMax { get { return m_VAllowExceedBaseRangeMax; } set { m_VAllowExceedBaseRangeMax = value; } }
// Effective hard range limits: infinite when exceeding the base range is
// allowed, otherwise the base range end itself. Setting a finite value
// pins the limit; setting +/-infinity re-enables free scrolling (see
// SetAllowExceed for the reset of the base value in that case).
public float hRangeMin
{
get { return (hAllowExceedBaseRangeMin ? Mathf.NegativeInfinity : hBaseRangeMin); }
set { SetAllowExceed(ref m_HBaseRangeMin, ref m_HAllowExceedBaseRangeMin, value); }
}
public float hRangeMax
{
get { return (hAllowExceedBaseRangeMax ? Mathf.Infinity : hBaseRangeMax); }
set { SetAllowExceed(ref m_HBaseRangeMax, ref m_HAllowExceedBaseRangeMax, value); }
}
public float vRangeMin
{
get { return (vAllowExceedBaseRangeMin ? Mathf.NegativeInfinity : vBaseRangeMin); }
set { SetAllowExceed(ref m_VBaseRangeMin, ref m_VAllowExceedBaseRangeMin, value); }
}
public float vRangeMax
{
get { return (vAllowExceedBaseRangeMax ? Mathf.Infinity : vBaseRangeMax); }
set { SetAllowExceed(ref m_VBaseRangeMax, ref m_VAllowExceedBaseRangeMax, value); }
}
// Shared setter for the hRange*/vRange* properties above: an infinite
// value means "unlimited" (allowExceed = true, base reset to 0 or 1);
// any finite value becomes the hard range end.
private void SetAllowExceed(ref float rangeEnd, ref bool allowExceed, float value)
{
if (value == Mathf.NegativeInfinity || value == Mathf.Infinity)
{
rangeEnd = (value == Mathf.NegativeInfinity ? 0 : 1);
allowExceed = true;
}
else
{
rangeEnd = value;
allowExceed = false;
}
}
// Zoom limits. NOTE(review): despite the "Scale" names, EnforceScaleAndRange
// applies these as bounds on the *shown width/height* in drawing units, not
// directly on m_Scale — confirm that is the intended semantics.
private float m_HScaleMin = 0.001f;
private float m_HScaleMax = 100000.0f;
private float m_VScaleMin = 0.001f;
private float m_VScaleMax = 100000.0f;
// Window resize settings
// true: keep the shown area fixed when the draw rect resizes (content
// stretches); false: keep the scale and re-center the translation.
[SerializeField]
private bool m_ScaleWithWindow = false;
public bool scaleWithWindow { get { return m_ScaleWithWindow; } set { m_ScaleWithWindow = value; } }
// Slider settings
[SerializeField]
private bool m_HSlider = true;
[SerializeField]
private bool m_VSlider = true;
// Toggling a slider changes how much of `rect` the draw area occupies, so
// re-assign rect to recompute the draw area for the new slider layout.
public bool hSlider { get { return m_HSlider; } set { Rect r = rect; m_HSlider = value; rect = r; } }
public bool vSlider { get { return m_VSlider; } set { Rect r = rect; m_VSlider = value; rect = r; } }
[SerializeField]
private bool m_IgnoreScrollWheelUntilClicked = false;
public bool ignoreScrollWheelUntilClicked { get { return m_IgnoreScrollWheelUntilClicked; } set { m_IgnoreScrollWheelUntilClicked = value; } }
// NOTE(review): m_UniformScale is exposed but not referenced anywhere in
// this class — presumably consumed by callers; confirm before removing.
public bool m_UniformScale;
public bool uniformScale { get { return m_UniformScale; } set { m_UniformScale = value; } }
// View and drawing settings
// Pixel rect actually used for drawing (excludes the slider gutters).
[SerializeField]
private Rect m_DrawArea = new Rect(0, 0, 100, 100);
internal void SetDrawRectHack(Rect r) { m_DrawArea = r; }
// Drawing -> view transform: view = drawing * m_Scale + m_Translation.
// m_Scale.y starts negative because view-space y grows downwards.
[SerializeField]
internal Vector2 m_Scale = new Vector2(1, -1);
[SerializeField]
internal Vector2 m_Translation = new Vector2(0, 0);
// Pixel margins inside the draw rect that the shown range keeps clear.
[SerializeField]
private float m_MarginLeft, m_MarginRight, m_MarginTop, m_MarginBottom;
// Remembered by EnforceScaleAndRange to detect and damp area changes.
[SerializeField]
private Rect m_LastShownAreaInsideMargins = new Rect(0, 0, 100, 100);
public Vector2 scale { get { return m_Scale; } }
public float margin { set { m_MarginLeft = m_MarginRight = m_MarginTop = m_MarginBottom = value; } }
public float leftmargin { get { return m_MarginLeft; } set { m_MarginLeft = value; } }
public float rightmargin { get { return m_MarginRight; } set { m_MarginRight = value; } }
public float topmargin { get { return m_MarginTop; } set { m_MarginTop = value; } }
public float bottommargin { get { return m_MarginBottom; } set { m_MarginBottom = value; } }
// Minimal GUI mode uses slim min/max slider skins and no visual gutter.
[SerializeField]
bool m_MinimalGUI;
// GUIStyle bundle for the background and the four scrollbar parts.
// Styles referencing editor skins must be resolved inside OnGUI, hence the
// separate InitGUIStyles step.
[System.Serializable]
public class Styles
{
public GUIStyle background = "AnimationCurveEditorBackground";
public GUIStyle horizontalScrollbar;
public GUIStyle horizontalMinMaxScrollbarThumb;
public GUIStyle horizontalScrollbarLeftButton;
public GUIStyle horizontalScrollbarRightButton;
public GUIStyle verticalScrollbar;
public GUIStyle verticalMinMaxScrollbarThumb;
public GUIStyle verticalScrollbarUpButton;
public GUIStyle verticalScrollbarDownButton;
// sliderWidth: hit-test width; visualSliderWidth: pixels reserved in rect.
public float sliderWidth;
public float visualSliderWidth;
public Styles(bool minimalGUI)
{
if (minimalGUI)
{
// Minimal mode overlays the slider on the content (no reserved space).
visualSliderWidth = 0;
sliderWidth = 15;
}
else
{
visualSliderWidth = 15;
sliderWidth = 15;
}
}
// Must be called from OnGUI: GUI.skin / named styles are only valid there.
public void InitGUIStyles(bool minimalGUI)
{
if (minimalGUI)
{
horizontalMinMaxScrollbarThumb = "MiniMinMaxSliderHorizontal";
horizontalScrollbarLeftButton = GUIStyle.none;
horizontalScrollbarRightButton = GUIStyle.none;
horizontalScrollbar = GUIStyle.none;
verticalMinMaxScrollbarThumb = "MiniMinMaxSlidervertical";
verticalScrollbarUpButton = GUIStyle.none;
verticalScrollbarDownButton = GUIStyle.none;
verticalScrollbar = GUIStyle.none;
}
else
{
horizontalMinMaxScrollbarThumb = "horizontalMinMaxScrollbarThumb";
horizontalScrollbarLeftButton = "horizontalScrollbarLeftbutton";
horizontalScrollbarRightButton = "horizontalScrollbarRightbutton";
horizontalScrollbar = GUI.skin.horizontalScrollbar;
verticalMinMaxScrollbarThumb = "verticalMinMaxScrollbarThumb";
verticalScrollbarUpButton = "verticalScrollbarUpbutton";
verticalScrollbarDownButton = "verticalScrollbarDownbutton";
verticalScrollbar = GUI.skin.verticalScrollbar;
}
}
}
private Styles m_Styles;
// Lazily created so the serialized m_MinimalGUI flag is honored.
private Styles styles
{
get
{
if (m_Styles == null)
m_Styles = new Styles(m_MinimalGUI);
return m_Styles;
}
}
// Total widget rect including slider gutters. Assigning it resizes the
// draw area and either preserves the shown area (scaleWithWindow) or
// re-centers the translation.
public Rect rect
{
get { return new Rect(drawRect.x, drawRect.y, drawRect.width + (m_VSlider ? styles.visualSliderWidth : 0), drawRect.height + (m_HSlider ? styles.visualSliderWidth : 0)); }
set
{
Rect newDrawArea = new Rect(value.x, value.y, value.width - (m_VSlider ? styles.visualSliderWidth : 0), value.height - (m_HSlider ? styles.visualSliderWidth : 0));
if (newDrawArea != m_DrawArea)
{
if (m_ScaleWithWindow)
{
m_DrawArea = newDrawArea;
shownAreaInsideMargins = m_LastShownAreaInsideMargins;
}
else
{
m_Translation += new Vector2((newDrawArea.width - m_DrawArea.width) / 2, (newDrawArea.height - m_DrawArea.height) / 2);
m_DrawArea = newDrawArea;
}
}
EnforceScaleAndRange();
}
}
public Rect drawRect { get { return m_DrawArea; } }
// Set the visible horizontal range [min, max] keeping the margins clear.
public void SetShownHRangeInsideMargins(float min, float max)
{
m_Scale.x = (drawRect.width - leftmargin - rightmargin) / (max - min);
m_Translation.x = -min * m_Scale.x + leftmargin;
EnforceScaleAndRange();
}
public void SetShownHRange(float min, float max)
{
m_Scale.x = drawRect.width / (max - min);
m_Translation.x = -min * m_Scale.x;
EnforceScaleAndRange();
}
// Vertical variants: scale is negated because view-space y is flipped.
public void SetShownVRangeInsideMargins(float min, float max)
{
m_Scale.y = -(drawRect.height - topmargin - bottommargin) / (max - min);
m_Translation.y = drawRect.height - min * m_Scale.y - topmargin;
EnforceScaleAndRange();
}
public void SetShownVRange(float min, float max)
{
m_Scale.y = -drawRect.height / (max - min);
m_Translation.y = drawRect.height - min * m_Scale.y;
EnforceScaleAndRange();
}
// ShownArea is in curve space
public Rect shownArea
{
set
{
m_Scale.x = drawRect.width / value.width;
m_Scale.y = -drawRect.height / value.height;
m_Translation.x = -value.x * m_Scale.x;
m_Translation.y = drawRect.height - value.y * m_Scale.y;
EnforceScaleAndRange();
}
get
{
return new Rect(
-m_Translation.x / m_Scale.x,
-(m_Translation.y - drawRect.height) / m_Scale.y,
drawRect.width / m_Scale.x,
drawRect.height / -m_Scale.y
);
}
}
// Same as shownArea, but excluding the pixel margins; the canonical value
// that EnforceScaleAndRange constrains and remembers.
public Rect shownAreaInsideMargins
{
set
{
shownAreaInsideMarginsInternal = value;
EnforceScaleAndRange();
}
get
{
return shownAreaInsideMarginsInternal;
}
}
// Raw accessor without constraint enforcement (used by EnforceScaleAndRange
// itself to avoid recursion).
private Rect shownAreaInsideMarginsInternal
{
set
{
m_Scale.x = (drawRect.width - leftmargin - rightmargin) / value.width;
m_Scale.y = -(drawRect.height - topmargin - bottommargin) / value.height;
m_Translation.x = -value.x * m_Scale.x + leftmargin;
m_Translation.y = drawRect.height - value.y * m_Scale.y - topmargin;
}
get
{
// Convert pixel margins into drawing-space units at the current scale.
float leftmarginRel = leftmargin / m_Scale.x;
float rightmarginRel = rightmargin / m_Scale.x;
float topmarginRel = topmargin / m_Scale.y;
float bottommarginRel = bottommargin / m_Scale.y;
Rect area = shownArea;
area.x += leftmarginRel;
area.y -= topmarginRel;
area.width -= leftmarginRel + rightmarginRel;
area.height += topmarginRel + bottommarginRel;
return area;
}
}
// Bounds of the base range, in drawing space (overridable by subclasses).
public virtual Bounds drawingBounds
{
get
{
return new Bounds(
new Vector3((hBaseRangeMin + hBaseRangeMax) * 0.5f, (vBaseRangeMin + vBaseRangeMax) * 0.5f, 0),
new Vector3(hBaseRangeMax - hBaseRangeMin, vBaseRangeMax - vBaseRangeMin, 1)
);
}
}
// Utility transform functions
public Matrix4x4 drawingToViewMatrix
{
get
{
return Matrix4x4.TRS(m_Translation, Quaternion.identity, new Vector3(m_Scale.x, m_Scale.y, 1));
}
}
public Vector2 DrawingToViewTransformPoint(Vector2 lhs)
{ return new Vector2(lhs.x * m_Scale.x + m_Translation.x, lhs.y * m_Scale.y + m_Translation.y); }
public Vector3 DrawingToViewTransformPoint(Vector3 lhs)
{ return new Vector3(lhs.x * m_Scale.x + m_Translation.x, lhs.y * m_Scale.y + m_Translation.y, 0); }
public Vector2 ViewToDrawingTransformPoint(Vector2 lhs)
{ return new Vector2((lhs.x - m_Translation.x) / m_Scale.x, (lhs.y - m_Translation.y) / m_Scale.y); }
public Vector3 ViewToDrawingTransformPoint(Vector3 lhs)
{ return new Vector3((lhs.x - m_Translation.x) / m_Scale.x, (lhs.y - m_Translation.y) / m_Scale.y, 0); }
// Vector variants: apply scale only (no translation).
public Vector2 DrawingToViewTransformVector(Vector2 lhs)
{ return new Vector2(lhs.x * m_Scale.x, lhs.y * m_Scale.y); }
public Vector3 DrawingToViewTransformVector(Vector3 lhs)
{ return new Vector3(lhs.x * m_Scale.x, lhs.y * m_Scale.y, 0); }
public Vector2 ViewToDrawingTransformVector(Vector2 lhs)
{ return new Vector2(lhs.x / m_Scale.x, lhs.y / m_Scale.y); }
public Vector3 ViewToDrawingTransformVector(Vector3 lhs)
{ return new Vector3(lhs.x / m_Scale.x, lhs.y / m_Scale.y, 0); }
public Vector2 mousePositionInDrawing
{
get { return ViewToDrawingTransformPoint(Event.current.mousePosition); }
}
// Returns vec scaled so that its view-space length is 1, expressed back in
// drawing space. NOTE(review): divides by the view-space magnitude — a zero
// vector would produce NaNs; callers presumably never pass one.
public Vector2 NormalizeInViewSpace(Vector2 vec)
{
vec = Vector2.Scale(vec, m_Scale);
vec /= vec.magnitude;
return Vector2.Scale(vec, new Vector2(1 / m_Scale.x, 1 / m_Scale.y));
}
// Utility mouse event functions
private bool IsZoomEvent()
{
return (
(Event.current.button == 1 && Event.current.alt) // right+alt drag
//|| (Event.current.button == 0 && Event.current.command) // left+command drag
//|| (Event.current.button == 2 && Event.current.command) // middle+command drag
);
}
private bool IsPanEvent()
{
return (
(Event.current.button == 0 && Event.current.alt) // left+alt drag
|| (Event.current.button == 2 && !Event.current.command) // middle drag
);
}
public ZoomArea()
{
m_MinimalGUI = false;
}
public ZoomArea(bool minimalGUI)
{
m_MinimalGUI = minimalGUI;
}
// Call at the start of the host window's OnGUI: draws the background and
// routes zoom/pan input for the draw area.
public void BeginViewGUI()
{
// Styles referencing the GUI skin can only be resolved inside OnGUI.
if (styles.horizontalScrollbar == null)
styles.InitGUIStyles(m_MinimalGUI);
GUILayout.BeginArea(m_DrawArea, styles.background);
HandleZoomAndPanEvents(m_DrawArea);
GUILayout.EndArea();
}
// Consumes mouse-drag (zoom/pan) and scroll-wheel events targeting `area`
// (interpreted in the local space of the BeginArea above, hence x/y reset).
public void HandleZoomAndPanEvents(Rect area)
{
area.x = 0;
area.y = 0;
int id = GUIUtility.GetControlID(zoomableAreaHash, FocusType.Native, area);
switch (Event.current.GetTypeForControl(id))
{
case EventType.mouseDown:
if (area.Contains(Event.current.mousePosition))
{
// Catch keyboard control when clicked inside zoomable area
// (used to restrict scrollwheel)
GUIUtility.keyboardControl = id;
if (IsZoomEvent() || IsPanEvent())
{
GUIUtility.hotControl = id;
m_MouseDownPosition = mousePositionInDrawing;
Event.current.Use();
}
}
break;
case EventType.mouseUp:
//Debug.Log("mouse-up!");
if (GUIUtility.hotControl == id)
{
GUIUtility.hotControl = 0;
// If we got the mousedown, the mouseup is ours as well
// (no matter if the click was in the area or not)
m_MouseDownPosition = new Vector2(-1000000, -1000000);
//Event.current.Use();
}
break;
case EventType.mouseDrag:
if (GUIUtility.hotControl != id) break;
if (IsZoomEvent())
{
// Zoom in around mouse down position
Zoom(m_MouseDownPosition, false);
Event.current.Use();
}
else if (IsPanEvent())
{
// Pan view
Pan();
Event.current.Use();
}
break;
case EventType.scrollWheel:
if (!area.Contains(Event.current.mousePosition))
break;
if (m_IgnoreScrollWheelUntilClicked && GUIUtility.keyboardControl != id)
break;
// Zoom in around cursor position
Zoom(mousePositionInDrawing, true);
Event.current.Use();
break;
}
}
public void EndViewGUI()
{
}
// Shift the view by the current event's drag delta, honoring range locks.
private void Pan()
{
if (!m_HRangeLocked)
m_Translation.x += Event.current.delta.x;
if (!m_VRangeLocked)
m_Translation.y += Event.current.delta.y;
EnforceScaleAndRange();
}
// Scale the view around zoomAround (drawing space). scrollwhell [sic]
// indicates the delta comes from the scroll wheel, whose sign is inverted
// relative to a drag. Shift restricts zoom to vertical, the action key
// (ctrl/cmd) to horizontal.
private void Zoom(Vector2 zoomAround, bool scrollwhell)
{
// Get delta (from scroll wheel or mouse pad)
// Add x and y delta to cover all cases
// (scroll view has only y or only x when shift is pressed,
// while mouse pad has both x and y at all times)
float delta = Event.current.delta.x + Event.current.delta.y;
if (scrollwhell)
delta = -delta;
// Scale multiplier. Don't allow scale of zero or below!
float scale = Mathf.Max(0.01F, 1 + delta * 0.01F);
if (!m_HRangeLocked && !Event.current.shift)
{
// Offset to make zoom centered around cursor position
m_Translation.x -= zoomAround.x * (scale - 1) * m_Scale.x;
// Apply zooming
m_Scale.x *= scale;
}
if (!m_VRangeLocked && !EditorGUI.actionKey)
{
// Offset to make zoom centered around cursor position
m_Translation.y -= zoomAround.y * (scale - 1) * m_Scale.y;
// Apply zooming
m_Scale.y *= scale;
}
EnforceScaleAndRange();
}
// Clamps the shown area to the zoom limits and hard ranges, interpolating
// from the last shown area so the view cannot jump past a limit in one
// step, then stores the result as the new reference area.
public void EnforceScaleAndRange()
{
float hScaleMin = m_HScaleMin;
float vScaleMin = m_VScaleMin;
float hScaleMax = m_HScaleMax;
float vScaleMax = m_VScaleMax;
// A finite hard range additionally caps the shown width/height to the
// range size, so one screenful can never exceed the allowed range.
if (hRangeMax != Mathf.Infinity && hRangeMin != Mathf.NegativeInfinity)
hScaleMax = Mathf.Min(m_HScaleMax, hRangeMax - hRangeMin);
if (vRangeMax != Mathf.Infinity && vRangeMin != Mathf.NegativeInfinity)
vScaleMax = Mathf.Min(m_VScaleMax, vRangeMax - vRangeMin);
Rect oldArea = m_LastShownAreaInsideMargins;
Rect newArea = shownAreaInsideMargins;
if (newArea == oldArea)
return;
float epsilon = 0.00001f;
// Zooming in: stop shrinking once the width/height reaches its minimum.
if (newArea.width < oldArea.width - epsilon)
{
float xLerp = Mathf.InverseLerp(oldArea.width, newArea.width, hScaleMin);
newArea = new Rect(
Mathf.Lerp(oldArea.x, newArea.x, xLerp),
newArea.y,
Mathf.Lerp(oldArea.width, newArea.width, xLerp),
newArea.height
);
}
if (newArea.height < oldArea.height - epsilon)
{
float yLerp = Mathf.InverseLerp(oldArea.height, newArea.height, vScaleMin);
newArea = new Rect(
newArea.x,
Mathf.Lerp(oldArea.y, newArea.y, yLerp),
newArea.width,
Mathf.Lerp(oldArea.height, newArea.height, yLerp)
);
}
// Zooming out: stop growing once the width/height reaches its maximum.
if (newArea.width > oldArea.width + epsilon)
{
float xLerp = Mathf.InverseLerp(oldArea.width, newArea.width, hScaleMax);
newArea = new Rect(
Mathf.Lerp(oldArea.x, newArea.x, xLerp),
newArea.y,
Mathf.Lerp(oldArea.width, newArea.width, xLerp),
newArea.height
);
}
if (newArea.height > oldArea.height + epsilon)
{
float yLerp = Mathf.InverseLerp(oldArea.height, newArea.height, vScaleMax);
newArea = new Rect(
newArea.x,
Mathf.Lerp(oldArea.y, newArea.y, yLerp),
newArea.width,
Mathf.Lerp(oldArea.height, newArea.height, yLerp)
);
}
// Enforce ranges
if (newArea.xMin < hRangeMin)
newArea.x = hRangeMin;
if (newArea.xMax > hRangeMax)
newArea.x = hRangeMax - newArea.width;
if (newArea.yMin < vRangeMin)
newArea.y = vRangeMin;
if (newArea.yMax > vRangeMax)
newArea.y = vRangeMax - newArea.height;
shownAreaInsideMarginsInternal = newArea;
m_LastShownAreaInsideMargins = newArea;
}
// Convert a pixel x coordinate inside rect to a time value (and back),
// based on the currently shown area.
public float PixelToTime(float pixelX, Rect rect)
{
return ((pixelX - rect.x) * shownArea.width / rect.width + shownArea.x);
}
public float TimeToPixel(float time, Rect rect)
{
return (time - shownArea.x) / shownArea.width * rect.width + rect.x;
}
// Time units represented by a single pixel of rect.
public float PixelDeltaToTime(Rect rect)
{
return shownArea.width / rect.width;
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using java.lang;
using stab.query;
using stab.reflection;
using cnatural.helpers;
namespace cnatural.eclipse.helpers {
public class JvmTypeSystemHelper {
// Compares two versions of the same type and reports whether the change is
// purely internal (true) or alters the externally visible shape (false):
// base type, interfaces, and the names, types, signatures and
// visibility/static/final flags of the non-synthetic, non-private members.
public static bool isStructurallyEqual(TypeInfo oldType, TypeInfo newType) {
// Private nested types are invisible outside their enclosing type, so any
// change to them counts as non-structural.
if (oldType.IsNestedPrivate) {
return true;
}
if (!sameTypes(oldType.BaseType, newType.BaseType)) {
return false;
}
// Every interface of the old type must still be implemented by the new one.
foreach (var i in oldType.Interfaces) {
bool found = false;
foreach (var j in newType.Interfaces) {
if (sameTypes(i, j)) {
found = true;
break;
}
}
if (!found) {
return false;
}
}
// Every visible old field must still exist with identical type and flags.
foreach (var f in oldType.Fields.where(p => !p.IsSynthetic && !p.IsPrivate)) {
var g = newType.getField(f.Name);
if (g == null) {
return false;
}
if (!sameTypes(f.Type, g.Type)) {
return false;
}
if (f.IsStatic != g.IsStatic) {
return false;
}
if (f.IsPublic != g.IsPublic) {
return false;
}
if (f.IsProtected != g.IsProtected) {
return false;
}
if (f.IsPrivate != g.IsPrivate) {
return false;
}
if (f.IsFinal != g.IsFinal) {
return false;
}
}
// And no visible field may have been added.
foreach (var f in newType.Fields.where(p => !p.IsSynthetic && !p.IsPrivate)) {
var g = oldType.getField(f.Name);
if (g == null) {
return false;
}
}
// Every visible old method must still exist with an identical signature
// (sameMethods) and identical flags.
foreach (var m in oldType.Methods.where(p => !p.IsSynthetic && !p.IsPrivate)) {
var found = false;
foreach (var n in newType.Methods.where(p => !p.IsSynthetic && !p.IsPrivate)) {
if (sameMethods(m, n)) {
if (m.IsStatic != n.IsStatic) {
return false;
}
if (m.IsPublic != n.IsPublic) {
return false;
}
if (m.IsProtected != n.IsProtected) {
return false;
}
if (m.IsPrivate != n.IsPrivate) {
return false;
}
if (m.IsFinal != n.IsFinal) {
return false;
}
found = true;
break;
}
}
if (!found) {
return false;
}
}
// And no visible method may have been added.
// NOTE(review): this direction scans *all* old methods (no synthetic/private
// filter), unlike the loop above — confirm the asymmetry is intentional.
foreach (var m in newType.Methods.where(p => !p.IsSynthetic && !p.IsPrivate)) {
var found = false;
foreach (var n in oldType.Methods) {
if (sameMethods(m, n)) {
found = true;
break;
}
}
if (!found) {
return false;
}
}
return true;
}
// Resolves a type from its binary name, where '$' separates an enclosing
// type from its nested types (e.g. "pkg/Outer$Inner$Deep"): the first
// segment is looked up in the type system, each following segment as a
// nested type of the previous result.
public static TypeInfo getType(Library typeSystem, String fullName) {
	TypeInfo result = null;
	int index = fullName.indexOf('$');
	while (index != -1) {
		var segment = fullName.substring(0, index);
		if (result == null) {
			result = typeSystem.getType(segment);
		} else {
			result = result.getNestedType(segment);
		}
		fullName = fullName.substring(index + 1);
		index = fullName.indexOf('$');
	}
	if (result == null) {
		return typeSystem.getType(fullName);
	}
	return result.getNestedType(fullName);
}
// Clones the given types into targetTypeSystem in two passes: first define
// all TypeBuilders (so forward references resolve), then declare their
// members. Traces the elapsed time through Environment.trace.
public static void cloneTypes(Iterable<TypeInfo> types, Library targetTypeSystem) {
var t0 = System.nanoTime();
foreach (var type in types) {
defineType(type, targetTypeSystem, null);
}
foreach (var type in types) {
declareType(type, targetTypeSystem, (TypeBuilder)targetTypeSystem.getType(type.FullName), new Scope<String, TypeInfo>());
}
Environment.trace(targetTypeSystem, types.count() + " types cloned in " + ((System.nanoTime() - t0) / 1e6) + "ms");
}
// Two methods match when they have the same name, the same arity and
// pairwise-identical parameter types. Return types and flags are compared
// by the callers where relevant.
private static bool sameMethods(MethodInfo oldMethod, MethodInfo newMethod) {
if (!oldMethod.Name.equals(newMethod.Name)) {
return false;
}
if (oldMethod.Parameters.count() != newMethod.Parameters.count()) {
return false;
}
// Arity was checked above, so advancing it2 in lockstep with it1 is safe.
var it1 = oldMethod.getParameters().iterator();
var it2 = newMethod.getParameters().iterator();
while (it1.hasNext()) {
if (!sameTypes(it1.next().Type, it2.next().Type)) {
return false;
}
}
return true;
}
// Structural type equality across two type systems: primitives match by
// kind, arrays/wildcards recursively by their element/bound, generic
// parameters by name, and reference types by original definition name plus
// pairwise-equal generic arguments.
private static bool sameTypes(TypeInfo oldType, TypeInfo newType) {
switch (oldType.TypeKind) {
case Boolean:
case Byte:
case Char:
case Double:
case Float:
case Int:
case Long:
case Short:
case Void:
case UnboundedWildcard:
// Kind-only types: equal iff both sides have the same kind.
return oldType.TypeKind == newType.TypeKind;
case Array:
if (newType.TypeKind != TypeKind.Array) {
return false;
}
return sameTypes(oldType.ElementType, newType.ElementType);
case LowerBoundedWildcard:
case UpperBoundedWildcard:
if (newType.TypeKind != oldType.TypeKind) {
return false;
}
return sameTypes(oldType.WildcardBound, newType.WildcardBound);
case GenericParameter:
if (newType.TypeKind != TypeKind.GenericParameter) {
return false;
}
// Generic parameters compare by name, not identity, because the two
// sides come from different type systems.
return oldType.FullName.equals(newType.FullName);
case Reference:
if (newType.TypeKind != TypeKind.Reference) {
return false;
}
if (oldType.OriginalTypeDefinition.FullName.equals(newType.OriginalTypeDefinition.FullName)) {
if (oldType.GenericArguments.count() == newType.GenericArguments.count()) {
var it1 = oldType.GenericArguments.iterator();
var it2 = newType.GenericArguments.iterator();
while (it1.hasNext()) {
if (!sameTypes(it1.next(), it2.next())) {
return false;
}
}
return true;
}
}
return false;
default:
throw new RuntimeException("Internal error " + oldType.getTypeKind());
}
}
// First cloning pass: creates a TypeBuilder in targetTypeSystem mirroring
// type's flags, generic arguments and nested types, without touching any
// member that could require other types to already exist (declareType does
// that in the second pass). Synthetic (compiler-generated) types are skipped.
private static void defineType(TypeInfo type, Library targetTypeSystem, TypeBuilder declaringType) {
if (type.IsSynthetic) {
return;
}
TypeBuilder clone;
if (declaringType == null) {
clone = targetTypeSystem.defineType(type.FullName);
} else {
clone = declaringType.defineNestedType(type.Name);
// Nested-access flags only exist on nested types.
clone.setNestedAbstract(type.IsNestedAbstract);
clone.setNestedAnnotation(type.IsNestedAnnotation);
clone.setNestedEnum(type.IsNestedEnum);
clone.setNestedFinal(type.IsNestedFinal);
clone.setNestedInterface(type.IsNestedInterface);
clone.setNestedPrivate(type.IsNestedPrivate);
clone.setNestedProtected(type.IsNestedProtected);
clone.setNestedPublic(type.IsNestedPublic);
clone.setNestedStatic(type.IsNestedStatic);
}
clone.setAbstract(type.IsAbstract);
// Fixed: this used to copy IsNestedAnnotation, the nested-only flag, which
// broke the parallel with the surrounding setAbstract/setEnum/setFinal/
// setInterface lines; the outer-level flag must mirror IsAnnotation.
clone.setAnnotation(type.IsAnnotation);
clone.setEnum(type.IsEnum);
clone.setFinal(type.IsFinal);
clone.setInterface(type.IsInterface);
clone.setPublic(type.IsPublic);
// NOTE(review): IsSynthetic is provably false here (synthetic types return
// early above), so the ACC_SUPER flag is never set on clones — confirm the
// intended source of this flag (type.IsSuper?) before changing it.
clone.setSuper(type.IsSynthetic);
foreach (var ga in type.GenericArguments) {
clone.addGenericArgument(ga.FullName);
}
foreach (var nt in type.getNestedTypes()) {
defineType(nt, targetTypeSystem, clone);
}
}
// Second cloning pass: fills in the TypeBuilder created by defineType with
// base type, interfaces, annotations, visible fields and methods, then
// recurses into nested types. genericArgs maps generic-parameter names to
// the builder-side parameters so member signatures resolve against the
// clone's own type variables.
private static void declareType(TypeInfo type, Library targetTypeSystem, TypeBuilder clone, Scope<String, TypeInfo> genericArgs) {
if (type.IsSynthetic) {
return;
}
// Bring this type's generic parameters into scope for all member signatures.
genericArgs.enterScope();
foreach (var ga in clone.GenericArguments) {
genericArgs.declareBinding(ga.FullName, ga);
}
clone.setBaseType(getType(targetTypeSystem, type.BaseType, genericArgs));
foreach (var t in type.Interfaces) {
clone.addInterface(getType(targetTypeSystem, t, genericArgs));
}
foreach (var av in type.Annotations) {
var avb = clone.addAnnotation(getType(targetTypeSystem, av.Type, genericArgs), av.IsRuntimeVisible);
cloneAnnotationValue(av, targetTypeSystem, avb, genericArgs);
}
// Only externally visible members are cloned; synthetic and private ones
// cannot affect other compilation units.
foreach (var f in type.Fields.where(p => !p.IsSynthetic && !p.IsPrivate)) {
var fb = clone.defineField(f.Name, getType(targetTypeSystem, f.Type, genericArgs));
fb.setEnum(f.IsEnum);
fb.setFinal(f.IsFinal);
fb.setProtected(f.IsProtected);
fb.setPublic(f.IsPublic);
fb.setStatic(f.IsStatic);
fb.setTransient(f.IsTransient);
fb.setVolatile(f.IsVolatile);
fb.setValue(f.Value);
foreach (var av in f.getAnnotations()) {
var avb = fb.addAnnotation(getType(targetTypeSystem, av.Type, genericArgs), av.IsRuntimeVisible);
cloneAnnotationValue(av, targetTypeSystem, avb, genericArgs);
}
}
foreach (var m in type.Methods.where(p => !p.IsSynthetic && !p.IsPrivate)) {
var mb = clone.defineMethod(m.Name);
mb.setAbstract(m.IsAbstract);
mb.setBridge(m.IsBridge);
mb.setFinal(m.IsFinal);
mb.setNative(m.IsNative);
mb.setProtected(m.IsProtected);
mb.setPublic(m.IsPublic);
mb.setStatic(m.IsStatic);
mb.setStrict(m.IsStrict);
mb.setSynchronized(m.IsSynchronized);
mb.setVarargs(m.IsVarargs);
foreach (var av in m.Annotations) {
var avb = mb.addAnnotation(getType(targetTypeSystem, av.Type, genericArgs), av.IsRuntimeVisible);
cloneAnnotationValue(av, targetTypeSystem, avb, genericArgs);
}
// Method-level generic parameters shadow the type's within this signature.
genericArgs.enterScope();
foreach (var ga in m.GenericArguments) {
var t = mb.addGenericArgument(ga.FullName);
genericArgs.declareBinding(t.FullName, t);
}
mb.setReturnType(getType(targetTypeSystem, m.ReturnType, genericArgs));
foreach (var p in m.Parameters) {
var pb = mb.addParameter(getType(targetTypeSystem, p.Type, genericArgs));
pb.setName(p.Name);
}
genericArgs.leaveScope();
}
foreach (var nt in type.NestedTypes) {
declareType(nt, targetTypeSystem, (TypeBuilder)clone.getNestedType(nt.Name), genericArgs);
}
genericArgs.leaveScope();
}
// Copies every named argument of `value` into `builder`, dispatching on the
// argument kind. Nested annotations and arrays recurse; scalar kinds are
// forwarded to the matching typed setter on the builder.
private static void cloneAnnotationValue(AnnotationValue value, Library targetTypeSystem,
AnnotationValueBuilder builder, Scope<String, TypeInfo> genericArgs) {
foreach (var s in value.ArgumentNames) {
var a = value.getArgument(s);
switch (a.AnnotationArgumentKind) {
case Annotation:
// Nested annotation: create the sub-builder, then clone into it.
cloneAnnotationValue((AnnotationValue)a, targetTypeSystem,
builder.setAnnotationArgument(s, a.Type, a.IsRuntimeVisible), genericArgs);
break;
case Array:
// Array arguments copy each element via cloneAnnotationArgument.
var aab = builder.setArrayArgument(s);
foreach (var aa in a.Elements) {
cloneAnnotationArgument(aa, targetTypeSystem, aab, genericArgs);
}
break;
case Boolean:
builder.setBooleanArgument(s, (Boolean)a.Value);
break;
case Byte:
builder.setByteArgument(s, (Byte)a.Value);
break;
case Char:
builder.setCharArgument(s, (Character)a.Value);
break;
case Double:
builder.setDoubleArgument(s, (Double)a.Value);
break;
case Enum:
// Enum constants are identified by their type and constant name.
// NOTE(review): uses a.getType() where other cases use a.Type --
// presumably equivalent accessors; confirm against the TypeInfo API.
builder.setEnumArgument(s, a.getType(), a.Name);
break;
case Float:
builder.setFloatArgument(s, (Float)a.Value);
break;
case Int:
builder.setIntArgument(s, (Integer)a.Value);
break;
case Long:
builder.setLongArgument(s, (Long)a.Value);
break;
case Short:
builder.setShortArgument(s, (Short)a.Value);
break;
case String:
builder.setStringArgument(s, (String)a.Value);
break;
case Type:
builder.setTypeArgument(s, a.Type);
break;
}
}
}
// Array-element counterpart of cloneAnnotationValue: appends a single
// annotation-array element to `builder`, recursing for nested annotations
// and nested arrays, and using the typed add* methods for scalars.
private static void cloneAnnotationArgument(AnnotationArgument arg, Library targetTypeSystem,
AnnotationArrayValueBuilder builder, Scope<String, TypeInfo> genericArgs) {
switch (arg.AnnotationArgumentKind) {
case Annotation:
cloneAnnotationValue((AnnotationValue)arg, targetTypeSystem,
builder.addAnnotationArgument(arg.Type, arg.IsRuntimeVisible), genericArgs);
break;
case Array:
// Arrays of arrays: recurse element by element.
var avb = builder.addArrayArgument();
foreach (var aa in arg.Elements) {
cloneAnnotationArgument(aa, targetTypeSystem, avb, genericArgs);
}
break;
case Boolean:
builder.addBooleanArgument((Boolean)arg.Value);
break;
case Byte:
builder.addByteArgument((Byte)arg.Value);
break;
case Char:
builder.addCharArgument((Character)arg.Value);
break;
case Double:
builder.addDoubleArgument((Double)arg.Value);
break;
case Enum:
// Enum constants are identified by their type and constant name.
builder.addEnumArgument(arg.Type, arg.Name);
break;
case Float:
builder.addFloatArgument((Float)arg.Value);
break;
case Int:
builder.addIntArgument((Integer)arg.Value);
break;
case Long:
builder.addLongArgument((Long)arg.Value);
break;
case Short:
builder.addShortArgument((Short)arg.Value);
break;
case String:
builder.addStringArgument((String)arg.Value);
break;
case Type:
builder.addTypeArgument(arg.Type);
break;
}
}
// Resolves `type` (from the source type system) to the equivalent TypeInfo
// in `typeSystem`. Primitives map directly; references are looked up by
// name (through the declaring type for nested types) and re-instantiated
// for generic usages; wildcards and arrays are rebuilt from their bound or
// element type; generic parameters resolve through `genericArgs`.
// Throws IllegalStateException when a referenced definition is missing from
// the target type system, RuntimeException on an unknown kind.
private static TypeInfo getType(Library typeSystem, TypeInfo type, Scope<String, TypeInfo> genericArgs) {
switch (type.TypeKind) {
case Boolean:
case Byte:
case Char:
case Double:
case Float:
case Int:
case Long:
case Short:
case Void:
return typeSystem.getPrimitiveType(type.TypeKind);
case Reference:
TypeInfo result;
if (type.DeclaringType == null) {
result = typeSystem.getType(type.OriginalTypeDefinition.FullName);
} else {
result = getType(typeSystem, type.DeclaringType, genericArgs).getNestedType(type.Name);
}
// Fail fast on a missing definition BEFORE using `result`. The original
// code only tested for null after passing `result` to getGenericType, so
// an absent type surfaced as an opaque failure inside getGenericType
// instead of the intended IllegalStateException below.
if (result == null) {
throw new IllegalStateException("Type not found in target file system: " + type.FullName);
}
// A usage distinct from its original definition is a generic
// instantiation: re-instantiate with the translated type arguments.
if (type != type.OriginalTypeDefinition) {
result = typeSystem.getGenericType(result, type.GenericArguments.select(p => getType(typeSystem, p, genericArgs)).toList());
}
return result;
case UnboundedWildcard:
return typeSystem.UnboundedWildcard;
case LowerBoundedWildcard:
return getType(typeSystem, type.WildcardBound, genericArgs).LowerBoundedWildcard;
case UpperBoundedWildcard:
return getType(typeSystem, type.WildcardBound, genericArgs).UpperBoundedWildcard;
case Array:
return getType(typeSystem, type.ElementType, genericArgs).ArrayType;
case GenericParameter:
return genericArgs.getBindingValue(type.FullName);
default:
throw new RuntimeException("Internal error " + type.getTypeKind());
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.ServiceModel;
using System.ServiceModel.Channels;
/// <summary>
/// Test double for <see cref="ChannelBase"/>. Every abstract and virtual
/// member is routed through a settable delegate property (the *Override
/// properties), each of which is pre-wired in the constructor to a public
/// Default* method that reproduces the base-class behavior. Tests replace
/// individual delegates to observe or alter a single member without
/// subclassing.
/// </summary>
public class MockChannelBase : ChannelBase, IMockCommunicationObject
{
    private readonly EndpointAddress _address;
    private readonly MessageEncoder _encoder;
    private readonly ChannelManagerBase _manager;

    public MockChannelBase(ChannelManagerBase manager, MessageEncoderFactory encoderFactory, EndpointAddress address)
        : base(manager)
    {
        _address = address;
        _manager = manager;
        _encoder = encoderFactory.CreateSessionEncoder();
        // Placeholder async results; the DefaultOnBegin* methods complete and
        // return these so the APM path always hands back a finished result.
        OpenAsyncResult = new MockAsyncResult();
        CloseAsyncResult = new MockAsyncResult();
        GetEndpointPropertyOverride = DefaultGetEndpointProperty;
        // CommunicationObject overrides
        DefaultCloseTimeoutOverride = DefaultDefaultCloseTimeout;
        DefaultOpenTimeoutOverride = DefaultDefaultOpenTimeout;
        OnAbortOverride = DefaultOnAbort;
        OnOpenOverride = DefaultOnOpen;
        OnCloseOverride = DefaultOnClose;
        OnBeginOpenOverride = DefaultOnBeginOpen;
        OnEndOpenOverride = DefaultOnEndOpen;
        OnBeginCloseOverride = DefaultOnBeginClose;
        OnEndCloseOverride = DefaultOnEndClose;
        // All the virtuals
        OnOpeningOverride = DefaultOnOpening;
        OnOpenedOverride = DefaultOnOpened;
        OnClosingOverride = DefaultOnClosing;
        OnClosedOverride = DefaultOnClosed;
        OnFaultedOverride = DefaultOnFaulted;
    }

    public Func<EndpointAddress> GetEndpointPropertyOverride { get; set; }
    public MockAsyncResult OpenAsyncResult { get; set; }
    public MockAsyncResult CloseAsyncResult { get; set; }

    // Abstract overrides
    public Func<TimeSpan> DefaultCloseTimeoutOverride { get; set; }
    public Func<TimeSpan> DefaultOpenTimeoutOverride { get; set; }
    public Action OnAbortOverride { get; set; }
    public Func<TimeSpan, AsyncCallback, object, IAsyncResult> OnBeginCloseOverride { get; set; }
    public Func<TimeSpan, AsyncCallback, object, IAsyncResult> OnBeginOpenOverride { get; set; }
    public Action<TimeSpan> OnOpenOverride { get; set; }
    public Action<TimeSpan> OnCloseOverride { get; set; }
    public Action<IAsyncResult> OnEndCloseOverride { get; set; }
    public Action<IAsyncResult> OnEndOpenOverride { get; set; }

    // Virtual overrides
    public Action OnOpeningOverride { get; set; }
    public Action OnOpenedOverride { get; set; }
    public Action OnClosingOverride { get; set; }
    public Action OnClosedOverride { get; set; }
    public Action OnFaultedOverride { get; set; }

    public EndpointAddress RemoteAddress
    {
        // Route through the override hook like every other mocked member.
        // Previously this called DefaultGetEndpointProperty() directly,
        // which made GetEndpointPropertyOverride a dead setting; the hook
        // defaults to DefaultGetEndpointProperty, so unconfigured behavior
        // is unchanged.
        get { return GetEndpointPropertyOverride(); }
    }

    public EndpointAddress DefaultGetEndpointProperty()
    {
        return _address;
    }

    protected override TimeSpan DefaultCloseTimeout
    {
        get
        {
            return DefaultCloseTimeoutOverride();
        }
    }

    public TimeSpan DefaultDefaultCloseTimeout()
    {
        return TimeSpan.FromSeconds(30);
    }

    protected override TimeSpan DefaultOpenTimeout
    {
        get
        {
            return DefaultOpenTimeoutOverride();
        }
    }

    public TimeSpan DefaultDefaultOpenTimeout()
    {
        return TimeSpan.FromSeconds(30);
    }

    protected override void OnAbort()
    {
        OnAbortOverride();
    }

    public void DefaultOnAbort()
    {
        // abstract -- no base to call
    }

    protected override IAsyncResult OnBeginClose(TimeSpan timeout, AsyncCallback callback, object state)
    {
        return OnBeginCloseOverride(timeout, callback, state);
    }

    public IAsyncResult DefaultOnBeginClose(TimeSpan timeout, AsyncCallback callback, object state)
    {
        // Modify the placeholder async result we already instantiated.
        CloseAsyncResult.Callback = callback;
        CloseAsyncResult.AsyncState = state;

        // The mock always Completes the IAsyncResult before handing it back.
        // This is done because the sync path has no access to this IAsyncResult
        // that happens behind the scenes.
        CloseAsyncResult.Complete();
        return CloseAsyncResult;
        // abstract -- no base to call
    }

    protected override IAsyncResult OnBeginOpen(TimeSpan timeout, AsyncCallback callback, object state)
    {
        return OnBeginOpenOverride(timeout, callback, state);
    }

    public IAsyncResult DefaultOnBeginOpen(TimeSpan timeout, AsyncCallback callback, object state)
    {
        // Modify the placeholder async result we already instantiated.
        OpenAsyncResult.Callback = callback;
        OpenAsyncResult.AsyncState = state;

        // The mock always Completes the IAsyncResult before handing it back.
        // This is done because the sync path has no access to this IAsyncResult
        // that happens behind the scenes.
        OpenAsyncResult.Complete();
        return OpenAsyncResult;
        // abstract -- no base to call
    }

    protected override void OnClose(TimeSpan timeout)
    {
        OnCloseOverride(timeout);
    }

    public void DefaultOnClose(TimeSpan timeout)
    {
        // abstract -- no base to call
    }

    protected override void OnEndClose(IAsyncResult result)
    {
        OnEndCloseOverride(result);
    }

    public void DefaultOnEndClose(IAsyncResult result)
    {
        ((MockAsyncResult)result).Complete();
        // abstract -- no base to call
    }

    protected override void OnEndOpen(IAsyncResult result)
    {
        OnEndOpenOverride(result);
    }

    public void DefaultOnEndOpen(IAsyncResult result)
    {
        ((MockAsyncResult)result).Complete();
        // abstract -- no base to call
    }

    protected override void OnOpen(TimeSpan timeout)
    {
        OnOpenOverride(timeout);
    }

    public void DefaultOnOpen(TimeSpan timeout)
    {
        // abstract -- no base to call
    }

    // Virtuals: each Default* delegates to the base implementation so the
    // normal CommunicationObject state machine still runs unless a test
    // replaces the corresponding *Override delegate.
    protected override void OnOpening()
    {
        OnOpeningOverride();
    }

    public void DefaultOnOpening()
    {
        base.OnOpening();
    }

    protected override void OnOpened()
    {
        OnOpenedOverride();
    }

    public void DefaultOnOpened()
    {
        base.OnOpened();
    }

    protected override void OnClosing()
    {
        OnClosingOverride();
    }

    public void DefaultOnClosing()
    {
        base.OnClosing();
    }

    protected override void OnClosed()
    {
        OnClosedOverride();
    }

    public void DefaultOnClosed()
    {
        base.OnClosed();
    }

    protected override void OnFaulted()
    {
        OnFaultedOverride();
    }

    public void DefaultOnFaulted()
    {
        base.OnFaulted();
    }
}
| |
/***************************************************************************
* FileSystem.cs
*
* Copyright (C) 2007 Alan McGovern
* Written by Alan McGovern <alan.mcgovern@gmail.com>
****************************************************************************/
/* THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW:
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
using System;
using System.IO;
using System.Collections.Generic;
namespace Gphoto2
{
/// <summary>
/// Represents a storage medium on the device
/// </summary>
public class FileSystem
{
// The camera that this filesystem is on
private Camera camera;
// The struct which contains the information for this filesystem
private LibGPhoto2.CameraStorageInformation storage;
/// <value>
/// The root directory for the filesystem. All paths must be prepended with this value
/// before being sent to the device
/// </value>
internal string BaseDirectory
{
get { return HasField( LibGPhoto2.CameraStorageInfoFields.Base) ? storage.basedir : null;}
}
/// <value>
/// True if the filesystem supports file deletion.
/// </value>
public bool CanDelete
{
get
{
// If the device didn't report access flags at all, assume no delete.
if(!HasField(LibGPhoto2.CameraStorageInfoFields.Access))
return false;
return HasField(LibGPhoto2.CameraStorageAccessType.ReadWrite)
|| HasField(LibGPhoto2.CameraStorageAccessType.ReadOnlyWithDelete);
}
}
/// <value>
/// True if the filesystem supports reading
/// </value>
public bool CanRead
{
get
{
if(!HasField(LibGPhoto2.CameraStorageInfoFields.Access))
return false;
return HasField(LibGPhoto2.CameraStorageAccessType.ReadOnly)
|| HasField(LibGPhoto2.CameraStorageAccessType.ReadOnlyWithDelete)
|| HasField(LibGPhoto2.CameraStorageAccessType.ReadWrite);
}
}
/// <value>
/// True if the filesystem supports writing
/// </value>
public bool CanWrite
{
get
{
if(!HasField(LibGPhoto2.CameraStorageInfoFields.Access))
return false;
return HasField(LibGPhoto2.CameraStorageAccessType.ReadWrite);
}
}
/// <value>
/// The capacity of the filesystem in bytes, or -1 when the device does not report it
/// </value>
public long Capacity
{
// Device reports kilobytes; convert to bytes.
get { return HasField(LibGPhoto2.CameraStorageInfoFields.MaxCapacity)
? (long)storage.capacitykbytes * 1024 : -1; }
}
/// <value>
/// A verbose description of the filesystem
/// </value>
public string Description
{
get { return HasField(LibGPhoto2.CameraStorageInfoFields.Description)
? storage.description : ""; }
}
/// <value>
/// The type of filesystem hierarchy in use
/// </value>
internal LibGPhoto2.CameraStorageFilesystemType FilesystemType
{
get { return HasField(LibGPhoto2.CameraStorageInfoFields.FilesystemType)
? storage.fstype : LibGPhoto2.CameraStorageFilesystemType.Undefined; }
}
/// <value>
/// The free space of the filesystem in bytes, or -1 when the device does not report it
/// </value>
public long FreeSpace
{
get { return HasField(LibGPhoto2.CameraStorageInfoFields.FreeSpaceKbytes)
? (long)storage.freekbytes * 1024 : -1; }
}
/// <value>
/// The label of the filesystem
/// </value>
public string Label
{
get { return HasField(LibGPhoto2.CameraStorageInfoFields.Label)
? storage.label : ""; }
}
/// <value>
/// The type of hardware the filesystem is on
/// </value>
internal LibGPhoto2.CameraStorageType StorageType
{
get { return HasField(LibGPhoto2.CameraStorageInfoFields.StorageType)
? storage.type : LibGPhoto2.CameraStorageType.Unknown; }
}
/// <value>
/// The amount of space which has been used in bytes
/// </value>
public long UsedSpace
{
// Note: when either Capacity or FreeSpace is unreported (-1) this
// value is not meaningful.
get { return Capacity - FreeSpace; }
}
internal FileSystem(Camera camera, LibGPhoto2.CameraStorageInformation storage)
{
this.camera = camera;
this.storage = storage;
}
/// <summary>
/// True if the file can fit on this filesystem
/// </summary>
/// <param name="file"> The file to check if it can be uploaded
/// A <see cref="File"/>
/// </param>
/// <returns>
/// A <see cref="System.Boolean"/> True if the file fits
/// </returns>
public bool CanUpload(File file)
{
if (file == null)
throw new ArgumentNullException("file");
return FreeSpace > file.Size;
}
// FIXME: These are nasty hacks as there is no API for this
/// <summary>
/// Checks to see if the given directory exists on the filesystem
/// </summary>
/// <param name="directory">The directory to check if it exists. A null value is treated as
/// an empty string, which means the base directory.
/// A <see cref="System.String"/>
/// </param>
/// <returns>
/// A <see cref="System.Boolean"/>
/// </returns>
public bool Contains(string directory)
{
try
{
string filesystem;
string path;
string foldername;
// Look for the final path segment among the folders listed in its
// parent directory.
SplitPath(directory, out filesystem, out path, out foldername);
foreach(string s in GetFolders(path))
if(s.Equals(foldername))
return true;
}
catch(GPhotoException ex)
{
// A missing parent directory simply means "not contained";
// any other device error is re-raised.
if(ex.Error != ErrorCode.DirectoryNotFound)
throw;
}
return false;
}
/// <summary>
/// Checks to see if the given file exists in the given directory
/// </summary>
/// <param name="directory">The directory where the file should be
/// A <see cref="System.String"/>
/// </param>
/// <param name="filename">The name of the file
/// A <see cref="System.String"/>
/// </param>
/// <returns>True if the file exists in the given directory, false if the directory doesn't
/// exist or if the file doesn't exist.
/// A <see cref="System.String"/>
/// </returns>
public bool Contains(string directory, string filename)
{
if(!Contains(directory))
return false;
try
{
// Probe by attempting to instantiate the file; FileNotFound means "no".
GetFileInternal(directory, filename);
return true;
}
catch(GPhotoException ex)
{
if(ex.Error != ErrorCode.FileNotFound)
throw;
}
return false;
}
public bool Contains (File file)
{
if (file == null)
throw new ArgumentNullException("file");
return file.FileSystem == this;
}
/// <summary>
/// Counts the number of files in the base directory
/// </summary>
/// <returns>The number of files
/// A <see cref="System.Int32"/>
/// </returns>
public int Count()
{
return Count("");
}
/// <summary>
/// Counts the number of files in the specified directory
/// </summary>
/// <param name="directory">The directory to count the number of files in
/// A <see cref="System.String"/>
/// </param>
/// <returns>The number of files
/// A <see cref="System.Int32"/>
/// </returns>
public int Count(string directory)
{
return Count(directory, false);
}
/// <summary>
/// Counts the number of files in the specified directory and subdirectories (if recursive)
/// </summary>
/// <param name="directory">The directory to count the number of files in
/// A <see cref="System.String"/>
/// </param>
/// <param name="recursive">True if files in all subdirectories should be counted too
/// A <see cref="System.Boolean"/>
/// </param>
/// <returns>The number of files
/// A <see cref="System.Int32"/>
/// </returns>
public int Count(string directory, bool recursive)
{
directory = CombinePath(BaseDirectory, directory);
return CountRecursive(directory, recursive);
}
// Counts files in `directory` (already a full device path); when
// `recursive` is set, descends into each listed subfolder.
private int CountRecursive(string directory, bool recursive)
{
int count = 0;
using (LibGPhoto2.CameraList list = camera.Device.ListFiles(directory, camera.Context))
count += list.Count();
if(!recursive)
return count;
using (LibGPhoto2.CameraList list = camera.Device.ListFolders(directory, camera.Context))
foreach(string s in ParseList(list))
count += CountRecursive(CombinePath(directory, s), recursive);
return count;
}
/// <summary>
/// Creates the supplied path on the device if it doesn't already exist
/// </summary>
/// <param name="path">The path to create
/// A <see cref="System.String"/>
/// </param>
public void CreateDirectory(string path)
{
if(string.IsNullOrEmpty(path))
throw new ArgumentException("path cannot be null or empty");
// Walk the path one segment at a time, creating each missing level.
string[] parts = path.Split(Camera.DirectorySeperator);
string current = "";
foreach(string s in parts)
{
if(string.IsNullOrEmpty(s))
continue;
if (!Contains(CombinePath(current, s)))
CreateDirectory(current, s);
current = CombinePath(current, s);
}
}
// Creates a single folder `foldername` inside `path` (relative to the
// base directory). Throws if the device cannot create directories.
private void CreateDirectory(string path, string foldername)
{
if(path == null)
throw new ArgumentNullException("path");
if(string.IsNullOrEmpty(foldername))
throw new ArgumentException("directory cannot be null or empty");
if (!camera.Abilities.CanCreateDirectory)
throw new InvalidOperationException("Device doesn't support directory creation");
path = CombinePath(BaseDirectory, path);
camera.Device.MakeDirectory(path, foldername, camera.Context);
}
/// <summary>
/// Deletes the specified file from the specified directory
/// </summary>
/// <param name="directory">The directory to delete the file from (can be null)
/// A <see cref="System.String"/>
/// </param>
/// <param name="filename">The name of the file to delete
/// A <see cref="System.String"/>
/// </param>
public void DeleteFile(string directory, string filename)
{
if(string.IsNullOrEmpty(filename))
throw new ArgumentException("filename cannot be null or empty");
camera.Device.DeleteFile(CombinePath(BaseDirectory, directory), filename, camera.Context);
}
/// <summary>
/// Deletes the specified file from the filesystem
/// </summary>
/// <param name="file">The file to delete
/// A <see cref="File"/>
/// </param>
public void DeleteFile(File file)
{
if (file == null)
throw new ArgumentNullException("file");
DeleteFile(file.Path, file.Filename);
}
/// <summary>
/// Deletes all the files in the specified path
/// </summary>
/// <param name="folder">The path to delete all the files in
/// A <see cref="System.String"/>
/// </param>
public void DeleteAll(string folder)
{
DeleteAll(folder, false);
}
/// <summary>
/// Deletes all the files in a specified path. If removeFolder is true, the folder is
/// deleted if it is empty.
/// </summary>
/// <param name="folder">The path to delete all the files in
/// A <see cref="System.String"/>
/// </param>
/// <param name="removeFolder">True if the folder should be removed
/// A <see cref="System.Boolean"/>
/// </param>
public void DeleteAll(string folder, bool removeFolder)
{
if(folder == null)
throw new ArgumentNullException("folder");
string path = CombinePath(BaseDirectory, folder);
camera.Device.DeleteAll(path, camera.Context);
if(!removeFolder || string.IsNullOrEmpty(folder))
return;
// Split the full path into parent path + final folder name, since the
// device API removes a directory by (parent, name).
int index = path.LastIndexOf(Camera.DirectorySeperator);
string pathToDirectory = path.Substring(0, index);
string directory = path.Length > index ? path.Substring(index + 1) : "";
camera.Device.RemoveDirectory(pathToDirectory, directory, camera.Context);
}
// Builds a File wrapper without the argument validation performed by the
// public GetFile overloads.
private File GetFileInternal(string directory, string filename)
{
return File.Create(camera, this, directory, filename);
}
/// <summary>
/// Gets the file at the specified path with the specified filename
/// </summary>
/// <param name="directory">The path to check for the file at
/// A <see cref="System.String"/>
/// </param>
/// <param name="filename">The name of the file to get
/// A <see cref="System.String"/>
/// </param>
/// <returns>A file object representing the file
/// A <see cref="File"/>
/// </returns>
public File GetFile(string directory, string filename)
{
if(string.IsNullOrEmpty(filename))
throw new ArgumentException("filename cannot be null or empty");
return GetFileInternal(directory, filename);
}
/// <summary>
/// Gets all the files at the specified path
/// </summary>
/// <param name="directory">The path to get the files at
/// A <see cref="System.String"/>
/// </param>
/// <returns>An array containing all the files found
/// A <see cref="File"/>
/// </returns>
public File[] GetFiles(string directory)
{
string fullDirectory = CombinePath(BaseDirectory, directory);
using (LibGPhoto2.CameraList list = camera.Device.ListFiles(fullDirectory, camera.Context))
{
string[] filenames = ParseList(list);
File[] files = new File[filenames.Length];
for(int i = 0; i < files.Length; i++)
files[i] = GetFileInternal(directory, filenames[i]);
return files;
}
}
/// <summary>
/// Lists all the folders found in the root directory
/// </summary>
/// <returns>An array containing all the folders
/// A <see cref="System.String"/>
/// </returns>
public string[] GetFolders()
{
return GetFolders("");
}
/// <summary>
/// Lists all the folders found at the specified path
/// </summary>
/// <param name="directory">
/// A <see cref="System.String"/>
/// </param>
/// <returns>An array containing the names of all the directories
/// A <see cref="System.String"/>
/// </returns>
public string[] GetFolders(string directory)
{
using (LibGPhoto2.CameraList list = camera.Device.ListFolders(CombinePath(BaseDirectory, directory), camera.Context))
return ParseList(list);
}
// Copies the entry names out of a native CameraList into a managed array.
private string[] ParseList(LibGPhoto2.CameraList list)
{
int count = list.Count();
string[] results = new string[count];
for(int i = 0; i < count; i++)
results[i] = list.GetName(i);
return results;
}
// Splits a device path into its filesystem root ("/<first segment>"),
// the directory between root and leaf, and the leaf name itself.
internal static void SplitPath(string path, out string filesystem, out string directory, out string filename)
{
// Split the path up and remove all empty entries
List<string> parts = new List<string>(path.Split(Camera.DirectorySeperator));
parts.RemoveAll(delegate (string s) { return string.IsNullOrEmpty(s); });
// The filesystem is the first part and needs to be prepended with '/'
filesystem = "/" + parts[0];
// The filename is the last part
filename = parts[parts.Count - 1];
// Everything else is the 'directory' which contains the file
directory = "";
for(int i = 1; i < parts.Count - 1; i++)
directory = CombinePath(directory, parts[i]);
}
/// <summary>
/// Uploads the specified file into the specified path
/// </summary>
/// <param name="file">The file to upload
/// A <see cref="File"/>
/// </param>
/// <param name="directory">The path where the file should be uploaded to
/// A <see cref="System.String"/>
/// </param>
/// <returns>An object representing the file on the camera
/// A <see cref="File"/>
/// </returns>
public File Upload(File file, string directory)
{
if (file == null)
throw new ArgumentNullException("file");
return Upload(file, directory, file.Filename);
}
/// <summary>
/// Uploads the specified file into the specified path saving it with the specified
/// filename
/// </summary>
/// <param name="file">The file to upload
/// A <see cref="File"/>
/// </param>
/// <param name="directory">The path where the file should be uploaded to
/// A <see cref="System.String"/>
/// </param>
/// <param name="filename">The filename to save the file as on the filesystem
/// A <see cref="System.String"/>
/// </param>
/// <returns>An object representing the file on the camera
/// A <see cref="File"/>
/// </returns>
public File Upload(File file, string directory, string filename)
{
if (file == null)
throw new ArgumentNullException("file");
if (string.IsNullOrEmpty(filename))
throw new ArgumentException("filename cannot be null or empty");
if(!Contains(directory))
CreateDirectory(directory);
string fullPath = CombinePath(BaseDirectory, directory);
// First put the actual file data on the camera
using (LibGPhoto2.CameraFile data = new LibGPhoto2.CameraFile())
{
data.SetName(filename);
data.SetFileType(LibGPhoto2.CameraFileType.Normal);
data.SetDataAndSize(System.IO.File.ReadAllBytes(Path.Combine(file.Path, file.Filename)));
data.SetMimeType(file.MimeType);
camera.Device.PutFile(fullPath, data, camera.Context);
}
// Then put the metadata on camera.
using (LibGPhoto2.CameraFile meta = new LibGPhoto2.CameraFile())
{
meta.SetName(filename);
meta.SetFileType(LibGPhoto2.CameraFileType.MetaData);
meta.SetDataAndSize(System.Text.Encoding.UTF8.GetBytes(file.MetadataToXml()));
camera.Device.PutFile(fullPath, meta, camera.Context);
}
// Then return the user a File object referencing the file on the camera
// FIXME: Hack to copy the metadata correctly. Libgphoto returns null
// metadata until the device refreshes it's database. Workaround is to manually
// copy the metadata over from the old file.
File returnFile = GetFileInternal(directory, filename);
returnFile.Metadata.Clear();
foreach (KeyValuePair<string, string> kp in file.Metadata)
returnFile.Metadata.Add(kp.Key, kp.Value);
// FIXME: This is another hack to fix the above issue
returnFile.Size = file.Size;
return returnFile;
}
// True when the device reported `field` among the storage-info fields.
private bool HasField(LibGPhoto2.CameraStorageInfoFields field)
{
return (storage.fields & field) == field;
}
// True when the storage's access flags include `field`.
private bool HasField(LibGPhoto2.CameraStorageAccessType field)
{
return (storage.access & field) == field;
}
/// <summary>
/// Combines paths in the correct format to be used to access files on the filesystem
/// </summary>
/// <param name="path1">The first part of the path
/// A <see cref="System.String"/>
/// </param>
/// <param name="path2">The second part of the path
/// A <see cref="System.String"/>
/// </param>
/// <returns>A string containing the second path appended to the first path
/// A <see cref="System.String"/>
/// </returns>
public static string CombinePath(string path1, string path2)
{
// Empty or bare-separator second halves leave the first path untouched;
// otherwise strip the joining separators and insert exactly one.
if(string.IsNullOrEmpty(path2) || path2 == Camera.DirectorySeperator.ToString())
return path1;
if(path2 != null && path2.StartsWith("/"))
path2 = path2.Substring(1);
if(path1 != null && path1.EndsWith("/"))
path1 = path1.Substring(0, path1.Length -1);
return path1 + Camera.DirectorySeperator + path2;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.ObjectModel;
using System.Diagnostics.Contracts;
using System.IdentityModel.Policy;
using System.IdentityModel.Selectors;
using System.IdentityModel.Tokens;
using System.IO;
using System.Net;
using System.Net.Security;
using System.Runtime;
using System.Security.Authentication;
using System.Security.Principal;
using System.ServiceModel;
using System.ServiceModel.Description;
using System.ServiceModel.Security;
using System.ServiceModel.Security.Tokens;
using System.Threading.Tasks;
namespace System.ServiceModel.Channels
{
internal class WindowsStreamSecurityUpgradeProvider : StreamSecurityUpgradeProvider
{
private bool _extractGroupsForWindowsAccounts;
private EndpointIdentity _identity;
private IdentityVerifier _identityVerifier;
private ProtectionLevel _protectionLevel;
private SecurityTokenManager _securityTokenManager;
private NetworkCredential _serverCredential;
private string _scheme;
private bool _isClient;
private Uri _listenUri;
// Builds the provider from the binding element and context. Only the client
// role is supported in this build (asserted below); the security token
// manager comes from the context's SecurityCredentialsManager, falling back
// to default ClientCredentials when none was supplied.
public WindowsStreamSecurityUpgradeProvider(WindowsStreamSecurityBindingElement bindingElement,
    BindingContext context, bool isClient)
    : base(context.Binding)
{
    Contract.Assert(isClient, ".NET Core and .NET Native does not support server side");
    _extractGroupsForWindowsAccounts = TransportDefaults.ExtractGroupsForWindowsAccounts;
    _protectionLevel = bindingElement.ProtectionLevel;
    _scheme = context.Binding.Scheme;
    _isClient = isClient;
    _listenUri = TransportSecurityHelpers.GetListenUri(context.ListenUriBaseAddress, context.ListenUriRelativeAddress);

    SecurityCredentialsManager credentialProvider = context.BindingParameters.Find<SecurityCredentialsManager>();
    if (credentialProvider == null)
    {
        // No credentials manager in the binding parameters: use defaults.
        credentialProvider = ClientCredentials.CreateDefaultCredentials();
    }
    _securityTokenManager = credentialProvider.CreateSecurityTokenManager();
}
/// <summary>Transport scheme of the binding this provider was created from.</summary>
public string Scheme => _scheme;
// Whether Windows group membership should be extracted for authenticated
// accounts; initialized from TransportDefaults, updated by the server-side
// open path.
internal bool ExtractGroupsForWindowsAccounts => _extractGroupsForWindowsAccounts;
// Lazily creates the endpoint identity from the server credential using
// double-checked locking (check, lock, re-check). Kept byte-identical:
// the statement order IS the synchronization contract.
public override EndpointIdentity Identity
{
    get
    {
        // If the server credential is null, then we have not been opened yet and have no identity to expose.
        if (_serverCredential != null)
        {
            if (_identity == null)
            {
                lock (ThisLock)
                {
                    // Re-check under the lock so only one thread creates the identity.
                    if (_identity == null)
                    {
                        _identity = SecurityUtils.CreateWindowsIdentity(_serverCredential);
                    }
                }
            }
        }
        return _identity;
    }
}
// Verifier used to validate the remote endpoint identity; populated by
// OnOpened when not already set.
internal IdentityVerifier IdentityVerifier => _identityVerifier;
/// <summary>Protection level taken from the binding element at construction.</summary>
public ProtectionLevel ProtectionLevel => _protectionLevel;
// Server-side SSPI credential; null until OnOpen/OnOpened run.
private NetworkCredential ServerCredential => _serverCredential;
// Creates the client-side negotiate-stream upgrade initiator for the given
// target. Guarded so it can only be called while the provider is open.
public override StreamUpgradeInitiator CreateUpgradeInitiator(EndpointAddress remoteAddress, Uri via)
{
    ThrowIfDisposedOrNotOpen();
    return new WindowsStreamSecurityUpgradeInitiator(this, remoteAddress, via);
}
// Nothing to tear down on abort: this provider holds no abortable resources.
protected override void OnAbort()
{
}
// Nothing to release on close; see OnAbort.
protected override void OnClose(TimeSpan timeout)
{
}
// Close is a no-op, so the async close completes immediately.
protected internal override Task OnCloseAsync(TimeSpan timeout) => TaskHelpers.CompletedTask();
// APM close adapter: wraps the (already-completed) async close task.
protected override IAsyncResult OnBeginClose(TimeSpan timeout, AsyncCallback callback, object state)
    => OnCloseAsync(timeout).ToApm(callback, state);
// Completes the APM close, propagating any exception from the task.
protected override void OnEndClose(IAsyncResult result) => result.ToApmEnd();
protected override void OnOpen(TimeSpan timeout)
{
    // Client side: credentials are resolved per-upgrade by the initiator,
    // so there is nothing to do at provider open time.
    if (_isClient)
    {
        return;
    }

    // Server side (asserted unreachable in this build): acquire the SSPI
    // credential for the listen URI up front.
    SecurityTokenRequirement sspiTokenRequirement = TransportSecurityHelpers.CreateSspiTokenRequirement(Scheme, _listenUri);
    _serverCredential = TransportSecurityHelpers.GetSspiCredential(
        _securityTokenManager, sspiTokenRequirement, timeout, out _extractGroupsForWindowsAccounts);
}
// Async open simply runs the synchronous open and reports completion;
// OnOpen does no blocking work on the client path.
protected internal override Task OnOpenAsync(TimeSpan timeout)
{
    OnOpen(timeout);
    return TaskHelpers.CompletedTask();
}
// APM (Begin/End) bridge over the task-based OnOpenAsync.
protected override IAsyncResult OnBeginOpen(TimeSpan timeout, AsyncCallback callback, object state)
{
    return OnOpenAsync(timeout).ToApm(callback, state);
}
// Completes the APM open, propagating any exception from the underlying task.
protected override void OnEndOpen(IAsyncResult result)
{
    result.ToApmEnd();
}
// After the open sequence completes, fill in defaults for anything it did not
// supply: a default identity verifier, and the process's default Windows
// network credentials for the server credential.
protected override void OnOpened()
{
    base.OnOpened();
    _identityVerifier = _identityVerifier ?? IdentityVerifier.CreateDefault();
    _serverCredential = _serverCredential ?? CredentialCache.DefaultNetworkCredentials;
}
/// <summary>
/// Client-side stream upgrade initiator: wraps the raw transport stream in a
/// NegotiateStream and authenticates to the server via SSPI, then validates the
/// server's identity against the expected endpoint identity.
/// </summary>
private class WindowsStreamSecurityUpgradeInitiator : StreamSecurityUpgradeInitiatorBase
{
    private WindowsStreamSecurityUpgradeProvider _parent;
    private IdentityVerifier _identityVerifier;
    private NetworkCredential _credential;
    private TokenImpersonationLevel _impersonationLevel;
    private SspiSecurityTokenProvider _clientTokenProvider;
    private bool _allowNtlm;
    public WindowsStreamSecurityUpgradeInitiator(
        WindowsStreamSecurityUpgradeProvider parent, EndpointAddress remoteAddress, Uri via)
        : base(FramingUpgradeString.Negotiate, remoteAddress, via)
    {
        _parent = parent;
        _clientTokenProvider = TransportSecurityHelpers.GetSspiTokenProvider(
            parent._securityTokenManager, remoteAddress, via, parent.Scheme, out _identityVerifier);
    }
    internal override async Task OpenAsync(TimeSpan timeout)
    {
        TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
        base.Open(timeoutHelper.RemainingTime());
        OutWrapper<TokenImpersonationLevel> impersonationLevelWrapper = new OutWrapper<TokenImpersonationLevel>();
        OutWrapper<bool> allowNtlmWrapper = new OutWrapper<bool>();
        SecurityUtils.OpenTokenProviderIfRequired(_clientTokenProvider, timeoutHelper.RemainingTime());
        // Resolve the client credential plus the negotiation settings
        // (impersonation level, NTLM fallback) from the token provider.
        _credential = await TransportSecurityHelpers.GetSspiCredentialAsync(
            _clientTokenProvider,
            impersonationLevelWrapper,
            allowNtlmWrapper,
            timeoutHelper.GetCancellationToken());
        _impersonationLevel = impersonationLevelWrapper.Value;
        // Read .Value explicitly (consistent with impersonationLevelWrapper above)
        // rather than relying on an implicit OutWrapper<bool> conversion.
        _allowNtlm = allowNtlmWrapper.Value;
    }
    internal override void Open(TimeSpan timeout)
    {
        // Fix: the previous code called OpenAsync(timeout).GetAwaiter() without
        // GetResult(), so the synchronous Open returned before the async open
        // completed and never observed its exceptions. Block until completion.
        OpenAsync(timeout).GetAwaiter().GetResult();
    }
    internal override void Close(TimeSpan timeout)
    {
        TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
        base.Close(timeoutHelper.RemainingTime());
        SecurityUtils.CloseTokenProviderIfRequired(_clientTokenProvider, timeoutHelper.RemainingTime());
    }
    // Builds the security property for the authenticated server, or null when
    // the negotiated remote identity has no principal name.
    private static SecurityMessageProperty CreateServerSecurity(NegotiateStream negotiateStream)
    {
        GenericIdentity remoteIdentity = (GenericIdentity)negotiateStream.RemoteIdentity;
        string principalName = remoteIdentity.Name;
        if ((principalName != null) && (principalName.Length > 0))
        {
            ReadOnlyCollection<IAuthorizationPolicy> authorizationPolicies = SecurityUtils.CreatePrincipalNameAuthorizationPolicies(principalName);
            SecurityMessageProperty result = new SecurityMessageProperty();
            result.TransportToken = new SecurityTokenSpecification(null, authorizationPolicies);
            result.ServiceSecurityContext = new ServiceSecurityContext(authorizationPolicies);
            return result;
        }
        else
        {
            return null;
        }
    }
    // Synchronous wrapper over OnInitiateUpgradeAsync; blocks for the result.
    protected override Stream OnInitiateUpgrade(Stream stream, out SecurityMessageProperty remoteSecurity)
    {
        OutWrapper<SecurityMessageProperty> remoteSecurityOut = new OutWrapper<SecurityMessageProperty>();
        var retVal = OnInitiateUpgradeAsync(stream, remoteSecurityOut).GetAwaiter().GetResult();
        remoteSecurity = remoteSecurityOut.Value;
        return retVal;
    }
    protected override async Task<Stream> OnInitiateUpgradeAsync(Stream stream, OutWrapper<SecurityMessageProperty> remoteSecurity)
    {
        NegotiateStream negotiateStream;
        string targetName;
        EndpointIdentity identity;
        if (WcfEventSource.Instance.WindowsStreamSecurityOnInitiateUpgradeIsEnabled())
        {
            WcfEventSource.Instance.WindowsStreamSecurityOnInitiateUpgrade();
        }
        // prepare: wrap the transport stream and resolve the SPN to authenticate against
        InitiateUpgradePrepare(stream, out negotiateStream, out targetName, out identity);
        // authenticate, converting auth/IO failures into SecurityNegotiationException
        try
        {
            await negotiateStream.AuthenticateAsClientAsync(_credential, targetName, _parent.ProtectionLevel, _impersonationLevel);
        }
        catch (AuthenticationException exception)
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SecurityNegotiationException(exception.Message,
                exception));
        }
        catch (IOException ioException)
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SecurityNegotiationException(
                SR.Format(SR.NegotiationFailedIO, ioException.Message), ioException));
        }
        remoteSecurity.Value = CreateServerSecurity(negotiateStream);
        ValidateMutualAuth(identity, negotiateStream, remoteSecurity.Value, _allowNtlm);
        return negotiateStream;
    }
    // Creates the NegotiateStream and derives the target SPN, preferring the
    // configured endpoint identity and falling back to the remote address.
    private void InitiateUpgradePrepare(
        Stream stream,
        out NegotiateStream negotiateStream,
        out string targetName,
        out EndpointIdentity identity)
    {
        negotiateStream = new NegotiateStream(stream);
        targetName = string.Empty;
        identity = null;
        if (_parent.IdentityVerifier.TryGetIdentity(RemoteAddress, Via, out identity))
        {
            targetName = SecurityUtils.GetSpnFromIdentity(identity, RemoteAddress);
        }
        else
        {
            targetName = SecurityUtils.GetSpnFromTarget(RemoteAddress);
        }
    }
    // Verifies the server's identity when mutual auth succeeded; when it did not
    // (e.g. NTLM), only allowed if the caller opted into NTLM.
    private void ValidateMutualAuth(EndpointIdentity expectedIdentity, NegotiateStream negotiateStream,
        SecurityMessageProperty remoteSecurity, bool allowNtlm)
    {
        if (negotiateStream.IsMutuallyAuthenticated)
        {
            if (expectedIdentity != null)
            {
                if (!_parent.IdentityVerifier.CheckAccess(expectedIdentity,
                    remoteSecurity.ServiceSecurityContext.AuthorizationContext))
                {
                    string primaryIdentity = SecurityUtils.GetIdentityNamesFromContext(remoteSecurity.ServiceSecurityContext.AuthorizationContext);
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SecurityNegotiationException(SR.Format(
                        SR.RemoteIdentityFailedVerification, primaryIdentity)));
                }
            }
        }
        else if (!allowNtlm)
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SecurityNegotiationException(SR.Format(SR.StreamMutualAuthNotSatisfied)));
        }
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security;
namespace System.Drawing.Internal
{
/// <summary>
/// P/Invoke wrappers around User32/Gdi32 device-context and GDI-object APIs.
/// Public wrappers track handles through System.Internal.HandleCollector and
/// assert on Win32 failures in debug builds.
/// </summary>
internal static partial class IntUnsafeNativeMethods
{
    [DllImport(ExternDll.User32, SetLastError = true, ExactSpelling = true, EntryPoint = "GetDC", CharSet = CharSet.Auto)]
    public static extern IntPtr IntGetDC(HandleRef hWnd);
    public static IntPtr GetDC(HandleRef hWnd)
    {
        IntPtr hdc = System.Internal.HandleCollector.Add(IntGetDC(hWnd), IntSafeNativeMethods.CommonHandles.HDC);
        DbgUtil.AssertWin32(hdc != IntPtr.Zero, "GetHdc([hWnd=0x{0:X8}]) failed.", hWnd);
        return hdc;
    }
    /// <summary>
    /// NOTE: DeleteDC is to be used to delete the hdc created from CreateCompatibleDC ONLY. All other hdcs should
    /// be deleted with DeleteHDC.
    /// </summary>
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "DeleteDC", CharSet = CharSet.Auto)]
    public static extern bool IntDeleteDC(HandleRef hDC);
    public static bool DeleteDC(HandleRef hDC)
    {
        // CreateCompatibleDC registered the handle under the GDI type, so remove it as GDI here.
        System.Internal.HandleCollector.Remove((IntPtr)hDC, IntSafeNativeMethods.CommonHandles.GDI);
        bool retVal = IntDeleteDC(hDC);
        DbgUtil.AssertWin32(retVal, "DeleteDC([hdc=0x{0:X8}]) failed.", hDC.Handle);
        return retVal;
    }
    public static bool DeleteHDC(HandleRef hDC)
    {
        System.Internal.HandleCollector.Remove((IntPtr)hDC, IntSafeNativeMethods.CommonHandles.HDC);
        bool retVal = IntDeleteDC(hDC);
        DbgUtil.AssertWin32(retVal, "DeleteHDC([hdc=0x{0:X8}]) failed.", hDC.Handle);
        return retVal;
    }
    [DllImport(ExternDll.User32, SetLastError = true, ExactSpelling = true, EntryPoint = "ReleaseDC", CharSet = CharSet.Auto)]
    public static extern int IntReleaseDC(HandleRef hWnd, HandleRef hDC);
    public static int ReleaseDC(HandleRef hWnd, HandleRef hDC)
    {
        System.Internal.HandleCollector.Remove((IntPtr)hDC, IntSafeNativeMethods.CommonHandles.HDC);
        // Note: retVal == 0 means it was not released but doesn't necessarily means an error; class or private DCs are never released.
        return IntReleaseDC(hWnd, hDC);
    }
    [DllImport(ExternDll.Gdi32, SetLastError = true, EntryPoint = "CreateDC", CharSet = CharSet.Auto)]
    public static extern IntPtr IntCreateDC(string lpszDriverName, string lpszDeviceName, string lpszOutput, HandleRef /*DEVMODE*/ lpInitData);
    public static IntPtr CreateDC(string lpszDriverName, string lpszDeviceName, string lpszOutput, HandleRef /*DEVMODE*/ lpInitData)
    {
        IntPtr hdc = System.Internal.HandleCollector.Add(IntCreateDC(lpszDriverName, lpszDeviceName, lpszOutput, lpInitData), IntSafeNativeMethods.CommonHandles.HDC);
        DbgUtil.AssertWin32(hdc != IntPtr.Zero, "CreateDC([driverName={0}], [deviceName={1}], [fileName={2}], [devMode={3}]) failed.", lpszDriverName, lpszDeviceName, lpszOutput, lpInitData.Handle);
        return hdc;
    }
    [DllImport(ExternDll.Gdi32, SetLastError = true, EntryPoint = "CreateIC", CharSet = CharSet.Auto)]
    public static extern IntPtr IntCreateIC(string lpszDriverName, string lpszDeviceName, string lpszOutput, HandleRef /*DEVMODE*/ lpInitData);
    public static IntPtr CreateIC(string lpszDriverName, string lpszDeviceName, string lpszOutput, HandleRef /*DEVMODE*/ lpInitData)
    {
        IntPtr hdc = System.Internal.HandleCollector.Add(IntCreateIC(lpszDriverName, lpszDeviceName, lpszOutput, lpInitData), IntSafeNativeMethods.CommonHandles.HDC);
        DbgUtil.AssertWin32(hdc != IntPtr.Zero, "CreateIC([driverName={0}], [deviceName={1}], [fileName={2}], [devMode={3}]) failed.", lpszDriverName, lpszDeviceName, lpszOutput, lpInitData.Handle);
        return hdc;
    }
    /// <summary>
    /// CreateCompatibleDC requires to add a GDI handle instead of an HDC handle to avoid perf penalty in HandleCollector.
    /// The hdc obtained from this method needs to be deleted with DeleteDC instead of DeleteHDC.
    /// </summary>
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "CreateCompatibleDC", CharSet = CharSet.Auto)]
    public static extern IntPtr IntCreateCompatibleDC(HandleRef hDC);
    public static IntPtr CreateCompatibleDC(HandleRef hDC)
    {
        IntPtr compatibleDc = System.Internal.HandleCollector.Add(IntCreateCompatibleDC(hDC), IntSafeNativeMethods.CommonHandles.GDI);
        DbgUtil.AssertWin32(compatibleDc != IntPtr.Zero, "CreateCompatibleDC([hdc=0x{0:X8}]) failed", hDC.Handle);
        return compatibleDc;
    }
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "SaveDC", CharSet = CharSet.Auto)]
    public static extern int IntSaveDC(HandleRef hDC);
    public static int SaveDC(HandleRef hDC)
    {
        int state = IntSaveDC(hDC);
        DbgUtil.AssertWin32(state != 0, "SaveDC([hdc=0x{0:X8}]) failed", hDC.Handle);
        return state;
    }
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "RestoreDC", CharSet = CharSet.Auto)]
    public static extern bool IntRestoreDC(HandleRef hDC, int nSavedDC);
    public static bool RestoreDC(HandleRef hDC, int nSavedDC)
    {
        bool retVal = IntRestoreDC(hDC, nSavedDC);
        // When a winforms app is closing, the cached MeasurementGraphics is finalized but it is possible that
        // its DeviceContext is finalized first, so when this method is called the DC has already been released, popping up the
        // assert window. Need to find a way to work around this and enable the assert IF NEEDED.
        // DbgUtil.AssertWin32(retVal, "RestoreDC([hdc=0x{0:X8}], [restoreState={1}]) failed.", (int)hDC.Handle, nSavedDC);
        return retVal;
    }
    [DllImport(ExternDll.User32, SetLastError = true, ExactSpelling = true)]
    public static extern IntPtr WindowFromDC(HandleRef hDC);
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "OffsetViewportOrgEx", CharSet = CharSet.Auto)]
    public static extern bool IntOffsetViewportOrgEx(HandleRef hDC, int nXOffset, int nYOffset, [In, Out] IntNativeMethods.POINT point);
    public static bool OffsetViewportOrgEx(HandleRef hDC, int nXOffset, int nYOffset, [In, Out] IntNativeMethods.POINT point)
    {
        bool retVal = IntOffsetViewportOrgEx(hDC, nXOffset, nYOffset, point);
        DbgUtil.AssertWin32(retVal, "OffsetViewportOrgEx([hdc=0x{0:X8}], dx=[{1}], dy=[{2}], [out pPoint]) failed.", hDC.Handle, nXOffset, nYOffset);
        return retVal;
    }
    // Region.
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "CombineRgn", CharSet = CharSet.Auto)]
    public static extern IntNativeMethods.RegionFlags IntCombineRgn(HandleRef hRgnDest, HandleRef hRgnSrc1, HandleRef hRgnSrc2, RegionCombineMode combineMode);
    public static IntNativeMethods.RegionFlags CombineRgn(HandleRef hRgnDest, HandleRef hRgnSrc1, HandleRef hRgnSrc2, RegionCombineMode combineMode)
    {
        Debug.Assert(hRgnDest.Wrapper != null && hRgnDest.Handle != IntPtr.Zero, "Destination region is invalid");
        Debug.Assert(hRgnSrc1.Wrapper != null && hRgnSrc1.Handle != IntPtr.Zero, "Source region 1 is invalid");
        Debug.Assert(hRgnSrc2.Wrapper != null && hRgnSrc2.Handle != IntPtr.Zero, "Source region 2 is invalid");
        if (hRgnDest.Wrapper == null || hRgnSrc1.Wrapper == null || hRgnSrc2.Wrapper == null)
        {
            return IntNativeMethods.RegionFlags.ERROR;
        }
        // Note: CombineRgn can return Error when no regions are combined, this is not an error condition.
        return IntCombineRgn(hRgnDest, hRgnSrc1, hRgnSrc2, combineMode);
    }
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "GetClipRgn", CharSet = CharSet.Auto)]
    public static extern int IntGetClipRgn(HandleRef hDC, HandleRef hRgn);
    public static int GetClipRgn(HandleRef hDC, HandleRef hRgn)
    {
        int retVal = IntGetClipRgn(hDC, hRgn);
        DbgUtil.AssertWin32(retVal != -1, "IntGetClipRgn([hdc=0x{0:X8}], [hRgn]) failed.", hDC.Handle);
        return retVal;
    }
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "SelectClipRgn", CharSet = CharSet.Auto)]
    public static extern IntNativeMethods.RegionFlags IntSelectClipRgn(HandleRef hDC, HandleRef hRgn);
    public static IntNativeMethods.RegionFlags SelectClipRgn(HandleRef hDC, HandleRef hRgn)
    {
        IntNativeMethods.RegionFlags result = IntSelectClipRgn(hDC, hRgn);
        DbgUtil.AssertWin32(result != IntNativeMethods.RegionFlags.ERROR, "SelectClipRgn([hdc=0x{0:X8}], [hRegion=0x{1:X8}]) failed.", hDC.Handle, hRgn.Handle);
        return result;
    }
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "GetRgnBox", CharSet = CharSet.Auto)]
    public static extern IntNativeMethods.RegionFlags IntGetRgnBox(HandleRef hRgn, [In, Out] ref IntNativeMethods.RECT clipRect);
    public static IntNativeMethods.RegionFlags GetRgnBox(HandleRef hRgn, [In, Out] ref IntNativeMethods.RECT clipRect)
    {
        IntNativeMethods.RegionFlags result = IntGetRgnBox(hRgn, ref clipRect);
        DbgUtil.AssertWin32(result != IntNativeMethods.RegionFlags.ERROR, "GetRgnBox([hRegion=0x{0:X8}], [out rect]) failed.", hRgn.Handle);
        return result;
    }
    // Font.
    [DllImport(ExternDll.Gdi32, SetLastError = true, EntryPoint = "CreateFontIndirect", CharSet = CharSet.Auto)]
#pragma warning disable CS0618 // Legacy code: We don't care about using obsolete API's.
    public static extern IntPtr IntCreateFontIndirect([In, Out, MarshalAs(UnmanagedType.AsAny)] object lf); // need object here since LOGFONT is not public.
#pragma warning restore CS0618
    // Common.
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "DeleteObject", CharSet = CharSet.Auto)]
    public static extern bool IntDeleteObject(HandleRef hObject);
    public static bool DeleteObject(HandleRef hObject)
    {
        System.Internal.HandleCollector.Remove((IntPtr)hObject, IntSafeNativeMethods.CommonHandles.GDI);
        bool retVal = IntDeleteObject(hObject);
        DbgUtil.AssertWin32(retVal, "DeleteObject(hObj=[0x{0:X8}]) failed.", hObject.Handle);
        return retVal;
    }
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, EntryPoint = "GetCurrentObject", CharSet = CharSet.Auto)]
    public static extern IntPtr IntGetCurrentObject(HandleRef hDC, int uObjectType);
    public static IntPtr GetCurrentObject(HandleRef hDC, int uObjectType)
    {
        IntPtr hGdiObj = IntGetCurrentObject(hDC, uObjectType);
        // If the selected object is a region the return value is HGDI_ERROR on failure.
        // Fix: pass hDC.Handle (not the HandleRef itself) so the {0:X8} format prints
        // the actual handle value, consistent with every other assert in this class;
        // also name the actual API (GetCurrentObject) in the message.
        DbgUtil.AssertWin32(hGdiObj != IntPtr.Zero, "GetCurrentObject(hdc=[0x{0:X8}], type=[{1}]) failed.", hDC.Handle, uObjectType);
        return hGdiObj;
    }
}
}
| |
// Transport Security Layer (TLS)
// Copyright (c) 2003-2004 Carlos Guzman Alvarez
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using Mono.Security.Protocol.Tls.Handshake;
namespace Mono.Security.Protocol.Tls
{
/// <summary>
/// Server-side TLS/SSL stream built on SslStreamBase. Drives the server half of
/// the handshake via ServerRecordProtocol and exposes delegate/event hooks for
/// client-certificate validation and private-key selection.
/// </summary>
public class SslServerStream : SslStreamBase
{
    #region Internal Events
    internal event CertificateValidationCallback ClientCertValidation;
    internal event PrivateKeySelectionCallback PrivateKeySelection;
    #endregion
    #region Properties
    // Certificate presented by the client; only exposed once the handshake has
    // finished, otherwise null.
    public X509Certificate ClientCertificate
    {
        get
        {
            if (this.context.HandshakeState == HandshakeState.Finished)
            {
                return this.context.ClientSettings.ClientCertificate;
            }
            return null;
        }
    }
    #endregion
    #region Callback Properties
    // Delegate-style accessors over the internal events above.
    public CertificateValidationCallback ClientCertValidationDelegate
    {
        get { return this.ClientCertValidation; }
        set { this.ClientCertValidation = value; }
    }
    public PrivateKeySelectionCallback PrivateKeyCertSelectionDelegate
    {
        get { return this.PrivateKeySelection; }
        set { this.PrivateKeySelection = value; }
    }
    #endregion
    // Newer validation callback that receives the full certificate chain.
    public event CertificateValidationCallback2 ClientCertValidation2;
    #region Constructors
    public SslServerStream(
        Stream stream,
        X509Certificate serverCertificate) : this(
        stream,
        serverCertificate,
        false,
        false,
        SecurityProtocolType.Default)
    {
    }
    public SslServerStream(
        Stream stream,
        X509Certificate serverCertificate,
        bool clientCertificateRequired,
        bool ownsStream): this(
        stream,
        serverCertificate,
        clientCertificateRequired,
        ownsStream,
        SecurityProtocolType.Default)
    {
    }
    public SslServerStream(
        Stream stream,
        X509Certificate serverCertificate,
        bool clientCertificateRequired,
        bool requestClientCertificate,
        bool ownsStream)
        : this (stream, serverCertificate, clientCertificateRequired, requestClientCertificate, ownsStream, SecurityProtocolType.Default)
    {
    }
    public SslServerStream(
        Stream stream,
        X509Certificate serverCertificate,
        bool clientCertificateRequired,
        bool ownsStream,
        SecurityProtocolType securityProtocolType)
        : this (stream, serverCertificate, clientCertificateRequired, false, ownsStream, securityProtocolType)
    {
    }
    // Designated constructor: builds the server context and record protocol.
    public SslServerStream(
        Stream stream,
        X509Certificate serverCertificate,
        bool clientCertificateRequired,
        bool requestClientCertificate,
        bool ownsStream,
        SecurityProtocolType securityProtocolType)
        : base(stream, ownsStream)
    {
        this.context = new ServerContext(
            this,
            securityProtocolType,
            serverCertificate,
            clientCertificateRequired,
            requestClientCertificate);
        this.protocol = new ServerRecordProtocol(innerStream, (ServerContext)this.context);
    }
    #endregion
    #region Finalizer
    ~SslServerStream()
    {
        this.Dispose(false);
    }
    #endregion
    #region IDisposable Methods
    protected override void Dispose(bool disposing)
    {
        base.Dispose(disposing);
        if (disposing)
        {
            // Drop callback references so subscribers are not kept alive.
            this.ClientCertValidation = null;
            this.PrivateKeySelection = null;
        }
    }
    #endregion
    #region Handsake Methods
    /*
        Client Server
        ClientHello -------->
        ServerHello
        Certificate*
        ServerKeyExchange*
        CertificateRequest*
        <-------- ServerHelloDone
        Certificate*
        ClientKeyExchange
        CertificateVerify*
        [ChangeCipherSpec]
        Finished -------->
        [ChangeCipherSpec]
        <-------- Finished
        Application Data <-------> Application Data
        Fig. 1 - Message flow for a full handshake
    */
    // Starts the server handshake: resets state if needed and begins receiving
    // the ClientHello record asynchronously.
    internal override IAsyncResult OnBeginNegotiateHandshake(AsyncCallback callback, object state)
    {
        // Reset the context if needed
        if (this.context.HandshakeState != HandshakeState.None)
        {
            this.context.Clear();
        }
        // Obtain supported cipher suites
        this.context.SupportedCiphers = CipherSuiteFactory.GetSupportedCiphers(this.context.SecurityProtocol);
        // Set handshake state
        this.context.HandshakeState = HandshakeState.Started;
        // Receive Client Hello message
        return this.protocol.BeginReceiveRecord(this.innerStream, callback, state);
    }
    // Completes the handshake after the ClientHello arrives: sends the server
    // flight, then pumps client records until the client's Finished message.
    internal override void OnNegotiateHandshakeCallback(IAsyncResult asyncResult)
    {
        // Receive Client Hello message and ignore it
        this.protocol.EndReceiveRecord(asyncResult);
        // If received message is not an ClientHello send a
        // Fatal Alert
        // NOTE(review): execution continues past SendAlert here — presumably
        // SendAlert terminates the session/throws; confirm in ServerRecordProtocol.
        if (this.context.LastHandshakeMsg != HandshakeType.ClientHello)
        {
            this.protocol.SendAlert(AlertDescription.UnexpectedMessage);
        }
        // Send ServerHello message
        this.protocol.SendRecord(HandshakeType.ServerHello);
        // Send ServerCertificate message
        this.protocol.SendRecord(HandshakeType.Certificate);
        // If the negotiated cipher is a KeyEx cipher send ServerKeyExchange
        if (this.context.Negotiating.Cipher.IsExportable)
        {
            this.protocol.SendRecord(HandshakeType.ServerKeyExchange);
        }
        // If the negotiated cipher is a KeyEx cipher or
        // the client certificate is required send the CertificateRequest message
        if (this.context.Negotiating.Cipher.IsExportable ||
            ((ServerContext)this.context).ClientCertificateRequired ||
            ((ServerContext)this.context).RequestClientCertificate)
        {
            this.protocol.SendRecord(HandshakeType.CertificateRequest);
        }
        // Send ServerHelloDone message
        this.protocol.SendRecord(HandshakeType.ServerHelloDone);
        // Receive client response, until the Client Finished message
        // is received. IE can be interrupted at this stage and never
        // complete the handshake
        while (this.context.LastHandshakeMsg != HandshakeType.Finished)
        {
            byte[] record = this.protocol.ReceiveRecord(this.innerStream);
            if ((record == null) || (record.Length == 0))
            {
                throw new TlsException(
                    AlertDescription.HandshakeFailiure,
                    "The client stopped the handshake.");
            }
        }
        // Send ChangeCipherSpec and ServerFinished messages
        this.protocol.SendChangeCipherSpec();
        this.protocol.SendRecord (HandshakeType.Finished);
        // The handshake is finished
        this.context.HandshakeState = HandshakeState.Finished;
        // Reset Handshake messages information
        this.context.HandshakeMessages.Reset ();
        // Clear Key Info
        this.context.ClearKeyInfo();
    }
    #endregion
    #region Event Methods
    // A server never selects a local certificate this way.
    internal override X509Certificate OnLocalCertificateSelection(X509CertificateCollection clientCertificates, X509Certificate serverCertificate, string targetHost, X509CertificateCollection serverRequestedCertificates)
    {
        throw new NotSupportedException();
    }
    // Validates the client certificate via the registered callback, or by
    // requiring an empty error list when no callback is set.
    internal override bool OnRemoteCertificateValidation(X509Certificate certificate, int[] errors)
    {
        if (this.ClientCertValidation != null)
        {
            return this.ClientCertValidation(certificate, errors);
        }
        return (errors != null && errors.Length == 0);
    }
    internal override bool HaveRemoteValidation2Callback {
        get { return ClientCertValidation2 != null; }
    }
    // Chain-based validation; returns null when no callback is registered.
    internal override ValidationResult OnRemoteCertificateValidation2 (Mono.Security.X509.X509CertificateCollection collection)
    {
        CertificateValidationCallback2 cb = ClientCertValidation2;
        if (cb != null)
            return cb (collection);
        return null;
    }
    internal bool RaiseClientCertificateValidation(
        X509Certificate certificate,
        int[] certificateErrors)
    {
        return base.RaiseRemoteCertificateValidation(certificate, certificateErrors);
    }
    // Resolves the private key for the server certificate via the registered
    // callback; null when no callback is set.
    internal override AsymmetricAlgorithm OnLocalPrivateKeySelection(X509Certificate certificate, string targetHost)
    {
        if (this.PrivateKeySelection != null)
        {
            return this.PrivateKeySelection(certificate, targetHost);
        }
        return null;
    }
    internal AsymmetricAlgorithm RaisePrivateKeySelection(
        X509Certificate certificate,
        string targetHost)
    {
        return base.RaiseLocalPrivateKeySelection(certificate, targetHost);
    }
    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using Microsoft.Data.Entity.Migrations;
using Microsoft.Data.Entity.Metadata;
namespace SoloProject.Migrations
{
// EF-generated initial migration: creates the ASP.NET Identity tables plus the
// application's Posts and Comments tables. Tables are created in dependency
// order (principals before dependents) and dropped in reverse in Down().
// NOTE(review): constraint names use CLR entity names (e.g. PK_IdentityRole on
// AspNetRoles) rather than table names — presumably EF's default naming; keep
// them stable, they are referenced by later migrations.
public partial class Initial : Migration
{
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.CreateTable(
            name: "AspNetRoles",
            columns: table => new
            {
                Id = table.Column<string>(nullable: false),
                ConcurrencyStamp = table.Column<string>(nullable: true),
                Name = table.Column<string>(nullable: true),
                NormalizedName = table.Column<string>(nullable: true)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_IdentityRole", x => x.Id);
            });
        migrationBuilder.CreateTable(
            name: "AspNetUsers",
            columns: table => new
            {
                Id = table.Column<string>(nullable: false),
                AccessFailedCount = table.Column<int>(nullable: false),
                ConcurrencyStamp = table.Column<string>(nullable: true),
                Email = table.Column<string>(nullable: true),
                EmailConfirmed = table.Column<bool>(nullable: false),
                LockoutEnabled = table.Column<bool>(nullable: false),
                LockoutEnd = table.Column<DateTimeOffset>(nullable: true),
                NormalizedEmail = table.Column<string>(nullable: true),
                NormalizedUserName = table.Column<string>(nullable: true),
                PasswordHash = table.Column<string>(nullable: true),
                PhoneNumber = table.Column<string>(nullable: true),
                PhoneNumberConfirmed = table.Column<bool>(nullable: false),
                SecurityStamp = table.Column<string>(nullable: true),
                TwoFactorEnabled = table.Column<bool>(nullable: false),
                UserName = table.Column<string>(nullable: true)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_ApplicationUser", x => x.Id);
            });
        migrationBuilder.CreateTable(
            name: "AspNetRoleClaims",
            columns: table => new
            {
                Id = table.Column<int>(nullable: false)
                    .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                ClaimType = table.Column<string>(nullable: true),
                ClaimValue = table.Column<string>(nullable: true),
                RoleId = table.Column<string>(nullable: false)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_IdentityRoleClaim<string>", x => x.Id);
                table.ForeignKey(
                    name: "FK_IdentityRoleClaim<string>_IdentityRole_RoleId",
                    column: x => x.RoleId,
                    principalTable: "AspNetRoles",
                    principalColumn: "Id",
                    onDelete: ReferentialAction.Cascade);
            });
        migrationBuilder.CreateTable(
            name: "AspNetUserClaims",
            columns: table => new
            {
                Id = table.Column<int>(nullable: false)
                    .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                ClaimType = table.Column<string>(nullable: true),
                ClaimValue = table.Column<string>(nullable: true),
                UserId = table.Column<string>(nullable: false)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_IdentityUserClaim<string>", x => x.Id);
                table.ForeignKey(
                    name: "FK_IdentityUserClaim<string>_ApplicationUser_UserId",
                    column: x => x.UserId,
                    principalTable: "AspNetUsers",
                    principalColumn: "Id",
                    onDelete: ReferentialAction.Cascade);
            });
        migrationBuilder.CreateTable(
            name: "AspNetUserLogins",
            columns: table => new
            {
                LoginProvider = table.Column<string>(nullable: false),
                ProviderKey = table.Column<string>(nullable: false),
                ProviderDisplayName = table.Column<string>(nullable: true),
                UserId = table.Column<string>(nullable: false)
            },
            constraints: table =>
            {
                // Composite key: a login is identified by provider + provider key.
                table.PrimaryKey("PK_IdentityUserLogin<string>", x => new { x.LoginProvider, x.ProviderKey });
                table.ForeignKey(
                    name: "FK_IdentityUserLogin<string>_ApplicationUser_UserId",
                    column: x => x.UserId,
                    principalTable: "AspNetUsers",
                    principalColumn: "Id",
                    onDelete: ReferentialAction.Cascade);
            });
        migrationBuilder.CreateTable(
            name: "AspNetUserRoles",
            columns: table => new
            {
                UserId = table.Column<string>(nullable: false),
                RoleId = table.Column<string>(nullable: false)
            },
            constraints: table =>
            {
                // Join table: composite key over both foreign keys.
                table.PrimaryKey("PK_IdentityUserRole<string>", x => new { x.UserId, x.RoleId });
                table.ForeignKey(
                    name: "FK_IdentityUserRole<string>_IdentityRole_RoleId",
                    column: x => x.RoleId,
                    principalTable: "AspNetRoles",
                    principalColumn: "Id",
                    onDelete: ReferentialAction.Cascade);
                table.ForeignKey(
                    name: "FK_IdentityUserRole<string>_ApplicationUser_UserId",
                    column: x => x.UserId,
                    principalTable: "AspNetUsers",
                    principalColumn: "Id",
                    onDelete: ReferentialAction.Cascade);
            });
        migrationBuilder.CreateTable(
            name: "Posts",
            columns: table => new
            {
                PostId = table.Column<int>(nullable: false)
                    .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                Content = table.Column<string>(nullable: true),
                Name = table.Column<string>(nullable: true),
                UserId = table.Column<string>(nullable: true)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_Post", x => x.PostId);
                // Restrict (not cascade): deleting a user must not delete posts.
                table.ForeignKey(
                    name: "FK_Post_ApplicationUser_UserId",
                    column: x => x.UserId,
                    principalTable: "AspNetUsers",
                    principalColumn: "Id",
                    onDelete: ReferentialAction.Restrict);
            });
        migrationBuilder.CreateTable(
            name: "Comments",
            columns: table => new
            {
                CommentId = table.Column<int>(nullable: false)
                    .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                CommentBody = table.Column<string>(nullable: true),
                Name = table.Column<string>(nullable: true),
                PostId = table.Column<int>(nullable: false),
                UserId = table.Column<string>(nullable: true)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_Comment", x => x.CommentId);
                // Cascade from Posts (comments die with their post) but
                // restrict from users, matching Posts above.
                table.ForeignKey(
                    name: "FK_Comment_Post_PostId",
                    column: x => x.PostId,
                    principalTable: "Posts",
                    principalColumn: "PostId",
                    onDelete: ReferentialAction.Cascade);
                table.ForeignKey(
                    name: "FK_Comment_ApplicationUser_UserId",
                    column: x => x.UserId,
                    principalTable: "AspNetUsers",
                    principalColumn: "Id",
                    onDelete: ReferentialAction.Restrict);
            });
        migrationBuilder.CreateIndex(
            name: "RoleNameIndex",
            table: "AspNetRoles",
            column: "NormalizedName");
        migrationBuilder.CreateIndex(
            name: "EmailIndex",
            table: "AspNetUsers",
            column: "NormalizedEmail");
        migrationBuilder.CreateIndex(
            name: "UserNameIndex",
            table: "AspNetUsers",
            column: "NormalizedUserName");
    }
    // Drops dependents before principals so foreign keys never dangle.
    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.DropTable("AspNetRoleClaims");
        migrationBuilder.DropTable("AspNetUserClaims");
        migrationBuilder.DropTable("AspNetUserLogins");
        migrationBuilder.DropTable("AspNetUserRoles");
        migrationBuilder.DropTable("Comments");
        migrationBuilder.DropTable("AspNetRoles");
        migrationBuilder.DropTable("Posts");
        migrationBuilder.DropTable("AspNetUsers");
    }
}
}
| |
namespace XenAdmin.Controls.Wlb
{
partial class WlbOptModeScheduler
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    // Dispose designer-managed components only on the managed path, and only
    // when the designer actually created a container.
    if (disposing && (components != null))
    {
        components.Dispose();
    }
    base.Dispose(disposing);
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(WlbOptModeScheduler));
XenAdmin.Controls.Wlb.TriggerPoints triggerPoints1 = new XenAdmin.Controls.Wlb.TriggerPoints();
this.panelScheduleList = new System.Windows.Forms.Panel();
this.panel1 = new System.Windows.Forms.Panel();
this.lvTaskList = new System.Windows.Forms.ListView();
this.columnHeaderHidden = new System.Windows.Forms.ColumnHeader();
this.columnHeaderMode = new System.Windows.Forms.ColumnHeader();
this.columnHeaderDay = new System.Windows.Forms.ColumnHeader();
this.columnHeaderTime = new System.Windows.Forms.ColumnHeader();
this.columnHeaderEnabled = new System.Windows.Forms.ColumnHeader();
this.flowLayoutPanel2 = new System.Windows.Forms.FlowLayoutPanel();
this.lableScheduledTaskBlurb = new System.Windows.Forms.Label();
this.flowLayoutPanel1 = new System.Windows.Forms.FlowLayoutPanel();
this.buttonDelete = new System.Windows.Forms.Button();
this.buttonEdit = new System.Windows.Forms.Button();
this.buttonAddNew = new System.Windows.Forms.Button();
this.weekView1 = new XenAdmin.Controls.Wlb.WeekView();
this.panelScheduleList.SuspendLayout();
this.panel1.SuspendLayout();
this.flowLayoutPanel2.SuspendLayout();
this.flowLayoutPanel1.SuspendLayout();
this.SuspendLayout();
//
// panelScheduleList
//
this.panelScheduleList.Controls.Add(this.panel1);
this.panelScheduleList.Controls.Add(this.flowLayoutPanel2);
this.panelScheduleList.Controls.Add(this.flowLayoutPanel1);
resources.ApplyResources(this.panelScheduleList, "panelScheduleList");
this.panelScheduleList.Name = "panelScheduleList";
//
// panel1
//
this.panel1.Controls.Add(this.weekView1);
this.panel1.Controls.Add(this.lvTaskList);
resources.ApplyResources(this.panel1, "panel1");
this.panel1.Name = "panel1";
//
// lvTaskList
//
resources.ApplyResources(this.lvTaskList, "lvTaskList");
this.lvTaskList.CheckBoxes = true;
this.lvTaskList.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
this.columnHeaderHidden,
this.columnHeaderMode,
this.columnHeaderDay,
this.columnHeaderTime,
this.columnHeaderEnabled});
this.lvTaskList.FullRowSelect = true;
this.lvTaskList.HeaderStyle = System.Windows.Forms.ColumnHeaderStyle.Nonclickable;
this.lvTaskList.HideSelection = false;
this.lvTaskList.MultiSelect = false;
this.lvTaskList.Name = "lvTaskList";
this.lvTaskList.OwnerDraw = true;
this.lvTaskList.ShowItemToolTips = true;
this.lvTaskList.UseCompatibleStateImageBehavior = false;
this.lvTaskList.View = System.Windows.Forms.View.Details;
this.lvTaskList.MouseDoubleClick += new System.Windows.Forms.MouseEventHandler(this.lvTaskList_MouseDoubleClick);
this.lvTaskList.DrawColumnHeader += new System.Windows.Forms.DrawListViewColumnHeaderEventHandler(this.lvTaskList_DrawColumnHeader);
this.lvTaskList.Resize += new System.EventHandler(this.lvTaskList_Resize);
this.lvTaskList.EnabledChanged += new System.EventHandler(this.lvTaskList_EnabledChanged);
this.lvTaskList.DrawItem += new System.Windows.Forms.DrawListViewItemEventHandler(this.lvTaskList_DrawItem);
this.lvTaskList.ColumnWidthChanged += new System.Windows.Forms.ColumnWidthChangedEventHandler(this.lvTaskList_ColumnWidthChanged);
this.lvTaskList.SelectedIndexChanged += new System.EventHandler(this.lvTaskList_SelectedIndexChanged);
this.lvTaskList.MouseDown += new System.Windows.Forms.MouseEventHandler(this.lvTaskList_MouseDown);
this.lvTaskList.DrawSubItem += new System.Windows.Forms.DrawListViewSubItemEventHandler(this.lvTaskList_DrawSubItem);
//
// columnHeaderHidden
//
this.columnHeaderHidden.Text = global::XenAdmin.Messages.SOLUTION_UNKNOWN;
resources.ApplyResources(this.columnHeaderHidden, "columnHeaderHidden");
//
// columnHeaderMode
//
resources.ApplyResources(this.columnHeaderMode, "columnHeaderMode");
//
// columnHeaderDay
//
resources.ApplyResources(this.columnHeaderDay, "columnHeaderDay");
//
// columnHeaderTime
//
resources.ApplyResources(this.columnHeaderTime, "columnHeaderTime");
//
// columnHeaderEnabled
//
resources.ApplyResources(this.columnHeaderEnabled, "columnHeaderEnabled");
//
// flowLayoutPanel2
//
resources.ApplyResources(this.flowLayoutPanel2, "flowLayoutPanel2");
this.flowLayoutPanel2.Controls.Add(this.lableScheduledTaskBlurb);
this.flowLayoutPanel2.Name = "flowLayoutPanel2";
//
// lableScheduledTaskBlurb
//
resources.ApplyResources(this.lableScheduledTaskBlurb, "lableScheduledTaskBlurb");
this.lableScheduledTaskBlurb.Name = "lableScheduledTaskBlurb";
//
// flowLayoutPanel1
//
this.flowLayoutPanel1.Controls.Add(this.buttonDelete);
this.flowLayoutPanel1.Controls.Add(this.buttonEdit);
this.flowLayoutPanel1.Controls.Add(this.buttonAddNew);
resources.ApplyResources(this.flowLayoutPanel1, "flowLayoutPanel1");
this.flowLayoutPanel1.Name = "flowLayoutPanel1";
//
// buttonDelete
//
resources.ApplyResources(this.buttonDelete, "buttonDelete");
this.buttonDelete.Name = "buttonDelete";
this.buttonDelete.Click += new System.EventHandler(this.buttonDelete_Click);
//
// buttonEdit
//
resources.ApplyResources(this.buttonEdit, "buttonEdit");
this.buttonEdit.BackColor = System.Drawing.Color.Transparent;
this.buttonEdit.Name = "buttonEdit";
this.buttonEdit.UseVisualStyleBackColor = false;
this.buttonEdit.Click += new System.EventHandler(this.buttonEdit_Click);
//
// buttonAddNew
//
resources.ApplyResources(this.buttonAddNew, "buttonAddNew");
this.buttonAddNew.BackColor = System.Drawing.Color.Transparent;
this.buttonAddNew.Name = "buttonAddNew";
this.buttonAddNew.UseVisualStyleBackColor = false;
this.buttonAddNew.Click += new System.EventHandler(this.buttonAddNew_Click);
//
// weekView1
//
this.weekView1.BarHeight = 22;
this.weekView1.BarPadding = new System.Windows.Forms.Padding(3, 2, 3, 1);
this.weekView1.CurrentTimeMarkColor = System.Drawing.Color.Red;
this.weekView1.DayLabelColor = System.Drawing.Color.DarkGray;
this.weekView1.DayLabelPadding = new System.Windows.Forms.Padding(0, 3, 3, 3);
resources.ApplyResources(this.weekView1, "weekView1");
this.weekView1.GridColor = System.Drawing.SystemColors.ActiveBorder;
this.weekView1.HightlightColor = System.Drawing.Color.Yellow;
this.weekView1.HourLabelColor = System.Drawing.Color.DarkGray;
this.weekView1.HourLabelFont = new System.Drawing.Font("Microsoft Sans Serif", 6.75F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.weekView1.HourLabelInterval = 6;
this.weekView1.HourLabelPadding = new System.Windows.Forms.Padding(0, 0, 3, 3);
this.weekView1.HourLineColor = System.Drawing.Color.DarkGray;
this.weekView1.HourLineInterval = 6;
this.weekView1.LargeTickHeight = 5;
this.weekView1.MinimumSize = new System.Drawing.Size(350, 45);
this.weekView1.Name = "weekView1";
this.weekView1.SelectedItemHighlightType = XenAdmin.Controls.Wlb.WeekView.HighlightType.Box;
this.weekView1.ShowCurrentTimeMark = true;
this.weekView1.SmalltickHeight = 3;
triggerPoints1.Selected = null;
this.weekView1.TriggerPoints = triggerPoints1;
this.weekView1.OnTriggerPointDoubleClick += new System.Windows.Forms.MouseEventHandler(this.weekView1_OnTriggerPointDoubleClick);
this.weekView1.OnTriggerPointClick += new System.Windows.Forms.MouseEventHandler(this.weekView1_OnTriggerPointClick);
//
// WlbOptModeScheduler
//
resources.ApplyResources(this, "$this");
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
this.Controls.Add(this.panelScheduleList);
this.Name = "WlbOptModeScheduler";
this.panelScheduleList.ResumeLayout(false);
this.panelScheduleList.PerformLayout();
this.panel1.ResumeLayout(false);
this.flowLayoutPanel2.ResumeLayout(false);
this.flowLayoutPanel2.PerformLayout();
this.flowLayoutPanel1.ResumeLayout(false);
this.flowLayoutPanel1.PerformLayout();
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.Panel panelScheduleList;
private System.Windows.Forms.FlowLayoutPanel flowLayoutPanel1;
private System.Windows.Forms.Button buttonDelete;
private System.Windows.Forms.Button buttonAddNew;
private System.Windows.Forms.Button buttonEdit;
private System.Windows.Forms.FlowLayoutPanel flowLayoutPanel2;
private System.Windows.Forms.Label lableScheduledTaskBlurb;
private System.Windows.Forms.Panel panel1;
private System.Windows.Forms.ListView lvTaskList;
private WeekView weekView1;
private System.Windows.Forms.ColumnHeader columnHeaderHidden;
private System.Windows.Forms.ColumnHeader columnHeaderMode;
private System.Windows.Forms.ColumnHeader columnHeaderDay;
private System.Windows.Forms.ColumnHeader columnHeaderTime;
private System.Windows.Forms.ColumnHeader columnHeaderEnabled;
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics;
using System.Globalization;
namespace System.Xml
{
// Specifies formatting options for XmlTextWriter.
internal enum Formatting
{
    // No special formatting is done (this is the default).
    None,
    // This option causes child elements to be indented using the Indentation and IndentChar properties.
    // It only indents Element Content (http://www.w3.org/TR/1998/REC-xml-19980210#sec-element-content)
    // and not Mixed Content (http://www.w3.org/TR/1998/REC-xml-19980210#sec-mixed-content)
    // according to the XML 1.0 definitions of these terms.
    Indented,
};
// Represents a writer that provides a fast, non-cached, forward-only way of generating XML streams
// containing XML documents that conform to the W3C Extensible Markup Language (XML) 1.0 specification
// and the Namespaces in XML specification.
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
internal class XmlTextWriter : XmlWriter
{
//
// Private types
//
// Lifecycle of a namespace declaration relative to the current element scope.
enum NamespaceState
{
    Uninitialized,
    NotDeclaredButInScope,
    DeclaredButNotWrittenOut,
    DeclaredAndWrittenOut
}
// Per-open-element bookkeeping kept on the element stack.
struct TagInfo
{
    internal string name;
    internal string prefix;
    internal string defaultNs;              // default namespace in scope for this element
    internal NamespaceState defaultNsState;
    internal XmlSpace xmlSpace;             // xml:space declared on this element, or None
    internal string xmlLang;                // xml:lang declared on this element, or null
    internal int prevNsTop;                 // nsTop to restore when this element closes
    internal int prefixCount;               // count of auto-generated prefixes in this scope
    internal bool mixed; // whether to pretty print the contents of this element.
    internal void Init(int nsTop)
    {
        // NOTE(review): `prefix` is deliberately not reset here — the caller
        // (WriteStartElement) assigns it right after pushing the stack entry.
        name = null;
        defaultNs = String.Empty;
        defaultNsState = NamespaceState.Uninitialized;
        xmlSpace = XmlSpace.None;
        xmlLang = null;
        prevNsTop = nsTop;
        prefixCount = 0;
        mixed = false;
    }
}
// One prefix->URI binding on the namespace stack.
struct Namespace
{
    internal string prefix;
    internal string ns;
    internal bool declared;     // true once the xmlns attribute has been written out
    internal int prevNsIndex;   // previous binding of the same prefix (-1 = none)
    internal void Set(string prefix, string ns, bool declared)
    {
        this.prefix = prefix;
        this.ns = ns;
        this.declared = declared;
        this.prevNsIndex = -1;
    }
}
// Marks attributes that get special handling (xml:space, xml:lang, xmlns).
enum SpecialAttr
{
    None,
    XmlSpace,
    XmlLang,
    XmlNs
};
// States of the AutoComplete state machine (column index into the state tables).
private enum State
{
    Start,
    Prolog,
    PostDTD,
    Element,
    Attribute,
    Content,
    AttrOnly,
    Epilog,
    Error,
    Closed,
}
// Write operations fed to AutoComplete (row index into the state tables).
private enum Token
{
    PI,
    Doctype,
    Comment,
    CData,
    StartElement,
    EndElement,
    LongEndElement,
    StartAttribute,
    EndAttribute,
    Content,
    Base64,
    RawData,
    Whitespace,
    Empty
}
//
// Fields
//
// output
TextWriter textWriter;          // null only when created via the internal parameterless ctor
XmlTextEncoder xmlEncoder;
Encoding encoding;
// formatting
Formatting formatting;
bool indented; // perf - faster to check a boolean.
int indentation;
char indentChar;
// element stack
TagInfo[] stack;                // stack[0] is a sentinel; `top` indexes the current element
int top;
// state machine for AutoComplete
State[] stateTable;             // stateTableDefault or stateTableDocument
State currentState;
Token lastToken;
// Base64 content
XmlTextWriterBase64Encoder base64Encoder;
// misc
char quoteChar;                 // quote char requested via QuoteChar
char curQuoteChar;              // quote char actually in effect for the open attribute
bool namespaces;
SpecialAttr specialAttr;
string prefixForXmlNs;
bool flush;                     // true while inside a WriteBase64 run
// namespaces
Namespace[] nsStack;
int nsTop;
Dictionary<string, int> nsHashtable;    // presumably a prefix lookup built for deep stacks — confirm against LookupNamespace
bool useNsHashtable;
// char types
XmlCharType xmlCharType = XmlCharType.Instance;
//
// Constants and constant tables
//
const int NamespaceStackInitialSize = 8;
#if DEBUG
const int MaxNamespacesWalkCount = 3;   // small in DEBUG to exercise the hashtable path
#else
const int MaxNamespacesWalkCount = 16;
#endif
// Diagnostic names; MUST stay in the same order as the State enum.
static string[] stateName = {
    "Start",
    "Prolog",
    "PostDTD",
    "Element",
    "Attribute",
    "Content",
    "AttrOnly",
    "Epilog",
    "Error",
    "Closed",
};
// Diagnostic names; MUST stay in the same order as the Token enum.
static string[] tokenName = {
    "PI",
    "Doctype",
    "Comment",
    "CData",
    "StartElement",
    "EndElement",
    "LongEndElement",
    "StartAttribute",
    "EndAttribute",
    "Content",
    "Base64",
    "RawData",
    "Whitespace",
    "Empty"
};
// Transition tables indexed as stateTable[(int)token * 8 + (int)state]
// (see AutoComplete), i.e. 8 State columns per Token row. Fragment writing
// uses the default table; WriteStartDocument switches to the stricter
// document table. Row/column order is load-bearing — do not reorder.
static readonly State[] stateTableDefault = {
    //                         State.Start      State.Prolog     State.PostDTD    State.Element    State.Attribute  State.Content    State.AttrOnly   State.Epilog
    //
    /* Token.PI             */ State.Prolog,    State.Prolog,    State.PostDTD,   State.Content,   State.Content,   State.Content,   State.Error,     State.Epilog,
    /* Token.Doctype        */ State.PostDTD,   State.PostDTD,   State.Error,     State.Error,     State.Error,     State.Error,     State.Error,     State.Error,
    /* Token.Comment        */ State.Prolog,    State.Prolog,    State.PostDTD,   State.Content,   State.Content,   State.Content,   State.Error,     State.Epilog,
    /* Token.CData          */ State.Content,   State.Content,   State.Error,     State.Content,   State.Content,   State.Content,   State.Error,     State.Epilog,
    /* Token.StartElement   */ State.Element,   State.Element,   State.Element,   State.Element,   State.Element,   State.Element,   State.Error,     State.Element,
    /* Token.EndElement     */ State.Error,     State.Error,     State.Error,     State.Content,   State.Content,   State.Content,   State.Error,     State.Error,
    /* Token.LongEndElement */ State.Error,     State.Error,     State.Error,     State.Content,   State.Content,   State.Content,   State.Error,     State.Error,
    /* Token.StartAttribute */ State.AttrOnly,  State.Error,     State.Error,     State.Attribute, State.Attribute, State.Error,     State.Error,     State.Error,
    /* Token.EndAttribute   */ State.Error,     State.Error,     State.Error,     State.Error,     State.Element,   State.Error,     State.Epilog,    State.Error,
    /* Token.Content        */ State.Content,   State.Content,   State.Error,     State.Content,   State.Attribute, State.Content,   State.Attribute, State.Epilog,
    /* Token.Base64         */ State.Content,   State.Content,   State.Error,     State.Content,   State.Attribute, State.Content,   State.Attribute, State.Epilog,
    /* Token.RawData        */ State.Prolog,    State.Prolog,    State.PostDTD,   State.Content,   State.Attribute, State.Content,   State.Attribute, State.Epilog,
    /* Token.Whitespace     */ State.Prolog,    State.Prolog,    State.PostDTD,   State.Content,   State.Attribute, State.Content,   State.Attribute, State.Epilog,
};
static readonly State[] stateTableDocument = {
    //                         State.Start      State.Prolog     State.PostDTD    State.Element    State.Attribute  State.Content    State.AttrOnly   State.Epilog
    //
    /* Token.PI             */ State.Error,     State.Prolog,    State.PostDTD,   State.Content,   State.Content,   State.Content,   State.Error,     State.Epilog,
    /* Token.Doctype        */ State.Error,     State.PostDTD,   State.Error,     State.Error,     State.Error,     State.Error,     State.Error,     State.Error,
    /* Token.Comment        */ State.Error,     State.Prolog,    State.PostDTD,   State.Content,   State.Content,   State.Content,   State.Error,     State.Epilog,
    /* Token.CData          */ State.Error,     State.Error,     State.Error,     State.Content,   State.Content,   State.Content,   State.Error,     State.Error,
    /* Token.StartElement   */ State.Error,     State.Element,   State.Element,   State.Element,   State.Element,   State.Element,   State.Error,     State.Error,
    /* Token.EndElement     */ State.Error,     State.Error,     State.Error,     State.Content,   State.Content,   State.Content,   State.Error,     State.Error,
    /* Token.LongEndElement */ State.Error,     State.Error,     State.Error,     State.Content,   State.Content,   State.Content,   State.Error,     State.Error,
    /* Token.StartAttribute */ State.Error,     State.Error,     State.Error,     State.Attribute, State.Attribute, State.Error,     State.Error,     State.Error,
    /* Token.EndAttribute   */ State.Error,     State.Error,     State.Error,     State.Error,     State.Element,   State.Error,     State.Error,     State.Error,
    /* Token.Content        */ State.Error,     State.Error,     State.Error,     State.Content,   State.Attribute, State.Content,   State.Error,     State.Error,
    /* Token.Base64         */ State.Error,     State.Error,     State.Error,     State.Content,   State.Attribute, State.Content,   State.Error,     State.Error,
    /* Token.RawData        */ State.Error,     State.Prolog,    State.PostDTD,   State.Content,   State.Attribute, State.Content,   State.Error,     State.Epilog,
    /* Token.Whitespace     */ State.Error,     State.Prolog,    State.PostDTD,   State.Content,   State.Attribute, State.Content,   State.Error,     State.Epilog,
};
//
// Constructors
//
// Shared initialization; note that textWriter/xmlEncoder are NOT assigned
// here — only the public constructors set them.
internal XmlTextWriter()
{
    namespaces = true;
    formatting = Formatting.None;
    indentation = 2;
    indentChar = ' ';
    // namespaces
    nsStack = new Namespace[NamespaceStackInitialSize];
    nsTop = -1;
    // element stack
    stack = new TagInfo[10];
    top = 0;// 0 is an empty sentinel element
    stack[top].Init(-1);
    quoteChar = '"';
    stateTable = stateTableDefault;
    currentState = State.Start;
    lastToken = Token.Empty;
}
// Creates an instance of the XmlTextWriter class using the specified stream.
// A null encoding leaves the choice to StreamWriter's default.
public XmlTextWriter(Stream w, Encoding encoding) : this()
{
    this.encoding = encoding;
    textWriter = encoding == null
        ? new StreamWriter(w)
        : new StreamWriter(w, encoding);
    xmlEncoder = new XmlTextEncoder(textWriter);
    xmlEncoder.QuoteChar = this.quoteChar;
}
// Creates an instance of the XmlTextWriter class using the specified TextWriter.
// The writer's own Encoding is used for the XML declaration.
public XmlTextWriter(TextWriter w) : this()
{
    textWriter = w;
    encoding = w.Encoding;
    xmlEncoder = new XmlTextEncoder(w);
    xmlEncoder.QuoteChar = this.quoteChar;
}
//
// XmlTextWriter properties
//
// Gets the XmlTextWriter base stream, or null when the writer was not
// created over a Stream (e.g. the TextWriter constructor was used).
public Stream BaseStream
{
    get
    {
        StreamWriter streamWriter = textWriter as StreamWriter;
        if (streamWriter != null)
        {
            return streamWriter.BaseStream;
        }
        return null;
    }
}
// Gets or sets a value indicating whether to do namespace support.
// Can only be changed before the first write (State.Start).
public bool Namespaces
{
    get { return this.namespaces; }
    set
    {
        if (this.currentState != State.Start)
            throw new InvalidOperationException(SR.Xml_NotInWriteState);
        this.namespaces = value;
    }
}
// Indicates how the output is formatted.
// `indented` caches the comparison so hot paths test a bool, not the enum.
public Formatting Formatting
{
    get { return this.formatting; }
    set { this.formatting = value; this.indented = value == Formatting.Indented; }
}
// Gets or sets how many IndentChars to write for each level in the hierarchy when Formatting is set to "Indented".
public int Indentation
{
    get { return this.indentation; }
    set
    {
        // Negative indentation is meaningless; zero is allowed.
        if (value < 0)
            throw new ArgumentException(SR.Xml_InvalidIndentation);
        this.indentation = value;
    }
}
// Gets or sets which character to use for indenting when Formatting is set to "Indented".
public char IndentChar
{
    get { return this.indentChar; }
    set { this.indentChar = value; }
}
// Gets or sets which character to use to quote attribute values.
// Only '"' and '\'' are legal XML attribute delimiters.
public char QuoteChar
{
    get { return this.quoteChar; }
    set
    {
        if (value != '"' && value != '\'')
        {
            throw new ArgumentException(SR.Xml_InvalidQuote);
        }
        this.quoteChar = value;
        this.xmlEncoder.QuoteChar = value;
    }
}
//
// XmlWriter implementation
//
// Writes out the XML declaration with the version "1.0".
// -1 means: omit the standalone attribute entirely.
public override void WriteStartDocument()
{
    StartDocument(-1);
}
// Writes out the XML declaration with the version "1.0" and the standalone attribute.
public override void WriteStartDocument(bool standalone)
{
    // StartDocument's convention: 1 => standalone="yes", 0 => standalone="no".
    if (standalone)
    {
        StartDocument(1);
    }
    else
    {
        StartDocument(0);
    }
}
// Closes any open elements or attributes and puts the writer back in the Start state.
public override void WriteEndDocument()
{
    try
    {
        // Close every still-open attribute and element.
        AutoCompleteAll();
        if (this.currentState != State.Epilog)
        {
            if (this.currentState == State.Closed)
            {
                throw new ArgumentException(SR.Xml_ClosedOrError);
            }
            else
            {
                // Not in Epilog means no root element was ever written.
                throw new ArgumentException(SR.Xml_NoRoot);
            }
        }
        // Reset so the writer can be reused for another document.
        this.stateTable = stateTableDefault;
        this.currentState = State.Start;
        this.lastToken = Token.Empty;
    }
    catch
    {
        // Any failure poisons the writer: all further calls throw.
        currentState = State.Error;
        throw;
    }
}
// Writes out the DOCTYPE declaration with the specified name and optional attributes.
public override void WriteDocType(string name, string pubid, string sysid, string subset)
{
    try
    {
        ValidateName(name, false);
        AutoComplete(Token.Doctype);
        textWriter.Write("<!DOCTYPE ");
        textWriter.Write(name);
        if (pubid != null)
        {
            // PUBLIC form always carries a system id too (written even if null).
            textWriter.Write(" PUBLIC ");
            textWriter.Write(quoteChar);
            textWriter.Write(pubid);
            textWriter.Write(quoteChar);
            textWriter.Write(' ');
            textWriter.Write(quoteChar);
            textWriter.Write(sysid);
            textWriter.Write(quoteChar);
        }
        else if (sysid != null)
        {
            textWriter.Write(" SYSTEM ");
            textWriter.Write(quoteChar);
            textWriter.Write(sysid);
            textWriter.Write(quoteChar);
        }
        if (subset != null)
        {
            // Internal DTD subset, written verbatim.
            textWriter.Write('[');
            textWriter.Write(subset);
            textWriter.Write(']');
        }
        textWriter.Write('>');
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out the specified start tag and associates it with the given namespace and prefix.
// ns == null means "use whatever the prefix is already bound to";
// ns == ""   means "no namespace" (forces a null prefix).
public override void WriteStartElement(string prefix, string localName, string ns)
{
    try
    {
        AutoComplete(Token.StartElement);
        PushStack();
        textWriter.Write('<');
        if (this.namespaces)
        {
            // Propagate default namespace and mix model down the stack.
            stack[top].defaultNs = stack[top - 1].defaultNs;
            if (stack[top - 1].defaultNsState != NamespaceState.Uninitialized)
                stack[top].defaultNsState = NamespaceState.NotDeclaredButInScope;
            stack[top].mixed = stack[top - 1].mixed;
            if (ns == null)
            {
                // use defined prefix
                if (!string.IsNullOrEmpty(prefix) && (LookupNamespace(prefix) == -1))
                {
                    throw new ArgumentException(SR.Xml_UndefPrefix);
                }
            }
            else
            {
                if (prefix == null)
                {
                    // Reuse an existing prefix for this URI if one is in scope.
                    string definedPrefix = FindPrefix(ns);
                    if (definedPrefix != null)
                    {
                        prefix = definedPrefix;
                    }
                    else
                    {
                        PushNamespace(null, ns, false); // new default
                    }
                }
                else if (prefix.Length == 0)
                {
                    PushNamespace(null, ns, false); // new default
                }
                else
                {
                    // Empty URI with a non-empty prefix: drop the prefix.
                    if (ns.Length == 0)
                    {
                        prefix = null;
                    }
                    VerifyPrefixXml(prefix, ns);
                    PushNamespace(prefix, ns, false); // define
                }
            }
            stack[top].prefix = null;
            if (!string.IsNullOrEmpty(prefix))
            {
                stack[top].prefix = prefix;
                textWriter.Write(prefix);
                textWriter.Write(':');
            }
        }
        else
        {
            // Namespace support disabled: any prefix/URI is an error.
            if (!string.IsNullOrEmpty(ns) || !string.IsNullOrEmpty(prefix))
            {
                throw new ArgumentException(SR.Xml_NoNamespaces);
            }
        }
        stack[top].name = localName;
        textWriter.Write(localName);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Closes one element and pops the corresponding namespace scope.
// May emit the short form "/>" for an empty element (longFormat == false).
public override void WriteEndElement()
{
    InternalWriteEndElement(false);
}
// Closes one element and pops the corresponding namespace scope.
// Always emits an explicit "</name>" end tag, never the short "/>" form.
public override void WriteFullEndElement()
{
    InternalWriteEndElement(true);
}
// Writes the start of an attribute.
// Handles the reserved xml:lang / xml:space / xmlns attributes specially and
// resolves/generates a prefix when a namespace URI is supplied.
public override void WriteStartAttribute(string prefix, string localName, string ns)
{
    try
    {
        AutoComplete(Token.StartAttribute);
        this.specialAttr = SpecialAttr.None;
        if (this.namespaces)
        {
            // Normalize "" to null so later comparisons are uniform.
            if (prefix != null && prefix.Length == 0)
            {
                prefix = null;
            }
            // Namespace-declaration URI with no prefix: this must be xmlns:local.
            if (ns == XmlConst.ReservedNsXmlNs && prefix == null && localName != "xmlns")
            {
                prefix = "xmlns";
            }
            if (prefix == "xml")
            {
                if (localName == "lang")
                {
                    this.specialAttr = SpecialAttr.XmlLang;
                }
                else if (localName == "space")
                {
                    this.specialAttr = SpecialAttr.XmlSpace;
                }
            }
            else if (prefix == "xmlns")
            {
                // xmlns:* must use the reserved namespace URI (or none at all).
                if (XmlConst.ReservedNsXmlNs != ns && ns != null)
                {
                    throw new ArgumentException(SR.Xml_XmlnsBelongsToReservedNs);
                }
                if (string.IsNullOrEmpty(localName))
                {
                    // Plain "xmlns" (default namespace declaration).
                    localName = prefix;
                    prefix = null;
                    this.prefixForXmlNs = null;
                }
                else
                {
                    this.prefixForXmlNs = localName;
                }
                this.specialAttr = SpecialAttr.XmlNs;
            }
            else if (prefix == null && localName == "xmlns")
            {
                if (XmlConst.ReservedNsXmlNs != ns && ns != null)
                {
                    // add the below line back in when DOM is fixed
                    throw new ArgumentException(SR.Xml_XmlnsBelongsToReservedNs);
                }
                this.specialAttr = SpecialAttr.XmlNs;
                this.prefixForXmlNs = null;
            }
            else
            {
                if (ns == null)
                {
                    // use defined prefix
                    if (prefix != null && (LookupNamespace(prefix) == -1))
                    {
                        throw new ArgumentException(SR.Xml_UndefPrefix);
                    }
                }
                else if (ns.Length == 0)
                {
                    // empty namespace require null prefix
                    prefix = string.Empty;
                }
                else
                { // ns.Length != 0
                    VerifyPrefixXml(prefix, ns);
                    // The requested prefix is already bound in this scope
                    // (possibly to a different URI) — pick/generate another.
                    if (prefix != null && LookupNamespaceInCurrentScope(prefix) != -1)
                    {
                        prefix = null;
                    }
                    // Now verify prefix validity
                    string definedPrefix = FindPrefix(ns);
                    if (definedPrefix != null && (prefix == null || prefix == definedPrefix))
                    {
                        prefix = definedPrefix;
                    }
                    else
                    {
                        if (prefix == null)
                        {
                            prefix = GeneratePrefix(); // need a prefix if
                        }
                        PushNamespace(prefix, ns, false);
                    }
                }
            }
            if (!string.IsNullOrEmpty(prefix))
            {
                textWriter.Write(prefix);
                textWriter.Write(':');
            }
        }
        else
        {
            // Namespaces disabled: no prefix/URI allowed, but the literal
            // names "xml:lang"/"xml:space" still get their special handling.
            if (!string.IsNullOrEmpty(ns) || !string.IsNullOrEmpty(prefix))
            {
                throw new ArgumentException(SR.Xml_NoNamespaces);
            }
            if (localName == "xml:lang")
            {
                this.specialAttr = SpecialAttr.XmlLang;
            }
            else if (localName == "xml:space")
            {
                this.specialAttr = SpecialAttr.XmlSpace;
            }
        }
        xmlEncoder.StartAttribute(this.specialAttr != SpecialAttr.None);
        textWriter.Write(localName);
        textWriter.Write('=');
        // Pick up a QuoteChar change that happened since the last attribute.
        if (this.curQuoteChar != this.quoteChar)
        {
            this.curQuoteChar = this.quoteChar;
            xmlEncoder.QuoteChar = this.quoteChar;
        }
        textWriter.Write(this.curQuoteChar);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Closes the attribute opened by WriteStartAttribute.
// All the work (closing quote, xmlns bookkeeping) happens in AutoComplete.
public override void WriteEndAttribute()
{
    try
    {
        AutoComplete(Token.EndAttribute);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out a <![CDATA[...]]> block containing the specified text.
public override void WriteCData(string text)
{
    try
    {
        AutoComplete(Token.CData);
        // "]]>" cannot be escaped inside a CDATA section.
        if (null != text && text.IndexOf("]]>", StringComparison.Ordinal) >= 0)
        {
            throw new ArgumentException(SR.Xml_InvalidCDataChars);
        }
        textWriter.Write("<![CDATA[");
        if (null != text)
        {
            xmlEncoder.WriteRawWithSurrogateChecking(text);
        }
        textWriter.Write("]]>");
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out a comment <!--...--> containing the specified text.
public override void WriteComment(string text)
{
    try
    {
        // XML forbids "--" inside a comment and '-' as its last character
        // (which would produce the invalid terminator "--->").
        if (null != text && (text.IndexOf("--", StringComparison.Ordinal) >= 0 || (text.Length != 0 && text[text.Length - 1] == '-')))
        {
            throw new ArgumentException(SR.Xml_InvalidCommentChars);
        }
        AutoComplete(Token.Comment);
        textWriter.Write("<!--");
        if (null != text)
        {
            xmlEncoder.WriteRawWithSurrogateChecking(text);
        }
        textWriter.Write("-->");
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out a processing instruction with a space between the name and text as follows: <?name text?>
public override void WriteProcessingInstruction(string name, string text)
{
    try
    {
        // "?>" would terminate the PI prematurely and cannot be escaped.
        if (null != text && text.IndexOf("?>", StringComparison.Ordinal) >= 0)
        {
            throw new ArgumentException(SR.Xml_InvalidPiChars);
        }
        // In document mode the "xml" PI is the declaration and may only be
        // produced via WriteStartDocument.
        if (String.Equals(name, "xml", StringComparison.OrdinalIgnoreCase) && this.stateTable == stateTableDocument)
        {
            throw new ArgumentException(SR.Xml_DupXmlDecl);
        }
        AutoComplete(Token.PI);
        InternalWriteProcessingInstruction(name, text);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out an entity reference as follows: "&"+name+";".
public override void WriteEntityRef(string name)
{
    try
    {
        ValidateName(name, false);
        AutoComplete(Token.Content);
        xmlEncoder.WriteEntityRef(name);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Forces the generation of a character entity for the specified Unicode character value.
public override void WriteCharEntity(char ch)
{
    try
    {
        AutoComplete(Token.Content);
        xmlEncoder.WriteCharEntity(ch);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out the given whitespace.
// null is treated as "" ; any non-whitespace character is rejected.
public override void WriteWhitespace(string ws)
{
    try
    {
        if (null == ws)
        {
            ws = String.Empty;
        }
        if (!xmlCharType.IsOnlyWhitespace(ws))
        {
            throw new ArgumentException(SR.Xml_NonWhitespace);
        }
        AutoComplete(Token.Whitespace);
        xmlEncoder.Write(ws);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out the specified text content.
// Null or empty text is a no-op: it does not even advance the state machine.
public override void WriteString(string text)
{
    try
    {
        if (!string.IsNullOrEmpty(text))
        {
            AutoComplete(Token.Content);
            xmlEncoder.Write(text);
        }
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out the specified surrogate pair as a character entity.
public override void WriteSurrogateCharEntity(char lowChar, char highChar)
{
    try
    {
        AutoComplete(Token.Content);
        xmlEncoder.WriteSurrogateCharEntity(lowChar, highChar);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out the specified text content from a character buffer.
// Argument validation is delegated to the encoder.
public override void WriteChars(Char[] buffer, int index, int count)
{
    try
    {
        AutoComplete(Token.Content);
        xmlEncoder.Write(buffer, index, count);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes raw markup from the specified character buffer (no escaping).
public override void WriteRaw(Char[] buffer, int index, int count)
{
    try
    {
        AutoComplete(Token.RawData);
        xmlEncoder.WriteRaw(buffer, index, count);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes raw markup from the specified character string (no escaping,
// but surrogate pairs are still validated).
public override void WriteRaw(String data)
{
    try
    {
        AutoComplete(Token.RawData);
        xmlEncoder.WriteRawWithSurrogateChecking(data);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Encodes the specified binary bytes as base64 and writes out the resulting text.
// Consecutive calls form one base64 run: `flush` suppresses AutoComplete
// after the first call so the run is not broken up.
public override void WriteBase64(byte[] buffer, int index, int count)
{
    try
    {
        if (!this.flush)
        {
            AutoComplete(Token.Base64);
        }
        this.flush = true;
        // No need for us to explicitly validate the args. The StreamWriter will do
        // it for us.
        if (null == this.base64Encoder)
        {
            // Lazily created; reused across calls.
            this.base64Encoder = new XmlTextWriterBase64Encoder(xmlEncoder);
        }
        // Encode will call WriteRaw to write out the encoded characters
        this.base64Encoder.Encode(buffer, index, count);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Encodes the specified binary bytes as binhex and writes out the resulting text.
public override void WriteBinHex(byte[] buffer, int index, int count)
{
    try
    {
        AutoComplete(Token.Content);
        BinHexEncoder.Encode(buffer, index, count, this);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Returns the state of the XmlWriter, mapping the internal (finer-grained)
// State enum onto the public WriteState values.
public override WriteState WriteState
{
    get
    {
        switch (this.currentState)
        {
            case State.Start:
                return WriteState.Start;
            case State.Prolog:
            case State.PostDTD:
                return WriteState.Prolog;
            case State.Element:
                return WriteState.Element;
            case State.Attribute:
            case State.AttrOnly:
                return WriteState.Attribute;
            case State.Content:
            case State.Epilog:
                return WriteState.Content;
            case State.Error:
                return WriteState.Error;
            case State.Closed:
                return WriteState.Closed;
            default:
                Debug.Fail("Unmatched state in switch");
                return WriteState.Error;
        }
    }
}
// Disposes the XmlWriter and the underlying stream/TextWriter.
// On the first disposing call this best-effort closes any open
// elements/attributes, then marks the writer Closed; later calls skip
// the managed cleanup.
protected override void Dispose(bool disposing)
{
    if (disposing && this.currentState != State.Closed)
    {
        try
        {
            AutoCompleteAll();
        }
        catch
        { // never fail
        }
        finally
        {
            this.currentState = State.Closed;
            // The internal parameterless constructor never assigns textWriter,
            // so guard against null — Dispose must not throw.
            if (textWriter != null)
            {
                textWriter.Dispose();
            }
        }
    }
    base.Dispose(disposing);
}
// Flushes whatever is in the buffer to the underlying stream/TextWriter and flushes the underlying stream/TextWriter.
public override void Flush()
{
    textWriter.Flush();
}
// Writes out the specified name, ensuring it is a valid Name according to the XML specification
// (http://www.w3.org/TR/1998/REC-xml-19980210#NT-Name).
public override void WriteName(string name)
{
    try
    {
        AutoComplete(Token.Content);
        InternalWriteName(name, false);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Writes out the specified namespace-qualified name by looking up the prefix that is in scope for the given namespace.
public override void WriteQualifiedName(string localName, string ns)
{
    try
    {
        AutoComplete(Token.Content);
        if (this.namespaces)
        {
            // No prefix needed when ns is empty or equals the in-scope default.
            if (!string.IsNullOrEmpty(ns) && ns != stack[top].defaultNs)
            {
                string prefix = FindPrefix(ns);
                if (prefix == null)
                {
                    // A new prefix can only be declared while an attribute
                    // is open (it is emitted as an xmlns on the element).
                    if (this.currentState != State.Attribute)
                    {
                        throw new ArgumentException(SR.Format(SR.Xml_UndefNamespace, ns));
                    }
                    prefix = GeneratePrefix(); // need a prefix if
                    PushNamespace(prefix, ns, false);
                }
                if (prefix.Length != 0)
                {
                    InternalWriteName(prefix, true);
                    textWriter.Write(':');
                }
            }
        }
        else if (!string.IsNullOrEmpty(ns))
        {
            throw new ArgumentException(SR.Xml_NoNamespaces);
        }
        InternalWriteName(localName, true);
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Returns the closest prefix defined in the current namespace scope for the
// specified namespace URI; "" when ns is the in-scope default namespace,
// null when the URI is not bound at all.
public override string LookupPrefix(string ns)
{
    if (string.IsNullOrEmpty(ns))
    {
        throw new ArgumentException(SR.Xml_EmptyName);
    }
    string prefix = FindPrefix(ns);
    if (prefix != null)
    {
        return prefix;
    }
    return ns == stack[top].defaultNs ? string.Empty : null;
}
// Gets an XmlSpace representing the current xml:space scope: the innermost
// open element that declared xml:space wins; index 0 is the sentinel entry.
public override XmlSpace XmlSpace
{
    get
    {
        for (int i = top; i > 0; --i)
        {
            if (stack[i].xmlSpace != XmlSpace.None)
            {
                return stack[i].xmlSpace;
            }
        }
        return XmlSpace.None;
    }
}
// Gets the current xml:lang scope: the value set on the innermost element
// that declared one, or null when no ancestor declared xml:lang.
public override string XmlLang
{
    get
    {
        // Innermost declaration wins.
        for (int depth = top; depth > 0; depth--)
        {
            if (stack[depth].xmlLang != null)
            {
                return stack[depth].xmlLang;
            }
        }
        return null;
    }
}
// Writes the given string, first checking that it is a valid NmToken per the
// XML 1.0 specification (http://www.w3.org/TR/1998/REC-xml-19980210#NT-Name).
public override void WriteNmToken(string name)
{
    try
    {
        this.AutoComplete(Token.Content);

        if (string.IsNullOrEmpty(name))
            throw new ArgumentException(SR.Xml_EmptyName);
        if (!ValidateNames.IsNmtokenNoNamespaces(name))
            throw new ArgumentException(SR.Format(SR.Xml_InvalidNameChars, name));

        this.textWriter.Write(name);
    }
    catch
    {
        // Writer becomes unusable after any failure.
        this.currentState = State.Error;
        throw;
    }
}
//
// Private implementation methods
//
// Emits the XML declaration ("<?xml version=... ?>") and switches the writer
// into document mode. standalone: negative = omit the attribute,
// 0 = standalone="no", positive = standalone="yes".
// Must be the very first thing written; otherwise throws InvalidOperationException.
void StartDocument(int standalone)
{
    try
    {
        if (this.currentState != State.Start)
        {
            throw new InvalidOperationException(SR.Xml_NotTheFirst);
        }
        this.stateTable = stateTableDocument;
        this.currentState = State.Prolog;

        // Build the pseudo-attribute payload of the xml declaration.
        StringBuilder decl = new StringBuilder(128);
        decl.Append("version=").Append(quoteChar).Append("1.0").Append(quoteChar);
        if (this.encoding != null)
        {
            decl.Append(" encoding=").Append(quoteChar)
                .Append(this.encoding.WebName).Append(quoteChar);
        }
        if (standalone >= 0)
        {
            decl.Append(" standalone=").Append(quoteChar)
                .Append(standalone == 0 ? "no" : "yes").Append(quoteChar);
        }
        InternalWriteProcessingInstruction("xml", decl.ToString());
    }
    catch
    {
        currentState = State.Error;
        throw;
    }
}
// Drives the writer's state machine for the incoming token: validates the
// transition against the active state table, emits whatever syntax is needed
// to close the current construct (pending attribute quote, open start tag,
// buffered Base64 data), and records the new state and last token.
// Throws InvalidOperationException for tokens illegal in the current state,
// or when the writer is closed / already in the error state.
void AutoComplete(Token token)
{
    if (this.currentState == State.Closed)
    {
        throw new InvalidOperationException(SR.Xml_Closed);
    }
    else if (this.currentState == State.Error)
    {
        throw new InvalidOperationException(SR.Format(SR.Xml_WrongToken, tokenName[(int)token], stateName[(int)State.Error]));
    }
    // Transition table is laid out row-per-token, 8 states per row.
    State newState = this.stateTable[(int)token * 8 + (int)this.currentState];
    if (newState == State.Error)
    {
        throw new InvalidOperationException(SR.Format(SR.Xml_WrongToken, tokenName[(int)token], stateName[(int)this.currentState]));
    }
    switch (token)
    {
        case Token.Doctype:
            if (this.indented && this.currentState != State.Start)
            {
                Indent(false);
            }
            break;
        case Token.StartElement:
        case Token.Comment:
        case Token.PI:
        case Token.CData:
            // These begin new markup, so any pending attribute and the open
            // start tag must be completed first.
            if (this.currentState == State.Attribute)
            {
                WriteEndAttributeQuote();
                WriteEndStartTag(false);
            }
            else if (this.currentState == State.Element)
            {
                WriteEndStartTag(false);
            }
            if (token == Token.CData)
            {
                // CDATA counts as mixed content: suppresses pretty-print indenting.
                stack[top].mixed = true;
            }
            else if (this.indented && this.currentState != State.Start)
            {
                Indent(false);
            }
            break;
        case Token.EndElement:
        case Token.LongEndElement:
            if (this.flush)
            {
                FlushEncoders();
            }
            if (this.currentState == State.Attribute)
            {
                WriteEndAttributeQuote();
            }
            if (this.currentState == State.Content)
            {
                // Element already has content — must use the full </name> form.
                token = Token.LongEndElement;
            }
            else
            {
                // Start tag still open — close it as an empty element if requested.
                WriteEndStartTag(token == Token.EndElement);
            }
            if (stateTableDocument == this.stateTable && top == 1)
            {
                // Closing the root element of a document moves us to the epilog.
                newState = State.Epilog;
            }
            break;
        case Token.StartAttribute:
            if (this.flush)
            {
                FlushEncoders();
            }
            if (this.currentState == State.Attribute)
            {
                // Finish the previous attribute before starting the next.
                WriteEndAttributeQuote();
                textWriter.Write(' ');
            }
            else if (this.currentState == State.Element)
            {
                textWriter.Write(' ');
            }
            break;
        case Token.EndAttribute:
            if (this.flush)
            {
                FlushEncoders();
            }
            WriteEndAttributeQuote();
            break;
        case Token.Whitespace:
        case Token.Content:
        case Token.RawData:
        case Token.Base64:
            // Base64 keeps its encoder buffered; everything else flushes it.
            if (token != Token.Base64 && this.flush)
            {
                FlushEncoders();
            }
            if (this.currentState == State.Element && this.lastToken != Token.Content)
            {
                WriteEndStartTag(false);
            }
            if (newState == State.Content)
            {
                stack[top].mixed = true;
            }
            break;
        default:
            throw new InvalidOperationException(SR.Xml_InvalidOperation);
    }
    this.currentState = newState;
    this.lastToken = token;
}
// Closes out the whole document: flushes any buffered encoder output, then
// writes end tags for every element still open on the stack.
void AutoCompleteAll()
{
    if (this.flush)
    {
        this.FlushEncoders();
    }
    // Pop and close every element that is still open.
    while (this.top > 0)
    {
        this.WriteEndElement();
    }
}
// Ends the current element. longFormat forces the explicit "</name>" form;
// otherwise an element with no content is completed as an empty tag ("/>",
// decided inside AutoComplete). Afterwards the element and any namespace
// declarations it introduced are popped off their stacks.
void InternalWriteEndElement(bool longFormat)
{
    try
    {
        if (top <= 0)
        {
            throw new InvalidOperationException(SR.Xml_NoStartTag);
        }
        // if we are in the element, we need to close it.
        AutoComplete(longFormat ? Token.LongEndElement : Token.EndElement);
        // AutoComplete may have upgraded EndElement to LongEndElement when the
        // element had content, so check lastToken rather than the argument.
        if (this.lastToken == Token.LongEndElement)
        {
            if (this.indented)
            {
                Indent(true);
            }
            textWriter.Write('<');
            textWriter.Write('/');
            if (this.namespaces && stack[top].prefix != null)
            {
                textWriter.Write(stack[top].prefix);
                textWriter.Write(':');
            }
            textWriter.Write(stack[top].name);
            textWriter.Write('>');
        }
        // pop namespaces
        int prevNsTop = stack[top].prevNsTop;
        if (useNsHashtable && prevNsTop < nsTop)
        {
            PopNamespaces(prevNsTop + 1, nsTop);
        }
        nsTop = prevNsTop;
        top--;
    }
    catch
    {
        // Any failure poisons the writer for further use.
        currentState = State.Error;
        throw;
    }
}
// Completes the pending start tag: writes out every namespace declaration
// pushed in this element's scope that has not been written yet (prefixed
// xmlns:* first, then the default xmlns), then ">" — or " />" when the
// element is empty.
void WriteEndStartTag(bool empty)
{
    xmlEncoder.StartAttribute(false);
    // Prefixed declarations introduced by this element, newest first.
    for (int i = nsTop; i > stack[top].prevNsTop; i--)
    {
        if (!nsStack[i].declared)
        {
            textWriter.Write(" xmlns");
            textWriter.Write(':');
            textWriter.Write(nsStack[i].prefix);
            textWriter.Write('=');
            textWriter.Write(this.quoteChar);
            xmlEncoder.Write(nsStack[i].ns);
            textWriter.Write(this.quoteChar);
        }
    }
    // Default
    if ((stack[top].defaultNs != stack[top - 1].defaultNs) &&
        (stack[top].defaultNsState == NamespaceState.DeclaredButNotWrittenOut))
    {
        textWriter.Write(" xmlns");
        textWriter.Write('=');
        textWriter.Write(this.quoteChar);
        xmlEncoder.Write(stack[top].defaultNs);
        textWriter.Write(this.quoteChar);
        stack[top].defaultNsState = NamespaceState.DeclaredAndWrittenOut;
    }
    xmlEncoder.EndAttribute();
    if (empty)
    {
        textWriter.Write(" /");
    }
    textWriter.Write('>');
}
// Terminates the attribute value currently being written: applies any
// special-attribute handling (xml:space, xml:lang, xmlns), ends the encoder's
// attribute mode, then writes the closing quote character.
void WriteEndAttributeQuote()
{
    if (this.specialAttr != SpecialAttr.None)
    {
        // Ok, now to handle xmlspace, etc.
        this.HandleSpecialAttribute();
    }
    this.xmlEncoder.EndAttribute();
    this.textWriter.Write(this.curQuoteChar);
}
// Pretty-printing helper: writes a newline and, when inside a non-mixed
// element, one indentChar per indentation unit for the current depth.
// beforeEndElement indents one level less so the end tag lines up with its
// start tag.
void Indent(bool beforeEndElement)
{
    if (top == 0)
    {
        textWriter.WriteLine();
        return;
    }
    // Mixed content suppresses indentation to avoid injecting whitespace.
    if (stack[top].mixed)
    {
        return;
    }
    textWriter.WriteLine();
    int depth = beforeEndElement ? top - 1 : top;
    for (int remaining = depth * this.indentation; remaining > 0; remaining--)
    {
        textWriter.Write(this.indentChar);
    }
}
// pushes new namespace scope, and returns generated prefix, if one
// was needed to resolve conflicts.
// prefix == null means the default namespace; declared indicates whether the
// declaration has already been written to the output (true) or still needs to
// be emitted by WriteEndStartTag (false).
void PushNamespace(string prefix, string ns, bool declared)
{
    // Binding anything to the reserved xmlns namespace is illegal.
    if (XmlConst.ReservedNsXmlNs == ns)
    {
        throw new ArgumentException(SR.Xml_CanNotBindToReservedNamespace);
    }
    if (prefix == null)
    {
        // Default-namespace handling is tracked via a small per-element state machine.
        switch (stack[top].defaultNsState)
        {
            case NamespaceState.DeclaredButNotWrittenOut:
                Debug.Assert(declared == true, "Unexpected situation!!");
                // the first namespace that the user gave us is what we
                // like to keep.
                break;
            case NamespaceState.Uninitialized:
            case NamespaceState.NotDeclaredButInScope:
                // we now got a brand new namespace that we need to remember
                stack[top].defaultNs = ns;
                break;
            default:
                Debug.Fail("Should have never come here");
                return;
        }
        stack[top].defaultNsState = (declared ? NamespaceState.DeclaredAndWrittenOut : NamespaceState.DeclaredButNotWrittenOut);
    }
    else
    {
        // A non-empty prefix cannot map to the empty namespace.
        if (prefix.Length != 0 && ns.Length == 0)
        {
            throw new ArgumentException(SR.Xml_PrefixForEmptyNs);
        }
        int existingNsIndex = LookupNamespace(prefix);
        if (existingNsIndex != -1 && nsStack[existingNsIndex].ns == ns)
        {
            // it is already in scope.
            if (declared)
            {
                nsStack[existingNsIndex].declared = true;
            }
        }
        else
        {
            // see if prefix conflicts for the current element
            if (declared)
            {
                if (existingNsIndex != -1 && existingNsIndex > stack[top].prevNsTop)
                {
                    nsStack[existingNsIndex].declared = true; // old one is silenced now
                }
            }
            AddNamespace(prefix, ns, declared);
        }
    }
}
// Records a new prefix→namespace binding on the namespace stack, growing the
// stack as needed. Once the stack exceeds MaxNamespacesWalkCount entries,
// lookups switch from a linear walk to a hashtable, which is populated here.
void AddNamespace(string prefix, string ns, bool declared)
{
    int nsIndex = ++nsTop;
    if (nsIndex == nsStack.Length)
    {
        // Full — double the stack's capacity.
        Namespace[] grown = new Namespace[nsIndex * 2];
        Array.Copy(nsStack, 0, grown, 0, nsIndex);
        nsStack = grown;
    }
    nsStack[nsIndex].Set(prefix, ns, declared);

    if (useNsHashtable)
    {
        AddToNamespaceHashtable(nsIndex);
    }
    else if (nsIndex == MaxNamespacesWalkCount)
    {
        // Too many entries for a linear scan: build the hashtable and index
        // everything added so far.
        nsHashtable = new Dictionary<string, int>(new SecureStringHasher());
        for (int i = 0; i <= nsIndex; i++)
        {
            AddToNamespaceHashtable(i);
        }
        useNsHashtable = true;
    }
}
// Indexes one namespace-stack entry in the prefix hashtable. If the prefix is
// already mapped, the shadowed entry's index is remembered so PopNamespaces
// can restore it later.
void AddToNamespaceHashtable(int namespaceIndex)
{
    string prefix = nsStack[namespaceIndex].prefix;
    int shadowedIndex;
    if (nsHashtable.TryGetValue(prefix, out shadowedIndex))
    {
        nsStack[namespaceIndex].prevNsIndex = shadowedIndex;
    }
    nsHashtable[prefix] = namespaceIndex;
}
// Removes hashtable entries for the namespace-stack slots [indexFrom..indexTo],
// restoring any binding that each popped entry shadowed. Only used once the
// writer has switched to hashtable-based namespace lookup.
private void PopNamespaces(int indexFrom, int indexTo)
{
    Debug.Assert(useNsHashtable);
    for (int i = indexTo; i >= indexFrom; i--)
    {
        string prefix = nsStack[i].prefix;
        Debug.Assert(nsHashtable.ContainsKey(prefix));
        int shadowed = nsStack[i].prevNsIndex;
        if (shadowed == -1)
        {
            // Nothing was shadowed — the prefix simply goes out of scope.
            nsHashtable.Remove(prefix);
        }
        else
        {
            // Re-expose the declaration this entry had been hiding.
            nsHashtable[prefix] = shadowed;
        }
    }
}
// Produces a synthetic prefix of the form "d{depth}p{n}", where n increments
// per element so generated prefixes never collide within one scope.
string GeneratePrefix()
{
    int n = ++stack[top].prefixCount;
    return "d" + top.ToString("d", CultureInfo.InvariantCulture)
        + "p" + n.ToString("d", CultureInfo.InvariantCulture);
}
// Writes a processing instruction "<?name text?>" after validating the target
// name. A null text argument produces an empty payload.
void InternalWriteProcessingInstruction(string name, string text)
{
    this.textWriter.Write("<?");
    this.ValidateName(name, false);
    this.textWriter.Write(name);
    this.textWriter.Write(' ');
    if (text != null)
    {
        // Raw write with surrogate checking — PI contents are not escaped.
        this.xmlEncoder.WriteRawWithSurrogateChecking(text);
    }
    this.textWriter.Write("?>");
}
// Finds the namespace-stack index of the innermost binding for the given
// prefix, or -1 when the prefix is not in scope. Uses the hashtable once the
// writer has switched over; otherwise scans the stack newest-first.
int LookupNamespace(string prefix)
{
    if (useNsHashtable)
    {
        int nsIndex;
        return nsHashtable.TryGetValue(prefix, out nsIndex) ? nsIndex : -1;
    }
    for (int i = nsTop; i >= 0; i--)
    {
        if (nsStack[i].prefix == prefix)
        {
            return i;
        }
    }
    return -1;
}
// Like LookupNamespace, but only matches bindings introduced by the current
// element (indices above stack[top].prevNsTop); returns -1 otherwise.
int LookupNamespaceInCurrentScope(string prefix)
{
    if (useNsHashtable)
    {
        int nsIndex;
        if (nsHashtable.TryGetValue(prefix, out nsIndex) && nsIndex > stack[top].prevNsTop)
        {
            return nsIndex;
        }
        return -1;
    }
    for (int i = nsTop; i > stack[top].prevNsTop; i--)
    {
        if (nsStack[i].prefix == prefix)
        {
            return i;
        }
    }
    return -1;
}
// Returns a prefix currently bound to the given namespace URI, or null.
// A candidate only counts if its prefix has not been rebound by a more recent
// declaration (checked via LookupNamespace).
string FindPrefix(string ns)
{
    for (int i = nsTop; i >= 0; i--)
    {
        if (nsStack[i].ns == ns && LookupNamespace(nsStack[i].prefix) == i)
        {
            return nsStack[i].prefix;
        }
    }
    return null;
}
// There are three kinds of strings we write out - Name, LocalName and Prefix.
// Both LocalName and Prefix are validated with isNCName == true and a full
// Name with isNCName == false (colons allowed).
void InternalWriteName(string name, bool isNCName)
{
    this.ValidateName(name, isNCName);
    this.textWriter.Write(name);
}
// This method is used for validation of the DOCTYPE, processing instruction and entity names plus names
// written out by the user via WriteName and WriteQualifiedName.
// Unfortunately the names of elements and attributes are not validated by the XmlTextWriter.
// Also this method does not check whether the character after ':' is a valid start name character. It accepts
// all valid name characters at that position. This can't be changed because of backwards compatibility.
private unsafe void ValidateName(string name, bool isNCName)
{
    if (string.IsNullOrEmpty(name))
    {
        throw new ArgumentException(SR.Xml_EmptyName);
    }
    int nameLength = name.Length;
    // Namespaces supported
    if (namespaces)
    {
        // We can't use ValidateNames.ParseQName here because of backwards compatibility bug we need to preserve.
        // The bug is that the character after ':' is validated only as a NCName characters instead of NCStartName.
        int colonPosition = -1;
        // Parse NCName (may be prefix, may be local name)
        int position = ValidateNames.ParseNCName(name);
    Continue:
        // position == nameLength means the whole string was consumed as valid.
        if (position == nameLength)
        {
            return;
        }
        // we have prefix:localName
        if (name[position] == ':')
        {
            // isNCName callers (prefix/local name) may not contain a colon at all.
            if (!isNCName)
            {
                // first colon in qname
                if (colonPosition == -1)
                {
                    // make sure it is not the first or last characters
                    if (position > 0 && position + 1 < nameLength)
                    {
                        colonPosition = position;
                        // Because of the back-compat bug (described above) parse the rest as Nmtoken
                        position++;
                        position += ValidateNames.ParseNmtoken(name, position);
                        goto Continue;
                    }
                }
            }
        }
        // Fall through: invalid character (or second colon) found — throw below.
    }
    // Namespaces not supported
    else
    {
        if (ValidateNames.IsNameNoNamespaces(name))
        {
            return;
        }
    }
    throw new ArgumentException(SR.Format(SR.Xml_InvalidNameChars, name));
}
// Applies the just-finished special attribute (xml:lang, xml:space or an
// xmlns declaration) to the writer's per-element state. The attribute's text
// is taken from the encoder's accumulated value.
void HandleSpecialAttribute()
{
    string value = xmlEncoder.AttributeValue;
    switch (this.specialAttr)
    {
        case SpecialAttr.XmlLang:
            stack[top].xmlLang = value;
            break;

        case SpecialAttr.XmlSpace:
            // xml:space accepts exactly "default" or "preserve" (after trimming).
            value = XmlConvertEx.TrimString(value);
            if (value == "default")
                stack[top].xmlSpace = XmlSpace.Default;
            else if (value == "preserve")
                stack[top].xmlSpace = XmlSpace.Preserve;
            else
                throw new ArgumentException(SR.Format(SR.Xml_InvalidXmlSpace, value));
            break;

        case SpecialAttr.XmlNs:
            // Declares (and writes) a namespace binding for the recorded prefix.
            VerifyPrefixXml(this.prefixForXmlNs, value);
            PushNamespace(this.prefixForXmlNs, value, true);
            break;
    }
}
// Guards the reserved prefix: any case variant of "xml" may only be bound to
// the reserved XML namespace; other prefixes pass through untouched.
void VerifyPrefixXml(string prefix, string ns)
{
    if (prefix == null || prefix.Length != 3)
    {
        return;
    }
    bool looksLikeXml =
        (prefix[0] == 'x' || prefix[0] == 'X') &&
        (prefix[1] == 'm' || prefix[1] == 'M') &&
        (prefix[2] == 'l' || prefix[2] == 'L');
    if (looksLikeXml && XmlConst.ReservedNsXml != ns)
    {
        throw new ArgumentException(SR.Xml_InvalidPrefix);
    }
}
// Pushes a fresh element frame onto the tag stack, growing the array by a
// fixed increment when it is full. The new frame records the current
// namespace-stack top so namespaces can be popped on end-element.
void PushStack()
{
    if (top == stack.Length - 1)
    {
        TagInfo[] grown = new TagInfo[stack.Length + 10];
        if (top > 0)
        {
            Array.Copy(stack, 0, grown, 0, top + 1);
        }
        stack = grown;
    }
    top++; // Move up stack
    stack[top].Init(nsTop);
}
// Drains any partially-encoded Base64 output and clears the pending-flush flag.
void FlushEncoders()
{
    if (this.base64Encoder != null)
    {
        // Flush calls WriteRaw to emit the remaining encoded characters.
        this.base64Encoder.Flush();
    }
    this.flush = false;
}
}
}
| |
#region License
/*
* Copyright 2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion
using System;
using NUnit.Framework;
using Spring.Core;
using Spring.Objects;
using Spring.Objects.Factory;
using Spring.Objects.Factory.Config;
using Spring.Objects.Factory.Xml;
namespace Spring.Context.Support
{
/// <summary>
/// Test creation of application context from XML.
/// </summary>
/// <author>Mark Pollack</author>
[TestFixture]
public sealed class XmlApplicationContextTests
{
    // Regression test: constructing the context must not invoke factory-object
    // product methods while object-factory post-processors are running.
    [Test(Description = "http://jira.springframework.org/browse/SPRNET-1231")]
    public void SPRNET1231_DoesNotInvokeFactoryMethodDuringObjectFactoryPostProcessing()
    {
        string configLocation = TestResourceLoader.GetAssemblyResourceUri(this.GetType(), "XmlApplicationContextTests-SPRNET1231.xml");
        XmlApplicationContext ctx = new XmlApplicationContext(configLocation);
    }

    // Post-processor for the SPRNET-1231 test: asserts the factory's product
    // method has not yet been called at post-processing time.
    private class SPRNET1231ObjectFactoryPostProcessor : IObjectFactoryPostProcessor
    {
        public void PostProcessObjectFactory(IConfigurableListableObjectFactory factory)
        {
            SPRNET1231FactoryObject testFactory = (SPRNET1231FactoryObject)factory.GetObject("testFactory");
            Assert.AreEqual(0, testFactory.count);
        }
    }

    // Counting factory object for the SPRNET-1231 test.
    private class SPRNET1231FactoryObject
    {
        public int count; // number of times GetProduct has been invoked
        public ITestObject GetProduct()
        {
            count++;
            return new TestObject("test" + count, count);
        }
    }

    // An inner object that references a missing definition should surface as
    // ObjectCreationException wrapping NoSuchObjectDefinitionException.
    [Test]
    public void InnerObjectWithPostProcessing()
    {
        try
        {
            XmlApplicationContext ctx = new XmlApplicationContext(false, "assembly://Spring.Core.Tests/Spring.Context.Support/innerObjectsWithPostProcessor.xml");
            ctx.GetObject("hasInnerObjects");
            Assert.Fail("should throw ObjectCreationException");
        }
        catch (ObjectCreationException e)
        {
            NoSuchObjectDefinitionException ex = e.InnerException as NoSuchObjectDefinitionException;
            Assert.IsNotNull(ex);
            //Pass
        }
    }

    // Constructing a context with no config locations is an argument error.
    [Test]
    public void NoConfigLocation()
    {
        Assert.Throws<ArgumentException>(() => new XmlApplicationContext());
    }

    [Test]
    public void SingleConfigLocation()
    {
        XmlApplicationContext ctx =
            new XmlApplicationContext(false, "assembly://Spring.Core.Tests/Spring.Context.Support/simpleContext.xml");
        Assert.IsTrue(ctx.ContainsObject("someMessageSource"));
        ctx.Dispose();
    }

    // Multiple config files merge into one context; Refresh and Dispose must
    // each properly destroy the singletons created before them.
    [Test]
    public void MultipleConfigLocations()
    {
        XmlApplicationContext ctx =
            new XmlApplicationContext(false, "assembly://Spring.Core.Tests/Spring.Context.Support/contextB.xml",
                                      "assembly://Spring.Core.Tests/Spring.Context.Support/contextC.xml",
                                      "assembly://Spring.Core.Tests/Spring.Context.Support/contextA.xml");
        Assert.IsTrue(ctx.ContainsObject("service"));
        Assert.IsTrue(ctx.ContainsObject("logicOne"));
        Assert.IsTrue(ctx.ContainsObject("logicTwo"));
        Service service = (Service) ctx.GetObject("service");
        ctx.Refresh();
        Assert.IsTrue(service.ProperlyDestroyed);
        service = (Service) ctx.GetObject("service");
        ctx.Dispose();
        Assert.IsTrue(service.ProperlyDestroyed);
    }

    // A type-mismatched value should fail creation with a message naming the
    // mismatch and the offending property.
    [Test]
    public void ContextWithInvalidValueType()
    {
        try
        {
            XmlApplicationContext ctx =
                new XmlApplicationContext(false,
                                          "assembly://Spring.Core.Tests/Spring.Context.Support/invalidValueType.xml");
            Assert.Fail("Should have thrown ObjectCreationException for context", ctx);
        }
        catch (ObjectCreationException ex)
        {
            Assert.IsTrue(ex.Message.IndexOf((typeof (TypeMismatchException).Name)) != -1);
            Assert.IsTrue(ex.Message.IndexOf(("UseCodeAsDefaultMessage")) != -1);
        }
    }

    [Test]
    [Ignore("Need to add Spring.TypeLoadException")]
    public void ContextWithInvalidLazyType()
    {
        XmlApplicationContext ctx =
            new XmlApplicationContext(false,
                                      "assembly://Spring.Core.Tests/Spring.Context.Support/invalidType.xml");
        Assert.IsTrue(ctx.ContainsObject("someMessageSource"));
        ctx.GetObject("someMessageSource");
    }

    // Object names should resolve case-insensitively to the same instance.
    [Test]
    public void CaseInsensitiveContext()
    {
        XmlApplicationContext ctx =
            new XmlApplicationContext(false, "assembly://Spring.Core.Tests/Spring.Context.Support/objects.xml");
        Assert.IsTrue(ctx.ContainsObject("goran"));
        Assert.IsTrue(ctx.ContainsObject("Goran"));
        Assert.IsTrue(ctx.ContainsObject("GORAN"));
        Assert.AreEqual(ctx.GetObject("goran"), ctx.GetObject("GORAN"));
    }

    [Test]
    public void GetObjectOnUnknownIdThrowsNoSuchObjectDefinition()
    {
        XmlApplicationContext ctx =
            new XmlApplicationContext(false, "assembly://Spring.Core.Tests/Spring.Context.Support/objects.xml");
        string DOES_NOT_EXIST = "does_not_exist";
        Assert.IsFalse(ctx.ContainsObject(DOES_NOT_EXIST));
        Assert.Throws<NoSuchObjectDefinitionException>(() => ctx.GetObject(DOES_NOT_EXIST));
    }

    // SPRNET-192: the "&" name prefix fetches the factory object itself; its
    // post-processed property must already be applied.
    [Test]
    public void FactoryObjectsAreNotInstantiatedBeforeObjectFactoryPostProcessorsAreApplied()
    {
        XmlApplicationContext ctx = new XmlApplicationContext("Spring/Context/Support/SPRNET-192.xml");
        LogFactoryObject logFactory = (LogFactoryObject) ctx["&log"];
        Assert.AreEqual("foo", logFactory.LogName);
    }

    /// <summary>
    /// Make sure that if an IObjectPostProcessor is defined as abstract
    /// the creation of an IApplicationContext will not try to instantiate it.
    /// </summary>
    [Test]
    public void ContextWithPostProcessors()
    {
        CountingObjectPostProcessor.Count = 0;
        CoutingObjectFactoryPostProcessor.Count = 0;
        IApplicationContext ctx =
            new XmlApplicationContext("assembly://Spring.Core.Tests/Spring.Context.Support/objects.xml");
        Assert.IsTrue(ctx.ContainsObject("abstractObjectProcessorPrototype"));
        Assert.IsTrue(ctx.ContainsObject("abstractFactoryProcessorPrototype"));
        Assert.AreEqual(0, CountingObjectPostProcessor.Count);
        Assert.AreEqual(0, CoutingObjectFactoryPostProcessor.Count);
    }

    /// <summary>
    /// Make sure that ConfigureObject() completly configures target
    /// object (goes through whole lifecycle of object creation and
    /// applies all processors).
    /// </summary>
    [Test]
    public void ConfigureObject()
    {
        const string objDefLocation = "assembly://Spring.Core.Tests/Spring.Context.Support/objects.xml";
        XmlApplicationContext xmlContext = new XmlApplicationContext(new string[] {objDefLocation});
        object objGoran = xmlContext.GetObject("goran");
        Assert.IsTrue(objGoran is TestObject);
        TestObject fooGet = objGoran as TestObject;
        TestObject fooConfigure = new TestObject();
        xmlContext.ConfigureObject(fooConfigure, "goran");
        Assert.IsNotNull(fooGet);
        Assert.AreEqual(fooGet.Name, fooConfigure.Name);
        Assert.AreEqual(fooGet.Age, fooConfigure.Age);
        Assert.AreEqual(fooGet.ObjectName, fooConfigure.ObjectName);
        Assert.IsNotNull(fooGet.ObjectName);
        Assert.AreEqual(xmlContext, fooGet.ApplicationContext);
        Assert.AreEqual(xmlContext, fooConfigure.ApplicationContext);
    }

    // Refresh/close lifecycle events must reach listeners both via
    // IApplicationListener registration and via direct wiring.
    [Test]
    public void ContextLifeCycle()
    {
        IApplicationContext ctx =
            new XmlApplicationContext("assembly://Spring.Core.Tests/Spring.Context/contextlifecycle.xml");
        IConfigurableApplicationContext configCtx = ctx as IConfigurableApplicationContext;
        Assert.IsNotNull(configCtx);
        ContextListenerObject clo;
        using (configCtx)
        {
            clo = configCtx["contextListenerObject"] as ContextListenerObject;
            Assert.IsNotNull(clo);
            Assert.IsTrue(clo.AppListenerContextRefreshed,
                          "Object did not receive context refreshed event via IApplicationListener");
            Assert.IsTrue(clo.CtxRefreshed, "Object did not receive context refreshed event via direct wiring");
        }
        Assert.IsTrue(clo.AppListenerContextClosed,
                      "Object did not receive context closed event via IApplicationContextListener");
        Assert.IsTrue(clo.CtxClosed, "Object did not receive context closed event via direct wiring.");
    }

    // SPRNET-479: Refresh must dispose the previous object factory, destroying
    // and disposing the singletons it held.
    [Test]
    public void RefreshDisposesExistingObjectFactory_SPRNET479()
    {
        string tmp = typeof (DestroyTester).FullName;
        Console.WriteLine(tmp);
        IApplicationContext ctx =
            new XmlApplicationContext("assembly://Spring.Core.Tests/Spring.Context.Support/objects.xml");
        DestroyTester destroyTester = (DestroyTester) ctx.GetObject("destroyTester");
        DisposeTester disposeTester = (DisposeTester) ctx.GetObject("disposeTester");
        Assert.IsFalse(destroyTester.IsDestroyed);
        Assert.IsFalse(disposeTester.IsDisposed);
        ((IConfigurableApplicationContext) ctx).Refresh();
        Assert.IsTrue(destroyTester.IsDestroyed);
        Assert.IsTrue(disposeTester.IsDisposed);
    }

    // A GenericApplicationContext populated by an XmlObjectDefinitionReader
    // should behave like a directly-constructed XML context.
    [Test]
    public void GenericApplicationContextWithXmlObjectDefinitions()
    {
        GenericApplicationContext ctx = new GenericApplicationContext();
        XmlObjectDefinitionReader reader = new XmlObjectDefinitionReader(ctx);
        reader.LoadObjectDefinitions("assembly://Spring.Core.Tests/Spring.Context.Support/contextB.xml");
        reader.LoadObjectDefinitions("assembly://Spring.Core.Tests/Spring.Context.Support/contextC.xml");
        reader.LoadObjectDefinitions("assembly://Spring.Core.Tests/Spring.Context.Support/contextA.xml");
        ctx.Refresh();
        Assert.IsTrue(ctx.ContainsObject("service"));
        Assert.IsTrue(ctx.ContainsObject("logicOne"));
        Assert.IsTrue(ctx.ContainsObject("logicTwo"));
        ctx.Dispose();
    }

    // Smoke test for the GenericApplicationContext constructor overloads that
    // accept a parent context.
    [Test]
    public void GenericApplicationContextConstructorTests()
    {
        IApplicationContext ctx = new XmlApplicationContext("assembly://Spring.Core.Tests/Spring.Context/contextlifecycle.xml");
        GenericApplicationContext genericCtx = new GenericApplicationContext(ctx);
        genericCtx = new GenericApplicationContext("test", true, ctx);
    }

    #region Helper classes

    // Tracks IDisposable.Dispose calls; throws if disposed more than once.
    public class DisposeTester : IDisposable
    {
        private bool isDisposed = false;
        public bool IsDisposed
        {
            get { return isDisposed; }
        }
        public void Dispose()
        {
            if (isDisposed) throw new InvalidOperationException("must not be disposed twice");
            isDisposed = true;
        }
    }

    // Tracks destroy-method invocations; throws if destroyed more than once.
    public class DestroyTester
    {
        private bool isDestroyed = false;
        public bool IsDestroyed
        {
            get { return isDestroyed; }
        }
        public void DestroyMe()
        {
            if (isDestroyed) throw new InvalidOperationException("must not be destroyed twice");
            isDestroyed = true;
        }
    }

    /// <summary>
    /// Utility class to keep track of object construction.
    /// </summary>
    public class CountingObjectPostProcessor : IObjectPostProcessor
    {
        private static int count;
        /// <summary>
        /// Property Count (int)
        /// </summary>
        public static int Count
        {
            get { return count; }
            set { count = value; }
        }
        /// <summary>
        /// Create an instance and increment the counter
        /// </summary>
        public CountingObjectPostProcessor()
        {
            count++;
        }
        #region IObjectPostProcessor Members
        /// <summary>
        /// No op implementation
        /// </summary>
        /// <param name="obj">object to process</param>
        /// <param name="objectName">name of object</param>
        /// <returns>processed object</returns>
        public object PostProcessAfterInitialization(object obj, string objectName)
        {
            return obj;
        }
        /// <summary>
        /// No op implementation
        /// </summary>
        /// <param name="obj">object to process</param>
        /// <param name="name">name of object</param>
        /// <returns>processed object</returns>
        public object PostProcessBeforeInitialization(object obj, string name)
        {
            return obj;
        }
        #endregion
    }

    /// <summary>
    /// Utility class to keep track of object construction.
    /// NOTE(review): class name is misspelled ("Couting") — it may be referenced
    /// by its type name from XML resource files, so renaming needs a repo-wide
    /// check before it is safe.
    /// </summary>
    public class CoutingObjectFactoryPostProcessor : IObjectFactoryPostProcessor
    {
        private static int count;
        /// <summary>
        /// Property Count (int)
        /// </summary>
        public static int Count
        {
            get { return count; }
            set { count = value; }
        }
        /// <summary>
        /// Create an instance and increment the counter
        /// </summary>
        public CoutingObjectFactoryPostProcessor()
        {
            count++;
        }
        #region IObjectFactoryPostProcessor Members
        /// <summary>
        /// no op
        /// </summary>
        /// <param name="factory">factory to post process</param>
        public void PostProcessObjectFactory(IConfigurableListableObjectFactory factory)
        {
        }
        #endregion
    }

    #endregion
}
// Post-processor that logs each processed object name and queries the context
// for singleton status. Both branches of the IsSingleton check return the
// instance unchanged — the call exists to exercise IsSingleton during
// post-processing, not to alter the object.
public class SingletonTestingObjectPostProcessor : IObjectPostProcessor, IApplicationContextAware
{
    // Injected via IApplicationContextAware before post-processing begins.
    private IApplicationContext applicationContext;
    #region IObjectPostProcessor Members
    // No-op: returns the instance unchanged.
    public object PostProcessBeforeInitialization(object instance, string name)
    {
        return instance;
    }
    // Logs the object name and probes IsSingleton; instance is returned as-is.
    public object PostProcessAfterInitialization(object instance, string objectName)
    {
        Console.WriteLine("post process " + objectName);
        if (this.applicationContext.IsSingleton(objectName))
        {
            return instance;
        }
        return instance;
    }
    #endregion
    #region IApplicationContextAware Members
    public IApplicationContext ApplicationContext
    {
        set { this.applicationContext = value; }
    }
    #endregion
}
}
| |
namespace Epi.Windows.MakeView.Dialogs.FieldDefinitionDialogs
{
// Designer half of the GridFieldDefinition dialog: component disposal and the
// Visual Studio-generated layout code. Do not hand-edit InitializeComponent.
partial class GridFieldDefinition
{
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.IContainer components = null;
    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing && (components != null))
        {
            components.Dispose();
        }
        base.Dispose(disposing);
    }
    #region Windows Form Designer generated code
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(GridFieldDefinition));
        this.groupBox2 = new System.Windows.Forms.GroupBox();
        this.btnAdd = new System.Windows.Forms.Button();
        this.dgColumns = new System.Windows.Forms.DataGrid();
        this.groupBox1.SuspendLayout();
        this.groupBox2.SuspendLayout();
        ((System.ComponentModel.ISupportInitialize)(this.dgColumns)).BeginInit();
        this.SuspendLayout();
        //
        // txtPrompt
        //
        this.txtPrompt.TextChanged += new System.EventHandler(this.txtPrompt_TextChanged);
        //
        // txtFieldName
        //
        this.txtFieldName.TextChanged += new System.EventHandler(this.txtFieldName_TextChanged);
        //
        // btnCancel
        //
        resources.ApplyResources(this.btnCancel, "btnCancel");
        this.btnCancel.Click += new System.EventHandler(this.btnCancel_Click);
        //
        // btnOk
        //
        resources.ApplyResources(this.btnOk, "btnOk");
        this.btnOk.Click += new System.EventHandler(this.btnOk_Click);
        //
        // groupBox1
        //
        resources.ApplyResources(this.groupBox1, "groupBox1");
        //
        // baseImageList
        //
        this.baseImageList.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("baseImageList.ImageStream")));
        this.baseImageList.Images.SetKeyName(0, "");
        this.baseImageList.Images.SetKeyName(1, "");
        this.baseImageList.Images.SetKeyName(2, "");
        this.baseImageList.Images.SetKeyName(3, "");
        this.baseImageList.Images.SetKeyName(4, "");
        this.baseImageList.Images.SetKeyName(5, "");
        this.baseImageList.Images.SetKeyName(6, "");
        this.baseImageList.Images.SetKeyName(7, "");
        this.baseImageList.Images.SetKeyName(8, "");
        this.baseImageList.Images.SetKeyName(9, "");
        this.baseImageList.Images.SetKeyName(10, "");
        this.baseImageList.Images.SetKeyName(11, "");
        this.baseImageList.Images.SetKeyName(12, "");
        this.baseImageList.Images.SetKeyName(13, "");
        this.baseImageList.Images.SetKeyName(14, "");
        this.baseImageList.Images.SetKeyName(15, "");
        this.baseImageList.Images.SetKeyName(16, "");
        this.baseImageList.Images.SetKeyName(17, "");
        this.baseImageList.Images.SetKeyName(18, "");
        this.baseImageList.Images.SetKeyName(19, "");
        this.baseImageList.Images.SetKeyName(20, "");
        this.baseImageList.Images.SetKeyName(21, "");
        this.baseImageList.Images.SetKeyName(22, "");
        this.baseImageList.Images.SetKeyName(23, "");
        this.baseImageList.Images.SetKeyName(24, "");
        this.baseImageList.Images.SetKeyName(25, "");
        this.baseImageList.Images.SetKeyName(26, "");
        this.baseImageList.Images.SetKeyName(27, "");
        this.baseImageList.Images.SetKeyName(28, "");
        this.baseImageList.Images.SetKeyName(29, "");
        this.baseImageList.Images.SetKeyName(30, "");
        this.baseImageList.Images.SetKeyName(31, "");
        this.baseImageList.Images.SetKeyName(32, "");
        this.baseImageList.Images.SetKeyName(33, "");
        this.baseImageList.Images.SetKeyName(34, "");
        this.baseImageList.Images.SetKeyName(35, "");
        this.baseImageList.Images.SetKeyName(36, "");
        this.baseImageList.Images.SetKeyName(37, "");
        this.baseImageList.Images.SetKeyName(38, "");
        this.baseImageList.Images.SetKeyName(39, "");
        this.baseImageList.Images.SetKeyName(40, "");
        this.baseImageList.Images.SetKeyName(41, "");
        this.baseImageList.Images.SetKeyName(42, "");
        this.baseImageList.Images.SetKeyName(43, "");
        this.baseImageList.Images.SetKeyName(44, "");
        this.baseImageList.Images.SetKeyName(45, "");
        this.baseImageList.Images.SetKeyName(46, "");
        this.baseImageList.Images.SetKeyName(47, "");
        this.baseImageList.Images.SetKeyName(48, "");
        this.baseImageList.Images.SetKeyName(49, "");
        this.baseImageList.Images.SetKeyName(50, "");
        this.baseImageList.Images.SetKeyName(51, "");
        this.baseImageList.Images.SetKeyName(52, "");
        this.baseImageList.Images.SetKeyName(53, "");
        this.baseImageList.Images.SetKeyName(54, "");
        this.baseImageList.Images.SetKeyName(55, "");
        this.baseImageList.Images.SetKeyName(56, "");
        this.baseImageList.Images.SetKeyName(57, "");
        this.baseImageList.Images.SetKeyName(58, "");
        this.baseImageList.Images.SetKeyName(59, "");
        this.baseImageList.Images.SetKeyName(60, "");
        this.baseImageList.Images.SetKeyName(61, "");
        this.baseImageList.Images.SetKeyName(62, "");
        this.baseImageList.Images.SetKeyName(63, "");
        this.baseImageList.Images.SetKeyName(64, "");
        this.baseImageList.Images.SetKeyName(65, "");
        this.baseImageList.Images.SetKeyName(66, "");
        this.baseImageList.Images.SetKeyName(67, "");
        this.baseImageList.Images.SetKeyName(68, "");
        this.baseImageList.Images.SetKeyName(69, "");
        this.baseImageList.Images.SetKeyName(70, "");
        this.baseImageList.Images.SetKeyName(71, "");
        this.baseImageList.Images.SetKeyName(72, "");
        this.baseImageList.Images.SetKeyName(73, "");
        this.baseImageList.Images.SetKeyName(74, "");
        this.baseImageList.Images.SetKeyName(75, "");
        this.baseImageList.Images.SetKeyName(76, "");
        this.baseImageList.Images.SetKeyName(77, "");
        this.baseImageList.Images.SetKeyName(78, "");
        this.baseImageList.Images.SetKeyName(79, "");
        this.baseImageList.Images.SetKeyName(80, "");
        this.baseImageList.Images.SetKeyName(81, "");
        this.baseImageList.Images.SetKeyName(82, "");
        this.baseImageList.Images.SetKeyName(83, "");
        this.baseImageList.Images.SetKeyName(84, "");
        this.baseImageList.Images.SetKeyName(85, "");
        this.baseImageList.Images.SetKeyName(86, "");
        //
        // groupBox2
        //
        this.groupBox2.Controls.Add(this.btnAdd);
        this.groupBox2.Controls.Add(this.dgColumns);
        resources.ApplyResources(this.groupBox2, "groupBox2");
        this.groupBox2.Name = "groupBox2";
        this.groupBox2.TabStop = false;
        //
        // btnAdd
        //
        resources.ApplyResources(this.btnAdd, "btnAdd");
        this.btnAdd.Name = "btnAdd";
        this.btnAdd.UseVisualStyleBackColor = true;
        this.btnAdd.Click += new System.EventHandler(this.btnAdd_Click);
        //
        // dgColumns
        //
        this.dgColumns.AllowSorting = false;
        resources.ApplyResources(this.dgColumns, "dgColumns");
        this.dgColumns.DataMember = "";
        this.dgColumns.HeaderForeColor = System.Drawing.SystemColors.ControlText;
        this.dgColumns.Name = "dgColumns";
        this.dgColumns.ParentRowsVisible = false;
        this.dgColumns.RowHeadersVisible = false;
        this.dgColumns.MouseDown += new System.Windows.Forms.MouseEventHandler(this.dgColumns_MouseDown);
        //
        // GridFieldDefinition
        //
        resources.ApplyResources(this, "$this");
        this.Controls.Add(this.groupBox2);
        this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
        this.MaximizeBox = false;
        this.MinimizeBox = false;
        this.Name = "GridFieldDefinition";
        this.ShowInTaskbar = false;
        this.Load += new System.EventHandler(this.GridFieldDefinition_Load);
        this.Controls.SetChildIndex(this.btnOk, 0);
        this.Controls.SetChildIndex(this.btnCancel, 0);
        this.Controls.SetChildIndex(this.groupBox1, 0);
        this.Controls.SetChildIndex(this.groupBox2, 0);
        this.Controls.SetChildIndex(this.txtFieldName, 0);
        this.Controls.SetChildIndex(this.lblPrompt, 0);
        this.Controls.SetChildIndex(this.txtPrompt, 0);
        this.Controls.SetChildIndex(this.label1, 0);
        this.groupBox1.ResumeLayout(false);
        this.groupBox2.ResumeLayout(false);
        ((System.ComponentModel.ISupportInitialize)(this.dgColumns)).EndInit();
        this.ResumeLayout(false);
        this.PerformLayout();
    }
    #endregion
    // Designer-owned controls for the column-definition group.
    private System.Windows.Forms.GroupBox groupBox2;
    private System.Windows.Forms.Button btnAdd;
    private System.Windows.Forms.DataGrid dgColumns;
}
}
| |
using ClipperLib;
using GerberLibrary.Core.Primitives;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
//using System.Drawing.Drawing2D;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using GerberLibrary.Core;
using Ionic.Zip;
namespace GerberLibrary
{
/// <summary>
/// Holds the palette used when rendering a board image: base material, soldermask,
/// copper, pads, silkscreen, traces and the canvas background.
/// </summary>
public class BoardRenderColorSet
{
    public Color BoardRenderBaseMaterialColor = Gerber.ParseColor("#808080");
    public Color BoardRenderColor = Gerber.ParseColor("green");
    public Color BoardRenderCopperColor = Color.FromArgb(219, 125, 104);
    public Color BoardRenderPadColor = Gerber.ParseColor("gold");
    public Color BoardRenderSilkColor = Gerber.ParseColor("white");
    public Color BackgroundColor = Color.FromArgb(10, 10, 40);
    public Color BoardRenderTraceColor = Gerber.ParseColor("green");

    /// <summary>
    /// Replaces the mask, silk, pad and trace colours from colour-name strings.
    /// Note: the pad colour is driven by <paramref name="CopperColor"/>.
    /// </summary>
    public void SetupColors(string SolderMaskColor, string SilkScreenColor, string TracesColor = "auto", string CopperColor = "gold")
    {
        BoardRenderColor = Gerber.ParseColor(SolderMaskColor);
        BoardRenderSilkColor = Gerber.ParseColor(SilkScreenColor);
        BoardRenderPadColor = Gerber.ParseColor(CopperColor);
        BoardRenderTraceColor = Gerber.ParseColor(TracesColor);
    }

    /// <summary>
    /// Picks the render colour for a layer; unknown layers get translucent white.
    /// The side parameter is currently not used in the choice.
    /// </summary>
    public Color GetDefaultColor(BoardLayer layer, BoardSide side)
    {
        if (layer == BoardLayer.Drill) return BackgroundColor;
        if (layer == BoardLayer.Copper) return BoardRenderCopperColor;
        if (layer == BoardLayer.Outline) return BoardRenderColor;
        // Soldermask is rendered as a blend of mask colour and base material.
        if (layer == BoardLayer.SolderMask) return MathHelpers.Interpolate(BoardRenderColor, BoardRenderBaseMaterialColor, 0.2f);
        if (layer == BoardLayer.Silk) return BoardRenderSilkColor;
        return Color.FromArgb(100, 255, 255, 255);
    }
}
public static class Gerber
{
#region GERBERPROCESSINGDEFAULTS
// Scale factor for arc tessellation density (used by CreateCurvePoints; higher = more segments).
public static double ArcQualityScaleFactor = 20;
public static bool DirectlyShowGeneratedBoardImages = true;
public static bool DumpSanitizedOutput = false;
// Standard gerber end-of-file command.
public static string EOF = "M02*";
// Global debug chattiness switch consulted throughout this file.
public static bool ExtremelyVerbose = false;
public static bool GerberRenderBumpMapOutput = true;
// Unit-selection commands: inches / millimetres.
public static string INCH = "%MOIN*%";
public static string LinearInterpolation = "G01*";
// Line ending used for all generated gerber output (see WriteAllLines).
public static string LineEnding = "\n";
public static string MM = "%MOMM*%";
public static bool SaveDebugImageOutput = false;
public static bool SaveIntermediateImages = false;
public static bool SaveOutlineImages = false;
public static bool ShowProgress = false;
// Region (polygon fill) start/stop commands.
public static string StartRegion = "G36*";
public static string StopRegion = "G37*";
public static bool WaitForKey = false;
public static bool WriteSanitized = false;
#endregion
/// <summary>
/// Returns the first file in <paramref name="folder"/> classified as an Outline
/// layer; failing that, the first file classified as a Mill layer; else null.
/// Also returns null when the folder does not exist.
/// </summary>
public static string FindOutlineFile(string folder)
{
    if (!Directory.Exists(folder)) return null;
    string millFallback = null;
    foreach (string candidate in Directory.GetFiles(folder))
    {
        BoardSide side;
        BoardLayer layer;
        DetermineBoardSideAndLayer(candidate, out side, out layer);
        // Outline wins outright; remember the first mill file as a fallback.
        if (layer == BoardLayer.Outline) return candidate;
        if (layer == BoardLayer.Mill && millFallback == null) millFallback = candidate;
    }
    return millFallback;
}
/// <summary>
/// Locates the outline (or mill) gerber in <paramref name="folder"/>, parses it,
/// and returns its first outline shape; null when nothing usable is found.
/// </summary>
public static PolyLine FindAndLoadOutlineFile(string folder)
{
    // Renamed from "File" to avoid shadowing System.IO.File; also removed an
    // unused PolyLine allocation the original created and never read.
    string outlineFile = FindOutlineFile(folder);
    if (string.IsNullOrEmpty(outlineFile)) return null;
    ParsedGerber pls = PolyLineSet.LoadGerberFile(new StandardConsoleLog(), outlineFile);
    if (pls.OutlineShapes.Count > 0)
    {
        return pls.OutlineShapes[0];
    }
    return null;
}
/// <summary>
/// Packs the gerbers from <paramref name="BoardGerbersFolder"/> into
/// "&lt;Name&gt;_gerbers.zip" inside <paramref name="BoardFactoryFolder"/>.
/// Outline/Mill files are merged into one outline gerber before zipping;
/// Assembly drawings are copied next to the zip instead of into it.
/// No-op when the gerber folder does not exist.
/// </summary>
public static void ZipGerberFolderToFactoryFolder(string Name, string BoardGerbersFolder, string BoardFactoryFolder)
{
    if (!Directory.Exists(BoardGerbersFolder)) return;

    string TargetZip = Path.Combine(BoardFactoryFolder, Name + "_gerbers.zip");
    if (File.Exists(TargetZip)) File.Delete(TargetZip);
    Console.WriteLine("Zipping gerbers to {0}", TargetZip);
    // Fix: ZipFile is IDisposable — dispose it so file handles/temp buffers are released.
    using (ZipFile Z = new ZipFile())
    {
        List<string> OutlineMerge = new List<string>();
        foreach (var F in Directory.GetFiles(BoardGerbersFolder))
        {
            bool AddToZip = false;
            var T = GerberLibrary.Gerber.FindFileType(F);
            if (T == BoardFileType.Drill)
            {
                AddToZip = true;
            }
            else
            {
                GerberLibrary.Gerber.DetermineBoardSideAndLayer(F, out BoardSide Side, out BoardLayer Layer);
                switch (Layer)
                {
                    case BoardLayer.Mill:
                    case BoardLayer.Outline:
                        // Collected for a single merged outline file below.
                        OutlineMerge.Add(F);
                        break;
                    case BoardLayer.Carbon:
                    case BoardLayer.Paste:
                    case BoardLayer.Silk:
                    case BoardLayer.SolderMask:
                    case BoardLayer.Copper:
                    case BoardLayer.Drill:
                        AddToZip = true;
                        break;
                    case BoardLayer.Assembly:
                        // Assembly drawings go next to the zip, not inside it.
                        string TargetGerb = Path.Combine(BoardFactoryFolder, Name + "_" + Path.GetFileName(F));
                        File.Copy(F, TargetGerb, true);
                        break;
                }
            }
            if (AddToZip)
            {
                Console.WriteLine("Adding {0} to zip.", F);
                Z.AddFile(F, ".");
            }
        }
        if (OutlineMerge.Count > 0)
        {
            if (OutlineMerge.Count == 1)
            {
                Z.AddFile(OutlineMerge[0], ".");
            }
            else
            {
                // Several outline/mill files: merge them into one .gko first.
                string TargetGerb = Path.Combine(BoardFactoryFolder, Name + "_" + "MergedOutlines.gko");
                GerberMerger.MergeAll(OutlineMerge, TargetGerb, new StandardConsoleLog());
                Z.AddFile(TargetGerb, ".");
            }
        }
        Z.Save(TargetZip);
    }
}
/// <summary>
/// Gives a layer a default stacking/sort weight (unlisted layers get 0);
/// bottom-side layers get the negated weight so they sort opposite to top.
/// </summary>
public static int GetDefaultSortOrder(BoardSide side, BoardLayer layer)
{
    int order;
    switch (layer)
    {
        case BoardLayer.Paste: order = 10; break;
        case BoardLayer.Mill: order = 11; break;
        case BoardLayer.Copper: order = 100; break;
        case BoardLayer.Silk: order = 101; break;
        case BoardLayer.SolderMask: order = 102; break;
        case BoardLayer.Carbon: order = 103; break;
        default: order = 0; break;
    }
    return side == BoardSide.Bottom ? -order : order;
}
// Matches doubles rendered in scientific notation by ToString("r") so that
// ToFloatingPointString can expand them to plain decimal form.
private static readonly Regex rxScientific = new Regex(@"^(?<sign>-?)(?<head>\d+)(\.(?<tail>\d*?)0*)?E(?<exponent>[+\-]\d+)$", RegexOptions.IgnoreCase | RegexOptions.ExplicitCapture | RegexOptions.CultureInvariant);
// NOTE(review): semantics inferred from the name only — confirm at the use site.
public static bool SkipEagleDrillFix = false;
public static bool ThrowExceptions = false; // set to true to make the debugger die in useful places.
// Tessellates a gerber arc from (LastX,LastY) to (X,Y) with centre offset (I,J)
// into a list of points. In multi-quadrant mode the centre is exact; in
// single-quadrant mode the sign of I/J is ambiguous, so four candidate centres
// are scored and the best match (by angle span / radius ratio) is selected.
public static List<PointD> CreateCurvePoints(double LastX, double LastY, double X, double Y, double I, double J, InterpolationMode mode, GerberQuadrantMode qmode)
{
// Console.WriteLine("Current curve mode: {0}", qmode);
List<PointD> R = new List<PointD>();
double Radius = Math.Sqrt(I * I + J * J);
double CX = LastX + I;
double CY = LastY + J;
Quadrant Q = Quadrant.xposypos;
double HS = Math.Atan2(LastY - CY, LastX - CX);
double HE = Math.Atan2(Y - CY, X - CX);
if (qmode == GerberQuadrantMode.Multi)
{
// Unwind the start angle so the sweep direction matches the interpolation mode.
if (mode == InterpolationMode.ClockWise)
{
while (HS <= HE) HS += Math.PI * 2;
}
else
{
while (HS >= HE) HS -= Math.PI * 2;
}
}
else
{
double LastDiff = Math.PI * 2;
// The four candidate centres for single-quadrant mode (I/J signs unknown).
List<QuadR> qR = new List<QuadR>();
qR.Add(new QuadR() { CX = LastX + I, CY = LastY + J });
qR.Add(new QuadR() { CX = LastX - I, CY = LastY + J });
qR.Add(new QuadR() { CX = LastX - I, CY = LastY - J });
qR.Add(new QuadR() { CX = LastX + I, CY = LastY - J });
foreach (var a in qR) a.Calc(LastX, LastY, X, Y);
int candidates = 0;
if (Gerber.ExtremelyVerbose)
{
var DX = LastX - X;
var DY = LastY - Y;
var L = Math.Sqrt(DX * DX + DY * DY);
// NOTE(review): this short-segment early exit only runs when ExtremelyVerbose
// is set, i.e. the debug flag changes the produced geometry — verify intent.
if (L < 1.0)
{
R.Add(new PointD(X, Y));
return R;
}
Console.WriteLine("length: {0}", L);
}
if (mode == InterpolationMode.CounterClockwise)
{
// Pick the candidate whose start/end radii are most nearly equal (DRat ~ 1)
// among those spanning at most a quarter turn.
double LastRat = 10;
foreach (var a in qR) a.FixCounterClockwise();
foreach (var a in qR)
{
if (a.Diff <= Math.PI / 2.0)
{
candidates++;
if (Math.Abs(1 - a.DRat) < LastRat)
{
CX = a.CX;
CY = a.CY;
HS = a.S;
HE = a.E;
LastRat = Math.Abs(1 - a.DRat);
}
if (Gerber.ExtremelyVerbose) Console.WriteLine("candidate: {0:N1} - {1:N1} - {2:N1}", RadToDeg(a.S), RadToDeg(a.E), RadToDeg(a.Diff));
}
}
/*
HS = qR[3].S;
CX = qR[3].CX;
CY = qR[3].CY;
HE = qR[3].E;
*/
}
else
{
// Clockwise: pick the candidate with the smallest non-negative quarter-turn sweep.
foreach (var a in qR) a.FixClockwise();
foreach (var a in qR)
{
if (a.Diff >= 0 && a.Diff <= Math.PI / 2.0 + 0.00001)
{
candidates++;
if (Math.Abs(a.Diff) < LastDiff)
{
CX = a.CX;
CY = a.CY;
HS = a.S;
HE = a.E;
LastDiff = Math.Abs(a.Diff);
}
if (Gerber.ExtremelyVerbose) Console.WriteLine("candidate: {0} - {1} - {2}", a.S, a.E, a.Diff);
}
if (Gerber.ExtremelyVerbose) Console.WriteLine("selected : {0} - {1} - {2}", HS, HE, LastDiff);
}
}
if (candidates == 0 && Gerber.ExtremelyVerbose)
{
foreach (var a in qR)
{
Console.WriteLine("no candidate: {0} - {1} - {2} ( should be smaller than {3}) ", a.S, a.E, a.Diff, Math.PI / 2.0);
}
}
}
if (Gerber.ExtremelyVerbose)
{
Console.WriteLine("HS {0:N1} HE {1:N1} DIFF {2:N1} QUAD {3} CX {4} CY {5}", RadToDeg(HS), RadToDeg(HE), RadToDeg(HE - HS), Q, CX, CY);
}
// Segment count scales with radius and swept angle; clamped to at least 10.
int segs = (int)(Gerber.ArcQualityScaleFactor * Math.Max(2.0, Radius) * Math.Abs(HS - HE) / (Math.PI * 2));
if (segs < 10) segs = 10;
double HEdeg = RadToDeg(HE);
double HSdeg = RadToDeg(HS);
for (int i = 0; i <= segs; i++)
{
double P = ((double)i / (double)segs) * (HE - HS) + HS;
double nx = Math.Cos(P) * Radius + CX;
double ny = Math.Sin(P) * Radius + CY;
R.Add(new PointD(nx, ny));
}
// R.Add(new PointD(X, Y));
return R;
}
// Classifies a gerber/drill file into a board side (top/bottom/both/internal)
// and layer (copper/silk/soldermask/paste/outline/...) purely from its file
// name and extension. Both outputs default to Unknown when no rule matches.
// Rules cover ORCAD (.art), DirtyPCB-style long extensions, KiCad/Eagle name
// substrings (.gbr default case), .ger substring sets, and the classic
// Protel-style short extensions (.gtl/.gbl/.gts/...).
public static void DetermineBoardSideAndLayer(string gerberfile, out BoardSide Side, out BoardLayer Layer)
{
Side = BoardSide.Unknown;
Layer = BoardLayer.Unknown;
// Extension = text after the last dot, compared lowercase.
string[] filesplit = Path.GetFileName(gerberfile).Split('.');
string ext = filesplit[filesplit.Count() - 1].ToLower();
switch (ext)
{
case "art": // ORCAD RELATED TYPES
{
switch (Path.GetFileNameWithoutExtension(gerberfile).ToUpper())
{
case "PMT": Side = BoardSide.Top; Layer = BoardLayer.Paste; break;
case "PMB": Side = BoardSide.Bottom; Layer = BoardLayer.Paste; break;
case "TOP": Side = BoardSide.Top; Layer = BoardLayer.Copper; break;
case "BOTTOM": Side = BoardSide.Bottom; Layer = BoardLayer.Copper; break;
case "SMBOT": Side = BoardSide.Bottom; Layer = BoardLayer.SolderMask; break;
case "SMTOP": Side = BoardSide.Top; Layer = BoardLayer.SolderMask; break;
case "SSBOT": Side = BoardSide.Bottom; Layer = BoardLayer.Silk; break;
case "SSTOP": Side = BoardSide.Top; Layer = BoardLayer.Silk; break;
case "DRILLING": Side = BoardSide.Both; Layer = BoardLayer.Drill; break;
// case "KEEPOUT": Side = BoardSide.Both; Layer = BoardLayer.Outline; break;
}
break;
}
case "slices": Side = BoardSide.Both; Layer = BoardLayer.Utility; break;
case "copper_bottom": Side = BoardSide.Bottom; Layer = BoardLayer.Copper; break;
case "copper_top": Side = BoardSide.Top; Layer = BoardLayer.Copper; break;
case "silk_bottom": Side = BoardSide.Bottom; Layer = BoardLayer.Silk; break;
case "silk_top": Side = BoardSide.Top; Layer = BoardLayer.Silk; break;
case "paste_bottom": Side = BoardSide.Bottom; Layer = BoardLayer.Paste; break;
case "paste_top": Side = BoardSide.Top; Layer = BoardLayer.Paste; break;
case "soldermask_bottom": Side = BoardSide.Bottom; Layer = BoardLayer.SolderMask; break;
case "soldermask_top": Side = BoardSide.Top; Layer = BoardLayer.SolderMask; break;
case "drill_both": Side = BoardSide.Both; Layer = BoardLayer.Drill; break;
case "outline_both": Side = BoardSide.Both; Layer = BoardLayer.Outline; break;
case "png":
{
Side = BoardSide.Both;
Layer = BoardLayer.Silk;
}
break;
case "assemblytop":
Layer = BoardLayer.Assembly;
Side = BoardSide.Top;
break;
case "assemblybottom":
Layer = BoardLayer.Assembly;
Side = BoardSide.Bottom;
break;
case "gbr":
// Generic .gbr: first try well-known base names, then fall back to
// substring matching (KiCad-style suffixes like -f_cu, -b_mask, ...).
switch (Path.GetFileNameWithoutExtension(gerberfile).ToLower())
{
case "profile":
case "boardoutline":
Side = BoardSide.Both;
Layer = BoardLayer.Outline;
break;
case "outline":
Side = BoardSide.Both;
Layer = BoardLayer.Outline;
break;
case "board":
Side = BoardSide.Both;
Layer = BoardLayer.Outline;
break;
case "copper_bottom":
case "bottom":
Side = BoardSide.Bottom;
Layer = BoardLayer.Copper;
break;
case "soldermask_bottom":
case "bottommask":
Side = BoardSide.Bottom;
Layer = BoardLayer.SolderMask;
break;
case "solderpaste_bottom":
case "bottompaste":
Side = BoardSide.Bottom;
Layer = BoardLayer.Paste;
break;
case "silkscreen_bottom":
case "bottomsilk":
Side = BoardSide.Bottom;
Layer = BoardLayer.Silk;
break;
case "copper_top":
case "top":
Side = BoardSide.Top;
Layer = BoardLayer.Copper;
break;
case "soldermask_top":
case "topmask":
Side = BoardSide.Top;
Layer = BoardLayer.SolderMask;
break;
case "solderpaste_top":
case "toppaste":
Side = BoardSide.Top;
Layer = BoardLayer.Paste;
break;
case "silkscreen_top":
case "topsilk":
Side = BoardSide.Top;
Layer = BoardLayer.Silk;
break;
case "inner1":
Side = BoardSide.Internal1;
Layer = BoardLayer.Copper;
break;
case "inner2":
Side = BoardSide.Internal2;
Layer = BoardLayer.Copper;
break;
default:
{
// Substring checks run in sequence; a later match overrides an earlier one.
string lcase = gerberfile.ToLower();
if (lcase.Contains("board outline")) { Side = BoardSide.Both; Layer = BoardLayer.Outline; };
if (lcase.Contains("copper bottom")) { Side = BoardSide.Bottom; Layer = BoardLayer.Copper; };
if (lcase.Contains("silkscreen bottom")) { Side = BoardSide.Bottom; Layer = BoardLayer.Silk; };
if (lcase.Contains("copper top")) { Side = BoardSide.Top; Layer = BoardLayer.Copper; };
if (lcase.Contains("silkscreen top")) { Side = BoardSide.Top; Layer = BoardLayer.Silk; };
if (lcase.Contains("solder mask bottom")) { Side = BoardSide.Bottom; Layer = BoardLayer.SolderMask; };
if (lcase.Contains("solder mask top")) { Side = BoardSide.Top; Layer = BoardLayer.SolderMask; };
if (lcase.Contains("drill-copper top-copper bottom")) { Side = BoardSide.Both; Layer = BoardLayer.Drill; };
if (lcase.Contains("outline")) { Side = BoardSide.Both; Layer = BoardLayer.Outline; }
if (lcase.Contains("-edge_cuts")) { Side = BoardSide.Both; Layer = BoardLayer.Outline; }
if (lcase.Contains("-b_cu")) { Side = BoardSide.Bottom; Layer = BoardLayer.Copper; }
if (lcase.Contains("-f_cu")) { Side = BoardSide.Top; Layer = BoardLayer.Copper; }
if (lcase.Contains("-b_silks")) { Side = BoardSide.Bottom; Layer = BoardLayer.Silk; }
if (lcase.Contains("-f_silks")) { Side = BoardSide.Top; Layer = BoardLayer.Silk; }
if (lcase.Contains("-b_mask")) { Side = BoardSide.Bottom; Layer = BoardLayer.SolderMask; }
if (lcase.Contains("-f_mask")) { Side = BoardSide.Top; Layer = BoardLayer.SolderMask; }
if (lcase.Contains("-b_paste")) { Side = BoardSide.Bottom; Layer = BoardLayer.Paste; }
if (lcase.Contains("-f_paste")) { Side = BoardSide.Top; Layer = BoardLayer.Paste; }
}
break;
}
break;
case "ger":
{
// .ger: substring table; ALL entries are tested, so the LAST match wins.
string l = gerberfile.ToLower();
List<boardset> bs = new List<boardset>();
bs.Add(new boardset() { name = ".topsoldermask", side = BoardSide.Top, layer = BoardLayer.SolderMask });
bs.Add(new boardset() { name = ".topsilkscreen", side = BoardSide.Top, layer = BoardLayer.Silk });
bs.Add(new boardset() { name = ".toplayer", side = BoardSide.Top, layer = BoardLayer.Copper });
bs.Add(new boardset() { name = ".tcream", side = BoardSide.Top, layer = BoardLayer.Paste });
bs.Add(new boardset() { name = ".boardoutline", side = BoardSide.Both, layer = BoardLayer.Outline });
// NOTE(review): ".bcream" appears twice — this SolderMask entry is always
// overridden by the Paste entry below (last match wins). Looks like a
// copy-paste slip; confirm and remove the dead entry.
bs.Add(new boardset() { name = ".bcream", side = BoardSide.Bottom, layer = BoardLayer.SolderMask });
bs.Add(new boardset() { name = ".bottomsoldermask", side = BoardSide.Bottom, layer = BoardLayer.SolderMask });
bs.Add(new boardset() { name = ".bottomsilkscreen", side = BoardSide.Bottom, layer = BoardLayer.Silk });
bs.Add(new boardset() { name = ".bottomlayer", side = BoardSide.Bottom, layer = BoardLayer.Copper });
bs.Add(new boardset() { name = ".bcream", side = BoardSide.Bottom, layer = BoardLayer.Paste });
bs.Add(new boardset() { name = ".internalplane1", side = BoardSide.Internal1, layer = BoardLayer.Copper });
bs.Add(new boardset() { name = ".internalplane2", side = BoardSide.Internal2, layer = BoardLayer.Copper });
foreach (var a in bs)
{
if (l.Contains(a.name))
{
Side = a.side;
Layer = a.layer;
}
}
}
break;
case "gml":
Side = BoardSide.Both;
Layer = BoardLayer.Mill;
break;
case "fabrd":
case "oln":
case "gko":
case "gm1":
Side = BoardSide.Both;
Layer = BoardLayer.Outline;
break;
case "l2":
case "g1l":
case "gl1":
case "g1":
Side = BoardSide.Internal1;
Layer = BoardLayer.Copper;
break;
case "adtop":
Side = BoardSide.Top;
Layer = BoardLayer.Assembly;
break;
case "adbottom":
Side = BoardSide.Bottom;
Layer = BoardLayer.Assembly;
break;
case "notes":
Side = BoardSide.Both;
Layer = BoardLayer.Notes;
break;
case "l3":
case "gl2":
case "g2l":
case "g2":
Side = BoardSide.Internal2;
Layer = BoardLayer.Copper;
break;
case "l4":
case "gbl":
case "l2m":
Side = BoardSide.Bottom;
Layer = BoardLayer.Copper;
break;
case "l1":
case "l1m":
case "gtl":
Side = BoardSide.Top;
Layer = BoardLayer.Copper;
break;
case "gbp":
case "spbottom":
Side = BoardSide.Bottom;
Layer = BoardLayer.Paste;
break;
case "gtp":
case "sptop":
Side = BoardSide.Top;
Layer = BoardLayer.Paste;
break;
case "gbo":
case "ss2":
case "ssbottom":
Side = BoardSide.Bottom;
Layer = BoardLayer.Silk;
break;
case "gto":
case "ss1":
case "sstop":
Side = BoardSide.Top;
Layer = BoardLayer.Silk;
break;
case "gbs":
case "sm2":
case "smbottom":
Side = BoardSide.Bottom;
Layer = BoardLayer.SolderMask;
break;
case "gts":
case "sm1":
case "smtop":
Side = BoardSide.Top;
Layer = BoardLayer.SolderMask;
break;
case "outline":
case "gb3": // oshstencils bottom outline
Side = BoardSide.Both;
Layer = BoardLayer.Outline;
break;
case "gt3": // oshstencils top outline
Side = BoardSide.Both;
Layer = BoardLayer.Outline;
break;
case "top":
Side = BoardSide.Top;
Layer = BoardLayer.Copper;
break;
case "bottom":
case "bot":
Side = BoardSide.Bottom;
Layer = BoardLayer.Copper;
break;
case "smb":
Side = BoardSide.Bottom;
Layer = BoardLayer.SolderMask;
break;
case "smt":
Side = BoardSide.Top;
Layer = BoardLayer.SolderMask;
break;
case "slk":
case "sst":
Side = BoardSide.Top;
Layer = BoardLayer.Silk;
break;
case "bsk":
case "ssb":
Side = BoardSide.Bottom;
Layer = BoardLayer.Silk;
break;
case "spt":
Side = BoardSide.Top;
Layer = BoardLayer.Paste;
break;
case "spb":
Side = BoardSide.Bottom;
Layer = BoardLayer.Paste;
break;
case "drill_top_bottom":
case "drl":
case "drill":
case "drillnpt":
case "rou":
case "sco":
Side = BoardSide.Both;
Layer = BoardLayer.Drill;
break;
}
}
/// <summary>
/// Classifies a file as Gerber, Drill or Unsupported: known binary extensions are
/// rejected outright, otherwise the file is scanned for a "%FS" (gerber format
/// statement) or "M48" (excellon header) marker. I/O errors yield Unsupported.
/// </summary>
public static BoardFileType FindFileType(string filename)
{
    List<string> unsupported = new List<string>() { "config", "exe", "dll", "png", "zip", "gif", "jpeg", "doc", "docx", "jpg", "bmp", "svg" };
    string[] filesplit = filename.Split('.');
    string ext = filesplit[filesplit.Length - 1].ToLower();
    if (unsupported.Contains(ext))
    {
        return BoardFileType.Unsupported;
    }
    try
    {
        // Fix: enumerate lines lazily (File.ReadLines) instead of loading the whole
        // file (File.ReadAllLines) — the marker is normally in the first few lines.
        foreach (string line in File.ReadLines(filename))
        {
            if (line.Contains("%FS")) return BoardFileType.Gerber;
            if (line.Contains("M48")) return BoardFileType.Drill;
        }
    }
    catch (Exception E)
    {
        if (Gerber.ExtremelyVerbose)
        {
            Console.WriteLine("Exception determining filetype: {0}", E.Message);
        }
        return BoardFileType.Unsupported;
    }
    return BoardFileType.Unsupported;
}
/// <summary>
/// Stream-based variant of <see cref="FindFileType(string)"/>: rejects known
/// binary extensions by filename, then scans the reader for "%FS" (gerber) or
/// "M48" (excellon drill). Any read error yields Unsupported.
/// </summary>
public static BoardFileType FindFileTypeFromStream(StreamReader l, string filename)
{
    filename = filename.ToLower();
    // "svg" added for consistency with FindFileType's blacklist.
    List<string> unsupported = new List<string>() { "config", "exe", "dll", "png", "zip", "gif", "jpeg", "doc", "docx", "jpg", "bmp", "svg" };
    string[] filesplit = filename.Split('.');
    string ext = filesplit[filesplit.Length - 1].ToLower();
    if (unsupported.Contains(ext))
    {
        return BoardFileType.Unsupported;
    }
    try
    {
        // Fix: scan while reading instead of buffering the entire stream into a
        // list first — allows early exit on the (usually early) marker line.
        string line;
        while ((line = l.ReadLine()) != null)
        {
            if (line.Contains("%FS")) return BoardFileType.Gerber;
            if (line.Contains("M48")) return BoardFileType.Drill;
        }
    }
    catch (Exception)
    {
        return BoardFileType.Unsupported;
    }
    return BoardFileType.Unsupported;
}
/// <summary>
/// Parses each gerber file and accumulates all of their bounding boxes into one
/// overall bounds object.
/// </summary>
public static Bounds GetBoundingBox(ProgressLog log, List<string> generatedFiles)
{
    var total = new Bounds();
    foreach (string path in generatedFiles)
    {
        var parsed = PolyLineSet.LoadGerberFile(log, path, State: new GerberParserState() { PreCombinePolygons = false });
        total.AddBox(parsed.BoundingBox);
    }
    return total;
}
/// <summary>
/// Maps a colour name or HTML colour string to a drawing colour. Well-known PCB
/// colour names get curated RGB values; anything else is handed to
/// ColorTranslator.FromHtml, then Color.FromName as a last resort.
/// Null input logs an error and returns lime.
/// </summary>
public static Color ParseColor(string color)
{
    if (color == null)
    {
        Console.WriteLine("Error: Null color! Defaulting to lime!");
        return Color.Lime;
    }
    // Curated palette for the common soldermask/silkscreen colour names.
    var palette = new Dictionary<string, Color>()
    {
        { "blue", Color.FromArgb(0, 40, 74) },
        { "yellow", Color.FromArgb(234, 206, 39) },
        { "green", Color.FromArgb(0, 0x30, 0) },
        { "black", Color.FromArgb(5, 5, 5) },
        { "white", Color.FromArgb(250, 250, 250) },
        { "red", Color.FromArgb(192, 43, 43) },
        { "silver", Color.FromArgb(160, 160, 160) },
        { "gold", Color.FromArgb(239, 205, 85) },
    };
    Color known;
    if (palette.TryGetValue(color.ToLower(), out known))
    {
        return known;
    }
    try
    {
        return System.Drawing.ColorTranslator.FromHtml(color);
    }
    catch (Exception)
    {
        // Unknown colours fall through to Color.FromName, which returns an
        // empty colour when the name is unrecognised.
    }
    return Color.FromName(color);
}
/// <summary>Converts radians to degrees.</summary>
public static double RadToDeg(double inp)
{
    // Same arithmetic as a full-turn ratio: inp / 2π * 360.
    return inp * 360.0 / (Math.PI * 2.0);
}
/// <summary>
/// Renders a gerber/drill file to a bitmap, overlays debug markers (the four
/// candidate arc centres for each G03 command) and saves it to
/// <paramref name="BitmapFilename"/>. Returns false when rendering fails.
/// </summary>
public static bool SaveDebugImage(string GerberFilename, string BitmapFilename, float dpi, Color Foreground, Color Background, ProgressLog log)
{
    log.PushActivity("debug image");
    ParsedGerber PLS;
    GerberParserState State = new GerberParserState()
    {
        PreCombinePolygons = false
    };
    var FileType = Gerber.FindFileType(GerberFilename);
    Gerber.DetermineBoardSideAndLayer(GerberFilename, out State.Side, out State.Layer);
    bool forcezero = false;
    if (FileType == BoardFileType.Drill)
    {
        PLS = PolyLineSet.LoadExcellonDrillFile(log, GerberFilename);
        PLS.CalcPathBounds();
    }
    else
    {
        PLS = PolyLineSet.LoadGerberFile(log, GerberFilename, forcezero, Gerber.WriteSanitized, State);
    }
    double WidthInMM = PLS.BoundingBox.BottomRight.X - PLS.BoundingBox.TopLeft.X;
    double HeightInMM = PLS.BoundingBox.BottomRight.Y - PLS.BoundingBox.TopLeft.Y;
    int Width = (int)(Math.Ceiling((WidthInMM) * (dpi / 25.4)));
    int Height = (int)(Math.Ceiling((HeightInMM) * (dpi / 25.4)));
    log.AddString(String.Format("Exporting {0} ({2},{3}mm) to {1} ({4},{5})", GerberFilename, BitmapFilename, WidthInMM, HeightInMM, Width, Height));
    GerberImageCreator GIC = new GerberImageCreator();
    GIC.scale = dpi / 25.4f; // dpi
    GIC.BoundingBox.AddBox(PLS.BoundingBox);
    var Tr = GIC.BuildMatrix(Width, Height);
    Bitmap B2 = GIC.RenderToBitmap(Width, Height, Tr, Foreground, Background, PLS, true);
    if (B2 == null)
    {
        // Fix: balance PushActivity on the early-exit path as well.
        log.PopActivity();
        return false;
    }
    var GerberLines = PolyLineSet.SanitizeInputLines(System.IO.File.ReadAllLines(GerberFilename).ToList());
    double LastX = 0;
    double LastY = 0;
    // Fix: dispose the Graphics (and later the Bitmap) so GDI handles are released.
    using (Graphics G2 = Graphics.FromImage(B2))
    {
        GerberImageCreator.ApplyAASettings(G2);
        G2.Transform = Tr.Clone();
        foreach (var L in GerberLines)
        {
            // NOTE(review): assumes sanitized lines are never empty — confirm
            // SanitizeInputLines strips blanks, else L[0] throws.
            if (L[0] != '%')
            {
                GerberSplitter GS = new GerberSplitter();
                GS.Split(L, PLS.State.CoordinateFormat);
                if (GS.Has("G") && (int)GS.Get("G") == 3)
                {
                    double X = PLS.State.CoordinateFormat.ScaleFileToMM(GS.Get("X"));
                    double Y = PLS.State.CoordinateFormat.ScaleFileToMM(GS.Get("Y"));
                    double I = PLS.State.CoordinateFormat.ScaleFileToMM(GS.Get("I"));
                    double J = PLS.State.CoordinateFormat.ScaleFileToMM(GS.Get("J"));
                    // Mark end point, start point and all four candidate centres.
                    DrawCross(G2, X, Y, Color.Blue);
                    DrawCross(G2, LastX, LastY, Color.Red);
                    DrawCross(G2, LastX + I, LastY - J, Color.Yellow);
                    DrawCross(G2, LastX + I, LastY + J, Color.Purple);
                    DrawCross(G2, LastX - I, LastY - J, Color.Green);
                    DrawCross(G2, LastX - I, LastY + J, Color.Orange);
                }
                if (GS.Has("X")) LastX = PLS.State.CoordinateFormat.ScaleFileToMM(GS.Get("X"));
                if (GS.Has("Y")) LastY = PLS.State.CoordinateFormat.ScaleFileToMM(GS.Get("Y"));
            }
        }
    }
    B2.Save(BitmapFilename);
    B2.Dispose();
    log.PopActivity();
    return true;
}
/// <summary>
/// Exception-safe wrapper around <see cref="SaveGerberFileToImageUnsafe"/>:
/// logs the full inner-exception chain to the console and returns false on failure.
/// </summary>
public static bool SaveGerberFileToImage(ProgressLog log, string GerberFilename, string BitmapFilename, float dpi, Color Foreground, Color Background)
{
    try
    {
        return SaveGerberFileToImageUnsafe(log, GerberFilename, BitmapFilename, dpi, Foreground, Background);
    }
    catch (Exception E)
    {
        Console.WriteLine("Error: Errors while writing bitmap {0} for gerberfile {1} at dpi {2}:", BitmapFilename, GerberFilename, dpi);
        // Walk and report the whole InnerException chain.
        for (Exception cur = E; cur != null; cur = cur.InnerException)
        {
            Console.WriteLine("Error: \t{0}", cur.Message);
        }
        return false;
    }
}
/// <summary>
/// Renders a single gerber/drill file to a bitmap at the requested dpi and saves
/// it. Returns false when rendering produced no bitmap. Throws on I/O or parse
/// errors — see <see cref="SaveGerberFileToImage"/> for the guarded variant.
/// </summary>
public static bool SaveGerberFileToImageUnsafe(ProgressLog log, string GerberFilename, string BitmapFilename, float dpi, Color Foreground, Color Background)
{
    ParsedGerber PLS;
    GerberParserState State = new GerberParserState()
    {
        PreCombinePolygons = false
    };
    var FileType = Gerber.FindFileType(GerberFilename);
    Gerber.DetermineBoardSideAndLayer(GerberFilename, out State.Side, out State.Layer);
    bool forcezero = false;
    if (FileType == BoardFileType.Drill)
    {
        PLS = PolyLineSet.LoadExcellonDrillFile(log, GerberFilename);
        PLS.CalcPathBounds();
    }
    else
    {
        PLS = PolyLineSet.LoadGerberFile(log, GerberFilename, forcezero, Gerber.WriteSanitized, State);
    }
    double WidthInMM = PLS.BoundingBox.BottomRight.X - PLS.BoundingBox.TopLeft.X;
    double HeightInMM = PLS.BoundingBox.BottomRight.Y - PLS.BoundingBox.TopLeft.Y;
    int Width = (int)(Math.Ceiling((WidthInMM) * (dpi / 25.4)));
    int Height = (int)(Math.Ceiling((HeightInMM) * (dpi / 25.4)));
    Console.WriteLine("Progress: Exporting {0} ({2},{3}mm) to {1} ({4},{5})", GerberFilename, BitmapFilename, WidthInMM, HeightInMM, Width, Height);
    GerberImageCreator GIC = new GerberImageCreator();
    GIC.scale = dpi / 25.4f; // dpi
    GIC.BoundingBox.AddBox(PLS.BoundingBox);
    var Tr = GIC.BuildMatrix(Width, Height);
    // Fix: dispose the rendered bitmap so the native GDI handle is released.
    using (Bitmap B2 = GIC.RenderToBitmap(Width, Height, Tr, Foreground, Background, PLS, true))
    {
        if (B2 == null) return false;
        B2.Save(BitmapFilename);
    }
    return true;
}
/// <summary>
/// Formats a double as a plain (non-scientific) decimal string using the current
/// culture's number format. See the two-argument overload for details.
/// </summary>
public static string ToFloatingPointString(double value) => ToFloatingPointString(value, NumberFormatInfo.CurrentInfo);
// Formats a double round-trip ("r") and, when .NET renders it in scientific
// notation, expands it to a plain decimal string using formatInfo's decimal
// separator. Digits come from the invariant "r" rendering; only the separator
// is culture-dependent.
public static string ToFloatingPointString(double value, NumberFormatInfo formatInfo)
{
string result = value.ToString("r", NumberFormatInfo.InvariantInfo);
Match match = rxScientific.Match(result);
if (match.Success)
{
Debug.WriteLine("Found scientific format: {0} => [{1}] [{2}] [{3}] [{4}]", result, match.Groups["sign"], match.Groups["head"], match.Groups["tail"], match.Groups["exponent"]);
int exponent = int.Parse(match.Groups["exponent"].Value, NumberStyles.Integer, NumberFormatInfo.InvariantInfo);
StringBuilder builder = new StringBuilder(result.Length + Math.Abs(exponent));
builder.Append(match.Groups["sign"].Value);
if (exponent >= 0)
{
// Positive exponent: shift the decimal point right through the tail,
// padding with zeroes when the tail is shorter than the shift.
builder.Append(match.Groups["head"].Value);
string tail = match.Groups["tail"].Value;
if (exponent < tail.Length)
{
builder.Append(tail, 0, exponent);
builder.Append(formatInfo.NumberDecimalSeparator);
builder.Append(tail, exponent, tail.Length - exponent);
}
else
{
builder.Append(tail);
builder.Append('0', exponent - tail.Length);
}
}
else
{
// Negative exponent: emit "0.<zero padding><digits>".
builder.Append('0');
builder.Append(formatInfo.NumberDecimalSeparator);
builder.Append('0', (-exponent) - 1);
builder.Append(match.Groups["head"].Value);
builder.Append(match.Groups["tail"].Value);
}
result = builder.ToString();
}
return result;
}
/// <summary>
/// Writes the lines joined with the configured gerber line ending (no trailing
/// newline), giving byte-stable output across platforms.
/// </summary>
public static void WriteAllLines(string filename, List<string> lines)
{
    string joined = string.Join(Gerber.LineEnding, lines);
    File.WriteAllText(filename, joined);
}
/// <summary>
/// Parses a double using the invariant culture ('.' decimal separator),
/// accepting any numeric style including exponents. Throws on invalid input.
/// </summary>
internal static double ParseDouble(string inp) =>
    double.Parse(inp, NumberStyles.Any, CultureInfo.InvariantCulture);
/// <summary>
/// Invariant-culture TryParse that first strips gerber '*' terminators from the
/// input. Returns false (N = 0) when the remainder is not a valid number.
/// </summary>
internal static bool TryParseDouble(string inp, out double N)
{
    string cleaned = inp.Replace("*", "");
    return double.TryParse(cleaned, NumberStyles.Any, CultureInfo.InvariantCulture, out N);
}
/// <summary>
/// Draws a small 0.4-unit square marker centred on (X, Y) — used by
/// SaveDebugImage to highlight arc control points.
/// </summary>
private static void DrawCross(Graphics G2, double X, double Y, Color C)
{
    float S = 0.2f;
    // Fix: Pen wraps a native GDI handle — dispose it instead of leaking one per call.
    using (Pen P = new Pen(C, 1.0f))
    {
        G2.DrawLine(P, (float)X - S, (float)Y - S, (float)X + S, (float)Y - S);
        G2.DrawLine(P, (float)X - S, (float)Y + S, (float)X + S, (float)Y + S);
        G2.DrawLine(P, (float)X - S, (float)Y - S, (float)X - S, (float)Y + S);
        G2.DrawLine(P, (float)X + S, (float)Y - S, (float)X + S, (float)Y + S);
    }
}
/// <summary>
/// Shifts an angle by whole turns: first up while below -π/2, then down while at
/// or above π/2. (The two passes run in sequence, so a value of exactly π ends
/// up at -π — preserved as-is from the original.)
/// </summary>
private static double LimitPos2PI(double dA)
{
    double a = dA;
    while (a < -Math.PI / 2)
    {
        a += Math.PI * 2;
    }
    while (a >= Math.PI / 2)
    {
        a -= Math.PI * 2;
    }
    return a;
}
// Filename-substring → (side, layer) mapping entry used by the ".ger" extension
// rules in DetermineBoardSideAndLayer.
class boardset
{
public BoardLayer layer;
public string name; public BoardSide side;
};
// One candidate arc centre for single-quadrant gerber arcs, plus the start/end
// angles, radii and angle span used by CreateCurvePoints to score it against
// the other three candidates.
class QuadR
{
// Candidate centre coordinates.
public double CX;
public double CY;
// Distances from the centre to the arc start (D1) and end (D2) points.
public double D1 = 0;
public double D2 = 0;
// Swept angle after Fix{Counter}Clockwise normalisation.
public double Diff;
// D1/D2 ratio; a true centre gives a ratio of ~1.
public double DRat = 0;
// End and start angles (radians, from Atan2).
public double E;
public double S;
// Computes radii, ratio and raw start/end angles for this candidate centre.
internal void Calc(double LastX, double LastY, double X, double Y)
{
double CX1 = LastX - CX;
double CX2 = X - CX;
double CY1 = LastY - CY;
double CY2 = Y - CY;
D1 = Math.Sqrt(CX1 * CX1 + CY1 * CY1);
D2 = Math.Sqrt(CX2 * CX2 + CY2 * CY2);
if (D2 != 0) DRat = D1 / D2;
S = Math.Atan2(LastY - CY, LastX - CX);
E = Math.Atan2(Y - CY, X - CX);
}
// Normalises Diff for a clockwise sweep into (-π, π].
internal void FixClockwise()
{
// while (S < E) S += Math.PI * 2;
Diff = S - E;
while (Diff > Math.PI) Diff -= Math.PI * 2;
// Console.WriteLine("clock: {0:N2}", Gerber.RadToDeg(Diff));
}
// Normalises S/E so S <= E with S in [0, 2π), then Diff = E - S.
internal void FixCounterClockwise()
{
while (S > E) S -= Math.PI * 2;
while (S < 0)
{
S += Math.PI * 2.0;
E += Math.PI * 2.0;
}
Diff = E - S;
// while (Diff < 0) Diff += Math.PI * 2.0;
// Console.WriteLine("counterclock: {0:N2}", Gerber.RadToDeg(Diff));
}
}
#region GERBERCOMMANDSTRINGS
/// <summary>
/// Emits a complete gerber aperture-macro (%AM) block describing an outline
/// primitive (code 4) over the given vertex list; the last vertex is assumed to
/// close the polygon and is not emitted. Decimal commas are normalised to dots.
/// </summary>
public static string BuildOutlineApertureMacro(string name, List<PointD> Vertices, GerberNumberFormat format)
{
    // Fix: use StringBuilder instead of O(n²) string concatenation in the loop.
    StringBuilder res = new StringBuilder();
    res.Append("%AM").Append(name).Append("*").Append(Gerber.LineEnding);
    res.AppendFormat("4,1,{0}," + Gerber.LineEnding, (Vertices.Count - 2));
    for (int i = 0; i < Vertices.Count - 1; i++)
    {
        res.AppendFormat("{0},{1}," + Gerber.LineEnding,
            Gerber.ToFloatingPointString(format._ScaleMMToFile(Vertices[i].X)).Replace(',', '.'),
            Gerber.ToFloatingPointString(format._ScaleMMToFile(Vertices[i].Y)).Replace(',', '.'));
    }
    res.Append("0*").Append(Gerber.LineEnding).Append("%").Append(Gerber.LineEnding);
    return res.ToString();
}
/// <summary>Emits a gerber flash (D03) command at point t, scaled to file units.</summary>
public static string Flash(PointD t, GerberNumberFormat GNF)
{
    string x = GNF.Format(GNF._ScaleMMToFile(t.X));
    string y = GNF.Format(GNF._ScaleMMToFile(t.Y));
    return "X" + x + "Y" + y + "D03*";
}
/// <summary>Emits a gerber draw (D01) command to point t, scaled to file units.</summary>
public static string LineTo(PointD t, GerberNumberFormat GNF)
{
    string x = GNF.Format(GNF._ScaleMMToFile(t.X));
    string y = GNF.Format(GNF._ScaleMMToFile(t.Y));
    return "X" + x + "Y" + y + "D01*";
}
/// <summary>Emits a gerber move (D02) command to point t, scaled to file units.</summary>
public static string MoveTo(PointD t, GerberNumberFormat GNF)
{
    string x = GNF.Format(GNF._ScaleMMToFile(t.X));
    string y = GNF.Format(GNF._ScaleMMToFile(t.Y));
    return "X" + x + "Y" + y + "D02*";
}
/// <summary>Closes an aperture-macro block: line ending, '%', line ending.</summary>
public static string WriteMacroEnd()
{
    return string.Concat(Gerber.LineEnding, "%", Gerber.LineEnding);
}
/// <summary>
/// Emits the outline-primitive (code 4) body of an aperture macro for the given
/// vertex list — like <see cref="BuildOutlineApertureMacro"/> but without the
/// surrounding %AM header/footer. The closing vertex is not emitted.
/// </summary>
public static string WriteMacroPartVertices(List<PointD> Vertices, GerberNumberFormat format)
{
    // Fix: use StringBuilder instead of O(n²) string concatenation in the loop.
    StringBuilder res = new StringBuilder();
    res.AppendFormat("4,1,{0}," + Gerber.LineEnding, (Vertices.Count - 2));
    for (int i = 0; i < Vertices.Count - 1; i++)
    {
        res.AppendFormat("{0},{1}," + Gerber.LineEnding,
            Gerber.ToFloatingPointString(format._ScaleMMToFile(Vertices[i].X)).Replace(',', '.'),
            Gerber.ToFloatingPointString(format._ScaleMMToFile(Vertices[i].Y)).Replace(',', '.'));
    }
    res.Append("0*");
    return res.ToString();
}
/// <summary>Opens an aperture-macro block: "%AM&lt;name&gt;*" plus a line ending.</summary>
public static string WriteMacroStart(string name)
{
    return string.Concat("%AM", name, "*", Gerber.LineEnding);
}
#endregion
}
/// <summary>
/// Groups the parsed gerbers for one (side, layer) combination. The static
/// loader builds the default render set (outline + top/bottom mask and silk)
/// straight from a zip of gerbers.
/// </summary>
public class LayerSet
{
    public List<ParsedGerber> Gerbs = new List<ParsedGerber>();
    public List<string> Files = new List<string>();
    public BoardSide Side;
    public BoardLayer Layer;

    /// <summary>
    /// Extracts every file in the zip into memory, classifies each gerber by
    /// name, and loads it into the matching default layer set.
    /// </summary>
    public static List<LayerSet> LoadDefaultLayersetFromZip(string gerberFile)
    {
        List<LayerSet> LayerSets = new List<LayerSet>();
        LayerSets.Add(new LayerSet() { Side = BoardSide.Both, Layer = BoardLayer.Outline });
        LayerSets.Add(new LayerSet() { Side = BoardSide.Top, Layer = BoardLayer.SolderMask });
        LayerSets.Add(new LayerSet() { Side = BoardSide.Top, Layer = BoardLayer.Silk });
        LayerSets.Add(new LayerSet() { Side = BoardSide.Bottom, Layer = BoardLayer.SolderMask });
        LayerSets.Add(new LayerSet() { Side = BoardSide.Bottom, Layer = BoardLayer.Silk });
        // Fix: removed unused locals (GerberImageCreator, result list, and three
        // never-used filename lists) the original allocated.
        Dictionary<string, MemoryStream> Files = new Dictionary<string, MemoryStream>();
        using (Ionic.Zip.ZipFile zip1 = Ionic.Zip.ZipFile.Read(gerberFile))
        {
            foreach (ZipEntry e in zip1)
            {
                if (e.IsDirectory == false)
                {
                    // Fix: only allocate the MemoryStream for real file entries
                    // (the original leaked one per directory entry).
                    MemoryStream MS = new MemoryStream();
                    e.Extract(MS);
                    MS.Seek(0, SeekOrigin.Begin);
                    Files[e.FileName] = MS;
                }
            }
        }
        string[] FileNames = Files.Keys.ToArray();
        foreach (var F in FileNames)
        {
            BoardSide BS = BoardSide.Unknown;
            BoardLayer BL = BoardLayer.Unknown;
            Files[F].Seek(0, SeekOrigin.Begin);
            if (Gerber.FindFileTypeFromStream(new StreamReader(Files[F]), F) == BoardFileType.Gerber)
            {
                Gerber.DetermineBoardSideAndLayer(F, out BS, out BL);
                foreach (var l in LayerSets)
                {
                    if (l.Side == BS && l.Layer == BL)
                    {
                        l.Files.Add(F);
                        // Rewind: the type sniff above consumed the stream.
                        Files[F].Seek(0, SeekOrigin.Begin);
                        var pls = PolyLineSet.LoadGerberFileFromStream(new StandardConsoleLog(), new StreamReader(Files[F]), F, true, false, new GerberParserState() { PreCombinePolygons = false });
                        l.Gerbs.Add(pls);
                    }
                }
            }
        }
        return LayerSets;
    }
}
}
| |
using System;
using Signum.Utilities;
namespace Signum.Engine.Maps
{
public static class TableExtensions
{
    /// <summary>
    /// Removes SQL identifier quoting: brackets for SQL Server; for PostgreSQL,
    /// strips double quotes when present, otherwise lower-cases the (case-insensitive) name.
    /// </summary>
    internal static string UnScapeSql(this string name, bool isPostgres)
    {
        if (!isPostgres)
            return name.Trim('[', ']');

        // Quoted postgres identifiers keep their casing; unquoted ones fold to lower case.
        return name.StartsWith("\"", StringComparison.Ordinal) ? name.Trim('\"') : name.ToLower();
    }
}
public class ServerName : IEquatable<ServerName>
{
    // Unescaped server name; quoting is applied in ToString().
    public string Name { get; private set; }
    // Selects the quoting dialect (double quotes for PostgreSQL, brackets for SQL Server).
    public bool IsPostgres { get; private set; }

    /// <summary>
    /// Linked Servers: http://msdn.microsoft.com/en-us/library/ms188279.aspx
    /// </summary>
    /// <param name="name"></param>
    public ServerName(string name, bool isPostgres)
    {
        if (string.IsNullOrEmpty(name))
            throw new ArgumentNullException(nameof(name));

        this.Name = name;
        this.IsPostgres = isPostgres;
    }

    public override string ToString()
    {
        return Name.SqlEscape(IsPostgres);
    }

    public override bool Equals(object? obj) => obj is ServerName sn && Equals(sn);

    public bool Equals(ServerName? other)
    {
        // FIX: guard against null — the original dereferenced 'other' unconditionally
        // and threw NullReferenceException for Equals(null).
        // NOTE(review): IsPostgres is deliberately not compared, matching the original semantics.
        return other is not null && other.Name == Name;
    }

    public override int GetHashCode()
    {
        return Name.GetHashCode();
    }

    /// <summary>Parses an (optionally escaped) server name; returns null for empty input.</summary>
    public static ServerName? Parse(string? name, bool isPostgres)
    {
        if (!name.HasText())
            return null;

        return new ServerName(name.UnScapeSql(isPostgres), isPostgres);
    }
}
public class DatabaseName : IEquatable<DatabaseName>
{
    // Unescaped database name; quoting is applied in ToString().
    public string Name { get; private set; }
    public bool IsPostgres { get; private set; }
    // Optional linked-server qualifier; null for the local server.
    public ServerName? Server { get; private set; }

    public DatabaseName(ServerName? server, string name, bool isPostgres)
    {
        if (string.IsNullOrEmpty(name))
            throw new ArgumentNullException(nameof(name));

        this.Name = name;
        this.Server = server;
        this.IsPostgres = isPostgres;
    }

    public override string ToString()
    {
        // Honors ObjectNameOptions.DatabaseNameReplacement (e.g. for scripting against
        // a differently-named database).
        var options = ObjectName.CurrentOptions;
        var name = !options.DatabaseNameReplacement.HasText() ? Name.SqlEscape(IsPostgres): Name.Replace(Connector.Current.DatabaseName(), options.DatabaseNameReplacement).SqlEscape(IsPostgres);

        if (Server == null)
            return name;

        return Server.ToString() + "." + name;
    }

    public override bool Equals(object? obj) => obj is DatabaseName dn && Equals(dn);

    public bool Equals(DatabaseName? other)
    {
        // FIX: guard against null — the original dereferenced 'other' unconditionally
        // and threw NullReferenceException for Equals(null).
        return other is not null && other.Name == Name && object.Equals(Server, other.Server);
    }

    public override int GetHashCode()
    {
        return Name.GetHashCode() ^ (Server == null ? 0 : Server.GetHashCode());
    }

    /// <summary>Parses "[server].[database]" (or unescaped equivalents); returns null for empty input.</summary>
    public static DatabaseName? Parse(string? name, bool isPostgres)
    {
        if (!name.HasText())
            return null;

        var tuple = ObjectName.SplitLast(name, isPostgres);
        return new DatabaseName(ServerName.Parse(tuple.prefix, isPostgres), tuple.name, isPostgres);
    }
}
public class SchemaName : IEquatable<SchemaName>
{
    // Unescaped schema name; quoting is applied in ToString().
    public string Name { get; private set; }
    public bool IsPostgres { get; private set; }

    readonly DatabaseName? database;

    // Hides the database qualifier when ObjectNameOptions.AvoidDatabaseName is set.
    public DatabaseName? Database
    {
        get
        {
            if (database == null || ObjectName.CurrentOptions.AvoidDatabaseName)
                return null;

            return database;
        }
    }

    static readonly SchemaName defaultSqlServer = new SchemaName(null, "dbo", isPostgres: false);
    static readonly SchemaName defaultPostgreeSql = new SchemaName(null, "public", isPostgres: true);

    /// <summary>The engine's default schema: "public" for PostgreSQL, "dbo" for SQL Server.</summary>
    public static SchemaName Default(bool isPostgres) => isPostgres ? defaultPostgreeSql : defaultSqlServer;

    public bool IsDefault()
    {
        return Database == null && (IsPostgres ? defaultPostgreeSql : defaultSqlServer).Name == Name;
    }

    public SchemaName(DatabaseName? database, string name, bool isPostgres)
    {
        if (string.IsNullOrEmpty(name))
            throw new ArgumentNullException(nameof(name));

        this.Name = name;
        this.database = database;
        this.IsPostgres = isPostgres;
    }

    public override string ToString()
    {
        var result = Name.SqlEscape(IsPostgres);

        if (Database == null)
            return result;

        return Database.ToString() + "." + result;
    }

    public override bool Equals(object? obj) => obj is SchemaName sn && Equals(sn);

    public bool Equals(SchemaName? other)
    {
        // FIX: guard against null — the original dereferenced 'other' unconditionally
        // and threw NullReferenceException for Equals(null).
        return other is not null &&
            other.Name == Name &&
            object.Equals(Database, other.Database);
    }

    public override int GetHashCode()
    {
        return Name.GetHashCode() ^ (Database == null ? 0 : Database.GetHashCode());
    }

    /// <summary>Parses "[db].[schema]" (or unescaped equivalents); empty input yields the default schema.</summary>
    public static SchemaName Parse(string? name, bool isPostgres)
    {
        if (!name.HasText())
            return SchemaName.Default(isPostgres);

        var tuple = ObjectName.SplitLast(name, isPostgres);
        return new SchemaName(DatabaseName.Parse(tuple.prefix, isPostgres), tuple.name, isPostgres);
    }

    internal SchemaName OnDatabase(DatabaseName? database)
    {
        return new SchemaName(database, this.Name, this.IsPostgres);
    }
}
public class ObjectName : IEquatable<ObjectName>
{
    // Maximum identifier length enforced for PostgreSQL (NAMEDATALEN - 1).
    public static int MaxPostgreeSize = 63;

    public string Name { get; private set; }
    public bool IsPostgres { get; private set; }
    public SchemaName Schema { get; private set; } // null only for postgres temporary

    public ObjectName(SchemaName schema, string name, bool isPostgres)
    {
        this.Name = name.HasText() ? name : throw new ArgumentNullException(nameof(name));

        if (isPostgres && this.Name.Length > MaxPostgreeSize)
            throw new InvalidOperationException($"The name '{name}' is too long, consider using TableNameAttribute/ColumnNameAttribute");

        // Postgres temporary tables ("#name") are the only objects allowed without a schema.
        this.Schema = schema ?? (isPostgres && name.StartsWith("#") ? (SchemaName)null! : throw new ArgumentNullException(nameof(schema)));
        this.IsPostgres = isPostgres;
    }

    public override string ToString()
    {
        if (Schema == null)
            return Name.SqlEscape(IsPostgres);

        return Schema.ToString() + "." + Name.SqlEscape(IsPostgres);
    }

    public override bool Equals(object? obj) => obj is ObjectName on && Equals(on);

    public bool Equals(ObjectName? other)
    {
        // FIX: guard against null — the original dereferenced 'other' unconditionally
        // and threw NullReferenceException for Equals(null).
        return other is not null &&
            other.Name == Name &&
            object.Equals(Schema, other.Schema);
    }

    public override int GetHashCode()
    {
        // FIX: '^' binds tighter than '??', so the original expression parsed as
        // (Name.GetHashCode() ^ Schema?.GetHashCode()) ?? 0 and collapsed to 0 for
        // every schema-less name, discarding Name's hash. Parenthesize correctly.
        return Name.GetHashCode() ^ (Schema?.GetHashCode() ?? 0);
    }

    /// <summary>Parses a fully-qualified object name; throws on empty input.</summary>
    public static ObjectName Parse(string? name, bool isPostgres)
    {
        if (!name.HasText())
            throw new ArgumentNullException(nameof(name));

        var tuple = SplitLast(name, isPostgres);
        return new ObjectName(SchemaName.Parse(tuple.prefix, isPostgres), tuple.name, isPostgres);
    }

    //FROM "[a.b.c].[d.e.f].[a.b.c].[c.d.f]"
    //TO ("[a.b.c].[d.e.f].[a.b.c]", "c.d.f")
    internal static (string? prefix, string name) SplitLast(string str, bool isPostgres)
    {
        if (isPostgres)
        {
            if (!str.EndsWith('\"'))
            {
                return (
                    prefix: str.TryBeforeLast('.'),
                    name: str.TryAfterLast('.') ?? str
                );
            }

            var index = str.LastIndexOf('\"', str.Length - 2);
            return (
                prefix: index == 0 ? null : str.Substring(0, index - 1),
                name: str.Substring(index).UnScapeSql(isPostgres)
            );
        }
        else
        {
            if (!str.EndsWith("]"))
            {
                return (
                    prefix: str.TryBeforeLast('.'),
                    name: str.TryAfterLast('.') ?? str
                );
            }

            var index = str.LastIndexOf('[');
            return (
                prefix: index == 0 ? null : str.Substring(0, index - 1),
                name: str.Substring(index).UnScapeSql(isPostgres)
            );
        }
    }

    public ObjectName OnDatabase(DatabaseName? databaseName)
    {
        // FIX: corrected typo in the exception message ("Inconsitent").
        if (databaseName != null && databaseName.IsPostgres != this.IsPostgres)
            throw new Exception("Inconsistent IsPostgres");

        return new ObjectName(new SchemaName(databaseName, Schema!.Name, IsPostgres), Name, IsPostgres);
    }

    public ObjectName OnSchema(SchemaName schemaName)
    {
        // FIX: corrected typo in the exception message ("Inconsitent").
        if (schemaName.IsPostgres != this.IsPostgres)
            throw new Exception("Inconsistent IsPostgres");

        return new ObjectName(schemaName, Name, IsPostgres);
    }

    static readonly ThreadVariable<ObjectNameOptions> optionsVariable = Statics.ThreadVariable<ObjectNameOptions>("objectNameOptions");

    /// <summary>Overrides CurrentOptions for the current thread; dispose to restore the previous value.</summary>
    public static IDisposable OverrideOptions(ObjectNameOptions options)
    {
        var old = optionsVariable.Value;
        optionsVariable.Value = options;
        return new Disposable(() => optionsVariable.Value = old);
    }

    public static ObjectNameOptions CurrentOptions
    {
        get { return optionsVariable.Value; }
    }

    // Temporary objects are identified by the "#" prefix.
    public bool IsTemporal => this.Name.StartsWith("#");
}
/// <summary>
/// Ambient, per-thread rendering options consumed via ObjectName.CurrentOptions
/// (set with ObjectName.OverrideOptions).
/// </summary>
public struct ObjectNameOptions
{
    // When set, DatabaseName.ToString() replaces the current connector's database
    // name with this value before escaping.
    public string DatabaseNameReplacement;
    // When true, SchemaName.Database returns null so rendered names omit the database part.
    public bool AvoidDatabaseName;
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using QuantConnect.Data;
using QuantConnect.Data.Market;
using QuantConnect.Indicators;
using QuantConnect.Interfaces;
namespace QuantConnect.Algorithm.CSharp
{
/// <summary>
/// Demonstration algorithm of popular indicators and plotting them.
/// </summary>
/// <meta name="tag" content="indicators" />
/// <meta name="tag" content="indicator classes" />
/// <meta name="tag" content="plotting indicators" />
/// <meta name="tag" content="charting" />
/// <meta name="tag" content="indicator field selection" />
public class IndicatorSuiteAlgorithm : QCAlgorithm, IRegressionAlgorithmDefinition
{
    private string _ticker = "SPY";
    private string _customTicker = "IBM";
    private Symbol _symbol;
    private Symbol _customSymbol;
    private Indicators _indicators;          // indicators fed by the default selector (data.Value)
    private Indicators _selectorIndicators;  // indicators fed by explicit field selectors
    private IndicatorBase<IndicatorDataPoint> _ratio;  // composite indicator: IBM close / SPY close
    //RSI Custom Data:
    private RelativeStrengthIndex _rsiCustom;
    private Minimum _minCustom;
    private Maximum _maxCustom;
    private decimal _price;  // last SPY close, cached for plotting in OnEndOfDay

    /// <summary>
    /// Initialize the data and resolution you require for your strategy
    /// </summary>
    public override void Initialize()
    {
        //Initialize
        SetStartDate(2013, 1, 1);
        SetEndDate(2014, 12, 31);
        SetCash(25000);

        //Add as many securities as you like. All the data will be passed into the event handler:
        _symbol = AddSecurity(SecurityType.Equity, _ticker, Resolution.Daily).Symbol;

        //Add the Custom Data:
        _customSymbol = AddData<CustomData>(_customTicker, Resolution.Daily).Symbol;

        //Set up default Indicators, these indicators are defined on the Value property of incoming data (except ATR and AROON which use the full TradeBar object)
        _indicators = new Indicators
        {
            BB = BB(_symbol, 20, 1, MovingAverageType.Simple, Resolution.Daily),
            RSI = RSI(_symbol, 14, MovingAverageType.Simple, Resolution.Daily),
            ATR = ATR(_symbol, 14, MovingAverageType.Simple, Resolution.Daily),
            EMA = EMA(_symbol, 14, Resolution.Daily),
            SMA = SMA(_symbol, 14, Resolution.Daily),
            MACD = MACD(_symbol, 12, 26, 9, MovingAverageType.Simple, Resolution.Daily),
            AROON = AROON(_symbol, 20, Resolution.Daily),
            MOM = MOM(_symbol, 20, Resolution.Daily),
            MOMP = MOMP(_symbol, 20, Resolution.Daily),
            STD = STD(_symbol, 20, Resolution.Daily),
            MIN = MIN(_symbol, 14, Resolution.Daily), // by default if the symbol is a tradebar type then it will be the min of the low property
            MAX = MAX(_symbol, 14, Resolution.Daily)  // by default if the symbol is a tradebar type then it will be the max of the high property
        };

        // Here we're going to define indicators using 'selector' functions. These 'selector' functions will define what data gets sent into the indicator
        //  These functions have a signature like the following: decimal Selector(BaseData baseData), and can be defined like: baseData => baseData.Value
        //  We'll define these 'selector' functions to select the Low value
        //
        //  For more information on 'anonymous functions' see: http://en.wikipedia.org/wiki/Anonymous_function
        //                                                     https://msdn.microsoft.com/en-us/library/bb397687.aspx
        //
        _selectorIndicators = new Indicators
        {
            BB = BB(_symbol, 20, 1, MovingAverageType.Simple, Resolution.Daily, Field.Low),
            RSI = RSI(_symbol, 14, MovingAverageType.Simple, Resolution.Daily, Field.Low),
            EMA = EMA(_symbol, 14, Resolution.Daily, Field.Low),
            SMA = SMA(_symbol, 14, Resolution.Daily, Field.Low),
            MACD = MACD(_symbol, 12, 26, 9, MovingAverageType.Simple, Resolution.Daily, Field.Low),
            MOM = MOM(_symbol, 20, Resolution.Daily, Field.Low),
            MOMP = MOMP(_symbol, 20, Resolution.Daily, Field.Low),
            STD = STD(_symbol, 20, Resolution.Daily, Field.Low),
            MIN = MIN(_symbol, 14, Resolution.Daily, Field.High), // this will find the 14 day min of the high property
            MAX = MAX(_symbol, 14, Resolution.Daily, Field.Low),  // this will find the 14 day max of the low property

            // ATR and AROON are special in that they accept a TradeBar instance instead of a decimal, we could easily project and/or transform the input TradeBar
            // before it gets sent to the ATR/AROON indicator, here we use a function that will multiply the input trade bar by a factor of two
            ATR = ATR(_symbol, 14, MovingAverageType.Simple, Resolution.Daily, SelectorDoubleTradeBar),
            AROON = AROON(_symbol, 20, Resolution.Daily, SelectorDoubleTradeBar)
        };

        //Custom Data Indicator:
        _rsiCustom = RSI(_customSymbol, 14, MovingAverageType.Simple, Resolution.Daily);
        _minCustom = MIN(_customSymbol, 14, Resolution.Daily);
        _maxCustom = MAX(_customSymbol, 14, Resolution.Daily);

        // in addition to defining indicators on a single security, you can also define 'composite' indicators.
        // these are indicators that require multiple inputs. the most common of which is a ratio.
        // suppose we seek the ratio of IBM to SPY, we could write the following:
        var spyClose = Identity(_symbol);
        var ibmClose = Identity(_customSymbol);

        // this will create a new indicator whose value is IBM/SPY
        _ratio = ibmClose.Over(spyClose);

        // we can also easily plot our indicators each time they update using the PlotIndicator function
        PlotIndicator("Ratio", _ratio);
    }

    /// <summary>
    /// Custom data event handler:
    /// </summary>
    /// <param name="data">CustomData - dictionary Bars of custom data</param>
    public void OnData(CustomData data)
    {
    }

    /// <summary>
    /// OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
    /// </summary>
    /// <param name="data">TradeBars IDictionary object with your stock data</param>
    public void OnData(TradeBars data)
    {
        // wait until the indicators have warmed up before trading
        if (!_indicators.BB.IsReady || !_indicators.RSI.IsReady) return;

        _price = data[_symbol].Close;

        if (!Portfolio.HoldStock)
        {
            // invest all available cash (whole shares only)
            int quantity = (int)Math.Floor(Portfolio.Cash / _price);

            //Order function places trades: enter the string symbol and the quantity you want:
            Order(_symbol, quantity);

            //Debug sends messages to the user console: "Time" is the algorithm time keeper object
            Debug("Purchased SPY on " + Time.ToShortDateString());
        }
    }

    /// <summary>
    /// Fire plotting events once per day.
    /// </summary>
    public override void OnEndOfDay(Symbol symbol)
    {
        if (symbol != _symbol) return;
        if (!_indicators.BB.IsReady) return;

        Plot("BB", "Price", _price);
        Plot("BB", _indicators.BB.UpperBand, _indicators.BB.MiddleBand, _indicators.BB.LowerBand);

        Plot("RSI", _indicators.RSI);

        //Custom data indicator
        Plot("RSI-BTC", _rsiCustom);

        Plot("ATR", _indicators.ATR);

        Plot("STD", _indicators.STD);

        Plot("AROON", _indicators.AROON.AroonUp, _indicators.AROON.AroonDown);

        // The following Plot method calls are commented out because of the 10 series limit for backtests
        //Plot("MOM", _indicators.MOM);
        //Plot("MOMP", _indicators.MOMP);

        //Plot("MACD", "Price", _price);
        //Plot("MACD", _indicators.MACD.Fast, _indicators.MACD.Slow, _indicators.MACD.Signal);

        //Plot("Averages", _indicators.EMA, _indicators.SMA);
    }

    /// <summary>
    /// Class to hold a bunch of different indicators for this example
    /// </summary>
    private class Indicators
    {
        public BollingerBands BB;
        public SimpleMovingAverage SMA;
        public ExponentialMovingAverage EMA;
        public RelativeStrengthIndex RSI;
        public AverageTrueRange ATR;
        public StandardDeviation STD;
        public AroonOscillator AROON;
        public Momentum MOM;
        public MomentumPercent MOMP;
        public MovingAverageConvergenceDivergence MACD;
        public Minimum MIN;
        public Maximum MAX;
    }

    /// <summary>
    /// Function used to select a trade bar that has double the values of the input trade bar
    /// </summary>
    private static TradeBar SelectorDoubleTradeBar(IBaseData baseData)
    {
        var bar = (TradeBar)baseData;
        return new TradeBar
        {
            Close = 2 * bar.Close,
            DataType = bar.DataType,
            High = 2 * bar.High,
            Low = 2 * bar.Low,
            Open = 2 * bar.Open,
            Symbol = bar.Symbol,
            Time = bar.Time,
            Value = 2 * bar.Value,
            Volume = 2 * bar.Volume,
            Period = bar.Period
        };
    }

    /// <summary>
    /// This is used by the regression test system to indicate if the open source Lean repository has the required data to run this algorithm.
    /// </summary>
    public bool CanRunLocally { get; } = true;

    /// <summary>
    /// This is used by the regression test system to indicate which languages this algorithm is written in.
    /// </summary>
    public Language[] Languages { get; } = { Language.CSharp, Language.Python };

    /// <summary>
    /// This is used by the regression test system to indicate what the expected statistics are from running the algorithm
    /// </summary>
    public Dictionary<string, string> ExpectedStatistics => new Dictionary<string, string>
    {
        {"Total Trades", "1"},
        {"Average Win", "0%"},
        {"Average Loss", "0%"},
        {"Compounding Annual Return", "19.058%"},
        {"Drawdown", "7.300%"},
        {"Expectancy", "0"},
        {"Net Profit", "41.748%"},
        {"Sharpe Ratio", "1.448"},
        {"Probabilistic Sharpe Ratio", "72.548%"},
        {"Loss Rate", "0%"},
        {"Win Rate", "0%"},
        {"Profit-Loss Ratio", "0"},
        {"Alpha", "-0.017"},
        {"Beta", "0.963"},
        {"Annual Standard Deviation", "0.092"},
        {"Annual Variance", "0.008"},
        {"Information Ratio", "-1.289"},
        {"Tracking Error", "0.018"},
        {"Treynor Ratio", "0.138"},
        {"Total Fees", "$1.00"},
        {"Estimated Strategy Capacity", "$580000000.00"},
        {"Lowest Capacity Asset", "SPY R735QTJ8XC9X"},
        {"Fitness Score", "0.001"},
        {"Kelly Criterion Estimate", "0"},
        {"Kelly Criterion Probability Value", "0"},
        {"Sortino Ratio", "2.283"},
        {"Return Over Maximum Drawdown", "2.627"},
        {"Portfolio Turnover", "0.001"},
        {"Total Insights Generated", "0"},
        {"Total Insights Closed", "0"},
        {"Total Insights Analysis Completed", "0"},
        {"Long Insight Count", "0"},
        {"Short Insight Count", "0"},
        {"Long/Short Ratio", "100%"},
        {"Estimated Monthly Alpha Value", "$0"},
        {"Total Accumulated Estimated Alpha Value", "$0"},
        {"Mean Population Estimated Insight Value", "$0"},
        {"Mean Population Direction", "0%"},
        {"Mean Population Magnitude", "0%"},
        {"Rolling Averaged Population Direction", "0%"},
        {"Rolling Averaged Population Magnitude", "0%"},
        {"OrderListHash", "ee33b931de5b59dfa930cbcacdaa2c9b"}
    };
}
}
| |
// ZlibCodec.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// Time-stamp: <2009-November-03 15:40:51>
//
// ------------------------------------------------------------------
//
// This module defines a Codec for ZLIB compression and
// decompression. This code extends code that was based the jzlib
// implementation of zlib, but this code is completely novel. The codec
// class is new, and encapsulates some behaviors that are new, and some
// that were present in other classes in the jzlib code base. In
// keeping with the license for jzlib, the copyright to the jzlib code
// is included below.
//
// ------------------------------------------------------------------
//
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the distribution.
//
// 3. The names of the authors may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
// INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// -----------------------------------------------------------------------
//
// This program is based on zlib-1.1.3; credit to authors
// Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
// and contributors of zlib.
//
// -----------------------------------------------------------------------
using System;
using Interop=System.Runtime.InteropServices;
namespace Ionic.Zlib
{
/// <summary>
/// Encoder and Decoder for ZLIB and DEFLATE (IETF RFC1950 and RFC1951).
/// </summary>
///
/// <remarks>
/// This class compresses and decompresses data according to the Deflate algorithm
/// and optionally, the ZLIB format, as documented in <see
/// href="http://www.ietf.org/rfc/rfc1950.txt">RFC 1950 - ZLIB</see> and <see
/// href="http://www.ietf.org/rfc/rfc1951.txt">RFC 1951 - DEFLATE</see>.
/// </remarks>
[Interop.GuidAttribute("ebc25cf6-9120-4283-b972-0e5520d0000D")]
[Interop.ComVisible(true)]
//#if !NETCF
// [Interop.ClassInterface(Interop.ClassInterfaceType.AutoDispatch)]
//#endif
sealed public class ZlibCodec
{
/// <summary>
/// The buffer from which data is taken.
/// </summary>
public byte[] InputBuffer;
/// <summary>
/// An index into the InputBuffer array, indicating where to start reading.
/// </summary>
public int NextIn;
/// <summary>
/// The number of bytes available in the InputBuffer, starting at NextIn.
/// </summary>
/// <remarks>
/// Generally you should set this to InputBuffer.Length before the first Inflate() or Deflate() call.
/// The class will update this number as calls to Inflate/Deflate are made.
/// </remarks>
public int AvailableBytesIn;
/// <summary>
/// Total number of bytes read so far, through all calls to Inflate()/Deflate().
/// </summary>
public long TotalBytesIn;
/// <summary>
/// Buffer to store output data.
/// </summary>
public byte[] OutputBuffer;
/// <summary>
/// An index into the OutputBuffer array, indicating where to start writing.
/// </summary>
public int NextOut;
/// <summary>
/// The number of bytes available in the OutputBuffer, starting at NextOut.
/// </summary>
/// <remarks>
/// Generally you should set this to OutputBuffer.Length before the first Inflate() or Deflate() call.
/// The class will update this number as calls to Inflate/Deflate are made.
/// </remarks>
public int AvailableBytesOut;
/// <summary>
/// Total number of bytes written to the output so far, through all calls to Inflate()/Deflate().
/// </summary>
public long TotalBytesOut;
/// <summary>
/// used for diagnostics, when something goes wrong!
/// </summary>
public System.String Message;
internal DeflateManager dstate;
internal InflateManager istate;
internal uint _Adler32;
/// <summary>
/// The compression level to use in this codec. Useful only in compression mode.
/// </summary>
public CompressionLevel CompressLevel = CompressionLevel.Default;
/// <summary>
/// The number of Window Bits to use.
/// </summary>
/// <remarks>
/// This gauges the size of the sliding window, and hence the
/// compression effectiveness as well as memory consumption. It's best to just leave this
/// setting alone if you don't know what it is. The maximum value is 15 bits, which implies
/// a 32k window.
/// </remarks>
public int WindowBits = ZlibConstants.WindowBitsDefault;
/// <summary>
/// The compression strategy to use.
/// </summary>
/// <remarks>
/// This is only effective in compression. The theory offered by ZLIB is that different
/// strategies could potentially produce significant differences in compression behavior
/// for different data sets. Unfortunately I don't have any good recommendations for how
/// to set it differently. When I tested changing the strategy I got minimally different
/// compression performance. It's best to leave this property alone if you don't have a
/// good feel for it. Or, you may want to produce a test harness that runs through the
/// different strategy options and evaluates them on different file types. If you do that,
/// let me know your results.
/// </remarks>
public CompressionStrategy Strategy = CompressionStrategy.Default;
/// <summary>
/// The Adler32 checksum on the data transferred through the codec so far. You probably don't need to look at this.
/// </summary>
public int Adler32 { get { return (int)_Adler32; } }
/// <summary>
/// Create a ZlibCodec.
/// </summary>
/// <remarks>
/// If you use this default constructor, you will later have to explicitly call
/// InitializeInflate() or InitializeDeflate() before using the ZlibCodec to compress
/// or decompress.
/// </remarks>
public ZlibCodec() { } // No-op: InitializeInflate()/InitializeDeflate() must be called before use.
/// <summary>
/// Create a ZlibCodec that either compresses or decompresses.
/// </summary>
/// <param name="mode">
/// Indicates whether the codec should compress (deflate) or decompress (inflate).
/// </param>
/// <summary>
/// Create a ZlibCodec that either compresses or decompresses.
/// </summary>
/// <param name="mode">
/// Indicates whether the codec should compress (deflate) or decompress (inflate).
/// </param>
public ZlibCodec(CompressionMode mode)
{
    switch (mode)
    {
        case CompressionMode.Compress:
            if (InitializeDeflate() != ZlibConstants.Z_OK)
                throw new ZlibException("Cannot initialize for deflate.");
            break;

        case CompressionMode.Decompress:
            if (InitializeInflate() != ZlibConstants.Z_OK)
                throw new ZlibException("Cannot initialize for inflate.");
            break;

        default:
            throw new ZlibException("Invalid ZlibStreamFlavor.");
    }
}
/// <summary>
/// Initialize the inflation state.
/// </summary>
/// <remarks>
/// It is not necessary to call this before using the ZlibCodec to inflate data;
/// It is implicitly called when you call the constructor.
/// </remarks>
/// <returns>Z_OK if everything goes well.</returns>
public int InitializeInflate()
{
    // Delegate to the windowBits overload using the currently configured value.
    return InitializeInflate(WindowBits);
}
/// <summary>
/// Initialize the inflation state with an explicit flag to
/// govern the handling of RFC1950 header bytes.
/// </summary>
///
/// <remarks>
/// By default, the ZLIB header defined in <see
/// href="http://www.ietf.org/rfc/rfc1950.txt">RFC 1950</see> is expected. If
/// you want to read a zlib stream you should specify true for
/// expectRfc1950Header. If you have a deflate stream, you will want to specify
/// false. It is only necessary to invoke this initializer explicitly if you
/// want to specify false.
/// </remarks>
///
/// <param name="expectRfc1950Header">whether to expect an RFC1950 header byte
/// pair when reading the stream of data to be inflated.</param>
///
/// <returns>Z_OK if everything goes well.</returns>
public int InitializeInflate(bool expectRfc1950Header)
{
    // Delegate to the full overload using the currently configured window size.
    return InitializeInflate(WindowBits, expectRfc1950Header);
}
/// <summary>
/// Initialize the ZlibCodec for inflation, with the specified number of window bits.
/// </summary>
/// <param name="windowBits">The number of window bits to use. If you need to ask what that is,
/// then you shouldn't be calling this initializer.</param>
/// <returns>Z_OK if all goes well.</returns>
public int InitializeInflate(int windowBits)
{
    // The two-argument overload records windowBits itself, so no separate
    // assignment is needed here; default to expecting the RFC1950 header pair.
    return InitializeInflate(windowBits, true);
}
/// <summary>
/// Initialize the inflation state with an explicit flag to govern the handling of
/// RFC1950 header bytes.
/// </summary>
///
/// <remarks>
/// If you want to read a zlib stream you should specify true for
/// expectRfc1950Header. In this case, the library will expect to find a ZLIB
/// header, as defined in <see href="http://www.ietf.org/rfc/rfc1950.txt">RFC
/// 1950</see>, in the compressed stream. If you will be reading a DEFLATE or
/// GZIP stream, which does not have such a header, you will want to specify
/// false.
/// </remarks>
///
/// <param name="expectRfc1950Header">whether to expect an RFC1950 header byte pair when reading
/// the stream of data to be inflated.</param>
/// <param name="windowBits">The number of window bits to use. If you need to ask what that is,
/// then you shouldn't be calling this initializer.</param>
/// <returns>Z_OK if everything goes well.</returns>
public int InitializeInflate(int windowBits, bool expectRfc1950Header)
{
    this.WindowBits = windowBits;
    // A single codec instance is either a deflater or an inflater, never both.
    if (dstate != null) throw new ZlibException("You may not call InitializeInflate() after calling InitializeDeflate().");
    istate = new InflateManager(expectRfc1950Header);
    return istate.Initialize(this, windowBits);
}
/// <summary>
/// Inflate the data in the InputBuffer, placing the result in the OutputBuffer.
/// </summary>
/// <remarks>
/// You must have set InputBuffer and OutputBuffer, NextIn and NextOut, and AvailableBytesIn and
/// AvailableBytesOut before calling this method.
/// </remarks>
/// <example>
/// <code>
/// private void InflateBuffer()
/// {
/// int bufferSize = 1024;
/// byte[] buffer = new byte[bufferSize];
/// ZlibCodec decompressor = new ZlibCodec();
///
/// Console.WriteLine("\n============================================");
/// Console.WriteLine("Size of Buffer to Inflate: {0} bytes.", CompressedBytes.Length);
/// MemoryStream ms = new MemoryStream(DecompressedBytes);
///
/// int rc = decompressor.InitializeInflate();
///
/// decompressor.InputBuffer = CompressedBytes;
/// decompressor.NextIn = 0;
/// decompressor.AvailableBytesIn = CompressedBytes.Length;
///
/// decompressor.OutputBuffer = buffer;
///
/// // pass 1: inflate
/// do
/// {
/// decompressor.NextOut = 0;
/// decompressor.AvailableBytesOut = buffer.Length;
/// rc = decompressor.Inflate(FlushType.None);
///
/// if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
/// throw new Exception("inflating: " + decompressor.Message);
///
/// ms.Write(decompressor.OutputBuffer, 0, buffer.Length - decompressor.AvailableBytesOut);
/// }
/// while (decompressor.AvailableBytesIn > 0 || decompressor.AvailableBytesOut == 0);
///
/// // pass 2: finish and flush
/// do
/// {
/// decompressor.NextOut = 0;
/// decompressor.AvailableBytesOut = buffer.Length;
/// rc = decompressor.Inflate(FlushType.Finish);
///
/// if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
/// throw new Exception("inflating: " + decompressor.Message);
///
/// if (buffer.Length - decompressor.AvailableBytesOut > 0)
/// ms.Write(buffer, 0, buffer.Length - decompressor.AvailableBytesOut);
/// }
/// while (decompressor.AvailableBytesIn > 0 || decompressor.AvailableBytesOut == 0);
///
/// decompressor.EndInflate();
/// }
///
/// </code>
/// </example>
/// <param name="flush">The flush to use when inflating.</param>
/// <returns>Z_OK if everything goes well.</returns>
public int Inflate(FlushType flush)
{
    // Inflation must have been initialized via one of the InitializeInflate() overloads.
    InflateManager inflater = istate;
    if (inflater == null)
        throw new ZlibException("No Inflate State!");
    return inflater.Inflate(flush);
}
/// <summary>
/// Ends an inflation session.
/// </summary>
/// <remarks>
/// Call this after a series of calls to Inflate(); it causes remaining buffers to be
/// flushed and releases the inflate state. After this call, Inflate() cannot be used
/// again without an intervening call to one of the InitializeInflate() overloads.
/// </remarks>
/// <returns>Z_OK if everything goes well.</returns>
public int EndInflate()
{
    InflateManager inflater = istate;
    if (inflater == null)
        throw new ZlibException("No Inflate State!");
    int rc = inflater.End();
    istate = null;
    return rc;
}
/// <summary>
/// Attempts to resynchronize the inflate state within the input stream.
/// </summary>
/// <remarks>
/// Delegates to InflateManager.Sync(). Presumably this scans the input for a
/// synchronization point, as zlib's inflateSync() does after corrupted data --
/// confirm against the InflateManager implementation before relying on it.
/// </remarks>
/// <returns>Z_OK if everything goes well.</returns>
public int SyncInflate()
{
    if (istate == null)
        throw new ZlibException("No Inflate State!");
    return istate.Sync();
}
/// <summary>
/// Initialize the ZlibCodec for deflation operation.
/// </summary>
/// <remarks>
/// The codec will use the MAX window bits and the default level of compression.
/// </remarks>
/// <example>
/// <code>
/// int bufferSize = 40000;
/// byte[] CompressedBytes = new byte[bufferSize];
/// byte[] DecompressedBytes = new byte[bufferSize];
///
/// ZlibCodec compressor = new ZlibCodec();
///
/// compressor.InitializeDeflate(CompressionLevel.Default);
///
/// compressor.InputBuffer = System.Text.ASCIIEncoding.ASCII.GetBytes(TextToCompress);
/// compressor.NextIn = 0;
/// compressor.AvailableBytesIn = compressor.InputBuffer.Length;
///
/// compressor.OutputBuffer = CompressedBytes;
/// compressor.NextOut = 0;
/// compressor.AvailableBytesOut = CompressedBytes.Length;
///
/// while (compressor.TotalBytesIn != TextToCompress.Length && compressor.TotalBytesOut < bufferSize)
/// {
/// compressor.Deflate(FlushType.None);
/// }
///
/// while (true)
/// {
/// int rc= compressor.Deflate(FlushType.Finish);
/// if (rc == ZlibConstants.Z_STREAM_END) break;
/// }
///
/// compressor.EndDeflate();
///
/// </code>
/// </example>
/// <returns>Z_OK if all goes well. You generally don't need to check the return code.</returns>
public int InitializeDeflate()
{
    // Default session: zlib (RFC1950) framing with the codec's current settings.
    bool emitRfc1950Header = true;
    return _InternalInitializeDeflate(emitRfc1950Header);
}
/// <summary>
/// Initialize the ZlibCodec for deflation operation, using the specified CompressionLevel.
/// </summary>
/// <remarks>
/// The codec will use the maximum window bits (15) and the specified
/// CompressionLevel. It will emit a ZLIB stream as it compresses.
/// </remarks>
/// <param name="level">The compression level for the codec.</param>
/// <returns>Z_OK if all goes well.</returns>
public int InitializeDeflate(CompressionLevel level)
{
    // Record the requested level, then defer to the shared initializer (zlib framing on).
    CompressLevel = level;
    return _InternalInitializeDeflate(true);
}
/// <summary>
/// Initialize the ZlibCodec for deflation operation, using the specified CompressionLevel,
/// and the explicit flag governing whether to emit an RFC1950 header byte pair.
/// </summary>
/// <remarks>
/// The codec will use the maximum window bits (15) and the specified CompressionLevel.
/// If you want to generate a zlib stream, you should specify true for
/// wantRfc1950Header. In this case, the library will emit a ZLIB
/// header, as defined in <see href="http://www.ietf.org/rfc/rfc1950.txt">RFC
/// 1950</see>, in the compressed stream.
/// </remarks>
/// <param name="level">The compression level for the codec.</param>
/// <param name="wantRfc1950Header">whether to emit an initial RFC1950 byte pair in the compressed stream.</param>
/// <returns>Z_OK if all goes well.</returns>
public int InitializeDeflate(CompressionLevel level, bool wantRfc1950Header)
{
    // Record the requested level; the caller decides whether zlib framing is emitted.
    CompressLevel = level;
    return _InternalInitializeDeflate(wantRfc1950Header);
}
/// <summary>
/// Initialize the ZlibCodec for deflation operation, using the specified CompressionLevel,
/// and the specified number of window bits.
/// </summary>
/// <remarks>
/// The codec will use the specified number of window bits and the specified CompressionLevel.
/// </remarks>
/// <param name="level">The compression level for the codec.</param>
/// <param name="bits">the number of window bits to use. If you don't know what this means, don't use this method.</param>
/// <returns>Z_OK if all goes well.</returns>
public int InitializeDeflate(CompressionLevel level, int bits)
{
    // Record both tuning knobs before initializing; zlib framing is always on here.
    CompressLevel = level;
    WindowBits = bits;
    return _InternalInitializeDeflate(true);
}
/// <summary>
/// Initialize the ZlibCodec for deflation operation, using the specified
/// CompressionLevel, the specified number of window bits, and the explicit flag
/// governing whether to emit an RFC1950 header byte pair.
/// </summary>
///
/// <param name="level">The compression level for the codec.</param>
/// <param name="wantRfc1950Header">whether to emit an initial RFC1950 byte pair in the compressed stream.</param>
/// <param name="bits">the number of window bits to use. If you don't know what this means, don't use this method.</param>
/// <returns>Z_OK if all goes well.</returns>
public int InitializeDeflate(CompressionLevel level, int bits, bool wantRfc1950Header)
{
    // Fully explicit overload: level, window bits, and framing are all caller-chosen.
    CompressLevel = level;
    WindowBits = bits;
    return _InternalInitializeDeflate(wantRfc1950Header);
}
private int _InternalInitializeDeflate(bool wantRfc1950Header)
{
    // A single codec instance can either deflate or inflate, never both.
    if (istate != null) throw new ZlibException("You may not call InitializeDeflate() after calling InitializeInflate().");
    DeflateManager manager = new DeflateManager();
    manager.WantRfc1950HeaderBytes = wantRfc1950Header;
    dstate = manager;
    return dstate.Initialize(this, this.CompressLevel, this.WindowBits, this.Strategy);
}
/// <summary>
/// Deflate one batch of data.
/// </summary>
/// <remarks>
/// You must have set InputBuffer and OutputBuffer before calling this method.
/// </remarks>
/// <example>
/// <code>
/// private void DeflateBuffer(CompressionLevel level)
/// {
/// int bufferSize = 1024;
/// byte[] buffer = new byte[bufferSize];
/// ZlibCodec compressor = new ZlibCodec();
///
/// Console.WriteLine("\n============================================");
/// Console.WriteLine("Size of Buffer to Deflate: {0} bytes.", UncompressedBytes.Length);
/// MemoryStream ms = new MemoryStream();
///
/// int rc = compressor.InitializeDeflate(level);
///
/// compressor.InputBuffer = UncompressedBytes;
/// compressor.NextIn = 0;
/// compressor.AvailableBytesIn = UncompressedBytes.Length;
///
/// compressor.OutputBuffer = buffer;
///
/// // pass 1: deflate
/// do
/// {
/// compressor.NextOut = 0;
/// compressor.AvailableBytesOut = buffer.Length;
/// rc = compressor.Deflate(FlushType.None);
///
/// if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
/// throw new Exception("deflating: " + compressor.Message);
///
/// ms.Write(compressor.OutputBuffer, 0, buffer.Length - compressor.AvailableBytesOut);
/// }
/// while (compressor.AvailableBytesIn > 0 || compressor.AvailableBytesOut == 0);
///
/// // pass 2: finish and flush
/// do
/// {
/// compressor.NextOut = 0;
/// compressor.AvailableBytesOut = buffer.Length;
/// rc = compressor.Deflate(FlushType.Finish);
///
/// if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
/// throw new Exception("deflating: " + compressor.Message);
///
/// if (buffer.Length - compressor.AvailableBytesOut > 0)
/// ms.Write(buffer, 0, buffer.Length - compressor.AvailableBytesOut);
/// }
/// while (compressor.AvailableBytesIn > 0 || compressor.AvailableBytesOut == 0);
///
/// compressor.EndDeflate();
///
/// ms.Seek(0, SeekOrigin.Begin);
/// CompressedBytes = new byte[compressor.TotalBytesOut];
/// ms.Read(CompressedBytes, 0, CompressedBytes.Length);
/// }
/// </code>
/// </example>
/// <param name="flush">whether to flush all data as you deflate. Generally you will want to
/// use Z_NO_FLUSH here, in a series of calls to Deflate(), and then call EndDeflate() to
/// flush everything.
/// </param>
/// <returns>Z_OK if all goes well.</returns>
public int Deflate(FlushType flush)
{
    // Deflation must have been initialized via one of the InitializeDeflate() overloads.
    DeflateManager deflater = dstate;
    if (deflater == null)
        throw new ZlibException("No Deflate State!");
    return deflater.Deflate(flush);
}
/// <summary>
/// End a deflation session.
/// </summary>
/// <remarks>
/// Call this after making a series of one or more calls to Deflate(). All buffers are flushed.
/// </remarks>
/// <returns>Z_OK if all goes well.</returns>
public int EndDeflate()
{
    if (dstate == null)
        throw new ZlibException("No Deflate State!");
    // The deflate state is simply dropped; no explicit end call is made on it.
    dstate = null;
    return ZlibConstants.Z_OK;
}
/// <summary>
/// Reset a codec for another deflation session.
/// </summary>
/// <remarks>
/// Call this to reset the deflation state. For example if a thread is deflating
/// non-consecutive blocks, you can call Reset() after the Deflate(Sync) of the first
/// block and before the next Deflate(None) of the second block. Note this method
/// returns void; the previous doc claimed a Z_OK return, which was incorrect.
/// </remarks>
public void ResetDeflate()
{
    if (dstate == null)
        throw new ZlibException("No Deflate State!");
    dstate.Reset();
}
/// <summary>
/// Set the CompressionStrategy and CompressionLevel for a deflation session.
/// </summary>
/// <param name="level">the level of compression to use.</param>
/// <param name="strategy">the strategy to use for compression.</param>
/// <returns>Z_OK if all goes well.</returns>
public int SetDeflateParams(CompressionLevel level, CompressionStrategy strategy)
{
    // Parameters can only be changed mid-session on an active deflate state.
    DeflateManager deflater = dstate;
    if (deflater == null)
        throw new ZlibException("No Deflate State!");
    return deflater.SetParams(level, strategy);
}
/// <summary>
/// Set the dictionary to be used for either Inflation or Deflation.
/// </summary>
/// <param name="dictionary">The dictionary bytes to use.</param>
/// <returns>Z_OK if all goes well.</returns>
public int SetDictionary(byte[] dictionary)
{
    // Forward to whichever state is active; an inflate state takes precedence.
    if (istate != null)
    {
        return istate.SetDictionary(dictionary);
    }
    if (dstate != null)
    {
        return dstate.SetDictionary(dictionary);
    }
    throw new ZlibException("No Inflate or Deflate state!");
}
// Flush as much pending output as possible. All deflate() output goes
// through this function so some applications may wish to modify it
// to avoid allocating a large strm->next_out buffer and copying into it.
// (See also read_buf()).
internal void flush_pending()
{
    // Copy at most AvailableBytesOut bytes of the deflater's pending output
    // into OutputBuffer at NextOut, advancing both cursors.
    int len = dstate.pendingCount;
    if (len > AvailableBytesOut)
        len = AvailableBytesOut;
    if (len == 0)
        return;
    // Sanity-check both buffers before copying; a violation indicates corrupted
    // internal state rather than a recoverable caller error.
    if (dstate.pending.Length <= dstate.nextPending ||
        OutputBuffer.Length <= NextOut ||
        dstate.pending.Length < (dstate.nextPending + len) ||
        OutputBuffer.Length < (NextOut + len))
    {
        throw new ZlibException(String.Format("Invalid State. (pending.Length={0}, pendingCount={1})",
            dstate.pending.Length, dstate.pendingCount));
    }
    Array.Copy(dstate.pending, dstate.nextPending, OutputBuffer, NextOut, len);
    NextOut += len;
    dstate.nextPending += len;
    TotalBytesOut += len;
    AvailableBytesOut -= len;
    dstate.pendingCount -= len;
    // Once the pending buffer fully drains, rewind its read cursor for reuse.
    if (dstate.pendingCount == 0)
    {
        dstate.nextPending = 0;
    }
}
// Read a new buffer from the current input stream, update the adler32
// and total number of bytes read. All deflate() input goes through
// this function so some applications may wish to modify it to avoid
// allocating a large strm->next_in buffer and copying from it.
// (See also flush_pending()).
internal int read_buf(byte[] buf, int start, int size)
{
    // Copies up to 'size' bytes from InputBuffer at NextIn into buf at 'start'.
    // Returns the number of bytes actually copied; 0 when no input is available.
    int len = AvailableBytesIn;
    if (len > size)
        len = size;
    if (len == 0)
        return 0;
    AvailableBytesIn -= len;
    // The Adler32 checksum is only maintained when producing an RFC1950 (zlib)
    // stream, whose trailer carries the checksum.
    if (dstate.WantRfc1950HeaderBytes)
    {
        _Adler32 = Adler.Adler32(_Adler32, InputBuffer, NextIn, len);
    }
    Array.Copy(InputBuffer, NextIn, buf, start, len);
    NextIn += len;
    TotalBytesIn += len;
    return len;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using WebApplication1.Areas.HelpPage.ModelDescriptions;
using WebApplication1.Areas.HelpPage.Models;
namespace WebApplication1.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Sets the documentation provider for help page.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="documentationProvider">The documentation provider.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
    // Swap the registered IDocumentationProvider service for the supplied one.
    ServicesContainer services = config.Services;
    services.Replace(typeof(IDocumentationProvider), documentationProvider);
}
/// <summary>
/// Sets the objects that will be used by the formatters to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleObjects">The sample objects.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
    // Replace the generator's sample-object map wholesale.
    HelpPageSampleGenerator generator = config.GetHelpPageSampleGenerator();
    generator.SampleObjects = sampleObjects;
}
/// <summary>
/// Sets the sample request directly for the specified media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    // The "*" parameter wildcard makes this sample apply regardless of parameter set.
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample request directly for the specified media type and action with parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    // Keyed to the exact parameter names, so overloads can carry distinct samples.
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action with specific parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    // Keyed to the exact parameter names, so overloads can carry distinct samples.
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
{
    // A key built from only a media type applies to every action.
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified type and media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
    // Applies to every action whose parameter or return type matches 'type'.
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, type);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // The "*" parameter wildcard applies the mapping regardless of parameter set.
    HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    // Keyed to the exact parameter names of the target overload.
    HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // The "*" parameter wildcard applies the mapping regardless of parameter set.
    HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    // Keyed to the exact parameter names of the target overload.
    HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Gets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
    // Lazily create a single generator and cache it in the configuration's property bag.
    object generator = config.Properties.GetOrAdd(
        typeof(HelpPageSampleGenerator),
        key => new HelpPageSampleGenerator());
    return (HelpPageSampleGenerator)generator;
}
/// <summary>
/// Sets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
    // Install the supplied generator unconditionally, replacing any cached instance.
    config.Properties.AddOrUpdate(
        typeof(HelpPageSampleGenerator),
        key => sampleGenerator,
        (key, existing) => sampleGenerator);
}
/// <summary>
/// Gets the model description generator.
/// </summary>
/// <param name="config">The configuration.</param>
/// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
{
    // Lazily build the (expensive) generator once and cache it in the property bag.
    object generator = config.Properties.GetOrAdd(
        typeof(ModelDescriptionGenerator),
        key => InitializeModelDescriptionGenerator(config));
    return (ModelDescriptionGenerator)generator;
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>, or null when no ApiDescription matches
/// <paramref name="apiDescriptionId"/> (model is never assigned in that case).
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
    object model;
    // Cache key is prefixed so it cannot collide with other property-bag entries.
    string modelId = ApiModelPrefix + apiDescriptionId;
    if (!config.Properties.TryGetValue(modelId, out model))
    {
        Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
        // Friendly IDs are compared case-insensitively.
        ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
        if (apiDescription != null)
        {
            model = GenerateApiModel(apiDescription, config);
            // TryAdd: a concurrent caller may have already cached the model; losing the race is fine.
            config.Properties.TryAdd(modelId, model);
        }
    }
    return (HelpPageApiModel)model;
}
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
{
    // Assemble the model in stages: URI parameters, request body, response resource, samples.
    HelpPageApiModel apiModel = new HelpPageApiModel();
    apiModel.ApiDescription = apiDescription;
    ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
    HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
    GenerateUriParameters(apiModel, modelGenerator);
    GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
    GenerateResourceDescription(apiModel, modelGenerator);
    GenerateSamples(apiModel, sampleGenerator);
    return apiModel;
}
private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    // Populate apiModel.UriParameters from every FromUri parameter of the action.
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
    {
        if (apiParameter.Source != ApiParameterSource.FromUri)
        {
            continue;
        }
        HttpParameterDescriptor descriptor = apiParameter.ParameterDescriptor;
        Type parameterType = null;
        ModelDescription typeDescription = null;
        ComplexTypeModelDescription complexTypeDescription = null;
        if (descriptor != null)
        {
            parameterType = descriptor.ParameterType;
            typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
            complexTypeDescription = typeDescription as ComplexTypeModelDescription;
        }
        // A complex type that is NOT bindable via a TypeConverter contributes each of
        // its properties as an individual URI parameter. A TypeConverter-bindable
        // complex type (e.g. a Point with a PointConverter) binds from a single
        // string, so it is listed as one parameter instead.
        if (complexTypeDescription != null
            && !IsBindableWithTypeConverter(parameterType))
        {
            foreach (ParameterDescription property in complexTypeDescription.Properties)
            {
                apiModel.UriParameters.Add(property);
            }
        }
        else if (descriptor != null)
        {
            ParameterDescription uriParameter =
                AddParameterDescription(apiModel, apiParameter, typeDescription);
            if (!descriptor.IsOptional)
            {
                uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
            }
            object defaultValue = descriptor.DefaultValue;
            if (defaultValue != null)
            {
                uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
            }
        }
        else
        {
            Debug.Assert(descriptor == null);
            // An undeclared route parameter (no descriptor) only occurs for FromUri
            // sources; it is ignored in the request model and among resource
            // parameters, and represented here as a plain string.
            ModelDescription stringDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
            AddParameterDescription(apiModel, apiParameter, stringDescription);
        }
    }
}
private static bool IsBindableWithTypeConverter(Type parameterType)
{
    // A type is URI-bindable when its TypeConverter can convert from string;
    // null types are trivially not bindable.
    return parameterType != null
        && TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}
private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
    ApiParameterDescription apiParameter, ModelDescription typeDescription)
{
    // Build the description, register it on the model, and hand it back so the
    // caller can attach annotations.
    ParameterDescription description = new ParameterDescription();
    description.Name = apiParameter.Name;
    description.Documentation = apiParameter.Documentation;
    description.TypeDescription = typeDescription;
    apiModel.UriParameters.Add(description);
    return description;
}
private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
{
    // Derives the request-body model: either from a FromBody parameter, or (for raw
    // HttpRequestMessage parameters) from a registered "actual request type" mapping.
    // If multiple parameters match, the last one enumerated wins.
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
    {
        if (apiParameter.Source == ApiParameterSource.FromBody)
        {
            Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
            apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
            // Request documentation only comes from FromBody parameters.
            apiModel.RequestDocumentation = apiParameter.Documentation;
        }
        else if (apiParameter.ParameterDescriptor != null &&
            apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
        {
            // SetActualRequestType() registrations resolve the real payload type here.
            Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            if (parameterType != null)
            {
                apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
            }
        }
    }
}
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
    // Prefer the documented ResponseType; fall back to the action's declared return type.
    Type responseType = response.ResponseType ?? response.DeclaredType;
    if (responseType == null || responseType == typeof(void))
    {
        return;
    }
    apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
{
    // Collects request and response samples for the action. Sample-generation
    // failures must not break the help page, so any exception is captured as an
    // error message on the model instead of propagating.
    try
    {
        foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
        {
            apiModel.SampleRequests.Add(item.Key, item.Value);
            LogInvalidSampleAsError(apiModel, item.Value);
        }
        foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
        {
            apiModel.SampleResponses.Add(item.Key, item.Value);
            LogInvalidSampleAsError(apiModel, item.Value);
        }
    }
    catch (Exception e)
    {
        // Unwrap so the innermost cause is shown rather than a wrapper exception.
        apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
            "An exception has occurred while generating the sample. Exception message: {0}",
            HelpPageSampleGenerator.UnwrapException(e).Message));
    }
}
private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
{
    // Finds the first parameter that carries the request body: either FromBody, or a
    // raw HttpRequestMessage parameter. Returns false (with null outputs) when none
    // exists or the resource type cannot be resolved.
    parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
        p => p.Source == ApiParameterSource.FromBody ||
            (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));
    if (parameterDescription == null)
    {
        resourceType = null;
        return false;
    }
    // NOTE(review): a FromBody match with a null ParameterDescriptor would throw
    // NullReferenceException here -- confirm FromBody parameters always have one.
    resourceType = parameterDescription.ParameterDescriptor.ParameterType;
    if (resourceType == typeof(HttpRequestMessage))
    {
        // For raw HttpRequestMessage parameters, consult SetActualRequestType() registrations.
        HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
        resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
    }
    if (resourceType == null)
    {
        parameterDescription = null;
        return false;
    }
    return true;
}
private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
{
    // Pre-populate the generator with a description for every API's resource parameter type.
    ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
    foreach (ApiDescription api in config.Services.GetApiExplorer().ApiDescriptions)
    {
        ApiParameterDescription parameterDescription;
        Type parameterType;
        if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
        {
            modelGenerator.GetOrCreateModelDescription(parameterType);
        }
    }
    return modelGenerator;
}
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
    // Only InvalidSample instances represent generation failures worth surfacing.
    InvalidSample invalidSample = sample as InvalidSample;
    if (invalidSample == null)
    {
        return;
    }
    apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using Nini.Config;
using System;
using System.Collections.Generic;
using System.Reflection;
using OpenSim.Framework;
using OpenSim.Server.Base;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
namespace OpenSim.Region.CoreModules.ServiceConnectorsOut.Asset
{
/// <summary>
/// Region module exposing a local (in-process) asset service to scenes,
/// optionally fronted by an IImprovedAssetCache when one is registered
/// as a shared region module.
/// </summary>
public class AgentAssetServicesConnector :
        ISharedRegionModule, IAssetService
{
    private static readonly ILog m_log =
            LogManager.GetLogger(
            MethodBase.GetCurrentMethod().DeclaringType);

    // Optional asset cache; stays null when no shared-module cache exists.
    private IImprovedAssetCache m_Cache = null;

    // Concrete asset service loaded from the LocalServiceModule setting.
    private IAssetService m_AssetService;

    // True once this connector is selected in config and the service loaded.
    private bool m_Enabled = false;

    public Type ReplaceableInterface
    {
        get { return null; }
    }

    public string Name
    {
        get { return "LocalAssetServicesConnector"; }
    }

    /// <summary>
    /// Reads configuration and, when this connector is the configured
    /// AssetServices implementation, loads the local asset service plugin.
    /// </summary>
    public void Initialise(IConfigSource source)
    {
        IConfig moduleConfig = source.Configs["Modules"];
        if (moduleConfig != null)
        {
            string name = moduleConfig.GetString("AssetServices", "");
            if (name == Name)
            {
                IConfig assetConfig = source.Configs["AssetService"];
                if (assetConfig == null)
                {
                    m_log.Error("[ASSET CONNECTOR]: AssetService missing from OpenSim.ini");
                    return;
                }

                string serviceDll = assetConfig.GetString("LocalServiceModule",
                        String.Empty);

                if (serviceDll == String.Empty)
                {
                    m_log.Error("[ASSET CONNECTOR]: No LocalServiceModule named in section AssetService");
                    return;
                }

                Object[] args = new Object[] { source };
                m_AssetService =
                        ServerUtils.LoadPlugin<IAssetService>(serviceDll,
                        args);

                if (m_AssetService == null)
                {
                    m_log.Error("[ASSET CONNECTOR]: Can't load asset service");
                    return;
                }

                m_Enabled = true;
                m_log.Info("[ASSET CONNECTOR]: Local asset connector enabled");
            }
        }
    }

    public void PostInitialise()
    {
    }

    public void Close()
    {
    }

    public void AddRegion(Scene scene)
    {
        if (!m_Enabled)
            return;

        scene.RegisterModuleInterface<IAssetService>(this);
    }

    public void RemoveRegion(Scene scene)
    {
    }

    /// <summary>
    /// Picks up the region's asset cache (if any). Without a cache this
    /// module unregisters itself so calls go straight to the storage layer.
    /// </summary>
    public void RegionLoaded(Scene scene)
    {
        if (!m_Enabled)
            return;

        if (m_Cache == null)
        {
            m_Cache = scene.RequestModuleInterface<IImprovedAssetCache>();

            // Only shared-module caches are usable across regions.
            if (!(m_Cache is ISharedRegionModule))
                m_Cache = null;
        }

        m_log.InfoFormat("[ASSET CONNECTOR]: Enabled local assets for region {0}", scene.RegionInfo.RegionName);

        if (m_Cache != null)
        {
            m_log.InfoFormat("[ASSET CONNECTOR]: Enabled asset caching for region {0}", scene.RegionInfo.RegionName);
        }
        else
        {
            // Short-circuit directly to storage layer
            //
            scene.UnregisterModuleInterface<IAssetService>(this);
            scene.RegisterModuleInterface<IAssetService>(m_AssetService);
        }
    }

    public AssetBase Get(string id)
    {
        AssetBase asset = null;
        if (m_Cache != null)
            asset = m_Cache.Get(id);

        if (asset == null)
        {
            asset = m_AssetService.Get(id);
            if ((m_Cache != null) && (asset != null))
                m_Cache.Cache(asset);
        }

        return asset;
    }

    public AssetMetadata GetMetadata(string id)
    {
        AssetBase asset = null;
        if (m_Cache != null)
            asset = m_Cache.Get(id);

        if (asset != null)
            return asset.Metadata;

        asset = m_AssetService.Get(id);
        if (asset != null)
        {
            if (m_Cache != null)
                m_Cache.Cache(asset);
            return asset.Metadata;
        }

        return null;
    }

    public byte[] GetData(string id)
    {
        // FIX: previously dereferenced m_Cache unconditionally, throwing a
        // NullReferenceException when no cache module is present. Guard it
        // like every other method in this class does.
        AssetBase asset = null;
        if (m_Cache != null)
            asset = m_Cache.Get(id);

        if (asset != null)
            return asset.Data;

        asset = m_AssetService.Get(id);
        if (asset != null)
        {
            if (m_Cache != null)
                m_Cache.Cache(asset);
            return asset.Data;
        }

        return null;
    }

    public bool Get(string id, Object sender, AssetRetrieved handler)
    {
        AssetBase asset = null;
        if (m_Cache != null)
            asset = m_Cache.Get(id); // FIX: result was previously discarded, so the cache was never used

        if (asset != null)
        {
            handler.BeginInvoke(id, sender, asset, null, null);
            return true;
        }

        return m_AssetService.Get(id, sender, delegate (string assetID, Object s, AssetBase a)
        {
            if ((a != null) && (m_Cache != null))
                m_Cache.Cache(a);
            handler.BeginInvoke(assetID, s, a, null, null);
        });
    }

    public string Store(AssetBase asset)
    {
        //if (m_Cache != null)
        //    m_Cache.Cache(asset);
        //if (asset.Temporary || asset.Local)
        //    return asset.ID;
        return m_AssetService.Store(asset);
    }

    public bool UpdateContent(string id, byte[] data)
    {
        AssetBase asset = null;
        if (m_Cache != null)
            asset = m_Cache.Get(id); // FIX: result was previously discarded, so the cached copy was never refreshed

        if (asset != null)
        {
            asset.Data = data;
            if (m_Cache != null)
                m_Cache.Cache(asset);
        }

        return m_AssetService.UpdateContent(id, data);
    }

    public bool Delete(string id)
    {
        if (m_Cache != null)
            m_Cache.Expire(id);

        return m_AssetService.Delete(id);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// ReSharper disable UnusedAutoPropertyAccessor.Global
// ReSharper disable MemberCanBePrivate.Global
namespace Apache.Ignite.Core.Tests.Cache.Query
{
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Apache.Ignite.Core.Binary;
using Apache.Ignite.Core.Cache;
using Apache.Ignite.Core.Cache.Configuration;
using Apache.Ignite.Core.Cache.Query;
using Apache.Ignite.Core.Common;
using Apache.Ignite.Core.Resource;
using NUnit.Framework;
/// <summary>
/// Queries tests.
/// </summary>
public class CacheQueriesTest
{
/** Grid count. */
private const int GridCnt = 2;
/** Cache name. */
private const string CacheName = "cache";
/** Path to XML configuration. */
private const string CfgPath = "Config\\cache-query.xml";
/** Maximum amount of items in cache. */
private const int MaxItemCnt = 100;
/// <summary>
/// Fixture setup.
/// </summary>
[TestFixtureSetUp]
public void StartGrids()
{
for (int i = 0; i < GridCnt; i++)
{
Ignition.Start(new IgniteConfiguration(TestUtils.GetTestConfiguration())
{
BinaryConfiguration = new BinaryConfiguration
{
NameMapper = GetNameMapper()
},
SpringConfigUrl = CfgPath,
IgniteInstanceName = "grid-" + i
});
}
}
/// <summary>
/// Gets the name mapper.
/// </summary>
protected virtual IBinaryNameMapper GetNameMapper()
{
return new BinaryBasicNameMapper {IsSimpleName = false};
}
/// <summary>
/// Fixture teardown.
/// </summary>
[TestFixtureTearDown]
public void StopGrids()
{
Ignition.StopAll(true);
}
/// <summary>
///
/// </summary>
[SetUp]
public void BeforeTest()
{
Console.WriteLine("Test started: " + TestContext.CurrentContext.Test.Name);
}
/// <summary>
///
/// </summary>
[TearDown]
public void AfterTest()
{
var cache = Cache();
for (int i = 0; i < GridCnt; i++)
{
cache.Clear();
Assert.IsTrue(cache.IsEmpty());
}
TestUtils.AssertHandleRegistryIsEmpty(300,
Enumerable.Range(0, GridCnt).Select(x => Ignition.GetIgnite("grid-" + x)).ToArray());
Console.WriteLine("Test finished: " + TestContext.CurrentContext.Test.Name);
}
/// <summary>
/// Gets the ignite.
/// </summary>
private static IIgnite GetIgnite()
{
return Ignition.GetIgnite("grid-0");
}
/// <summary>
///
/// </summary>
/// <returns></returns>
private static ICache<int, QueryPerson> Cache()
{
return GetIgnite().GetCache<int, QueryPerson>(CacheName);
}
/// <summary>
/// Test arguments validation for SQL queries.
/// </summary>
[Test]
public void TestValidationSql()
{
#pragma warning disable 618
// 1. No sql.
Assert.Throws<ArgumentException>(() =>
{ Cache().Query(new SqlQuery(typeof(QueryPerson), null)); });
// 2. No type.
Assert.Throws<ArgumentException>(() =>
{ Cache().Query(new SqlQuery((string)null, "age >= 50")); });
#pragma warning restore 618
}
/// <summary>
/// Test arguments validation for SQL fields queries.
/// </summary>
[Test]
public void TestValidationSqlFields()
{
// 1. No sql.
Assert.Throws<ArgumentException>(() => { Cache().Query(new SqlFieldsQuery(null)); });
}
/// <summary>
/// Test arguments validation for TEXT queries.
/// </summary>
[Test]
public void TestValidationText()
{
// 1. No text.
Assert.Throws<ArgumentException>(() =>
{ Cache().Query(new TextQuery(typeof(QueryPerson), null)); });
// 2. No type.
Assert.Throws<ArgumentException>(() =>
{ Cache().Query(new TextQuery((string)null, "Ivanov")); });
}
/// <summary>
/// Cursor tests.
/// </summary>
[Test]
[SuppressMessage("ReSharper", "ReturnValueOfPureMethodIsNotUsed")]
public void TestCursor()
{
Cache().Put(1, new QueryPerson("Ivanov", 30));
Cache().Put(1, new QueryPerson("Petrov", 40));
Cache().Put(1, new QueryPerson("Sidorov", 50));
#pragma warning disable 618
SqlQuery qry = new SqlQuery(typeof(QueryPerson), "age >= 20");
#pragma warning restore 618
// 1. Test GetAll().
using (IQueryCursor<ICacheEntry<int, QueryPerson>> cursor = Cache().Query(qry))
{
cursor.GetAll();
Assert.Throws<InvalidOperationException>(() => { cursor.GetAll(); });
Assert.Throws<InvalidOperationException>(() => { cursor.GetEnumerator(); });
}
// 2. Test GetEnumerator.
using (IQueryCursor<ICacheEntry<int, QueryPerson>> cursor = Cache().Query(qry))
{
cursor.GetEnumerator();
Assert.Throws<InvalidOperationException>(() => { cursor.GetAll(); });
Assert.Throws<InvalidOperationException>(() => { cursor.GetEnumerator(); });
}
}
/// <summary>
/// Test enumerator.
/// </summary>
[Test]
[SuppressMessage("ReSharper", "UnusedVariable")]
public void TestEnumerator()
{
#pragma warning disable 618
Cache().Put(1, new QueryPerson("Ivanov", 30));
Cache().Put(2, new QueryPerson("Petrov", 40));
Cache().Put(3, new QueryPerson("Sidorov", 50));
Cache().Put(4, new QueryPerson("Unknown", 60));
// 1. Empty result set.
using (IQueryCursor<ICacheEntry<int, QueryPerson>> cursor =
Cache().Query(new SqlQuery(typeof(QueryPerson), "age = 100")))
{
IEnumerator<ICacheEntry<int, QueryPerson>> e = cursor.GetEnumerator();
Assert.Throws<InvalidOperationException>(() =>
{ ICacheEntry<int, QueryPerson> entry = e.Current; });
Assert.IsFalse(e.MoveNext());
Assert.Throws<InvalidOperationException>(() =>
{ ICacheEntry<int, QueryPerson> entry = e.Current; });
Assert.Throws<NotSupportedException>(() => e.Reset());
e.Dispose();
}
SqlQuery qry = new SqlQuery(typeof (QueryPerson), "age < 60");
Assert.AreEqual(QueryBase.DefaultPageSize, qry.PageSize);
// 2. Page size is bigger than result set.
qry.PageSize = 4;
CheckEnumeratorQuery(qry);
// 3. Page size equal to result set.
qry.PageSize = 3;
CheckEnumeratorQuery(qry);
// 4. Page size if less than result set.
qry.PageSize = 2;
CheckEnumeratorQuery(qry);
#pragma warning restore 618
}
/// <summary>
/// Test SQL query arguments passing.
/// </summary>
[Test]
public void TestSqlQueryArguments()
{
#pragma warning disable 618
Cache().Put(1, new QueryPerson("Ivanov", 30));
Cache().Put(2, new QueryPerson("Petrov", 40));
Cache().Put(3, new QueryPerson("Sidorov", 50));
// 1. Empty result set.
using (var cursor = Cache().Query(new SqlQuery(typeof(QueryPerson), "age < ?", 50)))
{
foreach (ICacheEntry<int, QueryPerson> entry in cursor.GetAll())
Assert.IsTrue(entry.Key == 1 || entry.Key == 2);
}
#pragma warning restore 618
}
/// <summary>
/// Test SQL fields query arguments passing.
/// </summary>
[Test]
public void TestSqlFieldsQueryArguments()
{
Cache().Put(1, new QueryPerson("Ivanov", 30));
Cache().Put(2, new QueryPerson("Petrov", 40));
Cache().Put(3, new QueryPerson("Sidorov", 50));
// 1. Empty result set.
using (var cursor = Cache().Query(new SqlFieldsQuery("SELECT age FROM QueryPerson WHERE age < ?", 50)))
{
foreach (var entry in cursor.GetAll())
Assert.IsTrue((int) entry[0] < 50);
}
}
/// <summary>
/// Check query result for enumerator test.
/// </summary>
/// <param name="qry">QUery.</param>
#pragma warning disable 618
private void CheckEnumeratorQuery(SqlQuery qry)
{
using (IQueryCursor<ICacheEntry<int, QueryPerson>> cursor = Cache().Query(qry))
{
bool first = false;
bool second = false;
bool third = false;
foreach (var entry in cursor)
{
if (entry.Key == 1)
{
first = true;
Assert.AreEqual("Ivanov", entry.Value.Name);
Assert.AreEqual(30, entry.Value.Age);
}
else if (entry.Key == 2)
{
second = true;
Assert.AreEqual("Petrov", entry.Value.Name);
Assert.AreEqual(40, entry.Value.Age);
}
else if (entry.Key == 3)
{
third = true;
Assert.AreEqual("Sidorov", entry.Value.Name);
Assert.AreEqual(50, entry.Value.Age);
}
else
Assert.Fail("Unexpected value: " + entry);
}
Assert.IsTrue(first && second && third);
}
}
#pragma warning restore 618
/// <summary>
/// Check SQL query.
/// </summary>
[Test]
public void TestSqlQuery([Values(true, false)] bool loc, [Values(true, false)] bool keepBinary,
[Values(true, false)] bool distrJoin)
{
var cache = Cache();
// 1. Populate cache with data, calculating expected count in parallel.
var exp = PopulateCache(cache, loc, MaxItemCnt, x => x < 50);
// 2. Validate results.
#pragma warning disable 618
var qry = new SqlQuery(typeof(QueryPerson), "age < 50", loc)
{
EnableDistributedJoins = distrJoin,
ReplicatedOnly = false,
Timeout = TimeSpan.FromSeconds(3)
};
#pragma warning restore 618
Assert.AreEqual(string.Format("SqlQuery [Sql=age < 50, Arguments=[], Local={0}, " +
"PageSize=1024, EnableDistributedJoins={1}, Timeout={2}, " +
"ReplicatedOnly=False]", loc, distrJoin, qry.Timeout), qry.ToString());
ValidateQueryResults(cache, qry, exp, keepBinary);
}
/// <summary>
/// Check SQL fields query.
/// </summary>
[Test]
public void TestSqlFieldsQuery([Values(true, false)] bool loc, [Values(true, false)] bool distrJoin,
[Values(true, false)] bool enforceJoinOrder, [Values(true, false)] bool lazy)
{
int cnt = MaxItemCnt;
var cache = Cache();
// 1. Populate cache with data, calculating expected count in parallel.
var exp = PopulateCache(cache, loc, cnt, x => x < 50);
// 2. Validate results.
var qry = new SqlFieldsQuery("SELECT name, age FROM QueryPerson WHERE age < 50")
{
EnableDistributedJoins = distrJoin,
EnforceJoinOrder = enforceJoinOrder,
Colocated = !distrJoin,
#pragma warning disable 618
ReplicatedOnly = false,
#pragma warning restore 618
Local = loc,
Timeout = TimeSpan.FromSeconds(2),
Lazy = lazy
};
using (var cursor = cache.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
foreach (var entry in cursor.GetAll())
{
Assert.AreEqual(2, entry.Count);
Assert.AreEqual(entry[0].ToString(), entry[1].ToString());
exp0.Remove((int)entry[1]);
}
Assert.AreEqual(0, exp0.Count);
Assert.AreEqual(new[] {"NAME", "AGE"}, cursor.FieldNames);
}
// Test old API as well.
#pragma warning disable 618
using (var cursor = cache.QueryFields(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
foreach (var entry in cursor)
{
Assert.AreEqual(entry[0].ToString(), entry[1].ToString());
exp0.Remove((int)entry[1]);
}
Assert.AreEqual(0, exp0.Count);
}
#pragma warning restore 618
}
/// <summary>
/// Tests that query configuration propagates from Spring XML correctly.
/// </summary>
[Test]
public void TestQueryConfiguration()
{
var qe = Cache().GetConfiguration().QueryEntities.Single();
Assert.AreEqual(typeof(QueryPerson).FullName, qe.ValueTypeName);
var age = qe.Fields.First();
Assert.AreEqual("age", age.Name);
Assert.AreEqual(typeof(int), age.FieldType);
Assert.IsFalse(age.IsKeyField);
var name = qe.Fields.Last();
Assert.AreEqual("name", name.Name);
Assert.AreEqual(typeof(string), name.FieldType);
Assert.IsFalse(name.IsKeyField);
var textIdx = qe.Indexes.First();
Assert.AreEqual(QueryIndexType.FullText, textIdx.IndexType);
Assert.AreEqual("name", textIdx.Fields.Single().Name);
Assert.AreEqual(QueryIndex.DefaultInlineSize, textIdx.InlineSize);
var sqlIdx = qe.Indexes.Last();
Assert.AreEqual(QueryIndexType.Sorted, sqlIdx.IndexType);
Assert.AreEqual("age", sqlIdx.Fields.Single().Name);
Assert.AreEqual(2345, sqlIdx.InlineSize);
}
/// <summary>
/// Check text query.
/// </summary>
[Test]
public void TestTextQuery([Values(true, false)] bool loc, [Values(true, false)] bool keepBinary)
{
var cache = Cache();
// 1. Populate cache with data, calculating expected count in parallel.
var exp = PopulateCache(cache, loc, MaxItemCnt, x => x.ToString().StartsWith("1"));
// 2. Validate results.
var qry = new TextQuery(typeof(QueryPerson), "1*", loc);
ValidateQueryResults(cache, qry, exp, keepBinary);
}
/// <summary>
/// Check scan query.
/// </summary>
[Test]
public void TestScanQuery([Values(true, false)] bool loc)
{
CheckScanQuery<QueryPerson>(loc, false);
}
/// <summary>
/// Check scan query in binary mode.
/// </summary>
[Test]
public void TestScanQueryBinary([Values(true, false)] bool loc)
{
CheckScanQuery<IBinaryObject>(loc, true);
}
/// <summary>
/// Check scan query with partitions.
/// </summary>
[Test]
public void TestScanQueryPartitions([Values(true, false)] bool loc)
{
CheckScanQueryPartitions<QueryPerson>(loc, false);
}
/// <summary>
/// Check scan query with partitions in binary mode.
/// </summary>
[Test]
public void TestScanQueryPartitionsBinary([Values(true, false)] bool loc)
{
CheckScanQueryPartitions<IBinaryObject>(loc, true);
}
/// <summary>
/// Checks that scan query is thread-safe and throws correct exception when disposed from another thread.
/// </summary>
[Test]
public void TestScanQueryDisposedFromAnotherThreadThrowsObjectDisposedException()
{
var cache = GetIgnite().GetOrCreateCache<int, int>(TestUtils.TestName);
const int totalCount = 10000;
cache.PutAll(Enumerable.Range(1, totalCount).ToDictionary(x => x, x => x));
var scanQuery = new ScanQuery<int, int>
{
Filter = new ScanQueryFilter<int> {AcceptAll = true}
};
var cursor = cache.Query(scanQuery);
long count = 0;
Task.Factory.StartNew(() =>
{
// ReSharper disable once AccessToModifiedClosure
while (Interlocked.Read(ref count) < totalCount / 10) { }
cursor.Dispose();
});
Assert.Throws<ObjectDisposedException>(() =>
{
foreach (var unused in cursor)
{
Interlocked.Increment(ref count);
}
});
}
/// <summary>
/// Tests that query attempt on non-indexed cache causes an exception.
/// </summary>
[Test]
public void TestIndexingDisabledError()
{
var cache = GetIgnite().GetOrCreateCache<int, QueryPerson>("nonindexed_cache");
// Text query.
var err = Assert.Throws<IgniteException>(() => cache.Query(new TextQuery(typeof(QueryPerson), "1*")));
Assert.AreEqual("Indexing is disabled for cache: nonindexed_cache. " +
"Use setIndexedTypes or setTypeMetadata methods on CacheConfiguration to enable.", err.Message);
// SQL query.
#pragma warning disable 618
err = Assert.Throws<IgniteException>(() => cache.Query(new SqlQuery(typeof(QueryPerson), "age < 50")));
#pragma warning restore 618
Assert.AreEqual("Failed to find SQL table for type: QueryPerson", err.Message);
}
/// <summary>
/// Check scan query.
/// </summary>
/// <param name="loc">Local query flag.</param>
/// <param name="keepBinary">Keep binary flag.</param>
private static void CheckScanQuery<TV>(bool loc, bool keepBinary)
{
var cache = Cache();
int cnt = MaxItemCnt;
// No predicate
var exp = PopulateCache(cache, loc, cnt, x => true);
var qry = new ScanQuery<int, TV>();
ValidateQueryResults(cache, qry, exp, keepBinary);
// Serializable
exp = PopulateCache(cache, loc, cnt, x => x < 50);
qry = new ScanQuery<int, TV>(new ScanQueryFilter<TV>());
ValidateQueryResults(cache, qry, exp, keepBinary);
// Binarizable
exp = PopulateCache(cache, loc, cnt, x => x < 50);
qry = new ScanQuery<int, TV>(new BinarizableScanQueryFilter<TV>());
ValidateQueryResults(cache, qry, exp, keepBinary);
// Invalid
exp = PopulateCache(cache, loc, cnt, x => x < 50);
qry = new ScanQuery<int, TV>(new InvalidScanQueryFilter<TV>());
Assert.Throws<BinaryObjectException>(() => ValidateQueryResults(cache, qry, exp, keepBinary));
// Exception
exp = PopulateCache(cache, loc, cnt, x => x < 50);
qry = new ScanQuery<int, TV>(new ScanQueryFilter<TV> {ThrowErr = true});
var ex = Assert.Throws<IgniteException>(() => ValidateQueryResults(cache, qry, exp, keepBinary));
Assert.AreEqual(ScanQueryFilter<TV>.ErrMessage, ex.Message);
}
/// <summary>
/// Checks scan query with partitions.
/// </summary>
/// <param name="loc">Local query flag.</param>
/// <param name="keepBinary">Keep binary flag.</param>
private void CheckScanQueryPartitions<TV>(bool loc, bool keepBinary)
{
StopGrids();
StartGrids();
var cache = Cache();
int cnt = MaxItemCnt;
var aff = cache.Ignite.GetAffinity(CacheName);
var exp = PopulateCache(cache, loc, cnt, x => true); // populate outside the loop (slow)
for (var part = 0; part < aff.Partitions; part++)
{
//var exp0 = new HashSet<int>(exp.Where(x => aff.Partition(x) == part)); // filter expected keys
var exp0 = new HashSet<int>();
foreach (var x in exp)
if (aff.GetPartition(x) == part)
exp0.Add(x);
var qry = new ScanQuery<int, TV> { Partition = part };
ValidateQueryResults(cache, qry, exp0, keepBinary);
}
// Partitions with predicate
exp = PopulateCache(cache, loc, cnt, x => x < 50); // populate outside the loop (slow)
for (var part = 0; part < aff.Partitions; part++)
{
//var exp0 = new HashSet<int>(exp.Where(x => aff.Partition(x) == part)); // filter expected keys
var exp0 = new HashSet<int>();
foreach (var x in exp)
if (aff.GetPartition(x) == part)
exp0.Add(x);
var qry = new ScanQuery<int, TV>(new ScanQueryFilter<TV>()) { Partition = part };
ValidateQueryResults(cache, qry, exp0, keepBinary);
}
}
/// <summary>
/// Tests custom schema name.
/// </summary>
[Test]
public void TestCustomSchema()
{
var doubles = GetIgnite().GetOrCreateCache<int, double>(new CacheConfiguration("doubles",
new QueryEntity(typeof(int), typeof(double))));
var strings = GetIgnite().GetOrCreateCache<int, string>(new CacheConfiguration("strings",
new QueryEntity(typeof(int), typeof(string))));
doubles[1] = 36.6;
strings[1] = "foo";
// Default schema.
var res = doubles.Query(new SqlFieldsQuery(
"select S._val from double as D join \"strings\".string as S on S._key = D._key"))
.Select(x => (string) x[0])
.Single();
Assert.AreEqual("foo", res);
// Custom schema.
res = doubles.Query(new SqlFieldsQuery(
"select S._val from \"doubles\".double as D join string as S on S._key = D._key")
{
Schema = strings.Name
})
.Select(x => (string)x[0])
.Single();
Assert.AreEqual("foo", res);
}
/// <summary>
/// Tests the distributed joins flag.
/// </summary>
[Test]
public void TestDistributedJoins()
{
var cache = GetIgnite().GetOrCreateCache<int, QueryPerson>(
new CacheConfiguration("replicatedCache")
{
QueryEntities = new[]
{
new QueryEntity(typeof(int), typeof(QueryPerson))
{
Fields = new[] {new QueryField("age", "int")}
}
}
});
const int count = 100;
cache.PutAll(Enumerable.Range(0, count).ToDictionary(x => x, x => new QueryPerson("Name" + x, x)));
// Test non-distributed join: returns partial results
var sql = "select T0.Age from QueryPerson as T0 " +
"inner join QueryPerson as T1 on ((? - T1.Age - 1) = T0._key)";
var res = cache.Query(new SqlFieldsQuery(sql, count)).GetAll().Distinct().Count();
Assert.Greater(res, 0);
Assert.Less(res, count);
// Test distributed join: returns complete results
res = cache.Query(new SqlFieldsQuery(sql, count) {EnableDistributedJoins = true})
.GetAll().Distinct().Count();
Assert.AreEqual(count, res);
}
/// <summary>
/// Tests the get configuration.
/// </summary>
[Test]
public void TestGetConfiguration()
{
var entity = Cache().GetConfiguration().QueryEntities.Single();
var ageField = entity.Fields.Single(x => x.Name == "age");
Assert.AreEqual(typeof(int), ageField.FieldType);
Assert.IsFalse(ageField.NotNull);
Assert.IsFalse(ageField.IsKeyField);
var nameField = entity.Fields.Single(x => x.Name == "name");
Assert.AreEqual(typeof(string), nameField.FieldType);
Assert.IsTrue(nameField.NotNull);
Assert.IsFalse(nameField.IsKeyField);
}
/// <summary>
/// Tests custom key and value field names.
/// </summary>
[Test]
public void TestCustomKeyValueFieldNames()
{
// Check select * with default config - does not include _key, _val.
var cache = Cache();
cache[1] = new QueryPerson("Joe", 48);
var row = cache.Query(new SqlFieldsQuery("select * from QueryPerson")).GetAll()[0];
Assert.AreEqual(2, row.Count);
Assert.AreEqual(48, row[0]);
Assert.AreEqual("Joe", row[1]);
// Check select * with custom names - fields are included.
cache = GetIgnite().GetOrCreateCache<int, QueryPerson>(
new CacheConfiguration("customKeyVal")
{
QueryEntities = new[]
{
new QueryEntity(typeof(int), typeof(QueryPerson))
{
Fields = new[]
{
new QueryField("age", "int"),
new QueryField("FullKey", "int"),
new QueryField("FullVal", "QueryPerson")
},
KeyFieldName = "FullKey",
ValueFieldName = "FullVal"
}
}
});
cache[1] = new QueryPerson("John", 33);
row = cache.Query(new SqlFieldsQuery("select * from QueryPerson")).GetAll()[0];
Assert.AreEqual(3, row.Count);
Assert.AreEqual(33, row[0]);
Assert.AreEqual(1, row[1]);
var person = (QueryPerson) row[2];
Assert.AreEqual("John", person.Name);
// Check explicit select.
row = cache.Query(new SqlFieldsQuery("select FullKey from QueryPerson")).GetAll()[0];
Assert.AreEqual(1, row[0]);
}
/// <summary>
/// Tests query timeouts.
/// </summary>
[Test]
[Category(TestUtils.CategoryIntensive)]
public void TestSqlQueryTimeout()
{
var cache = Cache();
PopulateCache(cache, false, 30000, x => true);
#pragma warning disable 618
var sqlQry = new SqlQuery(typeof(QueryPerson), "WHERE age < 2000")
{
Timeout = TimeSpan.FromMilliseconds(1)
};
#pragma warning restore 618
// ReSharper disable once ReturnValueOfPureMethodIsNotUsed
var ex = Assert.Throws<CacheException>(() => cache.Query(sqlQry).ToArray());
Assert.IsTrue(ex.ToString().Contains("QueryCancelledException: The query was cancelled while executing."));
}
/// <summary>
/// Tests fields query timeouts.
/// </summary>
[Test]
[Category(TestUtils.CategoryIntensive)]
public void TestSqlFieldsQueryTimeout()
{
var cache = Cache();
PopulateCache(cache, false, 20000, x => true);
var fieldsQry = new SqlFieldsQuery("SELECT * FROM QueryPerson WHERE age < 5000 AND name like '%0%'")
{
Timeout = TimeSpan.FromMilliseconds(3)
};
// ReSharper disable once ReturnValueOfPureMethodIsNotUsed
var ex = Assert.Throws<CacheException>(() => cache.Query(fieldsQry).ToArray());
Assert.IsTrue(ex.ToString().Contains("QueryCancelledException: The query was cancelled while executing."));
}
/// <summary>
/// Tests the FieldNames property.
/// </summary>
[Test]
public void TestFieldNames()
{
var cache = Cache();
PopulateCache(cache, false, 5, x => true);
// Get before iteration.
var qry = new SqlFieldsQuery("SELECT * FROM QueryPerson");
var cur = cache.Query(qry);
var names = cur.FieldNames;
Assert.AreEqual(new[] {"AGE", "NAME" }, names);
cur.Dispose();
Assert.AreSame(names, cur.FieldNames);
Assert.Throws<NotSupportedException>(() => cur.FieldNames.Add("x"));
// Custom order, key-val, get after iteration.
qry.Sql = "SELECT NAME, _key, AGE, _val FROM QueryPerson";
cur = cache.Query(qry);
cur.GetAll();
Assert.AreEqual(new[] { "NAME", "_KEY", "AGE", "_VAL" }, cur.FieldNames);
// Get after disposal.
qry.Sql = "SELECT 1, AGE FROM QueryPerson";
cur = cache.Query(qry);
cur.Dispose();
Assert.AreEqual(new[] { "1", "AGE" }, cur.FieldNames);
}
/// <summary>
/// Tests the FieldsMetadata property.
/// </summary>
[Test]
public void TestFieldsMetadata()
{
var cache = Cache();
PopulateCache(cache, false, 5, x => true);
// Get before iteration.
var qry = new SqlFieldsQuery("SELECT * FROM QueryPerson");
var cur = cache.Query(qry);
var metas = cur.Fields;
ValidateFieldsMetadata(
metas,
new[] {"AGE", "NAME"},
new[] {typeof(int), typeof(string)},
new[] {"java.lang.Integer", "java.lang.String"}
);
cur.Dispose();
Assert.AreSame(metas, cur.Fields);
Assert.Throws<NotSupportedException>(() => cur.Fields.Add(default(IQueryCursorField)));
// Custom order, key-val, get after iteration.
qry.Sql = "SELECT NAME, _key, AGE, _val FROM QueryPerson";
cur = cache.Query(qry);
cur.GetAll();
ValidateFieldsMetadata(
cur.Fields,
new[] {"NAME", "_KEY", "AGE", "_VAL"},
new[] {typeof(string), typeof(object), typeof(int), typeof(object)},
new[] {"java.lang.String", "java.lang.Object", "java.lang.Integer", "java.lang.Object"}
);
// Get after disposal.
qry.Sql = "SELECT 1, AGE FROM QueryPerson";
cur = cache.Query(qry);
cur.Dispose();
ValidateFieldsMetadata(
cur.Fields,
new[] {"1", "AGE"},
new[] {typeof(int), typeof(int)},
new[] {"java.lang.Integer", "java.lang.Integer"}
);
}
/// <summary>
/// Tests <see cref="SqlFieldsQuery.Partitions"/> argument propagation and validation.
/// </summary>
[Test]
public void TestPartitionsValidation()
{
var cache = Cache();
var qry = new SqlFieldsQuery("SELECT * FROM QueryPerson") { Partitions = new int[0] };
var ex = Assert.Throws<ArgumentException>(() => cache.Query(qry).GetAll());
StringAssert.EndsWith("Partitions must not be empty.", ex.Message);
qry.Partitions = new[] {-1, -2};
ex = Assert.Throws<ArgumentException>(() => cache.Query(qry).GetAll());
StringAssert.EndsWith("Illegal partition", ex.Message);
}
/// <summary>
/// Tests <see cref="SqlFieldsQuery.UpdateBatchSize"/> argument propagation and validation.
/// </summary>
[Test]
public void TestUpdateBatchSizeValidation()
{
var cache = Cache();
var qry = new SqlFieldsQuery("SELECT * FROM QueryPerson") { UpdateBatchSize = -1 };
var ex = Assert.Throws<ArgumentException>(() => cache.Query(qry).GetAll());
StringAssert.EndsWith("updateBatchSize cannot be lower than 1", ex.Message);
}
/// <summary>
/// Validates fields metadata collection
/// </summary>
/// <param name="metadata">Metadata</param>
/// <param name="expectedNames">Expected field names</param>
/// <param name="expectedTypes">Expected field types</param>
/// <param name="expectedJavaTypeNames">Expected java type names</param>
private static void ValidateFieldsMetadata(
IList<IQueryCursorField> metadata,
string[] expectedNames,
Type[] expectedTypes,
string[] expectedJavaTypeNames
)
{
Assert.AreEqual(expectedNames, metadata.Select(m => m.Name));
Assert.AreEqual(expectedTypes, metadata.Select(m => m.Type));
Assert.AreEqual(expectedJavaTypeNames, metadata.Select(m => m.JavaTypeName));
}
/// <summary>
/// Validates the query results.
/// </summary>
/// <param name="cache">Cache.</param>
/// <param name="qry">Query.</param>
/// <param name="exp">Expected keys.</param>
/// <param name="keepBinary">Keep binary flag.</param>
private static void ValidateQueryResults(ICache<int, QueryPerson> cache, QueryBase qry, HashSet<int> exp,
bool keepBinary)
{
if (keepBinary)
{
var cache0 = cache.WithKeepBinary<int, IBinaryObject>();
using (var cursor = cache0.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
var all = new List<ICacheEntry<int, object>>();
foreach (var entry in cursor.GetAll())
{
all.Add(entry);
Assert.AreEqual(entry.Key.ToString(), entry.Value.GetField<string>("name"));
Assert.AreEqual(entry.Key, entry.Value.GetField<int>("age"));
exp0.Remove(entry.Key);
}
AssertMissingExpectedKeys(exp0, cache, all);
}
using (var cursor = cache0.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
var all = new List<ICacheEntry<int, object>>();
foreach (var entry in cursor)
{
all.Add(entry);
Assert.AreEqual(entry.Key.ToString(), entry.Value.GetField<string>("name"));
Assert.AreEqual(entry.Key, entry.Value.GetField<int>("age"));
exp0.Remove(entry.Key);
}
AssertMissingExpectedKeys(exp0, cache, all);
}
}
else
{
using (var cursor = cache.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
var all = new List<ICacheEntry<int, object>>();
foreach (var entry in cursor.GetAll())
{
all.Add(entry);
Assert.AreEqual(entry.Key.ToString(), entry.Value.Name);
Assert.AreEqual(entry.Key, entry.Value.Age);
exp0.Remove(entry.Key);
}
AssertMissingExpectedKeys(exp0, cache, all);
}
using (var cursor = cache.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
var all = new List<ICacheEntry<int, object>>();
foreach (var entry in cursor)
{
all.Add(entry);
Assert.AreEqual(entry.Key.ToString(), entry.Value.Name);
Assert.AreEqual(entry.Key, entry.Value.Age);
exp0.Remove(entry.Key);
}
AssertMissingExpectedKeys(exp0, cache, all);
}
}
}
/// <summary>
/// Fails the test when any expected key was not returned by the query.
/// For every missing key the failure message reports whether the key currently
/// exists in the cache, its affinity partition, and that partition's nodes,
/// followed by the full list of keys the query actually returned.
/// </summary>
private static void AssertMissingExpectedKeys(ICollection<int> exp, ICache<int, QueryPerson> cache,
    IList<ICacheEntry<int, object>> all)
{
    if (exp.Count == 0)
        return;

    var msg = new StringBuilder();
    var affinity = cache.Ignite.GetAffinity(cache.Name);

    foreach (var missingKey in exp)
    {
        var partition = affinity.GetPartition(missingKey);

        msg.AppendFormat(
            "Query did not return expected key '{0}' (exists: {1}), partition '{2}', partition nodes: ",
            missingKey, cache.Get(missingKey) != null, partition);

        foreach (var node in affinity.MapPartitionToPrimaryAndBackups(partition))
            msg.Append(node).Append(" ");

        msg.AppendLine(";");
    }

    msg.Append("Returned keys: ");

    foreach (var entry in all)
        msg.Append(entry.Key).Append(" ");

    msg.AppendLine(";");

    Assert.Fail(msg.ToString());
}
/// <summary>
/// Fills the cache with <paramref name="cnt"/> puts of random keys in [0, cnt)
/// (duplicates overwrite, so the cache may hold fewer than cnt entries) and
/// returns the set of stored keys that pass the given filter.
/// </summary>
/// <param name="cache">The cache to populate.</param>
/// <param name="loc">When true, only locally-primary entries are considered for the expected set.</param>
/// <param name="cnt">Number of put operations to perform.</param>
/// <param name="expectedEntryFilter">Predicate selecting which stored keys the query is expected to return.</param>
/// <returns>Expected result key set.</returns>
private static HashSet<int> PopulateCache(ICache<int, QueryPerson> cache, bool loc, int cnt,
    Func<int, bool> expectedEntryFilter)
{
    var random = new Random();

    for (var i = 0; i < cnt; i++)
    {
        // QueryPerson invariant relied upon by ValidateQueryResults: name == key.ToString(), age == key.
        var key = random.Next(cnt);
        cache.Put(key, new QueryPerson(key.ToString(), key));
    }

    var source = loc ? cache.GetLocalEntries(CachePeekMode.Primary) : cache;

    return new HashSet<int>(source.Select(e => e.Key).Where(expectedEntryFilter));
}
}
/// <summary>
/// Test person entity stored in the cache and queried by the tests.
/// </summary>
public class QueryPerson
{
    /// <summary>
    /// Initializes a new instance with the given name and age.
    /// </summary>
    /// <param name="name">Name.</param>
    /// <param name="age">Age; stored modulo 2000 so the derived birthday stays in DateTime range.</param>
    public QueryPerson(string name, int age)
    {
        Name = name;
        Age = age % 2000;
        Birthday = DateTime.UtcNow.AddYears(-Age);
    }

    /// <summary>
    /// Gets or sets the name.
    /// </summary>
    public string Name { get; set; }

    /// <summary>
    /// Gets or sets the age (always in [0, 2000) after construction).
    /// </summary>
    public int Age { get; set; }

    /// <summary>
    /// Gets or sets the birthday, derived from the age at construction time.
    /// </summary>
    [QuerySqlField] // Enforce Timestamp serialization
    public DateTime Birthday { get; set; }
}
/// <summary>
/// Query filter used by scan-query tests; can be configured to throw or to accept all entries.
/// </summary>
[Serializable]
public class ScanQueryFilter<TV> : ICacheEntryFilter<int, TV>
{
    // Message of the exception thrown from Invoke when ThrowErr is set.
    public const string ErrMessage = "Error in ScanQueryFilter.Invoke";

    // When set, Invoke throws instead of filtering.
    public bool ThrowErr { get; set; }

    // When set, Invoke accepts every entry regardless of its key.
    public bool AcceptAll { get; set; }

    // Resource-injection test: the Ignite instance is expected to be injected before Invoke runs.
    [InstanceResource]
    public IIgnite Ignite { get; set; }

    /** <inheritdoc /> */
    public bool Invoke(ICacheEntry<int, TV> entry)
    {
        // Verify that resource injection happened.
        Assert.IsNotNull(Ignite);

        if (ThrowErr)
            throw new Exception(ErrMessage);

        // Accept keys below 50, or everything when AcceptAll is set.
        return entry.Key < 50 || AcceptAll;
    }
}
/// <summary>
/// Scan query filter with custom binary serialization.
/// Note: only <see cref="ScanQueryFilter{TV}.ThrowErr"/> is round-tripped;
/// AcceptAll and the injected Ignite instance are intentionally not written.
/// </summary>
public class BinarizableScanQueryFilter<TV> : ScanQueryFilter<TV>, IBinarizable
{
    /** <inheritdoc /> */
    public void WriteBinary(IBinaryWriter writer)
    {
        var w = writer.GetRawWriter();

        w.WriteBoolean(ThrowErr);
    }

    /** <inheritdoc /> */
    public void ReadBinary(IBinaryReader reader)
    {
        var r = reader.GetRawReader();

        ThrowErr = r.ReadBoolean();
    }
}
/// <summary>
/// Filter that can't be serialized: both binary serialization callbacks throw,
/// letting tests verify error propagation from filter marshalling.
/// </summary>
public class InvalidScanQueryFilter<TV> : ScanQueryFilter<TV>, IBinarizable
{
    // Always fails serialization with a known message.
    public void WriteBinary(IBinaryWriter writer)
    {
        throw new BinaryObjectException("Expected");
    }

    // Always fails deserialization with a known message.
    public void ReadBinary(IBinaryReader reader)
    {
        throw new BinaryObjectException("Expected");
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
// Represents the policy associated with some piece of evidence
//
using System.Diagnostics.Contracts;
namespace System.Security.Policy {
using System;
using System.Security;
using System.Security.Util;
using Math = System.Math;
using System.Collections;
using System.Collections.Generic;
using System.Security.Permissions;
using System.Text;
using System.Globalization;
[Serializable]
[Flags]
[System.Runtime.InteropServices.ComVisible(true)]
public enum PolicyStatementAttribute
{
    // No special handling for this policy statement.
    Nothing = 0x0,
    // This statement's grant set is used exclusively; when merged via InplaceUnion,
    // an exclusive child replaces the accumulated grant set (two exclusives throw).
    Exclusive = 0x01,
    // Level-final marker; interpreted by the policy evaluation engine outside this class.
    LevelFinal = 0x02,
    // Combination of Exclusive and LevelFinal; also serves as the valid-bits mask.
    All = 0x03,
}
[Serializable]
[System.Runtime.InteropServices.ComVisible(true)]
sealed public class PolicyStatement : ISecurityPolicyEncodable, ISecurityEncodable
{
    // The PermissionSet associated with this policy
    internal PermissionSet m_permSet;

#if FEATURE_CAS_POLICY
    // Evidence which was not verified but which was required to generate this policy statement.
    // This is not serialized, since once we serialize we lose the ability to verify the evidence,
    // meaning that restoring this state is meaningless.
    [NonSerialized]
    private List<IDelayEvaluatedEvidence> m_dependentEvidence;
#endif

    // The bitfield of inheritance properties associated with this policy
    internal PolicyStatementAttribute m_attributes;

    // Creates an empty statement: no permission set, no attributes.
    internal PolicyStatement()
    {
        m_permSet = null;
        m_attributes = PolicyStatementAttribute.Nothing;
    }

    // Creates a statement with a copy of the given permission set and no attributes.
    public PolicyStatement( PermissionSet permSet )
        : this( permSet, PolicyStatementAttribute.Nothing )
    {
    }

    // Creates a statement with a copy of the given permission set (empty set when null)
    // and the given attributes; ValidProperties throws on out-of-range attribute bits.
    public PolicyStatement( PermissionSet permSet, PolicyStatementAttribute attributes )
    {
        if (permSet == null)
        {
            m_permSet = new PermissionSet( false );
        }
        else
        {
            m_permSet = permSet.Copy();
        }
        if (ValidProperties( attributes ))
        {
            m_attributes = attributes;
        }
    }

    // Internal constructor allowing the caller to choose whether the permission set is copied.
    private PolicyStatement( PermissionSet permSet, PolicyStatementAttribute attributes, bool copy )
    {
        if (permSet != null)
        {
            if (copy)
                m_permSet = permSet.Copy();
            else
                m_permSet = permSet;
        }
        else
        {
            m_permSet = new PermissionSet( false );
        }
        m_attributes = attributes;
    }

    // Gets a defensive copy of / replaces (by copy) the statement's permission set.
    // Access is synchronized on the instance (historical BCL convention in this class).
    public PermissionSet PermissionSet
    {
        get
        {
            lock (this)
            {
                return m_permSet.Copy();
            }
        }

        set
        {
            lock (this)
            {
                if (value == null)
                {
                    m_permSet = new PermissionSet( false );
                }
                else
                {
                    m_permSet = value.Copy();
                }
            }
        }
    }

    // Stores the given set directly, without copying and without taking the lock.
    internal void SetPermissionSetNoCopy( PermissionSet permSet )
    {
        m_permSet = permSet;
    }

    // Returns the live (uncopied) permission set, synchronized on the instance.
    internal PermissionSet GetPermissionSetNoCopy()
    {
        lock (this)
        {
            return m_permSet;
        }
    }

    // Gets/sets the attribute bits; the setter validates via ValidProperties (throws on invalid bits).
    public PolicyStatementAttribute Attributes
    {
        get
        {
            return m_attributes;
        }

        set
        {
            if (ValidProperties( value ))
            {
                m_attributes = value;
            }
        }
    }

    // Deep copy: the private constructor copies the permission set; dependent evidence
    // (when present) is copied into a new list.
    public PolicyStatement Copy()
    {
        PolicyStatement copy = new PolicyStatement(m_permSet, Attributes, true); // The PolicyStatement .ctor will copy the permission set

#if FEATURE_CAS_POLICY
        if (HasDependentEvidence)
        {
            copy.m_dependentEvidence = new List<IDelayEvaluatedEvidence>(m_dependentEvidence);
        }
#endif

        return copy;
    }

    // Human-readable space-separated list of the set attribute flags ("Exclusive", "LevelFinal").
    public String AttributeString
    {
        get
        {
            StringBuilder sb = new StringBuilder();

            bool first = true;

            if (GetFlag((int) PolicyStatementAttribute.Exclusive ))
            {
                sb.Append( "Exclusive" );
                first = false;
            }

            if (GetFlag((int) PolicyStatementAttribute.LevelFinal ))
            {
                if (!first)
                    sb.Append( " " );
                sb.Append( "LevelFinal" );
            }

            return sb.ToString();
        }
    }

    // Returns true when no bits outside PolicyStatementAttribute.All are set; throws otherwise.
    private static bool ValidProperties( PolicyStatementAttribute attributes )
    {
        if ((attributes & ~(PolicyStatementAttribute.All)) == 0)
        {
            return true;
        }
        else
        {
            throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidFlag" ) );
        }
    }

    // Tests whether the given attribute bit(s) are set in m_attributes.
    private bool GetFlag( int flag )
    {
        return (flag & (int)m_attributes) != 0;
    }

#if FEATURE_CAS_POLICY
    /// <summary>
    /// Gets all of the delay evaluated evidence which needs to be verified before this policy can
    /// be used.
    /// </summary>
    internal IEnumerable<IDelayEvaluatedEvidence> DependentEvidence
    {
        get
        {
            BCLDebug.Assert(HasDependentEvidence, "HasDependentEvidence");
            return m_dependentEvidence.AsReadOnly();
        }
    }

    /// <summary>
    /// Determine if this policy dependent upon the evaluation of any delay evaluated evidence
    /// </summary>
    internal bool HasDependentEvidence
    {
        get { return m_dependentEvidence != null && m_dependentEvidence.Count > 0; }
    }

    /// <summary>
    /// Add evidence which this policy statement is depending upon being verified to be valid.
    /// </summary>
    internal void AddDependentEvidence(IDelayEvaluatedEvidence dependentEvidence)
    {
        BCLDebug.Assert(dependentEvidence != null, "dependentEvidence != null");

        if (m_dependentEvidence == null)
        {
            m_dependentEvidence = new List<IDelayEvaluatedEvidence>();
        }

        m_dependentEvidence.Add(dependentEvidence);
    }
#endif

    /// <summary>
    /// Union a child policy statement into this policy statement
    /// </summary>
    internal void InplaceUnion(PolicyStatement childPolicy)
    {
        BCLDebug.Assert(childPolicy != null, "childPolicy != null");

        // Two exclusive statements cannot be merged.
        if (((Attributes & childPolicy.Attributes) & PolicyStatementAttribute.Exclusive) == PolicyStatementAttribute.Exclusive)
        {
            throw new PolicyException(Environment.GetResourceString( "Policy_MultipleExclusive" ));
        }

#if FEATURE_CAS_POLICY
        // If our code group generated a grant set based upon unverified evidence, or it generated a grant
        // set strictly less than that of a child group based upon unverified evidence, we need to keep
        // track of any unverified evidence our child group has.
        if (childPolicy.HasDependentEvidence)
        {
            // Child evidence matters when the child's grant is a strict superset of ours.
            bool childEvidenceNeedsVerification = m_permSet.IsSubsetOf(childPolicy.GetPermissionSetNoCopy()) &&
                                                  !childPolicy.GetPermissionSetNoCopy().IsSubsetOf(m_permSet);

            if (HasDependentEvidence || childEvidenceNeedsVerification)
            {
                if (m_dependentEvidence == null)
                {
                    m_dependentEvidence = new List<IDelayEvaluatedEvidence>();
                }

                m_dependentEvidence.AddRange(childPolicy.DependentEvidence);
            }
        }
#endif

        // We need to merge together our grant set and attributes.  The result of this merge is
        // dependent upon if we're merging a child marked exclusive or not.  If the child is not
        // exclusive, we need to union in its grant set and or in its attributes.  However, if the child
        // is exclusive then it is the only code group which should have an effect on the resulting
        // grant set and therefore our grant should be ignored.
        if ((childPolicy.Attributes & PolicyStatementAttribute.Exclusive) == PolicyStatementAttribute.Exclusive)
        {
            m_permSet = childPolicy.GetPermissionSetNoCopy();
            Attributes = childPolicy.Attributes;
        }
        else
        {
            m_permSet.InplaceUnion(childPolicy.GetPermissionSetNoCopy());
            Attributes = Attributes | childPolicy.Attributes;
        }
    }

#if FEATURE_CAS_POLICY
    public SecurityElement ToXml()
    {
        return ToXml( null );
    }

    public void FromXml( SecurityElement et )
    {
        FromXml( et, null );
    }

    public SecurityElement ToXml( PolicyLevel level )
    {
        return ToXml( level, false );
    }

    // Serializes the statement to a "PolicyStatement" security element.  When the permission set
    // is a named set known to the given policy level, only its name is saved; otherwise the full
    // set is serialized (via InternalToXml when useInternal is set).
    internal SecurityElement ToXml( PolicyLevel level, bool useInternal )
    {
        SecurityElement e = new SecurityElement( "PolicyStatement" );
        e.AddAttribute( "version", "1" );
        if (m_attributes != PolicyStatementAttribute.Nothing)
            e.AddAttribute( "Attributes", XMLUtil.BitFieldEnumToString( typeof( PolicyStatementAttribute ), m_attributes ) );

        lock (this)
        {
            if (m_permSet != null)
            {
                if (m_permSet is NamedPermissionSet)
                {
                    // If the named permission set exists in the parent level of this
                    // policy struct, then just save the name of the permission set.
                    // Otherwise, serialize it like normal.
                    NamedPermissionSet namedPermSet = (NamedPermissionSet)m_permSet;
                    if (level != null && level.GetNamedPermissionSet( namedPermSet.Name ) != null)
                    {
                        e.AddAttribute( "PermissionSetName", namedPermSet.Name );
                    }
                    else
                    {
                        if (useInternal)
                            e.AddChild( namedPermSet.InternalToXml() );
                        else
                            e.AddChild( namedPermSet.ToXml() );
                    }
                }
                else
                {
                    if (useInternal)
                        e.AddChild( m_permSet.InternalToXml() );
                    else
                        e.AddChild( m_permSet.ToXml() );
                }
            }
        }

        return e;
    }

    [System.Security.SecuritySafeCritical]  // auto-generated
    public void FromXml( SecurityElement et, PolicyLevel level )
    {
        FromXml( et, level, false );
    }

    // Deserializes the statement from a "PolicyStatement" security element, resolving named
    // permission sets against the given level first and falling back to inline decoding.
    [System.Security.SecurityCritical]  // auto-generated
    internal void FromXml( SecurityElement et, PolicyLevel level, bool allowInternalOnly )
    {
        if (et == null)
            throw new ArgumentNullException( nameof(et) );

        if (!et.Tag.Equals( "PolicyStatement" ))
            throw new ArgumentException( String.Format( CultureInfo.CurrentCulture, Environment.GetResourceString( "Argument_InvalidXMLElement" ), nameof(PolicyStatement), this.GetType().FullName ) );
        Contract.EndContractBlock();

        m_attributes = (PolicyStatementAttribute) 0;

        String strAttributes = et.Attribute( "Attributes" );

        if (strAttributes != null)
            m_attributes = (PolicyStatementAttribute)Enum.Parse( typeof( PolicyStatementAttribute ), strAttributes );

        lock (this)
        {
            m_permSet = null;

            if (level != null)
            {
                // Try to resolve a named permission set from the level first.
                String permSetName = et.Attribute( "PermissionSetName" );

                if (permSetName != null)
                {
                    m_permSet = level.GetNamedPermissionSetInternal( permSetName );

                    if (m_permSet == null)
                        m_permSet = new PermissionSet( PermissionState.None );
                }
            }

            if (m_permSet == null)
            {
                // There is no provided level, it is not a named permission set, or
                // the named permission set doesn't exist in the provided level,
                // so just create the class through reflection and decode normally.
                SecurityElement e = et.SearchForChildByTag( "PermissionSet" );

                if (e != null)
                {
                    String className = e.Attribute( "class" );

                    if (className != null && (className.Equals( "NamedPermissionSet" ) ||
                                              className.Equals( "System.Security.NamedPermissionSet" )))
                        m_permSet = new NamedPermissionSet( "DefaultName", PermissionState.None );
                    else
                        m_permSet = new PermissionSet( PermissionState.None );

                    try
                    {
                        m_permSet.FromXml( e, allowInternalOnly, true );
                    }
                    catch
                    {
                        // ignore any exceptions from the decode process.
                        // Note: we go ahead and use the permission set anyway.  This should be safe since
                        // the decode process should never give permission beyond what a proper decode would have
                        // given.
                    }
                }
                else
                {
                    throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidXML" ) );
                }
            }

            if (m_permSet == null)
                m_permSet = new PermissionSet( PermissionState.None );
        }
    }

    // Position-based deserialization from a SecurityDocument; mirrors the SecurityElement
    // overload above but walks child positions to find the "PermissionSet" element.
    [System.Security.SecurityCritical]  // auto-generated
    internal void FromXml( SecurityDocument doc, int position, PolicyLevel level, bool allowInternalOnly )
    {
        if (doc == null)
            throw new ArgumentNullException( nameof(doc) );
        Contract.EndContractBlock();

        if (!doc.GetTagForElement( position ).Equals( "PolicyStatement" ))
            throw new ArgumentException( String.Format( CultureInfo.CurrentCulture, Environment.GetResourceString( "Argument_InvalidXMLElement" ), nameof(PolicyStatement), this.GetType().FullName ) );

        m_attributes = (PolicyStatementAttribute) 0;
        String strAttributes = doc.GetAttributeForElement( position, "Attributes" );

        if (strAttributes != null)
            m_attributes = (PolicyStatementAttribute)Enum.Parse( typeof( PolicyStatementAttribute ), strAttributes );

        lock (this)
        {
            m_permSet = null;

            if (level != null)
            {
                // Try to resolve a named permission set from the level first.
                String permSetName = doc.GetAttributeForElement( position, "PermissionSetName" );

                if (permSetName != null)
                {
                    m_permSet = level.GetNamedPermissionSetInternal( permSetName );

                    if (m_permSet == null)
                        m_permSet = new PermissionSet( PermissionState.None );
                }
            }

            if (m_permSet == null)
            {
                // There is no provided level, it is not a named permission set, or
                // the named permission set doesn't exist in the provided level,
                // so just create the class through reflection and decode normally.
                ArrayList childPositions = doc.GetChildrenPositionForElement( position );
                int positionPermissionSet = -1;

                for (int i = 0; i < childPositions.Count; ++i)
                {
                    if (doc.GetTagForElement( (int)childPositions[i] ).Equals( "PermissionSet" ))
                    {
                        positionPermissionSet = (int)childPositions[i];
                    }
                }

                if (positionPermissionSet != -1)
                {
                    String className = doc.GetAttributeForElement( positionPermissionSet, "class" );

                    if (className != null && (className.Equals( "NamedPermissionSet" ) ||
                                              className.Equals( "System.Security.NamedPermissionSet" )))
                        m_permSet = new NamedPermissionSet( "DefaultName", PermissionState.None );
                    else
                        m_permSet = new PermissionSet( PermissionState.None );

                    m_permSet.FromXml( doc, positionPermissionSet, allowInternalOnly );
                }
                else
                {
                    throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidXML" ) );
                }
            }

            if (m_permSet == null)
                m_permSet = new PermissionSet( PermissionState.None );
        }
    }
#endif // FEATURE_CAS_POLICY

    // Value equality: same attribute bits and equal permission sets.
    [System.Runtime.InteropServices.ComVisible(false)]
    public override bool Equals( Object obj )
    {
        PolicyStatement other = obj as PolicyStatement;

        if (other == null)
            return false;

        if (this.m_attributes != other.m_attributes)
            return false;

        if (!Object.Equals( this.m_permSet, other.m_permSet ))
            return false;

        return true;
    }

    // Hash combines the attribute bits with the permission set's hash (consistent with Equals).
    [System.Runtime.InteropServices.ComVisible(false)]
    public override int GetHashCode()
    {
        int accumulator = (int)this.m_attributes;

        if (m_permSet != null)
            accumulator = accumulator ^ m_permSet.GetHashCode();

        return accumulator;
    }
}
}
| |
//
// Author:
// Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2015 Jb Evain
// Copyright (c) 2008 - 2011 Novell, Inc.
//
// Licensed under the MIT/X11 license.
//
using System;
using System.IO;
using System.Reflection;
using System.Security.Cryptography;
using System.Runtime.Serialization;
using Mono.Security.Cryptography;
using Mono.Cecil.PE;
namespace Mono.Cecil {
// Most of this code has been adapted
// from Jeroen Frijters' fantastic work
// in IKVM.Reflection.Emit. Thanks!
static class CryptoService {

    // Builds the assembly public key: a 12-byte header followed by the CAPI public key blob.
    public static byte [] GetPublicKey (WriterParameters parameters)
    {
        using (var rsa = parameters.CreateRSA ()) {
            var cspBlob = CryptoConvert.ToCapiPublicKeyBlob (rsa);
            var publicKey = new byte [12 + cspBlob.Length];
            Buffer.BlockCopy (cspBlob, 0, publicKey, 12, cspBlob.Length);
            // The first 12 bytes are documented at:
            // http://msdn.microsoft.com/library/en-us/cprefadd/html/grfungethashfromfile.asp
            // ALG_ID - Signature
            publicKey [1] = 36;
            // ALG_ID - Hash
            publicKey [4] = 4;
            publicKey [5] = 128;
            // Length of Public Key (in bytes) - little-endian
            publicKey [8] = (byte) (cspBlob.Length >> 0);
            publicKey [9] = (byte) (cspBlob.Length >> 8);
            publicKey [10] = (byte) (cspBlob.Length >> 16);
            publicKey [11] = (byte) (cspBlob.Length >> 24);
            return publicKey;
        }
    }

    // Strong-name signs the written image: hashes the stream (skipping the signature
    // directory), signs the hash, and patches the signature back into the stream.
    public static void StrongName (Stream stream, ImageWriter writer, WriterParameters parameters)
    {
        int strong_name_pointer;

        var strong_name = CreateStrongName (parameters, HashStream (stream, writer, out strong_name_pointer));
        PatchStrongName (stream, strong_name_pointer, strong_name);
    }

    // Writes the signature bytes at the strong-name directory's file offset.
    static void PatchStrongName (Stream stream, int strong_name_pointer, byte [] strong_name)
    {
        stream.Seek (strong_name_pointer, SeekOrigin.Begin);
        stream.Write (strong_name, 0, strong_name.Length);
    }

    // Produces the strong-name signature: PKCS#1 SHA1 signature over the image hash,
    // byte-reversed as the PE format requires.
    static byte [] CreateStrongName (WriterParameters parameters, byte [] hash)
    {
        const string hash_algo = "SHA1";

        using (var rsa = parameters.CreateRSA ()) {
            var formatter = new RSAPKCS1SignatureFormatter (rsa);

            formatter.SetHashAlgorithm (hash_algo);

            byte [] signature = formatter.CreateSignature (hash);

            Array.Reverse (signature);

            return signature;
        }
    }

    // SHA1-hashes the image while skipping the strong-name signature region, and
    // returns (via out) that region's file offset so it can be patched later.
    static byte [] HashStream (Stream stream, ImageWriter writer, out int strong_name_pointer)
    {
        const int buffer_size = 8192;

        var text = writer.text;
        var header_size = (int) writer.GetHeaderSize ();
        var text_section_pointer = (int) text.PointerToRawData;
        var strong_name_directory = writer.GetStrongNameSignatureDirectory ();

        if (strong_name_directory.Size == 0)
            throw new InvalidOperationException ();

        // RVA -> file offset of the signature within the .text section.
        strong_name_pointer = (int) (text_section_pointer
            + (strong_name_directory.VirtualAddress - text.VirtualAddress));
        var strong_name_length = (int) strong_name_directory.Size;

        var sha1 = new SHA1Managed ();
        var buffer = new byte [buffer_size];
        using (var crypto_stream = new CryptoStream (Stream.Null, sha1, CryptoStreamMode.Write)) {

            // Hash: headers, then .text up to the signature, then everything after the signature.
            stream.Seek (0, SeekOrigin.Begin);
            CopyStreamChunk (stream, crypto_stream, buffer, header_size);

            stream.Seek (text_section_pointer, SeekOrigin.Begin);
            CopyStreamChunk (stream, crypto_stream, buffer, (int) strong_name_pointer - text_section_pointer);

            stream.Seek (strong_name_length, SeekOrigin.Current);
            CopyStreamChunk (stream, crypto_stream, buffer, (int) (stream.Length - (strong_name_pointer + strong_name_length)));
        }

        return sha1.Hash;
    }

    // Copies exactly `length` bytes from stream to dest_stream through the given scratch buffer.
    public static void CopyStreamChunk (Stream stream, Stream dest_stream, byte [] buffer, int length)
    {
        while (length > 0) {
            int read = stream.Read (buffer, 0, System.Math.Min (buffer.Length, length));
            dest_stream.Write (buffer, 0, read);
            length -= read;
        }
    }

    // SHA1 of a file's contents; empty array when the file does not exist.
    public static byte [] ComputeHash (string file)
    {
        if (!File.Exists (file))
            return Empty<byte>.Array;

        using (var stream = new FileStream (file, FileMode.Open, FileAccess.Read, FileShare.Read))
            return ComputeHash (stream);
    }

    // SHA1 of an entire stream, read from its current position for stream.Length bytes.
    public static byte [] ComputeHash (Stream stream)
    {
        const int buffer_size = 8192;

        var sha1 = new SHA1Managed ();
        var buffer = new byte [buffer_size];

        using (var crypto_stream = new CryptoStream (Stream.Null, sha1, CryptoStreamMode.Write))
            CopyStreamChunk (stream, crypto_stream, buffer, (int) stream.Length);

        return sha1.Hash;
    }

    // SHA1 over the concatenation of the given buffers' used portions.
    public static byte [] ComputeHash (params ByteBuffer [] buffers)
    {
        var sha1 = new SHA1Managed ();

        using (var crypto_stream = new CryptoStream (Stream.Null, sha1, CryptoStreamMode.Write)) {
            for (int i = 0; i < buffers.Length; i++) {
                crypto_stream.Write (buffers [i].buffer, 0, buffers [i].length);
            }
        }

        return sha1.Hash;
    }

    // Derives a deterministic GUID from a hash, shaped like an RFC 4122 version-4 GUID.
    public static Guid ComputeGuid (byte [] hash)
    {
        // From corefx/src/System.Reflection.Metadata/src/System/Reflection/Metadata/BlobContentId.cs
        var guid = new byte [16];
        Buffer.BlockCopy (hash, 0, guid, 0, 16);

        // modify the guid data so it decodes to the form of a "random" guid ala rfc4122
        guid [7] = (byte) ((guid [7] & 0x0f) | (4 << 4));
        guid [8] = (byte) ((guid [8] & 0x3f) | (2 << 6));

        return new Guid (guid);
    }
}
static partial class Mixin {

    // Creates the RSA used for strong-name signing, trying key sources in order:
    // 1) an explicit key blob; 2) an explicit key container name; 3) the key pair's
    // serialized blob or container (whichever the StrongNameKeyPair carries).
    public static RSA CreateRSA (this WriterParameters writer_parameters)
    {
        byte [] key;
        string key_container;

        if (writer_parameters.StrongNameKeyBlob != null)
            return CryptoConvert.FromCapiKeyBlob (writer_parameters.StrongNameKeyBlob);

        if (writer_parameters.StrongNameKeyContainer != null)
            key_container = writer_parameters.StrongNameKeyContainer;
        else if (!TryGetKeyContainer (writer_parameters.StrongNameKeyPair, out key, out key_container))
            // Key pair holds a raw key blob rather than a container name.
            return CryptoConvert.FromCapiKeyBlob (key);

        var parameters = new CspParameters {
            Flags = CspProviderFlags.UseMachineKeyStore,
            KeyContainerName = key_container,
            KeyNumber = 2, // AT_SIGNATURE
        };

        return new RSACryptoServiceProvider (parameters);
    }

    // Extracts the key blob and container name from a StrongNameKeyPair via its
    // serialization payload (private fields _keyPairArray / _keyPairContainer).
    // Returns true when a container name is present.
    static bool TryGetKeyContainer (ISerializable key_pair, out byte [] key, out string key_container)
    {
        var info = new SerializationInfo (typeof (StrongNameKeyPair), new FormatterConverter ());
        key_pair.GetObjectData (info, new StreamingContext ());

        key = (byte []) info.GetValue ("_keyPairArray", typeof (byte []));
        key_container = info.GetString ("_keyPairContainer");

        return key_container != null;
    }
}
}
| |
//
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
#if !SILVERLIGHT
namespace NLog.UnitTests.Targets
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Configuration;
using System.Data;
using System.Data.Common;
using System.Globalization;
using System.IO;
using System.Linq;
using NLog.Common;
using NLog.Config;
using NLog.Targets;
using Xunit;
using Xunit.Extensions;
using System.Data.SqlClient;
public class DatabaseTargetTests : NLogTestBase
{
#if !MONO
    // Registers the MockDbFactory under the invariant name "MockDb" in the
    // system.data DbProviderFactories table so tests can reference it by name.
    static DatabaseTargetTests()
    {
        var data = (DataSet)ConfigurationManager.GetSection("system.data");
        var providerFactories = data.Tables["DBProviderFactories"];
        providerFactories.Rows.Add("MockDb Provider", "MockDb Provider", "MockDb",
            typeof(MockDbFactory).AssemblyQualifiedName);
        providerFactories.AcceptChanges();
    }
#endif
/// <summary>
/// Writes three events one at a time with default settings:
/// each write opens, uses and disposes its own connection.
/// </summary>
[Fact]
public void SimpleDatabaseTest()
{
    MockDbConnection.ClearLog();

    var target = new DatabaseTarget
    {
        CommandText = "INSERT INTO FooBar VALUES('${message}')",
        ConnectionString = "FooBar",
        DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
    };

    target.Initialize(null);
    Assert.Same(typeof(MockDbConnection), target.ConnectionType);

    var errors = new List<Exception>();
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg1").WithContinuation(errors.Add));
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg2").WithContinuation(errors.Add));
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg3").WithContinuation(errors.Add));

    foreach (var error in errors)
    {
        Assert.Null(error);
    }

    string expectedLog = @"Open('FooBar').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg1')
Close()
Dispose()
Open('FooBar').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg2')
Close()
Dispose()
Open('FooBar').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg3')
Close()
Dispose()
";

    AssertLog(expectedLog);
}
/// <summary>
/// Writes three events as one batch with default settings:
/// a single connection is opened, used for all commands, then disposed.
/// </summary>
[Fact]
public void SimpleBatchedDatabaseTest()
{
    MockDbConnection.ClearLog();

    var target = new DatabaseTarget
    {
        CommandText = "INSERT INTO FooBar VALUES('${message}')",
        ConnectionString = "FooBar",
        DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
    };

    target.Initialize(null);
    Assert.Same(typeof(MockDbConnection), target.ConnectionType);

    var errors = new List<Exception>();
    var events = new[]
    {
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg1").WithContinuation(errors.Add),
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg2").WithContinuation(errors.Add),
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg3").WithContinuation(errors.Add),
    };

    target.WriteAsyncLogEvents(events);

    foreach (var error in errors)
    {
        Assert.Null(error);
    }

    string expectedLog = @"Open('FooBar').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg1')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg2')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg3')
Close()
Dispose()
";

    AssertLog(expectedLog);
}
/// <summary>
/// With KeepConnection enabled, consecutive single writes reuse one open
/// connection; it is only closed and disposed when the target is closed.
/// </summary>
[Fact]
public void KeepConnectionOpenTest()
{
    MockDbConnection.ClearLog();

    var target = new DatabaseTarget
    {
        CommandText = "INSERT INTO FooBar VALUES('${message}')",
        ConnectionString = "FooBar",
        DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
        KeepConnection = true,
    };

    target.Initialize(null);
    Assert.Same(typeof(MockDbConnection), target.ConnectionType);

    var errors = new List<Exception>();
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg1").WithContinuation(errors.Add));
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg2").WithContinuation(errors.Add));
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg3").WithContinuation(errors.Add));

    foreach (var error in errors)
    {
        Assert.Null(error);
    }

    string expectedLog = @"Open('FooBar').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg1')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg2')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg3')
";

    AssertLog(expectedLog);

    // Closing the target releases the kept-open connection.
    MockDbConnection.ClearLog();
    target.Close();

    expectedLog = @"Close()
Dispose()
";

    AssertLog(expectedLog);
}
/// <summary>
/// With KeepConnection enabled, a batched write reuses one open connection;
/// it is only closed and disposed when the target is closed.
/// </summary>
[Fact]
public void KeepConnectionOpenBatchedTest()
{
    MockDbConnection.ClearLog();

    var target = new DatabaseTarget
    {
        CommandText = "INSERT INTO FooBar VALUES('${message}')",
        ConnectionString = "FooBar",
        DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
        KeepConnection = true,
    };

    target.Initialize(null);
    Assert.Same(typeof(MockDbConnection), target.ConnectionType);

    var errors = new List<Exception>();
    var events = new[]
    {
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg1").WithContinuation(errors.Add),
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg2").WithContinuation(errors.Add),
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg3").WithContinuation(errors.Add),
    };

    target.WriteAsyncLogEvents(events);

    foreach (var error in errors)
    {
        Assert.Null(error);
    }

    string expectedLog = @"Open('FooBar').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg1')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg2')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg3')
";

    AssertLog(expectedLog);

    // Closing the target releases the kept-open connection.
    MockDbConnection.ClearLog();
    target.Close();

    expectedLog = @"Close()
Dispose()
";

    AssertLog(expectedLog);
}
/// <summary>
/// With KeepConnection enabled and a per-event connection string, the connection
/// is kept open only while consecutive writes resolve to the same connection
/// string; a different string forces a close/reopen.
/// </summary>
[Fact]
public void KeepConnectionOpenTest2()
{
    MockDbConnection.ClearLog();

    var target = new DatabaseTarget
    {
        CommandText = "INSERT INTO FooBar VALUES('${message}')",
        ConnectionString = "Database=${logger}",
        DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
        KeepConnection = true,
    };

    target.Initialize(null);
    Assert.Same(typeof(MockDbConnection), target.ConnectionType);

    var errors = new List<Exception>();
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg1").WithContinuation(errors.Add));
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg2").WithContinuation(errors.Add));
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger2", "msg3").WithContinuation(errors.Add));
    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "MyLogger", "msg4").WithContinuation(errors.Add));

    foreach (var error in errors)
    {
        Assert.Null(error);
    }

    string expectedLog = @"Open('Database=MyLogger').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg1')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg2')
Close()
Dispose()
Open('Database=MyLogger2').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg3')
Close()
Dispose()
Open('Database=MyLogger').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg4')
";

    AssertLog(expectedLog);

    // Closing the target releases the kept-open connection.
    MockDbConnection.ClearLog();
    target.Close();

    expectedLog = @"Close()
Dispose()
";

    AssertLog(expectedLog);
}
/// <summary>
/// Batched variant of <see cref="KeepConnectionOpenTest2"/>: events in a batch
/// are bucket-sorted by resolved connection string, so msg1/msg2/msg4 share one
/// "MyLogger" connection while msg3 uses a "MyLogger2" connection.
/// </summary>
[Fact]
public void KeepConnectionOpenBatchedTest2()
{
    MockDbConnection.ClearLog();

    var target = new DatabaseTarget
    {
        CommandText = "INSERT INTO FooBar VALUES('${message}')",
        ConnectionString = "Database=${logger}",
        DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
        KeepConnection = true,
    };

    target.Initialize(null);
    Assert.Same(typeof(MockDbConnection), target.ConnectionType);

    // when we pass multiple log events in an array, the target will bucket-sort them by
    // connection string and group all commands for the same connection string together
    // to minimize number of db open/close operations
    // in this case msg1, msg2 and msg4 will be written together to MyLogger database
    // and msg3 will be written to MyLogger2 database
    var errors = new List<Exception>();
    var events = new[]
    {
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg1").WithContinuation(errors.Add),
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg2").WithContinuation(errors.Add),
        new LogEventInfo(LogLevel.Info, "MyLogger2", "msg3").WithContinuation(errors.Add),
        new LogEventInfo(LogLevel.Info, "MyLogger", "msg4").WithContinuation(errors.Add),
    };

    target.WriteAsyncLogEvents(events);

    foreach (var error in errors)
    {
        Assert.Null(error);
    }

    string expectedLog = @"Open('Database=MyLogger').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg1')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg2')
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg4')
Close()
Dispose()
Open('Database=MyLogger2').
ExecuteNonQuery: INSERT INTO FooBar VALUES('msg3')
";

    AssertLog(expectedLog);

    // Closing the target releases the kept-open connection.
    MockDbConnection.ClearLog();
    target.Close();

    expectedLog = @"Close()
Dispose()
";

    AssertLog(expectedLog);
}
/// <summary>
/// Each command gets three IDbDataParameters (msg, lvl, lg) rendered from the log
/// event. Parameter creation, direction, name and value must appear in the mock log
/// in declaration order, with per-event values, for both events.
/// </summary>
[Fact]
public void ParameterTest()
{
MockDbConnection.ClearLog();
DatabaseTarget dt = new DatabaseTarget()
{
CommandText = "INSERT INTO FooBar VALUES(@msg, @lvl, @lg)",
DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
KeepConnection = true,
Parameters =
{
new DatabaseParameterInfo("msg", "${message}"),
new DatabaseParameterInfo("lvl", "${level}"),
new DatabaseParameterInfo("lg", "${logger}")
}
};
dt.Initialize(null);
Assert.Same(typeof(MockDbConnection), dt.ConnectionType);
// NOTE(review): the comment previously here was copied from the batching tests and
// was stale (it mentioned msg2/msg4 and a MyLogger2 bucket that do not exist in
// this test). Both events use the default connection string, so the connection is
// opened once and both INSERTs run on it with their own rendered parameter values.
List<Exception> exceptions = new List<Exception>();
var events = new[]
{
new LogEventInfo(LogLevel.Info, "MyLogger", "msg1").WithContinuation(exceptions.Add),
new LogEventInfo(LogLevel.Debug, "MyLogger2", "msg3").WithContinuation(exceptions.Add),
};
dt.WriteAsyncLogEvents(events);
foreach (var ex in exceptions)
{
Assert.Null(ex);
}
string expectedLog = @"Open('Server=.;Trusted_Connection=SSPI;').
CreateParameter(0)
Parameter #0 Direction=Input
Parameter #0 Name=msg
Parameter #0 Value=msg1
Add Parameter Parameter #0
CreateParameter(1)
Parameter #1 Direction=Input
Parameter #1 Name=lvl
Parameter #1 Value=Info
Add Parameter Parameter #1
CreateParameter(2)
Parameter #2 Direction=Input
Parameter #2 Name=lg
Parameter #2 Value=MyLogger
Add Parameter Parameter #2
ExecuteNonQuery: INSERT INTO FooBar VALUES(@msg, @lvl, @lg)
CreateParameter(0)
Parameter #0 Direction=Input
Parameter #0 Name=msg
Parameter #0 Value=msg3
Add Parameter Parameter #0
CreateParameter(1)
Parameter #1 Direction=Input
Parameter #1 Name=lvl
Parameter #1 Value=Debug
Add Parameter Parameter #1
CreateParameter(2)
Parameter #2 Direction=Input
Parameter #2 Name=lg
Parameter #2 Value=MyLogger2
Add Parameter Parameter #2
ExecuteNonQuery: INSERT INTO FooBar VALUES(@msg, @lvl, @lg)
";
AssertLog(expectedLog);
MockDbConnection.ClearLog();
dt.Close();
expectedLog = @"Close()
Dispose()
";
AssertLog(expectedLog);
}
/// <summary>
/// Parameter facets (Size, Precision, Scale) configured on DatabaseParameterInfo
/// must be applied to the created IDbDataParameter before the value is set.
/// Per the expected log, a facet left at its default 0 (Precision on 'lg') is not
/// applied at all.
/// </summary>
[Fact]
public void ParameterFacetTest()
{
MockDbConnection.ClearLog();
DatabaseTarget dt = new DatabaseTarget()
{
CommandText = "INSERT INTO FooBar VALUES(@msg, @lvl, @lg)",
DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
KeepConnection = true,
Parameters =
{
new DatabaseParameterInfo("msg", "${message}")
{
Precision = 3,
Scale = 7,
Size = 9,
},
new DatabaseParameterInfo("lvl", "${level}")
{
Scale = 7
},
new DatabaseParameterInfo("lg", "${logger}")
{
Precision = 0
},
}
};
dt.Initialize(null);
Assert.Same(typeof(MockDbConnection), dt.ConnectionType);
// NOTE(review): the comment previously here was copied from the batching tests and
// was stale. Both events share the default connection string; the interesting part
// is the facet lines (Size/Precision/Scale) emitted per parameter below.
var exceptions = new List<Exception>();
var events = new[]
{
new LogEventInfo(LogLevel.Info, "MyLogger", "msg1").WithContinuation(exceptions.Add),
new LogEventInfo(LogLevel.Debug, "MyLogger2", "msg3").WithContinuation(exceptions.Add),
};
dt.WriteAsyncLogEvents(events);
dt.Close();
foreach (var ex in exceptions)
{
Assert.Null(ex);
}
string expectedLog = @"Open('Server=.;Trusted_Connection=SSPI;').
CreateParameter(0)
Parameter #0 Direction=Input
Parameter #0 Name=msg
Parameter #0 Size=9
Parameter #0 Precision=3
Parameter #0 Scale=7
Parameter #0 Value=msg1
Add Parameter Parameter #0
CreateParameter(1)
Parameter #1 Direction=Input
Parameter #1 Name=lvl
Parameter #1 Scale=7
Parameter #1 Value=Info
Add Parameter Parameter #1
CreateParameter(2)
Parameter #2 Direction=Input
Parameter #2 Name=lg
Parameter #2 Value=MyLogger
Add Parameter Parameter #2
ExecuteNonQuery: INSERT INTO FooBar VALUES(@msg, @lvl, @lg)
CreateParameter(0)
Parameter #0 Direction=Input
Parameter #0 Name=msg
Parameter #0 Size=9
Parameter #0 Precision=3
Parameter #0 Scale=7
Parameter #0 Value=msg3
Add Parameter Parameter #0
CreateParameter(1)
Parameter #1 Direction=Input
Parameter #1 Name=lvl
Parameter #1 Scale=7
Parameter #1 Value=Debug
Add Parameter Parameter #1
CreateParameter(2)
Parameter #2 Direction=Input
Parameter #2 Name=lg
Parameter #2 Value=MyLogger2
Add Parameter Parameter #2
ExecuteNonQuery: INSERT INTO FooBar VALUES(@msg, @lvl, @lg)
Close()
Dispose()
";
AssertLog(expectedLog);
}
/// <summary>
/// Verifies how DBHost/DBDatabase/DBUserName/DBPassword are assembled into a
/// connection string, that layout renderers in those properties are applied per
/// event, and that an explicit ConnectionString overrides all DB* properties.
/// </summary>
[Fact]
public void ConnectionStringBuilderTest1()
{
    // no properties set: default server with integrated security
    var dt = new DatabaseTarget();
    Assert.Equal("Server=.;Trusted_Connection=SSPI;", GetConnectionString(dt));

    // DBHost is a layout; GetConnectionString writes an event for logger 'Logger1'
    dt = new DatabaseTarget { DBHost = "${logger}" };
    Assert.Equal("Server=Logger1;Trusted_Connection=SSPI;", GetConnectionString(dt));

    // explicit host plus a rendered database name
    dt = new DatabaseTarget { DBHost = "HOST1", DBDatabase = "${logger}" };
    Assert.Equal("Server=HOST1;Trusted_Connection=SSPI;Database=Logger1", GetConnectionString(dt));

    // user/password replace integrated security
    dt = new DatabaseTarget
    {
        DBHost = "HOST1",
        DBDatabase = "${logger}",
        DBUserName = "user1",
        DBPassword = "password1",
    };
    Assert.Equal("Server=HOST1;User id=user1;Password=password1;Database=Logger1", GetConnectionString(dt));

    // a full ConnectionString wins over every DB* property
    dt = new DatabaseTarget
    {
        ConnectionString = "customConnectionString42",
        DBHost = "HOST1",
        DBDatabase = "${logger}",
        DBUserName = "user1",
        DBPassword = "password1",
    };
    Assert.Equal("customConnectionString42", GetConnectionString(dt));
}
/// <summary>
/// A connection that fails in Open() must surface the exception to the write
/// continuation, and nothing beyond the Open attempt may appear in the mock log.
/// </summary>
[Fact]
public void DatabaseExceptionTest1()
{
    MockDbConnection.ClearLog();
    var exceptions = new List<Exception>();

    // 'cannotconnect' makes MockDbConnection.Open() throw
    var db = new DatabaseTarget
    {
        CommandText = "not important",
        ConnectionString = "cannotconnect",
        DBProvider = typeof(MockDbConnection).AssemblyQualifiedName,
    };
    db.Initialize(null);
    db.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
    db.Close();

    Assert.Equal(1, exceptions.Count);
    Assert.NotNull(exceptions[0]);
    Assert.Equal("Cannot open fake database.", exceptions[0].Message);
    Assert.Equal("Open('cannotconnect').\r\n", MockDbConnection.Log);
}
/// <summary>
/// When ExecuteNonQuery throws and KeepConnection=true, the target must report the
/// failure to each continuation and close/dispose the connection, so every
/// subsequent write reopens it rather than reusing a stale kept connection
/// (three separate writes => three Open/Execute/Close/Dispose cycles).
/// </summary>
[Fact]
public void DatabaseExceptionTest2()
{
MockDbConnection.ClearLog();
var exceptions = new List<Exception>();
var db = new DatabaseTarget();
db.CommandText = "not important";
// 'cannotexecute' makes MockDbCommand.ExecuteNonQuery() throw
db.ConnectionString = "cannotexecute";
db.KeepConnection = true;
db.DBProvider = typeof(MockDbConnection).AssemblyQualifiedName;
db.Initialize(null);
db.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
db.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
db.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
db.Close();
Assert.Equal(3, exceptions.Count);
Assert.NotNull(exceptions[0]);
Assert.NotNull(exceptions[1]);
Assert.NotNull(exceptions[2]);
Assert.Equal("Failure during ExecuteNonQuery", exceptions[0].Message);
Assert.Equal("Failure during ExecuteNonQuery", exceptions[1].Message);
Assert.Equal("Failure during ExecuteNonQuery", exceptions[2].Message);
string expectedLog = @"Open('cannotexecute').
ExecuteNonQuery: not important
Close()
Dispose()
Open('cannotexecute').
ExecuteNonQuery: not important
Close()
Dispose()
Open('cannotexecute').
ExecuteNonQuery: not important
Close()
Dispose()
";
AssertLog(expectedLog);
}
/// <summary>
/// Same failure scenario as DatabaseExceptionTest2 but using a single batched
/// WriteAsyncLogEvents call: each event in the batch must still get its own
/// continuation error and its own Open/Execute/Close/Dispose cycle.
/// </summary>
[Fact]
public void DatabaseExceptionTest3()
{
MockDbConnection.ClearLog();
var exceptions = new List<Exception>();
var db = new DatabaseTarget();
db.CommandText = "not important";
// 'cannotexecute' makes MockDbCommand.ExecuteNonQuery() throw
db.ConnectionString = "cannotexecute";
db.KeepConnection = true;
db.DBProvider = typeof(MockDbConnection).AssemblyQualifiedName;
db.Initialize(null);
db.WriteAsyncLogEvents(
LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add),
LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add),
LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
db.Close();
Assert.Equal(3, exceptions.Count);
Assert.NotNull(exceptions[0]);
Assert.NotNull(exceptions[1]);
Assert.NotNull(exceptions[2]);
Assert.Equal("Failure during ExecuteNonQuery", exceptions[0].Message);
Assert.Equal("Failure during ExecuteNonQuery", exceptions[1].Message);
Assert.Equal("Failure during ExecuteNonQuery", exceptions[2].Message);
string expectedLog = @"Open('cannotexecute').
ExecuteNonQuery: not important
Close()
Dispose()
Open('cannotexecute').
ExecuteNonQuery: not important
Close()
Dispose()
Open('cannotexecute').
ExecuteNonQuery: not important
Close()
Dispose()
";
AssertLog(expectedLog);
}
/// <summary>
/// ConnectionStringName lookup: the settings collection defaults to
/// ConfigurationManager.ConnectionStrings; after a custom collection is supplied,
/// Initialize() must resolve both the provider factory (provider name 'MockDb')
/// and the connection string value ('cs1') from the named entry.
/// </summary>
[Fact]
public void ConnectionStringNameInitTest()
{
var dt = new DatabaseTarget
{
ConnectionStringName = "MyConnectionString",
CommandText = "notimportant",
};
// default source is the app.config <connectionStrings> section
Assert.Same(ConfigurationManager.ConnectionStrings, dt.ConnectionStringsSettings);
dt.ConnectionStringsSettings = new ConnectionStringSettingsCollection()
{
new ConnectionStringSettings("MyConnectionString", "cs1", "MockDb"),
};
dt.Initialize(null);
Assert.Same(MockDbFactory.Instance, dt.ProviderFactory);
Assert.Equal("cs1", dt.ConnectionString.Render(LogEventInfo.CreateNullEvent()));
}
/// <summary>
/// With ThrowConfigExceptions enabled, Initialize() must throw an
/// NLogConfigurationException with a precise message when ConnectionStringName
/// refers to an entry that is not present in the settings collection.
/// </summary>
/// <remarks>
/// Rewritten from the try/catch + Assert.True(false) pattern to the idiomatic
/// xUnit Assert.Throws, which also fails cleanly if no exception (or the wrong
/// type) is thrown.
/// </remarks>
[Fact]
public void ConnectionStringNameNegativeTest_if_ThrowConfigExceptions()
{
    LogManager.ThrowConfigExceptions = true;
    var dt = new DatabaseTarget
    {
        ConnectionStringName = "MyConnectionString",
        CommandText = "notimportant",
        // empty collection: the named connection string cannot be found
        ConnectionStringsSettings = new ConnectionStringSettingsCollection(),
    };

    var configurationException =
        Assert.Throws<NLogConfigurationException>(() => dt.Initialize(null));
    Assert.Equal(
        "Connection string 'MyConnectionString' is not declared in <connectionStrings /> section.",
        configurationException.Message);
}
/// <summary>
/// DBProvider set to a registered provider-invariant name ('MockDb') must resolve
/// to the corresponding DbProviderFactory, and OpenConnection() must go through
/// that factory (observed via MockDbConnection2's static counters).
/// </summary>
[Fact]
public void ProviderFactoryInitTest()
{
var dt = new DatabaseTarget();
dt.DBProvider = "MockDb";
dt.CommandText = "Notimportant";
dt.Initialize(null);
Assert.Same(MockDbFactory.Instance, dt.ProviderFactory);
dt.OpenConnection("myConnectionString");
Assert.Equal(1, MockDbConnection2.OpenCount);
Assert.Equal("myConnectionString", MockDbConnection2.LastOpenConnectionString);
}
/// <summary>
/// Every SQL Server shorthand alias accepted by DBProvider must resolve to
/// System.Data.SqlClient.SqlConnection.
/// </summary>
[Fact]
public void SqlServerShorthandNotationTest()
{
    var aliases = new[] { "microsoft", "msde", "mssql", "sqlserver" };
    foreach (var alias in aliases)
    {
        var dt = new DatabaseTarget
        {
            Name = "myTarget",
            DBProvider = alias,
            ConnectionString = "notimportant",
            CommandText = "notimportant",
        };
        dt.Initialize(null);
        Assert.Equal(typeof(System.Data.SqlClient.SqlConnection), dt.ConnectionType);
    }
}
/// <summary>
/// The 'oledb' DBProvider shorthand must map to System.Data.OleDb.OleDbConnection.
/// </summary>
[Fact]
public void OleDbShorthandNotationTest()
{
    var dt = new DatabaseTarget();
    dt.Name = "myTarget";
    dt.DBProvider = "oledb";
    dt.ConnectionString = "notimportant";
    dt.CommandText = "notimportant";

    dt.Initialize(null);

    Assert.Equal(typeof(System.Data.OleDb.OleDbConnection), dt.ConnectionType);
}
/// <summary>
/// The 'odbc' DBProvider shorthand must map to System.Data.Odbc.OdbcConnection.
/// </summary>
[Fact]
public void OdbcShorthandNotationTest()
{
    var dt = new DatabaseTarget();
    dt.Name = "myTarget";
    dt.DBProvider = "odbc";
    dt.ConnectionString = "notimportant";
    dt.CommandText = "notimportant";

    dt.Initialize(null);

    Assert.Equal(typeof(System.Data.Odbc.OdbcConnection), dt.ConnectionType);
}
/// <summary>
/// End-to-end test against a real SQL Server: creates the NLogTest database, runs
/// the target's install-command to create the table, logs one event carrying a GUID
/// event-property parameter, and verifies the row landed. Skipped on Travis; the
/// database is dropped in the finally block regardless of outcome.
/// </summary>
[Fact]
public void SqlServer_InstallAndLogMessage()
{
if (SqlServerTest.IsTravis())
{
Console.WriteLine("skipping test SqlServer_InstallAndLogMessage because we are running in Travis");
return;
}
// start from a clean slate in case a previous run left the database behind
SqlServerTest.TryDropDatabase();
try
{
SqlServerTest.CreateDatabase();
var connectionString = SqlServerTest.GetConnectionString();
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog xmlns='http://www.nlog-project.org/schemas/NLog.xsd'
xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' throwExceptions='true'>
<targets>
<target name='database' xsi:type='Database' connectionstring=""" + connectionString + @"""
commandText='insert into dbo.NLogSqlServerTest (Uid) values (@uid);'>
<parameter name='@uid' layout='${event-properties:uid}' />
<install-command ignoreFailures=""false""
text=""CREATE TABLE dbo.NLogSqlServerTest (
Id int NOT NULL IDENTITY(1,1) PRIMARY KEY CLUSTERED,
Uid uniqueidentifier NULL
);""/>
</target>
</targets>
<rules>
<logger name='*' writeTo='database' />
</rules>
</nlog>");
//install
InstallationContext context = new InstallationContext();
LogManager.Configuration.Install(context);
var tableCatalog = SqlServerTest.IssueScalarQuery(@"SELECT TABLE_CATALOG FROM INFORMATION_SCHEMA.TABLES
WHERE TABLE_SCHEMA = 'Dbo'
AND TABLE_NAME = 'NLogSqlServerTest'");
//check if table exists
Assert.Equal("NLogTest", tableCatalog);
var logger = LogManager.GetLogger("A");
var target = LogManager.Configuration.FindTargetByName<DatabaseTarget>("database");
var uid = new Guid("e7c648b4-3508-4df2-b001-753148659d6d");
var logEvent = new LogEventInfo(LogLevel.Info, null, null);
logEvent.Properties["uid"] = uid;
logger.Log(logEvent);
// exactly one row, carrying the uid from the event properties
var count = SqlServerTest.IssueScalarQuery("SELECT count(1) FROM dbo.NLogSqlServerTest");
Assert.Equal(1, count);
var result = SqlServerTest.IssueScalarQuery("SELECT Uid FROM dbo.NLogSqlServerTest");
Assert.Equal(uid, result);
}
finally
{
SqlServerTest.TryDropDatabase();
}
}
/// <summary>
/// When the named connection string carries a providerName attribute
/// ('System.Data.SqlClient'), Initialize() must resolve the provider factory from
/// it (SqlClientFactory) without DBProvider being set.
/// </summary>
/// <remarks>
/// Fix: this method was missing the [Fact] attribute, so xUnit never discovered or
/// executed it even though it is written as a test (compare the sibling
/// DontRequireProviderNameInAppConfig directly below).
/// </remarks>
[Fact]
public void GetProviderNameFromAppConfig()
{
    LogManager.ThrowExceptions = true;
    var databaseTarget = new DatabaseTarget()
    {
        Name = "myTarget",
        ConnectionStringName = "test_connectionstring_with_providerName",
        CommandText = "notimportant",
    };
    databaseTarget.ConnectionStringsSettings = new ConnectionStringSettingsCollection()
    {
        new ConnectionStringSettings("test_connectionstring_without_providerName", "some connectionstring"),
        new ConnectionStringSettings("test_connectionstring_with_providerName", "some connectionstring",
            "System.Data.SqlClient"),
    };
    databaseTarget.Initialize(null);
    // the factory type must come from the providerName attribute
    Assert.NotNull(databaseTarget.ProviderFactory);
    Assert.Equal(typeof(System.Data.SqlClient.SqlClientFactory), databaseTarget.ProviderFactory.GetType());
}
/// <summary>
/// A connection string entry without a providerName attribute must still work when
/// DBProvider is set explicitly on the target: the factory resolves from DBProvider
/// ('System.Data.SqlClient' -> SqlClientFactory).
/// </summary>
[Fact]
public void DontRequireProviderNameInAppConfig()
{
LogManager.ThrowExceptions = true;
var databaseTarget = new DatabaseTarget()
{
Name = "myTarget",
ConnectionStringName = "test_connectionstring_without_providerName",
CommandText = "notimportant",
DBProvider = "System.Data.SqlClient"
};
databaseTarget.ConnectionStringsSettings = new ConnectionStringSettingsCollection()
{
new ConnectionStringSettings("test_connectionstring_without_providerName", "some connectionstring"),
new ConnectionStringSettings("test_connectionstring_with_providerName", "some connectionstring",
"System.Data.SqlClient"),
};
databaseTarget.Initialize(null);
Assert.NotNull(databaseTarget.ProviderFactory);
Assert.Equal(typeof(System.Data.SqlClient.SqlClientFactory), databaseTarget.ProviderFactory.GetType());
}
/// <summary>
/// Setting the obsolete 'usetransactions' attribute (either value) must emit an
/// internal-log warning mentioning 'obsolete' and 'usetransactions'; omitting the
/// attribute must not. The internal log is captured via InternalLogger.LogWriter.
/// </summary>
/// <param name="property">Raw attribute text injected into the target element (may be empty).</param>
/// <param name="printWarning">Whether the obsolete-usage warning is expected.</param>
[Theory]
[InlineData("usetransactions='false'", true)]
[InlineData("usetransactions='true'", true)]
[InlineData("", false)]
public void WarningForObsoleteUseTransactions(string property, bool printWarning)
{
LoggingConfiguration c = CreateConfigurationFromString(string.Format(@"
<nlog ThrowExceptions='true'>
<targets>
<target type='database' {0} name='t1' commandtext='fake sql' connectionstring='somewhere' />
</targets>
<rules>
<logger name='*' writeTo='t1'>
</logger>
</rules>
</nlog>", property));
StringWriter writer1 = new StringWriter()
{
NewLine = "\n"
};
InternalLogger.LogWriter = writer1;
var t = c.FindTargetByName<DatabaseTarget>("t1");
t.Initialize(null);
var internalLog = writer1.ToString();
if (printWarning)
{
Assert.Contains("obsolete", internalLog, StringComparison.InvariantCultureIgnoreCase);
Assert.Contains("usetransactions", internalLog, StringComparison.InvariantCultureIgnoreCase);
}
else
{
Assert.DoesNotContain("obsolete", internalLog, StringComparison.InvariantCultureIgnoreCase);
Assert.DoesNotContain("usetransactions", internalLog, StringComparison.InvariantCultureIgnoreCase);
}
}
/// <summary>
/// Compares the expected log against MockDbConnection.Log, normalizing line
/// endings on both sides so CRLF/LF differences can never fail the comparison.
/// </summary>
private static void AssertLog(string expectedLog)
{
    string expected = expectedLog.Replace("\r", "");
    string actual = MockDbConnection.Log.Replace("\r", "");
    Assert.Equal(expected, actual);
}
/// <summary>
/// Helper: wires the target to the mock provider, writes one event for logger
/// 'Logger1' (so ${logger} renders as 'Logger1'), and returns the connection
/// string MockDbConnection saw on Open().
/// </summary>
private string GetConnectionString(DatabaseTarget dt)
{
MockDbConnection.ClearLog();
dt.DBProvider = typeof(MockDbConnection).AssemblyQualifiedName;
dt.CommandText = "NotImportant";
var exceptions = new List<Exception>();
dt.Initialize(null);
dt.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "Logger1", "msg1").WithContinuation(exceptions.Add));
dt.Close();
return MockDbConnection.LastConnectionString;
}
/// <summary>
/// IDbConnection stub that records Open/Close/Dispose calls (and command activity,
/// via MockDbCommand) into a static Log string using "\r\n" separators.
/// Open() remembers the last connection string and throws when it is 'cannotconnect'.
/// Static state means tests must call ClearLog() before use and must not run in parallel.
/// </summary>
public class MockDbConnection : IDbConnection
{
public static string Log { get; private set; }
public static string LastConnectionString { get; private set; }
public MockDbConnection()
{
}
public MockDbConnection(string connectionString)
{
this.ConnectionString = connectionString;
}
public IDbTransaction BeginTransaction(IsolationLevel il)
{
throw new NotImplementedException();
}
public IDbTransaction BeginTransaction()
{
throw new NotImplementedException();
}
public void ChangeDatabase(string databaseName)
{
throw new NotImplementedException();
}
public void Close()
{
AddToLog("Close()");
}
public string ConnectionString { get; set; }
public int ConnectionTimeout
{
get { throw new NotImplementedException(); }
}
public IDbCommand CreateCommand()
{
// commands log through their owning connection
return new MockDbCommand() { Connection = this };
}
public string Database
{
get { throw new NotImplementedException(); }
}
public void Open()
{
LastConnectionString = this.ConnectionString;
AddToLog("Open('{0}').", this.ConnectionString);
// sentinel connection string used by DatabaseExceptionTest1
if (this.ConnectionString == "cannotconnect")
{
throw new InvalidOperationException("Cannot open fake database.");
}
}
public ConnectionState State
{
get { throw new NotImplementedException(); }
}
public void Dispose()
{
AddToLog("Dispose()");
}
public static void ClearLog()
{
Log = string.Empty;
}
// Appends one formatted line to the shared log; used by commands/parameters too.
public void AddToLog(string message, params object[] args)
{
if (args.Length > 0)
{
message = string.Format(CultureInfo.InvariantCulture, message, args);
}
Log += message + "\r\n";
}
}
/// <summary>
/// IDbCommand stub: logs CreateParameter (with a running index) and ExecuteNonQuery
/// (with the command text) through its MockDbConnection. ExecuteNonQuery throws when
/// the connection string is the 'cannotexecute' sentinel. Everything else is
/// intentionally NotImplemented.
/// </summary>
private class MockDbCommand : IDbCommand
{
// running index handed to each created parameter, for the "Parameter #n" log lines
private int paramCount;
private IDataParameterCollection parameters;
public MockDbCommand()
{
this.parameters = new MockParameterCollection(this);
}
public void Cancel()
{
throw new NotImplementedException();
}
public string CommandText { get; set; }
public int CommandTimeout { get; set; }
public CommandType CommandType { get; set; }
public IDbConnection Connection { get; set; }
public IDbDataParameter CreateParameter()
{
((MockDbConnection)this.Connection).AddToLog("CreateParameter({0})", this.paramCount);
return new MockDbParameter(this, paramCount++);
}
public int ExecuteNonQuery()
{
((MockDbConnection)this.Connection).AddToLog("ExecuteNonQuery: {0}", this.CommandText);
// sentinel used by DatabaseExceptionTest2/3
if (this.Connection.ConnectionString == "cannotexecute")
{
throw new InvalidOperationException("Failure during ExecuteNonQuery");
}
return 0;
}
public IDataReader ExecuteReader(CommandBehavior behavior)
{
throw new NotImplementedException();
}
public IDataReader ExecuteReader()
{
throw new NotImplementedException();
}
public object ExecuteScalar()
{
throw new NotImplementedException();
}
public IDataParameterCollection Parameters
{
get { return parameters; }
}
public void Prepare()
{
throw new NotImplementedException();
}
public IDbTransaction Transaction { get; set; }
public UpdateRowSource UpdatedRowSource
{
get { throw new NotImplementedException(); }
set { throw new NotImplementedException(); }
}
public void Dispose()
{
throw new NotImplementedException();
}
}
/// <summary>
/// IDbDataParameter stub: every setter the DatabaseTarget touches (Direction, Name,
/// Value, Precision, Scale, Size) writes a "Parameter #n ..." line to the shared
/// connection log, which is how the parameter tests assert assignment order.
/// ToString() returns "Parameter #n" so Add() can log the instance directly.
/// </summary>
private class MockDbParameter : IDbDataParameter
{
private readonly MockDbCommand mockDbCommand;
private readonly int paramId;
private string parameterName;
private object parameterValue;
private DbType parameterType;
public MockDbParameter(MockDbCommand mockDbCommand, int paramId)
{
this.mockDbCommand = mockDbCommand;
this.paramId = paramId;
}
public DbType DbType
{
get { return this.parameterType; }
set { this.parameterType = value; }
}
public ParameterDirection Direction
{
get { throw new NotImplementedException(); }
set
{
((MockDbConnection)mockDbCommand.Connection).AddToLog("Parameter #{0} Direction={1}", paramId,
value);
}
}
public bool IsNullable
{
get { throw new NotImplementedException(); }
}
public string ParameterName
{
get { return this.parameterName; }
set
{
((MockDbConnection)mockDbCommand.Connection).AddToLog("Parameter #{0} Name={1}", paramId, value);
this.parameterName = value;
}
}
public string SourceColumn
{
get { throw new NotImplementedException(); }
set { throw new NotImplementedException(); }
}
public DataRowVersion SourceVersion
{
get { throw new NotImplementedException(); }
set { throw new NotImplementedException(); }
}
public object Value
{
get { return this.parameterValue; }
set
{
((MockDbConnection)mockDbCommand.Connection).AddToLog("Parameter #{0} Value={1}", paramId, value);
this.parameterValue = value;
}
}
public byte Precision
{
get { throw new NotImplementedException(); }
set
{
((MockDbConnection)mockDbCommand.Connection).AddToLog("Parameter #{0} Precision={1}", paramId,
value);
}
}
public byte Scale
{
get { throw new NotImplementedException(); }
set
{
((MockDbConnection)mockDbCommand.Connection).AddToLog("Parameter #{0} Scale={1}", paramId, value);
}
}
public int Size
{
get { throw new NotImplementedException(); }
set
{
((MockDbConnection)mockDbCommand.Connection).AddToLog("Parameter #{0} Size={1}", paramId, value);
}
}
public override string ToString()
{
return "Parameter #" + this.paramId;
}
}
/// <summary>
/// IDataParameterCollection stub: only Add() is implemented — it logs
/// "Add Parameter {value}" (which, combined with MockDbParameter.ToString(),
/// yields "Add Parameter Parameter #n") and always returns index 0. All other
/// members are intentionally NotImplemented because the target never calls them.
/// </summary>
private class MockParameterCollection : IDataParameterCollection
{
private readonly MockDbCommand command;
public MockParameterCollection(MockDbCommand command)
{
this.command = command;
}
public IEnumerator GetEnumerator()
{
throw new NotImplementedException();
}
public void CopyTo(Array array, int index)
{
throw new NotImplementedException();
}
public int Count
{
get { throw new NotImplementedException(); }
}
public object SyncRoot
{
get { throw new NotImplementedException(); }
}
public bool IsSynchronized
{
get { throw new NotImplementedException(); }
}
public int Add(object value)
{
((MockDbConnection)command.Connection).AddToLog("Add Parameter {0}", value);
return 0;
}
public bool Contains(object value)
{
throw new NotImplementedException();
}
public void Clear()
{
throw new NotImplementedException();
}
public int IndexOf(object value)
{
throw new NotImplementedException();
}
public void Insert(int index, object value)
{
throw new NotImplementedException();
}
public void Remove(object value)
{
throw new NotImplementedException();
}
public void RemoveAt(int index)
{
throw new NotImplementedException();
}
object IList.this[int index]
{
get { throw new NotImplementedException(); }
set { throw new NotImplementedException(); }
}
public bool IsReadOnly
{
get { throw new NotImplementedException(); }
}
public bool IsFixedSize
{
get { throw new NotImplementedException(); }
}
public bool Contains(string parameterName)
{
throw new NotImplementedException();
}
public int IndexOf(string parameterName)
{
throw new NotImplementedException();
}
public void RemoveAt(string parameterName)
{
throw new NotImplementedException();
}
object IDataParameterCollection.this[string parameterName]
{
get { throw new NotImplementedException(); }
set { throw new NotImplementedException(); }
}
}
/// <summary>
/// DbProviderFactory stub that ProviderFactoryInitTest/ConnectionStringNameInitTest
/// resolve by the provider name 'MockDb'; hands out MockDbConnection2 instances so
/// Open calls can be counted via static state.
/// </summary>
public class MockDbFactory : DbProviderFactory
{
// shared singleton resolved by provider-invariant name
public static readonly MockDbFactory Instance = new MockDbFactory();
public override DbConnection CreateConnection()
{
return new MockDbConnection2();
}
}
/// <summary>
/// DbConnection stub created by MockDbFactory. Open() only records the connection
/// string and increments a static counter — the tests assert OpenCount and
/// LastOpenConnectionString. All other members are intentionally NotImplemented.
/// Static counters mean tests using this class must not run in parallel.
/// </summary>
public class MockDbConnection2 : DbConnection
{
public static int OpenCount { get; private set; }
public static string LastOpenConnectionString { get; private set; }
protected override DbTransaction BeginDbTransaction(IsolationLevel isolationLevel)
{
throw new NotImplementedException();
}
public override void ChangeDatabase(string databaseName)
{
throw new NotImplementedException();
}
public override void Close()
{
throw new NotImplementedException();
}
public override string ConnectionString { get; set; }
protected override DbCommand CreateDbCommand()
{
throw new NotImplementedException();
}
public override string DataSource
{
get { throw new NotImplementedException(); }
}
public override string Database
{
get { throw new NotImplementedException(); }
}
public override void Open()
{
LastOpenConnectionString = this.ConnectionString;
OpenCount++;
}
public override string ServerVersion
{
get { throw new NotImplementedException(); }
}
public override ConnectionState State
{
get { throw new NotImplementedException(); }
}
}
/// <summary>
/// Helpers for the SQL Server integration test: picks a connection string from
/// app settings, AppVeyor's SQL 2012 service, or LocalDB; creates/drops the
/// NLogTest database; and issues ad-hoc commands/scalar queries against it.
/// </summary>
private static class SqlServerTest
{
static SqlServerTest()
{
}
// Resolution order: explicit app setting, else the CI/local NLogTest string.
public static string GetConnectionString()
{
var connectionString = ConfigurationManager.AppSettings["SqlServerTestConnectionString"];
if (String.IsNullOrWhiteSpace(connectionString))
{
connectionString = IsAppVeyor() ? AppVeyorConnectionStringNLogTest : LocalConnectionStringNLogTest;
}
return connectionString;
}
/// <summary>
/// AppVeyor connectionstring for SQL 2012, see https://www.appveyor.com/docs/services-databases/
/// </summary>
private const string AppVeyorConnectionStringMaster =
@"Server=(local)\SQL2012SP1;Database=master;User ID=sa;Password=Password12!";
private const string AppVeyorConnectionStringNLogTest =
@"Server=(local)\SQL2012SP1;Database=NLogTest;User ID=sa;Password=Password12!";
private const string LocalConnectionStringMaster =
@"Data Source=(localdb)\MSSQLLocalDB; Database=master; Integrated Security=True;";
private const string LocalConnectionStringNLogTest =
@"Data Source=(localdb)\MSSQLLocalDB; Database=NLogTest; Integrated Security=True;";
// Creates the NLogTest database; must run against the master catalog.
public static void CreateDatabase()
{
var connectionString = GetMasterConnectionString();
IssueCommand("CREATE DATABASE NLogTest", connectionString);
}
// True when db_id('NLogTest') resolves (non-null, non-DBNull).
public static bool NLogTestDatabaseExists()
{
var connectionString = GetMasterConnectionString();
var dbId = IssueScalarQuery("select db_id('NLogTest')", connectionString);
return dbId != null && dbId != DBNull.Value;
}
private static string GetMasterConnectionString()
{
return IsAppVeyor() ? AppVeyorConnectionStringMaster : LocalConnectionStringMaster;
}
/// <summary>
/// Are we running on AppVeyor?
/// </summary>
/// <returns></returns>
private static bool IsAppVeyor()
{
var val = Environment.GetEnvironmentVariable("APPVEYOR");
return val != null && val.Equals("true", StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Are we running on Travis?
/// </summary>
/// <returns></returns>
public static bool IsTravis()
{
var val = Environment.GetEnvironmentVariable("TRAVIS");
return val != null && val.Equals("true", StringComparison.OrdinalIgnoreCase);
}
// Executes a non-query command; with no explicit connection string it switches
// to the NLogTest database after opening.
public static void IssueCommand(string commandString, string connectionString = null)
{
using (var connection = new SqlConnection(connectionString ?? GetConnectionString()))
{
connection.Open();
if (connectionString == null)
connection.ChangeDatabase("NLogTest");
using (var command = new SqlCommand(commandString, connection))
{
command.ExecuteNonQuery();
}
}
}
// Executes a scalar query; same NLogTest-switch behavior as IssueCommand.
public static object IssueScalarQuery(string commandString, string connectionString = null)
{
using (var connection = new SqlConnection(connectionString ?? GetConnectionString()))
{
connection.Open();
if (connectionString == null)
connection.ChangeDatabase("NLogTest");
using (var command = new SqlCommand(commandString, connection))
{
var scalar = command.ExecuteScalar();
return scalar;
}
}
}
/// <summary>
/// Try dropping. IF fail, not exception
/// </summary>
public static bool TryDropDatabase()
{
try
{
if (NLogTestDatabaseExists())
{
// kick out other sessions so the drop cannot block
var connectionString = GetMasterConnectionString();
IssueCommand(
"ALTER DATABASE [NLogTest] SET SINGLE_USER WITH ROLLBACK IMMEDIATE; DROP DATABASE NLogTest;",
connectionString);
return true;
}
return false;
}
catch (Exception)
{
//ignore
return false;
}
}
}
}
}
#endif
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Mocument.Model;
using Newtonsoft.Json;
using Salient.HTTPArchiveModel;
namespace Mocument.DataAccess
{
/// <summary>
/// for controlled testing only. not threadsafe and is filebased so extreme load is not advised.
/// ABSOLUTELY NOT SUITABLE FOR MULTIPROCESS ACCESS
/// </summary>
public class JsonFileStore : IStore
{
    private readonly bool _deleteFileOnDispose;
    private readonly string _filepath;
    private readonly object _lockObject;
    private bool _disposed;
    private List<Tape> _list;

    /// <summary>
    /// Creates a store backed by a fresh temp file; the file is deleted on Dispose.
    /// </summary>
    public JsonFileStore()
    {
        _lockObject = new object();
        _filepath = Path.GetTempFileName();
        _deleteFileOnDispose = true;
        EnsureDatabase();
    }

    /// <summary>
    /// Creates a store backed by the given file; the file is flushed (not deleted)
    /// on Dispose.
    /// </summary>
    public JsonFileStore(string filepath)
    {
        _lockObject = new object();
        _filepath = filepath;
        EnsureDatabase();
    }

    #region IStore Members

    /// <summary>Removes every tape and persists the now-empty list.</summary>
    public void ClearDatabase()
    {
        lock (_lockObject)
        {
            _list.Clear();
            WriteJson();
        }
    }

    /// <summary>
    /// Loads the backing file; if it is missing or does not contain valid JSON,
    /// starts over with an empty list and writes it out so the file is valid
    /// from then on.
    /// </summary>
    public void EnsureDatabase()
    {
        lock (_lockObject)
        {
            try
            {
                ReadJson();
            }
            catch (Exception)
            {
                // deliberate best-effort recovery: any read/parse failure resets the store
                _list = new List<Tape>();
                WriteJson();
            }
        }
    }

    /// <summary>Deletes the tape with the given id; throws if the id is unknown.</summary>
    public void Delete(string id)
    {
        lock (_lockObject)
        {
            if (string.IsNullOrEmpty(id))
            {
                throw new ArgumentNullException("id");
            }
            Tape existing = _list.FirstOrDefault(t => t.Id == id);
            if (existing == null)
            {
                throw new Exception("cannot find key");
            }
            _list.Remove(existing);
            WriteJson();
        }
    }

    /// <summary>
    /// Replaces the stored tape with the same Id by a deep copy of the given tape.
    /// Throws if the Id is missing or not present in the store.
    /// </summary>
    public void Update(Tape tape)
    {
        lock (_lockObject)
        {
            // Fix: previously threw ArgumentNullException("id") although this
            // method has no 'id' parameter; also guard against a null tape,
            // which used to surface as a NullReferenceException.
            if (tape == null)
            {
                throw new ArgumentNullException("tape");
            }
            if (string.IsNullOrEmpty(tape.Id))
            {
                throw new ArgumentNullException("tape", "tape.Id must be a non-empty string");
            }
            Tape existing = _list.FirstOrDefault(t => t.Id == tape.Id);
            if (existing == null)
            {
                throw new Exception("cannot find key");
            }
            _list.Remove(existing);
            _list.Add(Clone(tape));
            WriteJson();
        }
    }

    /// <summary>
    /// Adds a deep copy of the given tape; throws on a missing Id or a duplicate key.
    /// </summary>
    public void Insert(Tape tape)
    {
        lock (_lockObject)
        {
            // Fix: same wrong parameter name ("id") and missing null guard as Update.
            if (tape == null)
            {
                throw new ArgumentNullException("tape");
            }
            if (string.IsNullOrEmpty(tape.Id))
            {
                throw new ArgumentNullException("tape", "tape.Id must be a non-empty string");
            }
            if (Select(tape.Id) != null)
            {
                throw new Exception("cannot insert duplicate key");
            }
            _list.Add(Clone(tape));
            WriteJson();
        }
    }

    /// <summary>Returns a deep copy of the tape with the given id, or null.</summary>
    public Tape Select(string id)
    {
        if (string.IsNullOrEmpty(id))
        {
            throw new ArgumentNullException("id");
        }
        lock (_lockObject)
        {
            return List().FirstOrDefault(t => t.Id == id);
        }
    }

    /// <summary>Returns deep copies of all tapes, never references into the store.</summary>
    public List<Tape> List()
    {
        lock (_lockObject)
        {
            // we want to return cloned tapes, not references to those in list.
            // so short of writing clone logic, just roundtrip the list through json serialization
            return JsonConvert.DeserializeObject<List<Tape>>(JsonConvert.SerializeObject(_list, Formatting.Indented));
        }
    }

    /// <summary>Returns deep copies of the tapes matching the selector.</summary>
    public List<Tape> List(Func<Tape, bool> selector)
    {
        lock (_lockObject)
        {
            return List().Where(selector).ToList();
        }
    }

    /// <summary>
    /// Runs each comparer (or the DefaultEntryComparer when none are supplied)
    /// against the tape's entries and returns the first match, or null.
    /// Throws if the tape id is unknown.
    /// </summary>
    public Entry MatchEntry(string tapeId, Entry entryToMatch, IEntryComparer[] comparers = null)
    {
        lock (_lockObject)
        {
            Tape tape = Select(tapeId);
            if (tape == null)
            {
                // Fix: an unknown tapeId used to fall through to a
                // NullReferenceException on tape.log below.
                throw new Exception("cannot find key");
            }
            // provide a default comparer
            if (comparers == null || comparers.Length == 0)
            {
                comparers = new IEntryComparer[] { new DefaultEntryComparer() };
            }
            List<Entry> potentialMatches = tape.log.entries;
            return (
                from entryComparer in comparers
                select entryComparer.FindMatch(potentialMatches, entryToMatch)
                into result
                where result.Match != null
                select result.Match)
                .FirstOrDefault();
        }
    }

    /// <summary>Replaces the whole store from a JSON array and persists it.</summary>
    public void FromJson(string json)
    {
        lock (_lockObject)
        {
            _list = JsonConvert.DeserializeObject<List<Tape>>(json);
            if (_list == null)
            {
                throw new Exception("invalid json");
            }
            WriteJson();
        }
    }

    /// <summary>Serializes the current store contents as indented JSON.</summary>
    public string ToJson()
    {
        lock (_lockObject)
        {
            return JsonConvert.SerializeObject(_list, Formatting.Indented);
        }
    }

    /// <summary>
    /// Idempotent: deletes the temp file (default ctor) or flushes the store
    /// (explicit-path ctor).
    /// </summary>
    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        if (_deleteFileOnDispose)
        {
            File.Delete(_filepath);
        }
        else
        {
            WriteJson();
        }
    }

    #endregion

    // Deep-copies a tape by round-tripping it through JSON serialization,
    // in lieu of complicated cloning.
    private static Tape Clone(Tape tape)
    {
        return JsonConvert.DeserializeObject<Tape>(JsonConvert.SerializeObject(tape, Formatting.Indented));
    }

    // Loads the backing file via FromJson (which also rewrites it).
    private void ReadJson()
    {
        lock (_lockObject)
        {
            FromJson(File.ReadAllText(_filepath));
        }
    }

    // Persists the current list to the backing file.
    private void WriteJson()
    {
        lock (_lockObject)
        {
            File.WriteAllText(_filepath, ToJson());
        }
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
using System;
using System.Collections;
using System.Collections.Specialized;
using GenStrings;
namespace System.Collections.Specialized.Tests
{
public class ContainsValueStrTests
{
public const int MAX_LEN = 50; // max length of random strings
[Fact]
public void Test01()
{
IntlStrings intl;
StringDictionary sd;
string ind;
// simple string values
string[] values =
{
"",
" ",
"a",
"aa",
"text",
" spaces",
"1",
"$%^#",
"2222222222222222222222222",
System.DateTime.Today.ToString(),
Int32.MaxValue.ToString()
};
// keys for simple string values
string[] keys =
{
"zero",
"one",
" ",
"",
"aa",
"1",
System.DateTime.Today.ToString(),
"$%^#",
Int32.MaxValue.ToString(),
" spaces",
"2222222222222222222222222"
};
int cnt = 0; // Count
// initialize IntStrings
intl = new IntlStrings();
// [] StringDictionary is constructed as expected
//-----------------------------------------------------------------
sd = new StringDictionary();
// [] check for empty dictionary
//
for (int i = 0; i < values.Length; i++)
{
if (sd.ContainsValue(values[i]))
{
Assert.False(true, string.Format("Error, returned true for empty dictionary", i));
}
}
// [] add simple strings and verify ContainsValue()
//
cnt = values.Length;
for (int i = 0; i < cnt; i++)
{
sd.Add(keys[i], values[i]);
}
if (sd.Count != cnt)
{
Assert.False(true, string.Format("Error, count is {0} instead of {1}", sd.Count, cnt));
}
for (int i = 0; i < cnt; i++)
{
// verify that collection contains all added items
//
if (!sd.ContainsValue(values[i]))
{
Assert.False(true, string.Format("Error, collection doesn't contain value \"{1}\"", i, values[i]));
}
if (!sd.ContainsKey(keys[i]))
{
Assert.False(true, string.Format("Error, collection doesn't contain key \"{1}\"", i, keys[i]));
}
}
//
// Intl strings
// [] add Intl strings and verify ContainsValue()
//
int len = values.Length;
string[] intlValues = new string[len * 2];
// fill array with unique strings
//
for (int i = 0; i < len * 2; i++)
{
string val = intl.GetRandomString(MAX_LEN);
while (Array.IndexOf(intlValues, val) != -1)
val = intl.GetRandomString(MAX_LEN);
intlValues[i] = val;
}
Boolean caseInsensitive = false;
for (int i = 0; i < len * 2; i++)
{
if (intlValues[i].Length != 0 && intlValues[i].ToLowerInvariant() == intlValues[i].ToUpperInvariant())
caseInsensitive = true;
}
//
// will use first half of array as values and second half as keys
//
for (int i = 0; i < len; i++)
{
cnt = sd.Count;
sd.Add(intlValues[i + len], intlValues[i]);
if (sd.Count != cnt + 1)
{
Assert.False(true, string.Format("Error, count is {1} instead of {2}", i, sd.Count, cnt + 1));
}
// verify that collection contains newly added item
//
if (!sd.ContainsValue(intlValues[i]))
{
Assert.False(true, string.Format("Error, collection doesn't contain value of new item", i));
}
if (!sd.ContainsKey(intlValues[i + len]))
{
Assert.False(true, string.Format("Error, collection doesn't contain key of new item", i));
}
// access the item
//
ind = intlValues[i + len];
if (String.Compare(sd[ind], intlValues[i]) != 0)
{
Assert.False(true, string.Format("Error, returned item \"{1}\" instead of \"{2}\"", i, sd[ind], intlValues[i]));
}
}
//
// add null string
// [] add null string with non-null key and verify ContainsValue()
//
cnt = sd.Count;
string k = "keykey";
sd.Add(k, null);
if (sd.Count != cnt + 1)
{
Assert.False(true, string.Format("Error, count is {1} instead of {2}", sd.Count, cnt + 1));
}
// verify that collection contains newly added item
//
if (!sd.ContainsValue(null))
{
Assert.False(true, string.Format("Error, dictionary doesn't contain value null"));
}
//
// [] Case sensitivity: search should be case-sensitive
//
sd.Clear();
if (sd.Count != 0)
{
Assert.False(true, string.Format("Error, count is {1} instead of {2} after Clear()", sd.Count, 0));
}
string[] intlValuesLower = new string[len * 2];
// fill array with unique strings
//
for (int i = 0; i < len * 2; i++)
{
intlValues[i] = intlValues[i].ToUpperInvariant();
}
for (int i = 0; i < len * 2; i++)
{
intlValuesLower[i] = intlValues[i].ToLowerInvariant();
}
sd.Clear();
//
// will use first half of array as values and second half as keys
//
for (int i = 0; i < len; i++)
{
cnt = sd.Count;
sd.Add(intlValues[i + len], intlValues[i]); // adding uppercase strings
if (sd.Count != cnt + 1)
{
Assert.False(true, string.Format("Error, count is {1} instead of {2}", i, sd.Count, cnt + 1));
}
// verify that collection contains newly added uppercase item
//
if (!sd.ContainsValue(intlValues[i]))
{
Assert.False(true, string.Format("Error, collection doesn't contain value of new item", i));
}
if (!sd.ContainsKey(intlValues[i + len]))
{
Assert.False(true, string.Format("Error, collection doesn't contain key of new item", i));
}
// verify that collection doesn't contains lowercase item
//
if (!caseInsensitive && sd.ContainsValue(intlValuesLower[i]))
{
Assert.False(true, string.Format("Error, collection contains lowercase value of new item", i));
}
// key is case insensitive
if (!sd.ContainsKey(intlValuesLower[i + len]))
{
Assert.False(true, string.Format("Error, collection doesn't contain lowercase key of new item", i));
}
}
}
}
}
| |
using UnityEngine;
using UnityEditor;
using System;
using System.Linq;
using System.IO;
using System.Collections.Generic;
using System.Reflection;
using V1=AssetBundleGraph;
using Model=UnityEngine.AssetGraph.DataModel.Version2;
namespace UnityEngine.AssetGraph {
[CustomNode("Create Assets/Generate Asset", 51)]
public class AssetGenerator : Node {
[System.Serializable]
public class GeneratorEntry
{
public string m_name;
public string m_id;
public SerializableMultiTargetInstance m_instance;
public GeneratorEntry(string name, Model.ConnectionPointData point) {
m_name = name;
m_id = point.Id;
m_instance = new SerializableMultiTargetInstance();
}
public GeneratorEntry(string name, SerializableMultiTargetInstance i, Model.ConnectionPointData point) {
m_name = name;
m_id = point.Id;
m_instance = new SerializableMultiTargetInstance(i);
}
}
public enum OutputOption : int {
CreateInCacheDirectory,
CreateInSelectedDirectory,
RelativeToSourceAsset
}
[SerializeField] private List<GeneratorEntry> m_entries;
[SerializeField] private string m_defaultOutputPointId;
[SerializeField] private SerializableMultiTargetString m_outputDir;
[SerializeField] private SerializableMultiTargetInt m_outputOption;
private GeneratorEntry m_removingEntry;
public static readonly string kCacheDirName = "GeneratedAssets";
public override string ActiveStyle {
get {
return "node 4 on";
}
}
public override string InactiveStyle {
get {
return "node 4";
}
}
public override string Category {
get {
return "Create";
}
}
public override void Initialize(Model.NodeData data) {
m_entries = new List<GeneratorEntry>();
m_outputDir = new SerializableMultiTargetString();
m_outputOption = new SerializableMultiTargetInt((int)OutputOption.CreateInCacheDirectory);
data.AddDefaultInputPoint();
var point = data.AddDefaultOutputPoint();
m_defaultOutputPointId = point.Id;
}
public override Node Clone(Model.NodeData newData) {
var newNode = new AssetGenerator();
newData.AddDefaultInputPoint();
newData.AddDefaultOutputPoint();
var point = newData.AddDefaultOutputPoint();
newNode.m_defaultOutputPointId = point.Id;
newNode.m_outputDir = new SerializableMultiTargetString(m_outputDir);
newNode.m_outputOption = new SerializableMultiTargetInt(m_outputOption);
newNode.m_entries = new List<GeneratorEntry>();
foreach(var s in m_entries) {
newNode.AddEntryForClone (newData, s);
}
return newNode;
}
private void DrawGeneratorSetting(
GeneratorEntry entry,
NodeGUI node,
AssetReferenceStreamManager streamManager,
NodeGUIEditor editor,
Action onValueChanged)
{
var generator = entry.m_instance.Get<IAssetGenerator>(editor.CurrentEditingGroup);
using (new EditorGUILayout.VerticalScope(GUI.skin.box)) {
var newName = EditorGUILayout.TextField ("Name", entry.m_name);
if (newName != entry.m_name) {
using(new RecordUndoScope("Change Name", node, true)) {
entry.m_name = newName;
UpdateGeneratorEntry (node, entry);
onValueChanged();
}
}
var map = AssetGeneratorUtility.GetAttributeAssemblyQualifiedNameMap();
if(map.Count > 0) {
using(new GUILayout.HorizontalScope()) {
GUILayout.Label("AssetGenerator");
var guiName = AssetGeneratorUtility.GetGUIName(entry.m_instance.ClassName);
if (GUILayout.Button(guiName, "Popup", GUILayout.MinWidth(150f))) {
var builders = map.Keys.ToList();
if(builders.Count > 0) {
NodeGUI.ShowTypeNamesMenu(guiName, builders, (string selectedGUIName) =>
{
using(new RecordUndoScope("Change AssetGenerator class", node, true)) {
generator = AssetGeneratorUtility.CreateGenerator(selectedGUIName);
entry.m_instance.Set(editor.CurrentEditingGroup, generator);
onValueChanged();
}
}
);
}
}
MonoScript s = TypeUtility.LoadMonoScript(entry.m_instance.ClassName);
using(new EditorGUI.DisabledScope(s == null)) {
if(GUILayout.Button("Edit", GUILayout.Width(50))) {
AssetDatabase.OpenAsset(s, 0);
}
}
}
} else {
if(!string.IsNullOrEmpty(entry.m_instance.ClassName)) {
EditorGUILayout.HelpBox(
string.Format(
"Your AssetGenerator script {0} is missing from assembly. Did you delete script?", entry.m_instance.ClassName), MessageType.Info);
} else {
string[] menuNames = Model.Settings.GUI_TEXT_MENU_GENERATE_ASSETGENERATOR.Split('/');
EditorGUILayout.HelpBox(
string.Format(
"You need to create at least one AssetGenerator script to use this node. To start, select {0}>{1}>{2} menu and create new script from template.",
menuNames[1],menuNames[2], menuNames[3]
), MessageType.Info);
}
}
GUILayout.Space(10f);
editor.DrawPlatformSelector(node);
using (new EditorGUILayout.VerticalScope()) {
var disabledScope = editor.DrawOverrideTargetToggle(node, entry.m_instance.ContainsValueOf(editor.CurrentEditingGroup), (bool enabled) => {
if(enabled) {
entry.m_instance.CopyDefaultValueTo(editor.CurrentEditingGroup);
} else {
entry.m_instance.Remove(editor.CurrentEditingGroup);
}
onValueChanged();
});
using (disabledScope) {
if (generator != null) {
Action onChangedAction = () => {
using(new RecordUndoScope("Change AssetGenerator Setting", node)) {
entry.m_instance.Set(editor.CurrentEditingGroup, generator);
onValueChanged();
}
};
generator.OnInspectorGUI(onChangedAction);
}
}
}
GUILayout.Space (4);
using (new EditorGUILayout.HorizontalScope ()) {
GUILayout.FlexibleSpace ();
if (GUILayout.Button ("Remove")) {
m_removingEntry = entry;
}
}
}
}
public override void OnInspectorGUI(NodeGUI node, AssetReferenceStreamManager streamManager, NodeGUIEditor editor, Action onValueChanged) {
EditorGUILayout.HelpBox("Generate Asset: Generate new asset from incoming asset.", MessageType.Info);
editor.UpdateNodeName(node);
GUILayout.Space(8f);
editor.DrawPlatformSelector(node);
using (new EditorGUILayout.VerticalScope()) {
var disabledScope = editor.DrawOverrideTargetToggle(node, m_outputOption.ContainsValueOf(editor.CurrentEditingGroup), (bool enabled) => {
if(enabled) {
m_outputOption[editor.CurrentEditingGroup] = m_outputOption.DefaultValue;
m_outputDir[editor.CurrentEditingGroup] = m_outputDir.DefaultValue;
} else {
m_outputOption.Remove(editor.CurrentEditingGroup);
m_outputDir.Remove(editor.CurrentEditingGroup);
}
onValueChanged();
});
using (disabledScope) {
OutputOption opt = (OutputOption)m_outputOption[editor.CurrentEditingGroup];
var newOption = (OutputOption)EditorGUILayout.EnumPopup("Output Option", opt);
if(newOption != opt) {
using(new RecordUndoScope("Change Output Option", node, true)){
m_outputOption[editor.CurrentEditingGroup] = (int)newOption;
onValueChanged();
}
opt = newOption;
}
if (opt != OutputOption.CreateInCacheDirectory) {
EditorGUILayout.HelpBox ("When you are not creating assets under cache directory, make sure your generators are not overwriting assets each other.", MessageType.Info);
}
using (new EditorGUI.DisabledScope (opt == OutputOption.CreateInCacheDirectory)) {
var newDirPath = m_outputDir[editor.CurrentEditingGroup];
if (opt == OutputOption.CreateInSelectedDirectory) {
newDirPath = editor.DrawFolderSelector ("Output Directory", "Select Output Folder",
m_outputDir [editor.CurrentEditingGroup],
Application.dataPath,
(string folderSelected) => {
string basePath = Application.dataPath;
if (basePath == folderSelected) {
folderSelected = string.Empty;
} else {
var index = folderSelected.IndexOf (basePath);
if (index >= 0) {
folderSelected = folderSelected.Substring (basePath.Length + index);
if (folderSelected.IndexOf ('/') == 0) {
folderSelected = folderSelected.Substring (1);
}
}
}
return folderSelected;
}
);
} else if (opt == OutputOption.RelativeToSourceAsset) {
newDirPath = EditorGUILayout.TextField("Relative Path", m_outputDir[editor.CurrentEditingGroup]);
}
if (newDirPath != m_outputDir[editor.CurrentEditingGroup]) {
using(new RecordUndoScope("Change Output Directory", node, true)){
m_outputDir[editor.CurrentEditingGroup] = newDirPath;
onValueChanged();
}
}
var dirPath = Path.Combine (Application.dataPath, m_outputDir [editor.CurrentEditingGroup]);
if (opt == OutputOption.CreateInSelectedDirectory &&
!string.IsNullOrEmpty(m_outputDir [editor.CurrentEditingGroup]) &&
!Directory.Exists (dirPath))
{
using (new EditorGUILayout.HorizontalScope()) {
EditorGUILayout.LabelField(m_outputDir[editor.CurrentEditingGroup] + " does not exist.");
if(GUILayout.Button("Create directory")) {
Directory.CreateDirectory(dirPath);
AssetDatabase.Refresh ();
}
}
EditorGUILayout.Space();
string parentDir = Path.GetDirectoryName(m_outputDir[editor.CurrentEditingGroup]);
if(Directory.Exists(parentDir)) {
EditorGUILayout.LabelField("Available Directories:");
string[] dirs = Directory.GetDirectories(parentDir);
foreach(string s in dirs) {
EditorGUILayout.LabelField(s);
}
}
EditorGUILayout.Space();
}
if (opt == OutputOption.CreateInSelectedDirectory || opt == OutputOption.CreateInCacheDirectory) {
var outputDir = PrepareOutputDirectory (BuildTargetUtility.GroupToTarget(editor.CurrentEditingGroup), node.Data, null);
using (new EditorGUI.DisabledScope (!Directory.Exists (outputDir)))
{
using (new EditorGUILayout.HorizontalScope ()) {
GUILayout.FlexibleSpace ();
if (GUILayout.Button ("Highlight in Project Window", GUILayout.Width (180f))) {
var folder = AssetDatabase.LoadMainAssetAtPath (outputDir);
EditorGUIUtility.PingObject (folder);
}
}
}
}
}
}
}
GUILayout.Space(8f);
foreach (var s in m_entries) {
DrawGeneratorSetting (s, node, streamManager, editor, onValueChanged);
GUILayout.Space (10f);
}
if (m_removingEntry != null) {
using (new RecordUndoScope ("Remove Generator", node)) {
RemoveGeneratorEntry (node, m_removingEntry);
m_removingEntry = null;
onValueChanged ();
}
}
GUILayout.Space (8);
if (GUILayout.Button ("Add Generator")) {
using (new RecordUndoScope ("Add Generator", node)) {
AddEntry (node);
onValueChanged ();
}
}
}
public override void OnContextMenuGUI(GenericMenu menu) {
foreach (var s in m_entries) {
MonoScript script = TypeUtility.LoadMonoScript(s.m_instance.ClassName);
if(script != null) {
menu.AddItem(
new GUIContent(string.Format("Edit Script({0})", script.name)),
false,
() => {
AssetDatabase.OpenAsset(script, 0);
}
);
}
}
}
public override void Prepare (BuildTarget target,
Model.NodeData node,
IEnumerable<PerformGraph.AssetGroups> incoming,
IEnumerable<Model.ConnectionData> connectionsToOutput,
PerformGraph.Output Output)
{
ValidateAssetGenerator(node, target, incoming,
() => {
throw new NodeException("AssetGenerator is not specified.", "Select generator from inspector.", node);
},
() => {
throw new NodeException("Failed to create AssetGenerator from settings.", "Fix AssetGenerator settings from inspector", node);
},
(AssetReference badAsset) => {
throw new NodeException(string.Format("Generator not create asset from source : Source: {0}", badAsset.importFrom),
"Remove source asset from node input.", node);
},
(AssetReference badAsset) => {
throw new NodeException(string.Format("Can not import incoming asset {0}.", badAsset.fileNameAndExtension),
"Remove source asset from node input.", node);
}
);
if(incoming == null) {
return;
}
if(connectionsToOutput == null || Output == null) {
return;
}
var allOutput = new Dictionary<string, Dictionary<string, List<AssetReference>>>();
foreach(var outPoints in node.OutputPoints) {
allOutput[outPoints.Id] = new Dictionary<string, List<AssetReference>>();
}
var defaultOutputCond = connectionsToOutput.Where (c => c.FromNodeConnectionPointId == m_defaultOutputPointId);
Model.ConnectionData defaultOutput = null;
if (defaultOutputCond.Any ()) {
defaultOutput = defaultOutputCond.First ();
}
foreach(var ag in incoming) {
if (defaultOutput != null) {
Output(defaultOutput, ag.assetGroups);
}
foreach(var groupKey in ag.assetGroups.Keys) {
foreach(var a in ag.assetGroups [groupKey]) {
foreach (var entry in m_entries) {
var assetOutputDir = PrepareOutputDirectory(target, node, a);
var generator = entry.m_instance.Get<IAssetGenerator>(target);
UnityEngine.Assertions.Assert.IsNotNull(generator);
var newItem = FileUtility.PathCombine (assetOutputDir, GetGeneratorIdForSubPath(target, entry), a.fileName + generator.GetAssetExtension (a));
var output = allOutput[entry.m_id];
if(!output.ContainsKey(groupKey)) {
output[groupKey] = new List<AssetReference>();
}
output[groupKey].Add(AssetReferenceDatabase.GetReferenceWithType (newItem, generator.GetAssetType(a)));
}
}
}
}
foreach(var dst in connectionsToOutput) {
if(allOutput.ContainsKey(dst.FromNodeConnectionPointId)) {
Output(dst, allOutput[dst.FromNodeConnectionPointId]);
}
}
}
public override void Build (BuildTarget target,
Model.NodeData node,
IEnumerable<PerformGraph.AssetGroups> incoming,
IEnumerable<Model.ConnectionData> connectionsToOutput,
PerformGraph.Output Output,
Action<Model.NodeData, string, float> progressFunc)
{
if(incoming == null) {
return;
}
bool isAnyAssetGenerated = false;
foreach (var entry in m_entries) {
var generator = entry.m_instance.Get<IAssetGenerator>(target);
UnityEngine.Assertions.Assert.IsNotNull(generator);
foreach(var ag in incoming) {
foreach(var assets in ag.assetGroups.Values) {
foreach (var a in assets) {
var assetOutputDir = PrepareOutputDirectory (target, node, a);
var assetSaveDir = FileUtility.PathCombine (assetOutputDir, GetGeneratorIdForSubPath(target, entry));
var assetSavePath = FileUtility.PathCombine (assetSaveDir, a.fileName + generator.GetAssetExtension(a));
if(!File.Exists(assetSavePath) || AssetGenerateInfo.DoesAssetNeedRegenerate(entry, node, target, a))
{
if (!Directory.Exists (assetSaveDir)) {
Directory.CreateDirectory (assetSaveDir);
}
if (!generator.GenerateAsset (a, assetSavePath)) {
throw new AssetGraphException(string.Format("{0} :Failed to generate asset for {1}",
node.Name, entry.m_name));
}
if (!File.Exists (assetSavePath)) {
throw new AssetGraphException(string.Format("{0} :{1} returned success, but generated asset not found.",
node.Name, entry.m_name));
}
AssetProcessEventRecord.GetRecord ().LogModify (AssetDatabase.AssetPathToGUID(assetSavePath));
isAnyAssetGenerated = true;
LogUtility.Logger.LogFormat(LogType.Log, "{0} is (re)generating Asset:{1} with {2}({3})", node.Name, assetSavePath,
AssetGeneratorUtility.GetGUIName(entry.m_instance.ClassName),
AssetGeneratorUtility.GetVersion(entry.m_instance.ClassName));
if(progressFunc != null) progressFunc(node, string.Format("Creating {0}", assetSavePath), 0.5f);
AssetGenerateInfo.SaveAssetGenerateInfo(entry, node, target, a);
}
}
}
}
}
if (isAnyAssetGenerated) {
AssetDatabase.Refresh ();
}
}
public void AddEntry(NodeGUI node) {
var point = node.Data.AddOutputPoint("");
var newEntry = new GeneratorEntry("", point);
m_entries.Add(newEntry);
UpdateGeneratorEntry(node, newEntry);
}
// For Clone
public void AddEntryForClone(Model.NodeData data, GeneratorEntry src) {
var point = data.AddOutputPoint(src.m_name);
var newEntry = new GeneratorEntry(src.m_name, src.m_instance, point);
m_entries.Add(newEntry);
UpdateGeneratorEntry(null, data, newEntry);
}
public void RemoveGeneratorEntry(NodeGUI node, GeneratorEntry e) {
m_entries.Remove(e);
var point = GetConnectionPoint (node.Data, e);
node.Data.OutputPoints.Remove(point);
// event must raise to remove connection associated with point
NodeGUIUtility.NodeEventHandler(new NodeEvent(NodeEvent.EventType.EVENT_CONNECTIONPOINT_DELETED, node, Vector2.zero, point));
}
public Model.ConnectionPointData GetConnectionPoint(Model.NodeData n, GeneratorEntry e) {
Model.ConnectionPointData p = n.OutputPoints.Find(v => v.Id == e.m_id);
UnityEngine.Assertions.Assert.IsNotNull(p);
return p;
}
public void UpdateGeneratorEntry(NodeGUI node, GeneratorEntry e) {
UpdateGeneratorEntry (node, node.Data, e);
}
public void UpdateGeneratorEntry(NodeGUI node, Model.NodeData data, GeneratorEntry e) {
Model.ConnectionPointData p = node.Data.OutputPoints.Find(v => v.Id == e.m_id);
UnityEngine.Assertions.Assert.IsNotNull(p);
p.Label = e.m_name;
if (node != null) {
// event must raise to propagate change to connection associated with point
NodeGUIUtility.NodeEventHandler(new NodeEvent(NodeEvent.EventType.EVENT_CONNECTIONPOINT_LABELCHANGED, node, Vector2.zero, GetConnectionPoint(node.Data, e)));
}
}
private string GetGeneratorIdForSubPath(BuildTarget target, GeneratorEntry e) {
var outputOption = (OutputOption)m_outputOption [target];
if(outputOption == OutputOption.CreateInCacheDirectory) {
return e.m_id;
}
return string.Empty;
}
private string PrepareOutputDirectory(BuildTarget target, Model.NodeData node, AssetReference a) {
var outputOption = (OutputOption)m_outputOption [target];
if (outputOption == OutputOption.CreateInSelectedDirectory) {
return Path.Combine("Assets", m_outputDir [target]);
}
if(outputOption == OutputOption.CreateInCacheDirectory) {
return FileUtility.EnsureCacheDirExists (target, node, kCacheDirName);
}
var sourceDir = Path.GetDirectoryName (a.importFrom);
return FileUtility.PathCombine (sourceDir, m_outputDir [target]);
}
public void ValidateAssetGenerator (
Model.NodeData node,
BuildTarget target,
IEnumerable<PerformGraph.AssetGroups> incoming,
Action noGeneratorData,
Action failedToCreateGenerator,
Action<AssetReference> canNotGenerateAsset,
Action<AssetReference> canNotImportAsset
) {
foreach (var entry in m_entries) {
var generator = entry.m_instance.Get<IAssetGenerator>(target);
if(null == generator ) {
failedToCreateGenerator();
}
generator.OnValidate ();
if(null != generator && null != incoming) {
foreach(var ag in incoming) {
foreach(var assets in ag.assetGroups.Values) {
foreach (var a in assets) {
if(string.IsNullOrEmpty(a.importFrom)) {
canNotImportAsset(a);
continue;
}
if(!generator.CanGenerateAsset(a)) {
canNotGenerateAsset(a);
}
}
}
}
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net.Test.Common;
using System.Text;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;
namespace System.Net.Sockets.Tests
{
public class UnixDomainSocketTest
{
private readonly ITestOutputHelper _log;
public UnixDomainSocketTest(ITestOutputHelper output)
{
_log = TestLogging.GetInstance();
}
[OuterLoop] // TODO: Issue #11345
[Fact]
[PlatformSpecific(PlatformID.Windows)]
public void Socket_CreateUnixDomainSocket_Throws_OnWindows()
{
SocketException e = Assert.Throws<SocketException>(() => new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified));
Assert.Equal(SocketError.AddressFamilyNotSupported, e.SocketErrorCode);
}
[OuterLoop] // TODO: Issue #11345
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public async Task Socket_ConnectAsyncUnixDomainSocketEndPoint_Success()
{
string path = null;
SocketTestServer server = null;
UnixDomainSocketEndPoint endPoint = null;
for (int attempt = 0; attempt < 5; attempt++)
{
path = GetRandomNonExistingFilePath();
endPoint = new UnixDomainSocketEndPoint(path);
try
{
server = SocketTestServer.SocketTestServerFactory(SocketImplementationType.Async, endPoint, ProtocolType.Unspecified);
break;
}
catch (SocketException)
{
//Path selection is contingent on a successful Bind().
//If it fails, the next iteration will try another path.
}
}
try
{
Assert.NotNull(server);
SocketAsyncEventArgs args = new SocketAsyncEventArgs();
args.RemoteEndPoint = endPoint;
args.Completed += (s, e) => ((TaskCompletionSource<bool>)e.UserToken).SetResult(true);
var complete = new TaskCompletionSource<bool>();
args.UserToken = complete;
using (Socket sock = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified))
{
Assert.True(sock.ConnectAsync(args));
await complete.Task;
Assert.Equal(SocketError.Success, args.SocketError);
Assert.Null(args.ConnectByNameError);
}
}
finally
{
server.Dispose();
try { File.Delete(path); }
catch { }
}
}
[OuterLoop] // TODO: Issue #11345
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public async Task Socket_ConnectAsyncUnixDomainSocketEndPoint_NotServer()
{
string path = GetRandomNonExistingFilePath();
var endPoint = new UnixDomainSocketEndPoint(path);
try
{
SocketAsyncEventArgs args = new SocketAsyncEventArgs();
args.RemoteEndPoint = endPoint;
args.Completed += (s, e) => ((TaskCompletionSource<bool>)e.UserToken).SetResult(true);
var complete = new TaskCompletionSource<bool>();
args.UserToken = complete;
using (Socket sock = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified))
{
bool willRaiseEvent = sock.ConnectAsync(args);
if (willRaiseEvent)
{
await complete.Task;
}
Assert.Equal(SocketError.AddressNotAvailable, args.SocketError);
}
}
finally
{
try { File.Delete(path); }
catch { }
}
}
[OuterLoop] // TODO: Issue #11345
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public void Socket_SendReceive_Success()
{
string path = GetRandomNonExistingFilePath();
var endPoint = new UnixDomainSocketEndPoint(path);
try
{
using (var server = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified))
using (var client = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified))
{
server.Bind(endPoint);
server.Listen(1);
client.Connect(endPoint);
using (Socket accepted = server.Accept())
{
var data = new byte[1];
for (int i = 0; i < 10; i++)
{
data[0] = (byte)i;
accepted.Send(data);
data[0] = 0;
Assert.Equal(1, client.Receive(data));
Assert.Equal(i, data[0]);
}
}
}
}
finally
{
try { File.Delete(path); }
catch { }
}
}
[OuterLoop] // TODO: Issue #11345
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public async Task Socket_SendReceiveAsync_Success()
{
string path = GetRandomNonExistingFilePath();
var endPoint = new UnixDomainSocketEndPoint(path);
try
{
using (var server = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified))
using (var client = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified))
{
server.Bind(endPoint);
server.Listen(1);
await client.ConnectAsync(endPoint);
using (Socket accepted = await server.AcceptAsync())
{
var data = new byte[1];
for (int i = 0; i < 10; i++)
{
data[0] = (byte)i;
await accepted.SendAsync(new ArraySegment<byte>(data), SocketFlags.None);
data[0] = 0;
Assert.Equal(1, await client.ReceiveAsync(new ArraySegment<byte>(data), SocketFlags.None));
Assert.Equal(i, data[0]);
}
}
}
}
finally
{
try { File.Delete(path); }
catch { }
}
}
[OuterLoop] // TODO: Issue #11345
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public void ConcurrentSendReceive()
{
using (Socket server = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified))
using (Socket client = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified))
{
const int Iters = 2048;
byte[] sendData = new byte[Iters];
byte[] receiveData = new byte[sendData.Length];
new Random().NextBytes(sendData);
string path = GetRandomNonExistingFilePath();
server.Bind(new UnixDomainSocketEndPoint(path));
server.Listen(1);
Task<Socket> acceptTask = server.AcceptAsync();
client.Connect(new UnixDomainSocketEndPoint(path));
acceptTask.Wait();
Socket accepted = acceptTask.Result;
Task[] writes = new Task[Iters];
Task<int>[] reads = new Task<int>[Iters];
for (int i = 0; i < Iters; i++)
{
writes[i] = client.SendAsync(new ArraySegment<byte>(sendData, i, 1), SocketFlags.None);
}
for (int i = 0; i < Iters; i++)
{
reads[i] = accepted.ReceiveAsync(new ArraySegment<byte>(receiveData, i, 1), SocketFlags.None);
}
Task.WaitAll(writes);
Task.WaitAll(reads);
Assert.Equal(sendData, receiveData);
}
}
private static string GetRandomNonExistingFilePath()
{
string result;
do
{
result = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
}
while (File.Exists(result));
return result;
}
private sealed class UnixDomainSocketEndPoint : EndPoint
{
private const AddressFamily EndPointAddressFamily = AddressFamily.Unix;
private static readonly Encoding s_pathEncoding = Encoding.UTF8;
private static readonly int s_nativePathOffset = 2; // = offsetof(struct sockaddr_un, sun_path). It's the same on Linux and OSX
private static readonly int s_nativePathLength = 91; // sockaddr_un.sun_path at http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/sys_un.h.html, -1 for terminator
private static readonly int s_nativeAddressSize = s_nativePathOffset + s_nativePathLength;
private readonly string _path;
private readonly byte[] _encodedPath;
public UnixDomainSocketEndPoint(string path)
{
if (path == null)
{
throw new ArgumentNullException(nameof(path));
}
_path = path;
_encodedPath = s_pathEncoding.GetBytes(_path);
if (path.Length == 0 || _encodedPath.Length > s_nativePathLength)
{
throw new ArgumentOutOfRangeException(nameof(path));
}
}
internal UnixDomainSocketEndPoint(SocketAddress socketAddress)
{
if (socketAddress == null)
{
throw new ArgumentNullException(nameof(socketAddress));
}
if (socketAddress.Family != EndPointAddressFamily ||
socketAddress.Size > s_nativeAddressSize)
{
throw new ArgumentOutOfRangeException(nameof(socketAddress));
}
if (socketAddress.Size > s_nativePathOffset)
{
_encodedPath = new byte[socketAddress.Size - s_nativePathOffset];
for (int i = 0; i < _encodedPath.Length; i++)
{
_encodedPath[i] = socketAddress[s_nativePathOffset + i];
}
_path = s_pathEncoding.GetString(_encodedPath, 0, _encodedPath.Length);
}
else
{
_encodedPath = Array.Empty<byte>();
_path = string.Empty;
}
}
public override SocketAddress Serialize()
{
var result = new SocketAddress(AddressFamily.Unix, s_nativeAddressSize);
Debug.Assert(_encodedPath.Length + s_nativePathOffset <= result.Size, "Expected path to fit in address");
for (int index = 0; index < _encodedPath.Length; index++)
{
result[s_nativePathOffset + index] = _encodedPath[index];
}
result[s_nativePathOffset + _encodedPath.Length] = 0; // path must be null-terminated
return result;
}
public override EndPoint Create(SocketAddress socketAddress) => new UnixDomainSocketEndPoint(socketAddress);
public override AddressFamily AddressFamily => EndPointAddressFamily;
public override string ToString() => _path;
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace NewsPlus.Api.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of elements produced for generated arrays, collections, and dictionaries.
        internal const int DefaultCollectionSize = 2;
        // Stateful generator for primitives/simple types; its internal counter makes
        // successive samples distinct ("sample string 1", "sample string 2", ...).
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            // Fresh reference map per top-level call; it is threaded through the recursion
            // to break circular object graphs (see GenerateComplexObject).
            return GenerateObject(type, new Dictionary<Type, object>());
        }
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            // Dispatch is order-sensitive: simple types first, then arrays, then generics
            // (which cover Nullable/KeyValuePair/Tuple/IDictionary<,>/collections),
            // then the non-generic collection interfaces, and finally POCO fallback.
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }
                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IDictionary))
                {
                    // Concrete stand-in for the non-generic dictionary interface.
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    // Concrete stand-in for the non-generic collection interfaces.
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }
                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }
            // Non-public types (and anything else unhandled) are not generated.
            return null;
        }
        /// <summary>
        /// Handles all generic types: Nullable, KeyValuePair, Tuple, the generic
        /// collection/dictionary/queryable interfaces, and finally generic POCOs.
        /// </summary>
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }
            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }
            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }
            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Use List<T> as the concrete stand-in for the generic collection interfaces.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }
                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }
                // Concrete types implementing ICollection<T> are filled via their own Add method.
                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }
            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    // Use Dictionary<K,V> as the concrete stand-in for IDictionary<K,V>.
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }
                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }
            // Fall back to treating the closed generic type as a plain complex object.
            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }
            return null;
        }
        /// <summary>
        /// Builds a Tuple by generating each generic argument; returns null only when
        /// every component failed to generate.
        /// </summary>
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                // Stays true only while every component so far is null.
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }
        // True for the open generic definitions Tuple<T1> .. Tuple<T1..T7,TRest>.
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }
        /// <summary>
        /// Builds a KeyValuePair; returns null only when both the key and the value
        /// failed to generate.
        /// </summary>
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }
        /// <summary>
        /// Builds an array of <paramref name="size"/> generated elements; returns null
        /// when no element could be generated.
        /// </summary>
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }
        /// <summary>
        /// Populates a (generic or non-generic) dictionary via reflection, using the
        /// type's own Add/Contains(-Key) methods so derived dictionary types work too.
        /// </summary>
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            // Non-generic dictionaries (e.g. Hashtable) use object keys/values.
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }
            object result = Activator.CreateInstance(dictionaryType);
            // Prefer the non-generic names; fall back to the generic-dictionary spellings.
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }
                // Skip duplicate keys rather than throwing from Add.
                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }
            return result;
        }
        // Returns the first declared enum value, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }
        /// <summary>
        /// Builds an IQueryable (generic or not) by generating a backing list/array and
        /// wrapping it with Queryable.AsQueryable.
        /// </summary>
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Invoke the generic AsQueryable<T>(IEnumerable<T>) overload via reflection.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }
            return Queryable.AsQueryable((IEnumerable)list);
        }
        /// <summary>
        /// Instantiates the collection type and fills it through its Add method;
        /// returns null when no element could be generated.
        /// </summary>
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }
        // Nullable<T> is sampled by generating its underlying type (boxing makes them identical).
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }
        /// <summary>
        /// Instantiates a POCO via its default constructor and recursively populates its
        /// public settable properties and fields. Instances are cached per type so a
        /// circular object graph terminates.
        /// </summary>
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;
            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }
            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }
                result = defaultCtor.Invoke(new object[0]);
            }
            // Register before populating members so self-referencing members reuse this instance.
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }
        // Assigns generated values to all writable public instance properties.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }
        // Assigns generated values to all public instance fields.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }
        /// <summary>
        /// Table-driven generator for primitive/framework "simple" types. Each factory
        /// receives a monotonically increasing index so repeated samples differ.
        /// </summary>
        private class SimpleTypeObjectGenerator
        {
            // Incremented on every GenerateObject call; fed to the per-type factory.
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }
            // True when a factory exists for the exact runtime type.
            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }
            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
| |
// ***********************************************************************
// Copyright (c) 2008-2012 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Reflection;
using NUnit.Framework.Api;
using NUnit.Framework.Internal;
using NUnit.Framework.Extensibility;
using NUnit.Framework.Internal.Commands;
#if NET_4_5
using System.Threading.Tasks;
#endif
namespace NUnit.Framework.Builders
{
    /// <summary>
    /// Class to build either a parameterized or a normal NUnitTestMethod.
    /// There are four cases that the builder must deal with:
    /// 1. The method needs no params and none are provided
    /// 2. The method needs params and they are provided
    /// 3. The method needs no params but they are provided in error
    /// 4. The method needs params but they are not provided
    /// This could have been done using two different builders, but it
    /// turned out to be simpler to have just one. The BuildFrom method
    /// takes a different branch depending on whether any parameters are
    /// provided, but all four cases are dealt with in lower-level methods
    /// </summary>
    public class NUnitTestCaseBuilder : ITestCaseBuilder2
    {
        // Source of per-test random seeds (see BuildSingleTestMethod).
        private Randomizer randomizer;
        // Supplies parameter sets for parameterized tests (TestCase, TestCaseSource, etc.).
        private ITestCaseProvider testCaseProvider = new TestCaseProviders();
        /// <summary>
        /// Default no argument constructor for NUnitTestCaseBuilder
        /// </summary>
        public NUnitTestCaseBuilder()
        {
            randomizer = Randomizer.CreateRandomizer();
        }
        #region ITestCaseBuilder Methods
        /// <summary>
        /// Determines if the method can be used to build an NUnit test
        /// test method of some kind. The method must normally be marked
        /// with an identifying attribute for this to be true.
        ///
        /// Note that this method does not check the signature
        /// of the method for validity. If we did that here, any
        /// test methods with invalid signatures would be passed
        /// over in silence in the test run. Since we want such
        /// methods to be reported, the check for validity is made
        /// in BuildFrom rather than here.
        /// </summary>
        /// <param name="method">A MethodInfo for the method being used as a test method</param>
        /// <returns>True if the builder can create a test case from this method</returns>
        public bool CanBuildFrom(MethodInfo method)
        {
            return method.IsDefined(typeof(TestAttribute), false)
                || method.IsDefined(typeof(ITestCaseSource), false)
                || method.IsDefined(typeof(TheoryAttribute), false);
        }
        /// <summary>
        /// Build a Test from the provided MethodInfo. Depending on
        /// whether the method takes arguments and on the availability
        /// of test case data, this method may return a single test
        /// or a group of tests contained in a ParameterizedMethodSuite.
        /// </summary>
        /// <param name="method">The MethodInfo for which a test is to be built</param>
        /// <returns>A Test representing one or more method invocations</returns>
        public Test BuildFrom(MethodInfo method)
        {
            return BuildFrom(method, null);
        }
        #endregion
        #region ITestCaseBuilder2 Members
        /// <summary>
        /// Determines if the method can be used to build an NUnit test
        /// test method of some kind. The method must normally be marked
        /// with an identifying attribute for this to be true.
        ///
        /// Note that this method does not check the signature
        /// of the method for validity. If we did that here, any
        /// test methods with invalid signatures would be passed
        /// over in silence in the test run. Since we want such
        /// methods to be reported, the check for validity is made
        /// in BuildFrom rather than here.
        /// </summary>
        /// <param name="method">A MethodInfo for the method being used as a test method</param>
        /// <param name="parentSuite">The test suite being built, to which the new test would be added</param>
        /// <returns>True if the builder can create a test case from this method</returns>
        public bool CanBuildFrom(MethodInfo method, Test parentSuite)
        {
            // The parent suite plays no role in the eligibility decision.
            return CanBuildFrom(method);
        }
        /// <summary>
        /// Build a Test from the provided MethodInfo. Depending on
        /// whether the method takes arguments and on the availability
        /// of test case data, this method may return a single test
        /// or a group of tests contained in a ParameterizedMethodSuite.
        /// </summary>
        /// <param name="method">The MethodInfo for which a test is to be built</param>
        /// <param name="parentSuite">The test fixture being populated, or null</param>
        /// <returns>A Test representing one or more method invocations</returns>
        public Test BuildFrom(MethodInfo method, Test parentSuite)
        {
            return testCaseProvider.HasTestCasesFor(method)
                ? BuildParameterizedMethodSuite(method, parentSuite)
                : BuildSingleTestMethod(method, parentSuite, null);
        }
        #endregion
        #region Implementation
        /// <summary>
        /// Builds a ParameterizedMethodSuite containing individual
        /// test cases for each set of parameters provided for
        /// this method.
        /// </summary>
        /// <param name="method">The MethodInfo for which a test is to be built</param>
        /// <param name="parentSuite">The test suite for which the method is being built</param>
        /// <returns>A ParameterizedMethodSuite populated with test cases</returns>
        public Test BuildParameterizedMethodSuite(MethodInfo method, Test parentSuite)
        {
            ParameterizedMethodSuite methodSuite = new ParameterizedMethodSuite(method);
            methodSuite.ApplyAttributesToTest(method);
            foreach (ITestCaseData testcase in testCaseProvider.GetTestCasesFor(method))
            {
                // Normalize any ITestCaseData into a ParameterSet.
                ParameterSet parms = testcase as ParameterSet;
                if (parms == null)
                    parms = new ParameterSet(testcase);
                TestMethod test = BuildSingleTestMethod(method, parentSuite, parms);
                methodSuite.Add(test);
            }
            return methodSuite;
        }
        /// <summary>
        /// Builds a single NUnitTestMethod, either as a child of the fixture
        /// or as one of a set of test cases under a ParameterizedTestMethodSuite.
        /// </summary>
        /// <param name="method">The MethodInfo from which to construct the TestMethod</param>
        /// <param name="parentSuite">The suite or fixture to which the new test will be added</param>
        /// <param name="parms">The ParameterSet to be used, or null</param>
        /// <returns></returns>
        private TestMethod BuildSingleTestMethod(MethodInfo method, Test parentSuite, ParameterSet parms)
        {
            TestMethod testMethod = new TestMethod(method, parentSuite);
            testMethod.Seed = randomizer.Next();
            string prefix = method.ReflectedType.FullName;
            // Needed to give proper fullname to test in a parameterized fixture.
            // Without this, the arguments to the fixture are not included.
            if (parentSuite != null)
            {
                prefix = parentSuite.FullName;
                //testMethod.FullName = prefix + "." + testMethod.Name;
            }
            if (CheckTestMethodSignature(testMethod, parms))
            {
                // Attributes apply directly only for non-parameterized tests; for
                // parameterized cases the ParameterSet carries the settings instead.
                if (parms == null)
                    testMethod.ApplyAttributesToTest(method);
                foreach (ICommandDecorator decorator in method.GetCustomAttributes(typeof(ICommandDecorator), true))
                    testMethod.CustomDecorators.Add(decorator);
                ExpectedExceptionAttribute[] attributes =
                    (ExpectedExceptionAttribute[])method.GetCustomAttributes(typeof(ExpectedExceptionAttribute), false);
                if (attributes.Length > 0)
                {
                    ExpectedExceptionAttribute attr = attributes[0];
                    string handlerName = attr.Handler;
                    if (handlerName != null && GetExceptionHandler(testMethod.FixtureType, handlerName) == null)
                        MarkAsNotRunnable(
                            testMethod,
                            string.Format("The specified exception handler {0} was not found", handlerName));
                    testMethod.CustomDecorators.Add(new ExpectedExceptionDecorator(attr.ExceptionData));
                }
            }
            if (parms != null)
            {
                // NOTE: After the call to CheckTestMethodSignature, the Method
                // property of testMethod may no longer be the same as the
                // original MethodInfo, so we reassign it here.
                method = testMethod.Method;
                if (parms.TestName != null)
                {
                    testMethod.Name = parms.TestName;
                    testMethod.FullName = prefix + "." + parms.TestName;
                }
                else if (parms.OriginalArguments != null)
                {
                    string name = MethodHelper.GetDisplayName(method, parms.OriginalArguments);
                    testMethod.Name = name;
                    testMethod.FullName = prefix + "." + name;
                }
                parms.ApplyToTest(testMethod);
            }
            return testMethod;
        }
        #endregion
        #region Helper Methods
        /// <summary>
        /// Helper method that checks the signature of a TestMethod and
        /// any supplied parameters to determine if the test is valid.
        ///
        /// Currently, NUnitTestMethods are required to be public,
        /// non-abstract methods, either static or instance,
        /// returning void. They may take arguments but the values must
        /// be provided or the TestMethod is not considered runnable.
        ///
        /// Methods not meeting these criteria will be marked as
        /// non-runnable and the method will return false in that case.
        /// </summary>
        /// <param name="testMethod">The TestMethod to be checked. If it
        /// is found to be non-runnable, it will be modified.</param>
        /// <param name="parms">Parameters to be used for this test, or null</param>
        /// <returns>True if the method signature is valid, false if not</returns>
        private static bool CheckTestMethodSignature(TestMethod testMethod, ParameterSet parms)
        {
            if (testMethod.Method.IsAbstract)
            {
                return MarkAsNotRunnable(testMethod, "Method is abstract");
            }
            if (!testMethod.Method.IsPublic)
            {
                return MarkAsNotRunnable(testMethod, "Method is not public");
            }
#if NETCF
            // TODO: Get this to work
            if (testMethod.Method.IsGenericMethodDefinition)
            {
                return MarkAsNotRunnable(testMethod, "Generic test methods are not yet supported under .NET CF");
            }
#endif
            ParameterInfo[] parameters = testMethod.Method.GetParameters();
            int argsNeeded = parameters.Length;
            object[] arglist = null;
            int argsProvided = 0;
            if (parms != null)
            {
                testMethod.parms = parms;
                testMethod.RunState = parms.RunState;
                arglist = parms.Arguments;
                if (arglist != null)
                    argsProvided = arglist.Length;
                // A parameter set marked e.g. Ignored or NotRunnable short-circuits here.
                if (testMethod.RunState != RunState.Runnable)
                    return false;
            }
            Type returnType = testMethod.Method.ReturnType;
            if (returnType.Equals(typeof(void)))
            {
                if (parms != null && parms.HasExpectedResult)
                    return MarkAsNotRunnable(testMethod, "Method returning void cannot have an expected result");
            }
            else
            {
#if NET_4_5
                if (MethodHelper.IsAsyncMethod(testMethod.Method))
                {
                    // Async tests must return Task (no result) or Task<T> (result expected).
                    bool returnsGenericTask = returnType.IsGenericType && returnType.GetGenericTypeDefinition() == typeof(Task<>);
                    if (returnsGenericTask && (parms == null|| !parms.HasExpectedResult && !parms.ExceptionExpected))
                        return MarkAsNotRunnable(testMethod, "Async test method must have Task or void return type when no result is expected");
                    else if (!returnsGenericTask && parms != null && parms.HasExpectedResult)
                        return MarkAsNotRunnable(testMethod, "Async test method must have Task<T> return type when a result is expected");
                }
                else
#endif
                if (parms == null || !parms.HasExpectedResult && !parms.ExceptionExpected)
                    return MarkAsNotRunnable(testMethod, "Method has non-void return value, but no result is expected");
            }
            if (argsProvided > 0 && argsNeeded == 0)
            {
                return MarkAsNotRunnable(testMethod, "Arguments provided for method not taking any");
            }
            if (argsProvided == 0 && argsNeeded > 0)
            {
                return MarkAsNotRunnable(testMethod, "No arguments were provided");
            }
            if (argsProvided != argsNeeded)
            {
                return MarkAsNotRunnable(testMethod, "Wrong number of arguments provided");
            }
#if CLR_2_0 || CLR_4_0
#if !NETCF
            if (testMethod.Method.IsGenericMethodDefinition)
            {
                // Infer type arguments from the supplied argument values.
                Type[] typeArguments = GetTypeArgumentsForMethod(testMethod.Method, arglist);
                foreach (object o in typeArguments)
                    if (o == null)
                    {
                        return MarkAsNotRunnable(testMethod, "Unable to determine type arguments for method");
                    }
                testMethod.method = testMethod.Method.MakeGenericMethod(typeArguments);
                parameters = testMethod.Method.GetParameters();
            }
#endif
#endif
            if (arglist != null && parameters != null)
                TypeHelper.ConvertArgumentList(arglist, parameters);
            return true;
        }
#if CLR_2_0 || CLR_4_0
#if !NETCF
        // Maps each generic type parameter to the best common type of the argument
        // values supplied in the positions that use that parameter.
        private static Type[] GetTypeArgumentsForMethod(MethodInfo method, object[] arglist)
        {
            Type[] typeParameters = method.GetGenericArguments();
            Type[] typeArguments = new Type[typeParameters.Length];
            ParameterInfo[] parameters = method.GetParameters();
            for (int typeIndex = 0; typeIndex < typeArguments.Length; typeIndex++)
            {
                Type typeParameter = typeParameters[typeIndex];
                for (int argIndex = 0; argIndex < parameters.Length; argIndex++)
                {
                    if (parameters[argIndex].ParameterType.Equals(typeParameter))
                        typeArguments[typeIndex] = TypeHelper.BestCommonType(
                            typeArguments[typeIndex],
                            arglist[argIndex].GetType());
                }
            }
            return typeArguments;
        }
#endif
#endif
        // Looks up a named ExpectedException handler method on the fixture
        // (static or instance, any visibility) taking a single Exception argument.
        private static MethodInfo GetExceptionHandler(Type fixtureType, string name)
        {
            return fixtureType.GetMethod(
                name,
                BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic,
                null,
                new Type[] { typeof(System.Exception) },
                null);
        }
        // Marks the test not-runnable with the given reason; always returns false so
        // callers can "return MarkAsNotRunnable(...)".
        private static bool MarkAsNotRunnable(TestMethod testMethod, string reason)
        {
            testMethod.RunState = RunState.NotRunnable;
            testMethod.Properties.Set(PropertyNames.SkipReason, reason);
            return false;
        }
        #endregion
    }
}
| |
/*
Project Orleans Cloud Service SDK ver. 1.0
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the ""Software""), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Orleans.Runtime;
using Orleans.Concurrency;
namespace Orleans.Streams
{
internal class PersistentStreamPullingAgent : SystemTarget, IPersistentStreamPullingAgent
{
private static readonly IBackoffProvider DefaultBackoffProvider = new ExponentialBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(1));
private const int StreamInactivityCheckFrequency = 10;
private readonly string streamProviderName;
private readonly IStreamProviderRuntime providerRuntime;
private readonly IStreamPubSub pubSub;
private readonly Dictionary<StreamId, StreamConsumerCollection> pubSubCache;
private readonly SafeRandom safeRandom;
private readonly PersistentStreamProviderConfig config;
private readonly Logger logger;
private readonly CounterStatistic numReadMessagesCounter;
private readonly CounterStatistic numSentMessagesCounter;
private int numMessages;
private IQueueAdapter queueAdapter;
private IQueueCache queueCache;
private IQueueAdapterReceiver receiver;
private IStreamFailureHandler streamFailureHandler;
private DateTime lastTimeCleanedPubSubCache;
private IDisposable timer;
internal readonly QueueId QueueId;
private bool IsShutdown { get { return timer == null; } }
internal PersistentStreamPullingAgent(
GrainId id,
string strProviderName,
IStreamProviderRuntime runtime,
IStreamPubSub streamPubSub,
QueueId queueId,
PersistentStreamProviderConfig config)
: base(id, runtime.ExecutingSiloAddress, true)
{
if (runtime == null) throw new ArgumentNullException("runtime", "PersistentStreamPullingAgent: runtime reference should not be null");
if (strProviderName == null) throw new ArgumentNullException("runtime", "PersistentStreamPullingAgent: strProviderName should not be null");
QueueId = queueId;
streamProviderName = strProviderName;
providerRuntime = runtime;
pubSub = streamPubSub;
pubSubCache = new Dictionary<StreamId, StreamConsumerCollection>();
safeRandom = new SafeRandom();
this.config = config;
numMessages = 0;
logger = providerRuntime.GetLogger(GrainId + "-" + streamProviderName);
logger.Info((int)ErrorCode.PersistentStreamPullingAgent_01,
"Created {0} {1} for Stream Provider {2} on silo {3} for Queue {4}.",
GetType().Name, GrainId.ToDetailedString(), streamProviderName, Silo, QueueId.ToStringWithHashCode());
string statUniquePostfix = strProviderName + "." + QueueId;
numReadMessagesCounter = CounterStatistic.FindOrCreate(new StatisticName(StatisticNames.STREAMS_PERSISTENT_STREAM_NUM_READ_MESSAGES, statUniquePostfix));
numSentMessagesCounter = CounterStatistic.FindOrCreate(new StatisticName(StatisticNames.STREAMS_PERSISTENT_STREAM_NUM_SENT_MESSAGES, statUniquePostfix));
IntValueStatistic.FindOrCreate(new StatisticName(StatisticNames.STREAMS_PERSISTENT_STREAM_PUBSUB_CACHE_SIZE, statUniquePostfix), () => pubSubCache.Count);
IntValueStatistic.FindOrCreate(new StatisticName(StatisticNames.STREAMS_PERSISTENT_STREAM_QUEUE_CACHE_SIZE, statUniquePostfix), () => queueCache !=null ? queueCache.Size : 0);
}
/// <summary>
/// Take responsibility for a new queue that was assigned to me via a new range.
/// We first store the new queue in our internal data structure, try to initialize it and start a pumping timer.
/// ERROR HANDLING:
/// The responsibility to handle initialization and shutdown failures is inside the receiver code.
/// The agent will call Initialize once and log an error. It will not call initialize again.
/// The receiver itself may attempt later to recover from this error and do initialization again.
/// The agent will assume initialization has succeeded and will subsequently start calling pumping receive.
/// Same applies to shutdown.
/// </summary>
/// <param name="qAdapter">Queue adapter used to create this queue's receiver; its Value must not be null.</param>
/// <param name="queueAdapterCache">Optional cache factory; when its Value is non-null a queue cache is created for this queue.</param>
/// <param name="failureHandler">Handler notified of delivery/subscription failures; its Value must not be null.</param>
/// <returns>A task that completes when initialization has finished (successfully or not).</returns>
public async Task Initialize(Immutable<IQueueAdapter> qAdapter, Immutable<IQueueAdapterCache> queueAdapterCache, Immutable<IStreamFailureHandler> failureHandler)
{
if (qAdapter.Value == null) throw new ArgumentNullException("qAdapter", "Init: queueAdapter should not be null");
if (failureHandler.Value == null) throw new ArgumentNullException("failureHandler", "Init: streamDeliveryFailureHandler should not be null");
logger.Info((int)ErrorCode.PersistentStreamPullingAgent_02, "Init of {0} {1} on silo {2} for queue {3}.",
GetType().Name, GrainId.ToDetailedString(), Silo, QueueId.ToStringWithHashCode());
// Remove cast once we cleanup
queueAdapter = qAdapter.Value;
streamFailureHandler = failureHandler.Value;
lastTimeCleanedPubSubCache = DateTime.UtcNow;
// A failure to create the receiver is fatal for this agent: log and bail out without starting the pump.
try
{
receiver = queueAdapter.CreateReceiver(QueueId);
}
catch (Exception exc)
{
logger.Error((int)ErrorCode.PersistentStreamPullingAgent_02, "Exception while calling IQueueAdapter.CreateNewReceiver.", exc);
return;
}
// Cache creation failure is likewise fatal: log and bail out without starting the pump.
try
{
if (queueAdapterCache.Value != null)
{
queueCache = queueAdapterCache.Value.CreateQueueCache(QueueId);
}
}
catch (Exception exc)
{
logger.Error((int)ErrorCode.PersistentStreamPullingAgent_23, "Exception while calling IQueueAdapterCache.CreateQueueCache.", exc);
return;
}
try
{
var task = OrleansTaskExtentions.SafeExecute(() => receiver.Initialize(config.InitQueueTimeout));
task = task.LogException(logger, ErrorCode.PersistentStreamPullingAgent_03, String.Format("QueueAdapterReceiver {0} failed to Initialize.", QueueId.ToStringWithHashCode()));
await task;
}
catch
{
// Just ignore this exception and proceed as if Initialize has succeeded.
// We already logged individual exceptions for individual calls to Initialize. No need to log again.
}
// Setup a reader for a new receiver.
// Even if the receiver failed to initialise, treat it as OK and start pumping it. It's the receiver's responsibility to retry initialization.
// The first tick is randomly offset so agents do not all poll their queues in lock-step.
var randomTimerOffset = safeRandom.NextTimeSpan(config.GetQueueMsgsTimerPeriod);
timer = base.RegisterTimer(AsyncTimerCallback, QueueId, randomTimerOffset, config.GetQueueMsgsTimerPeriod);
logger.Info((int) ErrorCode.PersistentStreamPullingAgent_04, "Taking queue {0} under my responsibility.", QueueId.ToStringWithHashCode());
}
/// <summary>
/// Stops pulling from this agent's queue: disposes the pump timer, shuts down the receiver
/// (best-effort), then disposes all cursors and unregisters this agent as a producer from
/// every stream it served.
/// </summary>
public async Task Shutdown()
{
// Stop pulling from queues that are not in my range anymore.
logger.Info((int)ErrorCode.PersistentStreamPullingAgent_05, "Shutdown of {0} responsible for queue: {1}", GetType().Name, QueueId.ToStringWithHashCode());
// Null out the timer field before disposing so the timer callback observes shutdown and stops pumping.
if (timer != null)
{
var tmp = timer;
timer = null;
Utils.SafeExecute(tmp.Dispose);
}
try
{
var task = OrleansTaskExtentions.SafeExecute(() => receiver.Shutdown(config.InitQueueTimeout));
task = task.LogException(logger, ErrorCode.PersistentStreamPullingAgent_07,
String.Format("QueueAdapterReceiver {0} failed to Shutdown.", QueueId));
await task;
}
catch
{
// Just ignore this exception and proceed as if Shutdown has succeeded.
// We already logged individual exceptions for individual calls to Shutdown. No need to log again.
}
// Dispose each stream's cursors and unregister this agent as that stream's producer.
var unregisterTasks = new List<Task>();
var meAsStreamProducer = this.AsReference<IStreamProducerExtension>();
foreach (var tuple in pubSubCache)
{
tuple.Value.DisposeAll(logger);
var streamId = tuple.Key;
logger.Info((int)ErrorCode.PersistentStreamPullingAgent_06, "Unregister PersistentStreamPullingAgent Producer for stream {0}.", streamId);
unregisterTasks.Add(pubSub.UnregisterProducer(streamId, streamProviderName, meAsStreamProducer));
}
try
{
await Task.WhenAll(unregisterTasks);
}
catch (Exception exc)
{
logger.Warn((int)ErrorCode.PersistentStreamPullingAgent_08,
"Failed to unregister myself as stream producer to some streams taht used to be in my responsibility.", exc);
}
pubSubCache.Clear();
}
/// <summary>
/// Adds a subscriber for the given stream. The actual work is fired without awaiting
/// (awaiting could deadlock when an explicit consumer triggers this call), so this
/// method always returns a completed task.
/// </summary>
/// <param name="subscriptionId">Identifier of the subscription being added.</param>
/// <param name="streamId">Stream the consumer subscribes to.</param>
/// <param name="streamConsumer">Remote consumer extension events will be delivered to.</param>
/// <param name="filter">Optional per-consumer batch filter.</param>
public Task AddSubscriber(
    GuidId subscriptionId,
    StreamId streamId,
    IStreamConsumerExtension streamConsumer,
    IStreamFilterPredicateWrapper filter)
{
    if (logger.IsVerbose)
    {
        logger.Verbose((int)ErrorCode.PersistentStreamPullingAgent_09, "AddSubscriber: Stream={0} Subscriber={1}.", streamId, streamConsumer);
    }

    // cannot await here because explicit consumers trigger this call, so it could cause a deadlock.
    Task addTask = AddSubscriber_Impl(subscriptionId, streamId, streamConsumer, null, filter);
    addTask
        .LogException(logger, ErrorCode.PersistentStreamPullingAgent_26,
            String.Format("Failed to add subscription for stream {0}.", streamId))
        .Ignore();

    return TaskDone.Done;
}
// Called by rendezvous when new remote subscriber subscribes to this stream.
/// <summary>
/// Adds (or reuses) the consumer entry for the subscription, performs the handshake with the
/// consumer, and starts the delivery loop when the consumer was inactive.
/// </summary>
/// <param name="subscriptionId">Identifier of the subscription being added.</param>
/// <param name="streamId">Stream the consumer subscribes to.</param>
/// <param name="streamConsumer">Remote consumer extension to deliver events to.</param>
/// <param name="cacheToken">Token used to position the cache cursor when the consumer does not request one.</param>
/// <param name="filter">Optional filter applied before delivering batches to this consumer.</param>
private async Task AddSubscriber_Impl(
GuidId subscriptionId,
StreamId streamId,
IStreamConsumerExtension streamConsumer,
StreamSequenceToken cacheToken,
IStreamFilterPredicateWrapper filter)
{
if (IsShutdown) return;
// Create the per-stream consumer collection on the first subscription to this stream.
StreamConsumerCollection streamDataCollection;
if (!pubSubCache.TryGetValue(streamId, out streamDataCollection))
{
streamDataCollection = new StreamConsumerCollection(DateTime.UtcNow);
pubSubCache.Add(streamId, streamDataCollection);
}
// Reuse existing consumer state for this subscription if present, otherwise register it.
StreamConsumerData data;
if (!streamDataCollection.TryGetConsumer(subscriptionId, out data))
data = streamDataCollection.AddConsumer(subscriptionId, streamId, streamConsumer, filter);
if (await DoHandshakeWithConsumer(data, cacheToken))
{
if (data.State == StreamConsumerDataState.Inactive)
RunConsumerCursor(data, filter).Ignore(); // Start delivering events if not actively doing so
}
}
/// <summary>
/// Performs the handshake with a consumer: asks it (with retries) for the sequence token it
/// wants to start from and positions the consumer's cache cursor accordingly.
/// </summary>
/// <param name="consumerData">State of the consumer being handshaken.</param>
/// <param name="cacheToken">Fallback token used to position the cursor when the consumer does not request one.</param>
/// <returns>False when the handshake failed and the subscription was faulted; true otherwise.</returns>
private async Task<bool> DoHandshakeWithConsumer(
StreamConsumerData consumerData,
StreamSequenceToken cacheToken)
{
StreamHandshakeToken requestedHandshakeToken = null;
// if no cache, then we can't get a cursor and there is no reason to ask the consumer for a token.
if (queueCache != null)
{
Exception exceptionOccured = null;
try
{
// Retry the consumer indefinitely (bounded overall by MaxEventDeliveryTime) to obtain its token.
requestedHandshakeToken = await AsyncExecutorWithRetries.ExecuteWithRetries(
i => consumerData.StreamConsumer.GetSequenceToken(consumerData.SubscriptionId),
AsyncExecutorWithRetries.INFINITE_RETRIES,
(exception, i) => true,
config.MaxEventDeliveryTime,
DefaultBackoffProvider);
if (requestedHandshakeToken != null)
{
// The consumer asked for a specific position: drop any existing cursor and rebuild there.
consumerData.SafeDisposeCursor(logger);
consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid, consumerData.StreamId.Namespace, requestedHandshakeToken.Token);
}
else
{
if (consumerData.Cursor == null) // if the consumer did not ask for a specific token and we already have a cursor, just keep using it.
consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid, consumerData.StreamId.Namespace, cacheToken);
}
}
catch (Exception exception)
{
exceptionOccured = exception;
}
if (exceptionOccured != null)
{
// Handshake failed: run the error protocol; a faulted subscription aborts the handshake.
bool faultedSubscription = await ErrorProtocol(consumerData, exceptionOccured, false, null, requestedHandshakeToken != null ? requestedHandshakeToken.Token : null);
if (faultedSubscription) return false;
}
}
consumerData.LastToken = requestedHandshakeToken; // use what ever the consumer asked for as LastToken for next handshake (even if he asked for null).
// if we don't yet have a cursor (had errors in the handshake or data not available exc), get a cursor at the event that triggered that consumer subscription.
if (consumerData.Cursor == null && queueCache != null)
{
try
{
consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid, consumerData.StreamId.Namespace, cacheToken);
}
catch (Exception)
{
consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid, consumerData.StreamId.Namespace, null); // just in case last GetCacheCursor failed.
}
}
return true;
}
/// <summary>
/// Removes a subscriber from the given stream. The removal itself is synchronous,
/// so this always returns an already-completed task.
/// </summary>
/// <param name="subscriptionId">Identifier of the subscription to remove.</param>
/// <param name="streamId">Stream the subscription belongs to.</param>
public Task RemoveSubscriber(GuidId subscriptionId, StreamId streamId)
{
    // Delegate to the synchronous implementation and report completion immediately.
    RemoveSubscriber_Impl(subscriptionId, streamId);
    return TaskDone.Done;
}
/// <summary>
/// Synchronously removes a consumer from the per-stream collection; when the stream has
/// no consumers left, the stream entry itself is dropped from the pub-sub cache.
/// </summary>
/// <param name="subscriptionId">Identifier of the subscription to remove.</param>
/// <param name="streamId">Stream the subscription belongs to.</param>
public void RemoveSubscriber_Impl(GuidId subscriptionId, StreamId streamId)
{
    if (IsShutdown) return;

    StreamConsumerCollection consumers;
    if (!pubSubCache.TryGetValue(streamId, out consumers)) return;

    // Drop the consumer; log only when something was actually removed.
    bool wasRemoved = consumers.RemoveConsumer(subscriptionId, logger);
    if (wasRemoved && logger.IsVerbose)
    {
        logger.Verbose((int)ErrorCode.PersistentStreamPullingAgent_10, "Removed Consumer: subscription={0}, for stream {1}.", subscriptionId, streamId);
    }

    // When the last consumer goes away, forget the stream entirely.
    if (consumers.Count == 0)
    {
        pubSubCache.Remove(streamId);
    }
}
/// <summary>
/// Timer callback that pumps the queue: periodically cleans the pub-sub cache, acks purged
/// cache items back to the receiver, reads new message batches, adds them to the cache and
/// routes each batch group to its stream's consumer state (waking inactive cursors or
/// registering new streams). Loops until the queue is empty, shutdown, or the cache signals
/// back-pressure. Never throws — all errors are logged.
/// </summary>
/// <param name="state">The QueueId this agent pumps (passed when the timer was registered).</param>
private async Task AsyncTimerCallback(object state)
{
try
{
var myQueueId = (QueueId)(state);
if (IsShutdown) return; // timer was already removed, last tick
IQueueAdapterReceiver rcvr = receiver;
int maxCacheAddCount = queueCache != null ? queueCache.MaxAddCount : QueueAdapterConstants.UNLIMITED_GET_QUEUE_MSG;
// loop through the queue until it is empty.
while (!IsShutdown) // timer will be set to null when we are asked to shutdown.
{
var now = DateTime.UtcNow;
// Try to cleanup the pubsub cache at the cadence of 10 times in the configurable StreamInactivityPeriod.
if ((now - lastTimeCleanedPubSubCache) >= config.StreamInactivityPeriod.Divide(StreamInactivityCheckFrequency))
{
lastTimeCleanedPubSubCache = now;
CleanupPubSubCache(now);
}
// Tell the receiver which cached items were purged (fully delivered) so it can ack them.
if (queueCache != null)
{
IList<IBatchContainer> purgedItems;
if (queueCache.TryPurgeFromCache(out purgedItems))
{
await rcvr.MessagesDeliveredAsync(purgedItems);
}
}
if (queueCache != null && queueCache.IsUnderPressure())
{
// Under back pressure. Exit the loop. Will attempt again in the next timer callback.
logger.Info((int)ErrorCode.PersistentStreamPullingAgent_24, "Stream cache is under pressure. Backing off.");
return;
}
// Retrieve one multiBatch from the queue. Every multiBatch has an IEnumerable of IBatchContainers, each IBatchContainer may have multiple events.
IList<IBatchContainer> multiBatch = await rcvr.GetQueueMessagesAsync(maxCacheAddCount);
if (multiBatch == null || multiBatch.Count == 0) return; // queue is empty. Exit the loop. Will attempt again in the next timer callback.
if (queueCache != null)
{
queueCache.AddToCache(multiBatch);
}
numMessages += multiBatch.Count;
numReadMessagesCounter.IncrementBy(multiBatch.Count);
if (logger.IsVerbose2) logger.Verbose2((int)ErrorCode.PersistentStreamPullingAgent_11, "Got {0} messages from queue {1}. So far {2} msgs from this queue.",
multiBatch.Count, myQueueId.ToStringWithHashCode(), numMessages);
// Group the containers by (stream guid, namespace) and dispatch per stream.
foreach (var group in
multiBatch
.Where(m => m != null)
.GroupBy(container => new Tuple<Guid, string>(container.StreamGuid, container.StreamNamespace)))
{
var streamId = StreamId.GetStreamId(group.Key.Item1, queueAdapter.Name, group.Key.Item2);
StreamConsumerCollection streamData;
if (pubSubCache.TryGetValue(streamId, out streamData))
{
streamData.RefreshActivity(now);
StartInactiveCursors(streamData); // if this is an existing stream, start any inactive cursors
}
else
{
RegisterStream(streamId, group.First().SequenceToken, now).Ignore(); // if this is a new stream register as producer of stream in pub sub system
}
}
}
}
catch (Exception exc)
{
logger.Error((int)ErrorCode.PersistentStreamPullingAgent_12, "Exception while PersistentStreamPullingAgentGrain.AsyncTimerCallback", exc);
}
}
/// <summary>
/// Removes streams that have been inactive for longer than the configured
/// StreamInactivityPeriod from the pub-sub cache and disposes their consumer state.
/// </summary>
/// <param name="now">Current UTC time used for the inactivity comparison.</param>
private void CleanupPubSubCache(DateTime now)
{
    if (pubSubCache.Count == 0) return;

    // Snapshot the inactive entries first so the dictionary is not mutated while enumerating.
    var inactiveStreams = new List<KeyValuePair<StreamId, StreamConsumerCollection>>();
    foreach (var entry in pubSubCache)
    {
        if (entry.Value.IsInactive(now, config.StreamInactivityPeriod))
        {
            inactiveStreams.Add(entry);
        }
    }

    // Remove each inactive stream and release its consumers' resources.
    foreach (var entry in inactiveStreams)
    {
        pubSubCache.Remove(entry.Key);
        entry.Value.DisposeAll(logger);
    }
}
/// <summary>
/// Records a newly seen stream in the pub-sub cache and registers this agent as its producer,
/// while pinning the stream's first event in the queue cache for the duration.
/// </summary>
/// <param name="streamId">The new stream.</param>
/// <param name="firstToken">Sequence token of the first event observed for this stream.</param>
/// <param name="now">Current UTC time used to stamp the new consumer collection.</param>
private async Task RegisterStream(StreamId streamId, StreamSequenceToken firstToken, DateTime now)
{
    var streamData = new StreamConsumerCollection(now);
    pubSubCache.Add(streamId, streamData);

    // Create a fake cursor to point into the cache so the event cannot be purged until we have
    // talked to pub sub. This helps ensure the "casual consistency" between a pre-existing
    // subscription (of a potentially new already subscribed consumer) and later production.
    var pinningCursor = queueCache.GetCacheCursor(streamId.Guid, streamId.Namespace, firstToken);
    try
    {
        await RegisterAsStreamProducer(streamId, firstToken);
    }
    finally
    {
        // Release the pinning cursor regardless of registration outcome.
        pinningCursor.Dispose();
    }
}
/// <summary>
/// Wakes every inactive consumer of a stream by restarting its delivery loop; consumers
/// that are already active just get their cursor refreshed.
/// </summary>
/// <param name="streamData">Consumer collection of the stream that just received new events.</param>
private void StartInactiveCursors(StreamConsumerCollection streamData)
{
    foreach (StreamConsumerData consumer in streamData.AllConsumers())
    {
        if (consumer.State == StreamConsumerDataState.Inactive)
        {
            // wake up inactive consumers
            RunConsumerCursor(consumer, consumer.Filter).Ignore();
        }
        else if (consumer.Cursor != null)
        {
            // Already pumping; just refresh the cursor so it sees the new events.
            consumer.Cursor.Refresh();
        }
    }
}
/// <summary>
/// Delivery loop for one consumer: walks the consumer's cache cursor, applies the optional
/// filter, and delivers each batch (with retries) until the cursor is exhausted. The consumer
/// is marked Active for the duration and Inactive when the loop ends or fails.
/// </summary>
/// <param name="consumerData">Consumer whose cursor is pumped.</param>
/// <param name="filterWrapper">Optional predicate deciding whether a batch is delivered to this consumer.</param>
private async Task RunConsumerCursor(StreamConsumerData consumerData, IStreamFilterPredicateWrapper filterWrapper)
{
try
{
// double check in case of interleaving
if (consumerData.State == StreamConsumerDataState.Active ||
consumerData.Cursor == null) return;
consumerData.State = StreamConsumerDataState.Active;
while (consumerData.Cursor != null)
{
IBatchContainer batch = null;
Exception exceptionOccured = null;
try
{
Exception ignore;
if (!consumerData.Cursor.MoveNext())
{
break;
}
batch = consumerData.Cursor.GetCurrent(out ignore);
}
catch (Exception exc)
{
// Cursor failed (e.g. the data was purged from the cache): record the error and
// rebuild the cursor at the newest position so delivery can continue.
exceptionOccured = exc;
consumerData.SafeDisposeCursor(logger);
consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid, consumerData.StreamId.Namespace, null);
}
// Apply filtering to this batch, if applicable
if (filterWrapper != null && batch != null)
{
try
{
// Apply batch filter to this input batch, to see whether we should deliver it to this consumer.
if (!batch.ShouldDeliver(
consumerData.StreamId,
filterWrapper.FilterData,
filterWrapper.ShouldReceive)) continue; // Skip this batch -- nothing to do
}
catch (Exception exc)
{
// A broken filter must not stop delivery: log and deliver the batch anyway.
var message = string.Format("Ignoring exception while trying to evaluate subscription filter function {0} on stream {1} in PersistentStreamPullingAgentGrain.RunConsumerCursor", filterWrapper, consumerData.StreamId);
logger.Warn((int) ErrorCode.PersistentStreamPullingAgent_13, message, exc);
}
}
try
{
numSentMessagesCounter.Increment();
if (batch != null)
{
// Retry delivery until it succeeds, the data is no longer available, or MaxEventDeliveryTime elapses.
await AsyncExecutorWithRetries.ExecuteWithRetries(
i => DeliverBatchToConsumer(consumerData, batch),
AsyncExecutorWithRetries.INFINITE_RETRIES,
(exception, i) => !(exception is DataNotAvailableException),
config.MaxEventDeliveryTime,
DefaultBackoffProvider);
}
}
catch (Exception exc)
{
var message = string.Format("Exception while trying to deliver msgs to stream {0} in PersistentStreamPullingAgentGrain.RunConsumerCursor", consumerData.StreamId);
logger.Error((int)ErrorCode.PersistentStreamPullingAgent_14, message, exc);
exceptionOccured = new StreamEventDeliveryFailureException(consumerData.StreamId);
}
// if we failed to deliver a batch
if (exceptionOccured != null)
{
bool faultedSubscription = await ErrorProtocol(consumerData, exceptionOccured, true, batch, batch != null ? batch.SequenceToken : null);
if (faultedSubscription) return;
}
}
consumerData.State = StreamConsumerDataState.Inactive;
}
catch (Exception exc)
{
// RunConsumerCursor is fired with .Ignore so we should log if anything goes wrong, because there is no one to catch the exception
logger.Error((int)ErrorCode.PersistentStreamPullingAgent_15, "Ignored RunConsumerCursor Error", exc);
consumerData.State = StreamConsumerDataState.Inactive;
throw;
}
}
/// <summary>
/// Delivers one batch to a consumer. Imports the batch's request context (if any) for the call
/// and clears it before awaiting. The consumer may return a handshake token to re-position the
/// cursor; otherwise the delivered batch's own token becomes the consumer's LastToken.
/// </summary>
/// <param name="consumerData">Consumer to deliver to.</param>
/// <param name="batch">Batch container to deliver; must not be null.</param>
private async Task DeliverBatchToConsumer(StreamConsumerData consumerData, IBatchContainer batch)
{
StreamHandshakeToken prevToken = consumerData.LastToken;
Task<StreamHandshakeToken> batchDeliveryTask;
bool isRequestContextSet = batch.ImportRequestContext();
try
{
batchDeliveryTask = consumerData.StreamConsumer.DeliverBatch(consumerData.SubscriptionId, batch.AsImmutable(), prevToken);
}
finally
{
if (isRequestContextSet)
{
// clear RequestContext before await!
RequestContext.Clear();
}
}
StreamHandshakeToken newToken = await batchDeliveryTask;
if (newToken != null)
{
// The consumer asked to re-position: remember its token and rebuild the cursor there.
consumerData.LastToken = newToken;
consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid,
consumerData.StreamId.Namespace, newToken.Token);
}
else
{
consumerData.LastToken = StreamHandshakeToken.CreateDeliveyToken(batch.SequenceToken); // this is the currently delivered token
}
}
/// <summary>
/// Notifies a consumer that an error occurred on its stream. When the batch carries
/// request context, it is imported for the call and cleared again before awaiting.
/// </summary>
/// <param name="consumerData">Consumer to notify.</param>
/// <param name="exc">The error to report to the consumer.</param>
/// <param name="batch">Batch associated with the error; may be null.</param>
private static async Task DeliverErrorToConsumer(StreamConsumerData consumerData, Exception exc, IBatchContainer batch)
{
    bool contextImported = batch != null && batch.ImportRequestContext();
    Task notifyTask;
    try
    {
        notifyTask = consumerData.StreamConsumer.ErrorInStream(consumerData.SubscriptionId, exc);
    }
    finally
    {
        if (contextImported)
        {
            // clear RequestContext before await!
            RequestContext.Clear();
        }
    }
    await notifyTask;
}
/// <summary>
/// Runs the error protocol after a delivery or subscription failure: notifies the consumer,
/// reports the failure to the stream failure handler, and — when the handler is configured to
/// fault and the subscription is explicit — faults and removes the subscription.
/// </summary>
/// <param name="consumerData">Consumer whose delivery/subscription failed.</param>
/// <param name="exceptionOccured">The failure to report.</param>
/// <param name="isDeliveryError">True for a delivery failure, false for a subscription failure.</param>
/// <param name="batch">Batch being delivered when the failure occurred; may be null.</param>
/// <param name="token">Sequence token associated with the failure; may be null.</param>
/// <returns>True when the subscription was faulted and removed; false otherwise.</returns>
private async Task<bool> ErrorProtocol(StreamConsumerData consumerData, Exception exceptionOccured, bool isDeliveryError, IBatchContainer batch, StreamSequenceToken token)
{
// notify consumer about the error or that the data is not available.
await OrleansTaskExtentions.ExecuteAndIgnoreException(
() => DeliverErrorToConsumer(
consumerData, exceptionOccured, batch));
// record that there was a delivery failure
if (isDeliveryError)
{
await OrleansTaskExtentions.ExecuteAndIgnoreException(
() => streamFailureHandler.OnDeliveryFailure(
consumerData.SubscriptionId, streamProviderName, consumerData.StreamId, token));
}
else
{
await OrleansTaskExtentions.ExecuteAndIgnoreException(
() => streamFailureHandler.OnSubscriptionFailure(
consumerData.SubscriptionId, streamProviderName, consumerData.StreamId, token));
}
// if configured to fault on delivery failure and this is not an implicit subscription, fault and remove the subscription
if (streamFailureHandler.ShouldFaultSubsriptionOnError && !SubscriptionMarker.IsImplicitSubscription(consumerData.SubscriptionId.Guid))
{
try
{
// notify consumer of faulted subscription, if we can.
await OrleansTaskExtentions.ExecuteAndIgnoreException(
() => DeliverErrorToConsumer(
consumerData, new FaultedSubscriptionException(consumerData.SubscriptionId, consumerData.StreamId), batch));
// mark subscription as faulted.
await pubSub.FaultSubscription(consumerData.StreamId, consumerData.SubscriptionId);
}
finally
{
// remove subscription
RemoveSubscriber_Impl(consumerData.SubscriptionId, consumerData.StreamId);
}
return true;
}
return false;
}
/// <summary>
/// Registers this agent as the producer of the given stream in the pub-sub system and hooks up
/// every already-known subscriber of that stream, starting their delivery at streamStartToken.
/// </summary>
/// <param name="streamId">Stream to register as producer for.</param>
/// <param name="streamStartToken">Token of the first event seen for this stream; used as the cache token for pre-existing subscribers.</param>
private async Task RegisterAsStreamProducer(StreamId streamId, StreamSequenceToken streamStartToken)
{
    try
    {
        // Guard against mis-initialization. Throw InvalidOperationException instead of raising a
        // NullReferenceException directly (an anti-pattern), and reference this method's actual
        // name in the message (the old text named a method that no longer exists).
        if (pubSub == null) throw new InvalidOperationException("pubSub reference not set up correctly in RegisterAsStreamProducer");

        IStreamProducerExtension meAsStreamProducer = this.AsReference<IStreamProducerExtension>();
        ISet<PubSubSubscriptionState> streamData = await pubSub.RegisterProducer(streamId, streamProviderName, meAsStreamProducer);
        if (logger.IsVerbose) logger.Verbose((int)ErrorCode.PersistentStreamPullingAgent_16, "Got back {0} Subscribers for stream {1}.", streamData.Count, streamId);

        // Re-attach every pre-existing subscriber; run the handshakes in parallel.
        var addSubscriptionTasks = new List<Task>(streamData.Count);
        foreach (PubSubSubscriptionState item in streamData)
        {
            addSubscriptionTasks.Add(AddSubscriber_Impl(item.SubscriptionId, item.Stream, item.Consumer, streamStartToken, item.Filter));
        }
        await Task.WhenAll(addSubscriptionTasks);
    }
    catch (Exception exc)
    {
        // RegisterAsStreamProducer is fired with .Ignore so we should log if anything goes wrong, because there is no one to catch the exception
        logger.Error((int)ErrorCode.PersistentStreamPullingAgent_17, "Ignored RegisterAsStreamProducer Error", exc);
        throw;
    }
}
}
}
| |
using UnityEngine;
using System.IO;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace ChartboostSDK {
#if UNITY_EDITOR
[InitializeOnLoad]
#endif
/// <summary>
/// Unity ScriptableObject holding the Chartboost SDK configuration: per-platform app IDs and
/// signatures, Android store selection (Google Play vs. Amazon) and a logging flag. The asset is
/// loaded from Resources and, in the editor, auto-created under Chartboost/Resources when missing.
/// While a field still holds its placeholder label, the static getters substitute Chartboost's
/// example credentials and log a one-time warning.
/// </summary>
public class CBSettings : ScriptableObject
{
// Name, folder and extension of the settings asset inside the project.
const string cbSettingsAssetName = "ChartboostSettings";
const string cbSettingsPath = "Chartboost/Resources";
const string cbSettingsAssetExtension = ".asset";
// Placeholder labels stored in the serialized fields until real credentials are entered,
// plus the example credentials substituted while the placeholders are still present.
const string iOSExampleAppIDLabel = "CB_IOS_APP_ID";
const string iOSExampleAppSignatureLabel = "CB_IOS_APP_SIGNATURE";
const string iOSExampleAppID = "4f21c409cd1cb2fb7000001b";
const string iOSExampleAppSignature = "92e2de2fd7070327bdeb54c15a5295309c6fcd2d";
const string androidExampleAppIDLabel = "CB_ANDROID_APP_ID";
const string androidExampleAppSignatureLabel = "CB_ANDROID_APP_SIGNATURE";
const string androidExampleAppID = "4f7b433509b6025804000002";
const string androidExampleAppSignature = "dd2d41b69ac01b80f443f5b6cf06096d457f82bd";
const string amazonExampleAppIDLabel = "CB_AMAZON_APP_ID";
const string amazonExampleAppSignatureLabel = "CB_AMAZON_APP_SIGNATURE";
const string amazonExampleAppID = "542ca35d1873da32dbc90488";
const string amazonExampleAppSignature = "90654a340386c9fb8de33315e4210d7c09989c43";
const string exampleCredentialsWarning = "CHARTBOOST: You are using a Chartboost example App ID or App Signature! Go to the Chartboost dashboard and replace these with an App ID & App Signature from your account! If you need help, email us: support@chartboost.com";
// Ensures the example-credentials warning is logged at most once per session.
private static bool credentialsWarning = false;
private static CBSettings instance;
// Lazily loads the settings asset from Resources; in the editor, creates the asset if missing.
static CBSettings Instance
{
get
{
if (instance == null)
{
instance = Resources.Load(cbSettingsAssetName) as CBSettings;
if (instance == null)
{
// If not found, autocreate the asset object.
instance = CreateInstance<CBSettings>();
#if UNITY_EDITOR
string properPath = Path.Combine(Application.dataPath, cbSettingsPath);
if (!Directory.Exists(properPath))
{
// NOTE(review): this assumes the parent folder "Assets/Chartboost" already exists;
// AssetDatabase.CreateFolder cannot create it — confirm against the project layout.
AssetDatabase.CreateFolder("Assets/Chartboost", "Resources");
}
string fullPath = Path.Combine(Path.Combine("Assets", cbSettingsPath),
cbSettingsAssetName + cbSettingsAssetExtension
);
AssetDatabase.CreateAsset(instance, fullPath);
#endif
}
}
return instance;
}
}
#if UNITY_EDITOR
// Menu entry that selects the settings asset so it can be edited in the Inspector.
[MenuItem("Chartboost/Edit Settings")]
public static void Edit()
{
Selection.activeObject = Instance;
}
// Menu entry that opens the Chartboost Unity documentation in the default browser.
[MenuItem("Chartboost/SDK Documentation")]
public static void OpenDocumentation()
{
string url = "https://help.chartboost.com/documentation/unity";
Application.OpenURL(url);
}
#endif
#region App Settings
// Serialized credentials; each defaults to its placeholder label, not a real credential.
[SerializeField]
public string iOSAppId = iOSExampleAppIDLabel;
[SerializeField]
public string iOSAppSecret = iOSExampleAppSignatureLabel;
[SerializeField]
public string androidAppId = androidExampleAppIDLabel;
[SerializeField]
public string androidAppSecret = androidExampleAppSignatureLabel;
[SerializeField]
public string amazonAppId = amazonExampleAppIDLabel;
[SerializeField]
public string amazonAppSecret = amazonExampleAppSignatureLabel;
[SerializeField]
public bool isLoggingEnabled = false;
// Android store selection: index 0 = Google Play, 1 = Amazon (see androidPlatformLabels).
[SerializeField]
public string[] androidPlatformLabels = new[] { "Google Play", "Amazon" };
[SerializeField]
public int selectedAndroidPlatformIndex = 0;
// Updates the selected Android store index and marks the asset dirty so the change is saved.
public void SetAndroidPlatformIndex(int index)
{
if (selectedAndroidPlatformIndex != index)
{
selectedAndroidPlatformIndex = index;
DirtyEditor();
}
}
public int SelectedAndroidPlatformIndex
{
get { return selectedAndroidPlatformIndex; }
}
public string[] AndroidPlatformLabels
{
get { return androidPlatformLabels; }
set
{
if (androidPlatformLabels != value)
{
androidPlatformLabels = value;
DirtyEditor();
}
}
}
// iOS
// Stores the iOS app ID if it changed, persisting via DirtyEditor.
public void SetIOSAppId(string id)
{
if (Instance.iOSAppId != id)
{
Instance.iOSAppId = id;
DirtyEditor();
}
}
// Returns the configured iOS app ID, or the example ID (with a one-time warning) while unconfigured.
public static string getIOSAppId()
{
if(Instance.iOSAppId == iOSExampleAppIDLabel)
{
CredentialsWarning();
return iOSExampleAppID;
}
return Instance.iOSAppId;
}
// Stores the iOS app signature if it changed, persisting via DirtyEditor.
public void SetIOSAppSecret(string secret)
{
if (Instance.iOSAppSecret != secret)
{
Instance.iOSAppSecret = secret;
DirtyEditor();
}
}
// Returns the configured iOS app signature, or the example one (with a one-time warning) while unconfigured.
public static string getIOSAppSecret()
{
if(Instance.iOSAppSecret == iOSExampleAppSignatureLabel)
{
CredentialsWarning();
return iOSExampleAppSignature;
}
return Instance.iOSAppSecret;
}
// Android
// Stores the Google Play app ID if it changed, persisting via DirtyEditor.
public void SetAndroidAppId(string id)
{
if (Instance.androidAppId != id)
{
Instance.androidAppId = id;
DirtyEditor();
}
}
// Returns the configured Google Play app ID, or the example ID (with a one-time warning) while unconfigured.
public static string getAndroidAppId()
{
if(Instance.androidAppId == androidExampleAppIDLabel)
{
CredentialsWarning();
return androidExampleAppID;
}
return Instance.androidAppId;
}
// Stores the Google Play app signature if it changed, persisting via DirtyEditor.
public void SetAndroidAppSecret(string secret)
{
if (Instance.androidAppSecret != secret)
{
Instance.androidAppSecret = secret;
DirtyEditor();
}
}
// Returns the configured Google Play app signature, or the example one (with a one-time warning) while unconfigured.
public static string getAndroidAppSecret()
{
if(Instance.androidAppSecret == androidExampleAppSignatureLabel)
{
CredentialsWarning();
return androidExampleAppSignature;
}
return Instance.androidAppSecret;
}
// Amazon
// Stores the Amazon app ID if it changed, persisting via DirtyEditor.
public void SetAmazonAppId(string id)
{
if (Instance.amazonAppId != id)
{
Instance.amazonAppId = id;
DirtyEditor();
}
}
// Returns the configured Amazon app ID, or the example ID (with a one-time warning) while unconfigured.
public static string getAmazonAppId()
{
if(Instance.amazonAppId == amazonExampleAppIDLabel)
{
CredentialsWarning();
return amazonExampleAppID;
}
return Instance.amazonAppId;
}
// Stores the Amazon app signature if it changed, persisting via DirtyEditor.
public void SetAmazonAppSecret(string secret)
{
if (Instance.amazonAppSecret != secret)
{
Instance.amazonAppSecret = secret;
DirtyEditor();
}
}
// Returns the configured Amazon app signature, or the example one (with a one-time warning) while unconfigured.
public static string getAmazonAppSecret()
{
if(Instance.amazonAppSecret == amazonExampleAppSignatureLabel)
{
CredentialsWarning();
return amazonExampleAppSignature;
}
return Instance.amazonAppSecret;
}
// Returns the raw app ID for the currently selected Android store.
// NOTE(review): unlike the getters above, this does NOT fall back to the example
// credentials while the field still holds its placeholder label — confirm intended.
public static string getSelectAndroidAppId()
{
// Google
if (Instance.selectedAndroidPlatformIndex == 0)
{
return Instance.androidAppId;
}
// Amazon
else
{
return Instance.amazonAppId;
}
}
// Returns the raw app signature for the currently selected Android store.
// NOTE(review): same caveat as getSelectAndroidAppId — no example-credential fallback here.
public static string getSelectAndroidAppSecret()
{
// Google
if (Instance.selectedAndroidPlatformIndex == 0)
{
return Instance.androidAppSecret;
}
// Amazon
else
{
return Instance.amazonAppSecret;
}
}
// Enables/disables SDK logging and persists the change.
public static void enableLogging(bool enabled)
{
Instance.isLoggingEnabled = enabled;
DirtyEditor();
}
public static bool isLogging()
{
return Instance.isLoggingEnabled;
}
// Marks the settings asset dirty in the editor so Unity serializes the modified values.
private static void DirtyEditor()
{
#if UNITY_EDITOR
EditorUtility.SetDirty(Instance);
#endif
}
// Logs the example-credentials warning once per session.
private static void CredentialsWarning()
{
if(credentialsWarning == false)
{
credentialsWarning = true;
Debug.LogWarning(exampleCredentialsWarning);
}
}
#endregion
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.