context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//#define DEBUG_SLICE
#define TRACE_PERFORMANCE
using System;
using System.Collections.Generic;
using System.Diagnostics.Contracts;
using System.IO;
using Microsoft.Cci.MutableCodeModel;
using Microsoft.Research.CodeAnalysis;
using System.Linq;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
namespace Microsoft.Cci.Analysis {
public class CCI2Slicer : IDisposable,
  ICodeWriter<LocalDefAdaptor, IParameterTypeInformation, MethodReferenceAdaptor, FieldReferenceAdaptor, IPropertyDefinition, IEventDefinition, TypeReferenceAdaptor, ICustomAttribute, IAssemblyReference>
{
  [ContractInvariantMethod]
  private void ObjectInvariant()
  {
    Contract.Invariant(this.disposableObjectAllocatedByThisHost != null);
  }

  private readonly HostEnvironment host;

  // Disposables created on behalf of this slicer; released in Dispose().
  protected readonly List<IDisposable> disposableObjectAllocatedByThisHost = new List<IDisposable>();

  // NOTE(review): assigned in the constructor but never read anywhere in this class;
  // kept for compatibility in case a subclass or reflection-based code relies on them.
  readonly private INamespaceTypeReference systemString;
  readonly private INamespaceTypeReference systemInt;

  private CCI2Slicer(HostEnvironment host)
  {
    Contract.Requires(host != null);
    this.host = host;
    this.systemString = this.host.PlatformType.SystemString;
    this.systemInt = this.host.PlatformType.SystemInt32;
  }

  /// <summary>
  /// Creates a new slicer bound to the given host environment.
  /// </summary>
  public static CCI2Slicer CreateSlicer(HostEnvironment host)
  {
    Contract.Requires(host != null);
    // MB: no singleton please
    return new CCI2Slicer(host);
  }

  /// <summary>
  /// Prunes the containing assembly according to <paramref name="slice"/> and writes the
  /// resulting dll (plus pdb, when debug information is available) into
  /// <paramref name="directory"/>.
  /// </summary>
  /// <param name="slice">The slice describing which members to keep.</param>
  /// <param name="directory">Destination directory for the emitted files.</param>
  /// <param name="dll">On success, the full path of the written dll; null on failure.</param>
  /// <returns>true if the slice was written successfully; false otherwise.</returns>
  [SuppressMessage("Microsoft.Contracts", "TestAlwaysEvaluatingToAConstant")]
  public bool WriteSliceToFile(ISlice<MethodReferenceAdaptor, FieldReferenceAdaptor, TypeReferenceAdaptor, IAssemblyReference> slice, string directory, out string dll)
  {
#if TRACE_PERFORMANCE
    var stopWatch = new Research.DataStructures.CustomStopwatch();
    stopWatch.Start();
#endif
    var newAssembly = Prune.PruneAssembly(host, slice);
#if TRACE_PERFORMANCE
    Console.WriteLine("Time to prune the assembly: {0}", stopWatch.Elapsed);
#endif
    var errors = ValidateAssembly(host, newAssembly);
    if (/*errors != null && */ 0 < errors.Count)
    {
#if !DEBUG_SLICE
      // In non-DEBUG_SLICE builds a slice with validation errors is rejected outright.
      dll = null;
      return false;
#endif
    }
    // Get a PDB reader if there is a PDB file.
    PdbReader/*?*/ pdbReader = null;
    string pdbFile = slice.ContainingAssembly.ResolvedAssembly.DebugInformationLocation;
    if (string.IsNullOrEmpty(pdbFile) || !File.Exists(pdbFile))
      pdbFile = Path.ChangeExtension(slice.ContainingAssembly.ResolvedAssembly.Location, "pdb");
    if (File.Exists(pdbFile))
    {
      using (var pdbStream = File.OpenRead(pdbFile))
      {
        pdbReader = new PdbReader(pdbStream, host);
      }
    }
    using (pdbReader)
    {
      ISourceLocationProvider sourceLocationProvider = pdbReader;
      ILocalScopeProvider localScopeProvider = pdbReader;
      Contract.Assume(sourceLocationProvider != null, "why??");
      if (!MakeSureSliceHasAtLeastMethodSourceLocation(slice, sourceLocationProvider))
      {
        dll = null;
        return false;
      }
      dll = Path.Combine(directory, slice.Name + ".dll");
#if TRACE_PERFORMANCE
      // NOTE(review): if CustomStopwatch follows System.Diagnostics.Stopwatch semantics,
      // Reset() stops the watch and the Elapsed printed below will always be ~0;
      // Restart() may have been intended — confirm against CustomStopwatch.
      stopWatch.Reset();
#endif
      using (var peStream = File.Create(dll))
      {
        if (pdbReader == null)
        {
          PeWriter.WritePeToStream(newAssembly, host, peStream);
        }
        else
        {
          // Was dll.Replace(".dll", ".pdb"): Replace rewrites EVERY occurrence of ".dll",
          // so a directory name containing ".dll" would corrupt the pdb path.
          using (var pdbWriter = new PdbWriter(Path.ChangeExtension(dll, ".pdb"), pdbReader, emitTokenSourceInfo: true))
          {
            PeWriter.WritePeToStream(newAssembly, host, peStream, sourceLocationProvider, localScopeProvider, pdbWriter);
          }
        }
      }
#if TRACE_PERFORMANCE
      Console.WriteLine("Time spent to write on the disk: {0}", stopWatch.Elapsed);
#endif
    }
#if !DEBUG_SLICE
    // NOTE(review): this dump is unreachable when !DEBUG_SLICE is active, because the
    // early 'dll = null; return false;' above already fired for errors.Count > 0 —
    // and when DEBUG_SLICE is defined this whole region is compiled out. The errors.txt
    // report is therefore dead in both configurations; kept as-is pending a decision on
    // the intended configuration (it probably wants to be #if DEBUG_SLICE).
    if (errors != null && 0 < errors.Count)
    {
      using (var tw = new StreamWriter(File.Create(Path.Combine(directory, slice.Name + ".errors.txt"))))
      {
        // something is performed asynchronously and may not be terminated here, that is wrong!
        lock (errors)
        {
          foreach (var err in errors)
          {
            tw.WriteLine(err.Location);
            foreach (var e in err.Errors)
              tw.WriteLine("{0} {1} {2}", e.IsWarning ? "WARNING" : "ERROR ", e.Code, e.Message);
            tw.WriteLine();
          }
        }
      }
      return false;
    }
#endif
    // NOTE(review): an empty 'if' used to probe here whether the pruned assembly still
    // references the assembly it was sliced from (debugger hook only, no effect);
    // removed as dead code. Re-introduce a real diagnostic if that check matters.
    return true;
  }

  /// <summary>
  /// Returns true iff at least one method in the slice has a primary source location,
  /// i.e. the pdb we found actually covers the sliced code.
  /// </summary>
  private bool MakeSureSliceHasAtLeastMethodSourceLocation(ISlice<MethodReferenceAdaptor, FieldReferenceAdaptor, TypeReferenceAdaptor, IAssemblyReference> slice, ISourceLocationProvider sourceLocationProvider)
  {
    Contract.Requires(slice != null);
    Contract.Requires(sourceLocationProvider != null);
    foreach (var m in slice.Methods)
    {
      var methodDefinition = m.reference.ResolvedMethod;
      if (sourceLocationProvider.GetPrimarySourceLocationsFor(methodDefinition.Locations).Any())
        return true;
    }
    return false;
  }

  // Property-accessor name checks use Ordinal comparison: "get_"/"set_" are metadata
  // prefixes, not linguistic text (culture-sensitive StartsWith could misclassify).
  private static bool IsGetter(IMethodDefinition methodDefinition)
  {
    Contract.Requires(methodDefinition != null);
    return methodDefinition.IsSpecialName && methodDefinition.Name.Value.StartsWith("get_", StringComparison.Ordinal);
  }

  private static bool IsSetter(IMethodDefinition methodDefinition)
  {
    Contract.Requires(methodDefinition != null);
    return methodDefinition.IsSpecialName && methodDefinition.Name.Value.StartsWith("set_", StringComparison.Ordinal);
  }

  /// <summary>
  /// Maps a getter/setter method back to the property it belongs to, or null when the
  /// method is not an accessor (or no matching property is found on its type).
  /// </summary>
  private static IPropertyDefinition GetPropertyFromAccessor(IMethodDefinition methodDefinition)
  {
    Contract.Requires(methodDefinition != null);
    if (!IsGetter(methodDefinition) && !IsSetter(methodDefinition)) return null;
    // TODO: Need to cache this information. This is expensive.
    foreach (var p in methodDefinition.ContainingTypeDefinition.Properties)
    {
      if (p.Setter != null && p.Setter.ResolvedMethod.InternedKey == methodDefinition.InternedKey)
      {
        return p;
      }
      else if (p.Getter != null && p.Getter.ResolvedMethod.InternedKey == methodDefinition.InternedKey)
      {
        return p;
      }
    }
    return null;
  }

  /// <summary>
  /// Runs the CCI metadata validator over <paramref name="assembly"/> and collects the
  /// errors reported through the host's Errors event. Never returns null.
  /// </summary>
  private static List<Microsoft.Cci.ErrorEventArgs> ValidateAssembly(HostEnvironment host, IAssembly assembly)
  {
    Contract.Requires(host != null);
    Contract.Ensures(Contract.Result<List<Microsoft.Cci.ErrorEventArgs>>() != null);
    var errorEvents = new List<Microsoft.Cci.ErrorEventArgs>();
    // MB: without lock I'm getting exceptions (errors may be reported concurrently)
    EventHandler<Microsoft.Cci.ErrorEventArgs> onError = (sender, e) => { lock (errorEvents) errorEvents.Add(e); };
    host.Errors += onError;
    try
    {
      var mv = new MetadataValidator(host);
      mv.Validate(assembly);
    }
    finally
    {
      // Unsubscribe: the handler used to stay attached forever, leaking this list on the
      // shared host and funnelling errors from later validations into stale lists.
      host.Errors -= onError;
    }
    return errorEvents;
  }

  #region IDisposable members
  private void Close()
  {
    foreach (var disposable in this.disposableObjectAllocatedByThisHost)
      disposable.Dispose();
    // Keep the list non-null (object invariant) but make a second Dispose a no-op.
    this.disposableObjectAllocatedByThisHost.Clear();
  }

  public virtual void Dispose()
  {
    this.Close();
    GC.SuppressFinalize(this);
  }

  // NOTE: the previous finalizer called Close() and therefore touched *managed* objects
  // during finalization, which is illegal (they may already be finalized). The class owns
  // no unmanaged resources, so no finalizer is needed at all (CA1063).
  #endregion
}
}
| |
using System;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using log4net;
namespace NetGore.Collections
{
/// <summary>
/// Manages a pool of reusable objects.
/// </summary>
/// <typeparam name="T">The type of object to pool.</typeparam>
public class ObjectPool<T> : IObjectPool<T> where T : class, IPoolable
{
    static readonly ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    // Synchronization object; null when the pool was created as non-thread-safe.
    readonly object _threadSync;

    ObjectPoolObjectCreator<T> _creator;

    // Count of acquired objects. Invariant: slots [0, _liveObjects) of _poolObjects hold
    // live objects whose PoolIndex equals their slot; the remaining slots hold free objects.
    int _liveObjects;

    T[] _poolObjects;

    /// <summary>
    /// Initializes a new instance of the <see cref="ObjectPool{T}"/> class.
    /// </summary>
    /// <param name="creator">The delegate used to create new object instances.</param>
    /// <param name="threadSafe">If true, this collection will be thread safe at a slight performance cost.
    /// Set this value to true if you plan on ever accessing this collection from more than one thread.</param>
    public ObjectPool(ObjectPoolObjectCreator<T> creator, bool threadSafe) : this(creator, null, null, threadSafe)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="ObjectPool{T}"/> class.
    /// </summary>
    /// <param name="creator">The delegate used to create new object instances.</param>
    /// <param name="initializer">The delegate used to initialize an object as it is acquired from the pool
    /// (when Acquire() is called). Can be null.</param>
    /// <param name="deinitializer">The delegate used to deinitialize an object as it is freed (when Free()
    /// is called). Can be null.</param>
    /// <param name="threadSafe">If true, this collection will be thread safe at a slight performance cost.
    /// Set this value to true if you plan on ever accessing this collection from more than one thread.</param>
    public ObjectPool(ObjectPoolObjectCreator<T> creator, ObjectPoolObjectHandler<T> initializer,
                      ObjectPoolObjectHandler<T> deinitializer, bool threadSafe)
        : this(16, creator, initializer, deinitializer, threadSafe)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="ObjectPool{T}"/> class.
    /// </summary>
    /// <param name="initialSize">The initial size of the pool. Must not be negative.</param>
    /// <param name="creator">The delegate used to create new object instances.</param>
    /// <param name="initializer">The delegate used to initialize an object as it is acquired from the pool
    /// (when Acquire() is called). Can be null.</param>
    /// <param name="deinitializer">The delegate used to deinitialize an object as it is freed (when Free()
    /// is called). Can be null.</param>
    /// <param name="threadSafe">If true, this collection will be thread safe at a slight performance cost.
    /// Set this value to true if you plan on ever accessing this collection from more than one thread.</param>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="initialSize"/> is negative.</exception>
    /// <exception cref="ArgumentNullException"><paramref name="creator"/> is null.</exception>
    public ObjectPool(int initialSize, ObjectPoolObjectCreator<T> creator, ObjectPoolObjectHandler<T> initializer,
                      ObjectPoolObjectHandler<T> deinitializer, bool threadSafe)
    {
        // Fail fast with a clear exception instead of letting new T[initialSize] blow up below.
        if (initialSize < 0)
            throw new ArgumentOutOfRangeException("initialSize");

        if (threadSafe)
            _threadSync = new object();

        // Store our delegates (the Creator setter rejects null)
        Creator = creator;
        Initializer = initializer;
        Deinitializer = deinitializer;

        // Create the initial pool and the object instances
        _poolObjects = new T[initialSize];
        for (var i = 0; i < _poolObjects.Length; i++)
        {
            _poolObjects[i] = CreateObject(i);
        }

        AssertValidPoolIndexForLiveObjects();
    }

    /// <summary>
    /// Gets or sets the delegate used to create new object instances. Cannot be null.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
    public ObjectPoolObjectCreator<T> Creator
    {
        get { return _creator; }
        set
        {
            if (value == null)
                throw new ArgumentNullException("value");
            _creator = value;
        }
    }

    /// <summary>
    /// Gets or sets the delegate used to deinitialize an object as it is freed (when Free()
    /// is called). Can be null.
    /// </summary>
    public ObjectPoolObjectHandler<T> Deinitializer { get; set; }

    /// <summary>
    /// Gets or sets the delegate used to initialize an object as it is acquired from the pool
    /// (when Acquire() is called). Can be null.
    /// </summary>
    public ObjectPoolObjectHandler<T> Initializer { get; set; }

    /// <summary>
    /// Gets the index of the last live object (-1 when no objects are live).
    /// </summary>
    int LastLiveObjectIndex
    {
        get { return LiveObjects - 1; }
    }

    /// <summary>
    /// Ensures the indices are all correct. Debug builds only.
    /// </summary>
    [Conditional("DEBUG")]
    void AssertValidPoolIndexForLiveObjects()
    {
        for (var i = 0; i < LiveObjects; i++)
        {
            var obj = _poolObjects[i];
            Debug.Assert(obj.PoolIndex == i);
        }
    }

    /// <summary>
    /// Creates a new instance of the poolable object.
    /// </summary>
    /// <param name="index">The index of the object in the pool array.</param>
    /// <returns>A new instance of the poolable object.</returns>
    T CreateObject(int index)
    {
        var obj = Creator(this);
        obj.PoolIndex = index;
        return obj;
    }

    /// <summary>
    /// Expands the size of the object pool array (roughly quadrupling it).
    /// </summary>
    void ExpandPool()
    {
        var oldLength = _poolObjects.Length;

        // Expand the pool. The Math.Max guard fixes a stall for pools created with
        // initialSize == 0: 0 << 2 == 0 meant the array never grew, and the subsequent
        // acquire would index out of range.
        Array.Resize(ref _poolObjects, Math.Max(oldLength << 2, 4));

        // Allocate the new object instances
        for (var i = oldLength; i < _poolObjects.Length; i++)
        {
            Debug.Assert(_poolObjects[i] == null);
            _poolObjects[i] = CreateObject(i);
        }

        AssertValidPoolIndexForLiveObjects();
    }

    T InternalAcquire()
    {
        // Expand the pool if needed (one slot before it is actually full, matching the
        // original behavior; the spare slot is harmless).
        if (LiveObjects >= _poolObjects.Length - 1)
            ExpandPool();

        Debug.Assert(LiveObjects < _poolObjects.Length);

        // Grab the next free object: the first slot past the live block
        var ret = _poolObjects[_liveObjects++];
        Debug.Assert(ret.PoolIndex == LiveObjects - 1);

        return ret;
    }

    void InternalClear()
    {
        // Call the deinitializer on all the live objects
        if (Deinitializer != null)
        {
            for (var i = 0; i < LiveObjects; i++)
            {
                Deinitializer(_poolObjects[i]);
            }
        }

        // Decrease the live objects count to zero, marking all objects in the array as dead
        _liveObjects = 0;
    }

    /// <summary>
    /// Performs the actual freeing of an object from the pool.
    /// </summary>
    /// <param name="poolObject">The pooled object to free.</param>
    /// <param name="throwArgumentException">When true, an <see cref="ArgumentException"/> can be thrown when the
    /// item removed belongs to a different pool or the <see cref="IPoolable.PoolIndex"/> has changed by something other than
    /// the pool (which it shouldn't). When false, this exception will never be raised.</param>
    /// <exception cref="ArgumentException">The poolObject belongs to a different pool than this one, or the
    /// <see cref="IPoolable.PoolIndex"/> was altered externally.</exception>
    void InternalFree(T poolObject, bool throwArgumentException)
    {
        // Already in the dead block (or double-freed): nothing to do
        if (poolObject.PoolIndex > LastLiveObjectIndex)
            return;

        // Ensure that this object belongs to this pool instance
        if (_poolObjects[poolObject.PoolIndex] != poolObject)
        {
            const string errmsg =
                "The poolObject `{0}` belongs to a different pool than this one, or the IPoolable.PoolIndex ({1}) was altered externally.";
            if (log.IsErrorEnabled)
                log.ErrorFormat(errmsg, poolObject, poolObject.PoolIndex);
            Debug.Fail(string.Format(errmsg, poolObject, poolObject.PoolIndex));

            // Throw exception?
            if (throwArgumentException)
                throw new ArgumentException(string.Format(errmsg, poolObject, poolObject.PoolIndex), "poolObject");

            // If the object doesn't belong to this pool, we do NOT want to remove anything!
            return;
        }

        Debug.Assert(_poolObjects[poolObject.PoolIndex] == poolObject);

        // The object does belong to this pool, so free it by deinitializing it, then swapping it with the
        // last live object so only two objects move. Decrementing the live count pushes the freed object
        // into the dead block, effectively marking it as not live.
        if (Deinitializer != null)
            Deinitializer(poolObject);

        SwapPoolObjects(poolObject.PoolIndex, --_liveObjects);

        Debug.Assert(poolObject.PoolIndex >= LiveObjects);
    }

    int InternalFreeAll(Func<T, bool> condition)
    {
        var count = 0;

        // Loop through all live objects
        var i = 0;
        while (i < LiveObjects)
        {
            // Check the condition
            var current = _poolObjects[i];
            if (condition(current))
            {
                // Free the object (the same way Free() does it, but without the validation checks)
                Debug.Assert(_poolObjects[current.PoolIndex] == current);
                if (Deinitializer != null)
                    Deinitializer(current);
                SwapPoolObjects(current.PoolIndex, --_liveObjects);

                // Increase the count for how many objects we removed
                ++count;
            }
            else
            {
                // Move on to the next object. Only do this when we didn't remove an object so that way we will
                // re-check the object we just swapped with.
                ++i;
            }
        }

        return count;
    }

    void InternalPerform(Action<T> action)
    {
        for (var i = 0; i < LiveObjects; i++)
        {
            action(_poolObjects[i]);
        }
    }

    /// <summary>
    /// Swaps two objects in the object pool.
    /// </summary>
    /// <param name="aIndex">The index of the first object.</param>
    /// <param name="bIndex">The index of the second object.</param>
    void SwapPoolObjects(int aIndex, int bIndex)
    {
        // Grab the object references
        var aObject = _poolObjects[aIndex];
        var bObject = _poolObjects[bIndex];

        // Swap the references in the array and update the indexes
        _poolObjects[bIndex] = aObject;
        aObject.PoolIndex = bIndex;

        _poolObjects[aIndex] = bObject;
        bObject.PoolIndex = aIndex;

        Debug.Assert(_poolObjects[aIndex].PoolIndex == aIndex);
        Debug.Assert(_poolObjects[bIndex].PoolIndex == bIndex);
    }

    #region IObjectPool<T> Members

    /// <summary>
    /// Gets the number of live objects in the pool.
    /// </summary>
    public int LiveObjects
    {
        get { return _liveObjects; }
    }

    /// <summary>
    /// Returns a free object instance from the pool.
    /// </summary>
    /// <returns>A free object instance from the pool.</returns>
    public T Acquire()
    {
        T ret;

        // Use thread synchronization if needed
        if (_threadSync != null)
        {
            // Thread-safe acquiring
            lock (_threadSync)
            {
                ret = InternalAcquire();
            }
        }
        else
        {
            // Non thread-safe acquiring
            ret = InternalAcquire();
        }

        // Initialize (deliberately outside the lock: the object already belongs to the caller)
        if (Initializer != null)
            Initializer(ret);

        return ret;
    }

    /// <summary>
    /// Frees all live objects in the pool.
    /// </summary>
    public void Clear()
    {
        // Use thread synchronization if needed
        if (_threadSync != null)
        {
            // Thread-safe clearing
            lock (_threadSync)
            {
                InternalClear();
            }
        }
        else
        {
            // Non thread-safe clearing
            InternalClear();
        }
    }

    /// <summary>
    /// Frees the object so the pool can reuse it. After freeing an object, it should not be used
    /// in any way, and be treated like it has been disposed. No exceptions will be thrown for trying to free
    /// an object that does not belong to this pool.
    /// </summary>
    /// <param name="poolObject">The object to be freed.</param>
    /// <exception cref="ArgumentNullException"><paramref name="poolObject"/> is null.</exception>
    public void Free(T poolObject)
    {
        Free(poolObject, false);
    }

    /// <summary>
    /// Frees the object so the pool can reuse it. After freeing an object, it should not be used
    /// in any way, and be treated like it has been disposed.
    /// </summary>
    /// <param name="poolObject">The object to be freed.</param>
    /// <param name="throwArgumentException">Whether or not an <see cref="ArgumentException"/> will be thrown for
    /// objects that do not belong to this pool.</param>
    /// <exception cref="ArgumentException"><paramref name="throwArgumentException"/> is true and the
    /// <paramref name="poolObject"/> does not belong to this pool.</exception>
    /// <exception cref="ArgumentNullException"><paramref name="poolObject"/> is null.</exception>
    public void Free(T poolObject, bool throwArgumentException)
    {
        if (poolObject == null)
            throw new ArgumentNullException("poolObject");

        // Use thread synchronization if needed
        if (_threadSync != null)
        {
            // Thread-safe freeing
            lock (_threadSync)
            {
                InternalFree(poolObject, throwArgumentException);
            }
        }
        else
        {
            // Non thread-safe freeing
            InternalFree(poolObject, throwArgumentException);
        }
    }

    /// <summary>
    /// Frees all live objects in the pool that match the given <paramref name="condition"/>.
    /// </summary>
    /// <param name="condition">The condition used to determine if an object should be freed.</param>
    /// <returns>The number of objects that were freed.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="condition"/> is null.</exception>
    public int FreeAll(Func<T, bool> condition)
    {
        if (condition == null)
            throw new ArgumentNullException("condition");

        // Use thread synchronization if needed
        int ret;
        if (_threadSync != null)
        {
            // Thread-safe freeing
            lock (_threadSync)
            {
                ret = InternalFreeAll(condition);
            }
        }
        else
        {
            // Non thread-safe freeing
            ret = InternalFreeAll(condition);
        }

        return ret;
    }

    /// <summary>
    /// Performs the <paramref name="action"/> on all live objects in the object pool.
    /// </summary>
    /// <param name="action">The action to perform on all live objects in the object pool.</param>
    public void Perform(Action<T> action)
    {
        // Use thread synchronization if needed
        if (_threadSync != null)
        {
            // Thread-safe performing
            lock (_threadSync)
            {
                InternalPerform(action);
            }
        }
        else
        {
            // Non thread-safe performing
            InternalPerform(action);
        }
    }

    #endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Linq;
using System.Linq.Expressions;
using Xunit;
namespace ComparedQueryable.Test.NativeQueryableTests
{
/// <summary>
/// Tests for the Queryable.Sum overloads, covering all ten numeric element types
/// (int/long/float/double/decimal and their nullable forms): null-source and
/// null-selector argument validation, the empty-sequence-returns-zero contract,
/// and value correctness for both the parameterless Sum() and Sum(selector).
/// </summary>
public class SumTests : EnumerableBasedTests
{
    // --- Null source: both Sum() and Sum(selector) must throw ArgumentNullException("source"). ---

    [Fact]
    public void SumOfInt_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<int> sourceInt = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceInt.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceInt.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfInt_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<int?> sourceNullableInt = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableInt.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableInt.Sum(x => x));
    }
    [Fact]
    public void SumOfLong_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<long> sourceLong = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceLong.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceLong.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfLong_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<long?> sourceNullableLong = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableLong.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableLong.Sum(x => x));
    }
    [Fact]
    public void SumOfFloat_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<float> sourceFloat = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceFloat.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceFloat.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfFloat_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<float?> sourceNullableFloat = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableFloat.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableFloat.Sum(x => x));
    }
    [Fact]
    public void SumOfDouble_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<double> sourceDouble = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceDouble.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceDouble.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfDouble_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<double?> sourceNullableDouble = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableDouble.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableDouble.Sum(x => x));
    }
    [Fact]
    public void SumOfDecimal_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<decimal> sourceDecimal = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceDecimal.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceDecimal.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfDecimal_SourceIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<decimal?> sourceNullableDecimal = null;
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableDecimal.Sum());
        AssertExtensions.Throws<ArgumentNullException>("source", () => sourceNullableDecimal.Sum(x => x));
    }

    // --- Null selector: Sum(selector) must throw ArgumentNullException("selector").
    //     The selector is typed as Expression<Func<...>> so the Queryable (not Enumerable)
    //     overload binds. ---

    [Fact]
    public void SumOfInt_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<int> sourceInt = Enumerable.Empty<int>().AsNaturalQueryable();
        Expression<Func<int, int>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceInt.Sum(selector));
    }
    [Fact]
    public void SumOfNullableOfInt_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<int?> sourceNullableInt = Enumerable.Empty<int?>().AsNaturalQueryable();
        Expression<Func<int?, int?>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceNullableInt.Sum(selector));
    }
    [Fact]
    public void SumOfLong_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<long> sourceLong = Enumerable.Empty<long>().AsNaturalQueryable();
        Expression<Func<long, long>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceLong.Sum(selector));
    }
    [Fact]
    public void SumOfNullableOfLong_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<long?> sourceNullableLong = Enumerable.Empty<long?>().AsNaturalQueryable();
        Expression<Func<long?, long?>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceNullableLong.Sum(selector));
    }
    [Fact]
    public void SumOfFloat_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<float> sourceFloat = Enumerable.Empty<float>().AsNaturalQueryable();
        Expression<Func<float, float>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceFloat.Sum(selector));
    }
    [Fact]
    public void SumOfNullableOfFloat_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<float?> sourceNullableFloat = Enumerable.Empty<float?>().AsNaturalQueryable();
        Expression<Func<float?, float?>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceNullableFloat.Sum(selector));
    }
    [Fact]
    public void SumOfDouble_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<double> sourceDouble = Enumerable.Empty<double>().AsNaturalQueryable();
        Expression<Func<double, double>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceDouble.Sum(selector));
    }
    [Fact]
    public void SumOfNullableOfDouble_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<double?> sourceNullableDouble = Enumerable.Empty<double?>().AsNaturalQueryable();
        Expression<Func<double?, double?>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceNullableDouble.Sum(selector));
    }
    [Fact]
    public void SumOfDecimal_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<decimal> sourceDecimal = Enumerable.Empty<decimal>().AsNaturalQueryable();
        Expression<Func<decimal, decimal>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceDecimal.Sum(selector));
    }
    [Fact]
    public void SumOfNullableOfDecimal_SelectorIsNull_ArgumentNullExceptionThrown()
    {
        IQueryable<decimal?> sourceNullableDecimal = Enumerable.Empty<decimal?>().AsNaturalQueryable();
        Expression<Func<decimal?, decimal?>> selector = null;
        AssertExtensions.Throws<ArgumentNullException>("selector", () => sourceNullableDecimal.Sum(selector));
    }

    // --- Empty sequence: Sum must return the zero of the element type, not throw. ---

    [Fact]
    public void SumOfInt_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<int> sourceInt = Enumerable.Empty<int>().AsNaturalQueryable();
        Assert.Equal(0, sourceInt.Sum());
        Assert.Equal(0, sourceInt.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfInt_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<int?> sourceNullableInt = Enumerable.Empty<int?>().AsNaturalQueryable();
        Assert.Equal(0, sourceNullableInt.Sum());
        Assert.Equal(0, sourceNullableInt.Sum(x => x));
    }
    [Fact]
    public void SumOfLong_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<long> sourceLong = Enumerable.Empty<long>().AsNaturalQueryable();
        Assert.Equal(0L, sourceLong.Sum());
        Assert.Equal(0L, sourceLong.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfLong_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<long?> sourceNullableLong = Enumerable.Empty<long?>().AsNaturalQueryable();
        Assert.Equal(0L, sourceNullableLong.Sum());
        Assert.Equal(0L, sourceNullableLong.Sum(x => x));
    }
    [Fact]
    public void SumOfFloat_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<float> sourceFloat = Enumerable.Empty<float>().AsNaturalQueryable();
        Assert.Equal(0f, sourceFloat.Sum());
        Assert.Equal(0f, sourceFloat.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfFloat_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<float?> sourceNullableFloat = Enumerable.Empty<float?>().AsNaturalQueryable();
        Assert.Equal(0f, sourceNullableFloat.Sum());
        Assert.Equal(0f, sourceNullableFloat.Sum(x => x));
    }
    [Fact]
    public void SumOfDouble_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<double> sourceDouble = Enumerable.Empty<double>().AsNaturalQueryable();
        Assert.Equal(0d, sourceDouble.Sum());
        Assert.Equal(0d, sourceDouble.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfDouble_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<double?> sourceNullableDouble = Enumerable.Empty<double?>().AsNaturalQueryable();
        Assert.Equal(0d, sourceNullableDouble.Sum());
        Assert.Equal(0d, sourceNullableDouble.Sum(x => x));
    }
    [Fact]
    public void SumOfDecimal_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<decimal> sourceDecimal = Enumerable.Empty<decimal>().AsNaturalQueryable();
        Assert.Equal(0m, sourceDecimal.Sum());
        Assert.Equal(0m, sourceDecimal.Sum(x => x));
    }
    [Fact]
    public void SumOfNullableOfDecimal_SourceIsEmptyCollection_ZeroReturned()
    {
        IQueryable<decimal?> sourceNullableDecimal = Enumerable.Empty<decimal?>().AsNaturalQueryable();
        Assert.Equal(0m, sourceNullableDecimal.Sum());
        Assert.Equal(0m, sourceNullableDecimal.Sum(x => x));
    }

    // --- Value correctness: Sum1..Sum10 cover the parameterless Sum() per element type;
    //     Sum11..Sum20 cover Sum(selector) with the identity selector. ---

    [Fact]
    public void Sum1()
    {
        var val = (new int[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((int)3, val);
    }
    [Fact]
    public void Sum2()
    {
        var val = (new int?[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((int)3, val);
    }
    [Fact]
    public void Sum3()
    {
        var val = (new long[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((long)3, val);
    }
    [Fact]
    public void Sum4()
    {
        var val = (new long?[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((long)3, val);
    }
    [Fact]
    public void Sum5()
    {
        var val = (new float[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((float)3, val);
    }
    [Fact]
    public void Sum6()
    {
        var val = (new float?[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((float)3, val);
    }
    [Fact]
    public void Sum7()
    {
        var val = (new double[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((double)3, val);
    }
    [Fact]
    public void Sum8()
    {
        var val = (new double?[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((double)3, val);
    }
    [Fact]
    public void Sum9()
    {
        var val = (new decimal[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((decimal)3, val);
    }
    [Fact]
    public void Sum10()
    {
        var val = (new decimal?[] { 0, 2, 1 }).AsNaturalQueryable().Sum();
        Assert.Equal((decimal)3, val);
    }
    [Fact]
    public void Sum11()
    {
        var val = (new int[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((int)3, val);
    }
    [Fact]
    public void Sum12()
    {
        var val = (new int?[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((int)3, val);
    }
    [Fact]
    public void Sum13()
    {
        var val = (new long[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((long)3, val);
    }
    [Fact]
    public void Sum14()
    {
        var val = (new long?[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((long)3, val);
    }
    [Fact]
    public void Sum15()
    {
        var val = (new float[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((float)3, val);
    }
    [Fact]
    public void Sum16()
    {
        var val = (new float?[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((float)3, val);
    }
    [Fact]
    public void Sum17()
    {
        var val = (new double[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((double)3, val);
    }
    [Fact]
    public void Sum18()
    {
        var val = (new double?[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((double)3, val);
    }
    [Fact]
    public void Sum19()
    {
        var val = (new decimal[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((decimal)3, val);
    }
    [Fact]
    public void Sum20()
    {
        var val = (new decimal?[] { 0, 2, 1 }).AsNaturalQueryable().Sum(n => n);
        Assert.Equal((decimal)3, val);
    }
}
}
| |
using UnityEngine;
using UnityEditor;
using System;
using System.Linq;
using System.IO;
using System.Collections.Generic;
namespace AssetBundleGraph {
/**
IntegratedGUIImportSetting is the class for apply specific setting to already imported files.
*/
public class IntegratedGUIImportSetting : INodeOperation {

    // Setup validates the incoming assets and this node's sampling (template)
    // file, then passes the incoming groups through unchanged.
    // Throws NodeException (surfaced in the graph UI) on any validation error.
    public void Setup (BuildTarget target,
        NodeData node,
        IEnumerable<PerformGraph.AssetGroups> incoming,
        IEnumerable<ConnectionData> connectionsToOutput,
        PerformGraph.Output Output)
    {
        // Raised when assets of more than one importer type arrive at this node.
        // Fixed message grammar: "expect" -> "expects".
        Action<Type, Type, AssetReference> multipleAssetTypeFound = (Type expectedType, Type foundType, AssetReference foundAsset) => {
            throw new NodeException(string.Format("{3} :ImportSetting expects {0}, but different type of incoming asset is found({1} {2})",
                expectedType.FullName, foundType.FullName, foundAsset.fileNameAndExtension, node.Name), node.Id);
        };
        // Raised when the incoming asset type has no importer ImportSetting can configure.
        Action<Type> unsupportedType = (Type unsupported) => {
            throw new NodeException(string.Format("{0} :Incoming asset type is not supported by ImportSetting (Incoming type:{1}). Perhaps you want to use Modifier instead?",
                node.Name, (unsupported != null)?unsupported.FullName:"null"), node.Id);
        };
        // Raised when the configured sample importer type differs from the incoming type.
        // Fixed message grammar: "is does not match" -> "does not match".
        Action<Type, Type> incomingTypeMismatch = (Type expectedType, Type incomingType) => {
            throw new NodeException(string.Format("{0} :Incoming asset type does not match with this ImportSetting (Expected type:{1}, Incoming type:{2}).",
                node.Name, (expectedType != null)?expectedType.FullName:"null", (incomingType != null)?incomingType.FullName:"null"), node.Id);
        };
        // On a config error, try to auto-create a sample from the first incoming
        // asset, then re-validate; only if that still fails do we raise.
        Action<ConfigStatus> errorInConfig = (ConfigStatus _) => {
            var firstAsset = TypeUtility.GetFirstIncomingAsset(incoming);
            if(firstAsset != null) {
                // give a try first in sampling file
                SaveSampleFile(node, firstAsset);
                ValidateInputSetting(node, target, incoming, multipleAssetTypeFound, unsupportedType, incomingTypeMismatch, (ConfigStatus eType) => {
                    if(eType == ConfigStatus.NoSampleFound) {
                        throw new NodeException(node.Name + " :ImportSetting has no sampling file. Please configure it from Inspector.", node.Id);
                    }
                    if(eType == ConfigStatus.TooManySamplesFound) {
                        // Fixed message grammar: "sampling file" -> "sampling files".
                        throw new NodeException(node.Name + " :ImportSetting has too many sampling files. Please fix it from Inspector.", node.Id);
                    }
                });
            }
        };

        ValidateInputSetting(node, target, incoming, multipleAssetTypeFound, unsupportedType, incomingTypeMismatch, errorInConfig);

        // ImportSettings does not add, filter or change structure of group, so just pass given group of assets
        if(incoming != null && Output != null) {
            var dst = (connectionsToOutput == null || !connectionsToOutput.Any())?
                null : connectionsToOutput.First();
            foreach(var ag in incoming) {
                Output(dst, ag.assetGroups);
            }
        }
    }

    // Run applies the sampled import settings to every incoming asset and then
    // passes the groups through unchanged, reporting progress via progressFunc.
    public void Run (BuildTarget target,
        NodeData node,
        IEnumerable<PerformGraph.AssetGroups> incoming,
        IEnumerable<ConnectionData> connectionsToOutput,
        PerformGraph.Output Output,
        Action<NodeData, string, float> progressFunc)
    {
        if(incoming != null){
            ApplyImportSetting(node, incoming, progressFunc);
            var dst = (connectionsToOutput == null || !connectionsToOutput.Any())?
                null : connectionsToOutput.First();
            foreach(var ag in incoming) {
                Output(dst, ag.assetGroups);
            }
        }
    }

    // Copies the given asset's import-setting template into this node's
    // per-node sampling directory and refreshes the AssetDatabase so the
    // sample becomes visible to Unity.
    private void SaveSampleFile(NodeData node, AssetReference asset) {
        var samplingDirectoryPath = FileUtility.PathCombine(AssetBundleGraphSettings.IMPORTER_SETTINGS_PLACE, node.Id);
        if (!Directory.Exists(samplingDirectoryPath)) {
            Directory.CreateDirectory(samplingDirectoryPath);
        }
        var configFilePath = FileUtility.GetImportSettingTemplateFilePath(asset);
        UnityEngine.Assertions.Assert.IsNotNull(configFilePath);
        var targetFilePath = FileUtility.PathCombine(samplingDirectoryPath, Path.GetFileName(configFilePath));
        FileUtility.CopyFile(configFilePath, targetFilePath);
        AssetDatabase.Refresh(ImportAssetOptions.ImportRecursive);
    }

    // Returns the state of this node's sampling directory: exactly one
    // non-meta file is the only valid configuration.
    public static ConfigStatus GetConfigStatus(NodeData node) {
        var sampleFileDir = FileUtility.PathCombine(AssetBundleGraphSettings.IMPORTER_SETTINGS_PLACE, node.Id);
        if(!Directory.Exists(sampleFileDir)) {
            return ConfigStatus.NoSampleFound;
        }
        var sampleFiles = FileUtility.GetFilePathsInFolder(sampleFileDir)
            .Where(path => !path.EndsWith(AssetBundleGraphSettings.UNITY_METAFILE_EXTENSION))
            .ToList();
        if(sampleFiles.Count == 0) {
            return ConfigStatus.NoSampleFound;
        }
        if(sampleFiles.Count == 1) {
            return ConfigStatus.GoodSampleFound;
        }
        return ConfigStatus.TooManySamplesFound;
    }

    // Clears the node's sampling directory so a fresh sample can be captured.
    public static void ResetConfig(NodeData node) {
        var sampleFileDir = FileUtility.PathCombine(AssetBundleGraphSettings.IMPORTER_SETTINGS_PLACE, node.Id);
        FileUtility.RemakeDirectory(sampleFileDir);
    }

    // Loads the AssetImporter of the single sample file. Callers must ensure
    // GetConfigStatus(node) == GoodSampleFound first; the asserts enforce it.
    public static AssetImporter GetReferenceAssetImporter(NodeData node) {
        var sampleFileDir = FileUtility.PathCombine(AssetBundleGraphSettings.IMPORTER_SETTINGS_PLACE, node.Id);
        UnityEngine.Assertions.Assert.IsTrue(Directory.Exists(sampleFileDir));
        var sampleFiles = FileUtility.GetFilePathsInFolder(sampleFileDir)
            .Where(path => !path.EndsWith(AssetBundleGraphSettings.UNITY_METAFILE_EXTENSION))
            .ToList();
        UnityEngine.Assertions.Assert.IsTrue(sampleFiles.Count == 1);
        return AssetImporter.GetAtPath(sampleFiles[0]);
    }

    // Overwrites each incoming asset's importer with the sample's settings,
    // skipping assets whose settings already match (avoids needless reimports).
    private void ApplyImportSetting(NodeData node, IEnumerable<PerformGraph.AssetGroups> incoming, Action<NodeData, string, float> progressFunc) {
        var referenceImporter = GetReferenceAssetImporter(node);
        var configurator = new ImportSettingsConfigurator(referenceImporter);
        foreach(var ag in incoming) {
            foreach(var assets in ag.assetGroups.Values) {
                foreach(var asset in assets) {
                    var importer = AssetImporter.GetAtPath(asset.importFrom);
                    if(!configurator.IsEqual(importer)) {
                        if(progressFunc != null) progressFunc(node, string.Format("Modifying {0}", asset.fileNameAndExtension), 0.5f);
                        configurator.OverwriteImportSettings(importer);
                        asset.TouchImportAsset();
                    }
                }
            }
        }
    }

    // Result of inspecting the node's sampling directory.
    public enum ConfigStatus {
        NoSampleFound,
        TooManySamplesFound,
        GoodSampleFound
    }

    // Runs all static validations for this node. Each callback may be null to
    // skip that particular check; callbacks are expected to throw on error.
    public static void ValidateInputSetting (
        NodeData node,
        BuildTarget target,
        IEnumerable<PerformGraph.AssetGroups> incoming,
        Action<Type, Type, AssetReference> multipleAssetTypeFound,
        Action<Type> unsupportedType,
        Action<Type, Type> incomingTypeMismatch,
        Action<ConfigStatus> errorInConfig
    ) {
        Type expectedType = TypeUtility.FindFirstIncomingAssetType(incoming);
        if(multipleAssetTypeFound != null) {
            if(expectedType != null && incoming != null) {
                foreach(var ag in incoming) {
                    foreach(var assets in ag.assetGroups.Values) {
                        foreach(var a in assets) {
                            Type assetType = a.filterType;
                            if(assetType != expectedType) {
                                multipleAssetTypeFound(expectedType, assetType, a);
                            }
                        }
                    }
                }
            }
        }
        if(unsupportedType != null) {
            if(expectedType != null) {
                // Only these importer types can be sampled and reapplied.
                if(expectedType == typeof(UnityEditor.TextureImporter)  ||
                    expectedType == typeof(UnityEditor.ModelImporter)   ||
                    expectedType == typeof(UnityEditor.AudioImporter)
                ) {
                    // good. do nothing
                } else {
                    unsupportedType(expectedType);
                }
            }
        }
        var status = GetConfigStatus(node);
        if(errorInConfig != null) {
            if(status != ConfigStatus.GoodSampleFound) {
                errorInConfig(status);
            }
        }
        if(incomingTypeMismatch != null) {
            // if there is no incoming assets, there is no way to check if
            // right type of asset is coming in - so we'll just skip the test
            if(incoming != null && expectedType != null && status == ConfigStatus.GoodSampleFound) {
                Type targetType = GetReferenceAssetImporter(node).GetType();
                if( targetType != expectedType ) {
                    incomingTypeMismatch(targetType, expectedType);
                }
            }
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using PathfinderCharacter.Models;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.EntityFrameworkCore;
namespace PathfinderCharacter.Controllers
{
[Route("api/[controller]")]
[Route("api/[controller]")]
public class SpellsController : Controller
{
    private IMemoryCache _cache;
    private PathfinderContext pfContext;

    public SpellsController(PathfinderContext pfContext, IMemoryCache memoryCache)
    {
        _cache = memoryCache;
        this.pfContext = pfContext;
    }

    /// <summary>
    /// Returns every spell with its related entities eagerly loaded,
    /// cached in memory for 30 minutes under a single cache key.
    /// </summary>
    [HttpGet("[action]")]
    public IEnumerable<Spell> Spells()
    {
        string key = "spellCache";
        List<Spell> mySpells;
        if (!_cache.TryGetValue(key, out mySpells))
        {
            // Cache miss: load the full spell object graph from the database.
            mySpells = pfContext.Spells
                .Include(s => s.School)
                .Include(s => s.SpellSubschools)
                    .ThenInclude(ss => ss.Subschool)
                .Include(s => s.SpellSavingThrows)
                    .ThenInclude(sst => sst.SavingThrow)
                .Include(s => s.Classes)
                    .ThenInclude(c => c.HeroClass)
                .Include(s => s.Components)
                    .ThenInclude(c => c.ComponentType)
                .Include(s => s.Components)
                    .ThenInclude(c => c.MaterialCost)
                        .ThenInclude(m => m.CurrencyUnit)
                .ToList();
            _cache.Set(key, mySpells,
                new MemoryCacheEntryOptions()
                    .SetAbsoluteExpiration(TimeSpan.FromMinutes(30)));
        }
        // Note: on a cache hit TryGetValue already populated mySpells; the
        // previous redundant _cache.Get(key) re-read has been removed.
        return mySpells;
    }

    /// <summary>
    /// Adds a SpellClass entry to the spell when the raw per-class level
    /// column has a value. Replaces 25 copy-pasted if-blocks.
    /// </summary>
    private static void AddClassLevel(Spell spell, List<HeroClass> heroClasses, int? level, string className)
    {
        if (!level.HasValue)
            return;
        spell.Classes.Add(new SpellClass()
        {
            ClassLevel = level.Value,
            HeroClass = heroClasses.FirstOrDefault(h => h.Name == className)
        });
    }

    /// <summary>
    /// One-shot migration: converts every RawSpell row into the normalized
    /// Spell object graph and saves it. Returns "success" or the exception
    /// message(s) on failure.
    /// </summary>
    [HttpGet("[action]")]
    public string DataMigrate()
    {
        try
        {
            List<RawSpell> rawSpells = pfContext.RawSpells.ToList();
            List<Spell> spells = new List<Spell>();
            // Lookup tables are materialized once up front; this now includes
            // ReferenceSources, which was previously queried per raw spell.
            List<CurrencyUnit> currencyUnits = pfContext.CurrencyUnits.ToList();
            List<Subschool> subschools = pfContext.Subschools.ToList();
            List<CastingTime> castingTimes = pfContext.CastingTime.ToList();
            List<School> schools = pfContext.Schools.ToList();
            List<Bloodline> bloodlines = pfContext.Bloodlines.ToList();
            List<HeroClass> heroClasses = pfContext.HeroClasses.ToList();
            List<ComponentType> componentTypes = pfContext.ComponentTypes.ToList();
            List<Descriptor> spellDescriptors = pfContext.SpellDescriptors.ToList();
            List<Domain> domains = pfContext.Domains.ToList();
            List<SavingThrow> savingThrows = pfContext.SavingThrows.ToList();
            List<ReferenceSource> referenceSources = pfContext.ReferenceSources.ToList();

            foreach (var rawSpell in rawSpells)
            {
                Spell spell = new Spell()
                {
                    Area = rawSpell.area,
                    AugmentedText = rawSpell.augmented,
                    CastingTime = castingTimes.FirstOrDefault(c => c.Name == rawSpell.casting_time),
                    Description = rawSpell.description,
                    Duration = rawSpell.duration,
                    Effect = rawSpell.effect,
                    HTMLDescription = rawSpell.description_formated,
                    Mythic = rawSpell.mythic ?? false,
                    MythicText = rawSpell.mythic_text,
                    Name = rawSpell.name,
                    Range = rawSpell.range,
                    ReferenceSource = referenceSources.FirstOrDefault(c => c.Name == rawSpell.source),
                    School = schools.FirstOrDefault(s => s.Name.ToLower() == rawSpell.school.ToLower()),
                    ShortDescription = rawSpell.short_description,
                    // -1 marks "not usable as a spell-like ability".
                    SpellLikeAbilityLevel = rawSpell.SLA_Level ?? -1,
                    SpellResistance = !string.IsNullOrEmpty(rawSpell.spell_resistence) ? rawSpell.spell_resistence.ToLower().Contains("yes") : false,
                    SpellResistanceHarmless = !string.IsNullOrEmpty(rawSpell.spell_resistence) ? rawSpell.spell_resistence.ToLower().Contains("harmless") : false,
                    SpellResistanceObject = !string.IsNullOrEmpty(rawSpell.spell_resistence) ? rawSpell.spell_resistence.ToLower().Contains("object") : false,
                    Targets = rawSpell.targets
                };

                // Bloodlines: raw format is "Name(level),Name(level),...".
                // A malformed entry throws and is reported by the outer catch.
                if (!string.IsNullOrEmpty(rawSpell.bloodline))
                {
                    foreach (var bloodlineString in rawSpell.bloodline.Trim().Replace(" ", "").Split(','))
                    {
                        string bloodline = bloodlineString.Split('(')[0];
                        int level = int.Parse(bloodlineString.Split('(')[1].TrimEnd(")".ToCharArray()));
                        spell.BloodlineSpells.Add(new BloodlineSpell() {
                            Bloodline = bloodlines.FirstOrDefault(b => b.Name == bloodline),
                            level = level
                        });
                    }
                }

                // Hero classes: one nullable level column per class in the raw table.
                AddClassLevel(spell, heroClasses, rawSpell.adept, "Adept");
                AddClassLevel(spell, heroClasses, rawSpell.alchemist, "Alchemist");
                AddClassLevel(spell, heroClasses, rawSpell.antipaladin, "Antipaladin");
                AddClassLevel(spell, heroClasses, rawSpell.bard, "Bard");
                AddClassLevel(spell, heroClasses, rawSpell.bloodrager, "Bloodrager");
                AddClassLevel(spell, heroClasses, rawSpell.cleric, "Cleric");
                AddClassLevel(spell, heroClasses, rawSpell.druid, "Druid");
                AddClassLevel(spell, heroClasses, rawSpell.hunter, "Hunter");
                AddClassLevel(spell, heroClasses, rawSpell.inquisitor, "Inquisitor");
                AddClassLevel(spell, heroClasses, rawSpell.investigator, "Investigator");
                AddClassLevel(spell, heroClasses, rawSpell.magus, "Magus");
                AddClassLevel(spell, heroClasses, rawSpell.medium, "Medium");
                AddClassLevel(spell, heroClasses, rawSpell.mesmerist, "Mesmerist");
                AddClassLevel(spell, heroClasses, rawSpell.occultist, "Occultist");
                AddClassLevel(spell, heroClasses, rawSpell.oracle, "Oracle");
                AddClassLevel(spell, heroClasses, rawSpell.paladin, "Paladin");
                AddClassLevel(spell, heroClasses, rawSpell.psychic, "Psychic");
                AddClassLevel(spell, heroClasses, rawSpell.ranger, "Ranger");
                AddClassLevel(spell, heroClasses, rawSpell.shaman, "Shaman");
                AddClassLevel(spell, heroClasses, rawSpell.skald, "Skald");
                AddClassLevel(spell, heroClasses, rawSpell.sor, "Sorcerer");
                AddClassLevel(spell, heroClasses, rawSpell.spiritualist, "Spiritualist");
                AddClassLevel(spell, heroClasses, rawSpell.summoner, "Summoner");
                AddClassLevel(spell, heroClasses, rawSpell.witch, "Witch");
                AddClassLevel(spell, heroClasses, rawSpell.wiz, "Wizard");

                // Spell components: raw column is a comma-separated symbol list,
                // optionally with a "(material)" suffix. The leading " , " makes
                // the ", X" search pattern match the first component too.
                string rawComponentString = " , " + rawSpell.components;
                foreach (var lookupComponent in componentTypes)
                {
                    string lookupSearch = ", " + lookupComponent.Symbol;
                    if (rawComponentString.Contains(lookupSearch + ",") || rawComponentString.Contains(lookupSearch + " ("))
                    {
                        Currency materialCost = null;
                        string material = "";
                        int index = rawComponentString.IndexOf(lookupSearch + " (");
                        if (index != -1)
                        {
                            // Text between "(" and ")" is the material description.
                            material = rawComponentString.Substring(index + lookupSearch.Length + 2).Split(')')[0];
                            materialCost = new Currency()
                            {
                                Amount = rawSpell.material_costs.HasValue && rawSpell.costly_components.HasValue && rawSpell.costly_components.Value ? rawSpell.material_costs.Value : 0,
                                CurrencyUnit = currencyUnits.FirstOrDefault(c => c.NameShort == "gp")
                            };
                        }
                        spell.Components.Add(new Component()
                        {
                            ComponentType = lookupComponent,
                            Material = material,
                            MaterialCost = materialCost
                        });
                    }
                }

                // Spell descriptors: substring match against the raw descriptor text.
                foreach (var descriptor in spellDescriptors)
                {
                    if (!string.IsNullOrEmpty(rawSpell.descriptor) && rawSpell.descriptor.ToLower().Contains(descriptor.Name.ToLower()))
                    {
                        spell.Descriptors.Add(descriptor);
                    }
                }

                // Spell domains: raw format is "Name (level), Name (level), ...".
                if (!string.IsNullOrEmpty(rawSpell.domain))
                {
                    foreach (var domain in rawSpell.domain.Split(','))
                    {
                        string domainName = domain.Split('(')[0].Trim();
                        int level = int.Parse(domain.Split('(')[1].Split(')')[0].Trim());
                        spell.SpellDomains.Add(new SpellDomain() {
                            Domain = domains.FirstOrDefault(d => d.Name == domainName),
                            Level = level
                        });
                    }
                }

                // Saving throws: substring match against the raw saving-throw text.
                foreach (var savingThrow in savingThrows)
                {
                    if (!string.IsNullOrEmpty(rawSpell.saving_throw) && rawSpell.saving_throw.ToLower().Contains(savingThrow.Name.ToLower()))
                    {
                        spell.SpellSavingThrows.Add(new SpellSavingThrow() { SavingThrow = savingThrow });
                    }
                }

                // Subschools: substring match against the raw subschool text.
                foreach (var subschool in subschools)
                {
                    if (!string.IsNullOrEmpty(rawSpell.subschool) && rawSpell.subschool.ToLower().Contains(subschool.Name.ToLower()))
                    {
                        spell.SpellSubschools.Add(new SpellSubschool() { Subschool = subschool });
                    }
                }

                spells.Add(spell);
            }

            pfContext.Spells.AddRange(spells);
            pfContext.SaveChanges();
        }
        catch (Exception e)
        {
            return e.Message + e.InnerException?.Message;
        }
        return "success";
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.Serialization;
using System.Windows;
using System.Windows.Controls;
using Microsoft.Phone.Controls;
//using Windows.Devices.Geolocation;
//using Windows.UI.Core;
using WPCordovaClassLib;
using WPCordovaClassLib.Cordova;
using WPCordovaClassLib.Cordova.Commands;
using WPCordovaClassLib.Cordova.JSON;
using GoogleAds;
namespace Cordova.Extension.Commands
{
///
/// Google AD Mob wrapper for showing banner and interstitial adverts
///
public sealed class AdMob : BaseCommand
{
#region Const
// ad size
// only banner and smart banner supported on windows phones, see:
// https://developers.google.com/mobile-ads-sdk/docs/admob/wp/banner
public const string ADSIZE_BANNER = "BANNER";
public const string ADSIZE_SMART_BANNER = "SMART_BANNER";
//public const string ADSIZE_MEDIUM_RECTANGLE = "MEDIUM_RECTANGLE";
//public const string ADSIZE_FULL_BANNER = "FULL_BANNER";
//public const string ADSIZE_LEADERBOARD = "LEADERBOARD";
//public const string ADSIZE_SKYSCRAPER = "SKYSCRAPPER";
//public const string ADSIZE_CUSTOM = "CUSTOM";
// ad event
// event names fired back to the JavaScript layer (see fireAdEvent/fireAdErrorEvent).
public const string EVENT_AD_LOADED = "onAdLoaded";
public const string EVENT_AD_FAILLOAD = "onAdFailLoad";
public const string EVENT_AD_PRESENT = "onAdPresent";
public const string EVENT_AD_LEAVEAPP = "onAdLeaveApp";
public const string EVENT_AD_DISMISS = "onAdDismiss";
public const string EVENT_AD_WILLPRESENT = "onAdWillPresent";
public const string EVENT_AD_WILLDISMISS = "onAdWillDismiss";
// ad type
// ad type labels passed along with events so JS can tell which ad fired.
public const string ADTYPE_BANNER = "banner";
public const string ADTYPE_INTERSTITIAL = "interstitial";
public const string ADTYPE_NATIVE = "native";
// options
// JSON keys accepted by setOptions()/createBanner()/prepareInterstitial().
public const string OPT_ADID = "adId";
public const string OPT_AUTO_SHOW = "autoShow";
public const string OPT_IS_TESTING = "isTesting";
public const string OPT_LOG_VERBOSE = "logVerbose";
public const string OPT_AD_SIZE = "adSize";
public const string OPT_WIDTH = "width";
public const string OPT_HEIGHT = "height";
public const string OPT_OVERLAP = "overlap";
public const string OPT_ORIENTATION_RENEW = "orientationRenew";
public const string OPT_POSITION = "position";
public const string OPT_X = "x";
public const string OPT_Y = "y";
public const string OPT_BANNER_ID = "bannerId";
public const string OPT_INTERSTITIAL_ID = "interstitialId";
// fallback ad unit ids used whenever isTesting is set or no id is supplied.
private const string TEST_BANNER_ID = "ca-app-pub-6869992474017983/9375997553";
private const string TEST_INTERSTITIAL_ID = "ca-app-pub-6869992474017983/1355127956";
// banner positions
// laid out as a 3x3 grid numbered 1..9, plus POS_XY for absolute coordinates.
public const int NO_CHANGE = 0;
public const int TOP_LEFT = 1;
public const int TOP_CENTER = 2;
public const int TOP_RIGHT = 3;
public const int LEFT = 4;
public const int CENTER = 5;
public const int RIGHT = 6;
public const int BOTTOM_LEFT = 7;
public const int BOTTOM_CENTER = 8;
public const int BOTTOM_RIGHT = 9;
public const int POS_XY = 10;
#endregion
#region Members
// mutable plugin state; written by setOptions()/__setOptions() and by the
// UI-thread ad callbacks, so values reflect the most recent configuration.
private bool isTesting = false;
private bool logVerbose = false;
private string bannerId = "";
private string interstitialId = "";
private AdFormats adSize = AdFormats.SmartBanner;
private int adWidth = 320;
private int adHeight = 50;
private bool overlap = false;
private bool orientationRenew = true;
private int adPosition = BOTTOM_CENTER;
private int posX = 0;
private int posY = 0;
private bool autoShowBanner = true;
private bool autoShowInterstitial = false;
private bool bannerVisible = false;
// names of XAML elements looked up on the current page.
private const string UI_LAYOUT_ROOT = "LayoutRoot";
private const string UI_CORDOVA_VIEW = "CordovaView";
// banner heights used when shrinking the Cordova view in split mode.
private const int BANNER_HEIGHT_PORTRAIT = 50;
private const int BANNER_HEIGHT_LANDSCAPE = 32;
// grid row reserved for the banner in split (non-overlap) mode.
private RowDefinition row = null;
private AdView bannerAd = null;
private InterstitialAd interstitialAd = null;
// view size captured the first time the banner is shown, used to restore
// the Cordova view height afterwards.
private double initialViewHeight = 0.0;
private double initialViewWidth = 0.0;
#endregion
// Maps the JS-side ad size name onto the SDK's AdFormats value.
// Anything other than "BANNER" falls back to the auto-sizing smart banner,
// since those are the only two formats supported on Windows Phone.
static AdFormats adSizeFromString(String size) {
    return ADSIZE_BANNER.Equals(size)
        ? AdFormats.Banner       // fixed 320x50 banner
        : AdFormats.SmartBanner; // auto-sized banner (default)
}
#region Public methods
// Cordova action: parses an options object from the JS args array and
// stores the settings for later create/show calls.
public void setOptions(string args) {
    if (logVerbose) Debug.WriteLine("AdMob.setOptions: " + args);
    try {
        var inputs = JsonHelper.Deserialize<string[]>(args);
        bool hasPayload = inputs != null && inputs.Length >= 1;
        if (hasPayload) {
            __setOptions(JsonHelper.Deserialize<AdMobOptions>(inputs[0]));
        }
    } catch (Exception ex) {
        // Malformed JSON from the JS side.
        DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
        return;
    }
    DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
}
// Cordova action: applies any inline options and builds the banner view.
// Note: the autoShow option is not parsed from the JS payload here; the
// banner is always created with autoShow enabled.
public void createBanner(string args)
{
    if (logVerbose) Debug.WriteLine("AdMob.createBanner: " + args);
    try
    {
        var inputs = JsonHelper.Deserialize<string[]>(args);
        if (inputs != null && inputs.Length >= 1)
        {
            var options = JsonHelper.Deserialize<AdMobOptions>(inputs[0]);
            if (options != null)
            {
                __setOptions(options);
                // Fall back to the test unit id when no adId was supplied.
                string unitId = string.IsNullOrEmpty(options.adId) ? TEST_BANNER_ID : options.adId;
                __createBanner(unitId, true);
            }
        }
    }
    catch (Exception ex)
    {
        DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
        return;
    }
    DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
}
// Cordova action: tears the banner down — hides it, detaches every event
// handler and drops the AdView so createBanner() can build a fresh one.
public void removeBanner(string args)
{
    if (logVerbose) Debug.WriteLine("AdMob.removeBanner: " + args);
    // Asynchronous UI threading call
    Deployment.Current.Dispatcher.BeginInvoke(() =>
    {
        __hideBanner();
        // Bug fix: the original dereferenced bannerAd unconditionally, so
        // calling removeBanner before createBanner threw NullReferenceException
        // (__hideBanner itself already no-ops when bannerAd is null).
        if (bannerAd != null)
        {
            // Remove event handlers
            bannerAd.FailedToReceiveAd -= banner_onAdFailLoad;
            bannerAd.LeavingApplication -= banner_onAdLeaveApp;
            bannerAd.ReceivedAd -= banner_onAdLoaded;
            bannerAd.ShowingOverlay -= banner_onAdPresent;
            bannerAd.DismissingOverlay -= banner_onAdDismiss;
            bannerAd = null;
        }
    });
    DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
}
// Cordova action: shows the previously created banner at one of the
// predefined positions (TOP_LEFT..BOTTOM_RIGHT).
public void showBanner(string args) {
    if (logVerbose) Debug.WriteLine("AdMob.showBanner: " + args);
    try {
        var inputs = JsonHelper.Deserialize<string[]>(args);
        bool hasPosition = inputs != null && inputs.Length >= 1;
        if (hasPosition) {
            __showBanner(Convert.ToInt32(inputs[0]), 0, 0);
        }
    } catch (Exception ex) {
        DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
        return;
    }
    DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
}
// Cordova action: shows the banner at an absolute (x, y) position.
public void showBannerAtXY(string args) {
    if (logVerbose) Debug.WriteLine("AdMob.showBannerAtXY: " + args);
    try {
        string[] inputs = JsonHelper.Deserialize<string[]>(args);
        // Bug fix: both coordinates are required. The original checked only
        // for >= 1 element and then read inputs[1], which threw
        // IndexOutOfRangeException when a single argument was passed.
        if (inputs != null && inputs.Length >= 2) {
            int x = Convert.ToInt32(inputs[0]);
            int y = Convert.ToInt32(inputs[1]);
            __showBanner(POS_XY, x, y);
        }
    } catch (Exception ex) {
        DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
        return;
    }
    DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
}
// Cordova action: hides the banner without destroying it; a later
// showBanner() can bring it back without a new ad request.
public void hideBanner(string args)
{
    if (logVerbose)
    {
        Debug.WriteLine("AdMob.hideBanner: " + args);
    }
    __hideBanner();
    DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
}
// Cordova action: applies inline options and starts loading an interstitial.
// autoShow stays false here, so the ad is only shown by a later
// showInterstitial() call.
public void prepareInterstitial(string args)
{
if (logVerbose) Debug.WriteLine("AdMob.prepareInterstitial: " + args);
string adId = "";
bool autoShow = false;
try
{
string[] inputs = JsonHelper.Deserialize<string[]>(args);
if (inputs != null && inputs.Length >= 1)
{
var options = JsonHelper.Deserialize<AdMobOptions>(inputs[0]);
if (options != null)
{
__setOptions(options);
// NOTE(review): __prepareInterstitial is only invoked when options.adId
// is non-empty, even though __prepareInterstitial can fall back to the
// stored interstitialId on an empty id — confirm a call without adId is
// deliberately a no-op.
if (!string.IsNullOrEmpty(options.adId))
{
adId = options.adId;
//if (options.ContainsKey(OPT_AUTO_SHOW))
//    autoShow = Convert.ToBoolean(options[OPT_AUTO_SHOW]);
__prepareInterstitial(adId, autoShow);
}
}
}
}
catch (Exception ex)
{
// Malformed JSON from the JS side.
DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
return;
}
DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
}
// Cordova action: shows a prepared interstitial, or — when none has been
// loaded yet — loads one with the stored id and shows it on arrival.
public void showInterstitial(string args)
{
    if (logVerbose) Debug.WriteLine("AdMob.showInterstitial: " + args);
    if (interstitialAd == null)
    {
        // Nothing loaded yet: prepare with autoShow so it displays when ready.
        __prepareInterstitial(interstitialId, true);
    }
    else
    {
        __showInterstitial();
    }
    DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
}
#endregion
#region Private methods
// Copies every option that was actually supplied (non-null) into the
// corresponding field; omitted options keep their current values.
private void __setOptions(AdMobOptions options)
{
    if (options == null)
        return;
    isTesting        = options.isTesting ?? isTesting;
    logVerbose       = options.logVerbose ?? logVerbose;
    overlap          = options.overlap ?? overlap;
    orientationRenew = options.orientationRenew ?? orientationRenew;
    adPosition       = options.position ?? adPosition;
    posX             = options.x ?? posX;
    posY             = options.y ?? posY;
    adWidth          = options.width ?? adWidth;
    adHeight         = options.height ?? adHeight;
    bannerId         = options.bannerId ?? bannerId;
    interstitialId   = options.interstitialId ?? interstitialId;
    if (options.adSize != null)
        adSize = adSizeFromString(options.adSize);
}
// Creates the AdView on the UI thread, wires its lifecycle events and kicks
// off the first ad request. No-op when a banner already exists.
// adId: ad unit id; a non-empty value replaces the stored bannerId, an empty
//       value falls back to it, and isTesting forces the test unit id.
// autoShow: when true the banner is shown as soon as it is created.
private void __createBanner(string adId, bool autoShow) {
if (bannerAd != null) {
if(logVerbose) Debug.WriteLine("banner already created.");
return;
}
if (isTesting)
adId = TEST_BANNER_ID;
// Non-empty adId is remembered; empty adId reuses the last stored id.
if ((adId!=null) && (adId.Length > 0))
bannerId = adId;
else
adId = bannerId;
autoShowBanner = autoShow;
// Asynchronous UI threading call
Deployment.Current.Dispatcher.BeginInvoke(() => {
// Re-check on the UI thread: the field may have been set meanwhile.
if(bannerAd == null) {
bannerAd = new AdView {
Format = adSize,
AdUnitID = bannerId
};
// Add event handlers
bannerAd.FailedToReceiveAd += banner_onAdFailLoad;
bannerAd.LeavingApplication += banner_onAdLeaveApp;
bannerAd.ReceivedAd += banner_onAdLoaded;
bannerAd.ShowingOverlay += banner_onAdPresent;
bannerAd.DismissingOverlay += banner_onAdDismiss;
}
bannerVisible = false;
AdRequest adRequest = new AdRequest();
adRequest.ForceTesting = isTesting;
bannerAd.LoadAd( adRequest );
if(autoShowBanner) {
__showBanner(adPosition, posX, posY);
}
});
}
// Attaches the banner to the page's root grid on the UI thread, either
// overlapping the Cordova view or in its own grid row (split mode).
// argPos: one of the position constants (or POS_XY with argX/argY).
// NOTE(review): argPos/argX/argY are not read here — the stored
// adPosition/posX/posY fields are used instead; confirm that is intended.
private void __showBanner(int argPos, int argX, int argY) {
if (bannerAd == null) {
if(logVerbose) Debug.WriteLine("banner is null, call createBanner() first.");
return;
}
// Asynchronous UI threading call
Deployment.Current.Dispatcher.BeginInvoke(() => {
PhoneApplicationFrame frame;
PhoneApplicationPage page;
CordovaView view;
Grid grid;
// Walk the visual tree: root frame -> current page -> named elements.
if (TryCast(Application.Current.RootVisual, out frame) &&
TryCast(frame.Content, out page) &&
TryCast(page.FindName(UI_CORDOVA_VIEW), out view) &&
TryCast(page.FindName(UI_LAYOUT_ROOT), out grid)) {
// Detach first so a repeated show never adds the banner twice.
if(grid.Children.Contains(bannerAd)) grid.Children.Remove(bannerAd);
if(overlap) {
__showBannerOverlap(grid, adPosition);
} else {
// First split-mode show: capture the view size so the height can be
// restored later, and track orientation to keep the height correct.
if(! bannerVisible) {
initialViewHeight = view.ActualHeight;
initialViewWidth = view.ActualWidth;
frame.OrientationChanged += onOrientationChanged;
}
__showBannerSplit(grid, view, adPosition);
setCordovaViewHeight(frame, view);
}
bannerAd.Visibility = Visibility.Visible;
bannerVisible = true;
}
});
}
// Positions the banner inside the root grid using alignment only, so it
// overlaps the Cordova view instead of reserving its own grid row.
// Positions 1..9 form a 3x3 matrix: column = (position-1) % 3,
// row = (position-1) / 3; values outside 0..2 leave the alignment unchanged.
private void __showBannerOverlap(Grid grid, int position) {
    int column = (position - 1) % 3;
    if (column == 0) {
        bannerAd.HorizontalAlignment = HorizontalAlignment.Left;
    } else if (column == 1) {
        bannerAd.HorizontalAlignment = HorizontalAlignment.Center;
    } else if (column == 2) {
        bannerAd.HorizontalAlignment = HorizontalAlignment.Right;
    }
    int rowIndex = (position - 1) / 3;
    if (rowIndex == 0) {
        bannerAd.VerticalAlignment = VerticalAlignment.Top;
    } else if (rowIndex == 1) {
        bannerAd.VerticalAlignment = VerticalAlignment.Center;
    } else if (rowIndex == 2) {
        bannerAd.VerticalAlignment = VerticalAlignment.Bottom;
    }
    grid.Children.Add(bannerAd);
}
// Split mode: gives the banner its own auto-sized grid row so it does not
// cover the Cordova view. Top positions (1..3) insert the row above the
// view; center/bottom positions append it below.
private void __showBannerSplit(Grid grid, CordovaView view, int position) {
// Lazily create the shared banner row; removed again in __hideBanner().
if(row == null) {
row = new RowDefinition();
row.Height = GridLength.Auto;
}
grid.Children.Add(bannerAd);
// (position-1)/3: 0 = top row, 1 = middle row, 2 = bottom row.
switch((position-1)/3) {
case 0:
grid.RowDefinitions.Insert(0,row);
Grid.SetRow(bannerAd, 0);
Grid.SetRow(view, 1);
break;
case 1:
case 2:
// Center and bottom are both rendered as a row below the view.
grid.RowDefinitions.Add(row);
Grid.SetRow(bannerAd, 1);
break;
}
}
// Detaches the banner from the page's root grid on the UI thread, removes
// the reserved grid row and restores the Cordova view height. The AdView
// itself is kept so the banner can be shown again without reloading.
private void __hideBanner() {
if (bannerAd == null) {
if(logVerbose) Debug.WriteLine("banner is null, call createBanner() first.");
return;
}
// Asynchronous UI threading call
Deployment.Current.Dispatcher.BeginInvoke(() => {
PhoneApplicationFrame frame;
PhoneApplicationPage page;
CordovaView view;
Grid grid;
// Walk the visual tree: root frame -> current page -> named elements.
if (TryCast(Application.Current.RootVisual, out frame) &&
TryCast(frame.Content, out page) &&
TryCast(page.FindName(UI_CORDOVA_VIEW), out view) &&
TryCast(page.FindName(UI_LAYOUT_ROOT), out grid)) {
grid.Children.Remove(bannerAd);
grid.RowDefinitions.Remove(row);
row = null;
bannerAd.Visibility = Visibility.Collapsed;
bannerVisible = false;
// Split mode shrank the view; stop tracking rotation and restore it.
if(! overlap) {
frame.OrientationChanged -= onOrientationChanged;
setCordovaViewHeight(frame, view);
}
}
});
}
// Creates a new InterstitialAd on the UI thread and starts loading it.
// adId: a non-empty value replaces the stored interstitialId, an empty value
//       falls back to it, and isTesting forces the test unit id.
// autoShow: when true the ad is shown from interstitial_onAdLoaded.
// NOTE(review): each call creates a fresh InterstitialAd and subscribes new
// handlers without detaching the previous instance's — confirm repeated
// prepare calls do not leak handler subscriptions.
private void __prepareInterstitial(string adId, bool autoShow) {
if (isTesting)
adId = TEST_INTERSTITIAL_ID;
if ((adId != null) && (adId.Length > 0)) {
interstitialId = adId;
} else {
adId = interstitialId;
}
autoShowInterstitial = autoShow;
// Asynchronous UI threading call
Deployment.Current.Dispatcher.BeginInvoke(() => {
interstitialAd = new InterstitialAd( interstitialId );
// Add event listeners
interstitialAd.ReceivedAd += interstitial_onAdLoaded;
interstitialAd.FailedToReceiveAd += interstitial_onAdFailLoad;
interstitialAd.ShowingOverlay += interstitial_onAdPresent;
interstitialAd.DismissingOverlay += interstitial_onAdDismiss;
AdRequest adRequest = new AdRequest();
adRequest.ForceTesting = isTesting;
interstitialAd.LoadAd(adRequest);
});
}
// Shows the loaded interstitial on the UI thread; logs and bails out when
// prepareInterstitial() has not been called yet.
private void __showInterstitial() {
    if (interstitialAd == null) {
        if (logVerbose) {
            Debug.WriteLine("interstitial is null, call prepareInterstitial() first.");
        }
        return;
    }
    Deployment.Current.Dispatcher.BeginInvoke(() => interstitialAd.ShowAd());
}
// Events --------
// Device orientation
// Subscribed only while the banner is shown in split (non-overlap) mode;
// re-applies the Cordova view height for the new orientation.
private void onOrientationChanged(object sender, OrientationChangedEventArgs e)
{
// Asynchronous UI threading call
Deployment.Current.Dispatcher.BeginInvoke(() => {
PhoneApplicationFrame frame;
PhoneApplicationPage page;
CordovaView view;
Grid grid;
// Walk the visual tree: root frame -> current page -> named elements.
if (TryCast(Application.Current.RootVisual, out frame) &&
TryCast(frame.Content, out page) &&
TryCast(page.FindName(UI_CORDOVA_VIEW), out view) &&
TryCast(page.FindName(UI_LAYOUT_ROOT), out grid)) {
setCordovaViewHeight(frame, view);
}
});
}
/// Set the Cordova view height based on banner visibility and frame orientation,
/// then notify the page's JavaScript via a window "resize" event.
private void setCordovaViewHeight(PhoneApplicationFrame frame, CordovaView view) {
    // Only shrink the view when a banner is visible and not overlapping content.
    bool deduct = bannerVisible && (! overlap);
    if (frame.Orientation == PageOrientation.Portrait ||
        frame.Orientation == PageOrientation.PortraitDown ||
        frame.Orientation == PageOrientation.PortraitUp) {
        view.Height = initialViewHeight - (deduct ? BANNER_HEIGHT_PORTRAIT : 0);
    } else {
        // NOTE(review): in landscape the initial *width* is used as the height,
        // presumably because the screen is rotated — confirm against where
        // initialViewWidth/initialViewHeight are captured.
        view.Height = initialViewWidth - (deduct ? BANNER_HEIGHT_LANDSCAPE : 0);
    }
    fireEvent ("window", "resize", null);
}
// Banner events
// Banner failed to load: forward the SDK error code/message to the JS layer.
private void banner_onAdFailLoad(object sender, AdErrorEventArgs args) {
    fireAdErrorEvent (EVENT_AD_FAILLOAD, ADTYPE_BANNER, getErrCode(args.ErrorCode), getErrStr(args.ErrorCode));
}
// Banner loaded: notify JS, and show the banner immediately when auto-show
// was requested and the banner is not already visible.
private void banner_onAdLoaded(object sender, AdEventArgs args) {
    fireAdEvent (EVENT_AD_LOADED, ADTYPE_BANNER);
    if( (! bannerVisible) && autoShowBanner ) {
        __showBanner(adPosition, posX, posY);
    }
}
// Banner is presenting a full-screen overlay: notify the JS layer.
private void banner_onAdPresent(object sender, AdEventArgs args) {
    fireAdEvent (EVENT_AD_PRESENT, ADTYPE_BANNER);
}
// Banner click is taking the user out of the app: notify the JS layer.
private void banner_onAdLeaveApp(object sender, AdEventArgs args) {
    fireAdEvent (EVENT_AD_LEAVEAPP, ADTYPE_BANNER);
}
// Banner overlay dismissed: notify the JS layer.
private void banner_onAdDismiss(object sender, AdEventArgs args) {
    fireAdEvent (EVENT_AD_DISMISS, ADTYPE_BANNER);
}
// Interstitial events
// Interstitial failed to load: forward the SDK error code/message to JS.
private void interstitial_onAdFailLoad(object sender, AdErrorEventArgs args) {
    fireAdErrorEvent (EVENT_AD_FAILLOAD, ADTYPE_INTERSTITIAL, getErrCode(args.ErrorCode), getErrStr(args.ErrorCode));
}
// Interstitial loaded: notify JS and, if requested at prepare time, show it now.
private void interstitial_onAdLoaded(object sender, AdEventArgs args) {
    fireAdEvent (EVENT_AD_LOADED, ADTYPE_INTERSTITIAL);
    if (autoShowInterstitial) {
        __showInterstitial ();
    }
}
// Interstitial overlay is showing: notify the JS layer.
private void interstitial_onAdPresent(object sender, AdEventArgs args) {
    fireAdEvent (EVENT_AD_PRESENT, ADTYPE_INTERSTITIAL);
}
// Interstitial overlay dismissed: notify the JS layer.
private void interstitial_onAdDismiss(object sender, AdEventArgs args) {
    fireAdEvent (EVENT_AD_DISMISS, ADTYPE_INTERSTITIAL);
}
/// Map the Windows AdMob SDK error enum to the plugin's small integer codes
/// (shared with the JS layer). Unknown values map to -1.
private int getErrCode(AdErrorCode errorCode) {
    if (errorCode == AdErrorCode.InternalError) return 0;
    if (errorCode == AdErrorCode.InvalidRequest) return 1;
    if (errorCode == AdErrorCode.NetworkError) return 2;
    if (errorCode == AdErrorCode.NoFill) return 3;
    if (errorCode == AdErrorCode.Cancelled) return 4;
    if (errorCode == AdErrorCode.StaleInterstitial) return 5;
    if (errorCode == AdErrorCode.NoError) return 6;
    return -1;
}
/// Human-readable message for an AdMob SDK error enum value.
/// Unknown values map to "Unknown".
private string getErrStr(AdErrorCode errorCode) {
    if (errorCode == AdErrorCode.InternalError) return "Internal error";
    if (errorCode == AdErrorCode.InvalidRequest) return "Invalid request";
    if (errorCode == AdErrorCode.NetworkError) return "Network error";
    if (errorCode == AdErrorCode.NoFill) return "No fill";
    if (errorCode == AdErrorCode.Cancelled) return "Cancelled";
    if (errorCode == AdErrorCode.StaleInterstitial) return "Stale interstitial";
    if (errorCode == AdErrorCode.NoError) return "No error";
    return "Unknown";
}
/// Forward a plain ad lifecycle event to the JS layer as a document event.
/// NOTE(review): the payload uses single-quoted pseudo-JSON, not strict JSON —
/// presumably the JS side tolerates this; confirm before normalizing.
private void fireAdEvent(string adEvent, string adType) {
    string json = "{'adNetwork':'AdMob','adType':'" + adType + "','adEvent':'" + adEvent + "'}";
    fireEvent("document", adEvent, json);
}
/// Forward an ad error event (numeric code + reason text) to the JS layer
/// as a document event. Same single-quoted pseudo-JSON format as fireAdEvent.
private void fireAdErrorEvent(string adEvent, string adType, int errCode, string errMsg) {
    string json = "{'adNetwork':'AdMob','adType':'" + adType
        + "','adEvent':'" + adEvent + "','error':" + errCode + ",'reason':'" + errMsg + "'}";
    fireEvent("document", adEvent, json);
}
/// Dispatch an event into the WebView's JavaScript context.
/// obj selects the JS target: "window" raises a UIEvent on window; anything
/// else goes through cordova.fireDocumentEvent. jsonData, when non-null, is
/// appended as the event payload (document events only).
private void fireEvent(string obj, string eventName, string jsonData) {
    if(logVerbose) Debug.WriteLine( eventName );
    string js = "";
    if("window".Equals(obj)) {
        js = "var evt=document.createEvent('UIEvents');evt.initUIEvent('" + eventName
            + "',true,false,window,0);window.dispatchEvent(evt);";
    } else {
        js = "javascript:cordova.fireDocumentEvent('" + eventName + "'";
        if(jsonData != null) {
            js += "," + jsonData;
        }
        js += ");";
    }
    // Hop to the UI thread, locate the Cordova browser control, then invoke
    // the script on the browser control's own dispatcher.
    Deployment.Current.Dispatcher.BeginInvoke(() => {
        PhoneApplicationFrame frame;
        PhoneApplicationPage page;
        CordovaView view;
        if (TryCast(Application.Current.RootVisual, out frame) &&
            TryCast(frame.Content, out page) &&
            TryCast(page.FindName(UI_CORDOVA_VIEW), out view)) {
            // Asynchronous threading call
            view.Browser.Dispatcher.BeginInvoke(() =>{
                try {
                    view.Browser.InvokeScript("eval", new string[] { js });
                } catch {
                    // Best-effort: the page may not be loaded yet; only log.
                    if(logVerbose) Debug.WriteLine("AdMob.fireEvent: Failed to invoke script: " + js);
                }
            });
        }
    });
}
#endregion
/// Safe downcast helper: returns true (with a non-null result) when obj is a T,
/// otherwise false with result set to null.
static bool TryCast<T>(object obj, out T result) where T : class {
    if (obj is T) {
        result = (T)obj;
        return true;
    }
    result = null;
    return false;
}
}
}
| |
using System.Text;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using Microsoft.Win32;
using System.Runtime.CompilerServices;
using System.Net.Configuration;
namespace System.Net
{
// This class uses WinHttp APIs only to find, download and execute the PAC file.
internal sealed class WinHttpWebProxyFinder : BaseWebProxyFinder
{
    // WinHttp session handle created once in the ctor and reused for every PAC query.
    private SafeInternetHandle session;
    // Set when auto-detection (DHCP/DNS) failed fatally; skips auto-detect until Reset().
    private bool autoDetectFailed;

    /// <summary>
    /// Opens the WinHttp session used for PAC discovery/download and applies the
    /// configured download timeout. Failures are logged, never thrown.
    /// </summary>
    public WinHttpWebProxyFinder(AutoWebProxyScriptEngine engine)
        : base(engine)
    {
        // Don't specify a user agent and don't specify proxy settings. This is the same behavior WinHttp
        // uses when downloading the PAC file.
        session = UnsafeNclNativeMethods.WinHttp.WinHttpOpen(null,
            UnsafeNclNativeMethods.WinHttp.AccessType.NoProxy, null, null, 0);
        // Don't throw on error, just log the error information. This is consistent with how auto-proxy
        // works: we never throw on error (discovery, download, execution errors).
        if (session == null || session.IsInvalid)
        {
            int errorCode = GetLastWin32Error();
            if (Logging.On) Logging.PrintError(Logging.Web, SR.GetString(SR.net_log_proxy_winhttp_cant_open_session, errorCode));
        }
        else
        {
            // The default download-timeout is 1 min.
            // WinHTTP will use the sum of all four timeouts provided in WinHttpSetTimeouts as the
            // actual timeout. Setting a value to 0 means "infinite".
            // Since we don't provide the ability to specify fine-grained timeouts like WinHttp does,
            // we simply apply the configured timeout to all four WinHttp timeouts.
            int timeout = SettingsSectionInternal.Section.DownloadTimeout;
            if (!UnsafeNclNativeMethods.WinHttp.WinHttpSetTimeouts(session, timeout, timeout, timeout, timeout))
            {
                // We weren't able to set the timeouts. Just log and continue.
                int errorCode = GetLastWin32Error();
                if (Logging.On) Logging.PrintError(Logging.Web, SR.GetString(SR.net_log_proxy_winhttp_timeout_error, errorCode));
            }
        }
    }

    /// <summary>
    /// Resolve the proxy list for <paramref name="destination"/>: try auto-detect
    /// (DHCP/DNS) if enabled and not previously failed, then fall back to the
    /// configured script location. Returns false when no usable result exists.
    /// On success, a single null entry in proxyList means "DIRECT".
    /// </summary>
    public override bool GetProxies(Uri destination, out IList<string> proxyList)
    {
        proxyList = null;
        if (session == null || session.IsInvalid)
        {
            return false;
        }
        if (State == AutoWebProxyState.UnrecognizedScheme)
        {
            // If a previous call already determined that we don't support the scheme of the script
            // location, then just return false.
            return false;
        }
        string proxyListString = null;
        // Set to auto-detect failed. In case auto-detect is turned off and a script-location is available
        // we'll try downloading the script from that location.
        int errorCode = (int)UnsafeNclNativeMethods.WinHttp.ErrorCodes.AudodetectionFailed;
        // If auto-detect is turned on, try to execute DHCP/DNS query to get PAC file, then run the script
        if (Engine.AutomaticallyDetectSettings && !autoDetectFailed)
        {
            errorCode = GetProxies(destination, null, out proxyListString);
            // Remember if auto-detect failed. If config-script works, then the next time GetProxies() is
            // called, we'll not try auto-detect but jump right to config-script.
            autoDetectFailed = IsErrorFatalForAutoDetect(errorCode);
            if (errorCode == (int)UnsafeNclNativeMethods.WinHttp.ErrorCodes.UnrecognizedScheme)
            {
                // DHCP returned FILE or FTP scheme for the PAC file location: We should stop here
                // since this is not an error, but a feature WinHttp doesn't currently support. The
                // caller may be able to handle this case by using another WebProxyFinder.
                State = AutoWebProxyState.UnrecognizedScheme;
                return false;
            }
        }
        // If auto-detect failed or was turned off, and a config-script location is available, download
        // the script from that location and execute it.
        if ((Engine.AutomaticConfigurationScript != null) && (IsRecoverableAutoProxyError(errorCode)))
        {
            errorCode = GetProxies(destination, Engine.AutomaticConfigurationScript,
                out proxyListString);
        }
        State = GetStateFromErrorCode(errorCode);
        if (State == AutoWebProxyState.Completed)
        {
            if (string.IsNullOrEmpty(proxyListString))
            {
                // In this case the PAC file execution returned "DIRECT", i.e. WinHttp returned
                // 'true' with a 'null' proxy string. This state is represented as a list
                // containing one element with value 'null'.
                proxyList = new string[1] { null };
            }
            else
            {
                // WinHttp doesn't really clear all whitespaces. It does a pretty good job with
                // spaces, but e.g. tabs aren't removed. Therefore make sure all whitespaces get
                // removed.
                // Note: Even though the PAC script could use space characters as separators,
                // WinHttp will always use ';' as separator character. E.g. for the PAC result
                // "PROXY 192.168.0.1 PROXY 192.168.0.2" WinHttp will return "192.168.0.1;192.168.0.2".
                // WinHttp will also remove trailing ';'.
                proxyListString = RemoveWhitespaces(proxyListString);
                proxyList = proxyListString.Split(';');
            }
            return true;
        }
        // We get here if something went wrong, or if neither auto-detect nor script-location
        // were turned on.
        return false;
    }

    /// <summary>No-op: WinHttp has no abort mechanism for these calls.</summary>
    public override void Abort()
    {
        // WinHttp doesn't support aborts. Therefore we can't do anything here.
    }

    /// <summary>Reset base state and allow auto-detect to be attempted again.</summary>
    public override void Reset()
    {
        base.Reset();
        // Reset auto-detect failure: If the connection changes, we may be able to do auto-detect again.
        autoDetectFailed = false;
    }

    /// <summary>Close the WinHttp session handle when disposing.</summary>
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            if (session != null && !session.IsInvalid)
            {
                session.Close();
            }
        }
    }

    /// <summary>
    /// Core PAC query. scriptLocation == null means auto-discovery (DHCP + DNS-A);
    /// otherwise the PAC file is downloaded from that URL. Retries once with
    /// auto-logon if the first attempt failed with a login failure and credentials
    /// are configured. Returns a Win32 error code (0 on success).
    /// </summary>
    private int GetProxies(Uri destination, Uri scriptLocation, out string proxyListString)
    {
        int errorCode = 0;
        proxyListString = null;
        UnsafeNclNativeMethods.WinHttp.WINHTTP_AUTOPROXY_OPTIONS autoProxyOptions =
            new UnsafeNclNativeMethods.WinHttp.WINHTTP_AUTOPROXY_OPTIONS();
        // Always try to download the PAC file without authentication. If we turn auth. on, the WinHttp
        // service will create a new session for every request (performance/memory implications).
        // Therefore we only turn auto-logon on if it is really needed.
        autoProxyOptions.AutoLogonIfChallenged = false;
        if (scriptLocation == null)
        {
            // Use auto-discovery to find the script location.
            autoProxyOptions.Flags = UnsafeNclNativeMethods.WinHttp.AutoProxyFlags.AutoDetect;
            autoProxyOptions.AutoConfigUrl = null;
            autoProxyOptions.AutoDetectFlags = UnsafeNclNativeMethods.WinHttp.AutoDetectType.Dhcp |
                UnsafeNclNativeMethods.WinHttp.AutoDetectType.DnsA;
        }
        else
        {
            // Use the provided script location for the PAC file.
            autoProxyOptions.Flags = UnsafeNclNativeMethods.WinHttp.AutoProxyFlags.AutoProxyConfigUrl;
            autoProxyOptions.AutoConfigUrl = scriptLocation.ToString();
            autoProxyOptions.AutoDetectFlags = UnsafeNclNativeMethods.WinHttp.AutoDetectType.None;
        }
        if (!WinHttpGetProxyForUrl(destination.ToString(), ref autoProxyOptions, out proxyListString))
        {
            errorCode = GetLastWin32Error();
            // If the PAC file can't be downloaded because auth. was required, we check if the
            // credentials are set; if so, then we try again using auto-logon.
            // Note that by default webProxy.Credentials will be null. The user needs to set
            // <defaultProxy useDefaultCredentials="true"> in the config file, in order for
            // webProxy.Credentials to be set to DefaultNetworkCredentials.
            if ((errorCode == (int)UnsafeNclNativeMethods.WinHttp.ErrorCodes.LoginFailure) &&
                (Engine.Credentials != null))
            {
                // Now we need to try again, this time by enabling auto-logon.
                autoProxyOptions.AutoLogonIfChallenged = true;
                if (!WinHttpGetProxyForUrl(destination.ToString(), ref autoProxyOptions,
                    out proxyListString))
                {
                    errorCode = GetLastWin32Error();
                }
            }
            if (Logging.On) Logging.PrintError(Logging.Web, SR.GetString(SR.net_log_proxy_winhttp_getproxy_failed, destination, errorCode));
        }
        return errorCode;
    }

    /// <summary>
    /// P/Invoke wrapper around WinHttpGetProxyForUrl. Marshals the returned proxy
    /// string and frees the native buffers inside a constrained execution region.
    /// </summary>
    private bool WinHttpGetProxyForUrl(string destination,
        ref UnsafeNclNativeMethods.WinHttp.WINHTTP_AUTOPROXY_OPTIONS autoProxyOptions,
        out string proxyListString)
    {
        proxyListString = null;
        bool success = false;
        UnsafeNclNativeMethods.WinHttp.WINHTTP_PROXY_INFO proxyInfo =
            new UnsafeNclNativeMethods.WinHttp.WINHTTP_PROXY_INFO();
        // Make sure the strings get cleaned up in a CER (thus unexpected exceptions, like
        // ThreadAbortException will not interrupt the execution of the finally block, and we'll not
        // leak resources).
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
            success = UnsafeNclNativeMethods.WinHttp.WinHttpGetProxyForUrl(session,
                destination, ref autoProxyOptions, out proxyInfo);
            if (success)
            {
                proxyListString = Marshal.PtrToStringUni(proxyInfo.Proxy);
            }
        }
        finally
        {
            // FreeHGlobal tolerates IntPtr.Zero, so it's safe even when the call failed.
            Marshal.FreeHGlobal(proxyInfo.Proxy);
            Marshal.FreeHGlobal(proxyInfo.ProxyBypass);
        }
        return success;
    }

    /// <summary>
    /// Marshal.GetLastWin32Error, except out-of-memory is surfaced as an exception
    /// instead of an error code.
    /// </summary>
    private static int GetLastWin32Error()
    {
        int errorCode = Marshal.GetLastWin32Error();
        if (errorCode == NativeMethods.ERROR_NOT_ENOUGH_MEMORY)
        {
            throw new OutOfMemoryException();
        }
        return errorCode;
    }

    /// <summary>
    /// True when the error from an auto-detect attempt still justifies trying the
    /// explicitly configured script location.
    /// </summary>
    private static bool IsRecoverableAutoProxyError(int errorCode)
    {
        GlobalLog.Assert(errorCode != UnsafeNclNativeMethods.ErrorCodes.ERROR_INVALID_PARAMETER,
            "WinHttpGetProxyForUrl() call: Error code 'Invalid parameter' should not be returned.");
        // According to WinHttp the following states can be considered "recoverable", i.e.
        // we should continue trying WinHttpGetProxyForUrl() with the provided script-location
        // (if available).
        switch ((UnsafeNclNativeMethods.WinHttp.ErrorCodes)errorCode)
        {
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.AutoProxyServiceError:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.AudodetectionFailed:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.BadAutoProxyScript:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.LoginFailure:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.OperationCancelled:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.Timeout:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.UnableToDownloadScript:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.UnrecognizedScheme:
                return true;
        }
        return false;
    }

    /// <summary>Map a WinHttp error code to the finder's AutoWebProxyState.</summary>
    private static AutoWebProxyState GetStateFromErrorCode(int errorCode)
    {
        if (errorCode == UnsafeNclNativeMethods.ErrorCodes.ERROR_SUCCESS)
        {
            return AutoWebProxyState.Completed;
        }
        switch ((UnsafeNclNativeMethods.WinHttp.ErrorCodes)errorCode)
        {
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.AudodetectionFailed:
                return AutoWebProxyState.DiscoveryFailure;
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.UnableToDownloadScript:
                return AutoWebProxyState.DownloadFailure;
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.UnrecognizedScheme:
                return AutoWebProxyState.UnrecognizedScheme;
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.BadAutoProxyScript:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.InvalidUrl:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.AutoProxyServiceError:
                // AutoProxy succeeded, but no proxy could be found for this request
                return AutoWebProxyState.Completed;
            default:
                // We don't know the exact cause of the failure. Set the state to compilation failure to
                // indicate that something went wrong.
                return AutoWebProxyState.CompilationFailure;
        }
    }

    /// <summary>Strip every whitespace character (not just spaces) from value.</summary>
    private static string RemoveWhitespaces(string value)
    {
        StringBuilder result = new StringBuilder();
        foreach (char c in value)
        {
            if (!char.IsWhiteSpace(c))
            {
                result.Append(c);
            }
        }
        return result.ToString();
    }

    // Should we ignore auto-detect from now on?
    // http://msdn.microsoft.com/en-us/library/aa384097(VS.85).aspx
    private static bool IsErrorFatalForAutoDetect(int errorCode)
    {
        switch ((UnsafeNclNativeMethods.WinHttp.ErrorCodes)errorCode)
        {
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.Success:
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.InvalidUrl:
                // Some URIs are not supported (like Unicode hosts on Win7 and lower),
                // but our proxy is still valid
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.BadAutoProxyScript:
                // Got the script, but something went wrong in execution. For example,
                // the request was for an unresolvable single label name.
            case UnsafeNclNativeMethods.WinHttp.ErrorCodes.AutoProxyServiceError:
                // Returned when a proxy for the specified URL cannot be located.
                return false;
            default:
                return true;
        }
    }
}
}
| |
//css_ref Microsoft.CSharp;
using System;
using System.Reflection;
using System.Collections.Generic;
using System.Diagnostics;
using CSScriptLibrary;
public interface ICalculator
{
int Add(int a, int b);
}
public interface IAnotherCalculator
{
int Add(int a, int b);
}
/// <summary>
/// Benchmark host comparing techniques for invoking methods on a dynamically
/// compiled (CS-Script) class: pure reflection, cached fast invokers, emitted
/// delegates, interfaces, interface alignment, C# 4.0 dynamic, and statically
/// compiled code/delegates as the baseline. Timings go to the console.
/// </summary>
public class Host
{
    static void Main()
    {
        // Compile the script once; all benchmarks below reuse this assembly.
        Assembly assembly = CSScript.LoadCode(
            @"using System;
public class Calculator : ICalculator
{
public int Add(int a, int b)
{
return a + b;
}
public string Join(string a, string b)
{
return a + b;
}
}");
        AsmHelper calc = new AsmHelper(assembly);
        object instance = calc.CreateObject("Calculator"); //calc.CreateObject("*") can be used too if assembly has only one class defined
        FastInvokeDelegate methodInvoker = calc.GetMethodInvoker("Calculator.Add", 0, 0);
        int numOfLoops = 1000000;
        TestReflection(numOfLoops, calc, instance);
        TestFastInvoking(numOfLoops, calc, instance);
        TestDelegates(numOfLoops, methodInvoker, instance);
        TestInterface(numOfLoops, instance);
        TestInterfaceAlignment(numOfLoops, instance);
        TestDynamic(numOfLoops, instance);
        TestCompiledCode(numOfLoops);
        TestCompiledDelegate(numOfLoops);
        //TestMethodDelegates();
    }

    /// <summary>Time numOfLoops calls through pure reflection (caching disabled).</summary>
    static void TestReflection(int numOfLoops, AsmHelper script, object instance)
    {
        //Starting from version v2.2 pure Reflection calls (script.CachingEnabled = false)
        //are ~2 times faster simply because of internal optimization in AsmHelper.
        script.CachingEnabled = false;
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < numOfLoops; i++)
            script.InvokeInst(instance, "Calculator.Add", 1, 2);
        sw.Stop();
        Console.WriteLine("Reflection: " + sw.ElapsedMilliseconds);
    }

    /// <summary>Time numOfLoops calls through AsmHelper with invoker caching enabled.</summary>
    static void TestFastInvoking(int numOfLoops, AsmHelper script, object instance)
    {
        //Starting from version v2.2 pure Reflection calls are no longer the only available
        //option for invoking script methods. AsmHelper can cache dynamically emitted method
        //invokers and use them internally when AsmHelper's Invoke()/InvokeInst() called.
        //
        //Thus Invoke()/InvokeInst() are more than 100 times faster in v2.2 than in v2.1 when
        //AsmHelper caching is enabled (script.CachingEnabled = true).
        script.CachingEnabled = true; //it is true by default
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < numOfLoops; i++)
            script.InvokeInst(instance, "Calculator.Add", 1, 2);
        sw.Stop();
        Console.WriteLine("Fast invoking: " + sw.ElapsedMilliseconds);
    }

    /// <summary>Time numOfLoops calls through a pre-fetched emitted delegate.</summary>
    static void TestDelegates(int numOfLoops, FastInvokeDelegate fastInvoker, object instance)
    {
        //Starting from version v2.2 AsmHelper can return dynamically emitted method
        //invoker (delegate) which can be used by the host application to invoke script methods without AsmHelper.
        //
        //This option allows script methods execution more than 250 times faster than in pure reflection calls
        //available in AsmHelper v2.1. The generated FastInvokeDelegate is almost as fast as direct calls for statically compiled types.
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < numOfLoops; i++)
            fastInvoker(instance, 1, 2);
        sw.Stop();
        Console.WriteLine("Delegate: " + sw.ElapsedMilliseconds);
    }

    /// <summary>Time numOfLoops direct calls through the ICalculator interface.</summary>
    static void TestInterface(int numOfLoops, object instance)
    {
        //Using interfaces represents the best possible Invocation option with respect to performance (and type safety).
        //When dynamic type (from the script) is typecasted to the interface it is no longer "treated" as a dynamic type.
        //You can use compile time type checking and at runtime all method calls are "direct calls".
        //
        //This option is a clear winner (along with InterfaceAlignment) as it also allows usage of intellisense (at development stage) for the script types.
        ICalculator iCalc = (ICalculator)instance;
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < numOfLoops; i++)
            iCalc.Add(1, 2);
        sw.Stop();
        Console.WriteLine("Interface: " + sw.ElapsedMilliseconds);
    }

    /// <summary>Time numOfLoops calls through a C# 4.0 'dynamic' receiver.</summary>
    static void TestDynamic(int numOfLoops, object instance)
    {
        //Using C# 4.0 gives you the smallest coding overhead and great readability. Though even if it is faster than Reflection it is still slower than Interfaces. And also
        //it is less type-safe than InterfaceAlignment, which allows validating the whole script class at once.
        //
        dynamic Calc = instance;
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < numOfLoops; i++)
            Calc.Add(1, 2);
        sw.Stop();
        Console.WriteLine("Dynamic: " + sw.ElapsedMilliseconds);
    }

    /// <summary>Time numOfLoops calls through a duck-typed (aligned) interface proxy.</summary>
    static void TestInterfaceAlignment(int numOfLoops, object instance)
    {
        //Using interfaces represents the best possible Invocation option with respect to performance (and type safety).
        //When dynamic type (from the script) is typecasted to the interface it is no longer "treated" as a dynamic type.
        //You can use compile-time type checking and at runtime all method calls are "direct calls".
        //
        //This option is a clear winner as it also allows usage of intellisense (at development stage) for the script types.
        //
        //Note the Calculator instance does not actually implement IAnotherCalculator but it still can be aligned with this
        //interface as it has int Add(int a, int b) method.
        IAnotherCalculator iCalc = instance.AlignToInterface<IAnotherCalculator>();
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < numOfLoops; i++)
            iCalc.Add(1, 2);
        sw.Stop();
        Console.WriteLine("Interface (Aligned): " + sw.ElapsedMilliseconds);
    }

    // Statically compiled baseline method used by TestCompiledCode.
    static int Add(int a, int b)
    {
        return a + b;
    }

    /// <summary>Baseline: time numOfLoops direct calls to a compiled method.</summary>
    static void TestCompiledCode(int numOfLoops)
    {
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < numOfLoops; i++)
            Add(1, 2);
        sw.Stop();
        Console.WriteLine("Compiled code: " + sw.ElapsedMilliseconds);
    }

    /// <summary>Baseline: time numOfLoops calls through a compiled lambda delegate.</summary>
    static void TestCompiledDelegate(int numOfLoops)
    {
        Func<int, int, int> add = (a, b) => a + b;
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < numOfLoops; i++)
            add(1, 2);
        sw.Stop();
        Console.WriteLine("Compiled delegate: " + sw.ElapsedMilliseconds);
    }

    /// <summary>Demonstrates static/instance/general method delegates (not timed; unused by Main).</summary>
    static void TestMethodDelegates()
    {
        Assembly assembly = CSScript.LoadCode(
            @"using System;
public class Calculator
{
static public void PrintSum(int a, int b)
{
Console.WriteLine(a + b);
}
public int Multiply(int a, int b)
{
return (a * b);
}
}");
        AsmHelper calc = new AsmHelper(assembly);
        //using static method delegate
        var PrintSum = calc.GetStaticMethod("Calculator.PrintSum", 0, 0);
        PrintSum(1, 2);
        //using instance method delegate
        var obj = calc.CreateObject("Calculator");
        var Multiply = calc.GetMethod(obj, "Multiply", 0, 0);
        Console.WriteLine(Multiply(3, 5));
        //using general method delegate; can invoke both static and instance methods
        var methodInvoker = calc.GetMethodInvoker("Calculator.PrintSum", 0, 0);
        methodInvoker(null, 5, 12);
    }
}
| |
using System;
using UnityEngine;
namespace UnityStandardAssets.CinematicEffects
{
[ExecuteInEditMode]
[RequireComponent(typeof(Camera))]
[AddComponentMenu("Image Effects/Cinematic/Bloom")]
#if UNITY_5_4_OR_NEWER
[ImageEffectAllowedInSceneView]
#endif
public class Bloom : MonoBehaviour
{
    /// <summary>Serializable user-facing parameters of the bloom effect.</summary>
    [Serializable]
    public struct Settings
    {
        [SerializeField]
        [Tooltip("Filters out pixels under this level of brightness.")]
        public float threshold;

        // Threshold as stored (gamma space), clamped to be non-negative on read.
        public float thresholdGamma
        {
            set { threshold = value; }
            get { return Mathf.Max(0.0f, threshold); }
        }

        // Threshold converted to/from linear color space.
        public float thresholdLinear
        {
            set { threshold = Mathf.LinearToGammaSpace(value); }
            get { return Mathf.GammaToLinearSpace(thresholdGamma); }
        }

        [SerializeField, Range(0, 1)]
        [Tooltip("Makes transition between under/over-threshold gradual.")]
        public float softKnee;

        [SerializeField, Range(1, 7)]
        [Tooltip("Changes extent of veiling effects in a screen resolution-independent fashion.")]
        public float radius;

        [SerializeField]
        [Tooltip("Blend factor of the result image.")]
        public float intensity;

        [SerializeField]
        [Tooltip("Controls filter quality and buffer resolution.")]
        public bool highQuality;

        [SerializeField]
        [Tooltip("Reduces flashing noise with an additional filter.")]
        public bool antiFlicker;

        // Default parameter set used when the component is first added.
        public static Settings defaultSettings
        {
            get
            {
                var settings = new Settings
                {
                    threshold = 0.9f,
                    softKnee = 0.5f,
                    radius = 2.0f,
                    intensity = 0.7f,
                    highQuality = true,
                    antiFlicker = false
                };
                return settings;
            }
        }
    }

    #region Public Properties
    [SerializeField]
    public Settings settings = Settings.defaultSettings;
    #endregion

    [SerializeField, HideInInspector]
    private Shader m_Shader;

    // Lazily resolves the bloom shader by its hidden resource name.
    public Shader shader
    {
        get
        {
            if (m_Shader == null)
            {
                const string shaderName = "Hidden/Image Effects/Cinematic/Bloom";
                m_Shader = Shader.Find(shaderName);
            }
            return m_Shader;
        }
    }

    private Material m_Material;

    // Lazily creates the material from the shader (destroyed in OnDisable).
    public Material material
    {
        get
        {
            if (m_Material == null)
                m_Material = ImageEffectHelper.CheckShaderAndCreateMaterial(shader);
            return m_Material;
        }
    }

    #region Private Members
    // Upper bound on mip-pyramid depth; also sizes the two buffer arrays below.
    const int kMaxIterations = 16;

    // Temp render targets for the downsample and upsample passes, reused per frame.
    RenderTexture[] m_blurBuffer1 = new RenderTexture[kMaxIterations];
    RenderTexture[] m_blurBuffer2 = new RenderTexture[kMaxIterations];

    // Disable the component if the platform/shader can't run the effect.
    private void OnEnable()
    {
        if (!ImageEffectHelper.IsSupported(shader, true, false, this))
            enabled = false;
    }

    // Release the lazily created material.
    private void OnDisable()
    {
        if (m_Material != null)
            DestroyImmediate(m_Material);
        m_Material = null;
    }

    /// Full bloom pipeline: prefilter -> downsample pyramid -> upsample/combine -> final blit.
    /// Pass indices into the material select the shader variant for each stage.
    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        var useRGBM = Application.isMobilePlatform;
        // source texture size
        var tw = source.width;
        var th = source.height;
        // halve the texture size for the low quality mode
        if (!settings.highQuality)
        {
            tw /= 2;
            th /= 2;
        }
        // blur buffer format
        var rtFormat = useRGBM ? RenderTextureFormat.Default : RenderTextureFormat.DefaultHDR;
        // determine the iteration count
        var logh = Mathf.Log(th, 2) + settings.radius - 8;
        var logh_i = (int)logh;
        var iterations = Mathf.Clamp(logh_i, 1, kMaxIterations);
        // update the shader properties
        var threshold = settings.thresholdLinear;
        material.SetFloat("_Threshold", threshold);
        // knee/curve implement the soft-threshold transition; epsilon avoids div-by-zero
        var knee = threshold * settings.softKnee + 1e-5f;
        var curve = new Vector3(threshold - knee, knee * 2, 0.25f / knee);
        material.SetVector("_Curve", curve);
        var pfo = !settings.highQuality && settings.antiFlicker;
        material.SetFloat("_PrefilterOffs", pfo ? -0.5f : 0.0f);
        material.SetFloat("_SampleScale", 0.5f + logh - logh_i);
        material.SetFloat("_Intensity", Mathf.Max(0.0f, settings.intensity));
        // prefilter pass
        var prefiltered = RenderTexture.GetTemporary(tw, th, 0, rtFormat);
        Graphics.Blit(source, prefiltered, material, settings.antiFlicker ? 1 : 0);
        // construct a mip pyramid
        var last = prefiltered;
        for (var level = 0; level < iterations; level++)
        {
            m_blurBuffer1[level] = RenderTexture.GetTemporary(last.width / 2, last.height / 2, 0, rtFormat);
            Graphics.Blit(last, m_blurBuffer1[level], material, level == 0 ? (settings.antiFlicker ? 3 : 2) : 4);
            last = m_blurBuffer1[level];
        }
        // upsample and combine loop
        for (var level = iterations - 2; level >= 0; level--)
        {
            var basetex = m_blurBuffer1[level];
            material.SetTexture("_BaseTex", basetex);
            m_blurBuffer2[level] = RenderTexture.GetTemporary(basetex.width, basetex.height, 0, rtFormat);
            Graphics.Blit(last, m_blurBuffer2[level], material, settings.highQuality ? 6 : 5);
            last = m_blurBuffer2[level];
        }
        // finish process
        material.SetTexture("_BaseTex", source);
        Graphics.Blit(last, destination, material, settings.highQuality ? 8 : 7);
        // release the temporary buffers
        for (var i = 0; i < kMaxIterations; i++)
        {
            if (m_blurBuffer1[i] != null) RenderTexture.ReleaseTemporary(m_blurBuffer1[i]);
            if (m_blurBuffer2[i] != null) RenderTexture.ReleaseTemporary(m_blurBuffer2[i]);
            m_blurBuffer1[i] = null;
            m_blurBuffer2[i] = null;
        }
        RenderTexture.ReleaseTemporary(prefiltered);
    }
    #endregion
}
}
| |
//! \file Utility.cs
//! \date Sat Jul 05 02:47:33 2014
//! \brief utility classes for GameRes assembly.
//
// Copyright (C) 2014 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System.Collections.Generic;
using System.Text;
namespace GameRes.Utility
{
/// <summary>
/// Byte-order and raw byte-buffer helpers: endianness swaps, overlapped copies,
/// C-string extraction and bit rotations.
/// </summary>
public static class Binary
{
    /// <summary>Reverse the byte order of a 32-bit unsigned integer.</summary>
    public static uint BigEndian (uint u)
    {
        return (u >> 24) | ((u >> 8) & 0xff00u) | ((u << 8) & 0xff0000u) | (u << 24);
    }

    public static int BigEndian (int i)
    {
        return (int)BigEndian ((uint)i);
    }

    /// <summary>Reverse the byte order of a 16-bit unsigned integer.</summary>
    public static ushort BigEndian (ushort u)
    {
        return (ushort)((u >> 8) | (u << 8));
    }

    public static short BigEndian (short i)
    {
        return (short)BigEndian ((ushort)i);
    }

    /// <summary>Reverse the byte order of a 64-bit unsigned integer by swapping halves.</summary>
    public static ulong BigEndian (ulong u)
    {
        ulong swappedLow  = (ulong)BigEndian ((uint)u) << 32;
        ulong swappedHigh = (ulong)BigEndian ((uint)(u >> 32));
        return swappedLow | swappedHigh;
    }

    public static long BigEndian (long i)
    {
        return (long)BigEndian ((ulong)i);
    }

    public static bool AsciiEqual (byte[] name1, string name2)
    {
        return AsciiEqual (name1, 0, name2);
    }

    /// <summary>
    /// Check if sequence of ASCII characters in array <paramref name="name1"/> is equal to string <paramref name="name2"/>.
    /// Avoids allocating a string from the byte array for a mere comparison.
    /// </summary>
    public static bool AsciiEqual (byte[] name1, int offset, string name2)
    {
        return name1.AsciiEqual (offset, name2);
    }

    /// <summary>
    /// Copy potentially overlapping sequence of <paramref name="count"/> bytes in array
    /// <paramref name="data"/> from <paramref name="src"/> to <paramref name="dst"/>.
    /// If the destination lies within the source region the sequence repeats itself,
    /// as required by many LZ-style decompressors.
    /// </summary>
    public static void CopyOverlapped (byte[] data, int src, int dst, int count)
    {
        if (dst <= src)
        {
            // Non-repeating direction: BlockCopy handles same-array copies safely.
            System.Buffer.BlockCopy (data, src, data, dst, count);
            return;
        }
        // dst ahead of src: copy in chunks no larger than the current gap so the
        // just-written bytes are re-read, producing the repeating pattern.
        int remaining = count;
        while (remaining > 0)
        {
            int chunk = System.Math.Min (dst - src, remaining);
            System.Buffer.BlockCopy (data, src, data, dst, chunk);
            dst += chunk;
            remaining -= chunk;
        }
    }

    /// <summary>
    /// Extract a null-terminated string (a "C string") from <paramref name="data"/> starting
    /// at <paramref name="index"/>, reading at most <paramref name="length_limit"/> bytes,
    /// decoded with <paramref name="enc"/>.
    /// </summary>
    public static string GetCString (byte[] data, int index, int length_limit, Encoding enc)
    {
        int end = index;
        int limit = index + length_limit;
        while (end < limit && data[end] != 0)
            ++end;
        return enc.GetString (data, index, end - index);
    }

    public static string GetCString (byte[] data, int index, int length_limit)
    {
        return GetCString (data, index, length_limit, Encodings.cp932);
    }

    public static string GetCString (byte[] data, int index)
    {
        return GetCString (data, index, data.Length - index, Encodings.cp932);
    }

    /// <summary>Rotate a 32-bit value right; count is taken modulo 32.</summary>
    public static uint RotR (uint v, int count)
    {
        count &= 0x1F;
        return (v >> count) | (v << (32 - count));
    }

    /// <summary>Rotate a 32-bit value left; count is taken modulo 32.</summary>
    public static uint RotL (uint v, int count)
    {
        count &= 0x1F;
        return (v << count) | (v >> (32 - count));
    }

    /// <summary>Rotate a 64-bit value right; count is taken modulo 64.</summary>
    public static ulong RotR (ulong v, int count)
    {
        count &= 0x3F;
        return (v >> count) | (v << (64 - count));
    }

    /// <summary>Rotate a 64-bit value left; count is taken modulo 64.</summary>
    public static ulong RotL (ulong v, int count)
    {
        count &= 0x3F;
        return (v << count) | (v >> (64 - count));
    }

    /// <summary>Rotate a byte right; count is taken modulo 8.</summary>
    public static byte RotByteR (byte v, int count)
    {
        count &= 7;
        return (byte)((v >> count) | (v << (8 - count)));
    }

    /// <summary>Rotate a byte left; count is taken modulo 8.</summary>
    public static byte RotByteL (byte v, int count)
    {
        count &= 7;
        return (byte)((v << count) | (v >> (8 - count)));
    }
}
/// <summary>Big-endian readers over byte sequences and writers into byte arrays.</summary>
public static class BigEndian
{
    public static ushort ToUInt16<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        int high = value[index] << 8;
        return (ushort)(high | value[index+1]);
    }

    public static short ToInt16<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        return (short)ToUInt16 (value, index);
    }

    public static uint ToUInt32<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        // Accumulate most-significant byte first.
        uint result = value[index];
        result = result << 8 | value[index+1];
        result = result << 8 | value[index+2];
        result = result << 8 | value[index+3];
        return result;
    }

    public static int ToInt32<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        return (int)ToUInt32 (value, index);
    }

    public static void Pack (ushort value, byte[] buf, int index)
    {
        buf[index]   = (byte)(value >> 8);
        buf[index+1] = (byte)value;
    }

    public static void Pack (uint value, byte[] buf, int index)
    {
        // High half first, then low half.
        Pack ((ushort)(value >> 16), buf, index);
        Pack ((ushort)value, buf, index+2);
    }

    public static void Pack (ulong value, byte[] buf, int index)
    {
        Pack ((uint)(value >> 32), buf, index);
        Pack ((uint)value, buf, index+4);
    }

    public static void Pack (short value, byte[] buf, int index)
    {
        Pack ((ushort)value, buf, index);
    }

    public static void Pack (int value, byte[] buf, int index)
    {
        Pack ((uint)value, buf, index);
    }

    public static void Pack (long value, byte[] buf, int index)
    {
        Pack ((ulong)value, buf, index);
    }
}
/// <summary>
/// Read and write primitive integers stored in little-endian (least significant byte first) order.
/// </summary>
public static class LittleEndian
{
    /// <summary>Read an unsigned 16-bit little-endian integer at <paramref name="index"/>.</summary>
    public static ushort ToUInt16<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        return (ushort)(value[index+1] << 8 | value[index]);
    }

    /// <summary>Read a signed 16-bit little-endian integer at <paramref name="index"/>.</summary>
    public static short ToInt16<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        return (short)ToUInt16 (value, index);
    }

    /// <summary>Read an unsigned 32-bit little-endian integer at <paramref name="index"/>.</summary>
    public static uint ToUInt32<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        // Compose from two 16-bit reads; low half comes first.
        return (uint)(ToUInt16 (value, index) | ToUInt16 (value, index+2) << 16);
    }

    /// <summary>Read a signed 32-bit little-endian integer at <paramref name="index"/>.</summary>
    public static int ToInt32<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        return (int)ToUInt32 (value, index);
    }

    /// <summary>Read an unsigned 64-bit little-endian integer at <paramref name="index"/>.</summary>
    public static ulong ToUInt64<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        ulong lo = ToUInt32 (value, index);
        ulong hi = ToUInt32 (value, index+4);
        return hi << 32 | lo;
    }

    /// <summary>Read a signed 64-bit little-endian integer at <paramref name="index"/>.</summary>
    public static long ToInt64<TArray> (TArray value, int index) where TArray : IList<byte>
    {
        return (long)ToUInt64 (value, index);
    }

    /// <summary>Store <paramref name="value"/> at <paramref name="index"/> in little-endian order.</summary>
    public static void Pack (ushort value, byte[] buf, int index)
    {
        buf[index+1] = (byte)(value >> 8);
        buf[index] = (byte)(value);
    }

    /// <summary>Store <paramref name="value"/> at <paramref name="index"/> in little-endian order.</summary>
    public static void Pack (uint value, byte[] buf, int index)
    {
        // Low 16 bits come first in little-endian order.
        Pack ((ushort)value, buf, index);
        Pack ((ushort)(value >> 16), buf, index+2);
    }

    /// <summary>Store <paramref name="value"/> at <paramref name="index"/> in little-endian order.</summary>
    public static void Pack (ulong value, byte[] buf, int index)
    {
        // Low 32 bits come first in little-endian order.
        Pack ((uint)value, buf, index);
        Pack ((uint)(value >> 32), buf, index+4);
    }

    /// <summary>Store <paramref name="value"/> at <paramref name="index"/> in little-endian order.</summary>
    public static void Pack (short value, byte[] buf, int index)
    {
        Pack ((ushort)value, buf, index);
    }

    /// <summary>Store <paramref name="value"/> at <paramref name="index"/> in little-endian order.</summary>
    public static void Pack (int value, byte[] buf, int index)
    {
        Pack ((uint)value, buf, index);
    }

    /// <summary>Store <paramref name="value"/> at <paramref name="index"/> in little-endian order.</summary>
    public static void Pack (long value, byte[] buf, int index)
    {
        Pack ((ulong)value, buf, index);
    }
}
/// <summary>
/// Mutable wrapper around a byte array interpreted as an ASCII string, providing
/// value equality between instances and comparison against <see cref="string"/>.
/// </summary>
public class AsciiString
{
    /// <summary>Underlying byte representation of the string.</summary>
    public byte[] Value { get; set; }

    /// <summary>Length of the string, in bytes.</summary>
    public int Length { get { return Value.Length; } }

    /// <summary>Create a string of <paramref name="size"/> zero bytes.</summary>
    public AsciiString (int size)
    {
        Value = new byte[size];
    }

    /// <summary>Wrap an existing byte array (not copied).</summary>
    public AsciiString (byte[] str)
    {
        Value = str;
    }

    /// <summary>Create from a managed string, encoding it as ASCII.</summary>
    public AsciiString (string str)
    {
        Value = Encoding.ASCII.GetBytes (str);
    }

    public override string ToString ()
    {
        return Encoding.ASCII.GetString (Value);
    }

    public override bool Equals (object o)
    {
        if (null == o)
            return false;
        var a = o as AsciiString;
        if (null == (object)a)
            return false;
        return this == a;
    }

    /// <summary>DJB2-style hash over the byte contents, so equal strings hash equally.</summary>
    public override int GetHashCode ()
    {
        int hash = 5381;
        for (int i = 0; i < Value.Length; ++i)
        {
            hash = ((hash << 5) + hash) ^ Value[i];
        }
        return hash ^ (hash * 1566083941);
    }

    public static bool operator== (AsciiString a, AsciiString b)
    {
        if (ReferenceEquals (a, b))
            return true;
        if (null == (object)a || null == (object)b)
            return false;
        if (a.Length != b.Length)
            return false;
        for (int i = 0; i < a.Length; ++i)
            if (a.Value[i] != b.Value[i])
                return false;
        return true;
    }

    public static bool operator!= (AsciiString a, AsciiString b)
    {
        return !(a == b);
    }

    public static bool operator== (AsciiString a, string b)
    {
        // Guard against a null left operand, which previously threw NullReferenceException;
        // a null AsciiString compares equal only to a null string.
        if (null == (object)a)
            return null == (object)b;
        return Binary.AsciiEqual (a.Value, b);
    }

    public static bool operator!= (AsciiString a, string b)
    {
        return !(a == b);
    }

    public static bool operator== (string a, AsciiString b)
    {
        return b == a;
    }

    public static bool operator!= (string a, AsciiString b)
    {
        return !(b == a);
    }
}
/// <summary>
/// Interface for objects that produce a block of unpacked data.
/// </summary>
public interface IDataUnpacker
{
    /// <summary>The unpacked data.  Presumably only valid after <see cref="Unpack"/>
    /// has been called -- confirm against implementations.</summary>
    byte[] Data { get; }
    /// <summary>Perform the unpacking.</summary>
    void Unpack ();
}
}
| |
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Collections.Generic;
using Pb.Collections;
namespace Pbtk
{
namespace TileMap2D
{
/// <summary>
/// Manages and displays GUI for tile map controllers
/// </summary>
[CustomEditor(typeof(TileMapController))]
public class TileMapControllerEditor :
    Editor
{
    /// <summary>
    /// The tabs in the inspector
    /// </summary>
    public static string[] tabs = new string[]{"Chunks", "Aesthetic"};
    /// <summary>
    /// The controller being edited
    /// </summary>
    public TileMapController controller = null;
    /// <summary>
    /// The currently-selected tab in the inspector (index into <see cref="tabs"/>)
    /// </summary>
    public int tab = 0;
    /// <summary>
    /// Whether the cursor is on the map
    /// </summary>
    public bool on_map = false;
    /// <summary>
    /// The coordinates of the tile under the cursor
    /// </summary>
    public IVector2 tile = IVector2.zero;
    /// <summary>
    /// The coordinates of the chunk under the cursor; zero when no chunk manager is available
    /// </summary>
    public IVector2 chunk
    {
        get
        {
            // Floor division maps tile coordinates to chunk coordinates,
            // keeping negative tiles in the correct (negative) chunk.
            if (chunk_manager != null)
                return Pb.Math.FloorDivide(tile, chunk_manager.chunk_size);
            return IVector2.zero;
        }
    }
    /// <summary>
    /// The tile map of the controller being edited; null if the controller or its
    /// map is unset, or the map is not a TileMap2D map
    /// </summary>
    public TileMap tile_map
    {
        get
        {
            if (controller == null)
                return null;
            if (controller.tile_map == null)
                return null;
            return controller.tile_map as TileMap;
        }
    }
    /// <summary>
    /// The chunk manager of the controller being edited; null when absent or of a different type
    /// </summary>
    public ChunkManager2D chunk_manager
    {
        get
        {
            if (tile_map == null)
                return null;
            if (controller.tile_map.chunk_manager == null)
                return null;
            return controller.tile_map.chunk_manager as ChunkManager2D;
        }
    }
    /// <summary>
    /// Gets the chunk manager as a static chunk manager (if able; null otherwise)
    /// </summary>
    public StaticChunkManager static_chunk_manager
    {
        get
        {
            return (chunk_manager as StaticChunkManager);
        }
    }
    /// <summary>
    /// Gets the chunk manager as a dynamic chunk generator (if able; null otherwise)
    /// </summary>
    public DynamicChunkGenerator dynamic_chunk_generator
    {
        get
        {
            return (chunk_manager as DynamicChunkGenerator);
        }
    }
    /// <summary>
    /// Determines whether the chunk manager is a static chunk manager
    /// </summary>
    public bool is_static_chunk_manager
    {
        get
        {
            return static_chunk_manager != null;
        }
    }
    /// <summary>
    /// Determines whether the chunk manager is a dynamic chunk generator
    /// </summary>
    public bool is_dynamic_generator
    {
        get
        {
            return dynamic_chunk_generator != null;
        }
    }
    /// <summary>
    /// Sets the controller when it changes
    /// </summary>
    public void OnEnable()
    {
        controller = target as TileMapController;
    }
    /// <summary>
    /// Displays the inspector GUI
    /// </summary>
    public override void OnInspectorGUI()
    {
        if (controller == null)
            return;
        // Tile map selection field; RegisterChange records an undo entry
        // only when the returned value differs from the current one.
        EditorGUILayout.BeginHorizontal();
        Pb.Utility.Undo.RegisterChange<Pb.TileMap.TileMap>(
            EditorGUILayout.ObjectField("Tile map", controller.tile_map, typeof(TileMap), false) as Pb.TileMap.TileMap,
            ref controller.tile_map, controller,
            "Changed tile map");
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        tab = GUILayout.SelectionGrid(tab, tabs, tabs.Length);
        EditorGUILayout.EndHorizontal();
        // Dispatch on the tab *name* rather than the index so the cases stay
        // readable and reordering the tabs array cannot silently swap pages.
        switch (tabs[tab])
        {
            case "Chunks":
                if (chunk_manager == null)
                {
                    EditorGUILayout.BeginHorizontal();
                    EditorGUILayout.LabelField("No chunk manager found");
                    EditorGUILayout.EndHorizontal();
                    break;
                }
                // Read-only information about the chunk layout
                EditorGUILayout.BeginHorizontal();
                EditorGUILayout.LabelField("Chunk size:", chunk_manager.chunk_size.x + " x " + chunk_manager.chunk_size.y + " tiles");
                EditorGUILayout.EndHorizontal();
                EditorGUILayout.BeginHorizontal();
                EditorGUILayout.LabelField("Chunk range:", "(" + chunk_manager.chunk_least.x + ", " + chunk_manager.chunk_least.y + ") - (" + chunk_manager.chunk_greatest.x + ", " + chunk_manager.chunk_greatest.y + ")");
                EditorGUILayout.EndHorizontal();
                if (is_static_chunk_manager)
                {
                    EditorGUILayout.BeginHorizontal();
                    EditorGUILayout.LabelField("Resources path:", static_chunk_manager.resources_path);
                    EditorGUILayout.EndHorizontal();
                }
                break;
            case "Aesthetic":
                // Gizmo drawing options; each widget is wrapped in RegisterChange
                // so edits are undoable.
                EditorGUILayout.BeginHorizontal();
                Pb.Utility.Undo.RegisterChange<TileMapController.GizmosDrawTime>(
                    (TileMapController.GizmosDrawTime)EditorGUILayout.EnumPopup("When to draw", controller.when_draw_gizmos),
                    ref controller.when_draw_gizmos, controller,
                    "Changed gizmos draw condition");
                EditorGUILayout.EndHorizontal();
                EditorGUILayout.BeginHorizontal();
                Pb.Utility.Undo.RegisterChange<bool>(
                    EditorGUILayout.BeginToggleGroup("Draw tiles", controller.draw_tile_boundaries),
                    ref controller.draw_tile_boundaries, controller,
                    "Changed whether to draw tile boundaries");
                Pb.Utility.Undo.RegisterChange<Color32>(
                    EditorGUILayout.ColorField(controller.gizmo_color_tile),
                    ref controller.gizmo_color_tile, controller,
                    "Changed tile boundary draw color");
                EditorGUILayout.EndToggleGroup();
                EditorGUILayout.EndHorizontal();
                EditorGUILayout.BeginHorizontal();
                Pb.Utility.Undo.RegisterChange<bool>(
                    EditorGUILayout.BeginToggleGroup("Draw chunks", controller.draw_chunk_boundaries),
                    ref controller.draw_chunk_boundaries, controller,
                    "Changed whether to draw chunk boundaries");
                Pb.Utility.Undo.RegisterChange<Color32>(
                    EditorGUILayout.ColorField(controller.gizmo_color_chunk),
                    ref controller.gizmo_color_chunk, controller,
                    "Changed chunk boundary draw color");
                EditorGUILayout.EndToggleGroup();
                EditorGUILayout.EndHorizontal();
                break;
            default:
                Debug.Log("Invalid tab!");
                break;
        }
    }
    /// <summary>
    /// A simple class that represents a context command
    /// </summary>
    public class ContextCommand
    {
        /// <summary>
        /// The name of the context command
        /// </summary>
        public string name;
        /// <summary>
        /// Arguments to the context command
        /// </summary>
        public object[] args;
        /// <summary>
        /// Basic constructor
        /// </summary>
        /// <param name="n">The name of the context command</param>
        /// <param name="a">The arguments to the context command</param>
        public ContextCommand(string n, object[] a)
        {
            name = n;
            args = a;
        }
    }
    /// <summary>
    /// Executes a context command
    /// </summary>
    /// <param name="command_arg">The context command to execute</param>
    public void ExecuteContext(object command_arg)
    {
        // NOTE(review): command_arg is assumed to always be a ContextCommand;
        // a different type would make 'command' null and throw
        // NullReferenceException on command.name below -- confirm all callers.
        ContextCommand command = command_arg as ContextCommand;
        switch (command.name)
        {
            case "ping_chunk":
                EditorGUIUtility.PingObject(controller.GetChunk((IVector3)command.args[0]));
                break;
            case "begin_editing":
                controller.Begin();
                break;
            case "end_editing":
                controller.End();
                break;
            case "load_chunk":
                controller.LoadAndRenderChunk((IVector3)command.args[0]);
                break;
            case "unload_chunk":
                controller.UnloadAndUnrenderChunk((IVector3)command.args[0]);
                break;
            case "load_all_chunks":
                // Iterate the full inclusive chunk range reported by the manager
                for (int x = chunk_manager.chunk_least.x; x <= chunk_manager.chunk_greatest.x; ++x)
                    for (int y = chunk_manager.chunk_least.y; y <= chunk_manager.chunk_greatest.y; ++y)
                        controller.LoadAndRenderChunk((IVector3)(new IVector2(x, y)));
                break;
            case "unload_all_chunks":
                for (int x = chunk_manager.chunk_least.x; x <= chunk_manager.chunk_greatest.x; ++x)
                    for (int y = chunk_manager.chunk_least.y; y <= chunk_manager.chunk_greatest.y; ++y)
                        controller.UnloadAndUnrenderChunk((IVector3)(new IVector2(x, y)));
                break;
            case "get_tile_id":
                Debug.Log(controller.GetTile((IVector2)command.args[0], 0));
                break;
            case "reset_generator":
                // Only reachable from the menu when is_dynamic_generator is true
                dynamic_chunk_generator.Reset();
                break;
            default:
                throw new System.ArgumentException("Invalid context command argument '" + command.name + "'");
        }
    }
    /// <summary>
    /// Draws scene GUI and creates context menus if the space bar is pressed
    /// </summary>
    public void OnSceneGUI()
    {
        if (!controller.initialized)
            controller.Init();
        if (tile_map == null)
            return;
        // Project the mouse position onto the map plane to find the hovered tile
        Vector3 mouse_pos = new Vector3();
        on_map = Pb.Utility.Projection.ProjectMousePosition(Vector3.forward, controller.transform, out mouse_pos);
        bool needs_repaint = false;
        if (on_map)
        {
            int old_x = tile.x;
            int old_y = tile.y;
            // World -> local -> normalized map space -> tile coordinates
            mouse_pos = controller.transform.worldToLocalMatrix.MultiplyPoint(mouse_pos);
            mouse_pos = tile_map.geometry.mapToNormalMatrix.MultiplyPoint(mouse_pos);
            tile = tile_map.geometry.NormalToTile(mouse_pos).discardZ();
            // Repaint only when the hovered tile actually changed
            if (old_x != tile.x || old_y != tile.y)
                needs_repaint = true;
        }
        // Overlay the hovered tile/chunk coordinates in the scene view
        Handles.BeginGUI();
        GUI.Label(new Rect(10, Screen.height - 60, 200, 20), "Tile: " + tile.x + ", " + tile.y);
        GUI.Label(new Rect(10, Screen.height - 80, 200, 20), "Chunk: " + chunk.x + ", " + chunk.y);
        Handles.EndGUI();
        // Space bar over the map opens a context menu of chunk commands
        Event current = Event.current;
        if (current.type == EventType.KeyDown && current.keyCode == KeyCode.Space && on_map)
        {
            GenericMenu menu = new GenericMenu();
            if (!controller.ready)
                menu.AddItem(new GUIContent("Begin editing"), false, ExecuteContext, new ContextCommand("begin_editing", null));
            else
            {
                if (chunk_manager == null)
                    menu.AddDisabledItem(new GUIContent("No chunk manager"));
                else
                {
                    // Per-chunk commands are only offered for chunks inside the managed range
                    if (chunk.inInterval(chunk_manager.chunk_least, chunk_manager.chunk_greatest))
                    {
                        if (controller.IsChunkLoaded((IVector3)chunk))
                        {
                            menu.AddItem(new GUIContent("Unload chunk"), false, ExecuteContext, new ContextCommand("unload_chunk", new object[1] { new IVector3(chunk.x, chunk.y, 0) }));
                            menu.AddItem(new GUIContent("Print tile ID"), false, ExecuteContext, new ContextCommand("get_tile_id", new object[1] { new IVector2(tile.x, tile.y) }));
                        }
                        else
                            menu.AddItem(new GUIContent("Load chunk"), false, ExecuteContext, new ContextCommand("load_chunk", new object[1] { new IVector3(chunk.x, chunk.y, 0) }));
                        menu.AddItem(new GUIContent("Ping chunk"), false, ExecuteContext, new ContextCommand("ping_chunk", new object[1] { new IVector3(chunk.x, chunk.y, 0) }));
                        menu.AddSeparator("");
                    }
                    menu.AddItem(new GUIContent("Chunks/Load all chunks"), false, ExecuteContext, new ContextCommand("load_all_chunks", null));
                    menu.AddItem(new GUIContent("Chunks/Unload all chunks"), false, ExecuteContext, new ContextCommand("unload_all_chunks", null));
                }
            }
            if (is_dynamic_generator)
            {
                menu.AddSeparator("");
                menu.AddItem(new GUIContent("Reset generator"), false, ExecuteContext, new ContextCommand("reset_generator", null));
            }
            if (controller.ready)
            {
                menu.AddSeparator("");
                menu.AddItem(new GUIContent("End editing"), false, ExecuteContext, new ContextCommand("end_editing", null));
            }
            menu.ShowAsContext();
            current.Use();
        }
        if (needs_repaint)
            SceneView.lastActiveSceneView.Repaint();
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// TargetCore.cs
//
//
// The core implementation of a standard ITargetBlock<TInput>.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
using System.Linq;
namespace System.Threading.Tasks.Dataflow.Internal
{
// LOCK-LEVELING SCHEME
// --------------------
// TargetCore employs a single lock: IncomingLock. This lock must not be used when calling out to any targets,
// which TargetCore should not have, anyway. It also must not be held when calling back to any sources, except
// during calls to OfferMessage from that same source.
/// <summary>Options used to configure a target core.</summary>
[Flags]
internal enum TargetCoreOptions : byte
{
    /// <summary>No options set: completion is signaled synchronously and the
    /// owning block also has a source side.</summary>
    None = 0x0,
    /// <summary>Whether the block relies on the delegate to signal when an async operation has completed.</summary>
    UsesAsyncCompletion = 0x1,
    /// <summary>
    /// Whether the block containing this target core is just a target or also has a source side.
    /// If it's just a target, then this target core's completion represents the entire block's completion.
    /// </summary>
    RepresentsBlockCompletion = 0x2
}
/// <summary>
/// Provides a core implementation of <see cref="ITargetBlock{TInput}"/>.</summary>
/// <typeparam name="TInput">Specifies the type of data accepted by the <see cref="TargetCore{TInput}"/>.</typeparam>
[SuppressMessage("Microsoft.Design", "CA1001:TypesThatOwnDisposableFieldsShouldBeDisposable")]
[DebuggerDisplay("{DebuggerDisplayContent,nq}")]
internal sealed class TargetCore<TInput>
{
// *** These fields are readonly and are initialized at AppDomain startup.
/// <summary>Caching the keep alive predicate.</summary>
private static readonly Common.KeepAlivePredicate<TargetCore<TInput>, KeyValuePair<TInput, long>> _keepAlivePredicate =
(TargetCore<TInput> thisTargetCore, out KeyValuePair<TInput, long> messageWithId) =>
thisTargetCore.TryGetNextAvailableOrPostponedMessage(out messageWithId);
// *** These fields are readonly and are initialized to new instances at construction.
/// <summary>A task representing the completion of the block.</summary>
private readonly TaskCompletionSource<VoidResult> _completionSource = new TaskCompletionSource<VoidResult>();
// *** These fields are readonly and are initialized by arguments to the constructor.
/// <summary>The target block using this helper.</summary>
private readonly ITargetBlock<TInput> _owningTarget;
/// <summary>The messages in this target.</summary>
/// <remarks>This field doubles as the IncomingLock.</remarks>
private readonly IProducerConsumerQueue<KeyValuePair<TInput, long>> _messages;
/// <summary>The options associated with this block.</summary>
private readonly ExecutionDataflowBlockOptions _dataflowBlockOptions;
/// <summary>An action to invoke for every accepted message.</summary>
private readonly Action<KeyValuePair<TInput, long>> _callAction;
/// <summary>Whether the block relies on the delegate to signal when an async operation has completed.</summary>
private readonly TargetCoreOptions _targetCoreOptions;
/// <summary>Bounding state for when the block is executing in bounded mode.</summary>
private readonly BoundingStateWithPostponed<TInput> _boundingState;
/// <summary>The reordering buffer used by the owner. May be null.</summary>
private readonly IReorderingBuffer _reorderingBuffer;
/// <summary>Gets the object used as the incoming lock.</summary>
private object IncomingLock { get { return _messages; } }
// *** These fields are mutated during execution.
/// <summary>Exceptions that may have occurred and gone unhandled during processing.</summary>
private List<Exception> _exceptions;
/// <summary>Whether to stop accepting new messages.</summary>
private bool _decliningPermanently;
/// <summary>The number of operations (including service tasks) currently running asynchronously.</summary>
/// <remarks>Must always be accessed from inside a lock.</remarks>
private int _numberOfOutstandingOperations;
/// <summary>The number of service tasks in async mode currently running.</summary>
/// <remarks>Must always be accessed from inside a lock.</remarks>
private int _numberOfOutstandingServiceTasks;
/// <summary>The next available ID we can assign to a message about to be processed.</summary>
private PaddedInt64 _nextAvailableInputMessageId; // initialized to 0... very important for a reordering buffer
/// <summary>A task has reserved the right to run the completion routine.</summary>
private bool _completionReserved;
/// <summary>This counter is set by the processing loop to prevent itself from trying to keep alive.</summary>
private int _keepAliveBanCounter;
/// <summary>Initializes the target core.</summary>
/// <param name="owningTarget">The target using this helper.</param>
/// <param name="callAction">An action to invoke for all accepted items.</param>
/// <param name="reorderingBuffer">The reordering buffer used by the owner; may be null.</param>
/// <param name="dataflowBlockOptions">The options to use to configure this block. The target core assumes these options are immutable.</param>
/// <param name="targetCoreOptions">Options for how the target core should behave.</param>
internal TargetCore(
    ITargetBlock<TInput> owningTarget,
    Action<KeyValuePair<TInput, long>> callAction,
    IReorderingBuffer reorderingBuffer,
    ExecutionDataflowBlockOptions dataflowBlockOptions,
    TargetCoreOptions targetCoreOptions)
{
    // Validate internal arguments
    Debug.Assert(owningTarget != null, "Core must be associated with a target block.");
    Debug.Assert(dataflowBlockOptions != null, "Options must be provided to configure the core.");
    Debug.Assert(callAction != null, "Action to invoke for each item is required.");

    // Store arguments
    _owningTarget = owningTarget;
    _callAction = callAction;
    _reorderingBuffer = reorderingBuffer;
    _dataflowBlockOptions = dataflowBlockOptions;
    _targetCoreOptions = targetCoreOptions;

    // With MaxDOP == 1 a cheaper single-producer/single-consumer queue suffices;
    // any other degree of parallelism requires the multi-producer/multi-consumer queue.
    if (dataflowBlockOptions.MaxDegreeOfParallelism == 1)
    {
        _messages = new SingleProducerSingleConsumerQueue<KeyValuePair<TInput, long>>();
    }
    else
    {
        _messages = new MultiProducerMultiConsumerQueue<KeyValuePair<TInput, long>>();
    }

    // Bounded mode needs extra state to track the current count and postponed messages.
    if (_dataflowBlockOptions.BoundedCapacity != System.Threading.Tasks.Dataflow.DataflowBlockOptions.Unbounded)
    {
        Debug.Assert(_dataflowBlockOptions.BoundedCapacity > 0, "Positive bounding count expected; should have been verified by options ctor");
        _boundingState = new BoundingStateWithPostponed<TInput>(_dataflowBlockOptions.BoundedCapacity);
    }
}
/// <summary>Internal Complete entry point with extra parameters for different contexts.</summary>
/// <param name="exception">If not null, the block will be faulted.</param>
/// <param name="dropPendingMessages">If true, any unprocessed input messages will be dropped.</param>
/// <param name="storeExceptionEvenIfAlreadyCompleting">If true, an exception will be stored after _decliningPermanently has been set to true.</param>
/// <param name="unwrapInnerExceptions">If true, exception will be treated as an AggregateException.</param>
/// <param name="revertProcessingState">Indicates whether the processing state is dirty and has to be reverted.</param>
internal void Complete(Exception exception, bool dropPendingMessages, bool storeExceptionEvenIfAlreadyCompleting = false,
    bool unwrapInnerExceptions = false, bool revertProcessingState = false)
{
    Debug.Assert(storeExceptionEvenIfAlreadyCompleting || !revertProcessingState,
        "Indicating dirty processing state may only come with storeExceptionEvenIfAlreadyCompleting==true.");
    Contract.EndContractBlock();
    // Ensure that no new messages may be added
    lock (IncomingLock)
    {
        // Faulting from outside is allowed until we start declining permanently.
        // Faulting from inside is allowed at any time.
        if (exception != null && (!_decliningPermanently || storeExceptionEvenIfAlreadyCompleting))
        {
            Debug.Assert(_numberOfOutstandingOperations > 0 || !storeExceptionEvenIfAlreadyCompleting,
                "Calls with storeExceptionEvenIfAlreadyCompleting==true may only be coming from processing task.");
            // Suppress compiler warning 0420 for the by-ref argument below.
#pragma warning disable 0420
            Common.AddException(ref _exceptions, exception, unwrapInnerExceptions);
        }
        // Clear the messages queue if requested
        if (dropPendingMessages)
        {
            KeyValuePair<TInput, long> dummy;
            while (_messages.TryDequeue(out dummy)) ;
        }
        // Revert the dirty processing state if requested; undoes the counter
        // increments a failed processing-task launch performed.
        if (revertProcessingState)
        {
            Debug.Assert(_numberOfOutstandingOperations > 0 && (!UsesAsyncCompletion || _numberOfOutstandingServiceTasks > 0),
                "The processing state must be dirty when revertProcessingState==true.");
            _numberOfOutstandingOperations--;
            if (UsesAsyncCompletion) _numberOfOutstandingServiceTasks--;
        }
        // Trigger completion: once set, no further messages are accepted.
        _decliningPermanently = true;
        CompleteBlockIfPossible();
    }
}
/// <include file='XmlDocs/CommonXmlDocComments.xml' path='CommonXmlDocComments/Targets/Member[@name="OfferMessage"]/*' />
internal DataflowMessageStatus OfferMessage(DataflowMessageHeader messageHeader, TInput messageValue, ISourceBlock<TInput> source, Boolean consumeToAccept)
{
    // Validate arguments
    if (!messageHeader.IsValid) throw new ArgumentException(SR.Argument_InvalidMessageHeader, nameof(messageHeader));
    if (source == null && consumeToAccept) throw new ArgumentException(SR.Argument_CantConsumeFromANullSource, nameof(consumeToAccept));
    Contract.EndContractBlock();

    // All accept/postpone/decline decisions are made under IncomingLock.
    lock (IncomingLock)
    {
        // If we shouldn't be accepting more messages, don't.
        if (_decliningPermanently)
        {
            CompleteBlockIfPossible();
            return DataflowMessageStatus.DecliningPermanently;
        }
        // We can directly accept the message if:
        //      1) we are not bounding, OR
        //      2) we are bounding AND there is room available AND there are no postponed messages AND no messages are currently being transfered to the input queue.
        // (If there were any postponed messages, we would need to postpone so that ordering would be maintained.)
        // (Unlike all other blocks, TargetCore can accept messages while processing, because
        // input message IDs are properly assigned and the correct order is preserved.)
        if (_boundingState == null ||
            (_boundingState.OutstandingTransfers == 0 && _boundingState.CountIsLessThanBound && _boundingState.PostponedMessages.Count == 0))
        {
            // Consume the message from the source if necessary
            if (consumeToAccept)
            {
                Debug.Assert(source != null, "We must have thrown if source == null && consumeToAccept == true.");
                bool consumed;
                messageValue = source.ConsumeMessage(messageHeader, _owningTarget, out consumed);
                if (!consumed) return DataflowMessageStatus.NotAvailable;
            }
            // Assign a message ID - strictly sequential, no gaps.
            // Once consumed, enqueue the message with its ID and kick off asynchronous processing.
            long messageId = _nextAvailableInputMessageId.Value++;
            Debug.Assert(messageId != Common.INVALID_REORDERING_ID, "The assigned message ID is invalid.");
            if (_boundingState != null) _boundingState.CurrentCount += 1; // track this new item against our bound
            _messages.Enqueue(new KeyValuePair<TInput, long>(messageValue, messageId));
            ProcessAsyncIfNecessary();
            return DataflowMessageStatus.Accepted;
        }
        // Otherwise, we try to postpone if a source was provided
        else if (source != null)
        {
            Debug.Assert(_boundingState != null && _boundingState.PostponedMessages != null,
                "PostponedMessages must have been initialized during construction in non-greedy mode.");
            // Store the message's info and kick off asynchronous processing
            _boundingState.PostponedMessages.Push(source, messageHeader);
            ProcessAsyncIfNecessary();
            return DataflowMessageStatus.Postponed;
        }
        // We can't do anything else about this message
        return DataflowMessageStatus.Declined;
    }
}
/// <include file='XmlDocs/CommonXmlDocComments.xml' path='CommonXmlDocComments/Blocks/Member[@name="Completion"]/*' />
internal Task Completion => _completionSource.Task;
/// <summary>Gets the number of items waiting to be processed by this target.</summary>
// GetCountSafe is passed IncomingLock -- presumably so the count is read under
// the lock when the underlying queue requires it; confirm against the queue impl.
internal int InputCount { get { return _messages.GetCountSafe(IncomingLock); } }
/// <summary>Signals to the target core that a previously launched asynchronous operation has now completed.</summary>
internal void SignalOneAsyncMessageCompleted()
{
    // No adjustment to the bounding count is needed for this overload.
    SignalOneAsyncMessageCompleted(0);
}
/// <summary>Signals to the target core that a previously launched asynchronous operation has now completed.</summary>
/// <param name="boundingCountChange">The number of elements by which to change the bounding count, if bounding is occurring.</param>
internal void SignalOneAsyncMessageCompleted(int boundingCountChange)
{
    lock (IncomingLock)
    {
        // We're no longer processing, so decrement the DOP counter
        Debug.Assert(_numberOfOutstandingOperations > 0, "Operations may only be completed if any are outstanding.");
        if (_numberOfOutstandingOperations > 0) _numberOfOutstandingOperations--;
        // Fix up the bounding count if necessary
        if (_boundingState != null && boundingCountChange != 0)
        {
            Debug.Assert(boundingCountChange <= 0 && _boundingState.CurrentCount + boundingCountChange >= 0,
                "Expected a negative bounding change and not to drop below zero.");
            _boundingState.CurrentCount += boundingCountChange;
        }
        // However, we may have given up early because we hit our own configured
        // processing limits rather than because we ran out of work to do.  If that's
        // the case, make sure we spin up another task to keep going.
        ProcessAsyncIfNecessary(repeat: true);
        // If, however, we stopped because we ran out of work to do and we
        // know we'll never get more, then complete.
        CompleteBlockIfPossible();
    }
}
/// <summary>Gets whether this instance has been constructed for async processing.</summary>
private bool UsesAsyncCompletion =>
    (_targetCoreOptions & TargetCoreOptions.UsesAsyncCompletion) != 0;
/// <summary>Gets whether there's room to launch more processing operations.</summary>
/// <remarks>Must be read while holding IncomingLock (asserted below).</remarks>
private bool HasRoomForMoreOperations
{
    get
    {
        Debug.Assert(_numberOfOutstandingOperations >= 0, "Number of outstanding operations should never be negative.");
        Debug.Assert(_numberOfOutstandingServiceTasks >= 0, "Number of outstanding service tasks should never be negative.");
        Debug.Assert(_numberOfOutstandingOperations >= _numberOfOutstandingServiceTasks, "Number of outstanding service tasks should never exceed the number of outstanding operations.");
        Common.ContractAssertMonitorStatus(IncomingLock, held: true);
        // In async mode, we increment _numberOfOutstandingOperations before we start
        // our own processing loop which should not count towards the MaxDOP.
        return (_numberOfOutstandingOperations - _numberOfOutstandingServiceTasks) < _dataflowBlockOptions.ActualMaxDegreeOfParallelism;
    }
}
/// <summary>Gets whether there's room to launch more service tasks for doing/launching processing operations.</summary>
/// <remarks>Must be read while holding IncomingLock (asserted below).</remarks>
private bool HasRoomForMoreServiceTasks
{
    get
    {
        Debug.Assert(_numberOfOutstandingOperations >= 0, "Number of outstanding operations should never be negative.");
        Debug.Assert(_numberOfOutstandingServiceTasks >= 0, "Number of outstanding service tasks should never be negative.");
        Debug.Assert(_numberOfOutstandingOperations >= _numberOfOutstandingServiceTasks, "Number of outstanding service tasks should never exceed the number of outstanding operations.");
        Common.ContractAssertMonitorStatus(IncomingLock, held: true);
        if (!UsesAsyncCompletion)
        {
            // Sync mode:
            // We don't count service tasks, because our tasks are counted as operations.
            // Therefore, return HasRoomForMoreOperations.
            return HasRoomForMoreOperations;
        }
        else
        {
            // Async mode:
            // We allow up to MaxDOP true service tasks.
            // Checking whether there is room for more processing operations is not necessary,
            // but doing so will help us avoid spinning up a task that will go away without
            // launching any processing operation.
            return HasRoomForMoreOperations &&
                   _numberOfOutstandingServiceTasks < _dataflowBlockOptions.ActualMaxDegreeOfParallelism;
        }
    }
}
/// <summary>Called when new messages are available to be processed.</summary>
/// <param name="repeat">Whether this call is the continuation of a previous message loop.</param>
private void ProcessAsyncIfNecessary(bool repeat = false)
{
    Common.ContractAssertMonitorStatus(IncomingLock, held: true);
    // Fast path: nothing to do when no more service tasks may be launched.
    // The rarely-needed work lives in a separate method to keep this one small.
    if (!HasRoomForMoreServiceTasks) return;
    ProcessAsyncIfNecessary_Slow(repeat);
}
/// <summary>
/// Slow path for ProcessAsyncIfNecessary.
/// Separating out the slow path into its own method makes it more likely that the fast path method will get inlined.
/// </summary>
[SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope")]
private void ProcessAsyncIfNecessary_Slow(bool repeat)
{
    Debug.Assert(HasRoomForMoreServiceTasks, "There must be room to process asynchronously.");
    Common.ContractAssertMonitorStatus(IncomingLock, held: true);
    // Determine preconditions to launching a processing task
    bool messagesAvailableOrPostponed =
        !_messages.IsEmpty ||
        (!_decliningPermanently && _boundingState != null && _boundingState.CountIsLessThanBound && _boundingState.PostponedMessages.Count > 0);
    // If all conditions are met, launch away
    if (messagesAvailableOrPostponed && !CanceledOrFaulted)
    {
        // Any book keeping related to the processing task like incrementing the
        // DOP counter or eventually recording the tasks reference must be done
        // before the task starts. That is because the task itself will do the
        // reverse operation upon its completion.
        _numberOfOutstandingOperations++;
        if (UsesAsyncCompletion) _numberOfOutstandingServiceTasks++;
        var taskForInputProcessing = new Task(thisTargetCore => ((TargetCore<TInput>)thisTargetCore).ProcessMessagesLoopCore(), this,
                                            Common.GetCreationOptionsForTask(repeat));
#if FEATURE_TRACING
        DataflowEtwProvider etwLog = DataflowEtwProvider.Log;
        if (etwLog.IsEnabled())
        {
            etwLog.TaskLaunchedForMessageHandling(
                _owningTarget, taskForInputProcessing, DataflowEtwProvider.TaskLaunchedReason.ProcessingInputMessages,
                _messages.Count + (_boundingState != null ? _boundingState.PostponedMessages.Count : 0));
        }
#endif
        // Start the task handling scheduling exceptions
        Exception exception = Common.StartTaskSafe(taskForInputProcessing, _dataflowBlockOptions.TaskScheduler);
        if (exception != null)
        {
            // The task failed to start, so the book keeping above must be undone
            // (revertProcessingState: true) and the block faulted.
            // Get out from under currently held locks. Complete re-acquires the locks it needs.
            Task.Factory.StartNew(exc => Complete(exception: (Exception)exc, dropPendingMessages: true, storeExceptionEvenIfAlreadyCompleting: true,
                                                unwrapInnerExceptions: false, revertProcessingState: true),
                                exception, CancellationToken.None, Common.GetCreationOptionsForTask(), TaskScheduler.Default);
        }
    }
}
/// <summary>Task body used to process messages.</summary>
/// <remarks>
/// Runs without IncomingLock held; the lock is taken only for postponed-message
/// transfers and for the bookkeeping in the finally block.
/// </remarks>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
private void ProcessMessagesLoopCore()
{
    Common.ContractAssertMonitorStatus(IncomingLock, held: false);
    KeyValuePair<TInput, long> messageWithId = default(KeyValuePair<TInput, long>);
    try
    {
        bool useAsyncCompletion = UsesAsyncCompletion;
        bool shouldAttemptPostponedTransfer = _boundingState != null && _boundingState.BoundedCapacity > 1;
        int numberOfMessagesProcessedByThisTask = 0;
        int numberOfMessagesProcessedSinceTheLastKeepAlive = 0;
        int maxMessagesPerTask = _dataflowBlockOptions.ActualMaxMessagesPerTask;
        while (numberOfMessagesProcessedByThisTask < maxMessagesPerTask && !CanceledOrFaulted)
        {
            // If we're bounding, try to transfer a message from the postponed queue
            // to the input queue. This enables us to more quickly unblock sources
            // sending data to the block (otherwise, no postponed messages will be consumed
            // until the input queue is entirely empty). If the bounded size is 1,
            // there's no need to transfer, as attempting to get the next message will
            // just go and consume the postponed message anyway, and we'll save
            // the extra trip through the _messages queue.
            KeyValuePair<TInput, long> transferMessageWithId;
            if (shouldAttemptPostponedTransfer &&
                TryConsumePostponedMessage(forPostponementTransfer: true, result: out transferMessageWithId))
            {
                lock (IncomingLock)
                {
                    Debug.Assert(
                        _boundingState.OutstandingTransfers > 0
                        && _boundingState.OutstandingTransfers <= _dataflowBlockOptions.ActualMaxDegreeOfParallelism,
                        "Expected TryConsumePostponedMessage to have incremented the count and for the count to not exceed the DOP.");
                    _boundingState.OutstandingTransfers--; // was incremented in TryConsumePostponedMessage
                    _messages.Enqueue(transferMessageWithId);
                    ProcessAsyncIfNecessary();
                }
            }
            if (useAsyncCompletion)
            {
                // Get the next message if DOP is available.
                // If we can't get a message or DOP is not available, bail out.
                if (!TryGetNextMessageForNewAsyncOperation(out messageWithId)) break;
            }
            else
            {
                // Try to get a message for sequential execution, i.e. without checking DOP availability
                if (!TryGetNextAvailableOrPostponedMessage(out messageWithId))
                {
                    // Try to keep the task alive only if MaxDOP=1
                    if (_dataflowBlockOptions.MaxDegreeOfParallelism != 1) break;
                    // If this task has processed enough messages without being kept alive,
                    // it has served its purpose. Don't keep it alive.
                    if (numberOfMessagesProcessedSinceTheLastKeepAlive > Common.KEEP_ALIVE_NUMBER_OF_MESSAGES_THRESHOLD) break;
                    // If keep alive is banned, don't attempt it
                    if (_keepAliveBanCounter > 0)
                    {
                        _keepAliveBanCounter--;
                        break;
                    }
                    // Reset the keep alive counter. (Keep this line together with TryKeepAliveUntil.)
                    numberOfMessagesProcessedSinceTheLastKeepAlive = 0;
                    // Try to keep the task alive briefly until a new message arrives
                    if (!Common.TryKeepAliveUntil(_keepAlivePredicate, this, out messageWithId))
                    {
                        // Keep alive was unsuccessful.
                        // Therefore ban further attempts temporarily.
                        _keepAliveBanCounter = Common.KEEP_ALIVE_BAN_COUNT;
                        break;
                    }
                }
            }
            // We have popped a message from the queue.
            // So increment the counter of processed messages.
            numberOfMessagesProcessedByThisTask++;
            numberOfMessagesProcessedSinceTheLastKeepAlive++;
            // Invoke the user action
            _callAction(messageWithId);
        }
    }
    catch (Exception exc)
    {
        // Record which message faulted, then fault the whole block.
        Common.StoreDataflowMessageValueIntoExceptionData(exc, messageWithId.Key);
        Complete(exc, dropPendingMessages: true, storeExceptionEvenIfAlreadyCompleting: true, unwrapInnerExceptions: false);
    }
    finally
    {
        lock (IncomingLock)
        {
            // We incremented _numberOfOutstandingOperations before we launched this task.
            // So we must decrement it before exiting.
            // Note that each async task additionally incremented it before starting and
            // is responsible for decrementing it prior to exiting.
            Debug.Assert(_numberOfOutstandingOperations > 0, "Expected a positive number of outstanding operations, since we're completing one here.");
            _numberOfOutstandingOperations--;
            // If we are in async mode, we've also incremented _numberOfOutstandingServiceTasks.
            // Now it's time to decrement it.
            if (UsesAsyncCompletion)
            {
                Debug.Assert(_numberOfOutstandingServiceTasks > 0, "Expected a positive number of outstanding service tasks, since we're completing one here.");
                _numberOfOutstandingServiceTasks--;
            }
            // However, we may have given up early because we hit our own configured
            // processing limits rather than because we ran out of work to do. If that's
            // the case, make sure we spin up another task to keep going.
            ProcessAsyncIfNecessary(repeat: true);
            // If, however, we stopped because we ran out of work to do and we
            // know we'll never get more, then complete.
            CompleteBlockIfPossible();
        }
    }
}
/// <summary>Retrieves the next message from the input queue for the useAsyncCompletion mode.</summary>
/// <param name="messageWithId">The next message retrieved.</param>
/// <returns>true if a message was found and removed; otherwise, false.</returns>
/// <remarks>
/// Reserves a DOP slot (by incrementing _numberOfOutstandingOperations) BEFORE trying
/// to obtain a message; if no message materializes, the reservation is undone via
/// SignalOneAsyncMessageCompleted so the block can still complete.
/// </remarks>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
private bool TryGetNextMessageForNewAsyncOperation(out KeyValuePair<TInput, long> messageWithId)
{
    Debug.Assert(UsesAsyncCompletion, "Only valid to use when in async mode.");
    Common.ContractAssertMonitorStatus(IncomingLock, held: false);
    bool parallelismAvailable;
    lock (IncomingLock)
    {
        // If we have room for another asynchronous operation, reserve it.
        // If later it turns out that we had no work to fill the slot, we'll undo the addition.
        parallelismAvailable = HasRoomForMoreOperations;
        if (parallelismAvailable) ++_numberOfOutstandingOperations;
    }
    messageWithId = default(KeyValuePair<TInput, long>);
    if (parallelismAvailable)
    {
        // If a parallelism slot was available, try to get an item.
        // Be careful, because an exception may be thrown from ConsumeMessage
        // and we have already incremented _numberOfOutstandingOperations.
        bool gotMessage = false;
        try
        {
            gotMessage = TryGetNextAvailableOrPostponedMessage(out messageWithId);
        }
        catch
        {
            // We have incremented the counter, but we didn't get a message.
            // So we must undo the increment and eventually complete the block.
            SignalOneAsyncMessageCompleted();
            // Re-throw the exception. The processing loop will catch it.
            throw;
        }
        // There may not be an error, but may have still failed to get a message.
        // So we must undo the increment and eventually complete the block.
        if (!gotMessage) SignalOneAsyncMessageCompleted();
        return gotMessage;
    }
    // If there was no parallelism available, we didn't increment _numberOfOutstandingOperations.
    // So there is nothing to do except to return false.
    return false;
}
/// <summary>
/// Either takes the next available message from the input queue or retrieves a postponed
/// message from a source, based on whether we're in greedy or non-greedy mode.
/// </summary>
/// <param name="messageWithId">The retrieved item with its Id.</param>
/// <returns>true if a message could be removed and returned; otherwise, false.</returns>
private bool TryGetNextAvailableOrPostponedMessage(out KeyValuePair<TInput, long> messageWithId)
{
    Common.ContractAssertMonitorStatus(IncomingLock, held: false);
    // Fast case: a message is already sitting in our input buffer.
    if (_messages.TryDequeue(out messageWithId)) return true;
    // Otherwise, when bounding, fall back to consuming a message a source postponed.
    // Since we are not currently holding the lock, new messages may get queued up
    // before TryConsumePostponedMessage takes the lock; it therefore double-checks
    // the input queue under the lock before touching postponed messages.
    if (_boundingState != null && TryConsumePostponedMessage(forPostponementTransfer: false, result: out messageWithId)) return true;
    // No message available from either place.
    messageWithId = default(KeyValuePair<TInput, long>);
    return false;
}
/// <summary>Consumes a single postponed message.</summary>
/// <param name="forPostponementTransfer">
/// true if the method is being called to consume a message that'll then be stored into the input queue;
/// false if the method is being called to consume a message that'll be processed immediately.
/// If true, the bounding state's ForcePostponement will be updated.
/// If false, the method will first try (while holding the lock) to consume from the input queue before
/// consuming a postponed message.
/// </param>
/// <param name="result">The consumed message.</param>
/// <returns>true if a message was consumed; otherwise, false.</returns>
private bool TryConsumePostponedMessage(
    bool forPostponementTransfer,
    out KeyValuePair<TInput, long> result)
{
    Debug.Assert(
        _dataflowBlockOptions.BoundedCapacity !=
        System.Threading.Tasks.Dataflow.DataflowBlockOptions.Unbounded, "Only valid to use when in bounded mode.");
    Common.ContractAssertMonitorStatus(IncomingLock, held: false);
    // Iterate until we either consume a message successfully or there are no more postponed messages.
    // The loop optimistically reserves a message ID and a unit of bounding space before calling out
    // to the source; both reservations are undone below if the consume attempt ultimately fails.
    bool countIncrementedExpectingToGetItem = false;
    long messageId = Common.INVALID_REORDERING_ID;
    while (true)
    {
        KeyValuePair<ISourceBlock<TInput>, DataflowMessageHeader> element;
        lock (IncomingLock)
        {
            // If we are declining permanently, don't consume postponed messages.
            if (_decliningPermanently) break;
            // New messages may have been queued up while we weren't holding the lock.
            // In particular, the input queue may have been filled up and messages may have
            // gotten postponed. If we process such a postponed message, we would mess up the
            // order. Therefore, we have to double-check the input queue first.
            if (!forPostponementTransfer && _messages.TryDequeue(out result)) return true;
            // We can consume a message to process if there's one to process and also
            // if we have logical room within our bound for the message.
            if (!_boundingState.CountIsLessThanBound || !_boundingState.PostponedMessages.TryPop(out element))
            {
                if (countIncrementedExpectingToGetItem)
                {
                    countIncrementedExpectingToGetItem = false;
                    _boundingState.CurrentCount -= 1;
                }
                break;
            }
            if (!countIncrementedExpectingToGetItem)
            {
                countIncrementedExpectingToGetItem = true;
                messageId = _nextAvailableInputMessageId.Value++; // optimistically assign an ID
                Debug.Assert(messageId != Common.INVALID_REORDERING_ID, "The assigned message ID is invalid.");
                _boundingState.CurrentCount += 1; // optimistically take bounding space
                if (forPostponementTransfer)
                {
                    Debug.Assert(_boundingState.OutstandingTransfers >= 0, "Expected TryConsumePostponedMessage to not be negative.");
                    _boundingState.OutstandingTransfers++; // temporarily force postponement until we've successfully consumed the element
                }
            }
        } // Must not call to source while holding lock
        bool consumed;
        TInput consumedValue = element.Key.ConsumeMessage(element.Value, _owningTarget, out consumed);
        if (consumed)
        {
            result = new KeyValuePair<TInput, long>(consumedValue, messageId);
            return true;
        }
        else
        {
            if (forPostponementTransfer)
            {
                // The source declined the consume, so undo the optimistic
                // OutstandingTransfers increment made above.
                _boundingState.OutstandingTransfers--;
            }
        }
    }
    // We optimistically acquired a message ID for a message that, in the end, we never got.
    // So, we need to let the reordering buffer (if one exists) know that it should not
    // expect an item with this ID. Otherwise, it would stall forever.
    if (_reorderingBuffer != null && messageId != Common.INVALID_REORDERING_ID) _reorderingBuffer.IgnoreItem(messageId);
    // Similarly, we optimistically increased the bounding count, expecting to get another message in.
    // Since we didn't, we need to fix the bounding count back to what it should have been.
    if (countIncrementedExpectingToGetItem) ChangeBoundingCount(-1);
    // Inform the caller that no message could be consumed.
    result = default(KeyValuePair<TInput, long>);
    return false;
}
/// <summary>Gets whether the target has had cancellation requested or an exception has occurred.</summary>
private bool CanceledOrFaulted
{
    get
    {
        // Canceled?
        if (_dataflowBlockOptions.CancellationToken.IsCancellationRequested) return true;
        // Faulted? (_exceptions is lazily allocated, so non-null means a fault was recorded.)
        return Volatile.Read(ref _exceptions) != null;
    }
}
/// <summary>Completes the block once all completion conditions are met.</summary>
private void CompleteBlockIfPossible()
{
    Common.ContractAssertMonitorStatus(IncomingLock, held: true);
    // Completion becomes possible once no further input can arrive: either we are
    // permanently declining with a drained queue, or we were canceled/faulted.
    bool noMoreMessages = _decliningPermanently && _messages.IsEmpty;
    if (!noMoreMessages && !CanceledOrFaulted) return;
    CompleteBlockIfPossible_Slow();
}
/// <summary>
/// Slow path for CompleteBlockIfPossible.
/// Separating out the slow path into its own method makes it more likely that the fast path method will get inlined.
/// </summary>
private void CompleteBlockIfPossible_Slow()
{
    Debug.Assert((_decliningPermanently && _messages.IsEmpty) || CanceledOrFaulted, "There must be no more messages.");
    Common.ContractAssertMonitorStatus(IncomingLock, held: true);
    // Only complete once all in-flight processing has drained.
    bool notCurrentlyProcessing = _numberOfOutstandingOperations == 0;
    if (notCurrentlyProcessing && !_completionReserved)
    {
        // Make sure no one else tries to call CompleteBlockOncePossible
        _completionReserved = true;
        // Make sure the target is declining
        _decliningPermanently = true;
        // Get out from under currently held locks. This is to avoid
        // invoking synchronous continuations off of _completionSource.Task
        // while holding a lock.
        Task.Factory.StartNew(state => ((TargetCore<TInput>)state).CompleteBlockOncePossible(),
            this, CancellationToken.None, Common.GetCreationOptionsForTask(), TaskScheduler.Default);
    }
}
/// <summary>
/// Completes the block. This must only be called once, and only once all of the completion conditions are met.
/// As such, it must only be called from CompleteBlockIfPossible.
/// </summary>
private void CompleteBlockOncePossible()
{
    // Since the lock is needed only for the Assert, we do this only in DEBUG mode
#if DEBUG
    lock (IncomingLock) Debug.Assert(_numberOfOutstandingOperations == 0, "Everything must be done by now.");
#endif
    // Release any postponed messages
    if (_boundingState != null)
    {
        // Note: No locks should be held at this point.
        Common.ReleaseAllPostponedMessages(_owningTarget, _boundingState.PostponedMessages, ref _exceptions);
    }
    // For good measure and help in preventing leaks, clear out the incoming message queue,
    // which may still contain orphaned data if we were canceled or faulted. However,
    // we don't reset the bounding count here, as the block as a whole may still be active.
    KeyValuePair<TInput, long> ignored;
    IProducerConsumerQueue<KeyValuePair<TInput, long>> messages = _messages;
    while (messages.TryDequeue(out ignored)) ;
    // If we completed with any unhandled exception, finish in an error state
    if (Volatile.Read(ref _exceptions) != null)
    {
        // It's ok to read _exceptions' content here, because
        // at this point no more exceptions can be generated and thus no one will
        // be writing to it.
        _completionSource.TrySetException(Volatile.Read(ref _exceptions));
    }
    // If we completed with cancellation, finish in a canceled state
    else if (_dataflowBlockOptions.CancellationToken.IsCancellationRequested)
    {
        _completionSource.TrySetCanceled();
    }
    // Otherwise, finish in a successful state.
    else
    {
        _completionSource.TrySetResult(default(VoidResult));
    }
#if FEATURE_TRACING
    // We only want to do tracing for block completion if this target core represents the whole block.
    // If it only represents a part of the block (i.e. there's a source associated with it as well),
    // then we shouldn't log just for the first half of the block; the source half will handle logging.
    DataflowEtwProvider etwLog;
    if ((_targetCoreOptions & TargetCoreOptions.RepresentsBlockCompletion) != 0 &&
        (etwLog = DataflowEtwProvider.Log).IsEnabled())
    {
        etwLog.DataflowBlockCompleted(_owningTarget);
    }
#endif
}
/// <summary>Gets whether the target core is operating in a bounded mode.</summary>
internal bool IsBounded
{
    // Bounded mode is indicated purely by the presence of bounding state.
    get { return _boundingState != null; }
}
/// <summary>Increases or decreases the bounding count.</summary>
/// <param name="count">The incremental addition (positive to increase, negative to decrease).</param>
internal void ChangeBoundingCount(int count)
{
    Debug.Assert(count != 0, "Should only be called when the count is actually changing.");
    Common.ContractAssertMonitorStatus(IncomingLock, held: false);
    // Nothing to track when the block is unbounded.
    if (_boundingState == null) return;
    lock (IncomingLock)
    {
        Debug.Assert(count > 0 || (count < 0 && _boundingState.CurrentCount + count >= 0),
            "If count is negative, it must not take the total count negative.");
        _boundingState.CurrentCount += count;
        // Capacity changes may enable more processing, or allow final completion.
        ProcessAsyncIfNecessary();
        CompleteBlockIfPossible();
    }
}
/// <summary>Gets the object to display in the debugger display attribute.</summary>
[SuppressMessage("Microsoft.Globalization", "CA1305:SpecifyIFormatProvider")]
private object DebuggerDisplayContent
{
    get
    {
        // Prefer the target's own debugger content when it provides one.
        var debugTarget = _owningTarget as IDebuggerDisplay;
        object shownContent;
        if (debugTarget != null) shownContent = debugTarget.Content;
        else shownContent = _owningTarget;
        return string.Format("Block=\"{0}\"", shownContent);
    }
}
/// <summary>Gets the <see cref="ExecutionDataflowBlockOptions"/> used to configure this block.</summary>
internal ExecutionDataflowBlockOptions DataflowBlockOptions { get { return _dataflowBlockOptions; } }
/// <summary>Gets information about this helper to be used for display in a debugger.</summary>
/// <returns>Debugging information about this target; a new wrapper is created on each call.</returns>
internal DebuggingInformation GetDebuggingInformation() { return new DebuggingInformation(this); }
/// <summary>Provides a wrapper for commonly needed debugging information.</summary>
internal sealed class DebuggingInformation
{
    /// <summary>The target core being viewed.</summary>
    private readonly TargetCore<TInput> _targetCore;
    /// <summary>Initializes the debugging helper.</summary>
    /// <param name="target">The target being viewed.</param>
    internal DebuggingInformation(TargetCore<TInput> target) { _targetCore = target; }
    /// <summary>Gets the number of messages waiting to be processed.</summary>
    internal int InputCount { get { return _targetCore._messages.Count; } }
    /// <summary>Gets the messages waiting to be processed.</summary>
    internal IEnumerable<TInput> InputQueue { get { return _targetCore._messages.Select(kvp => kvp.Key).ToList(); } }
    /// <summary>Gets any postponed messages.</summary>
    internal QueuedMap<ISourceBlock<TInput>, DataflowMessageHeader> PostponedMessages
    {
        get
        {
            var boundingState = _targetCore._boundingState;
            return boundingState == null ? null : boundingState.PostponedMessages;
        }
    }
    /// <summary>Gets the current number of outstanding input processing operations.</summary>
    internal int CurrentDegreeOfParallelism { get { return _targetCore._numberOfOutstandingOperations - _targetCore._numberOfOutstandingServiceTasks; } }
    /// <summary>Gets the DataflowBlockOptions used to configure this block.</summary>
    internal ExecutionDataflowBlockOptions DataflowBlockOptions { get { return _targetCore._dataflowBlockOptions; } }
    /// <summary>Gets whether the block is declining further messages.</summary>
    internal bool IsDecliningPermanently { get { return _targetCore._decliningPermanently; } }
    /// <summary>Gets whether the block is completed.</summary>
    internal bool IsCompleted { get { return _targetCore.Completion.IsCompleted; } }
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Timers;
using System.Collections.Generic;
using System.IO;
using System.Net.Sockets;
using System.Reflection;
using System.Text.RegularExpressions;
using System.Threading;
using OpenMetaverse;
using log4net;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Monitoring;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
namespace OpenSim.Region.OptionalModules.Avatar.Chat
{
public class IRCConnector
{
#region Global (static) state
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
// Local constants
// This computation is not the real region center if the region is larger than 256.
// This computation isn't fixed because there is not a handle back to the region.
private static readonly Vector3 CenterOfRegion = new Vector3(((int)Constants.RegionSize * 0.5f), ((int)Constants.RegionSize * 0.5f), 20);
private static readonly char[] CS_SPACE = { ' ' };
private const int WD_INTERVAL = 1000; // base watchdog interval
private static int PING_PERIOD = 15; // WD intervals per PING
private static int ICCD_PERIOD = 10; // WD intervals between Connects
private static int L_TIMEOUT = 25; // Login time out interval
private static int _idk_ = 0; // core connector identifier
private static int _pdk_ = 0; // ping interval counter
private static int _icc_ = ICCD_PERIOD; // IRC connect counter
// List of configured connectors
private static List<IRCConnector> m_connectors = new List<IRCConnector>();
// Watchdog state
private static System.Timers.Timer m_watchdog = null;
// The watch-dog gets started as soon as the class is instantiated, and
// ticks once every second (WD_INTERVAL)
static IRCConnector()
{
    m_log.DebugFormat("[IRC-Connector]: Static initialization started");
    // Build and arm the shared, periodic watchdog used by every connector.
    var watchdog = new System.Timers.Timer(WD_INTERVAL);
    watchdog.Elapsed += new ElapsedEventHandler(WatchdogHandler);
    watchdog.AutoReset = true;
    m_watchdog = watchdog;
    m_watchdog.Start();
    m_log.DebugFormat("[IRC-Connector]: Static initialization complete");
}
#endregion
#region Instance state
// Connector identity
internal int idn = _idk_++;
// How many regions depend upon this connection
// This count is updated by the ChannelState object and reflects the sum
// of the region clients associated with the set of associated channel
// state instances. That's why it cannot be managed here.
internal int depends = 0;
// This variable counts the number of resets that have been performed
// on the connector. When a listener thread terminates, it checks to
// see of the reset count has changed before it schedules another
// reset.
internal int m_resetk = 0;
// Working threads
private Thread m_listener = null;
private Object msyncConnect = new Object();
internal bool m_randomizeNick = true; // add random suffix
internal string m_baseNick = null; // base name for randomizing
internal string m_nick = null; // effective nickname
public string Nick // Public property
{
get { return m_nick; }
set { m_nick = value; }
}
private bool m_enabled = false; // connector enablement
public bool Enabled
{
get { return m_enabled; }
}
private bool m_connected = false; // connection status
private bool m_pending = false; // login disposition
private int m_timeout = L_TIMEOUT; // login timeout counter
public bool Connected
{
get { return m_connected; }
}
private string m_ircChannel; // associated channel id
public string IrcChannel
{
get { return m_ircChannel; }
set { m_ircChannel = value; }
}
private uint m_port = 6667; // session port
public uint Port
{
get { return m_port; }
set { m_port = value; }
}
private string m_server = null; // IRC server name
public string Server
{
get { return m_server; }
set { m_server = value; }
}
private string m_password = null;
public string Password
{
get { return m_password; }
set { m_password = value; }
}
private string m_user = "USER OpenSimBot 8 * :I'm an OpenSim to IRC bot";
public string User
{
get { return m_user; }
}
// Network interface
private TcpClient m_tcp;
private NetworkStream m_stream = null;
private StreamReader m_reader;
private StreamWriter m_writer;
// Channel characteristic info (if available)
internal string usermod = String.Empty;
internal string chanmod = String.Empty;
internal string version = String.Empty;
internal bool motd = false;
#endregion
#region connector instance management
// Build a connector from a channel-state description. Session parameters
// (server, port, nick, channel, credentials) are copied from the supplied
// ChannelState; throws if any required value is missing.
internal IRCConnector(ChannelState cs)
{
    // Prepare network interface
    m_tcp = null;
    m_writer = null;
    m_reader = null;
    // Setup IRC session parameters
    m_server = cs.Server;
    m_password = cs.Password;
    m_baseNick = cs.BaseNickname;
    m_randomizeNick = cs.RandomizeNickname;
    m_ircChannel = cs.IrcChannel;
    m_port = cs.Port;
    m_user = cs.User;
    // NOTE(review): the static constructor always creates m_watchdog before any
    // instance constructor can run, so this branch appears unreachable and the
    // configured ConnectDelay/PingDelay values are never applied - verify intent.
    if (m_watchdog == null)
    {
        // Non-differentiating
        ICCD_PERIOD = cs.ConnectDelay;
        PING_PERIOD = cs.PingDelay;
        // Smaller values are not reasonable
        if (ICCD_PERIOD < 5)
            ICCD_PERIOD = 5;
        if (PING_PERIOD < 5)
            PING_PERIOD = 5;
        _icc_ = ICCD_PERIOD; // get started right away!
    }
    // The last line of defense
    if (m_server == null || m_baseNick == null || m_ircChannel == null || m_user == null)
        throw new Exception("Invalid connector configuration");
    // Generate an initial nickname, optionally with a random numeric suffix
    // so multiple regions can share a base nick without collisions.
    if (m_randomizeNick)
        m_nick = m_baseNick + Util.RandomClass.Next(1, 99);
    else
        m_nick = m_baseNick;
    m_log.InfoFormat("[IRC-Connector-{0}]: Initialization complete", idn);
}
// Finalizer: best-effort cleanup of the network session.
// BUGFIX: the previous version called m_watchdog.Stop() here, but m_watchdog is a
// STATIC timer shared by every connector instance - finalizing any one connector
// silently disabled reconnect/ping servicing for all the others. The shared
// watchdog must outlive individual instances, so it is no longer touched here.
~IRCConnector()
{
    // Close() already traps exceptions around each teardown step, but a finalizer
    // must never leak an exception (it would tear down the process), so guard it.
    try
    {
        Close();
    }
    catch (Exception) { }
}
// Mark the connector as connectable and attempt the initial connection.
// Harmless if already enabled.
public void Open()
{
    if (!m_enabled)
    {
        // BUGFIX: enable BEFORE connecting. Connect() starts with
        // "if (!m_enabled) return;", so calling it while m_enabled was still
        // false made the initial connection attempt a silent no-op and left
        // the first connection entirely to a later watchdog cycle.
        m_enabled = true;
        lock (m_connectors)
            m_connectors.Add(this);
        if (!Connected)
        {
            Connect();
        }
    }
}
// Only close the connector if the dependency count is zero.
// Sends a QUIT (best effort), then tears down writer/reader/stream/socket in
// that order; every step traps its own exception so cleanup always completes.
public void Close()
{
    m_log.InfoFormat("[IRC-Connector-{0}] Closing", idn);
    lock (msyncConnect)
    {
        if ((depends == 0) && Enabled)
        {
            m_enabled = false;
            if (Connected)
            {
                m_log.DebugFormat("[IRC-Connector-{0}] Closing interface", idn);
                // Cleanup the IRC session: say goodbye to the server first.
                try
                {
                    m_writer.WriteLine(String.Format("QUIT :{0} to {1} wormhole to {2} closing",
                        m_nick, m_ircChannel, m_server));
                    m_writer.Flush();
                }
                catch (Exception) { }
                // Mark disconnected before releasing resources so the listener
                // thread's loop condition goes false.
                m_connected = false;
                try { m_writer.Close(); }
                catch (Exception) { }
                try { m_reader.Close(); }
                catch (Exception) { }
                try { m_stream.Close(); }
                catch (Exception) { }
                try { m_tcp.Close(); }
                catch (Exception) { }
            }
            // Deregister from the watchdog's connector list.
            lock (m_connectors)
                m_connectors.Remove(this);
        }
    }
    m_log.InfoFormat("[IRC-Connector-{0}] Closed", idn);
}
#endregion
#region session management
// Connect to the IRC server. A connector should always be connected, once enabled.
// Establishes the TCP session, spins up the listener thread, and performs the
// RFC 2812 registration sequence (PASS, NICK, USER, then JOIN).
public void Connect()
{
    if (!m_enabled)
        return;
    // Delay until the next watchdog cycle if this is too close to the last start
    // attempt. (Previously written as "while (...) return;", which executes at
    // most once and is therefore just an "if" - rewritten for clarity.)
    if (_icc_ < ICCD_PERIOD)
        return;
    m_log.DebugFormat("[IRC-Connector-{0}]: Connection request for {1} on {2}:{3}", idn, m_nick, m_server, m_ircChannel);
    lock (msyncConnect)
    {
        _icc_ = 0;
        try
        {
            if (m_connected) return;
            // Optimistically mark connected/pending before the attempt; see the
            // catch block below for why failures deliberately leave these set.
            m_connected = true;
            m_pending = true;
            m_timeout = L_TIMEOUT;
            m_tcp = new TcpClient(m_server, (int)m_port);
            m_stream = m_tcp.GetStream();
            m_reader = new StreamReader(m_stream);
            m_writer = new StreamWriter(m_stream);
            m_log.InfoFormat("[IRC-Connector-{0}]: Connected to {1}:{2}", idn, m_server, m_port);
            m_listener = new Thread(new ThreadStart(ListenerRun));
            m_listener.Name = "IRCConnectorListenerThread";
            m_listener.IsBackground = true;
            m_listener.Start();
            // This is the message order recommended by RFC 2812
            if (m_password != null)
                m_writer.WriteLine(String.Format("PASS {0}", m_password));
            m_writer.WriteLine(String.Format("NICK {0}", m_nick));
            m_writer.Flush();
            m_writer.WriteLine(m_user);
            m_writer.Flush();
            m_writer.WriteLine(String.Format("JOIN {0}", m_ircChannel));
            m_writer.Flush();
            m_log.InfoFormat("[IRC-Connector-{0}]: {1} has asked to join {2}", idn, m_nick, m_ircChannel);
        }
        catch (Exception e)
        {
            m_log.ErrorFormat("[IRC-Connector-{0}] cannot connect {1} to {2}:{3}: {4}",
                idn, m_nick, m_server, m_port, e.Message);
            // It might seem reasonable to reset connected and pending status here
            // Seeing as we know that the login has failed, but if we do that, then
            // connection will be retried each time the interconnection interval
            // expires. By leaving them as they are, the connection will be retried
            // when the login timeout expires. Which is preferred.
        }
    }
    return;
}
// Reconnect is used to force a re-cycle of the IRC connection. Should generally
// be a transparent event.
public void Reconnect()
{
    m_log.DebugFormat("[IRC-Connector-{0}]: Reconnect request for {1} on {2}:{3}", idn, m_nick, m_server, m_ircChannel);
    // Don't do this if a Connect is in progress...
    lock (msyncConnect)
    {
        if (m_connected)
        {
            m_log.InfoFormat("[IRC-Connector-{0}] Resetting connector", idn);
            // Mark as disconnected. This will allow the listener thread
            // to exit if still in-flight.
            // The listener thread is not aborted - it *might* actually be
            // the thread that is running the Reconnect! Instead just close
            // the socket and it will disappear of its own accord, once this
            // processing is completed.
            // NOTE(review): m_stream is not closed explicitly here (unlike in
            // Close()); presumably m_tcp.Close() tears down the underlying
            // stream - confirm no stream handle is leaked on reconnect.
            try { m_writer.Close(); }
            catch (Exception) { }
            try { m_reader.Close(); }
            catch (Exception) { }
            try { m_tcp.Close(); }
            catch (Exception) { }
            m_connected = false;
            m_pending = false;
            // Bump the reset generation so the old listener thread does not
            // itself schedule another Reconnect when it unwinds.
            m_resetk++;
        }
    }
    Connect();
}
#endregion
#region Outbound (to-IRC) message handlers
// Relay one chat message to the IRC server. The caller supplies the composite
// format string; the arguments provided are {0}=channel, {1}=from, {2}=region,
// {3}=message text.
public void PrivMsg(string pattern, string from, string region, string msg)
{
    try
    {
        m_writer.WriteLine(pattern, m_ircChannel, from, region, msg);
        m_writer.Flush();
    }
    catch (IOException)
    {
        // The socket went away underneath us - recycle the session.
        m_log.ErrorFormat("[IRC-Connector-{0}]: PrivMsg I/O Error: disconnected from IRC server", idn);
        Reconnect();
    }
    catch (Exception ex)
    {
        // Anything else (e.g. a malformed format pattern) is logged and swallowed.
        m_log.ErrorFormat("[IRC-Connector-{0}]: PrivMsg exception : {1}", idn, ex.Message);
        m_log.Debug(ex);
    }
}
// Send a raw, pre-formatted command line to the IRC server.
public void Send(string msg)
{
    try
    {
        m_writer.WriteLine(msg);
        m_writer.Flush();
    }
    catch (IOException)
    {
        // Socket dropped - recycle the session.
        m_log.ErrorFormat("[IRC-Connector-{0}] Disconnected from IRC server.(Send)", idn);
        Reconnect();
    }
    catch (Exception ex)
    {
        // BUGFIX: the format string previously used {0} twice, so the exception
        // message was never logged (idn was printed in its place).
        m_log.ErrorFormat("[IRC-Connector-{0}] Send exception trap: {1}", idn, ex.Message);
        m_log.Debug(ex);
    }
}
#endregion
public void ListenerRun()
{
    string inputLine;
    // Capture the reset generation at thread start. Reconnect() bumps
    // m_resetk, which tells this (now stale) listener not to trigger
    // another reconnect of its own on the way out.
    int resetk = m_resetk;

    try
    {
        while (m_enabled && m_connected)
        {
            // ReadLine returns null when the socket closes; convert that
            // into an exception so both failure paths exit via the catch.
            if ((inputLine = m_reader.ReadLine()) == null)
                throw new Exception("Listener input socket closed");

            if (inputLine.Contains("PRIVMSG"))
            {
                Dictionary<string, string> data = ExtractMsg(inputLine);
                // Any chat ???
                if (data != null)
                {
                    OSChatMessage c = new OSChatMessage();
                    c.Message = data["msg"];
                    c.Type = ChatTypeEnum.Region;
                    c.Position = CenterOfRegion;
                    c.From = data["nick"];
                    c.Sender = null;
                    c.SenderUUID = UUID.Zero;

                    // Is message "\001ACTION foo bar\001"?
                    // Then change to: "/me foo bar"
                    // (CTCP ACTION framing: leading 0x01 byte; the
                    // Substring(8, Length - 9) drops "\001ACTION " and the
                    // trailing 0x01.)
                    if ((1 == c.Message[0]) && c.Message.Substring(1).StartsWith("ACTION"))
                        c.Message = String.Format("/me {0}", c.Message.Substring(8, c.Message.Length - 9));

                    ChannelState.OSChat(this, c, false);
                }
            }
            else
            {
                ProcessIRCCommand(inputLine);
            }
        }
    }
    catch (Exception /*e*/)
    {
        // Deliberately swallowed: any read failure simply ends the loop;
        // recovery happens via the Reconnect below or the watchdog.
    }

    // This is potentially circular, but harmless if so.
    // The connection is marked as not connected the first time
    // through reconnect.
    if (m_enabled && (m_resetk == resetk))
        Reconnect();
}
// Matches ":nick!user PRIVMSG target :message" lines; the named groups
// (nick, user, channel, msg) are consumed by ExtractMsg.
private Regex RE = new Regex(@":(?<nick>[\w-]*)!(?<user>\S*) PRIVMSG (?<channel>\S+) :(?<msg>.*)",
                             RegexOptions.Multiline);
/// Examines an IRC PRIVMSG line and extracts the components of the private
/// message, which will then be rebroadcast in the sim.
/// Returns a dictionary with keys "nick", "user", "channel" and "msg",
/// or null when the input does not match the expected shape.
private Dictionary<string, string> ExtractMsg(string input)
{
    MatchCollection matches = RE.Matches(input);

    // Expect exactly one match carrying the whole-match group plus the
    // four named captures. (The previous "matches.Count == 0" test was
    // redundant - it is implied by "matches.Count != 1".)
    if ((matches.Count != 1) || (matches[0].Groups.Count != 5))
        return null;

    GroupCollection groups = matches[0].Groups;
    // Access captures by name rather than positional index so the result
    // cannot silently skew if the pattern is ever rearranged.
    return new Dictionary<string, string>
    {
        { "nick", groups["nick"].Value },
        { "user", groups["user"].Value },
        { "channel", groups["channel"].Value },
        { "msg", groups["msg"].Value }
    };
}
public void BroadcastSim(string sender, string format, params string[] args)
{
    // Relay a message from IRC into region chat. An IRC gateway failure
    // must never crash the simulator, hence the broad trap.
    try
    {
        OSChatMessage chat = new OSChatMessage();
        chat.From = sender;
        chat.Message = String.Format(format, args);
        chat.Type = ChatTypeEnum.Region;
        chat.Position = CenterOfRegion;
        chat.Sender = null;
        chat.SenderUUID = UUID.Zero;

        ChannelState.OSChat(this, chat, true);
    }
    catch (Exception ex)
    {
        m_log.ErrorFormat("[IRC-Connector-{0}]: BroadcastSim Exception Trap: {1}\n{2}", idn, ex.Message, ex.StackTrace);
    }
}
#region IRC Command Handlers
/// Dispatch a single raw (non-PRIVMSG) line received from the IRC server.
/// Wire format: [":" prefix SPACE] command SPACE parameters.
public void ProcessIRCCommand(string command)
{
    string[] commArgs;
    string c_server = m_server;

    string pfx = String.Empty;
    string cmd = String.Empty;
    string parms = String.Empty;

    // ":" indicates that a prefix is present
    // There are NEVER more than 17 real
    // fields. A parameter that starts with
    // ":" indicates that the remainder of the
    // line is a single parameter value.
    commArgs = command.Split(CS_SPACE, 2);
    if (commArgs[0].StartsWith(":"))
    {
        pfx = commArgs[0].Substring(1);
        commArgs = commArgs[1].Split(CS_SPACE, 2);
    }

    cmd = commArgs[0];
    // NOTE(review): assumes every line carries at least one parameter; a
    // parameterless command would throw IndexOutOfRangeException here -
    // confirm servers in use never send such lines.
    parms = commArgs[1];

    switch (cmd)
    {
        // Messages 001-004 are always sent
        // following signon.
        case "001": // RPL_WELCOME
        case "002": // RPL_YOURHOST
        case "003": // RPL_CREATED
            break;

        case "004": // RPL_MYINFO: capture server name, version, mode sets
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            commArgs = parms.Split(CS_SPACE);
            c_server = commArgs[1];
            m_server = c_server;
            version = commArgs[2];
            usermod = commArgs[3];
            chanmod = commArgs[4];
            break;

        case "005": // RPL_ISUPPORT - ignored
            break;

        // Informational/statistics replies - deliberately ignored.
        case "042":
        case "250":
        case "251":
        case "252":
        case "254":
        case "255":
        case "265":
        case "266":
        case "332": // Subject
        case "333": // Subject owner (?)
        case "353": // Name list
        case "366": // End-of-Name list marker
        case "372": // MOTD body
        case "375": // MOTD start
            break;

        case "376": // MOTD end - signon chatter is complete
            motd = true;
            break;

        case "451": // Not registered
            break;

        case "433": // Nickname in use
            // Gen a new name
            m_nick = m_baseNick + Util.RandomClass.Next(1, 99);
            m_log.ErrorFormat("[IRC-Connector-{0}]: [{1}] IRC SERVER reports NicknameInUse, trying {2}", idn, cmd, m_nick);
            // Retry the full registration sequence with the new nick.
            m_writer.WriteLine(String.Format("NICK {0}", m_nick));
            m_writer.Flush();
            m_writer.WriteLine(m_user);
            m_writer.Flush();
            m_writer.WriteLine(String.Format("JOIN {0}", m_ircChannel));
            m_writer.Flush();
            break;

        case "479": // Bad channel name, etc. This will never work, so disable the connection
            m_log.ErrorFormat("[IRC-Connector-{0}] [{1}] {2}", idn, cmd, parms.Split(CS_SPACE, 2)[1]);
            m_log.ErrorFormat("[IRC-Connector-{0}] [{1}] Connector disabled", idn, cmd);
            m_enabled = false;
            m_connected = false;
            m_pending = false;
            break;

        case "NOTICE":
            break;

        case "ERROR":
            m_log.ErrorFormat("[IRC-Connector-{0}] [{1}] {2}", idn, cmd, parms.Split(CS_SPACE, 2)[1]);
            // Back off the inter-connection interval if the server is
            // complaining about reconnect frequency.
            if (parms.Contains("reconnect too fast"))
                ICCD_PERIOD++;
            m_pending = false;
            Reconnect();
            break;

        case "PING":
            // Mandatory keep-alive response.
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            m_writer.WriteLine(String.Format("PONG {0}", parms));
            m_writer.Flush();
            break;

        case "PONG":
            break;

        case "JOIN":
            // First JOIN after signon marks the login handshake complete.
            if (m_pending)
            {
                m_log.InfoFormat("[IRC-Connector-{0}] [{1}] Connected", idn, cmd);
                m_pending = false;
            }
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcJoin(pfx, cmd, parms);
            break;

        case "PART":
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcPart(pfx, cmd, parms);
            break;

        case "MODE":
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcMode(pfx, cmd, parms);
            break;

        case "NICK":
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcNickChange(pfx, cmd, parms);
            break;

        case "KICK":
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcKick(pfx, cmd, parms);
            break;

        case "QUIT":
            m_log.DebugFormat("[IRC-Connector-{0}] [{1}] parms = <{2}>", idn, cmd, parms);
            eventIrcQuit(pfx, cmd, parms);
            break;

        default:
            m_log.DebugFormat("[IRC-Connector-{0}] Command '{1}' ignored, parms = {2}", idn, cmd, parms);
            break;
    }
}
public void eventIrcJoin(string prefix, string command, string parms)
{
    // prefix is "nick!user@host"; the channel is the first parameter and
    // may carry a leading ':' which is stripped for display.
    string joiner = prefix.Split('!')[0];
    string channel = parms.Split(CS_SPACE, 2)[0];
    if (channel.StartsWith(":"))
        channel = channel.Substring(1);

    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCJoin {1}:{2}", idn, m_server, m_ircChannel);
    BroadcastSim(joiner, "/me joins {0}", channel);
}
public void eventIrcPart(string prefix, string command, string parms)
{
    // prefix is "nick!user@host"; first parameter is the channel.
    string leaver = prefix.Split('!')[0];
    string channel = parms.Split(CS_SPACE, 2)[0];

    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCPart {1}:{2}", idn, m_server, m_ircChannel);
    BroadcastSim(leaver, "/me parts {0}", channel);
}
/// Handles a MODE change notification. Note: the normalised mode string is
/// computed but never used or rebroadcast - this handler is effectively
/// log-only. NOTE(review): possibly unfinished; confirm whether mode
/// changes were ever meant to reach region chat.
public void eventIrcMode(string prefix, string command, string parms)
{
    string[] args = parms.Split(CS_SPACE, 2);
    string UserMode = args[1];
    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCMode {1}:{2}", idn, m_server, m_ircChannel);
    // Strip the ':' that marks a trailing IRC parameter.
    if (UserMode.Substring(0, 1) == ":")
    {
        UserMode = UserMode.Remove(0, 1);
    }
}
public void eventIrcNickChange(string prefix, string command, string parms)
{
    // "NICK :newnick" sent from "oldnick!user@host"; the Remove(0, 1)
    // drops the leading ':' of the trailing parameter.
    string oldNick = prefix.Split('!')[0];
    string newNick = parms.Split(CS_SPACE, 2)[0].Remove(0, 1);

    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCNickChange {1}:{2}", idn, m_server, m_ircChannel);
    BroadcastSim(oldNick, "/me is now known as {0}", newNick);
}
/// Handles "KICK <channel> <victim> :<message>" from "kicker!user@host"
/// and rebroadcasts it into region chat.
public void eventIrcKick(string prefix, string command, string parms)
{
    string[] args = parms.Split(CS_SPACE, 3);
    string UserKicker = prefix.Split('!')[0];
    string IrcChannel = args[0];
    string UserKicked = args[1];
    string KickMessage = args[2];
    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCKick {1}:{2}", idn, m_server, m_ircChannel);
    // BUGFIX: message previously read "/me kicks kicks {0} ..." (doubled word).
    BroadcastSim(UserKicker, "/me kicks {0} off {1} saying \"{2}\"", UserKicked, IrcChannel, KickMessage);
    if (UserKicked == m_nick)
    {
        BroadcastSim(m_nick, "Hey, that was me!!!");
    }
}
public void eventIrcQuit(string prefix, string command, string parms)
{
    // QUIT carries the quit message as its sole (trailing) parameter.
    string quitter = prefix.Split('!')[0];

    m_log.DebugFormat("[IRC-Connector-{0}] Event: IRCQuit {1}:{2}", idn, m_server, m_ircChannel);
    BroadcastSim(quitter, "/me quits saying \"{0}\"", parms);
}
#endregion
#region Connector Watch Dog
// A single watch dog monitors extant connectors and makes sure that they
// are re-connected as necessary. If a connector IS connected, then it is
// pinged, but only if a PING period has elapsed.
// A single watchdog timer callback services every extant connector:
// reconnects dropped ones, times-out stalled logins, and pings healthy
// connections once per PING_PERIOD ticks.
protected static void WatchdogHandler(Object source, ElapsedEventArgs args)
{
    _pdk_ = (_pdk_ + 1) % PING_PERIOD; // cycle the ping trigger
    _icc_++; // increment the inter-consecutive-connect-delay counter

    // Hold the list lock for the whole scan so connectors cannot be
    // added/removed mid-iteration.
    lock (m_connectors)
        foreach (IRCConnector connector in m_connectors)
        {
            if (connector.Enabled)
            {
                if (!connector.Connected)
                {
                    try
                    {
                        connector.Connect();
                    }
                    catch (Exception e)
                    {
                        // One failing connector must not stop the scan.
                        m_log.ErrorFormat("[IRC-Watchdog] Exception on connector {0}: {1} ", connector.idn, e.Message);
                    }
                }
                else
                {
                    if (connector.m_pending)
                    {
                        // Connected but login handshake not yet confirmed:
                        // count down the login timeout and recycle on expiry.
                        if (connector.m_timeout == 0)
                        {
                            m_log.ErrorFormat("[IRC-Watchdog] Login timed-out for connector {0}, reconnecting", connector.idn);
                            connector.Reconnect();
                        }
                        else
                            connector.m_timeout--;
                    }
                    // Being marked connected is not enough to ping. Socket establishment can sometimes take a long
                    // time, in which case the watch dog might try to ping the server before the socket has been
                    // set up, with nasty side-effects.
                    else if (_pdk_ == 0)
                    {
                        try
                        {
                            connector.m_writer.WriteLine(String.Format("PING :{0}", connector.m_server));
                            connector.m_writer.Flush();
                        }
                        catch (Exception e)
                        {
                            // A failed ping means the link is dead; recycle it.
                            m_log.ErrorFormat("[IRC-PingRun] Exception on connector {0}: {1} ", connector.idn, e.Message);
                            m_log.Debug(e);
                            connector.Reconnect();
                        }
                    }
                }
            }
        }
}
#endregion
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="MouseControllerProvider.cs" company="Google Inc.">
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
//-----------------------------------------------------------------------
using Gvr;
using UnityEngine;
namespace Gvr.Internal
{
/// Mocks controller input by using the mouse.
/// The controller is connected when holding left shift.
/// Move the mouse to control gyroscope and orientation.
/// The left mouse button is used for the clickButton.
/// The right mouse button is used for the appButton.
/// The middle mouse button is used for the homeButton.
class MouseControllerProvider : IControllerProvider
{
    private const string AXIS_MOUSE_X = "Mouse X";
    private const string AXIS_MOUSE_Y = "Mouse Y";

    private ControllerState state = new ControllerState();
    // Per-frame mouse movement, refreshed in UpdateState.
    private Vector2 mouseDelta = new Vector2();

    /// Need to store the state of the buttons from the previous frame.
    /// This is because Input.GetMouseButtonDown and Input.GetMouseButtonUp
    /// don't work when called after WaitForEndOfFrame, which is when ReadState is called.
    // NOTE(review): wasTouching appears to be unused within this class -
    // confirm it can be removed.
    private bool wasTouching;
    private GvrControllerButton lastButtonsState;

    // Degrees of simulated rotation per unit of mouse movement.
    private const float ROTATE_SENSITIVITY = 4.5f;
    // Touchpad units per unit of mouse movement.
    private const float TOUCH_SENSITIVITY = .12f;
    // Scale vector that flips the Y component (mouse Y vs. pitch direction).
    private static readonly Vector3 INVERT_Y = new Vector3(1, -1, 1);

    // Returned for any controller id other than 0 (single-controller mock).
    private static readonly ControllerState dummyState = new ControllerState();

    public static bool IsMouseAvailable
    {
        get { return Input.mousePresent && IsActivateButtonPressed; }
    }

    public static bool IsActivateButtonPressed
    {
        get { return Input.GetKey(KeyCode.LeftShift) || Input.GetKey(KeyCode.RightShift); }
    }

    public static bool IsClickButtonPressed
    {
        get { return Input.GetMouseButton(0); }
    }

    public static bool IsAppButtonPressed
    {
        get { return Input.GetMouseButton(1); }
    }

    public static bool IsHomeButtonPressed
    {
        get { return Input.GetMouseButton(2); }
    }

    public static bool IsTouching
    {
        get { return Input.GetKey(KeyCode.LeftControl) || Input.GetKey(KeyCode.RightControl); }
    }

    public bool SupportsBatteryStatus
    {
        get { return false; }
    }

    public int MaxControllerCount
    {
        get { return 1; }
    }

    internal MouseControllerProvider()
    {
    }

    public void Dispose()
    {
    }

    /// Copies the current (mouse-derived) controller state into outState.
    /// Only controller id 0 is emulated; other ids receive a blank state.
    public void ReadState(ControllerState outState, int controller_id)
    {
        if (controller_id != 0)
        {
            outState.CopyFrom(dummyState);
            return;
        }
        lock (state)
        {
            UpdateState();
            outState.CopyFrom(state);
        }
        // NOTE(review): called outside the lock above - confirm no other
        // thread reads `state` concurrently.
        state.ClearTransientState();
    }

    public void OnPause()
    {
    }

    public void OnResume()
    {
    }

    // Recomputes the full controller state for this frame from mouse and
    // keyboard input.
    private void UpdateState()
    {
        GvrCursorHelper.ControllerEmulationActive = IsMouseAvailable;
        if (!IsMouseAvailable)
        {
            ClearState();
            return;
        }

        state.connectionState = GvrConnectionState.Connected;
        state.apiStatus = GvrControllerApiStatus.Ok;
        state.isCharging = false;
        state.batteryLevel = GvrControllerBatteryLevel.Full;

        UpdateButtonStates();

        mouseDelta.Set(
            Input.GetAxis(AXIS_MOUSE_X),
            Input.GetAxis(AXIS_MOUSE_Y));

        // While "touching" (Ctrl held), mouse motion drives the touchpad;
        // otherwise it drives orientation/gyro.
        if (0 != (state.buttonsState & GvrControllerButton.TouchPadTouch))
        {
            UpdateTouchPos();
        }
        else
        {
            UpdateOrientation();
        }
    }

    // Moves the emulated touchpad position, clamped to the [0,1] square.
    private void UpdateTouchPos()
    {
        Vector3 currentMousePosition = Input.mousePosition;
        Vector2 touchDelta = mouseDelta * TOUCH_SENSITIVITY;
        touchDelta.y *= -1.0f;

        state.touchPos += touchDelta;
        state.touchPos.x = Mathf.Clamp01(state.touchPos.x);
        state.touchPos.y = Mathf.Clamp01(state.touchPos.y);
    }

    // Derives gyro (rad/s) and an incremental orientation change from the
    // frame's mouse delta.
    private void UpdateOrientation()
    {
        Vector3 deltaDegrees = Vector3.Scale(mouseDelta, INVERT_Y) * ROTATE_SENSITIVITY;

        state.gyro = deltaDegrees * (Mathf.Deg2Rad / Time.unscaledDeltaTime);

        Quaternion yaw = Quaternion.AngleAxis(deltaDegrees.x, Vector3.up);
        Quaternion pitch = Quaternion.AngleAxis(deltaDegrees.y, Vector3.right);
        state.orientation = state.orientation * yaw * pitch;
    }

    // Maps mouse buttons / modifier keys onto controller buttons and derives
    // the up/down edges from the previous frame's snapshot. Order matters:
    // SetButtonsUpDownFromPrevious must run before lastButtonsState is
    // overwritten.
    private void UpdateButtonStates()
    {
        state.buttonsState = 0;
        if (IsClickButtonPressed)
        {
            state.buttonsState |= GvrControllerButton.TouchPadButton;
        }
        if (IsAppButtonPressed)
        {
            state.buttonsState |= GvrControllerButton.App;
        }
        if (IsHomeButtonPressed)
        {
            state.buttonsState |= GvrControllerButton.System;
        }
        if (IsTouching)
        {
            state.buttonsState |= GvrControllerButton.TouchPadTouch;
        }

        state.SetButtonsUpDownFromPrevious(lastButtonsState);
        lastButtonsState = state.buttonsState;

        // Touch released: snap touch position back to center.
        if (0 != (state.buttonsUp & GvrControllerButton.TouchPadTouch))
        {
            ClearTouchPos();
        }
        // System (home) released: recenter, matching real-controller behavior.
        if (0 != (state.buttonsUp & GvrControllerButton.System))
        {
            Recenter();
        }
    }

    // Removes accumulated yaw so the controller points "forward" again;
    // pitch/roll are intentionally preserved.
    private void Recenter()
    {
        Quaternion yawCorrection = Quaternion.AngleAxis(-state.orientation.eulerAngles.y, Vector3.up);
        state.orientation = state.orientation * yawCorrection;
        state.recentered = true;
    }

    private void ClearTouchPos()
    {
        state.touchPos = new Vector2(0.5f, 0.5f);
    }

    // Resets the state to "disconnected" when emulation is inactive.
    private void ClearState()
    {
        state.connectionState = GvrConnectionState.Disconnected;
        state.buttonsState = 0;
        state.buttonsDown = 0;
        state.buttonsUp = 0;
        ClearTouchPos();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Concurrent;
using System.Data.Common;
using System.Diagnostics;
namespace System.Data.ProviderBase
{
// set_ConnectionString calls DbConnectionFactory.GetConnectionPoolGroup
// when not found a new pool entry is created and potentially added
// DbConnectionPoolGroup starts in the Active state
// Open calls DbConnectionFactory.GetConnectionPool
// if the existing pool entry is Disabled, GetConnectionPoolGroup is called for a new entry
// DbConnectionFactory.GetConnectionPool calls DbConnectionPoolGroup.GetConnectionPool
// DbConnectionPoolGroup.GetConnectionPool will return pool for the current identity
// or null if identity is restricted or pooling is disabled or state is disabled at time of add
// state changes are Active->Active, Idle->Active
// DbConnectionFactory.PruneConnectionPoolGroups calls Prune
// which will QueuePoolForRelease on all empty pools
// and once no pools remain, change state from Active->Idle->Disabled
// Once Disabled, factory can remove its reference to the pool entry
internal sealed class DbConnectionPoolGroup
{
    private readonly DbConnectionOptions _connectionOptions;
    private readonly DbConnectionPoolKey _poolKey;
    private readonly DbConnectionPoolGroupOptions _poolGroupOptions;
    // One pool per identity; swapped wholesale (never mutated in place) by
    // Clear and Prune while holding the `this` lock.
    private ConcurrentDictionary<DbConnectionPoolIdentity, DbConnectionPool> _poolCollection;

    private int _state; // see PoolGroupState* below
    private DbConnectionPoolGroupProviderInfo _providerInfo;

    // always lock this before changing _state, we don't want to move out of the 'Disabled' state
    // PoolGroupStateUninitialized = 0;
    private const int PoolGroupStateActive = 1; // initial state, GetPoolGroup from cache, connection Open
    private const int PoolGroupStateIdle = 2; // all pools are pruned via Clear
    private const int PoolGroupStateDisabled = 4; // factory pool entry pruning method

    internal DbConnectionPoolGroup(DbConnectionOptions connectionOptions, DbConnectionPoolKey key, DbConnectionPoolGroupOptions poolGroupOptions)
    {
        Debug.Assert(null != connectionOptions, "null connection options");

        _connectionOptions = connectionOptions;
        _poolKey = key;
        _poolGroupOptions = poolGroupOptions;

        // always lock this object before changing state
        // HybridDictionary does not create any sub-objects until add
        // so it is safe to use for non-pooled connection as long as
        // we check _poolGroupOptions first
        _poolCollection = new ConcurrentDictionary<DbConnectionPoolIdentity, DbConnectionPool>();
        _state = PoolGroupStateActive;
    }

    internal DbConnectionOptions ConnectionOptions
    {
        get
        {
            return _connectionOptions;
        }
    }

    internal DbConnectionPoolKey PoolKey
    {
        get
        {
            return _poolKey;
        }
    }

    internal DbConnectionPoolGroupProviderInfo ProviderInfo
    {
        get
        {
            return _providerInfo;
        }
        set
        {
            _providerInfo = value;
            if (null != value)
            {
                // back-link so the provider info can reach its owning group
                _providerInfo.PoolGroup = this;
            }
        }
    }

    internal bool IsDisabled
    {
        get
        {
            return (PoolGroupStateDisabled == _state);
        }
    }

    internal DbConnectionPoolGroupOptions PoolGroupOptions
    {
        get
        {
            return _poolGroupOptions;
        }
    }

    internal int Clear()
    {
        // must be multi-thread safe with competing calls by Clear and Prune via background thread
        // will return the number of connections in the group after clearing has finished

        // First, note the old collection and create a new collection to be used
        ConcurrentDictionary<DbConnectionPoolIdentity, DbConnectionPool> oldPoolCollection = null;
        lock (this)
        {
            if (_poolCollection.Count > 0)
            {
                oldPoolCollection = _poolCollection;
                _poolCollection = new ConcurrentDictionary<DbConnectionPoolIdentity, DbConnectionPool>();
            }
        }

        // Then, if a new collection was created, release the pools from the old collection
        // (done outside the lock - QueuePoolForRelease may be slow).
        if (oldPoolCollection != null)
        {
            foreach (var entry in oldPoolCollection)
            {
                DbConnectionPool pool = entry.Value;
                if (pool != null)
                {
                    DbConnectionFactory connectionFactory = pool.ConnectionFactory;
                    connectionFactory.QueuePoolForRelease(pool, true);
                }
            }
        }

        // Finally, return the pool collection count - this may be non-zero if something was added while we were clearing
        return _poolCollection.Count;
    }

    internal DbConnectionPool GetConnectionPool(DbConnectionFactory connectionFactory)
    {
        // When this method returns null it indicates that the connection
        // factory should not use pooling.

        // We don't support connection pooling on Win9x;
        // PoolGroupOptions will only be null when we're not supposed to pool
        // connections.
        DbConnectionPool pool = null;
        if (null != _poolGroupOptions)
        {
            DbConnectionPoolIdentity currentIdentity = DbConnectionPoolIdentity.NoIdentity;

            if (_poolGroupOptions.PoolByIdentity)
            {
                // if we're pooling by identity (because integrated security is
                // being used for these connections) then we need to go out and
                // search for the connectionPool that matches the current identity.
                currentIdentity = DbConnectionPoolIdentity.GetCurrent();

                // If the current token is restricted in some way, then we must
                // not attempt to pool these connections.
                if (currentIdentity.IsRestricted)
                {
                    currentIdentity = null;
                }
            }

            if (null != currentIdentity)
            {
                // lock-free fast path first; fall into the locked slow path
                // only when no pool exists for this identity yet.
                if (!_poolCollection.TryGetValue(currentIdentity, out pool))
                { // find the pool

                    DbConnectionPoolProviderInfo connectionPoolProviderInfo = connectionFactory.CreateConnectionPoolProviderInfo(this.ConnectionOptions);

                    // optimistically create pool, but its callbacks are delayed until after actual add
                    DbConnectionPool newPool = new DbConnectionPool(connectionFactory, this, currentIdentity, connectionPoolProviderInfo);

                    lock (this)
                    {
                        // Did someone already add it to the list?
                        if (!_poolCollection.TryGetValue(currentIdentity, out pool))
                        {
                            if (MarkPoolGroupAsActive())
                            {
                                // If we get here, we know for certain that we there isn't
                                // a pool that matches the current identity, so we have to
                                // add the optimistically created one
                                newPool.Startup(); // must start pool before usage
                                bool addResult = _poolCollection.TryAdd(currentIdentity, newPool);
                                Debug.Assert(addResult, "No other pool with current identity should exist at this point");
                                pool = newPool;
                                newPool = null; // ownership transferred; suppress Shutdown below
                            }
                            else
                            {
                                // else pool entry has been disabled so don't create new pools
                                Debug.Assert(PoolGroupStateDisabled == _state, "state should be disabled");
                            }
                        }
                        else
                        {
                            // else found an existing pool to use instead
                            Debug.Assert(PoolGroupStateActive == _state, "state should be active since a pool exists and lock holds");
                        }
                    }

                    if (null != newPool)
                    {
                        // don't need to call connectionFactory.QueuePoolForRelease(newPool) because
                        // pool callbacks were delayed and no risk of connections being created
                        newPool.Shutdown();
                    }
                }
                // the found pool could be in any state
            }
        }

        if (null == pool)
        {
            lock (this)
            {
                // keep the pool entry state active when not pooling
                MarkPoolGroupAsActive();
            }
        }
        return pool;
    }

    private bool MarkPoolGroupAsActive()
    {
        // when getting a connection, make the entry active if it was idle (but not disabled)
        // must always lock this before calling
        if (PoolGroupStateIdle == _state)
        {
            _state = PoolGroupStateActive;
        }
        return (PoolGroupStateActive == _state);
    }

    internal bool Prune()
    {
        // must only call from DbConnectionFactory.PruneConnectionPoolGroups on background timer thread
        // must lock(DbConnectionFactory._connectionPoolGroups.SyncRoot) before calling ReadyToRemove
        //     to avoid conflict with DbConnectionFactory.CreateConnectionPoolGroup replacing pool entry
        lock (this)
        {
            if (_poolCollection.Count > 0)
            {
                var newPoolCollection = new ConcurrentDictionary<DbConnectionPoolIdentity, DbConnectionPool>();

                foreach (var entry in _poolCollection)
                {
                    DbConnectionPool pool = entry.Value;
                    if (pool != null)
                    {
                        // Actually prune the pool if there are no connections in the pool and no errors occurred.
                        // Empty pool during pruning indicates zero or low activity, but
                        //  an error state indicates the pool needs to stay around to
                        //  throttle new connection attempts.
                        if ((!pool.ErrorOccurred) && (0 == pool.Count))
                        {
                            // Order is important here.  First we remove the pool
                            // from the collection of pools so no one will try
                            // to use it while we're processing and finally we put the
                            // pool into a list of pools to be released when they
                            // are completely empty.
                            DbConnectionFactory connectionFactory = pool.ConnectionFactory;
                            connectionFactory.QueuePoolForRelease(pool, false);
                        }
                        else
                        {
                            // pool survives this pruning pass
                            newPoolCollection.TryAdd(entry.Key, entry.Value);
                        }
                    }
                }
                _poolCollection = newPoolCollection;
            }

            // must be pruning thread to change state and no connections
            // otherwise pruning thread risks making entry disabled soon after user calls ClearPool
            // Two-step demotion: Active -> Idle on one pass, Idle -> Disabled
            // on the next, so a group must stay empty across two prune cycles
            // before the factory may drop it.
            if (0 == _poolCollection.Count)
            {
                if (PoolGroupStateActive == _state)
                {
                    _state = PoolGroupStateIdle;
                }
                else if (PoolGroupStateIdle == _state)
                {
                    _state = PoolGroupStateDisabled;
                }
            }
            return (PoolGroupStateDisabled == _state);
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace System.IO
{
// This class implements a TextWriter for writing characters to a Stream.
// This is designed for character output in a particular Encoding,
// whereas the Stream class is designed for byte input and output.
public class StreamWriter : TextWriter
{
// For UTF-8, the values of 1K for the default buffer size and 4K for the
// file stream buffer size are reasonable & give very reasonable
// performance for in terms of construction time for the StreamWriter and
// write perf. Note that for UTF-8, we end up allocating a 4K byte buffer,
// which means we take advantage of adaptive buffering code.
// The performance using UnicodeEncoding is acceptable.
private const int DefaultBufferSize = 1024; // char[]
private const int DefaultFileStreamBufferSize = 4096;
private const int MinBufferSize = 128;
// Bit bucket - Null has no backing store. Non closable.
public static new readonly StreamWriter Null = new StreamWriter(Stream.Null, UTF8NoBOM, MinBufferSize, leaveOpen: true);

private readonly Stream _stream;
private readonly Encoding _encoding;
private readonly Encoder _encoder;    // stateful: may hold a partial surrogate pair between writes
private readonly byte[] _byteBuffer;  // encoded-output staging buffer
private readonly char[] _charBuffer;  // unencoded char staging buffer
private int _charPos;                 // next free slot in _charBuffer
private int _charLen;                 // usable length of _charBuffer (zeroed on dispose)
private bool _autoFlush;
private bool _haveWrittenPreamble;    // encoding preamble (BOM) is emitted at most once
private readonly bool _closable;      // false when constructed with leaveOpen: true
private bool _disposed;

// We don't guarantee thread safety on StreamWriter, but we should at
// least prevent users from trying to write anything while an Async
// write from the same thread is in progress.
private Task _asyncWriteTask = Task.CompletedTask;
private void CheckAsyncTaskInProgress()
{
    // Not a thread-safety guarantee: merely a deterrent against mixing
    // synchronous Write APIs with an outstanding async write from the
    // same thread (hence no locking around _asyncWriteTask).
    if (!_asyncWriteTask.IsCompleted)
        ThrowAsyncIOInProgress();
}
[DoesNotReturn]
private static void ThrowAsyncIOInProgress()
{
    // Kept out-of-line so the cold throw path does not bloat callers.
    throw new InvalidOperationException(SR.InvalidOperation_AsyncIOInProgress);
}
// The high level goal is to be tolerant of encoding errors when we read and very strict
// when we write. Hence, default StreamWriter encoding will throw on encoding error.
// Note: when StreamWriter throws on invalid encoding chars (for ex, high surrogate character
// D800-DBFF without a following low surrogate character DC00-DFFF), it will cause the
// internal StreamWriter's state to be irrecoverable as it would have buffered the
// illegal chars and any subsequent call to Flush() would hit the encoding error again.
// Even Close() will hit the exception as it would try to flush the unwritten data.
// Maybe we can add a DiscardBufferedData() method to get out of such situation (like
// StreamReader though for different reason). Either way, the buffered data will be lost!
// Cached shared UTF-8 (no BOM) encoding used as the default for all ctors.
private static Encoding UTF8NoBOM => EncodingCache.UTF8NoBOM;
// Convenience overload: UTF-8 (no BOM), default buffer, stream is owned.
public StreamWriter(Stream stream)
    : this(stream, UTF8NoBOM, DefaultBufferSize, false)
{
}

// Convenience overload: caller-supplied encoding, default buffer, stream is owned.
public StreamWriter(Stream stream, Encoding encoding)
    : this(stream, encoding, DefaultBufferSize, false)
{
}

// Creates a new StreamWriter for the given stream.  The
// character encoding is set by encoding and the buffer size,
// in number of 16-bit characters, is set by bufferSize.
//
public StreamWriter(Stream stream, Encoding encoding, int bufferSize)
    : this(stream, encoding, bufferSize, false)
{
}
/// Core constructor all stream-based overloads chain to. bufferSize is in
/// 16-bit chars; -1 selects the default. leaveOpen: true means Dispose will
/// flush but not close the underlying stream.
public StreamWriter(Stream stream, Encoding? encoding = null, int bufferSize = -1, bool leaveOpen = false)
    : base(null) // Ask for CurrentCulture all the time
{
    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }

    encoding ??= UTF8NoBOM;

    if (!stream.CanWrite)
    {
        throw new ArgumentException(SR.Argument_StreamNotWritable);
    }

    if (bufferSize == -1)
    {
        bufferSize = DefaultBufferSize;
    }
    else if (bufferSize <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(bufferSize), SR.ArgumentOutOfRange_NeedPosNum);
    }

    _stream = stream;
    _encoding = encoding;
    _encoder = _encoding.GetEncoder();

    // Quietly round small requests up to the minimum buffer size.
    bufferSize = Math.Max(bufferSize, MinBufferSize);
    _charBuffer = new char[bufferSize];
    // Byte buffer must hold the worst-case encoding of a full char buffer.
    _byteBuffer = new byte[_encoding.GetMaxByteCount(bufferSize)];
    _charLen = bufferSize;

    // If we're appending to a Stream that already has data, don't write
    // the preamble.
    if (_stream.CanSeek && _stream.Position > 0)
    {
        _haveWrittenPreamble = true;
    }

    _closable = !leaveOpen;
}
// Path-based overloads: open (or append to) the file at `path` with the
// given encoding; the resulting FileStream is always owned (closed on Dispose).
public StreamWriter(string path)
    : this(path, false, UTF8NoBOM, DefaultBufferSize)
{
}

public StreamWriter(string path, bool append)
    : this(path, append, UTF8NoBOM, DefaultBufferSize)
{
}

public StreamWriter(string path, bool append, Encoding encoding)
    : this(path, append, encoding, DefaultBufferSize)
{
}

public StreamWriter(string path, bool append, Encoding encoding, int bufferSize) :
    this(ValidateArgsAndOpenPath(path, append, encoding, bufferSize), encoding, bufferSize, leaveOpen: false)
{
}
private static Stream ValidateArgsAndOpenPath(string path, bool append, Encoding encoding, int bufferSize)
{
    // Argument checks live here rather than in the ctor body because the
    // stream must exist before the chained ctor runs.
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath);
    if (bufferSize <= 0)
        throw new ArgumentOutOfRangeException(nameof(bufferSize), SR.ArgumentOutOfRange_NeedPosNum);

    FileMode mode = append ? FileMode.Append : FileMode.Create;
    return new FileStream(path, mode, FileAccess.Write, FileShare.Read,
                          DefaultFileStreamBufferSize, FileOptions.SequentialScan);
}
// Close is equivalent to Dispose: flush buffered data and (if closable)
// close the underlying stream; finalization is then unnecessary.
public override void Close()
{
    Dispose(true);
    GC.SuppressFinalize(this);
}
protected override void Dispose(bool disposing)
{
    try
    {
        // We need to flush any buffered data if we are being closed/disposed.
        // Also, we never close the handles for stdout & friends. So we can safely
        // write any buffered data to those streams even during finalization, which
        // is generally the right thing to do.
        if (!_disposed && disposing)
        {
            // Note: flush on the underlying stream can throw (ex., low disk space)
            CheckAsyncTaskInProgress();
            Flush(flushStream: true, flushEncoder: true);
        }
    }
    finally
    {
        // Runs even when Flush threw, so the stream is still closed and
        // internal state is torn down.
        CloseStreamFromDispose(disposing);
    }
}
private void CloseStreamFromDispose(bool disposing)
{
// Dispose of our resources if this StreamWriter is closable.
if (_closable && !_disposed)
{
try
{
// Attempt to close the stream even if there was an IO error from Flushing.
// Note that Stream.Close() can potentially throw here (may or may not be
// due to the same Flush error). In this case, we still need to ensure
// cleaning up internal resources, hence the finally block.
if (disposing)
{
_stream.Close();
}
}
finally
{
_disposed = true;
_charLen = 0;
base.Dispose(disposing);
}
}
}
// Async dispose. The optimized StreamWriter path is only safe when no subclass has
// overridden write/flush behavior; otherwise defer to the base implementation.
public override ValueTask DisposeAsync()
{
    if (GetType() != typeof(StreamWriter))
    {
        return base.DisposeAsync();
    }
    return DisposeAsyncCore();
}
// Async counterpart of Dispose(true): flush asynchronously, then close the stream
// synchronously via the shared helper.
private async ValueTask DisposeAsyncCore()
{
    // Same logic as in Dispose(), but with async flushing.
    Debug.Assert(GetType() == typeof(StreamWriter));
    try
    {
        if (!_disposed)
        {
            await FlushAsync().ConfigureAwait(false);
        }
    }
    finally
    {
        // Close the stream and mark disposed even if the flush failed.
        CloseStreamFromDispose(disposing: true);
    }
    GC.SuppressFinalize(this);
}
// Public Flush: drains the char buffer, flushes any pending encoder state, and
// flushes the underlying stream. Throws if an async write is already in flight.
public override void Flush()
{
    CheckAsyncTaskInProgress();
    Flush(flushStream: true, flushEncoder: true);
}
// Core flush: encodes buffered chars to bytes, writes them (plus the encoding
// preamble, once) to the stream, and optionally flushes the stream itself.
private void Flush(bool flushStream, bool flushEncoder)
{
    // flushEncoder should be true at the end of the file and if
    // the user explicitly calls Flush (though not if AutoFlush is true).
    // This is required to flush any dangling characters from our UTF-7
    // and UTF-8 encoders.
    ThrowIfDisposed();

    // Perf boost for Flush on non-dirty writers.
    if (_charPos == 0 && !flushStream && !flushEncoder)
    {
        return;
    }

    // Emit the BOM/preamble exactly once, before the first payload bytes.
    if (!_haveWrittenPreamble)
    {
        _haveWrittenPreamble = true;
        ReadOnlySpan<byte> preamble = _encoding.Preamble;
        if (preamble.Length > 0)
        {
            _stream.Write(preamble);
        }
    }

    // flushEncoder forces the encoder to emit any buffered partial characters.
    int count = _encoder.GetBytes(_charBuffer, 0, _charPos, _byteBuffer, 0, flushEncoder);
    _charPos = 0;
    if (count > 0)
    {
        _stream.Write(_byteBuffer, 0, count);
    }

    // By definition, calling Flush should flush the stream, but this is
    // only necessary if we passed in true for flushStream. The Web
    // Services guys have some perf tests where flushing needlessly hurts.
    if (flushStream)
    {
        _stream.Flush();
    }
}

// When true, every Write flushes through to the stream. The encoder is NOT flushed,
// so partial surrogate pairs survive across writes.
public virtual bool AutoFlush
{
    get => _autoFlush;
    set
    {
        CheckAsyncTaskInProgress();
        _autoFlush = value;
        if (value)
        {
            Flush(true, false);
        }
    }
}

// The stream this writer encodes into.
public virtual Stream BaseStream => _stream;

// The character encoding used to convert chars to bytes.
public override Encoding Encoding => _encoding;
// Writes a single character, flushing the char buffer into the stream when full.
public override void Write(char value)
{
    CheckAsyncTaskInProgress();

    if (_charPos == _charLen)
    {
        Flush(false, false);
    }

    _charBuffer[_charPos] = value;
    _charPos++;

    if (_autoFlush)
    {
        Flush(true, false);
    }
}

// Writes an entire char array; a null array converts to an empty span, so it is a no-op.
[MethodImpl(MethodImplOptions.NoInlining)] // prevent WriteSpan from bloating call sites
public override void Write(char[]? buffer)
{
    WriteSpan(buffer, appendNewLine: false);
}

// Writes a slice of a char array after validating the (buffer, index, count) triple.
[MethodImpl(MethodImplOptions.NoInlining)] // prevent WriteSpan from bloating call sites
public override void Write(char[] buffer, int index, int count)
{
    if (buffer == null)
    {
        throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
    }
    if (index < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (count < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (buffer.Length - index < count)
    {
        throw new ArgumentException(SR.Argument_InvalidOffLen);
    }

    WriteSpan(buffer.AsSpan(index, count), appendNewLine: false);
}

// Span overload; only take the optimized path when no subclass could have overridden
// the char-based Write methods.
[MethodImpl(MethodImplOptions.NoInlining)] // prevent WriteSpan from bloating call sites
public override void Write(ReadOnlySpan<char> buffer)
{
    if (GetType() == typeof(StreamWriter))
    {
        WriteSpan(buffer, appendNewLine: false);
    }
    else
    {
        // If a derived class may have overridden existing Write behavior,
        // we need to make sure we use it.
        base.Write(buffer);
    }
}
// Copies a span of chars into the internal char buffer, flushing to the stream as
// the buffer fills, optionally followed by the newline sequence (CoreNewLine).
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private unsafe void WriteSpan(ReadOnlySpan<char> buffer, bool appendNewLine)
{
    CheckAsyncTaskInProgress();

    if (buffer.Length <= 4 && // Threshold of 4 chosen based on perf experimentation
        buffer.Length <= _charLen - _charPos)
    {
        // For very short buffers and when we don't need to worry about running out of space
        // in the char buffer, just copy the chars individually.
        for (int i = 0; i < buffer.Length; i++)
        {
            _charBuffer[_charPos++] = buffer[i];
        }
    }
    else
    {
        // For larger buffers or when we may run out of room in the internal char buffer, copy in chunks.
        // Use unsafe code until https://github.com/dotnet/coreclr/issues/13827 is addressed, as spans are
        // resulting in significant overhead (even when the if branch above is taken rather than this
        // else) due to temporaries that need to be cleared. Given the use of unsafe code, we also
        // make local copies of instance state to protect against potential concurrent misuse.

        // ThrowIfDisposed here guards the pinned charBuffer access below.
        ThrowIfDisposed();
        char[] charBuffer = _charBuffer;

        fixed (char* bufferPtr = &MemoryMarshal.GetReference(buffer))
        fixed (char* dstPtr = &charBuffer[0])
        {
            char* srcPtr = bufferPtr;
            int count = buffer.Length;
            int dstPos = _charPos; // use a local copy of _charPos for safety
            while (count > 0)
            {
                if (dstPos == charBuffer.Length)
                {
                    // Flush resets _charPos to 0; mirror that in the local copy.
                    Flush(false, false);
                    dstPos = 0;
                }

                int n = Math.Min(charBuffer.Length - dstPos, count);
                int bytesToCopy = n * sizeof(char);

                Buffer.MemoryCopy(srcPtr, dstPtr + dstPos, bytesToCopy, bytesToCopy);
                _charPos += n;
                dstPos += n;
                srcPtr += n;
                count -= n;
            }
        }
    }

    if (appendNewLine)
    {
        char[] coreNewLine = CoreNewLine;
        for (int i = 0; i < coreNewLine.Length; i++) // Generally 1 (\n) or 2 (\r\n) iterations
        {
            if (_charPos == _charLen)
            {
                Flush(false, false);
            }

            _charBuffer[_charPos] = coreNewLine[i];
            _charPos++;
        }
    }

    if (_autoFlush)
    {
        Flush(true, false);
    }
}
// Writes a string; null converts to an empty span, so it is a no-op.
[MethodImpl(MethodImplOptions.NoInlining)] // prevent WriteSpan from bloating call sites
public override void Write(string? value)
{
    WriteSpan(value, appendNewLine: false);
}

// Writes a string (null allowed) followed by the line terminator.
[MethodImpl(MethodImplOptions.NoInlining)] // prevent WriteSpan from bloating call sites
public override void WriteLine(string? value)
{
    CheckAsyncTaskInProgress();
    WriteSpan(value, appendNewLine: true);
}

// Span overload of WriteLine; fast path only when not subclassed.
[MethodImpl(MethodImplOptions.NoInlining)] // prevent WriteSpan from bloating call sites
public override void WriteLine(ReadOnlySpan<char> value)
{
    if (GetType() == typeof(StreamWriter))
    {
        CheckAsyncTaskInProgress();
        WriteSpan(value, appendNewLine: true);
    }
    else
    {
        // If a derived class may have overridden existing WriteLine behavior,
        // we need to make sure we use it.
        base.WriteLine(value);
    }
}

// Formats into a cached StringBuilder, then streams the builder's chunks through
// WriteSpan, so no intermediate string is allocated.
private void WriteFormatHelper(string format, ParamsArray args, bool appendNewLine)
{
    StringBuilder sb =
        StringBuilderCache.Acquire((format?.Length ?? 0) + args.Length * 8)
        .AppendFormatHelper(null, format!, args); // AppendFormatHelper will appropriately throw ArgumentNullException for a null format

    StringBuilder.ChunkEnumerator chunks = sb.GetChunks();
    bool more = chunks.MoveNext();
    while (more)
    {
        ReadOnlySpan<char> current = chunks.Current.Span;
        more = chunks.MoveNext();

        // If final chunk, include the newline if needed
        WriteSpan(current, appendNewLine: more ? false : appendNewLine);
    }

    StringBuilderCache.Release(sb);
}
// Composite-format Write/WriteLine overloads. Each takes the allocation-free
// WriteFormatHelper path only when this instance is exactly StreamWriter; any
// subclass falls back to the base implementation so overridden Write methods
// are still honored.
public override void Write(string format, object? arg0)
{
    if (GetType() == typeof(StreamWriter))
    {
        WriteFormatHelper(format, new ParamsArray(arg0), appendNewLine: false);
    }
    else
    {
        base.Write(format, arg0);
    }
}

public override void Write(string format, object? arg0, object? arg1)
{
    if (GetType() == typeof(StreamWriter))
    {
        WriteFormatHelper(format, new ParamsArray(arg0, arg1), appendNewLine: false);
    }
    else
    {
        base.Write(format, arg0, arg1);
    }
}

public override void Write(string format, object? arg0, object? arg1, object? arg2)
{
    if (GetType() == typeof(StreamWriter))
    {
        WriteFormatHelper(format, new ParamsArray(arg0, arg1, arg2), appendNewLine: false);
    }
    else
    {
        base.Write(format, arg0, arg1, arg2);
    }
}

public override void Write(string format, params object?[] arg)
{
    if (GetType() == typeof(StreamWriter))
    {
        if (arg == null)
        {
            throw new ArgumentNullException((format == null) ? nameof(format) : nameof(arg)); // same as base logic
        }
        WriteFormatHelper(format, new ParamsArray(arg), appendNewLine: false);
    }
    else
    {
        base.Write(format, arg);
    }
}

public override void WriteLine(string format, object? arg0)
{
    if (GetType() == typeof(StreamWriter))
    {
        WriteFormatHelper(format, new ParamsArray(arg0), appendNewLine: true);
    }
    else
    {
        base.WriteLine(format, arg0);
    }
}

public override void WriteLine(string format, object? arg0, object? arg1)
{
    if (GetType() == typeof(StreamWriter))
    {
        WriteFormatHelper(format, new ParamsArray(arg0, arg1), appendNewLine: true);
    }
    else
    {
        base.WriteLine(format, arg0, arg1);
    }
}

public override void WriteLine(string format, object? arg0, object? arg1, object? arg2)
{
    if (GetType() == typeof(StreamWriter))
    {
        WriteFormatHelper(format, new ParamsArray(arg0, arg1, arg2), appendNewLine: true);
    }
    else
    {
        base.WriteLine(format, arg0, arg1, arg2);
    }
}

public override void WriteLine(string format, params object?[] arg)
{
    if (GetType() == typeof(StreamWriter))
    {
        if (arg == null)
        {
            throw new ArgumentNullException(nameof(arg));
        }
        WriteFormatHelper(format, new ParamsArray(arg), appendNewLine: true);
    }
    else
    {
        base.WriteLine(format, arg);
    }
}
// Asynchronously writes a single char; records the task so other operations can
// detect an in-flight async write.
public override Task WriteAsync(char value)
{
    // If we have been inherited into a subclass, the following implementation could be incorrect
    // since it does not call through to Write() which a subclass might have overridden.
    // To be safe we will only use this implementation in cases where we know it is safe to do so,
    // and delegate to our base class (which will call into Write) when we are not sure.
    if (GetType() != typeof(StreamWriter))
    {
        return base.WriteAsync(value);
    }

    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    Task task = WriteAsyncInternal(this, value, _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: false);
    _asyncWriteTask = task;

    return task;
}

// We pass in private instance fields of this MarshalByRefObject-derived type as local params
// to ensure performant access inside the state machine that corresponds to this async method.
// Fields that are written to must be assigned at the end of the method *and* before instance invocations.
private static async Task WriteAsyncInternal(StreamWriter _this, char value,
    char[] charBuffer, int charPos, int charLen, char[] coreNewLine,
    bool autoFlush, bool appendNewLine)
{
    if (charPos == charLen)
    {
        await _this.FlushAsyncInternal(false, false, charBuffer, charPos).ConfigureAwait(false);
        Debug.Assert(_this._charPos == 0);
        charPos = 0;
    }

    charBuffer[charPos] = value;
    charPos++;

    if (appendNewLine)
    {
        for (int i = 0; i < coreNewLine.Length; i++) // Expect 2 iterations, no point calling BlockCopy
        {
            if (charPos == charLen)
            {
                await _this.FlushAsyncInternal(false, false, charBuffer, charPos).ConfigureAwait(false);
                Debug.Assert(_this._charPos == 0);
                charPos = 0;
            }

            charBuffer[charPos] = coreNewLine[i];
            charPos++;
        }
    }

    if (autoFlush)
    {
        await _this.FlushAsyncInternal(true, false, charBuffer, charPos).ConfigureAwait(false);
        Debug.Assert(_this._charPos == 0);
        charPos = 0;
    }

    // Publish the local position back to the instance field.
    _this._charPos = charPos;
}
// Asynchronously writes a string; null is a completed no-op.
public override Task WriteAsync(string? value)
{
    // If we have been inherited into a subclass, the following implementation could be incorrect
    // since it does not call through to Write() which a subclass might have overridden.
    // To be safe we will only use this implementation in cases where we know it is safe to do so,
    // and delegate to our base class (which will call into Write) when we are not sure.
    if (GetType() != typeof(StreamWriter))
    {
        return base.WriteAsync(value);
    }

    if (value != null)
    {
        ThrowIfDisposed();
        CheckAsyncTaskInProgress();

        Task task = WriteAsyncInternal(this, value, _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: false);
        _asyncWriteTask = task;

        return task;
    }
    else
    {
        return Task.CompletedTask;
    }
}

// We pass in private instance fields of this MarshalByRefObject-derived type as local params
// to ensure performant access inside the state machine that corresponds to this async method.
// Fields that are written to must be assigned at the end of the method *and* before instance invocations.
private static async Task WriteAsyncInternal(StreamWriter _this, string value,
    char[] charBuffer, int charPos, int charLen, char[] coreNewLine,
    bool autoFlush, bool appendNewLine)
{
    Debug.Assert(value != null);

    int count = value.Length;
    int index = 0;

    // Copy the string into the char buffer chunk-by-chunk, flushing whenever full.
    while (count > 0)
    {
        if (charPos == charLen)
        {
            await _this.FlushAsyncInternal(false, false, charBuffer, charPos).ConfigureAwait(false);
            Debug.Assert(_this._charPos == 0);
            charPos = 0;
        }

        int n = charLen - charPos;
        if (n > count)
        {
            n = count;
        }

        Debug.Assert(n > 0, "StreamWriter::Write(String) isn't making progress!  This is most likely a race condition in user code.");

        value.CopyTo(index, charBuffer, charPos, n);

        charPos += n;
        index += n;
        count -= n;
    }

    if (appendNewLine)
    {
        for (int i = 0; i < coreNewLine.Length; i++) // Expect 2 iterations, no point calling BlockCopy
        {
            if (charPos == charLen)
            {
                await _this.FlushAsyncInternal(false, false, charBuffer, charPos).ConfigureAwait(false);
                Debug.Assert(_this._charPos == 0);
                charPos = 0;
            }

            charBuffer[charPos] = coreNewLine[i];
            charPos++;
        }
    }

    if (autoFlush)
    {
        await _this.FlushAsyncInternal(true, false, charBuffer, charPos).ConfigureAwait(false);
        Debug.Assert(_this._charPos == 0);
        charPos = 0;
    }

    // Publish the local position back to the instance field.
    _this._charPos = charPos;
}
// Asynchronously writes a slice of a char array after validating the triple.
public override Task WriteAsync(char[] buffer, int index, int count)
{
    if (buffer == null)
    {
        throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
    }
    if (index < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (count < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (buffer.Length - index < count)
    {
        throw new ArgumentException(SR.Argument_InvalidOffLen);
    }

    // If we have been inherited into a subclass, the following implementation could be incorrect
    // since it does not call through to Write() which a subclass might have overridden.
    // To be safe we will only use this implementation in cases where we know it is safe to do so,
    // and delegate to our base class (which will call into Write) when we are not sure.
    if (GetType() != typeof(StreamWriter))
    {
        return base.WriteAsync(buffer, index, count);
    }

    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    Task task = WriteAsyncInternal(this, new ReadOnlyMemory<char>(buffer, index, count), _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: false, cancellationToken: default);
    _asyncWriteTask = task;

    return task;
}

// Asynchronously writes a memory of chars, honoring cancellation.
public override Task WriteAsync(ReadOnlyMemory<char> buffer, CancellationToken cancellationToken = default)
{
    if (GetType() != typeof(StreamWriter))
    {
        // If a derived type may have overridden existing WriteAsync(char[], ...) behavior, make sure we use it.
        return base.WriteAsync(buffer, cancellationToken);
    }

    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled(cancellationToken);
    }

    Task task = WriteAsyncInternal(this, buffer, _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: false, cancellationToken: cancellationToken);
    _asyncWriteTask = task;

    return task;
}

// We pass in private instance fields of this MarshalByRefObject-derived type as local params
// to ensure performant access inside the state machine that corresponds to this async method.
// Fields that are written to must be assigned at the end of the method *and* before instance invocations.
private static async Task WriteAsyncInternal(StreamWriter _this, ReadOnlyMemory<char> source,
    char[] charBuffer, int charPos, int charLen, char[] coreNewLine,
    bool autoFlush, bool appendNewLine, CancellationToken cancellationToken)
{
    int copied = 0;
    while (copied < source.Length)
    {
        if (charPos == charLen)
        {
            await _this.FlushAsyncInternal(false, false, charBuffer, charPos, cancellationToken).ConfigureAwait(false);
            Debug.Assert(_this._charPos == 0);
            charPos = 0;
        }

        int n = Math.Min(charLen - charPos, source.Length - copied);
        Debug.Assert(n > 0, "StreamWriter::Write(char[], int, int) isn't making progress!  This is most likely a race condition in user code.");

        source.Span.Slice(copied, n).CopyTo(new Span<char>(charBuffer, charPos, n));

        charPos += n;
        copied += n;
    }

    if (appendNewLine)
    {
        for (int i = 0; i < coreNewLine.Length; i++) // Expect 2 iterations, no point calling BlockCopy
        {
            if (charPos == charLen)
            {
                await _this.FlushAsyncInternal(false, false, charBuffer, charPos, cancellationToken).ConfigureAwait(false);
                Debug.Assert(_this._charPos == 0);
                charPos = 0;
            }

            charBuffer[charPos] = coreNewLine[i];
            charPos++;
        }
    }

    if (autoFlush)
    {
        await _this.FlushAsyncInternal(true, false, charBuffer, charPos, cancellationToken).ConfigureAwait(false);
        Debug.Assert(_this._charPos == 0);
        charPos = 0;
    }

    // Publish the local position back to the instance field.
    _this._charPos = charPos;
}
// Asynchronously writes just the line terminator (an empty payload with appendNewLine).
public override Task WriteLineAsync()
{
    // If we have been inherited into a subclass, the following implementation could be incorrect
    // since it does not call through to Write() which a subclass might have overridden.
    // To be safe we will only use this implementation in cases where we know it is safe to do so,
    // and delegate to our base class (which will call into Write) when we are not sure.
    if (GetType() != typeof(StreamWriter))
    {
        return base.WriteLineAsync();
    }

    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    Task task = WriteAsyncInternal(this, ReadOnlyMemory<char>.Empty, _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: true, cancellationToken: default);
    _asyncWriteTask = task;

    return task;
}

// Asynchronously writes a char followed by the line terminator.
public override Task WriteLineAsync(char value)
{
    // If we have been inherited into a subclass, the following implementation could be incorrect
    // since it does not call through to Write() which a subclass might have overridden.
    // To be safe we will only use this implementation in cases where we know it is safe to do so,
    // and delegate to our base class (which will call into Write) when we are not sure.
    if (GetType() != typeof(StreamWriter))
    {
        return base.WriteLineAsync(value);
    }

    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    Task task = WriteAsyncInternal(this, value, _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: true);
    _asyncWriteTask = task;

    return task;
}

// Asynchronously writes a string followed by the line terminator; null writes only the terminator.
public override Task WriteLineAsync(string? value)
{
    if (value == null)
    {
        return WriteLineAsync();
    }

    // If we have been inherited into a subclass, the following implementation could be incorrect
    // since it does not call through to Write() which a subclass might have overridden.
    // To be safe we will only use this implementation in cases where we know it is safe to do so,
    // and delegate to our base class (which will call into Write) when we are not sure.
    if (GetType() != typeof(StreamWriter))
    {
        return base.WriteLineAsync(value);
    }

    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    Task task = WriteAsyncInternal(this, value, _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: true);
    _asyncWriteTask = task;

    return task;
}

// Asynchronously writes a validated array slice followed by the line terminator.
public override Task WriteLineAsync(char[] buffer, int index, int count)
{
    if (buffer == null)
    {
        throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
    }
    if (index < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (count < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (buffer.Length - index < count)
    {
        throw new ArgumentException(SR.Argument_InvalidOffLen);
    }

    // If we have been inherited into a subclass, the following implementation could be incorrect
    // since it does not call through to Write() which a subclass might have overridden.
    // To be safe we will only use this implementation in cases where we know it is safe to do so,
    // and delegate to our base class (which will call into Write) when we are not sure.
    if (GetType() != typeof(StreamWriter))
    {
        return base.WriteLineAsync(buffer, index, count);
    }

    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    Task task = WriteAsyncInternal(this, new ReadOnlyMemory<char>(buffer, index, count), _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: true, cancellationToken: default);
    _asyncWriteTask = task;

    return task;
}

// Asynchronously writes a memory of chars followed by the line terminator, honoring cancellation.
public override Task WriteLineAsync(ReadOnlyMemory<char> buffer, CancellationToken cancellationToken = default)
{
    if (GetType() != typeof(StreamWriter))
    {
        return base.WriteLineAsync(buffer, cancellationToken);
    }

    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled(cancellationToken);
    }

    Task task = WriteAsyncInternal(this, buffer, _charBuffer, _charPos, _charLen, CoreNewLine, _autoFlush, appendNewLine: true, cancellationToken: cancellationToken);
    _asyncWriteTask = task;

    return task;
}
// Asynchronously flushes buffered chars, encoder state, and the underlying stream.
public override Task FlushAsync()
{
    // If we have been inherited into a subclass, the following implementation could be incorrect
    // since it does not call through to Flush() which a subclass might have overridden.  To be safe
    // we will only use this implementation in cases where we know it is safe to do so,
    // and delegate to our base class (which will call into Flush) when we are not sure.
    if (GetType() != typeof(StreamWriter))
    {
        return base.FlushAsync();
    }

    // flushEncoder should be true at the end of the file and if
    // the user explicitly calls Flush (though not if AutoFlush is true).
    // This is required to flush any dangling characters from our UTF-7
    // and UTF-8 encoders.
    ThrowIfDisposed();
    CheckAsyncTaskInProgress();

    Task task = FlushAsyncInternal(true, true, _charBuffer, _charPos);
    _asyncWriteTask = task;

    return task;
}

// Instance-side async flush entry point: handles cancellation and the non-dirty
// fast path, then resets _charPos before handing off to the static worker.
private Task FlushAsyncInternal(bool flushStream, bool flushEncoder,
    char[] sCharBuffer, int sCharPos, CancellationToken cancellationToken = default)
{
    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled(cancellationToken);
    }

    // Perf boost for Flush on non-dirty writers.
    if (sCharPos == 0 && !flushStream && !flushEncoder)
    {
        return Task.CompletedTask;
    }

    Task flushTask = FlushAsyncInternal(this, flushStream, flushEncoder, sCharBuffer, sCharPos, _haveWrittenPreamble,
        _encoding, _encoder, _byteBuffer, _stream, cancellationToken);

    // _charPos is reset eagerly; the worker operates only on the captured locals.
    _charPos = 0;
    return flushTask;
}

// We pass in private instance fields of this MarshalByRefObject-derived type as local params
// to ensure performant access inside the state machine that corresponds to this async method.
private static async Task FlushAsyncInternal(StreamWriter _this, bool flushStream, bool flushEncoder,
    char[] charBuffer, int charPos, bool haveWrittenPreamble,
    Encoding encoding, Encoder encoder, byte[] byteBuffer, Stream stream, CancellationToken cancellationToken)
{
    // Write the preamble/BOM exactly once, before the first payload bytes.
    if (!haveWrittenPreamble)
    {
        _this._haveWrittenPreamble = true;
        byte[] preamble = encoding.GetPreamble();
        if (preamble.Length > 0)
        {
            await stream.WriteAsync(new ReadOnlyMemory<byte>(preamble), cancellationToken).ConfigureAwait(false);
        }
    }

    int count = encoder.GetBytes(charBuffer, 0, charPos, byteBuffer, 0, flushEncoder);
    if (count > 0)
    {
        await stream.WriteAsync(new ReadOnlyMemory<byte>(byteBuffer, 0, count), cancellationToken).ConfigureAwait(false);
    }

    // By definition, calling Flush should flush the stream, but this is
    // only necessary if we passed in true for flushStream. The Web
    // Services guys have some perf tests where flushing needlessly hurts.
    if (flushStream)
    {
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }
}

// Throws ObjectDisposedException when the writer is disposed; the throw lives in a
// local function so the common path stays inlinable.
private void ThrowIfDisposed()
{
    if (_disposed)
    {
        ThrowObjectDisposedException();
    }

    void ThrowObjectDisposedException() => throw new ObjectDisposedException(GetType().Name, SR.ObjectDisposed_WriterClosed);
}
} // class StreamWriter
} // namespace
| |
//
// MRItem.cs
//
// Author:
// Steve Jakab <>
//
// Copyright (c) 2014 Steve Jakab
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using AssemblyCSharp;
namespace PortableRealm
{
// Base class for game items (treasures, weapons, armor, ...). Registers each
// instance in a global id->item map and with the game's piece registry.
public class MRItem : MRIGamePiece, MRISerializable, MRIColorSource
{
    #region Properties

    // Whether the item is currently active.
    // NOTE(review): the exact meaning of "active" is defined by callers — confirm.
    public bool Active
    {
        get {
            return mIsActive;
        }
        set {
            mIsActive = value;
        }
    }

    // Index used (with the name) to build this item's unique id.
    public int Index
    {
        get {
            return mIndex;
        }
    }

    // Weight strength class parsed from the item's JSON "weight" field.
    public MRGame.eStrength BaseWeight
    {
        get {
            return mBaseWeight;
        }
    }

    // Fame value from the item's JSON data.
    public int BaseFame
    {
        get {
            return mBaseFame;
        }
    }

    // Notoriety value from the item's JSON data.
    public int BaseNotoriety
    {
        get {
            return mBaseNotoriety;
        }
    }

    // Gold price from the item's JSON data.
    public int BasePrice
    {
        get {
            return mBasePrice;
        }
    }

    // Current price; defaults to the base price, subclasses may adjust.
    public virtual int CurrentPrice
    {
        get {
            return BasePrice;
        }
    }

    // Character currently owning this item (null when unowned).
    public MRCharacter Owner
    {
        get {
            return mOwner;
        }
        set {
            mOwner = value;
        }
    }

    // The first stack this item was placed in (captured by the Stack setter).
    public MRGamePieceStack StartStack
    {
        get {
            return mStartStack;
        }
        set {
            mStartStack = value;
        }
    }

    /**********************/
    // MRIGamePiece properties

    // Unique piece id; setting it also registers the piece with the game.
    public uint Id
    {
        get {
            return mId;
        }
        protected set {
            mId = value;
            MRGame.TheGame.AddGamePiece(this);
        }
    }

    // Unity render layer of the counter and all of its children (valid range 0-31).
    public int Layer
    {
        get {
            return mCounter.layer;
        }
        set {
            if (value >= 0 && value < 32)
            {
                mCounter.layer = value;
                // Propagate the layer to every child object of the counter.
                foreach (Transform transform in mCounter.GetComponentsInChildren<Transform>())
                {
                    transform.gameObject.layer = value;
                }
            }
            else
            {
                Debug.LogError("Trying to set item " + Name + " to layer " + value);
            }
        }
    }

    // World position of the counter GameObject.
    public virtual Vector3 Position
    {
        get {
            return mCounter.transform.position;
        }
        set {
            mCounter.transform.position = value;
        }
    }

    // Local scale of the counter GameObject.
    public virtual Vector3 LocalScale
    {
        get {
            return mCounter.transform.localScale;
        }
        set {
            mCounter.transform.localScale = value;
        }
    }

    // World (lossy) scale of the counter GameObject; read-only.
    public virtual Vector3 LossyScale
    {
        get {
            return mCounter.transform.lossyScale;
        }
    }

    // World rotation of the counter GameObject.
    public virtual Quaternion Rotation
    {
        get {
            return mCounter.transform.rotation;
        }
        set {
            mCounter.transform.rotation = value;
        }
    }

    // Parent transform of the counter GameObject.
    public Transform Parent
    {
        get {
            return mCounter.transform.parent;
        }
        set {
            mCounter.transform.SetParent(value);
        }
    }

    // Sprite bounds of the counter; an empty Bounds when no counter exists.
    public Bounds Bounds
    {
        get {
            if (mCounter != null)
                return mCounter.GetComponentInChildren<SpriteRenderer>().sprite.bounds;
            else
                return new Bounds();
        }
    }

    // Scale remembered before a temporary resize.
    public Vector3 OldScale
    {
        get {
            return mOldScale;
        }
        set {
            mOldScale = value;
        }
    }

    // Item name from the JSON data.
    public string Name
    {
        get {
            return mName;
        }
    }

    // Stack the item currently sits in; the first assignment is also captured
    // as the item's StartStack.
    public MRGamePieceStack Stack
    {
        get {
            return mStack;
        }
        set {
            if (mStartStack == null)
                mStartStack = value;
            mStack = value;
        }
    }

    // Visibility of the counter GameObject.
    public virtual bool Visible
    {
        get {
            return mCounter.activeSelf;
        }
        set {
            MRUtility.SetObjectVisibility(mCounter, value);
        }
    }

    // Sort value used when ordering pieces in a stack.
    public virtual int SortValue
    {
        get {
            return (int)MRGame.eSortValue.Item;
        }
    }

    /**********************/
    // MRIColorSource properties

    /// <summary>
    /// Returns a list of the color magic supplied by this object.
    /// </summary>
    /// <value>The magic supplied.</value>
    public virtual IList<MRGame.eMagicColor> MagicSupplied
    {
        get {
            // Base items supply no color magic; subclasses override as needed.
            List<MRGame.eMagicColor> magic = new List<MRGame.eMagicColor>();
            return magic;
        }
    }

    #endregion

    #region Methods

    protected MRItem()
    {
    }

    // Builds an item from its JSON definition and registers it in the global map.
    protected MRItem(JSONObject data, int index)
    {
        mName = ((JSONString)data["name"]).Value;
        mIndex = index;
        mBaseFame = ((JSONNumber)data["fame"]).IntValue;
        mBaseNotoriety = ((JSONNumber)data["notoriety"]).IntValue;
        mBasePrice = ((JSONNumber)data["gold"]).IntValue;
        string weight = ((JSONString)data["weight"]).Value;
        mBaseWeight = weight.Strength();

        // compute the id by using the item name plus the item index;
        // bump the index until the id is unique among all known items
        uint id = MRUtility.IdForName(mName, index);
        while (msItems.ContainsKey(id))
            id = MRUtility.IdForName(mName, ++index);
        Id = id;
        msItems.Add(id, this);
    }

    // Looks up an item by id; returns null when the id is unknown.
    public static MRItem GetItem(uint id)
    {
        MRItem item = null;
        msItems.TryGetValue(id, out item);
        return item;
    }

    // Update is called once per frame
    public virtual void Update ()
    {
    }

    // Restores state from saved JSON; fails when the saved id does not match.
    public virtual bool Load(JSONObject root)
    {
        if (mId != MRGame.TheGame.GetPieceId(root["id"]))
            return false;
        return true;
    }

    // Writes this item's id into the save-game JSON.
    public virtual void Save(JSONObject root)
    {
        root["id"] = new JSONNumber(mId);
    }

    #endregion

    #region Members

    private uint mId;
    private string mName;
    private int mIndex;
    private MRGame.eStrength mBaseWeight;
    private int mBaseFame;
    private int mBaseNotoriety;
    private int mBasePrice;
    private bool mIsActive;
    private MRCharacter mOwner;
    protected GameObject mCounter;
    protected Vector3 mOldScale;
    protected MRGamePieceStack mStartStack;
    protected MRGamePieceStack mStack;

    // Global registry of all items, keyed by unique id.
    private static IDictionary<uint, MRItem> msItems = new Dictionary<uint, MRItem>();

    #endregion
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Cassette.Scripts;
using Cassette.Stylesheets;
using Cassette.Utilities;
#if NET35
using Iesi.Collections.Generic;
#endif
namespace Cassette
{
class ReferenceBuilder : IReferenceBuilder
{
// Collects bundle references for the current page and hands back the bundles to
// render at each page location.
public ReferenceBuilder(BundleCollection allBundles, IPlaceholderTracker placeholderTracker, IBundleFactoryProvider bundleFactoryProvider, CassetteSettings settings)
{
    this.allBundles = allBundles;
    this.placeholderTracker = placeholderTracker;
    this.bundleFactoryProvider = bundleFactoryProvider;
    this.settings = settings;
}

readonly BundleCollection allBundles;
readonly IPlaceholderTracker placeholderTracker;
readonly IBundleFactoryProvider bundleFactoryProvider;
readonly CassetteSettings settings;
// Bundles referenced so far, grouped by page location (populated via GetOrCreateBundleSet).
readonly Dictionary<string, List<Bundle>> bundlesByLocation = new Dictionary<string, List<Bundle>>();
// Page locations that have already been rendered.
readonly HashedSet<string> renderedLocations = new HashedSet<string>();
// The page location each referenced bundle was assigned to.
readonly Dictionary<Bundle, string> bundlePageLocations = new Dictionary<Bundle, string>();
// Adds a reference to the bundle(s) containing the given path. When the path is a
// URL not present in the collection, an external bundle of type T is created.
public void Reference<T>(string path, string location = null)
    where T : Bundle
{
    using (allBundles.GetReadLock())
    {
        var factory = bundleFactoryProvider.GetBundleFactory<T>();
        var bundles = GetBundles(path, () => factory.CreateExternalBundle(path)).OfType<T>();
#if NET35
        Reference(bundles.Cast<Bundle>(), location);
#else
        Reference(bundles, location);
#endif
    }
}

// Non-generic overload: for unknown URLs the bundle type is inferred from the
// file extension (.js / .css).
public void Reference(string path, string location = null)
{
    using (allBundles.GetReadLock())
    {
        var bundles = GetBundles(path, () => CreateExternalBundleByInferringTypeFromFileExtension(path));
        Reference(bundles, location);
    }
}
// Creates an external bundle for a bare path/URL by sniffing the file extension.
// Throws when the extension is neither .js nor .css, since the bundle type cannot
// then be determined without an explicit generic type parameter.
Bundle CreateExternalBundleByInferringTypeFromFileExtension(string path)
{
    // A querystring on a URL would hide the extension, so strip it before sniffing.
    var pathToExamine = path.IsUrl() ? RemoveQuerystring(path) : path;

    if (pathToExamine.EndsWith(".js", StringComparison.OrdinalIgnoreCase))
    {
        return CreateExternalScriptBundle(path);
    }

    if (pathToExamine.EndsWith(".css", StringComparison.OrdinalIgnoreCase))
    {
        return CreateExternalStylesheetBundle(path);
    }

    throw new ArgumentException(
        string.Format(
            "Cannot determine the type of bundle for the URL \"{0}\". Specify the type using the generic type parameter.",
            path
        )
    );
}
// Returns the URL with everything from the first '?' onward removed; a URL with no
// querystring is returned unchanged.
string RemoveQuerystring(string url)
{
    var queryStart = url.IndexOf('?');
    return queryStart < 0 ? url : url.Substring(0, queryStart);
}
Bundle CreateExternalScriptBundle(string path)
{
var factory = bundleFactoryProvider.GetBundleFactory<ScriptBundle>();
return factory.CreateExternalBundle(path);
}
Bundle CreateExternalStylesheetBundle(string path)
{
var factory = bundleFactoryProvider.GetBundleFactory<StylesheetBundle>();
return factory.CreateExternalBundle(path);
}
IEnumerable<Bundle> GetBundles(string path, Func<Bundle> createExternalBundle)
{
path = PathUtilities.AppRelative(path);
var bundles = allBundles.FindBundlesContainingPath(path).ToArray();
if (bundles.Length == 0 && path.IsUrl())
{
var bundle = createExternalBundle();
bundle.Process(settings);
bundles = new[] { bundle };
}
if (bundles.Length == 0)
{
throw new ArgumentException("Cannot find an asset bundle containing the path \"" + path + "\".");
}
return bundles;
}
// Adds a reference to a single bundle, optionally at an explicit page location.
public void Reference(Bundle bundle, string location = null)
{
    using (allBundles.GetReadLock())
    {
        var single = new[] { bundle };
        Reference(single, location);
    }
}
// Registers each bundle against a page location. When no explicit location is
// given, each bundle's own PageLocation is used. Throws if the location has
// already been rendered and HTML rewriting is disabled.
void Reference(IEnumerable<Bundle> bundles, string location = null)
{
    if (!settings.IsHtmlRewritingEnabled && HasRenderedLocation(location))
    {
        ThrowRewritingRequiredException(location);
    }
    foreach (var bundle in bundles)
    {
        // Bundle can define its own preferred location. Use this when we aren't
        // given an explicit location argument i.e. null. Resolved per bundle so
        // that the first bundle's preference does not leak onto later bundles
        // (the original assigned back into the shared `location` variable).
        var effectiveLocation = location ?? bundle.PageLocation;
        bundlePageLocations[bundle] = effectiveLocation;
        var bundlesForLocation = GetOrCreateBundleSet(effectiveLocation);
        if (bundlesForLocation.Contains(bundle))
        {
            // Already referenced: skip this bundle but keep processing the rest.
            // The original `return` here silently dropped all remaining bundles.
            continue;
        }
        bundlesForLocation.Add(bundle);
    }
}
// True when Render has already been called for this location
// (null is normalized to the empty-string key used by renderedLocations).
bool HasRenderedLocation(string location)
{
    var key = location ?? "";
    return renderedLocations.Contains(key);
}
// Throws an InvalidOperationException explaining that a reference was added
// after the (named or default) location had already been rendered.
void ThrowRewritingRequiredException(string location)
{
    if (!string.IsNullOrEmpty(location))
    {
        throw new InvalidOperationException(
            string.Format(
                "Cannot add a bundle reference, for location \"{0}\". This location has already been rendered. Either move the reference before the render call, or set ICassetteApplication.IsHtmlRewritingEnabled to true in your Cassette configuration.",
                location
            )
        );
    }
    throw new InvalidOperationException(
        "Cannot add a bundle reference. The bundles have already been rendered. Either move the reference before the render call, or set ICassetteApplication.IsHtmlRewritingEnabled to true in your Cassette configuration."
    );
}
// Returns the bundles referenced for the given page location, including their
// transitive references, in dependency-sorted order.
public IEnumerable<Bundle> GetBundles(string location)
{
    // Fetch the location's bundle set once; the original called
    // GetOrCreateBundleSet twice with the same argument and used both results.
    var bundlesForLocation = GetOrCreateBundleSet(location);
    return allBundles
        .IncludeReferencesAndSortBundles(bundlesForLocation)
        .Where(b => bundlesForLocation.Contains(b) || BundlePageLocationIs(b, location));
}
// True when the bundle is assigned to the given location — either via an
// explicit assignment recorded in bundlePageLocations, or by its own default
// PageLocation when no assignment exists.
bool BundlePageLocationIs(Bundle bundle, string location)
{
    string assignedLocation;
    return bundlePageLocations.TryGetValue(bundle, out assignedLocation)
        ? assignedLocation == location
        : bundle.PageLocation == location;
}
// Records that the location has been rendered, then emits a placeholder which
// the placeholder tracker later replaces with the HTML for the T bundles.
public string Render<T>(string location = null)
    where T : Bundle
{
    var renderedKey = location ?? "";
    renderedLocations.Add(renderedKey);
    return placeholderTracker.InsertPlaceholder(() => CreateHtml<T>(location));
}
// Concatenates the rendered HTML of every T bundle for the location,
// one bundle per line.
string CreateHtml<T>(string location)
    where T : Bundle
{
    var htmlFragments = GetBundles(location)
        .OfType<T>()
        .Select(bundle => bundle.Render())
        .ToArray();
    return string.Join(Environment.NewLine, htmlFragments);
}
// Gets the bundle list registered for a location, creating (and caching) an
// empty list on first use.
List<Bundle> GetOrCreateBundleSet(string location)
{
    var key = location ?? ""; // Dictionary doesn't accept null keys.
    List<Bundle> bundles;
    if (!bundlesByLocation.TryGetValue(key, out bundles))
    {
        bundles = new List<Bundle>();
        bundlesByLocation.Add(key, bundles);
    }
    return bundles;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using Internal.TypeSystem;
using Internal.TypeSystem.Interop;
using Debug = System.Diagnostics.Debug;
using Internal.TypeSystem.Ecma;
namespace Internal.IL.Stubs
{
/// <summary>
/// Provides method bodies for PInvoke methods
///
/// This by no means intends to provide full PInvoke support. The intended use of this is to
/// a) prevent calls getting generated to targets that require a full marshaller
/// (this compiler doesn't provide that), and b) offer a hand in some very simple marshalling
/// situations (but support for this part might go away as the product matures).
/// </summary>
public struct PInvokeILEmitter
{
    // The P/Invoke method, or delegate marshalling thunk, IL is emitted for.
    private readonly MethodDesc _targetMethod;
    // One marshaller per signature slot; index 0 is the return value,
    // indices 1..n are the parameters (see InitializeMarshallers).
    private readonly Marshaller[] _marshallers;
    private readonly PInvokeILEmitterConfiguration _pInvokeILEmitterConfiguration;
    private readonly PInvokeMetadata _importMetadata;
    private readonly InteropStateManager _interopStateManager;

    private PInvokeILEmitter(MethodDesc targetMethod, PInvokeILEmitterConfiguration pinvokeILEmitterConfiguration, InteropStateManager interopStateManager)
    {
        Debug.Assert(targetMethod.IsPInvoke || targetMethod is DelegateMarshallingMethodThunk);
        _targetMethod = targetMethod;
        _pInvokeILEmitterConfiguration = pinvokeILEmitterConfiguration;
        _importMetadata = targetMethod.GetPInvokeMethodMetadata();
        _interopStateManager = interopStateManager;

        // P/Invoke flags come from the DllImport metadata for real P/Invokes;
        // for delegate thunks they come from the delegate type itself when it
        // is an ECMA type, otherwise the default flags are used.
        PInvokeFlags flags = new PInvokeFlags();
        if (targetMethod.IsPInvoke)
        {
            flags = _importMetadata.Flags;
        }
        else
        {
            var delegateType = ((DelegateMarshallingMethodThunk)_targetMethod).DelegateType as EcmaType;
            if (delegateType != null)
            {
                flags = delegateType.GetDelegatePInvokeFlags();
            }
        }
        _marshallers = InitializeMarshallers(targetMethod, interopStateManager, flags);
    }

    /// <summary>
    /// Creates one Marshaller per signature slot (return value at index 0,
    /// then each parameter), filling in dummy parameter metadata for slots
    /// that have no metadata row.
    /// </summary>
    private static Marshaller[] InitializeMarshallers(MethodDesc targetMethod, InteropStateManager interopStateManager, PInvokeFlags flags)
    {
        bool isDelegate = targetMethod is DelegateMarshallingMethodThunk;
        MethodSignature methodSig = isDelegate ? ((DelegateMarshallingMethodThunk)targetMethod).DelegateSignature : targetMethod.Signature;
        MarshalDirection direction = isDelegate ? ((DelegateMarshallingMethodThunk)targetMethod).Direction: MarshalDirection.Forward;
        int indexOffset = 0;
        if (!methodSig.IsStatic && direction == MarshalDirection.Forward)
        {
            // For instance methods(eg. Forward delegate marshalling thunk), first argument is
            // the instance
            indexOffset = 1;
        }
        ParameterMetadata[] parameterMetadataArray = targetMethod.GetParameterMetadata();
        Marshaller[] marshallers = new Marshaller[methodSig.Length + 1];
        int parameterIndex = 0;
        ParameterMetadata parameterMetadata;
        for (int i = 0; i < marshallers.Length; i++)
        {
            // parameterMetadataArray is sparse: it only has rows for parameters
            // that carry metadata, ordered by Index. Walk it in lock-step with i.
            Debug.Assert(parameterIndex == parameterMetadataArray.Length || i <= parameterMetadataArray[parameterIndex].Index);
            if (parameterIndex == parameterMetadataArray.Length || i < parameterMetadataArray[parameterIndex].Index)
            {
                // if we don't have metadata for the parameter, create a dummy one
                parameterMetadata = new ParameterMetadata(i, ParameterMetadataAttributes.None, null);
            }
            else
            {
                Debug.Assert(i == parameterMetadataArray[parameterIndex].Index);
                parameterMetadata = parameterMetadataArray[parameterIndex++];
            }
            TypeDesc parameterType = (i == 0) ? methodSig.ReturnType : methodSig[i - 1]; //first item is the return type
            marshallers[i] = Marshaller.CreateMarshaller(parameterType,
                MarshallerType.Argument,
                parameterMetadata.MarshalAsDescriptor,
                direction,
                marshallers,
                interopStateManager,
                indexOffset + parameterMetadata.Index,
                flags,
                parameterMetadata.In,
                parameterMetadata.Out,
                parameterMetadata.Return
                );
        }
        return marshallers;
    }

    /// <summary>
    /// Emits the call portion of a delegate marshalling thunk. The exact IL
    /// depends on the thunk kind: open-static reverse thunks call through a
    /// raw function pointer, closed reverse thunks callvirt the delegate's
    /// Invoke, and forward native-function wrappers calli the stored native
    /// pointer.
    /// </summary>
    private void EmitDelegateCall(DelegateMarshallingMethodThunk delegateMethod, PInvokeILCodeStreams ilCodeStreams)
    {
        ILEmitter emitter = ilCodeStreams.Emitter;
        ILCodeStream fnptrLoadStream = ilCodeStreams.FunctionPointerLoadStream;
        ILCodeStream callsiteSetupCodeStream = ilCodeStreams.CallsiteSetupCodeStream;
        TypeSystemContext context = _targetMethod.Context;
        Debug.Assert(delegateMethod != null);
        if (delegateMethod.Kind == DelegateMarshallingMethodThunkKind.ReverseOpenStatic)
        {
            //
            // For Open static delegates call
            //     InteropHelpers.GetCurrentCalleeOpenStaticDelegateFunctionPointer()
            // which returns a function pointer. Just call the function pointer and we are done.
            //
            TypeDesc[] parameters = new TypeDesc[_marshallers.Length - 1];
            for (int i = 1; i < _marshallers.Length; i++)
            {
                parameters[i - 1] = _marshallers[i].ManagedParameterType;
            }
            MethodSignature managedSignature = new MethodSignature(
                MethodSignatureFlags.Static, 0, _marshallers[0].ManagedParameterType, parameters);
            fnptrLoadStream.Emit(ILOpcode.call, emitter.NewToken(
                delegateMethod.Context.GetHelperType("InteropHelpers").GetKnownMethod(
                    "GetCurrentCalleeOpenStaticDelegateFunctionPointer", null)));
            ILLocalVariable vDelegateStub = emitter.NewLocal(
                delegateMethod.Context.GetWellKnownType(WellKnownType.IntPtr));
            fnptrLoadStream.EmitStLoc(vDelegateStub);
            callsiteSetupCodeStream.EmitLdLoc(vDelegateStub);
            callsiteSetupCodeStream.Emit(ILOpcode.calli, emitter.NewToken(managedSignature));
        }
        else if (delegateMethod.Kind == DelegateMarshallingMethodThunkKind.ReverseClosed)
        {
            //
            // For closed delegates call
            //     InteropHelpers.GetCurrentCalleeDelegate<Delegate>
            // which returns the delegate. Do a CallVirt on the invoke method.
            //
            MethodDesc instantiatedHelper = delegateMethod.Context.GetInstantiatedMethod(
                delegateMethod.Context.GetHelperType("InteropHelpers")
                    .GetKnownMethod("GetCurrentCalleeDelegate", null),
                new Instantiation((delegateMethod.DelegateType)));
            fnptrLoadStream.Emit(ILOpcode.call, emitter.NewToken(instantiatedHelper));
            ILLocalVariable vDelegateStub = emitter.NewLocal(delegateMethod.DelegateType);
            fnptrLoadStream.EmitStLoc(vDelegateStub);
            fnptrLoadStream.EmitLdLoc(vDelegateStub);
            MethodDesc invokeMethod = delegateMethod.DelegateType.GetKnownMethod("Invoke", null);
            callsiteSetupCodeStream.Emit(ILOpcode.callvirt, emitter.NewToken(invokeMethod));
        }
        else if (delegateMethod.Kind == DelegateMarshallingMethodThunkKind
            .ForwardNativeFunctionWrapper)
        {
            //
            // For NativeFunctionWrapper we need to load the native function and call it
            //
            fnptrLoadStream.EmitLdArg(0);
            fnptrLoadStream.Emit(ILOpcode.call, emitter.NewToken(InteropTypes
                .GetNativeFunctionPointerWrapper(context)
                .GetMethod("get_NativeFunctionPointer", null)));
            var fnPtr = emitter.NewLocal(
                context.GetWellKnownType(WellKnownType.IntPtr));
            fnptrLoadStream.EmitStLoc(fnPtr);
            callsiteSetupCodeStream.EmitLdLoc(fnPtr);
            TypeDesc nativeReturnType = _marshallers[0].NativeParameterType;
            TypeDesc[] nativeParameterTypes = new TypeDesc[_marshallers.Length - 1];
            for (int i = 1; i < _marshallers.Length; i++)
            {
                nativeParameterTypes[i - 1] = _marshallers[i].NativeParameterType;
            }
            // Carry over the delegate's unmanaged calling convention (when the
            // delegate is an ECMA type) so the calli matches the native target.
            MethodSignatureFlags flags = MethodSignatureFlags.Static;
            var delegateType = ((DelegateMarshallingMethodThunk)_targetMethod).DelegateType as EcmaType;
            if (delegateType != null)
            {
                flags |= delegateType.GetDelegatePInvokeFlags().UnmanagedCallingConvention;
            }
            MethodSignature nativeSig = new MethodSignature(
                flags, 0, nativeReturnType, nativeParameterTypes);
            callsiteSetupCodeStream.Emit(ILOpcode.calli, emitter.NewToken(nativeSig));
        }
        else
        {
            Debug.Assert(false, "Unexpected DelegateMarshallingMethodThunkKind");
        }
    }

    /// <summary>
    /// Emits the complete stub body: argument marshalling, optional
    /// ClearLastWin32Error, the native call (delegate thunk, lazy-resolved
    /// calli, or eager direct call), optional SaveLastWin32Error, and the
    /// final ret in the unmarshalling stream.
    /// </summary>
    private MethodIL EmitIL()
    {
        PInvokeILCodeStreams pInvokeILCodeStreams = new PInvokeILCodeStreams();
        ILEmitter emitter = pInvokeILCodeStreams.Emitter;
        ILCodeStream fnptrLoadStream = pInvokeILCodeStreams.FunctionPointerLoadStream;
        ILCodeStream callsiteSetupCodeStream = pInvokeILCodeStreams.CallsiteSetupCodeStream;
        ILCodeStream unmarshallingCodestream = pInvokeILCodeStreams.UnmarshallingCodestream;
        TypeSystemContext context = _targetMethod.Context;
        // Marshal the arguments
        for (int i = 0; i < _marshallers.Length; i++)
        {
            _marshallers[i].EmitMarshallingIL(pInvokeILCodeStreams);
        }
        // if the SetLastError flag is set in DllImport, clear the error code before doing P/Invoke
        if (_importMetadata.Flags.SetLastError)
        {
            callsiteSetupCodeStream.Emit(ILOpcode.call, emitter.NewToken(
                InteropTypes.GetPInvokeMarshal(context).GetKnownMethod("ClearLastWin32Error", null)));
        }
        // make the call
        DelegateMarshallingMethodThunk delegateMethod = _targetMethod as DelegateMarshallingMethodThunk;
        if (delegateMethod != null)
        {
            EmitDelegateCall(delegateMethod, pInvokeILCodeStreams);
        }
        else
        {
            TypeDesc nativeReturnType = _marshallers[0].NativeParameterType;
            TypeDesc[] nativeParameterTypes = new TypeDesc[_marshallers.Length - 1];
            for (int i = 1; i < _marshallers.Length; i++)
            {
                nativeParameterTypes[i - 1] = _marshallers[i].NativeParameterType;
            }
            if (MarshalHelpers.UseLazyResolution(_targetMethod,
                _importMetadata.Module,
                _pInvokeILEmitterConfiguration))
            {
                // Lazy resolution: load the dispatch cell, call
                // InteropHelpers.ResolvePInvoke to get the target at runtime,
                // then calli through the resulting function pointer.
                MetadataType lazyHelperType = _targetMethod.Context.GetHelperType("InteropHelpers");
                FieldDesc lazyDispatchCell = new PInvokeLazyFixupField(_targetMethod);
                fnptrLoadStream.Emit(ILOpcode.ldsflda, emitter.NewToken(lazyDispatchCell));
                fnptrLoadStream.Emit(ILOpcode.call, emitter.NewToken(lazyHelperType
                    .GetKnownMethod("ResolvePInvoke", null)));
                MethodSignatureFlags unmanagedCallConv = _importMetadata.Flags.UnmanagedCallingConvention;
                MethodSignature nativeSig = new MethodSignature(
                    _targetMethod.Signature.Flags | unmanagedCallConv, 0, nativeReturnType,
                    nativeParameterTypes);
                ILLocalVariable vNativeFunctionPointer = emitter.NewLocal(_targetMethod.Context
                    .GetWellKnownType(WellKnownType.IntPtr));
                fnptrLoadStream.EmitStLoc(vNativeFunctionPointer);
                callsiteSetupCodeStream.EmitLdLoc(vNativeFunctionPointer);
                callsiteSetupCodeStream.Emit(ILOpcode.calli, emitter.NewToken(nativeSig));
            }
            else
            {
                // Eager call
                MethodSignature nativeSig = new MethodSignature(
                    _targetMethod.Signature.Flags, 0, nativeReturnType, nativeParameterTypes);
                MethodDesc nativeMethod =
                    new PInvokeTargetNativeMethod(_targetMethod, nativeSig);
                callsiteSetupCodeStream.Emit(ILOpcode.call, emitter.NewToken(nativeMethod));
            }
        }
        // if the SetLastError flag is set in DllImport, call the PInvokeMarshal.
        // SaveLastWin32Error so that last error can be used later by calling
        // PInvokeMarshal.GetLastWin32Error
        if (_importMetadata.Flags.SetLastError)
        {
            callsiteSetupCodeStream.Emit(ILOpcode.call, emitter.NewToken(
                InteropTypes.GetPInvokeMarshal(context)
                    .GetKnownMethod("SaveLastWin32Error", null)));
        }
        unmarshallingCodestream.Emit(ILOpcode.ret);
        return new PInvokeILStubMethodIL((ILStubMethodIL)emitter.Link(_targetMethod),
            IsStubRequired());
    }

    /// <summary>
    /// Public entry point: emits the stub IL for <paramref name="method"/>.
    /// Marshalling failures are converted into exception-throwing method
    /// bodies rather than propagated as compiler exceptions.
    /// </summary>
    public static MethodIL EmitIL(MethodDesc method,
        PInvokeILEmitterConfiguration pinvokeILEmitterConfiguration,
        InteropStateManager interopStateManager)
    {
        try
        {
            return new PInvokeILEmitter(method, pinvokeILEmitterConfiguration, interopStateManager)
                .EmitIL();
        }
        catch (NotSupportedException)
        {
            string message = "Method '" + method.ToString() +
                "' requires non-trivial marshalling that is not yet supported by this compiler.";
            return MarshalHelpers.EmitExceptionBody(message, method);
        }
        catch (InvalidProgramException ex)
        {
            Debug.Assert(!String.IsNullOrEmpty(ex.Message));
            return MarshalHelpers.EmitExceptionBody(ex.Message, method);
        }
    }

    /// <summary>
    /// True when a marshalling stub is actually needed: delegate thunks,
    /// lazily-resolved targets, SetLastError handling, or any argument that
    /// requires non-trivial marshalling.
    /// </summary>
    private bool IsStubRequired()
    {
        Debug.Assert(_targetMethod.IsPInvoke || _targetMethod is DelegateMarshallingMethodThunk);
        if (_targetMethod is DelegateMarshallingMethodThunk)
        {
            return true;
        }
        if (MarshalHelpers.UseLazyResolution(_targetMethod, _importMetadata.Module,
            _pInvokeILEmitterConfiguration))
        {
            return true;
        }
        if (_importMetadata.Flags.SetLastError)
        {
            return true;
        }
        for (int i = 0; i < _marshallers.Length; i++)
        {
            if (_marshallers[i].IsMarshallingRequired())
                return true;
        }
        return false;
    }
}
/// <summary>
/// Bundles the IL emitter and the ordered code streams a P/Invoke stub is
/// assembled from. Streams are emitted in declaration order when linked.
/// </summary>
internal sealed class PInvokeILCodeStreams
{
    public ILEmitter Emitter { get; }
    public ILCodeStream FunctionPointerLoadStream { get; }
    public ILCodeStream MarshallingCodeStream { get; }
    public ILCodeStream CallsiteSetupCodeStream { get; }
    public ILCodeStream ReturnValueMarshallingCodeStream { get; }
    public ILCodeStream UnmarshallingCodestream { get; }
    public PInvokeILCodeStreams()
    {
        Emitter = new ILEmitter();
        // We have 5 code streams (the original comment said 4 and omitted the
        // first one):
        // - FunctionPointerLoadStream is used to resolve/load the native
        //   function pointer (or delegate) before the call site is set up.
        // - MarshallingCodeStream is used to convert each argument into a native type and
        //   store that into the local
        // - CallsiteSetupCodeStream is used to load each previously generated local
        //   and call the actual target native method.
        // - ReturnValueMarshallingCodeStream is used to convert the native return value
        //   to managed one.
        // - UnmarshallingCodestream is used to propagate [out] native arguments values to
        //   managed ones.
        FunctionPointerLoadStream = Emitter.NewCodeStream();
        MarshallingCodeStream = Emitter.NewCodeStream();
        CallsiteSetupCodeStream = Emitter.NewCodeStream();
        ReturnValueMarshallingCodeStream = Emitter.NewCodeStream();
        UnmarshallingCodestream = Emitter.NewCodeStream();
    }
    // Wraps an existing emitter/stream pair; only MarshallingCodeStream is set,
    // the remaining stream properties stay null — callers using this overload
    // must not touch them.
    public PInvokeILCodeStreams(ILEmitter emitter, ILCodeStream codeStream)
    {
        Emitter = emitter;
        MarshallingCodeStream = codeStream;
    }
}
/// <summary>
/// An ILStubMethodIL that additionally records whether a marshalling stub is
/// actually required for the method (see PInvokeILEmitter.IsStubRequired).
/// </summary>
public sealed class PInvokeILStubMethodIL : ILStubMethodIL
{
    public bool IsStubRequired { get; }
    public PInvokeILStubMethodIL(ILStubMethodIL methodIL, bool isStubRequired) : base(methodIL)
    {
        IsStubRequired = isStubRequired;
    }
}
}
| |
// This file was generated by the Gtk# code generator.
// Any changes made will be lost if regenerated.
namespace GLib {
using System;
using System.Collections;
using System.Runtime.InteropServices;
#region Autogenerated code
public class VolumeMonitor : GLib.Object {
// Obsolete GType-based construction path, kept for binary compatibility.
[Obsolete]
protected VolumeMonitor(GLib.GType gtype) : base(gtype) {}
// Wraps an existing native object handle.
public VolumeMonitor(IntPtr raw) : base(raw) {}
// Creates a new native instance with no construction properties.
protected VolumeMonitor() : base(IntPtr.Zero)
{
    CreateNativeObject (new string [0], new GLib.Value [0]);
}
[GLib.CDeclCallback]
delegate void MountChangedVMDelegate (IntPtr volume_monitor, IntPtr mount);
// Cached in a static so the trampoline delegate registered with GLib is
// created once and never garbage collected.
static MountChangedVMDelegate MountChangedVMCallback;
// Native-to-managed trampoline for "mount-changed"; exceptions are routed
// through ExceptionManager so they never unwind across the native boundary.
static void mountchanged_cb (IntPtr volume_monitor, IntPtr mount)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnMountChanged (GLib.MountAdapter.GetObject (mount, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "mount-changed" default handler.
private static void OverrideMountChanged (GLib.GType gtype)
{
    if (MountChangedVMCallback == null)
        MountChangedVMCallback = new MountChangedVMDelegate (mountchanged_cb);
    OverrideVirtualMethod (gtype, "mount-changed", MountChangedVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideMountChanged")]
protected virtual void OnMountChanged (GLib.Mount mount)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (mount);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "mount-changed" signal.
[GLib.Signal("mount-changed")]
public event GLib.MountChangedHandler MountChanged {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "mount-changed", typeof (GLib.MountChangedArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "mount-changed", typeof (GLib.MountChangedArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void DriveDisconnectedVMDelegate (IntPtr volume_monitor, IntPtr drive);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static DriveDisconnectedVMDelegate DriveDisconnectedVMCallback;
// Native-to-managed trampoline for "drive-disconnected".
static void drivedisconnected_cb (IntPtr volume_monitor, IntPtr drive)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnDriveDisconnected (GLib.DriveAdapter.GetObject (drive, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "drive-disconnected" default handler.
private static void OverrideDriveDisconnected (GLib.GType gtype)
{
    if (DriveDisconnectedVMCallback == null)
        DriveDisconnectedVMCallback = new DriveDisconnectedVMDelegate (drivedisconnected_cb);
    OverrideVirtualMethod (gtype, "drive-disconnected", DriveDisconnectedVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideDriveDisconnected")]
protected virtual void OnDriveDisconnected (GLib.Drive drive)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (drive);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "drive-disconnected" signal.
[GLib.Signal("drive-disconnected")]
public event GLib.DriveDisconnectedHandler DriveDisconnected {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-disconnected", typeof (GLib.DriveDisconnectedArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-disconnected", typeof (GLib.DriveDisconnectedArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void DriveConnectedVMDelegate (IntPtr volume_monitor, IntPtr drive);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static DriveConnectedVMDelegate DriveConnectedVMCallback;
// Native-to-managed trampoline for "drive-connected".
static void driveconnected_cb (IntPtr volume_monitor, IntPtr drive)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnDriveConnected (GLib.DriveAdapter.GetObject (drive, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "drive-connected" default handler.
private static void OverrideDriveConnected (GLib.GType gtype)
{
    if (DriveConnectedVMCallback == null)
        DriveConnectedVMCallback = new DriveConnectedVMDelegate (driveconnected_cb);
    OverrideVirtualMethod (gtype, "drive-connected", DriveConnectedVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideDriveConnected")]
protected virtual void OnDriveConnected (GLib.Drive drive)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (drive);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "drive-connected" signal.
[GLib.Signal("drive-connected")]
public event GLib.DriveConnectedHandler DriveConnected {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-connected", typeof (GLib.DriveConnectedArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-connected", typeof (GLib.DriveConnectedArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void VolumeChangedVMDelegate (IntPtr volume_monitor, IntPtr volume);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static VolumeChangedVMDelegate VolumeChangedVMCallback;
// Native-to-managed trampoline for "volume-changed".
static void volumechanged_cb (IntPtr volume_monitor, IntPtr volume)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnVolumeChanged (GLib.VolumeAdapter.GetObject (volume, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "volume-changed" default handler.
private static void OverrideVolumeChanged (GLib.GType gtype)
{
    if (VolumeChangedVMCallback == null)
        VolumeChangedVMCallback = new VolumeChangedVMDelegate (volumechanged_cb);
    OverrideVirtualMethod (gtype, "volume-changed", VolumeChangedVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideVolumeChanged")]
protected virtual void OnVolumeChanged (GLib.Volume volume)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (volume);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "volume-changed" signal.
[GLib.Signal("volume-changed")]
public event GLib.VolumeChangedHandler VolumeChanged {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "volume-changed", typeof (GLib.VolumeChangedArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "volume-changed", typeof (GLib.VolumeChangedArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void VolumeAddedVMDelegate (IntPtr volume_monitor, IntPtr volume);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static VolumeAddedVMDelegate VolumeAddedVMCallback;
// Native-to-managed trampoline for "volume-added".
static void volumeadded_cb (IntPtr volume_monitor, IntPtr volume)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnVolumeAdded (GLib.VolumeAdapter.GetObject (volume, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "volume-added" default handler.
private static void OverrideVolumeAdded (GLib.GType gtype)
{
    if (VolumeAddedVMCallback == null)
        VolumeAddedVMCallback = new VolumeAddedVMDelegate (volumeadded_cb);
    OverrideVirtualMethod (gtype, "volume-added", VolumeAddedVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideVolumeAdded")]
protected virtual void OnVolumeAdded (GLib.Volume volume)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (volume);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "volume-added" signal.
[GLib.Signal("volume-added")]
public event GLib.VolumeAddedHandler VolumeAdded {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "volume-added", typeof (GLib.VolumeAddedArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "volume-added", typeof (GLib.VolumeAddedArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void DriveStopButtonVMDelegate (IntPtr volume_monitor, IntPtr drive);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static DriveStopButtonVMDelegate DriveStopButtonVMCallback;
// Native-to-managed trampoline for "drive-stop-button".
static void drivestopbutton_cb (IntPtr volume_monitor, IntPtr drive)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnDriveStopButton (GLib.DriveAdapter.GetObject (drive, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "drive-stop-button" default handler.
private static void OverrideDriveStopButton (GLib.GType gtype)
{
    if (DriveStopButtonVMCallback == null)
        DriveStopButtonVMCallback = new DriveStopButtonVMDelegate (drivestopbutton_cb);
    OverrideVirtualMethod (gtype, "drive-stop-button", DriveStopButtonVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideDriveStopButton")]
protected virtual void OnDriveStopButton (GLib.Drive drive)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (drive);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "drive-stop-button" signal.
[GLib.Signal("drive-stop-button")]
public event GLib.DriveStopButtonHandler DriveStopButton {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-stop-button", typeof (GLib.DriveStopButtonArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-stop-button", typeof (GLib.DriveStopButtonArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void MountAddedVMDelegate (IntPtr volume_monitor, IntPtr mount);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static MountAddedVMDelegate MountAddedVMCallback;
// Native-to-managed trampoline for "mount-added".
static void mountadded_cb (IntPtr volume_monitor, IntPtr mount)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnMountAdded (GLib.MountAdapter.GetObject (mount, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "mount-added" default handler.
private static void OverrideMountAdded (GLib.GType gtype)
{
    if (MountAddedVMCallback == null)
        MountAddedVMCallback = new MountAddedVMDelegate (mountadded_cb);
    OverrideVirtualMethod (gtype, "mount-added", MountAddedVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideMountAdded")]
protected virtual void OnMountAdded (GLib.Mount mount)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (mount);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "mount-added" signal.
[GLib.Signal("mount-added")]
public event GLib.MountAddedHandler MountAdded {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "mount-added", typeof (GLib.MountAddedArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "mount-added", typeof (GLib.MountAddedArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void DriveEjectButtonVMDelegate (IntPtr volume_monitor, IntPtr drive);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static DriveEjectButtonVMDelegate DriveEjectButtonVMCallback;
// Native-to-managed trampoline for "drive-eject-button".
static void driveejectbutton_cb (IntPtr volume_monitor, IntPtr drive)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnDriveEjectButton (GLib.DriveAdapter.GetObject (drive, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "drive-eject-button" default handler.
private static void OverrideDriveEjectButton (GLib.GType gtype)
{
    if (DriveEjectButtonVMCallback == null)
        DriveEjectButtonVMCallback = new DriveEjectButtonVMDelegate (driveejectbutton_cb);
    OverrideVirtualMethod (gtype, "drive-eject-button", DriveEjectButtonVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideDriveEjectButton")]
protected virtual void OnDriveEjectButton (GLib.Drive drive)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (drive);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "drive-eject-button" signal.
[GLib.Signal("drive-eject-button")]
public event GLib.DriveEjectButtonHandler DriveEjectButton {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-eject-button", typeof (GLib.DriveEjectButtonArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-eject-button", typeof (GLib.DriveEjectButtonArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void DriveChangedVMDelegate (IntPtr volume_monitor, IntPtr drive);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static DriveChangedVMDelegate DriveChangedVMCallback;
// Native-to-managed trampoline for "drive-changed".
static void drivechanged_cb (IntPtr volume_monitor, IntPtr drive)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnDriveChanged (GLib.DriveAdapter.GetObject (drive, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "drive-changed" default handler.
private static void OverrideDriveChanged (GLib.GType gtype)
{
    if (DriveChangedVMCallback == null)
        DriveChangedVMCallback = new DriveChangedVMDelegate (drivechanged_cb);
    OverrideVirtualMethod (gtype, "drive-changed", DriveChangedVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideDriveChanged")]
protected virtual void OnDriveChanged (GLib.Drive drive)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (drive);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "drive-changed" signal.
[GLib.Signal("drive-changed")]
public event GLib.DriveChangedHandler DriveChanged {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-changed", typeof (GLib.DriveChangedArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "drive-changed", typeof (GLib.DriveChangedArgs));
        sig.RemoveDelegate (value);
    }
}
[GLib.CDeclCallback]
delegate void MountPreUnmountVMDelegate (IntPtr volume_monitor, IntPtr mount);
// Cached trampoline delegate; the static reference keeps it alive for GLib.
static MountPreUnmountVMDelegate MountPreUnmountVMCallback;
// Native-to-managed trampoline for "mount-pre-unmount".
static void mountpreunmount_cb (IntPtr volume_monitor, IntPtr mount)
{
    try {
        VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
        volume_monitor_managed.OnMountPreUnmount (GLib.MountAdapter.GetObject (mount, false));
    } catch (Exception e) {
        GLib.ExceptionManager.RaiseUnhandledException (e, false);
    }
}
// Lazily installs the trampoline as the GType's "mount-pre-unmount" default handler.
private static void OverrideMountPreUnmount (GLib.GType gtype)
{
    if (MountPreUnmountVMCallback == null)
        MountPreUnmountVMCallback = new MountPreUnmountVMDelegate (mountpreunmount_cb);
    OverrideVirtualMethod (gtype, "mount-pre-unmount", MountPreUnmountVMCallback);
}
// Default handler: chains the signal to the overridden base implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideMountPreUnmount")]
protected virtual void OnMountPreUnmount (GLib.Mount mount)
{
    GLib.Value ret = GLib.Value.Empty;
    GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
    GLib.Value[] vals = new GLib.Value [2];
    vals [0] = new GLib.Value (this);
    inst_and_params.Append (vals [0]);
    vals [1] = new GLib.Value (mount);
    inst_and_params.Append (vals [1]);
    g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
    foreach (GLib.Value v in vals)
        v.Dispose ();
}
// Managed event wrapper around the native "mount-pre-unmount" signal.
[GLib.Signal("mount-pre-unmount")]
public event GLib.MountPreUnmountHandler MountPreUnmount {
    add {
        GLib.Signal sig = GLib.Signal.Lookup (this, "mount-pre-unmount", typeof (GLib.MountPreUnmountArgs));
        sig.AddDelegate (value);
    }
    remove {
        GLib.Signal sig = GLib.Signal.Lookup (this, "mount-pre-unmount", typeof (GLib.MountPreUnmountArgs));
        sig.RemoveDelegate (value);
    }
}
// Native glue for the "volume-removed" virtual method.
[GLib.CDeclCallback]
delegate void VolumeRemovedVMDelegate (IntPtr volume_monitor, IntPtr volume);
static VolumeRemovedVMDelegate VolumeRemovedVMCallback;
// Trampoline: forwards the native call to the managed OnVolumeRemoved;
// the catch-all keeps exceptions from crossing into native code.
static void volumeremoved_cb (IntPtr volume_monitor, IntPtr volume)
{
try {
VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
volume_monitor_managed.OnVolumeRemoved (GLib.VolumeAdapter.GetObject (volume, false));
} catch (Exception e) {
GLib.ExceptionManager.RaiseUnhandledException (e, false);
}
}
// Installs the trampoline as the class default handler for "volume-removed".
private static void OverrideVolumeRemoved (GLib.GType gtype)
{
if (VolumeRemovedVMCallback == null)
VolumeRemovedVMCallback = new VolumeRemovedVMDelegate (volumeremoved_cb);
OverrideVirtualMethod (gtype, "volume-removed", VolumeRemovedVMCallback);
}
// Default handler: chains up to the overridden native signal implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideVolumeRemoved")]
protected virtual void OnVolumeRemoved (GLib.Volume volume)
{
GLib.Value ret = GLib.Value.Empty;
GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
GLib.Value[] vals = new GLib.Value [2];
vals [0] = new GLib.Value (this);
inst_and_params.Append (vals [0]);
vals [1] = new GLib.Value (volume);
inst_and_params.Append (vals [1]);
g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
// GLib.Value wraps native resources; dispose each one after chaining up.
foreach (GLib.Value v in vals)
v.Dispose ();
}
// Managed event wrapper over the GLib "volume-removed" signal.
[GLib.Signal("volume-removed")]
public event GLib.VolumeRemovedHandler VolumeRemoved {
add {
GLib.Signal sig = GLib.Signal.Lookup (this, "volume-removed", typeof (GLib.VolumeRemovedArgs));
sig.AddDelegate (value);
}
remove {
GLib.Signal sig = GLib.Signal.Lookup (this, "volume-removed", typeof (GLib.VolumeRemovedArgs));
sig.RemoveDelegate (value);
}
}
// Native glue for the "mount-removed" virtual method.
[GLib.CDeclCallback]
delegate void MountRemovedVMDelegate (IntPtr volume_monitor, IntPtr mount);
static MountRemovedVMDelegate MountRemovedVMCallback;
// Trampoline: forwards the native call to the managed OnMountRemoved;
// the catch-all keeps exceptions from crossing into native code.
static void mountremoved_cb (IntPtr volume_monitor, IntPtr mount)
{
try {
VolumeMonitor volume_monitor_managed = GLib.Object.GetObject (volume_monitor, false) as VolumeMonitor;
volume_monitor_managed.OnMountRemoved (GLib.MountAdapter.GetObject (mount, false));
} catch (Exception e) {
GLib.ExceptionManager.RaiseUnhandledException (e, false);
}
}
// Installs the trampoline as the class default handler for "mount-removed".
private static void OverrideMountRemoved (GLib.GType gtype)
{
if (MountRemovedVMCallback == null)
MountRemovedVMCallback = new MountRemovedVMDelegate (mountremoved_cb);
OverrideVirtualMethod (gtype, "mount-removed", MountRemovedVMCallback);
}
// Default handler: chains up to the overridden native signal implementation.
[GLib.DefaultSignalHandler(Type=typeof(GLib.VolumeMonitor), ConnectionMethod="OverrideMountRemoved")]
protected virtual void OnMountRemoved (GLib.Mount mount)
{
GLib.Value ret = GLib.Value.Empty;
GLib.ValueArray inst_and_params = new GLib.ValueArray (2);
GLib.Value[] vals = new GLib.Value [2];
vals [0] = new GLib.Value (this);
inst_and_params.Append (vals [0]);
vals [1] = new GLib.Value (mount);
inst_and_params.Append (vals [1]);
g_signal_chain_from_overridden (inst_and_params.ArrayPtr, ref ret);
// GLib.Value wraps native resources; dispose each one after chaining up.
foreach (GLib.Value v in vals)
v.Dispose ();
}
// Managed event wrapper over the GLib "mount-removed" signal.
[GLib.Signal("mount-removed")]
public event GLib.MountRemovedHandler MountRemoved {
add {
GLib.Signal sig = GLib.Signal.Lookup (this, "mount-removed", typeof (GLib.MountRemovedArgs));
sig.AddDelegate (value);
}
remove {
GLib.Signal sig = GLib.Signal.Lookup (this, "mount-removed", typeof (GLib.MountRemovedArgs));
sig.RemoveDelegate (value);
}
}
[DllImport("libgio-2.0-0.dll")]
static extern IntPtr g_volume_monitor_get_mount_for_uuid(IntPtr raw, IntPtr uuid);
// Finds the mount matching the given UUID; the managed string is copied to a
// native buffer for the call and freed afterwards.
public GLib.Mount GetMountForUuid(string uuid) {
IntPtr native_uuid = GLib.Marshaller.StringToPtrGStrdup (uuid);
IntPtr raw_ret = g_volume_monitor_get_mount_for_uuid(Handle, native_uuid);
GLib.Mount ret = GLib.MountAdapter.GetObject (raw_ret, false);
GLib.Marshaller.Free (native_uuid);
return ret;
}
[DllImport("libgio-2.0-0.dll")]
static extern IntPtr g_volume_monitor_get();
// The volume monitor returned by g_volume_monitor_get().
public static GLib.VolumeMonitor Default {
get {
IntPtr raw_ret = g_volume_monitor_get();
GLib.VolumeMonitor ret = GLib.Object.GetObject(raw_ret) as GLib.VolumeMonitor;
return ret;
}
}
[DllImport("libgio-2.0-0.dll")]
static extern IntPtr g_volume_monitor_get_type();
// Native GType of GVolumeMonitor; 'new' deliberately hides the base GType property.
public static new GLib.GType GType {
get {
IntPtr raw_ret = g_volume_monitor_get_type();
GLib.GType ret = new GLib.GType(raw_ret);
return ret;
}
}
[DllImport("libgio-2.0-0.dll")]
static extern IntPtr g_volume_monitor_adopt_orphan_mount(IntPtr mount);
// Marked [Obsolete] because the underlying GIO API is deprecated; kept for compatibility.
[Obsolete]
public static GLib.Volume AdoptOrphanMount(GLib.Mount mount) {
IntPtr raw_ret = g_volume_monitor_adopt_orphan_mount(mount == null ? IntPtr.Zero : mount.Handle);
GLib.Volume ret = GLib.VolumeAdapter.GetObject (raw_ret, false);
return ret;
}
[DllImport("libgio-2.0-0.dll")]
static extern IntPtr g_volume_monitor_get_mounts(IntPtr raw);
// Snapshot of the currently known mounts, marshalled from a native GList.
public GLib.Mount[] Mounts {
get {
IntPtr raw_ret = g_volume_monitor_get_mounts(Handle);
GLib.Mount[] ret = (GLib.Mount[]) GLib.Marshaller.ListPtrToArray (raw_ret, typeof(GLib.List), true, false, typeof(GLib.Mount));
return ret;
}
}
[DllImport("libgio-2.0-0.dll")]
static extern IntPtr g_volume_monitor_get_connected_drives(IntPtr raw);
// Snapshot of the currently connected drives, marshalled from a native GList.
public GLib.Drive[] ConnectedDrives {
get {
IntPtr raw_ret = g_volume_monitor_get_connected_drives(Handle);
GLib.Drive[] ret = (GLib.Drive[]) GLib.Marshaller.ListPtrToArray (raw_ret, typeof(GLib.List), true, false, typeof(GLib.Drive));
return ret;
}
}
[DllImport("libgio-2.0-0.dll")]
static extern IntPtr g_volume_monitor_get_volume_for_uuid(IntPtr raw, IntPtr uuid);
// Finds the volume matching the given UUID; native string freed after the call.
public GLib.Volume GetVolumeForUuid(string uuid) {
IntPtr native_uuid = GLib.Marshaller.StringToPtrGStrdup (uuid);
IntPtr raw_ret = g_volume_monitor_get_volume_for_uuid(Handle, native_uuid);
GLib.Volume ret = GLib.VolumeAdapter.GetObject (raw_ret, false);
GLib.Marshaller.Free (native_uuid);
return ret;
}
[DllImport("libgio-2.0-0.dll")]
static extern IntPtr g_volume_monitor_get_volumes(IntPtr raw);
// Snapshot of the currently known volumes, marshalled from a native GList.
public GLib.Volume[] Volumes {
get {
IntPtr raw_ret = g_volume_monitor_get_volumes(Handle);
GLib.Volume[] ret = (GLib.Volume[]) GLib.Marshaller.ListPtrToArray (raw_ret, typeof(GLib.List), true, false, typeof(GLib.Volume));
return ret;
}
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
namespace Lucene.Net.Index
{
using Lucene.Net.Support;
using System.Collections.Concurrent;
using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using Analyzer = Lucene.Net.Analysis.Analyzer;
using BinaryDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.BinaryDocValuesUpdate;
using BytesRef = Lucene.Net.Util.BytesRef;
using Directory = Lucene.Net.Store.Directory;
using Event = Lucene.Net.Index.IndexWriter.Event;
using FlushedSegment = Lucene.Net.Index.DocumentsWriterPerThread.FlushedSegment;
using InfoStream = Lucene.Net.Util.InfoStream;
using NumericDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.NumericDocValuesUpdate;
using Query = Lucene.Net.Search.Query;
using SegmentFlushTicket = Lucene.Net.Index.DocumentsWriterFlushQueue.SegmentFlushTicket;
using ThreadState = Lucene.Net.Index.DocumentsWriterPerThreadPool.ThreadState;
/// <summary>
/// this class accepts multiple added documents and directly
/// writes segment files.
///
/// Each added document is passed to the <seealso cref="DocConsumer"/>,
/// which in turn processes the document and interacts with
/// other consumers in the indexing chain. Certain
/// consumers, like <seealso cref="StoredFieldsConsumer"/> and {@link
/// TermVectorsConsumer}, digest a document and
/// immediately write bytes to the "doc store" files (ie,
/// they do not consume RAM per document, except while they
/// are processing the document).
///
/// Other consumers, eg <seealso cref="FreqProxTermsWriter"/> and
/// <seealso cref="NormsConsumer"/>, buffer bytes in RAM and flush only
/// when a new segment is produced.
///
/// Once we have used our allowed RAM buffer, or the number
/// of added docs is large enough (in the case we are
/// flushing by doc count instead of RAM usage), we create a
/// real segment and flush it to the Directory.
///
/// Threads:
///
/// Multiple threads are allowed into addDocument at once.
/// There is an initial synchronized call to getThreadState
/// which allocates a ThreadState for this thread. The same
/// thread will get the same ThreadState over time (thread
/// affinity) so that if there are consistent patterns (for
/// example each thread is indexing a different content
/// source) then we make better use of RAM. Then
/// processDocument is called on that ThreadState without
/// synchronization (most of the "heavy lifting" is in this
/// call). Finally the synchronized "finishDocument" is
/// called to flush changes to the directory.
///
/// When flush is called by IndexWriter we forcefully idle
/// all threads and flush only once they are all idle. this
/// means you can call flush with a given thread even while
/// other threads are actively adding/deleting documents.
///
///
/// Exceptions:
///
/// Because this class directly updates in-memory posting
/// lists, and flushes stored fields and term vectors
/// directly to files in the directory, there are certain
/// limited times when an exception can corrupt this state.
/// For example, a disk full while flushing stored fields
/// leaves this file in a corrupt state. Or, an OOM
/// exception while appending to the in-memory posting lists
/// can corrupt that posting list. We call such exceptions
/// "aborting exceptions". In these cases we must call
/// abort() to discard all docs added since the last flush.
///
/// All other exceptions ("non-aborting exceptions") can
/// still partially update the index structures. These
/// updates are consistent, but, they represent only a part
/// of the document seen up until the exception was hit.
/// When this happens, we immediately mark the document as
/// deleted so that the document is always atomically ("all
/// or none") added to the index.
/// </summary>
public sealed class DocumentsWriter : IDisposable
{
private readonly Directory Directory;
private volatile bool Closed;
private readonly InfoStream InfoStream;
private readonly LiveIndexWriterConfig LIWConfig;
private readonly AtomicInteger NumDocsInRAM = new AtomicInteger(0);
// TODO: cut over to BytesRefHash in BufferedDeletes
internal volatile DocumentsWriterDeleteQueue DeleteQueue = new DocumentsWriterDeleteQueue();
private readonly DocumentsWriterFlushQueue TicketQueue = new DocumentsWriterFlushQueue();
/*
* we preserve changes during a full flush since IW might not checkout before
* we release all changes. NRT Readers otherwise suddenly return true from
* isCurrent while there are actually changes currently committed. See also
* #anyChanges() & #flushAllThreads
*/
private volatile bool PendingChangesInCurrentFullFlush;
internal readonly DocumentsWriterPerThreadPool PerThreadPool;
internal readonly FlushPolicy FlushPolicy;
internal readonly DocumentsWriterFlushControl FlushControl;
private readonly IndexWriter Writer;
private readonly ConcurrentQueue<Event> Events;
/// <summary>
/// Wires the indexing subsystems together; thread pool, flush policy and
/// flush control are all taken from the supplied live config.
/// </summary>
internal DocumentsWriter(IndexWriter writer, LiveIndexWriterConfig config, Directory directory)
{
this.Directory = directory;
this.LIWConfig = config;
this.InfoStream = config.InfoStream;
this.PerThreadPool = config.IndexerThreadPool;
FlushPolicy = config.FlushPolicy;
this.Writer = writer;
this.Events = new ConcurrentQueue<Event>();
FlushControl = new DocumentsWriterFlushControl(this, config, writer.BufferedUpdatesStream);
}
/// <summary>
/// Buffers query-based deletes on the current delete queue; returns true if
/// an apply-deletes event was queued as a result.
/// </summary>
internal bool DeleteQueries(params Query[] queries)
{
lock (this)
{
// TODO why is this synchronized?
DocumentsWriterDeleteQueue deleteQueue = this.DeleteQueue;
deleteQueue.AddDelete(queries);
FlushControl.DoOnDelete();
return ApplyAllDeletes(deleteQueue);
}
}
// TODO: we could check w/ FreqProxTermsWriter: if the
// term doesn't exist, don't bother buffering into the
// per-DWPT map (but still must go into the global map)
/// <summary>
/// Buffers term-based deletes on the current delete queue; returns true if
/// an apply-deletes event was queued as a result.
/// </summary>
internal bool DeleteTerms(params Term[] terms)
{
lock (this)
{
// TODO why is this synchronized?
DocumentsWriterDeleteQueue deleteQueue = this.DeleteQueue;
deleteQueue.AddDelete(terms);
FlushControl.DoOnDelete();
return ApplyAllDeletes(deleteQueue);
}
}
/// <summary>
/// Buffers a numeric doc-values update for all docs matching <paramref name="term"/>.
/// </summary>
internal bool UpdateNumericDocValue(Term term, string field, long? value)
{
lock (this)
{
DocumentsWriterDeleteQueue deleteQueue = this.DeleteQueue;
deleteQueue.AddNumericUpdate(new NumericDocValuesUpdate(term, field, value));
FlushControl.DoOnDelete();
return ApplyAllDeletes(deleteQueue);
}
}
/// <summary>
/// Buffers a binary doc-values update for all docs matching <paramref name="term"/>.
/// </summary>
internal bool UpdateBinaryDocValue(Term term, string field, BytesRef value)
{
lock (this)
{
DocumentsWriterDeleteQueue deleteQueue = this.DeleteQueue;
deleteQueue.AddBinaryUpdate(new BinaryDocValuesUpdate(term, field, value));
FlushControl.DoOnDelete();
return ApplyAllDeletes(deleteQueue);
}
}
/// <summary>Returns the delete queue of the current indexing session.</summary>
internal DocumentsWriterDeleteQueue CurrentDeleteSession()
{
return DeleteQueue;
}
/// <summary>
/// If flush control flagged a pending apply-all-deletes, tickets the queue's
/// deletes (unless a full flush owns them) and queues a purge-forcing event.
/// Returns true when an event was queued.
/// </summary>
private bool ApplyAllDeletes(DocumentsWriterDeleteQueue deleteQueue)
{
if (FlushControl.AndResetApplyAllDeletes)
{
if (deleteQueue != null && !FlushControl.FullFlush)
{
TicketQueue.AddDeletes(deleteQueue);
}
PutEvent(ApplyDeletesEvent.INSTANCE); // apply deletes event forces a purge
return true;
}
return false;
}
/// <summary>
/// Publishes queued flush tickets to the given writer. A forced purge blocks
/// until the ticket queue can be drained; a non-forced purge only tries.
/// Returns the number of tickets published.
/// </summary>
internal int PurgeBuffer(IndexWriter writer, bool forced)
{
return forced
? TicketQueue.ForcePurge(writer)
: TicketQueue.TryPurge(writer);
}
/// <summary>
/// Returns how many docs are currently buffered in RAM. </summary>
internal int NumDocs
{
get
{
return NumDocsInRAM.Get();
}
}
/// <summary>Throws <see cref="AlreadyClosedException"/> once Dispose has run.</summary>
private void EnsureOpen()
{
if (Closed)
{
throw new AlreadyClosedException("this IndexWriter is closed");
}
}
/// <summary>
/// Called if we hit an exception at a bad time (when
/// updating the index files) and must discard all
/// currently buffered docs. this resets our state,
/// discarding any docs added since last flush.
/// </summary>
internal void Abort(IndexWriter writer)
{
lock (this)
{
//Debug.Assert(!Thread.HoldsLock(writer), "IndexWriter lock should never be hold when aborting");
bool success = false;
HashSet<string> newFilesSet = new HashSet<string>();
try
{
DeleteQueue.Clear();
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "abort");
}
int limit = PerThreadPool.ActiveThreadState;
// Abort every active thread state under its own lock so no thread keeps
// indexing into a DWPT that is being discarded.
for (int i = 0; i < limit; i++)
{
ThreadState perThread = PerThreadPool.GetThreadState(i);
perThread.@Lock();
try
{
AbortThreadState(perThread, newFilesSet);
}
finally
{
perThread.Unlock();
}
}
FlushControl.AbortPendingFlushes(newFilesSet);
// Queue deletion of all files produced by the aborted segments.
PutEvent(new DeleteNewFilesEvent(newFilesSet));
FlushControl.WaitForFlush();
success = true;
}
finally
{
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "done abort; abortedFiles=" + newFilesSet + " success=" + success);
}
}
}
}
/// <summary>
/// Aborts all thread states like <see cref="Abort(IndexWriter)"/>, but on success
/// leaves every state locked; the caller must later release them via
/// <see cref="UnlockAllAfterAbortAll(IndexWriter)"/>.
/// </summary>
internal void LockAndAbortAll(IndexWriter indexWriter)
{
lock (this)
{
//Debug.Assert(indexWriter.HoldsFullFlushLock());
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "lockAndAbortAll");
}
bool success = false;
try
{
DeleteQueue.Clear();
int limit = PerThreadPool.MaxThreadStates;
HashSet<string> newFilesSet = new HashSet<string>();
for (int i = 0; i < limit; i++)
{
ThreadState perThread = PerThreadPool.GetThreadState(i);
// Intentionally no matching Unlock in the success path: states stay
// locked until UnlockAllAfterAbortAll is called.
perThread.@Lock();
AbortThreadState(perThread, newFilesSet);
}
DeleteQueue.Clear();
FlushControl.AbortPendingFlushes(newFilesSet);
PutEvent(new DeleteNewFilesEvent(newFilesSet));
FlushControl.WaitForFlush();
success = true;
}
finally
{
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "finished lockAndAbortAll success=" + success);
}
if (!success)
{
// if something happens here we unlock all states again
UnlockAllAfterAbortAll(indexWriter);
}
}
}
}
/// <summary>
/// Aborts a single (already locked) thread state, collecting the discarded
/// segment's files into <paramref name="newFiles"/>.
/// </summary>
private void AbortThreadState(ThreadState perThread, ISet<string> newFiles)
{
//Debug.Assert(perThread.HeldByCurrentThread);
if (perThread.Active) // we might be closed
{
if (perThread.Initialized)
{
try
{
SubtractFlushedNumDocs(perThread.Dwpt.NumDocsInRAM);
perThread.Dwpt.Abort(newFiles);
}
finally
{
perThread.Dwpt.CheckAndResetHasAborted();
FlushControl.DoOnAbort(perThread);
}
}
else
{
FlushControl.DoOnAbort(perThread);
}
}
else
{
Debug.Assert(Closed);
}
}
/// <summary>
/// Releases the thread-state locks taken by <see cref="LockAndAbortAll(IndexWriter)"/>;
/// individual unlock failures are logged and ignored so every state is attempted.
/// </summary>
internal void UnlockAllAfterAbortAll(IndexWriter indexWriter)
{
lock (this)
{
//Debug.Assert(indexWriter.HoldsFullFlushLock());
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "unlockAll");
}
int limit = PerThreadPool.MaxThreadStates;
for (int i = 0; i < limit; i++)
{
try
{
ThreadState perThread = PerThreadPool.GetThreadState(i);
//if (perThread.HeldByCurrentThread)
//{
perThread.Unlock();
//}
}
catch (Exception e)
{
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "unlockAll: could not unlock state: " + i + " msg:" + e.Message);
}
// ignore & keep on unlocking
}
}
}
}
/// <summary>
/// True if any change (buffered docs, buffered deletes, unpublished flush
/// tickets, or changes preserved during a full flush) is not yet visible.
/// </summary>
internal bool AnyChanges()
{
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "anyChanges? numDocsInRam=" + NumDocsInRAM.Get() + " deletes=" + AnyDeletions() + " hasTickets:" + TicketQueue.HasTickets() + " pendingChangesInFullFlush: " + PendingChangesInCurrentFullFlush);
}
/*
* changes are either in a DWPT or in the deleteQueue.
* yet if we currently flush deletes and / or dwpt there
* could be a window where all changes are in the ticket queue
* before they are published to the IW. ie we need to check if the
* ticket queue has any tickets.
*/
return NumDocsInRAM.Get() != 0 || AnyDeletions() || TicketQueue.HasTickets() || PendingChangesInCurrentFullFlush;
}
/// <summary>Number of terms in the global buffered-updates map.</summary>
public int BufferedDeleteTermsSize
{
get
{
return DeleteQueue.BufferedUpdatesTermsSize;
}
}
//for testing
public int NumBufferedDeleteTerms
{
get
{
return DeleteQueue.NumGlobalTermDeletes();
}
}
/// <summary>True if the current delete queue holds any buffered changes.</summary>
public bool AnyDeletions()
{
return DeleteQueue.AnyChanges();
}
/// <summary>Marks this writer closed; buffered docs are not flushed here.</summary>
public void Dispose()
{
Closed = true;
FlushControl.SetClosed();
}
/// <summary>
/// Pre-indexing hook: if threads are stalled or DWPTs are queued for flush,
/// hijack this indexing thread to help flush until the backlog clears.
/// Returns true if any event was produced while helping.
/// </summary>
private bool PreUpdate()
{
EnsureOpen();
bool hasEvents = false;
if (FlushControl.AnyStalledThreads() || FlushControl.NumQueuedFlushes() > 0)
{
// Help out flushing any queued DWPTs so we can un-stall:
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "DocumentsWriter has queued dwpt; will hijack this thread to flush pending segment(s)");
}
do
{
// Try pick up pending threads here if possible
DocumentsWriterPerThread flushingDWPT;
while ((flushingDWPT = FlushControl.NextPendingFlush()) != null)
{
// Don't push the delete here since the update could fail!
hasEvents |= DoFlush(flushingDWPT);
}
if (InfoStream.IsEnabled("DW"))
{
if (FlushControl.AnyStalledThreads())
{
InfoStream.Message("DW", "WARNING DocumentsWriter has stalled threads; waiting");
}
}
FlushControl.WaitIfStalled(); // block if stalled
} while (FlushControl.NumQueuedFlushes() != 0); // still queued DWPTs try help flushing
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "continue indexing after helping out flushing DocumentsWriter is healthy");
}
}
return hasEvents;
}
/// <summary>
/// Post-indexing hook: applies pending deletes, then flushes either the DWPT
/// selected for this thread or the next pending one.
/// </summary>
private bool PostUpdate(DocumentsWriterPerThread flushingDWPT, bool hasEvents)
{
hasEvents |= ApplyAllDeletes(DeleteQueue);
if (flushingDWPT != null)
{
hasEvents |= DoFlush(flushingDWPT);
}
else
{
DocumentsWriterPerThread nextPendingFlush = FlushControl.NextPendingFlush();
if (nextPendingFlush != null)
{
hasEvents |= DoFlush(nextPendingFlush);
}
}
return hasEvents;
}
/// <summary>
/// Lazily allocates a DWPT for an active thread state that has none yet.
/// </summary>
private void EnsureInitialized(ThreadState state)
{
if (state.Active && state.Dwpt == null)
{
FieldInfos.Builder infos = new FieldInfos.Builder(Writer.GlobalFieldNumberMap);
state.Dwpt = new DocumentsWriterPerThread(Writer.NewSegmentName(), Directory, LIWConfig, InfoStream, DeleteQueue, infos);
}
}
/// <summary>
/// Atomically indexes a block of documents (optionally deleting
/// <paramref name="delTerm"/> first) through a checked-out per-thread DWPT;
/// may trigger a flush afterwards. Returns true if any event was produced.
/// </summary>
internal bool UpdateDocuments(IEnumerable<IEnumerable<IndexableField>> docs, Analyzer analyzer, Term delTerm)
{
bool hasEvents = PreUpdate();
ThreadState perThread = FlushControl.ObtainAndLock();
DocumentsWriterPerThread flushingDWPT;
try
{
if (!perThread.Active)
{
EnsureOpen();
Debug.Assert(false, "perThread is not active but we are still open");
}
EnsureInitialized(perThread);
Debug.Assert(perThread.Initialized);
DocumentsWriterPerThread dwpt = perThread.Dwpt;
int dwptNumDocs = dwpt.NumDocsInRAM;
try
{
int docCount = dwpt.UpdateDocuments(docs, analyzer, delTerm);
NumDocsInRAM.AddAndGet(docCount);
}
finally
{
// If the DWPT aborted, its buffered docs are lost: queue deletion of
// its files and take it out of rotation.
if (dwpt.CheckAndResetHasAborted())
{
if (dwpt.PendingFilesToDelete().Count > 0)
{
PutEvent(new DeleteNewFilesEvent(dwpt.PendingFilesToDelete()));
}
SubtractFlushedNumDocs(dwptNumDocs);
FlushControl.DoOnAbort(perThread);
}
}
bool isUpdate = delTerm != null;
flushingDWPT = FlushControl.DoAfterDocument(perThread, isUpdate);
}
finally
{
perThread.Unlock();
}
return PostUpdate(flushingDWPT, hasEvents);
}
/// <summary>
/// Single-document variant of <c>UpdateDocuments</c>: indexes one document
/// (optionally deleting <paramref name="delTerm"/> first) via a checked-out
/// per-thread DWPT; may trigger a flush afterwards.
/// </summary>
internal bool UpdateDocument(IEnumerable<IndexableField> doc, Analyzer analyzer, Term delTerm)
{
bool hasEvents = PreUpdate();
ThreadState perThread = FlushControl.ObtainAndLock();
DocumentsWriterPerThread flushingDWPT;
try
{
if (!perThread.Active)
{
EnsureOpen();
Debug.Assert(false, "perThread is not active but we are still open");
}
EnsureInitialized(perThread);
Debug.Assert(perThread.Initialized);
DocumentsWriterPerThread dwpt = perThread.Dwpt;
int dwptNumDocs = dwpt.NumDocsInRAM;
try
{
dwpt.UpdateDocument(doc, analyzer, delTerm);
NumDocsInRAM.IncrementAndGet();
}
finally
{
// Same abort cleanup as UpdateDocuments: drop the DWPT's files and
// take it out of rotation.
if (dwpt.CheckAndResetHasAborted())
{
if (dwpt.PendingFilesToDelete().Count > 0)
{
PutEvent(new DeleteNewFilesEvent(dwpt.PendingFilesToDelete()));
}
SubtractFlushedNumDocs(dwptNumDocs);
FlushControl.DoOnAbort(perThread);
}
}
bool isUpdate = delTerm != null;
flushingDWPT = FlushControl.DoAfterDocument(perThread, isUpdate);
}
finally
{
perThread.Unlock();
}
return PostUpdate(flushingDWPT, hasEvents);
}
/// <summary>
/// Flushes the given DWPT (and any further pending ones) to disk, publishing
/// each flushed segment through the ticket queue so segments and frozen
/// deletes are applied in flush order. Returns true if any event was queued.
/// </summary>
private bool DoFlush(DocumentsWriterPerThread flushingDWPT)
{
bool hasEvents = false;
while (flushingDWPT != null)
{
hasEvents = true;
bool success = false;
SegmentFlushTicket ticket = null;
try
{
Debug.Assert(CurrentFullFlushDelQueue == null || flushingDWPT.DeleteQueue == CurrentFullFlushDelQueue, "expected: " + CurrentFullFlushDelQueue + "but was: " + flushingDWPT.DeleteQueue + " " + FlushControl.FullFlush);
/*
* Since with DWPT the flush process is concurrent and several DWPT
* could flush at the same time we must maintain the order of the
* flushes before we can apply the flushed segment and the frozen global
* deletes it is buffering. The reason for this is that the global
* deletes mark a certain point in time where we took a DWPT out of
* rotation and freeze the global deletes.
*
* Example: A flush 'A' starts and freezes the global deletes, then
* flush 'B' starts and freezes all deletes occurred since 'A' has
* started. if 'B' finishes before 'A' we need to wait until 'A' is done
* otherwise the deletes frozen by 'B' are not applied to 'A' and we
* might miss to deletes documents in 'A'.
*/
try
{
// Each flush is assigned a ticket in the order they acquire the ticketQueue lock
ticket = TicketQueue.AddFlushTicket(flushingDWPT);
int flushingDocsInRam = flushingDWPT.NumDocsInRAM;
bool dwptSuccess = false;
try
{
// flush concurrently without locking
FlushedSegment newSegment = flushingDWPT.Flush();
TicketQueue.AddSegment(ticket, newSegment);
dwptSuccess = true;
}
finally
{
SubtractFlushedNumDocs(flushingDocsInRam);
if (flushingDWPT.PendingFilesToDelete().Count > 0)
{
PutEvent(new DeleteNewFilesEvent(flushingDWPT.PendingFilesToDelete()));
hasEvents = true;
}
if (!dwptSuccess)
{
PutEvent(new FlushFailedEvent(flushingDWPT.SegmentInfo));
hasEvents = true;
}
}
// flush was successful once we reached this point - new seg. has been assigned to the ticket!
success = true;
}
finally
{
if (!success && ticket != null)
{
// In the case of a failure make sure we are making progress and
// apply all the deletes since the segment flush failed since the flush
// ticket could hold global deletes see FlushTicket#canPublish()
TicketQueue.MarkTicketFailed(ticket);
}
}
/*
* Now we are done and try to flush the ticket queue if the head of the
* queue has already finished the flush.
*/
if (TicketQueue.TicketCount >= PerThreadPool.ActiveThreadState)
{
// this means there is a backlog: the one
// thread in innerPurge can't keep up with all
// other threads flushing segments. In this case
// we forcefully stall the producers.
PutEvent(ForcedPurgeEvent.INSTANCE);
break;
}
}
finally
{
FlushControl.DoAfterFlush(flushingDWPT);
flushingDWPT.CheckAndResetHasAborted();
}
flushingDWPT = FlushControl.NextPendingFlush();
}
if (hasEvents)
{
PutEvent(MergePendingEvent.INSTANCE);
}
// If deletes alone are consuming > 1/2 our RAM
// buffer, force them all to apply now. this is to
// prevent too-frequent flushing of a long tail of
// tiny segments:
double ramBufferSizeMB = LIWConfig.RAMBufferSizeMB;
if (ramBufferSizeMB != IndexWriterConfig.DISABLE_AUTO_FLUSH && FlushControl.DeleteBytesUsed > (1024 * 1024 * ramBufferSizeMB / 2))
{
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "force apply deletes bytesUsed=" + FlushControl.DeleteBytesUsed + " vs ramBuffer=" + (1024 * 1024 * ramBufferSizeMB));
}
hasEvents = true;
if (!this.ApplyAllDeletes(DeleteQueue))
{
PutEvent(ApplyDeletesEvent.INSTANCE);
}
}
return hasEvents;
}
/// <summary>
/// Atomically decrements the global in-RAM document counter by
/// <paramref name="numFlushed"/> using a compare-and-set retry loop.
/// </summary>
internal void SubtractFlushedNumDocs(int numFlushed)
{
int current;
do
{
current = NumDocsInRAM.Get();
} while (!NumDocsInRAM.CompareAndSet(current, current - numFlushed));
}
// for asserts
private volatile DocumentsWriterDeleteQueue CurrentFullFlushDelQueue = null;
// for asserts
// Always returns true so the call can live inside Debug.Assert(...) and be
// compiled away in release builds.
private bool SetFlushingDeleteQueue(DocumentsWriterDeleteQueue session)
{
lock (this)
{
CurrentFullFlushDelQueue = session;
return true;
}
}
/*
* FlushAllThreads is synced by IW fullFlushLock. Flushing all threads is a
* two stage operation; the caller must ensure (in try/finally) that finishFlush
* is called after this method, to release the flush lock in DWFlushControl
*/
/// <summary>
/// Stage one of a full flush: cuts over to a fresh delete queue, flushes all
/// pending DWPTs (helping concurrently flushing threads finish), and force
/// purges the ticket queue. Returns true if any document was flushed.
/// </summary>
internal bool FlushAllThreads(IndexWriter indexWriter)
{
DocumentsWriterDeleteQueue flushingDeleteQueue;
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", "startFullFlush");
}
lock (this)
{
PendingChangesInCurrentFullFlush = AnyChanges();
flushingDeleteQueue = DeleteQueue;
/* Cutover to a new delete queue. this must be synced on the flush control
* otherwise a new DWPT could sneak into the loop with an already flushing
* delete queue */
FlushControl.MarkForFullFlush(); // swaps the delQueue synced on FlushControl
Debug.Assert(SetFlushingDeleteQueue(flushingDeleteQueue));
}
Debug.Assert(CurrentFullFlushDelQueue != null);
Debug.Assert(CurrentFullFlushDelQueue != DeleteQueue);
bool anythingFlushed = false;
try
{
DocumentsWriterPerThread flushingDWPT;
// Help out with flushing:
while ((flushingDWPT = FlushControl.NextPendingFlush()) != null)
{
anythingFlushed |= DoFlush(flushingDWPT);
}
// If a concurrent flush is still in flight wait for it
FlushControl.WaitForFlush();
if (!anythingFlushed && flushingDeleteQueue.AnyChanges()) // apply deletes if we did not flush any document
{
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", Thread.CurrentThread.Name + ": flush naked frozen global deletes");
}
TicketQueue.AddDeletes(flushingDeleteQueue);
}
TicketQueue.ForcePurge(indexWriter);
Debug.Assert(!flushingDeleteQueue.AnyChanges() && !TicketQueue.HasTickets());
}
finally
{
Debug.Assert(flushingDeleteQueue == CurrentFullFlushDelQueue);
}
return anythingFlushed;
}
/// <summary>
/// Stage two of a full flush: releases the flush lock on success, or aborts
/// all pending full-flush segments and queues their files for deletion.
/// Must always be called after <see cref="FlushAllThreads(IndexWriter)"/>.
/// </summary>
internal void FinishFullFlush(bool success)
{
try
{
if (InfoStream.IsEnabled("DW"))
{
InfoStream.Message("DW", Thread.CurrentThread.Name + " finishFullFlush success=" + success);
}
Debug.Assert(SetFlushingDeleteQueue(null));
if (success)
{
// Release the flush lock
FlushControl.FinishFullFlush();
}
else
{
HashSet<string> newFilesSet = new HashSet<string>();
FlushControl.AbortFullFlushes(newFilesSet);
PutEvent(new DeleteNewFilesEvent(newFilesSet));
}
}
finally
{
PendingChangesInCurrentFullFlush = false;
}
}
/// <summary>The live configuration this writer was created with.</summary>
public LiveIndexWriterConfig Config
{
get
{
return LIWConfig;
}
}
/// <summary>Enqueues an event for the IndexWriter to process later.</summary>
private void PutEvent(Event @event)
{
Events.Enqueue(@event);
}
// Singleton event: applies buffered deletes and always purges the ticket queue.
internal sealed class ApplyDeletesEvent : Event
{
internal static readonly Event INSTANCE = new ApplyDeletesEvent();
internal int InstCount = 0;
internal ApplyDeletesEvent()
{
// Guard: only the INSTANCE singleton should ever be constructed.
Debug.Assert(InstCount == 0);
InstCount++;
}
public void Process(IndexWriter writer, bool triggerMerge, bool forcePurge)
{
writer.ApplyDeletesAndPurge(true); // we always purge!
}
}
// Singleton event: runs post-segment-flush work (possibly kicking off merges).
internal sealed class MergePendingEvent : Event
{
internal static readonly Event INSTANCE = new MergePendingEvent();
internal int InstCount = 0;
internal MergePendingEvent()
{
Debug.Assert(InstCount == 0);
InstCount++;
}
public void Process(IndexWriter writer, bool triggerMerge, bool forcePurge)
{
writer.DoAfterSegmentFlushed(triggerMerge, forcePurge);
}
}
// Singleton event: forces a blocking purge of the ticket queue.
internal sealed class ForcedPurgeEvent : Event
{
internal static readonly Event INSTANCE = new ForcedPurgeEvent();
internal int InstCount = 0;
internal ForcedPurgeEvent()
{
Debug.Assert(InstCount == 0);
InstCount++;
}
public void Process(IndexWriter writer, bool triggerMerge, bool forcePurge)
{
writer.Purge(true);
}
}
// Event carrying the SegmentInfo of a flush that failed, for cleanup by the writer.
internal class FlushFailedEvent : Event
{
internal readonly SegmentInfo Info;
public FlushFailedEvent(SegmentInfo info)
{
this.Info = info;
}
public void Process(IndexWriter writer, bool triggerMerge, bool forcePurge)
{
writer.FlushFailed(Info);
}
}
// Event carrying file names to be deleted by the writer (e.g. after an abort).
internal class DeleteNewFilesEvent : Event
{
internal readonly ICollection<string> Files;
public DeleteNewFilesEvent(ICollection<string> files)
{
this.Files = files;
}
public void Process(IndexWriter writer, bool triggerMerge, bool forcePurge)
{
writer.DeleteNewFiles(Files);
}
}
/// <summary>The queue of pending events for the IndexWriter to drain.</summary>
public ConcurrentQueue<Event> EventQueue()
{
return Events;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Binary;
namespace System.Text.Http.Parser.Internal
{
internal static class HttpUtilities
{
public const string Http10Version = "HTTP/1.0";
public const string Http11Version = "HTTP/1.1";
public const string HttpUriScheme = "http://";
public const string HttpsUriScheme = "https://";
// readonly primitive statics can be Jit'd to consts https://github.com/dotnet/coreclr/issues/1079
private readonly static ulong _httpSchemeLong = GetAsciiStringAsLong(HttpUriScheme + "\0");
private readonly static ulong _httpsSchemeLong = GetAsciiStringAsLong(HttpsUriScheme);
private readonly static ulong _httpConnectMethodLong = GetAsciiStringAsLong("CONNECT ");
private readonly static ulong _httpDeleteMethodLong = GetAsciiStringAsLong("DELETE \0");
private const uint _httpGetMethodInt = 542393671; // retun of GetAsciiStringAsInt("GET "); const results in better codegen
private readonly static ulong _httpHeadMethodLong = GetAsciiStringAsLong("HEAD \0\0\0");
private readonly static ulong _httpPatchMethodLong = GetAsciiStringAsLong("PATCH \0\0");
private readonly static ulong _httpPostMethodLong = GetAsciiStringAsLong("POST \0\0\0");
private readonly static ulong _httpPutMethodLong = GetAsciiStringAsLong("PUT \0\0\0\0");
private readonly static ulong _httpOptionsMethodLong = GetAsciiStringAsLong("OPTIONS ");
private readonly static ulong _httpTraceMethodLong = GetAsciiStringAsLong("TRACE \0\0");
private const ulong _http10VersionLong = 3471766442030158920; // GetAsciiStringAsLong("HTTP/1.0"); const results in better codegen
private const ulong _http11VersionLong = 3543824036068086856; // GetAsciiStringAsLong("HTTP/1.1"); const results in better codegen
private readonly static ulong _mask8Chars = GetMaskAsLong(new byte[] { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff });
private readonly static ulong _mask7Chars = GetMaskAsLong(new byte[] { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00 });
private readonly static ulong _mask6Chars = GetMaskAsLong(new byte[] { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00 });
private readonly static ulong _mask5Chars = GetMaskAsLong(new byte[] { 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00 });
private readonly static ulong _mask4Chars = GetMaskAsLong(new byte[] { 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00 });
private readonly static Tuple<ulong, ulong, Http.Method, int>[] _knownMethods =
{
Tuple.Create(_mask4Chars, _httpPutMethodLong, Http.Method.Put, 3),
Tuple.Create(_mask5Chars, _httpPostMethodLong, Http.Method.Post, 4),
Tuple.Create(_mask5Chars, _httpHeadMethodLong, Http.Method.Head, 4),
Tuple.Create(_mask6Chars, _httpTraceMethodLong, Http.Method.Trace, 5),
Tuple.Create(_mask6Chars, _httpPatchMethodLong, Http.Method.Patch, 5),
Tuple.Create(_mask7Chars, _httpDeleteMethodLong, Http.Method.Delete, 6),
Tuple.Create(_mask8Chars, _httpConnectMethodLong, Http.Method.Connect, 7),
Tuple.Create(_mask8Chars, _httpOptionsMethodLong, Http.Method.Options, 7),
};
private readonly static string[] _methodNames = CreateMethodNames();
private static string[] CreateMethodNames()
{
var methodNames = new string[9];
methodNames[(byte)Http.Method.Get] = "GET";
methodNames[(byte)Http.Method.Put] = "PUT";
methodNames[(byte)Http.Method.Delete] = "DELETE";
methodNames[(byte)Http.Method.Post] = "POST";
methodNames[(byte)Http.Method.Head] = "HEAD";
methodNames[(byte)Http.Method.Trace] = "TRACE";
methodNames[(byte)Http.Method.Patch] = "PATCH";
methodNames[(byte)Http.Method.Connect] = "CONNECT";
methodNames[(byte)Http.Method.Options] = "OPTIONS";
return methodNames;
}
private unsafe static ulong GetAsciiStringAsLong(string str)
{
Debug.Assert(str.Length == 8, "String must be exactly 8 (ASCII) characters long.");
var buffer = stackalloc byte[8];
Span<byte> span = new Span<byte>(buffer, 8);
TextEncoder.Utf8.TryEncode(str, span, out var written);
return span.Read<ulong>();
}
private unsafe static uint GetAsciiStringAsInt(string str)
{
Debug.Assert(str.Length == 4, "String must be exactly 4 (ASCII) characters long.");
var buffer = stackalloc byte[4];
Span<byte> span = new Span<byte>(buffer, 4);
TextEncoder.Utf8.TryEncode(str, span, out var written);
return span.Read<uint>();
}
private unsafe static ulong GetMaskAsLong(byte[] bytes)
{
Debug.Assert(bytes.Length == 8, "Mask must be exactly 8 bytes long.");
fixed (byte* ptr = bytes)
{
return *(ulong*)ptr;
}
}
public unsafe static string GetAsciiStringNonNullCharacters(this Span<byte> span)
{
if (span.IsEmpty)
{
return string.Empty;
}
var asciiString = new string('\0', span.Length);
fixed (char* output = asciiString)
fixed (byte* buffer = &span.DangerousGetPinnableReference())
{
// This version if AsciiUtilities returns null if there are any null (0 byte) characters
// in the string
if (!AsciiUtilities.TryGetAsciiString(buffer, output, span.Length))
{
throw new InvalidOperationException();
}
}
return asciiString;
}
public static string GetAsciiStringEscaped(this Span<byte> span, int maxChars)
{
var sb = new StringBuilder();
for (var i = 0; i < Math.Min(span.Length, maxChars); i++)
{
var ch = span[i];
sb.Append(ch < 0x20 || ch >= 0x7F ? $"\\x{ch:X2}" : ((char)ch).ToString());
}
if (span.Length > maxChars)
{
sb.Append("...");
}
return sb.ToString();
}
/// <summary>
/// Checks that up to 8 bytes from <paramref name="span"/> correspond to a known HTTP method.
/// </summary>
/// <remarks>
/// A "known HTTP method" can be an HTTP method name defined in the HTTP/1.1 RFC.
/// Since all of those fit in at most 8 bytes, they can be optimally looked up by reading those bytes as a long. Once
/// in that format, it can be checked against the known method.
/// The Known Methods (CONNECT, DELETE, GET, HEAD, PATCH, POST, PUT, OPTIONS, TRACE) are all less than 8 bytes
/// and will be compared with the required space. A mask is used if the Known method is less than 8 bytes.
/// To optimize performance the GET method will be checked first.
/// </remarks>
/// <returns><c>true</c> if the input matches a known string, <c>false</c> otherwise.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static unsafe bool GetKnownMethod(this Span<byte> span, out Http.Method method, out int length)
{
fixed (byte* data = &span.DangerousGetPinnableReference())
{
method = GetKnownMethod(data, span.Length, out length);
return method != Http.Method.Custom;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal unsafe static Http.Method GetKnownMethod(byte* data, int length, out int methodLength)
{
methodLength = 0;
if (length < sizeof(uint))
{
return Http.Method.Custom;
}
else if (*(uint*)data == _httpGetMethodInt)
{
methodLength = 3;
return Http.Method.Get;
}
else if (length < sizeof(ulong))
{
return Http.Method.Custom;
}
else
{
var value = *(ulong*)data;
foreach (var x in _knownMethods)
{
if ((value & x.Item1) == x.Item2)
{
methodLength = x.Item4;
return x.Item3;
}
}
}
return Http.Method.Custom;
}
/// <summary>
/// Checks 9 bytes from <paramref name="span"/> correspond to a known HTTP version.
/// </summary>
/// <remarks>
/// A "known HTTP version" Is is either HTTP/1.0 or HTTP/1.1.
/// Since those fit in 8 bytes, they can be optimally looked up by reading those bytes as a long. Once
/// in that format, it can be checked against the known versions.
/// The Known versions will be checked with the required '\r'.
/// To optimize performance the HTTP/1.1 will be checked first.
/// </remarks>
/// <returns><c>true</c> if the input matches a known string, <c>false</c> otherwise.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static unsafe bool GetKnownVersion(this Span<byte> span, out Http.Version knownVersion, out byte length)
{
fixed (byte* data = &span.DangerousGetPinnableReference())
{
knownVersion = GetKnownVersion(data, span.Length);
if (knownVersion != Http.Version.Unknown)
{
length = sizeof(ulong);
return true;
}
length = 0;
return false;
}
}
/// <summary>
/// Checks 9 bytes from <paramref name="location"/> correspond to a known HTTP version.
/// </summary>
/// <remarks>
/// A "known HTTP version" Is is either HTTP/1.0 or HTTP/1.1.
/// Since those fit in 8 bytes, they can be optimally looked up by reading those bytes as a long. Once
/// in that format, it can be checked against the known versions.
/// The Known versions will be checked with the required '\r'.
/// To optimize performance the HTTP/1.1 will be checked first.
/// </remarks>
/// <returns><c>true</c> if the input matches a known string, <c>false</c> otherwise.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal unsafe static Http.Version GetKnownVersion(byte* location, int length)
{
Http.Version knownVersion;
var version = *(ulong*)location;
if (length < sizeof(ulong) + 1 || location[sizeof(ulong)] != (byte)'\r')
{
knownVersion = Http.Version.Unknown;
}
else if (version == _http11VersionLong)
{
knownVersion = Http.Version.Http11;
}
else if (version == _http10VersionLong)
{
knownVersion = Http.Version.Http10;
}
else
{
knownVersion = Http.Version.Unknown;
}
return knownVersion;
}
/// <summary>
/// Checks 8 bytes from <paramref name="span"/> that correspond to 'http://' or 'https://'
/// </summary>
/// <param name="span">The span</param>
/// <param name="knownScheme">A reference to the known scheme, if the input matches any</param>
/// <returns>True when memory starts with known http or https schema</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static unsafe bool GetKnownHttpScheme(this Span<byte> span, out HttpScheme knownScheme)
{
fixed (byte* data = &span.DangerousGetPinnableReference())
{
return GetKnownHttpScheme(data, span.Length, out knownScheme);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe bool GetKnownHttpScheme(byte* location, int length, out HttpScheme knownScheme)
{
if (length >= sizeof(ulong))
{
var scheme = *(ulong*)location;
if ((scheme & _mask7Chars) == _httpSchemeLong)
{
knownScheme = HttpScheme.Http;
return true;
}
if (scheme == _httpsSchemeLong)
{
knownScheme = HttpScheme.Https;
return true;
}
}
knownScheme = HttpScheme.Unknown;
return false;
}
public static string VersionToString(Http.Version httpVersion)
{
switch (httpVersion)
{
case Http.Version.Http10:
return Http10Version;
case Http.Version.Http11:
return Http11Version;
default:
return null;
}
}
public static string MethodToString(Http.Method method)
{
int methodIndex = (int)method;
if (methodIndex >= 0 && methodIndex <= 8)
{
return _methodNames[methodIndex];
}
return null;
}
public static string SchemeToString(HttpScheme scheme)
{
switch (scheme)
{
case HttpScheme.Http:
return HttpUriScheme;
case HttpScheme.Https:
return HttpsUriScheme;
default:
return null;
}
}
}
}
| |
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Text;
namespace ZXing.Common.ReedSolomon
{
/// <summary>
/// <p>Represents a polynomial whose coefficients are elements of a GF.
/// Instances of this class are immutable.</p>
/// <p>Much credit is due to William Rucklidge since portions of this code are an indirect
/// port of his C++ Reed-Solomon implementation.</p>
/// </summary>
/// <author>Sean Owen</author>
internal sealed class GenericGFPoly
{
private readonly GenericGF field;
private readonly int[] coefficients;
/// <summary>
/// Initializes a new instance of the <see cref="GenericGFPoly"/> class.
/// </summary>
/// <param name="field">the {@link GenericGF} instance representing the field to use
/// to perform computations</param>
/// <param name="coefficients">coefficients as ints representing elements of GF(size), arranged
/// from most significant (highest-power term) coefficient to least significant</param>
/// <exception cref="ArgumentException">if argument is null or empty,
/// or if leading coefficient is 0 and this is not a
/// constant polynomial (that is, it is not the monomial "0")</exception>
internal GenericGFPoly(GenericGF field, int[] coefficients)
{
if (coefficients.Length == 0)
{
throw new ArgumentException();
}
this.field = field;
int coefficientsLength = coefficients.Length;
if (coefficientsLength > 1 && coefficients[0] == 0)
{
// Leading term must be non-zero for anything except the constant polynomial "0"
int firstNonZero = 1;
while (firstNonZero < coefficientsLength && coefficients[firstNonZero] == 0)
{
firstNonZero++;
}
if (firstNonZero == coefficientsLength)
{
this.coefficients = new int[] { 0 };
}
else
{
this.coefficients = new int[coefficientsLength - firstNonZero];
Array.Copy(coefficients,
firstNonZero,
this.coefficients,
0,
this.coefficients.Length);
}
}
else
{
this.coefficients = coefficients;
}
}
internal int[] Coefficients
{
get { return coefficients; }
}
/// <summary>
/// degree of this polynomial
/// </summary>
internal int Degree
{
get
{
return coefficients.Length - 1;
}
}
/// <summary>
/// Gets a value indicating whether this <see cref="GenericGFPoly"/> is zero.
/// </summary>
/// <value>true iff this polynomial is the monomial "0"</value>
internal bool isZero
{
get { return coefficients[0] == 0; }
}
/// <summary>
/// coefficient of x^degree term in this polynomial
/// </summary>
/// <param name="degree">The degree.</param>
/// <returns>coefficient of x^degree term in this polynomial</returns>
internal int getCoefficient(int degree)
{
return coefficients[coefficients.Length - 1 - degree];
}
/// <summary>
/// evaluation of this polynomial at a given point
/// </summary>
/// <param name="a">A.</param>
/// <returns>evaluation of this polynomial at a given point</returns>
internal int evaluateAt(int a)
{
int result = 0;
if (a == 0)
{
// Just return the x^0 coefficient
return getCoefficient(0);
}
if (a == 1)
{
// Just the sum of the coefficients
foreach (var coefficient in coefficients)
{
result = GenericGF.addOrSubtract(result, coefficient);
}
return result;
}
result = coefficients[0];
int size = coefficients.Length;
for (int i = 1; i < size; i++)
{
result = GenericGF.addOrSubtract(field.multiply(a, result), coefficients[i]);
}
return result;
}
internal GenericGFPoly addOrSubtract(GenericGFPoly other)
{
if (!field.Equals(other.field))
{
throw new ArgumentException("GenericGFPolys do not have same GenericGF field");
}
if (isZero)
{
return other;
}
if (other.isZero)
{
return this;
}
int[] smallerCoefficients = this.coefficients;
int[] largerCoefficients = other.coefficients;
if (smallerCoefficients.Length > largerCoefficients.Length)
{
int[] temp = smallerCoefficients;
smallerCoefficients = largerCoefficients;
largerCoefficients = temp;
}
int[] sumDiff = new int[largerCoefficients.Length];
int lengthDiff = largerCoefficients.Length - smallerCoefficients.Length;
// Copy high-order terms only found in higher-degree polynomial's coefficients
Array.Copy(largerCoefficients, 0, sumDiff, 0, lengthDiff);
for (int i = lengthDiff; i < largerCoefficients.Length; i++)
{
sumDiff[i] = GenericGF.addOrSubtract(smallerCoefficients[i - lengthDiff], largerCoefficients[i]);
}
return new GenericGFPoly(field, sumDiff);
}
internal GenericGFPoly multiply(GenericGFPoly other)
{
if (!field.Equals(other.field))
{
throw new ArgumentException("GenericGFPolys do not have same GenericGF field");
}
if (isZero || other.isZero)
{
return field.Zero;
}
int[] aCoefficients = this.coefficients;
int aLength = aCoefficients.Length;
int[] bCoefficients = other.coefficients;
int bLength = bCoefficients.Length;
int[] product = new int[aLength + bLength - 1];
for (int i = 0; i < aLength; i++)
{
int aCoeff = aCoefficients[i];
for (int j = 0; j < bLength; j++)
{
product[i + j] = GenericGF.addOrSubtract(product[i + j],
field.multiply(aCoeff, bCoefficients[j]));
}
}
return new GenericGFPoly(field, product);
}
internal GenericGFPoly multiply(int scalar)
{
if (scalar == 0)
{
return field.Zero;
}
if (scalar == 1)
{
return this;
}
int size = coefficients.Length;
int[] product = new int[size];
for (int i = 0; i < size; i++)
{
product[i] = field.multiply(coefficients[i], scalar);
}
return new GenericGFPoly(field, product);
}
internal GenericGFPoly multiplyByMonomial(int degree, int coefficient)
{
if (degree < 0)
{
throw new ArgumentException();
}
if (coefficient == 0)
{
return field.Zero;
}
int size = coefficients.Length;
int[] product = new int[size + degree];
for (int i = 0; i < size; i++)
{
product[i] = field.multiply(coefficients[i], coefficient);
}
return new GenericGFPoly(field, product);
}
internal GenericGFPoly[] divide(GenericGFPoly other)
{
if (!field.Equals(other.field))
{
throw new ArgumentException("GenericGFPolys do not have same GenericGF field");
}
if (other.isZero)
{
throw new ArgumentException("Divide by 0");
}
GenericGFPoly quotient = field.Zero;
GenericGFPoly remainder = this;
int denominatorLeadingTerm = other.getCoefficient(other.Degree);
int inverseDenominatorLeadingTerm = field.inverse(denominatorLeadingTerm);
while (remainder.Degree >= other.Degree && !remainder.isZero)
{
int degreeDifference = remainder.Degree - other.Degree;
int scale = field.multiply(remainder.getCoefficient(remainder.Degree), inverseDenominatorLeadingTerm);
GenericGFPoly term = other.multiplyByMonomial(degreeDifference, scale);
GenericGFPoly iterationQuotient = field.buildMonomial(degreeDifference, scale);
quotient = quotient.addOrSubtract(iterationQuotient);
remainder = remainder.addOrSubtract(term);
}
return new GenericGFPoly[] { quotient, remainder };
}
public override String ToString()
{
if (isZero)
{
return "0";
}
StringBuilder result = new StringBuilder(8 * Degree);
for (int degree = Degree; degree >= 0; degree--)
{
int coefficient = getCoefficient(degree);
if (coefficient != 0)
{
if (coefficient < 0)
{
if (degree == Degree)
{
result.Append("-");
}
else
{
result.Append(" - ");
}
coefficient = -coefficient;
}
else
{
if (result.Length > 0)
{
result.Append(" + ");
}
}
if (degree == 0 || coefficient != 1)
{
int alphaPower = field.log(coefficient);
if (alphaPower == 0)
{
result.Append('1');
}
else if (alphaPower == 1)
{
result.Append('a');
}
else
{
result.Append("a^");
result.Append(alphaPower);
}
}
if (degree != 0)
{
if (degree == 1)
{
result.Append('x');
}
else
{
result.Append("x^");
result.Append(degree);
}
}
}
}
return result.ToString();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
** Class: SortedList
**
** Purpose: Represents a collection of key/value pairs
** that are sorted by the keys and are accessible
** by key and by index.
**
===========================================================*/
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
using System.Globalization;
namespace System.Collections
{
// The SortedList class implements a sorted list of keys and values. Entries in
// a sorted list are sorted by their keys and are accessible both by key and by
// index. The keys of a sorted list can be ordered either according to a
// specific IComparer implementation given when the sorted list is
// instantiated, or according to the IComparable implementation provided
// by the keys themselves. In either case, a sorted list does not allow entries
// with duplicate keys.
//
// A sorted list internally maintains two arrays that store the keys and
// values of the entries. The capacity of a sorted list is the allocated
// length of these internal arrays. As elements are added to a sorted list, the
// capacity of the sorted list is automatically increased as required by
// reallocating the internal arrays. The capacity is never automatically
// decreased, but users can call either TrimToSize or
// Capacity explicitly.
//
// The GetKeyList and GetValueList methods of a sorted list
// provides access to the keys and values of the sorted list in the form of
// List implementations. The List objects returned by these
// methods are aliases for the underlying sorted list, so modifications
// made to those lists are directly reflected in the sorted list, and vice
// versa.
//
// The SortedList class provides a convenient way to create a sorted
// copy of another dictionary, such as a Hashtable. For example:
//
// Hashtable h = new Hashtable();
// h.Add(...);
// h.Add(...);
// ...
// SortedList s = new SortedList(h);
//
// The last line above creates a sorted list that contains a copy of the keys
// and values stored in the hashtable. In this particular example, the keys
// will be ordered according to the IComparable interface, which they
// all must implement. To impose a different ordering, SortedList also
// has a constructor that allows a specific IComparer implementation to
// be specified.
//
[DebuggerTypeProxy(typeof(System.Collections.SortedList.SortedListDebugView))]
[DebuggerDisplay("Count = {Count}")]
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
public class SortedList : IDictionary, ICloneable
{
        // Parallel arrays: keys[i] pairs with values[i]; both kept sorted by key.
        // NOTE: field names are part of the binary-serialization format.
        private Object[] keys; // Do not rename (binary serialization)
        private Object[] values; // Do not rename (binary serialization)
        private int _size; // Do not rename (binary serialization)
        private int version; // Do not rename (binary serialization)
        private IComparer comparer; // Do not rename (binary serialization)
        private KeyList keyList; // Do not rename (binary serialization)
        private ValueList valueList; // Do not rename (binary serialization)
        [NonSerialized]
        private Object _syncRoot;
        private const int _defaultCapacity = 16;
        // Copy of Array.MaxArrayLength
        internal const int MaxArrayLength = 0X7FEFFFFF;
        // Constructs a new sorted list. The sorted list is initially empty and has
        // a capacity of zero. Upon adding the first element to the sorted list the
        // capacity is increased to 16, and then increased in multiples of two as
        // required. The elements of the sorted list are ordered according to the
        // IComparable interface, which must be implemented by the keys of
        // all entries added to the sorted list.
        public SortedList()
        {
            Init();
        }
private void Init()
{
keys = Array.Empty<Object>();
values = Array.Empty<Object>();
_size = 0;
comparer = new Comparer(CultureInfo.CurrentCulture);
}
        // Constructs a new sorted list. The sorted list is initially empty and has
        // a capacity of zero. Upon adding the first element to the sorted list the
        // capacity is increased to 16, and then increased in multiples of two as
        // required. The elements of the sorted list are ordered according to the
        // IComparable interface, which must be implemented by the keys of
        // all entries added to the sorted list.
        //
        // Throws ArgumentOutOfRangeException when initialCapacity is negative.
        public SortedList(int initialCapacity)
        {
            if (initialCapacity < 0)
                throw new ArgumentOutOfRangeException(nameof(initialCapacity), SR.ArgumentOutOfRange_NeedNonNegNum);
            Contract.EndContractBlock();
            keys = new Object[initialCapacity];
            values = new Object[initialCapacity];
            // _size stays 0; comparer mirrors the default set up by Init().
            comparer = new Comparer(CultureInfo.CurrentCulture);
        }
// Constructs a new sorted list with a given IComparer
// implementation. The sorted list is initially empty and has a capacity of
// zero. Upon adding the first element to the sorted list the capacity is
// increased to 16, and then increased in multiples of two as required. The
// elements of the sorted list are ordered according to the given
// IComparer implementation. If comparer is null, the
// elements are compared to each other using the IComparable
// interface, which in that case must be implemented by the keys of all
// entries added to the sorted list.
//
public SortedList(IComparer comparer)
: this()
{
if (comparer != null) this.comparer = comparer;
}
        // Constructs a new sorted list with a given IComparer
        // implementation and a given initial capacity. The sorted list is
        // initially empty, but will have room for the given number of elements
        // before any reallocations are required. The elements of the sorted list
        // are ordered according to the given IComparer implementation. If
        // comparer is null, the elements are compared to each other using
        // the IComparable interface, which in that case must be implemented
        // by the keys of all entries added to the sorted list.
        //
        public SortedList(IComparer comparer, int capacity)
            : this(comparer)
        {
            // The Capacity setter validates capacity and allocates the arrays.
            Capacity = capacity;
        }
        // Constructs a new sorted list containing a copy of the entries in the
        // given dictionary. The elements of the sorted list are ordered according
        // to the IComparable interface, which must be implemented by the
        // keys of all entries in the given dictionary as well as keys
        // subsequently added to the sorted list.
        //
        public SortedList(IDictionary d)
            : this(d, null)
        {
        }
        // Constructs a new sorted list containing a copy of the entries in the
        // given dictionary. The elements of the sorted list are ordered according
        // to the given IComparer implementation. If comparer is
        // null, the elements are compared to each other using the
        // IComparable interface, which in that case must be implemented
        // by the keys of all entries in the given dictionary as well as keys
        // subsequently added to the sorted list.
        //
        public SortedList(IDictionary d, IComparer comparer)
            : this(comparer, (d != null ? d.Count : 0))
        {
            if (d == null)
                throw new ArgumentNullException(nameof(d), SR.ArgumentNull_Dictionary);
            Contract.EndContractBlock();
            d.Keys.CopyTo(keys, 0);
            d.Values.CopyTo(values, 0);
            // Array.Sort(Array keys, Array values, IComparer comparer) does not exist in System.Runtime contract v4.0.10.0.
            // This works around that by sorting only on the keys and then assigning values accordingly.
            Array.Sort(keys, comparer);
            for (int i = 0; i < keys.Length; i++)
            {
                // Re-fetch each value from the source dictionary by its (now
                // sorted) key so values line up with the sorted key order.
                values[i] = d[keys[i]];
            }
            _size = d.Count;
        }
// Adds an entry with the given key and value to this sorted list. An
// ArgumentException is thrown if the key is already present in the sorted list.
//
public virtual void Add(Object key, Object value)
{
if (key == null) throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
Contract.EndContractBlock();
int i = Array.BinarySearch(keys, 0, _size, key, comparer);
if (i >= 0)
throw new ArgumentException(SR.Format(SR.Argument_AddingDuplicate__, GetKey(i), key));
Insert(~i, key, value);
}
        // Returns the capacity of this sorted list. The capacity of a sorted list
        // represents the allocated length of the internal arrays used to store the
        // keys and values of the list, and thus also indicates the maximum number
        // of entries the list can contain before a reallocation of the internal
        // arrays is required.
        //
        // Setting a capacity smaller than Count throws; setting the current
        // capacity is a no-op.
        public virtual int Capacity
        {
            get
            {
                return keys.Length;
            }
            set
            {
                if (value < Count)
                {
                    throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_SmallCapacity);
                }
                Contract.EndContractBlock();
                if (value != keys.Length)
                {
                    if (value > 0)
                    {
                        // Reallocate both parallel arrays and copy existing entries.
                        Object[] newKeys = new Object[value];
                        Object[] newValues = new Object[value];
                        if (_size > 0)
                        {
                            Array.Copy(keys, 0, newKeys, 0, _size);
                            Array.Copy(values, 0, newValues, 0, _size);
                        }
                        keys = newKeys;
                        values = newValues;
                    }
                    else
                    {
                        // size can only be zero here.
                        Debug.Assert(_size == 0, "Size is not zero");
                        keys = Array.Empty<Object>();
                        values = Array.Empty<Object>();
                    }
                }
            }
        }
// Returns the number of entries in this sorted list.
//
public virtual int Count
{
get
{
return _size;
}
}
// Returns a collection representing the keys of this sorted list. This
// method returns the same object as GetKeyList, but typed as an
// ICollection instead of an IList.
//
public virtual ICollection Keys
{
get
{
return GetKeyList();
}
}
// Returns a collection representing the values of this sorted list. This
// method returns the same object as GetValueList, but typed as an
// ICollection instead of an IList.
//
public virtual ICollection Values
{
get
{
return GetValueList();
}
}
        // Is this SortedList read-only? Always false for this implementation.
        public virtual bool IsReadOnly
        {
            get { return false; }
        }
        // Is this SortedList fixed-size? Always false for this implementation.
        public virtual bool IsFixedSize
        {
            get { return false; }
        }
        // Is this SortedList synchronized (thread-safe)? No; callers must
        // synchronize externally (e.g. via SyncRoot).
        public virtual bool IsSynchronized
        {
            get { return false; }
        }
        // Synchronization root for this object. Lazily created with a
        // race-safe Interlocked.CompareExchange so all callers see one object.
        public virtual Object SyncRoot
        {
            get
            {
                if (_syncRoot == null)
                {
                    System.Threading.Interlocked.CompareExchange<Object>(ref _syncRoot, new Object(), null);
                }
                return _syncRoot;
            }
        }
        // Removes all entries from this sorted list.
        public virtual void Clear()
        {
            // clear does not change the capacity
            version++; // invalidate outstanding enumerators
            Array.Clear(keys, 0, _size); // Don't need to doc this but we clear the elements so that the gc can reclaim the references.
            Array.Clear(values, 0, _size); // Don't need to doc this but we clear the elements so that the gc can reclaim the references.
            _size = 0;
        }
        // Makes a virtually identical copy of this SortedList. This is a shallow
        // copy. IE, the Objects in the SortedList are not cloned - we copy the
        // references to those objects.
        public virtual Object Clone()
        {
            SortedList sl = new SortedList(_size);
            Array.Copy(keys, 0, sl.keys, 0, _size);
            Array.Copy(values, 0, sl.values, 0, _size);
            sl._size = _size;
            sl.version = version;
            sl.comparer = comparer;
            // Don't copy keyList nor valueList.
            return sl;
        }
        // Checks if this sorted list contains an entry with the given key.
        // O(log n) via binary search in IndexOfKey.
        public virtual bool Contains(Object key)
        {
            return IndexOfKey(key) >= 0;
        }
        // Checks if this sorted list contains an entry with the given key.
        public virtual bool ContainsKey(Object key)
        {
            // Yes, this is a SPEC'ed duplicate of Contains().
            return IndexOfKey(key) >= 0;
        }
        // Checks if this sorted list contains an entry with the given value. The
        // values of the entries of the sorted list are compared to the given value
        // using the Object.Equals method. This method performs a linear
        // search and is substantially slower than the Contains
        // method.
        //
        public virtual bool ContainsValue(Object value)
        {
            return IndexOfValue(value) >= 0;
        }
// Copies the values in this SortedList to an array.
public virtual void CopyTo(Array array, int arrayIndex)
{
if (array == null)
throw new ArgumentNullException(nameof(array), SR.ArgumentNull_Array);
if (array.Rank != 1)
throw new ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array));
if (arrayIndex < 0)
throw new ArgumentOutOfRangeException(nameof(arrayIndex), SR.ArgumentOutOfRange_NeedNonNegNum);
if (array.Length - arrayIndex < Count)
throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
Contract.EndContractBlock();
for (int i = 0; i < Count; i++)
{
DictionaryEntry entry = new DictionaryEntry(keys[i], values[i]);
array.SetValue(entry, i + arrayIndex);
}
}
// Copies the values in this SortedList to an KeyValuePairs array.
// KeyValuePairs is different from Dictionary Entry in that it has special
// debugger attributes on its fields.
internal virtual KeyValuePairs[] ToKeyValuePairsArray()
{
KeyValuePairs[] array = new KeyValuePairs[Count];
for (int i = 0; i < Count; i++)
{
array[i] = new KeyValuePairs(keys[i], values[i]);
}
return array;
}
// Ensures that the capacity of this sorted list is at least the given
// minimum value. If the current capacity of the list is less than
// min, the capacity is increased to twice the current capacity or
// to min, whichever is larger.
private void EnsureCapacity(int min)
{
int newCapacity = keys.Length == 0 ? 16 : keys.Length * 2;
// Allow the list to grow to maximum possible capacity (~2G elements) before encountering overflow.
// Note that this check works even when _items.Length overflowed thanks to the (uint) cast
if ((uint)newCapacity > MaxArrayLength) newCapacity = MaxArrayLength;
if (newCapacity < min) newCapacity = min;
Capacity = newCapacity;
}
        // Returns the value of the entry at the given index.
        // Throws ArgumentOutOfRangeException when index is outside [0, Count).
        public virtual Object GetByIndex(int index)
        {
            if (index < 0 || index >= Count)
                throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_Index);
            Contract.EndContractBlock();
            return values[index];
        }
        // Returns an IEnumerator for this sorted list. If modifications
        // made to the sorted list while an enumeration is in progress,
        // the MoveNext and Remove methods
        // of the enumerator will throw an exception.
        //
        IEnumerator IEnumerable.GetEnumerator()
        {
            // Same enumerator as the public GetEnumerator, yielding DictionaryEntry.
            return new SortedListEnumerator(this, 0, _size, SortedListEnumerator.DictEntry);
        }
        // Returns an IDictionaryEnumerator for this sorted list. If modifications
        // made to the sorted list while an enumeration is in progress,
        // the MoveNext and Remove methods
        // of the enumerator will throw an exception.
        //
        public virtual IDictionaryEnumerator GetEnumerator()
        {
            return new SortedListEnumerator(this, 0, _size, SortedListEnumerator.DictEntry);
        }
// Returns the key of the entry at the given index.
//
public virtual Object GetKey(int index)
{
    // Single unsigned comparison covers both index < 0 and index >= Count.
    if ((uint)index >= (uint)Count)
        throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_Index);
    Contract.EndContractBlock();
    return keys[index];
}
// Returns an IList view over the keys of this sorted list. The returned
// list aliases the keys, so removals through it are reflected in the
// underlying list and vice versa; its ordering matches the sorted list.
// The view rejects adding, inserting, and modifying elements (Add,
// AddRange, Insert, InsertRange, Reverse, Set, SetRange, and Sort throw),
// but allows removal (Remove, RemoveRange, or an enumerator).
// Null is an invalid key value.
//
public virtual IList GetKeyList()
{
    // Lazily create and cache the single KeyList wrapper.
    return keyList ?? (keyList = new KeyList(this));
}
// Returns an IList view over the values of this sorted list. The returned
// list aliases the values, so modifications through it are reflected in
// the underlying list and vice versa; its ordering matches the sorted
// list. The view rejects adding and inserting elements (Add, AddRange,
// Insert, and InsertRange throw), but allows modification and removal
// (Remove, RemoveRange, Set, SetRange, or an enumerator).
//
public virtual IList GetValueList()
{
    // Lazily create and cache the single ValueList wrapper.
    return valueList ?? (valueList = new ValueList(this));
}
// Indexer: gets the value associated with the given key, or null when the
// key is not present. Setting either overwrites an existing entry's value
// or inserts a new entry at the correct sorted position.
//
public virtual Object this[Object key]
{
    get
    {
        int index = IndexOfKey(key);
        return index < 0 ? null : values[index];
    }
    set
    {
        if (key == null) throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
        Contract.EndContractBlock();
        int index = Array.BinarySearch(keys, 0, _size, key, comparer);
        if (index < 0)
        {
            // Key not found: the complement of the search result is the
            // insertion point that keeps the list sorted.
            Insert(~index, key, value);
        }
        else
        {
            values[index] = value;
            version++;
        }
    }
}
// Returns the index of the entry with the given key, located via binary
// search: O(log n) in the size of the list. Returns -1 when the key does
// not occur. Null is an invalid key value.
//
public virtual int IndexOfKey(Object key)
{
    if (key == null)
        throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
    Contract.EndContractBlock();
    int index = Array.BinarySearch(keys, 0, _size, key, comparer);
    if (index < 0)
        return -1;
    return index;
}
// Returns the index of the first entry with the given value, located via
// linear search: O(n) in the size of the list. Values are compared with
// Object.Equals.
//
public virtual int IndexOfValue(Object value) =>
    Array.IndexOf(values, value, 0, _size);
// Inserts an entry with the given key and value at the given index,
// shifting any trailing entries one slot to the right.
private void Insert(int index, Object key, Object value)
{
    if (_size == keys.Length)
        EnsureCapacity(_size + 1);
    int tail = _size - index;
    if (tail > 0)
    {
        // Shift the tail of both parallel arrays to open a slot.
        Array.Copy(keys, index, keys, index + 1, tail);
        Array.Copy(values, index, values, index + 1, tail);
    }
    keys[index] = key;
    values[index] = value;
    _size++;
    version++;
}
// Removes the entry at the given index; the size of the sorted list is
// decreased by one.
//
public virtual void RemoveAt(int index)
{
    if (index < 0 || index >= Count)
        throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_Index);
    Contract.EndContractBlock();
    _size--;
    int tail = _size - index;
    if (tail > 0)
    {
        // Close the gap in both parallel arrays.
        Array.Copy(keys, index + 1, keys, index, tail);
        Array.Copy(values, index + 1, values, index, tail);
    }
    // Clear the vacated slot so the GC can reclaim the references.
    keys[_size] = null;
    values[_size] = null;
    version++;
}
// Removes the entry with the given key, if present. Throws
// ArgumentNullException (via IndexOfKey) when key is null.
//
public virtual void Remove(Object key)
{
    int index = IndexOfKey(key);
    if (index < 0)
        return;
    RemoveAt(index);
}
// Sets the value at the given index, overwriting the previous value of
// that entry.
//
public virtual void SetByIndex(int index, Object value)
{
    // Single unsigned comparison covers both index < 0 and index >= Count.
    if ((uint)index >= (uint)Count)
        throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_Index);
    Contract.EndContractBlock();
    values[index] = value;
    version++;
}
// Returns a thread-safe wrapper around the given SortedList; every
// operation on the wrapper takes a lock on the list's SyncRoot.
//
public static SortedList Synchronized(SortedList list)
{
    if (list == null)
        throw new ArgumentNullException(nameof(list));
    Contract.EndContractBlock();
    return new SyncSortedList(list);
}
// Shrinks the capacity of this sorted list to its current size, to
// minimize memory overhead once no further additions are expected. To
// completely clear a sorted list and release all memory it references:
//
//     sortedList.Clear();
//     sortedList.TrimToSize();
//
public virtual void TrimToSize() => Capacity = _size;
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
private class SyncSortedList : SortedList
{
    // Thread-safe wrapper: every member delegates to the wrapped list
    // under a lock on its SyncRoot.
    private SortedList _list; // Do not rename (binary serialization)
    private Object _root;     // Do not rename (binary serialization)

    internal SyncSortedList(SortedList list)
    {
        _list = list;
        _root = list.SyncRoot;
    }

    public override int Count { get { lock (_root) return _list.Count; } }

    public override Object SyncRoot => _root;

    public override bool IsReadOnly => _list.IsReadOnly;

    public override bool IsFixedSize => _list.IsFixedSize;

    public override bool IsSynchronized => true;

    public override Object this[Object key]
    {
        get { lock (_root) return _list[key]; }
        set { lock (_root) _list[key] = value; }
    }

    public override void Add(Object key, Object value) { lock (_root) _list.Add(key, value); }

    public override int Capacity { get { lock (_root) return _list.Capacity; } }

    public override void Clear() { lock (_root) _list.Clear(); }

    public override Object Clone() { lock (_root) return _list.Clone(); }

    public override bool Contains(Object key) { lock (_root) return _list.Contains(key); }

    public override bool ContainsKey(Object key) { lock (_root) return _list.ContainsKey(key); }

    public override bool ContainsValue(Object key) { lock (_root) return _list.ContainsValue(key); }

    public override void CopyTo(Array array, int index) { lock (_root) _list.CopyTo(array, index); }

    [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems.
    public override Object GetByIndex(int index) { lock (_root) return _list.GetByIndex(index); }

    public override IDictionaryEnumerator GetEnumerator() { lock (_root) return _list.GetEnumerator(); }

    [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems.
    public override Object GetKey(int index) { lock (_root) return _list.GetKey(index); }

    public override IList GetKeyList() { lock (_root) return _list.GetKeyList(); }

    public override IList GetValueList() { lock (_root) return _list.GetValueList(); }

    public override int IndexOfKey(Object key)
    {
        // The null check intentionally happens outside the lock, matching
        // the unsynchronized list's validation behavior.
        if (key == null)
            throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
        Contract.EndContractBlock();
        lock (_root) return _list.IndexOfKey(key);
    }

    [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems.
    public override int IndexOfValue(Object value) { lock (_root) return _list.IndexOfValue(value); }

    [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems.
    public override void RemoveAt(int index) { lock (_root) _list.RemoveAt(index); }

    public override void Remove(Object key) { lock (_root) _list.Remove(key); }

    [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems.
    public override void SetByIndex(int index, Object value) { lock (_root) _list.SetByIndex(index, value); }

    // Debugger-only snapshot; intentionally left unsynchronized, matching
    // the original implementation.
    internal override KeyValuePairs[] ToKeyValuePairsArray() => _list.ToKeyValuePairsArray();

    public override void TrimToSize() { lock (_root) _list.TrimToSize(); }
}
// Fail-fast enumerator over a SortedList. Captures the list's version at
// construction; any structural modification of the list invalidates the
// enumerator, and Key/Value/Entry/MoveNext/Reset then throw.
private class SortedListEnumerator : IDictionaryEnumerator, ICloneable
{
    private SortedList _sortedList;
    private Object _key;
    private Object _value;
    private int _index;
    private int _startIndex;        // Store for Reset.
    private int _endIndex;
    private int _version;
    private bool _current;          // Is the current element valid?
    private int _getObjectRetType;  // What should GetObject return?

    internal const int Keys = 1;
    internal const int Values = 2;
    internal const int DictEntry = 3;

    internal SortedListEnumerator(SortedList sortedList, int index, int count,
                                  int getObjRetType)
    {
        _sortedList = sortedList;
        _index = index;
        _startIndex = index;
        _endIndex = index + count;
        _version = sortedList.version;
        _getObjectRetType = getObjRetType;
        _current = false;
    }

    public object Clone() => MemberwiseClone();

    public virtual Object Key
    {
        get
        {
            if (_version != _sortedList.version)
                throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
            if (!_current)
                throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
            return _key;
        }
    }

    public virtual bool MoveNext()
    {
        if (_version != _sortedList.version)
            throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
        if (_index >= _endIndex)
        {
            // Past the end: clear current state and report exhaustion.
            _key = null;
            _value = null;
            _current = false;
            return false;
        }
        _key = _sortedList.keys[_index];
        _value = _sortedList.values[_index];
        _index++;
        _current = true;
        return true;
    }

    public virtual DictionaryEntry Entry
    {
        get
        {
            if (_version != _sortedList.version)
                throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
            if (!_current)
                throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
            return new DictionaryEntry(_key, _value);
        }
    }

    public virtual Object Current
    {
        get
        {
            // NOTE: matches the original behavior — Current performs no
            // version check, only a validity check.
            if (!_current)
                throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
            switch (_getObjectRetType)
            {
                case Keys:
                    return _key;
                case Values:
                    return _value;
                default:
                    return new DictionaryEntry(_key, _value);
            }
        }
    }

    public virtual Object Value
    {
        get
        {
            if (_version != _sortedList.version)
                throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
            if (!_current)
                throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
            return _value;
        }
    }

    public virtual void Reset()
    {
        if (_version != _sortedList.version)
            throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
        _index = _startIndex;
        _current = false;
        _key = null;
        _value = null;
    }
}
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
private class KeyList : IList
{
    // Read-only, fixed-size IList view over the owning list's keys.
    private SortedList sortedList; // Do not rename (binary serialization)

    internal KeyList(SortedList sortedList)
    {
        this.sortedList = sortedList;
    }

    public virtual int Count => sortedList._size;

    public virtual bool IsReadOnly => true;

    public virtual bool IsFixedSize => true;

    public virtual bool IsSynchronized => sortedList.IsSynchronized;

    public virtual Object SyncRoot => sortedList.SyncRoot;

    public virtual int Add(Object key)
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }

    public virtual void Clear()
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }

    public virtual bool Contains(Object key) => sortedList.Contains(key);

    public virtual void CopyTo(Array array, int arrayIndex)
    {
        if (array != null && array.Rank != 1)
            throw new ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array));
        Contract.EndContractBlock();
        // Remaining argument validation is deferred to Array.Copy.
        Array.Copy(sortedList.keys, 0, array, arrayIndex, sortedList.Count);
    }

    public virtual void Insert(int index, Object value)
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }

    public virtual Object this[int index]
    {
        get { return sortedList.GetKey(index); }
        set { throw new NotSupportedException(SR.NotSupported_KeyCollectionSet); }
    }

    public virtual IEnumerator GetEnumerator() =>
        new SortedListEnumerator(sortedList, 0, sortedList.Count, SortedListEnumerator.Keys);

    public virtual int IndexOf(Object key)
    {
        if (key == null)
            throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
        Contract.EndContractBlock();
        int result = Array.BinarySearch(sortedList.keys, 0,
                                        sortedList.Count, key, sortedList.comparer);
        return result >= 0 ? result : -1;
    }

    public virtual void Remove(Object key)
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }

    public virtual void RemoveAt(int index)
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }
}
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
private class ValueList : IList
{
    // Fixed-size IList view over the owning list's values; supports reads
    // only (no Add/Insert/Set/Remove through this wrapper).
    private SortedList sortedList; // Do not rename (binary serialization)

    internal ValueList(SortedList sortedList)
    {
        this.sortedList = sortedList;
    }

    public virtual int Count => sortedList._size;

    public virtual bool IsReadOnly => true;

    public virtual bool IsFixedSize => true;

    public virtual bool IsSynchronized => sortedList.IsSynchronized;

    public virtual Object SyncRoot => sortedList.SyncRoot;

    public virtual int Add(Object key)
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }

    public virtual void Clear()
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }

    public virtual bool Contains(Object value) => sortedList.ContainsValue(value);

    public virtual void CopyTo(Array array, int arrayIndex)
    {
        if (array != null && array.Rank != 1)
            throw new ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array));
        Contract.EndContractBlock();
        // Remaining argument validation is deferred to Array.Copy.
        Array.Copy(sortedList.values, 0, array, arrayIndex, sortedList.Count);
    }

    public virtual void Insert(int index, Object value)
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }

    public virtual Object this[int index]
    {
        get { return sortedList.GetByIndex(index); }
        set { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); }
    }

    public virtual IEnumerator GetEnumerator() =>
        new SortedListEnumerator(sortedList, 0, sortedList.Count, SortedListEnumerator.Values);

    public virtual int IndexOf(Object value) =>
        Array.IndexOf(sortedList.values, value, 0, sortedList.Count);

    public virtual void Remove(Object value)
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }

    public virtual void RemoveAt(int index)
    {
        throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
    }
}
// Internal debugger-display proxy for SortedList: exposes the entries as
// a flat KeyValuePairs array rooted directly under the list node.
internal class SortedListDebugView
{
    private SortedList _sortedList;

    public SortedListDebugView(SortedList sortedList)
    {
        if (sortedList == null)
        {
            throw new ArgumentNullException(nameof(sortedList));
        }
        Contract.EndContractBlock();
        _sortedList = sortedList;
    }

    [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
    public KeyValuePairs[] Items => _sortedList.ToKeyValuePairsArray();
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using Palmmedia.ReportGenerator.Core.Logging;
using Palmmedia.ReportGenerator.Core.Parser.FileReading;
namespace Palmmedia.ReportGenerator.Core.Parser.Analysis
{
/// <summary>
/// Represents a source code file.
/// </summary>
public class CodeFile
{
/// <summary>
/// The Logger.
/// </summary>
private static readonly ILogger Logger = LoggerFactory.GetLogger(typeof(CodeFile));
/// <summary>
/// The line coverage by test method.
/// </summary>
private readonly IDictionary<TestMethod, CoverageByTrackedMethod> lineCoveragesByTestMethod = new Dictionary<TestMethod, CoverageByTrackedMethod>();
/// <summary>
/// The method metrics of the class.
/// </summary>
private readonly HashSet<MethodMetric> methodMetrics = new HashSet<MethodMetric>();
/// <summary>
/// The code elements.
/// </summary>
private readonly HashSet<CodeElement> codeElements = new HashSet<CodeElement>();
/// <summary>
/// Array containing the coverage information by line number.
/// -1: Not coverable
/// 0: Not visited
/// >0: Number of visits
/// </summary>
// Indexed by 1-based line number (see AnalyzeFile); may be replaced with a
// larger array during Merge().
private int[] lineCoverage;
/// <summary>
/// Array containing the line visit status by line number.
/// </summary>
// Kept the same length as lineCoverage (enforced by the constructor); may be
// replaced with a larger array during Merge().
private LineVisitStatus[] lineVisitStatus;
/// <summary>
/// The branches by line number.
/// </summary>
// Null when no branch coverage data is available for this file.
private IDictionary<int, ICollection<Branch>> branches;
/// <summary>
/// The optional additional file reader.
/// </summary>
// When set, AnalyzeFile tries this reader first and falls back to the
// supplied reader on error.
private IFileReader additionalFileReader;
/// <summary>
/// Initializes a new instance of the <see cref="CodeFile" /> class.
/// </summary>
/// <param name="path">The path of the file.</param>
/// <param name="lineCoverage">The line coverage.</param>
/// <param name="lineVisitStatus">The line visit status.</param>
// Delegation chain: all overloads funnel into the five-parameter constructor.
internal CodeFile(string path, int[] lineCoverage, LineVisitStatus[] lineVisitStatus)
: this(path, lineCoverage, lineVisitStatus, null, null)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CodeFile" /> class.
/// </summary>
/// <param name="path">The path of the file.</param>
/// <param name="lineCoverage">The line coverage.</param>
/// <param name="lineVisitStatus">The line visit status.</param>
/// <param name="additionalFileReader">The optional additional file reader.</param>
internal CodeFile(string path, int[] lineCoverage, LineVisitStatus[] lineVisitStatus, IFileReader additionalFileReader)
: this(path, lineCoverage, lineVisitStatus, null, additionalFileReader)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CodeFile" /> class.
/// </summary>
/// <param name="path">The path.</param>
/// <param name="lineCoverage">The line coverage.</param>
/// <param name="lineVisitStatus">The line visit status.</param>
/// <param name="branches">The branches.</param>
internal CodeFile(string path, int[] lineCoverage, LineVisitStatus[] lineVisitStatus, IDictionary<int, ICollection<Branch>> branches)
: this(path, lineCoverage, lineVisitStatus, branches, null)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CodeFile" /> class.
/// This is the designated constructor; all other overloads delegate here.
/// </summary>
/// <param name="path">The path.</param>
/// <param name="lineCoverage">The line coverage.</param>
/// <param name="lineVisitStatus">The line visit status.</param>
/// <param name="branches">The branches (may be <c>null</c>).</param>
/// <param name="additionalFileReader">The optional additional file reader.</param>
internal CodeFile(
    string path,
    int[] lineCoverage,
    LineVisitStatus[] lineVisitStatus,
    IDictionary<int, ICollection<Branch>> branches,
    IFileReader additionalFileReader)
{
    // Validation order preserved: lineCoverage, lineVisitStatus, length
    // consistency, then path.
    this.lineCoverage = lineCoverage ?? throw new ArgumentNullException(nameof(lineCoverage));
    this.lineVisitStatus = lineVisitStatus ?? throw new ArgumentNullException(nameof(lineVisitStatus));

    if (lineCoverage.LongLength != lineVisitStatus.LongLength)
    {
        throw new ArgumentException("Length of 'lineCoverage' and 'lineVisitStatus' must match", nameof(lineVisitStatus));
    }

    this.Path = path ?? throw new ArgumentNullException(nameof(path));
    this.branches = branches;
    this.additionalFileReader = additionalFileReader;
}
/// <summary>
/// Gets the path.
/// </summary>
/// <value>The path.</value>
public string Path { get; }
/// <summary>
/// Gets the test methods.
/// </summary>
/// <value>
/// The test methods.
/// </value>
public IEnumerable<TestMethod> TestMethods => this.lineCoveragesByTestMethod.Keys;
/// <summary>
/// Gets the method metrics.
/// </summary>
/// <value>The method metrics.</value>
public IEnumerable<MethodMetric> MethodMetrics => this.methodMetrics;
/// <summary>
/// Gets the code elements.
/// </summary>
/// <value>
/// The code elements.
/// </value>
public IEnumerable<CodeElement> CodeElements => this.codeElements;
/// <summary>
/// Gets the number of covered lines.
/// </summary>
/// <value>The number of covered lines.</value>
// NOTE: recomputed on every access (O(n) over the coverage array).
public int CoveredLines => this.lineCoverage.Count(l => l > 0);
/// <summary>
/// Gets the number of coverable lines.
/// </summary>
/// <value>The number of coverable lines.</value>
// NOTE: recomputed on every access (O(n) over the coverage array).
public int CoverableLines => this.lineCoverage.Count(l => l >= 0);
/// <summary>
/// Gets the number of total lines.
/// </summary>
/// <value>The number of total lines.</value>
// Null until AnalyzeFile() has read the physical file.
public int? TotalLines { get; private set; }
/// <summary>
/// Gets line coverage information by line number for this file.
/// </summary>
public ReadOnlyCollection<int> LineCoverage => Array.AsReadOnly(this.lineCoverage);
/// <summary>
/// Gets line visit status by line number for this file.
/// </summary>
public ReadOnlyCollection<LineVisitStatus> LineVisitStatus => Array.AsReadOnly(this.lineVisitStatus);
/// <summary>
/// Gets the branches by line number.
/// </summary>
// NOTE: when no branch data exists, a fresh empty dictionary is returned on
// every access — mutations made to that instance are not persisted.
public IDictionary<int, ICollection<Branch>> BranchesByLine => this.branches ?? new Dictionary<int, ICollection<Branch>>();
/// <summary>
/// Gets the number of covered branches.
/// </summary>
/// <value>
/// The number of branches with at least one visit, or <c>null</c> when no
/// branch coverage data is available.
/// </value>
public int? CoveredBranches => this.branches?.Sum(l => l.Value.Count(b => b.BranchVisits > 0));
/// <summary>
/// Gets the number of total branches.
/// </summary>
/// <value>
/// The total number of branches, or <c>null</c> when no branch coverage
/// data is available.
/// </value>
public int? TotalBranches => this.branches?.Sum(l => l.Value.Count);
/// <summary>
/// Gets the number of covered code elements.
/// </summary>
/// <value>
/// The number of covered code elements.
/// </value>
public int CoveredCodeElements
{
    get
    {
        // A code element counts as covered as soon as any line within its
        // [FirstLine, LastLine] range has at least one visit.
        return this.CodeElements.Count(element =>
        {
            int length = element.LastLine - element.FirstLine + 1;
            return this.lineCoverage
                .Skip(element.FirstLine)
                .Take(length)
                .Any(visits => visits > 0);
        });
    }
}
/// <summary>
/// Gets the number of total code elements.
/// </summary>
/// <value>
/// The number of total code elements.
/// </value>
public int TotalCodeElements => this.codeElements.Count;
/// <summary>
/// Returns a <see cref="string" /> that represents this instance.
/// </summary>
/// <returns>
/// The file path of this instance.
/// </returns>
public override string ToString() => this.Path;
/// <summary>
/// Determines whether the specified <see cref="object"/> is equal to this instance.
/// Two <see cref="CodeFile"/> instances are considered equal when their file names
/// (the path component after the last backslash) match case-insensitively.
/// </summary>
/// <param name="obj">The <see cref="object"/> to compare with this instance.</param>
/// <returns>
/// <c>true</c> if the specified <see cref="object"/> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
public override bool Equals(object obj)
{
    if (obj == null || !obj.GetType().Equals(typeof(CodeFile)))
    {
        return false;
    }
    else
    {
        var codeFile = (CodeFile)obj;
        string fileNameToCompare = codeFile.Path.Substring(codeFile.Path.LastIndexOf('\\') + 1);
        string fileName = this.Path.Substring(this.Path.LastIndexOf('\\') + 1);
        return fileName.Equals(fileNameToCompare, StringComparison.OrdinalIgnoreCase);
    }
}

/// <summary>
/// Returns a hash code for this instance.
/// </summary>
/// <returns>
/// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table.
/// </returns>
public override int GetHashCode()
{
    // BUGFIX: Equals() compares only the file name (after the last '\\'),
    // case-insensitively, but the hash was previously computed from the
    // full, case-sensitive path (this.Path.GetHashCode()). Equal instances
    // could therefore produce different hash codes, violating the
    // Equals/GetHashCode contract and breaking Dictionary/HashSet lookups.
    // Hash the same key that Equals() compares.
    string fileName = this.Path.Substring(this.Path.LastIndexOf('\\') + 1);
    return StringComparer.OrdinalIgnoreCase.GetHashCode(fileName);
}
/// <summary>
/// Calculates the coverage quota in a given range of lines.
/// </summary>
/// <param name="firstLine">The first line.</param>
/// <param name="lastLine">The last line.</param>
/// <returns>The coverage quota or <code>null</code> if not applicable.</returns>
internal decimal? CoverageQuota(int firstLine, int lastLine)
{
// Reject out-of-range or inverted line ranges.
if (firstLine < 0
|| firstLine >= this.lineVisitStatus.Length
|| lastLine < 0
|| lastLine >= this.lineVisitStatus.Length
|| firstLine > lastLine)
{
return null;
}
int coverableLines = 0;
int coveredLines = 0;
// Count coverable lines (anything but NotCoverable) and covered lines
// (any status greater than NotCovered) over the inclusive range.
for (int i = firstLine; i <= lastLine; i++)
{
if (this.lineVisitStatus[i] != Analysis.LineVisitStatus.NotCoverable)
{
coverableLines++;
}
if (this.lineVisitStatus[i] > Analysis.LineVisitStatus.NotCovered)
{
coveredLines++;
}
}
// Percentage truncated (not rounded) to one decimal place; null when the
// range contains no coverable lines at all.
return (coverableLines == 0) ? (decimal?)null : (decimal)Math.Truncate(1000 * (double)coveredLines / (double)coverableLines) / 10;
}
/// <summary>
/// Adds the coverage by test method. If coverage for the test method was
/// already recorded, the new coverage is merged into the existing data.
/// </summary>
/// <param name="testMethod">The test method.</param>
/// <param name="trackedMethodCoverage">The coverage by for test method.</param>
internal void AddCoverageByTestMethod(TestMethod testMethod, CoverageByTrackedMethod trackedMethodCoverage)
{
    if (testMethod == null)
    {
        throw new ArgumentNullException(nameof(testMethod));
    }

    if (trackedMethodCoverage == null)
    {
        throw new ArgumentNullException(nameof(trackedMethodCoverage));
    }

    if (this.lineCoveragesByTestMethod.TryGetValue(testMethod, out CoverageByTrackedMethod existingTrackedMethodCoverage))
    {
        // Already tracked: fold the new coverage into the existing record.
        this.lineCoveragesByTestMethod[testMethod] = MergeCoverageByTrackedMethod(existingTrackedMethodCoverage, trackedMethodCoverage);
    }
    else
    {
        this.lineCoveragesByTestMethod.Add(testMethod, trackedMethodCoverage);
    }
}
/// <summary>
/// Adds the given method metric to this file's set of method metrics.
/// </summary>
/// <param name="methodMetric">The method metric.</param>
internal void AddMethodMetric(MethodMetric methodMetric) => this.methodMetrics.Add(methodMetric);

/// <summary>
/// Adds the code element to this file's set of code elements.
/// </summary>
/// <param name="codeElement">The code element.</param>
internal void AddCodeElement(CodeElement codeElement) => this.codeElements.Add(codeElement);
/// <summary>
/// Performs the analysis of the source file.
/// </summary>
/// <param name="fileReader">The file reader.</param>
/// <returns>The analysis result.</returns>
internal FileAnalysis AnalyzeFile(IFileReader fileReader)
{
string error = null;
string[] lines = null;
// Prefer the additional file reader when one was supplied; fall back to
// the given reader if it is absent or reported an error.
if (this.additionalFileReader != null)
{
lines = this.additionalFileReader.LoadFile(this.Path, out error);
}
if (this.additionalFileReader == null || error != null)
{
error = null;
lines = fileReader.LoadFile(this.Path, out error);
}
if (error != null)
{
Logger.Error(error);
return new FileAnalysis(this.Path, error);
}
this.TotalLines = lines.Length;
// Line numbers are 1-based: the counter is incremented before the
// coverage arrays are indexed.
int currentLineNumber = 0;
var result = new FileAnalysis(this.Path);
ICollection<Branch> branchesOfLine = null;
foreach (var line in lines)
{
currentLineNumber++;
// Lines beyond the recorded coverage arrays are treated as not coverable.
int visits = this.lineCoverage.Length > currentLineNumber ? this.lineCoverage[currentLineNumber] : -1;
LineVisitStatus lineVisitStatus = this.lineVisitStatus.Length > currentLineNumber ? this.lineVisitStatus[currentLineNumber] : Analysis.LineVisitStatus.NotCoverable;
// Per-test-method coverage for this line; missing data maps to NotCoverable.
var lineCoverageByTestMethod = this.lineCoveragesByTestMethod
.ToDictionary(
l => l.Key,
l =>
{
if (l.Value.Coverage.Length > currentLineNumber)
{
return new ShortLineAnalysis(l.Value.Coverage[currentLineNumber], l.Value.LineVisitStatus[currentLineNumber]);
}
else
{
return new ShortLineAnalysis(-1, Analysis.LineVisitStatus.NotCoverable);
}
});
// Include branch counts only when branch data exists for this line.
if (this.branches != null && this.branches.TryGetValue(currentLineNumber, out branchesOfLine))
{
result.AddLineAnalysis(
new LineAnalysis(
visits,
lineVisitStatus,
lineCoverageByTestMethod,
currentLineNumber,
line.TrimEnd(),
branchesOfLine.Count(b => b.BranchVisits > 0),
branchesOfLine.Count));
}
else
{
result.AddLineAnalysis(
new LineAnalysis(
visits,
lineVisitStatus,
lineCoverageByTestMethod,
currentLineNumber,
line.TrimEnd()));
}
}
return result;
}
/// <summary>
/// Merges the given file with the current instance.
/// </summary>
/// <param name="file">The file to merge.</param>
internal void Merge(CodeFile file)
{
if (file == null)
{
throw new ArgumentNullException(nameof(file));
}
// Resize coverage array if necessary
if (file.lineCoverage.LongLength > this.lineCoverage.LongLength)
{
int[] newLineCoverage = new int[file.lineCoverage.LongLength];
Array.Copy(this.lineCoverage, newLineCoverage, this.lineCoverage.LongLength);
// Newly added slots start as "not coverable" (-1).
for (long i = this.lineCoverage.LongLength; i < file.lineCoverage.LongLength; i++)
{
newLineCoverage[i] = -1;
}
this.lineCoverage = newLineCoverage;
}
// Resize line visit status array if necessary
if (file.lineVisitStatus.LongLength > this.lineVisitStatus.LongLength)
{
LineVisitStatus[] newLineVisitStatus = new LineVisitStatus[file.lineVisitStatus.LongLength];
Array.Copy(this.lineVisitStatus, newLineVisitStatus, this.lineVisitStatus.LongLength);
this.lineVisitStatus = newLineVisitStatus;
}
// Merge branch data: sum visits of matching branches, add unknown ones.
if (file.branches != null)
{
if (this.branches == null)
{
this.branches = new Dictionary<int, ICollection<Branch>>();
}
foreach (var branchByLine in file.branches)
{
ICollection<Branch> existingBranches = null;
if (this.branches.TryGetValue(branchByLine.Key, out existingBranches))
{
foreach (var branch in branchByLine.Value)
{
Branch existingBranch = existingBranches.FirstOrDefault(b => b.Equals(branch));
if (existingBranch != null)
{
existingBranch.BranchVisits += branch.BranchVisits;
}
else
{
existingBranches.Add(branch);
}
}
}
else
{
this.branches.Add(branchByLine);
}
}
}
// Merge line coverage: a "not coverable" slot (-1) takes the other file's
// value; otherwise positive visit counts are summed.
for (long i = 0; i < file.lineCoverage.LongLength; i++)
{
int coverage = this.lineCoverage[i];
if (coverage < 0)
{
coverage = file.lineCoverage[i];
}
else if (file.lineCoverage[i] > 0)
{
coverage += file.lineCoverage[i];
}
this.lineCoverage[i] = coverage;
}
// Merge visit status by taking the maximum; upgrade PartiallyCovered to
// Covered when all branches on that line now have visits.
for (long i = 0; i < file.lineVisitStatus.LongLength; i++)
{
int lineVisitStatus = Math.Max((int)this.lineVisitStatus[i], (int)file.lineVisitStatus[i]);
this.lineVisitStatus[i] = (LineVisitStatus)lineVisitStatus;
if (this.lineVisitStatus[i] == Analysis.LineVisitStatus.PartiallyCovered
&& this.branches != null
&& this.branches.TryGetValue((int)i, out ICollection<Branch> branches))
{
if (branches.All(b => b.BranchVisits > 0))
{
this.lineVisitStatus[i] = Analysis.LineVisitStatus.Covered;
}
}
}
// Merge per-test-method coverage, folding duplicates together.
foreach (var lineCoverageByTestMethod in file.lineCoveragesByTestMethod)
{
CoverageByTrackedMethod existingTrackedMethodCoverage = null;
this.lineCoveragesByTestMethod.TryGetValue(lineCoverageByTestMethod.Key, out existingTrackedMethodCoverage);
if (existingTrackedMethodCoverage == null)
{
this.lineCoveragesByTestMethod.Add(lineCoverageByTestMethod);
}
else
{
this.lineCoveragesByTestMethod[lineCoverageByTestMethod.Key] = MergeCoverageByTrackedMethod(existingTrackedMethodCoverage, lineCoverageByTestMethod.Value);
}
}
// Merge method metrics: merge matching metrics, add new ones.
foreach (var methodMetric in file.methodMetrics)
{
var existingMethodMetric = this.methodMetrics.FirstOrDefault(m => m.Equals(methodMetric));
if (existingMethodMetric != null)
{
existingMethodMetric.Merge(methodMetric);
}
else
{
this.AddMethodMetric(methodMetric);
}
}
foreach (var codeElement in file.codeElements)
{
this.codeElements.Add(codeElement);
}
// Recompute coverage quotas for all code elements after the merge.
foreach (var codeElement in this.codeElements)
{
codeElement.ApplyMaximumCoverageQuota(this.CoverageQuota(codeElement.FirstLine, codeElement.LastLine));
}
if (file.additionalFileReader == null)
{
file.additionalFileReader = this.additionalFileReader;
}
}
/// <summary>
/// Merges the two tracked method coverage.
/// </summary>
/// <param name="existingTrackedMethodCoverage">The existing tracked method coverage.</param>
/// <param name="lineCoverageByTestMethod">The new line coverage by test method.</param>
/// <returns>The merged tracked method coverage (the existing instance, updated in place).</returns>
private static CoverageByTrackedMethod MergeCoverageByTrackedMethod(CoverageByTrackedMethod existingTrackedMethodCoverage, CoverageByTrackedMethod lineCoverageByTestMethod)
{
    // Resize coverage array if necessary.
    // BUG FIX: the existing data must be preserved when growing the array. Previously the
    // *incoming* coverage was copied here and its tail then overwritten with -1, which both
    // discarded the existing counts and corrupted the incoming ones before the merge below.
    if (lineCoverageByTestMethod.Coverage.LongLength > existingTrackedMethodCoverage.Coverage.LongLength)
    {
        int[] newLineCoverage = new int[lineCoverageByTestMethod.Coverage.LongLength];
        Array.Copy(existingTrackedMethodCoverage.Coverage, newLineCoverage, existingTrackedMethodCoverage.Coverage.LongLength);

        // Newly added positions start as 'not coverable' (-1); the merge loop below
        // replaces them with the incoming values.
        for (long i = existingTrackedMethodCoverage.Coverage.LongLength; i < lineCoverageByTestMethod.Coverage.LongLength; i++)
        {
            newLineCoverage[i] = -1;
        }

        existingTrackedMethodCoverage.Coverage = newLineCoverage;
    }

    // Resize line visit status array if necessary.
    // BUG FIX: copy the *existing* statuses (not the incoming ones) so the Math.Max merge
    // below actually compares old against new instead of new against itself.
    if (lineCoverageByTestMethod.LineVisitStatus.LongLength > existingTrackedMethodCoverage.LineVisitStatus.LongLength)
    {
        LineVisitStatus[] newLineVisitStatus = new LineVisitStatus[lineCoverageByTestMethod.LineVisitStatus.LongLength];
        Array.Copy(existingTrackedMethodCoverage.LineVisitStatus, newLineVisitStatus, existingTrackedMethodCoverage.LineVisitStatus.LongLength);
        existingTrackedMethodCoverage.LineVisitStatus = newLineVisitStatus;
    }

    // Merge coverage counts: a negative value means 'not coverable yet', so take the
    // incoming value as-is; otherwise accumulate positive visit counts.
    for (long i = 0; i < lineCoverageByTestMethod.Coverage.LongLength; i++)
    {
        int coverage = existingTrackedMethodCoverage.Coverage[i];

        if (coverage < 0)
        {
            coverage = lineCoverageByTestMethod.Coverage[i];
        }
        else if (lineCoverageByTestMethod.Coverage[i] > 0)
        {
            coverage += lineCoverageByTestMethod.Coverage[i];
        }

        existingTrackedMethodCoverage.Coverage[i] = coverage;
    }

    // Merge line visit statuses: the numerically higher status wins.
    for (long i = 0; i < lineCoverageByTestMethod.LineVisitStatus.LongLength; i++)
    {
        int lineVisitStatus = Math.Max((int)existingTrackedMethodCoverage.LineVisitStatus[i], (int)lineCoverageByTestMethod.LineVisitStatus[i]);
        existingTrackedMethodCoverage.LineVisitStatus[i] = (LineVisitStatus)lineVisitStatus;
    }

    return existingTrackedMethodCoverage;
}
}
}
| |
#region -- License Terms --
//
// MessagePack for CLI
//
// Copyright (C) 2014-2015 FUJIWARA, Yusuke
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion -- License Terms --
#if UNITY_5 || UNITY_STANDALONE || UNITY_WEBPLAYER || UNITY_WII || UNITY_IPHONE || UNITY_ANDROID || UNITY_PS3 || UNITY_XBOX360 || UNITY_FLASH || UNITY_BKACKBERRY || UNITY_WINRT
#define UNITY
#endif
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
#if !UNITY
#if XAMIOS || XAMDROID
using Contract = MsgPack.MPContract;
#else
using System.Diagnostics.Contracts;
#endif // XAMIOS || XAMDROID
#endif // !UNITY
using System.Reflection;
using MsgPack.Serialization.DefaultSerializers;
using MsgPack.Serialization.Reflection;
namespace MsgPack.Serialization.ReflectionSerializers
{
/// <summary>
///		Helper static methods for reflection serializers.
/// </summary>
internal static class ReflectionSerializerHelper
{
	/// <summary>
	///		Creates a reflection based serializer for the enum type <typeparamref name="T"/>.
	/// </summary>
	public static MessagePackSerializer<T> CreateReflectionEnumMessagePackSerializer<T>( SerializationContext context )
	{
#if !UNITY
		return
			ReflectionExtensions.CreateInstancePreservingExceptionType<MessagePackSerializer<T>>(
				typeof( ReflectionEnumMessagePackSerializer<> ).MakeGenericType( typeof( T ) ),
				context
			);
#else
		return MessagePackSerializer.Wrap<T>( context, new ReflectionEnumMessagePackSerializer( context, typeof( T ) ) );
#endif // !UNITY
	}

	/// <summary>
	///		Creates a reflection based collection serializer for <typeparamref name="T"/>
	///		according to <paramref name="traits"/>, or <c>null</c> when the detailed
	///		collection kind is not supported.
	/// </summary>
#if !UNITY
	public static MessagePackSerializer<T> CreateCollectionSerializer<T>(
#else
	public static IMessagePackSingleObjectSerializer CreateCollectionSerializer<T>(
#endif // !UNITY
		SerializationContext context,
		Type targetType,
		CollectionTraits traits,
		PolymorphismSchema schema
	)
	{
		switch ( traits.DetailedCollectionType )
		{
			case CollectionDetailedKind.Array:
			{
				return ArraySerializer.Create<T>( context, schema );
			}
			case CollectionDetailedKind.GenericList:
#if !NETFX_35 && !UNITY
			case CollectionDetailedKind.GenericSet:
#endif // !NETFX_35 && !UNITY
			case CollectionDetailedKind.GenericCollection:
			{
				return
#if !UNITY
					( MessagePackSerializer<T> )
					ReflectionExtensions.CreateInstancePreservingExceptionType<IVariantReflectionSerializerFactory>(
						typeof( CollectionSerializerFactory<,> ).MakeGenericType( typeof( T ), traits.ElementType )
					).Create( context, targetType, schema );
#else
					new ReflectionCollectionMessagePackSerializer( context, typeof( T ), targetType, traits, schema );
#endif // !UNITY
			}
			case CollectionDetailedKind.GenericEnumerable:
			{
				return
#if !UNITY
					( MessagePackSerializer<T> )
					ReflectionExtensions.CreateInstancePreservingExceptionType<IVariantReflectionSerializerFactory>(
						typeof( EnumerableSerializerFactory<,> ).MakeGenericType( typeof( T ), traits.ElementType )
					).Create( context, targetType, schema );
#else
					new ReflectionEnumerableMessagePackSerializer( context, typeof( T ), targetType, traits, schema );
#endif // !UNITY
			}
			case CollectionDetailedKind.GenericDictionary:
			{
				var genericArgumentOfKeyValuePair = traits.ElementType.GetGenericArguments();
				return
#if !UNITY
					( MessagePackSerializer<T> )
					ReflectionExtensions.CreateInstancePreservingExceptionType<IVariantReflectionSerializerFactory>(
						typeof( DictionarySerializerFactory<,,> ).MakeGenericType(
							typeof( T ),
							genericArgumentOfKeyValuePair[ 0 ],
							genericArgumentOfKeyValuePair[ 1 ]
						)
					).Create( context, targetType, schema );
#else
					new ReflectionDictionaryMessagePackSerializer(
						context,
						typeof( T ),
						targetType,
						genericArgumentOfKeyValuePair[ 0 ],
						genericArgumentOfKeyValuePair[ 1 ],
						traits,
						schema
					);
#endif // !UNITY
			}
			case CollectionDetailedKind.NonGenericList:
			{
				return
#if !UNITY
					( MessagePackSerializer<T> )
					ReflectionExtensions.CreateInstancePreservingExceptionType<IVariantReflectionSerializerFactory>(
						typeof( NonGenericListSerializerFactory<> ).MakeGenericType( typeof( T ) )
					).Create( context, targetType, schema );
#else
					new ReflectionNonGenericListMessagePackSerializer( context, typeof( T ), targetType, schema );
#endif // !UNITY
			}
			case CollectionDetailedKind.NonGenericCollection:
			{
				return
#if !UNITY
					( MessagePackSerializer<T> )
					ReflectionExtensions.CreateInstancePreservingExceptionType<IVariantReflectionSerializerFactory>(
						// BUG FIX: NonGenericCollectionSerializerFactory<> has exactly one generic
						// parameter; the element type was passed as a second type argument, which
						// makes MakeGenericType throw ArgumentException at runtime.
						typeof( NonGenericCollectionSerializerFactory<> ).MakeGenericType( typeof( T ) )
					).Create( context, targetType, schema );
#else
					new ReflectionNonGenericCollectionMessagePackSerializer( context, typeof( T ), targetType, schema );
#endif // !UNITY
			}
			case CollectionDetailedKind.NonGenericEnumerable:
			{
				return
#if !UNITY
					( MessagePackSerializer<T> )
					ReflectionExtensions.CreateInstancePreservingExceptionType<IVariantReflectionSerializerFactory>(
						// BUG FIX: NonGenericEnumerableSerializerFactory<> also takes a single
						// generic parameter (same arity bug as the NonGenericCollection case).
						typeof( NonGenericEnumerableSerializerFactory<> ).MakeGenericType( typeof( T ) )
					).Create( context, targetType, schema );
#else
					new ReflectionNonGenericEnumerableMessagePackSerializer( context, typeof( T ), targetType, schema );
#endif // !UNITY
			}
			case CollectionDetailedKind.NonGenericDictionary:
			{
				return
#if !UNITY
					( MessagePackSerializer<T> )
					ReflectionExtensions.CreateInstancePreservingExceptionType<IVariantReflectionSerializerFactory>(
						typeof( NonGenericDictionarySerializerFactory<> ).MakeGenericType( typeof( T ) )
					).Create( context, targetType, schema );
#else
					new ReflectionNonGenericDictionaryMessagePackSerializer( context, typeof( T ), targetType, schema );
#endif // !UNITY
			}
			default:
			{
				// Unsupported collection kind; the caller is expected to fall back to
				// object serialization.
				return null;
			}
		}
	}

	/// <summary>
	///		Gets a delegate which adds a single item to a collection of the specified
	///		target type via its Add method.
	/// </summary>
#if !UNITY
	public static Action<TCollection, TItem> GetAddItem<TCollection, TItem>( Type targetType )
#else
	public static Action<object, object> GetAddItem( Type targetType )
#endif // !UNITY
	{
		var addMethod = targetType.GetCollectionTraits().AddMethod;
		if ( addMethod == null )
		{
			throw new NotSupportedException(
				String.Format(
					CultureInfo.CurrentCulture,
					"Reflection based serializer only supports collection types which implement interface to add new item such as '{0}' and '{1}'",
					typeof( ICollection<> ).GetFullName(),
					typeof( IList )
				)
			);
		}

		// CreateDelegate causes AOT error.
		// So use reflection in AOT environment.
#if ( !UNITY && !XAMIOS ) || AOT_CHECK
		try
		{
			return addMethod.CreateDelegate( typeof( Action<TCollection, TItem> ) ) as Action<TCollection, TItem>;
		}
		catch ( ArgumentException )
		{
#endif // ( !UNITY && !XAMIOS ) || AOT_CHECK
			// Fallback: late-bound invocation (also the only path on AOT platforms).
			return ( collection, item ) => addMethod.InvokePreservingExceptionType( collection, item );
#if ( !UNITY && !XAMIOS ) || AOT_CHECK
		}
#endif // ( !UNITY && !XAMIOS ) || AOT_CHECK
	}

	/// <summary>
	///		Gets member accessors, contracts, and member serializers for the specified
	///		serializing members. All output arrays are indexed in parallel with
	///		<paramref name="members"/>.
	/// </summary>
	public static void GetMetadata(
		IList<SerializingMember> members,
		SerializationContext context,
		out Func<object, object>[] getters,
		out Action<object, object>[] setters,
		out MemberInfo[] memberInfos,
		out DataMemberContract[] contracts,
		out IMessagePackSerializer[] serializers )
	{
		getters = new Func<object, object>[ members.Count ];
		setters = new Action<object, object>[ members.Count ];
		memberInfos = new MemberInfo[ members.Count ];
		contracts = new DataMemberContract[ members.Count ];
		serializers = new IMessagePackSerializer[ members.Count ];

		for ( var i = 0; i < members.Count; i++ )
		{
			var member = members[ i ];
			if ( member.Member == null )
			{
				// Skip hole entries (missing member); leaves null slots in the arrays.
#if UNITY
				contracts[ i ] = DataMemberContract.Null;
#endif // UNITY
				continue;
			}

			FieldInfo asField;
			if ( ( asField = member.Member as FieldInfo ) != null )
			{
				getters[ i ] = asField.GetValue;
				setters[ i ] = asField.SetValue;
			}
			else
			{
				var property = member.Member as PropertyInfo;
#if DEBUG && !UNITY
				Contract.Assert( property != null, "member.Member is PropertyInfo" );
#endif // DEBUG && !UNITY
				getters[ i ] = target => property.GetGetMethod( true ).InvokePreservingExceptionType( target, null );
				var setter = property.GetSetMethod( true );
				if ( setter != null )
				{
					// Read-only properties simply get no setter delegate.
					setters[ i ] = ( target, value ) => setter.InvokePreservingExceptionType( target, new[] { value } );
				}
			}

			memberInfos[ i ] = member.Member;
#if !UNITY
			contracts[ i ] = member.Contract;
#else
			contracts[ i ] = member.Contract ?? DataMemberContract.Null;
#endif // !UNITY
			var memberType = member.Member.GetMemberValueType();
			if ( memberType.GetIsEnum() )
			{
				serializers[ i ] =
					context.GetSerializer(
						memberType,
						EnumMessagePackSerializerHelpers.DetermineEnumSerializationMethod(
							context,
							memberType,
							member.GetEnumMemberSerializationMethod()
						)
					);
			}
			else if ( DateTimeMessagePackSerializerHelpers.IsDateTime( memberType ) )
			{
				serializers[ i ] =
					context.GetSerializer(
						memberType,
						DateTimeMessagePackSerializerHelpers.DetermineDateTimeConversionMethod(
							context,
							member.GetDateTimeMemberConversionMethod()
						)
					);
			}
			else
			{
				serializers[ i ] = context.GetSerializer( memberType, PolymorphismSchema.Create( memberType, member ) );
			}
		}
	}

	/// <summary>
	///		Creates a factory delegate which instantiates the target collection type,
	///		preferring constructors which accept an initial capacity and/or an
	///		equality comparer.
	/// </summary>
#if !UNITY
	public static Func<int, T> CreateCollectionInstanceFactory<T, TKey>( Type targetType )
#else
	public static Func<int, object> CreateCollectionInstanceFactory( Type abstractType, Type targetType, Type comparisonType )
#endif // !UNITY
	{
		var constructor = UnpackHelpers.GetCollectionConstructor( targetType );
		var parameters = constructor.GetParameters();
		switch ( parameters.Length )
		{
			case 0:
			{
				return _ =>
#if !UNITY
					( T )
#endif // !UNITY
					constructor.InvokePreservingExceptionType();
			}
			case 1:
			{
				if ( parameters[ 0 ].ParameterType == typeof( int ) )
				{
					// ( capacity ) constructor.
					return capacity =>
#if !UNITY
						( T )
#endif // !UNITY
						constructor.InvokePreservingExceptionType( capacity );
				}
				else if ( UnpackHelpers.IsIEqualityComparer( parameters[ 0 ].ParameterType ) )
				{
					// ( comparer ) constructor.
					var comparer =
#if !UNITY
						EqualityComparer<TKey>.Default;
#else
						UnpackHelpers.GetEqualityComparer( comparisonType );
#endif // !UNITY
					return _ =>
#if !UNITY
						( T )
#endif // !UNITY
						constructor.InvokePreservingExceptionType( comparer );
				}

				break;
			}
			case 2:
			{
				var comparer =
#if !UNITY
					EqualityComparer<TKey>.Default;
#else
					UnpackHelpers.GetEqualityComparer( comparisonType );
#endif // !UNITY
				if ( parameters[ 0 ].ParameterType == typeof( int )
					&& UnpackHelpers.IsIEqualityComparer( parameters[ 1 ].ParameterType ) )
				{
					// ( capacity, comparer ) constructor.
					return capacity =>
#if !UNITY
						( T )
#endif // !UNITY
						constructor.InvokePreservingExceptionType( capacity, comparer );
				}
				else if ( UnpackHelpers.IsIEqualityComparer( parameters[ 0 ].ParameterType ) &&
					// BUG FIX: this branch handles the ( comparer, capacity ) constructor shape,
					// so the *second* parameter must be tested for Int32. The original tested
					// parameters[ 0 ] for both conditions, which can never both hold, making
					// this branch unreachable.
					parameters[ 1 ].ParameterType == typeof( int ) )
				{
					return capacity =>
#if !UNITY
						( T )
#endif // !UNITY
						constructor.InvokePreservingExceptionType( comparer, capacity );
				}

				break;
			}
		}

		throw SerializationExceptions.NewTargetDoesNotHavePublicDefaultConstructorNorInitialCapacity(
#if !UNITY
			typeof( T )
#else
			abstractType
#endif // !UNITY
		);
	}

#if !UNITY
	/// <summary>
	///		Defines non-generic factory method for 'universal' serializers which use general collection features.
	/// </summary>
	private interface IVariantReflectionSerializerFactory
	{
		IMessagePackSingleObjectSerializer Create( SerializationContext context, Type targetType, PolymorphismSchema schema );
	}

	// ReSharper disable MemberHidesStaticFromOuterClass
	private sealed class NonGenericEnumerableSerializerFactory<T> : IVariantReflectionSerializerFactory
		where T : IEnumerable
	{
		public NonGenericEnumerableSerializerFactory() { }

		public IMessagePackSingleObjectSerializer Create( SerializationContext context, Type targetType, PolymorphismSchema schema )
		{
			return new ReflectionNonGenericEnumerableMessagePackSerializer<T>( context, targetType, schema );
		}
	}

	private sealed class NonGenericCollectionSerializerFactory<T> : IVariantReflectionSerializerFactory
		where T : ICollection
	{
		public NonGenericCollectionSerializerFactory() { }

		public IMessagePackSingleObjectSerializer Create( SerializationContext context, Type targetType, PolymorphismSchema schema )
		{
			return new ReflectionNonGenericCollectionMessagePackSerializer<T>( context, targetType, schema );
		}
	}

	private sealed class NonGenericListSerializerFactory<T> : IVariantReflectionSerializerFactory
		where T : IList
	{
		public NonGenericListSerializerFactory() { }

		public IMessagePackSingleObjectSerializer Create( SerializationContext context, Type targetType, PolymorphismSchema schema )
		{
			return new ReflectionNonGenericListMessagePackSerializer<T>( context, targetType, schema );
		}
	}

	private sealed class NonGenericDictionarySerializerFactory<T> : IVariantReflectionSerializerFactory
		where T : IDictionary
	{
		public NonGenericDictionarySerializerFactory() { }

		public IMessagePackSingleObjectSerializer Create( SerializationContext context, Type targetType, PolymorphismSchema schema )
		{
			return new ReflectionNonGenericDictionaryMessagePackSerializer<T>( context, targetType, schema );
		}
	}

	private sealed class EnumerableSerializerFactory<TCollection, TItem> : IVariantReflectionSerializerFactory
		where TCollection : IEnumerable<TItem>
	{
		public EnumerableSerializerFactory() { }

		public IMessagePackSingleObjectSerializer Create( SerializationContext context, Type targetType, PolymorphismSchema schema )
		{
			var itemSchema = schema ?? PolymorphismSchema.Default;
			return new ReflectionEnumerableMessagePackSerializer<TCollection, TItem>( context, targetType, itemSchema );
		}
	}

	private sealed class CollectionSerializerFactory<TCollection, TItem> : IVariantReflectionSerializerFactory
		where TCollection : ICollection<TItem>
	{
		public CollectionSerializerFactory() { }

		public IMessagePackSingleObjectSerializer Create( SerializationContext context, Type targetType, PolymorphismSchema schema )
		{
			var itemSchema = schema ?? PolymorphismSchema.Default;
			return new ReflectionCollectionMessagePackSerializer<TCollection, TItem>( context, targetType, itemSchema );
		}
	}

	private sealed class DictionarySerializerFactory<TDictionary, TKey, TValue> : IVariantReflectionSerializerFactory
		where TDictionary : IDictionary<TKey, TValue>
	{
		public DictionarySerializerFactory() { }

		public IMessagePackSingleObjectSerializer Create( SerializationContext context, Type targetType, PolymorphismSchema schema )
		{
			return new ReflectionDictionaryMessagePackSerializer<TDictionary, TKey, TValue>( context, targetType, schema );
		}
	}
	// ReSharper restore MemberHidesStaticFromOuterClass
#endif // !UNITY
}
}
| |
//
// Authors:
// Marek Habersack grendel@twistedcode.net
//
// Copyright (c) 2010, Novell, Inc (http://novell.com/)
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the distribution.
// * Neither the name of Novell, Inc nor names of the contributors may be used to endorse or promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using CaptainHook.Base;
namespace CaptainHook.Mail
{
public class TemplateParser : CommonBase
{
	// States of the hand-written character-by-character parser.
	enum ParsingState
	{
		Any,
		Start,
		InMacro,
		InMacroArgumentList,
		InQuotedMacroArgument,
		PlainText
	}

	sealed class CurrentState
	{
		TemplateFragment fragment;
		public ParsingState State;

		// Fragment being built in this state; must be assigned before it is read.
		public TemplateFragment Fragment {
			get {
				if (fragment == null)
					throw new InvalidOperationException ("Current state not initialized properly.");
				return fragment;
			}

			set {
				if (value == null)
					throw new ArgumentNullException ("value");
				fragment = value;
			}
		}

		public CurrentState (ParsingState state)
		{
			this.State = state;
		}

		public CurrentState (ParsingState state, TemplateFragment fragment)
			: this (state)
		{
			this.Fragment = fragment;
		}

		// Returns the current fragment as T, throwing when the runtime type differs.
		public T CastFragment <T> () where T : TemplateFragment
		{
			TemplateFragment fragment = Fragment;
			Type t = fragment.GetType ();
			if (t != typeof (T))
				// BUG FIX: the format string has five placeholders ({0}-{4}) but only four
				// arguments were supplied, so this line itself threw FormatException instead
				// of the intended error. {2} is the input file the fragment came from.
				throw new InvalidOperationException (String.Format ("Expected fragment of type '{0}', found type '{1}' at {2}:{3},{4}",
										    typeof (T).FullName, t.FullName, fragment.InFile, fragment.LineStart, fragment.ColumnStart));
			return Fragment as T;
		}
	}

	public event EventHandler <FragmentParsedEventArguments> FragmentParsed;

	Stack <CurrentState> stateStack;
	string inputPath;
	int currentLine, currentColumn;

	public TemplateParser (string inputPath)
	{
		this.inputPath = inputPath;
	}

	// Parses the template file passed to the constructor, raising FragmentParsed for
	// each completed top-level fragment.
	public void Parse ()
	{
		stateStack = new Stack <CurrentState> ();
		stateStack.Push (new CurrentState (ParsingState.Start));
		currentLine = 1;
		currentColumn = -1;

		using (var sr = new StreamReader (inputPath, Encoding.UTF8)) {
			Parse (sr);
		}
	}

	void Parse (StreamReader sr)
	{
		int b = sr.Read ();
		char ch;
		CurrentState state, lastState;
		TemplateFragment fragment;

		while (b != -1) {
			ValidateStateStack ();
			state = stateStack.Peek ();
			ch = (char)b;
			currentColumn++;
			switch (ch) {
				case '@':
					if (state.State == ParsingState.InMacro) {
						// Closing '@' - finish the macro and hand it either to the
						// enclosing argument list (when nested) or to subscribers.
						lastState = ZapState (false);
						ValidateStateStack ();
						state = stateStack.Peek ();
						if (state.State == ParsingState.InMacroArgumentList || state.State == ParsingState.InQuotedMacroArgument) {
							var f = state.CastFragment<TemplateFragmentArgument> ();
							f.Fragments.Add (lastState.Fragment);
						} else
							OnFragmentParsed (lastState.Fragment);
					} else {
						// Opening '@' - flush any accumulated plain text and start a macro.
						if (state.State == ParsingState.PlainText || state.State == ParsingState.Start)
							ZapState ();
						ValidateStateStack ();
						fragment = CreateFragment<TemplateFragmentMacro> ();
						stateStack.Push (new CurrentState (ParsingState.InMacro, fragment));
					}
					break;

				case '(':
					if (state.State == ParsingState.InMacro) {
						fragment = CreateFragment<TemplateFragmentArgument> (f => {
							f.Parent = state.CastFragment<TemplateFragmentMacro> ();
						});
						stateStack.Push (new CurrentState (ParsingState.InMacroArgumentList, fragment));
					} else
						goto default;
					break;

				case ')':
					if (state.State == ParsingState.InMacroArgumentList) {
						lastState = ZapState (false);
						ValidateStateStack (ParsingState.InMacro);
						state = stateStack.Peek ();
						var f = state.CastFragment <TemplateFragmentMacro> ();
						f.Arguments.Add (lastState.CastFragment <TemplateFragmentArgument> ());
					} else
						goto default;
					break;

				case '\r':
					// CR does not advance the column counter.
					currentColumn--;
					if (state.State == ParsingState.PlainText || state.State == ParsingState.Start)
						goto default;
					else
						throw new InvalidOperationException ("Carriage return characters are allowed only in plain text.");

				case '\n':
					currentLine++;
					currentColumn = 0;
					if (state.State == ParsingState.PlainText || state.State == ParsingState.Start)
						goto default;
					else
						throw new InvalidOperationException ("Newline characters are allowed only in plain text.");

				default:
					switch (state.State) {
						default:
							state.Fragment.Append (ch);
							break;

						case ParsingState.Start:
							fragment = CreateFragment <TemplateFragmentPlainText> ();
							fragment.Append (ch);
							stateStack.Push (new CurrentState (ParsingState.PlainText, fragment));
							break;
					}
					break;
			}

			b = sr.Read ();
		}

		ZapState ();
		state = stateStack.Peek ();
		if (state.State != ParsingState.Start)
			throw new InvalidOperationException (String.Format ("Parsing error. Invalid state '{0}' on stack after parsing.", state.State));
	}

	void OnFragmentParsed (TemplateFragment fragment)
	{
		EventHandler <FragmentParsedEventArguments> eh = FragmentParsed;
		if (eh != null)
			eh (this, new FragmentParsedEventArguments (fragment));
	}

	// Throws unless the top of the stack is one of expectedState. An empty expectedState
	// (or one containing ParsingState.Any) accepts any valid state.
	void ValidateStateStack (params ParsingState[] expectedState)
	{
		if (stateStack.Count == 0)
			throw new InvalidOperationException ("Internal error. Parser state stack is empty.");

		CurrentState state = stateStack.Peek ();
		if (state.State == ParsingState.Any)
			throw new InvalidOperationException ("Internal error. Invalid parsing state value found on stack.");

		if (expectedState == null || expectedState.Length == 0)
			return;

		bool failed = true;
		foreach (ParsingState exp in expectedState) {
			if (exp == ParsingState.Any)
				return;

			if (state.State == exp) {
				failed = false;
				break;
			}
		}

		if (failed)
			throw new InvalidOperationException (String.Format ("Internal error. Unexpected parser state '{0}' at {1}:{2},{3}.", state.State, inputPath, currentLine, currentColumn));
	}

	T CreateFragment <T> () where T : TemplateFragment, new ()
	{
		return CreateFragment <T> (null);
	}

	// Creates a fragment stamped with the current parse position and input file,
	// optionally running an initializer on it.
	T CreateFragment <T> (Action <T> init) where T : TemplateFragment, new ()
	{
		T fragment = new T () {
			LineStart = currentLine,
			// BUG FIX: ColumnStart was initialized from currentLine (copy-paste error),
			// which reported wrong column positions in every diagnostic.
			ColumnStart = currentColumn,
			InFile = inputPath
		};

		if (init == null)
			return fragment;

		init (fragment);
		return fragment;
	}

	CurrentState ZapState ()
	{
		return ZapState (true);
	}

	// Pops the top state (unless it is Start), stamps the fragment's end position and
	// optionally raises FragmentParsed for it.
	CurrentState ZapState (bool triggerOnFragmentParsed)
	{
		CurrentState top = stateStack.Peek ();
		if (top.State == ParsingState.Start)
			return top;

		top = stateStack.Pop ();
		TemplateFragment fragment = top.Fragment;
		fragment.LineEnd = currentLine;
		fragment.ColumnEnd = currentColumn;

		if (triggerOnFragmentParsed)
			OnFragmentParsed (fragment);

		return top;
	}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
using Apache.Ignite.Core.Binary;
using Apache.Ignite.Core.Cache;
using Apache.Ignite.Core.Cache.Configuration;
using Apache.Ignite.Core.Cluster;
using Apache.Ignite.Core.Communication;
using Apache.Ignite.Core.Communication.Tcp;
using Apache.Ignite.Core.DataStructures.Configuration;
using Apache.Ignite.Core.Discovery;
using Apache.Ignite.Core.Discovery.Tcp;
using Apache.Ignite.Core.Events;
using Apache.Ignite.Core.Impl;
using Apache.Ignite.Core.Impl.Binary;
using Apache.Ignite.Core.Impl.Common;
using Apache.Ignite.Core.Lifecycle;
using Apache.Ignite.Core.Log;
using Apache.Ignite.Core.Transactions;
using BinaryReader = Apache.Ignite.Core.Impl.Binary.BinaryReader;
using BinaryWriter = Apache.Ignite.Core.Impl.Binary.BinaryWriter;
/// <summary>
/// Grid configuration.
/// </summary>
public class IgniteConfiguration
{
/// <summary>
/// Default initial JVM memory in megabytes.
/// </summary>
public const int DefaultJvmInitMem = -1;
/// <summary>
/// Default maximum JVM memory in megabytes.
/// </summary>
public const int DefaultJvmMaxMem = -1;
/// <summary>
/// Default metrics expire time.
/// </summary>
public static readonly TimeSpan DefaultMetricsExpireTime = TimeSpan.MaxValue;
/// <summary>
/// Default metrics history size.
/// </summary>
public const int DefaultMetricsHistorySize = 10000;
/// <summary>
/// Default metrics log frequency.
/// </summary>
public static readonly TimeSpan DefaultMetricsLogFrequency = TimeSpan.FromMilliseconds(60000);
/// <summary>
/// Default metrics update frequency.
/// </summary>
public static readonly TimeSpan DefaultMetricsUpdateFrequency = TimeSpan.FromMilliseconds(2000);
/// <summary>
/// Default network timeout.
/// </summary>
public static readonly TimeSpan DefaultNetworkTimeout = TimeSpan.FromMilliseconds(5000);
/// <summary>
/// Default network retry delay.
/// </summary>
public static readonly TimeSpan DefaultNetworkSendRetryDelay = TimeSpan.FromMilliseconds(1000);
/** */
private TimeSpan? _metricsExpireTime;
/** */
private int? _metricsHistorySize;
/** */
private TimeSpan? _metricsLogFrequency;
/** */
private TimeSpan? _metricsUpdateFrequency;
/** */
private int? _networkSendRetryCount;
/** */
private TimeSpan? _networkSendRetryDelay;
/** */
private TimeSpan? _networkTimeout;
/** */
private bool? _isDaemon;
/** */
private bool? _isLateAffinityAssignment;
/** */
private bool? _clientMode;
/// <summary>
/// Default network retry count.
/// </summary>
public const int DefaultNetworkSendRetryCount = 3;
/// <summary>
/// Default late affinity assignment mode.
/// </summary>
public const bool DefaultIsLateAffinityAssignment = true;
/// <summary>
/// Initializes a new instance of the <see cref="IgniteConfiguration"/> class.
/// </summary>
public IgniteConfiguration()
{
    // Start from the default JVM memory settings (-1).
    JvmInitialMemoryMb = DefaultJvmInitMem;
    JvmMaxMemoryMb = DefaultJvmMaxMem;
}
/// <summary>
/// Initializes a new instance of the <see cref="IgniteConfiguration"/> class,
/// copying all settings from another instance.
/// </summary>
/// <param name="configuration">The configuration to copy.</param>
public IgniteConfiguration(IgniteConfiguration configuration)
{
    IgniteArgumentCheck.NotNull(configuration, "configuration");

    CopyLocalProperties(configuration);

    // Deep-copy the remaining state by marshalling the source configuration into a
    // temporary stream and reading it back into this instance.
    using (var stream = IgniteManager.Memory.Allocate().GetStream())
    {
        var marshaller = new Marshaller(configuration.BinaryConfiguration);

        configuration.Write(marshaller.StartMarshal(stream));

        stream.SynchronizeOutput();
        stream.Seek(0, SeekOrigin.Begin);

        ReadCore(marshaller.StartUnmarshal(stream));
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="IgniteConfiguration"/> class from a reader.
/// </summary>
/// <param name="binaryReader">The binary reader.</param>
internal IgniteConfiguration(BinaryReader binaryReader)
{
    // NOTE(review): Read's body is not visible in this chunk; presumably it delegates to
    // ReadCore to consume the serialized form produced by Write - confirm.
    Read(binaryReader);
}
/// <summary>
/// Writes this instance to a writer.
/// </summary>
/// <param name="writer">The writer.</param>
/// <remarks>
/// The write order must stay exactly in sync with the read order in <see cref="ReadCore"/>.
/// </remarks>
internal void Write(BinaryWriter writer)
{
    Debug.Assert(writer != null);

    // Simple properties
    writer.WriteBooleanNullable(_clientMode);
    writer.WriteIntArray(IncludedEventTypes == null ? null : IncludedEventTypes.ToArray());
    writer.WriteTimeSpanAsLongNullable(_metricsExpireTime);
    writer.WriteIntNullable(_metricsHistorySize);
    writer.WriteTimeSpanAsLongNullable(_metricsLogFrequency);
    writer.WriteTimeSpanAsLongNullable(_metricsUpdateFrequency);
    writer.WriteIntNullable(_networkSendRetryCount);
    writer.WriteTimeSpanAsLongNullable(_networkSendRetryDelay);
    writer.WriteTimeSpanAsLongNullable(_networkTimeout);
    writer.WriteString(WorkDirectory);
    writer.WriteString(Localhost);
    writer.WriteBooleanNullable(_isDaemon);
    writer.WriteBooleanNullable(_isLateAffinityAssignment);

    // Cache config (count-prefixed list)
    var caches = CacheConfiguration;

    if (caches == null)
        writer.WriteInt(0);
    else
    {
        writer.WriteInt(caches.Count);

        foreach (var cache in caches)
            cache.Write(writer);
    }

    // Discovery config: presence flag followed by the SPI; only TCP discovery is supported.
    var disco = DiscoverySpi;

    if (disco != null)
    {
        writer.WriteBoolean(true);

        var tcpDisco = disco as TcpDiscoverySpi;

        if (tcpDisco == null)
            throw new InvalidOperationException("Unsupported discovery SPI: " + disco.GetType());

        tcpDisco.Write(writer);
    }
    else
        writer.WriteBoolean(false);

    // Communication config: presence flag followed by the SPI; only TCP communication is supported.
    var comm = CommunicationSpi;

    if (comm != null)
    {
        writer.WriteBoolean(true);

        var tcpComm = comm as TcpCommunicationSpi;

        if (tcpComm == null)
            throw new InvalidOperationException("Unsupported communication SPI: " + comm.GetType());

        tcpComm.Write(writer);
    }
    else
        writer.WriteBoolean(false);

    // Binary config: only the compact-footer flag is serialized, and only when explicitly set.
    var isCompactFooterSet = BinaryConfiguration != null && BinaryConfiguration.CompactFooterInternal != null;

    writer.WriteBoolean(isCompactFooterSet);

    if (isCompactFooterSet)
        writer.WriteBoolean(BinaryConfiguration.CompactFooter);

    // User attributes (count-prefixed key/value pairs)
    var attrs = UserAttributes;

    if (attrs == null)
        writer.WriteInt(0);
    else
    {
        writer.WriteInt(attrs.Count);

        foreach (var pair in attrs)
        {
            writer.WriteString(pair.Key);
            writer.Write(pair.Value);
        }
    }

    // Atomic
    if (AtomicConfiguration != null)
    {
        writer.WriteBoolean(true);

        writer.WriteInt(AtomicConfiguration.AtomicSequenceReserveSize);
        writer.WriteInt(AtomicConfiguration.Backups);
        writer.WriteInt((int) AtomicConfiguration.CacheMode);
    }
    else
        writer.WriteBoolean(false);

    // Tx
    if (TransactionConfiguration != null)
    {
        writer.WriteBoolean(true);

        writer.WriteInt(TransactionConfiguration.PessimisticTransactionLogSize);
        writer.WriteInt((int) TransactionConfiguration.DefaultTransactionConcurrency);
        writer.WriteInt((int) TransactionConfiguration.DefaultTransactionIsolation);
        writer.WriteLong((long) TransactionConfiguration.DefaultTimeout.TotalMilliseconds);

        // BUG FIX: ReadCore reads this value with ReadInt, but it was written with
        // WriteLong (while already casting to int), which desynchronized the stream and
        // corrupted everything read after it.
        writer.WriteInt((int) TransactionConfiguration.PessimisticTransactionLogLinger.TotalMilliseconds);
    }
    else
        writer.WriteBoolean(false);
}
/// <summary>
/// Validates this instance and outputs information to the log, if necessary.
/// </summary>
internal void Validate(ILogger log)
{
    Debug.Assert(log != null);

    var cacheConfigs = CacheConfiguration;

    // Nothing to validate when no caches are configured.
    if (cacheConfigs == null)
        return;

    foreach (var cacheConfig in cacheConfigs)
        cacheConfig.Validate(log);
}
/// <summary>
/// Reads data from specified reader into current instance.
/// </summary>
/// <param name="r">The binary reader.</param>
/// <remarks>
/// The read order must stay exactly in sync with the write order in <see cref="Write"/>.
/// </remarks>
private void ReadCore(BinaryReader r)
{
    // Simple properties
    _clientMode = r.ReadBooleanNullable();
    IncludedEventTypes = r.ReadIntArray();
    _metricsExpireTime = r.ReadTimeSpanNullable();
    _metricsHistorySize = r.ReadIntNullable();
    _metricsLogFrequency = r.ReadTimeSpanNullable();
    _metricsUpdateFrequency = r.ReadTimeSpanNullable();
    _networkSendRetryCount = r.ReadIntNullable();
    _networkSendRetryDelay = r.ReadTimeSpanNullable();
    _networkTimeout = r.ReadTimeSpanNullable();
    WorkDirectory = r.ReadString();
    Localhost = r.ReadString();
    _isDaemon = r.ReadBooleanNullable();
    _isLateAffinityAssignment = r.ReadBooleanNullable();

    // Cache config (count-prefixed list)
    var cacheCfgCount = r.ReadInt();
    CacheConfiguration = new List<CacheConfiguration>(cacheCfgCount);
    for (int i = 0; i < cacheCfgCount; i++)
        CacheConfiguration.Add(new CacheConfiguration(r));

    // Discovery config: presence flag; only TCP discovery is serialized.
    DiscoverySpi = r.ReadBoolean() ? new TcpDiscoverySpi(r) : null;

    // Communication config: presence flag; only TCP communication is serialized.
    CommunicationSpi = r.ReadBoolean() ? new TcpCommunicationSpi(r) : null;

    // Binary config: compact-footer flag, present only when it was explicitly set.
    if (r.ReadBoolean())
    {
        BinaryConfiguration = BinaryConfiguration ?? new BinaryConfiguration();
        BinaryConfiguration.CompactFooter = r.ReadBoolean();
    }

    // User attributes (count-prefixed key/value pairs; relies on left-to-right
    // evaluation so the key is read before the value for each entry)
    UserAttributes = Enumerable.Range(0, r.ReadInt())
        .ToDictionary(x => r.ReadString(), x => r.ReadObject<object>());

    // Atomic
    if (r.ReadBoolean())
    {
        AtomicConfiguration = new AtomicConfiguration
        {
            AtomicSequenceReserveSize = r.ReadInt(),
            Backups = r.ReadInt(),
            CacheMode = (CacheMode) r.ReadInt()
        };
    }

    // Tx
    if (r.ReadBoolean())
    {
        TransactionConfiguration = new TransactionConfiguration
        {
            PessimisticTransactionLogSize = r.ReadInt(),
            DefaultTransactionConcurrency = (TransactionConcurrency) r.ReadInt(),
            DefaultTransactionIsolation = (TransactionIsolation) r.ReadInt(),
            DefaultTimeout = TimeSpan.FromMilliseconds(r.ReadLong()),
            PessimisticTransactionLogLinger = TimeSpan.FromMilliseconds(r.ReadInt())
        };
    }
}
/// <summary>
/// Reads data from specified reader into current instance.
/// </summary>
/// <param name="binaryReader">The binary reader.</param>
private void Read(BinaryReader binaryReader)
{
    var r = binaryReader;

    // Preserve properties that are not part of the serialized form.
    CopyLocalProperties(r.Marshaller.Ignite.Configuration);

    ReadCore(r);

    // Misc
    IgniteHome = r.ReadString();

    // Fix: convert bytes to megabytes; the previous divisor 2014 was a typo for 1024.
    JvmInitialMemoryMb = (int) (r.ReadLong()/1024/1024);
    JvmMaxMemoryMb = (int) (r.ReadLong()/1024/1024);

    // Local data (not from reader): locate the loaded jvm.dll module of this process.
    JvmDllPath = Process.GetCurrentProcess().Modules.OfType<ProcessModule>()
        .Single(x => string.Equals(x.ModuleName, IgniteUtils.FileJvmDll, StringComparison.OrdinalIgnoreCase))
        .FileName;
}
/// <summary>
/// Copies the local properties (properties that are not written in Write method).
/// </summary>
private void CopyLocalProperties(IgniteConfiguration cfg)
{
    GridName = cfg.GridName;

    // Deep-copy the binary configuration when present, keep null otherwise.
    BinaryConfiguration = cfg.BinaryConfiguration != null
        ? new BinaryConfiguration(cfg.BinaryConfiguration)
        : null;

    JvmClasspath = cfg.JvmClasspath;
    JvmOptions = cfg.JvmOptions;
    Assemblies = cfg.Assemblies;
    SuppressWarnings = cfg.SuppressWarnings;
    LifecycleBeans = cfg.LifecycleBeans;
    Logger = cfg.Logger;
    JvmInitialMemoryMb = cfg.JvmInitialMemoryMb;
    JvmMaxMemoryMb = cfg.JvmMaxMemoryMb;
}
/// <summary>
/// Grid name which is used if not provided in configuration file.
/// </summary>
public string GridName { get; set; }
/// <summary>
/// Gets or sets the binary configuration.
/// </summary>
/// <value>
/// The binary configuration.
/// </value>
public BinaryConfiguration BinaryConfiguration { get; set; }
/// <summary>
/// Gets or sets the cache configuration.
/// </summary>
/// <value>
/// The cache configuration.
/// </value>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public ICollection<CacheConfiguration> CacheConfiguration { get; set; }
/// <summary>
/// URL to Spring configuration file.
/// <para />
/// Spring configuration is loaded first, then <see cref="IgniteConfiguration"/> properties are applied.
/// Null property values do not override Spring values.
/// Value-typed properties are tracked internally: if setter was not called, Spring value won't be overwritten.
/// <para />
/// This merging happens on the top level only; e. g. if there are cache configurations defined in Spring
/// and in .NET, .NET caches will overwrite Spring caches.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1056:UriPropertiesShouldNotBeStrings")]
public string SpringConfigUrl { get; set; }
/// <summary>
/// Path to the jvm.dll file. If not set, its location will be determined
/// using JAVA_HOME environment variable.
/// If path is neither set nor determined automatically, an exception
/// will be thrown.
/// </summary>
public string JvmDllPath { get; set; }
/// <summary>
/// Path to Ignite home. If not set environment variable IGNITE_HOME will be used.
/// </summary>
public string IgniteHome { get; set; }
/// <summary>
/// Classpath used by JVM on Ignite start.
/// </summary>
public string JvmClasspath { get; set; }
/// <summary>
/// Collection of options passed to JVM on Ignite start.
/// </summary>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public ICollection<string> JvmOptions { get; set; }
/// <summary>
/// List of additional .Net assemblies to load on Ignite start. Each item can be either a
/// fully qualified assembly name, a path to an assembly DLL, or a path to a directory where
/// assemblies reside.
/// </summary>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public ICollection<string> Assemblies { get; set; }
/// <summary>
/// Whether to suppress warnings.
/// </summary>
public bool SuppressWarnings { get; set; }
/// <summary>
/// Lifecycle beans.
/// </summary>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public ICollection<ILifecycleBean> LifecycleBeans { get; set; }
/// <summary>
/// Initial amount of memory in megabytes given to JVM. Maps to -Xms Java option.
/// <code>-1</code> maps to JVM defaults.
/// Defaults to <see cref="DefaultJvmInitMem"/>.
/// </summary>
[DefaultValue(DefaultJvmInitMem)]
public int JvmInitialMemoryMb { get; set; }
/// <summary>
/// Maximum amount of memory in megabytes given to JVM. Maps to -Xmx Java option.
/// <code>-1</code> maps to JVM defaults.
/// Defaults to <see cref="DefaultJvmMaxMem"/>.
/// </summary>
[DefaultValue(DefaultJvmMaxMem)]
public int JvmMaxMemoryMb { get; set; }
/// <summary>
/// Gets or sets the discovery service provider.
/// Null for default discovery.
/// </summary>
public IDiscoverySpi DiscoverySpi { get; set; }
/// <summary>
/// Gets or sets the communication service provider.
/// Null for default communication.
/// </summary>
public ICommunicationSpi CommunicationSpi { get; set; }
/// <summary>
/// Gets or sets a value indicating whether node should start in client mode.
/// Client node cannot hold data in the caches.
/// </summary>
public bool ClientMode
{
get { return _clientMode ?? default(bool); }
set { _clientMode = value; }
}
/// <summary>
/// Gets or sets a set of event types (<see cref="EventType" />) to be recorded by Ignite.
/// </summary>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public ICollection<int> IncludedEventTypes { get; set; }
/// <summary>
/// Gets or sets the time after which a certain metric value is considered expired.
/// </summary>
[DefaultValue(typeof(TimeSpan), "10675199.02:48:05.4775807")]
public TimeSpan MetricsExpireTime
{
get { return _metricsExpireTime ?? DefaultMetricsExpireTime; }
set { _metricsExpireTime = value; }
}
/// <summary>
/// Gets or sets the number of metrics kept in history to compute totals and averages.
/// </summary>
[DefaultValue(DefaultMetricsHistorySize)]
public int MetricsHistorySize
{
get { return _metricsHistorySize ?? DefaultMetricsHistorySize; }
set { _metricsHistorySize = value; }
}
/// <summary>
/// Gets or sets the frequency of metrics log print out.
/// <see cref="TimeSpan.Zero"/> to disable metrics print out.
/// </summary>
[DefaultValue(typeof(TimeSpan), "00:01:00")]
public TimeSpan MetricsLogFrequency
{
get { return _metricsLogFrequency ?? DefaultMetricsLogFrequency; }
set { _metricsLogFrequency = value; }
}
/// <summary>
/// Gets or sets the job metrics update frequency.
/// <see cref="TimeSpan.Zero"/> to update metrics on job start/finish.
/// Negative value to never update metrics.
/// </summary>
[DefaultValue(typeof(TimeSpan), "00:00:02")]
public TimeSpan MetricsUpdateFrequency
{
get { return _metricsUpdateFrequency ?? DefaultMetricsUpdateFrequency; }
set { _metricsUpdateFrequency = value; }
}
/// <summary>
/// Gets or sets the network send retry count.
/// </summary>
[DefaultValue(DefaultNetworkSendRetryCount)]
public int NetworkSendRetryCount
{
get { return _networkSendRetryCount ?? DefaultNetworkSendRetryCount; }
set { _networkSendRetryCount = value; }
}
/// <summary>
/// Gets or sets the network send retry delay.
/// </summary>
[DefaultValue(typeof(TimeSpan), "00:00:01")]
public TimeSpan NetworkSendRetryDelay
{
get { return _networkSendRetryDelay ?? DefaultNetworkSendRetryDelay; }
set { _networkSendRetryDelay = value; }
}
/// <summary>
/// Gets or sets the network timeout.
/// </summary>
[DefaultValue(typeof(TimeSpan), "00:00:05")]
public TimeSpan NetworkTimeout
{
get { return _networkTimeout ?? DefaultNetworkTimeout; }
set { _networkTimeout = value; }
}
/// <summary>
/// Gets or sets the work directory.
/// If not provided, a folder under <see cref="IgniteHome"/> will be used.
/// </summary>
public string WorkDirectory { get; set; }
/// <summary>
/// Gets or sets system-wide local address or host for all Ignite components to bind to.
/// If provided it will override all default local bind settings within Ignite.
/// <para />
/// If <c>null</c> then Ignite tries to use local wildcard address. That means that all services
/// will be available on all network interfaces of the host machine.
/// <para />
/// It is strongly recommended to set this parameter for all production environments.
/// </summary>
public string Localhost { get; set; }
/// <summary>
/// Gets or sets a value indicating whether this node should be a daemon node.
/// <para />
/// Daemon nodes are the usual grid nodes that participate in topology but not visible on the main APIs,
/// i.e. they are not part of any cluster groups.
/// <para />
/// Daemon nodes are used primarily for management and monitoring functionality that is built on Ignite
/// and needs to participate in the topology, but also needs to be excluded from the "normal" topology,
/// so that it won't participate in the task execution or in-memory data grid storage.
/// </summary>
public bool IsDaemon
{
get { return _isDaemon ?? default(bool); }
set { _isDaemon = value; }
}
/// <summary>
/// Gets or sets the user attributes for this node.
/// <para />
/// These attributes can be retrieved later via <see cref="IClusterNode.GetAttributes"/>.
/// Environment variables are added to node attributes automatically.
/// NOTE: attribute names starting with "org.apache.ignite" are reserved for internal use.
/// </summary>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public IDictionary<string, object> UserAttributes { get; set; }
/// <summary>
/// Gets or sets the atomic data structures configuration.
/// </summary>
public AtomicConfiguration AtomicConfiguration { get; set; }
/// <summary>
/// Gets or sets the transaction configuration.
/// </summary>
public TransactionConfiguration TransactionConfiguration { get; set; }
/// <summary>
/// Gets or sets a value indicating whether late affinity assignment mode should be used.
/// <para />
/// On each topology change, for each started cache, partition-to-node mapping is
/// calculated using AffinityFunction for cache. When late
/// affinity assignment mode is disabled then new affinity mapping is applied immediately.
/// <para />
/// With late affinity assignment mode, if primary node was changed for some partition, but data for this
/// partition is not rebalanced yet on this node, then current primary is not changed and new primary
/// is temporary assigned as backup. This node becomes primary only when rebalancing for all assigned primary
/// partitions is finished. This mode can show better performance for cache operations, since when cache
/// primary node executes some operation and data is not rebalanced yet, then it sends additional message
/// to force rebalancing from other nodes.
/// <para />
/// Note, that <see cref="ICacheAffinity"/> interface provides assignment information taking late assignment
/// into account, so while rebalancing for new primary nodes is not finished it can return assignment
/// which differs from assignment calculated by AffinityFunction.
/// <para />
/// This property should have the same value for all nodes in cluster.
/// <para />
/// If not provided, default value is <see cref="DefaultIsLateAffinityAssignment"/>.
/// </summary>
[DefaultValue(DefaultIsLateAffinityAssignment)]
public bool IsLateAffinityAssignment
{
get { return _isLateAffinityAssignment ?? DefaultIsLateAffinityAssignment; }
set { _isLateAffinityAssignment = value; }
}
/// <summary>
/// Serializes this instance to the specified XML writer.
/// </summary>
/// <param name="writer">The writer.</param>
/// <param name="rootElementName">Name of the root element.</param>
public void ToXml(XmlWriter writer, string rootElementName)
{
    // Validate arguments before delegating to the serializer.
    IgniteArgumentCheck.NotNull(writer, "writer");
    IgniteArgumentCheck.NotNullOrEmpty(rootElementName, "rootElementName");

    IgniteConfigurationXmlSerializer.Serialize(this, writer, rootElementName);
}
/// <summary>
/// Serializes this instance to an XML string.
/// </summary>
public string ToXml()
{
    var builder = new StringBuilder();
    var writerSettings = new XmlWriterSettings { Indent = true };

    // Delegate the actual serialization to the writer-based overload.
    using (var xmlWriter = XmlWriter.Create(builder, writerSettings))
        ToXml(xmlWriter, "igniteConfiguration");

    return builder.ToString();
}
/// <summary>
/// Deserializes IgniteConfiguration from the XML reader.
/// </summary>
/// <param name="reader">The reader.</param>
/// <returns>Deserialized instance.</returns>
public static IgniteConfiguration FromXml(XmlReader reader)
{
    IgniteArgumentCheck.NotNull(reader, "reader");

    return IgniteConfigurationXmlSerializer.Deserialize(reader);
}
/// <summary>
/// Deserializes IgniteConfiguration from the XML string.
/// </summary>
/// <param name="xml">Xml string.</param>
/// <returns>Deserialized instance.</returns>
public static IgniteConfiguration FromXml(string xml)
{
    IgniteArgumentCheck.NotNullOrEmpty(xml, "xml");

    using (var xmlReader = XmlReader.Create(new StringReader(xml)))
    {
        // Position the reader on the root element, skipping the XML declaration.
        xmlReader.MoveToContent();

        return FromXml(xmlReader);
    }
}
/// <summary>
/// Gets or sets the logger.
/// <para />
/// If no logger is set, logging is delegated to Java, which uses the logger defined in Spring XML (if present)
/// or logs to console otherwise.
/// </summary>
/// <value>The logger instance, or null to delegate logging to the Java side.</value>
public ILogger Logger { get; set; }
}
}
| |
/*
* Copyright 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using ZXing.Common;
namespace ZXing.OneD
{
/// <summary>
/// This object renders a CODE128 code as a <see cref="BitMatrix" />.
///
/// <author>erik.barbara@gmail.com (Erik Barbara)</author>
/// </summary>
public sealed class Code128Writer : OneDimensionalCodeWriter
{
// Code 128 symbol values for the start codes, code-set switch codes and the stop code.
private const int CODE_START_B = 104;
private const int CODE_START_C = 105;
private const int CODE_CODE_B = 100;
private const int CODE_CODE_C = 99;
private const int CODE_STOP = 106;
// Dummy characters used to specify control characters in input
private const char ESCAPE_FNC_1 = '\u00f1';
private const char ESCAPE_FNC_2 = '\u00f2';
private const char ESCAPE_FNC_3 = '\u00f3';
private const char ESCAPE_FNC_4 = '\u00f4';
// Symbol values of the FNC function characters in the respective code sets.
private const int CODE_FNC_1 = 102; // Code A, Code B, Code C
private const int CODE_FNC_2 = 97; // Code A, Code B
private const int CODE_FNC_3 = 96; // Code A, Code B
private const int CODE_FNC_4_B = 100; // Code B
/// <summary>
/// Encodes the contents as a CODE_128 bit matrix; rejects any other format.
/// </summary>
public override BitMatrix encode(String contents,
BarcodeFormat format,
int width,
int height,
IDictionary<EncodeHintType, object> hints)
{
if (format != BarcodeFormat.CODE_128)
{
throw new ArgumentException("Can only encode CODE_128, but got " + format);
}
return base.encode(contents, format, width, height, hints);
}
/// <summary>
/// Encodes the contents as a single row of booleans (bar = true, space = false).
/// Switches between code sets B and C on the fly: code set C is chosen when enough
/// consecutive digits are available (2 when already in C, 4 otherwise).
/// </summary>
override public bool[] encode(String contents)
{
int length = contents.Length;
// Check length
if (length < 1 || length > 80)
{
throw new ArgumentException(
"Contents length should be between 1 and 80 characters, but got " + length);
}
// Check content: only printable ASCII and the FNC escape placeholders are accepted.
for (int i = 0; i < length; i++)
{
char c = contents[i];
if (c < ' ' || c > '~')
{
switch (c)
{
case ESCAPE_FNC_1:
case ESCAPE_FNC_2:
case ESCAPE_FNC_3:
case ESCAPE_FNC_4:
break;
default:
throw new ArgumentException("Bad character in input: " + c);
}
}
}
var patterns = new List<int[]>(); // temporary storage for patterns
int checkSum = 0;
int checkWeight = 1;
int codeSet = 0; // selected code (CODE_CODE_B or CODE_CODE_C)
int position = 0; // position in contents
while (position < length)
{
//Select code to use
// In code set C each symbol encodes a digit pair, so 2 digits suffice to stay;
// switching into C only pays off with at least 4 consecutive digits.
int requiredDigitCount = codeSet == CODE_CODE_C ? 2 : 4;
int newCodeSet;
if (isDigits(contents, position, requiredDigitCount))
{
newCodeSet = CODE_CODE_C;
}
else
{
newCodeSet = CODE_CODE_B;
}
//Get the pattern index
int patternIndex;
if (newCodeSet == codeSet)
{
// Encode the current character
if (codeSet == CODE_CODE_B)
{
// Code B symbol values start at 0 for the space character.
patternIndex = contents[position] - ' ';
position += 1;
}
else
{ // CODE_CODE_C
switch (contents[position])
{
case ESCAPE_FNC_1:
patternIndex = CODE_FNC_1;
position++;
break;
case ESCAPE_FNC_2:
patternIndex = CODE_FNC_2;
position++;
break;
case ESCAPE_FNC_3:
patternIndex = CODE_FNC_3;
position++;
break;
case ESCAPE_FNC_4:
patternIndex = CODE_FNC_4_B; // FIXME if this ever outputs Code A
position++;
break;
default:
// Code C encodes two digits per symbol; the pair's numeric value
// is the symbol value directly.
patternIndex = Int32.Parse(contents.Substring(position, 2));
position += 2;
break;
}
}
}
else
{
// Should we change the current code?
// Do we have a code set?
if (codeSet == 0)
{
// No, we don't have a code set: emit the appropriate start code.
if (newCodeSet == CODE_CODE_B)
{
patternIndex = CODE_START_B;
}
else
{
// CODE_CODE_C
patternIndex = CODE_START_C;
}
}
else
{
// Yes, we have a code set: emit a code-switch symbol
// (the switch symbol value equals the target code-set constant).
patternIndex = newCodeSet;
}
codeSet = newCodeSet;
}
// Get the pattern
patterns.Add(Code128Reader.CODE_PATTERNS[patternIndex]);
// Compute checksum: the start symbol has weight 1, subsequent symbols
// have weights 1, 2, 3, ... (hence no increment for the first symbol).
checkSum += patternIndex * checkWeight;
if (position != 0)
{
checkWeight++;
}
}
// Compute and append checksum (modulo-103 check symbol, per the Code 128 spec)
checkSum %= 103;
patterns.Add(Code128Reader.CODE_PATTERNS[checkSum]);
// Append stop code
patterns.Add(Code128Reader.CODE_PATTERNS[CODE_STOP]);
// Compute code width: each pattern entry is a module width (bar or space).
int codeWidth = 0;
foreach (int[] pattern in patterns)
{
foreach (int width in pattern)
{
codeWidth += width;
}
}
// Compute result
var result = new bool[codeWidth];
int pos = 0;
foreach (int[] pattern in patterns)
{
pos += appendPattern(result, pos, pattern, true);
}
return result;
}
// Returns true when the substring [start, start+length) consists of digits;
// FNC_1 placeholders are skipped (they extend the window by one position).
private static bool isDigits(String value, int start, int length)
{
int end = start + length;
int last = value.Length;
for (int i = start; i < end && i < last; i++)
{
char c = value[i];
if (c < '0' || c > '9')
{
if (c != ESCAPE_FNC_1)
{
return false;
}
end++; // ignore FNC_1
}
}
return end <= last; // end > last if we've run out of string
}
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using Epi.Fields;
namespace Epi.Windows.MakeView.Dialogs.FieldDefinitionDialogs
{
/// <summary>
/// Field definition dialog for comment legal fields
/// </summary>
public partial class CommentLegalFieldDefinition : Epi.Windows.MakeView.Dialogs.FieldDefinitionDialogs.LegalValuesFieldDefinition
{
    // Shadowed state for this field type; hides the base-class members of the same name.
    private new DDLFieldOfCommentLegal field;
    private new string sourceTableName;
    private new string textColumnName;
    //private bool isCodeTableProcessed = false;

    /// <summary>
    /// Default constructor, for exclusive use by the designer.
    /// </summary>
    public CommentLegalFieldDefinition()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Constructor for the class
    /// </summary>
    /// <param name="frm">The parent form</param>
    /// <param name="page">The current page</param>
    public CommentLegalFieldDefinition(MainForm frm, Page page)
        : base(frm)
    {
        InitializeComponent();
        this.mode = FormMode.Create;
        this.page = page;
    }

    /// <summary>
    /// Constructor for the class
    /// </summary>
    /// <param name="frm">The parent form</param>
    /// <param name="field">The field to be edited</param>
    public CommentLegalFieldDefinition(MainForm frm, DDLFieldOfCommentLegal field)
        : base(frm)
    {
        InitializeComponent();
        this.mode = FormMode.Edit;
        this.field = field;
        this.page = field.Page;
        LoadFormData();
    }

    /// <summary>
    /// Load the form with the saved data
    /// </summary>
    private new void LoadFormData()
    {
        SetFontStyles(field);
        txtPrompt.Text = field.PromptText;
        txtFieldName.Text = field.Name;
        if (!string.IsNullOrEmpty(field.SourceTableName))
        {
            this.sourceTableName = field.SourceTableName;
            this.textColumnName = field.TextColumnName;
            txtDataSource.Text = field.SourceTableName + " :: " + field.TextColumnName;
        }
        chkReadOnly.Checked = field.IsReadOnly;
        chkRepeatLast.Checked = field.ShouldRepeatLast;
        chkRequired.Checked = field.IsRequired;

        // The data source picker is only usable once the field has a name.
        btnDataSource.Enabled = !string.IsNullOrEmpty(txtFieldName.Text);

        // special case when changing a field type from one type to this type - don't allow
        // the OK button to be pressed until there's a data source.
        btnOk.Enabled = !string.IsNullOrEmpty(txtDataSource.Text);
    }

    /// <summary>
    /// Sets the field's properties based on GUI values
    /// </summary>
    protected override void SetFieldProperties()
    {
        field.PromptText = txtPrompt.Text;
        if (promptFont != null)
        {
            field.PromptFont = promptFont;
        }
        if (controlFont != null)
        {
            field.ControlFont = controlFont;
        }
        field.Name = txtFieldName.Text;
        field.IsRequired = chkRequired.Checked;
        field.IsReadOnly = chkReadOnly.Checked;
        field.ShouldRepeatLast = chkRepeatLast.Checked;
        if (!string.IsNullOrEmpty(this.sourceTableName) && !string.IsNullOrEmpty(this.textColumnName))
        {
            field.SourceTableName = this.sourceTableName;
            field.TextColumnName = this.textColumnName;
        }
    }

    /// <summary>
    /// Gets the field defined by this field definition dialog
    /// </summary>
    public override RenderableField Field
    {
        get
        {
            return field;
        }
    }

    /// <summary>
    /// Handles the click event of the Datasource "..." button
    /// </summary>
    /// <param name="sender">Object that fired the event</param>
    /// <param name="e">.NET supplied event parameters</param>
    protected override void btnDataSource_Click(object sender, EventArgs e)
    {
        CommentLegalDialog commentLegalValuesDialog = new CommentLegalDialog((TableBasedDropDownField)this.Field, this.MainForm, txtFieldName.Text, this.page);
        DialogResult result = commentLegalValuesDialog.ShowDialog();
        if (result == DialogResult.OK)
        {
            if (!((string.IsNullOrEmpty(commentLegalValuesDialog.SourceTableName) && string.IsNullOrEmpty(commentLegalValuesDialog.TextColumnName))))
            {
                txtDataSource.Text = commentLegalValuesDialog.SourceTableName + " :: " + commentLegalValuesDialog.TextColumnName;
            }
            else
            {
                // The user cleared the data source; wipe both display and field state.
                txtDataSource.Text = string.Empty;
                field.SourceTableName = string.Empty;
                field.TextColumnName = string.Empty;
            }
            this.sourceTableName = commentLegalValuesDialog.SourceTableName;
            this.textColumnName = commentLegalValuesDialog.TextColumnName;
            btnOk.Enabled = true;
        }
    }

    /// <summary>
    /// Handles the leave event of the field name text box; re-evaluates button states.
    /// </summary>
    /// <param name="sender">Object that fired the event</param>
    /// <param name="e">.NET supplied event parameters</param>
    protected virtual void txtFieldName_Leave(object sender, System.EventArgs e)
    {
        if (!string.IsNullOrEmpty(txtFieldName.Text))
        {
            btnDataSource.Enabled = true;
            // OK stays disabled until a data source has been chosen.
            btnOk.Enabled = !string.IsNullOrEmpty(txtDataSource.Text);
        }
    }

    /// <summary>
    /// Handles the leave event of the prompt text box; derives a field name from the prompt.
    /// </summary>
    /// <param name="sender">Object that fired the event</param>
    /// <param name="e">.NET supplied event parameters</param>
    protected override void txtPrompt_Leave(object sender, System.EventArgs e)
    {
        if (!string.IsNullOrEmpty(txtPrompt.Text) && (string.IsNullOrEmpty(txtFieldName.Text)))
        {
            txtFieldName.Text = page.GetView().ComposeFieldNameFromPromptText(Util.Squeeze(txtPrompt.Text));
            btnDataSource.Enabled = true;
            btnOk.Enabled = false;
        }
    }

    /// <summary>
    /// Handles the key-up event of the prompt text box; on Enter, derives a field name
    /// from the prompt and re-evaluates button states.
    /// </summary>
    /// <param name="sender">Object that fired the event</param>
    /// <param name="e">.NET supplied event parameters</param>
    protected override void txtPrompt_OnKeyUp(object sender, KeyEventArgs e)
    {
        if (e.KeyCode == Keys.Enter)
        {
            if (!string.IsNullOrEmpty(txtPrompt.Text) && (string.IsNullOrEmpty(txtFieldName.Text)))
            {
                txtFieldName.Text = page.GetView().ComposeFieldNameFromPromptText(Util.Squeeze(txtPrompt.Text));
                btnDataSource.Enabled = false;
                btnOk.Enabled = false;
            }
            else
            {
                btnDataSource.Enabled = btnOk.Enabled = true;
            }
        }
    }

    /// <summary>
    /// Handles the key-up event of the field name text box; disables actions while the name is empty.
    /// </summary>
    /// <param name="sender">Object that fired the event</param>
    /// <param name="e">.NET supplied event parameters</param>
    protected void txtFieldName_OnKeyUp(object sender, KeyEventArgs e)
    {
        if (string.IsNullOrEmpty(txtFieldName.Text))
        {
            btnDataSource.Enabled = btnOk.Enabled = false;
        }
        else
        {
            btnDataSource.Enabled = btnOk.Enabled = true;
        }
    }
}
}
| |
// <copyright file="Program.cs" company="Stormpath, Inc.">
// Copyright (c) 2016 Stormpath, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Distributed;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Owin.Hosting;
using Owin;
using Stormpath.Configuration.Abstractions;
using Stormpath.Owin.Abstractions;
using Stormpath.Owin.Middleware;
using Stormpath.Owin.Views.Precompiled;
namespace Stormpath.Owin.NowinHarness
{
using AppFunc = Func<IDictionary<string, object>, Task>;
/// <summary>
/// Console entry point: hosts the OWIN pipeline on a Nowin server.
/// </summary>
public static class Program
{
    public static void Main(string[] args)
    {
        var startOptions = new StartOptions
        {
            ServerFactory = "Nowin",
            Port = 8080,
        };

        // Keep the server alive until a key is pressed.
        using (WebApp.Start<Startup>(startOptions))
        {
            Console.WriteLine("Running a http server on port 8080");
            Console.ReadKey();
        }
    }
}
/// <summary>
/// OWIN startup: wires the Stormpath middleware plus several sample routes
/// (/, /protected, /group, /customdata) into the pipeline.
/// </summary>
public class Startup
{
public void Configuration(IAppBuilder app)
{
var logger = new ConsoleLogger(LogLevel.Trace);
// Initialize the Stormpath middleware
var stormpath = StormpathMiddleware.Create(new StormpathOwinOptions()
{
LibraryUserAgent = "nowin/0.22.2",
ViewRenderer = new PrecompiledViewRenderer(logger),
Logger = logger,
// In-memory distributed cache; user entries expire after 30 seconds.
CacheProvider = new MemoryDistributedCache(new MemoryCache(new MemoryCacheOptions())),
CacheEntryOptions = new Dictionary<Type, DistributedCacheEntryOptions>()
{
[typeof(Middleware.Okta.User)] = new DistributedCacheEntryOptions { AbsoluteExpirationRelativeToNow = TimeSpan.FromSeconds(30) }
},
// Tag every new account with the host it was registered from.
PreRegistrationHandler = (ctx, ct) =>
{
ctx.Account.CustomData["source"] = "Nowin";
return Task.FromResult(true);
},
PostRegistrationHandler = async (ctx, ct) =>
{
var customData = await ctx.Account.GetCustomDataAsync(ct);
},
PreLoginHandler = (ctx, ct) =>
{
return Task.FromResult(true);
},
PostLoginHandler = async (ctx, ct) =>
{
var customData = await ctx.Account.GetCustomDataAsync(ct);
},
SendVerificationEmailHandler = (ctx, ct) =>
{
return Task.FromResult(true);
},
Configuration = new StormpathConfiguration
{
Web = new WebConfiguration
{
ServerUri = "http://localhost:8080",
//Register = new WebRegisterRouteConfiguration
//{
//    EmailVerificationRequired = false,
//    Form = new WebRegisterRouteFormConfiguration
//    {
//        Fields = new Dictionary<string, WebFieldConfiguration>()
//        {
//            ["stormpathApiKey_1"] = new WebFieldConfiguration { Enabled = true, Required = false },
//        }
//    }
//},
ForgotPassword = new WebForgotPasswordRouteConfiguration
{
Enabled = true
},
ChangePassword = new WebChangePasswordRouteConfiguration
{
Enabled = true
},
VerifyEmail = new WebVerifyEmailRouteConfiguration()
{
Enabled = false
},
RefreshTokenCookie = new WebRefreshTokenCookieConfiguration
{
MaxAge = 30 * 86400 // 30 days
}
}
}
});
// Insert it into the OWIN pipeline
app.Use(stormpath);
// Add a sample middleware that responds to GET /
app.Use(new Func<AppFunc, AppFunc>(next => (async env =>
{
// Pass anything that isn't the root path down the pipeline.
if (env["owin.RequestPath"] as string != "/")
{
await next.Invoke(env);
return;
}
using (var writer = new StreamWriter(env["owin.ResponseBody"] as Stream))
{
await writer.WriteAsync("<h1>Hello from OWIN!</h1>");
// The Stormpath middleware sets this key when a user is authenticated.
if (!env.ContainsKey(OwinKeys.StormpathUser))
{
await writer.WriteAsync("<a href=\"/login\">Log in</a> or <a href=\"/register\">Register</a>");
}
else
{
var user = env[OwinKeys.StormpathUser] as ICompatibleOktaAccount;
await writer.WriteAsync($"<p>Logged in as {user?.FullName} ({user?.Email})</p>");
await writer.WriteAsync(@"
<form action=""/logout"" method=""post"" id=""logout_form"">
    <a onclick=""document.getElementById('logout_form').submit();"" style=""cursor: pointer;"">
        Log Out
    </a>
</form>");
}
await writer.FlushAsync();
}
})));
// Add a "protected" route
app.Use(new Func<AppFunc, AppFunc>(next => (async env =>
{
if (env["owin.RequestPath"] as string != "/protected")
{
await next.Invoke(env);
return;
}
if (!env.ContainsKey(OwinKeys.StormpathUser))
{
// Not authenticated: build the actions RouteProtector needs to issue
// a 302 redirect (or other unauthorized response) via raw OWIN keys.
var deleteCookieAction =
    new Action<Configuration.Abstractions.Immutable.WebCookieConfiguration>(_ => { });
var setStatusCodeAction = new Action<int>(code => env["owin.ResponseStatusCode"] = code);
var setHeaderAction = new Action<string, string>((name, value) =>
    (env["owin.ResponseHeaders"] as IDictionary<string, string[]>).SetString(name, value));
var redirectAction = new Action<string>(location =>
{
setStatusCodeAction(302);
setHeaderAction("Location", location);
});
var routeProtector = new RouteProtector(
    stormpath.Configuration,
    deleteCookieAction,
    setStatusCodeAction,
    setHeaderAction,
    redirectAction,
    null);
routeProtector.OnUnauthorized("text/html", "/protected");
}
else
{
using (var writer = new StreamWriter(env["owin.ResponseBody"] as Stream))
{
await writer.WriteAsync("<p>Zomg secret!</p>");
await writer.FlushAsync();
}
}
})));
// Add a group-only route
app.Use(new Func<AppFunc, AppFunc>(next => (async env =>
{
if (env["owin.RequestPath"] as string != "/group")
{
await next.Invoke(env);
return;
}
// rawUser may be null when unauthenticated; the filter handles that case.
env.TryGetValue(OwinKeys.StormpathUser, out var rawUser);
var groupsFilter = stormpath.AuthorizationFilterFactory.CreateGroupFilter(new[] { "Superadmins" });
var allowed = await groupsFilter.IsAuthorizedAsync(rawUser as ICompatibleOktaAccount, CancellationToken.None);
if (allowed)
{
using (var writer = new StreamWriter(env["owin.ResponseBody"] as Stream))
{
await writer.WriteAsync("<p>Secret page for Superadmins!</p>");
await writer.FlushAsync();
}
}
else
{
env["owin.ResponseStatusCode"] = 401;
}
})));
// Add a custom data-required route
app.Use(new Func<AppFunc, AppFunc>(next => (async env =>
{
if (env["owin.RequestPath"] as string != "/customdata")
{
await next.Invoke(env);
return;
}
env.TryGetValue(OwinKeys.StormpathUser, out var rawUser);
// Requires custom data "source" == "Nowin" (set by the PreRegistrationHandler above).
var customDataFilter = stormpath.AuthorizationFilterFactory.CreateCustomDataFilter("source", "Nowin");
var allowed = await customDataFilter.IsAuthorizedAsync(rawUser as ICompatibleOktaAccount, CancellationToken.None);
if (allowed)
{
using (var writer = new StreamWriter(env["owin.ResponseBody"] as Stream))
{
await writer.WriteAsync("<p>Secret page for Nowin folks!</p>");
await writer.FlushAsync();
}
}
else
{
env["owin.ResponseStatusCode"] = 401;
}
})));
}
}
/// <summary>
/// Minimal <see cref="ILogger"/> implementation that writes every enabled
/// message to the console, filtered by a minimum log level.
/// </summary>
public class ConsoleLogger : ILogger
{
    // Messages below this level are suppressed by IsEnabled.
    private readonly LogLevel level;

    public ConsoleLogger(LogLevel level)
    {
        this.level = level;
    }

    /// <summary>Scopes are not supported by this logger.</summary>
    public IDisposable BeginScope<TState>(TState state)
    {
        throw new NotImplementedException();
    }

    public bool IsEnabled(LogLevel logLevel)
    {
        return logLevel >= level;
    }

    public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception exception, Func<TState, Exception, string> formatter)
    {
        // Format the entry via the supplied formatter and prefix it with the level.
        Console.WriteLine($"{logLevel}: {formatter(state, exception)}");
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace Lucene.Net.Index
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using ArrayUtil = Lucene.Net.Util.ArrayUtil;
using BinaryDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.BinaryDocValuesUpdate;
using NumericDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.NumericDocValuesUpdate;
using Query = Lucene.Net.Search.Query;
using QueryAndLimit = Lucene.Net.Index.BufferedUpdatesStream.QueryAndLimit;
using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
/// <summary>
/// Holds buffered deletes and updates by term or query, once pushed. Pushed
/// deletes/updates are write-once, so we shift to more memory efficient data
/// structure to hold them. We don't hold docIDs because these are applied on
/// flush.
/// </summary>
internal class FrozenBufferedUpdates
{
    /// <summary>Query we often undercount (say 24 bytes), plus int.</summary>
    internal static readonly int BYTES_PER_DEL_QUERY = RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_INT32 + 24;

    /// <summary>Terms, in sorted order:</summary>
    internal readonly PrefixCodedTerms terms;

    internal int termCount; // just for debugging

    /// <summary>Parallel array of deleted query, and the docIDUpto for each</summary>
    internal readonly Query[] queries;
    internal readonly int[] queryLimits;

    /// <summary>numeric DV update term and their updates</summary>
    internal readonly NumericDocValuesUpdate[] numericDVUpdates;

    /// <summary>binary DV update term and their updates</summary>
    internal readonly BinaryDocValuesUpdate[] binaryDVUpdates;

    /// <summary>Approximate RAM used by this frozen packet, in bytes.</summary>
    internal readonly int bytesUsed;

    /// <summary>Number of term deletions (counting duplicates) that were buffered.</summary>
    internal readonly int numTermDeletes;

    private long gen = -1; // assigned by BufferedDeletesStream once pushed

    internal readonly bool isSegmentPrivate; // set to true iff this frozen packet represents
    // a segment private deletes. in that case is should
    // only have Queries

    /// <summary>
    /// Freezes the given mutable buffer into compact, write-once arrays.
    /// </summary>
    /// <param name="deletes">Mutable buffered deletes/updates to freeze.</param>
    /// <param name="isSegmentPrivate">True when this packet is private to a single
    /// segment; such packets may only carry query deletes, never term deletes.</param>
    public FrozenBufferedUpdates(BufferedUpdates deletes, bool isSegmentPrivate)
    {
        this.isSegmentPrivate = isSegmentPrivate;
        Debug.Assert(!isSegmentPrivate || deletes.terms.Count == 0, "segment private package should only have del queries");
        Term[] termsArray = deletes.terms.Keys.ToArray(/*new Term[deletes.Terms.Count]*/);
        termCount = termsArray.Length;
        // Sort the terms, then prefix-code them into a compact shared structure.
        ArrayUtil.TimSort(termsArray);
        PrefixCodedTerms.Builder builder = new PrefixCodedTerms.Builder();
        foreach (Term term in termsArray)
        {
            builder.Add(term);
        }
        terms = builder.Finish();

        // Copy the query map into two parallel arrays (query, docIDUpto limit).
        queries = new Query[deletes.queries.Count];
        queryLimits = new int[deletes.queries.Count];
        int upto = 0;
        foreach (KeyValuePair<Query, int?> ent in deletes.queries)
        {
            queries[upto] = ent.Key;
            if (ent.Value.HasValue)
            {
                queryLimits[upto] = ent.Value.Value;
            }
            else
            {
                // LUCENENET NOTE: According to this: http://stackoverflow.com/a/13914344
                // we are supposed to throw an exception in this case, rather than
                // silently fail.
                throw new NullReferenceException("Buffered delete query is missing its docIDUpto limit; every deleted query must have a limit.");
            }
            upto++;
        }
        // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
        // so that it maps to all fields it affects, sorted by their docUpto, and traverse
        // that Term only once, applying the update to all fields that still need to be
        // updated.
        IList<NumericDocValuesUpdate> allNumericUpdates = new List<NumericDocValuesUpdate>();
        int numericUpdatesSize = 0;
        foreach (var numericUpdates in deletes.numericUpdates.Values)
        {
            foreach (NumericDocValuesUpdate update in numericUpdates.Values)
            {
                allNumericUpdates.Add(update);
                numericUpdatesSize += update.GetSizeInBytes();
            }
        }
        numericDVUpdates = allNumericUpdates.ToArray();
        // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
        // so that it maps to all fields it affects, sorted by their docUpto, and traverse
        // that Term only once, applying the update to all fields that still need to be
        // updated.
        IList<BinaryDocValuesUpdate> allBinaryUpdates = new List<BinaryDocValuesUpdate>();
        int binaryUpdatesSize = 0;
        foreach (var binaryUpdates in deletes.binaryUpdates.Values)
        {
            foreach (BinaryDocValuesUpdate update in binaryUpdates.Values)
            {
                allBinaryUpdates.Add(update);
                binaryUpdatesSize += update.GetSizeInBytes();
            }
        }
        binaryDVUpdates = allBinaryUpdates.ToArray();
        // Sum of term bytes + per-query overhead + DV update payloads and references.
        bytesUsed = (int)terms.GetSizeInBytes() + queries.Length * BYTES_PER_DEL_QUERY + numericUpdatesSize + numericDVUpdates.Length * RamUsageEstimator.NUM_BYTES_OBJECT_REF + binaryUpdatesSize + binaryDVUpdates.Length * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
        numTermDeletes = deletes.numTermDeletes.Get();
    }

    /// <summary>
    /// Delete generation for this packet. May be set exactly once (asserted via the
    /// sentinel value -1) and must be set before it is read.
    /// </summary>
    public virtual long DelGen
    {
        set
        {
            Debug.Assert(this.gen == -1);
            this.gen = value;
        }
        get
        {
            Debug.Assert(gen != -1);
            return gen;
        }
    }

    // LUCENENET NOTE: This was termsIterable() in Lucene
    public virtual IEnumerable<Term> GetTermsEnumerable()
    {
        return new IterableAnonymousInnerClassHelper(this);
    }

    /// <summary>Adapts the prefix-coded terms to <see cref="IEnumerable{Term}"/>.</summary>
    private class IterableAnonymousInnerClassHelper : IEnumerable<Term>
    {
        private readonly FrozenBufferedUpdates outerInstance;

        public IterableAnonymousInnerClassHelper(FrozenBufferedUpdates outerInstance)
        {
            this.outerInstance = outerInstance;
        }

        public virtual IEnumerator<Term> GetEnumerator()
        {
            return outerInstance.terms.GetEnumerator();
        }

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }

    // LUCENENET NOTE: This was queriesIterable() in Lucene
    public virtual IEnumerable<QueryAndLimit> GetQueriesEnumerable()
    {
        return new IterableAnonymousInnerClassHelper2(this);
    }

    /// <summary>Pairs up the parallel queries/queryLimits arrays as <see cref="QueryAndLimit"/> items.</summary>
    private class IterableAnonymousInnerClassHelper2 : IEnumerable<QueryAndLimit>
    {
        private readonly FrozenBufferedUpdates outerInstance;

        public IterableAnonymousInnerClassHelper2(FrozenBufferedUpdates outerInstance)
        {
            this.outerInstance = outerInstance;
        }

        public virtual IEnumerator<QueryAndLimit> GetEnumerator()
        {
            return new IteratorAnonymousInnerClassHelper(this);
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        private class IteratorAnonymousInnerClassHelper : IEnumerator<QueryAndLimit>
        {
            private readonly IterableAnonymousInnerClassHelper2 outerInstance;
            private int upto, i; // upto = total count, i = next index to yield
            private QueryAndLimit current;

            public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper2 outerInstance)
            {
                this.outerInstance = outerInstance;
                upto = this.outerInstance.outerInstance.queries.Length;
                i = 0;
            }

            public virtual bool MoveNext()
            {
                if (i < upto)
                {
                    current = new QueryAndLimit(outerInstance.outerInstance.queries[i], outerInstance.outerInstance.queryLimits[i]);
                    i++;
                    return true;
                }
                return false;
            }

            public virtual QueryAndLimit Current
            {
                get
                {
                    return current;
                }
            }

            object System.Collections.IEnumerator.Current
            {
                get { return Current; }
            }

            public virtual void Reset()
            {
                throw new NotSupportedException();
            }

            public void Dispose()
            {
            }
        }
    }

    /// <summary>Human-readable summary of term deletes, query deletes, and bytes used.</summary>
    public override string ToString()
    {
        string s = "";
        if (numTermDeletes != 0)
        {
            s += " " + numTermDeletes + " deleted terms (unique count=" + termCount + ")";
        }
        if (queries.Length != 0)
        {
            s += " " + queries.Length + " deleted queries";
        }
        if (bytesUsed != 0)
        {
            s += " bytesUsed=" + bytesUsed;
        }
        return s;
    }

    /// <summary>True when this packet carries at least one delete or DV update.</summary>
    public virtual bool Any()
    {
        return termCount > 0 || queries.Length > 0 || numericDVUpdates.Length > 0 || binaryDVUpdates.Length > 0;
    }
}
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the autoscaling-2011-01-01.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.AutoScaling.Model
{
/// <summary>
/// Describes a scheduled update to an Auto Scaling group.
/// </summary>
public partial class ScheduledUpdateGroupAction
{
    private string _autoScalingGroupName;
    private int? _desiredCapacity;
    private DateTime? _endTime;
    private int? _maxSize;
    private int? _minSize;
    private string _recurrence;
    private string _scheduledActionARN;
    private string _scheduledActionName;
    private DateTime? _startTime;
    private DateTime? _time;

    /// <summary>
    /// Gets and sets the property AutoScalingGroupName.
    /// <para>
    /// The name of the group.
    /// </para>
    /// </summary>
    public string AutoScalingGroupName
    {
        get => this._autoScalingGroupName;
        set => this._autoScalingGroupName = value;
    }

    // True when the AutoScalingGroupName property has been assigned.
    internal bool IsSetAutoScalingGroupName() => this._autoScalingGroupName != null;

    /// <summary>
    /// Gets and sets the property DesiredCapacity.
    /// <para>
    /// The number of instances you prefer to maintain in the group.
    /// </para>
    /// </summary>
    public int DesiredCapacity
    {
        get => this._desiredCapacity.GetValueOrDefault();
        set => this._desiredCapacity = value;
    }

    // True when the DesiredCapacity property has been assigned.
    internal bool IsSetDesiredCapacity() => this._desiredCapacity.HasValue;

    /// <summary>
    /// Gets and sets the property EndTime.
    /// <para>
    /// The time that the action is scheduled to end. This value can be up to one month in
    /// the future.
    /// </para>
    /// </summary>
    public DateTime EndTime
    {
        get => this._endTime.GetValueOrDefault();
        set => this._endTime = value;
    }

    // True when the EndTime property has been assigned.
    internal bool IsSetEndTime() => this._endTime.HasValue;

    /// <summary>
    /// Gets and sets the property MaxSize.
    /// <para>
    /// The maximum size of the group.
    /// </para>
    /// </summary>
    public int MaxSize
    {
        get => this._maxSize.GetValueOrDefault();
        set => this._maxSize = value;
    }

    // True when the MaxSize property has been assigned.
    internal bool IsSetMaxSize() => this._maxSize.HasValue;

    /// <summary>
    /// Gets and sets the property MinSize.
    /// <para>
    /// The minimum size of the group.
    /// </para>
    /// </summary>
    public int MinSize
    {
        get => this._minSize.GetValueOrDefault();
        set => this._minSize = value;
    }

    // True when the MinSize property has been assigned.
    internal bool IsSetMinSize() => this._minSize.HasValue;

    /// <summary>
    /// Gets and sets the property Recurrence.
    /// <para>
    /// The regular schedule that an action occurs.
    /// </para>
    /// </summary>
    public string Recurrence
    {
        get => this._recurrence;
        set => this._recurrence = value;
    }

    // True when the Recurrence property has been assigned.
    internal bool IsSetRecurrence() => this._recurrence != null;

    /// <summary>
    /// Gets and sets the property ScheduledActionARN.
    /// <para>
    /// The Amazon Resource Name (ARN) of the scheduled action.
    /// </para>
    /// </summary>
    public string ScheduledActionARN
    {
        get => this._scheduledActionARN;
        set => this._scheduledActionARN = value;
    }

    // True when the ScheduledActionARN property has been assigned.
    internal bool IsSetScheduledActionARN() => this._scheduledActionARN != null;

    /// <summary>
    /// Gets and sets the property ScheduledActionName.
    /// <para>
    /// The name of the scheduled action.
    /// </para>
    /// </summary>
    public string ScheduledActionName
    {
        get => this._scheduledActionName;
        set => this._scheduledActionName = value;
    }

    // True when the ScheduledActionName property has been assigned.
    internal bool IsSetScheduledActionName() => this._scheduledActionName != null;

    /// <summary>
    /// Gets and sets the property StartTime.
    /// <para>
    /// The time that the action is scheduled to begin. This value can be up to one month
    /// in the future.
    /// </para>
    ///
    /// <para>
    /// When <code>StartTime</code> and <code>EndTime</code> are specified with <code>Recurrence</code>,
    /// they form the boundaries of when the recurring action will start and stop.
    /// </para>
    /// </summary>
    public DateTime StartTime
    {
        get => this._startTime.GetValueOrDefault();
        set => this._startTime = value;
    }

    // True when the StartTime property has been assigned.
    internal bool IsSetStartTime() => this._startTime.HasValue;

    /// <summary>
    /// Gets and sets the property Time.
    /// <para>
    /// <code>Time</code> is deprecated.
    /// </para>
    ///
    /// <para>
    /// The time that the action is scheduled to begin. <code>Time</code> is an alias for
    /// <code>StartTime</code>.
    /// </para>
    /// </summary>
    public DateTime Time
    {
        get => this._time.GetValueOrDefault();
        set => this._time = value;
    }

    // True when the Time property has been assigned.
    internal bool IsSetTime() => this._time.HasValue;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Diagnostics;
using System.IO;
using System.Threading;
namespace Apache.Geode.Client.UnitTests
{
using NUnit.Framework;
using Apache.Geode.DUnitFramework;
using Apache.Geode.Client;
[TestFixture]
[Category("generics")]
public class SerializationTests : ThinClientRegionSteps
{
    // Discriminator values selecting which custom serializable type
    // CreateOtherType builds (one per OtherType* class in this file).
    private const int OTHER_TYPE1 = 1;
    private const int OTHER_TYPE2 = 2;
    private const int OTHER_TYPE22 = 3;
    private const int OTHER_TYPE4 = 4;
    private const int OTHER_TYPE42 = 5;
    private const int OTHER_TYPE43 = 6;

    // Two separate client processes: one writes entries, the other reads and validates.
    private UnitProcess sender, receiver;

    // Supplies the client processes this fixture drives via Call().
    protected override ClientBase[] GetClients()
    {
        sender = new UnitProcess();
        receiver = new UnitProcess();
        return new ClientBase[] { sender, receiver };
    }

    // Fixture-level teardown: stop any Java cache servers before base cleanup.
    [TestFixtureTearDown]
    public override void EndTests()
    {
        CacheHelper.StopJavaServers();
        base.EndTests();
    }

    // Per-test teardown: destroy regions on both clients, then stop servers
    // even if region teardown failed.
    [TearDown]
    public override void EndTest()
    {
        try
        {
            sender.Call(DestroyRegions);
            receiver.Call(DestroyRegions);
            CacheHelper.ClearEndpoints();
        }
        finally
        {
            CacheHelper.StopJavaServers();
        }
        base.EndTest();
    }

    // Builds the serializable test object identified by otherType, seeded with
    // (i, i + 20000); unrecognized discriminators fall back to OtherType.
    private IGeodeSerializable CreateOtherType(int i, int otherType)
    {
        IGeodeSerializable ot;
        switch (otherType)
        {
            case OTHER_TYPE1: ot = new OtherType(i, i + 20000); break;
            case OTHER_TYPE2: ot = new OtherType2(i, i + 20000); break;
            case OTHER_TYPE22: ot = new OtherType22(i, i + 20000); break;
            case OTHER_TYPE4: ot = new OtherType4(i, i + 20000); break;
            case OTHER_TYPE42: ot = new OtherType42(i, i + 20000); break;
            case OTHER_TYPE43: ot = new OtherType43(i, i + 20000); break;
            default: ot = new OtherType(i, i + 20000); break;
        }
        return ot;
    }

    #region Functions that are invoked by the tests

    // Creates the test region and registers every OtherType* deserializer
    // so the client can decode objects written by the peer.
    public void CreateRegionForOT(string locators)
    {
        CacheHelper.CreateTCRegion2<object, object>(RegionNames[0], true, false,
            null, locators, false);
        CacheHelper.DCache.TypeRegistry.RegisterTypeGeneric(OtherType.CreateDeserializable);
        CacheHelper.DCache.TypeRegistry.RegisterTypeGeneric(OtherType22.CreateDeserializable);
        CacheHelper.DCache.TypeRegistry.RegisterTypeGeneric(OtherType4.CreateDeserializable);
        CacheHelper.DCache.TypeRegistry.RegisterTypeGeneric(OtherType2.CreateDeserializable);
        CacheHelper.DCache.TypeRegistry.RegisterTypeGeneric(OtherType42.CreateDeserializable);
        CacheHelper.DCache.TypeRegistry.RegisterTypeGeneric(OtherType43.CreateDeserializable);
    }

    // Puts n integer key/value pairs; first verifies that re-registering an
    // already-registered type raises IllegalStateException.
    public void DoNPuts(int n)
    {
        try
        {
            CacheHelper.DCache.TypeRegistry.RegisterTypeGeneric(OtherType.CreateDeserializable);
            Assert.Fail("Expected exception in registering the type again.");
        }
        catch (IllegalStateException ex)
        {
            Util.Log("Got expected exception in RegisterType: {0}", ex);
        }
        IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(RegionNames[0]);
        for (int i = 0; i < n; i++)
        {
            //CacheableInt32 key = new CacheableInt32(i);
            //region.Put(key, key);
            int key = i;
            region[key] = key;
        }
    }

    // Reads back the n integer entries written by DoNPuts and checks each value;
    // also repeats the duplicate-registration check on this client.
    public void DoValidates(int n)
    {
        try
        {
            CacheHelper.DCache.TypeRegistry.RegisterTypeGeneric(OtherType.CreateDeserializable);
            Assert.Fail("Expected exception in registering the type again.");
        }
        catch (IllegalStateException ex)
        {
            Util.Log("Got expected exception in RegisterType: {0}", ex);
        }
        IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(RegionNames[0]);
        for (int i = 0; i < n; i++)
        {
            //CacheableInt32 val = region.Get(i) as CacheableInt32;
            object val = region[i];
            Assert.AreEqual(i, val, "Found unexpected value");
        }
    }

    // Puts n custom-typed values at keys offset by 10 to avoid the integer entries.
    public void DoNPutsOtherType(int n, int otherType)
    {
        IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(RegionNames[0]);
        for (int i = 0; i < n; i++)
        {
            IGeodeSerializable ot = CreateOtherType(i, otherType);
            region[i + 10] = ot;
        }
    }

    // Re-creates each expected custom-typed value locally and compares it with
    // the deserialized entry fetched from the region.
    public void DoValidateNPutsOtherType(int n, int otherType)
    {
        IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(RegionNames[0]);
        for (int i = 0; i < n; i++)
        {
            object val = region[i + 10];
            IGeodeSerializable ot = CreateOtherType(i, otherType);
            Assert.IsTrue(ot.Equals(val), "Found unexpected value");
        }
    }

    #endregion

    #region Tests

    // End-to-end test: start a locator and cache server, then for plain ints and
    // each custom type, write from the sender process and validate from the
    // receiver process, exercising serialization across client processes.
    [Test]
    public void CustomTypes()
    {
        CacheHelper.SetupJavaServers(true, "cacheserver.xml");
        CacheHelper.StartJavaLocator(1, "GFELOC");
        Util.Log("Locator 1 started.");
        CacheHelper.StartJavaServerWithLocators(1, "GFECS1", 1);
        Util.Log("Cacheserver 1 started.");
        sender.Call(CreateRegionForOT, CacheHelper.Locators);
        Util.Log("StepOne complete.");
        receiver.Call(CreateRegionForOT, CacheHelper.Locators);
        Util.Log("StepTwo complete.");
        sender.Call(DoNPuts, 10);
        receiver.Call(DoValidates, 10);
        Util.Log("StepThree complete.");
        sender.Call(DoNPutsOtherType, 10, OTHER_TYPE1);
        receiver.Call(DoValidateNPutsOtherType, 10, OTHER_TYPE1);
        Util.Log("StepFour complete.");
        sender.Call(DoNPutsOtherType, 10, OTHER_TYPE2);
        receiver.Call(DoValidateNPutsOtherType, 10, OTHER_TYPE2);
        Util.Log("StepFive complete.");
        sender.Call(DoNPutsOtherType, 10, OTHER_TYPE22);
        receiver.Call(DoValidateNPutsOtherType, 10, OTHER_TYPE22);
        Util.Log("StepSix complete.");
        sender.Call(DoNPutsOtherType, 10, OTHER_TYPE4);
        receiver.Call(DoValidateNPutsOtherType, 10, OTHER_TYPE4);
        Util.Log("StepSeven complete.");
        sender.Call(DoNPutsOtherType, 10, OTHER_TYPE42);
        receiver.Call(DoValidateNPutsOtherType, 10, OTHER_TYPE42);
        Util.Log("StepEight complete.");
        sender.Call(DoNPutsOtherType, 10, OTHER_TYPE43);
        receiver.Call(DoValidateNPutsOtherType, 10, OTHER_TYPE43);
        Util.Log("StepNine complete.");
        CacheHelper.StopJavaServer(1);
        Util.Log("Cacheserver 1 stopped.");
        CacheHelper.StopJavaLocator(1);
    }

    #endregion
}
/// <summary>
/// Simple serializable value pair (an Int32 and an Int64) with value equality,
/// used as the payload of the OtherType* serialization test classes.
/// </summary>
[Serializable]
public struct CData
{
    private Int32 m_first;
    private Int64 m_second;

    /// <summary>The 32-bit component.</summary>
    public Int32 First
    {
        get => m_first;
        set => m_first = value;
    }

    /// <summary>The 64-bit component.</summary>
    public Int64 Second
    {
        get => m_second;
        set => m_second = value;
    }

    public CData(Int32 first, Int64 second)
    {
        m_first = first;
        m_second = second;
    }

    /// <summary>Two instances are equal when both components match.</summary>
    public static bool operator ==(CData obj1, CData obj2)
        => obj1.m_first == obj2.m_first && obj1.m_second == obj2.m_second;

    public static bool operator !=(CData obj1, CData obj2)
        => !(obj1 == obj2);

    public override bool Equals(object obj)
        => obj is CData other && m_first == other.m_first && m_second == other.m_second;

    public override int GetHashCode()
        => m_first.GetHashCode() ^ m_second.GetHashCode();
};
/// <summary>
/// PDX-serializable value pair (an Int32 identity field and an Int64),
/// used by the serialization tests. Equality compares both fields; the
/// hash code uses only m_first, matching the PDX identity field marked
/// in <see cref="ToData"/>.
/// </summary>
public class PdxCData : IPdxSerializable
{
    #region Private members

    private Int32 m_first;
    private Int64 m_second;

    #endregion

    #region Public accessors

    /// <summary>The 32-bit component (the PDX identity field).</summary>
    public Int32 First
    {
        get
        {
            return m_first;
        }
        set
        {
            m_first = value;
        }
    }

    /// <summary>The 64-bit component.</summary>
    public Int64 Second
    {
        get
        {
            return m_second;
        }
        set
        {
            m_second = value;
        }
    }

    #endregion

    public PdxCData(Int32 first, Int64 second)
    {
        m_first = first;
        m_second = second;
    }

    /// <summary>Parameterless constructor required for deserialization.</summary>
    public PdxCData() { }

    /// <summary>Factory registered with the type registry for deserialization.</summary>
    public static PdxCData CreateDeserializable()
    {
        return new PdxCData();
    }

    // FIX: the original operators dereferenced both operands unconditionally,
    // so comparing a null PdxCData reference (e.g. "pdx == null") threw
    // NullReferenceException instead of returning a boolean. Handle null and
    // same-reference cases first, as required for reference-type operator ==.
    public static bool operator ==(PdxCData obj1, PdxCData obj2)
    {
        if (ReferenceEquals(obj1, obj2))
        {
            return true;
        }
        if (ReferenceEquals(obj1, null) || ReferenceEquals(obj2, null))
        {
            return false;
        }
        return ((obj1.m_first == obj2.m_first) && (obj1.m_second == obj2.m_second));
    }

    public static bool operator !=(PdxCData obj1, PdxCData obj2)
    {
        return !(obj1 == obj2);
    }

    public override bool Equals(object obj)
    {
        if (obj is PdxCData)
        {
            PdxCData otherObj = (PdxCData)obj;
            return ((m_first == otherObj.m_first) && (m_second == otherObj.m_second));
        }
        return false;
    }

    // Hash only m_first: it is the sole marked identity field, so equal
    // identity implies equal hash, which is sufficient and consistent.
    public override int GetHashCode()
    {
        return m_first.GetHashCode();
    }

    #region IPdxSerializable Members

    /// <summary>Reads both fields back from the PDX stream.</summary>
    public void FromData(IPdxReader reader)
    {
        m_first = reader.ReadInt("m_first");
        m_second = reader.ReadLong("m_second");
    }

    /// <summary>Writes both fields, marking m_first as the PDX identity field.</summary>
    public void ToData(IPdxWriter writer)
    {
        writer.WriteInt("m_first", m_first);
        writer.MarkIdentityField("m_first");
        writer.WriteLong("m_second", m_second);
    }

    #endregion
};
/// <summary>
/// Serializable test type wrapping a <see cref="CData"/> pair. Depending on the
/// configured <see cref="ExceptionType"/>, its serialization callbacks throw a
/// chosen exception so tests can exercise error propagation.
/// </summary>
public class OtherType : IGeodeSerializable
{
    private CData m_struct;
    private ExceptionType m_exType;

    /// <summary>Which exception (if any) FromData/ToData should throw.</summary>
    public enum ExceptionType
    {
        None,
        Geode,
        System,
        // below are with inner exceptions
        GeodeGeode,
        GeodeSystem,
        SystemGeode,
        SystemSystem
    }

    public OtherType()
    {
        m_exType = ExceptionType.None;
    }

    public OtherType(Int32 first, Int64 second)
        : this(first, second, ExceptionType.None)
    {
    }

    public OtherType(Int32 first, Int64 second, ExceptionType exType)
    {
        m_struct.First = first;
        m_struct.Second = second;
        m_exType = exType;
    }

    /// <summary>The wrapped value pair.</summary>
    public CData Data
    {
        get { return m_struct; }
    }

    /// <summary>Round-trips an object through serialization and returns the copy.</summary>
    public static IGeodeSerializable Duplicate(IGeodeSerializable orig)
    {
        DataOutput output = CacheHelper.DCache.CreateDataOutput();
        orig.ToData(output);
        DataInput input = CacheHelper.DCache.CreateDataInput(output.GetBuffer());
        return (IGeodeSerializable)input.ReadObject();
    }

    // Throws the exception selected by m_exType; a no-op for ExceptionType.None.
    // Shared by FromData and ToData, which previously duplicated this switch.
    private void ThrowConfiguredException()
    {
        switch (m_exType)
        {
            case ExceptionType.Geode:
                throw new GeodeIOException("Throwing an exception");
            case ExceptionType.System:
                throw new IOException("Throwing an exception");
            case ExceptionType.GeodeGeode:
                throw new GeodeIOException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.GeodeSystem:
                throw new CacheServerException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
            case ExceptionType.SystemGeode:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.SystemSystem:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
        }
    }

    #region IGeodeSerializable Members

    /// <summary>Reads both fields, then throws if an exception type is configured.</summary>
    public void FromData(DataInput input)
    {
        m_struct.First = input.ReadInt32();
        m_struct.Second = input.ReadInt64();
        ThrowConfiguredException();
    }

    /// <summary>Writes both fields, then throws if an exception type is configured.</summary>
    public void ToData(DataOutput output)
    {
        output.WriteInt32(m_struct.First);
        output.WriteInt64(m_struct.Second);
        ThrowConfiguredException();
    }

    public UInt64 ObjectSize
    {
        get { return (UInt32)(sizeof(Int32) + sizeof(Int64)); }
    }

    public UInt32 ClassId
    {
        get { return 0x0; }
    }

    #endregion

    public static IGeodeSerializable CreateDeserializable()
    {
        return new OtherType();
    }

    public override int GetHashCode()
    {
        return m_struct.First.GetHashCode() ^ m_struct.Second.GetHashCode();
    }

    public override bool Equals(object obj)
    {
        OtherType other = obj as OtherType;
        return other != null && m_struct.Equals(other.m_struct);
    }
}
/// <summary>
/// Serializable test type wrapping a <see cref="CData"/> pair (ClassId 0x8C).
/// Depending on the configured <see cref="ExceptionType"/>, its serialization
/// callbacks throw a chosen exception so tests can exercise error propagation.
/// </summary>
public class OtherType2 : IGeodeSerializable
{
    private CData m_struct;
    private ExceptionType m_exType;

    /// <summary>Which exception (if any) FromData/ToData should throw.</summary>
    public enum ExceptionType
    {
        None,
        Geode,
        System,
        // below are with inner exceptions
        GeodeGeode,
        GeodeSystem,
        SystemGeode,
        SystemSystem
    }

    public OtherType2()
    {
        m_exType = ExceptionType.None;
    }

    public OtherType2(Int32 first, Int64 second)
        : this(first, second, ExceptionType.None)
    {
    }

    public OtherType2(Int32 first, Int64 second, ExceptionType exType)
    {
        m_struct.First = first;
        m_struct.Second = second;
        m_exType = exType;
    }

    /// <summary>The wrapped value pair.</summary>
    public CData Data
    {
        get { return m_struct; }
    }

    /// <summary>Round-trips an object through serialization and returns the copy.</summary>
    public static IGeodeSerializable Duplicate(IGeodeSerializable orig)
    {
        DataOutput output = CacheHelper.DCache.CreateDataOutput();
        orig.ToData(output);
        DataInput input = CacheHelper.DCache.CreateDataInput(output.GetBuffer());
        return (IGeodeSerializable)input.ReadObject();
    }

    // Throws the exception selected by m_exType; a no-op for ExceptionType.None.
    // Shared by FromData and ToData, which previously duplicated this switch.
    private void ThrowConfiguredException()
    {
        switch (m_exType)
        {
            case ExceptionType.Geode:
                throw new GeodeIOException("Throwing an exception");
            case ExceptionType.System:
                throw new IOException("Throwing an exception");
            case ExceptionType.GeodeGeode:
                throw new GeodeIOException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.GeodeSystem:
                throw new CacheServerException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
            case ExceptionType.SystemGeode:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.SystemSystem:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
        }
    }

    #region IGeodeSerializable Members

    /// <summary>Reads both fields, then throws if an exception type is configured.</summary>
    public void FromData(DataInput input)
    {
        m_struct.First = input.ReadInt32();
        m_struct.Second = input.ReadInt64();
        ThrowConfiguredException();
    }

    /// <summary>Writes both fields, then throws if an exception type is configured.</summary>
    public void ToData(DataOutput output)
    {
        output.WriteInt32(m_struct.First);
        output.WriteInt64(m_struct.Second);
        ThrowConfiguredException();
    }

    public UInt64 ObjectSize
    {
        get { return (UInt32)(sizeof(Int32) + sizeof(Int64)); }
    }

    public UInt32 ClassId
    {
        get { return 0x8C; }
    }

    #endregion

    public static IGeodeSerializable CreateDeserializable()
    {
        return new OtherType2();
    }

    public override int GetHashCode()
    {
        return m_struct.First.GetHashCode() ^ m_struct.Second.GetHashCode();
    }

    public override bool Equals(object obj)
    {
        OtherType2 other = obj as OtherType2;
        return other != null && m_struct.Equals(other.m_struct);
    }
}
public class OtherType22 : IGeodeSerializable
{
private CData m_struct;
private ExceptionType m_exType;
public enum ExceptionType
{
None,
Geode,
System,
// below are with inner exceptions
GeodeGeode,
GeodeSystem,
SystemGeode,
SystemSystem
}
public OtherType22()
{
m_exType = ExceptionType.None;
}
public OtherType22(Int32 first, Int64 second)
: this(first, second, ExceptionType.None)
{
}
public OtherType22(Int32 first, Int64 second, ExceptionType exType)
{
m_struct.First = first;
m_struct.Second = second;
m_exType = exType;
}
public CData Data
{
get
{
return m_struct;
}
}
public static IGeodeSerializable Duplicate(IGeodeSerializable orig)
{
DataOutput dout = CacheHelper.DCache.CreateDataOutput();
orig.ToData(dout);
DataInput din = CacheHelper.DCache.CreateDataInput(dout.GetBuffer());
IGeodeSerializable dup = (IGeodeSerializable)din.ReadObject();
return dup;
}
#region IGeodeSerializable Members
public void FromData(DataInput input)
{
m_struct.First = input.ReadInt32();
m_struct.Second = input.ReadInt64();
switch (m_exType)
{
case ExceptionType.None:
break;
case ExceptionType.Geode:
throw new GeodeIOException("Throwing an exception");
case ExceptionType.System:
throw new IOException("Throwing an exception");
case ExceptionType.GeodeGeode:
throw new GeodeIOException("Throwing an exception with inner " +
"exception", new CacheServerException("This is an inner exception"));
case ExceptionType.GeodeSystem:
throw new CacheServerException("Throwing an exception with inner " +
"exception", new IOException("This is an inner exception"));
case ExceptionType.SystemGeode:
throw new ApplicationException("Throwing an exception with inner " +
"exception", new CacheServerException("This is an inner exception"));
case ExceptionType.SystemSystem:
throw new ApplicationException("Throwing an exception with inner " +
"exception", new IOException("This is an inner exception"));
}
}
public void ToData(DataOutput output)
{
output.WriteInt32(m_struct.First);
output.WriteInt64(m_struct.Second);
switch (m_exType)
{
case ExceptionType.None:
break;
case ExceptionType.Geode:
throw new GeodeIOException("Throwing an exception");
case ExceptionType.System:
throw new IOException("Throwing an exception");
case ExceptionType.GeodeGeode:
throw new GeodeIOException("Throwing an exception with inner " +
"exception", new CacheServerException("This is an inner exception"));
case ExceptionType.GeodeSystem:
throw new CacheServerException("Throwing an exception with inner " +
"exception", new IOException("This is an inner exception"));
case ExceptionType.SystemGeode:
throw new ApplicationException("Throwing an exception with inner " +
"exception", new CacheServerException("This is an inner exception"));
case ExceptionType.SystemSystem:
throw new ApplicationException("Throwing an exception with inner " +
"exception", new IOException("This is an inner exception"));
}
}
public UInt64 ObjectSize
{
get
{
return (UInt32)(sizeof(Int32) + sizeof(Int64));
}
}
public UInt32 ClassId
{
get
{
return 0x8C0;
}
}
#endregion
// Factory used by the serialization registry to allocate an empty instance
// prior to calling FromData on it.
public static IGeodeSerializable CreateDeserializable()
{
    return new OtherType22();
}
// Hash combines both halves of the data pair; consistent with Equals,
// which compares m_struct only.
public override int GetHashCode()
{
    int firstHash = m_struct.First.GetHashCode();
    int secondHash = m_struct.Second.GetHashCode();
    return firstHash ^ secondHash;
}
// Two instances are equal when their data pairs match; the configured
// exception type does not take part in equality.
public override bool Equals(object obj)
{
    var other = obj as OtherType22;
    return other != null && m_struct.Equals(other.m_struct);
}
}
/// <summary>
/// Serializable test type carrying a CData pair. The ExceptionType chosen
/// at construction makes FromData/ToData throw Geode or System exceptions
/// (optionally with inner exceptions) to exercise serialization error paths.
/// </summary>
public class OtherType4 : IGeodeSerializable
{
    private CData m_struct;
    private ExceptionType m_exType;

    public enum ExceptionType
    {
        None,
        Geode,
        System,
        // below are with inner exceptions
        GeodeGeode,
        GeodeSystem,
        SystemGeode,
        SystemSystem
    }

    public OtherType4()
    {
        m_exType = ExceptionType.None;
    }

    public OtherType4(Int32 first, Int64 second)
        : this(first, second, ExceptionType.None)
    {
    }

    public OtherType4(Int32 first, Int64 second, ExceptionType exType)
    {
        m_struct.First = first;
        m_struct.Second = second;
        m_exType = exType;
    }

    // The wrapped data pair.
    public CData Data
    {
        get { return m_struct; }
    }

    // Round-trips a serializable through a DataOutput/DataInput pair and
    // returns the freshly deserialized copy.
    public static IGeodeSerializable Duplicate(IGeodeSerializable orig)
    {
        DataOutput output = CacheHelper.DCache.CreateDataOutput();
        orig.ToData(output);
        DataInput input = CacheHelper.DCache.CreateDataInput(output.GetBuffer());
        return (IGeodeSerializable)input.ReadObject();
    }

    // Raises the exception selected by m_exType; does nothing for None.
    private void ThrowConfiguredException()
    {
        switch (m_exType)
        {
            case ExceptionType.Geode:
                throw new GeodeIOException("Throwing an exception");
            case ExceptionType.System:
                throw new IOException("Throwing an exception");
            case ExceptionType.GeodeGeode:
                throw new GeodeIOException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.GeodeSystem:
                throw new CacheServerException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
            case ExceptionType.SystemGeode:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.SystemSystem:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
        }
    }

    #region IGeodeSerializable Members

    public void FromData(DataInput input)
    {
        m_struct.First = input.ReadInt32();
        m_struct.Second = input.ReadInt64();
        ThrowConfiguredException();
    }

    public void ToData(DataOutput output)
    {
        output.WriteInt32(m_struct.First);
        output.WriteInt64(m_struct.Second);
        ThrowConfiguredException();
    }

    // Estimated payload size: one Int32 plus one Int64.
    public UInt64 ObjectSize
    {
        get { return (UInt64)(sizeof(Int32) + sizeof(Int64)); }
    }

    // Fixed type id registered with the serialization registry.
    public UInt32 ClassId
    {
        get { return 0x8FC0; }
    }

    #endregion

    // Factory used by the registry to allocate an empty instance.
    public static IGeodeSerializable CreateDeserializable()
    {
        return new OtherType4();
    }

    public override int GetHashCode()
    {
        return m_struct.First.GetHashCode() ^ m_struct.Second.GetHashCode();
    }

    // Equality is defined by the data pair only.
    public override bool Equals(object obj)
    {
        var other = obj as OtherType4;
        return other != null && m_struct.Equals(other.m_struct);
    }
}
/// <summary>
/// Serializable test type carrying a CData pair. The ExceptionType chosen
/// at construction makes FromData/ToData throw Geode or System exceptions
/// (optionally with inner exceptions) to exercise serialization error paths.
/// </summary>
public class OtherType42 : IGeodeSerializable
{
    private CData m_struct;
    private ExceptionType m_exType;

    public enum ExceptionType
    {
        None,
        Geode,
        System,
        // below are with inner exceptions
        GeodeGeode,
        GeodeSystem,
        SystemGeode,
        SystemSystem
    }

    public OtherType42()
    {
        m_exType = ExceptionType.None;
    }

    public OtherType42(Int32 first, Int64 second)
        : this(first, second, ExceptionType.None)
    {
    }

    public OtherType42(Int32 first, Int64 second, ExceptionType exType)
    {
        m_struct.First = first;
        m_struct.Second = second;
        m_exType = exType;
    }

    // The wrapped data pair.
    public CData Data
    {
        get { return m_struct; }
    }

    // Round-trips a serializable through a DataOutput/DataInput pair and
    // returns the freshly deserialized copy.
    public static IGeodeSerializable Duplicate(IGeodeSerializable orig)
    {
        DataOutput output = CacheHelper.DCache.CreateDataOutput();
        orig.ToData(output);
        DataInput input = CacheHelper.DCache.CreateDataInput(output.GetBuffer());
        return (IGeodeSerializable)input.ReadObject();
    }

    // Raises the exception selected by m_exType; does nothing for None.
    private void ThrowConfiguredException()
    {
        switch (m_exType)
        {
            case ExceptionType.Geode:
                throw new GeodeIOException("Throwing an exception");
            case ExceptionType.System:
                throw new IOException("Throwing an exception");
            case ExceptionType.GeodeGeode:
                throw new GeodeIOException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.GeodeSystem:
                throw new CacheServerException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
            case ExceptionType.SystemGeode:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.SystemSystem:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
        }
    }

    #region IGeodeSerializable Members

    public void FromData(DataInput input)
    {
        m_struct.First = input.ReadInt32();
        m_struct.Second = input.ReadInt64();
        ThrowConfiguredException();
    }

    public void ToData(DataOutput output)
    {
        output.WriteInt32(m_struct.First);
        output.WriteInt64(m_struct.Second);
        ThrowConfiguredException();
    }

    // Estimated payload size: one Int32 plus one Int64.
    public UInt64 ObjectSize
    {
        get { return (UInt64)(sizeof(Int32) + sizeof(Int64)); }
    }

    // Fixed type id registered with the serialization registry.
    public UInt32 ClassId
    {
        get { return 0x6F3F97; }
    }

    #endregion

    // Factory used by the registry to allocate an empty instance.
    public static IGeodeSerializable CreateDeserializable()
    {
        return new OtherType42();
    }

    public override int GetHashCode()
    {
        return m_struct.First.GetHashCode() ^ m_struct.Second.GetHashCode();
    }

    // Equality is defined by the data pair only.
    public override bool Equals(object obj)
    {
        var other = obj as OtherType42;
        return other != null && m_struct.Equals(other.m_struct);
    }
}
/// <summary>
/// Serializable test type carrying a CData pair. The ExceptionType chosen
/// at construction makes FromData/ToData throw Geode or System exceptions
/// (optionally with inner exceptions) to exercise serialization error paths.
/// </summary>
public class OtherType43 : IGeodeSerializable
{
    private CData m_struct;
    private ExceptionType m_exType;

    public enum ExceptionType
    {
        None,
        Geode,
        System,
        // below are with inner exceptions
        GeodeGeode,
        GeodeSystem,
        SystemGeode,
        SystemSystem
    }

    public OtherType43()
    {
        m_exType = ExceptionType.None;
    }

    public OtherType43(Int32 first, Int64 second)
        : this(first, second, ExceptionType.None)
    {
    }

    public OtherType43(Int32 first, Int64 second, ExceptionType exType)
    {
        m_struct.First = first;
        m_struct.Second = second;
        m_exType = exType;
    }

    // The wrapped data pair.
    public CData Data
    {
        get { return m_struct; }
    }

    // Round-trips a serializable through a DataOutput/DataInput pair and
    // returns the freshly deserialized copy.
    public static IGeodeSerializable Duplicate(IGeodeSerializable orig)
    {
        DataOutput output = CacheHelper.DCache.CreateDataOutput();
        orig.ToData(output);
        DataInput input = CacheHelper.DCache.CreateDataInput(output.GetBuffer());
        return (IGeodeSerializable)input.ReadObject();
    }

    // Raises the exception selected by m_exType; does nothing for None.
    private void ThrowConfiguredException()
    {
        switch (m_exType)
        {
            case ExceptionType.Geode:
                throw new GeodeIOException("Throwing an exception");
            case ExceptionType.System:
                throw new IOException("Throwing an exception");
            case ExceptionType.GeodeGeode:
                throw new GeodeIOException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.GeodeSystem:
                throw new CacheServerException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
            case ExceptionType.SystemGeode:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new CacheServerException("This is an inner exception"));
            case ExceptionType.SystemSystem:
                throw new ApplicationException("Throwing an exception with inner exception",
                    new IOException("This is an inner exception"));
        }
    }

    #region IGeodeSerializable Members

    public void FromData(DataInput input)
    {
        m_struct.First = input.ReadInt32();
        m_struct.Second = input.ReadInt64();
        ThrowConfiguredException();
    }

    public void ToData(DataOutput output)
    {
        output.WriteInt32(m_struct.First);
        output.WriteInt64(m_struct.Second);
        ThrowConfiguredException();
    }

    // Estimated payload size: one Int32 plus one Int64.
    public UInt64 ObjectSize
    {
        get { return (UInt64)(sizeof(Int32) + sizeof(Int64)); }
    }

    // Fixed type id registered with the serialization registry.
    public UInt32 ClassId
    {
        get { return 0x7FFFFFFF; }
    }

    #endregion

    // Factory used by the registry to allocate an empty instance.
    public static IGeodeSerializable CreateDeserializable()
    {
        return new OtherType43();
    }

    public override int GetHashCode()
    {
        return m_struct.First.GetHashCode() ^ m_struct.Second.GetHashCode();
    }

    // Equality is defined by the data pair only.
    public override bool Equals(object obj)
    {
        var other = obj as OtherType43;
        return other != null && m_struct.Equals(other.m_struct);
    }
}
}
| |
//==============================================================================
// Lab GuiManager -> Profile File Analyser
// Copyright (c) 2015 All Right Reserved, http://nordiklab.com/
//------------------------------------------------------------------------------
//==============================================================================
$ProfileFieldSet["colorFontIds"] = "fontColors[0] fontColors[1] fontColors[2] fontColors[3] fontColors[4] fontColors[5] fontColors[6] fontColors[7] fontColors[8] fontColors[9]";
$ProfileFieldSet["colorFont"] = "fontColor fontColorHL fontColorNA fontColorSEL fontColorLink fontColorLinkHL";
$ProfileFieldSet["All"] = $ProfileFieldSet["colorFontIds"] SPC $ProfileFieldSet["colorFont"];
$ProfileFieldsStore = "fontSize";
//==============================================================================
// Clear the Analyser saved globals
// Reset every global populated by the profile scanner so a fresh scan
// starts from a clean slate.
function clearProfilesGlobals() {
   // Per-color-field profile lists.
   foreach$(%colorField in $ProfileUpdateColorList)
      $ProfileListColor[%colorField] = "";

   GLab.resetFontProfilesList();
   $ProfileList["fontSource"] = "";

   // Parent -> children relations.
   foreach$(%profileName in $ProfWithChilds)
      $ProfChilds[%profileName] = "";
   $ProfWithChilds = "";

   // Child -> parent relations.
   foreach$(%profileName in $ProfWithParent)
      $ProfParent[%profileName] = "";
   $ProfWithParent = "";
}
//------------------------------------------------------------------------------
//==============================================================================
// Print an analysis report of the scan results to the console
// Dump the result of the last profile scan to the console: parent/child
// relations, stored field defaults and the per-color-field profile lists.
function postProfileScanReport() {
   info("Profiles with childs:",$ProfWithChilds);

   foreach$(%parentName in $ProfWithChilds) {
      info(%parentName,"Childs:",$ProfChilds[%parentName]);
   }

   info("Profiles with Parent:",$ProfWithParent);

   foreach$(%childName in $ProfWithParent) {
      info(%childName,"Parent:",$ProfParent[%childName]);
   }

   info("====================================","--","==================================");
   info("Store fields info");

   foreach$(%storeField in $ProfileFieldsStore) {
      foreach$(%owner in $ProfStoreFieldProfiles[%storeField] ) {
         %storedValue = $ProfStoreFieldDefault[%owner,%storeField];
         info("Field:",%storeField,"in Profile:",%owner,"Store as:",%storedValue);
      }
   }

   info("====================================","--","==================================");
   info("Profile Color Set Info");

   foreach$(%colorField in $ProfileUpdateColorList)
      info(%colorField,"Profiles:",$ProfileListColor[%colorField]);
}
//------------------------------------------------------------------------------
//==============================================================================
// Scan all tool profile files found in the tlab/gui/profiles folder
// Scan the tool (tlab) profile files: baseProfiles.cs first, then every
// *.prof.cs under tlab/gui/profiles. Optionally prints the report after.
function scanAllToolProfileFile(%postReport) {
   clearProfilesGlobals();
   scanProfileFile("tlab/gui/profiles/baseProfiles.cs");

   %pattern = "tlab/gui/profiles/*.prof.cs";

   for(%path = findFirstFile(%pattern); %path !$= ""; %path = findNextFile(%pattern))
      scanProfileFile(%path);

   $ProfileScanDone = true;

   if (%postReport)
      postProfileScanReport();
}
//------------------------------------------------------------------------------
//==============================================================================
// Scan all profile files found in profile folder
// Scan the game profile files: baseProfiles.cs first, then every *.prof.cs
// under art/gui; finishes with the tool profiles when tlab/ is present.
function scanAllProfileFile(%postReport) {
   clearProfilesGlobals();
   scanProfileFile("art/gui/baseProfiles.cs");

   %pattern = "art/gui/*.prof.cs";

   for(%path = findFirstFile(%pattern); %path !$= ""; %path = findNextFile(%pattern))
      scanProfileFile(%path);

   $ProfileScanDone = true;

   if (%postReport)
      postProfileScanReport();

   // NOTE(review): scanAllToolProfileFile calls clearProfilesGlobals again,
   // wiping the art/gui results just collected - confirm this is intended.
   if ( IsDirectory( "tlab/" ) )
      scanAllToolProfileFile(%postReport);
}
//------------------------------------------------------------------------------
//==============================================================================
// Scan a profile line by line and get needed data
// Scan one profile script line by line and collect profile metadata into
// the analyser globals:
//  - $ProfChilds / $ProfWithChilds / $ProfParent / $ProfWithParent : links
//  - $ProfOwnedFields[profile] : fields the profile defines itself
//  - $ProfStoreFieldProfiles / $ProfStoreFieldDefault : stored-field values
//  - $ProfileList["fontSource"], $ProfileFontList, $FontTypesList : fonts
//  - $ProfileListColor[field] : profiles defining watched color fields
function scanProfileFile( %file ) {
   %fileObj = getFileReadObj(%file);

   // Silently skip files that could not be opened.
   if (!isObject(%fileObj)) return;

   while( !%fileObj.isEOF() ) {
      %line = %fileObj.readline();
      //------------------------------------------------------------------------
      //Check for GuiControlProfile definition START
      if (strstr(%line,"GuiControlProfile") !$= "-1") {
         // Reduce "new GuiControlProfile(Name : Parent) {" to "Name\tParent".
         %lineFix = strchr( %line , "(" );
         %lineFix = strReplace(%lineFix,")"," ");
         %lineFix = strReplace(%lineFix,"{","");
         %lineFix = trim(strReplace(%lineFix,"(",""));
         %lineFix = strReplace(%lineFix,":","\t");
         %profileName = trim(getField(%lineFix,0));
         %profileLink = trim(getField(%lineFix,1));
         //Set an empty global to store the fields that this profile own
         $ProfOwnedFields[%profileName] = "";

         //Check for reference to other profile
         if (isObject(%profileLink)) {
            $ProfChilds[%profileLink] = strAddWord($ProfChilds[%profileLink],%profileName);
            $ProfWithChilds = strAddWord($ProfWithChilds,%profileLink,true);
            $ProfParent[%profileName] = %profileLink;
            $ProfWithParent = strAddWord($ProfWithParent,%profileName,true);
         }
      }
      //------------------------------------------------------------------------
      //Check for GuiControlProfile definition END
      else if (strstr(%line,"};") !$= "-1") {
         %profileName = "";
         %fontType = "";
      }
      //------------------------------------------------------------------------
      //Check for GuiControlProfile field definition
      else if (strstr(%line,"=") !$= "-1") {
         // "field = \"value\";" -> word 0 is the field, the rest the value.
         %trimLine = trim(%line);
         %trimLine = strreplace(%trimLine,"="," ");
         %trimLine = strreplace(%trimLine,"\"","");
         %trimLine = strreplace(%trimLine,";","");
         %field = trim(getWord(%trimLine,0));
         %value = trim(removeWord(%trimLine,0));
         $ProfOwnedFields[%profileName] = strAddWord($ProfOwnedFields[%profileName],%field,true);

         if (strstr($ProfileFieldsStore,%field) !$= "-1" ) {
            $ProfStoreFieldProfiles[%field] = strAddWord($ProfStoreFieldProfiles[%field],%profileName);
            $ProfStoreFieldDefault[%profileName,%field] = %value;
         }

         if (%field $= "fontSource" ) {
            $ProfileList["fontSource"] = strAddWord($ProfileList["fontSource"],%profileName);
         }

         if (%field $= "fontType") {
            $ProfileFontList[%value] = strAddWord($ProfileFontList[%value],%profileName);
            $FontTypesList = strAddWord($FontTypesList,%value);
         }

         if (strstr($ProfileUpdateColorList,%field) !$= "-1" ) {
            $ProfileListColor[%field] = strAddWord($ProfileListColor[%field],%profileName);
         }
      }
   }

   // Fix: release the file object. The original never closed it and leaked
   // one FileObject per scanned file (closeFileObj is the pattern used by
   // removeProfileField below).
   closeFileObj(%fileObj);
}
//------------------------------------------------------------------------------
//==============================================================================
//FIELDS -> Remove a field set (or explicit field list) from a profile's source file
//cleanProfileFile("art/gui/TextLab.prof.cs");
// Remove every field belonging to the named field set (see $ProfileFieldSet)
// from the given profile's source file.
function removeProfileFieldSet( %profile,%set ) {
   %fields = $ProfileFieldSet[%set];

   if (%fields $= "")
      return;

   removeProfileField(%profile,%fields);
}
// Rewrite %profile's source file, dropping every assignment to a field in
// %targetFields (space-separated list), but only inside the definition of
// %profile itself. Surviving lines are buffered in the %line[] array and
// written back at the end. When a removed field has a value in a parent
// profile, the live object is updated to that value via GLab.
function removeProfileField( %profile,%targetFields ) {
   devLog("removeProfileField",%profile.getName(),%targetFields);
   %file = %profile.getFilename();
   if (!isFile(%file))
      return;
   %fileObj = getFileReadObj(%file);
   if (!isObject(%fileObj)) return;
   while( !%fileObj.isEOF() ) {
      %line = %fileObj.readline();
      %skipLine = false;
      // Comment lines are copied through untouched (and shielded from the
      // field-matching branches below).
      if (strstr(%line,"//") !$= "-1") {
         %skipLine = false;
      } else if (strstr(%line,"GuiControlProfile") !$= "-1") {
         // Profile header: extract the profile name and remember whether
         // this is the profile whose fields we are removing.
         %lineFix = strchr( %line , "(" );
         %lineFix = strReplace(%lineFix,":"," ");
         %lineFix = strReplace(%lineFix,")"," ");
         %lineFix = trim(strReplace(%lineFix,"(",""));
         %profileName = getWord(%lineFix,0);
         %targetProfile = false;
         if (%profileName $= %profile.getName()) {
            %targetProfile = true;
         }
      } else if (strstr(%line,"};") !$= "-1") {
         //Check if default field is there
         // NOTE(review): %fontType is never assigned in this function, so
         // this branch looks dead - confirm whether a fontType carry-over
         // from an earlier version was meant here.
         if (%fontType !$= "") {
            %line[%i++] = "fontType = \"" @%fontType@"\";";
         }
         %currentProfile = "";
         %fontType = "";
      } else if (!%targetProfile) {
         // Not inside the target profile: keep the line as-is (the self
         // assignment is a deliberate no-op placeholder).
         %targetProfile = %targetProfile;
      } else if (strstr(%line,"=") !$= "-1") {
         // Field assignment inside the target profile:
         // "field = \"value\";" -> word 0 is the field name.
         %trimLine = trim(%line);
         %trimLine = strreplace(%trimLine,"="," ");
         %trimLine = strreplace(%trimLine,"\"","");
         %trimLine = strreplace(%trimLine,";","");
         %field = trim(getWord(%trimLine,0));
         %value = trim(removeWord(%trimLine,0));
         foreach$(%target in %targetFields) {
            if (%field $= %target) {
               // Drop this line from the rewritten file.
               %skipLine = true;
               //Now we should set the parent value right now
               %parent = GLab.findParentFieldSource(%profileName,%field);
               %parentValue = %parent.getFieldValue(%field);
               if (%parentValue !$= "")
                  GLab.updateProfileField(%profileName,%field,%parentValue,true);
               break;
            }
         }
      }
      if (%skipLine) {
         continue;
      }
      // Buffer the surviving line for the rewrite below.
      // NOTE(review): %i is never initialised; if the first %i++ produces
      // index 0, the save loop below (starting at %j = 1) silently drops
      // the first buffered line - verify against the engine's ++ semantics.
      %line[%i++] = %line;
   }
   closeFileObj(%fileObj);
   // Reopen the same path for writing and dump the buffered lines back.
   %fileObj = getFileWriteObj(%file);
   info("-----------------SavingFile",%file);
   //%j = 1;
   for(%j=1; %j <= %i; %j++) {
      %fileObj.writeLine(%line[%j]);
   }
   closeFileObj(%fileObj);
   // Refresh the editor parameter panel if the edited profile is selected.
   if (%profile.getId() $= $GLab_SelectedObject.getId())
      GLab.syncProfileParamArray();
}
//------------------------------------------------------------------------------
| |
#region S# License
/******************************************************************************************
NOTICE!!! This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.
Project: StockSharp.Rithmic.Rithmic
File: RithmicMessageAdapter_MarketData.cs
Created: 2015, 12, 2, 8:18 PM
Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace StockSharp.Rithmic
{
using System;
using System.Collections.Generic;
using System.Linq;
using com.omnesys.rapi;
using Ecng.Collections;
using Ecng.Common;
using MoreLinq;
using StockSharp.Algo;
using StockSharp.Messages;
/// <summary>
/// Market-data half of the Rithmic adapter: security lookup, level1, depth,
/// tick and time-bar subscriptions, plus the R|API callbacks that translate
/// native structures into StockSharp messages.
/// </summary>
partial class RithmicMessageAdapter
{
	// Pending best bid/ask values keyed by security, then by server time.
	// Filled during multi-part quote updates (Begin/Middle/Aggregated) and
	// flushed to Level1 messages on End / end-quote (see FlushQuotes).
	private readonly SynchronizedDictionary<SecurityId, CachedSynchronizedDictionary<DateTimeOffset, RefPair<BidInfo, AskInfo>>> _quotes = new SynchronizedDictionary<SecurityId, CachedSynchronizedDictionary<DateTimeOffset, RefPair<BidInfo, AskInfo>>>();

	// Exchange codes announced by the session; used to fan a code-only
	// security lookup out across every known board.
	private readonly CachedSynchronizedSet<string> _boards = new CachedSynchronizedSet<string>();

	// Routes a security lookup message to the matching R|API list/get calls.
	private void ProcessSecurityLookupMessage(SecurityLookupMessage secMsg)
	{
		if (secMsg.SecurityId.IsDefault())
		{
			// No concrete security requested: enumerate exchanges and routes.
			_client.Session.listExchanges(secMsg.TransactionId);
			_client.Session.listTradeRoutes(secMsg.TransactionId);
		}

		var board = secMsg.SecurityId.BoardCode;

		if (secMsg.SecurityType == null || secMsg.SecurityType == SecurityTypes.Option)
		{
			var expiration = secMsg.ExpiryDate?.ToString("yyyyMM");

			_client.Session.getOptionList(board, secMsg.UnderlyingSecurityCode, expiration, secMsg.TransactionId);
			_client.Session.getInstrumentByUnderlying(secMsg.UnderlyingSecurityCode, board, expiration, secMsg.TransactionId);
			_client.Session.listBinaryContracts(board, secMsg.UnderlyingSecurityCode, secMsg.TransactionId);
		}

		if (secMsg.SecurityType != SecurityTypes.Option && !secMsg.SecurityId.SecurityCode.IsEmpty())
		{
			if (board.IsEmpty())
			{
				// Board unknown: query reference data on every known exchange.
				foreach (var b in _boards.Cache)
					_client.Session.getRefData(b, secMsg.SecurityId.SecurityCode, secMsg.TransactionId);
			}
			else
				_client.Session.getRefData(board, secMsg.SecurityId.SecurityCode, secMsg.TransactionId);
		}
	}

	// Translates subscribe/unsubscribe requests into R|API session calls and
	// acknowledges supported requests with a cloned reply message.
	private void ProcessMarketDataMessage(MarketDataMessage mdMsg)
	{
		var secCode = mdMsg.SecurityId.SecurityCode;
		var boardCode = mdMsg.SecurityId.BoardCode;

		switch (mdMsg.DataType)
		{
			case MarketDataTypes.Level1:
			{
				// Everything except prints and per-level quotes.
				if (mdMsg.IsSubscribe)
					_client.Session.subscribe(boardCode, secCode, SubscriptionFlags.All & ~(SubscriptionFlags.Prints | SubscriptionFlags.PrintsCond | SubscriptionFlags.Quotes), mdMsg.TransactionId);
				else
					_client.Session.unsubscribe(boardCode, secCode);

				break;
			}
			case MarketDataTypes.MarketDepth:
			{
				if (mdMsg.IsSubscribe)
				{
					// Request a full book snapshot before streaming deltas.
					_client.Session.rebuildBook(boardCode, secCode, mdMsg.TransactionId);
					_client.Session.subscribe(boardCode, secCode, SubscriptionFlags.Quotes, mdMsg.TransactionId);
				}
				else
					_client.Session.unsubscribe(boardCode, secCode);

				break;
			}
			case MarketDataTypes.Trades:
			{
				if (mdMsg.From == null || mdMsg.To == null)
				{
					// Live ticks.
					if (mdMsg.IsSubscribe)
						_client.Session.subscribe(boardCode, secCode, SubscriptionFlags.Prints | SubscriptionFlags.PrintsCond, mdMsg.TransactionId);
					else
						_client.Session.unsubscribe(boardCode, secCode);
				}
				else
					// Historical replay over the requested range.
					_client.Session.replayTrades(boardCode, secCode, mdMsg.From.Value.ToSsboe(), mdMsg.To.Value.ToSsboe(), mdMsg.TransactionId);

				break;
			}
			//case MarketDataTypes.OrderLog:
			//	break;
			//case MarketDataTypes.News:
			//	break;
			case MarketDataTypes.CandleTimeFrame:
			{
				if (mdMsg.From == null || mdMsg.To == null)
				{
					if (mdMsg.IsSubscribe)
						_client.Session.subscribeTimeBar(boardCode, secCode, mdMsg.TransactionId);
					else
						_client.Session.unsubscribeTimeBar(boardCode, secCode);
				}
				else
					_client.Session.replayTimeBars(boardCode, secCode, mdMsg.From.Value.ToSsboe(), mdMsg.To.Value.ToSsboe(), mdMsg.TransactionId);

				break;
			}
			default:
			{
				// Unsupported data type: report and do not acknowledge.
				SendOutMarketDataNotSupported(mdMsg.TransactionId);
				return;
			}
		}

		var reply = (MarketDataMessage)mdMsg.Clone();
		reply.OriginalTransactionId = mdMsg.TransactionId;
		SendOutMessage(reply);
	}

	// Pushes a single level1 field change; exceptions are surfaced as error
	// messages rather than propagated into the native callback thread.
	private void SessionHolderOnLevel1(string symbol, string exchange, Level1Fields field, decimal value, DateTimeOffset time)
	{
		try
		{
			SendOutMessage(
				new Level1ChangeMessage
				{
					SecurityId = new SecurityId
					{
						SecurityCode = symbol,
						BoardCode = exchange,
					},
					ServerTime = time
				}
				.TryAdd(field, value));
		}
		catch (Exception ex)
		{
			SendOutError(ex);
		}
	}

	// Converts one R|API reference-data record into a SecurityMessage.
	private void ProcessRefData(RefDataInfo info, long? originalTransactionId)
	{
		SendOutMessage(new SecurityMessage
		{
			SecurityId = new SecurityId
			{
				SecurityCode = info.Symbol,
				BoardCode = info.Exchange,
			},
			ExpiryDate = RithmicUtils.ToDateTime(info.Expiration, info.ExpirationTime),
			Currency = info.Currency.To<CurrencyTypes?>(),
			Strike = info.StrikePrice.ToDecimal(),
			OptionType = RithmicUtils.ToOptionType(info.PutCallIndicator),
			BinaryOptionType = info.BinaryContractType,
			Name = info.Description,
			SecurityType = RithmicUtils.ToSecurityType(info.InstrumentType),
			UnderlyingSecurityCode = info.Underlying,
			LocalTime = RithmicUtils.ToTime(info.Ssboe),
			Class = info.ProductCode,
			PriceStep = info.SinglePointValue.ToDecimal(),
			OriginalTransactionId = originalTransactionId ?? 0,
		});
	}

	// Emits each record, then closes the lookup when a transaction id exists.
	private void ProcessRefDataList(IEnumerable<RefDataInfo> list, long? originalTransactionId)
	{
		list.ForEach(i => ProcessRefData(i, originalTransactionId));

		if (originalTransactionId == null)
			return;

		SendOutMessage(new SecurityLookupResultMessage
		{
			OriginalTransactionId = originalTransactionId.Value
		});
	}

	// Registers each exchange code and announces it as a board.
	private void ProcessExchanges(IEnumerable<string> exchanges)
	{
		foreach (var exchange in exchanges)
		{
			_boards.Add(exchange);

			SendOutMessage(new BoardMessage
			{
				Code = exchange,
				ExchangeCode = exchange,
			});
		}
	}

	private void SessionHolderOnSecurityRefData(RefDataInfo info)
	{
		if (!ProcessErrorCode(info.RpCode))
			return;

		ProcessRefData(info, (long?)info.Context);
	}

	private void SessionHolderOnSecurityOptions(OptionListInfo info)
	{
		ProcessExchanges(info.Exchanges);

		if (!ProcessErrorCode(info.RpCode))
			return;

		ProcessRefDataList(info.Instruments, (long?)info.Context);
	}

	private void SessionHolderOnSecurityInstrumentByUnderlying(InstrumentByUnderlyingInfo info)
	{
		ProcessExchanges(info.Exchanges);

		if (!ProcessErrorCode(info.RpCode))
			return;

		ProcessRefDataList(info.Instruments, (long?)info.Context);
	}

	private void SessionHolderOnSecurityBinaryContracts(BinaryContractListInfo info)
	{
		ProcessExchanges(info.Exchanges);

		if (!ProcessErrorCode(info.RpCode))
			return;

		ProcessRefDataList(info.Instruments, (long?)info.Context);
	}

	// Replay finished: signal the end of the candle series.
	private void SessionHolderOnTimeBarReplay(TimeBarReplayInfo info)
	{
		if (!ProcessErrorCode(info.RpCode))
			return;

		SendOutMessage(new TimeFrameCandleMessage
		{
			OriginalTransactionId = (long?)info.Context ?? 0,
			IsFinished = true,
		});
	}

	// Converts one native time bar into a candle message.
	private void SessionHolderOnTimeBar(TimeBarInfo info)
	{
		SendOutMessage(new TimeFrameCandleMessage
		{
			SecurityId = new SecurityId
			{
				SecurityCode = info.Symbol,
				BoardCode = info.Exchange
			},
			OriginalTransactionId = (long?)info.Context ?? 0,
			OpenPrice = info.OpenPrice.ToDecimal() ?? 0,
			OpenVolume = info.OpenSize,
			HighPrice = info.HighPrice.ToDecimal() ?? 0,
			HighVolume = info.HighVolume,
			LowPrice = info.LowPrice.ToDecimal() ?? 0,
			LowVolume = info.LowVolume,
			ClosePrice = info.ClosePrice.ToDecimal() ?? 0,
			CloseVolume = info.CloseSize,
			CloseTime = RithmicUtils.ToTime(info.Ssboe),
			TotalTicks = info.NumTrades,
			UpTicks = info.HighNumTrades,
			DownTicks = info.LowNumTrades
		});
	}

	// Only surfaces errors; replayed trades themselves arrive through the
	// regular trade print callbacks.
	private void SessionHolderOnTradeReplay(TradeReplayInfo info)
	{
		ProcessErrorCode(info.RpCode);
	}

	// Publishes the cumulative traded volume as a level1 change.
	private void SessionHolderOnTradeVolume(TradeVolumeInfo info)
	{
		if (!info.TotalVolumeFlag)
			return;

		SendOutMessage(new Level1ChangeMessage
		{
			SecurityId = new SecurityId
			{
				SecurityCode = info.Symbol,
				BoardCode = info.Exchange
			},
			ServerTime = RithmicUtils.ToTime(info.Ssboe, info.Usecs),
		}.TryAdd(Level1Fields.LastTradeVolume, (decimal)info.TotalVolume));
	}

	// Emits a tick execution plus the net-change level1 update for it.
	private void ProcessTick(TradeInfo info)
	{
		var secId = new SecurityId
		{
			SecurityCode = info.Symbol,
			BoardCode = info.Exchange
		};

		SendOutMessage(new ExecutionMessage
		{
			ExecutionType = ExecutionTypes.Tick,
			SecurityId = secId,
			ServerTime = RithmicUtils.ToTime(info.SourceSsboe, info.SourceUsecs),
			LocalTime = RithmicUtils.ToTime(info.Ssboe, info.Usecs),
			TradePrice = info.Price.ToDecimal(),
			TradeVolume = info.Size,
			OriginSide = RithmicUtils.ToOriginSide(info.AggressorSide)
		});

		SendOutMessage(new Level1ChangeMessage
		{
			SecurityId = secId,
			ServerTime = RithmicUtils.ToTime(info.Ssboe, info.Usecs),
		}.TryAdd(Level1Fields.Change, info.NetChange.ToDecimal()));
	}

	private void SessionHolderOnTradePrint(TradeInfo info)
	{
		ProcessTick(info);
	}

	private void SessionHolderOnTradeCondition(TradeInfo info)
	{
		ProcessTick(info);
	}

	// Maps the native settlement price type onto the matching level1 field.
	private void SessionHolderOnSettlementPrice(SettlementPriceInfo info)
	{
		var price = info.Price.ToDecimal();

		if (price == null)
			return;

		Level1Fields field;

		if (info.PriceType == Constants.SETTLEMENT_PRICE_TYPE_FINAL)
			field = Level1Fields.SettlementPrice;
		else if (info.PriceType == Constants.SETTLEMENT_PRICE_TYPE_THEORETICAL)
			field = Level1Fields.TheorPrice;
		else
			field = Level1Fields.LastTradePrice;

		SendOutMessage(new Level1ChangeMessage
		{
			SecurityId = new SecurityId
			{
				SecurityCode = info.Symbol,
				BoardCode = info.Exchange
			},
			ServerTime = RithmicUtils.ToTime(info.Ssboe, info.Usecs),
		}.TryAdd(field, price.Value));
	}

	// Converts a full book snapshot into a quote change message.
	private void SessionHolderOnOrderBook(OrderBookInfo info)
	{
		if (!ProcessErrorCode(info.RpCode))
			return;

		SendOutMessage(new QuoteChangeMessage
		{
			SecurityId = new SecurityId
			{
				SecurityCode = info.Symbol,
				BoardCode = info.Exchange ?? AssociatedBoardCode
			},
			Bids = info.Bids.Select(b => new QuoteChange(Sides.Buy, b.Price.ToDecimal() ?? 0, b.Size) { BoardCode = b.Exchange }).ToArray(),
			Asks = info.Asks.Select(b => new QuoteChange(Sides.Sell, b.Price.ToDecimal() ?? 0, b.Size) { BoardCode = b.Exchange }).ToArray(),
			ServerTime = RithmicUtils.ToTime(info.Ssboe, info.Usecs),
		});
	}

	// Feed signalled the end of a quote batch: flush what was buffered.
	private void SessionHolderOnEndQuote(EndQuoteInfo info)
	{
		var secId = new SecurityId
		{
			SecurityCode = info.Symbol,
			BoardCode = info.Exchange
		};

		FlushQuotes(secId);
	}

	// Emits the buffered best bid/ask pairs for a security as level1 changes
	// (one message per server time) and drops the buffer.
	private void FlushQuotes(SecurityId secId)
	{
		var quotes = _quotes.TryGetValue(secId);

		if (quotes == null)
			return;

		_quotes.Remove(secId);

		foreach (var pair in quotes.CachedPairs)
		{
			var message = new Level1ChangeMessage
			{
				SecurityId = secId,
				ServerTime = pair.Key
			};

			var bid = pair.Value.First;

			if (bid != null)
			{
				message
					.TryAdd(Level1Fields.BestBidPrice, bid.Price.ToDecimal())
					.TryAdd(Level1Fields.BestBidVolume, (decimal)bid.Size);
			}

			var ask = pair.Value.Second;

			if (ask != null)
			{
				message
					.TryAdd(Level1Fields.BestAskPrice, ask.Price.ToDecimal())
					.TryAdd(Level1Fields.BestAskVolume, (decimal)ask.Size);
			}

			SendOutMessage(message);
		}
	}

	// Shared handler for best bid and best ask updates. Always emits the
	// immediate level1 change; multi-part updates are additionally buffered
	// in _quotes and flushed on End.
	private void ProcessBestQuote(string symbol, string exchange, double price, int size, int numOfOrders, UpdateType updateType, int ssboe, int usecs, Level1Fields priceField, Level1Fields volumeField)
	{
		var secId = new SecurityId
		{
			SecurityCode = symbol,
			BoardCode = exchange
		};

		var time = RithmicUtils.ToTime(ssboe, usecs);

		SendOutMessage(new Level1ChangeMessage
		{
			SecurityId = secId,
			ServerTime = time,
		}
		.TryAdd(priceField, price.ToDecimal())
		.TryAdd(volumeField, (decimal)size));

		switch (updateType)
		{
			// [gene.sato] For best bid/ask the update type does not apply.
			// The update type is for market depth/level 2 updates.
			case UpdateType.Undefined:
			case UpdateType.Solo:
			{
				// Already sent above; nothing to buffer.
				break;
			}
			case UpdateType.Begin:
			case UpdateType.Middle:
			case UpdateType.Aggregated:
			{
				var pair = _quotes
					.SafeAdd(secId)
					.SafeAdd(time, key => new RefPair<BidInfo, AskInfo>());

				// Fix: store the quote on the side it belongs to. Previously
				// the ask slot was always written, so buffered bid updates
				// were flushed as best-ask values by FlushQuotes.
				if (priceField == Level1Fields.BestBidPrice)
				{
					pair.First = new BidInfo
					{
						Price = price,
						NumOrders = numOfOrders,
						Size = size,
					};
				}
				else
				{
					pair.Second = new AskInfo
					{
						Price = price,
						NumOrders = numOfOrders,
						Size = size,
					};
				}

				break;
			}
			case UpdateType.End:
				FlushQuotes(secId);
				break;
			case UpdateType.Clear:
				break;
			default:
				throw new ArgumentOutOfRangeException();
		}
	}

	private void SessionHolderOnBestAskQuote(AskInfo info)
	{
		ProcessBestQuote(info.Symbol, info.Exchange, info.Price, info.Size, info.NumOrders, info.UpdateType, info.Ssboe, info.Usecs, Level1Fields.BestAskPrice, Level1Fields.BestAskVolume);
	}

	// Per-level ask updates are intentionally ignored (depth is delivered
	// through the order book callback).
	private void SessionHolderOnAskQuote(AskInfo info)
	{
	}

	private void SessionHolderOnBestBidQuote(BidInfo info)
	{
		ProcessBestQuote(info.Symbol, info.Exchange, info.Price, info.Size, info.NumOrders, info.UpdateType, info.Ssboe, info.Usecs, Level1Fields.BestBidPrice, Level1Fields.BestBidVolume);
	}

	// Per-level bid updates are intentionally ignored (depth is delivered
	// through the order book callback).
	private void SessionHolderOnBidQuote(BidInfo info)
	{
	}

	private void SessionHolderOnExchanges(ExchangeListInfo info)
	{
		ProcessErrorCode(info.RpCode);
		ProcessExchanges(info.Exchanges);
	}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Diagnostics;
using System.IO;
using System.Net.NetworkInformation;
using System.Text;
// Relevant cookie specs:
//
// PERSISTENT CLIENT STATE HTTP COOKIES (1996)
// From <http:// web.archive.org/web/20020803110822/http://wp.netscape.com/newsref/std/cookie_spec.html>
//
// RFC2109 HTTP State Management Mechanism (February 1997)
// From <http:// tools.ietf.org/html/rfc2109>
//
// RFC2965 HTTP State Management Mechanism (October 2000)
// From <http:// tools.ietf.org/html/rfc2965>
//
// RFC6265 HTTP State Management Mechanism (April 2011)
// From <http:// tools.ietf.org/html/rfc6265>
//
// The Version attribute of the cookie header is defined and used only in RFC2109 and RFC2965 cookie
// specs and specifies Version=1. The Version attribute is not used in the Netscape cookie spec
// (considered as Version=0). Nor is it used in the most recent cookie spec, RFC6265, introduced in 2011.
// RFC6265 deprecates all previous cookie specs including the Version attribute.
//
// Cookies without an explicit Domain attribute will only match a potential uri that matches the original
// uri from where the cookie came from.
//
// For explicit Domain attribute in the cookie, the following rules apply:
//
// Version=0 (Netscape, RFC6265) allows the Domain attribute of the cookie to match any tail substring
// of the host uri.
//
// Version=1 related cookie specs only allows the Domain attribute to match the host uri based on a
// more restricted set of rules.
//
// According to RFC2109/RFC2965, the cookie will be rejected for matching if:
// * The value for the Domain attribute contains no embedded dots or does not start with a dot.
// * The value for the request-host does not domain-match the Domain attribute.
// * The request-host is a FQDN (not IP address) and has the form HD, where D is the value of the Domain
// attribute, and H is a string that contains one or more dots.
//
// Examples:
// * A cookie from request-host y.x.foo.com for Domain=.foo.com would be rejected, because H is y.x
// and contains a dot.
//
// * A cookie from request-host x.foo.com for Domain=.foo.com would be accepted.
//
// * A cookie with Domain=.com or Domain=.com., will always be rejected, because there is no embedded dot.
//
// * A cookie with Domain=ajax.com will be rejected because the value for Domain does not begin with a dot.
namespace System.Net
{
// Pairs a Set-Cookie* response-header name with the cookie-spec variant it implies.
internal struct HeaderVariantInfo
{
    private readonly string _name;
    private readonly CookieVariant _variant;

    internal HeaderVariantInfo(string name, CookieVariant variant)
    {
        _name = name;
        _variant = variant;
    }

    // The header name (e.g. Set-Cookie or Set-Cookie2).
    internal string Name => _name;

    // The cookie variant associated with that header.
    internal CookieVariant Variant => _variant;
}
// CookieContainer
//
// Manage cookies for a user (implicit). Based on RFC 2965.
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("System, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
public class CookieContainer
{
// Default container-wide limits: total cookies, cookies per domain, and
// maximum size (in characters) of a single cookie value.
public const int DefaultCookieLimit = 300;
public const int DefaultPerDomainCookieLimit = 20;
public const int DefaultCookieLengthLimit = 4096;
// Maps the two Set-Cookie* header names to the cookie-spec variant they imply.
private static readonly HeaderVariantInfo[] s_headerInfo = {
new HeaderVariantInfo(HttpKnownHeaderNames.SetCookie, CookieVariant.Rfc2109),
new HeaderVariantInfo(HttpKnownHeaderNames.SetCookie2, CookieVariant.Rfc2965)
};
// Domain key -> PathList (path -> CookieCollection) for that domain.
private readonly Hashtable m_domainTable = new Hashtable(); // Do not rename (binary serialization)
private int m_maxCookieSize = DefaultCookieLengthLimit; // Do not rename (binary serialization)
private int m_maxCookies = DefaultCookieLimit; // Do not rename (binary serialization)
private int m_maxCookiesPerDomain = DefaultPerDomainCookieLimit; // Do not rename (binary serialization)
// Total number of cookies across all domains.
private int m_count = 0; // Do not rename (binary serialization)
// This machine's DNS domain with a leading dot, or "" when unknown.
private string m_fqdnMyDomain = string.Empty; // Do not rename (binary serialization)
/// <summary>
/// Initializes a container with default limits; captures the machine's DNS
/// domain (with a leading dot) for local-domain cookie matching.
/// </summary>
public CookieContainer()
{
    string domain = HostInformation.DomainName;
    m_fqdnMyDomain = (domain != null && domain.Length > 1)
        ? '.' + domain
        : string.Empty;
}
/// <summary>Initializes a container with the given total cookie capacity.</summary>
public CookieContainer(int capacity) : this()
{
    // Reject non-positive capacities (historical "Capacity" param-name casing preserved).
    if (capacity < 1)
    {
        throw new ArgumentException(SR.net_toosmall, "Capacity");
    }
    m_maxCookies = capacity;
}
/// <summary>
/// Initializes a container with explicit total capacity, per-domain capacity
/// (Int32.MaxValue means "unlimited per domain") and maximum cookie size.
/// </summary>
public CookieContainer(int capacity, int perDomainCapacity, int maxCookieSize) : this(capacity)
{
    bool perDomainOutOfRange = perDomainCapacity != Int32.MaxValue
        && (perDomainCapacity <= 0 || perDomainCapacity > capacity);
    if (perDomainOutOfRange)
    {
        throw new ArgumentOutOfRangeException(nameof(perDomainCapacity), SR.Format(SR.net_cookie_capacity_range, "PerDomainCapacity", 0, capacity));
    }
    m_maxCookiesPerDomain = perDomainCapacity;

    if (maxCookieSize < 1)
    {
        throw new ArgumentException(SR.net_toosmall, "MaxCookieSize");
    }
    m_maxCookieSize = maxCookieSize;
}
// NOTE: after shrinking the capacity, Count can become greater than Capacity.
/// <summary>
/// Gets or sets the maximum total number of cookies the container may hold.
/// Shrinking triggers container-wide aging against the new limit.
/// </summary>
public int Capacity
{
    get
    {
        return m_maxCookies;
    }
    set
    {
        if (value <= 0 || (value < m_maxCookiesPerDomain && m_maxCookiesPerDomain != Int32.MaxValue))
        {
            throw new ArgumentOutOfRangeException(nameof(value), SR.Format(SR.net_cookie_capacity_range, "Capacity", 0, m_maxCookiesPerDomain));
        }
        if (value < m_maxCookies)
        {
            // Shrinking: apply the new limit first so AgeCookies trims against it.
            // (The original code redundantly assigned m_maxCookies a second time
            // after aging; one assignment per path is sufficient.)
            m_maxCookies = value;
            AgeCookies(null);
        }
        else
        {
            m_maxCookies = value; // Growing or unchanged: no aging needed.
        }
    }
}
/// <devdoc>
///    <para>Returns the total number of cookies in the container.</para>
/// </devdoc>
public int Count => m_count;
/// <summary>Gets or sets the maximum allowed length of a single cookie's value.</summary>
public int MaxCookieSize
{
    get => m_maxCookieSize;
    set
    {
        if (value <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(value));
        }
        m_maxCookieSize = value;
    }
}
/// <devdoc>
///    <para>After shrinking the domain capacity, each domain will hold fewer cookies than the new domain capacity.</para>
/// </devdoc>
/// <summary>
/// Gets or sets the maximum number of cookies allowed per domain.
/// Shrinking triggers aging of every domain against the new limit.
/// </summary>
public int PerDomainCapacity
{
    get
    {
        return m_maxCookiesPerDomain;
    }
    set
    {
        if (value <= 0 || (value > m_maxCookies && value != Int32.MaxValue))
        {
            throw new ArgumentOutOfRangeException(nameof(value));
        }
        if (value < m_maxCookiesPerDomain)
        {
            // Shrinking: lower the per-domain limit first so AgeCookies trims
            // each domain against the new value.  (The original code redundantly
            // assigned m_maxCookiesPerDomain a second time after aging.)
            m_maxCookiesPerDomain = value;
            AgeCookies(null);
        }
        else
        {
            m_maxCookiesPerDomain = value;
        }
    }
}
// This method will construct a faked URI: the Domain property is required for param.
// The faked URI (scheme + domain + optional port + path) exists only so the
// cookie can be run through the standard verification in VerifySetDefaults.
public void Add(Cookie cookie)
{
if (cookie == null)
{
throw new ArgumentNullException(nameof(cookie));
}
// Without a request URI, an explicit Domain is the only way to anchor the cookie.
if (cookie.Domain.Length == 0)
{
throw new ArgumentException(SR.net_emptystringcall, "cookie.Domain");
}
Uri uri;
var uriSb = new StringBuilder();
// We cannot add an invalid cookie into the container.
// Trying to prepare Uri for the cookie verification.
uriSb.Append(cookie.Secure ? UriScheme.Https : UriScheme.Http).Append(UriScheme.SchemeDelimiter);
// If the original cookie has an explicitly set domain, copy it over to the new cookie.
if (!cookie.DomainImplicit)
{
if (cookie.Domain[0] == '.')
{
// A leading-dot domain (".example.com") is not a valid host by itself;
// prepend a fake label so Uri.TryCreate accepts it.
uriSb.Append("0"); // URI cctor should consume this faked host.
}
}
uriSb.Append(cookie.Domain);
// Either keep Port as implicit or set it according to original cookie.
if (cookie.PortList != null)
{
uriSb.Append(":").Append(cookie.PortList[0]);
}
// Path must be present, set to root by default.
uriSb.Append(cookie.Path);
if (!Uri.TryCreate(uriSb.ToString(), UriKind.Absolute, out uri))
throw new CookieException(SR.Format(SR.net_cookie_attribute, "Domain", cookie.Domain));
// We don't know cookie verification status, so re-create the cookie and verify it.
Cookie new_cookie = cookie.Clone();
new_cookie.VerifySetDefaults(new_cookie.Variant, uri, IsLocalDomain(uri.Host), m_fqdnMyDomain, true, true);
Add(new_cookie, true);
}
// This method is called *only* when cookie verification is done, so unlike with public
// Add(Cookie cookie) the cookie is in a reasonable condition.
// throwOnError selects between throwing CookieException and silently dropping
// the cookie on failure; OutOfMemoryException always propagates.
internal void Add(Cookie cookie, bool throwOnError)
{
PathList pathList;
// Enforce the per-cookie size limit first.
if (cookie.Value.Length > m_maxCookieSize)
{
if (throwOnError)
{
throw new CookieException(SR.Format(SR.net_cookie_size, cookie.ToString(), m_maxCookieSize));
}
return;
}
try
{
// Get or lazily create the PathList for the cookie's domain.
lock (m_domainTable.SyncRoot)
{
pathList = (PathList)m_domainTable[cookie.DomainKey];
if (pathList == null)
{
m_domainTable[cookie.DomainKey] = (pathList = new PathList());
}
}
int domain_count = pathList.GetCookiesCount();
// Get or lazily create the collection for the cookie's path.
CookieCollection cookies;
lock (pathList.SyncRoot)
{
cookies = (CookieCollection)pathList[cookie.Path];
if (cookies == null)
{
cookies = new CookieCollection();
pathList[cookie.Path] = cookies;
}
}
if (cookie.Expired)
{
// Explicit removal command (Max-Age == 0)
lock (cookies)
{
int idx = cookies.IndexOf(cookie);
if (idx != -1)
{
cookies.RemoveAt(idx);
--m_count;
}
}
}
else
{
// This is about real cookie adding, check Capacity first
if (domain_count >= m_maxCookiesPerDomain && !AgeCookies(cookie.DomainKey))
{
return; // Cannot age: reject new cookie
}
else if (m_count >= m_maxCookies && !AgeCookies(null))
{
return; // Cannot age: reject new cookie
}
// About to change the collection
lock (cookies)
{
// m_count is adjusted by InternalAdd's return value — presumably the
// net change in count (0 on replacement); confirm against CookieCollection.
m_count += cookies.InternalAdd(cookie, true);
}
}
}
catch (OutOfMemoryException)
{
throw;
}
catch (Exception e)
{
if (throwOnError)
{
throw new CookieException(SR.net_container_add_cookie, e);
}
}
}
// This function, when called, must delete at least one cookie.
// If there are expired cookies in given scope they are cleaned up.
// If nothing is found the least used Collection will be found and removed
// from the container.
//
// Also note that expired cookies are also removed during request preparation
// (this.GetCookies method).
//
// Param. 'domain' == null means to age in the whole container.
// Returns false only when aging could not free any space (caller must then
// reject the new cookie).
private bool AgeCookies(string domain)
{
    Debug.Assert(m_maxCookies != 0);
    Debug.Assert(m_maxCookiesPerDomain != 0);

    int removed = 0;
    DateTime oldUsed = DateTime.MaxValue;
    DateTime tempUsed;

    CookieCollection lruCc = null;
    string lruDomain = null; // Tracked alongside lruCc (kept for parity; not read below).
    string tempDomain = null;

    PathList pathList;
    int domain_count = 0;
    int itemp = 0;
    float remainingFraction = 1.0F;

    // The container was shrunk, might need additional cleanup for each domain.
    if (m_count > m_maxCookies)
    {
        // Means the fraction of the container to be left.
        // Each domain will be cut accordingly.
        remainingFraction = (float)m_maxCookies / (float)m_count;
    }

    lock (m_domainTable.SyncRoot)
    {
        foreach (DictionaryEntry entry in m_domainTable)
        {
            if (domain == null)
            {
                tempDomain = (string)entry.Key;
                pathList = (PathList)entry.Value; // Aliasing to trick foreach
            }
            else
            {
                tempDomain = domain;
                pathList = (PathList)m_domainTable[domain];
            }

            domain_count = 0; // Cookies in the domain

            // First pass: drop expired cookies and locate the container-wide
            // least-recently-used collection (only collections with 1+ items count).
            lock (pathList.SyncRoot)
            {
                foreach (CookieCollection cc in pathList.Values)
                {
                    itemp = ExpireCollection(cc);
                    removed += itemp;
                    m_count -= itemp; // Update this container's count
                    domain_count += cc.Count;

                    if (cc.Count > 0 && (tempUsed = cc.TimeStamp(CookieCollection.Stamp.Check)) < oldUsed)
                    {
                        lruDomain = tempDomain;
                        lruCc = cc;
                        oldUsed = tempUsed;
                    }
                }
            }

            // Check if we have reduced to the limit of the domain by expiration only.
            int min_count = Math.Min((int)(domain_count * remainingFraction), Math.Min(m_maxCookiesPerDomain, m_maxCookies) - 1);
            if (domain_count > min_count)
            {
                // This case requires sorting all domain collections by timestamp.
                Array cookies;
                Array stamps;
                lock (pathList.SyncRoot)
                {
                    cookies = Array.CreateInstance(typeof(CookieCollection), pathList.Count);
                    stamps = Array.CreateInstance(typeof(DateTime), pathList.Count);

                    // BUGFIX: 'itemp' still holds the last ExpireCollection() result
                    // from the pass above.  Without resetting it to 0, the SetValue
                    // calls below would start past index 0 and overrun the arrays
                    // (IndexOutOfRangeException) whenever expired cookies were just
                    // removed from the final collection.
                    itemp = 0;
                    foreach (CookieCollection cc in pathList.Values)
                    {
                        stamps.SetValue(cc.TimeStamp(CookieCollection.Stamp.Check), itemp);
                        cookies.SetValue(cc, itemp);
                        ++itemp;
                    }
                }
                // Sort collections oldest-first so trimming evicts stale data first.
                Array.Sort(stamps, cookies);

                itemp = 0;
                for (int i = 0; i < cookies.Length; ++i)
                {
                    CookieCollection cc = (CookieCollection)cookies.GetValue(i);
                    lock (cc)
                    {
                        while (domain_count > min_count && cc.Count > 0)
                        {
                            cc.RemoveAt(0);
                            --domain_count;
                            --m_count;
                            ++removed;
                        }
                    }
                    if (domain_count <= min_count)
                    {
                        break;
                    }
                }

                if (domain_count > min_count && domain != null)
                {
                    // Cannot complete aging of explicit domain (no cookie adding allowed).
                    return false;
                }
            }
        }
    }

    // We have completed aging of the specified domain.
    if (domain != null)
    {
        return true;
    }

    // The rest is for entire container aging.
    // We must get at least one free slot.

    // Don't need to apply LRU if we already cleaned something.
    if (removed != 0)
    {
        return true;
    }

    if (oldUsed == DateTime.MaxValue)
    {
        // Something strange. Either capacity is 0 or all collections are locked with cc.Used.
        return false;
    }

    // Remove oldest cookies from the least used collection.
    // (lruCc is non-null here: oldUsed was updated only together with lruCc.)
    lock (lruCc)
    {
        while (m_count >= m_maxCookies && lruCc.Count > 0)
        {
            lruCc.RemoveAt(0);
            --m_count;
        }
    }
    return true;
}
// Return number of cookies removed from the collection.
private int ExpireCollection(CookieCollection cc)
{
    lock (cc)
    {
        int oldCount = cc.Count;

        // Walk backwards so removals do not shift the indices we still have
        // to visit (an enumerator would be invalidated by RemoveAt).
        for (int idx = oldCount - 1; idx >= 0; --idx)
        {
            if (cc[idx].Expired)
            {
                cc.RemoveAt(idx);
            }
        }

        return oldCount - cc.Count;
    }
}
/// <summary>Adds every cookie in <paramref name="cookies"/> to the container.</summary>
public void Add(CookieCollection cookies)
{
    if (cookies == null)
    {
        throw new ArgumentNullException(nameof(cookies));
    }

    foreach (Cookie cookie in cookies)
    {
        Add(cookie);
    }
}
// This will try (if needed) get the full domain name of the host given the Uri.
// NEVER call this function from internal methods with 'fqdnRemote' == null.
// DNS resolution has security implications and can be slow, so calls should be rare.
internal bool IsLocalDomain(string host)
{
    int dot = host.IndexOf('.');
    if (dot == -1)
    {
        // Dotless names ('localhost', 'loopback', bare machine names) are
        // treated as hosts on the local domain.
        return true;
    }

    // Fast path: literal loopback addresses for IPv4 and IPv6.
    if (host == "127.0.0.1" || host == "::1" || host == "0:0:0:0:0:0:0:1")
    {
        return true;
    }

    // Does the host's domain suffix match this machine's domain?
    if (string.Compare(m_fqdnMyDomain, 0, host, dot, m_fqdnMyDomain.Length, StringComparison.OrdinalIgnoreCase) == 0)
    {
        return true;
    }

    // Finally recognize any "127.###.###.###" loopback form without regex:
    // exactly four dot-separated parts, the first "127", the others 1-3
    // ASCII digits each (equivalent to the original goto-case ladder).
    string[] parts = host.Split('.');
    if (parts != null && parts.Length == 4 && parts[0] == "127")
    {
        bool allNumeric = true;
        for (int i = 1; i < 4 && allNumeric; i++)
        {
            string part = parts[i];
            if (part.Length < 1 || part.Length > 3)
            {
                allNumeric = false;
                break;
            }
            for (int j = 0; j < part.Length; j++)
            {
                if (part[j] < '0' || part[j] > '9')
                {
                    allNumeric = false;
                    break;
                }
            }
        }
        if (allNumeric)
        {
            return true;
        }
    }
    return false;
}
/// <summary>Verifies <paramref name="cookie"/> against <paramref name="uri"/> and adds a clone of it.</summary>
public void Add(Uri uri, Cookie cookie)
{
    if (uri == null)
    {
        throw new ArgumentNullException(nameof(uri));
    }
    if (cookie == null)
    {
        throw new ArgumentNullException(nameof(cookie));
    }

    // Work on a clone so the caller's instance is never mutated by verification.
    Cookie clone = cookie.Clone();
    clone.VerifySetDefaults(clone.Variant, uri, IsLocalDomain(uri.Host), m_fqdnMyDomain, true, true);
    Add(clone, true);
}
/// <summary>Verifies each cookie against <paramref name="uri"/> and adds clones of them.</summary>
public void Add(Uri uri, CookieCollection cookies)
{
    if (uri == null)
    {
        throw new ArgumentNullException(nameof(uri));
    }
    if (cookies == null)
    {
        throw new ArgumentNullException(nameof(cookies));
    }

    // Compute once; the answer is the same for every cookie in the batch.
    bool isLocalDomain = IsLocalDomain(uri.Host);
    foreach (Cookie cookie in cookies)
    {
        Cookie clone = cookie.Clone();
        clone.VerifySetDefaults(clone.Variant, uri, isLocalDomain, m_fqdnMyDomain, true, true);
        Add(clone, true);
    }
}
// Parses a Set-Cookie / Set-Cookie2 response header into cookies and adds
// them to this container.  headerName == null selects the default variant;
// otherwise the variant is looked up from the known Set-Cookie* names.
// isThrow chooses between throwing CookieException on malformed input and
// silently dropping the offending cookie(s).
internal CookieCollection CookieCutter(Uri uri, string headerName, string setCookieHeader, bool isThrow)
{
    // Fixed: the IsEnabled guard was previously nested inside an identical
    // outer guard; a single check is sufficient.
    if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"uri:{uri} headerName:{headerName} setCookieHeader:{setCookieHeader} isThrow:{isThrow}");

    CookieCollection cookies = new CookieCollection();
    CookieVariant variant = CookieVariant.Unknown;
    if (headerName == null)
    {
        variant = CookieVariant.Default;
    }
    else
    {
        for (int i = 0; i < s_headerInfo.Length; ++i)
        {
            if ((String.Compare(headerName, s_headerInfo[i].Name, StringComparison.OrdinalIgnoreCase) == 0))
            {
                variant = s_headerInfo[i].Variant;
            }
        }
    }

    bool isLocalDomain = IsLocalDomain(uri.Host);
    try
    {
        CookieParser parser = new CookieParser(setCookieHeader);
        do
        {
            Cookie cookie = parser.Get();
            if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"CookieParser returned cookie:{cookie}");

            if (cookie == null)
            {
                break;
            }

            // Parser marks invalid cookies this way (empty Name).
            if (String.IsNullOrEmpty(cookie.Name))
            {
                if (isThrow)
                {
                    throw new CookieException(SR.net_cookie_format);
                }
                // Otherwise, ignore (reject) cookie
                continue;
            }

            // This will set the default values from the response URI
            // AND will check for cookie validity.
            if (!cookie.VerifySetDefaults(variant, uri, isLocalDomain, m_fqdnMyDomain, true, isThrow))
            {
                continue;
            }

            // If many same cookies arrive we collapse them into just one, hence setting
            // parameter isStrict = true below.
            cookies.InternalAdd(cookie, true);
        } while (true);
    }
    catch (OutOfMemoryException)
    {
        throw; // Never mask OOM behind a CookieException.
    }
    catch (Exception e)
    {
        if (isThrow)
        {
            throw new CookieException(SR.Format(SR.net_cookie_parse_header, uri.AbsoluteUri), e);
        }
    }

    foreach (Cookie c in cookies)
    {
        Add(c, isThrow);
    }

    return cookies;
}
/// <summary>
/// Returns the cookies that would be sent with a request to
/// <paramref name="uri"/>; never null (an empty collection when nothing matches).
/// </summary>
public CookieCollection GetCookies(Uri uri)
{
    if (uri == null)
    {
        throw new ArgumentNullException(nameof(uri));
    }

    CookieCollection found = InternalGetCookies(uri);
    return found ?? new CookieCollection();
}
// Core lookup: builds the list of domain keys that could match 'uri' and
// merges the matching cookies.  Returns null (not an empty collection)
// when the container is empty or nothing matches.
internal CookieCollection InternalGetCookies(Uri uri)
{
if (m_count == 0)
{
return null;
}
// Secure schemes may receive Secure cookies; the port is matched against PortList.
bool isSecure = (uri.Scheme == UriScheme.Https || uri.Scheme == UriScheme.Wss);
int port = uri.Port;
CookieCollection cookies = null;
var domainAttributeMatchAnyCookieVariant = new System.Collections.Generic.List<string>();
System.Collections.Generic.List<string> domainAttributeMatchOnlyCookieVariantPlain = null;
string fqdnRemote = uri.Host;
// Add initial candidates to match Domain attribute of possible cookies.
// For these Domains, cookie can have any CookieVariant enum value.
domainAttributeMatchAnyCookieVariant.Add(fqdnRemote);
domainAttributeMatchAnyCookieVariant.Add("." + fqdnRemote);
int dot = fqdnRemote.IndexOf('.');
if (dot == -1)
{
// DNS.resolve may return short names even for other inet domains ;-(
// We _don't_ know what the exact domain is, so try also grab short hostname cookies.
// Grab long name from the local domain
if (m_fqdnMyDomain != null && m_fqdnMyDomain.Length != 0)
{
domainAttributeMatchAnyCookieVariant.Add(fqdnRemote + m_fqdnMyDomain);
// Grab the local domain itself
domainAttributeMatchAnyCookieVariant.Add(m_fqdnMyDomain);
}
}
else
{
// Grab the host domain
domainAttributeMatchAnyCookieVariant.Add(fqdnRemote.Substring(dot));
// The following block is only for compatibility with Version0 spec.
// Still, we'll add only Plain-Variant cookies if found under below keys
if (fqdnRemote.Length > 2)
{
// We ignore the '.' at the end on the name
int last = fqdnRemote.LastIndexOf('.', fqdnRemote.Length - 2);
// AND keys with <2 dots inside.
if (last > 0)
{
last = fqdnRemote.LastIndexOf('.', last - 1);
}
if (last != -1)
{
// Walk the intermediate tails of the host name; these candidates may
// only match Plain (Netscape/Version0) cookies.
while ((dot < last) && (dot = fqdnRemote.IndexOf('.', dot + 1)) != -1)
{
if (domainAttributeMatchOnlyCookieVariantPlain == null)
{
domainAttributeMatchOnlyCookieVariantPlain = new System.Collections.Generic.List<string>();
}
// These candidates can only match CookieVariant.Plain cookies.
domainAttributeMatchOnlyCookieVariantPlain.Add(fqdnRemote.Substring(dot));
}
}
}
}
BuildCookieCollectionFromDomainMatches(uri, isSecure, port, ref cookies, domainAttributeMatchAnyCookieVariant, false);
if (domainAttributeMatchOnlyCookieVariantPlain != null)
{
BuildCookieCollectionFromDomainMatches(uri, isSecure, port, ref cookies, domainAttributeMatchOnlyCookieVariantPlain, true);
}
return cookies;
}
// For each candidate domain key, walks that domain's path lists and merges
// the cookies whose path is a prefix of the request path.  The root-path
// ("/") collection is always considered, and emptied domains are pruned.
private void BuildCookieCollectionFromDomainMatches(Uri uri, bool isSecure, int port, ref CookieCollection cookies, System.Collections.Generic.List<string> domainAttribute, bool matchOnlyPlainCookie)
{
for (int i = 0; i < domainAttribute.Count; i++)
{
bool found = false;
bool defaultAdded = false;
PathList pathList;
lock (m_domainTable.SyncRoot)
{
pathList = (PathList)m_domainTable[domainAttribute[i]];
if (pathList == null)
{
continue;
}
}
lock (pathList.SyncRoot)
{
// Manual use of IDictionaryEnumerator instead of foreach to avoid DictionaryEntry box allocations.
// Paths that share a prefix sort longest-first (see PathListComparer),
// so once a match has been seen, the first later non-match ends the scan.
IDictionaryEnumerator e = pathList.GetEnumerator();
while (e.MoveNext())
{
string path = (string)e.Key;
if (uri.AbsolutePath.StartsWith(CookieParser.CheckQuoted(path)))
{
found = true;
CookieCollection cc = (CookieCollection)e.Value;
cc.TimeStamp(CookieCollection.Stamp.Set);
MergeUpdateCollections(ref cookies, cc, port, isSecure, matchOnlyPlainCookie);
if (path == "/")
{
defaultAdded = true;
}
}
else if (found)
{
break;
}
}
}
if (!defaultAdded)
{
// The root-path collection applies to every request path.
CookieCollection cc = (CookieCollection)pathList["/"];
if (cc != null)
{
cc.TimeStamp(CookieCollection.Stamp.Set);
MergeUpdateCollections(ref cookies, cc, port, isSecure, matchOnlyPlainCookie);
}
}
// Remove unused domain
// (This is the only place that does domain removal)
if (pathList.Count == 0)
{
lock (m_domainTable.SyncRoot)
{
m_domainTable.Remove(domainAttribute[i]);
}
}
}
}
// Merges cookies from 'source' into 'destination' (allocated lazily),
// removing expired entries from the container as a side effect and
// filtering by port list, Secure flag and (optionally) Plain variant.
private void MergeUpdateCollections(ref CookieCollection destination, CookieCollection source, int port, bool isSecure, bool isPlainOnly)
{
lock (source)
{
// Cannot use foreach as we are going to update 'source'
for (int idx = 0; idx < source.Count; ++idx)
{
bool to_add = false;
Cookie cookie = source[idx];
if (cookie.Expired)
{
// If expired, remove from container and don't add to the destination
source.RemoveAt(idx);
--m_count;
--idx; // Re-examine the element shifted into this slot.
}
else
{
// Add only if port does match to this request URI
// or was not present in the original response.
if (isPlainOnly && cookie.Variant != CookieVariant.Plain)
{
; // Don't add
}
else if (cookie.PortList != null)
{
// Explicit port list: the request port must appear in it.
foreach (int p in cookie.PortList)
{
if (p == port)
{
to_add = true;
break;
}
}
}
else
{
// It was implicit Port, always OK to add.
to_add = true;
}
// Refuse to add a secure cookie into an 'unsecure' destination
if (cookie.Secure && !isSecure)
{
to_add = false;
}
if (to_add)
{
// In 'source' are already ordered.
// If two same cookies come from different 'source' then they
// will follow (not replace) each other.
if (destination == null)
{
destination = new CookieCollection();
}
destination.InternalAdd(cookie, false);
}
}
}
}
}
/// <summary>Formats the matching cookies as a single Cookie request-header value.</summary>
public string GetCookieHeader(Uri uri)
{
    if (uri == null)
    {
        throw new ArgumentNullException(nameof(uri));
    }

    string unusedCookie2;
    return GetCookieHeader(uri, out unusedCookie2);
}
// Formats the matching cookies as a "; "-delimited Cookie header value and
// reports (via optCookie2) the Cookie2 version attribute when any non-default
// variant cookie was seen.  Both outputs are empty when nothing matches.
internal string GetCookieHeader(Uri uri, out string optCookie2)
{
    CookieCollection cookies = InternalGetCookies(uri);
    if (cookies == null)
    {
        optCookie2 = string.Empty;
        return string.Empty;
    }

    StringBuilder builder = StringBuilderCache.Acquire();
    string delimiter = string.Empty;
    foreach (Cookie cookie in cookies)
    {
        builder.Append(delimiter);
        cookie.ToString(builder);
        delimiter = "; ";
    }

    optCookie2 = cookies.IsOtherVersionSeen ?
        (Cookie.SpecialAttributeLiteral +
        CookieFields.VersionAttributeName +
        Cookie.EqualsLiteral +
        Cookie.MaxSupportedVersionString) : string.Empty;

    return StringBuilderCache.GetStringAndRelease(builder);
}
/// <summary>Parses a Set-Cookie header for <paramref name="uri"/> and stores the resulting cookies.</summary>
public void SetCookies(Uri uri, string cookieHeader)
{
    if (uri == null)
    {
        throw new ArgumentNullException(nameof(uri));
    }
    if (cookieHeader == null)
    {
        throw new ArgumentNullException(nameof(cookieHeader));
    }

    // isThrow: true — malformed headers surface as CookieException.
    CookieCutter(uri, null, cookieHeader, true);
}
}
// PathList needs to be public in order to maintain binary serialization compatibility as the System shim
// needs to have access to type-forward it.
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("System, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
public sealed class PathList
{
// Usage of PathList depends on it being shallowly immutable;
// adding any mutable fields to it would result in breaks.
// Maps cookie path -> CookieCollection, ordered by PathListComparer below.
private readonly SortedList m_list = SortedList.Synchronized(new SortedList(PathListComparer.StaticInstance)); // Do not rename (binary serialization)
// Number of distinct paths in this domain.
internal int Count => m_list.Count;
// Total number of cookies across all paths in this domain.
internal int GetCookiesCount()
{
int count = 0;
lock (SyncRoot)
{
foreach (CookieCollection cc in m_list.Values)
{
count += cc.Count;
}
}
return count;
}
internal ICollection Values
{
get
{
return m_list.Values;
}
}
// Indexer over the underlying sorted list; null is returned for unknown
// paths, and the setter asserts non-null values.
internal object this[string s]
{
get
{
lock (SyncRoot)
{
return m_list[s];
}
}
set
{
lock (SyncRoot)
{
Debug.Assert(value != null);
m_list[s] = value;
}
}
}
internal IDictionaryEnumerator GetEnumerator()
{
lock (SyncRoot)
{
return m_list.GetEnumerator();
}
}
internal object SyncRoot => m_list.SyncRoot;
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("System, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
private sealed class PathListComparer : IComparer
{
internal static readonly PathListComparer StaticInstance = new PathListComparer();
// Compares unquoted paths character by character; when one path is a
// prefix of the other, the longer path sorts first ('return lr - ll'),
// i.e. more specific paths precede less specific ones.
int IComparer.Compare(object ol, object or)
{
string pathLeft = CookieParser.CheckQuoted((string)ol);
string pathRight = CookieParser.CheckQuoted((string)or);
int ll = pathLeft.Length;
int lr = pathRight.Length;
int length = Math.Min(ll, lr);
for (int i = 0; i < length; ++i)
{
if (pathLeft[i] != pathRight[i])
{
return pathLeft[i] - pathRight[i];
}
}
return lr - ll;
}
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
namespace CliParse
{
/// <summary>
/// Information content builder for help screens.
/// </summary>
public static class InfoBuilder
{
/// <summary>
/// Builds help content from assembly metadata and the parsable object's
/// attributes, substituting the {token} placeholders in the template.
/// </summary>
/// <param name="parsable">Object whose ParsableClass attribute describes the CLI.</param>
/// <param name="asm">Assembly supplying title/version/company/description/copyright metadata.</param>
/// <param name="template">Help template containing {title}, {version}, ... placeholders.</param>
/// <param name="argumentTemplate">Template used for each argument's syntax line.</param>
/// <param name="maxLineLength">The maximum number of characters in a line before it is wrapped</param>
/// <returns>The expanded, line-wrapped help text.</returns>
public static string GetHelpInfoFromAssembly(Parsable parsable, Assembly asm, string template, string argumentTemplate, int maxLineLength = 80)
{
    if (parsable == null) throw new ArgumentNullException("parsable");
    if (asm == null) throw new ArgumentNullException("asm");
    if (string.IsNullOrEmpty(template)) return "";

    var parsableClass = Helper.GetObjectAttribute(parsable, typeof(ParsableClassAttribute)) as ParsableClassAttribute;
    if (parsableClass == null)
        throw new CliParseException("Unable to find 'ParsableClass' attribute on provided object.");

    // Substitute each token in the same order as the original implementation.
    template = template
        .Replace("{title}", GetAssemblyAttribute(asm, typeof(AssemblyTitleAttribute)))
        .Replace("{version}", asm.GetName().Version.ToString())
        .Replace("{company}", GetAssemblyAttribute(asm, typeof(AssemblyCompanyAttribute)))
        .Replace("{description}", GetAssemblyAttribute(asm, typeof(AssemblyDescriptionAttribute)))
        .Replace("{syntax}", GetSyntaxInfo(parsable, argumentTemplate, parsableClass.AllowedPrefixes))
        .Replace("{copyright}", GetAssemblyAttribute(asm, typeof(AssemblyCopyrightAttribute)))
        .Replace("{footer}", GetAssemblyMetadataAttribute(asm, "footer"));

    return FormatTextForScreen(template.Trim(), maxLineLength);
}
/// <summary>
/// Builds help content purely from the parsable object's ParsableClass
/// attribute, substituting the {token} placeholders in the template.
/// </summary>
/// <param name="parsable">Object whose ParsableClass attribute describes the CLI.</param>
/// <param name="template">Help template containing {title}, {description}, ... placeholders.</param>
/// <param name="argumentTemplate">Template used for each argument's syntax line.</param>
/// <param name="maxLineLength">The maximum number of characters in a line before it is wrapped</param>
/// <returns>The expanded, line-wrapped help text.</returns>
public static string GetHelpInfo(Parsable parsable, string template, string argumentTemplate, int maxLineLength = 80)
{
    if (parsable == null) throw new ArgumentNullException("parsable");
    if (string.IsNullOrEmpty(template)) return "";

    var parsableClass = Helper.GetObjectAttribute(parsable, typeof(ParsableClassAttribute)) as ParsableClassAttribute;
    if (parsableClass == null)
        throw new CliParseException("Unable to find 'ParsableClass' attribute on provided object.");

    // Substitute each token in the same order as the original implementation.
    template = template
        .Replace("{title}", parsableClass.Title)
        .Replace("{description}", parsableClass.Description)
        .Replace("{copyright}", string.IsNullOrEmpty(parsableClass.Copyright) ? "" : parsableClass.Copyright)
        .Replace("{version}", parsableClass.Version)
        .Replace("{syntax}", GetSyntaxInfo(parsable, argumentTemplate, parsableClass.AllowedPrefixes))
        .Replace("{example}", parsableClass.ExampleText)
        .Replace("{footer}", parsableClass.FooterText);

    return FormatTextForScreen(template.Trim(), maxLineLength);
}
// Builds the per-argument syntax section.  When more than one prefix
// character is allowed, the first prefix is used for the examples and an
// explanatory line lists all of them.
// NOTE(review): a single-entry 'prefixes' collection is ignored and the
// default "-" prefix is used — confirm whether 'prefixes.Count > 1' should
// actually be '> 0'.
private static string GetSyntaxInfo(Parsable parsable, string argumentTemplate, ICollection<char> prefixes)
{
    var arguments = GetListArgumentAttributes(parsable);
    var sb = new StringBuilder();

    var prefix = "-"; // default
    if (prefixes.Count > 1)
    {
        prefix = prefixes.FirstOrDefault().ToString();
        // string.Join replaces the original manual concatenation plus
        // trailing-comma trim; output is identical: 'a','b',...
        var allowedPrefixes = string.Join(",", prefixes.Select(x => "'" + x + "'"));
        sb.AppendLine("The following argument prefix characters can be used: " + allowedPrefixes);
    }

    foreach (var argument in arguments)
    {
        sb.AppendLine(argument.GetSyntax(argumentTemplate, prefix));
    }
    return sb.ToString();
}
// Collects every ParsableArgumentAttribute declared on the object's public
// properties, in property order.
private static IEnumerable<ParsableArgumentAttribute> GetListArgumentAttributes(Parsable parsable)
{
    return parsable.GetType()
        .GetProperties()
        .SelectMany(property => property.GetCustomAttributes(true).OfType<ParsableArgumentAttribute>())
        .ToList();
}
// Returns the value of the assembly-level AssemblyMetadata attribute with
// the given key, or "" when no such key exists.
private static string GetAssemblyMetadataAttribute(Assembly asm, string key)
{
    var match = asm.GetCustomAttributes(typeof(AssemblyMetadataAttribute))
        .Cast<AssemblyMetadataAttribute>()
        .FirstOrDefault(x => x.Key.Equals(key));
    return match == null ? "" : match.Value;
}
// Finds the first assembly-level attribute of exactly the requested type and
// extracts its textual payload ("" when the attribute is absent).
private static string GetAssemblyAttribute(Assembly asm, Type type)
{
    var attribute = asm.GetCustomAttributes(type).FirstOrDefault(x => x.GetType() == type);
    return attribute == null ? "" : GetAssemblyAttributeValue(type, attribute);
}
// Maps a known assembly-info attribute instance to its string payload;
// unknown attribute types (or a type/instance mismatch) yield "".
private static string GetAssemblyAttributeValue(Type type, Attribute customAttribute)
{
    if (type == typeof(AssemblyTitleAttribute))
    {
        var title = customAttribute as AssemblyTitleAttribute;
        return title == null ? "" : title.Title;
    }
    if (type == typeof(AssemblyDescriptionAttribute))
    {
        var description = customAttribute as AssemblyDescriptionAttribute;
        return description == null ? "" : description.Description;
    }
    if (type == typeof(AssemblyCompanyAttribute))
    {
        var company = customAttribute as AssemblyCompanyAttribute;
        return company == null ? "" : company.Company;
    }
    if (type == typeof(AssemblyCopyrightAttribute))
    {
        var copyright = customAttribute as AssemblyCopyrightAttribute;
        return copyright == null ? "" : copyright.Copyright;
    }
    return "";
}
// Wraps each line of 'text' to the maximum length, preserving the original
// line structure (splits on both \r\n and \n).
private static string FormatTextForScreen(string text, int maxLineLength)
{
    var result = new StringBuilder();
    foreach (var line in text.Split(new[] { "\r\n", "\n" }, StringSplitOptions.None))
    {
        result.AppendLine(BreakStringToLength(line, maxLineLength));
    }
    return result.ToString();
}
/// <summary>
/// Word-wraps a single line to at most <paramref name="maximumLineLength"/>
/// characters, indenting continuation lines with the line's leading
/// whitespace (tabs expanded to four spaces).
/// </summary>
/// <param name="line">The line to wrap; null/empty returns "".</param>
/// <param name="maximumLineLength">Wrap width; must be at least 2.</param>
/// <returns>The wrapped text (unchanged when it already fits).</returns>
// NOTE(review): when the very first word is longer than the limit, the first
// wrap emits an empty leading line (a zero-length substring plus newline) —
// confirm whether that is intended.
public static string BreakStringToLength(string line, int maximumLineLength)
{
if (string.IsNullOrEmpty(line)) return "";
if (maximumLineLength <= 1) throw new ArgumentOutOfRangeException("maximumLineLength");
// A line that fits (strictly below the limit) is returned untouched.
if (line.Length <= maximumLineLength - 1) return line;
var maxLineLength = maximumLineLength;
var sb = new StringBuilder();
var startingWhiteSpace = GetLeadingWhitespaceAsSpaces(line);
var startingWhiteSpaceLength = startingWhiteSpace.Length;
var currentIndex = 0; // start of the not-yet-emitted text
var possibleIndex = 0; // last known safe wrap point (just after a space)
var keepGoing = true;
while (keepGoing)
{
var scanIndex = line.IndexOf(' ', possibleIndex);
if (scanIndex != -1) scanIndex += 1; // move to location after the space so we wrap at start of word.
// The next word would overflow the current line: emit up to the last
// safe wrap point and start a new, indented line.
if (scanIndex - currentIndex + startingWhiteSpaceLength > maxLineLength)
{
sb.Append(line.Substring(currentIndex, possibleIndex - currentIndex));
sb.AppendLine();
sb.Append(startingWhiteSpace);
currentIndex = possibleIndex;
}
// no more spaces
if (scanIndex == -1)
{
var lengthRemaining = line.Length - currentIndex;
if (currentIndex == 0)
{
// Nothing emitted yet: hard-split an over-long unbroken word.
if (lengthRemaining > maxLineLength)
{
sb.AppendLine(line.Substring(currentIndex, maxLineLength));
sb.Append(startingWhiteSpace);
currentIndex += maxLineLength;
}
else
{
sb.Append(line.Substring(currentIndex, lengthRemaining));
keepGoing = false;
}
}
else
{
// Continuation lines reserve room for the indent prefix.
if (lengthRemaining + startingWhiteSpaceLength > maxLineLength)
{
sb.AppendLine(line.Substring(currentIndex, maxLineLength - startingWhiteSpaceLength));
sb.Append(startingWhiteSpace);
currentIndex += maxLineLength - startingWhiteSpaceLength;
}
else
{
sb.Append(line.Substring(currentIndex, lengthRemaining));
keepGoing = false;
}
}
}
else
{
// Remember this space as the next candidate wrap point.
possibleIndex = scanIndex;
}
}
return sb.ToString();
}
/// <summary>
/// Measures the leading whitespace run of <paramref name="line"/> and returns
/// an equivalent all-spaces string: a space counts 1, a tab counts 4, and any
/// other whitespace character (e.g. '\r') adds nothing but keeps the scan going.
/// </summary>
private static string GetLeadingWhitespaceAsSpaces(string line)
{
  int width = 0;
  for (int i = 0; i < line.Length && Char.IsWhiteSpace(line[i]); i++)
  {
    char ch = line[i];
    if (ch == ' ')
    {
      width += 1;
    }
    else if (ch == '\t')
    {
      width += 4;
    }
  }
  return new string(' ', width);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Xml;
using System.Xml.Linq;
using System.Runtime.Versioning;
namespace System.Xml.Schema
{
// Walks a LINQ to XML tree (XDocument / XElement / XAttribute) and feeds it to
// an XmlSchemaValidator; when requested, writes PSVI results back onto the tree
// as XmlSchemaInfo annotations, default attributes and default element values.
internal class XNodeValidator
{
    private readonly XmlSchemaSet schemas;
    private readonly ValidationEventHandler validationEventHandler;

    private XObject source;     // node currently being validated; passed as sender to the user's event handler
    private bool addSchemaInfo; // when true, PSVI is recorded on the tree
    private XmlNamespaceManager namespaceManager;
    private XmlSchemaValidator validator;

    private Dictionary<XmlSchemaInfo, XmlSchemaInfo> schemaInfos; // interning cache: structurally equal infos share one instance
    private ArrayList defaultAttributes; // reusable scratch list for schema-defaulted attributes
    private readonly XName xsiTypeName;  // xsi:type
    private readonly XName xsiNilName;   // xsi:nil

    public XNodeValidator(XmlSchemaSet schemas, ValidationEventHandler validationEventHandler)
    {
        this.schemas = schemas;
        this.validationEventHandler = validationEventHandler;

        XNamespace xsi = XNamespace.Get("http://www.w3.org/2001/XMLSchema-instance");
        xsiTypeName = xsi.GetName("type");
        xsiNilName = xsi.GetName("nil");
    }

    /// <summary>
    /// Validates <paramref name="source"/> — a document (validated from its root),
    /// an element, or a non-namespace-declaration attribute — against the schema
    /// set supplied to the constructor.
    /// </summary>
    public void Validate(XObject source, XmlSchemaObject partialValidationType, bool addSchemaInfo)
    {
        this.source = source;
        this.addSchemaInfo = addSchemaInfo;
        XmlSchemaValidationFlags validationFlags = XmlSchemaValidationFlags.AllowXmlAttributes;
        XmlNodeType nt = source.NodeType;
        switch (nt)
        {
            case XmlNodeType.Document:
                // Documents validate through their root; identity constraints
                // are only meaningful at document scope.
                source = ((XDocument)source).Root;
                if (source == null) throw new InvalidOperationException(SR.InvalidOperation_MissingRoot);
                validationFlags |= XmlSchemaValidationFlags.ProcessIdentityConstraints;
                break;
            case XmlNodeType.Element:
                break;
            case XmlNodeType.Attribute:
                // Namespace declarations cannot be schema-validated.
                if (((XAttribute)source).IsNamespaceDeclaration) goto default;
                if (source.Parent == null) throw new InvalidOperationException(SR.InvalidOperation_MissingParent);
                break;
            default:
                throw new InvalidOperationException(SR.Format(SR.InvalidOperation_BadNodeType, nt));
        }
        // Seed the namespace manager with every namespace in scope at the start node.
        namespaceManager = new XmlNamespaceManager(schemas.NameTable);
        PushAncestorsAndSelf(source.Parent);

        validator = new XmlSchemaValidator(schemas.NameTable, schemas, namespaceManager, validationFlags);
        validator.ValidationEventHandler += new ValidationEventHandler(ValidationCallback);
        validator.XmlResolver = null;
        if (partialValidationType != null)
        {
            validator.Initialize(partialValidationType);
        }
        else
        {
            validator.Initialize();
        }

        IXmlLineInfo orginal = SaveLineInfo(source);
        if (nt == XmlNodeType.Attribute)
        {
            ValidateAttribute((XAttribute)source);
        }
        else
        {
            ValidateElement((XElement)source);
        }
        validator.EndValidation();
        RestoreLineInfo(orginal);
    }

    // Builds the PSVI record attached to attributes materialized from schema defaults.
    private XmlSchemaInfo GetDefaultAttributeSchemaInfo(XmlSchemaAttribute sa)
    {
        XmlSchemaInfo si = new XmlSchemaInfo();
        si.IsDefault = true;
        si.IsNil = false;
        si.SchemaAttribute = sa;
        XmlSchemaSimpleType st = sa.AttributeSchemaType;
        si.SchemaType = st;

        // For a union type, record which member type actually accepts the default value.
        if (st.Datatype.Variety == XmlSchemaDatatypeVariety.Union)
        {
            string value = GetDefaultValue(sa);
            foreach (XmlSchemaSimpleType mt in ((XmlSchemaSimpleTypeUnion)st.Content).BaseMemberTypes)
            {
                object typedValue = null;
                try
                {
                    typedValue = mt.Datatype.ParseValue(value, schemas.NameTable, namespaceManager);
                }
                catch (XmlSchemaException)
                {
                    // Value is not of this member type; try the next one.
                }
                if (typedValue != null)
                {
                    si.MemberType = mt;
                    break;
                }
            }
        }
        si.Validity = XmlSchemaValidity.Valid;
        return si;
    }

    // Resolves the fixed or default value of an attribute declaration, following
    // a ref to the global declaration when needed. Fixed takes precedence.
    private string GetDefaultValue(XmlSchemaAttribute sa)
    {
        XmlQualifiedName name = sa.RefName;
        if (!name.IsEmpty)
        {
            sa = schemas.GlobalAttributes[name] as XmlSchemaAttribute;
            if (sa == null) return null;
        }
        string s = sa.FixedValue;
        if (s != null) return s;
        return sa.DefaultValue;
    }

    // Element counterpart of the above: fixed value wins over default value.
    private string GetDefaultValue(XmlSchemaElement se)
    {
        XmlQualifiedName name = se.RefName;
        if (!name.IsEmpty)
        {
            se = schemas.GlobalElements[name] as XmlSchemaElement;
            if (se == null) return null;
        }
        string s = se.FixedValue;
        if (s != null) return s;
        return se.DefaultValue;
    }

    // Replaces o's XmlSchemaInfo annotation with schemaInfo, interning instances
    // through the schemaInfos cache so equal results share a single object.
    private void ReplaceSchemaInfo(XObject o, XmlSchemaInfo schemaInfo)
    {
        if (schemaInfos == null)
        {
            schemaInfos = new Dictionary<XmlSchemaInfo, XmlSchemaInfo>(new XmlSchemaInfoEqualityComparer());
        }
        XmlSchemaInfo si = o.Annotation<XmlSchemaInfo>();
        if (si != null)
        {
            // Remember the outgoing annotation so future equal infos reuse it.
            if (!schemaInfos.ContainsKey(si))
            {
                schemaInfos.Add(si, si);
            }
            o.RemoveAnnotations<XmlSchemaInfo>();
        }
        if (!schemaInfos.TryGetValue(schemaInfo, out si))
        {
            si = schemaInfo;
            schemaInfos.Add(si, si);
        }
        o.AddAnnotation(si);
    }

    // Registers every namespace declaration in scope at e (walking up through its
    // ancestors). Inner declarations win: a prefix already present is not replaced.
    private void PushAncestorsAndSelf(XElement e)
    {
        while (e != null)
        {
            // Attribute ring: lastAttr.next is presumed to be the first attribute.
            XAttribute a = e.lastAttr;
            if (a != null)
            {
                do
                {
                    a = a.next;
                    if (a.IsNamespaceDeclaration)
                    {
                        string localName = a.Name.LocalName;
                        if (localName == "xmlns")
                        {
                            localName = string.Empty; // default namespace declaration
                        }
                        if (!namespaceManager.HasNamespace(localName))
                        {
                            namespaceManager.AddNamespace(localName, a.Value);
                        }
                    }
                } while (a != e.lastAttr);
            }
            e = e.parent as XElement;
        }
    }

    // Opens a namespace scope for e, registers its namespace declarations and
    // captures the raw xsi:type / xsi:nil attribute values if present.
    private void PushElement(XElement e, ref string xsiType, ref string xsiNil)
    {
        namespaceManager.PushScope();
        XAttribute a = e.lastAttr;
        if (a != null)
        {
            do
            {
                a = a.next;
                if (a.IsNamespaceDeclaration)
                {
                    string localName = a.Name.LocalName;
                    if (localName == "xmlns")
                    {
                        localName = string.Empty;
                    }
                    namespaceManager.AddNamespace(localName, a.Value);
                }
                else
                {
                    XName name = a.Name;
                    if (name == xsiTypeName)
                    {
                        xsiType = a.Value;
                    }
                    else if (name == xsiNilName)
                    {
                        xsiNil = a.Value;
                    }
                }
            } while (a != e.lastAttr);
        }
    }

    // Points the validator's line info provider at source (null when source
    // carries no line info) and returns the previous provider for restoration.
    private IXmlLineInfo SaveLineInfo(XObject source)
    {
        IXmlLineInfo previousLineInfo = validator.LineInfoProvider;
        validator.LineInfoProvider = source as IXmlLineInfo;
        return previousLineInfo;
    }

    private void RestoreLineInfo(IXmlLineInfo originalLineInfo)
    {
        validator.LineInfoProvider = originalLineInfo;
    }

    // Validates a single attribute, optionally recording its PSVI annotation.
    private void ValidateAttribute(XAttribute a)
    {
        IXmlLineInfo original = SaveLineInfo(a);
        XmlSchemaInfo si = addSchemaInfo ? new XmlSchemaInfo() : null;
        source = a;
        validator.ValidateAttribute(a.Name.LocalName, a.Name.NamespaceName, a.Value, si);
        if (addSchemaInfo)
        {
            ReplaceSchemaInfo(a, si);
        }
        RestoreLineInfo(original);
    }

    // Validates all non-namespace attributes of e and, when addSchemaInfo is set,
    // materializes schema-defaulted attributes onto the element.
    private void ValidateAttributes(XElement e)
    {
        XAttribute a = e.lastAttr;
        // Note: a may be null here; SaveLineInfo tolerates that (provider becomes null).
        IXmlLineInfo orginal = SaveLineInfo(a);
        if (a != null)
        {
            do
            {
                a = a.next;
                if (!a.IsNamespaceDeclaration)
                {
                    ValidateAttribute(a);
                }
            } while (a != e.lastAttr);
            source = e;
        }
        if (addSchemaInfo)
        {
            if (defaultAttributes == null)
            {
                defaultAttributes = new ArrayList();
            }
            else
            {
                defaultAttributes.Clear();
            }
            // Add attributes the schema defaults but the document omitted.
            validator.GetUnspecifiedDefaultAttributes(defaultAttributes);
            foreach (XmlSchemaAttribute sa in defaultAttributes)
            {
                a = new XAttribute(XNamespace.Get(sa.QualifiedName.Namespace).GetName(sa.QualifiedName.Name), GetDefaultValue(sa));
                ReplaceSchemaInfo(a, GetDefaultAttributeSchemaInfo(sa));
                e.Add(a);
            }
        }
        RestoreLineInfo(orginal);
    }

    // Recursively validates e: element start, attributes, child content, element end.
    private void ValidateElement(XElement e)
    {
        XmlSchemaInfo si = addSchemaInfo ? new XmlSchemaInfo() : null;
        string xsiType = null;
        string xsiNil = null;
        PushElement(e, ref xsiType, ref xsiNil);
        IXmlLineInfo original = SaveLineInfo(e);
        source = e;
        validator.ValidateElement(e.Name.LocalName, e.Name.NamespaceName, si, xsiType, xsiNil, null, null);
        ValidateAttributes(e);
        validator.ValidateEndOfAttributes(si);
        ValidateNodes(e);
        validator.ValidateEndElement(si);
        if (addSchemaInfo)
        {
            // Apply the schema's default element value when the element was valid and empty.
            if (si.Validity == XmlSchemaValidity.Valid && si.IsDefault)
            {
                e.Value = GetDefaultValue(si.SchemaElement);
            }
            ReplaceSchemaInfo(e, si);
        }
        RestoreLineInfo(original);
        namespaceManager.PopScope();
    }

    // Validates e's children: elements recurse, text nodes feed ValidateText.
    // A simple string content (no child nodes) is validated directly.
    private void ValidateNodes(XElement e)
    {
        XNode n = e.content as XNode;
        IXmlLineInfo orginal = SaveLineInfo(n);
        if (n != null)
        {
            // Child ring: content points at the last node; content.next is the first.
            do
            {
                n = n.next;
                XElement c = n as XElement;
                if (c != null)
                {
                    ValidateElement(c);
                }
                else
                {
                    XText t = n as XText;
                    if (t != null)
                    {
                        string s = t.Value;
                        if (s.Length > 0)
                        {
                            validator.LineInfoProvider = t as IXmlLineInfo;
                            validator.ValidateText(s);
                        }
                    }
                }
            } while (n != e.content);
            source = e;
        }
        else
        {
            string s = e.content as string;
            if (s != null && s.Length > 0)
            {
                validator.ValidateText(s);
            }
        }
        RestoreLineInfo(orginal);
    }

    // Forwards validator events to the user handler (with the current tree node
    // as sender); without a handler, errors are surfaced as exceptions.
    private void ValidationCallback(object sender, ValidationEventArgs e)
    {
        if (validationEventHandler != null)
        {
            validationEventHandler(source, e);
        }
        else if (e.Severity == XmlSeverityType.Error)
        {
            throw e.Exception;
        }
    }
}
// Structural comparer for XmlSchemaInfo: two infos are equal when all PSVI
// fields match (schema objects compared by reference). Used to intern
// annotation instances so equal results share one object.
internal class XmlSchemaInfoEqualityComparer : IEqualityComparer<XmlSchemaInfo>
{
    public bool Equals(XmlSchemaInfo si1, XmlSchemaInfo si2)
    {
        // Same instance (or both null) is trivially equal.
        if ((object)si1 == (object)si2) return true;
        if (si1 == null || si2 == null) return false;
        if (si1.ContentType != si2.ContentType) return false;
        if (si1.IsDefault != si2.IsDefault) return false;
        if (si1.IsNil != si2.IsNil) return false;
        // Schema components are compared by identity, not structure.
        if (!object.ReferenceEquals(si1.MemberType, si2.MemberType)) return false;
        if (!object.ReferenceEquals(si1.SchemaAttribute, si2.SchemaAttribute)) return false;
        if (!object.ReferenceEquals(si1.SchemaElement, si2.SchemaElement)) return false;
        if (!object.ReferenceEquals(si1.SchemaType, si2.SchemaType)) return false;
        return si1.Validity == si2.Validity;
    }

    public int GetHashCode(XmlSchemaInfo si)
    {
        if (si == null) return 0;
        int hash = (int)si.ContentType;
        // The two flag bits fold into the same bit position, mirroring the
        // field-by-field XOR accumulation.
        hash ^= (si.IsDefault ? 1 : 0) ^ (si.IsNil ? 1 : 0);
        XmlSchemaSimpleType member = si.MemberType;
        if (member != null)
        {
            hash ^= member.GetHashCode();
        }
        XmlSchemaAttribute attribute = si.SchemaAttribute;
        if (attribute != null)
        {
            hash ^= attribute.GetHashCode();
        }
        XmlSchemaElement element = si.SchemaElement;
        if (element != null)
        {
            hash ^= element.GetHashCode();
        }
        XmlSchemaType type = si.SchemaType;
        if (type != null)
        {
            hash ^= type.GetHashCode();
        }
        hash ^= (int)si.Validity;
        return hash;
    }
}
/// <summary>
/// Extension methods for schema validation of LINQ to XML trees.
/// </summary>
public static class Extensions
{
    /// <summary>
    /// Returns the <see cref="IXmlSchemaInfo"/> annotation recorded on this
    /// <see cref="XElement"/> by a prior validation, or null when none exists.
    /// </summary>
    /// <param name="source">Extension point</param>
    [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
    public static IXmlSchemaInfo GetSchemaInfo(this XElement source)
    {
        if (source == null) throw new ArgumentNullException(nameof(source));
        IXmlSchemaInfo schemaInfo = source.Annotation<IXmlSchemaInfo>();
        return schemaInfo;
    }

    /// <summary>
    /// Returns the <see cref="IXmlSchemaInfo"/> annotation recorded on this
    /// <see cref="XAttribute"/> by a prior validation, or null when none exists.
    /// </summary>
    /// <param name="source">Extension point</param>
    [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
    public static IXmlSchemaInfo GetSchemaInfo(this XAttribute source)
    {
        if (source == null) throw new ArgumentNullException(nameof(source));
        IXmlSchemaInfo schemaInfo = source.Annotation<IXmlSchemaInfo>();
        return schemaInfo;
    }

    /// <summary>
    /// Validates an <see cref="XDocument"/> against <paramref name="schemas"/>
    /// without recording PSVI on the tree.
    /// </summary>
    /// <param name="source">Extension point</param>
    /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
    /// <param name="validationEventHandler">Receives schema validation warnings and
    /// errors encountered during validation; when null, errors throw.</param>
    public static void Validate(this XDocument source, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler)
    {
        source.Validate(schemas, validationEventHandler, addSchemaInfo: false);
    }

    /// <summary>
    /// Validates an <see cref="XDocument"/> against <paramref name="schemas"/>.
    /// </summary>
    /// <param name="source">Extension point</param>
    /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
    /// <param name="validationEventHandler">Receives schema validation warnings and
    /// errors encountered during validation; when null, errors throw.</param>
    /// <param name="addSchemaInfo">When true, the document subtree is augmented with
    /// PSVI (<see cref="IXmlSchemaInfo"/> annotations, default attributes and
    /// default element values).</param>
    [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
    public static void Validate(this XDocument source, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler, bool addSchemaInfo)
    {
        if (source == null) throw new ArgumentNullException(nameof(source));
        if (schemas == null) throw new ArgumentNullException(nameof(schemas));
        XNodeValidator nodeValidator = new XNodeValidator(schemas, validationEventHandler);
        nodeValidator.Validate(source, null, addSchemaInfo);
    }

    /// <summary>
    /// Validates an <see cref="XElement"/> in a partial validation context
    /// without recording PSVI on the tree.
    /// </summary>
    /// <param name="source">Extension point</param>
    /// <param name="partialValidationType">An <see cref="XmlSchemaElement"/> or
    /// <see cref="XmlSchemaType"/> that initializes the partial validation context</param>
    /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
    /// <param name="validationEventHandler">Receives schema validation warnings and
    /// errors encountered during validation; when null, errors throw.</param>
    public static void Validate(this XElement source, XmlSchemaObject partialValidationType, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler)
    {
        source.Validate(partialValidationType, schemas, validationEventHandler, addSchemaInfo: false);
    }

    /// <summary>
    /// Validates an <see cref="XElement"/> in a partial validation context.
    /// </summary>
    /// <param name="source">Extension point</param>
    /// <param name="partialValidationType">An <see cref="XmlSchemaElement"/> or
    /// <see cref="XmlSchemaType"/> that initializes the partial validation context</param>
    /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
    /// <param name="validationEventHandler">Receives schema validation warnings and
    /// errors encountered during validation; when null, errors throw.</param>
    /// <param name="addSchemaInfo">When true, the element subtree is augmented with
    /// PSVI (<see cref="IXmlSchemaInfo"/> annotations, default attributes and
    /// default element values).</param>
    [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
    public static void Validate(this XElement source, XmlSchemaObject partialValidationType, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler, bool addSchemaInfo)
    {
        if (source == null) throw new ArgumentNullException(nameof(source));
        if (partialValidationType == null) throw new ArgumentNullException(nameof(partialValidationType));
        if (schemas == null) throw new ArgumentNullException(nameof(schemas));
        XNodeValidator nodeValidator = new XNodeValidator(schemas, validationEventHandler);
        nodeValidator.Validate(source, partialValidationType, addSchemaInfo);
    }

    /// <summary>
    /// Validates an <see cref="XAttribute"/> in a partial validation context
    /// without recording PSVI on the attribute.
    /// </summary>
    /// <param name="source">Extension point</param>
    /// <param name="partialValidationType">An <see cref="XmlSchemaAttribute"/> or
    /// <see cref="XmlSchemaType"/> that initializes the partial validation context</param>
    /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
    /// <param name="validationEventHandler">Receives schema validation warnings and
    /// errors encountered during validation; when null, errors throw.</param>
    public static void Validate(this XAttribute source, XmlSchemaObject partialValidationType, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler)
    {
        source.Validate(partialValidationType, schemas, validationEventHandler, addSchemaInfo: false);
    }

    /// <summary>
    /// Validates an <see cref="XAttribute"/> in a partial validation context.
    /// </summary>
    /// <param name="source">Extension point</param>
    /// <param name="partialValidationType">An <see cref="XmlSchemaAttribute"/> or
    /// <see cref="XmlSchemaType"/> that initializes the partial validation context</param>
    /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
    /// <param name="validationEventHandler">Receives schema validation warnings and
    /// errors encountered during validation; when null, errors throw.</param>
    /// <param name="addSchemaInfo">When true, the attribute is augmented with PSVI
    /// in the form of an <see cref="IXmlSchemaInfo"/> annotation.</param>
    [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
    public static void Validate(this XAttribute source, XmlSchemaObject partialValidationType, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler, bool addSchemaInfo)
    {
        if (source == null) throw new ArgumentNullException(nameof(source));
        if (partialValidationType == null) throw new ArgumentNullException(nameof(partialValidationType));
        if (schemas == null) throw new ArgumentNullException(nameof(schemas));
        XNodeValidator nodeValidator = new XNodeValidator(schemas, validationEventHandler);
        nodeValidator.Validate(source, partialValidationType, addSchemaInfo);
    }
}
}
| |
/*
* $Id: IrcConnection.cs 280 2008-07-17 17:00:56Z meebey $
* $URL: svn+ssh://svn.qnetp.net/svn/smartirc/SmartIrc4net/trunk/src/IrcConnection/IrcConnection.cs $
* $Rev: 280 $
* $Author: meebey $
* $Date: 2008-07-17 19:00:56 +0200 (Thu, 17 Jul 2008) $
*
* SmartIrc4net - the IRC library for .NET/C# <http://smartirc4net.sf.net>
*
* Copyright (c) 2003-2005 Mirco Bauer <meebey@meebey.net> <http://www.meebey.net>
*
* Full LGPL License: <http://www.gnu.org/licenses/lgpl.txt>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
using System;
using System.IO;
using System.Text;
using System.Collections;
using System.Threading;
using System.Reflection;
using System.Net.Sockets;
#if NET_2_0
using System.Net.Security;
#endif
namespace Meebey.SmartIrc4net
{
/// <summary>
///
/// </summary>
/// <threadsafety static="true" instance="true" />
public class IrcConnection
{
private string _VersionNumber;
private string _VersionString;
private string[] _AddressList = {"localhost"};
private int _CurrentAddress;
private int _Port;
#if NET_2_0
private bool _UseSsl;
#endif
private StreamReader _Reader;
private StreamWriter _Writer;
private ReadThread _ReadThread;
private WriteThread _WriteThread;
private IdleWorkerThread _IdleWorkerThread;
private IrcTcpClient _TcpClient;
private Hashtable _SendBuffer = Hashtable.Synchronized(new Hashtable());
private int _SendDelay = 200;
private bool _IsRegistered;
private bool _IsConnected;
private bool _IsConnectionError;
private bool _IsDisconnecting;
private int _ConnectTries;
private bool _AutoRetry;
private int _AutoRetryDelay = 30;
private bool _AutoReconnect;
private Encoding _Encoding = Encoding.Default;
private int _SocketReceiveTimeout = 600;
private int _SocketSendTimeout = 600;
private int _IdleWorkerInterval = 60;
private int _PingInterval = 60;
private int _PingTimeout = 300;
private DateTime _LastPingSent;
private DateTime _LastPongReceived;
private TimeSpan _Lag;
/// <event cref="OnReadLine">
/// Raised when a \r\n terminated line is read from the socket
/// </event>
public event ReadLineEventHandler OnReadLine;
/// <event cref="OnWriteLine">
/// Raised when a \r\n terminated line is written to the socket
/// </event>
public event WriteLineEventHandler OnWriteLine;
/// <event cref="OnConnect">
/// Raised before the connect attempt
/// </event>
public event EventHandler OnConnecting;
/// <event cref="OnConnect">
/// Raised on successful connect
/// </event>
public event EventHandler OnConnected;
/// <event cref="OnConnect">
/// Raised before the connection is closed
/// </event>
public event EventHandler OnDisconnecting;
/// <event cref="OnConnect">
/// Raised when the connection is closed
/// </event>
public event EventHandler OnDisconnected;
/// <event cref="OnConnectionError">
/// Raised when the connection got into an error state
/// </event>
public event EventHandler OnConnectionError;
/// <event cref="AutoConnectErrorEventHandler">
/// Raised when the connection got into an error state during auto connect loop
/// </event>
public event AutoConnectErrorEventHandler OnAutoConnectError;
/// <summary>
/// When a connection error is detected this property will return true
/// </summary>
protected bool IsConnectionError {
    get {
        // NOTE(review): locks on `this`, which external callers can also lock
        // on; a private lock object would be safer, but other members (and code
        // outside this view) share this convention — confirm before changing.
        lock (this) {
            return _IsConnectionError;
        }
    }
    set {
        lock (this) {
            _IsConnectionError = value;
        }
    }
}
/// <summary>
/// True while Disconnect() is tearing the connection down; used to suppress
/// the OnConnectionError event for intentional disconnects (see ReadLine()).
/// </summary>
protected bool IsDisconnecting {
    get {
        // Same lock(this) convention as IsConnectionError.
        lock (this) {
            return _IsDisconnecting;
        }
    }
    set {
        lock (this) {
            _IsDisconnecting = value;
        }
    }
}
/// <summary>
/// Gets the current address of the connection
/// </summary>
public string Address {
    get {
        // _CurrentAddress is advanced by _NextAddress() when retrying other servers.
        return _AddressList[_CurrentAddress];
    }
}
/// <summary>
/// Gets the address list of the connection
/// </summary>
public string[] AddressList {
    get {
        // NOTE(review): returns the internal array itself, so callers can mutate
        // it; consider returning a copy if that matters.
        return _AddressList;
    }
}
/// <summary>
/// Gets the used port of the connection
/// </summary>
public int Port {
    get {
        // Set by Connect().
        return _Port;
    }
}
/// <summary>
/// By default nothing is done when the library looses the connection
/// to the server.
/// Default: false
/// </summary>
/// <value>
/// true, if the library should reconnect on lost connections
/// false, if the library should not take care of it
/// </value>
public bool AutoReconnect {
    get {
        return _AutoReconnect;
    }
    set {
#if LOG4NET
        // Log the state change before applying it.
        if (value) {
            Logger.Connection.Info("AutoReconnect enabled");
        } else {
            Logger.Connection.Info("AutoReconnect disabled");
        }
#endif
        _AutoReconnect = value;
    }
}
/// <summary>
/// If the library should retry to connect when the connection fails.
/// Default: false
/// </summary>
/// <value>
/// true, if the library should retry to connect
/// false, if the library should not retry
/// </value>
public bool AutoRetry {
    get {
        return _AutoRetry;
    }
    set {
#if LOG4NET
        // Log the state change before applying it.
        if (value) {
            Logger.Connection.Info("AutoRetry enabled");
        } else {
            Logger.Connection.Info("AutoRetry disabled");
        }
#endif
        _AutoRetry = value;
    }
}
/// <summary>
/// Delay between retry attempts in Connect() in seconds.
/// Default: 30
/// </summary>
public int AutoRetryDelay {
    get {
        return _AutoRetryDelay;
    }
    set {
        // Consumed by Connect() as a Thread.Sleep of value * 1000 ms.
        _AutoRetryDelay = value;
    }
}
/// <summary>
/// To prevent flooding the IRC server, it's required to delay each
/// message, given in milliseconds.
/// Default: 200
/// </summary>
public int SendDelay {
    get {
        return _SendDelay;
    }
    set {
        _SendDelay = value;
    }
}
/// <summary>
/// On successful registration on the IRC network, this is set to true.
/// </summary>
public bool IsRegistered {
    get {
        // Reset to false by Disconnect().
        return _IsRegistered;
    }
}
/// <summary>
/// On successful connect to the IRC server, this is set to true.
/// </summary>
public bool IsConnected {
    get {
        // Set by Connect(), cleared by Disconnect() and on connect failure.
        return _IsConnected;
    }
}
/// <summary>
/// Gets the SmartIrc4net version number
/// </summary>
public string VersionNumber {
    get {
        // Derived from the assembly version in the constructor.
        return _VersionNumber;
    }
}
/// <summary>
/// Gets the full SmartIrc4net version string
/// </summary>
public string VersionString {
    get {
        // "{AssemblyProduct} {VersionNumber}", built in the constructor.
        return _VersionString;
    }
}
/// <summary>
/// Encoding which is used for reading and writing to the socket
/// Default: encoding of the system
/// </summary>
public Encoding Encoding {
    get {
        return _Encoding;
    }
    set {
        // Only takes effect for streams created by a subsequent Connect().
        _Encoding = value;
    }
}
#if NET_2_0
/// <summary>
/// Enables/disables using SSL for the connection
/// Default: false
/// </summary>
public bool UseSsl {
    get {
        return _UseSsl;
    }
    set {
        // Evaluated by Connect() when wrapping the network stream.
        _UseSsl = value;
    }
}
#endif
/// <summary>
/// Timeout in seconds for receiving data from the socket
/// Default: 600
/// </summary>
public int SocketReceiveTimeout {
    get {
        return _SocketReceiveTimeout;
    }
    set {
        // Applied (in milliseconds) to the TcpClient by Connect().
        _SocketReceiveTimeout = value;
    }
}
/// <summary>
/// Timeout in seconds for sending data to the socket
/// Default: 600
/// </summary>
public int SocketSendTimeout {
    get {
        return _SocketSendTimeout;
    }
    set {
        // Applied (in milliseconds) to the TcpClient by Connect().
        _SocketSendTimeout = value;
    }
}
/// <summary>
/// Interval in seconds to run the idle worker
/// Default: 60
/// </summary>
public int IdleWorkerInterval {
    get {
        return _IdleWorkerInterval;
    }
    set {
        _IdleWorkerInterval = value;
    }
}
/// <summary>
/// Interval in seconds to send a PING
/// Default: 60
/// </summary>
public int PingInterval {
    get {
        return _PingInterval;
    }
    set {
        _PingInterval = value;
    }
}
/// <summary>
/// Timeout in seconds for server response to a PING
/// Default: 300
/// </summary>
public int PingTimeout {
    get {
        // Field initializer is 300, not the 600 the old doc comment claimed.
        return _PingTimeout;
    }
    set {
        _PingTimeout = value;
    }
}
/// <summary>
/// Latency between client and the server
/// </summary>
public TimeSpan Lag {
    get {
        if (_LastPingSent > _LastPongReceived) {
            // there is an outstanding ping, thus we don't have a current lag value;
            // report the time elapsed since that ping was sent instead.
            return DateTime.Now - _LastPingSent;
        }
        return _Lag;
    }
}
/// <summary>
/// Initializes the message queues, read and write thread
/// </summary>
public IrcConnection()
{
#if LOG4NET
    Logger.Init();
    Logger.Main.Debug("IrcConnection created");
#endif
    // One synchronized outbound queue per priority level; drained by the write thread.
    _SendBuffer[Priority.High] = Queue.Synchronized(new Queue());
    _SendBuffer[Priority.AboveMedium] = Queue.Synchronized(new Queue());
    _SendBuffer[Priority.Medium] = Queue.Synchronized(new Queue());
    _SendBuffer[Priority.BelowMedium] = Queue.Synchronized(new Queue());
    _SendBuffer[Priority.Low] = Queue.Synchronized(new Queue());

    // setup own callbacks
    OnReadLine += new ReadLineEventHandler(_SimpleParser);
    OnConnectionError += new EventHandler(_OnConnectionError);

    // Worker threads are created here but only started by Connect().
    _ReadThread = new ReadThread(this);
    _WriteThread = new WriteThread(this);
    _IdleWorkerThread = new IdleWorkerThread(this);

    // Derive the version strings from this assembly's metadata.
    Assembly assm = Assembly.GetAssembly(this.GetType());
    AssemblyName assm_name = assm.GetName(false);

    AssemblyProductAttribute pr = (AssemblyProductAttribute)assm.GetCustomAttributes(typeof(AssemblyProductAttribute), false)[0];

    _VersionNumber = assm_name.Version.ToString();
    _VersionString = pr.Product+" "+_VersionNumber;
}
#if LOG4NET
~IrcConnection()
{
Logger.Main.Debug("IrcConnection destroyed");
}
#endif
/// <overloads>this method has 2 overloads</overloads>
/// <summary>
/// Connects to the specified server and port, when the connection fails
/// the next server in the list will be used.
/// </summary>
/// <param name="addresslist">List of servers to connect to</param>
/// <param name="port">Portnumber to connect to</param>
/// <exception cref="CouldNotConnectException">The connection failed</exception>
/// <exception cref="AlreadyConnectedException">If there is already an active connection</exception>
public void Connect(string[] addresslist, int port)
{
    if (_IsConnected) {
        throw new AlreadyConnectedException("Already connected to: " + Address + ":" + Port);
    }
    _ConnectTries++;
#if LOG4NET
    Logger.Connection.Info(String.Format("connecting... (attempt: {0})",
                                         _ConnectTries));
#endif
    _AddressList = (string[])addresslist.Clone();
    _Port = port;

    if (OnConnecting != null) {
        OnConnecting(this, EventArgs.Empty);
    }
    try {
        // NOTE(review): Dns.Resolve is obsolete since .NET 2.0 and only the
        // first resolved IP is used — consider Dns.GetHostEntry; confirm the
        // project's target framework first.
        System.Net.IPAddress ip = System.Net.Dns.Resolve(Address).AddressList[0];

        _TcpClient = new IrcTcpClient();
        _TcpClient.NoDelay = true;
        _TcpClient.Socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.KeepAlive, 1);
        // set timeout, after this the connection will be aborted
        _TcpClient.ReceiveTimeout = _SocketReceiveTimeout * 1000;
        _TcpClient.SendTimeout = _SocketSendTimeout * 1000;

        _TcpClient.Connect(ip, port);

        Stream stream = _TcpClient.GetStream();
#if NET_2_0
        if (_UseSsl) {
            // NOTE(review): this certificate validation callback accepts every
            // certificate — confirm whether that is intentional.
            SslStream sslStream = new SslStream(stream, false, delegate {
                return true;
            });
            sslStream.AuthenticateAsClient(Address);
            stream = sslStream;
        }
#endif
        _Reader = new StreamReader(stream, _Encoding);
        _Writer = new StreamWriter(stream, _Encoding);

        if (_Encoding.GetPreamble().Length > 0) {
            // HACK: we have an encoding that has some kind of preamble
            // like UTF-8 has a BOM, this will confuse the IRCd!
            // Thus we send a \r\n so the IRCd can safely ignore that
            // garbage.
            _Writer.WriteLine();
        }

        // Connection was successful, resetting the connect counter
        _ConnectTries = 0;

        // updating the connection error state, so connecting is possible again
        IsConnectionError = false;
        _IsConnected = true;

        // lets power up our threads
        _ReadThread.Start();
        _WriteThread.Start();
        _IdleWorkerThread.Start();

#if LOG4NET
        Logger.Connection.Info("connected");
#endif
        if (OnConnected != null) {
            OnConnected(this, EventArgs.Empty);
        }
    } catch (Exception e) {
        // Tear down any half-open resources before deciding whether to retry.
        if (_Reader != null) {
            try {
                _Reader.Close();
            } catch (ObjectDisposedException) {
            }
        }
        if (_Writer != null) {
            try {
                _Writer.Close();
            } catch (ObjectDisposedException) {
            }
        }
        if (_TcpClient != null) {
            _TcpClient.Close();
        }
        _IsConnected = false;
        IsConnectionError = true;
#if LOG4NET
        Logger.Connection.Info("connection failed: "+e.Message);
#endif
        // Auto-retry recurses into Connect() with the next address;
        // _ConnectTries caps this at 3 attempts overall.
        if (_AutoRetry &&
            _ConnectTries <= 3) {
            if (OnAutoConnectError != null) {
                OnAutoConnectError(this, new AutoConnectErrorEventArgs(Address, Port, e));
            }
#if LOG4NET
            Logger.Connection.Debug("delaying new connect attempt for "+_AutoRetryDelay+" sec");
#endif
            Thread.Sleep(_AutoRetryDelay * 1000);
            _NextAddress();
            Connect(_AddressList, _Port);
        } else {
            throw new CouldNotConnectException("Could not connect to: "+Address+":"+Port+" "+e.Message, e);
        }
    }
}
/// <summary>
/// Connects to the specified server and port.
/// </summary>
/// <param name="address">Server address to connect to</param>
/// <param name="port">Port number to connect to</param>
public void Connect(string address, int port)
{
    // Single-address convenience wrapper around the list-based overload.
    string[] addresslist = { address };
    Connect(addresslist, port);
}
/// <summary>
/// Reconnects to the server
/// </summary>
/// <exception cref="NotConnectedException">
/// If there was no active connection
/// </exception>
/// <exception cref="CouldNotConnectException">
/// The connection failed
/// </exception>
/// <exception cref="AlreadyConnectedException">
/// If there is already an active connection
/// </exception>
public void Reconnect()
{
#if LOG4NET
    Logger.Connection.Info("reconnecting...");
#endif
    // Full teardown followed by a fresh connect to the same address list/port.
    Disconnect();
    Connect(_AddressList, _Port);
}
/// <summary>
/// Disconnects from the server
/// </summary>
/// <exception cref="NotConnectedException">
/// If there was no active connection
/// </exception>
public void Disconnect()
{
    if (!IsConnected) {
        throw new NotConnectedException("The connection could not be disconnected because there is no active connection");
    }
#if LOG4NET
    Logger.Connection.Info("disconnecting...");
#endif
    if (OnDisconnecting != null) {
        OnDisconnecting(this, EventArgs.Empty);
    }

    // IsDisconnecting suppresses the OnConnectionError event while the
    // worker threads observe the socket going away (see ReadLine()).
    IsDisconnecting = true;

    _ReadThread.Stop();
    _WriteThread.Stop();
    _TcpClient.Close();
    _IsConnected = false;
    _IsRegistered = false;

    IsDisconnecting = false;

    if (OnDisconnected != null) {
        OnDisconnected(this, EventArgs.Empty);
    }

#if LOG4NET
    Logger.Connection.Info("disconnected");
#endif
}
/// <summary>
/// Processes incoming lines: blocking mode loops until the connection drops;
/// non-blocking mode drains whatever is currently queued and returns.
/// </summary>
/// <param name="blocking">true to loop for the lifetime of the connection</param>
public void Listen(bool blocking)
{
    if (blocking) {
        while (IsConnected) {
            ReadLine(true);
        }
    } else {
        while (ReadLine(false).Length > 0) {
            // loop as long as we receive messages
        }
    }
}
/// <summary>
/// Blocking listen loop; equivalent to Listen(true).
/// </summary>
public void Listen()
{
    Listen(true);
}
/// <summary>
/// Processes a single incoming line (and fires OnReadLine for it).
/// </summary>
/// <param name="blocking">true to wait until a line is available</param>
public void ListenOnce(bool blocking)
{
    ReadLine(blocking);
}
/// <summary>
/// Processes a single incoming line in blocking mode;
/// equivalent to ListenOnce(true).
/// </summary>
public void ListenOnce()
{
    ListenOnce(true);
}
/// <summary>
/// Takes one line from the read queue, fires OnReadLine for it, and reports
/// pending connection errors via OnConnectionError.
/// </summary>
/// <param name="blocking">true to poll until a line arrives or the connection dies</param>
/// <returns>the received line, or an empty string if none was available</returns>
public string ReadLine(bool blocking)
{
    string data = "";
    if (blocking) {
        // block till the queue has data, but bail out on connection error
        while (IsConnected &&
               !IsConnectionError &&
               _ReadThread.Queue.Count == 0) {
            Thread.Sleep(10);
        }
    }
    if (IsConnected &&
        _ReadThread.Queue.Count > 0) {
        data = (string)(_ReadThread.Queue.Dequeue());
    }
    if (data != null && data.Length > 0) {
#if LOG4NET
        Logger.Queue.Debug("read: \""+data+"\"");
#endif
        if (OnReadLine != null) {
            OnReadLine(this, new ReadLineEventArgs(data));
        }
    }
    // report a connection error unless we are shutting down deliberately
    if (IsConnectionError &&
        !IsDisconnecting &&
        OnConnectionError != null) {
        OnConnectionError(this, EventArgs.Empty);
    }
    return data;
}
/// <summary>
/// Sends a raw line. Critical lines bypass the send buffers and go out
/// immediately; everything else is queued by priority and sent later by the
/// write thread.
/// </summary>
/// <param name="data">raw line to send</param>
/// <param name="priority">send priority of the line</param>
public void WriteLine(string data, Priority priority)
{
    if (priority != Priority.Critical) {
        // buffered send: the write thread drains these queues by priority
        ((Queue)_SendBuffer[priority]).Enqueue(data);
        return;
    }
    if (!IsConnected) {
        throw new NotConnectedException();
    }
    _WriteLine(data);
}
/// <summary>
/// Sends a raw line with medium priority.
/// </summary>
/// <param name="data">raw line to send</param>
public void WriteLine(string data)
{
    WriteLine(data, Priority.Medium);
}
// Writes one raw line (CR-LF terminated) directly to the stream and raises
// OnWriteLine. Returns false when not connected or when the stream failed,
// in which case IsConnectionError is set so the caller can requeue the line.
private bool _WriteLine(string data)
{
    if (!IsConnected) {
        return false;
    }
    try {
        _Writer.Write(data + "\r\n");
        _Writer.Flush();
    } catch (IOException) {
#if LOG4NET
        Logger.Socket.Warn("sending data failed, connection lost");
#endif
        IsConnectionError = true;
        return false;
    } catch (ObjectDisposedException) {
#if LOG4NET
        Logger.Socket.Warn("sending data failed (stream error), connection lost");
#endif
        IsConnectionError = true;
        return false;
    }
#if LOG4NET
    Logger.Socket.Debug("sent: \""+data+"\"");
#endif
    if (OnWriteLine != null) {
        OnWriteLine(this, new WriteLineEventArgs(data));
    }
    return true;
}
// Advances to the next server in the address list, wrapping back to the
// first entry once the end of the list is reached.
private void _NextAddress()
{
    int next = _CurrentAddress + 1;
    if (next >= _AddressList.Length) {
        next = 0;
    }
    _CurrentAddress = next;
#if LOG4NET
    Logger.Connection.Info("set server to: "+Address);
#endif
}
/// <summary>
/// Minimal built-in parser: watches the raw stream for the numeric welcome
/// reply (to flag successful registration) and for PONG replies (to update
/// the lag measurement).
/// </summary>
/// <param name="sender">event source (unused)</param>
/// <param name="args">carries the raw line received from the server</param>
private void _SimpleParser(object sender, ReadLineEventArgs args)
{
    string rawline = args.Line;
    // guard against empty lines; indexing rawline[0] below would throw
    if (rawline == null || rawline.Length == 0) {
        return;
    }
    string[] rawlineex = rawline.Split(new char[] {' '});
    string messagecode = "";
    if (rawline[0] == ':') {
        // a prefix without a following command token is not a valid message
        if (rawlineex.Length < 2) {
            return;
        }
        messagecode = rawlineex[1];
        ReplyCode replycode = ReplyCode.Null;
        try {
            replycode = (ReplyCode)int.Parse(messagecode);
        } catch (FormatException) {
            // not a numeric reply; handled by the command-name switch below
        } catch (OverflowException) {
            // over-long digit strings are not valid reply codes either
        }
        if (replycode != ReplyCode.Null) {
            switch (replycode) {
                case ReplyCode.Welcome:
                    // the server accepted our registration
                    _IsRegistered = true;
#if LOG4NET
                    Logger.Connection.Info("logged in");
#endif
                    break;
            }
        } else {
            switch (rawlineex[1]) {
                case "PONG":
                    DateTime now = DateTime.Now;
                    _LastPongReceived = now;
                    _Lag = now - _LastPingSent;
#if LOG4NET
                    Logger.Connection.Debug("PONG received, took: " + _Lag.TotalMilliseconds + " ms");
#endif
                    break;
            }
        }
    } else {
        messagecode = rawlineex[0];
        switch (messagecode) {
            case "ERROR":
                // FIXME: handle server errors differently than connection errors!
                //IsConnectionError = true;
                break;
        }
    }
}
// Handles a detected connection error: either try to re-establish the
// connection (AutoReconnect) or tear it down cleanly. Recovery failures are
// swallowed, since there is nobody left to handle them here.
private void _OnConnectionError(object sender, EventArgs e)
{
    try {
        if (!AutoReconnect) {
            // no auto-reconnect requested: just make sure we clean up
            Disconnect();
        } else {
            // lets try to recover the connection
            Reconnect();
        }
    } catch (ConnectionException) {
        // recovery failed; ignore, the error was already signalled
    }
}
/// <summary>
/// Background thread that reads raw lines from the server stream and puts
/// them into a synchronized queue, where ReadLine() picks them up.
/// </summary>
private class ReadThread
{
#if LOG4NET
    private static readonly log4net.ILog _Logger = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
#endif
    private IrcConnection _Connection;
    private Thread _Thread;
    // synchronized wrapper: produced by this thread, consumed by ReadLine()
    private Queue _Queue = Queue.Synchronized(new Queue());
    /// <summary>
    /// Lines received from the server, in arrival order.
    /// </summary>
    public Queue Queue {
        get {
            return _Queue;
        }
    }
    /// <summary>
    /// Creates a read thread bound to the given connection.
    /// </summary>
    /// <param name="connection">connection whose reader stream is consumed</param>
    public ReadThread(IrcConnection connection)
    {
        _Connection = connection;
    }
    /// <summary>
    /// Starts the worker as a background thread so it cannot keep the
    /// process alive on its own.
    /// </summary>
    public void Start()
    {
        _Thread = new Thread(new ThreadStart(_Worker));
        _Thread.Name = "ReadThread ("+_Connection.Address+":"+_Connection.Port+")";
        _Thread.IsBackground = true;
        _Thread.Start();
    }
    /// <summary>
    /// Aborts the worker thread, waits for it to finish and closes the
    /// reader stream.
    /// </summary>
    public void Stop()
    {
#if LOG4NET
        _Logger.Debug("Stop()");
#endif
#if LOG4NET
        _Logger.Debug("Stop(): aborting thread...");
#endif
        _Thread.Abort();
        // make sure we close the stream after the thread is gone, else
        // the thread will think the connection is broken!
#if LOG4NET
        _Logger.Debug("Stop(): joining thread...");
#endif
        _Thread.Join();
#if LOG4NET
        _Logger.Debug("Stop(): closing reader...");
#endif
        try {
            _Connection._Reader.Close();
        } catch (ObjectDisposedException) {
            // reader was already closed elsewhere; nothing left to do
        }
    }
    // Worker loop: blocks on the network reader and enqueues every received
    // line until the stream ends or the thread is aborted by Stop().
    private void _Worker()
    {
#if LOG4NET
        Logger.Socket.Debug("ReadThread started");
#endif
        try {
            string data = "";
            try {
                while (_Connection.IsConnected &&
                       ((data = _Connection._Reader.ReadLine()) != null)) {
                    _Queue.Enqueue(data);
#if LOG4NET
                    Logger.Socket.Debug("received: \""+data+"\"");
#endif
                }
            } catch (IOException e) {
#if LOG4NET
                Logger.Socket.Warn("IOException: "+e.Message);
#endif
            } finally {
#if LOG4NET
                Logger.Socket.Warn("connection lost");
#endif
                // only flag this as connection error if we are not
                // cleanly disconnecting
                if (!_Connection.IsDisconnecting) {
                    _Connection.IsConnectionError = true;
                }
            }
        } catch (ThreadAbortException) {
            Thread.ResetAbort();
#if LOG4NET
            Logger.Socket.Debug("ReadThread aborted");
#endif
        } catch (Exception ex) {
#if LOG4NET
            Logger.Socket.Error(ex);
#endif
        }
    }
}
/// <summary>
/// Background thread that drains the per-priority send buffers and writes
/// the queued lines to the server, throttled by the connection's send delay.
/// </summary>
private class WriteThread
{
    private IrcConnection _Connection;
    private Thread _Thread;
    // snapshot of the per-priority queue lengths, taken once per scheduler pass
    private int _HighCount;
    private int _AboveMediumCount;
    private int _MediumCount;
    private int _BelowMediumCount;
    private int _LowCount;
    // how many lines of each priority were sent in the current burst
    private int _AboveMediumSentCount;
    private int _MediumSentCount;
    private int _BelowMediumSentCount;
    // per-burst send quota for each priority level
    private int _AboveMediumThresholdCount = 4;
    private int _MediumThresholdCount = 2;
    private int _BelowMediumThresholdCount = 1;
    private int _BurstCount;
    /// <summary>
    /// Creates a write thread bound to the given connection.
    /// </summary>
    /// <param name="connection">connection whose send buffers are drained</param>
    public WriteThread(IrcConnection connection)
    {
        _Connection = connection;
    }
    /// <summary>
    /// Starts the worker as a background thread.
    /// </summary>
    public void Start()
    {
        _Thread = new Thread(new ThreadStart(_Worker));
        _Thread.Name = "WriteThread ("+_Connection.Address+":"+_Connection.Port+")";
        _Thread.IsBackground = true;
        _Thread.Start();
    }
    /// <summary>
    /// Aborts the worker thread, waits for it to finish and closes the
    /// writer stream.
    /// </summary>
    public void Stop()
    {
#if LOG4NET
        Logger.Connection.Debug("Stopping WriteThread...");
#endif
        _Thread.Abort();
        // make sure we close the stream after the thread is gone, else
        // the thread will think the connection is broken!
        _Thread.Join();
        try {
            _Connection._Writer.Close();
        } catch (ObjectDisposedException) {
            // writer was already closed elsewhere; nothing left to do
        }
    }
    // Worker loop: runs one scheduler pass per send-delay interval while the
    // connection is up; flags a connection error on unexpected stream failure.
    private void _Worker()
    {
#if LOG4NET
        Logger.Socket.Debug("WriteThread started");
#endif
        try {
            try {
                while (_Connection.IsConnected) {
                    _CheckBuffer();
                    Thread.Sleep(_Connection._SendDelay);
                }
            } catch (IOException e) {
#if LOG4NET
                Logger.Socket.Warn("IOException: " + e.Message);
#endif
            } finally {
#if LOG4NET
                Logger.Socket.Warn("connection lost");
#endif
                // only flag this as connection error if we are not
                // cleanly disconnecting
                if (!_Connection.IsDisconnecting) {
                    _Connection.IsConnectionError = true;
                }
            }
        } catch (ThreadAbortException) {
            Thread.ResetAbort();
#if LOG4NET
            Logger.Socket.Debug("WriteThread aborted");
#endif
        } catch (Exception ex) {
#if LOG4NET
            Logger.Socket.Error(ex);
#endif
        }
    }
#region WARNING: complex scheduler, don't even think about changing it!
    // WARNING: complex scheduler, don't even think about changing it!
    // One scheduler pass: higher priorities go first, each non-critical
    // priority is limited to its threshold per burst, and the burst counters
    // reset once every buffer had its turn within the same pass.
    private void _CheckBuffer()
    {
        // only send data if we are successfully registered on the IRC network
        if (!_Connection._IsRegistered) {
            return;
        }
        _HighCount = ((Queue)_Connection._SendBuffer[Priority.High]).Count;
        _AboveMediumCount = ((Queue)_Connection._SendBuffer[Priority.AboveMedium]).Count;
        _MediumCount = ((Queue)_Connection._SendBuffer[Priority.Medium]).Count;
        _BelowMediumCount = ((Queue)_Connection._SendBuffer[Priority.BelowMedium]).Count;
        _LowCount = ((Queue)_Connection._SendBuffer[Priority.Low]).Count;
        // each _CheckXxxBuffer returns true when its priority level is done
        // for this pass; only a full sweep resets the burst accounting
        if (_CheckHighBuffer() &&
            _CheckAboveMediumBuffer() &&
            _CheckMediumBuffer() &&
            _CheckBelowMediumBuffer() &&
            _CheckLowBuffer()) {
            // everything is sent, resetting all counters
            _AboveMediumSentCount = 0;
            _MediumSentCount = 0;
            _BelowMediumSentCount = 0;
            _BurstCount = 0;
        }
        if (_BurstCount < 3) {
            _BurstCount++;
            //_CheckBuffer();
        }
    }
    // Sends one high-priority line; high priority has no burst quota, so it
    // returns false (more work) whenever lines remain queued.
    private bool _CheckHighBuffer()
    {
        if (_HighCount > 0) {
            string data = (string)((Queue)_Connection._SendBuffer[Priority.High]).Dequeue();
            if (_Connection._WriteLine(data) == false) {
#if LOG4NET
                Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
                // failed sends go back to the queue so the line is not lost
                ((Queue)_Connection._SendBuffer[Priority.High]).Enqueue(data);
            }
            if (_HighCount > 1) {
                // there is more data to send
                return false;
            }
        }
        return true;
    }
    // Sends one above-medium line, bounded by its per-burst threshold.
    private bool _CheckAboveMediumBuffer()
    {
        if ((_AboveMediumCount > 0) &&
            (_AboveMediumSentCount < _AboveMediumThresholdCount)) {
            string data = (string)((Queue)_Connection._SendBuffer[Priority.AboveMedium]).Dequeue();
            if (_Connection._WriteLine(data) == false) {
#if LOG4NET
                Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
                ((Queue)_Connection._SendBuffer[Priority.AboveMedium]).Enqueue(data);
            }
            _AboveMediumSentCount++;
            if (_AboveMediumSentCount < _AboveMediumThresholdCount) {
                return false;
            }
        }
        return true;
    }
    // Sends one medium line, bounded by its per-burst threshold.
    private bool _CheckMediumBuffer()
    {
        if ((_MediumCount > 0) &&
            (_MediumSentCount < _MediumThresholdCount)) {
            string data = (string)((Queue)_Connection._SendBuffer[Priority.Medium]).Dequeue();
            if (_Connection._WriteLine(data) == false) {
#if LOG4NET
                Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
                ((Queue)_Connection._SendBuffer[Priority.Medium]).Enqueue(data);
            }
            _MediumSentCount++;
            if (_MediumSentCount < _MediumThresholdCount) {
                return false;
            }
        }
        return true;
    }
    // Sends one below-medium line, bounded by its per-burst threshold.
    private bool _CheckBelowMediumBuffer()
    {
        if ((_BelowMediumCount > 0) &&
            (_BelowMediumSentCount < _BelowMediumThresholdCount)) {
            string data = (string)((Queue)_Connection._SendBuffer[Priority.BelowMedium]).Dequeue();
            if (_Connection._WriteLine(data) == false) {
#if LOG4NET
                Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
                ((Queue)_Connection._SendBuffer[Priority.BelowMedium]).Enqueue(data);
            }
            _BelowMediumSentCount++;
            if (_BelowMediumSentCount < _BelowMediumThresholdCount) {
                return false;
            }
        }
        return true;
    }
    // Sends one low-priority line, but only when every other buffer is empty.
    private bool _CheckLowBuffer()
    {
        if (_LowCount > 0) {
            if ((_HighCount > 0) ||
                (_AboveMediumCount > 0) ||
                (_MediumCount > 0) ||
                (_BelowMediumCount > 0)) {
                return true;
            }
            string data = (string)((Queue)_Connection._SendBuffer[Priority.Low]).Dequeue();
            if (_Connection._WriteLine(data) == false) {
#if LOG4NET
                Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
                ((Queue)_Connection._SendBuffer[Priority.Low]).Enqueue(data);
            }
            if (_LowCount > 1) {
                return false;
            }
        }
        return true;
    }
    // END OF WARNING, below this you can read/change again ;)
#endregion
}
/// <summary>
/// Background thread that periodically sends PING requests and declares the
/// connection dead when no PONG arrives within the ping timeout.
/// </summary>
private class IdleWorkerThread
{
    private IrcConnection _Connection;
    private Thread _Thread;
    /// <summary>
    /// Creates an idle worker bound to the given connection.
    /// </summary>
    /// <param name="connection">connection to keep alive</param>
    public IdleWorkerThread(IrcConnection connection)
    {
        _Connection = connection;
    }
    /// <summary>
    /// Resets the ping bookkeeping and starts the worker as a background thread.
    /// </summary>
    public void Start()
    {
        DateTime now = DateTime.Now;
        _Connection._LastPingSent = now;
        _Connection._LastPongReceived = now;
        _Thread = new Thread(new ThreadStart(_Worker));
        _Thread.Name = "IdleWorkerThread ("+_Connection.Address+":"+_Connection.Port+")";
        _Thread.IsBackground = true;
        _Thread.Start();
    }
    /// <summary>
    /// Aborts the worker thread.
    /// </summary>
    public void Stop()
    {
        _Thread.Abort();
    }
    // Worker loop: once per interval, send a new PING if the previous one
    // was answered and the ping interval elapsed; break with a connection
    // error once the timeout expires without a PONG.
    private void _Worker()
    {
#if LOG4NET
        Logger.Socket.Debug("IdleWorkerThread started");
#endif
        try {
            while (_Connection.IsConnected ) {
                Thread.Sleep(_Connection._IdleWorkerInterval);
                // only send active pings if we are registered
                if (!_Connection.IsRegistered) {
                    continue;
                }
                DateTime now = DateTime.Now;
                int last_ping_sent = (int)(now - _Connection._LastPingSent).TotalSeconds;
                int last_pong_rcvd = (int)(now - _Connection._LastPongReceived).TotalSeconds;
                // determines if the response time is ok
                if (last_ping_sent < _Connection._PingTimeout) {
                    if (_Connection._LastPingSent > _Connection._LastPongReceived) {
                        // there is a pending ping request, we have to wait
                        continue;
                    }
                    // determines if it needs to send another ping yet
                    if (last_pong_rcvd > _Connection._PingInterval) {
                        _Connection.WriteLine(Rfc2812.Ping(_Connection.Address), Priority.Critical);
                        _Connection._LastPingSent = now;
                        //_Connection._LastPongReceived = now;
                    } // else connection is fine, just continue
                } else {
                    if (_Connection.IsDisconnecting) {
                        break;
                    }
#if LOG4NET
                    Logger.Socket.Warn("ping timeout, connection lost");
#endif
                    // only flag this as connection error if we are not
                    // cleanly disconnecting
                    _Connection.IsConnectionError = true;
                    break;
                }
            }
        } catch (ThreadAbortException) {
            Thread.ResetAbort();
#if LOG4NET
            Logger.Socket.Debug("IdleWorkerThread aborted");
#endif
        } catch (Exception ex) {
#if LOG4NET
            Logger.Socket.Error(ex);
#endif
        }
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Threading.Tasks;
using Orleans.Concurrency;
using Orleans.Runtime;
using Orleans.Serialization;
namespace Orleans.Streams
{
[Serializable]
[Immutable]
internal class StreamImpl<T> : IStreamIdentity, IAsyncStream<T>, IStreamControl, ISerializable, IOnDeserialized
{
    private readonly StreamId streamId;
    private readonly bool isRewindable;
    [NonSerialized]
    private IInternalStreamProvider provider;
    [NonSerialized]
    private volatile IInternalAsyncBatchObserver<T> producerInterface;
    // volatile for the same reason producerInterface is: the field is read
    // outside initLock in the double-checked lazy init (GetConsumerInterface).
    [NonSerialized]
    private volatile IInternalAsyncObservable<T> consumerInterface;
    [NonSerialized]
    private readonly object initLock; // need the lock since the same code runs in the provider on the client and in the silo.
    [NonSerialized]
    private IRuntimeClient runtimeClient;
    internal StreamId StreamId { get { return streamId; } }
    public bool IsRewindable { get { return isRewindable; } }
    public Guid Guid { get { return streamId.Guid; } }
    public string Namespace { get { return streamId.Namespace; } }
    public string ProviderName { get { return streamId.ProviderName; } }
    // IMPORTANT: This constructor needs to be public for Json deserialization to work.
    public StreamImpl()
    {
        initLock = new object();
    }
    /// <summary>
    /// Creates a fully initialized stream handle; provider/runtimeClient are
    /// not serialized and are re-resolved after deserialization.
    /// </summary>
    internal StreamImpl(StreamId streamId, IInternalStreamProvider provider, bool isRewindable, IRuntimeClient runtimeClient)
    {
        if (null == streamId)
            throw new ArgumentNullException(nameof(streamId));
        if (null == provider)
            throw new ArgumentNullException(nameof(provider));
        if (null == runtimeClient)
            throw new ArgumentNullException(nameof(runtimeClient));
        this.streamId = streamId;
        this.provider = provider;
        producerInterface = null;
        consumerInterface = null;
        initLock = new object();
        this.isRewindable = isRewindable;
        this.runtimeClient = runtimeClient;
    }
    public Task<StreamSubscriptionHandle<T>> SubscribeAsync(IAsyncObserver<T> observer)
    {
        return GetConsumerInterface().SubscribeAsync(observer, null);
    }
    public Task<StreamSubscriptionHandle<T>> SubscribeAsync(IAsyncObserver<T> observer, StreamSequenceToken token,
        StreamFilterPredicate filterFunc = null,
        object filterData = null)
    {
        return GetConsumerInterface().SubscribeAsync(observer, token, filterFunc, filterData);
    }
    /// <summary>
    /// Releases the lazily created producer/consumer extensions.
    /// NOTE(review): the fields are cleared without taking initLock, so a
    /// concurrent GetProducerInterface/GetConsumerInterface could recreate
    /// them — confirm callers serialize Cleanup with stream usage.
    /// </summary>
    public async Task Cleanup(bool cleanupProducers, bool cleanupConsumers)
    {
        // Cleanup producers
        if (cleanupProducers && producerInterface != null)
        {
            await producerInterface.Cleanup();
            producerInterface = null;
        }
        // Cleanup consumers
        if (cleanupConsumers && consumerInterface != null)
        {
            await consumerInterface.Cleanup();
            consumerInterface = null;
        }
    }
    public Task OnNextAsync(T item, StreamSequenceToken token = null)
    {
        return GetProducerInterface().OnNextAsync(item, token);
    }
    public Task OnNextBatchAsync(IEnumerable<T> batch, StreamSequenceToken token = null)
    {
        return GetProducerInterface().OnNextBatchAsync(batch, token);
    }
    public Task OnCompletedAsync()
    {
        return GetProducerInterface().OnCompletedAsync();
    }
    public Task OnErrorAsync(Exception ex)
    {
        return GetProducerInterface().OnErrorAsync(ex);
    }
    internal Task<StreamSubscriptionHandle<T>> ResumeAsync(
        StreamSubscriptionHandle<T> handle,
        IAsyncObserver<T> observer,
        StreamSequenceToken token)
    {
        return GetConsumerInterface().ResumeAsync(handle, observer, token);
    }
    public Task<IList<StreamSubscriptionHandle<T>>> GetAllSubscriptionHandles()
    {
        return GetConsumerInterface().GetAllSubscriptions();
    }
    internal Task UnsubscribeAsync(StreamSubscriptionHandle<T> handle)
    {
        return GetConsumerInterface().UnsubscribeAsync(handle);
    }
    // Lazily resolves the producer extension via double-checked locking; the
    // volatile field makes the unlocked fast-path read safe.
    internal IAsyncBatchObserver<T> GetProducerInterface()
    {
        if (producerInterface != null) return producerInterface;
        lock (initLock)
        {
            if (producerInterface != null)
                return producerInterface;
            if (provider == null)
                provider = GetStreamProvider();
            producerInterface = provider.GetProducerInterface<T>(this);
        }
        return producerInterface;
    }
    // Lazily resolves the consumer extension via double-checked locking.
    internal IInternalAsyncObservable<T> GetConsumerInterface()
    {
        if (consumerInterface == null)
        {
            lock (initLock)
            {
                if (consumerInterface == null)
                {
                    if (provider == null)
                        provider = GetStreamProvider();
                    consumerInterface = provider.GetConsumerInterface<T>(this);
                }
            }
        }
        return consumerInterface;
    }
    // Looks up the provider by name; returns null if the registered provider
    // is not an IInternalStreamProvider (the "as" cast is deliberate).
    private IInternalStreamProvider GetStreamProvider()
    {
        return this.runtimeClient.ServiceProvider.GetRequiredServiceByName<IStreamProvider>(streamId.ProviderName) as IInternalStreamProvider;
    }
    public int CompareTo(IAsyncStream<T> other)
    {
        var o = other as StreamImpl<T>;
        return o == null ? 1 : streamId.CompareTo(o.streamId);
    }
    public virtual bool Equals(IAsyncStream<T> other)
    {
        var o = other as StreamImpl<T>;
        return o != null && streamId.Equals(o.streamId);
    }
    public override bool Equals(object obj)
    {
        var o = obj as StreamImpl<T>;
        return o != null && streamId.Equals(o.streamId);
    }
    public override int GetHashCode()
    {
        return streamId.GetHashCode();
    }
    public override string ToString()
    {
        return streamId.ToString();
    }
    public void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        // Use the AddValue method to specify serialized values.
        info.AddValue("StreamId", streamId, typeof(StreamId));
        info.AddValue("IsRewindable", isRewindable, typeof(bool));
    }
    // The special constructor is used to deserialize values.
    protected StreamImpl(SerializationInfo info, StreamingContext context)
    {
        // Reset the property value using the GetValue method.
        streamId = (StreamId)info.GetValue("StreamId", typeof(StreamId));
        isRewindable = info.GetBoolean("IsRewindable");
        initLock = new object();
        var serializerContext = context.Context as ISerializerContext;
        ((IOnDeserialized)this).OnDeserialized(serializerContext);
    }
    void IOnDeserialized.OnDeserialized(ISerializerContext context)
    {
        // re-acquire the non-serialized runtime client from the serializer context
        this.runtimeClient = context?.AdditionalContext as IRuntimeClient;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using FarseerPhysics;
using FarseerPhysics.Collision;
using FarseerPhysics.Dynamics;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.Graphics;
using FarseerPhysics.Factories;
using PhysicsSimulator = FarseerPhysics.Dynamics.World;
using FarseerPhysics.Common;
using FarseerPhysics.Common.Decomposition;
namespace Pinball
{
public class Foreground : GameObject
{
    // Translation applied to every outline vertex: the outlines below are
    // authored in texture coordinates, and subtracting these offsets moves
    // them into body-local space (presumably half of the 240x320 playfield
    // texture — TODO confirm against the texture asset).
    private const float CenterOffsetX = 120f;
    private const float CenterOffsetY = 160f;
    private static Texture2D foregroundTexture;
    /// <summary>
    /// Builds the static collision body of the playfield from the colored
    /// outline regions and attaches one fixture per convex piece.
    /// </summary>
    /// <param name="physicsSimulator">physics world the body is created in</param>
    /// <param name="position">world position of the body</param>
    public Foreground(PhysicsSimulator physicsSimulator, Vector2 position)
    {
        base.Body = BodyFactory.CreateBody(physicsSimulator, position);//, 240, 320, 1f);
        base.Body.IsStatic = true;
        //base.Body.Position = position;
        List<Vertices> verticesList = new List<Vertices>();
        verticesList.AddRange(CreateRedVertices());
        verticesList.AddRange(CreateBlueVertices());
        verticesList.AddRange(CreateGreenVertices());
        verticesList.AddRange(CreateMagentaVertices());
        verticesList.AddRange(CreateOrangeVertices());
        verticesList.AddRange(CreateYellowVertices());
        foreach (Vertices vertices in verticesList)
        {
            base.Fixtures.AddLast(FixtureFactory.CreatePolygon(vertices, 1, base.Body, Vector2.Zero));
            // playfield geometry lives in Cat8 and only collides with Cat1
            base.Fixtures.Last.Value.CollisionCategories = CollisionCategory.Cat8;
            base.Fixtures.Last.Value.CollidesWith = CollisionCategory.Cat1;
        }
    }
    /// <summary>
    /// Loads the foreground texture shared by all instances.
    /// </summary>
    /// <param name="contentManager">content manager used to load the texture</param>
    public static void LoadContent(ContentManager contentManager)
    {
        foregroundTexture = contentManager.Load<Texture2D>(@"GameTextures\GameForeground");
    }
    /// <summary>
    /// Draws the foreground texture at this object's position.
    /// </summary>
    public void Draw(float elapsedTime, SpriteBatch spriteBatch)
    {
        base.Draw(elapsedTime, spriteBatch, foregroundTexture, Color.White, 0.5f);
    }
    // Shared tail of all Create*Vertices methods: shifts the outline from
    // texture space into body-local space and splits the (possibly concave)
    // polygon into convex pieces, as required by the physics engine.
    private static List<Vertices> TranslateAndPartition(Vertices vertices)
    {
        for (int counter = 0; counter < vertices.Count; counter++)
        {
            vertices[counter] = new Vector2(vertices[counter].X - CenterOffsetX, vertices[counter].Y - CenterOffsetY);
        }
        return EarclipDecomposer.ConvexPartition(vertices);
    }
    // Outline of the "red" region of the playfield (left wall).
    private List<Vertices> CreateRedVertices()
    {
        Vertices vertices = new Vertices();
        vertices.Add(new Vector2(37, 266));
        vertices.Add(new Vector2(34, 265));
        vertices.Add(new Vector2(30, 263));
        vertices.Add(new Vector2(30, 250));
        vertices.Add(new Vector2(30, 235));
        vertices.Add(new Vector2(23, 229));
        vertices.Add(new Vector2(23, 200));
        vertices.Add(new Vector2(23, 175));
        vertices.Add(new Vector2(23, 150));
        vertices.Add(new Vector2(23, 125));
        vertices.Add(new Vector2(23, 100));
        vertices.Add(new Vector2(23, 75));
        vertices.Add(new Vector2(23, 50));
        vertices.Add(new Vector2(23, 32));
        vertices.Add(new Vector2(23, 20));
        vertices.Add(new Vector2(23, 10));
        vertices.Add(new Vector2(10, 10));
        vertices.Add(new Vector2(10, 32));
        vertices.Add(new Vector2(10, 100));
        vertices.Add(new Vector2(10, 200));
        vertices.Add(new Vector2(10, 275));
        vertices.Add(new Vector2(37, 275));
        vertices.Add(new Vector2(37, 266));
        return TranslateAndPartition(vertices);
    }
    // Outline of the "blue" region of the playfield.
    private List<Vertices> CreateBlueVertices()
    {
        Vertices vertices = new Vertices();
        vertices.Add(new Vector2(40, 29));
        vertices.Add(new Vector2(40, 50));
        vertices.Add(new Vector2(40, 75));
        vertices.Add(new Vector2(40, 100));
        vertices.Add(new Vector2(40, 125));
        vertices.Add(new Vector2(40, 150));
        vertices.Add(new Vector2(40, 175));
        vertices.Add(new Vector2(40, 200));
        vertices.Add(new Vector2(40, 227));
        vertices.Add(new Vector2(44, 232));
        vertices.Add(new Vector2(44, 247));
        vertices.Add(new Vector2(44, 262));
        vertices.Add(new Vector2(42, 264));
        vertices.Add(new Vector2(38, 266));
        vertices.Add(new Vector2(38, 275));
        vertices.Add(new Vector2(50, 275));
        vertices.Add(new Vector2(50, 200));
        vertices.Add(new Vector2(50, 100));
        vertices.Add(new Vector2(50, 29));
        vertices.Add(new Vector2(40, 29));
        return TranslateAndPartition(vertices);
    }
    // Outline of the "green" region of the playfield.
    private List<Vertices> CreateGreenVertices()
    {
        Vertices vertices = new Vertices();
        //vertices.Add(new Vector2(106, 113));
        //vertices.Add(new Vector2(107, 108));
        //vertices.Add(new Vector2(108, 103));
        //vertices.Add(new Vector2(110, 94));
        //vertices.Add(new Vector2(113, 84));
        vertices.Add(new Vector2(115, 82));
        vertices.Add(new Vector2(119, 79));
        vertices.Add(new Vector2(121, 78));
        vertices.Add(new Vector2(121, 53));
        vertices.Add(new Vector2(121, 30));
        vertices.Add(new Vector2(124, 23));
        vertices.Add(new Vector2(130, 15));
        vertices.Add(new Vector2(138, 9));
        vertices.Add(new Vector2(146, 7));
        vertices.Add(new Vector2(161, 7));
        vertices.Add(new Vector2(170, 10));
        vertices.Add(new Vector2(178, 15));
        vertices.Add(new Vector2(183, 22));
        vertices.Add(new Vector2(187, 30));
        vertices.Add(new Vector2(187, 50));
        vertices.Add(new Vector2(187, 75));
        vertices.Add(new Vector2(187, 100));
        vertices.Add(new Vector2(187, 118));
        vertices.Add(new Vector2(195, 126));
        vertices.Add(new Vector2(202, 133));
        vertices.Add(new Vector2(207, 140));
        vertices.Add(new Vector2(210, 145));
        vertices.Add(new Vector2(210, 155));
        vertices.Add(new Vector2(207, 160));
        vertices.Add(new Vector2(204, 168));
        vertices.Add(new Vector2(200, 172));
        vertices.Add(new Vector2(195, 174));
        vertices.Add(new Vector2(195, 180));
        vertices.Add(new Vector2(207, 177));
        vertices.Add(new Vector2(214, 170));
        vertices.Add(new Vector2(214, 150));
        vertices.Add(new Vector2(214, 141));
        vertices.Add(new Vector2(207, 134));
        vertices.Add(new Vector2(206, 130));
        vertices.Add(new Vector2(202, 124));
        vertices.Add(new Vector2(192, 116));
        vertices.Add(new Vector2(192, 50));
        vertices.Add(new Vector2(192, 1));
        vertices.Add(new Vector2(105, 1));
        vertices.Add(new Vector2(105, 82));
        //vertices.Add(new Vector2(105, 91));
        //vertices.Add(new Vector2(102, 100));
        //vertices.Add(new Vector2(100, 110));
        //vertices.Add(new Vector2(102, 113));
        //vertices.Add(new Vector2(106, 113));
        //vertices.Add(new Vector2(115, 82));
        return TranslateAndPartition(vertices);
    }
    // Outline of the "magenta" region of the playfield.
    private List<Vertices> CreateMagentaVertices()
    {
        Vertices vertices = new Vertices();
        vertices.Add(new Vector2(194, 174));
        vertices.Add(new Vector2(190, 172));
        vertices.Add(new Vector2(188, 168));
        vertices.Add(new Vector2(189, 163));
        vertices.Add(new Vector2(194, 157));
        vertices.Add(new Vector2(196, 153));
        vertices.Add(new Vector2(198, 150));
        vertices.Add(new Vector2(195, 145));
        vertices.Add(new Vector2(193, 146));
        vertices.Add(new Vector2(184, 136));
        vertices.Add(new Vector2(172, 124));
        vertices.Add(new Vector2(172, 100));
        vertices.Add(new Vector2(173, 75));
        vertices.Add(new Vector2(173, 50));
        vertices.Add(new Vector2(173, 36));
        vertices.Add(new Vector2(168, 27));
        vertices.Add(new Vector2(162, 23));
        vertices.Add(new Vector2(156, 21));
        vertices.Add(new Vector2(149, 21));
        vertices.Add(new Vector2(142, 24));
        vertices.Add(new Vector2(137, 29));
        vertices.Add(new Vector2(134, 35));
        vertices.Add(new Vector2(134, 42));
        vertices.Add(new Vector2(134, 50));
        vertices.Add(new Vector2(134, 60));
        vertices.Add(new Vector2(134, 70));
        vertices.Add(new Vector2(134, 87));
        //vertices.Add(new Vector2(133, 88));
        //vertices.Add(new Vector2(132, 93));
        //vertices.Add(new Vector2(132, 101));
        //vertices.Add(new Vector2(132, 110));
        //vertices.Add(new Vector2(132, 117));
        //vertices.Add(new Vector2(142, 116));
        //vertices.Add(new Vector2(143, 104));
        //vertices.Add(new Vector2(144, 94));
        vertices.Add(new Vector2(145, 87));
        vertices.Add(new Vector2(145, 40));
        vertices.Add(new Vector2(165, 40));
        vertices.Add(new Vector2(165, 100));
        vertices.Add(new Vector2(165, 130));
        vertices.Add(new Vector2(183, 146));
        vertices.Add(new Vector2(175, 159));
        vertices.Add(new Vector2(174, 168));
        vertices.Add(new Vector2(183, 176));
        vertices.Add(new Vector2(194, 180));
        vertices.Add(new Vector2(194, 174));
        return TranslateAndPartition(vertices);
    }
    // Outline of the "orange" region of the playfield.
    private List<Vertices> CreateOrangeVertices()
    {
        Vertices vertices = new Vertices();
        vertices.Add(new Vector2(189, 266));
        vertices.Add(new Vector2(185, 264));
        vertices.Add(new Vector2(183, 261));
        vertices.Add(new Vector2(183, 247));
        vertices.Add(new Vector2(183, 232));
        vertices.Add(new Vector2(200, 214));
        vertices.Add(new Vector2(210, 203));
        vertices.Add(new Vector2(222, 190));
        vertices.Add(new Vector2(222, 175));
        vertices.Add(new Vector2(222, 150));
        vertices.Add(new Vector2(222, 125));
        vertices.Add(new Vector2(222, 104));
        vertices.Add(new Vector2(220, 102));
        vertices.Add(new Vector2(218, 103));
        vertices.Add(new Vector2(214, 107));
        vertices.Add(new Vector2(210, 112));
        vertices.Add(new Vector2(215, 117));
        vertices.Add(new Vector2(215, 185));
        vertices.Add(new Vector2(176, 232));
        vertices.Add(new Vector2(176, 273));
        vertices.Add(new Vector2(189, 273));
        vertices.Add(new Vector2(189, 266));
        return TranslateAndPartition(vertices);
    }
    // Outline of the "yellow" region of the playfield (right wall).
    private List<Vertices> CreateYellowVertices()
    {
        Vertices vertices = new Vertices();
        vertices.Add(new Vector2(194, 105));
        vertices.Add(new Vector2(201, 96));
        vertices.Add(new Vector2(206, 91));
        vertices.Add(new Vector2(217, 86));
        vertices.Add(new Vector2(222, 86));
        vertices.Add(new Vector2(229, 89));
        vertices.Add(new Vector2(234, 94));
        vertices.Add(new Vector2(237, 101));
        vertices.Add(new Vector2(237, 125));
        vertices.Add(new Vector2(237, 150));
        vertices.Add(new Vector2(237, 175));
        vertices.Add(new Vector2(237, 197));
        vertices.Add(new Vector2(227, 208));
        vertices.Add(new Vector2(212, 224));
        vertices.Add(new Vector2(205, 232));
        vertices.Add(new Vector2(197, 241));
        vertices.Add(new Vector2(197, 251));
        vertices.Add(new Vector2(197, 262));
        vertices.Add(new Vector2(194, 265));
        vertices.Add(new Vector2(190, 266));
        vertices.Add(new Vector2(190, 273));
        vertices.Add(new Vector2(215, 273));
        vertices.Add(new Vector2(215, 241));
        vertices.Add(new Vector2(250, 203));
        vertices.Add(new Vector2(250, 150));
        vertices.Add(new Vector2(250, 80));
        vertices.Add(new Vector2(225, 70));
        vertices.Add(new Vector2(215, 70));
        vertices.Add(new Vector2(194, 80));
        vertices.Add(new Vector2(194, 105));
        return TranslateAndPartition(vertices);
    }
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: Android.Sax.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 1717
namespace Android.Sax
{
/// <summary>
/// <para>The root XML element. The entry point for this API. Not safe for concurrent use.</para><para>For example, passing this XML:</para><para><pre>
/// <feed xmlns='>
/// <entry>
/// <id>bob</id>
/// </entry>
/// </feed>
/// </pre></para><para>to this code:</para><para><pre>
/// static final String ATOM_NAMESPACE = "http://www.w3.org/2005/Atom";
///
/// ...
///
/// RootElement root = new RootElement(ATOM_NAMESPACE, "feed");
/// Element entry = root.getChild(ATOM_NAMESPACE, "entry");
/// entry.getChild(ATOM_NAMESPACE, "id").setEndTextElementListener(
/// new EndTextElementListener() {
/// public void end(String body) {
/// System.out.println("Entry ID: " + body);
/// }
/// });
///
/// XMLReader reader = ...;
/// reader.setContentHandler(root.getContentHandler());
/// reader.parse(...);
/// </pre></para><para>would output:</para><para><pre>
/// Entry ID: bob
/// </pre> </para>
/// </summary>
/// <java-name>
/// android/sax/RootElement
/// </java-name>
[Dot42.DexImport("android/sax/RootElement", AccessFlags = 33)]
public partial class RootElement : global::Android.Sax.Element
/* scope: __dot42__ */
{
    /// <summary>
    /// <para>Constructs a new root element with the given name.</para><para></para>
    /// </summary>
    [Dot42.DexImport("<init>", "(Ljava/lang/String;Ljava/lang/String;)V", AccessFlags = 1)]
    public RootElement(string uri, string localName) /* MethodBuilder.Create */
    {
        // generated binding stub; the actual constructor is the imported
        // android/sax/RootElement one (see the DexImport attribute)
    }
    /// <summary>
    /// <para>Constructs a new root element with the given name. Uses an empty string as the namespace.</para><para></para>
    /// </summary>
    [Dot42.DexImport("<init>", "(Ljava/lang/String;)V", AccessFlags = 1)]
    public RootElement(string localName) /* MethodBuilder.Create */
    {
        // generated binding stub (see the DexImport attribute above)
    }
    /// <summary>
    /// <para>Gets the SAX <c> ContentHandler </c> . Pass this to your SAX parser. </para>
    /// </summary>
    /// <java-name>
    /// getContentHandler
    /// </java-name>
    [Dot42.DexImport("getContentHandler", "()Lorg/xml/sax/ContentHandler;", AccessFlags = 1)]
    public virtual global::Org.Xml.Sax.IContentHandler GetContentHandler() /* MethodBuilder.Create */
    {
        // stub body: returns default; the imported Android method is used at runtime
        return default(global::Org.Xml.Sax.IContentHandler);
    }
    [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
    internal RootElement() /* TypeBuilder.AddDefaultConstructor */
    {
        // generated default constructor; hidden from IntelliSense
    }
    /// <summary>
    /// <para>Gets the SAX <c> ContentHandler </c> . Pass this to your SAX parser. </para>
    /// </summary>
    /// <java-name>
    /// getContentHandler
    /// </java-name>
    public global::Org.Xml.Sax.IContentHandler ContentHandler
    {
        [Dot42.DexImport("getContentHandler", "()Lorg/xml/sax/ContentHandler;", AccessFlags = 1)]
        get{ return GetContentHandler(); }
    }
}
/// <summary>
/// <para>Listens for the beginning and ending of text elements. </para>
/// </summary>
/// <java-name>
/// android/sax/TextElementListener
/// </java-name>
// Marker interface: combines the start-element and end-text callbacks and
// declares no members of its own.
[Dot42.DexImport("android/sax/TextElementListener", AccessFlags = 1537)]
public partial interface ITextElementListener : global::Android.Sax.IStartElementListener, global::Android.Sax.IEndTextElementListener
/* scope: __dot42__ */
{
}
/// <summary>
/// <para>Listens for the end of elements. </para>
/// </summary>
/// <java-name>
/// android/sax/EndElementListener
/// </java-name>
// NOTE(review): AccessFlags 1537 appears to be Dex ACC_PUBLIC|ACC_INTERFACE|ACC_ABSTRACT
// (0x1|0x200|0x400 = 0x601) — inferred from the Dex format spec; confirm.
[Dot42.DexImport("android/sax/EndElementListener", AccessFlags = 1537)]
public partial interface IEndElementListener
/* scope: __dot42__ */
{
/// <summary>
/// <para>Invoked at the end of an element. </para>
/// </summary>
/// <java-name>
/// end
/// </java-name>
[Dot42.DexImport("end", "()V", AccessFlags = 1025)]
void End() /* MethodBuilder.Create */ ;
}
/// <summary>
/// <para>Listens for the end of text elements. </para>
/// </summary>
/// <java-name>
/// android/sax/EndTextElementListener
/// </java-name>
[Dot42.DexImport("android/sax/EndTextElementListener", AccessFlags = 1537)]
public partial interface IEndTextElementListener
/* scope: __dot42__ */
{
/// <summary>
/// <para>Invoked at the end of a text element with the body of the element.</para><para></para>
/// </summary>
/// <java-name>
/// end
/// </java-name>
// Unlike IEndElementListener.End(), this overload receives the element's text body.
[Dot42.DexImport("end", "(Ljava/lang/String;)V", AccessFlags = 1025)]
void End(string body) /* MethodBuilder.Create */ ;
}
/// <summary>
/// <para>Listens for the beginning of elements. </para>
/// </summary>
/// <java-name>
/// android/sax/StartElementListener
/// </java-name>
[Dot42.DexImport("android/sax/StartElementListener", AccessFlags = 1537)]
public partial interface IStartElementListener
/* scope: __dot42__ */
{
/// <summary>
/// <para>Invoked at the beginning of an element.</para><para></para>
/// </summary>
/// <java-name>
/// start
/// </java-name>
// Receives the SAX attribute set for the opening tag.
[Dot42.DexImport("start", "(Lorg/xml/sax/Attributes;)V", AccessFlags = 1025)]
void Start(global::Org.Xml.Sax.IAttributes attributes) /* MethodBuilder.Create */ ;
}
/// <summary>
/// <para>Listens for the beginning and ending of elements. </para>
/// </summary>
/// <java-name>
/// android/sax/ElementListener
/// </java-name>
// Marker interface: combines the start-element and end-element callbacks and
// declares no members of its own.
[Dot42.DexImport("android/sax/ElementListener", AccessFlags = 1537)]
public partial interface IElementListener : global::Android.Sax.IStartElementListener, global::Android.Sax.IEndElementListener
/* scope: __dot42__ */
{
}
/// <summary>
/// <para>An XML element. Provides access to child elements and hooks to listen for events related to this element.</para><para><para>RootElement </para></para>
/// </summary>
/// <java-name>
/// android/sax/Element
/// </java-name>
// NOTE(review): generated Dot42 Java binding for android/sax/Element. All method
// bodies are placeholders (empty or returning defaults); presumably replaced by
// the imported Dex implementation at build time — confirm before hand-editing.
[Dot42.DexImport("android/sax/Element", AccessFlags = 33)]
public partial class Element
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "()V", AccessFlags = 0)]
internal Element() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Gets the child element with the given name. Uses an empty string as the namespace. </para>
/// </summary>
/// <java-name>
/// getChild
/// </java-name>
[Dot42.DexImport("getChild", "(Ljava/lang/String;)Landroid/sax/Element;", AccessFlags = 1)]
public virtual global::Android.Sax.Element GetChild(string localName) /* MethodBuilder.Create */
{
// Stub body — returns null (the default for a reference type).
return default(global::Android.Sax.Element);
}
/// <summary>
/// <para>Gets the child element with the given name. </para>
/// </summary>
/// <java-name>
/// getChild
/// </java-name>
[Dot42.DexImport("getChild", "(Ljava/lang/String;Ljava/lang/String;)Landroid/sax/Element;", AccessFlags = 1)]
public virtual global::Android.Sax.Element GetChild(string uri, string localName) /* MethodBuilder.Create */
{
return default(global::Android.Sax.Element);
}
/// <summary>
/// <para>Gets the child element with the given name. Uses an empty string as the namespace. We will throw a org.xml.sax.SAXException at parsing time if the specified child is missing. This helps you ensure that your listeners are called. </para>
/// </summary>
/// <java-name>
/// requireChild
/// </java-name>
[Dot42.DexImport("requireChild", "(Ljava/lang/String;)Landroid/sax/Element;", AccessFlags = 1)]
public virtual global::Android.Sax.Element RequireChild(string localName) /* MethodBuilder.Create */
{
return default(global::Android.Sax.Element);
}
/// <summary>
/// <para>Gets the child element with the given name. We will throw a org.xml.sax.SAXException at parsing time if the specified child is missing. This helps you ensure that your listeners are called. </para>
/// </summary>
/// <java-name>
/// requireChild
/// </java-name>
[Dot42.DexImport("requireChild", "(Ljava/lang/String;Ljava/lang/String;)Landroid/sax/Element;", AccessFlags = 1)]
public virtual global::Android.Sax.Element RequireChild(string uri, string localName) /* MethodBuilder.Create */
{
return default(global::Android.Sax.Element);
}
/// <summary>
/// <para>Sets start and end element listeners at the same time. </para>
/// </summary>
/// <java-name>
/// setElementListener
/// </java-name>
[Dot42.DexImport("setElementListener", "(Landroid/sax/ElementListener;)V", AccessFlags = 1)]
public virtual void SetElementListener(global::Android.Sax.IElementListener elementListener) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Sets start and end text element listeners at the same time. </para>
/// </summary>
/// <java-name>
/// setTextElementListener
/// </java-name>
[Dot42.DexImport("setTextElementListener", "(Landroid/sax/TextElementListener;)V", AccessFlags = 1)]
public virtual void SetTextElementListener(global::Android.Sax.ITextElementListener elementListener) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Sets a listener for the start of this element. </para>
/// </summary>
/// <java-name>
/// setStartElementListener
/// </java-name>
[Dot42.DexImport("setStartElementListener", "(Landroid/sax/StartElementListener;)V", AccessFlags = 1)]
public virtual void SetStartElementListener(global::Android.Sax.IStartElementListener startElementListener) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Sets a listener for the end of this element. </para>
/// </summary>
/// <java-name>
/// setEndElementListener
/// </java-name>
[Dot42.DexImport("setEndElementListener", "(Landroid/sax/EndElementListener;)V", AccessFlags = 1)]
public virtual void SetEndElementListener(global::Android.Sax.IEndElementListener endElementListener) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Sets a listener for the end of this text element. </para>
/// </summary>
/// <java-name>
/// setEndTextElementListener
/// </java-name>
[Dot42.DexImport("setEndTextElementListener", "(Landroid/sax/EndTextElementListener;)V", AccessFlags = 1)]
public virtual void SetEndTextElementListener(global::Android.Sax.IEndTextElementListener endTextElementListener) /* MethodBuilder.Create */
{
}
/// <java-name>
/// toString
/// </java-name>
[Dot42.DexImport("toString", "()Ljava/lang/String;", AccessFlags = 1)]
public override string ToString() /* MethodBuilder.Create */
{
// Stub body — returns null; the imported Dex toString() provides the real value.
return default(string);
}
}
}
| |
using UnityEngine;
using System.Collections.Generic;
/// <summary>
/// Static IMGUI helper routines: bordered panels, tiled textures, beveled labels,
/// and screen-space utility math. All members are stateless apart from the three
/// cached fields below.
/// </summary>
public class UI
{
    // Lazily-created borderless skin (see nullSkin).
    static GUISkin mNullSkin = null;
    // Shared scratch GUIContent used when measuring label text.
    static GUIContent mContent = null;
    // GUI color saved by SetAlpha() and restored by RestoreAlpha().
    static Color mColor;

    /// <summary>
    /// It's often useful to have a skin that is clean and devoid of borders, padding, margins, etc.
    /// Created on first access; the box style's background and border widths come
    /// from Config when a Config instance exists.
    /// </summary>
    static public GUISkin nullSkin
    {
        get
        {
            if (mNullSkin == null)
            {
                mNullSkin = GUISkin.CreateInstance("GUISkin") as GUISkin;
                mNullSkin.name = "Null Skin (Tools.cs)";

                if (Config.Instance != null)
                {
                    mNullSkin.box.normal.background = Config.Instance.windowBorder;
                    mNullSkin.box.border.left = Config.Instance.windowPadding;
                    mNullSkin.box.border.right = Config.Instance.windowPadding;
                    mNullSkin.box.border.top = Config.Instance.windowPadding;
                    mNullSkin.box.border.bottom = Config.Instance.windowPadding;
                }
            }
            return mNullSkin;
        }
    }

    /// <summary>
    /// Wraps the angle, ensuring that it's always in the [-180, 180] range.
    /// (Fixed doc: the previous comment claimed -360..360, but the loops clamp to ±180.)
    /// </summary>
    static public float WrapAngle (float a)
    {
        while (a < -180.0f) a += 360.0f;
        while (a > 180.0f) a -= 360.0f;
        return a;
    }

    /// <summary>
    /// Replaces the GUI color with the specified alpha value, saving the previous
    /// color so RestoreAlpha() can put it back.
    /// </summary>
    static public void SetAlpha (float a)
    {
        mColor = GUI.color;
        GUI.color = new Color(1.0f, 1.0f, 1.0f, a);
    }

    /// <summary>
    /// Restores the GUI color saved by the last SetAlpha() call.
    /// </summary>
    static public void RestoreAlpha () { GUI.color = mColor; }

    /// <summary>
    /// Draws the tiled texture. Like GUI.DrawTexture() but tiled instead of stretched.
    /// </summary>
    static public void DrawTiledTexture (Rect rect, Texture tex)
    {
        // Guard: a null texture or a non-positive dimension would previously cause a
        // NullReferenceException or an infinite loop (the loops below advance by
        // tex.width / tex.height). Draw nothing in that degenerate case.
        if (tex == null || tex.width <= 0 || tex.height <= 0) return;

        GUI.BeginGroup(rect);
        {
            int width = Mathf.RoundToInt(rect.width);
            int height = Mathf.RoundToInt(rect.height);

            for (int y = 0; y < height; y += tex.height)
            {
                for (int x = 0; x < width; x += tex.width)
                {
                    GUI.DrawTexture(new Rect(x, y, tex.width, tex.height), tex);
                }
            }
        }
        GUI.EndGroup();
    }

    /// <summary>
    /// Draws the specified tiled texture inset by 'offset' on all sides, and returns
    /// the inset rectangle that was actually drawn.
    /// </summary>
    static public Rect DrawTiledTexture (Rect rect, float offset, Texture tex)
    {
        rect = new Rect(rect.x + offset, rect.y + offset,
            rect.width - offset - offset,
            rect.height - offset - offset);
        DrawTiledTexture(rect, tex);
        return rect;
    }

    /// <summary>
    /// Helper function that draws a texture at (x, y) at its native size, cleanly
    /// with no border, margin, or padding. Returns the rectangle it occupied.
    /// </summary>
    static public Rect DrawTexture (float x, float y, Texture tex)
    {
        Rect rect = new Rect(x, y, tex.width, tex.height);
        GUI.DrawTexture(rect, tex);
        return rect;
    }

    /// <summary>
    /// Helper function that draws a box similar to GUI.Box(), but with a tiled
    /// background (Config.windowBackground) framed by the null skin's box border.
    /// Returns the inner (content) rectangle.
    /// </summary>
    static public Rect DrawPanel (Rect rect)
    {
        Rect retVal = DrawTiledTexture(rect,
            Config.Instance.windowPadding,
            Config.Instance.windowBackground);
        GUI.Box(rect, "", nullSkin.box);
        // Content rectangle
        return retVal;
    }

    /// <summary>
    /// Draws a window over the specified rectangle: a panel plus a centered title
    /// strip overhanging the top edge. Returns the content area beveled by 8 px.
    /// </summary>
    static public Rect DrawWindow (Rect rect, string text)
    {
        rect = DrawPanel(rect);
        Rect titleRect = new Rect(rect.x + 50.0f, rect.y - 25.0f, rect.width - 100.0f, 40.0f);
        DrawPanel(titleRect);
        DrawTitle(titleRect, text, Config.Instance.headerStyle);
        return Bevel(rect, 8f);
    }

    /// <summary>
    /// Draws a shadowed (or beveled) label with specified colors.
    /// </summary>
    static public void DrawLabel (Rect rect, string text, GUIStyle style, Color front, Color back)
    {
        DrawLabel(rect, text, style, front, back, false);
    }

    /// <summary>
    /// Draws a shadowed (or beveled) label with specified colors.
    /// When 'outline' is true the back color is drawn on all four diagonals,
    /// producing an outline rather than a single drop shadow.
    /// </summary>
    static public void DrawLabel (Rect rect, string text, GUIStyle style, Color front, Color back, bool outline)
    {
        Color prev = style.normal.textColor;

        // Shadow pass (offset by one pixel).
        style.normal.textColor = back;
        GUI.Label(new Rect(rect.x + 1.0f, rect.y + 1.0f, rect.width, rect.height), text, style);

        if (outline)
        {
            GUI.Label(new Rect(rect.x + 1.0f, rect.y - 1.0f, rect.width, rect.height), text, style);
            GUI.Label(new Rect(rect.x - 1.0f, rect.y - 1.0f, rect.width, rect.height), text, style);
            GUI.Label(new Rect(rect.x - 1.0f, rect.y + 1.0f, rect.width, rect.height), text, style);
        }

        // Actual label, then restore the style's original color (styles are shared).
        style.normal.textColor = front;
        GUI.Label(rect, text, style);
        style.normal.textColor = prev;
    }

    /// <summary>
    /// Draws a shadowed (or beveled) label with specified colors using GUILayout.
    /// </summary>
    static public void DrawLabel (string text, GUIStyle style, Color front, Color back)
    {
        Color prev = style.normal.textColor;

        // Shadow pass; GUILayout reserves the rectangle for us.
        style.normal.textColor = back;
        GUILayout.Label(text, style);

        // Actual label, offset one pixel up-left from the reserved rect.
        style.normal.textColor = front;
        Rect rect = GUILayoutUtility.GetLastRect();
        GUI.Label(new Rect(rect.x - 1.0f, rect.y - 1.0f, rect.width, rect.height), text, style);
        style.normal.textColor = prev;
    }

    /// <summary>
    /// Draws a label with a distinct beveled style (front = normal color,
    /// back = hover color).
    /// </summary>
    static public void DrawLabel (string text, GUIStyle style)
    {
        DrawLabel(text, style, style.normal.textColor, style.hover.textColor);
    }

    /// <summary>
    /// Draws a label with a distinct beveled style using the default skin.
    /// </summary>
    static public void DrawLabel (string text)
    {
        DrawLabel(text, GUI.skin.label);
    }

    /// <summary>
    /// Draws a label with a distinct beveled style inside the given rectangle.
    /// </summary>
    static public void DrawLabel (Rect rect, string text, GUIStyle style)
    {
        DrawLabel(rect, text, style, style.normal.textColor, style.hover.textColor, false);
    }

    /// <summary>
    /// Draws a label with a distinct beveled style inside the given rectangle,
    /// using the default skin.
    /// </summary>
    static public void DrawLabel (Rect rect, string text)
    {
        DrawLabel(rect, text, GUI.skin.label);
    }

    /// <summary>
    /// Common functionality for DrawTitle functions: draws a semi-transparent
    /// "blob" backdrop sized to the measured text.
    /// </summary>
    static private void DrawBlobShadow (Rect rect, string text, GUIStyle style)
    {
        if (Config.Instance.townNameBackground != null)
        {
            // Keep the shared content in sync with the label being measured so this
            // helper no longer relies on callers having set mContent beforehand.
            // (Existing callers already assign the same text, so behavior is unchanged.)
            if (mContent == null) mContent = new GUIContent();
            mContent.text = text;

            Vector2 size = style.CalcSize(mContent);
            float padX = size.x * 0.25f;
            float padY = size.y * 0.5f;
            float x = (rect.width - size.x) * 0.5f - padX;
            float y = (rect.height - size.y) * 0.5f - padY;

            Color prev = GUI.color;
            GUI.color = new Color(1.0f, 1.0f, 1.0f, prev.a * 0.5f);
            GUI.DrawTexture(new Rect(rect.x + x, rect.y + y, rect.width - x * 2.0f, rect.height - y * 2.0f),
                Config.Instance.townNameBackground);
            GUI.color = prev;
        }
    }

    /// <summary>
    /// Draws a title label -- beveled header label with a blob shadow.
    /// </summary>
    static public void DrawTitle (Rect rect, string text, GUIStyle style)
    {
        // Force centered alignment for the duration of the draw, then restore it.
        TextAnchor prev = style.alignment;
        style.alignment = TextAnchor.MiddleCenter;
        if (mContent == null) mContent = new GUIContent();
        mContent.text = text;
        DrawBlobShadow(rect, text, style);
        DrawLabel(rect, text, style);
        style.alignment = prev;
    }

    /// <summary>
    /// Draws a title label -- beveled header label with a blob shadow -- letting
    /// GUILayout reserve the rectangle.
    /// </summary>
    static public void DrawTitle (string text, GUIStyle style)
    {
        TextAnchor prev = style.alignment;
        style.alignment = TextAnchor.MiddleCenter;
        if (mContent == null) mContent = new GUIContent();
        mContent.text = text;
        Rect rect = GUILayoutUtility.GetRect(mContent, style);
        DrawBlobShadow(rect, text, style);
        DrawLabel(rect, text, style);
        style.alignment = prev;
    }

    /// <summary>
    /// Bevel the specified rect by the specified number of pixels (shrink it
    /// inward on every side).
    /// </summary>
    static public Rect Bevel (Rect rect, float pixels)
    {
        return new Rect(rect.x + pixels, rect.y + pixels, rect.width - pixels * 2.0f, rect.height - pixels * 2.0f);
    }

    /// <summary>
    /// Draws a grid of boxes inside 'rect' and returns the inner (border-inset)
    /// top-left position of each box. Stops after 'cells' boxes or when the
    /// rectangle is full, whichever comes first.
    /// </summary>
    static public List<Vector2> DrawGrid (Rect rect, float sizeX, float sizeY, float padX, float padY, float border, int cells)
    {
        List<Vector2> list = new List<Vector2>();

        // Cell size includes the border on both sides.
        sizeX += border * 2.0f;
        sizeY += border * 2.0f;

        float x = rect.x;
        float y = rect.y;
        float mx = x + rect.width;
        float my = y + rect.height;
        int count = 0;
        GUIStyle style = Config.Instance.skin.box;

        while (y < my)
        {
            if (y + sizeY > my) break;

            while (x < mx)
            {
                if (x + sizeX > mx) break;

                Vector2 pos = new Vector2(x, y);
                GUI.Box(new Rect(x, y, sizeX, sizeY), "", style);
                x += sizeX + padX;

                // Report the content position, inset by the border.
                pos.x += border;
                pos.y += border;
                list.Add(pos);

                if (++count >= cells) return list;
            }
            y += sizeY + padY;
            x = rect.x;
        }
        return list;
    }

    /// <summary>
    /// Whether the specified screen rectangle contains the mouse position.
    /// </summary>
    static public bool ContainsMouse (Rect rect)
    {
        // Input.mousePosition is bottom-left based; GUI space is top-left based.
        Vector2 pos = Input.mousePosition;
        pos.y = Screen.height - pos.y;
        return rect.Contains(pos);
    }

    /// <summary>
    /// Gets the mouse position in GUI space (top-left based).
    /// </summary>
    static public Vector2 GetMousePos ()
    {
        Vector2 pos = Input.mousePosition;
        pos.y = Screen.height - pos.y;
        return pos;
    }

    /// <summary>
    /// Gets the GUI-space (top-left based) screen position of the specified world
    /// position, projected through the main camera.
    /// </summary>
    static public Vector3 GetScreenPos (Vector3 worldPos)
    {
        Vector3 pos = Camera.main.WorldToScreenPoint(worldPos);
        pos.y = Screen.height - pos.y;
        return pos;
    }

    /// <summary>
    /// Whether this screen position is currently visible (in front of the camera
    /// and within the screen bounds).
    /// </summary>
    static public bool IsVisible (Vector3 screenPos)
    {
        return !(screenPos.z < 0f ||
            screenPos.x < 0f ||
            screenPos.y < 0f ||
            screenPos.x > Screen.width ||
            screenPos.y > Screen.height);
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/datastore/v1/entity.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Cloud.Datastore.V1 {
/// <summary>Holder for reflection information generated from google/datastore/v1/entity.proto</summary>
public static partial class EntityReflection {
#region Descriptor
/// <summary>File descriptor for google/datastore/v1/entity.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
// Static initializer: decodes the base64-serialized descriptor below and registers
// the generated CLR type for each message with the protobuf runtime. Generated by
// protoc — do not edit the descriptor data or type lists by hand.
static EntityReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CiBnb29nbGUvZGF0YXN0b3JlL3YxL2VudGl0eS5wcm90bxITZ29vZ2xlLmRh",
"dGFzdG9yZS52MRocZ29vZ2xlL2FwaS9hbm5vdGF0aW9ucy5wcm90bxocZ29v",
"Z2xlL3Byb3RvYnVmL3N0cnVjdC5wcm90bxofZ29vZ2xlL3Byb3RvYnVmL3Rp",
"bWVzdGFtcC5wcm90bxoYZ29vZ2xlL3R5cGUvbGF0bG5nLnByb3RvIjcKC1Bh",
"cnRpdGlvbklkEhIKCnByb2plY3RfaWQYAiABKAkSFAoMbmFtZXNwYWNlX2lk",
"GAQgASgJIrcBCgNLZXkSNgoMcGFydGl0aW9uX2lkGAEgASgLMiAuZ29vZ2xl",
"LmRhdGFzdG9yZS52MS5QYXJ0aXRpb25JZBIyCgRwYXRoGAIgAygLMiQuZ29v",
"Z2xlLmRhdGFzdG9yZS52MS5LZXkuUGF0aEVsZW1lbnQaRAoLUGF0aEVsZW1l",
"bnQSDAoEa2luZBgBIAEoCRIMCgJpZBgCIAEoA0gAEg4KBG5hbWUYAyABKAlI",
"AEIJCgdpZF90eXBlIjgKCkFycmF5VmFsdWUSKgoGdmFsdWVzGAEgAygLMhou",
"Z29vZ2xlLmRhdGFzdG9yZS52MS5WYWx1ZSLxAwoFVmFsdWUSMAoKbnVsbF92",
"YWx1ZRgLIAEoDjIaLmdvb2dsZS5wcm90b2J1Zi5OdWxsVmFsdWVIABIXCg1i",
"b29sZWFuX3ZhbHVlGAEgASgISAASFwoNaW50ZWdlcl92YWx1ZRgCIAEoA0gA",
"EhYKDGRvdWJsZV92YWx1ZRgDIAEoAUgAEjUKD3RpbWVzdGFtcF92YWx1ZRgK",
"IAEoCzIaLmdvb2dsZS5wcm90b2J1Zi5UaW1lc3RhbXBIABItCglrZXlfdmFs",
"dWUYBSABKAsyGC5nb29nbGUuZGF0YXN0b3JlLnYxLktleUgAEhYKDHN0cmlu",
"Z192YWx1ZRgRIAEoCUgAEhQKCmJsb2JfdmFsdWUYEiABKAxIABIuCg9nZW9f",
"cG9pbnRfdmFsdWUYCCABKAsyEy5nb29nbGUudHlwZS5MYXRMbmdIABIzCgxl",
"bnRpdHlfdmFsdWUYBiABKAsyGy5nb29nbGUuZGF0YXN0b3JlLnYxLkVudGl0",
"eUgAEjYKC2FycmF5X3ZhbHVlGAkgASgLMh8uZ29vZ2xlLmRhdGFzdG9yZS52",
"MS5BcnJheVZhbHVlSAASDwoHbWVhbmluZxgOIAEoBRIcChRleGNsdWRlX2Zy",
"b21faW5kZXhlcxgTIAEoCEIMCgp2YWx1ZV90eXBlIr8BCgZFbnRpdHkSJQoD",
"a2V5GAEgASgLMhguZ29vZ2xlLmRhdGFzdG9yZS52MS5LZXkSPwoKcHJvcGVy",
"dGllcxgDIAMoCzIrLmdvb2dsZS5kYXRhc3RvcmUudjEuRW50aXR5LlByb3Bl",
"cnRpZXNFbnRyeRpNCg9Qcm9wZXJ0aWVzRW50cnkSCwoDa2V5GAEgASgJEikK",
"BXZhbHVlGAIgASgLMhouZ29vZ2xlLmRhdGFzdG9yZS52MS5WYWx1ZToCOAFC",
"ngEKF2NvbS5nb29nbGUuZGF0YXN0b3JlLnYxQgtFbnRpdHlQcm90b1ABWjxn",
"b29nbGUuZ29sYW5nLm9yZy9nZW5wcm90by9nb29nbGVhcGlzL2RhdGFzdG9y",
"ZS92MTtkYXRhc3RvcmWqAhlHb29nbGUuQ2xvdWQuRGF0YXN0b3JlLlYxygIZ",
"R29vZ2xlXENsb3VkXERhdGFzdG9yZVxWMWIGcHJvdG8z"));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::Google.Api.AnnotationsReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.StructReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor, global::Google.Type.LatlngReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Datastore.V1.PartitionId), global::Google.Cloud.Datastore.V1.PartitionId.Parser, new[]{ "ProjectId", "NamespaceId" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Datastore.V1.Key), global::Google.Cloud.Datastore.V1.Key.Parser, new[]{ "PartitionId", "Path" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Datastore.V1.Key.Types.PathElement), global::Google.Cloud.Datastore.V1.Key.Types.PathElement.Parser, new[]{ "Kind", "Id", "Name" }, new[]{ "IdType" }, null, null)}),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Datastore.V1.ArrayValue), global::Google.Cloud.Datastore.V1.ArrayValue.Parser, new[]{ "Values" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Datastore.V1.Value), global::Google.Cloud.Datastore.V1.Value.Parser, new[]{ "NullValue", "BooleanValue", "IntegerValue", "DoubleValue", "TimestampValue", "KeyValue", "StringValue", "BlobValue", "GeoPointValue", "EntityValue", "ArrayValue", "Meaning", "ExcludeFromIndexes" }, new[]{ "ValueType" }, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Datastore.V1.Entity), global::Google.Cloud.Datastore.V1.Entity.Parser, new[]{ "Key", "Properties" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, })
}));
}
#endregion
}
#region Messages
/// <summary>
/// A partition ID identifies a grouping of entities. The grouping is always
/// by project and namespace, however the namespace ID may be empty.
///
/// A partition ID contains several dimensions:
/// project ID and namespace ID.
///
/// Partition dimensions:
///
/// - May be `""`.
/// - Must be valid UTF-8 bytes.
/// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}`
/// If the value of any dimension matches regex `__.*__`, the partition is
/// reserved/read-only.
/// A reserved/read-only partition ID is forbidden in certain documented
/// contexts.
///
/// Foreign partition IDs (in which the project ID does
/// not match the context project ID) are discouraged.
/// Reads and writes of foreign partition IDs may fail if the project is not in an active state.
/// </summary>
// Generated protobuf message class — do not edit by hand; wire-format tags and
// field numbers below must match the .proto definition exactly.
public sealed partial class PartitionId : pb::IMessage<PartitionId> {
private static readonly pb::MessageParser<PartitionId> _parser = new pb::MessageParser<PartitionId>(() => new PartitionId());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<PartitionId> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Cloud.Datastore.V1.EntityReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PartitionId() {
OnConstruction();
}
// Partial hook for user code; no-op unless implemented elsewhere.
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PartitionId(PartitionId other) : this() {
projectId_ = other.projectId_;
namespaceId_ = other.namespaceId_;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PartitionId Clone() {
return new PartitionId(this);
}
/// <summary>Field number for the "project_id" field.</summary>
public const int ProjectIdFieldNumber = 2;
private string projectId_ = "";
/// <summary>
/// The ID of the project to which the entities belong.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string ProjectId {
get { return projectId_; }
set {
projectId_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "namespace_id" field.</summary>
public const int NamespaceIdFieldNumber = 4;
private string namespaceId_ = "";
/// <summary>
/// If not empty, the ID of the namespace to which the entities belong.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string NamespaceId {
get { return namespaceId_; }
set {
namespaceId_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as PartitionId);
}
// Value equality over both string fields.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(PartitionId other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (ProjectId != other.ProjectId) return false;
if (NamespaceId != other.NamespaceId) return false;
return true;
}
// Hash combines the non-default fields via XOR, matching Equals above.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (ProjectId.Length != 0) hash ^= ProjectId.GetHashCode();
if (NamespaceId.Length != 0) hash ^= NamespaceId.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
// Proto3 serialization: default (empty) fields are omitted from the wire.
// Raw tag 18 = (field 2 << 3) | wire type 2; tag 34 = (field 4 << 3) | 2.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (ProjectId.Length != 0) {
output.WriteRawTag(18);
output.WriteString(ProjectId);
}
if (NamespaceId.Length != 0) {
output.WriteRawTag(34);
output.WriteString(NamespaceId);
}
}
// Size in bytes of the WriteTo output (1 tag byte + string length per field).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (ProjectId.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(ProjectId);
}
if (NamespaceId.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(NamespaceId);
}
return size;
}
// Merge semantics: non-default fields of 'other' overwrite this message's fields.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(PartitionId other) {
if (other == null) {
return;
}
if (other.ProjectId.Length != 0) {
ProjectId = other.ProjectId;
}
if (other.NamespaceId.Length != 0) {
NamespaceId = other.NamespaceId;
}
}
// Wire parsing: unknown fields are skipped (proto3 behavior for this runtime version).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 18: {
ProjectId = input.ReadString();
break;
}
case 34: {
NamespaceId = input.ReadString();
break;
}
}
}
}
}
/// <summary>
/// A unique identifier for an entity.
/// If a key's partition ID or any of its path kinds or names are
/// reserved/read-only, the key is reserved/read-only.
/// A reserved/read-only key is forbidden in certain documented contexts.
/// </summary>
public sealed partial class Key : pb::IMessage<Key> {
// Generated parser/descriptor plumbing for the Key message.
private static readonly pb::MessageParser<Key> _parser = new pb::MessageParser<Key>(() => new Key());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<Key> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Cloud.Datastore.V1.EntityReflection.Descriptor.MessageTypes[1]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Key() {
OnConstruction();
}
// Partial hook for user code; no-op unless implemented elsewhere.
partial void OnConstruction();
// Copy constructor: deep-clones the PartitionId sub-message and the path list.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Key(Key other) : this() {
PartitionId = other.partitionId_ != null ? other.PartitionId.Clone() : null;
path_ = other.path_.Clone();
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Key Clone() {
return new Key(this);
}
/// <summary>Field number for the "partition_id" field.</summary>
public const int PartitionIdFieldNumber = 1;
// Null when unset (message-typed fields have no default instance in proto3).
private global::Google.Cloud.Datastore.V1.PartitionId partitionId_;
/// <summary>
/// Entities are partitioned into subsets, currently identified by a project
/// ID and namespace ID.
/// Queries are scoped to a single partition.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Cloud.Datastore.V1.PartitionId PartitionId {
get { return partitionId_; }
set {
partitionId_ = value;
}
}
/// <summary>Field number for the "path" field.</summary>
public const int PathFieldNumber = 2;
// Codec for the repeated message field: tag 18 = (field 2 << 3) | wire type 2.
private static readonly pb::FieldCodec<global::Google.Cloud.Datastore.V1.Key.Types.PathElement> _repeated_path_codec
= pb::FieldCodec.ForMessage(18, global::Google.Cloud.Datastore.V1.Key.Types.PathElement.Parser);
private readonly pbc::RepeatedField<global::Google.Cloud.Datastore.V1.Key.Types.PathElement> path_ = new pbc::RepeatedField<global::Google.Cloud.Datastore.V1.Key.Types.PathElement>();
/// <summary>
/// The entity path.
/// An entity path consists of one or more elements composed of a kind and a
/// string or numerical identifier, which identify entities. The first
/// element identifies a _root entity_, the second element identifies
/// a _child_ of the root entity, the third element identifies a child of the
/// second entity, and so forth. The entities identified by all prefixes of
/// the path are called the element's _ancestors_.
///
/// An entity path is always fully complete: *all* of the entity's ancestors
/// are required to be in the path along with the entity identifier itself.
/// The only exception is that in some documented cases, the identifier in the
/// last path element (for the entity) itself may be omitted. For example,
/// the last path element of the key of `Mutation.insert` may have no
/// identifier.
///
/// A path can never be empty, and a path can have at most 100 elements.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<global::Google.Cloud.Datastore.V1.Key.Types.PathElement> Path {
get { return path_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as Key);
}
// Value equality: PartitionId sub-message (null-safe) plus element-wise path equality.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(Key other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(PartitionId, other.PartitionId)) return false;
if(!path_.Equals(other.path_)) return false;
return true;
}
// Hash combines the set fields via XOR, consistent with Equals above.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (partitionId_ != null) hash ^= PartitionId.GetHashCode();
hash ^= path_.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
// Serialization: the PartitionId sub-message is written only when set
// (raw tag 10 = (field 1 << 3) | wire type 2); path elements use their codec.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (partitionId_ != null) {
output.WriteRawTag(10);
output.WriteMessage(PartitionId);
}
path_.WriteTo(output, _repeated_path_codec);
}
// Size in bytes of the WriteTo output.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (partitionId_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(PartitionId);
}
size += path_.CalculateSize(_repeated_path_codec);
return size;
}
// Merge semantics: sub-messages are merged recursively (created on demand),
// repeated fields are appended.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(Key other) {
if (other == null) {
return;
}
if (other.partitionId_ != null) {
if (partitionId_ == null) {
partitionId_ = new global::Google.Cloud.Datastore.V1.PartitionId();
}
PartitionId.MergeFrom(other.PartitionId);
}
path_.Add(other.path_);
}
// Wire parsing: unknown fields are skipped; repeated path entries are appended.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
if (partitionId_ == null) {
partitionId_ = new global::Google.Cloud.Datastore.V1.PartitionId();
}
input.ReadMessage(partitionId_);
break;
}
case 18: {
path_.AddEntriesFrom(input, _repeated_path_codec);
break;
}
}
}
}
#region Nested types
/// <summary>Container for nested types declared in the Key message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
/// <summary>
/// A (kind, ID/name) pair used to construct a key path.
///
/// If either name or ID is set, the element is complete.
/// If neither is set, the element is incomplete.
/// </summary>
public sealed partial class PathElement : pb::IMessage<PathElement> {
// Generated parser/descriptor plumbing for the nested PathElement message.
private static readonly pb::MessageParser<PathElement> _parser = new pb::MessageParser<PathElement>(() => new PathElement());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<PathElement> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Cloud.Datastore.V1.Key.Descriptor.NestedTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PathElement() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PathElement(PathElement other) : this() {
kind_ = other.kind_;
switch (other.IdTypeCase) {
case IdTypeOneofCase.Id:
Id = other.Id;
break;
case IdTypeOneofCase.Name:
Name = other.Name;
break;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PathElement Clone() {
return new PathElement(this);
}
/// <summary>Field number for the "kind" field.</summary>
public const int KindFieldNumber = 1;
private string kind_ = "";
/// <summary>
/// The kind of the entity.
/// A kind matching regex `__.*__` is reserved/read-only.
/// A kind must not contain more than 1500 bytes when UTF-8 encoded.
/// Cannot be `""`.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Kind {
get { return kind_; }
set {
kind_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "id" field.</summary>
public const int IdFieldNumber = 2;
/// <summary>
/// The auto-allocated ID of the entity.
/// Never equal to zero. Values less than zero are discouraged and may not
/// be supported in the future.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public long Id {
get { return idTypeCase_ == IdTypeOneofCase.Id ? (long) idType_ : 0L; }
set {
idType_ = value;
idTypeCase_ = IdTypeOneofCase.Id;
}
}
/// <summary>Field number for the "name" field.</summary>
public const int NameFieldNumber = 3;
/// <summary>
/// The name of the entity.
/// A name matching regex `__.*__` is reserved/read-only.
/// A name must not be more than 1500 bytes when UTF-8 encoded.
/// Cannot be `""`.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Name {
get { return idTypeCase_ == IdTypeOneofCase.Name ? (string) idType_ : ""; }
set {
idType_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
idTypeCase_ = IdTypeOneofCase.Name;
}
}
private object idType_;
/// <summary>Enum of possible cases for the "id_type" oneof.</summary>
public enum IdTypeOneofCase {
None = 0,
Id = 2,
Name = 3,
}
private IdTypeOneofCase idTypeCase_ = IdTypeOneofCase.None;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public IdTypeOneofCase IdTypeCase {
get { return idTypeCase_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void ClearIdType() {
idTypeCase_ = IdTypeOneofCase.None;
idType_ = null;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as PathElement);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(PathElement other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Kind != other.Kind) return false;
if (Id != other.Id) return false;
if (Name != other.Name) return false;
if (IdTypeCase != other.IdTypeCase) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Kind.Length != 0) hash ^= Kind.GetHashCode();
if (idTypeCase_ == IdTypeOneofCase.Id) hash ^= Id.GetHashCode();
if (idTypeCase_ == IdTypeOneofCase.Name) hash ^= Name.GetHashCode();
hash ^= (int) idTypeCase_;
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Kind.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Kind);
}
if (idTypeCase_ == IdTypeOneofCase.Id) {
output.WriteRawTag(16);
output.WriteInt64(Id);
}
if (idTypeCase_ == IdTypeOneofCase.Name) {
output.WriteRawTag(26);
output.WriteString(Name);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Kind.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Kind);
}
if (idTypeCase_ == IdTypeOneofCase.Id) {
size += 1 + pb::CodedOutputStream.ComputeInt64Size(Id);
}
if (idTypeCase_ == IdTypeOneofCase.Name) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(PathElement other) {
if (other == null) {
return;
}
if (other.Kind.Length != 0) {
Kind = other.Kind;
}
switch (other.IdTypeCase) {
case IdTypeOneofCase.Id:
Id = other.Id;
break;
case IdTypeOneofCase.Name:
Name = other.Name;
break;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
Kind = input.ReadString();
break;
}
case 16: {
Id = input.ReadInt64();
break;
}
case 26: {
Name = input.ReadString();
break;
}
}
}
}
}
}
#endregion
}
/// <summary>
/// An array value.
/// </summary>
  public sealed partial class ArrayValue : pb::IMessage<ArrayValue> {
    private static readonly pb::MessageParser<ArrayValue> _parser = new pb::MessageParser<ArrayValue>(() => new ArrayValue());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<ArrayValue> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Datastore.V1.EntityReflection.Descriptor.MessageTypes[2]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ArrayValue() {
      OnConstruction();
    }
    partial void OnConstruction();
    // Copy constructor: deep-clones the repeated values field.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ArrayValue(ArrayValue other) : this() {
      values_ = other.values_.Clone();
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ArrayValue Clone() {
      return new ArrayValue(this);
    }
    /// <summary>Field number for the "values" field.</summary>
    public const int ValuesFieldNumber = 1;
    // Codec for field 1: repeated Value messages, tag 10.
    private static readonly pb::FieldCodec<global::Google.Cloud.Datastore.V1.Value> _repeated_values_codec
        = pb::FieldCodec.ForMessage(10, global::Google.Cloud.Datastore.V1.Value.Parser);
    private readonly pbc::RepeatedField<global::Google.Cloud.Datastore.V1.Value> values_ = new pbc::RepeatedField<global::Google.Cloud.Datastore.V1.Value>();
    /// <summary>
    /// Values in the array.
    /// The order of this array may not be preserved if it contains a mix of
    /// indexed and unindexed values.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<global::Google.Cloud.Datastore.V1.Value> Values {
      get { return values_; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as ArrayValue);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(ArrayValue other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if(!values_.Equals(other.values_)) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      hash ^= values_.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      // Single repeated field; the codec handles per-element tags.
      values_.WriteTo(output, _repeated_values_codec);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      size += values_.CalculateSize(_repeated_values_codec);
      return size;
    }
    // Merge semantics for repeated fields: concatenate the other message's values.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(ArrayValue other) {
      if (other == null) {
        return;
      }
      values_.Add(other.values_);
    }
    // Tag-dispatch parse loop; unknown fields are skipped.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            values_.AddEntriesFrom(input, _repeated_values_codec);
            break;
          }
        }
      }
    }
  }
/// <summary>
/// A message that can hold any of the supported value types and associated
/// metadata.
/// </summary>
  public sealed partial class Value : pb::IMessage<Value> {
    private static readonly pb::MessageParser<Value> _parser = new pb::MessageParser<Value>(() => new Value());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<Value> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Datastore.V1.EntityReflection.Descriptor.MessageTypes[3]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Value() {
      OnConstruction();
    }
    partial void OnConstruction();
    // Copy constructor: copies meaning/exclude_from_indexes, then copies only the
    // active value_type oneof case. Message-typed cases are deep-cloned; scalar
    // and string/bytes cases are copied by value/reference.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Value(Value other) : this() {
      meaning_ = other.meaning_;
      excludeFromIndexes_ = other.excludeFromIndexes_;
      switch (other.ValueTypeCase) {
        case ValueTypeOneofCase.NullValue:
          NullValue = other.NullValue;
          break;
        case ValueTypeOneofCase.BooleanValue:
          BooleanValue = other.BooleanValue;
          break;
        case ValueTypeOneofCase.IntegerValue:
          IntegerValue = other.IntegerValue;
          break;
        case ValueTypeOneofCase.DoubleValue:
          DoubleValue = other.DoubleValue;
          break;
        case ValueTypeOneofCase.TimestampValue:
          TimestampValue = other.TimestampValue.Clone();
          break;
        case ValueTypeOneofCase.KeyValue:
          KeyValue = other.KeyValue.Clone();
          break;
        case ValueTypeOneofCase.StringValue:
          StringValue = other.StringValue;
          break;
        case ValueTypeOneofCase.BlobValue:
          BlobValue = other.BlobValue;
          break;
        case ValueTypeOneofCase.GeoPointValue:
          GeoPointValue = other.GeoPointValue.Clone();
          break;
        case ValueTypeOneofCase.EntityValue:
          EntityValue = other.EntityValue.Clone();
          break;
        case ValueTypeOneofCase.ArrayValue:
          ArrayValue = other.ArrayValue.Clone();
          break;
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Value Clone() {
      return new Value(this);
    }
    /// <summary>Field number for the "null_value" field.</summary>
    public const int NullValueFieldNumber = 11;
    /// <summary>
    /// A null value.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Protobuf.WellKnownTypes.NullValue NullValue {
      // Each oneof getter returns its default unless its case is active.
      get { return valueTypeCase_ == ValueTypeOneofCase.NullValue ? (global::Google.Protobuf.WellKnownTypes.NullValue) valueType_ : 0; }
      set {
        valueType_ = value;
        valueTypeCase_ = ValueTypeOneofCase.NullValue;
      }
    }
    /// <summary>Field number for the "boolean_value" field.</summary>
    public const int BooleanValueFieldNumber = 1;
    /// <summary>
    /// A boolean value.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool BooleanValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.BooleanValue ? (bool) valueType_ : false; }
      set {
        valueType_ = value;
        valueTypeCase_ = ValueTypeOneofCase.BooleanValue;
      }
    }
    /// <summary>Field number for the "integer_value" field.</summary>
    public const int IntegerValueFieldNumber = 2;
    /// <summary>
    /// An integer value.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public long IntegerValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.IntegerValue ? (long) valueType_ : 0L; }
      set {
        valueType_ = value;
        valueTypeCase_ = ValueTypeOneofCase.IntegerValue;
      }
    }
    /// <summary>Field number for the "double_value" field.</summary>
    public const int DoubleValueFieldNumber = 3;
    /// <summary>
    /// A double value.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public double DoubleValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.DoubleValue ? (double) valueType_ : 0D; }
      set {
        valueType_ = value;
        valueTypeCase_ = ValueTypeOneofCase.DoubleValue;
      }
    }
    /// <summary>Field number for the "timestamp_value" field.</summary>
    public const int TimestampValueFieldNumber = 10;
    /// <summary>
    /// A timestamp value.
    /// When stored in the Datastore, precise only to microseconds;
    /// any additional precision is rounded down.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Protobuf.WellKnownTypes.Timestamp TimestampValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.TimestampValue ? (global::Google.Protobuf.WellKnownTypes.Timestamp) valueType_ : null; }
      set {
        // Assigning null to a message-typed oneof member clears the oneof.
        valueType_ = value;
        valueTypeCase_ = value == null ? ValueTypeOneofCase.None : ValueTypeOneofCase.TimestampValue;
      }
    }
    /// <summary>Field number for the "key_value" field.</summary>
    public const int KeyValueFieldNumber = 5;
    /// <summary>
    /// A key value.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Datastore.V1.Key KeyValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.KeyValue ? (global::Google.Cloud.Datastore.V1.Key) valueType_ : null; }
      set {
        valueType_ = value;
        valueTypeCase_ = value == null ? ValueTypeOneofCase.None : ValueTypeOneofCase.KeyValue;
      }
    }
    /// <summary>Field number for the "string_value" field.</summary>
    public const int StringValueFieldNumber = 17;
    /// <summary>
    /// A UTF-8 encoded string value.
    /// When `exclude_from_indexes` is false (it is indexed) , may have at most 1500 bytes.
    /// Otherwise, may be set to at least 1,000,000 bytes.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string StringValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.StringValue ? (string) valueType_ : ""; }
      set {
        // String/bytes oneof members reject null rather than clearing the oneof.
        valueType_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
        valueTypeCase_ = ValueTypeOneofCase.StringValue;
      }
    }
    /// <summary>Field number for the "blob_value" field.</summary>
    public const int BlobValueFieldNumber = 18;
    /// <summary>
    /// A blob value.
    /// May have at most 1,000,000 bytes.
    /// When `exclude_from_indexes` is false, may have at most 1500 bytes.
    /// In JSON requests, must be base64-encoded.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pb::ByteString BlobValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.BlobValue ? (pb::ByteString) valueType_ : pb::ByteString.Empty; }
      set {
        valueType_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
        valueTypeCase_ = ValueTypeOneofCase.BlobValue;
      }
    }
    /// <summary>Field number for the "geo_point_value" field.</summary>
    public const int GeoPointValueFieldNumber = 8;
    /// <summary>
    /// A geo point value representing a point on the surface of Earth.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Type.LatLng GeoPointValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.GeoPointValue ? (global::Google.Type.LatLng) valueType_ : null; }
      set {
        valueType_ = value;
        valueTypeCase_ = value == null ? ValueTypeOneofCase.None : ValueTypeOneofCase.GeoPointValue;
      }
    }
    /// <summary>Field number for the "entity_value" field.</summary>
    public const int EntityValueFieldNumber = 6;
    /// <summary>
    /// An entity value.
    ///
    /// - May have no key.
    /// - May have a key with an incomplete key path.
    /// - May have a reserved/read-only key.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Datastore.V1.Entity EntityValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.EntityValue ? (global::Google.Cloud.Datastore.V1.Entity) valueType_ : null; }
      set {
        valueType_ = value;
        valueTypeCase_ = value == null ? ValueTypeOneofCase.None : ValueTypeOneofCase.EntityValue;
      }
    }
    /// <summary>Field number for the "array_value" field.</summary>
    public const int ArrayValueFieldNumber = 9;
    /// <summary>
    /// An array value.
    /// Cannot contain another array value.
    /// A `Value` instance that sets field `array_value` must not set fields
    /// `meaning` or `exclude_from_indexes`.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Datastore.V1.ArrayValue ArrayValue {
      get { return valueTypeCase_ == ValueTypeOneofCase.ArrayValue ? (global::Google.Cloud.Datastore.V1.ArrayValue) valueType_ : null; }
      set {
        valueType_ = value;
        valueTypeCase_ = value == null ? ValueTypeOneofCase.None : ValueTypeOneofCase.ArrayValue;
      }
    }
    /// <summary>Field number for the "meaning" field.</summary>
    public const int MeaningFieldNumber = 14;
    private int meaning_;
    /// <summary>
    /// The `meaning` field should only be populated for backwards compatibility.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int Meaning {
      get { return meaning_; }
      set {
        meaning_ = value;
      }
    }
    /// <summary>Field number for the "exclude_from_indexes" field.</summary>
    public const int ExcludeFromIndexesFieldNumber = 19;
    private bool excludeFromIndexes_;
    /// <summary>
    /// If the value should be excluded from all indexes including those defined
    /// explicitly.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool ExcludeFromIndexes {
      get { return excludeFromIndexes_; }
      set {
        excludeFromIndexes_ = value;
      }
    }
    // Shared storage for the value_type oneof; value-type cases (bool, long,
    // double, enum) are boxed, while message/string/bytes cases are stored directly.
    private object valueType_;
    /// <summary>Enum of possible cases for the "value_type" oneof.</summary>
    public enum ValueTypeOneofCase {
      None = 0,
      NullValue = 11,
      BooleanValue = 1,
      IntegerValue = 2,
      DoubleValue = 3,
      TimestampValue = 10,
      KeyValue = 5,
      StringValue = 17,
      BlobValue = 18,
      GeoPointValue = 8,
      EntityValue = 6,
      ArrayValue = 9,
    }
    private ValueTypeOneofCase valueTypeCase_ = ValueTypeOneofCase.None;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ValueTypeOneofCase ValueTypeCase {
      get { return valueTypeCase_; }
    }
    // Resets the value_type oneof to its unset state.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void ClearValueType() {
      valueTypeCase_ = ValueTypeOneofCase.None;
      valueType_ = null;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as Value);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(Value other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      // Oneof getters return defaults for inactive cases, so comparing every
      // property is safe; the final ValueTypeCase check distinguishes an unset
      // oneof from one explicitly set to a default value.
      if (NullValue != other.NullValue) return false;
      if (BooleanValue != other.BooleanValue) return false;
      if (IntegerValue != other.IntegerValue) return false;
      if (DoubleValue != other.DoubleValue) return false;
      if (!object.Equals(TimestampValue, other.TimestampValue)) return false;
      if (!object.Equals(KeyValue, other.KeyValue)) return false;
      if (StringValue != other.StringValue) return false;
      if (BlobValue != other.BlobValue) return false;
      if (!object.Equals(GeoPointValue, other.GeoPointValue)) return false;
      if (!object.Equals(EntityValue, other.EntityValue)) return false;
      if (!object.Equals(ArrayValue, other.ArrayValue)) return false;
      if (Meaning != other.Meaning) return false;
      if (ExcludeFromIndexes != other.ExcludeFromIndexes) return false;
      if (ValueTypeCase != other.ValueTypeCase) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      // Only the active oneof case and non-default singular fields contribute.
      if (valueTypeCase_ == ValueTypeOneofCase.NullValue) hash ^= NullValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.BooleanValue) hash ^= BooleanValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.IntegerValue) hash ^= IntegerValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.DoubleValue) hash ^= DoubleValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.TimestampValue) hash ^= TimestampValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.KeyValue) hash ^= KeyValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.StringValue) hash ^= StringValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.BlobValue) hash ^= BlobValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.GeoPointValue) hash ^= GeoPointValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.EntityValue) hash ^= EntityValue.GetHashCode();
      if (valueTypeCase_ == ValueTypeOneofCase.ArrayValue) hash ^= ArrayValue.GetHashCode();
      if (Meaning != 0) hash ^= Meaning.GetHashCode();
      if (ExcludeFromIndexes != false) hash ^= ExcludeFromIndexes.GetHashCode();
      hash ^= (int) valueTypeCase_;
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    // Serializes set fields in ascending field-number order; only the active
    // oneof case is emitted. Field numbers >= 16 use two-byte tags.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (valueTypeCase_ == ValueTypeOneofCase.BooleanValue) {
        output.WriteRawTag(8);
        output.WriteBool(BooleanValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.IntegerValue) {
        output.WriteRawTag(16);
        output.WriteInt64(IntegerValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.DoubleValue) {
        output.WriteRawTag(25);
        output.WriteDouble(DoubleValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.KeyValue) {
        output.WriteRawTag(42);
        output.WriteMessage(KeyValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.EntityValue) {
        output.WriteRawTag(50);
        output.WriteMessage(EntityValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.GeoPointValue) {
        output.WriteRawTag(66);
        output.WriteMessage(GeoPointValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.ArrayValue) {
        output.WriteRawTag(74);
        output.WriteMessage(ArrayValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.TimestampValue) {
        output.WriteRawTag(82);
        output.WriteMessage(TimestampValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.NullValue) {
        output.WriteRawTag(88);
        output.WriteEnum((int) NullValue);
      }
      if (Meaning != 0) {
        output.WriteRawTag(112);
        output.WriteInt32(Meaning);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.StringValue) {
        output.WriteRawTag(138, 1);
        output.WriteString(StringValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.BlobValue) {
        output.WriteRawTag(146, 1);
        output.WriteBytes(BlobValue);
      }
      if (ExcludeFromIndexes != false) {
        output.WriteRawTag(152, 1);
        output.WriteBool(ExcludeFromIndexes);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      // Tag cost is 1 byte for field numbers < 16 and 2 bytes for >= 16.
      if (valueTypeCase_ == ValueTypeOneofCase.NullValue) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) NullValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.BooleanValue) {
        size += 1 + 1;
      }
      if (valueTypeCase_ == ValueTypeOneofCase.IntegerValue) {
        size += 1 + pb::CodedOutputStream.ComputeInt64Size(IntegerValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.DoubleValue) {
        size += 1 + 8;
      }
      if (valueTypeCase_ == ValueTypeOneofCase.TimestampValue) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(TimestampValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.KeyValue) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(KeyValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.StringValue) {
        size += 2 + pb::CodedOutputStream.ComputeStringSize(StringValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.BlobValue) {
        size += 2 + pb::CodedOutputStream.ComputeBytesSize(BlobValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.GeoPointValue) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(GeoPointValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.EntityValue) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(EntityValue);
      }
      if (valueTypeCase_ == ValueTypeOneofCase.ArrayValue) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(ArrayValue);
      }
      if (Meaning != 0) {
        size += 1 + pb::CodedOutputStream.ComputeInt32Size(Meaning);
      }
      if (ExcludeFromIndexes != false) {
        size += 2 + 1;
      }
      return size;
    }
    // Merge semantics: non-default singular fields overwrite; a set oneof case
    // in 'other' replaces this message's current case (message cases are
    // assigned by reference here, not cloned).
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(Value other) {
      if (other == null) {
        return;
      }
      if (other.Meaning != 0) {
        Meaning = other.Meaning;
      }
      if (other.ExcludeFromIndexes != false) {
        ExcludeFromIndexes = other.ExcludeFromIndexes;
      }
      switch (other.ValueTypeCase) {
        case ValueTypeOneofCase.NullValue:
          NullValue = other.NullValue;
          break;
        case ValueTypeOneofCase.BooleanValue:
          BooleanValue = other.BooleanValue;
          break;
        case ValueTypeOneofCase.IntegerValue:
          IntegerValue = other.IntegerValue;
          break;
        case ValueTypeOneofCase.DoubleValue:
          DoubleValue = other.DoubleValue;
          break;
        case ValueTypeOneofCase.TimestampValue:
          TimestampValue = other.TimestampValue;
          break;
        case ValueTypeOneofCase.KeyValue:
          KeyValue = other.KeyValue;
          break;
        case ValueTypeOneofCase.StringValue:
          StringValue = other.StringValue;
          break;
        case ValueTypeOneofCase.BlobValue:
          BlobValue = other.BlobValue;
          break;
        case ValueTypeOneofCase.GeoPointValue:
          GeoPointValue = other.GeoPointValue;
          break;
        case ValueTypeOneofCase.EntityValue:
          EntityValue = other.EntityValue;
          break;
        case ValueTypeOneofCase.ArrayValue:
          ArrayValue = other.ArrayValue;
          break;
      }
    }
    // Tag-dispatch parse loop. For message-typed oneof cases, a fresh builder
    // is first merged with any existing message of the same case so that
    // repeated occurrences on the wire merge rather than replace.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 8: {
            BooleanValue = input.ReadBool();
            break;
          }
          case 16: {
            IntegerValue = input.ReadInt64();
            break;
          }
          case 25: {
            DoubleValue = input.ReadDouble();
            break;
          }
          case 42: {
            global::Google.Cloud.Datastore.V1.Key subBuilder = new global::Google.Cloud.Datastore.V1.Key();
            if (valueTypeCase_ == ValueTypeOneofCase.KeyValue) {
              subBuilder.MergeFrom(KeyValue);
            }
            input.ReadMessage(subBuilder);
            KeyValue = subBuilder;
            break;
          }
          case 50: {
            global::Google.Cloud.Datastore.V1.Entity subBuilder = new global::Google.Cloud.Datastore.V1.Entity();
            if (valueTypeCase_ == ValueTypeOneofCase.EntityValue) {
              subBuilder.MergeFrom(EntityValue);
            }
            input.ReadMessage(subBuilder);
            EntityValue = subBuilder;
            break;
          }
          case 66: {
            global::Google.Type.LatLng subBuilder = new global::Google.Type.LatLng();
            if (valueTypeCase_ == ValueTypeOneofCase.GeoPointValue) {
              subBuilder.MergeFrom(GeoPointValue);
            }
            input.ReadMessage(subBuilder);
            GeoPointValue = subBuilder;
            break;
          }
          case 74: {
            global::Google.Cloud.Datastore.V1.ArrayValue subBuilder = new global::Google.Cloud.Datastore.V1.ArrayValue();
            if (valueTypeCase_ == ValueTypeOneofCase.ArrayValue) {
              subBuilder.MergeFrom(ArrayValue);
            }
            input.ReadMessage(subBuilder);
            ArrayValue = subBuilder;
            break;
          }
          case 82: {
            global::Google.Protobuf.WellKnownTypes.Timestamp subBuilder = new global::Google.Protobuf.WellKnownTypes.Timestamp();
            if (valueTypeCase_ == ValueTypeOneofCase.TimestampValue) {
              subBuilder.MergeFrom(TimestampValue);
            }
            input.ReadMessage(subBuilder);
            TimestampValue = subBuilder;
            break;
          }
          case 88: {
            // Enum case: the raw int is stored boxed and the case set directly.
            valueType_ = input.ReadEnum();
            valueTypeCase_ = ValueTypeOneofCase.NullValue;
            break;
          }
          case 112: {
            Meaning = input.ReadInt32();
            break;
          }
          case 138: {
            StringValue = input.ReadString();
            break;
          }
          case 146: {
            BlobValue = input.ReadBytes();
            break;
          }
          case 152: {
            ExcludeFromIndexes = input.ReadBool();
            break;
          }
        }
      }
    }
  }
/// <summary>
/// A Datastore data object.
///
/// An entity is limited to 1 megabyte when stored. That _roughly_
/// corresponds to a limit of 1 megabyte for the serialized form of this
/// message.
/// </summary>
  public sealed partial class Entity : pb::IMessage<Entity> {
    private static readonly pb::MessageParser<Entity> _parser = new pb::MessageParser<Entity>(() => new Entity());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<Entity> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Datastore.V1.EntityReflection.Descriptor.MessageTypes[4]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Entity() {
      OnConstruction();
    }
    partial void OnConstruction();
    // Copy constructor: deep-clones the key (when set) and the properties map.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Entity(Entity other) : this() {
      Key = other.key_ != null ? other.Key.Clone() : null;
      properties_ = other.properties_.Clone();
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Entity Clone() {
      return new Entity(this);
    }
    /// <summary>Field number for the "key" field.</summary>
    public const int KeyFieldNumber = 1;
    private global::Google.Cloud.Datastore.V1.Key key_;
    /// <summary>
    /// The entity's key.
    ///
    /// An entity must have a key, unless otherwise documented (for example,
    /// an entity in `Value.entity_value` may have no key).
    /// An entity's kind is its key path's last element's kind,
    /// or null if it has no key.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Datastore.V1.Key Key {
      get { return key_; }
      set {
        key_ = value;
      }
    }
    /// <summary>Field number for the "properties" field.</summary>
    public const int PropertiesFieldNumber = 3;
    // Map codec for field 3: string keys (entry tag 10), Value messages
    // (entry tag 18), map-entry wrapper tag 26.
    private static readonly pbc::MapField<string, global::Google.Cloud.Datastore.V1.Value>.Codec _map_properties_codec
        = new pbc::MapField<string, global::Google.Cloud.Datastore.V1.Value>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::Google.Cloud.Datastore.V1.Value.Parser), 26);
    private readonly pbc::MapField<string, global::Google.Cloud.Datastore.V1.Value> properties_ = new pbc::MapField<string, global::Google.Cloud.Datastore.V1.Value>();
    /// <summary>
    /// The entity's properties.
    /// The map's keys are property names.
    /// A property name matching regex `__.*__` is reserved.
    /// A reserved property name is forbidden in certain documented contexts.
    /// The name must not contain more than 500 characters.
    /// The name cannot be `""`.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::MapField<string, global::Google.Cloud.Datastore.V1.Value> Properties {
      get { return properties_; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as Entity);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(Entity other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (!object.Equals(Key, other.Key)) return false;
      if (!Properties.Equals(other.Properties)) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (key_ != null) hash ^= Key.GetHashCode();
      hash ^= Properties.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      // Field 1 (key): emitted only when set; tag 10.
      if (key_ != null) {
        output.WriteRawTag(10);
        output.WriteMessage(Key);
      }
      // Field 3 (properties): map entries serialized via the map codec.
      properties_.WriteTo(output, _map_properties_codec);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (key_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(Key);
      }
      size += properties_.CalculateSize(_map_properties_codec);
      return size;
    }
    // Merge semantics: the singular key message merges recursively (created on
    // demand); map entries from 'other' are added, overwriting duplicate keys.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(Entity other) {
      if (other == null) {
        return;
      }
      if (other.key_ != null) {
        if (key_ == null) {
          key_ = new global::Google.Cloud.Datastore.V1.Key();
        }
        Key.MergeFrom(other.Key);
      }
      properties_.Add(other.properties_);
    }
    // Tag-dispatch parse loop; unknown fields are skipped.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            if (key_ == null) {
              key_ = new global::Google.Cloud.Datastore.V1.Key();
            }
            input.ReadMessage(key_);
            break;
          }
          case 26: {
            properties_.AddEntriesFrom(input, _map_properties_codec);
            break;
          }
        }
      }
    }
  }
#endregion
}
#endregion Designer generated code
| |
// Copyright 2018 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security;
using Google.Api;
using Google.Api.Gax;
using Google.Api.Gax.Grpc;
using Google.Apis.Auth.OAuth2;
using Google.Cloud.Logging.Type;
using Google.Cloud.Logging.V2;
using Google.Protobuf;
using Google.Protobuf.WellKnownTypes;
using NLog;
using NLog.Targets;
using NLog.Common;
using System.Threading.Tasks;
using System.Threading;
using System.Reflection;
using Newtonsoft.Json.Linq;
namespace Google.Cloud.Logging.NLog
{
/// <summary>
/// Appends logging events to Google Stackdriver Logging.
/// </summary>
[Target("GoogleStackdriver")]
public partial class GoogleStackdriverTarget : TargetWithContext
{
        // OAuth scope required to write log entries.
        private static readonly string[] s_oAuthScopes = new string[] { "https://www.googleapis.com/auth/logging.write" };
        // Shared empty label dictionary passed to every WriteLogEntriesAsync call.
        private static readonly Dictionary<string, string> s_emptyLabels = new Dictionary<string, string>();
        private static readonly Dictionary<LogLevel, LogSeverity> s_levelMap = new Dictionary<LogLevel, LogSeverity>
        {
            // Map NLog levels to Stackdriver LogSeverity values.
            // Trace and Debug both map to Debug (no finer severity exists).
            { LogLevel.Fatal, LogSeverity.Emergency },
            { LogLevel.Error, LogSeverity.Error },
            { LogLevel.Warn, LogSeverity.Warning },
            { LogLevel.Info, LogSeverity.Info },
            { LogLevel.Debug, LogSeverity.Debug },
            { LogLevel.Trace, LogSeverity.Debug },
        };
        // Client/platform may be injected by the test constructor; otherwise
        // they are resolved during InitializeTarget.
        private LoggingServiceV2Client _client;
        private Platform _platform;
        private MonitoredResource _resource;
        private string _logName;
        private LogNameOneof _logNameToWrite;
        // Tail of the chain of pending write tasks; used for ordering and flushing.
        private Task _prevTask;
        private long _pendingTaskCount;   // number of begun-but-uncompleted write tasks
        private CancellationTokenSource _cancelTokenSource;
        private Func<object, Value> _jsonConvertFunction;
        // Delegates cached once in the constructor so WriteLogEntries does not
        // allocate new delegate instances on every write.
        private readonly Func<Task, object, Task> _writeLogEntriesBegin;
        private readonly Action<Task, object> _writeLogEntriesCompleted;
        /// <summary>
        /// Constructs a Google Cloud Logging target with no injected client or
        /// platform; both are resolved when the target is initialized.
        /// </summary>
        public GoogleStackdriverTarget() : this(null, null)
        {
        }
        // For testing only: allows injecting a fake logging client and platform.
        internal GoogleStackdriverTarget(LoggingServiceV2Client client, Platform platform)
        {
            OptimizeBufferReuse = true;
            ResourceLabels = new List<TargetPropertyWithContext>();
            _contextProperties = new List<TargetPropertyWithContext>();
            _client = client;
            _platform = platform;
            // Cache method-group delegates once to avoid per-write allocations.
            _writeLogEntriesBegin = WriteLogEntriesBegin;
            _writeLogEntriesCompleted = WriteLogEntriesCompleted;
        }
        /// <summary>
        /// Initializes the target: validates configuration, resolves the JSON
        /// converter, the log name/resource, and the logging client.
        /// </summary>
        protected override void InitializeTarget()
        {
            _cancelTokenSource = new CancellationTokenSource();
            // Platform may have been injected for tests; otherwise detect it.
            _platform = _platform ?? Platform.Instance();
            string logId = LogId?.Render(LogEventInfo.CreateNullEvent());
            GaxPreconditions.CheckNotNullOrEmpty(logId, nameof(LogId));
            if (SendJsonPayload)
            {
                // Resolve the payload converter in precedence order:
                // explicit delegate, named type/method, built-in default.
                if (JsonConverter != null)
                {
                    // Use the function provided directly.
                    GaxPreconditions.CheckState(
                        string.IsNullOrWhiteSpace(JsonConverterTypeName) && string.IsNullOrWhiteSpace(JsonConverterMethodName),
                        $"{nameof(JsonConverterTypeName)} and {nameof(JsonConverterMethodName)} must not be set along with {nameof(JsonConverter)}.");
                    _jsonConvertFunction = JsonConverter;
                }
                else if (!string.IsNullOrWhiteSpace(JsonConverterTypeName) || !string.IsNullOrWhiteSpace(JsonConverterMethodName))
                {
                    // Use the method referred to by type-name and method-name.
                    GaxPreconditions.CheckState(
                        !string.IsNullOrWhiteSpace(JsonConverterTypeName) && !string.IsNullOrWhiteSpace(JsonConverterMethodName),
                        $"Either both or neither of {nameof(JsonConverterTypeName)} and {nameof(JsonConverterMethodName)} must be specified.");
                    _jsonConvertFunction = BuildAndVerifyJsonConverter();
                }
                else
                {
                    // Use default json.net based converter.
                    _jsonConvertFunction = BuildProtoConverter();
                }
            }
            ActivateLogIdAndResource(logId);
            // Client may have been injected for tests; otherwise build the real one.
            _client = _client ?? BuildLoggingServiceClient();
            base.InitializeTarget();
        }
        // Builds the default object -> protobuf Value converter used when no
        // custom JsonConverter is configured. Primitive type codes are mapped
        // directly to Value; everything else (and boxed enums) is serialized
        // through Json.NET into a JToken and converted from there.
        private Func<object, Value> BuildProtoConverter()
        {
            // Create reusable JsonSerializer to reduce allocations
            var jsonSettings = new Newtonsoft.Json.JsonSerializerSettings()
            {
                ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Ignore
            };
            jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
            var jsonSerializer = Newtonsoft.Json.JsonSerializer.CreateDefault(jsonSettings);
            return o => {
                try
                {
                    switch (Convert.GetTypeCode(o))
                    {
                        case TypeCode.Empty:
                            return Value.ForNull();
                        case TypeCode.Boolean:
                            return Value.ForBool((bool)o);
                        case TypeCode.Decimal:
                        case TypeCode.Double:
                        case TypeCode.Single:
                            return Value.ForNumber(Convert.ToDouble(o));
                        case TypeCode.Byte:
                        case TypeCode.SByte:
                        case TypeCode.Int16:
                        case TypeCode.UInt16:
                        case TypeCode.Int32:
                        case TypeCode.UInt32:
                        case TypeCode.Int64:
                        case TypeCode.UInt64:
                            if (o is System.Enum)
                                break; // Let StringEnumConverter handle formatting
                            return Value.ForNumber(Convert.ToDouble(o));
                        case TypeCode.String:
                            return Value.ForString((string)o);
                        case TypeCode.Char:
                            return Value.ForString(o.ToString());
                    }
                    // Fallback path: full Json.NET serialization of the object graph.
                    return ProtoConverter.Convert(JToken.FromObject(o, jsonSerializer));
                }
                catch
                {
                    // Reset the JsonSerializer as it now can be in a bad state
                    jsonSerializer = Newtonsoft.Json.JsonSerializer.CreateDefault(jsonSettings);
                    throw;
                }
            };
        }
        // Resolves the converter named by JsonConverterTypeName/JsonConverterMethodName
        // via reflection and verifies its signature: one 'object' parameter and a
        // return type of Value or JToken. Instance methods require a parameterless
        // constructor on the declaring type.
        private Func<object, Value> BuildAndVerifyJsonConverter()
        {
            var type = System.Type.GetType(JsonConverterTypeName, throwOnError: false);
            GaxPreconditions.CheckState(type != null, "A type with the specified name cannot be found: '{0}'", JsonConverterTypeName);
            var methodInfo = type.GetTypeInfo()
                .GetMethods(BindingFlags.Public | BindingFlags.Static | BindingFlags.Instance)
                .Where(m => m.Name == JsonConverterMethodName && !m.IsAbstract && !m.IsGenericMethod &&
                            m.GetParameters().Length == 1 && m.GetParameters()[0].ParameterType == typeof(object) &&
                            (m.ReturnType == typeof(Value) || typeof(JToken).IsAssignableFrom(m.ReturnType)))
                .FirstOrDefault();
            GaxPreconditions.CheckState(methodInfo != null,
                $"A suitable public method named '{JsonConverterMethodName}' cannot be found in type '{JsonConverterTypeName}'. " +
                "The public method must have a single parameter of type 'object', and a return type of 'Value' or 'JToken'");
            object instance = null;
            if (!methodInfo.IsStatic)
            {
                try
                {
                    instance = Activator.CreateInstance(type);
                }
                catch (Exception e)
                {
                    // Activator.CreateInstance can throw many different exceptions, so catch them all.
                    throw new InvalidOperationException(
                        $"Type '{JsonConverterTypeName}' must have a parameterless constructor so it can be instantiated.", e);
                }
            }
            if (methodInfo.ReturnType == typeof(Value))
            {
                // Direct Value-returning converter: bind it as-is.
                return (Func<object, Value>)methodInfo.CreateDelegate(typeof(Func<object, Value>), instance);
            }
            else
            {
                // JToken-returning converter: wrap it with the JToken -> Value conversion.
                var fn = (Func<object, JToken>)methodInfo.CreateDelegate(typeof(Func<object, JToken>), instance);
                return o => ProtoConverter.Convert(fn(o));
            }
        }
        /// <summary>
        /// Closes / Disposes the Target
        /// </summary>
        protected override void CloseTarget()
        {
            // Cancel in-flight writes and drop the pending-task chain.
            // NOTE(review): _cancelTokenSource is cancelled but never disposed;
            // presumably intentional because already-scheduled tasks still hold
            // its token — confirm before adding a Dispose() here.
            _cancelTokenSource.Cancel();
            _prevTask = null;
            _pendingTaskCount = 0;
            base.CloseTarget();
        }
/// <inheritdoc/>
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
if (_prevTask != null)
{
// Continue after last write is complete.
_prevTask.ContinueWith(_ => asyncContinuation(null), TaskScheduler.Default);
}
else
{
// Nothing to flush.
asyncContinuation(null);
}
}
        // Builds the real logging client: uses explicitly-configured credentials
        // when present, otherwise the client library's default credential discovery.
        private LoggingServiceV2Client BuildLoggingServiceClient()
        {
            GoogleCredential credential = GetCredentialFromConfiguration();
            if (credential == null)
            {
                return LoggingServiceV2Client.Create();
            }
            // NOTE(review): GetCredentialFromConfiguration already applies scoping,
            // so this check is expected to be a no-op — confirm before removing.
            if (credential.IsCreateScopedRequired)
            {
                credential = credential.CreateScoped(s_oAuthScopes);
            }
            // Build a dedicated channel to the default endpoint with these credentials.
            Grpc.Core.Channel channel = new Grpc.Core.Channel(
                LoggingServiceV2Client.DefaultEndpoint.Host,
                LoggingServiceV2Client.DefaultEndpoint.Port,
                Grpc.Auth.GoogleGrpcCredentials.ToChannelCredentials(credential)
            );
            return LoggingServiceV2Client.Create(channel);
        }
private GoogleCredential GetCredentialFromConfiguration()
{
var nullLogEvent = LogEventInfo.CreateNullEvent();
var credentialFile = CredentialFile?.Render(nullLogEvent);
var credentialJson = CredentialJson?.Render(nullLogEvent);
GaxPreconditions.CheckState(string.IsNullOrWhiteSpace(credentialFile) || string.IsNullOrWhiteSpace(credentialJson),
$"{nameof(CredentialFile)} and {nameof(CredentialJson)} must not both be set.");
var credential =
!string.IsNullOrWhiteSpace(credentialFile) ? GoogleCredential.FromFile(credentialFile) :
!string.IsNullOrWhiteSpace(credentialJson) ? GoogleCredential.FromJson(credentialJson) :
null;
if (credential == null)
{
return null;
}
if (credential.IsCreateScopedRequired)
{
credential = credential.CreateScoped(s_oAuthScopes);
}
return credential;
}
        // Resolves the monitored resource and the fully-qualified log name for
        // the given log id, preferring platform auto-detection when enabled.
        private void ActivateLogIdAndResource(string logId)
        {
            string projectId = null;
            MonitoredResource resource = null;
            if (!DisableResourceTypeDetection)
            {
                resource = MonitoredResourceBuilder.FromPlatform(_platform);
                resource.Labels.TryGetValue("project_id", out projectId);
            }
            if (projectId == null)
            {
                // Either platform detection is disabled, or it detected an unknown platform.
                // So use the manually configured projectId and override the resource.
                projectId = GaxPreconditions.CheckNotNull(ProjectId?.Render(LogEventInfo.CreateNullEvent()), nameof(ProjectId));
                if (ResourceType == null)
                {
                    resource = new MonitoredResource { Type = "global", Labels = { { "project_id", projectId } } };
                }
                else
                {
                    // Custom resource type: labels come from the configured layouts.
                    resource = new MonitoredResource { Type = ResourceType,
                        Labels = { ResourceLabels.ToDictionary(x => x.Name, x => x.Layout.Render(LogEventInfo.CreateNullEvent())) } };
                }
            }
            _resource = resource;
            var logName = new LogName(projectId, logId);
            _logName = logName.ToString();
            _logNameToWrite = LogNameOneof.From(logName);
        }
/// <summary>
/// Writes async log event to the log target.
/// </summary>
/// /// <param name="logEvent">Logging event to be written out.</param>
protected override void Write(AsyncLogEventInfo logEvent)
{
LogEntry logEntry = null;
try
{
logEntry = BuildLogEntry(logEvent.LogEvent);
}
catch (Exception ex)
{
InternalLogger.Error(ex, "GoogleStackdriver(Name={0}): Failed to create LogEntry, marked as failed", Name);
logEvent.Continuation(ex);
}
if (logEntry != null)
{
WriteLogEntries(new[] { logEntry }, logEvent.Continuation);
}
}
/// <summary>
/// Writes a list of logging events to the log target.
/// </summary>
protected override void Write(IList<AsyncLogEventInfo> logEvents)
{
List<LogEntry> logEntries = new List<LogEntry>(logEvents.Count);
List<AsyncContinuation> continuationList = new List<AsyncContinuation>(logEvents.Count);
for (int i = 0; i < logEvents.Count; ++i)
{
var logEvent = logEvents[i];
try
{
var logEntry = BuildLogEntry(logEvent.LogEvent);
logEntries.Add(logEntry);
continuationList.Add(logEvent.Continuation);
}
catch (Exception ex)
{
InternalLogger.Error(ex, "GoogleStackdriver(Name={0}): Failed to create LogEntry, marked as failed", Name);
logEvent.Continuation(ex);
}
}
if (logEntries.Count > 0)
{
WriteLogEntries(logEntries, continuationList);
}
}
        // Queues a batch of entries onto the chain of pending write tasks.
        // When the number of pending tasks exceeds TaskPendingLimit, throttles by
        // waiting (in 100 ms slices, up to TimeoutSeconds) for the chain to drain;
        // if it never drains, abandons the old chain and starts a new one.
        // 'continuationList' is either a single AsyncContinuation or a
        // List<AsyncContinuation> (see WriteLogEntriesCompleted).
        private void WriteLogEntries(IList<LogEntry> logEntries, object continuationList)
        {
            bool withinTaskLimit = Interlocked.Increment(ref _pendingTaskCount) <= TaskPendingLimit;
            try
            {
                if (!withinTaskLimit)
                {
                    // The task queue has become too long. We will throttle, and wait for the task-queue to become shorter
                    InternalLogger.Debug("GoogleStackdriver(Name={0}): Throttle started because {1} tasks are pending", Name, _pendingTaskCount);
                    // NOTE(review): the -100 start yields one extra 100 ms slice
                    // beyond TimeoutSeconds — presumably intentional slack; confirm.
                    for (int i = -100; i < TimeoutSeconds * 1000; i += 100)
                    {
                        withinTaskLimit = _prevTask?.Wait(100, _cancelTokenSource.Token) ?? true; // Throttle
                        if (withinTaskLimit)
                        {
                            _pendingTaskCount = 1; // All pending tasks has completed
                            break;
                        }
                        if (Interlocked.Read(ref _pendingTaskCount) < TaskPendingLimit)
                        {
                            withinTaskLimit = true; // Some pending tasks has completed
                            break;
                        }
                    }
                    if (!withinTaskLimit)
                    {
                        // The tasks queue is not moving. We start a new task queue and ignores the old one
                        InternalLogger.Info("GoogleStackdriver(Name={0}): Throttle timeout but {1} tasks are still pending", Name, _pendingTaskCount);
                    }
                }
                if (withinTaskLimit && _prevTask != null)
                {
                    // Chain after the previous write so entries stay ordered.
                    _prevTask = _prevTask.ContinueWith(_writeLogEntriesBegin, logEntries, _cancelTokenSource.Token);
                }
                else
                {
                    // No predecessor (or chain abandoned): start writing immediately.
                    _prevTask = WriteLogEntriesBegin(null, logEntries);
                }
                _prevTask = _prevTask.ContinueWith(_writeLogEntriesCompleted, continuationList);
            }
            catch (Exception ex)
            {
                // Failed to schedule: undo the pending count and surface the error.
                Interlocked.Decrement(ref _pendingTaskCount);
                InternalLogger.Error(ex, "GoogleStackdriver(Name={0}): Failed to begin writing {1} LogEntries", Name, logEntries.Count);
                throw;
            }
        }
        // Performs the actual RPC. 'state' is the IList<LogEntry> captured by
        // WriteLogEntries; the Task parameter is the (ignored) predecessor in
        // the continuation chain.
        private async Task WriteLogEntriesBegin(Task _, object state)
        {
            var logEntries = state as IList<LogEntry>;
            await _client.WriteLogEntriesAsync(_logNameToWrite, _resource, s_emptyLabels, logEntries, _cancelTokenSource.Token).ConfigureAwait(false);
        }
        // Continuation run after each write task: decrements the pending count and
        // completes the NLog continuation(s) with the write's exception (or null).
        // 'state' is either a single AsyncContinuation (single-event write) or a
        // List<AsyncContinuation> (batched write), matching WriteLogEntries' caller.
        private void WriteLogEntriesCompleted(Task prevTask, object state)
        {
            Interlocked.Decrement(ref _pendingTaskCount);
            var singleContinuation = state as AsyncContinuation;
            if (singleContinuation != null)
            {
                if (prevTask.Exception != null)
                {
                    InternalLogger.Error(prevTask.Exception, "GoogleStackdriver(Name={0}): Failed to write LogEntry", Name);
                }
                singleContinuation(prevTask.Exception);
            }
            else
            {
                var continuationList = state as List<AsyncContinuation>;
                if (continuationList != null)
                {
                    if (prevTask.Exception != null)
                    {
                        InternalLogger.Error(prevTask.Exception, "GoogleStackdriver(Name={0}): Failed to write {1} LogEntries", Name, continuationList.Count);
                    }
                    // One continuation per original event; all get the same outcome.
                    foreach (var continuation in continuationList)
                    {
                        continuation(prevTask.Exception);
                    }
                }
            }
        }
        // Converts one NLog event into a Stackdriver LogEntry: severity/timestamp/
        // log name/resource, then either a JSON payload (message + per-property
        // struct) or a text payload with properties as labels, plus git revision
        // and caller source-location metadata when available.
        private LogEntry BuildLogEntry(LogEventInfo loggingEvent)
        {
            var logEntry = new LogEntry
            {
                Severity = s_levelMap[loggingEvent.Level],
                Timestamp = ConvertToTimestamp(loggingEvent.TimeStamp),
                LogName = _logName,
                Resource = _resource,
            };
            if (SendJsonPayload)
            {
                // JSON payload: rendered message plus a nested "properties" struct.
                var jsonStruct = new Struct();
                jsonStruct.Fields.Add("message", Value.ForString(RenderLogEvent(Layout, loggingEvent)));
                var propertiesStruct = new Struct();
                jsonStruct.Fields.Add("properties", Value.ForStruct(propertiesStruct));
                foreach (var combinedProperty in GetAllProperties(loggingEvent).Where(x => !string.IsNullOrEmpty(x.Key)))
                {
                    Value jsonValue;
                    try
                    {
                        jsonValue = _jsonConvertFunction(combinedProperty.Value);
                    }
                    catch (Exception ex)
                    {
                        // A failing converter must not lose the event; substitute
                        // a marker value and keep going.
                        InternalLogger.Warn(ex,
                            "GoogleStackdriver(Name={0}): Exception at BuildLogEntry with Key={1}", Name, combinedProperty.Key);
                        jsonValue = Value.ForString($"<Exception: '{ex.Message}'>");
                    }
                    propertiesStruct.Fields.Add(combinedProperty.Key, jsonValue);
                }
                logEntry.JsonPayload = jsonStruct;
            }
            else
            {
                // Text payload: properties become string labels on the entry.
                logEntry.TextPayload = RenderLogEvent(Layout, loggingEvent);
                foreach (var combinedProperty in GetAllProperties(loggingEvent).Where(x => !string.IsNullOrEmpty(x.Key)))
                {
                    try
                    {
                        logEntry.Labels[combinedProperty.Key] = combinedProperty.Value?.ToString() ?? "null";
                    }
                    catch (Exception ex)
                    {
                        // A throwing ToString() falls back to "null" rather than
                        // failing the whole entry.
                        InternalLogger.Warn(ex, "GoogleStackdriver(Name={0}): Exception at BuildLogEntry with Key={1}", Name, combinedProperty.Key);
                        logEntry.Labels[combinedProperty.Key] = "null";
                    }
                }
            }
            TryAddGitRevisionId(logEntry.Labels);
            // Attach caller info when NLog captured it.
            var callsiteMethod = loggingEvent.CallerMemberName;
            if (!string.IsNullOrEmpty(callsiteMethod))
            {
                logEntry.SourceLocation = new LogEntrySourceLocation()
                {
                    Function = string.Concat(loggingEvent.CallerClassName, ".", callsiteMethod),
                    File = loggingEvent.CallerFilePath,
                    Line = loggingEvent.CallerLineNumber,
                };
            }
            return logEntry;
        }
private static Timestamp ConvertToTimestamp(DateTime dt)
{
switch (dt.Kind)
{
case DateTimeKind.Local:
dt = dt.ToUniversalTime();
break;
case DateTimeKind.Unspecified:
dt = DateTime.SpecifyKind(dt, DateTimeKind.Utc);
break;
}
return Timestamp.FromDateTime(dt);
}
        // Best-effort: adds the git revision id label from the app's
        // source_context.json, swallowing the documented read/parse failures.
        private void TryAddGitRevisionId(Protobuf.Collections.MapField<string, string> labels)
        {
            try
            {
                var gitId = DevTools.Source.V1.SourceContext.AppSourceContext?.Git?.RevisionId;
                if (!String.IsNullOrWhiteSpace(gitId))
                {
                    labels.Add(DevTools.Source.V1.SourceContext.GitRevisionIdLogLabel, gitId);
                }
            }
            catch (Exception ex) when (
                ex is SecurityException
                || ex is InvalidProtocolBufferException
                || ex is InvalidJsonException
                || ex is UnauthorizedAccessException)
            {
                // This is best-effort only, exceptions from reading/parsing the source_context.json are ignored.
                InternalLogger.Warn(ex, "GoogleStackdriver(Name={0}): Exception at TryAddGitRevisionId", Name);
            }
        }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using System.Xml;
using Microsoft.Test.ModuleCore;
namespace CoreXml.Test.XLinq
{
public partial class FunctionalTests : TestModule
{
public partial class XNodeReaderTests : XLinqTestCase
{
        // Exercises attribute access on the XNodeReader: the indexers this[i],
        // this[name], this[name,ns] and this[qname] must agree with GetAttribute,
        // and with the Value observed after the matching MoveToAttribute call.
        public partial class TCAttributeAccess : BridgeHelpers
        {
            //[Variation("Attribute Access test using ordinal (Ascending Order)", Priority = 0)]
            // Walks ACT0's attributes forward (skipping ordinal 0), then re-checks
            // the captured values against ACT1's attributes.
            public void TestAttributeAccess1()
            {
                XmlReader DataReader = GetReader();
                string[] astr = new string[10];
                string n;
                string qname;
                PositionOnElement(DataReader, "ACT0");
                int start = 1;
                int end = DataReader.AttributeCount;
                for (int i = start; i < end; i++)
                {
                    astr[i - 1] = DataReader[i];
                    n = strAttr + (i - 1);
                    qname = "foo:" + n;
                    TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Compare this with GetAttribute");
                    DataReader.MoveToAttribute(i);
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Compare MoveToAttribute(i) with GetAttribute");
                    TestLog.Compare(DataReader[n, strNamespace], DataReader.GetAttribute(n, strNamespace), "Compare this(name,strNamespace) with GetAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader[i], DataReader[n, strNamespace], "Compare this(i) with this(name,strNamespace)");
                    TestLog.Compare(DataReader.MoveToAttribute(n, strNamespace), true, "MoveToAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(n, strNamespace), "Compare MoveToAttribute(name,strNamespace) with GetAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader[i], DataReader[qname], "Compare this(i) with this(qname)");
                    TestLog.Compare(DataReader.MoveToAttribute(qname), true, "MoveToAttribute(qname)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(qname), "Compare MoveToAttribute(qname) with GetAttribute(qname)");
                }
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i++)
                {
                    TestLog.Compare(astr[i], DataReader.GetAttribute(i), "Compare value with GetAttribute");
                    TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Compare this with GetAttribute");
                    DataReader.MoveToAttribute(i);
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Compare MoveToAttribute(i) with GetAttribute");
                    n = strAttr + i;
                    TestLog.Compare(DataReader[n], DataReader.GetAttribute(n), "Compare this(name) with GetAttribute(name)");
                    TestLog.Compare(DataReader[i], DataReader[n], "Compare this(i) with this(name)");
                    TestLog.Compare(DataReader.MoveToAttribute(n), true, "MoveToAttribute(name)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(n), "Compare MoveToAttribute(name) with GetAttribute(name)");
                }
            }
            //[Variation("Attribute Access test using ordinal (Descending Order)")]
            // Same checks as TestAttributeAccess1 but iterating backwards.
            public void TestAttributeAccess2()
            {
                XmlReader DataReader = GetReader();
                string[] astr = new string[10];
                string n;
                string qname;
                PositionOnElement(DataReader, "ACT0");
                int start = 1;
                int end = DataReader.AttributeCount;
                for (int i = end - 1; i >= start; i--)
                {
                    astr[i - 1] = DataReader[i];
                    n = strAttr + (i - 1);
                    qname = "foo:" + n;
                    TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Compare this with GetAttribute");
                    DataReader.MoveToAttribute(i);
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Compare MoveToAttribute(i) with GetAttribute");
                    TestLog.Compare(DataReader[n, strNamespace], DataReader.GetAttribute(n, strNamespace), "Compare this(name,strNamespace) with GetAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader[i], DataReader[n, strNamespace], "Compare this(i) with this(name,strNamespace)");
                    TestLog.Compare(DataReader.MoveToAttribute(n, strNamespace), true, "MoveToAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(n, strNamespace), "Compare MoveToAttribute(name,strNamespace) with GetAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader[qname], DataReader.GetAttribute(qname), "Compare this(qname) with GetAttribute(qname)");
                    TestLog.Compare(DataReader[i], DataReader[qname], "Compare this(i) with this(qname)");
                    TestLog.Compare(DataReader.MoveToAttribute(qname), true, "MoveToAttribute(qname)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(qname), "Compare MoveToAttribute(qname) with GetAttribute(qname)");
                }
                PositionOnElement(DataReader, "ACT1");
                for (int i = (DataReader.AttributeCount - 1); i > 0; i--)
                {
                    n = strAttr + i;
                    TestLog.Compare(astr[i], DataReader.GetAttribute(i), "Compare value with GetAttribute");
                    TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Compare this with GetAttribute");
                    DataReader.MoveToAttribute(i);
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Compare MoveToAttribute(i) with GetAttribute");
                    TestLog.Compare(DataReader[n], DataReader.GetAttribute(n), "Compare this(name) with GetAttribute(name)");
                    TestLog.Compare(DataReader[i], DataReader[n], "Compare this(i) with this(name)");
                    TestLog.Compare(DataReader.MoveToAttribute(n), true, "MoveToAttribute(name)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(n), "Compare MoveToAttribute(name) with GetAttribute(name)");
                }
            }
            //[Variation("Attribute Access test using ordinal (Odd number)", Priority = 0)]
            // Stride-2 walk starting at ordinal 1 (odd ordinals on ACT0).
            public void TestAttributeAccess3()
            {
                XmlReader DataReader = GetReader();
                string[] astr = new string[10];
                string n;
                string qname;
                PositionOnElement(DataReader, "ACT0");
                int start = 1;
                int end = DataReader.AttributeCount;
                for (int i = start; i < end; i += 2)
                {
                    astr[i - 1] = DataReader[i];
                    n = strAttr + (i - 1);
                    qname = "foo:" + n;
                    TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Compare this with GetAttribute");
                    DataReader.MoveToAttribute(i);
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Compare MoveToAttribute(i) with GetAttribute");
                    TestLog.Compare(DataReader[n, strNamespace], DataReader.GetAttribute(n, strNamespace), "Compare this(name,strNamespace) with GetAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader[i], DataReader[n, strNamespace], "Compare this(i) with this(name,strNamespace)");
                    TestLog.Compare(DataReader.MoveToAttribute(n, strNamespace), true, "MoveToAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(n, strNamespace), "Compare MoveToAttribute(name,strNamespace) with GetAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader[qname], DataReader.GetAttribute(qname), "Compare this(qname) with GetAttribute(qname)");
                    TestLog.Compare(DataReader[i], DataReader[qname], "Compare this(i) with this(qname)");
                    TestLog.Compare(DataReader.MoveToAttribute(qname), true, "MoveToAttribute(qname)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(qname), "Compare MoveToAttribute(qname) with GetAttribute(qname)");
                }
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i += 2)
                {
                    TestLog.Compare(astr[i], DataReader.GetAttribute(i), "Compare value with GetAttribute");
                    TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Compare this with GetAttribute");
                    DataReader.MoveToAttribute(i);
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Compare MoveToAttribute(i) with GetAttribute");
                    n = strAttr + i;
                    TestLog.Compare(DataReader[n], DataReader.GetAttribute(n), "Compare this(name) with GetAttribute(name)");
                    TestLog.Compare(DataReader[i], DataReader[n], "Compare this(i) with this(name)");
                    TestLog.Compare(DataReader.MoveToAttribute(n), true, "MoveToAttribute(name)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(n), "Compare MoveToAttribute(name) with GetAttribute(name)");
                }
            }
            //[Variation("Attribute Access test using ordinal (Even number)")]
            // NOTE(review): the variation name says "Even number" but the loop
            // stride is 3, not 2 — confirm whether the name or the stride is intended.
            public void TestAttributeAccess4()
            {
                XmlReader DataReader = GetReader();
                string[] astr = new string[10];
                string n;
                string qname;
                PositionOnElement(DataReader, "ACT0");
                int start = 1;
                int end = DataReader.AttributeCount;
                for (int i = start; i < end; i += 3)
                {
                    astr[i - 1] = DataReader[i];
                    n = strAttr + (i - 1);
                    qname = "foo:" + n;
                    TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Compare this with GetAttribute");
                    DataReader.MoveToAttribute(i);
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Compare MoveToAttribute(i) with GetAttribute");
                    TestLog.Compare(DataReader[n, strNamespace], DataReader.GetAttribute(n, strNamespace), "Compare this(name,strNamespace) with GetAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader[i], DataReader[n, strNamespace], "Compare this(i) with this(name,strNamespace)");
                    TestLog.Compare(DataReader.MoveToAttribute(n, strNamespace), true, "MoveToAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(n, strNamespace), "Compare MoveToAttribute(name,strNamespace) with GetAttribute(name,strNamespace)");
                    TestLog.Compare(DataReader[qname], DataReader.GetAttribute(qname), "Compare this(qname) with GetAttribute(qname)");
                    TestLog.Compare(DataReader[i], DataReader[qname], "Compare this(i) with this(qname)");
                    TestLog.Compare(DataReader.MoveToAttribute(qname), true, "MoveToAttribute(qname)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(qname), "Compare MoveToAttribute(qname) with GetAttribute(qname)");
                }
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i += 3)
                {
                    TestLog.Compare(astr[i], DataReader.GetAttribute(i), "Compare value with GetAttribute");
                    TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Compare this with GetAttribute");
                    DataReader.MoveToAttribute(i);
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Compare MoveToAttribute(i) with GetAttribute");
                    n = strAttr + i;
                    TestLog.Compare(DataReader[n], DataReader.GetAttribute(n), "Compare this(name) with GetAttribute(name)");
                    TestLog.Compare(DataReader[i], DataReader[n], "Compare this(i) with this(name)");
                    TestLog.Compare(DataReader.MoveToAttribute(n), true, "MoveToAttribute(name)");
                    TestLog.Compare(DataReader.Value, DataReader.GetAttribute(n), "Compare MoveToAttribute(name) with GetAttribute(name)");
                }
            }
            //[Variation("Attribute Access with namespace=null")]
            // A null namespace lookup must return null and not move the reader.
            public void TestAttributeAccess5()
            {
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT1");
                TestLog.Compare(DataReader[strAttr + 1, null], null, "Item");
                TestLog.Compare(DataReader.Name, "ACT1", "Reader changed position");
            }
        }
public partial class TCThisName : BridgeHelpers
{
            //[Variation("This[Name] Verify with GetAttribute(Name)", Priority = 0)]
            // this[name] must equal GetAttribute(name) for every attribute on ACT1.
            public void ThisWithName1()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + i;
                    TestLog.Compare(DataReader[strName], DataReader.GetAttribute(strName), "Ordinal (" + i + "): Compare GetAttribute(strName) and this[strName]");
                }
            }
            //[Variation("This[Name, null] Verify with GetAttribute(Name)")]
            // A null namespace never matches, so this[name, null] must be null.
            public void ThisWithName2()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + i;
                    TestLog.Compare(DataReader[strName, null], null, "Ordinal (" + i + "): Should have returned null");
                }
            }
            //[Variation("This[Name] Verify with GetAttribute(Name,null)")]
            // GetAttribute(name, null) must also be null for every attribute.
            public void ThisWithName3()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + i;
                    TestLog.Compare(DataReader.GetAttribute(strName, null), null, "Ordinal (" + i + "): Should have returned null");
                }
            }
            //[Variation("This[Name, NamespaceURI] Verify with GetAttribute(Name, NamespaceURI)", Priority = 0)]
            // this[name, ns] must equal GetAttribute(name, ns) on the namespaced element.
            public void ThisWithName4()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT0");
                for (int i = 1; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + i;
                    TestLog.Compare(DataReader[strName, strNamespace], DataReader.GetAttribute(strName, strNamespace), "Ordinal (" + i + "): Compare GetAttribute(strName,strNamespace) and this[strName,strNamespace]");
                }
            }
            //[Variation("This[Name, null] Verify not the same as GetAttribute(Name, NamespaceURI)")]
            // Namespaced attributes must not be reachable with a null namespace.
            public void ThisWithName5()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT0");
                for (int i = 1; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + (i - 1);
                    TestLog.Compare(DataReader[strName, null], null, "Ordinal (" + i + "): Should have returned null");
                }
            }
            //[Variation("This[Name, NamespaceURI] Verify not the same as GetAttribute(Name, null)")]
            // GetAttribute(name, null) and this[name, ns] must never agree.
            // NOTE(review): the reader is never positioned on an element here, so
            // AttributeCount is likely 0 and the loop body may never run — confirm.
            public void ThisWithName6()
            {
                string strName;
                XmlReader DataReader = GetReader();
                for (int i = 1; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + (i - 1);
                    if (DataReader.GetAttribute(strName, null) == DataReader[strName, strNamespace])
                        throw new TestException(TestResult.Failed, Variation.Desc);
                }
            }
            //[Variation("This[Name] Verify with MoveToAttribute(Name)", Priority = 0)]
            // After MoveToAttribute(name), Value must equal this[name].
            public void ThisWithName7()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + i;
                    DataReader.MoveToAttribute(strName);
                    TestLog.Compare(DataReader.Value, DataReader[strName], "Ordinal (" + i + "): Compare GetAttribute(strName) and this[strName]");
                }
            }
            //[Variation("This[Name, null] Verify with MoveToAttribute(Name)")]
            // Even when positioned on the attribute, this[name, null] stays null.
            public void ThisWithName8()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + i;
                    DataReader.MoveToAttribute(strName);
                    TestLog.Compare(DataReader[strName, null], null, "Ordinal (" + i + "): Should have returned null");
                }
            }
            //[Variation("This[Name] Verify with MoveToAttribute(Name,null)")]
            // MoveToAttribute(name, null) must fail (no attribute has a null namespace).
            public void ThisWithName9()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT1");
                for (int i = 0; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + i;
                    TestLog.Compare(DataReader.MoveToAttribute(strName, null), false, "Ordinal (" + i + "): Reader should not have moved");
                }
            }
            //[Variation("This[Name, NamespaceURI] Verify not the same as MoveToAttribute(Name, null)", Priority = 0)]
            // Namespaced attributes on ACT0 are not reachable via a null namespace.
            public void ThisWithName10()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT0");
                for (int i = 1; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + (i - 1);
                    TestLog.Compare(DataReader.MoveToAttribute(strName, null), false, "Ordinal (" + i + "): Reader should not have moved");
                }
            }
//[Variation("This[Name, null] Verify not the same as MoveToAttribute(Name, NamespaceURI)")]
public void ThisWithName11()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
DataReader.MoveToAttribute(strName, strNamespace);
TestLog.Compare(DataReader[strName, null], null, "Ordinal (" + i + "): Should have retuned null");
}
}
            //[Variation("This[Name, namespace] Verify not the same as MoveToAttribute(Name, namespace)")]
            // After MoveToAttribute(name, ns), Value must equal this[name, ns].
            public void ThisWithName12()
            {
                string strName;
                XmlReader DataReader = GetReader();
                PositionOnElement(DataReader, "ACT0");
                for (int i = 1; i < DataReader.AttributeCount; i++)
                {
                    strName = strAttr + (i - 1);
                    DataReader.MoveToAttribute(strName, strNamespace);
                    TestLog.Compare(DataReader.Value, DataReader[strName, strNamespace], "Ordinal (" + i + "): Compare GetAttribute(strName) and this[strName]");
                }
            }
//[Variation("This(String.Empty)")]
// Looking up an attribute with an empty name must return null.
public void ThisWithName13()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "EMPTY1");
TestLog.Compare(DataReader[String.Empty], null, "Should have returned null");
}
//[Variation("This[String.Empty,String.Empty]")]
// Looking up an attribute with an empty name and empty namespace must return null.
public void ThisWithName14()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "EMPTY1");
TestLog.Compare(DataReader[String.Empty, String.Empty], null, "Should have returned null");
}
//[Variation("This[QName] Verify with GetAttribute(Name, NamespaceURI)", Priority = 0)]
// The prefix-qualified indexer this["foo:name"] must agree with both
// GetAttribute(name, ns) and GetAttribute("foo:name").
public void ThisWithName15()
{
string strName;
string qname;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
// Loop starts at 1 — presumably ordinal 0 on ACT0 is not a "foo:"-prefixed
// attribute (e.g. a namespace declaration); confirm against the test data.
for (int i = 1; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
qname = "foo:" + strName;
TestLog.Compare(DataReader[qname], DataReader.GetAttribute(strName, strNamespace), "Ordinal (" + i + "): Compare GetAttribute(strName,strNamespace) and this[qname]");
TestLog.Compare(DataReader[qname], DataReader.GetAttribute(qname), "Ordinal (" + i + "): Compare GetAttribute(qname) and this[qname]");
}
}
//[Variation("This[QName] invalid Qname")]
// Builds a qualified name whose prefix ("foo1") is not declared on the
// element and verifies every lookup path treats it as missing.
public void ThisWithName16()
{
    XmlReader reader = GetReader();
    PositionOnElement(reader, "ACT0");
    string attrName = strAttr + 1;
    string invalidQName = "foo1:" + attrName;
    TestLog.Compare(reader.MoveToAttribute(invalidQName), false, "MoveToAttribute(invalid qname)");
    TestLog.Compare(reader[invalidQName], null, "Compare this[invalid qname] with null");
    TestLog.Compare(reader.GetAttribute(invalidQName), null, "Compare GetAttribute(invalid qname) with null");
}
}
// Tests for XmlReader.MoveToAttribute with degenerate (empty) name arguments.
public partial class TCMoveToAttributeReader : BridgeHelpers
{
//[Variation("MoveToAttribute(String.Empty)")]
// MoveToAttribute("") must fail and leave the reader's Value empty.
public void MoveToAttributeWithName1()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "EMPTY1");
TestLog.Compare(DataReader.MoveToAttribute(String.Empty), false, "Should have returned false");
// Reader stays on the element, whose Value is the empty string.
TestLog.Compare(DataReader.Value, String.Empty, "Compare MoveToAttribute with String.Empty");
}
//[Variation("MoveToAttribute(String.Empty,String.Empty)")]
// MoveToAttribute("", "") must fail and leave the reader's Value empty.
public void MoveToAttributeWithName2()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "EMPTY1");
TestLog.Compare(DataReader.MoveToAttribute(String.Empty, String.Empty), false, "Compare the call to MoveToAttribute");
TestLog.Compare(DataReader.Value, String.Empty, "Compare MoveToAttribute(strName)");
}
}
//[TestCase(Name = "GetAttributeOrdinal", Desc = "GetAttributeOrdinal")]
// Tests for XmlReader.GetAttribute(int): agreement with the ordinal indexer
// and with MoveToAttribute(int), plus out-of-range ordinal behavior.
public partial class TCGetAttributeOrdinal : BridgeHelpers
{
    //[Variation("GetAttribute(i) Verify with This[i] - Double Quote", Priority = 0)]
    // Every attribute fetched by ordinal must match the ordinal indexer.
    public void GetAttributeWithGetAttrDoubleQ()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT0");
        for (int i = 0; i < DataReader.AttributeCount; i++)
        {
            TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Ordinal (" + i + "): Compare GetAttribute(i) and this[i]");
        }
    }
    //[Variation("GetAttribute[i] Verify with This[i] - Single Quote")]
    // Same check against the single-quoted-attribute element.
    public void OrdinalWithGetAttrSingleQ()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        for (int i = 0; i < DataReader.AttributeCount; i++)
        {
            TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Ordinal (" + i + "): Compare GetAttribute(i) and this[i]");
        }
    }
    //[Variation("GetAttribute(i) Verify with MoveToAttribute[i] - Double Quote", Priority = 0)]
    // GetAttribute(i) must agree with Value after MoveToAttribute(i), both
    // before and after the move.
    public void GetAttributeWithMoveAttrDoubleQ()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT0");
        for (int i = 0; i < DataReader.AttributeCount; i++)
        {
            string str = DataReader.GetAttribute(i);
            DataReader.MoveToAttribute(i);
            TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Ordinal (" + i + "): Compare MoveToAttribute[i] and this[i]");
            TestLog.Compare(str, DataReader.Value, "Ordinal (" + i + "): Compare MoveToAttribute[i] and string");
        }
    }
    //[Variation("GetAttribute(i) Verify with MoveToAttribute[i] - Single Quote")]
    // Same check against the single-quoted-attribute element.
    public void GetAttributeWithMoveAttrSingleQ()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        for (int i = 0; i < DataReader.AttributeCount; i++)
        {
            string str = DataReader.GetAttribute(i);
            DataReader.MoveToAttribute(i);
            TestLog.Compare(DataReader.Value, DataReader[i], "Ordinal (" + i + "): Compare MoveToAttribute[i] and this[i]");
            TestLog.Compare(str, DataReader.Value, "Ordinal (" + i + "): Compare MoveToAttribute[i] and string");
        }
    }
    // The four variations below call GetAttribute with an out-of-range ordinal
    // and pass as long as no exception escapes; the return value is irrelevant,
    // so the previously unused "str" locals were removed.
    // NOTE(review): XmlReader.GetAttribute(int) is documented to throw
    // ArgumentOutOfRangeException for out-of-range ordinals — confirm the
    // no-throw expectation is intentional for the bridge reader under test.
    //[Variation("GetAttribute(i) NegativeOneOrdinal", Priority = 0)]
    public void NegativeOneOrdinal()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        DataReader.GetAttribute(-1);
    }
    //[Variation("GetAttribute(i) FieldCountOrdinal")]
    public void FieldCountOrdinal()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT0");
        DataReader.GetAttribute(DataReader.AttributeCount);
    }
    //[Variation("GetAttribute(i) OrdinalPlusOne", Priority = 0)]
    public void OrdinalPlusOne()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        DataReader.GetAttribute(DataReader.AttributeCount + 1);
    }
    //[Variation("GetAttribute(i) OrdinalMinusOne")]
    public void OrdinalMinusOne()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        DataReader.GetAttribute(-2);
    }
}
//[TestCase(Name = "GetAttributeName", Desc = "GetAttributeName")]
// Tests for XmlReader.GetAttribute(name) / GetAttribute(name, ns):
// agreement with the indexers and with MoveToAttribute, plus null/empty
// namespace semantics.
public partial class TCGetAttributeName : BridgeHelpers
{
//[Variation("GetAttribute(Name) Verify with This[Name]", Priority = 0)]
public void GetAttributeWithName1()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
TestLog.Compare(DataReader[strName], DataReader.GetAttribute(strName), "Ordinal (" + i + "): Compare GetAttribute(strName) and this[strName]");
}
}
//[Variation("GetAttribute(Name, null) Verify with This[Name]")]
// NOTE(review): the variation text advertises GetAttribute(Name, null), but
// the body is identical to GetAttributeWithName1 and never passes a null
// namespace — confirm this matches the original test intent.
public void GetAttributeWithName2()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
TestLog.Compare(DataReader[strName], DataReader.GetAttribute(strName), "Ordinal (" + i + "): Compare GetAttribute(strName) and this[strName]");
}
}
//[Variation("GetAttribute(Name) Verify with This[Name,null]")]
// NOTE(review): the variation text advertises this[Name, null], but the body
// uses the single-argument indexer — same code as GetAttributeWithName1.
public void GetAttributeWithName3()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
TestLog.Compare(DataReader[strName], DataReader.GetAttribute(strName), "Ordinal (" + i + "): Compare GetAttribute(strName) and this[strName]");
}
}
//[Variation("GetAttribute(Name, NamespaceURI) Verify with This[Name, NamespaceURI]", Priority = 0)]
// Cross-checks the namespace-qualified lookup against both the [name, ns]
// indexer and the prefix-qualified "foo:name" forms.
public void GetAttributeWithName4()
{
string strName;
string qname;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
for (int i = 1; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
qname = "foo:" + strName;
TestLog.Compare(DataReader[strName, strNamespace], DataReader.GetAttribute(strName, strNamespace), "Ordinal (" + i + "): Compare GetAttribute(strName,strNamespace) and this[strName,strNamespace]");
TestLog.Compare(DataReader[qname], DataReader.GetAttribute(strName, strNamespace), "Ordinal (" + i + "): Compare GetAttribute(strName,strNamespace) and this[strName,strNamespace]");
TestLog.Compare(DataReader[qname], DataReader.GetAttribute(qname), "Ordinal (" + i + "): Compare GetAttribute(qname) and this[qname]");
}
}
//[Variation("GetAttribute(Name, null) Verify not the same as This[Name, NamespaceURI]")]
// Fails only when the two lookups agree AND that shared value is empty;
// equal non-empty values are tolerated.
public void GetAttributeWithName5()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
for (int i = 1; i < DataReader.AttributeCount; i++)
{
strName = strAttr + (i - 1);
if (DataReader.GetAttribute(strName) == DataReader[strName, strNamespace])
{
if (DataReader[strName, strNamespace] == String.Empty)
throw new TestException(TestResult.Failed, Variation.Desc);
}
}
}
//[Variation("GetAttribute(Name, NamespaceURI) Verify not the same as This[Name, null]")]
// The namespace-qualified lookup must differ from the plain-name indexer.
public void GetAttributeWithName6()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
for (int i = 1; i < DataReader.AttributeCount; i++)
{
strName = strAttr + (i - 1);
if (DataReader.GetAttribute(strName, strNamespace) == DataReader[strName])
throw new TestException(TestResult.Failed, Variation.Desc);
}
}
//[Variation("GetAttribute(Name) Verify with MoveToAttribute(Name)")]
public void GetAttributeWithName7()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
DataReader.MoveToAttribute(strName);
// After a successful move, Value must equal the by-name lookup.
TestLog.Compare(DataReader.Value, DataReader.GetAttribute(strName), "Ordinal (" + i + "): Compare GetAttribute(strName) and this[strName]");
}
}
//[Variation("GetAttribute(Name,null) Verify with MoveToAttribute(Name)", Priority = 1)]
// A null namespaceURI lookup must return null even while positioned on the
// attribute found by plain name.
public void GetAttributeWithName8()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
DataReader.MoveToAttribute(strName);
TestLog.Compare(DataReader.GetAttribute(strName, null), null, "Ordinal (" + i + "): Did not return null");
}
}
//[Variation("GetAttribute(Name) Verify with MoveToAttribute(Name,null)", Priority = 1)]
// MoveToAttribute(name, null) must not move, leaving the element position
// whose Value is empty.
public void GetAttributeWithName9()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
TestLog.Compare(DataReader.MoveToAttribute(strName, null), false, "Ordinal (" + i + "): Incorrect move");
TestLog.Compare(DataReader.Value, String.Empty, "Ordinal (" + i + "): DataReader.Value should be empty string");
}
}
//[Variation("GetAttribute(Name, NamespaceURI) Verify not the same as MoveToAttribute(Name, null)")]
public void GetAttributeWithName10()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
for (int i = 1; i < DataReader.AttributeCount; i++)
{
strName = strAttr + (i - 1);
TestLog.Compare(DataReader.MoveToAttribute(strName, null), false, "Incorrect move");
TestLog.Compare(DataReader.Value, String.Empty, "Ordinal (" + i + "): DataReader.Value should be empty string");
}
}
//[Variation("GetAttribute(Name, null) Verify not the same as MoveToAttribute(Name, NamespaceURI)")]
public void GetAttributeWithName11()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
strName = strAttr + i;
DataReader.MoveToAttribute(strName, strNamespace);
TestLog.Compare(DataReader.GetAttribute(strName, null), null, "Should have returned null");
}
}
//[Variation("GetAttribute(Name, namespace) Verify not the same as MoveToAttribute(Name, namespace)")]
public void GetAttributeWithName12()
{
string strName;
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
for (int i = 1; i < DataReader.AttributeCount; i++)
{
strName = strAttr + (i - 1);
DataReader.MoveToAttribute(strName, strNamespace);
TestLog.Compare(DataReader.Value, DataReader.GetAttribute(strName, strNamespace), "Ordinal (" + i + "): Compare GetAttribute(strName) and this[strName]");
}
}
//[Variation("GetAttribute(String.Empty)")]
// Empty attribute name must return null.
public void GetAttributeWithName13()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
TestLog.Compare(DataReader.GetAttribute(String.Empty), null, "Should have returned null");
}
//[Variation("GetAttribute(String.Empty,String.Empty)")]
// Empty name and empty namespace must return null.
public void GetAttributeWithName14()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
TestLog.Compare(DataReader.GetAttribute(String.Empty, String.Empty), null, "Compare GetAttribute(strName) and this[strName]");
}
}
//[TestCase(Name = "ThisOrdinal", Desc = "ThisOrdinal")]
// Tests for the XmlReader ordinal indexer this[int]: agreement with
// GetAttribute(int) and MoveToAttribute(int), plus out-of-range ordinals.
public partial class TCThisOrdinal : BridgeHelpers
{
//[Variation("This[i] Verify with GetAttribute[i] - Double Quote", Priority = 0)]
public void OrdinalWithGetAttrDoubleQ()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Ordinal (" + i + "): Compare GetAttribute[i] and this[i]");
}
}
//[Variation("This[i] Verify with GetAttribute[i] - Single Quote")]
public void OrdinalWithGetAttrSingleQ()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
TestLog.Compare(DataReader[i], DataReader.GetAttribute(i), "Ordinal (" + i + "): Compare GetAttribute[i] and this[i]");
}
}
//[Variation("This[i] Verify with MoveToAttribute[i] - Double Quote", Priority = 0)]
// Indexer value captured before the move must equal Value after the move.
public void OrdinalWithMoveAttrDoubleQ()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
string str = DataReader[i];
DataReader.MoveToAttribute(i);
TestLog.Compare(DataReader.Value, DataReader[i], "Ordinal (" + i + "): Compare MoveToAttribute[i] and this[i]");
TestLog.Compare(str, DataReader.Value, "Ordinal (" + i + "): Compare MoveToAttribute[i] and string");
}
}
//[Variation("This[i] Verify with MoveToAttribute[i] - Single Quote")]
public void OrdinalWithMoveAttrSingleQ()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
for (int i = 0; i < DataReader.AttributeCount; i++)
{
string str = DataReader[i];
DataReader.MoveToAttribute(i);
TestLog.Compare(DataReader.Value, DataReader[i], "Ordinal (" + i + "): Compare MoveToAttribute[i] and this[i]");
TestLog.Compare(str, DataReader.Value, "Ordinal (" + i + "): Compare MoveToAttribute[i] and string");
}
}
// The four variations below read an out-of-range ordinal; the local "str"
// is deliberately unused — an indexer access is not a legal standalone
// statement, so the assignment only exists to invoke the getter.
// NOTE(review): XmlReader's int indexer is documented to throw
// ArgumentOutOfRangeException for out-of-range ordinals — confirm the
// no-throw expectation is intentional for the bridge reader under test.
//[Variation("ThisOrdinal NegativeOneOrdinal", Priority = 0)]
public void NegativeOneOrdinal()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string str = DataReader[-1];
}
//[Variation("ThisOrdinal FieldCountOrdinal")]
public void FieldCountOrdinal()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
string str = DataReader[DataReader.AttributeCount];
}
//[Variation("ThisOrdinal OrdinalPlusOne", Priority = 0)]
public void OrdinalPlusOne()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string str = DataReader[DataReader.AttributeCount + 1];
}
//[Variation("ThisOrdinal OrdinalMinusOne")]
public void OrdinalMinusOne()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string str = DataReader[-2];
}
}
//[TestCase(Name = "MoveToAttributeOrdinal", Desc = "MoveToAttributeOrdinal")]
// Tests for XmlReader.MoveToAttribute(int): agreement with the ordinal
// indexer / GetAttribute(int), and ArgumentOutOfRangeException for
// out-of-range ordinals.
public partial class TCMoveToAttributeOrdinal : BridgeHelpers
{
    //[Variation("MoveToAttribute(i) Verify with This[i] - Double Quote", Priority = 0)]
    // After MoveToAttribute(i), Value must agree with the ordinal indexer.
    public void MoveToAttributeWithGetAttrDoubleQ()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT0");
        for (int i = 0; i < DataReader.AttributeCount; i++)
        {
            DataReader.MoveToAttribute(i);
            TestLog.Compare(DataReader[i], DataReader.Value, "Ordinal (" + i + "): Compare GetAttribute(i) and this[i]");
        }
    }
    //[Variation("MoveToAttribute(i) Verify with This[i] - Single Quote")]
    // Same check against the single-quoted-attribute element.
    public void MoveToAttributeWithGetAttrSingleQ()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        for (int i = 0; i < DataReader.AttributeCount; i++)
        {
            DataReader.MoveToAttribute(i);
            TestLog.Compare(DataReader[i], DataReader.Value, "Ordinal (" + i + "): Compare GetAttribute(i) and this[i]");
        }
    }
    //[Variation("MoveToAttribute(i) Verify with GetAttribute(i) - Double Quote", Priority = 0)]
    // GetAttribute(i) captured before the move must equal Value after it.
    public void MoveToAttributeWithMoveAttrDoubleQ()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT0");
        for (int i = 0; i < DataReader.AttributeCount; i++)
        {
            string str = DataReader.GetAttribute(i);
            DataReader.MoveToAttribute(i);
            TestLog.Compare(DataReader.Value, DataReader.GetAttribute(i), "Ordinal (" + i + "): Compare MoveToAttribute[i] and this[i]");
            TestLog.Compare(str, DataReader.Value, "Ordinal (" + i + "): Compare MoveToAttribute[i] and string");
        }
    }
    //[Variation("MoveToAttribute(i) Verify with GetAttribute[i] - Single Quote")]
    public void MoveToAttributeWithMoveAttrSingleQ()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        for (int i = 0; i < DataReader.AttributeCount; i++)
        {
            string str = DataReader.GetAttribute(i);
            DataReader.MoveToAttribute(i);
            TestLog.Compare(DataReader.Value, DataReader[i], "Ordinal (" + i + "): Compare MoveToAttribute[i] and this[i]");
            TestLog.Compare(str, DataReader.Value, "Ordinal (" + i + "): Compare MoveToAttribute[i] and string");
        }
    }
    // Calls MoveToAttribute with an out-of-range ordinal and fails the test
    // unless ArgumentOutOfRangeException is thrown. Shared by the four
    // boundary variations below, which previously repeated this try/catch
    // and threw a TestException with an EMPTY message; the message now
    // identifies the offending ordinal.
    private void VerifyMoveToAttributeThrows(XmlReader DataReader, int ordinal)
    {
        try
        {
            DataReader.MoveToAttribute(ordinal);
        }
        catch (ArgumentOutOfRangeException)
        {
            return;
        }
        throw new TestException(TestResult.Failed, "MoveToAttribute(" + ordinal + ") should have thrown ArgumentOutOfRangeException");
    }
    //[Variation("MoveToAttribute(i) NegativeOneOrdinal", Priority = 0)]
    public void NegativeOneOrdinal()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        VerifyMoveToAttributeThrows(DataReader, -1);
    }
    //[Variation("MoveToAttribute(i) FieldCountOrdinal")]
    public void FieldCountOrdinal()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT0");
        VerifyMoveToAttributeThrows(DataReader, DataReader.AttributeCount);
    }
    //[Variation("MoveToAttribute(i) OrdinalPlusOne", Priority = 0)]
    public void OrdinalPlusOne()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        VerifyMoveToAttributeThrows(DataReader, DataReader.AttributeCount + 1);
    }
    //[Variation("MoveToAttribute(i) OrdinalMinusOne")]
    public void OrdinalMinusOne()
    {
        XmlReader DataReader = GetReader();
        PositionOnElement(DataReader, "ACT1");
        VerifyMoveToAttributeThrows(DataReader, -2);
    }
}
//[TestCase(Name = "MoveToFirstAtribute", Desc = "MoveToFirstAttribute")]
// Tests for XmlReader.MoveToFirstAttribute: no-attribute elements return
// false; otherwise the reader lands on attribute ordinal 0 regardless of
// the current attribute position.
public partial class TCMoveToFirstAttribute : BridgeHelpers
{
//[Variation("MoveToFirstAttribute() When AttributeCount=0, <EMPTY1/> ", Priority = 0)]
public void MoveToFirstAttribute1()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "EMPTY1");
TestLog.Compare(DataReader.MoveToFirstAttribute(), false, Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When AttributeCount=0, <NONEMPTY1>ABCDE</NONEMPTY1> ")]
public void MoveToFirstAttribute2()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "NONEMPTY1");
TestLog.Compare(DataReader.MoveToFirstAttribute(), false, Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=0, with namespace")]
public void MoveToFirstAttribute3()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
string strFirst;
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
strFirst = DataReader.Value;
// The value after the move must be that of attribute ordinal 0.
TestLog.Compare(strFirst, DataReader.GetAttribute(0), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=0, without namespace")]
public void MoveToFirstAttribute4()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string strFirst;
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
strFirst = DataReader.Value;
TestLog.Compare(strFirst, DataReader.GetAttribute(0), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=mIddle, with namespace")]
// First moves to the middle attribute, then verifies MoveToFirstAttribute
// returns to ordinal 0.
public void MoveToFirstAttribute5()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
string strFirst;
DataReader.MoveToAttribute((int)((DataReader.AttributeCount) / 2));
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
strFirst = DataReader.Value;
TestLog.Compare(strFirst, DataReader.GetAttribute(0), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=mIddle, without namespace")]
public void MoveToFirstAttribute6()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string strFirst;
DataReader.MoveToAttribute((int)((DataReader.AttributeCount) / 2));
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
strFirst = DataReader.Value;
TestLog.Compare(strFirst, DataReader.GetAttribute(0), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=end, with namespace")]
// Moves to the last attribute, then verifies the reset to ordinal 0.
public void MoveToFirstAttribute7()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
string strFirst;
DataReader.MoveToAttribute((DataReader.AttributeCount) - 1);
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
strFirst = DataReader.Value;
TestLog.Compare(strFirst, DataReader.GetAttribute(0), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=end, without namespace")]
public void MoveToFirstAttribute8()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string strFirst;
DataReader.MoveToAttribute((DataReader.AttributeCount) - 1);
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
strFirst = DataReader.Value;
TestLog.Compare(strFirst, DataReader.GetAttribute(0), Variation.Desc);
}
}
// Tests for XmlReader.MoveToNextAttribute.
// NOTE(review): the methods named MoveToFirstAttribute5..8 below (and their
// Variation comments) look copy-pasted from TCMoveToFirstAttribute; 7 and 8
// do not call MoveToNextAttribute at all. They are left unrenamed because the
// test harness may resolve variations by method name — confirm and rename in
// a dedicated change if safe.
public partial class TCMoveToNextAttribute : BridgeHelpers
{
//[Variation("MoveToNextAttribute() When AttributeCount=0, <EMPTY1/> ", Priority = 0)]
public void MoveToNextAttribute1()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "EMPTY1");
TestLog.Compare(DataReader.MoveToNextAttribute(), false, Variation.Desc);
}
//[Variation("MoveToNextAttribute() When AttributeCount=0, <NONEMPTY1>ABCDE</NONEMPTY1> ")]
public void MoveToNextAttribute2()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "NONEMPTY1");
TestLog.Compare(DataReader.MoveToNextAttribute(), false, Variation.Desc);
}
//[Variation("MoveToNextAttribute() When iOrdinal=0, with namespace")]
// From the element, MoveToNextAttribute lands on ordinal 0; after
// MoveToFirstAttribute, the next call lands on ordinal 1.
public void MoveToNextAttribute3()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
string strValue;
TestLog.Compare(DataReader.MoveToNextAttribute(), true, Variation.Desc);
strValue = DataReader.Value;
TestLog.Compare(strValue, DataReader.GetAttribute(0), Variation.Desc);
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
TestLog.Compare(DataReader.MoveToNextAttribute(), true, Variation.Desc);
strValue = DataReader.Value;
TestLog.Compare(strValue, DataReader.GetAttribute(1), Variation.Desc);
}
//[Variation("MoveToNextAttribute() When iOrdinal=0, without namespace")]
public void MoveToNextAttribute4()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string strValue;
TestLog.Compare(DataReader.MoveToNextAttribute(), true, Variation.Desc);
strValue = DataReader.Value;
TestLog.Compare(strValue, DataReader.GetAttribute(0), Variation.Desc);
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
TestLog.Compare(DataReader.MoveToNextAttribute(), true, Variation.Desc);
strValue = DataReader.Value;
TestLog.Compare(strValue, DataReader.GetAttribute(1), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=mIddle, with namespace")]
// From the middle attribute, MoveToNextAttribute must yield the value of
// ordinal iMid + 1 (captured beforehand for comparison).
public void MoveToFirstAttribute5()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
string strValue0;
string strValue;
int iMid = (DataReader.AttributeCount) / 2;
DataReader.MoveToAttribute(iMid + 1);
strValue0 = DataReader.Value;
DataReader.MoveToAttribute(iMid);
TestLog.Compare(DataReader.MoveToNextAttribute(), true, Variation.Desc);
strValue = DataReader.Value;
TestLog.Compare(strValue0, strValue, Variation.Desc);
TestLog.Compare(strValue, DataReader.GetAttribute(iMid + 1), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=mIddle, without namespace")]
public void MoveToFirstAttribute6()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string strValue0;
string strValue;
int iMid = (DataReader.AttributeCount) / 2;
DataReader.MoveToAttribute(iMid + 1);
strValue0 = DataReader.Value;
DataReader.MoveToAttribute(iMid);
TestLog.Compare(DataReader.MoveToNextAttribute(), true, Variation.Desc);
strValue = DataReader.Value;
TestLog.Compare(strValue0, strValue, Variation.Desc);
TestLog.Compare(strValue, DataReader.GetAttribute(iMid + 1), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=end, with namespace")]
// NOTE(review): this variation exercises MoveToFirstAttribute from the last
// attribute, not MoveToNextAttribute — likely belongs in TCMoveToFirstAttribute.
public void MoveToFirstAttribute7()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT0");
string strFirst;
DataReader.MoveToAttribute((DataReader.AttributeCount) - 1);
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
strFirst = DataReader.Value;
TestLog.Compare(strFirst, DataReader.GetAttribute(0), Variation.Desc);
}
//[Variation("MoveToFirstAttribute() When iOrdinal=end, without namespace")]
public void MoveToFirstAttribute8()
{
XmlReader DataReader = GetReader();
PositionOnElement(DataReader, "ACT1");
string strFirst;
DataReader.MoveToAttribute((DataReader.AttributeCount) - 1);
TestLog.Compare(DataReader.MoveToFirstAttribute(), true, Variation.Desc);
strFirst = DataReader.Value;
TestLog.Compare(strFirst, DataReader.GetAttribute(0), Variation.Desc);
}
}
// Verifies the attribute surface (AttributeCount / HasAttributes /
// MoveToFirstAttribute / MoveToNextAttribute) for each node type. All node
// types except XmlDeclaration must report no attributes.
// Fixed a recurring typo in the log messages: "AttributeCoung" -> "AttributeCount".
public partial class TCAttributeTest : BridgeHelpers
{
    // Shared assertion: the current node must expose no attributes via any
    // of the four attribute APIs. Extracted from ten previously duplicated
    // method bodies.
    private void VerifyNoAttributes(XmlReader DataReader)
    {
        TestLog.Compare(DataReader.AttributeCount, 0, "Checking AttributeCount");
        TestLog.Compare(DataReader.HasAttributes, false, "Checking HasAttributes");
        TestLog.Compare(DataReader.MoveToFirstAttribute(), false, "Checking MoveToFirstAttribute");
        TestLog.Compare(DataReader.MoveToNextAttribute(), false, "Checking MoveToNextAttribute");
    }
    //[Variation("Attribute Test On None")]
    public void TestAttributeTestNodeType_None()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.None))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("Attribute Test On Element", Priority = 0)]
    public void TestAttributeTestNodeType_Element()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.Element))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("Attribute Test On Text", Priority = 0)]
    public void TestAttributeTestNodeType_Text()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.Text))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("Attribute Test On CDATA")]
    public void TestAttributeTestNodeType_CDATA()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.CDATA))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("Attribute Test On ProcessingInstruction")]
    public void TestAttributeTestNodeType_ProcessingInstruction()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.ProcessingInstruction))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("AttributeTest On Comment")]
    public void TestAttributeTestNodeType_Comment()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.Comment))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("AttributeTest On DocumentType", Priority = 0)]
    public void TestAttributeTestNodeType_DocumentType()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.DocumentType))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("AttributeTest On Whitespace")]
    public void TestAttributeTestNodeType_Whitespace()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.Whitespace))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("AttributeTest On EndElement")]
    public void TestAttributeTestNodeType_EndElement()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.EndElement))
        {
            VerifyNoAttributes(DataReader);
        }
    }
    //[Variation("AttributeTest On XmlDeclaration", Priority = 0)]
    // The XML declaration is the one node type expected to expose attributes
    // (nCount = 3 — presumably version/encoding/standalone; confirm against
    // the test document).
    public void TestAttributeTestNodeType_XmlDeclaration()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.XmlDeclaration))
        {
            int nCount = 3;
            TestLog.Compare(DataReader.AttributeCount, nCount, "Checking AttributeCount");
            TestLog.Compare(DataReader.HasAttributes, true, "Checking HasAttributes");
            TestLog.Compare(DataReader.MoveToFirstAttribute(), true, "Checking MoveToFirstAttribute");
            bool bNext = true;
            TestLog.Compare(DataReader.MoveToNextAttribute(), bNext, "Checking MoveToNextAttribute");
        }
    }
    //[Variation("AttributeTest On EndEntity")]
    public void TestAttributeTestNodeType_EndEntity()
    {
        XmlReader DataReader = GetReader();
        if (FindNodeType(DataReader, XmlNodeType.EndEntity))
        {
            VerifyNoAttributes(DataReader);
        }
    }
}
// DOCTYPE-node attribute and value behavior.
public partial class TCAttributeDocType : BridgeHelpers
{
//[Variation("AttributeCount and HasAttributes", Priority = 0)]
// A DocumentType node must report no attributes.
public void TADocType_1()
{
XmlReader DataReader = GetReader();
PositionOnNodeType(DataReader, XmlNodeType.DocumentType);
TestLog.Compare(DataReader.AttributeCount, 0, "Checking AttributeCount");
TestLog.Compare(DataReader.HasAttributes, false, "Checking HasAttributes");
}
//[Variation("HasValue and Value on DocumentType")]
// Value of a DocumentType node is the internal DTD subset text.
public void TADocType_2()
{
XmlReader DataReader = GetReader(new StringReader("<!DOCTYPE dt [<!ENTITY e 'eee'>]><ROOT/>"));
PositionOnNodeType(DataReader, XmlNodeType.DocumentType);
TestLog.Compare(DataReader.HasValue, true, "HasValue");
TestLog.Compare(DataReader.Value, "<!ENTITY e 'eee'>", "Value");
}
}
//[TestCase(Name = "ReadURI", Desc = "Read URI")]
public partial class TATextReaderDocType : BridgeHelpers
{
    //[Variation("Valid URI reference as SystemLiteral")]
    public void TATextReaderDocType_1()
    {
        string strxml = "<?xml version='1.0' standalone='no'?><!DOCTYPE ROOT SYSTEM 'se2.dtd'[]><ROOT/>";
        XmlReader reader = GetReaderStr(strxml);
        // Drain the reader; success means no exception was raised.
        while (reader.Read())
        {
        }
    }

    void TestUriChar(char ch)
    {
        // Embed the character under test in the system literal and read the
        // document to completion.
        string dtdName = String.Format("f{0}.dtd", ch);
        string strxml = String.Format("<!DOCTYPE ROOT SYSTEM '{0}' []><ROOT></ROOT>", dtdName);
        XmlReader reader = GetReaderStr(strxml);
        while (reader.Read())
        {
        }
    }

    // XML 1.0 SE
    //[Variation("URI reference with disallowed characters in SystemLiteral")]
    public void TATextReaderDocType_4()
    {
        string strDisallowed = " {}^`";
        foreach (char ch in strDisallowed)
        {
            TestUriChar(ch);
        }
    }
}
public partial class TCXmlns : BridgeHelpers
{
    private string _ST_ENS1 = "EMPTY_NAMESPACE1";
    private string _ST_NS2 = "NAMESPACE2";

    //[Variation("Name, LocalName, Prefix and Value with xmlns=ns attribute", Priority = 0)]
    public void TXmlns1()
    {
        // A default namespace declaration is an attribute named "xmlns"
        // with an empty prefix.
        XmlReader reader = GetReader();
        PositionOnElement(reader, _ST_ENS1);
        reader.MoveToAttribute("xmlns");
        TestLog.Compare(reader.LocalName, "xmlns", "ln");
        TestLog.Compare(reader.Name, "xmlns", "n");
        TestLog.Compare(reader.Prefix, String.Empty, "p");
        TestLog.Compare(reader.Value, "14", "v");
    }

    //[Variation("Name, LocalName, Prefix and Value with xmlns:p=ns attribute")]
    public void TXmlns2()
    {
        // A prefixed declaration splits into prefix "xmlns" and the bound
        // prefix as the local name.
        XmlReader reader = GetReader();
        PositionOnElement(reader, _ST_NS2);
        reader.MoveToAttribute(0);
        TestLog.Compare(reader.LocalName, "bar", "ln");
        TestLog.Compare(reader.Name, "xmlns:bar", "n");
        TestLog.Compare(reader.Prefix, "xmlns", "p");
        TestLog.Compare(reader.Value, "1", "v");
    }

    //[Variation("LookupNamespace with xmlns=ns attribute")]
    public void TXmlns3()
    {
        // "xmlns" itself resolves to the reserved xmlns namespace URI once
        // positioned on a namespace attribute.
        XmlReader reader = GetReader();
        PositionOnElement(reader, _ST_ENS1);
        reader.MoveToAttribute(1);
        TestLog.Compare(reader.LookupNamespace("xmlns"), "http://www.w3.org/2000/xmlns/", "ln");
    }

    //[Variation("MoveToAttribute access on xmlns attribute")]
    public void TXmlns4()
    {
        // The xmlns attribute must be reachable by index and by name, and
        // moving back to the element must allow repeating the lookup.
        XmlReader reader = GetReader();
        PositionOnElement(reader, _ST_ENS1);
        reader.MoveToAttribute(1);
        TestLog.Compare(reader.LocalName, "xmlns", "ln");
        TestLog.Compare(reader.Name, "xmlns", "n");
        TestLog.Compare(reader.Prefix, String.Empty, "p");
        TestLog.Compare(reader.Value, "14", "v");
        reader.MoveToElement();
        TestLog.Compare(reader.MoveToAttribute("xmlns"), true, "mta(str)");
        TestLog.Compare(reader.LocalName, "xmlns", "ln");
        TestLog.Compare(reader.Name, "xmlns", "n");
        TestLog.Compare(reader.Prefix, String.Empty, "p");
        TestLog.Compare(reader.Value, "14", "v");
        reader.MoveToElement();
        TestLog.Compare(reader.MoveToAttribute("xmlns"), true, "mta(str, str)");
        TestLog.Compare(reader.LocalName, "xmlns", "ln");
        TestLog.Compare(reader.Name, "xmlns", "n");
        TestLog.Compare(reader.Prefix, String.Empty, "p");
        TestLog.Compare(reader.Value, "14", "v");
        reader.MoveToElement();
        // Wrong namespace URI: the move must fail.
        TestLog.Compare(reader.MoveToAttribute("xmlns", "14"), false, "mta inv");
    }

    //[Variation("GetAttribute access on xmlns attribute")]
    public void TXmlns5()
    {
        XmlReader reader = GetReader();
        PositionOnElement(reader, _ST_ENS1);
        TestLog.Compare(reader.GetAttribute(1), "14", "ga(i)");
        TestLog.Compare(reader.GetAttribute("xmlns"), "14", "ga(str)");
        TestLog.Compare(reader.GetAttribute("xmlns"), "14", "ga(str, str)");
        // Wrong namespace URI yields no attribute.
        TestLog.Compare(reader.GetAttribute("xmlns", "14"), null, "ga inv");
    }

    //[Variation("this[xmlns] attribute access")]
    public void TXmlns6()
    {
        XmlReader reader = GetReader();
        PositionOnElement(reader, _ST_ENS1);
        TestLog.Compare(reader[1], "14", "this[i]");
        TestLog.Compare(reader["xmlns"], "14", "this[str]");
        TestLog.Compare(reader["xmlns", "14"], null, "this inv");
    }
}
public partial class TCXmlnsPrefix : BridgeHelpers
{
    private string _ST_ENS1 = "EMPTY_NAMESPACE1";
    private string _ST_NS2 = "NAMESPACE2";
    private string _strXmlns = "http://www.w3.org/2000/xmlns/";

    //[Variation("NamespaceURI of xmlns:a attribute", Priority = 0)]
    public void TXmlnsPrefix1()
    {
        // Namespace-declaration attributes live in the reserved xmlns
        // namespace.
        XmlReader reader = GetReader();
        PositionOnElement(reader, _ST_NS2);
        reader.MoveToAttribute(0);
        TestLog.Compare(reader.NamespaceURI, _strXmlns, "nu");
    }

    //[Variation("NamespaceURI of element/attribute with xmlns attribute", Priority = 0)]
    public void TXmlnsPrefix2()
    {
        XmlReader reader = GetReader();
        PositionOnElement(reader, _ST_ENS1);
        TestLog.Compare(reader.NamespaceURI, "14", "nue");
        // Ordinary (unprefixed) attributes are in no namespace.
        reader.MoveToAttribute("Attr0");
        TestLog.Compare(reader.NamespaceURI, String.Empty, "nu");
        reader.MoveToAttribute("xmlns");
        TestLog.Compare(reader.NamespaceURI, _strXmlns, "nu");
    }

    //[Variation("LookupNamespace with xmlns prefix")]
    public void TXmlnsPrefix3()
    {
        XmlReader reader = GetReader();
        reader.Read();
        TestLog.Compare(reader.LookupNamespace("xmlns"), null, "ln");
    }

    //[Variation("Define prefix for 'www.w3.org/2000/xmlns'", Priority = 0)]
    public void TXmlnsPrefix4()
    {
        // Binding any prefix to the reserved xmlns URI must be rejected.
        string strxml = "<ROOT xmlns:pxmlns='http://www.w3.org/2000/xmlns/'/>";
        try
        {
            XmlReader reader = GetReaderStr(strxml);
            reader.Read();
            throw new TestException(TestResult.Failed, "");
        }
        catch (XmlException) { }
    }

    //[Variation("Redefine namespace attached to xmlns prefix")]
    public void TXmlnsPrefix5()
    {
        // The "xmlns" prefix itself can never be re-bound.
        string strxml = "<ROOT xmlns:xmlns='http://www.w3.org/2002/xmlns/'/>";
        try
        {
            XmlReader reader = GetReaderStr(strxml);
            reader.Read();
            throw new TestException(TestResult.Failed, "");
        }
        catch (XmlException) { }
    }
}
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/longrunning/operations.proto
// Original file comments:
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#region Designer generated code
using System;
using System.Threading;
using System.Threading.Tasks;
using Grpc.Core;
namespace Google.LongRunning {
/// <summary>
/// Manages long-running operations with an API service.
///
/// When an API method normally takes long time to complete, it can be designed
/// to return [Operation][google.longrunning.Operation] to the client, and the client can use this
/// interface to receive the real response asynchronously by polling the
/// operation resource, or pass the operation resource to another API (such as
/// Google Cloud Pub/Sub API) to receive the response. Any API service that
/// returns long-running operations should implement the `Operations` interface
/// so developers can have a consistent client experience.
/// </summary>
public static class Operations
{
// Fully-qualified protobuf service name; shared by all method descriptors below.
static readonly string __ServiceName = "google.longrunning.Operations";
// Marshallers convert each request/response message to and from the protobuf
// wire format (serialize via ToByteArray, deserialize via Parser.ParseFrom).
static readonly Marshaller<global::Google.LongRunning.ListOperationsRequest> __Marshaller_ListOperationsRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.LongRunning.ListOperationsRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.LongRunning.ListOperationsResponse> __Marshaller_ListOperationsResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.LongRunning.ListOperationsResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.LongRunning.GetOperationRequest> __Marshaller_GetOperationRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.LongRunning.GetOperationRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.LongRunning.Operation> __Marshaller_Operation = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.LongRunning.Operation.Parser.ParseFrom);
static readonly Marshaller<global::Google.LongRunning.DeleteOperationRequest> __Marshaller_DeleteOperationRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.LongRunning.DeleteOperationRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Protobuf.WellKnownTypes.Empty> __Marshaller_Empty = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Protobuf.WellKnownTypes.Empty.Parser.ParseFrom);
static readonly Marshaller<global::Google.LongRunning.CancelOperationRequest> __Marshaller_CancelOperationRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.LongRunning.CancelOperationRequest.Parser.ParseFrom);
// Unary method descriptors, used both by the client stubs and by BindService.
static readonly Method<global::Google.LongRunning.ListOperationsRequest, global::Google.LongRunning.ListOperationsResponse> __Method_ListOperations = new Method<global::Google.LongRunning.ListOperationsRequest, global::Google.LongRunning.ListOperationsResponse>(
MethodType.Unary,
__ServiceName,
"ListOperations",
__Marshaller_ListOperationsRequest,
__Marshaller_ListOperationsResponse);
static readonly Method<global::Google.LongRunning.GetOperationRequest, global::Google.LongRunning.Operation> __Method_GetOperation = new Method<global::Google.LongRunning.GetOperationRequest, global::Google.LongRunning.Operation>(
MethodType.Unary,
__ServiceName,
"GetOperation",
__Marshaller_GetOperationRequest,
__Marshaller_Operation);
static readonly Method<global::Google.LongRunning.DeleteOperationRequest, global::Google.Protobuf.WellKnownTypes.Empty> __Method_DeleteOperation = new Method<global::Google.LongRunning.DeleteOperationRequest, global::Google.Protobuf.WellKnownTypes.Empty>(
MethodType.Unary,
__ServiceName,
"DeleteOperation",
__Marshaller_DeleteOperationRequest,
__Marshaller_Empty);
static readonly Method<global::Google.LongRunning.CancelOperationRequest, global::Google.Protobuf.WellKnownTypes.Empty> __Method_CancelOperation = new Method<global::Google.LongRunning.CancelOperationRequest, global::Google.Protobuf.WellKnownTypes.Empty>(
MethodType.Unary,
__ServiceName,
"CancelOperation",
__Marshaller_CancelOperationRequest,
__Marshaller_Empty);
/// <summary>Service descriptor</summary>
public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
{
get { return global::Google.LongRunning.OperationsReflection.Descriptor.Services[0]; }
}
/// <summary>Base class for server-side implementations of Operations</summary>
public abstract class OperationsBase
{
// Every handler defaults to UNIMPLEMENTED; concrete services override
// only the methods they support.
/// <summary>
///  Lists operations that match the specified filter in the request. If the
///  server doesn't support this method, it returns `UNIMPLEMENTED`.
///
///  NOTE: the `name` binding below allows API services to override the binding
///  to use different resource name schemes, such as `users/*/operations`.
/// </summary>
public virtual global::System.Threading.Tasks.Task<global::Google.LongRunning.ListOperationsResponse> ListOperations(global::Google.LongRunning.ListOperationsRequest request, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
/// <summary>
///  Gets the latest state of a long-running operation.  Clients can use this
///  method to poll the operation result at intervals as recommended by the API
///  service.
/// </summary>
public virtual global::System.Threading.Tasks.Task<global::Google.LongRunning.Operation> GetOperation(global::Google.LongRunning.GetOperationRequest request, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
/// <summary>
///  Deletes a long-running operation. This method indicates that the client is
///  no longer interested in the operation result. It does not cancel the
///  operation. If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.
/// </summary>
public virtual global::System.Threading.Tasks.Task<global::Google.Protobuf.WellKnownTypes.Empty> DeleteOperation(global::Google.LongRunning.DeleteOperationRequest request, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
/// <summary>
///  Starts asynchronous cancellation on a long-running operation.  The server
///  makes a best effort to cancel the operation, but success is not
///  guaranteed.  If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.  Clients can use
///  [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
///  other methods to check whether the cancellation succeeded or whether the
///  operation completed despite cancellation. On successful cancellation,
///  the operation is not deleted; instead, it becomes an operation with
///  an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
///  corresponding to `Code.CANCELLED`.
/// </summary>
public virtual global::System.Threading.Tasks.Task<global::Google.Protobuf.WellKnownTypes.Empty> CancelOperation(global::Google.LongRunning.CancelOperationRequest request, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
}
/// <summary>Client for Operations</summary>
public class OperationsClient : ClientBase<OperationsClient>
{
// Thin stub over CallInvoker: each RPC is exposed in four flavors
// (blocking/async, each with either discrete call parameters or CallOptions).
/// <summary>Creates a new client for Operations</summary>
/// <param name="channel">The channel to use to make remote calls.</param>
public OperationsClient(Channel channel) : base(channel)
{
}
/// <summary>Creates a new client for Operations that uses a custom <c>CallInvoker</c>.</summary>
/// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
public OperationsClient(CallInvoker callInvoker) : base(callInvoker)
{
}
/// <summary>Protected parameterless constructor to allow creation of test doubles.</summary>
protected OperationsClient() : base()
{
}
/// <summary>Protected constructor to allow creation of configured clients.</summary>
/// <param name="configuration">The client configuration.</param>
protected OperationsClient(ClientBaseConfiguration configuration) : base(configuration)
{
}
/// <summary>
///  Lists operations that match the specified filter in the request. If the
///  server doesn't support this method, it returns `UNIMPLEMENTED`.
///
///  NOTE: the `name` binding below allows API services to override the binding
///  to use different resource name schemes, such as `users/*/operations`.
/// </summary>
public virtual global::Google.LongRunning.ListOperationsResponse ListOperations(global::Google.LongRunning.ListOperationsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Convenience overload: fold the discrete parameters into CallOptions.
return ListOperations(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
///  Lists operations that match the specified filter in the request. If the
///  server doesn't support this method, it returns `UNIMPLEMENTED`.
///
///  NOTE: the `name` binding below allows API services to override the binding
///  to use different resource name schemes, such as `users/*/operations`.
/// </summary>
public virtual global::Google.LongRunning.ListOperationsResponse ListOperations(global::Google.LongRunning.ListOperationsRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_ListOperations, null, options, request);
}
/// <summary>
///  Lists operations that match the specified filter in the request. If the
///  server doesn't support this method, it returns `UNIMPLEMENTED`.
///
///  NOTE: the `name` binding below allows API services to override the binding
///  to use different resource name schemes, such as `users/*/operations`.
/// </summary>
public virtual AsyncUnaryCall<global::Google.LongRunning.ListOperationsResponse> ListOperationsAsync(global::Google.LongRunning.ListOperationsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return ListOperationsAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
///  Lists operations that match the specified filter in the request. If the
///  server doesn't support this method, it returns `UNIMPLEMENTED`.
///
///  NOTE: the `name` binding below allows API services to override the binding
///  to use different resource name schemes, such as `users/*/operations`.
/// </summary>
public virtual AsyncUnaryCall<global::Google.LongRunning.ListOperationsResponse> ListOperationsAsync(global::Google.LongRunning.ListOperationsRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_ListOperations, null, options, request);
}
/// <summary>
///  Gets the latest state of a long-running operation.  Clients can use this
///  method to poll the operation result at intervals as recommended by the API
///  service.
/// </summary>
public virtual global::Google.LongRunning.Operation GetOperation(global::Google.LongRunning.GetOperationRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return GetOperation(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
///  Gets the latest state of a long-running operation.  Clients can use this
///  method to poll the operation result at intervals as recommended by the API
///  service.
/// </summary>
public virtual global::Google.LongRunning.Operation GetOperation(global::Google.LongRunning.GetOperationRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_GetOperation, null, options, request);
}
/// <summary>
///  Gets the latest state of a long-running operation.  Clients can use this
///  method to poll the operation result at intervals as recommended by the API
///  service.
/// </summary>
public virtual AsyncUnaryCall<global::Google.LongRunning.Operation> GetOperationAsync(global::Google.LongRunning.GetOperationRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return GetOperationAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
///  Gets the latest state of a long-running operation.  Clients can use this
///  method to poll the operation result at intervals as recommended by the API
///  service.
/// </summary>
public virtual AsyncUnaryCall<global::Google.LongRunning.Operation> GetOperationAsync(global::Google.LongRunning.GetOperationRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_GetOperation, null, options, request);
}
/// <summary>
///  Deletes a long-running operation. This method indicates that the client is
///  no longer interested in the operation result. It does not cancel the
///  operation. If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.
/// </summary>
public virtual global::Google.Protobuf.WellKnownTypes.Empty DeleteOperation(global::Google.LongRunning.DeleteOperationRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return DeleteOperation(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
///  Deletes a long-running operation. This method indicates that the client is
///  no longer interested in the operation result. It does not cancel the
///  operation. If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.
/// </summary>
public virtual global::Google.Protobuf.WellKnownTypes.Empty DeleteOperation(global::Google.LongRunning.DeleteOperationRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_DeleteOperation, null, options, request);
}
/// <summary>
///  Deletes a long-running operation. This method indicates that the client is
///  no longer interested in the operation result. It does not cancel the
///  operation. If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> DeleteOperationAsync(global::Google.LongRunning.DeleteOperationRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return DeleteOperationAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
///  Deletes a long-running operation. This method indicates that the client is
///  no longer interested in the operation result. It does not cancel the
///  operation. If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> DeleteOperationAsync(global::Google.LongRunning.DeleteOperationRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_DeleteOperation, null, options, request);
}
/// <summary>
///  Starts asynchronous cancellation on a long-running operation.  The server
///  makes a best effort to cancel the operation, but success is not
///  guaranteed.  If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.  Clients can use
///  [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
///  other methods to check whether the cancellation succeeded or whether the
///  operation completed despite cancellation. On successful cancellation,
///  the operation is not deleted; instead, it becomes an operation with
///  an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
///  corresponding to `Code.CANCELLED`.
/// </summary>
public virtual global::Google.Protobuf.WellKnownTypes.Empty CancelOperation(global::Google.LongRunning.CancelOperationRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return CancelOperation(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
///  Starts asynchronous cancellation on a long-running operation.  The server
///  makes a best effort to cancel the operation, but success is not
///  guaranteed.  If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.  Clients can use
///  [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
///  other methods to check whether the cancellation succeeded or whether the
///  operation completed despite cancellation. On successful cancellation,
///  the operation is not deleted; instead, it becomes an operation with
///  an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
///  corresponding to `Code.CANCELLED`.
/// </summary>
public virtual global::Google.Protobuf.WellKnownTypes.Empty CancelOperation(global::Google.LongRunning.CancelOperationRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_CancelOperation, null, options, request);
}
/// <summary>
///  Starts asynchronous cancellation on a long-running operation.  The server
///  makes a best effort to cancel the operation, but success is not
///  guaranteed.  If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.  Clients can use
///  [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
///  other methods to check whether the cancellation succeeded or whether the
///  operation completed despite cancellation. On successful cancellation,
///  the operation is not deleted; instead, it becomes an operation with
///  an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
///  corresponding to `Code.CANCELLED`.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> CancelOperationAsync(global::Google.LongRunning.CancelOperationRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return CancelOperationAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
///  Starts asynchronous cancellation on a long-running operation.  The server
///  makes a best effort to cancel the operation, but success is not
///  guaranteed.  If the server doesn't support this method, it returns
///  `google.rpc.Code.UNIMPLEMENTED`.  Clients can use
///  [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
///  other methods to check whether the cancellation succeeded or whether the
///  operation completed despite cancellation. On successful cancellation,
///  the operation is not deleted; instead, it becomes an operation with
///  an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
///  corresponding to `Code.CANCELLED`.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> CancelOperationAsync(global::Google.LongRunning.CancelOperationRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_CancelOperation, null, options, request);
}
/// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary>
protected override OperationsClient NewInstance(ClientBaseConfiguration configuration)
{
return new OperationsClient(configuration);
}
}
/// <summary>Creates service definition that can be registered with a server</summary>
public static ServerServiceDefinition BindService(OperationsBase serviceImpl)
{
// Wires each unary method descriptor to the matching handler on the
// supplied implementation.
return ServerServiceDefinition.CreateBuilder()
.AddMethod(__Method_ListOperations, serviceImpl.ListOperations)
.AddMethod(__Method_GetOperation, serviceImpl.GetOperation)
.AddMethod(__Method_DeleteOperation, serviceImpl.DeleteOperation)
.AddMethod(__Method_CancelOperation, serviceImpl.CancelOperation).Build();
}
}
}
#endregion
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Collections;
using System.Diagnostics;
using System.ComponentModel;
using System.Windows.Forms;
using OpenLiveWriter.BlogClient;
using OpenLiveWriter.BlogClient.Providers;
using OpenLiveWriter.Extensibility.BlogClient;
using OpenLiveWriter.Controls.Wizard;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.Localization;
using System.Runtime.InteropServices;
namespace OpenLiveWriter.PostEditor.Configuration.Wizard
{
public class WeblogConfigurationWizardController : WizardController, IBlogClientUIContext, IDisposable
{
#region Creation and Initialization and Disposal
public static string Welcome(IWin32Window owner)
{
    // Runs the "welcome" flavor of the wizard over a brand-new set of
    // temporary settings; returns the new blog id (see WelcomeWeblog).
    TemporaryBlogSettings settings = TemporaryBlogSettings.CreateNew();
    using (WeblogConfigurationWizardController controller = new WeblogConfigurationWizardController(settings))
    {
        return controller.WelcomeWeblog(owner);
    }
}
public static string Add(IWin32Window owner, bool permitSwitchingWeblogs)
{
    // Convenience overload for callers that don't care whether the user
    // chose to switch to the new weblog.
    bool ignoredSwitchToWeblog;
    return Add(owner, permitSwitchingWeblogs, out ignoredSwitchToWeblog);
}
public static string Add(IWin32Window owner, bool permitSwitchingWeblogs, out bool switchToWeblog)
{
    // A fresh settings object marks this run as adding a new weblog.
    TemporaryBlogSettings settings = TemporaryBlogSettings.CreateNew();
    settings.IsNewWeblog = true;
    settings.SwitchToWeblog = true;

    using (WeblogConfigurationWizardController controller = new WeblogConfigurationWizardController(settings))
    {
        return controller.AddWeblog(owner, ApplicationEnvironment.ProductNameQualified, permitSwitchingWeblogs, out switchToWeblog);
    }
}
public static string AddBlog(IWin32Window owner, Uri blogToAdd)
{
    // Pre-populates fresh settings from the supplied URI (credentials and
    // homepage) and runs the wizard starting at the basic-info step.
    TemporaryBlogSettings settings = TemporaryBlogSettings.CreateNew();
    settings.IsNewWeblog = true;
    settings.SwitchToWeblog = true;

    string username;
    string password;
    Uri homepageUrl;
    ParseAddBlogUri(blogToAdd, out username, out password, out homepageUrl);

    settings.HomepageUrl = homepageUrl.ToString();
    settings.Credentials.Username = username;
    settings.Credentials.Password = password;
    // Password came in via the URI, so never persist it.
    settings.SavePassword = false;

    using (WeblogConfigurationWizardController controller = new WeblogConfigurationWizardController(settings))
    {
        bool ignoredSwitchToWeblog;
        return controller.AddWeblogSkipType(owner, ApplicationEnvironment.ProductNameQualified, false, out ignoredSwitchToWeblog);
    }
}
public static bool EditTemporarySettings(IWin32Window owner, TemporaryBlogSettings settings)
{
    // Edits the supplied settings in place; returns true when the user
    // confirmed the wizard.
    using (WeblogConfigurationWizardController controller = new WeblogConfigurationWizardController(settings))
    {
        return controller.EditWeblogTemporarySettings(owner);
    }
}
// The settings object is shared with the caller: the wizard mutates it in
// place and the caller decides whether/how to persist the result.
private WeblogConfigurationWizardController(TemporaryBlogSettings settings)
: base()
{
_temporarySettings = settings;
}
public void Dispose()
{
//clear any cached credential information that may have been set by the wizard
ClearTransientCredentials();
// The finalizer exists only to flag missed Dispose calls in debug builds;
// once cleanup has run there is nothing left for it to do.
System.GC.SuppressFinalize(this);
}
// Debug-only leak detector: reaching the finalizer means Dispose() was
// never called on this controller.
~WeblogConfigurationWizardController()
{
Debug.Fail("Wizard controller was not disposed");
}
// Runs the first-launch ("welcome") wizard: a welcome page followed by the
// confirmation page. Returns the id of the saved blog, or null if cancelled
// (see ShowBlogWizard).
private string WelcomeWeblog(IWin32Window owner)
{
_preventSwitchingToWeblog = true;
// welcome is the same as add with one additional step on the front end
WizardStep wizardStep;
addWizardStep(
wizardStep = new WizardStep(new WeblogConfigurationWizardPanelWelcome(),
StringId.ConfigWizardWelcome,
null, null, new NextCallback(OnWelcomeCompleted), null, null));
// the welcome panel should not grab focus when displayed
wizardStep.WantsFocus = false;
addWizardStep(
new WizardStep(new WeblogConfigurationWizardPanelConfirmation(),
StringId.ConfigWizardComplete,
new DisplayCallback(OnConfirmationDisplayed),
new VerifyStepCallback(OnValidatePanel),
new NextCallback(OnConfirmationCompleted),
null,
null));
bool switchToWeblog;
return ShowBlogWizard(ApplicationEnvironment.ProductNameQualified, owner, out switchToWeblog);
}
// Runs the add-weblog wizard starting directly at the basic-info step: the
// caller already knows this is neither a Spaces nor a SharePoint blog, so
// the blog-type chooser is skipped.
private string AddWeblogSkipType(IWin32Window owner, string caption, bool permitSwitchingWeblogs, out bool switchToWeblog)
{
_preventSwitchingToWeblog = !permitSwitchingWeblogs;
_temporarySettings.IsSpacesBlog = false;
_temporarySettings.IsSharePointBlog = false;
AddBasicInfoSubStep();
AddConfirmationStep();
return ShowBlogWizard(caption, owner, out switchToWeblog);
}
// Runs the full add-weblog wizard, starting with the blog-type chooser and
// ending at the confirmation step. Returns the saved blog id or null.
private string AddWeblog(IWin32Window owner, string caption, bool permitSwitchingWeblogs, out bool switchToWeblog)
{
_preventSwitchingToWeblog = !permitSwitchingWeblogs;
AddChooseBlogTypeStep();
AddConfirmationStep();
return ShowBlogWizard(caption, owner, out switchToWeblog);
}
// Displays the assembled wizard. On OK, persists the temporary settings to
// permanent BlogSettings storage and returns the blog id; on cancel,
// returns null. switchToWeblog reflects the user's choice either way.
private string ShowBlogWizard(string caption, IWin32Window owner, out bool switchToWeblog)
{
// blog id to return
string blogId = null;
if (ShowDialog(owner, caption) == DialogResult.OK)
{
// save the blog settings
using (BlogSettings blogSettings = BlogSettings.ForBlogId(_temporarySettings.Id))
{
_temporarySettings.Save(blogSettings);
blogId = blogSettings.Id;
}
// note the last added weblog (for re-selection in subsequent invocations of the dialog)
WeblogConfigurationWizardSettings.LastServiceName = _temporarySettings.ServiceName;
}
switchToWeblog = _temporarySettings.SwitchToWeblog;
return blogId;
}
// Runs the edit flavor of the wizard over the already-populated temporary
// settings. Returns true when the user confirmed the dialog (the caller is
// responsible for persisting the edited settings).
private bool EditWeblogTemporarySettings(IWin32Window owner)
{
    // first step conditional on blog type
    if (_temporarySettings.IsSharePointBlog)
    {
        AddSharePointBasicInfoSubStep(true);
    }
    else
    {
        AddBasicInfoSubStep();
    }

    AddConfirmationStep();

    // Collapse the former if/else-return-true/false into a direct boolean.
    return ShowDialog(owner, Res.Get(StringId.UpdateAccountConfigurationTitle)) == DialogResult.OK;
}
/// <summary>
/// Queues the top-level "choose weblog type" wizard step.
/// </summary>
private void AddChooseBlogTypeStep()
{
    addWizardStep(
        new WizardStep(new WeblogConfigurationWizardPanelBlogType(),
        StringId.ConfigWizardChooseWeblogType,
        new DisplayCallback(OnChooseBlogTypeDisplayed),
        null, // no verify callback for this panel
        new NextCallback(OnChooseBlogTypeCompleted),
        null,
        null));
}
/// <summary>
/// Queues the "basic info" sub-step (homepage URL and credentials entry).
/// </summary>
private void AddBasicInfoSubStep()
{
    addWizardSubStep(
        new WizardSubStep(new WeblogConfigurationWizardPanelBasicInfo(),
        StringId.ConfigWizardBasicInfo,
        new DisplayCallback(OnBasicInfoDisplayed),
        new VerifyStepCallback(OnValidatePanel),
        new NextCallback(OnBasicInfoCompleted),
        null,
        null));
}
/// <summary>
/// Queues the SharePoint "basic info" sub-step and records whether a separate
/// authentication step should follow it.
/// </summary>
/// <param name="showAuthenticationStep">true to require the SharePoint authentication step.</param>
private void AddSharePointBasicInfoSubStep(bool showAuthenticationStep)
{
    addWizardSubStep(
        new WizardSubStep(new WeblogConfigurationWizardPanelSharePointBasicInfo(),
        StringId.ConfigWizardSharePointHomepage,
        new DisplayCallback(OnBasicInfoDisplayed),
        new VerifyStepCallback(OnValidatePanel),
        new NextCallback(OnSharePointBasicInfoCompleted),
        new NextCallback(OnSharePointBasicInfoUndone),
        null));
    _authenticationRequired = showAuthenticationStep;
}
/// <summary>
/// Queues the Google Blogger OAuth authentication sub-step.
/// </summary>
private void AddGoogleBloggerOAuthSubStep()
{
    addWizardSubStep(
        new WizardSubStep(new WeblogConfigurationWizardPanelGoogleBloggerAuthentication(_temporarySettings.Id, this),
        null, // no title string for this panel
        new DisplayCallback(OnBasicInfoDisplayed),
        new VerifyStepCallback(OnValidatePanel),
        new NextCallback(OnGoogleBloggerOAuthCompleted),
        null,
        new BackCallback(OnGoogleBloggerOAuthBack)));
}
/// <summary>
/// Queues the final confirmation wizard step.
/// </summary>
private void AddConfirmationStep()
{
    addWizardStep(
        new WizardStep(new WeblogConfigurationWizardPanelConfirmation(),
        StringId.ConfigWizardComplete,
        new DisplayCallback(OnConfirmationDisplayed),
        new VerifyStepCallback(OnValidatePanel),
        new NextCallback(OnConfirmationCompleted),
        null,
        null));
}
/// <summary>
/// Creates and shows the wizard form modally, scoping it as the blog-client UI
/// context for the duration, and cancels any still-running detection operation
/// on the way out. _owner/_wizardForm are only non-null while the dialog lives.
/// </summary>
/// <param name="owner">Owner window; null means the wizard is top-level.</param>
/// <param name="title">Window title for the wizard form.</param>
/// <returns>The dialog result of the wizard form.</returns>
private DialogResult ShowDialog(IWin32Window owner, string title)
{
    using (new WaitCursor())
    {
        DialogResult result;
        using (_wizardForm = new WeblogConfigurationWizard(this))
        {
            using (new BlogClientUIContextScope(_wizardForm))
            {
                _owner = _wizardForm;
                _wizardForm.Text = title;
                // Show in taskbar if it's a top-level window. This is true during welcome
                if (owner == null)
                {
                    _wizardForm.ShowInTaskbar = true;
                    _wizardForm.StartPosition = FormStartPosition.CenterScreen;
                }
                result = _wizardForm.ShowDialog(owner);
                _owner = null;
            }
        }
        // the form has been disposed by the using; drop the stale reference
        _wizardForm = null;
        // abort any in-flight auto-detection left over from the wizard session
        if (_detectionOperation != null && !_detectionOperation.IsDone)
            _detectionOperation.Cancel();
        return result;
    }
}
#endregion
#region Welcome Panel
/// <summary>
/// Welcome step completed: queue the "choose weblog type" panel as the next sub-step.
/// </summary>
private void OnWelcomeCompleted(Object stepControl)
{
    //setup the next steps based on which choice the user selected.
    addWizardSubStep(
        new WizardSubStep(new WeblogConfigurationWizardPanelBlogType(),
        StringId.ConfigWizardChooseWeblogType,
        new DisplayCallback(OnChooseBlogTypeDisplayed),
        null,
        new NextCallback(OnChooseBlogTypeCompleted),
        null,
        null));
}
#endregion
#region Choose Blog Type Panel
/// <summary>
/// Display callback for the blog-type panel: resets state cached by later
/// steps and re-enables navigation buttons.
/// </summary>
private void OnChooseBlogTypeDisplayed(Object stepControl)
{
    // Fix for 483356: navigating Back to this panel must not leave stale
    // credentials or a stale blog name to be reused downstream in the wizard.
    ClearTransientCredentials();
    _temporarySettings.BlogName = string.Empty;
    // Bug 681904: Next and Cancel must always be available on this panel.
    NextEnabled = true;
    CancelEnabled = true;
    // tell the panel it is being (re)displayed so it can reset its dirty state
    WeblogConfigurationWizardPanelBlogType blogTypePanel = stepControl as WeblogConfigurationWizardPanelBlogType;
    blogTypePanel.OnDisplayPanel();
}
/// <summary>
/// Blog-type step completed: records the chosen type on the temporary settings
/// and queues the matching "basic info" / OAuth sub-step.
/// </summary>
private void OnChooseBlogTypeCompleted(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelBlogType panelBlogType = stepControl as WeblogConfigurationWizardPanelBlogType;
    // if the user is changing types then blank out the blog info
    if (panelBlogType.UserChangedSelection)
    {
        _temporarySettings.HomepageUrl = String.Empty;
        _temporarySettings.Credentials.Clear();
    }
    // set the user's choice
    _temporarySettings.IsSharePointBlog = panelBlogType.IsSharePointBlog;
    _temporarySettings.IsGoogleBloggerBlog = panelBlogType.IsGoogleBloggerBlog;
    // did this bootstrap a custom account wizard?
    _providerAccountWizard = panelBlogType.ProviderAccountWizard;
    // add the next wizard sub step as appropriate
    if (_temporarySettings.IsSharePointBlog)
    {
        AddSharePointBasicInfoSubStep(false);
    }
    else if (_temporarySettings.IsGoogleBloggerBlog)
    {
        AddGoogleBloggerOAuthSubStep();
    }
    else
    {
        AddBasicInfoSubStep();
    }
}
#endregion
/// <summary>
/// Parses an "add blog" URI of the form http://username:password@blogUrl/ into
/// its credential parts and the credential-free homepage URL.
/// </summary>
/// <param name="blogToAdd">URI possibly carrying userinfo credentials.</param>
/// <param name="username">Decoded user name, or null when the URI has no userinfo.</param>
/// <param name="password">Decoded password, or null when none was supplied.</param>
/// <param name="homepageUrl">The URI with the userinfo section stripped.</param>
private static void ParseAddBlogUri(Uri blogToAdd, out string username, out string password, out Uri homepageUrl)
{
    // Uri.UserInfo yields "username:password" as a single URL-encoded string.
    username = null;
    password = null;
    string userInfo = System.Web.HttpUtility.UrlDecode(blogToAdd.UserInfo);
    if (!string.IsNullOrEmpty(userInfo))
    {
        // Split on the FIRST ':' only, so passwords containing ':' are not truncated.
        string[] userInfoSplit = userInfo.Split(new char[] { ':' }, 2);
        username = userInfoSplit[0];
        if (userInfoSplit.Length > 1)
        {
            password = userInfoSplit[1];
        }
    }
    // Strip the username:password from the URL and return the remainder.
    homepageUrl = new Uri(blogToAdd.GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped));
}
#region Basic Info Panel
/// <summary>
/// Display callback for the basic-info panels: seeds the panel with the current
/// temporary settings via the IAccountBasicInfoProvider interface.
/// </summary>
private void OnBasicInfoDisplayed(Object stepControl)
{
    // Fixes for 483356: In account configuration wizard, hitting back in select provider or success screens causes anomalous behavior
    // Need to clear cached credentials and cached blogname otherwise they'll be used downstream in the wizard...
    _temporarySettings.BlogName = string.Empty;
    // get reference to data interface for panel
    IAccountBasicInfoProvider basicInfo = stepControl as IAccountBasicInfoProvider;
    // populate basic data
    basicInfo.ProviderAccountWizard = _providerAccountWizard;
    basicInfo.AccountId = _temporarySettings.Id;
    basicInfo.HomepageUrl = _temporarySettings.HomepageUrl;
    basicInfo.ForceManualConfiguration = _temporarySettings.ForceManualConfig;
    basicInfo.Credentials = _temporarySettings.Credentials;
    // default SavePassword to true only when a password is present and no explicit choice was recorded
    basicInfo.SavePassword = basicInfo.Credentials.Password != String.Empty && (_temporarySettings.SavePassword ?? true);
}
// Callback used to kick off the appropriate auto-detection sub-step.
private delegate void PerformBlogAutoDetection();
/// <summary>
/// Basic-info step completed: persist the entered data and run standard
/// weblog-and-settings auto-detection.
/// </summary>
private void OnBasicInfoCompleted(Object stepControl)
{
    OnBasicInfoAndAuthenticationCompleted((IAccountBasicInfoProvider)stepControl, new PerformBlogAutoDetection(PerformWeblogAndSettingsAutoDetectionSubStep));
}
/// <summary>
/// Copies the collected basic-info/authentication values into the temporary
/// settings, then either runs auto-detection or falls back to manual provider
/// selection when the user forced manual configuration.
/// </summary>
/// <param name="basicInfo">Panel data interface to read from.</param>
/// <param name="performBlogAutoDetection">Auto-detection sub-step to run when not configuring manually.</param>
private void OnBasicInfoAndAuthenticationCompleted(IAccountBasicInfoProvider basicInfo, PerformBlogAutoDetection performBlogAutoDetection)
{
    // copy the settings
    _temporarySettings.HomepageUrl = basicInfo.HomepageUrl;
    _temporarySettings.ForceManualConfig = basicInfo.ForceManualConfiguration;
    _temporarySettings.Credentials = basicInfo.Credentials;
    _temporarySettings.SavePassword = basicInfo.SavePassword;
    // clear the transient credentials so we don't accidentally use cached credentials
    ClearTransientCredentials();
    if (!_temporarySettings.ForceManualConfig)
    {
        // perform auto-detection
        performBlogAutoDetection();
    }
    else
    {
        PerformSelectProviderSubStep();
    }
}
/// <summary>
/// SharePoint basic-info completed: either insert the authentication step
/// (when required) or go straight to SharePoint auto-detection.
/// </summary>
private void OnSharePointBasicInfoCompleted(Object stepControl)
{
    if (_authenticationRequired)
        AddSharePointAuthenticationStep((IAccountBasicInfoProvider)stepControl);
    else
        OnBasicInfoAndAuthenticationCompleted((IAccountBasicInfoProvider)stepControl, new PerformBlogAutoDetection(PerformSharePointAutoDetectionSubStep));
}
/// <summary>
/// Undo callback for the SharePoint basic-info step: if authentication became
/// required but its step was never added, add it now and advance into it.
/// </summary>
private void OnSharePointBasicInfoUndone(Object stepControl)
{
    if (_authenticationRequired && !_authenticationStepAdded)
    {
        AddSharePointAuthenticationStep((IAccountBasicInfoProvider)stepControl);
        next();
    }
}
/// <summary>
/// Queues the SharePoint authentication sub-step exactly once
/// (_authenticationStepAdded guards against duplicates).
/// </summary>
/// <param name="basicInfoProvider">Data interface shared with the authentication panel.</param>
private void AddSharePointAuthenticationStep(IAccountBasicInfoProvider basicInfoProvider)
{
    if (!_authenticationStepAdded)
    {
        addWizardSubStep(new WizardSubStep(new WeblogConfigurationWizardPanelSharePointAuthentication(basicInfoProvider),
        StringId.ConfigWizardSharePointLogin,
        new WizardController.DisplayCallback(OnSharePointAuthenticationDisplayed),
        new VerifyStepCallback(OnValidatePanel),
        new WizardController.NextCallback(OnSharePointAuthenticationComplete),
        null,
        new WizardController.BackCallback(OnSharePointAuthenticationBack)));
        _authenticationStepAdded = true;
    }
}
#endregion
#region Weblog and Settings Auto Detection
/// <summary>
/// Queues the weblog-and-settings auto-detection sub-step, first clearing any
/// previously selected provider.
/// </summary>
private void PerformWeblogAndSettingsAutoDetectionSubStep()
{
    // Clear the provider so the user will be forced to do autodetection
    // until we have successfully configured a publishing interface
    _temporarySettings.ClearProvider();
    _detectionOperation = new WizardWeblogAndSettingsAutoDetectionOperation(_editWithStyleStep);
    // perform the step
    addWizardSubStep(new WizardAutoDetectionStep(
        (IBlogClientUIContext)this,
        _temporarySettings,
        new NextCallback(OnWeblogAndSettingsAutoDetectionCompleted),
        _detectionOperation));
}
// In-flight auto-detection operation; cancelled in ShowDialog teardown if still running.
private WizardWeblogAndSettingsAutoDetectionOperation _detectionOperation;
/// <summary>
/// Queues the SharePoint auto-detection sub-step, first clearing any
/// previously selected provider.
/// </summary>
private void PerformSharePointAutoDetectionSubStep()
{
    // Clear the provider so the user will be forced to do autodetection
    // until we have successfully configured a publishing interface
    _temporarySettings.ClearProvider();
    AddAutoDetectionStep();
}
/// <summary>
/// Builds and queues the SharePoint auto-detection step; when no explicit
/// authentication step exists yet, subscribes to authentication errors so one
/// can be demanded later.
/// </summary>
private void AddAutoDetectionStep()
{
    _detectionOperation = new WizardSharePointAutoDetectionOperation(_editWithStyleStep);
    WizardSharePointAutoDetectionStep sharePointDetectionStep =
        new WizardSharePointAutoDetectionStep(
        (IBlogClientUIContext)this,
        _temporarySettings,
        new NextCallback(OnWeblogAndSettingsAutoDetectionCompleted),
        _detectionOperation);
    // only listen for auth errors when the explicit auth step hasn't already been added
    if (!_authenticationStepAdded)
        sharePointDetectionStep.AuthenticationErrorOccurred += new EventHandler(sharePointDetectionStep_AuthenticationErrorOccurred);
    addWizardSubStep(sharePointDetectionStep);
}
/// <summary>
/// An authentication failure during SharePoint detection means credentials
/// must be collected explicitly.
/// </summary>
private void sharePointDetectionStep_AuthenticationErrorOccurred(object sender, EventArgs e)
{
    _authenticationRequired = true;
}
/// <summary>
/// Display callback for the SharePoint login panel: seeds it with the
/// credentials gathered so far; SavePassword is preset only when a password
/// is already present.
/// </summary>
private void OnSharePointAuthenticationDisplayed(Object stepControl)
{
    WeblogConfigurationWizardPanelSharePointAuthentication authPanel =
        stepControl as WeblogConfigurationWizardPanelSharePointAuthentication;
    authPanel.Credentials = _temporarySettings.Credentials;
    authPanel.SavePassword = _temporarySettings.Credentials.Password != String.Empty;
}
/// <summary>
/// SharePoint authentication completed: persist the values and run SharePoint auto-detection.
/// </summary>
private void OnSharePointAuthenticationComplete(Object stepControl)
{
    OnBasicInfoAndAuthenticationCompleted((IAccountBasicInfoProvider)stepControl, new PerformBlogAutoDetection(PerformSharePointAutoDetectionSubStep));
}
/// <summary>
/// Backing out of the authentication panel allows the step to be re-added later.
/// </summary>
private void OnSharePointAuthenticationBack(Object stepControl)
{
    _authenticationStepAdded = false;
}
/// <summary>
/// Google Blogger OAuth completed: persist the values and run standard auto-detection.
/// </summary>
private void OnGoogleBloggerOAuthCompleted(Object stepControl)
{
    OnBasicInfoAndAuthenticationCompleted((IAccountBasicInfoProvider)stepControl, new PerformBlogAutoDetection(PerformWeblogAndSettingsAutoDetectionSubStep));
}
/// <summary>
/// Backing out of the OAuth panel must abort any in-flight authorization.
/// </summary>
private void OnGoogleBloggerOAuthBack(Object stepControl)
{
    ((WeblogConfigurationWizardPanelGoogleBloggerAuthentication)stepControl).CancelAuthorization();
}
/// <summary>
/// Auto-detection finished: route to blog selection, manual provider
/// selection, or image-endpoint selection depending on what was detected.
/// </summary>
private void OnWeblogAndSettingsAutoDetectionCompleted(Object stepControl)
{
    // if we weren't able to identify a specific weblog
    if (_temporarySettings.HostBlogId == String.Empty)
    {
        // if we have a list of weblogs then show the blog list
        if (_temporarySettings.HostBlogs.Length > 0)
        {
            PerformSelectBlogSubStep();
        }
        else // kick down to select a provider
        {
            PerformSelectProviderSubStep();
        }
    }
    else
    {
        PerformSelectImageEndpointIfNecessary();
    }
}
/// <summary>
/// Queues the image-endpoint selection sub-step when a blog was identified and
/// at least one image endpoint is available.
/// </summary>
private void PerformSelectImageEndpointIfNecessary()
{
    if (_temporarySettings.HostBlogId != string.Empty
        && _temporarySettings.AvailableImageEndpoints != null
        && _temporarySettings.AvailableImageEndpoints.Length > 0)
    {
        /*
        if (_temporarySettings.AvailableImageEndpoints.Length == 1)
        {
        IDictionary optionOverrides = _temporarySettings.OptionOverrides;
        optionOverrides[BlogClientOptions.IMAGE_ENDPOINT] = _temporarySettings.AvailableImageEndpoints[0].Id;
        _temporarySettings.OptionOverrides = optionOverrides;
        }
        else
        PerformSelectImageEndpointSubStep();
        */
        // currently we always show the image endpoint selection UI if we find at least one.
        PerformSelectImageEndpointSubStep();
    }
}
#endregion
#region Select Provider Panel
/// <summary>
/// Queues the manual provider-selection sub-step.
/// </summary>
void PerformSelectProviderSubStep()
{
    addWizardSubStep(new WizardSubStep(
        new WeblogConfigurationWizardPanelSelectProvider(),
        StringId.ConfigWizardSelectProvider,
        new DisplayCallback(OnSelectProviderDisplayed),
        new VerifyStepCallback(OnValidatePanel),
        new NextCallback(OnSelectProviderCompleted),
        null,
        null));
}
/// <summary>
/// Display callback for the provider panel: shows it with the current account context.
/// </summary>
void OnSelectProviderDisplayed(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelSelectProvider panelSelectProvider = stepControl as WeblogConfigurationWizardPanelSelectProvider;
    // show the panel
    panelSelectProvider.ShowPanel(
        _temporarySettings.ServiceName,
        _temporarySettings.HomepageUrl,
        _temporarySettings.Id,
        _temporarySettings.Credentials);
}
/// <summary>
/// Provider selection completed: record the chosen provider and (if known) the
/// target blog, then either prompt for a blog or fetch the editing template.
/// </summary>
void OnSelectProviderCompleted(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelSelectProvider panelSelectProvider = stepControl as WeblogConfigurationWizardPanelSelectProvider;
    // record the provider and blog info
    IBlogProviderDescription provider = panelSelectProvider.SelectedBlogProvider;
    _temporarySettings.SetProvider(provider.Id, provider.Name, provider.PostApiUrl, provider.ClientType);
    _temporarySettings.HostBlogId = String.Empty;
    if (panelSelectProvider.TargetBlog != null)
        _temporarySettings.SetBlogInfo(panelSelectProvider.TargetBlog);
    _temporarySettings.HostBlogs = panelSelectProvider.UsersBlogs;
    // If we don't yet have a HostBlogId then the user needs to choose from
    // among available weblogs
    if (_temporarySettings.HostBlogId == String.Empty)
    {
        PerformSelectBlogSubStep();
    }
    else
    {
        // if we have not downloaded an editing template yet for this
        // weblog then execute this now
        PerformSettingsAutoDetectionSubStepIfNecessary();
    }
}
#endregion
#region Select Blog Panel
/// <summary>
/// Queues the "select weblog" sub-step used when multiple weblogs are available.
/// </summary>
void PerformSelectBlogSubStep()
{
    addWizardSubStep(new WizardSubStep(
        new WeblogConfigurationWizardPanelSelectBlog(),
        StringId.ConfigWizardSelectWeblog,
        new DisplayCallback(OnSelectBlogDisplayed),
        new VerifyStepCallback(OnValidatePanel),
        new NextCallback(OnSelectBlogCompleted),
        null,
        null));
}
/// <summary>
/// Display callback for the blog-selection panel: shows the detected host blogs.
/// </summary>
void OnSelectBlogDisplayed(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelSelectBlog panelSelectBlog = stepControl as WeblogConfigurationWizardPanelSelectBlog;
    // show the panel
    panelSelectBlog.ShowPanel(_temporarySettings.HostBlogs, _temporarySettings.HostBlogId);
}
/// <summary>
/// Blog selection completed: record the chosen blog and fetch the editing
/// template if it hasn't been downloaded yet.
/// </summary>
private void OnSelectBlogCompleted(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelSelectBlog panelSelectBlog = stepControl as WeblogConfigurationWizardPanelSelectBlog;
    // get the selected blog
    _temporarySettings.SetBlogInfo(panelSelectBlog.SelectedBlog);
    // if we have not downloaded an editing template yet for this
    // weblog then execute this now
    PerformSettingsAutoDetectionSubStepIfNecessary();
}
#endregion
#region Select Image Endpoint Panel
/// <summary>
/// Queues the image-endpoint selection sub-step (reuses the blog-selection
/// panel with a different label).
/// </summary>
void PerformSelectImageEndpointSubStep()
{
    WeblogConfigurationWizardPanelSelectBlog panel = new WeblogConfigurationWizardPanelSelectBlog();
    panel.LabelText = Res.Get(StringId.CWSelectImageEndpointText);
    addWizardSubStep(new WizardSubStep(
        panel,
        StringId.ConfigWizardSelectImageEndpoint,
        new DisplayCallback(OnSelectImageEndpointDisplayed),
        new VerifyStepCallback(OnValidatePanel),
        new NextCallback(OnSelectImageEndpointCompleted),
        null,
        null));
}
/// <summary>
/// Display callback for the image-endpoint panel: shows the available
/// endpoints with the currently overridden endpoint pre-selected.
/// </summary>
void OnSelectImageEndpointDisplayed(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelSelectBlog panelSelectImageEndpoint = stepControl as WeblogConfigurationWizardPanelSelectBlog;
    // show the panel
    panelSelectImageEndpoint.ShowPanel(_temporarySettings.AvailableImageEndpoints, _temporarySettings.OptionOverrides[BlogClientOptions.IMAGE_ENDPOINT] as string);
}
/// <summary>
/// Image-endpoint selection completed: store the chosen endpoint id in the
/// home-page overrides.
/// </summary>
private void OnSelectImageEndpointCompleted(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelSelectBlog panelSelectBlog = stepControl as WeblogConfigurationWizardPanelSelectBlog;
    // get the selected blog
    // NOTE(review): the display callback reads OptionOverrides but this writes
    // HomePageOverrides — confirm the asymmetry is intentional.
    IDictionary optionOverrides = _temporarySettings.HomePageOverrides;
    optionOverrides[BlogClientOptions.IMAGE_ENDPOINT] = panelSelectBlog.SelectedBlog.Id;
    _temporarySettings.HomePageOverrides = optionOverrides;
}
#endregion
#region Weblog Settings Auto Detection
/// <summary>
/// Runs settings auto-detection only when no editing template has been
/// downloaded yet for the selected weblog.
/// </summary>
private void PerformSettingsAutoDetectionSubStepIfNecessary()
{
    if (_temporarySettings.TemplateFiles.Length == 0)
    {
        PerformSettingsAutoDetectionSubStep();
    }
}
/// <summary>
/// Queues the settings-only auto-detection sub-step.
/// </summary>
private void PerformSettingsAutoDetectionSubStep()
{
    // perform the step
    addWizardSubStep(new WizardAutoDetectionStep(
        (IBlogClientUIContext)this,
        _temporarySettings, new NextCallback(OnPerformSettingsAutoDetectionCompleted),
        new WizardSettingsAutoDetectionOperation(_editWithStyleStep)));
}
/// <summary>
/// Settings auto-detection completed: offer image-endpoint selection if applicable.
/// </summary>
private void OnPerformSettingsAutoDetectionCompleted(object stepControl)
{
    PerformSelectImageEndpointIfNecessary();
}
#endregion
#region Confirmation Panel
/// <summary>
/// Display callback for the confirmation panel: shows the accumulated settings.
/// </summary>
void OnConfirmationDisplayed(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelConfirmation panelConfirmation = stepControl as WeblogConfigurationWizardPanelConfirmation;
    // show the panel
    panelConfirmation.ShowPanel(_temporarySettings, _preventSwitchingToWeblog);
}
/// <summary>
/// Confirmation completed: record the final weblog name and switch-to choice.
/// </summary>
void OnConfirmationCompleted(Object stepControl)
{
    // get reference to panel
    WeblogConfigurationWizardPanelConfirmation panelConfirmation = stepControl as WeblogConfigurationWizardPanelConfirmation;
    // save settings
    _temporarySettings.BlogName = panelConfirmation.WeblogName;
    _temporarySettings.SwitchToWeblog = panelConfirmation.SwitchToWeblog;
}
#endregion
#region Generic Helpers
/// <summary>
/// Shared verify callback: delegates validation to the panel itself.
/// </summary>
/// <returns>true when the panel's contents are valid.</returns>
private bool OnValidatePanel(Object panelControl)
{
    WeblogConfigurationWizardPanel wizardPanel = panelControl as WeblogConfigurationWizardPanel;
    return wizardPanel.ValidatePanel();
}
/// <summary>
/// Clear any cached credential information for the blog
/// </summary>
private void ClearTransientCredentials()
{
    //clear any cached credential information associated with this blog (fixes bug 373063)
    new BlogCredentialsAccessor(_temporarySettings.Id, _temporarySettings.Credentials).TransientCredentials = null;
}
#endregion
#region Private Members
// Points at the wizard form while it is showing; null otherwise.
private IWin32Window _owner = null;
// The live wizard form; non-null only for the duration of ShowDialog.
private WeblogConfigurationWizard _wizardForm;
// Working copy of the blog settings; persisted only when the wizard ends with OK.
private TemporaryBlogSettings _temporarySettings;
// True when the confirmation panel must not offer "switch to this weblog".
private bool _preventSwitchingToWeblog = false;
// Passed to the auto-detection operations; assigned elsewhere (not visible here).
private WizardStep _editWithStyleStep = null;
// Custom account wizard bootstrapper supplied by the chosen blog type, if any.
private IBlogProviderAccountWizardDescription _providerAccountWizard;
// SharePoint: whether an explicit authentication step is required.
private bool _authenticationRequired = false;
// SharePoint: guards against adding the authentication step twice.
private bool _authenticationStepAdded;
#endregion
#region IBlogClientUIContext Members
// Delegate the IWin32Window / ISynchronizeInvoke members to the live wizard
// form so background operations can marshal calls onto the UI thread.
// NOTE: these throw NullReferenceException if invoked while no wizard is showing.
IntPtr IWin32Window.Handle { get { return _wizardForm.Handle; } }
bool ISynchronizeInvoke.InvokeRequired { get { return _wizardForm.InvokeRequired; } }
IAsyncResult ISynchronizeInvoke.BeginInvoke(Delegate method, object[] args) { return _wizardForm.BeginInvoke(method, args); }
object ISynchronizeInvoke.EndInvoke(IAsyncResult result) { return _wizardForm.EndInvoke(result); }
object ISynchronizeInvoke.Invoke(Delegate method, object[] args) { return _wizardForm.Invoke(method, args); }
#endregion
}
/// <summary>
/// Data interface implemented by the "basic info" style wizard panels so the
/// controller can seed them with, and read back, account configuration values.
/// </summary>
internal interface IAccountBasicInfoProvider
{
    // Write-only: bootstrap info for a provider-specific account wizard.
    IBlogProviderAccountWizardDescription ProviderAccountWizard { set; }
    // Write-only: id of the account being configured.
    string AccountId { set; }
    string HomepageUrl { get; set; }
    bool SavePassword { get; set; }
    IBlogCredentials Credentials { get; set; }
    bool ForceManualConfiguration { get; set; }
    // True when the panel's values differ from the supplied settings.
    bool IsDirty(TemporaryBlogSettings settings);
    BlogInfo BlogAccount { get; }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.ComponentModel.Composition.Hosting;
using System.Linq;
using System.Reflection;
using Xunit;
namespace System.ComponentModel.Composition
{
/// <summary>
/// Tests for MetadataStore: dynamically attaching TypeDescriptor metadata
/// (here, a TypeConverter) either globally or per CompositionContainer.
/// Dispose resets the global container between tests.
/// </summary>
public class DynamicMetadata : IDisposable
{
    // Attaching a converter via the global MetadataStore.Container changes
    // what TypeDescriptor.GetConverter reports for the target type.
    [Fact]
    public void SimpleAttachment()
    {
        MetadataStore.Container = new CompositionContainer();
        DynamicMetadataTestClass val = DynamicMetadataTestClass.Get("42");
        var notYetAttached = TypeDescriptor.GetConverter(val);
        Assert.False(notYetAttached.CanConvertFrom(typeof(string)), "The default type converter for DynamicMetadataTestClass shouldn't support round tripping");
        MetadataStore.AddAttribute(
            typeof(DynamicMetadataTestClass),
            (type, attributes) =>
                Enumerable.Concat(
                    attributes,
                    new Attribute[] { new TypeConverterAttribute(typeof(DynamicMetadataTestClassConverter)) }
                )
            );
        var attached = TypeDescriptor.GetConverter(val);
        Assert.True(attached.CanConvertFrom(typeof(string)), "The new type converter for DynamicMetadataTestClass should support round tripping");
    }
    // Metadata attached to a specific container is visible through that
    // container's TypeDescriptorServices but not through the global TypeDescriptor.
    [Fact]
    public void LocalContainer()
    {
        var container1 = new CompositionContainer();
        TypeDescriptorServices dat = new TypeDescriptorServices();
        CompositionBatch batch = new CompositionBatch();
        batch.AddPart(dat);
        container1.Compose(batch);
        MetadataStore.AddAttribute(
            typeof(DynamicMetadataTestClass),
            (type, attributes) =>
                Enumerable.Concat(
                    attributes,
                    new Attribute[] { new TypeConverterAttribute(typeof(DynamicMetadataTestClassConverter)) }
                ),
            container1
            );
        DynamicMetadataTestClass val = DynamicMetadataTestClass.Get("42");
        var notYetAttached = TypeDescriptor.GetConverter(val.GetType());
        Assert.False(notYetAttached.CanConvertFrom(typeof(string)), "The default type converter for DynamicMetadataTestClass shouldn't support round tripping");
        var attached = dat.GetConverter(val.GetType());
        Assert.True(attached.CanConvertFrom(typeof(string)), "The new type converter for DynamicMetadataTestClass should support round tripping");
    }
    // Metadata attached to one container must not leak into a second container.
    [Fact]
    public void DualContainers()
    {
        var container1 = new CompositionContainer();
        TypeDescriptorServices dat1 = new TypeDescriptorServices();
        CompositionBatch batch = new CompositionBatch();
        batch.AddPart(dat1);
        container1.Compose(batch);
        MetadataStore.AddAttribute(
            typeof(DynamicMetadataTestClass),
            (type, attributes) =>
                Enumerable.Concat(
                    attributes,
                    new Attribute[] { new TypeConverterAttribute(typeof(DynamicMetadataTestClassConverter)) }
                ),
            container1
            );
        var container2 = new CompositionContainer();
        CompositionBatch batch2 = new CompositionBatch();
        TypeDescriptorServices dat2 = new TypeDescriptorServices();
        batch2.AddPart(dat2);
        container2.Compose(batch2);
        DynamicMetadataTestClass val = DynamicMetadataTestClass.Get("42");
        var attached1 = dat1.GetConverter(val.GetType());
        Assert.True(attached1.CanConvertFrom(typeof(string)), "The new type converter for DynamicMetadataTestClass should support round tripping");
        var attached2 = dat2.GetConverter(val.GetType());
        Assert.False(attached2.CanConvertFrom(typeof(string)), "The default type converter for DynamicMetadataTestClass shouldn't support round tripping");
    }
    // xUnit teardown: clear the global container so tests stay independent.
    public void Dispose()
    {
        MetadataStore.Container = null;
    }
}
/// <summary>
/// Per-container registry mapping a Type to the TypeDescriptionProvider that
/// supplies its custom metadata. Exported so MetadataStore can locate one
/// instance per CompositionContainer.
/// </summary>
[Export]
public class TypeDescriptorServices
{
    Dictionary<Type, TypeDescriptionProvider> providers = new Dictionary<Type, TypeDescriptionProvider>();
    internal Dictionary<Type, TypeDescriptionProvider> Providers
    {
        get { return providers; }
        set { providers = value; }
    }
    /// <summary>
    /// Returns the descriptor from the provider registered for
    /// <paramref name="objectType"/>, or null when none is registered.
    /// The <paramref name="instance"/> argument is currently unused.
    /// </summary>
    public ICustomTypeDescriptor GetTypeDescriptor(Type objectType, object instance)
    {
        // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
        TypeDescriptionProvider provider;
        if (Providers.TryGetValue(objectType, out provider))
        {
            return provider.GetTypeDescriptor(objectType);
        }
        return null;
    }
    /// <summary>Registers (or replaces) the provider used for <paramref name="type"/>.</summary>
    public void AddProvider(TypeDescriptionProvider provider, Type type)
    {
        Providers[type] = provider;
    }
    /// <summary>
    /// Returns the converter from the registered descriptor for
    /// <paramref name="type"/>, falling back to the ambient
    /// TypeDescriptor.GetConverter when none is registered.
    /// </summary>
    public TypeConverter GetConverter(Type type)
    {
        var ictd = GetTypeDescriptor(type, null);
        if (ictd != null)
        {
            return ictd.GetConverter();
        }
        return TypeDescriptor.GetConverter(type);
    }
}
/// <summary>
/// Static facade for attaching extra attributes (metadata) to types at
/// runtime, scoped per CompositionContainer, and bridged into the
/// System.ComponentModel TypeDescriptor infrastructure.
/// NOTE(review): the static Container and registeredRedirect members are
/// global mutable state and are not synchronized — presumably single-threaded
/// test usage; confirm before using concurrently.
/// </summary>
public static class MetadataStore
{
    // Ambient container used when AddAttribute is called without an explicit one.
    public static CompositionContainer Container { get; set; }
    // Types for which a redirect provider has already been registered with TypeDescriptor.
    static Dictionary<Type, TypeDescriptionProvider> registeredRedirect = new Dictionary<Type, TypeDescriptionProvider>();
    /// <summary>Attaches an attribute provider for <paramref name="target"/> in the ambient container.</summary>
    public static void AddAttribute(Type target, Func<MemberInfo, IEnumerable<Attribute>, IEnumerable<Attribute>> provider)
    {
        AddAttribute(target, provider, MetadataStore.Container);
    }
    /// <summary>
    /// Attaches an attribute provider for <paramref name="target"/> in the
    /// given container and ensures TypeDescriptor routes through the redirect.
    /// </summary>
    public static void AddAttribute(Type target, Func<MemberInfo, IEnumerable<Attribute>, IEnumerable<Attribute>> provider, CompositionContainer container)
    {
        ContainerUnawareProviderRedirect.GetRedirect(container)[target] = new MetadataStoreProvider(target, provider);
        RegisterTypeDescriptorInterop(target);
    }
    // Registers (once per type) a redirect provider with TypeDescriptor; on
    // subsequent calls re-adds it to flush TypeDescriptor's metadata cache.
    private static void RegisterTypeDescriptorInterop(Type target)
    {
        if (!registeredRedirect.ContainsKey(target))
        {
            var r = new ContainerUnawareProviderRedirect(target);
            TypeDescriptor.AddProvider(r, target);
            registeredRedirect[target] = r;
        }
        else
        {
            // force a uncache of the information from TypeDescriptor
            //
            TypeDescriptor.RemoveProvider(registeredRedirect[target], target);
            TypeDescriptor.AddProvider(registeredRedirect[target], target);
        }
    }
    /// <summary>
    /// Gets (composing one on demand) the TypeDescriptorServices part of the
    /// container; null when <paramref name="container"/> is null.
    /// </summary>
    public static TypeDescriptorServices GetTypeDescriptorServicesForContainer(CompositionContainer container)
    {
        if (container != null)
        {
            var result = container.GetExportedValueOrDefault<TypeDescriptorServices>();
            if (result == null)
            {
                var v = new TypeDescriptorServices();
                CompositionBatch batch = new CompositionBatch();
                batch.AddPart(v);
                container.Compose(batch);
                return v;
            }
            return result;
        }
        return null;
    }
    // Provider registered with TypeDescriptor; resolves the actual provider
    // through the CURRENT ambient container at lookup time.
    private class ContainerUnawareProviderRedirect : TypeDescriptionProvider
    {
        public ContainerUnawareProviderRedirect(Type forType)
            : base(TypeDescriptor.GetProvider(forType))
        {
        }
        // Falls back to the default descriptor when the ambient container has
        // no registration for the type.
        public override ICustomTypeDescriptor GetTypeDescriptor(Type objectType, object instance)
        {
            var datd = GetTypeDescriptorServicesForContainer(MetadataStore.Container);
            if (datd == null || !datd.Providers.ContainsKey(objectType))
            {
                return base.GetTypeDescriptor(objectType, instance);
            }
            else
            {
                return datd.GetTypeDescriptor(objectType, instance);
            }
        }
        // The per-container Type -> provider map; null when container is null.
        internal static Dictionary<Type, TypeDescriptionProvider> GetRedirect(CompositionContainer container)
        {
            TypeDescriptorServices v = GetTypeDescriptorServicesForContainer(container);
            return v != null ? v.Providers : null;
        }
    }
    // Wraps the default provider and layers the attribute-provider function on
    // top of the returned descriptor.
    private class MetadataStoreProvider : TypeDescriptionProvider
    {
        Func<MemberInfo, IEnumerable<Attribute>, IEnumerable<Attribute>> provider;
        public MetadataStoreProvider(Type forType, Func<MemberInfo, IEnumerable<Attribute>, IEnumerable<Attribute>> provider)
            : base(TypeDescriptor.GetProvider(forType))
        {
            this.provider = provider;
        }
        public override ICustomTypeDescriptor GetTypeDescriptor(Type objectType, object instance)
        {
            ICustomTypeDescriptor descriptor = base.GetTypeDescriptor(objectType, instance);
            descriptor = new MetadataStoreTypeDescriptor(objectType, descriptor, provider);
            return descriptor;
        }
    }
    // Descriptor whose GetAttributes passes the parent's attributes through
    // the user-supplied provider function; GetConverter honors any
    // TypeConverterAttribute the provider added.
    private class MetadataStoreTypeDescriptor : CustomTypeDescriptor
    {
        Type targetType;
        Func<MemberInfo, IEnumerable<Attribute>, IEnumerable<Attribute>> provider;
        public MetadataStoreTypeDescriptor(Type targetType, ICustomTypeDescriptor parent, Func<MemberInfo, IEnumerable<Attribute>, IEnumerable<Attribute>> provider)
            : base(parent)
        {
            this.targetType = targetType;
            this.provider = provider;
        }
        // Instantiates the converter named by a TypeConverterAttribute (if the
        // attributes carry one); otherwise defers to the parent descriptor.
        public override TypeConverter GetConverter()
        {
            TypeConverterAttribute attribute = (TypeConverterAttribute)GetAttributes()[typeof(TypeConverterAttribute)];
            if (attribute != null)
            {
                Type c = this.GetTypeFromName(attribute.ConverterTypeName);
                if ((c != null) && typeof(TypeConverter).IsAssignableFrom(c))
                {
                    return (TypeConverter)Activator.CreateInstance(c);
                }
            }
            return base.GetConverter();
        }
        // Resolves a (possibly assembly-qualified) type name, trying the target
        // type's assembly first, then the full name, then the short name.
        private Type GetTypeFromName(string typeName)
        {
            if ((typeName == null) || (typeName.Length == 0))
            {
                return null;
            }
            int length = typeName.IndexOf(',');
            Type type = null;
            if (length == -1)
            {
                type = targetType.Assembly.GetType(typeName);
            }
            if (type == null)
            {
                type = Type.GetType(typeName);
            }
            if ((type == null) && (length != -1))
            {
                type = Type.GetType(typeName.Substring(0, length));
            }
            return type;
        }
        // Runs the parent's attributes through the provider function.
        public override AttributeCollection GetAttributes()
        {
            var n = new List<Attribute>();
            foreach (var attr in provider(targetType, base.GetAttributes().OfType<Attribute>()))
            {
                n.Add(attr);
            }
            return new AttributeCollection(n.ToArray());
        }
    }
}
/// <summary>
/// Simple test fixture type: wraps an int, constructed only via the
/// string-parsing factory <see cref="Get"/>.
/// </summary>
public class DynamicMetadataTestClass
{
    // Wrapped value; assigned once at construction.
    private readonly int _value;
    private DynamicMetadataTestClass(int value)
    {
        _value = value;
    }
    /// <summary>Renders the wrapped integer exactly as int.ToString() does.</summary>
    public override string ToString()
    {
        return _value.ToString();
    }
    /// <summary>Parses <paramref name="s"/> as an int and wraps it; throws on invalid input like int.Parse.</summary>
    public static DynamicMetadataTestClass Get(string s)
    {
        return new DynamicMetadataTestClass(int.Parse(s));
    }
}
/// <summary>
/// Converter that round-trips DynamicMetadataTestClass to and from string —
/// and nothing else. Used by the dynamic-metadata tests to detect whether
/// attribute attachment took effect.
/// </summary>
public class DynamicMetadataTestClassConverter : TypeConverter
{
    // Only string is accepted as a conversion source.
    public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
    {
        return typeof(string) == sourceType;
    }
    // Only string is offered as a conversion destination.
    public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
    {
        return typeof(string) == destinationType;
    }
    // String -> instance via the factory (int.Parse semantics).
    public override object ConvertFrom(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value)
    {
        return DynamicMetadataTestClass.Get((string)value);
    }
    // Instance -> string via ToString (destinationType is assumed to be string).
    public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType)
    {
        return ((DynamicMetadataTestClass)value).ToString();
    }
}
}
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.IO;
using log4net.Util;
namespace log4net.ObjectRenderer
{
/// <summary>
/// Map class objects to an <see cref="IObjectRenderer"/>.
/// </summary>
/// <remarks>
/// <para>
/// Maintains a mapping between types that require special
/// rendering and the <see cref="IObjectRenderer"/> that
/// is used to render them.
/// </para>
/// <para>
/// The <see cref="FindAndRender(object)"/> method is used to render an
/// <c>object</c> using the appropriate renderers defined in this map.
/// </para>
/// </remarks>
/// <author>Nicko Cadell</author>
/// <author>Gert Driesen</author>
public class RendererMap
{
// Declaring type passed to LogLog so internal log messages identify this class.
private readonly static Type declaringType = typeof(RendererMap);
#region Member Variables
// Maps Type -> IObjectRenderer; created as a synchronized Hashtable in the constructor.
private System.Collections.Hashtable m_map;
// Presumably caches resolved type->renderer lookups (populated by code outside this chunk — confirm).
private System.Collections.Hashtable m_cache = new System.Collections.Hashtable();
// Fallback renderer used when no type-specific renderer applies.
private static IObjectRenderer s_defaultRenderer = new DefaultRenderer();
#endregion
#region Constructors
/// <summary>
/// Default Constructor
/// </summary>
/// <remarks>
/// <para>
/// Default constructor.
/// </para>
/// </remarks>
public RendererMap()
{
    // synchronized wrapper so concurrent readers/writers of the map are safe
    m_map = System.Collections.Hashtable.Synchronized(new System.Collections.Hashtable());
}
#endregion
#endregion
/// <summary>
/// Render <paramref name="obj"/> using the appropriate renderer.
/// </summary>
/// <param name="obj">the object to render to a string</param>
/// <returns>the object rendered as a string</returns>
/// <remarks>
/// <para>
/// This is a convenience method used to render an object to a string.
/// The alternative method <see cref="FindAndRender(object,TextWriter)"/>
/// should be used when streaming output to a <see cref="TextWriter"/>.
/// </para>
/// </remarks>
public string FindAndRender(object obj)
{
    // Optimisation for strings
    string strData = obj as String;
    if (strData != null)
    {
        return strData;
    }
    // InvariantCulture so rendered output is not affected by the current thread culture
    StringWriter stringWriter = new StringWriter(System.Globalization.CultureInfo.InvariantCulture);
    FindAndRender(obj, stringWriter);
    return stringWriter.ToString();
}
/// <summary>
/// Render <paramref name="obj"/> using the appropriate renderer.
/// </summary>
/// <param name="obj">the object to render to a string</param>
/// <param name="writer">The writer to render to</param>
/// <remarks>
/// <para>
/// Find the appropriate renderer for the type of the
/// <paramref name="obj"/> parameter. This is accomplished by calling the
/// <see cref="Get(Type)"/> method. Once a renderer is found, it is
/// applied on the object <paramref name="obj"/> and the result is returned
/// as a <see cref="string"/>.
/// </para>
/// <para>
/// Any exception thrown by a renderer is caught and reported inside a
/// &lt;log4net.Error&gt; element rather than propagated to the caller.
/// </para>
/// </remarks>
public void FindAndRender(object obj, TextWriter writer)
{
    if (obj == null)
    {
        writer.Write(SystemInfo.NullText);
    }
    else
    {
        // Optimisation for strings
        string str = obj as string;
        if (str != null)
        {
            writer.Write(str);
        }
        else
        {
            // Lookup the renderer for the specific type
            try
            {
                Get(obj.GetType()).RenderObject(this, obj, writer);
            }
            catch(Exception ex)
            {
                // Exception rendering the object
                log4net.Util.LogLog.Error(declaringType, "Exception while rendering object of type ["+obj.GetType().FullName+"]", ex);
                // return default message
                // obj is known non-null in this branch and GetType() never
                // returns null, so read the type name directly (the previous
                // redundant null checks have been removed)
                string objectTypeName = obj.GetType().FullName;
                writer.Write("<log4net.Error>Exception rendering object type ["+objectTypeName+"]");
                // ex can never be null inside a catch clause; but ToString()
                // on a badly behaved exception may itself throw, so guard it
                string exceptionText = null;
                try
                {
                    exceptionText = ex.ToString();
                }
                catch
                {
                    // Ignore exception
                }
                writer.Write("<stackTrace>" + exceptionText + "</stackTrace>");
                writer.Write("</log4net.Error>");
            }
        }
    }
}
/// <summary>
/// Gets the renderer for the specified object type
/// </summary>
/// <param name="obj">the object to lookup the renderer for</param>
/// <returns>the renderer for <paramref name="obj"/></returns>
/// <remarks>
/// <param>
/// Gets the renderer for the specified object type.
/// </param>
/// <param>
/// Syntactic sugar method that calls <see cref="Get(Type)"/>
/// with the type of the object parameter.
/// </param>
/// </remarks>
public IObjectRenderer Get(Object obj)
{
if (obj == null)
{
return null;
}
else
{
return Get(obj.GetType());
}
}
/// <summary>
/// Gets the renderer for the specified type
/// </summary>
/// <param name="type">the type to lookup the renderer for</param>
/// <returns>the renderer for the specified type</returns>
/// <remarks>
/// <para>
/// Returns the renderer for the specified type.
/// If no specific renderer has been defined the
/// <see cref="DefaultRenderer"/> will be returned.
/// </para>
/// </remarks>
public IObjectRenderer Get(Type type)
{
if (type == null)
{
throw new ArgumentNullException("type");
}
IObjectRenderer result = null;
// Check cache
result = (IObjectRenderer)m_cache[type];
if (result == null)
{
for(Type cur = type; cur != null; cur = cur.BaseType)
{
// Search the type's interfaces
result = SearchTypeAndInterfaces(cur);
if (result != null)
{
break;
}
}
// if not set then use the default renderer
if (result == null)
{
result = s_defaultRenderer;
}
// Add to cache
m_cache[type] = result;
}
return result;
}
/// <summary>
/// Internal function to recursively search interfaces
/// </summary>
/// <param name="type">the type to lookup the renderer for</param>
/// <returns>the renderer for the specified type</returns>
private IObjectRenderer SearchTypeAndInterfaces(Type type)
{
IObjectRenderer r = (IObjectRenderer)m_map[type];
if (r != null)
{
return r;
}
else
{
foreach(Type t in type.GetInterfaces())
{
r = SearchTypeAndInterfaces(t);
if (r != null)
{
return r;
}
}
}
return null;
}
/// <summary>
/// Get the default renderer instance
/// </summary>
/// <value>the default renderer</value>
/// <remarks>
/// <para>
/// Get the default renderer
/// </para>
/// </remarks>
public IObjectRenderer DefaultRenderer
{
get { return s_defaultRenderer; }
}
/// <summary>
/// Clear the map of renderers
/// </summary>
/// <remarks>
/// <para>
/// Clear the custom renderers defined by using
/// <see cref="Put"/>. The <see cref="DefaultRenderer"/>
/// cannot be removed.
/// </para>
/// </remarks>
public void Clear()
{
m_map.Clear();
m_cache.Clear();
}
/// <summary>
/// Register an <see cref="IObjectRenderer"/> for <paramref name="typeToRender"/>.
/// </summary>
/// <param name="typeToRender">the type that will be rendered by <paramref name="renderer"/></param>
/// <param name="renderer">the renderer for <paramref name="typeToRender"/></param>
/// <remarks>
/// <para>
/// Register an object renderer for a specific source type.
/// This renderer will be returned from a call to <see cref="Get(Type)"/>
/// specifying the same <paramref name="typeToRender"/> as an argument.
/// </para>
/// </remarks>
public void Put(Type typeToRender, IObjectRenderer renderer)
{
m_cache.Clear();
if (typeToRender == null)
{
throw new ArgumentNullException("typeToRender");
}
if (renderer == null)
{
throw new ArgumentNullException("renderer");
}
m_map[typeToRender] = renderer;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography.X509Certificates;
using Test.Cryptography;
using Xunit;
namespace System.Security.Cryptography.Pkcs.Tests
{
public static partial class SignedCmsTests
{
    // A freshly constructed CmsSigner has no explicit private key attached.
    [Fact]
    public static void CmsSignerKeyIsNullByDefault()
    {
        CmsSigner cmsSigner = new CmsSigner();
        Assert.Null(cmsSigner.PrivateKey);
    }

    // Passing only a certificate (even one carrying a private key) does not
    // populate the PrivateKey property; it must be set explicitly.
    [Fact]
    public static void CmsSignerKeyIsNullByDefaultWhenCertificateIsPassed()
    {
        using (X509Certificate2 cert = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey())
        {
            CmsSigner cmsSigner = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert);
            Assert.Null(cmsSigner.PrivateKey);
        }
    }

    // The (subjectIdentifierType, certificate, key) constructor stores the
    // supplied key instance as-is (reference identity, not a copy).
    [Fact]
    public static void CmsSignerConstructorWithKeySetsProperty()
    {
        using (X509Certificate2 cert = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey())
        using (RSA key = cert.GetRSAPrivateKey())
        {
            CmsSigner cmsSigner = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert, key);
            Assert.Same(key, cmsSigner.PrivateKey);
        }
    }

    // Signing with a public-only certificate works when the private key is
    // supplied afterwards through the PrivateKey property.
    // NOTE(review): "Sing" is a typo for "Sign"; kept because renaming would
    // change the test name discovered by xUnit.
    [Fact]
    public static void SingUsingExplicitKeySetWithProperty()
    {
        using (X509Certificate2 cert = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey())
        using (X509Certificate2 pubCert = new X509Certificate2(cert.RawData))
        using (RSA key = cert.GetRSAPrivateKey())
        {
            byte[] content = { 1, 2, 3, 4, 19 };
            ContentInfo contentInfo = new ContentInfo(content);
            SignedCms cms = new SignedCms(contentInfo);
            CmsSigner cmsSigner = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, pubCert);
            cmsSigner.PrivateKey = key;
            cms.ComputeSignature(cmsSigner);
            cms.CheckSignature(true);
            Assert.Equal(1, cms.SignerInfos.Count);
            Assert.Equal(pubCert, cms.SignerInfos[0].Certificate);
        }
    }

    // Explicit-key signing round-trips for each supported algorithm family.
    [Fact]
    public static void SignCmsUsingExplicitRSAKey()
    {
        using (X509Certificate2 cert = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey())
        using (RSA key = cert.GetRSAPrivateKey())
        {
            VerifyWithExplicitPrivateKey(cert, key);
        }
    }

    [Fact]
    public static void SignCmsUsingExplicitDSAKey()
    {
        using (X509Certificate2 cert = Certificates.Dsa1024.TryGetCertificateWithPrivateKey())
        using (DSA key = cert.GetDSAPrivateKey())
        {
            VerifyWithExplicitPrivateKey(cert, key);
        }
    }

    [Fact]
    public static void SignCmsUsingExplicitECDsaKey()
    {
        using (X509Certificate2 cert = Certificates.ECDsaP256Win.TryGetCertificateWithPrivateKey())
        using (ECDsa key = cert.GetECDsaPrivateKey())
        {
            VerifyWithExplicitPrivateKey(cert, key);
        }
    }

    [Fact]
    public static void SignCmsUsingExplicitECDsaP521Key()
    {
        using (X509Certificate2 cert = Certificates.ECDsaP521Win.TryGetCertificateWithPrivateKey())
        using (ECDsa key = cert.GetECDsaPrivateKey())
        {
            VerifyWithExplicitPrivateKey(cert, key);
        }
    }

    // Counter-signing works with explicit keys even when the primary signer
    // and counter-signer use different algorithm families.
    [Fact]
    public static void CounterSignCmsUsingExplicitRSAKeyForFirstSignerAndDSAForCounterSignature()
    {
        using (X509Certificate2 cert = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey())
        using (RSA key = cert.GetRSAPrivateKey())
        using (X509Certificate2 counterSignerCert = Certificates.Dsa1024.TryGetCertificateWithPrivateKey())
        using (DSA counterSignerKey = counterSignerCert.GetDSAPrivateKey())
        {
            VerifyCounterSignatureWithExplicitPrivateKey(cert, key, counterSignerCert, counterSignerKey);
        }
    }

    [Fact]
    public static void CounterSignCmsUsingExplicitDSAKeyForFirstSignerAndECDsaForCounterSignature()
    {
        using (X509Certificate2 cert = Certificates.Dsa1024.TryGetCertificateWithPrivateKey())
        using (DSA key = cert.GetDSAPrivateKey())
        using (X509Certificate2 counterSignerCert = Certificates.ECDsaP256Win.TryGetCertificateWithPrivateKey())
        using (ECDsa counterSignerKey = counterSignerCert.GetECDsaPrivateKey())
        {
            VerifyCounterSignatureWithExplicitPrivateKey(cert, key, counterSignerCert, counterSignerKey);
        }
    }

    [Fact]
    public static void CounterSignCmsUsingExplicitECDsaKeyForFirstSignerAndRSAForCounterSignature()
    {
        using (X509Certificate2 cert = Certificates.ECDsaP256Win.TryGetCertificateWithPrivateKey())
        using (ECDsa key = cert.GetECDsaPrivateKey())
        using (X509Certificate2 counterSignerCert = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey())
        using (RSA counterSignerKey = counterSignerCert.GetRSAPrivateKey())
        {
            VerifyCounterSignatureWithExplicitPrivateKey(cert, key, counterSignerCert, counterSignerKey);
        }
    }

    // Supplying a key whose algorithm does not match the certificate's public
    // key must fail at ComputeSignature time.
    [Fact]
    public static void SignCmsUsingRSACertAndECDsaKeyThrows()
    {
        byte[] content = { 9, 8, 7, 6, 5 };
        ContentInfo contentInfo = new ContentInfo(content);
        SignedCms cms = new SignedCms(contentInfo, detached: false);
        using (X509Certificate2 cert = Certificates.RSA2048SignatureOnly.GetCertificate())
        using (ECDsa key = ECDsa.Create())
        {
            CmsSigner signer = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert, key);
            Assert.Throws<CryptographicException>(() => cms.ComputeSignature(signer));
        }
    }

    [Fact]
    public static void SignCmsUsingDSACertAndECDsaKeyThrows()
    {
        byte[] content = { 9, 8, 7, 6, 5 };
        ContentInfo contentInfo = new ContentInfo(content);
        SignedCms cms = new SignedCms(contentInfo, detached: false);
        using (X509Certificate2 cert = Certificates.Dsa1024.GetCertificate())
        using (ECDsa key = ECDsa.Create())
        {
            CmsSigner signer = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert, key);
            // DSA signing only supports SHA-1 digests in this test fixture.
            signer.IncludeOption = X509IncludeOption.EndCertOnly;
            signer.DigestAlgorithm = new Oid(Oids.Sha1, Oids.Sha1);
            Assert.Throws<CryptographicException>(() => cms.ComputeSignature(signer));
        }
    }

    // NOTE(review): "EDCSa"/"RSAa" are typos for "ECDsa"/"RSA"; kept to
    // preserve the discovered test name.
    [Fact]
    public static void SignCmsUsingEDCSaCertAndRSAaKeyThrows()
    {
        byte[] content = { 9, 8, 7, 6, 5 };
        ContentInfo contentInfo = new ContentInfo(content);
        SignedCms cms = new SignedCms(contentInfo, detached: false);
        using (X509Certificate2 cert = Certificates.ECDsaP256Win.GetCertificate())
        using (RSA key = RSA.Create())
        {
            CmsSigner signer = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert, key);
            Assert.Throws<CryptographicException>(() => cms.ComputeSignature(signer));
        }
    }

    // A key of the right algorithm but not matching the certificate's public
    // key must also fail.
    [Fact]
    public static void SignCmsUsingRSACertWithNotMatchingKeyThrows()
    {
        byte[] content = { 9, 8, 7, 6, 5 };
        ContentInfo contentInfo = new ContentInfo(content);
        SignedCms cms = new SignedCms(contentInfo, detached: false);
        using (X509Certificate2 cert = Certificates.RSA2048SignatureOnly.GetCertificate())
        using (RSA key = RSA.Create())
        {
            CmsSigner signer = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert, key);
            Assert.Throws<CryptographicException>(() => cms.ComputeSignature(signer));
        }
    }

    [Fact]
    [PlatformSpecific(~TestPlatforms.OSX)] // Creating DSA keys is not supported on OSX
    public static void SignCmsUsingDSACertWithNotMatchingKeyThrows()
    {
        byte[] content = { 9, 8, 7, 6, 5 };
        ContentInfo contentInfo = new ContentInfo(content);
        SignedCms cms = new SignedCms(contentInfo, detached: false);
        using (X509Certificate2 cert = Certificates.Dsa1024.GetCertificate())
        using (DSA key = DSA.Create())
        {
            CmsSigner signer = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert, key);
            signer.IncludeOption = X509IncludeOption.EndCertOnly;
            signer.DigestAlgorithm = new Oid(Oids.Sha1, Oids.Sha1);
            Assert.Throws<CryptographicException>(() => cms.ComputeSignature(signer));
        }
    }

    [Fact]
    public static void SignCmsUsingECDsaCertWithNotMatchingKeyThrows()
    {
        byte[] content = { 9, 8, 7, 6, 5 };
        ContentInfo contentInfo = new ContentInfo(content);
        SignedCms cms = new SignedCms(contentInfo, detached: false);
        using (X509Certificate2 cert = Certificates.ECDsaP256Win.GetCertificate())
        using (ECDsa key = ECDsa.Create())
        {
            CmsSigner signer = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert, key);
            Assert.Throws<CryptographicException>(() => cms.ComputeSignature(signer));
        }
    }

    // Adding an unrelated certificate grows the collection and does not
    // invalidate the existing signatures.
    [Fact]
    public static void AddCertificate()
    {
        SignedCms cms = new SignedCms();
        cms.Decode(SignedDocuments.CounterSignedRsaPkcs1OneSigner);
        int numOfCerts = cms.Certificates.Count;
        using (X509Certificate2 newCert = Certificates.RSAKeyTransfer1.GetCertificate())
        {
            cms.AddCertificate(newCert);
            Assert.Equal(numOfCerts + 1, cms.Certificates.Count);
            Assert.True(cms.Certificates.Contains(newCert));
            cms.CheckSignature(true);
        }
    }

    // The stored copy of an added certificate must not carry the private key.
    [Fact]
    public static void AddCertificateWithPrivateKey()
    {
        SignedCms cms = new SignedCms();
        cms.Decode(SignedDocuments.CounterSignedRsaPkcs1OneSigner);
        int numOfCerts = cms.Certificates.Count;
        using (X509Certificate2 newCert = Certificates.RSAKeyTransfer1.TryGetCertificateWithPrivateKey())
        {
            Assert.True(newCert.HasPrivateKey);
            cms.AddCertificate(newCert);
            Assert.Equal(numOfCerts + 1, cms.Certificates.Count);
            X509Certificate2 addedCert = cms.Certificates.OfType<X509Certificate2>().Where((cert) => cert.Equals(newCert)).Single();
            Assert.False(addedCert.HasPrivateKey);
            Assert.Equal(newCert, addedCert);
            cms.CheckSignature(true);
        }
    }

    // Removing one of two added certificates leaves the other (and the
    // original document certificates) intact.
    [Fact]
    public static void RemoveCertificate()
    {
        SignedCms cms = new SignedCms();
        cms.Decode(SignedDocuments.CounterSignedRsaPkcs1OneSigner);
        var expectedCerts = new HashSet<X509Certificate2>(cms.Certificates.OfType<X509Certificate2>());
        using (X509Certificate2 cert1 = Certificates.RSAKeyTransfer1.GetCertificate())
        using (X509Certificate2 cert2 = Certificates.RSAKeyTransfer2.GetCertificate())
        {
            Assert.NotEqual(cert1, cert2);
            cms.AddCertificate(cert1);
            cms.AddCertificate(cert2);
            expectedCerts.Add(cert2);
            cms.RemoveCertificate(cert1);
            Assert.Equal(expectedCerts.Count, cms.Certificates.Count);
            foreach (X509Certificate2 documentCert in cms.Certificates)
            {
                Assert.True(expectedCerts.Contains(documentCert));
            }
        }
    }

    // Removing a certificate that was never added is an error.
    [Fact]
    public static void RemoveNonExistingCertificate()
    {
        SignedCms cms = new SignedCms();
        cms.Decode(SignedDocuments.CounterSignedRsaPkcs1OneSigner);
        using (X509Certificate2 certToRemove = Certificates.RSAKeyTransfer1.GetCertificate())
        {
            Assert.Throws<CryptographicException>(() => cms.RemoveCertificate(certToRemove));
        }
    }

    // Stripping all certificates breaks verification; re-adding the signer's
    // certificate restores it. A SignerInfo captured before removal keeps its
    // own snapshot of the certificate.
    [Fact]
    public static void RemoveAllCertsAddBackSignerCert()
    {
        SignedCms cms = new SignedCms();
        cms.Decode(SignedDocuments.CounterSignedRsaPkcs1OneSigner);
        SignerInfo signerInfoBeforeRemoval = cms.SignerInfos[0];
        X509Certificate2 signerCert = signerInfoBeforeRemoval.Certificate;
        while (cms.Certificates.Count > 0)
        {
            cms.RemoveCertificate(cms.Certificates[0]);
        }
        // Signer info should be gone
        Assert.Throws<CryptographicException>(() => cms.CheckSignature(true));
        Assert.Null(cms.SignerInfos[0].Certificate);
        Assert.NotNull(signerInfoBeforeRemoval.Certificate);
        cms.AddCertificate(signerCert);
        cms.CheckSignature(true);
        Assert.Equal(1, cms.Certificates.Count);
    }

    // Adding the same certificate twice is rejected.
    [Fact]
    public static void AddExistingCertificate()
    {
        SignedCms cms = new SignedCms();
        cms.Decode(SignedDocuments.CounterSignedRsaPkcs1OneSigner);
        using (X509Certificate2 newCert = Certificates.RSAKeyTransfer1.GetCertificate())
        {
            cms.AddCertificate(newCert);
            Assert.Throws<CryptographicException>(() => cms.AddCertificate(newCert));
        }
    }

    // An unsigned attribute can be added to a document whose content was
    // BER-encoded with indefinite lengths, and it survives a re-encode.
    [Fact]
    public static void AddAttributeToIndefiniteLengthContent()
    {
        SignedCms cms = new SignedCms();
        cms.Decode(SignedDocuments.IndefiniteLengthContentDocument);
        cms.SignerInfos[0].AddUnsignedAttribute(new Pkcs9DocumentDescription("Indefinite length test"));
        byte[] encoded = cms.Encode();
        cms = new SignedCms();
        cms.Decode(encoded);
        // It should sort first, because it's smaller.
        Assert.Equal(Oids.DocumentDescription, cms.SignerInfos[0].UnsignedAttributes[0].Oid.Value);
    }

    // Signing must work when the certificate's private key is an ephemeral
    // (in-memory, non-persisted) key attached via CopyWithPrivateKey.
    [Fact]
    public static void AddSigner_RSA_EphemeralKey()
    {
        using (RSA rsa = RSA.Create())
        using (X509Certificate2 publicCertificate = Certificates.RSA2048SignatureOnly.GetCertificate())
        using (X509Certificate2 certificateWithKey = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey(exportable: true))
        {
            if (certificateWithKey == null)
            {
                // An exportable private key is not available on this platform; skip.
                return;
            }
            using (RSA privateKey = certificateWithKey.GetRSAPrivateKey())
            using (RSA exportableKey = privateKey.MakeExportable())
            {
                rsa.ImportParameters(exportableKey.ExportParameters(true));
            }
            using (X509Certificate2 certWithEphemeralKey = publicCertificate.CopyWithPrivateKey(rsa))
            {
                ContentInfo content = new ContentInfo(new byte[] { 1, 2, 3 });
                SignedCms cms = new SignedCms(content, false);
                CmsSigner signer = new CmsSigner(certWithEphemeralKey);
                cms.ComputeSignature(signer);
            }
        }
    }

    [Fact]
    public static void AddSigner_DSA_EphemeralKey()
    {
        using (DSA dsa = DSA.Create())
        using (X509Certificate2 publicCertificate = Certificates.Dsa1024.GetCertificate())
        using (X509Certificate2 certificateWithKey = Certificates.Dsa1024.TryGetCertificateWithPrivateKey(exportable: true))
        {
            if (certificateWithKey == null)
            {
                // An exportable private key is not available on this platform; skip.
                return;
            }
            using (DSA privateKey = certificateWithKey.GetDSAPrivateKey())
            using (DSA exportableKey = privateKey.MakeExportable())
            {
                dsa.ImportParameters(exportableKey.ExportParameters(true));
            }
            using (X509Certificate2 certWithEphemeralKey = publicCertificate.CopyWithPrivateKey(dsa))
            {
                ContentInfo content = new ContentInfo(new byte[] { 1, 2, 3 });
                SignedCms cms = new SignedCms(content, false);
                CmsSigner signer = new CmsSigner(certWithEphemeralKey)
                {
                    // DSA in this fixture requires a SHA-1 digest.
                    DigestAlgorithm = new Oid(Oids.Sha1, Oids.Sha1)
                };
                cms.ComputeSignature(signer);
            }
        }
    }

    [Fact]
    public static void AddSigner_ECDSA_EphemeralKey()
    {
        using (ECDsa ecdsa = ECDsa.Create())
        using (X509Certificate2 publicCertificate = Certificates.ECDsaP256Win.GetCertificate())
        using (X509Certificate2 certificateWithKey = Certificates.ECDsaP256Win.TryGetCertificateWithPrivateKey(exportable: true))
        {
            if (certificateWithKey == null)
            {
                // An exportable private key is not available on this platform; skip.
                return;
            }
            using (ECDsa privateKey = certificateWithKey.GetECDsaPrivateKey())
            using (ECDsa exportableKey = privateKey.MakeExportable())
            {
                ecdsa.ImportParameters(exportableKey.ExportParameters(true));
            }
            using (X509Certificate2 certWithEphemeralKey = publicCertificate.CopyWithPrivateKey(ecdsa))
            {
                ContentInfo content = new ContentInfo(new byte[] { 1, 2, 3 });
                SignedCms cms = new SignedCms(content, false);
                CmsSigner signer = new CmsSigner(certWithEphemeralKey);
                cms.ComputeSignature(signer);
            }
        }
    }

    // Using a combined signature OID (e.g. sha256WithRSA) where a pure digest
    // OID is expected must be rejected.
    [Fact]
    public static void CreateSignature_DigestAlgorithmWithSignatureOid_Prohibited()
    {
        ContentInfo content = new ContentInfo(new byte[] { 1, 2, 3 });
        SignedCms cms = new SignedCms(content);
        using (X509Certificate2 cert = Certificates.RSAKeyTransferCapi1.TryGetCertificateWithPrivateKey())
        {
            CmsSigner signer = new CmsSigner(cert);
            signer.DigestAlgorithm = new Oid(Oids.RsaPkcs1Sha256);
            Assert.Throws<CryptographicException>(() => {
                cms.ComputeSignature(signer);
            });
        }
    }

    // Shared helper: sign with a public-only certificate plus an explicit key,
    // then verify the result and the recorded signer certificate.
    private static void VerifyWithExplicitPrivateKey(X509Certificate2 cert, AsymmetricAlgorithm key)
    {
        using (var pubCert = new X509Certificate2(cert.RawData))
        {
            Assert.False(pubCert.HasPrivateKey);
            byte[] content = { 9, 8, 7, 6, 5 };
            ContentInfo contentInfo = new ContentInfo(content);
            SignedCms cms = new SignedCms(contentInfo);
            CmsSigner signer = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, pubCert, key)
            {
                IncludeOption = X509IncludeOption.EndCertOnly,
                // SHA-1 is the digest supported by every algorithm family in this fixture.
                DigestAlgorithm = new Oid(Oids.Sha1, Oids.Sha1)
            };
            cms.ComputeSignature(signer);
            cms.CheckSignature(true);
            Assert.Equal(1, cms.SignerInfos.Count);
            Assert.Equal(pubCert, cms.SignerInfos[0].Certificate);
        }
    }

    // Shared helper: sign with an explicit key, counter-sign with a second
    // explicit key, then verify both signatures.
    private static void VerifyCounterSignatureWithExplicitPrivateKey(X509Certificate2 cert, AsymmetricAlgorithm key, X509Certificate2 counterSignerCert, AsymmetricAlgorithm counterSignerKey)
    {
        Assert.NotNull(key);
        Assert.NotNull(counterSignerKey);
        using (var pubCert = new X509Certificate2(cert.RawData))
        using (var counterSignerPubCert = new X509Certificate2(counterSignerCert.RawData))
        {
            Assert.False(pubCert.HasPrivateKey);
            byte[] content = { 9, 8, 7, 6, 5 };
            ContentInfo contentInfo = new ContentInfo(content);
            SignedCms cms = new SignedCms(contentInfo);
            CmsSigner cmsSigner = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, pubCert, key)
            {
                IncludeOption = X509IncludeOption.EndCertOnly,
                DigestAlgorithm = new Oid(Oids.Sha1, Oids.Sha1)
            };
            CmsSigner cmsCounterSigner = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, counterSignerPubCert, counterSignerKey)
            {
                IncludeOption = X509IncludeOption.EndCertOnly,
                DigestAlgorithm = new Oid(Oids.Sha1, Oids.Sha1)
            };
            cms.ComputeSignature(cmsSigner);
            Assert.Equal(1, cms.SignerInfos.Count);
            Assert.Equal(pubCert, cms.SignerInfos[0].Certificate);
            cms.SignerInfos[0].ComputeCounterSignature(cmsCounterSigner);
            cms.CheckSignature(true);
            Assert.Equal(1, cms.SignerInfos[0].CounterSignerInfos.Count);
            Assert.Equal(counterSignerPubCert, cms.SignerInfos[0].CounterSignerInfos[0].Certificate);
        }
    }
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Firebase;
using Firebase.Firestore;
using NUnit.Framework;
using UnityEngine.TestTools;
using static Tests.TestAsserts;
namespace Tests {
public class TransactionAndBatchTests : FirestoreIntegrationTests {
  // A write batch applies delete/update/set operations atomically on commit.
  [UnityTest]
  public IEnumerator WriteBatch_ShouldWork() {
    DocumentReference doc1 = TestDocument();
    DocumentReference doc2 = TestDocument();
    DocumentReference doc3 = TestDocument();
    // Initialize doc1 and doc2 with some data.
    var initialData = new Dictionary<string, object> {
      { "field", "value" },
    };
    yield return AwaitSuccess(doc1.SetAsync(initialData));
    yield return AwaitSuccess(doc2.SetAsync(initialData));

    // Perform batch that deletes doc1, updates doc2, and overwrites doc3.
    // Exercises all three Update overloads (string-keyed dictionary,
    // FieldPath-keyed dictionary, and single field/value pair).
    yield return AwaitSuccess(
        doc1.Firestore.StartBatch()
            .Delete(doc1)
            .Update(doc2, new Dictionary<string, object> { { "field2", "value2" } })
            .Update(doc2,
                    new Dictionary<FieldPath, object> { { new FieldPath("field3"), "value3" } })
            .Update(doc2, "field4", "value4")
            .Set(doc3, initialData)
            .CommitAsync());
    {
      // doc1 was deleted by the batch.
      var getDoc1Task = doc1.GetSnapshotAsync();
      yield return AwaitSuccess(getDoc1Task);
      DocumentSnapshot snap = getDoc1Task.Result;
      Assert.That(snap.Exists, Is.False);
    }
    {
      // doc2 accumulated all three updates on top of the initial data.
      var getDoc2Task = doc2.GetSnapshotAsync();
      yield return AwaitSuccess(getDoc2Task);
      DocumentSnapshot snap = getDoc2Task.Result;
      Assert.That(snap.ToDictionary(), Is.EquivalentTo(new Dictionary<string, object> {
        { "field", "value" },
        { "field2", "value2" },
        { "field3", "value3" },
        { "field4", "value4" },
      }));
    }
    {
      // doc3 was created by the batch's Set.
      var getDoc3Task = doc3.GetSnapshotAsync();
      yield return AwaitSuccess(getDoc3Task);
      DocumentSnapshot snap = getDoc3Task.Result;
      Assert.That(snap.ToDictionary(), Is.EquivalentTo(initialData));
    }
  }

  // Committing a batch that targets an invalid document path must fault the
  // commit task with InvalidArgument.
  [UnityTest]
  public IEnumerator WriteBatch_ShouldReportErrorOnInvalidDocument() {
    var docWithInvalidName = TestCollection().Document("__badpath__");
    Task commitWithInvalidDocTask = docWithInvalidName.Firestore.StartBatch()
                                        .Set(docWithInvalidName, TestData(0))
                                        .CommitAsync();
    yield return AwaitCompletion(commitWithInvalidDocTask);
    AssertTaskFaulted(commitWithInvalidDocTask, FirestoreError.InvalidArgument, "__badpath__");
  }

  // A transaction can read, then queue deletes/updates/sets, and return a value.
  [UnityTest]
  public IEnumerator Transaction_ShouldWork() {
    DocumentReference doc1 = TestDocument();
    DocumentReference doc2 = TestDocument();
    DocumentReference doc3 = TestDocument();
    // Initialize doc1 and doc2 with some data.
    var initialData = new Dictionary<string, object> {
      { "field", "value" },
    };
    yield return AwaitSuccess(doc1.SetAsync(initialData));
    yield return AwaitSuccess(doc2.SetAsync(initialData));

    // Perform transaction that reads doc1, deletes doc1, updates doc2, and overwrites doc3.
    var transactionTask = doc1.Firestore.RunTransactionAsync<string>((transaction) => {
      // The transaction callback is expected to run on the Unity main thread.
      Assert.That(mainThreadId, Is.EqualTo(Thread.CurrentThread.ManagedThreadId));
      return transaction.GetSnapshotAsync(doc1).ContinueWith((getTask) => {
        Assert.That(getTask.Result.ToDictionary(), Is.EquivalentTo(initialData));
        transaction.Delete(doc1);
        transaction.Update(doc2, new Dictionary<string, object> { { "field2", "value2" } });
        transaction.Update(
            doc2, new Dictionary<FieldPath, object> { { new FieldPath("field3"), "value3" } });
        transaction.Update(doc2, "field4", "value4");
        transaction.Set(doc3, initialData);
        return "txn result";
      });
    });

    yield return AwaitSuccess(transactionTask);
    // The value produced by the callback is surfaced as the task result.
    string result = transactionTask.Result;
    Assert.That(result, Is.EqualTo("txn result"));
    {
      var getTask = doc1.GetSnapshotAsync();
      yield return AwaitSuccess(getTask);
      DocumentSnapshot snap = getTask.Result;
      Assert.That(snap.Exists, Is.False);
    }
    {
      var getTask = doc2.GetSnapshotAsync();
      yield return AwaitSuccess(getTask);
      DocumentSnapshot snap = getTask.Result;
      Assert.That(snap.ToDictionary(), Is.EquivalentTo(new Dictionary<string, object> {
        { "field", "value" },
        { "field2", "value2" },
        { "field3", "value3" },
        { "field4", "value4" },
      }));
    }
    {
      var getTask = doc3.GetSnapshotAsync();
      yield return AwaitSuccess(getTask);
      DocumentSnapshot snap = getTask.Result;
      Assert.That(snap.ToDictionary(), Is.EquivalentTo(initialData));
    }
  }

  // The non-generic RunTransactionAsync overload (returning plain Task) also commits.
  [UnityTest]
  public IEnumerator TransactionWithNonGenericTask_ShouldWork() {
    DocumentReference doc = TestDocument();
    yield return AwaitSuccess(db.RunTransactionAsync((transaction) => {
      transaction.Set(doc, TestData(1));
      // Create a plain (non-generic) `Task` result.
      return Task.CompletedTask;
    }));
    var getTask = doc.GetSnapshotAsync();
    yield return AwaitSuccess(getTask);
    DocumentSnapshot snap = getTask.Result;
    Assert.That(snap.ToDictionary(), Is.EquivalentTo(TestData(1)));
  }

  // Returning a faulted Task from the callback aborts the transaction without
  // retrying, and the callback's exception is propagated verbatim.
  [UnityTest]
  public IEnumerator Transaction_CanAbortOnFailedTask() {
    int retries = 0;
    Task txnTask = db.RunTransactionAsync((transaction) => {
      retries++;
      TaskCompletionSource<object> tcs = new TaskCompletionSource<object>();
      tcs.SetException(new InvalidOperationException("Failed Task"));
      return tcs.Task;
      // TODO(183714287): Why below makes txnTask succeed?
      // return Task.FromException(new InvalidOperationException("Failed Task"));
    });
    yield return AwaitCompletion(txnTask);
    Exception e = AssertTaskFaulted(txnTask);
    Assert.That(retries, Is.EqualTo(1));
    Assert.That(e, Is.TypeOf<InvalidOperationException>());
    Assert.That(e.Message, Is.EqualTo("Failed Task"));
  }

  // Throwing synchronously from the callback behaves the same as a faulted Task.
  [UnityTest]
  public IEnumerator Transaction_CanAbortOnException() {
    int retries = 0;
    Task txnTask = db.RunTransactionAsync((transaction) => {
      retries++;
      throw new InvalidOperationException("Failed Exception");
    });
    yield return AwaitCompletion(txnTask);
    Exception e = AssertTaskFaulted(txnTask);
    Assert.That(retries, Is.EqualTo(1));
    Assert.That(e, Is.TypeOf<InvalidOperationException>());
    Assert.That(e.Message, Is.EqualTo("Failed Exception"));
  }

  // Reading an invalid document path inside a transaction faults it with
  // InvalidArgument.
  [UnityTest]
  public IEnumerator Transaction_AbortOnInvalidDocuments() {
    Task txnTask = db.RunTransactionAsync((transaction) => {
      var docWithInvalidName = TestCollection().Document("__badpath__");
      return transaction.GetSnapshotAsync(docWithInvalidName);
    });
    yield return AwaitCompletion(txnTask);
    AssertTaskFaulted(txnTask, FirestoreError.InvalidArgument, "__badpath__");
  }

  [UnityTest]
  public IEnumerator Transaction_AbortWithoutRetryOnPermanentError() {
    int retries = 0;
    DocumentReference doc = TestDocument();
    // Try to update a document that doesn't exist. Should fail permanently (no retries)
    // with a "Not Found" error.
    Task txnTask = db.RunTransactionAsync((transaction) => {
      retries++;
      transaction.Update(doc, TestData(0));
      return Task.CompletedTask;
    });
    yield return AwaitCompletion(txnTask);
    AssertTaskFaulted(txnTask, FirestoreError.NotFound, doc.Id);
    Assert.That(retries, Is.EqualTo(1));
  }

  // A transaction whose read set is invalidated by a concurrent write is
  // retried; after exhausting retries it faults with FailedPrecondition.
  [UnityTest]
  public IEnumerator Transaction_RetriesWithOutOfBandWrites() {
    int retries = 0;
    DocumentReference doc = TestDocument();
    Task txnTask = db.RunTransactionAsync((transaction) => {
      retries++;
      return transaction.GetSnapshotAsync(doc)
          .ContinueWith((snapshot) => {
            // Queue a write via the transaction.
            transaction.Set(doc, TestData(0));
            // But also write the document (out-of-band) so the transaction is retried.
            return doc.SetAsync(TestData(retries));
          })
          .Unwrap();
    });
    yield return AwaitCompletion(txnTask);
    AssertTaskFaulted(txnTask, FirestoreError.FailedPrecondition);
    // The transaction API will retry 6 times before giving up.
    Assert.That(retries, Is.EqualTo(6));
  }

  [UnityTest]
  public IEnumerator Transaction_RollsBackIfExceptionIsThrown() {
    // This test covers this bug: https://github.com/firebase/quickstart-unity/issues/1042
    DocumentReference doc = TestDocument();
    Task txnTask = db.RunTransactionAsync(transaction => {
      return transaction.GetSnapshotAsync(doc).ContinueWith(snapshotTask => {
        // Queue a write, then throw: the queued write must NOT be committed.
        transaction.Set(doc, new Dictionary<string, object> { { "key", 42 } }, null);
        throw new TestException();
      });
    });
    yield return AwaitCompletion(txnTask);
    Exception exception = AssertTaskFaulted(txnTask);
    Assert.That(exception, Is.TypeOf<TestException>());
    // Verify that the transaction was rolled back.
    var getTask = doc.GetSnapshotAsync();
    yield return AwaitSuccess(getTask);
    DocumentSnapshot snap = getTask.Result;
    Assert.That(snap.Exists, Is.False);
  }

  // Marker exception type used to distinguish user-thrown errors from SDK errors.
  private class TestException : Exception {
    public TestException() {}
  }
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Audio;
using osu.Framework.Audio.Sample;
using osu.Framework.Bindables;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Screens;
using osu.Game.Audio;
using osu.Game.Beatmaps;
using osu.Game.Online.Rooms;
using osu.Game.Overlays;
using osu.Game.Overlays.Mods;
using osu.Game.Rulesets.Mods;
using osu.Game.Screens.OnlinePlay.Match.Components;
namespace osu.Game.Screens.OnlinePlay.Match
{
[Cached(typeof(IPreviewTrackOwner))]
public abstract class RoomSubScreen : OnlinePlaySubScreen, IPreviewTrackOwner
{
[Cached(typeof(IBindable<PlaylistItem>))]
protected readonly Bindable<PlaylistItem> SelectedItem = new Bindable<PlaylistItem>();
public override bool? AllowTrackAdjustments => true;
// Background initially shows the first playlist item's beatmap and then
// follows SelectedItem via the bindable binding.
protected override BackgroundScreen CreateBackground() => new RoomBackgroundScreen(Room.Playlist.FirstOrDefault())
{
    SelectedItem = { BindTarget = SelectedItem }
};
public override bool DisallowExternalBeatmapRulesetChanges => true;
/// <summary>
/// A container that provides controls for selection of user mods.
/// This will be shown/hidden automatically when applicable.
/// </summary>
protected Drawable UserModsSection;
private Sample sampleStart;
/// <summary>
/// Any mods applied by/to the local user.
/// </summary>
protected readonly Bindable<IReadOnlyList<Mod>> UserMods = new Bindable<IReadOnlyList<Mod>>(Array.Empty<Mod>());
protected readonly IBindable<long?> RoomId = new Bindable<long?>();
[Resolved]
private MusicController music { get; set; }
[Resolved]
private BeatmapManager beatmapManager { get; set; }
[Resolved(canBeNull: true)]
protected OnlinePlayScreen ParentScreen { get; private set; }
private IBindable<WeakReference<BeatmapSetInfo>> managerUpdated;
[Cached]
protected OnlinePlayBeatmapAvailabilityTracker BeatmapAvailabilityTracker { get; private set; }
protected IBindable<BeatmapAvailability> BeatmapAvailability => BeatmapAvailabilityTracker.Availability;
public readonly Room Room;
private readonly bool allowEdit;
private ModSelectOverlay userModsSelectOverlay;
private RoomSettingsOverlay settingsOverlay;
private Drawable mainContent;
/// <summary>
/// Creates a new <see cref="RoomSubScreen"/>.
/// </summary>
/// <param name="room">The <see cref="Room"/>.</param>
/// <param name="allowEdit">Whether to allow editing room settings post-creation.</param>
protected RoomSubScreen(Room room, bool allowEdit = true)
{
Room = room;
this.allowEdit = allowEdit;
Padding = new MarginPadding { Top = Header.HEIGHT };
BeatmapAvailabilityTracker = new OnlinePlayBeatmapAvailabilityTracker
{
SelectedItem = { BindTarget = SelectedItem }
};
RoomId.BindTo(room.RoomID);
}
[BackgroundDependencyLoader]
private void load(AudioManager audio)
{
sampleStart = audio.Samples.Get(@"SongSelect/confirm-selection");
InternalChildren = new Drawable[]
{
BeatmapAvailabilityTracker,
new GridContainer
{
RelativeSizeAxes = Axes.Both,
RowDimensions = new[]
{
new Dimension(),
new Dimension(GridSizeMode.Absolute, 50)
},
Content = new[]
{
// Padded main content (drawable room + main content)
new Drawable[]
{
new Container
{
RelativeSizeAxes = Axes.Both,
Padding = new MarginPadding
{
Horizontal = WaveOverlayContainer.WIDTH_PADDING,
Bottom = 30
},
Children = new[]
{
mainContent = new GridContainer
{
RelativeSizeAxes = Axes.Both,
RowDimensions = new[]
{
new Dimension(GridSizeMode.AutoSize),
new Dimension(GridSizeMode.Absolute, 10)
},
Content = new[]
{
new Drawable[]
{
new DrawableMatchRoom(Room, allowEdit)
{
OnEdit = () => settingsOverlay.Show(),
SelectedItem = { BindTarget = SelectedItem }
}
},
null,
new Drawable[]
{
new Container
{
RelativeSizeAxes = Axes.Both,
Children = new[]
{
new Container
{
RelativeSizeAxes = Axes.Both,
Masking = true,
CornerRadius = 10,
Child = new Box
{
RelativeSizeAxes = Axes.Both,
Colour = Color4Extensions.FromHex(@"3e3a44") // Temporary.
},
},
new Container
{
RelativeSizeAxes = Axes.Both,
Padding = new MarginPadding(20),
Child = CreateMainContent(),
},
new Container
{
Anchor = Anchor.BottomLeft,
Origin = Anchor.BottomLeft,
RelativeSizeAxes = Axes.X,
AutoSizeAxes = Axes.Y,
Child = userModsSelectOverlay = new UserModSelectOverlay
{
SelectedMods = { BindTarget = UserMods },
IsValidMod = _ => false
}
},
}
}
}
}
},
new Container
{
RelativeSizeAxes = Axes.Both,
// Resolves 1px masking errors between the settings overlay and the room panel.
Padding = new MarginPadding(-1),
Child = settingsOverlay = CreateRoomSettingsOverlay(Room)
}
},
},
},
// Footer
new Drawable[]
{
new Container
{
RelativeSizeAxes = Axes.Both,
Children = new Drawable[]
{
new Box
{
RelativeSizeAxes = Axes.Both,
Colour = Color4Extensions.FromHex(@"28242d") // Temporary.
},
new Container
{
RelativeSizeAxes = Axes.Both,
Padding = new MarginPadding(5),
Child = CreateFooter()
},
}
}
}
}
}
};
}
protected override void LoadComplete()
{
base.LoadComplete();
RoomId.BindValueChanged(id =>
{
if (id.NewValue == null)
{
// A new room is being created.
// The main content should be hidden until the settings overlay is hidden, signaling the room is ready to be displayed.
mainContent.Hide();
settingsOverlay.Show();
}
else
{
mainContent.Show();
settingsOverlay.Hide();
}
}, true);
SelectedItem.BindValueChanged(_ => Scheduler.AddOnce(selectedItemChanged));
managerUpdated = beatmapManager.ItemUpdated.GetBoundCopy();
managerUpdated.BindValueChanged(beatmapUpdated);
UserMods.BindValueChanged(_ => Scheduler.AddOnce(UpdateMods));
}
protected override IReadOnlyDependencyContainer CreateChildDependencies(IReadOnlyDependencyContainer parent)
{
return new CachedModelDependencyContainer<Room>(base.CreateChildDependencies(parent))
{
Model = { Value = Room }
};
}
public override bool OnBackButton()
{
if (Room.RoomID.Value == null)
{
// room has not been created yet; exit immediately.
settingsOverlay.Hide();
return base.OnBackButton();
}
if (userModsSelectOverlay.State.Value == Visibility.Visible)
{
userModsSelectOverlay.Hide();
return true;
}
if (settingsOverlay.State.Value == Visibility.Visible)
{
settingsOverlay.Hide();
return true;
}
return base.OnBackButton();
}
protected void ShowUserModSelect() => userModsSelectOverlay.Show();
public override void OnEntering(IScreen last)
{
base.OnEntering(last);
beginHandlingTrack();
}
public override void OnSuspending(IScreen next)
{
endHandlingTrack();
base.OnSuspending(next);
}
public override void OnResuming(IScreen last)
{
base.OnResuming(last);
beginHandlingTrack();
Scheduler.AddOnce(UpdateMods);
}
public override bool OnExiting(IScreen next)
{
RoomManager?.PartRoom();
Mods.Value = Array.Empty<Mod>();
endHandlingTrack();
return base.OnExiting(next);
}
protected void StartPlay()
{
sampleStart?.Play();
// fallback is to allow this class to operate when there is no parent OnlineScreen (testing purposes).
var targetScreen = (Screen)ParentScreen ?? this;
targetScreen.Push(CreateGameplayScreen());
}
/// <summary>
/// Creates the gameplay screen to be entered.
/// </summary>
/// <returns>The screen to enter.</returns>
protected abstract Screen CreateGameplayScreen();
private void selectedItemChanged()
{
updateWorkingBeatmap();
var selected = SelectedItem.Value;
if (selected == null)
return;
// Remove any user mods that are no longer allowed.
UserMods.Value = UserMods.Value
.Where(m => selected.AllowedMods.Any(a => m.GetType() == a.GetType()))
.ToList();
UpdateMods();
Ruleset.Value = selected.Ruleset.Value;
if (!selected.AllowedMods.Any())
{
UserModsSection?.Hide();
userModsSelectOverlay.Hide();
userModsSelectOverlay.IsValidMod = _ => false;
}
else
{
UserModsSection?.Show();
userModsSelectOverlay.IsValidMod = m => selected.AllowedMods.Any(a => a.GetType() == m.GetType());
}
}
private void beatmapUpdated(ValueChangedEvent<WeakReference<BeatmapSetInfo>> weakSet) => Schedule(updateWorkingBeatmap);
private void updateWorkingBeatmap()
{
var beatmap = SelectedItem.Value?.Beatmap.Value;
// Retrieve the corresponding local beatmap, since we can't directly use the playlist's beatmap info
var localBeatmap = beatmap == null ? null : beatmapManager.QueryBeatmap(b => b.OnlineBeatmapID == beatmap.OnlineBeatmapID);
Beatmap.Value = beatmapManager.GetWorkingBeatmap(localBeatmap);
}
protected virtual void UpdateMods()
{
if (SelectedItem.Value == null)
return;
Mods.Value = UserMods.Value.Concat(SelectedItem.Value.RequiredMods).ToList();
}
private void beginHandlingTrack()
{
Beatmap.BindValueChanged(applyLoopingToTrack, true);
}
private void endHandlingTrack()
{
Beatmap.ValueChanged -= applyLoopingToTrack;
cancelTrackLooping();
}
private void applyLoopingToTrack(ValueChangedEvent<WorkingBeatmap> _ = null)
{
if (!this.IsCurrentScreen())
return;
var track = Beatmap.Value?.Track;
if (track != null)
{
Beatmap.Value.PrepareTrackForPreviewLooping();
music?.EnsurePlayingSomething();
}
}
private void cancelTrackLooping()
{
var track = Beatmap?.Value?.Track;
if (track != null)
track.Looping = false;
}
/// <summary>
/// Creates the main centred content.
/// </summary>
protected abstract Drawable CreateMainContent();
/// <summary>
/// Creates the footer content.
/// </summary>
protected abstract Drawable CreateFooter();
/// <summary>
/// Creates the room settings overlay.
/// </summary>
/// <param name="room">The room to change the settings of.</param>
protected abstract RoomSettingsOverlay CreateRoomSettingsOverlay(Room room);
public class UserModSelectButton : PurpleTriangleButton
{
}
}
}
| |
namespace OctoTorrent.Client
{
using System;
using System.Collections.Generic;
using System.Threading;
using System.Security.Cryptography;
using System.IO;
using System.Linq;
using Common;
using PieceWriters;
public delegate void DiskIOCallback(bool successful);
/// <summary>
/// Serialises all torrent disk access onto a single dedicated IO loop.
/// Reads and writes are buffered in queues, drained subject to the read/write
/// rate limiters, and their <see cref="BufferedIO"/> carriers are recycled
/// through a cache. Failures are reported by switching the owning manager
/// into error mode (see <c>SetError</c>).
/// </summary>
public partial class DiskManager : IDisposable
{
// The single thread/loop on which all disk IO is performed.
private static readonly MainLoop IOLoop = new MainLoop("Disk IO");
#region Member Variables
// Guards both buffered queues below; producers may enqueue from other threads.
private readonly object _bufferLock = new object();
private readonly Queue<BufferedIO> _bufferedReads;
private readonly Queue<BufferedIO> _bufferedWrites;
// Pool of reusable BufferedIO instances to reduce allocations.
private readonly ICache<BufferedIO> _cache;
private bool _disposed;
private readonly ClientEngine _engine;
// Drains the buffered read/write queues, subject to the rate limiters.
private readonly MainLoopTask _loopTask;
private readonly SpeedMonitor _readMonitor;
private readonly SpeedMonitor _writeMonitor;
internal readonly RateLimiter ReadLimiter;
internal readonly RateLimiter WriteLimiter;
private PieceWriter _writer;
#endregion Member Variables
#region Properties
// True once Dispose() has run; the periodic tick stops itself when set.
public bool Disposed
{
get { return _disposed; }
}
// Number of writes currently waiting to be flushed to disk.
public int QueuedWrites
{
get { return _bufferedWrites.Count; }
}
// Current read throughput as reported by the read speed monitor.
public int ReadRate
{
get { return _readMonitor.Rate; }
}
// Current write throughput as reported by the write speed monitor.
public int WriteRate
{
get { return _writeMonitor.Rate; }
}
public long TotalRead
{
get { return _readMonitor.Total; }
}
public long TotalWritten
{
get { return _writeMonitor.Total; }
}
// The underlying piece writer; replaceable for testing/alternative backends.
internal PieceWriter Writer
{
get { return _writer; }
set { _writer = value; }
}
#endregion Properties
#region Constructors
internal DiskManager(ClientEngine engine, PieceWriter writer)
{
_bufferedReads = new Queue<BufferedIO>();
_bufferedWrites = new Queue<BufferedIO>();
_cache = new Cache<BufferedIO>(true).Synchronize();
_engine = engine;
ReadLimiter = new RateLimiter();
_readMonitor = new SpeedMonitor();
_writeMonitor = new SpeedMonitor();
WriteLimiter = new RateLimiter();
_writer = writer;
// Drains writes first, then reads. Each dequeue happens under _bufferLock,
// but Peek() is done outside it — presumably safe because only the IO loop
// dequeues; NOTE(review): confirm no other thread dequeues concurrently.
// The "/ 2048" converts buffer length to the rate limiter's token units.
_loopTask = delegate {
if (_disposed)
return;
while (_bufferedWrites.Count > 0 && WriteLimiter.TryProcess(_bufferedWrites.Peek ().buffer.Length / 2048))
{
BufferedIO write;
lock (_bufferLock)
write = _bufferedWrites.Dequeue();
try
{
PerformWrite(write);
_cache.Enqueue (write);
}
catch (Exception ex)
{
if (write.Manager != null)
SetError(write.Manager, Reason.WriteFailure, ex);
}
}
while (_bufferedReads.Count > 0 && ReadLimiter.TryProcess(_bufferedReads.Peek().Count / 2048))
{
BufferedIO read;
lock (_bufferLock)
read = _bufferedReads.Dequeue();
try
{
PerformRead(read);
_cache.Enqueue(read);
}
catch (Exception ex)
{
if (read.Manager != null)
SetError(read.Manager, Reason.ReadFailure, ex);
}
}
};
// Once a second: tick the speed monitors and drain any pending IO.
// Returning false (after disposal) cancels the recurring timeout.
IOLoop.QueueTimeout(TimeSpan.FromSeconds(1), () =>
{
if (_disposed)
return false;
_readMonitor.Tick();
_writeMonitor.Tick();
_loopTask();
return true;
});
}
#endregion Constructors
#region Methods
/// <summary>
/// Flushes all pending IO and closes the file streams of the given manager's
/// torrent on the IO loop. The returned handle is signalled when done,
/// whether or not closing succeeded.
/// </summary>
internal WaitHandle CloseFileStreams(TorrentManager manager)
{
var handle = new ManualResetEvent(false);
IOLoop.Queue(delegate {
// Process all pending reads/writes then close any open streams
try
{
_loopTask();
_writer.Close(manager.Torrent.Files);
}
catch (Exception ex)
{
SetError (manager, Reason.WriteFailure, ex);
}
finally
{
handle.Set();
}
});
return handle;
}
public void Dispose()
{
if (_disposed)
return;
_disposed = true;
// FIXME: Ensure everything is written to disk before killing the mainloop.
IOLoop.QueueWait((MainLoopTask)_writer.Dispose);
}
/// <summary>
/// Synchronously flushes every registered torrent's files on the IO loop.
/// </summary>
public void Flush()
{
IOLoop.QueueWait(() =>
{
foreach (var manager in _engine.Torrents)
_writer.Flush(manager.Torrent.Files);
});
}
/// <summary>
/// Synchronously flushes the given torrent's files on the IO loop.
/// </summary>
public void Flush(TorrentManager manager)
{
Check.Manager(manager);
IOLoop.QueueWait(() => _writer.Flush(manager.Torrent.Files));
}
// Performs a single write. io.Complete is set and the callback is invoked
// even when Write throws (the finally runs before the exception propagates).
// NOTE(review): on the throwing path the callback still receives `true`
// before the caller's catch reports WriteFailure — confirm this is intended.
private void PerformWrite(BufferedIO io)
{
try {
// Perform the actual write
_writer.Write(io.Files, io.Offset, io.buffer, 0, io.Count, io.PieceLength, io.Manager.Torrent.Size);
_writeMonitor.AddDelta(io.Count);
} finally {
io.Complete = true;
if (io.Callback != null)
io.Callback(true);
}
}
// Performs a single read. ActualCount is io.Count on success, 0 otherwise;
// the callback's `successful` flag reflects whether the full count was read.
private void PerformRead(BufferedIO io)
{
try
{
io.ActualCount = _writer.Read(io.Files, io.Offset, io.buffer, 0, io.Count,
io.PieceLength,
io.Manager.Torrent.Size)
? io.Count
: 0;
_readMonitor.AddDelta(io.ActualCount);
}
finally {
io.Complete = true;
if (io.Callback != null)
io.Callback(io.ActualCount == io.Count);
}
}
// Asynchronously flushes files touched by piece `index`.
// NOTE(review): the filter (StartPieceIndex >= index && EndPieceIndex <= index)
// only matches files wholly contained within that single piece; files merely
// spanning the piece are skipped — confirm the comparison directions.
internal void QueueFlush(TorrentManager manager, int index)
{
IOLoop.Queue(() =>
{
try
{
foreach (var file in manager.Torrent.Files.Where(file => file.StartPieceIndex >= index && file.EndPieceIndex <= index))
_writer.Flush(file);
}
catch (Exception ex)
{
SetError(manager, Reason.WriteFailure, ex);
}
});
}
/// <summary>
/// Queues an asynchronous read of <paramref name="count"/> bytes at
/// <paramref name="offset"/> into <paramref name="buffer"/>; the callback
/// receives whether the full count was read.
/// </summary>
internal void QueueRead(TorrentManager manager, long offset, byte[] buffer, int count, DiskIOCallback callback)
{
var io = _cache.Dequeue();
io.Initialise(manager, buffer, offset, count, manager.Torrent.PieceLength, manager.Torrent.Files);
QueueRead(io, callback);
}
// Executes immediately when already on the IO thread (avoids deadlock when
// called from the loop itself); otherwise enqueues and kicks the loop task
// if the queue was previously empty.
void QueueRead(BufferedIO io, DiskIOCallback callback)
{
io.Callback = callback;
if (Thread.CurrentThread == IOLoop.Thread) {
PerformRead(io);
_cache.Enqueue (io);
}
else
lock (_bufferLock)
{
_bufferedReads.Enqueue(io);
if (_bufferedReads.Count == 1)
IOLoop.Queue(_loopTask);
}
}
/// <summary>
/// Queues an asynchronous write of <paramref name="count"/> bytes from
/// <paramref name="buffer"/> at <paramref name="offset"/>.
/// </summary>
internal void QueueWrite(TorrentManager manager, long offset, byte[] buffer, int count, DiskIOCallback callback)
{
var io = _cache.Dequeue();
io.Initialise(manager, buffer, offset, count, manager.Torrent.PieceLength, manager.Torrent.Files);
QueueWrite(io, callback);
}
// Mirror of QueueRead(BufferedIO, ...) for writes.
void QueueWrite(BufferedIO io, DiskIOCallback callback)
{
io.Callback = callback;
if (Thread.CurrentThread == IOLoop.Thread) {
PerformWrite(io);
_cache.Enqueue (io);
}
else
lock (_bufferLock)
{
_bufferedWrites.Enqueue(io);
if (_bufferedWrites.Count == 1)
IOLoop.Queue(_loopTask);
}
}
/// <summary>
/// Blocks until the IO loop has checked whether any of the torrent's files
/// exist on disk. Errors are reported as ReadFailure and yield false.
/// </summary>
internal bool CheckAnyFilesExist(TorrentManager manager)
{
var result = false;
IOLoop.QueueWait(() =>
{
try
{
for (var i = 0; i < manager.Torrent.Files.Length && !result; i++)
result = _writer.Exists(manager.Torrent.Files[i]);
}
catch (Exception ex)
{
SetError(manager, Reason.ReadFailure, ex);
}
});
return result;
}
/// <summary>
/// Blocks until the IO loop has checked whether the given file exists.
/// </summary>
internal bool CheckFileExists(TorrentManager manager, TorrentFile file)
{
var result = false;
IOLoop.QueueWait(() =>
{
try
{
result = _writer.Exists(file);
}
catch (Exception ex)
{
SetError(manager, Reason.ReadFailure, ex);
}
});
return result;
}
// Switches the manager into error mode on the engine's main loop,
// unless it is already in error mode.
static void SetError(TorrentManager manager, Reason reason, Exception ex)
{
ClientEngine.MainLoop.Queue(() =>
{
if (manager.Mode is ErrorMode)
return;
manager.Error = new Error(reason, ex);
manager.Mode = new ErrorMode(manager);
});
}
/// <summary>
/// Asynchronously hashes piece <paramref name="pieceIndex"/> by reading it
/// block-by-block and feeding the blocks into an incremental SHA1. The
/// callback receives the hash bytes, or null if any read failed.
/// </summary>
internal void BeginGetHash(TorrentManager manager, int pieceIndex, MainLoopResult callback)
{
var count = 0;
var offset = (long) manager.Torrent.PieceLength * pieceIndex;
// The final piece may be shorter than PieceLength, hence the Min with Size.
var endOffset = Math.Min(offset + manager.Torrent.PieceLength, manager.Torrent.Size);
var hashBuffer = BufferManager.EmptyBuffer;
ClientEngine.BufferManager.GetBuffer(ref hashBuffer, Piece.BlockSize);
var hasher = HashAlgoFactory.Create<SHA1>();
hasher.Initialize();
// Self-referential callback: each completed read hashes its block and
// either finishes (success or failure) or queues the next block.
DiskIOCallback readCallback = null;
readCallback = successful =>
{
if (successful)
hasher.TransformBlock(hashBuffer, 0, count, hashBuffer, 0);
offset += count;
if (!successful || offset == endOffset)
{
object hash = null;
if (successful)
{
hasher.TransformFinalBlock(hashBuffer, 0, 0);
hash = hasher.Hash;
}
((IDisposable) hasher).Dispose();
ClientEngine.BufferManager.FreeBuffer(ref hashBuffer);
ClientEngine.MainLoop.Queue(() => callback(hash));
}
else
{
count = (int) Math.Min(Piece.BlockSize, endOffset - offset);
QueueRead(manager, offset, hashBuffer, count, readCallback);
}
};
// Kick off the first block read.
count = (int)Math.Min(Piece.BlockSize, endOffset - offset);
QueueRead(manager, offset, hashBuffer, count, readCallback);
}
#endregion
/// <summary>
/// Moves a single file to <paramref name="path"/> on the IO loop and updates
/// its FullPath on success. Failures are reported as WriteFailure.
/// </summary>
internal void MoveFile(TorrentManager manager, TorrentFile file, string path)
{
IOLoop.QueueWait(() =>
{
try
{
path = Path.GetFullPath(path);
_writer.Move(file.FullPath, path, false);
file.FullPath = path;
}
catch (Exception ex)
{
SetError(manager, Reason.WriteFailure, ex);
}
});
}
/// <summary>
/// Moves all of the torrent's files under <paramref name="newRoot"/> on the IO loop.
/// </summary>
internal void MoveFiles(TorrentManager manager, string newRoot, bool overWriteExisting)
{
IOLoop.QueueWait(() =>
{
try
{
_writer.Move(newRoot, manager.Torrent.Files, overWriteExisting);
}
catch (Exception ex)
{
SetError(manager, Reason.WriteFailure, ex);
}
});
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Tharga.Toolkit.Console.Entities;
using Tharga.Toolkit.Console.Helpers;
using Tharga.Toolkit.Console.Interfaces;
namespace Tharga.Toolkit.Console.Commands.Base
{
/// <summary>
/// Base class for commands that contain sub-commands. Handles registration
/// (both by instance and by type, resolved later in <see cref="Attach"/>),
/// recursive sub-command lookup, and generation of help output.
/// </summary>
public abstract class ContainerCommandBase : CommandBase, IContainerCommand
{
// Sub-commands registered as live instances.
private readonly List<ICommand> _subCommands = new List<ICommand>();
// Type-based registrations: Item1 = command type, Item2 = owning container
// type (null = register directly on this container). Resolved in Attach().
protected readonly List<Tuple<Type, Type>> SubCommandTypes = new List<Tuple<Type, Type>>();
public IEnumerable<ICommand> SubCommands => _subCommands.OrderBy(x => x.Name);
public event EventHandler<CommandRegisteredEventArgs> CommandRegisteredEvent;
protected ContainerCommandBase(string name, string description = null, bool hidden = false)
: base(name, description, hidden)
{
}
// Containers have no own help text; help is composed from sub-commands.
public override IEnumerable<HelpLine> HelpText { get { yield break; } }
/// <summary>
/// All entry strings this container can respond to, recursively including
/// sub-container keys ("name subkey") and "help" variants.
/// NOTE(review): for a RootCommandBase, "help" is yielded once per
/// sub-command name (it is inside the loops) — confirm that duplication
/// is harmless to consumers.
/// </summary>
protected virtual IEnumerable<string> CommandKeys
{
get
{
foreach (var sub in _subCommands)
{
foreach (var name in sub.Names)
{
if (this is RootCommandBase)
{
yield return "help";
}
yield return name;
var subContainer = sub as ContainerCommandBase;
if (subContainer == null) continue;
yield return name + " help";
var commandKeys = subContainer.CommandKeys;
foreach (var key in commandKeys)
{
yield return $"{name} {key}";
}
}
}
}
}
// Registers a command type to be resolved and attached to this container.
protected void RegisterCommand<T>()
{
SubCommandTypes.Add(new Tuple<Type, Type>(typeof(T), null));
}
// Registers a command type to be attached to the container of type TContainer.
protected void RegisterCommand<T, TContainer>()
where TContainer : IContainerCommand
{
SubCommandTypes.Add(new Tuple<Type, Type>(typeof(T), typeof(TContainer)));
}
protected void RegisterCommand(Type type)
{
SubCommandTypes.Add(new Tuple<Type, Type>(type, null));
}
/// <summary>
/// Registers a live command instance, wiring its output through this
/// container. Throws if any of its names is already taken.
/// </summary>
protected void RegisterCommand(ICommand command)
{
if (command.Names.Any(x => GetCommand(x) != null)) throw new CommandAlreadyRegisteredException(command.Name, Name);
_subCommands.Add(command);
command.WriteEvent += OnOutputEvent;
CommandRegisteredEvent?.Invoke(this, new CommandRegisteredEventArgs(command));
}
// Removes all commands matching the given name, by primary name or alias.
public void UnregisterCommand(string commandName)
{
_subCommands.RemoveAll(x => string.Compare(x.Name, commandName, StringComparison.InvariantCultureIgnoreCase) == 0);
_subCommands.RemoveAll(x => x.Names.Any(y => string.Compare(y, commandName, StringComparison.InvariantCultureIgnoreCase) == 0));
}
// Finds a directly registered sub-command by primary name or alias (case-insensitive).
protected ICommand GetCommand(string commandName)
{
return _subCommands.FirstOrDefault(x => string.Compare(x.Name, commandName, StringComparison.InvariantCultureIgnoreCase) == 0 || x.Names.Any(y => string.Compare(y, commandName, StringComparison.InvariantCultureIgnoreCase) == 0));
}
/// <summary>
/// Builds a <see cref="HelpCommand"/> for this container or for the
/// sub-command addressed by <paramref name="paramList"/>. A trailing
/// " details" selects the extended (hidden-inclusive) view.
/// </summary>
protected override ICommand GetHelpCommand(string paramList)
{
var helpCommand = new HelpCommand(RootCommand.CommandEngine);
var showHidden = true;
var command = this as ICommand;
var subCommand = paramList?.Trim();
if (paramList != " details")
{
showHidden = false;
// NOTE(review): typeRegistration is discarded here — confirm nothing
// in help generation should depend on it.
command = GetSubCommand(paramList, out subCommand, out var typeRegistration);
}
if (command == null)
{
helpCommand.AddLine($"There is no command named '{paramList?.Replace(" details", "")}', cannot help with that.", foreColor: ConsoleColor.Yellow);
return helpCommand;
}
if (command.Name == "root")
{
// Root-level help shows application name and version instead of a command header.
var assembly = Assembly.GetEntryAssembly();
helpCommand.AddLine($"Application {assembly?.GetName().Name ?? "main"} help.", foreColor: ConsoleColor.DarkCyan);
helpCommand.AddLine($"Version {assembly?.GetName().Version}");
}
else
{
helpCommand.AddLine($"Help for command {command.Name}.", foreColor: ConsoleColor.DarkCyan);
helpCommand.AddLine(command.Description);
}
command.CanExecute(out var reasonMessage);
if (subCommand != null && subCommand.EndsWith("details"))
{
// Extended help: execution blockers, full help text and (for root) usage/switches.
if (!string.IsNullOrEmpty(reasonMessage))
{
helpCommand.AddLine(string.Empty);
helpCommand.AddLine("This command can currently not be executed.", foreColor: ConsoleColor.Yellow);
helpCommand.AddLine(reasonMessage, foreColor: ConsoleColor.Yellow);
}
if (command.HelpText.Any())
{
foreach (var helpText in command.HelpText)
{
helpCommand.AddLine(helpText.Text, foreColor: helpText.ForeColor);
}
}
if (command.Name == "root")
{
helpCommand.AddLine(string.Empty);
helpCommand.AddLine("How to use help.", foreColor: ConsoleColor.DarkCyan);
helpCommand.AddLine("Use the parameter -? at the end of any command to get more details.");
helpCommand.AddLine("It is also possible to type 'help [command]' to get details.");
//helpCommand.AddLine(string.Empty);
//helpCommand.AddLine("Application parameters.", foreColor: ConsoleColor.DarkCyan);
//helpCommand.AddLine("");
//helpCommand.AddLine(string.Empty);
//helpCommand.AddLine("More details.", foreColor: ConsoleColor.DarkCyan);
//helpCommand.AddLine("Visit https://github.com/poxet/tharga-console.");
helpCommand.AddLine(string.Empty);
helpCommand.AddLine("Switches:", foreColor: ConsoleColor.DarkCyan);
helpCommand.AddLine("/c Keeps the console open when parameters are sent to the console.");
helpCommand.AddLine("/e Keeps the console open when parameters are sent to the console and something goes wrong.");
helpCommand.AddLine("/r Resets settings.");
}
}
else if (command.HelpText.Any())
{
// Short help: only hint at where to find more information.
if (command.Name == "root")
{
helpCommand.AddLine("Type \"help\" for more information.", foreColor: ConsoleColor.DarkYellow);
}
else
{
helpCommand.AddLine($"Type \"{command.Name} -?\" for more information.", foreColor: ConsoleColor.DarkYellow);
}
}
if (command is ContainerCommandBase containerCommand)
{
ShowSubCommandHelp(containerCommand._subCommands, helpCommand, reasonMessage, showHidden);
}
if (command.Names.Count() > 1)
{
helpCommand.AddLine(string.Empty);
helpCommand.AddLine("Alternative names:", foreColor: ConsoleColor.DarkCyan);
foreach (var name in command.Names)
{
helpCommand.AddLine($"{name}");
}
}
return helpCommand;
}
// Appends "Sections" (containers) and "Commands" (actions) listings to the
// help output, padding names to a common width and dimming entries that
// cannot currently execute. Hidden entries are marked with '*'.
private void ShowSubCommandHelp(IEnumerable<ICommand> subCommands, HelpCommand helpCommand, string parentReasonMesage, bool showHidden)
{
var anyHidden = false;
var arr = subCommands as ICommand[] ?? subCommands.ToArray();
var actionCommands = arr.Where(x => x is ActionCommandBase).ToArray();
var containerCommands = arr.Where(x => x is ContainerCommandBase).ToArray();
var padLength = containerCommands.Max(x => x.Name.Length, 0).Max(actionCommands.Max(x => x.Name.Length, 0));
if (containerCommands.Any(x => !x.IsHidden || showHidden))
{
helpCommand.AddLine(string.Empty);
helpCommand.AddLine($"Sections for {Name}:", foreColor: ConsoleColor.DarkCyan);
foreach (var command in containerCommands)
{
if (!command.IsHidden || showHidden)
{
var hidden = command.IsHidden ? "*" : "";
if (command.IsHidden) anyHidden = true;
helpCommand.AddLine($"{(hidden + command.Name).PadStringAfter(padLength)} {command.Description}", () =>
{
// A parent execution blocker overrides the child's own state.
var canExecute = command.CanExecute(out _);
if (canExecute && !string.IsNullOrEmpty(parentReasonMesage))
{
return false;
}
return canExecute;
});
}
}
}
if (actionCommands.Any(x => !x.IsHidden || showHidden))
{
helpCommand.AddLine(string.Empty);
helpCommand.AddLine($"Commands for {Name}:", foreColor: ConsoleColor.DarkCyan);
foreach (var command in actionCommands)
{
if (!command.IsHidden || showHidden)
{
var hidden = command.IsHidden ? "*" : "";
if (command.IsHidden) anyHidden = true;
helpCommand.AddLine($"{(hidden + command.Name).PadStringAfter(padLength)} {command.Description}", () =>
{
var canExecute = command.CanExecute(out _);
if (canExecute && !string.IsNullOrEmpty(parentReasonMesage))
{
return false;
}
return canExecute;
});
}
}
}
if (anyHidden)
{
helpCommand.AddLine(string.Empty);
helpCommand.AddLine("* = Hidden command");
}
}
// A container is executable only if at least one sub-command is.
public override bool CanExecute(out string reasonMessage)
{
string dummy;
if (!_subCommands.Any(x => x.CanExecute(out dummy)))
{
reasonMessage = "There are no executable subcommands.";
return false;
}
return base.CanExecute(out reasonMessage);
}
/// <summary>
/// Resolves <paramref name="entry"/> to the deepest matching command,
/// returning the remaining parameter string in <paramref name="subCommand"/>.
/// "help", "-?", "/?" and "--help" are intercepted and answered with a help
/// command. <paramref name="typeRegistration"/> reports whether the match
/// was registered by type on this container.
/// </summary>
protected internal ICommand GetSubCommand(string entry, out string subCommand, out bool typeRegistration)
{
typeRegistration = false;
subCommand = null;
if (string.IsNullOrEmpty(entry))
{
return this;
}
var arr = entry.Split(' ');
if (arr.Length > 1) subCommand = entry.Substring(entry.IndexOf(' ') + 1);
var name = arr[0].ToLower();
if (string.Compare("help", name, StringComparison.CurrentCultureIgnoreCase) == 0)
{
return GetHelpCommand(subCommand + " details");
}
else if (entry.EndsWith("-?") || entry.EndsWith("/?") || entry.EndsWith("--help"))
{
entry = entry.Replace("-?", string.Empty);
entry = entry.Replace("/?", string.Empty);
entry = entry.Replace("--help", string.Empty);
return GetHelpCommand(entry.Trim() + " details");
}
//Look for a command registered in current list
var command = _subCommands.FirstOrDefault(y => y.Names.Any(x => string.Compare(x, name, StringComparison.InvariantCultureIgnoreCase) == 0));
if (command == null) return null;
if (!(command is ContainerCommandBase containerCommandBase))
{
typeRegistration = SubCommandTypes.Any(x => x.Item1 == command.GetType());
return command;
}
//If there is a command, take the next parameter and look for a sub-command
var x1 = containerCommandBase.GetSubCommand(subCommand, out var nextSub, out typeRegistration);
if (x1 == null) return containerCommandBase; //If there is no sub-command, return the command found
subCommand = nextSub;
if (x1 is ActionCommandBase actionCommandBase)
{
// If the action itself can execute but its parent cannot, propagate
// the parent's blocking reason down to the action.
var a = x1.CanExecute(out var reasonMessage);
var b = command.CanExecute(out reasonMessage);
if (a && !b)
{
actionCommandBase.SetCanExecute(() => $"{reasonMessage} Inherited by parent.");
}
}
return x1;
}
// A container invoked directly only ever shows help or reports an unknown
// sub-command; actual work is done by action sub-commands.
public override void Invoke(string[] param)
{
var enumerable = param as string[] ?? param.ToArray();
var paramList = enumerable.ToParamString(); //TODO: Do not convert, use input all the way
if (!CanExecute(out var reasonMessage))
{
OutputWarning(GetCanExecuteFailMessage(reasonMessage));
GetHelpCommand(paramList).Invoke(enumerable);
}
else if (string.IsNullOrEmpty(paramList))
{
GetHelpCommand(paramList).Invoke(enumerable);
}
else
{
OutputWarning($"Unknown sub command '{paramList}', for {Name}.");
}
}
// Forwards sub-command output to the root console.
protected void OnOutputEvent(object sender, WriteEventArgs e)
{
RootCommand.Console.Output(e);
}
/// <summary>
/// Attaches this container to the root command and materialises type-based
/// registrations: types targeted at this container are resolved via the
/// root's CommandResolver and registered here; the rest are passed down to
/// child containers.
/// </summary>
protected internal override void Attach(RootCommandBase rootCommand, List<Tuple<Type, Type>> subCommandTypes)
{
base.Attach(rootCommand, null);
subCommandTypes = SubCommandTypes.Union(subCommandTypes ?? new List<Tuple<Type, Type>>()).ToList();
var subCommandsToPassOn = new List<Tuple<Type, Type>>();
if (subCommandTypes.Any())
{
if (RootCommand.CommandResolver == null) throw new InvalidOperationException("No CommandResolver has been defined in the root command.");
foreach (var subCommandType in subCommandTypes)
{
if (subCommandType.Item2 == null || subCommandType.Item2 == GetType())
{
//TODO: Have a feature, so that the command does not have to be initiated before execution.
var command = RootCommand.CommandResolver.Resolve(subCommandType.Item1);
RegisterCommand(command);
}
else if (subCommandType.Item2 != null)
{
subCommandsToPassOn.Add(subCommandType);
}
}
}
foreach (var cmd in SubCommands)
{
var c = cmd as CommandBase;
c?.Attach(rootCommand, subCommandsToPassOn);
}
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the MIT license. See License.txt in the project root for license information.
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Testing;
using Test.Utilities;
using Xunit;
using VerifyCS = Test.Utilities.CSharpCodeFixVerifier<
Microsoft.CodeQuality.Analyzers.QualityGuidelines.AssigningSymbolAndItsMemberInSameStatement,
Microsoft.CodeAnalysis.Testing.EmptyCodeFixProvider>;
namespace Microsoft.CodeQuality.Analyzers.UnitTests.QualityGuidelines
{
public class AssigningSymbolAndItsMemberInSameStatementTests
{
[Fact]
public async Task CSharpReassignLocalVariableAndReferToItsFieldAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Field;
}
public class Test
{
public void Method()
{
C a = new C(), b = new C();
a.Field = a = b;
}
}
",
GetCSharpResultAt(12, 9, "a", "Field"));
}
[Fact]
public async Task CSharpReassignLocalVariableAndReferToItsPropertyAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
public void Method()
{
C a = new C(), b = new C(), c;
a.Property = c = a = b;
}
}
",
GetCSharpResultAt(12, 9, "a", "Property"));
}
[Fact]
public async Task CSharpReassignLocalVariablesPropertyAndReferToItsPropertyAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
public void Method()
{
C a = new C(), b = new C();
a.Property.Property = a.Property = b;
}
}
",
GetCSharpResultAt(12, 9, "a.Property", "Property"));
}
[Fact]
public async Task CSharpReassignLocalVariableAndItsPropertyAndReferToItsPropertyAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
public void Method()
{
C a = new C(), b = new C();
a.Property.Property = a.Property = a = b;
}
}
",
GetCSharpResultAt(12, 9, "a.Property", "Property"),
GetCSharpResultAt(12, 31, "a", "Property"));
}
[Fact]
public async Task CSharpReferToFieldOfReferenceTypeLocalVariableAfterItsReassignmentAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Field;
}
public class Test
{
static C x, y;
public void Method()
{
x.Field = x = y;
}
}
",
GetCSharpResultAt(13, 9, "x", "Field"));
}
[Fact]
public async Task CSharpReassignGlobalVariableAndReferToItsFieldAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
static C x, y;
public void Method()
{
x.Property.Property = x.Property = y;
}
}
",
GetCSharpResultAt(13, 9, "x.Property", "Property"));
}
[Fact]
public async Task CSharpReassignGlobalVariableAndItsPropertyAndReferToItsPropertyAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
static C x, y;
public void Method()
{
x.Property.Property = x.Property = x = y;
}
}
",
GetCSharpResultAt(13, 9, "x.Property", "Property"),
GetCSharpResultAt(13, 31, "x", "Property"));
}
[Fact]
public async Task CSharpReassignGlobalPropertyAndItsPropertyAndReferToItsPropertyAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
static C x { get; set; }
static C y { get; set; }
public void Method()
{
x.Property.Property = x.Property = x = y;
}
}
",
GetCSharpResultAt(14, 9, "x.Property", "Property"),
GetCSharpResultAt(14, 31, "x", "Property"));
}
[Fact]
public async Task CSharpReassignSecondLocalVariableAndReferToItsPropertyOfFirstVariableAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
public void Method()
{
C a = new C(), b;
a.Property = b = a;
}
}
");
}
[Fact]
public async Task CSharpReassignPropertyOfFirstLocalVariableWithSecondAndReferToPropertyOfSecondVariableAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
public void Method()
{
C a = new C(), b = new C(), c;
b.Property.Property = a.Property = b;
}
}
");
}
[Fact]
public async Task CSharpReassignPropertyOfFirstLocalVariableWithThirdAndReferToPropertyOfSecondVariableAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
public void Method()
{
C a = new C(), b = new C(), c = new C();
b.Property.Property = a.Property = c;
}
}
");
}
[Fact]
public async Task CSharpReassignMethodParameterAndReferToItsPropertyAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public class C
{
public C Property { get; set; }
}
public class Test
{
public void Method(C b)
{
C a = new C();
b.Property = b = a;
}
}
",
GetCSharpResultAt(12, 9, "b", "Property"));
}
[Fact]
public async Task CSharpReassignLocalValueTypeVariableAndReferToItsFieldAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public struct S
{
public S {|CS0523:Field|};
}
public class Test
{
public void Method()
{
S a, b = new S();
a.Field = a = b;
}
}
");
}
[Fact]
public async Task CSharpReassignLocalValueTypeVariableAndReferToItsPropertyAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public struct S
{
public S Property { get => default; set { } }
}
public class Test
{
public void Method()
{
S a, b = new S();
a.Property = a = b;
}
}
");
}
[Fact]
public async Task CSharpAssignmentInCodeWithOperationNoneAsync()
{
await VerifyCS.VerifyAnalyzerAsync(@"
public struct Test
{
public System.IntPtr PtrField;
public unsafe void Method(Test a, Test *b)
{
b->PtrField = a.PtrField;
}
}
");
}
[Fact]
[WorkItem(2889, "https://github.com/dotnet/roslyn-analyzers/issues/2889")]
public async Task CSharpAssignmentLocalReferenceOperationAsync()
{
// Regression test for dotnet/roslyn-analyzers#2889: chained property
// assignment through a local reference must not produce a diagnostic.
await VerifyCS.VerifyAnalyzerAsync(@"
public static class Class1
{
public static void SomeMethod()
{
var u = new System.UriBuilder();
u.Host = u.Path = string.Empty;
}
}
");
}
/// <summary>
/// Builds the expected diagnostic for the analyzer under test, located at
/// the given 1-based line/column and carrying the given message arguments.
/// </summary>
private static DiagnosticResult GetCSharpResultAt(int line, int column, params string[] arguments)
#pragma warning disable RS0030 // Do not used banned APIs
=> VerifyCS.Diagnostic()
.WithLocation(line, column)
#pragma warning restore RS0030 // Do not used banned APIs
.WithArguments(arguments);
}
}
| |
// File generated from our OpenAPI spec
namespace Stripe.Issuing
{
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Stripe.Infrastructure;
/// <summary>
/// Any use of an <a href="https://stripe.com/docs/issuing">issued card</a> that results in
/// funds entering or leaving your Stripe account, such as a completed purchase or refund,
/// is represented by an Issuing <c>Transaction</c> object.
///
/// Related guide: <a href="https://stripe.com/docs/issuing/purchases/transactions">Issued
/// Card Transactions</a>.
/// </summary>
public class Transaction : StripeEntity<Transaction>, IHasId, IHasMetadata, IHasObject, IBalanceTransactionSource
{
/// <summary>
/// Unique identifier for the object.
/// </summary>
[JsonProperty("id")]
public string Id { get; set; }
/// <summary>
/// String representing the object's type. Objects of the same type share the same value.
/// </summary>
[JsonProperty("object")]
public string Object { get; set; }
/// <summary>
/// The transaction amount, which will be reflected in your balance. This amount is in your
/// currency and in the <a href="https://stripe.com/docs/currencies#zero-decimal">smallest
/// currency unit</a>.
/// </summary>
[JsonProperty("amount")]
public long Amount { get; set; }
/// <summary>
/// Detailed breakdown of amount components. These amounts are denominated in
/// <c>currency</c> and in the <a
/// href="https://stripe.com/docs/currencies#zero-decimal">smallest currency unit</a>.
/// </summary>
[JsonProperty("amount_details")]
public TransactionAmountDetails AmountDetails { get; set; }
#region Expandable Authorization
/// <summary>
/// (ID of the Authorization)
/// The <c>Authorization</c> object that led to this transaction.
/// </summary>
[JsonIgnore]
public string AuthorizationId
{
get => this.InternalAuthorization?.Id;
set => this.InternalAuthorization = SetExpandableFieldId(value, this.InternalAuthorization);
}
/// <summary>
/// (Expanded)
/// The <c>Authorization</c> object that led to this transaction.
///
/// For more information, see the <a href="https://stripe.com/docs/expand">expand documentation</a>.
/// </summary>
[JsonIgnore]
public Authorization Authorization
{
get => this.InternalAuthorization?.ExpandedObject;
set => this.InternalAuthorization = SetExpandableFieldObject(value, this.InternalAuthorization);
}
// Raw expandable field backing both AuthorizationId and Authorization above.
[JsonProperty("authorization")]
[JsonConverter(typeof(ExpandableFieldConverter<Authorization>))]
internal ExpandableField<Authorization> InternalAuthorization { get; set; }
#endregion
#region Expandable BalanceTransaction
/// <summary>
/// (ID of the BalanceTransaction)
/// ID of the <a href="https://stripe.com/docs/api/balance_transactions">balance
/// transaction</a> associated with this transaction.
/// </summary>
[JsonIgnore]
public string BalanceTransactionId
{
get => this.InternalBalanceTransaction?.Id;
set => this.InternalBalanceTransaction = SetExpandableFieldId(value, this.InternalBalanceTransaction);
}
/// <summary>
/// (Expanded)
/// ID of the <a href="https://stripe.com/docs/api/balance_transactions">balance
/// transaction</a> associated with this transaction.
///
/// For more information, see the <a href="https://stripe.com/docs/expand">expand documentation</a>.
/// </summary>
[JsonIgnore]
public BalanceTransaction BalanceTransaction
{
get => this.InternalBalanceTransaction?.ExpandedObject;
set => this.InternalBalanceTransaction = SetExpandableFieldObject(value, this.InternalBalanceTransaction);
}
// Raw expandable field backing both BalanceTransactionId and BalanceTransaction above.
[JsonProperty("balance_transaction")]
[JsonConverter(typeof(ExpandableFieldConverter<BalanceTransaction>))]
internal ExpandableField<BalanceTransaction> InternalBalanceTransaction { get; set; }
#endregion
#region Expandable Card
/// <summary>
/// (ID of the Card)
/// The card used to make this transaction.
/// </summary>
[JsonIgnore]
public string CardId
{
get => this.InternalCard?.Id;
set => this.InternalCard = SetExpandableFieldId(value, this.InternalCard);
}
/// <summary>
/// (Expanded)
/// The card used to make this transaction.
///
/// For more information, see the <a href="https://stripe.com/docs/expand">expand documentation</a>.
/// </summary>
[JsonIgnore]
public Card Card
{
get => this.InternalCard?.ExpandedObject;
set => this.InternalCard = SetExpandableFieldObject(value, this.InternalCard);
}
// Raw expandable field backing both CardId and Card above.
[JsonProperty("card")]
[JsonConverter(typeof(ExpandableFieldConverter<Card>))]
internal ExpandableField<Card> InternalCard { get; set; }
#endregion
#region Expandable Cardholder
/// <summary>
/// (ID of the Cardholder)
/// The cardholder to whom this transaction belongs.
/// </summary>
[JsonIgnore]
public string CardholderId
{
get => this.InternalCardholder?.Id;
set => this.InternalCardholder = SetExpandableFieldId(value, this.InternalCardholder);
}
/// <summary>
/// (Expanded)
/// The cardholder to whom this transaction belongs.
///
/// For more information, see the <a href="https://stripe.com/docs/expand">expand documentation</a>.
/// </summary>
[JsonIgnore]
public Cardholder Cardholder
{
get => this.InternalCardholder?.ExpandedObject;
set => this.InternalCardholder = SetExpandableFieldObject(value, this.InternalCardholder);
}
// Raw expandable field backing both CardholderId and Cardholder above.
[JsonProperty("cardholder")]
[JsonConverter(typeof(ExpandableFieldConverter<Cardholder>))]
internal ExpandableField<Cardholder> InternalCardholder { get; set; }
#endregion
/// <summary>
/// Time at which the object was created. Measured in seconds since the Unix epoch.
/// </summary>
[JsonProperty("created")]
[JsonConverter(typeof(UnixDateTimeConverter))]
public DateTime Created { get; set; } = Stripe.Infrastructure.DateTimeUtils.UnixEpoch;
/// <summary>
/// Three-letter <a href="https://www.iso.org/iso-4217-currency-codes.html">ISO currency
/// code</a>, in lowercase. Must be a <a href="https://stripe.com/docs/currencies">supported
/// currency</a>.
/// </summary>
[JsonProperty("currency")]
public string Currency { get; set; }
#region Expandable Dispute
/// <summary>
/// (ID of the Dispute)
/// If you've disputed the transaction, the ID of the dispute.
/// </summary>
[JsonIgnore]
public string DisputeId
{
get => this.InternalDispute?.Id;
set => this.InternalDispute = SetExpandableFieldId(value, this.InternalDispute);
}
/// <summary>
/// (Expanded)
/// If you've disputed the transaction, the ID of the dispute.
///
/// For more information, see the <a href="https://stripe.com/docs/expand">expand documentation</a>.
/// </summary>
[JsonIgnore]
public Dispute Dispute
{
get => this.InternalDispute?.ExpandedObject;
set => this.InternalDispute = SetExpandableFieldObject(value, this.InternalDispute);
}
// Raw expandable field backing both DisputeId and Dispute above.
[JsonProperty("dispute")]
[JsonConverter(typeof(ExpandableFieldConverter<Dispute>))]
internal ExpandableField<Dispute> InternalDispute { get; set; }
#endregion
/// <summary>
/// Has the value <c>true</c> if the object exists in live mode or the value <c>false</c> if
/// the object exists in test mode.
/// </summary>
[JsonProperty("livemode")]
public bool Livemode { get; set; }
/// <summary>
/// The amount that the merchant will receive, denominated in <c>merchant_currency</c> and
/// in the <a href="https://stripe.com/docs/currencies#zero-decimal">smallest currency
/// unit</a>. It will be different from <c>amount</c> if the merchant is taking payment in a
/// different currency.
/// </summary>
[JsonProperty("merchant_amount")]
public long MerchantAmount { get; set; }
/// <summary>
/// The currency with which the merchant is taking payment.
/// </summary>
[JsonProperty("merchant_currency")]
public string MerchantCurrency { get; set; }
/// <summary>
/// Details about the merchant involved in this transaction.
/// </summary>
[JsonProperty("merchant_data")]
public AuthorizationMerchantData MerchantData { get; set; }
/// <summary>
/// Set of <a href="https://stripe.com/docs/api/metadata">key-value pairs</a> that you can
/// attach to an object. This can be useful for storing additional information about the
/// object in a structured format.
/// </summary>
[JsonProperty("metadata")]
public Dictionary<string, string> Metadata { get; set; }
/// <summary>
/// Additional purchase information that is optionally provided by the merchant.
/// </summary>
[JsonProperty("purchase_details")]
public TransactionPurchaseDetails PurchaseDetails { get; set; }
/// <summary>
/// The nature of the transaction.
/// One of: <c>capture</c>, or <c>refund</c>.
/// </summary>
[JsonProperty("type")]
public string Type { get; set; }
/// <summary>
/// The digital wallet used for this transaction. One of <c>apple_pay</c>,
/// <c>google_pay</c>, or <c>samsung_pay</c>.
/// One of: <c>apple_pay</c>, <c>google_pay</c>, or <c>samsung_pay</c>.
/// </summary>
[JsonProperty("wallet")]
public string Wallet { get; set; }
}
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.TeamFoundation.DistributedTask.WebApi;
using Microsoft.TeamFoundation.TestManagement.WebApi;
using Microsoft.VisualStudio.Services.Agent.Worker;
using Microsoft.VisualStudio.Services.Agent.Worker.CodeCoverage;
using Moq;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using Xunit;
namespace Microsoft.VisualStudio.Services.Agent.Tests.Worker.CodeCoverage
{
/// <summary>
/// L0 tests for <see cref="JaCoCoSummaryReader"/> covering valid, missing,
/// malformed, unrelated, and empty JaCoCo summary files.
/// </summary>
public class JacocoSummaryReaderTests
{
    private Mock<IExecutionContext> _ec;
    // Issues routed through the mocked execution context are collected here
    // so tests can assert on warning/error counts.
    private readonly List<string> _warnings = new List<string>();
    private readonly List<string> _errors = new List<string>();

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "PublishCodeCoverage")]
    [Trait("DeploymentItem", "Jacoco.xml")]
    public void VerifyJacocoCoverageStatisticsForValidSummaryFile()
    {
        SetupMocks();
        var jacocoXml = GetPathToValidJaCoCoFile();
        try
        {
            JaCoCoSummaryReader summaryReader = new JaCoCoSummaryReader();
            var coverageStats = summaryReader.GetCodeCoverageSummary(_ec.Object, jacocoXml);
            var coverageStatsNew = coverageStats.ToList();
            coverageStatsNew.Sort(new Statscomparer());
            Assert.Equal(0, _errors.Count);
            Assert.Equal(0, _warnings.Count);
            // coverageStatsNew is already a List; no extra copy needed.
            VerifyCoverageStats(coverageStatsNew);
        }
        finally
        {
            File.Delete(jacocoXml);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "PublishCodeCoverage")]
    public void VerifyFileDidnotExist()
    {
        SetupMocks();
        var jacocoXml = JacocoFileDidnotExist();
        JaCoCoSummaryReader summaryReader = new JaCoCoSummaryReader();
        Assert.Throws<ArgumentException>(() => summaryReader.GetCodeCoverageSummary(_ec.Object, jacocoXml));
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "PublishCodeCoverage")]
    public void VerifyInvalidXmlFile()
    {
        var invalidXml = JacocoInvalidXmlFile();
        var summaryReader = new JaCoCoSummaryReader();
        try
        {
            SetupMocks();
            summaryReader.GetCodeCoverageSummary(_ec.Object, invalidXml);
        }
        finally
        {
            File.Delete(invalidXml);
        }
        // A non-XML file should surface as a single warning, not an error.
        Assert.Equal(0, _errors.Count);
        Assert.Equal(1, _warnings.Count);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "PublishCodeCoverage")]
    public void VerifyWrongXmlFile()
    {
        var wrongXml = JacocoWrongXmlFile();
        var summaryReader = new JaCoCoSummaryReader();
        try
        {
            SetupMocks();
            var coverageStats = summaryReader.GetCodeCoverageSummary(_ec.Object, wrongXml);
            // xUnit's Assert.Equal takes the expected value first.
            Assert.Equal(0, coverageStats.ToList().Count);
            Assert.Equal(0, _errors.Count);
            Assert.Equal(0, _warnings.Count);
        }
        finally
        {
            File.Delete(wrongXml);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "PublishCodeCoverage")]
    public void VerifyEmptyXmlFile()
    {
        var emptyXml = GetEmptyCCFile();
        try
        {
            SetupMocks();
            var summaryReader = new JaCoCoSummaryReader();
            Assert.Null(summaryReader.GetCodeCoverageSummary(_ec.Object, emptyXml));
            Assert.Equal(0, _errors.Count);
            Assert.Equal(0, _warnings.Count);
        }
        finally
        {
            File.Delete(emptyXml);
        }
    }

    // Writes a well-formed JaCoCo summary to a temp file; callers delete it.
    private string GetPathToValidJaCoCoFile()
    {
        var file = Path.Combine(Path.GetTempPath(), "jacocoValid.xml");
        File.WriteAllText(file, CodeCoverageTestConstants.ValidJacocoXml);
        return file;
    }

    // Returns a path that is never created, to exercise the missing-file path.
    private string JacocoFileDidnotExist()
    {
        return Path.Combine(Path.GetTempPath(), "CoberturaDidNotExist.xml");
    }

    // Writes non-XML content to a temp file; callers delete it.
    private string JacocoInvalidXmlFile()
    {
        var file = Path.GetTempFileName();
        File.WriteAllText(file, "This is not XML File");
        return file;
    }

    // Writes well-formed XML that is not a JaCoCo report; callers delete it.
    private string JacocoWrongXmlFile()
    {
        var file = Path.GetTempFileName();
        File.WriteAllText(file, "<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n<event>This is a Test</event>");
        return file;
    }

    // Returns a zero-byte temp file; callers delete it.
    private string GetEmptyCCFile()
    {
        return Path.GetTempFileName();
    }

    // Asserts the exact statistics parsed from CodeCoverageTestConstants.ValidJacocoXml,
    // in Position order (class, complexity, method, line, instruction).
    private static void VerifyCoverageStats(List<CodeCoverageStatistics> coverageStats)
    {
        Assert.Equal(5, coverageStats.Count);
        Assert.Equal(1, (int)coverageStats[0].Position);
        Assert.Equal("class", coverageStats[0].Label.ToLower());
        Assert.Equal(2, (int)coverageStats[0].Covered);
        Assert.Equal(2, (int)coverageStats[0].Total);
        Assert.Equal(2, (int)coverageStats[1].Position);
        Assert.Equal("complexity", coverageStats[1].Label.ToLower());
        Assert.Equal(2, (int)coverageStats[1].Covered);
        Assert.Equal(6, (int)coverageStats[1].Total);
        Assert.Equal(3, (int)coverageStats[2].Position);
        Assert.Equal("method", coverageStats[2].Label.ToLower());
        Assert.Equal(2, (int)coverageStats[2].Covered);
        Assert.Equal(6, (int)coverageStats[2].Total);
        Assert.Equal(4, (int)coverageStats[3].Position);
        Assert.Equal("line", coverageStats[3].Label.ToLower());
        Assert.Equal(2, (int)coverageStats[3].Covered);
        Assert.Equal(7, (int)coverageStats[3].Total);
        Assert.Equal(5, (int)coverageStats[4].Position);
        Assert.Equal("instruction", coverageStats[4].Label.ToLower());
        Assert.Equal(8, (int)coverageStats[4].Covered);
        Assert.Equal(22, (int)coverageStats[4].Total);
    }

    // Wires a mocked execution context that records warnings and errors
    // into the fields above instead of a real logging pipeline.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA2000:Dispose objects before losing scope", MessageId = "TestHostContext")]
    private void SetupMocks([CallerMemberName] string name = "")
    {
        TestHostContext hc = new TestHostContext(this, name);
        _ec = new Mock<IExecutionContext>();
        _ec.Setup(x => x.AddIssue(It.IsAny<Issue>()))
            .Callback<Issue>
            ((issue) =>
            {
                if (issue.Type == IssueType.Warning)
                {
                    _warnings.Add(issue.Message);
                }
                else if (issue.Type == IssueType.Error)
                {
                    _errors.Add(issue.Message);
                }
            });
    }
}
/// <summary>
/// Orders <see cref="CodeCoverageStatistics"/> entries by ascending Position.
/// </summary>
public class Statscomparer : IComparer<CodeCoverageStatistics>
{
    /// <summary>
    /// Compares two statistics by Position. Returns 0 for equal positions so
    /// the comparer satisfies the IComparer contract (Compare(x, y) must be
    /// the negation of Compare(y, x)); an inconsistent comparer can cause
    /// List&lt;T&gt;.Sort to throw.
    /// </summary>
    public int Compare(CodeCoverageStatistics x, CodeCoverageStatistics y)
    {
        return ((int)x.Position).CompareTo((int)y.Position);
    }
}
}
| |
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 436 // Temporary bridge until the Batch core NuGet without file staging is published
using System;
using System.Collections.Generic;
using System.Collections.Concurrent;
using System.Text;
using System.IO;
using System.Threading;
using System.Diagnostics;
using System.Security;
using System.Runtime.InteropServices;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;
using BatchFS=Microsoft.Azure.Batch.FileStaging;
namespace Microsoft.Azure.Batch.FileStaging
{
/// <summary>
/// Provides for file staging of a local file to blob storage.
/// </summary>
/// <summary>
/// Provides for file staging of a local file to blob storage.
/// </summary>
public sealed class FileToStage : IFileStagingProvider
{
    /// <summary>
    /// The name of the local file to stage to blob storage
    /// </summary>
    public string LocalFileToStage
    {
        get;
        internal set;
    }

    /// <summary>
    /// The target filename, on the compute node, to which the blob contents will be downloaded.
    /// </summary>
    public string NodeFileName
    {
        get;
        internal set;
    }

    /// <summary>
    /// The instances of ResourcesFile for the staged local file.
    /// For this implementation, successful file staging of this object will
    /// result in a collection with only one entry.
    /// </summary>
    public IEnumerable<ResourceFile> StagedFiles
    {
        get;
        internal set;
    }

    /// <summary>
    /// The exception, if any, caught while attempting to stage this file.
    /// </summary>
    public Exception Exception
    {
        get;
        internal set;
    }

    #region constructors

    private FileToStage()
    {
    }

    /// <summary>
    /// Specifies that a local file should be staged to blob storage.
    /// The specified account will be charged for storage costs.
    /// </summary>
    /// <param name="localFileToStage">The name of the local file.</param>
    /// <param name="storageCredentials">The storage credentials to be used when creating the default container.</param>
    /// <param name="nodeFileName">Optional name to be given to the file on the compute node. If this parameter is null or missing
    /// the name on the compute node will be set to the value of localFileToStage stripped of all path information.</param>
    public FileToStage(string localFileToStage, StagingStorageAccount storageCredentials, string nodeFileName = null)
    {
        this.LocalFileToStage = localFileToStage;
        this.StagingStorageAccount = storageCredentials;

        if (string.IsNullOrWhiteSpace(this.LocalFileToStage))
        {
            throw new ArgumentOutOfRangeException(nameof(localFileToStage));
        }

        // map null to base name of local file
        if (string.IsNullOrWhiteSpace(nodeFileName))
        {
            this.NodeFileName = Path.GetFileName(this.LocalFileToStage);
        }
        else
        {
            this.NodeFileName = nodeFileName;
        }
    }

    #endregion // constructors

    #region // IFileStagingProvider

    /// <summary>
    /// See <see cref="IFileStagingProvider.StageFilesAsync"/>.
    /// </summary>
    /// <param name="filesToStage">The instances of IFileStagingProvider to stage.</param>
    /// <param name="fileStagingArtifact">IFileStagingProvider specific staging artifacts including error/progress.</param>
    /// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
    public async System.Threading.Tasks.Task StageFilesAsync(List<IFileStagingProvider> filesToStage, IFileStagingArtifact fileStagingArtifact)
    {
        System.Threading.Tasks.Task taskForStaticStaging = FileToStage.StageFilesInternalAsync(filesToStage, fileStagingArtifact);

        await taskForStaticStaging.ConfigureAwait(continueOnCapturedContext: false);
        return;
    }

    /// <summary>
    /// See <see cref="IFileStagingProvider.CreateStagingArtifact"/>.
    /// </summary>
    /// <returns>An instance of IFileStagingArtifact with default values.</returns>
    public IFileStagingArtifact CreateStagingArtifact()
    {
        return new SequentialFileStagingArtifact() as IFileStagingArtifact;
    }

    /// <summary>
    /// See <see cref="IFileStagingProvider.Validate"/>.
    /// </summary>
    public void Validate()
    {
        if (!File.Exists(this.LocalFileToStage))
        {
            throw new FileNotFoundException(string.Format(BatchFS.ErrorMessages.FileStagingLocalFileNotFound, this.LocalFileToStage));
        }
    }

    #endregion // IFileStagingProvider

    #region internal/private

    // the staging code needs to get the secrets
    internal StagingStorageAccount StagingStorageAccount { get; set; }

    /// <summary>
    /// combine container and blob into an URL.
    /// </summary>
    /// <param name="container">container url</param>
    /// <param name="blob">blob url</param>
    /// <returns>full url</returns>
    private static string ConstructBlobSource(string container, string blob)
    {
        // If the container URL carries a SAS query string, the blob name must
        // be inserted before the '?' so the signature still applies.
        int index = container.IndexOf('?');

        if (index != -1)
        {
            //SAS
            string containerAbsoluteUrl = container.Substring(0, index);
            return containerAbsoluteUrl + "/" + blob + container.Substring(index);
        }
        else
        {
            return container + "/" + blob;
        }
    }

    /// <summary>
    /// create a container if doesn't exist, setting permission with policy, and return associated SAS signature
    /// </summary>
    /// <param name="account">storage account</param>
    /// <param name="key">storage key</param>
    /// <param name="container">container to be created</param>
    /// <param name="policy">name for the policy</param>
    /// <param name="start">start time of the policy</param>
    /// <param name="end">expire time of the policy</param>
    /// <param name="permissions">permission on the name</param>
    /// <param name="blobUri">blob URI</param>
    /// <returns>the SAS for the container, in full URI format.</returns>
    private static string CreateContainerWithPolicySASIfNotExist(string account, string key, Uri blobUri, string container, string policy, DateTime start, DateTime end, SharedAccessBlobPermissions permissions)
    {
        // 1. form the credential and initial client
        CloudStorageAccount storageaccount = new CloudStorageAccount(new WindowsAzure.Storage.Auth.StorageCredentials(account, key),
            blobEndpoint: blobUri,
            queueEndpoint: null,
            tableEndpoint: null,
            fileEndpoint: null);
        CloudBlobClient client = storageaccount.CreateCloudBlobClient();

        // 2. create container if it doesn't exist
        CloudBlobContainer storagecontainer = client.GetContainerReference(container);
        storagecontainer.CreateIfNotExists();

        // 3. validate policy, create/overwrite if doesn't match
        bool policyFound = false;

        SharedAccessBlobPolicy accesspolicy = new SharedAccessBlobPolicy()
        {
            SharedAccessExpiryTime = end,
            SharedAccessStartTime = start,
            Permissions = permissions
        };

        BlobContainerPermissions blobPermissions = storagecontainer.GetPermissions();

        if (blobPermissions.SharedAccessPolicies.ContainsKey(policy))
        {
            SharedAccessBlobPolicy containerpolicy = blobPermissions.SharedAccessPolicies[policy];

            // Reuse the existing policy only if it already grants at least the
            // requested permissions over at least the requested time window.
            if (!(permissions == (containerpolicy.Permissions & permissions) && start <= containerpolicy.SharedAccessStartTime && end >= containerpolicy.SharedAccessExpiryTime))
            {
                blobPermissions.SharedAccessPolicies[policy] = accesspolicy;
            }
            else
            {
                policyFound = true;
            }
        }
        else
        {
            blobPermissions.SharedAccessPolicies.Add(policy, accesspolicy);
        }

        if (!policyFound)
        {
            storagecontainer.SetPermissions(blobPermissions);
        }

        // 4. generate SAS and return
        string container_sas = storagecontainer.GetSharedAccessSignature(new SharedAccessBlobPolicy(), policy);
        string container_url = storagecontainer.Uri.AbsoluteUri + container_sas;

        return container_url;
    }

    // Creates the default blob container (named from the artifact's naming
    // fragment) and stores its SAS on the artifact, using the credentials of
    // the first FileToStage instance found. No-op if there is nothing to stage.
    private static void CreateDefaultBlobContainerAndSASIfNeededReturn(List<IFileStagingProvider> filesToStage, SequentialFileStagingArtifact seqArtifact)
    {
        if ((null != filesToStage) && (filesToStage.Count > 0))
        {
            // construct the name of the new blob container.
            seqArtifact.BlobContainerCreated = FileStagingLinkedSources.ConstructDefaultName(seqArtifact.NamingFragment).ToLowerInvariant();

            // get any instance for the storage credentials
            FileToStage anyRealInstance = FindAtLeastOne(filesToStage);

            if (null != anyRealInstance)
            {
                StagingStorageAccount creds = anyRealInstance.StagingStorageAccount;
                string policyName = Batch.Constants.DefaultConveniencePrefix + Constants.DefaultContainerPolicyFragment;
                DateTime startTime = DateTime.UtcNow;
                DateTime expiredAtTime = startTime + new TimeSpan(24 /* hrs*/, 0, 0);

                seqArtifact.DefaultContainerSAS = CreateContainerWithPolicySASIfNotExist(
                    creds.StorageAccount,
                    creds.StorageAccountKey,
                    creds.BlobUri,
                    seqArtifact.BlobContainerCreated,
                    policyName,
                    startTime,
                    expiredAtTime,
                    SharedAccessBlobPermissions.Read);

                return; // done
            }
        }
    }

    /// <summary>
    /// Since this is the SequentialFileStagingProvider, all files are supposed to be of this type.
    /// Find any one and return the implementation instance.
    /// </summary>
    /// <param name="filesToStage"></param>
    /// <returns>Null means there was not even one.</returns>
    private static FileToStage FindAtLeastOne(List<IFileStagingProvider> filesToStage)
    {
        if ((null != filesToStage) && (filesToStage.Count > 0))
        {
            foreach (IFileStagingProvider curProvider in filesToStage)
            {
                FileToStage thisIsReal = curProvider as FileToStage;

                if (null != thisIsReal)
                {
                    return thisIsReal;
                }
            }
        }

        return null;
    }

    /// <summary>
    /// Starts an asynchronous call to stage the given files.
    /// </summary>
    private static async System.Threading.Tasks.Task StageFilesInternalAsync(List<IFileStagingProvider> filesToStage, IFileStagingArtifact fileStagingArtifact)
    {
        if (null == filesToStage)
        {
            throw new ArgumentNullException(nameof(filesToStage));
        }

        if (null == fileStagingArtifact)
        {
            // nameof keeps the reported parameter name correct (previously the
            // literal was misspelled "filesStagingArtifact").
            throw new ArgumentNullException(nameof(fileStagingArtifact));
        }

        SequentialFileStagingArtifact seqArtifact = fileStagingArtifact as SequentialFileStagingArtifact;

        if (null == seqArtifact)
        {
            // Two-argument ctor: the single-string overload treats its argument
            // as a parameter name, not a message.
            throw new ArgumentOutOfRangeException(nameof(fileStagingArtifact), BatchFS.ErrorMessages.FileStagingIncorrectArtifact);
        }

        // is there any work to do? (also covers null/empty filesToStage)
        if (null == FindAtLeastOne(filesToStage))
        {
            return; // no work to do. none of the files belong to this provider
        }

        // create a Run task to create the blob containers if needed
        System.Threading.Tasks.Task createContainerTask = System.Threading.Tasks.Task.Run(() => { CreateDefaultBlobContainerAndSASIfNeededReturn(filesToStage, seqArtifact); });

        // wait for container to be created
        await createContainerTask.ConfigureAwait(continueOnCapturedContext: false);

        // begin staging the files
        System.Threading.Tasks.Task stageTask = StageFilesAsync(filesToStage, seqArtifact);

        // wait for files to be staged
        await stageTask.ConfigureAwait(continueOnCapturedContext: false);
    }

    /// <summary>
    /// Stages all files in the queue
    /// </summary>
    private async static System.Threading.Tasks.Task StageFilesAsync(List<IFileStagingProvider> filesToStage, SequentialFileStagingArtifact seqArtifacts)
    {
        foreach (IFileStagingProvider currentFile in filesToStage)
        {
            // for "retry" and/or "double calls" we ignore files that have already been staged
            if (null == currentFile.StagedFiles)
            {
                FileToStage fts = currentFile as FileToStage;

                if (null != fts)
                {
                    System.Threading.Tasks.Task stageTask = StageOneFileAsync(fts, seqArtifacts);

                    await stageTask.ConfigureAwait(continueOnCapturedContext: false);
                }
            }
        }
    }

    /// <summary>
    /// Stage a single file.
    /// </summary>
    private async static System.Threading.Tasks.Task StageOneFileAsync(FileToStage stageThisFile, SequentialFileStagingArtifact seqArtifacts)
    {
        StagingStorageAccount storecreds = stageThisFile.StagingStorageAccount;
        string containerName = seqArtifacts.BlobContainerCreated;

        // TODO: this flattens all files to the top of the compute node/task relative file directory. solve the hierarchy problem (virt dirs?)
        string blobName = Path.GetFileName(stageThisFile.LocalFileToStage);

        // Create the storage account with the connection string.
        CloudStorageAccount storageAccount = new CloudStorageAccount(
            new WindowsAzure.Storage.Auth.StorageCredentials(storecreds.StorageAccount, storecreds.StorageAccountKey),
            blobEndpoint: storecreds.BlobUri,
            queueEndpoint: null,
            tableEndpoint: null,
            fileEndpoint: null);
        CloudBlobClient client = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = client.GetContainerReference(containerName);
        ICloudBlob blob = container.GetBlockBlobReference(blobName);
        bool doesBlobExist;

        try
        {
            // fetch attributes so we can compare file lengths
            System.Threading.Tasks.Task fetchTask = blob.FetchAttributesAsync();

            await fetchTask.ConfigureAwait(continueOnCapturedContext: false);

            doesBlobExist = true;
        }
        catch (StorageException scex)
        {
            // check to see if blob does not exist
            if ((int)System.Net.HttpStatusCode.NotFound == scex.RequestInformation.HttpStatusCode)
            {
                doesBlobExist = false;
            }
            else
            {
                throw; // unknown exception, throw to caller
            }
        }

        bool mustUploadBlob = true; // we do not re-upload blobs if they have already been uploaded

        if (doesBlobExist) // if the blob exists, compare
        {
            FileInfo fi = new FileInfo(stageThisFile.LocalFileToStage);

            // since we don't have a hash of the contents... we check length
            if (blob.Properties.Length == fi.Length)
            {
                mustUploadBlob = false;
            }
        }

        if (mustUploadBlob)
        {
            // upload the file
            System.Threading.Tasks.Task uploadTask = blob.UploadFromFileAsync(stageThisFile.LocalFileToStage);

            await uploadTask.ConfigureAwait(continueOnCapturedContext: false);
        }

        // get the SAS for the blob
        string blobSAS = ConstructBlobSource(seqArtifacts.DefaultContainerSAS, blobName);
        string nodeFileName = stageThisFile.NodeFileName;

        // create a new ResourceFile and populate it. This file is now staged!
        stageThisFile.StagedFiles = new ResourceFile[] { new ResourceFile(blobSAS, nodeFileName) };
    }

    #endregion internal/private
}
}
| |
/*
* Copyright (c) 2015, InWorldz Halcyon Developers
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of halcyon nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Reflection;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Communications.Capabilities;
using OpenSim.Region.Framework.Interfaces;
namespace OpenSim.Region.Framework.Scenes
{
/// <summary>
/// Tracks the presences an avatar has on our known neighbor regions
/// </summary>
public class AvatarRemotePresences
{
private static readonly ILog _log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
/// <summary>
/// Collection of the currently established remote presences for this user,
/// keyed by the neighbor region's handle.
/// NOTE(review): some code paths take lock(_remotePresences) before mutating
/// this dictionary while others do not — confirm intended threading model.
/// </summary>
private Dictionary<ulong, AvatarRemotePresence> _remotePresences = new Dictionary<ulong, AvatarRemotePresence>();
/// <summary>
/// The current scene we're managing presences for this user on
/// </summary>
private Scene _scene;
/// <summary>
/// The local presence we're managing
/// </summary>
private ScenePresence _sp;
/// <summary>
/// The last number of regions we were able to see into given the user's draw distance
/// </summary>
private uint _lastDrawDistanceFactor;
/// <summary>
/// Semaphore held during large ops to ensure only one major change happens at a time
/// (SemaphoreSlim(1, 1) acts as an async-compatible mutex)
/// </summary>
private SemaphoreSlim _operationSemaphore = new SemaphoreSlim(1, 1);
/// <summary>
/// Captures the scene and presence being managed, seeds the draw-distance
/// region factor, and subscribes to root/child agent transition events.
/// </summary>
/// <param name="currentRegion">Scene this presence currently occupies.</param>
/// <param name="sp">The local scene presence whose remote presences are managed.</param>
public AvatarRemotePresences(Scene currentRegion, ScenePresence sp)
{
_scene = currentRegion;
_sp = sp;
_lastDrawDistanceFactor = Util.GetRegionUnitsFromDD((uint)sp.DrawDistance);
_scene.EventManager.OnMakeRootAgent += EventManager_OnMakeRootAgent;
_scene.EventManager.OnMakeChildAgent += EventManager_OnMakeChildAgent;
}
/// <summary>
/// Called when a root SP is being constructed and we need to copy presences
/// from a previous region
/// </summary>
/// <param name="currentPresences"></param>
/// <summary>
/// Called when a root SP is being constructed and we need to copy presences
/// from a previous region. Copied presences are recorded as already
/// Established and not far-presences.
/// </summary>
/// <param name="currentPresences">Presences carried over from the previous region.</param>
public void SetInitialPresences(IEnumerable<RemotePresenceInfo> currentPresences)
{
    if (_sp.IsBot) return;

    // Lock for consistency with StopManagingPresences, which also
    // mutates _remotePresences under lock.
    lock (_remotePresences)
    {
        foreach (var pres in currentPresences)
        {
            //don't include our handle in the remote presences. this may be coming
            //from a neighbor region which had us as a remote pres
            if (pres.RegionInfo.RegionHandle != _scene.RegionInfo.RegionHandle)
            {
                _remotePresences[pres.RegionInfo.RegionHandle] =
                    new AvatarRemotePresence
                    {
                        PresenceInfo = pres,
                        IsFarPresence = false,
                        State = RemotePresenceState.Established
                    };
            }
        }
    }
}
/// <summary>
/// Called when this presence is being changed to a root agent
/// </summary>
/// <param name="presence">The presence that was promoted</param>
void EventManager_OnMakeRootAgent(ScenePresence presence)
{
if (presence.IsBot) return;
if (presence == _sp)
{
//subscribe to changes in the surrounding region config
_scene.SurroundingRegions.OnNeighborStateChange += SurroundingRegions_OnNeighborStateChange;
//set up our initial connections to neighbors
//let the task run async in the background
// NOTE(review): fire-and-forget -- the local is only assigned to silence the
// unawaited-task warning; any exception from the resync is unobserved here.
const int CROSSING_RESYNC_DELAY = 500;
var task = this.CalculateAndResyncNeighbors((uint)presence.DrawDistance, CROSSING_RESYNC_DELAY);
}
}
/// <summary>
/// Called when this presence is leaving this region
/// </summary>
/// <param name="presence">The presence that was demoted to a child agent</param>
void EventManager_OnMakeChildAgent(ScenePresence presence)
{
    if (presence.IsBot || presence != _sp)
        return;

    // Our own avatar became a child here; another region now owns
    // presence management, so stop tracking.
    StopManagingPresences();
}
/// <summary>
/// Unsubscribe to region changes and clear out presences, it is no longer up to us to manage this data
/// </summary>
private void StopManagingPresences()
{
_scene.SurroundingRegions.OnNeighborStateChange -= SurroundingRegions_OnNeighborStateChange;
// Cleared under the collection lock; all other readers use the same lock.
lock (_remotePresences)
{
_remotePresences.Clear();
}
}
/// <summary>
/// Called by our SP when it is closing
/// </summary>
internal void OnScenePresenceClosed()
{
if (_sp.IsBot) return;
StopManagingPresences();
// Undo the subscriptions made in the constructor so this object is not
// kept alive by the scene's event manager.
_scene.EventManager.OnMakeRootAgent -= EventManager_OnMakeRootAgent;
_scene.EventManager.OnMakeChildAgent -= EventManager_OnMakeChildAgent;
}
/// <summary>
/// Called by the surrounding region manager when there is a region state change
/// </summary>
/// <param name="neighbor">The neighbor whose state changed</param>
/// <param name="changeType">Whether the neighbor came up or went down</param>
void SurroundingRegions_OnNeighborStateChange(SimpleRegionInfo neighbor, NeighborStateChangeType changeType)
{
switch (changeType)
{
case NeighborStateChangeType.NeighborUp:
// Fire-and-forget: the locals exist only to silence the unawaited-task
// warning, so any exception from the handlers is unobserved here.
var task1 = this.HandleNeighborUp(neighbor);
break;
case NeighborStateChangeType.NeighborDown:
var task2 = this.HandleNeighborDown(neighbor);
break;
}
}
/// <summary>
/// Called when a neighbor has declared that it is down or a ping timed out. We should tell the client
/// to remove these regions from its view and we remove the region presence from our collection
/// </summary>
/// <param name="neighbor">The neighbor that went down (unused: a full resync recalculates everything)</param>
private async Task HandleNeighborDown(SimpleRegionInfo neighbor)
{
// on any neighbor change, we need to recalculate all neighbors because
// visibility rules may have resulted in more than one new neighbor.
await CalculateAndResyncNeighbors((uint)_sp.DrawDistance, 0);
}
/// <summary>
/// Drops the given presence taking the operation semaphore
/// </summary>
/// <param name="neighbor">The presence to drop</param>
/// <param name="onlyIfFar">Only drop if the presence is a far presence</param>
public async Task DropRemotePresenceLocked(SimpleRegionInfo neighbor, bool onlyIfFar)
{
    // Acquire BEFORE entering the try block: if WaitAsync itself faulted inside
    // the try, the finally would Release() a semaphore we never acquired,
    // corrupting its count.
    await _operationSemaphore.WaitAsync();
    try
    {
        await DropRemotePresence(neighbor, onlyIfFar);
    }
    finally
    {
        _operationSemaphore.Release();
    }
}
/// <summary>
/// Drops the given presence
/// </summary>
/// <param name="neighbor">The presence to drop</param>
/// <param name="onlyIfFar">Only drop if the presence is a far presence</param>
private async Task DropRemotePresence(SimpleRegionInfo neighbor, bool onlyIfFar)
{
    AvatarRemotePresence droppedPresence = null;
    TryGetRemotePresenceLocked(neighbor.RegionHandle, (AvatarRemotePresence presence) =>
    {
        if (presence != null)
        {
            // BUGFIX: previously the found presence was captured before this
            // check, so a close was sent to the neighbor even when the
            // presence was intentionally kept (onlyIfFar && !IsFarPresence).
            // Now we only capture the presence when it is actually removed.
            if (onlyIfFar && !presence.IsFarPresence)
            {
                return;
            }
            //You can not send a disablesimulator for the remote region
            //the viewer processes all disablesimulator messages coming from this
            //region as a disablesimulator message FOR this region
            //therefore, we remove the neighbor from our known regions list
            //and ask the sim on the other end to tear down the connection
            _remotePresences.Remove(neighbor.RegionHandle);
            droppedPresence = presence;
        }
    });
    if (droppedPresence != null)
    {
        //send a close to the neighbor
        await _scene.InterregionComms.SendCloseAgentAsync(neighbor, _sp.UUID);
    }
}
/// <summary>
/// Called when a neighbor region comes up. Performs a full neighbor resync.
/// </summary>
/// <param name="neighbor">The neighbor that came up (unused: a full resync recalculates everything)</param>
private async Task HandleNeighborUp(SimpleRegionInfo neighbor)
{
// on any neighbor change, we need to recalculate all neighbors because
// visibility rules may have resulted in more than one new neighbor.
await CalculateAndResyncNeighbors((uint)_sp.DrawDistance, 0);
}
/// <summary>
/// Does this user have an established presence on the given region?
/// </summary>
/// <param name="regionHandle">Handle of the region to check</param>
/// <returns>True when a presence exists and is fully established</returns>
public bool HasPresenceOnRegion(ulong regionHandle)
{
    bool established = false;

    TryGetRemotePresenceLocked(regionHandle, presence =>
    {
        established = presence != null && presence.State == RemotePresenceState.Established;
    });

    return established;
}
/// <summary>
/// Looks up the remote presence for the given region and invokes the callback
/// while the presence collection lock is held. The callback receives null when
/// no presence exists for the handle.
/// </summary>
/// <param name="regionHandle">Handle of the region to look up</param>
/// <param name="callback">Invoked under the lock with the presence or null</param>
public void TryGetRemotePresenceLocked(ulong regionHandle, Action<AvatarRemotePresence> callback)
{
lock (_remotePresences)
{
AvatarRemotePresence presence;
_remotePresences.TryGetValue(regionHandle, out presence);
callback(presence);
}
}
/// <summary>
/// Attempts to establish a presence on the given region. Does this while waiting for other major tasks to complete
/// </summary>
/// <param name="region">The region we want to establish a child presence on</param>
/// <param name="forceReestablish">Whether to force a reestablishment even if we already think we have a remote presence</param>
/// <param name="isFarPresence">Is this presence intentionally far away? Eg. The beginning of a remote teleport</param>
/// <returns>The establishment result plus an error message on failure</returns>
public async Task<Tuple<EstablishPresenceResult, string>> EstablishPresenceOnRegionLocked(SimpleRegionInfo region, bool forceReestablish, bool isFarPresence)
{
    // Acquire BEFORE entering the try block: if WaitAsync itself faulted inside
    // the try, the finally would Release() a semaphore we never acquired,
    // corrupting its count.
    await _operationSemaphore.WaitAsync();
    try
    {
        return await this.EstablishPresenceOnRegion(region, forceReestablish, isFarPresence);
    }
    finally
    {
        _operationSemaphore.Release();
    }
}
/// <summary>
/// Attempts to establish a presence on the given region
/// </summary>
/// <param name="region">The region we want to establish a child presence on</param>
/// <param name="forceReestablish">Whether to force a reestablishment even if we already think we have a remote presence</param>
/// <param name="isFarPresence">Is this presence intentionally far away? Eg. The beginning of a remote teleport</param>
/// <returns>The establishment result plus an error message on failure</returns>
private async Task<Tuple<EstablishPresenceResult, string>> EstablishPresenceOnRegion(SimpleRegionInfo region, bool forceReestablish, bool isFarPresence)
{
Task<Tuple<EstablishPresenceResult, string>> establishTask = null;
bool presenceExisted = false;
//check if we already have or are waiting on an establish
TryGetRemotePresenceLocked(region.RegionHandle, (AvatarRemotePresence presence) =>
{
if (presence != null && !forceReestablish)
{
//we have a presence
//if it is established just return
if (presence.State == RemotePresenceState.Established)
{
presenceExisted = true;
}
else
{
//if not, we can await the existing callback
establishTask = presence.EstablishTask;
}
}
else
{
//we have no presence and we're not waiting for a callback
//begin an async establish and await a callback
presence = new AvatarRemotePresence
{
PresenceInfo = new RemotePresenceInfo { RegionInfo = region, CapsPath = CapsUtil.GetRandomCapsObjectPath() },
IsFarPresence = isFarPresence,
State = RemotePresenceState.Establishing
};
if (_remotePresences.ContainsKey(region.RegionHandle))
_remotePresences.Remove(region.RegionHandle);
_remotePresences.Add(region.RegionHandle, presence);
// EstablishTask is assigned inside this same locked callback, so any
// concurrent caller that observes state Establishing also sees the task.
establishTask = DoEstablishPresenceOnRegion(region, presence);
presence.EstablishTask = establishTask;
}
});
//nothing to do, we're already established
if (presenceExisted) return Tuple.Create(EstablishPresenceResult.Success, String.Empty);
// Every lambda path that leaves presenceExisted false assigns establishTask,
// so this await is non-null here.
return await establishTask;
}
/// <summary>
/// Performs the actual child-agent establishment against the remote region and
/// moves the tracked presence through Establishing -> ViewerWait -> Established.
/// </summary>
/// <param name="region">The remote region to establish on</param>
/// <param name="initPresence">The presence record created by the caller (state Establishing)</param>
/// <returns>The final result plus a human-readable error message on failure</returns>
private async Task<Tuple<EstablishPresenceResult, string>> DoEstablishPresenceOnRegion(SimpleRegionInfo region, AvatarRemotePresence initPresence)
{
Tuple<EstablishPresenceResult, string> establishResult;
try
{
establishResult = await this.LaunchNewEstablishChildTask(initPresence, region);
}
catch (Exception e)
{
// Any exception from the establish call is reported as a result tuple
// rather than propagated; callers only consume tuples.
establishResult = new Tuple<EstablishPresenceResult, string>(EstablishPresenceResult.ErrorInformingRegion, e.Message);
}
bool failure = false;
TryGetRemotePresenceLocked(region.RegionHandle, (AvatarRemotePresence presence) =>
{
//success, change the status of the task
if (presence != null)
{
if (establishResult.Item1 == EstablishPresenceResult.Success)
{
// The region accepted the child agent; now wait for the viewer to connect.
presence.State = RemotePresenceState.ViewerWait;
}
else
{
//failure contacting other region
_remotePresences.Remove(region.RegionHandle);
failure = true;
}
}
else
{
failure = true;
//hmm, someone stole this presence from us
_log.ErrorFormat("[REMOTEPRESENCE]: Unable to update child presence established to {0} for {1}. Child presence missing.", establishResult, _sp.Name);
establishResult = Tuple.Create(EstablishPresenceResult.ConnectionAborted, "Connection was aborted");
}
});
if (failure)
{
return establishResult;
}
//now we need to call out to the remote region to wait for the SP to be set up
bool waitSuccess = await WaitForScenePresenceEstablished(region);
Tuple<EstablishPresenceResult, string> result = null;
TryGetRemotePresenceLocked(region.RegionHandle, (AvatarRemotePresence presence) =>
{
//success, change the status of the task
if (presence != null)
{
if (waitSuccess)
{
presence.State = RemotePresenceState.Established;
result = Tuple.Create(EstablishPresenceResult.Success, String.Empty);
}
else
{
//failure waiting for SP
_remotePresences.Remove(region.RegionHandle);
result = Tuple.Create(EstablishPresenceResult.ClientWaitTimeout, "Destination region never received a connection from the viewer");
}
}
else
{
//hmm, someone stole this presence from us
_log.ErrorFormat("[REMOTEPRESENCE]: Unable to update child presence established to {0} for {1}. Child presence missing.", establishResult, _sp.Name);
result = Tuple.Create(EstablishPresenceResult.ConnectionAborted, "Connection was aborted");
}
});
return result;
}
/// <summary>
/// Calls out to the destination region and blocks (async) until it reports that
/// the viewer has connected its child scene presence (HTTP 200), or the wait fails.
/// </summary>
/// <param name="region">The region to query</param>
/// <returns>True when the remote SP was established, false on timeout or error</returns>
private async Task<bool> WaitForScenePresenceEstablished(SimpleRegionInfo region)
{
//this is the HTTP timeout, however the actual wait timeout on the receiving side is 10 seconds
//so that timeout should be triggered first
const int HTTP_SP_WAIT_TIMEOUT = 15000;
var req = (HttpWebRequest)HttpWebRequest.Create(region.InsecurePublicHTTPServerURI + String.Format("/agent2/{0}/{1}", _sp.UUID, region.RegionHandle));
req.Headers["authorization"] = Util.GenerateHttpAuthorization(_scene.GridSendKey);
req.Timeout = HTTP_SP_WAIT_TIMEOUT;
req.ReadWriteTimeout = HTTP_SP_WAIT_TIMEOUT;
req.Method = "GET";
try
{
using (WebResponse response = await req.GetResponseAsync(HTTP_SP_WAIT_TIMEOUT))
{
//we do nothing besides dispose on success. a 200 response means all is well
}
return true;
}
catch (Exception e)
{
// Timeouts and non-2xx responses both surface here; log and report failure.
_log.ErrorFormat("[REMOTEPRESENCE]: Waiting for viewer connection from {0} to {1} failed: {2}", _sp.Name, region.RegionHandle, e);
return false;
}
}
/// <summary>
/// Builds the child agent circuit data for this avatar and asks the grid
/// service to establish a child connection on the given region.
/// </summary>
/// <param name="presence">The tracked presence whose caps path seeds the circuit</param>
/// <param name="region">The destination region</param>
/// <returns>The async establish operation started by the grid service</returns>
private Task<Tuple<EstablishPresenceResult, string>> LaunchNewEstablishChildTask(AvatarRemotePresence presence, SimpleRegionInfo region)
{
AgentCircuitData agent = _sp.ControllingClient.RequestClientInfo();
agent.BaseFolder = UUID.Zero;
agent.InventoryFolder = UUID.Zero;
agent.startpos = Scene.DEFAULT_CHILD_AGENT_POS;
agent.child = true;
agent.CapsPath = presence.PresenceInfo.CapsPath;
return _scene.SceneGridService.EstablishChildConnectionToRegionAsync(_sp, agent, region);
}
/// <summary>
/// The user's draw distance has changed, we may need to drop or add some regions
/// </summary>
/// <param name="newDrawDistance">The new draw distance for the user</param>
public async Task HandleDrawDistanceChanged(uint newDrawDistance)
{
    if (_sp.IsBot) return;

    uint newFactor = Util.GetRegionUnitsFromDD(newDrawDistance);
    if (newFactor == _lastDrawDistanceFactor)
        return; // visible-region radius unchanged, nothing to do

    _lastDrawDistanceFactor = newFactor;
    await CalculateAndResyncNeighbors(newDrawDistance, 0);
}
/// <summary>
/// Resyncs the user with our view of the neighbors
/// </summary>
/// <param name="newDrawDistance">The new DD for the user</param>
/// <param name="resyncDelay">Delay before executing the resync. We delay on a region crossing because the viewer locks up sometimes when freeing memory</param>
/// <returns></returns>
private async Task CalculateAndResyncNeighbors(uint newDrawDistance, int resyncDelay)
{
uint xmin, xmax, ymin, ymax;
// NOTE(review): the rectangle outputs are never read below -- the visible set
// comes from GetKnownNeighborsWithinClientDD instead. Candidate for removal
// if Util.GetDrawDistanceBasedRegionRectangle is side-effect free; confirm.
Util.GetDrawDistanceBasedRegionRectangle((uint)newDrawDistance, _scene.RegionInfo.RegionLocX,
_scene.RegionInfo.RegionLocY, out xmin, out xmax, out ymin, out ymax);
//get our current neighbor list
List<SimpleRegionInfo> knownNeighborsList = _scene.SurroundingRegions.GetKnownNeighborsWithinClientDD(newDrawDistance);
Dictionary<ulong, SimpleRegionInfo> knownNeighborsDict = new Dictionary<ulong, SimpleRegionInfo>();
foreach (var neighbor in knownNeighborsList)
{
knownNeighborsDict.Add(neighbor.RegionHandle, neighbor);
}
HashSet<ulong> knownNeighbors = new HashSet<ulong>(knownNeighborsList.Select(x => x.RegionHandle));
List<ulong> deadRegions;
List<ulong> newRegions;
lock (_remotePresences)
{
//check the list of what we have vs what we should have
HashSet<ulong> usersRegions = new HashSet<ulong>();
//add all regions from the presence
foreach (var presence in _remotePresences.Values)
{
// Make sure every region we currently hold a presence on is resolvable
// by handle below, even if it dropped out of the known-neighbor list.
knownNeighborsDict[presence.PresenceInfo.RegionInfo.RegionHandle] = presence.PresenceInfo.RegionInfo;
//dont put far regions into this update, they shouldnt be dropped by DD changes
if (!presence.IsFarPresence)
{
usersRegions.Add(presence.PresenceInfo.RegionInfo.RegionHandle);
}
}
// regions that we have but that we shouldnt have anymore
deadRegions = new List<ulong>(usersRegions.Except(knownNeighbors));
// regions that we don't have that we need to add
newRegions = new List<ulong>(knownNeighbors.Except(usersRegions));
}
try
{
// Serialize against other major presence operations (establish/drop).
await _operationSemaphore.WaitAsync();
if (resyncDelay > 0) await Task.Delay(resyncDelay);
await this.ResyncRegions(knownNeighborsDict, deadRegions, newRegions);
}
finally
{
_operationSemaphore.Release();
}
}
/// <summary>
/// Resynchronizes our remote presences with the given lists
/// </summary>
/// <param name="knownNeighborsDict">All known regions from both lists</param>
/// <param name="deadRegions">Regions that we should not longer have presences on</param>
/// <param name="newRegions">Regions that we should have presences on but do not</param>
/// <returns></returns>
private async Task ResyncRegions(Dictionary<ulong, SimpleRegionInfo> knownNeighborsDict, List<ulong> deadRegions, List<ulong> newRegions)
{
List<Task<Tuple<EstablishPresenceResult, string>>> connectionTasks = new List<Task<Tuple<EstablishPresenceResult, string>>>(newRegions.Count);
List<Task> disconnectTasks = new List<Task>();
// Start every drop and establish first so they run concurrently...
foreach (var region in deadRegions)
{
disconnectTasks.Add(this.DropRemotePresence(knownNeighborsDict[region], false));
}
foreach (var region in newRegions)
{
connectionTasks.Add(this.EstablishPresenceOnRegion(knownNeighborsDict[region], false, false));
}
// ...then await them all. Establish failures are reported via result
// tuples rather than exceptions (see DoEstablishPresenceOnRegion).
await Task.WhenAll(connectionTasks);
await Task.WhenAll(disconnectTasks);
if (connectionTasks.Count > 0)
if (_sp != null)
_sp.SendChildAgentUpdate(); // send the other regions the intial avatar info (pos, draw distance) for culling
}
/// <summary>
/// Returns all remote presences we know about, including presences still establishing
/// </summary>
/// <returns>A snapshot copy of the current presence collection</returns>
public List<AvatarRemotePresence> GetRemotePresenceList()
{
    lock (_remotePresences)
    {
        // Snapshot under the lock so callers can iterate freely afterwards.
        return _remotePresences.Values.ToList();
    }
}
/// <summary>
/// Returns only established presences
/// </summary>
/// <returns>A snapshot of presences whose state is Established</returns>
public List<AvatarRemotePresence> GetEstablishedRemotePresenceList()
{
    lock (_remotePresences)
    {
        // Materialize inside the lock so the filter sees a consistent view.
        return _remotePresences.Values
            .Where(p => p.State == RemotePresenceState.Established)
            .ToList();
    }
}
/// <summary>
/// Returns whether or not there are any connections currently being established
/// </summary>
/// <returns>True when a major operation is in flight or any presence is not yet established</returns>
public bool HasConnectionsEstablishing()
{
    // A held operation semaphore means an establish/drop/resync is running right now.
    if (_operationSemaphore.CurrentCount == 0)
        return true;

    lock (_remotePresences)
    {
        return _remotePresences.Values.Any(p => p.State != RemotePresenceState.Established);
    }
}
/// <summary>
/// Requests that all remote regions we have children on terminate the child agents
/// </summary>
internal void TerminateAllNeighbors()
{
    if (_sp.IsBot) return;

    foreach (AvatarRemotePresence remote in this.GetRemotePresenceList())
    {
        // Far presences are excluded here, matching the IsFarPresence
        // handling elsewhere in this class.
        if (!remote.IsFarPresence)
        {
            _scene.InterregionComms.SendCloseAgent(remote.PresenceInfo.RegionInfo.RegionHandle, _sp.UUID);
        }
    }
}
/// <summary>
/// Returns the number of remote presences we have on other regions
/// </summary>
/// <returns>The current presence count, read under the collection lock</returns>
public int GetRemotePresenceCount()
{
lock (_remotePresences)
{
return _remotePresences.Count;
}
}
}
}
| |
namespace Nancy.Tests.Functional.Tests
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Nancy.Cookies;
using Nancy.ErrorHandling;
using Nancy.IO;
using Nancy.Responses.Negotiation;
using Nancy.Testing;
using Nancy.Tests.Functional.Modules;
using Nancy.Tests.xUnitExtensions;
using Xunit;
public class ContentNegotiationFixture
{
[Fact]
public async Task Should_return_int_value_from_get_route_as_response_with_status_code_set_to_value()
{
    // Given a route returning a raw integer
    var browser = new Browser(cfg =>
    {
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/int", (args, mod) => 200)));
    });

    // When
    var response = await browser.Get("/int");

    // Then the integer is coerced into the equivalent status code
    Assert.Equal((HttpStatusCode)200, response.StatusCode);
}
[Fact]
public async Task Should_return_string_value_from_get_route_as_response_with_content_set_as_value()
{
    // Given a route returning a raw string
    var browser = new Browser(cfg =>
    {
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/string", (args, mod) => "hello")));
    });

    // When
    var response = await browser.Get("/string");

    // Then the string becomes the response body
    Assert.Equal("hello", response.Body.AsString());
}
[Fact]
public async Task Should_return_httpstatuscode_value_from_get_route_as_response_with_content_set_as_value()
{
    // Given a route returning an HttpStatusCode value
    var browser = new Browser(cfg =>
    {
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/httpstatuscode", (args, mod) => HttpStatusCode.Accepted)));
    });

    // When
    var response = await browser.Get("/httpstatuscode");

    // Then
    Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
}
[Fact]
public async Task Should_return_action_value_as_response_with_content_set_as_value()
{
    // Given a route returning an Action<Stream> that writes the body directly
    Action<Stream> bodyWriter = stream =>
    {
        using (var writer = new StreamWriter(new UnclosableStreamWrapper(stream)))
        {
            writer.Write("Hiya Nancy!");
        }
    };

    var browser = new Browser(cfg =>
    {
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/action", (args, mod) => bodyWriter)));
    });

    // When
    var response = await browser.Get("/action");

    // Then
    Assert.Equal("Hiya Nancy!", response.Body.AsString());
}
[Fact]
public async Task Should_add_negotiated_headers_to_response()
{
    // Given a negotiator configured with a custom header
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/headers", (args, mod) =>
        {
            var negotiator = new Negotiator(new NancyContext());
            negotiator.WithHeader("foo", "bar");
            return negotiator;
        })));
    });

    // When
    var response = await browser.Get("/headers");

    // Then the header survives negotiation
    Assert.True(response.Headers.ContainsKey("foo"));
    Assert.Equal("bar", response.Headers["foo"]);
}
[Fact]
public async Task Should_set_reason_phrase_on_response()
{
    // Given a negotiator with a custom reason phrase and a 404 status
    var browser = new Browser(cfg =>
    {
        cfg.StatusCodeHandler<DefaultStatusCodeHandler>();
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/customPhrase", (args, mod) =>
        {
            var negotiator = new Negotiator(new NancyContext());
            negotiator.WithReasonPhrase("The test is passing!").WithStatusCode(404);
            return negotiator;
        })));
    });

    // When
    var response = await browser.Get("/customPhrase");

    // Then
    Assert.Equal("The test is passing!", response.ReasonPhrase);
}
[Fact]
public async Task Should_add_negotiated_content_headers_to_response()
{
    // Given a negotiator configured with an explicit content type
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/headers", (args, mod) =>
        {
            var negotiator = new Negotiator(new NancyContext());
            negotiator.WithContentType("text/xml");
            return negotiator;
        })));
    });

    // When
    var response = await browser.Get("/headers");

    // Then
    Assert.Equal("text/xml", response.Context.Response.ContentType);
}
[Fact]
public async Task Should_apply_default_accept_when_no_accept_header_sent()
{
    // Given a plain negotiator and no Accept header on the request
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/", (args, mod) => new Negotiator(new NancyContext()))));
    });

    // When
    var response = await browser.Get("/");

    // Then negotiation succeeds via the default accept
    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
}
[Fact]
public async Task Should_boost_html_priority_if_set_to_the_same_priority_as_others()
{
    // Given two allowed media ranges with equal client-side quality
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/", (args, mod) =>
        {
            var negotiator = new Negotiator(new NancyContext());
            negotiator.WithAllowedMediaRange("application/xml");
            negotiator.WithAllowedMediaRange("text/html");
            return negotiator;
        })));
    });

    // When both ranges are requested at q=0.9 from a browser user agent
    var response = await browser.Get("/", req =>
    {
        req.Header("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4");
        req.Accept("application/xml", 0.9m);
        req.Accept("text/html", 0.9m);
    });

    // Then text/html wins the tie
    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
    Assert.True(response.Body.AsString().Contains("text/html"), "Media type mismatch");
}
[Fact]
public async Task Should_override_with_extension()
{
    // Given a plain negotiator on /test
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/test", (args, mod) => new Negotiator(new NancyContext()))));
    });

    // When the URL carries a .foo extension despite the Accept headers
    var response = await browser.Get("/test.foo", req =>
    {
        req.Header("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4");
        req.Accept("application/xml", 0.9m);
        req.Accept("text/html", 0.9m);
    });

    // Then the extension overrides the Accept headers
    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
    Assert.True(response.Body.AsString().Contains("foo/bar"), "Media type mismatch");
}
[Fact]
public async Task Should_response_with_notacceptable_when_route_does_not_allow_any_of_the_accepted_formats()
{
    // Given a route that only allows application/xml
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/test", CreateNegotiatedResponse(neg => neg.WithAllowedMediaRange("application/xml")))));
    });

    // When the client only accepts foo/bar
    var response = await browser.Get("/test", req => req.Accept("foo/bar", 0.9m));

    // Then
    Assert.Equal(HttpStatusCode.NotAcceptable, response.StatusCode);
}
[Fact]
public async Task Should_respond_with_notacceptable_when_no_processor_can_process_media_range()
{
    // Given only a processor that cannot handle the requested range
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<NullProcessor>();
        cfg.Module<NegotiationModule>();
    });

    // When
    var response = await browser.Get("/invalid-view-name", req => req.Accept("foo/bar"));

    // Then
    Assert.Equal(HttpStatusCode.NotAcceptable, response.StatusCode);
}
[Fact]
public async Task Should_return_that_contains_default_model_when_no_media_range_specific_model_was_declared()
{
    // Given a default model and no media-range-specific model
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<ModelProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/", CreateNegotiatedResponse(neg =>
            {
                neg.WithModel("the model");
                neg.WithAllowedMediaRange("test/test");
            }))));
    });

    // When
    var response = await browser.Get("/", req => req.Accept("test/test", 0.9m));

    // Then the default model is served
    Assert.Equal("the model", response.Body.AsString());
}
[Fact]
public async Task Should_return_media_range_specific_model_when_declared()
{
    // Given both a default model and one bound to the requested media range
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<ModelProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/", CreateNegotiatedResponse(neg =>
            {
                neg.WithModel("the model");
                neg.WithAllowedMediaRange("test/test");
                neg.WithMediaRangeModel("test/test", "media model");
            }))));
    });

    // When
    var response = await browser.Get("/", req => req.Accept("test/test", 0.9m));

    // Then the range-specific model wins
    Assert.Equal("media model", response.Body.AsString());
}
[Fact]
public async Task Should_add_vary_accept_header()
{
    // Given several processors so negotiation actually happens
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessors(typeof(XmlProcessor), typeof(JsonProcessor), typeof(TestProcessor));
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/", CreateNegotiatedResponse())));
    });

    // When
    var response = await browser.Get("/", req => req.Header("Accept", "application/json"));

    // Then a Vary: Accept header is emitted
    Assert.True(response.Headers.ContainsKey("Vary"));
    Assert.True(response.Headers["Vary"].Contains("Accept"));
}
[Fact]
public async Task Should_add_link_header_for_matching_response_processors()
{
    // Given several processors with extension mappings
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessors(typeof(XmlProcessor), typeof(JsonProcessor), typeof(TestProcessor));
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/", CreateNegotiatedResponse())));
    });

    // When
    var response = await browser.Get("/");

    // Then each processor contributes an alternate link
    Assert.True(response.Headers["Link"].Contains(@"</.foo>; rel=""alternate""; type=""foo/bar"""));
    Assert.True(response.Headers["Link"].Contains(@"</.json>; rel=""alternate""; type=""application/json"""));
    Assert.True(response.Headers["Link"].Contains(@"</.xml>; rel=""alternate""; type=""application/xml"""));
}
[Fact]
public async Task Should_preserve_existing_link_header()
{
    // Given a processor that already sets a Link header of its own
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessors(typeof(XmlProcessor), typeof(JsonLdProcessor));
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/", CreateNegotiatedResponse())));
    });

    // When
    var response = await browser.Get("/");

    // Then the pre-existing link is kept alongside the negotiated one
    Assert.True(response.Headers["Link"].Contains(@"</context.jsonld>; rel=""http://www.w3.org/ns/json-ld#context""; type=""application/ld+json"""));
    Assert.True(response.Headers["Link"].Contains(@"</.xml>; rel=""alternate""; type=""application/xml"""));
}
[Fact]
public async Task Should_set_negotiated_status_code_to_response_when_set_as_integer()
{
    // Given a negotiator whose status code is set with a raw integer
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/", CreateNegotiatedResponse(neg => neg.WithStatusCode(507)))));
    });

    // When
    var response = await browser.Get("/", req => req.Accept("test/test", 0.9m));

    // Then 507 maps onto the enum value
    Assert.Equal(HttpStatusCode.InsufficientStorage, response.StatusCode);
}
[Fact]
public async Task Should_set_negotiated_status_code_to_response_when_set_as_httpstatuscode()
{
    // Given a negotiator whose status code is set with the enum overload
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/", CreateNegotiatedResponse(neg => neg.WithStatusCode(HttpStatusCode.InsufficientStorage)))));
    });

    // When
    var response = await browser.Get("/", req => req.Accept("test/test", 0.9m));

    // Then
    Assert.Equal(HttpStatusCode.InsufficientStorage, response.StatusCode);
}
[Fact]
public async Task Should_set_negotiated_cookies_to_response()
{
    // Given a negotiator configured with a cookie instance
    var negotiatedCookie = new NancyCookie("test", "test");
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<TestProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/", CreateNegotiatedResponse(neg => neg.WithCookie(negotiatedCookie)))));
    });

    // When
    var response = await browser.Get("/", req => req.Accept("test/test", 0.9m));

    // Then the very same cookie instance ends up on the response
    Assert.Same(negotiatedCookie, response.Cookies.First());
}
[Fact]
public async Task Should_throw_exception_if_view_location_fails()
{
    // Given a negotiator pointing at a view that does not exist
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessor<ViewProcessor>();
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/FakeModuleInvalidViewName", CreateNegotiatedResponse(neg => neg.WithView("blahblahblah")))));
    });

    // When
    var result = await RecordAsync.Exception(() =>
        browser.Get("/FakeModuleInvalidViewName", req => req.Accept("text/html", 1.0m)));

    // Then an exception about the missing view surfaces
    Assert.NotNull(result);
    Assert.Contains("Unable to locate view", result.ToString());
}
[Fact]
public async Task Should_use_next_processor_if_processor_returns_null()
{
    // Given a null-returning processor registered ahead of a working one
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessors(typeof(NullProcessor), typeof(TestProcessor));
        cfg.Module(new ConfigurableNancyModule(m =>
            m.Get("/test", CreateNegotiatedResponse(neg => neg.WithAllowedMediaRange("application/xml")))));
    });

    // When
    var response = await browser.Get("/test", req => req.Accept("application/xml", 0.9m));

    // Then the second processor produced the body
    var bodyResult = response.Body.AsString();
    Assert.True(bodyResult.StartsWith("application/xml"), string.Format("Body should have started with 'application/xml' but was actually '{0}'", bodyResult));
}
[Theory]
[InlineData("application/xhtml+xml; profile=\"http://www.wapforum. org/xhtml\"")]
[InlineData("application/xhtml+xml; q=1; profile=\"http://www.wapforum. org/xhtml\"")]
public async Task Should_not_throw_exception_because_of_uncommon_accept_header(string header)
{
    // Given the standard processor lineup
    var browser = new Browser(cfg =>
    {
        cfg.ResponseProcessors(typeof(XmlProcessor), typeof(JsonProcessor), typeof(TestProcessor));
        cfg.Module(new ConfigurableNancyModule(m => m.Get("/", CreateNegotiatedResponse())));
    });

    // When an unusual (but legal) Accept header is sent
    var response = await browser.Get("/", req => req.Header("Accept", header));

    // Then the request still succeeds
    Assert.Equal((HttpStatusCode)200, response.StatusCode);
}
[Fact]
public async Task Should_not_try_and_serve_view_with_invalid_name()
{
    // Given
    var browser = new Browser(cfg => cfg.Module<NegotiationModule>());

    // When the route names a view that cannot be located
    var result = await RecordAsync.Exception(() => browser.Get("/invalid-view-name"));

    // Then a view-location error is raised rather than a bogus view served
    Assert.True(result.ToString().Contains("Unable to locate view"));
}
[Fact]
public async Task Should_return_response_negotiated_based_on_media_range()
{
    // Given
    var browser = new Browser(cfg => cfg.Module<NegotiationModule>());

    // When negotiating with an html media range
    var result = await browser.Get("/negotiate", req => req.Accept("text/html"));

    // Then
    Assert.Equal(HttpStatusCode.SeeOther, result.StatusCode);
}
[Fact]
public async Task Can_negotiate_in_status_code_handler()
{
    // Given a status code handler that itself performs negotiation
    var browser = new Browser(cfg => cfg.StatusCodeHandler<NotFoundStatusCodeHandler>());

    // When
    var result = await browser.Get("/not-found", req => req.Accept("application/json"));
    var response = result.Body.DeserializeJson<NotFoundStatusCodeHandlerResult>();

    // Then the handler returned a negotiated JSON payload
    Assert.Equal(HttpStatusCode.OK, result.StatusCode);
    Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
    Assert.Equal("Not Found.", response.Message);
}
[Fact]
public async Task Can_negotiate_in_error_pipeline()
{
    // Given a module whose route throws, pushing us into the error pipeline
    var browser = new Browser(cfg => cfg.Module<ThrowingModule>());

    // When the same error is requested as JSON and as XML
    var jsonResult = await browser.Get("/", req => req.Accept("application/json"));
    var xmlResult = await browser.Get("/", req => req.Accept("application/xml"));
    var jsonResponse = jsonResult.Body.DeserializeJson<ThrowingModule.Error>();
    var xmlResponse = xmlResult.Body.DeserializeXml<ThrowingModule.Error>();

    // Then both formats carry the same error message
    Assert.Equal("Oh noes!", jsonResponse.Message);
    Assert.Equal("Oh noes!", xmlResponse.Message);
}
[Fact]
public async Task Should_return_negotiated_not_found_response_when_accept_header_is_html()
{
    // Given
    const string contentType = "text/html";
    var app = new Browser(with => with.StatusCodeHandler<DefaultStatusCodeHandler>());

    // When
    var result = await app.Get("/not-found", with => with.Accept(contentType));

    // Then: still a 404, but rendered with the requested content type.
    Assert.Equal(HttpStatusCode.NotFound, result.StatusCode);
    Assert.Equal(contentType, result.ContentType);
}
[Fact]
public async Task Should_return_negotiated_not_found_response_when_accept_header_is_json()
{
    // Given
    const string contentType = "application/json";
    var app = new Browser(with => with.StatusCodeHandler<DefaultStatusCodeHandler>());

    // When
    var result = await app.Get("/not-found", with => with.Accept(contentType));

    // Then: JSON responses additionally carry an explicit charset.
    Assert.Equal(HttpStatusCode.NotFound, result.StatusCode);
    Assert.Equal(string.Format("{0}; charset=utf-8", contentType), result.ContentType);
}
[Fact]
public async Task Should_return_negotiated_not_found_response_when_accept_header_is_xml()
{
    // Given
    const string contentType = "application/xml";
    var app = new Browser(with => with.StatusCodeHandler<DefaultStatusCodeHandler>());

    // When
    var result = await app.Get("/not-found", with => with.Accept(contentType));

    // Then: still a 404, rendered as XML.
    Assert.Equal(HttpStatusCode.NotFound, result.StatusCode);
    Assert.Equal(contentType, result.ContentType);
}
private static Func<dynamic, NancyModule, Negotiator> CreateNegotiatedResponse(Action<Negotiator> action = null)
{
    // Builds a route handler that returns a Negotiator for the current
    // request context, optionally customised by the supplied action.
    return (parameters, module) =>
    {
        var negotiator = new Negotiator(module.Context);

        if (action != null)
        {
            action(negotiator);
        }

        return negotiator;
    };
}
/// <summary>
/// Test response processor that accepts any content type and any model,
/// echoing the requested media range and the model's type into the
/// response body so tests can assert against them.
/// </summary>
public class TestProcessor : IResponseProcessor
{
    private const string ResponseTemplate = "{0}\n{1}";

    public IEnumerable<Tuple<string, MediaRange>> ExtensionMappings
    {
        get { yield return new Tuple<string, MediaRange>("foo", "foo/bar"); }
    }

    public ProcessorMatch CanProcess(MediaRange requestedMediaRange, dynamic model, NancyContext context)
    {
        // Neither votes for nor against: negotiation may still pick us.
        var match = new ProcessorMatch();
        match.RequestedContentTypeResult = MatchResult.DontCare;
        match.ModelResult = MatchResult.DontCare;
        return match;
    }

    public Response Process(MediaRange requestedMediaRange, dynamic model, NancyContext context)
    {
        var modelDescription = model == null ? "None" : model.GetType();
        return string.Format(ResponseTemplate, requestedMediaRange, modelDescription);
    }
}
/// <summary>
/// Processor that claims an exact match for everything but then produces
/// no response at all — used to exercise null-response handling.
/// </summary>
public class NullProcessor : IResponseProcessor
{
    // No file-extension mappings.
    public IEnumerable<Tuple<string, MediaRange>> ExtensionMappings
    {
        get { yield break; }
    }

    public ProcessorMatch CanProcess(MediaRange requestedMediaRange, dynamic model, NancyContext context)
    {
        // Claim a perfect match so this processor always wins selection.
        var match = new ProcessorMatch();
        match.RequestedContentTypeResult = MatchResult.ExactMatch;
        match.ModelResult = MatchResult.ExactMatch;
        return match;
    }

    public Response Process(MediaRange requestedMediaRange, dynamic model, NancyContext context)
    {
        // Deliberately yields nothing.
        return null;
    }
}
/// <summary>
/// Processor that serves the model itself (cast to string) as the
/// response body.
/// </summary>
public class ModelProcessor : IResponseProcessor
{
    public IEnumerable<Tuple<string, MediaRange>> ExtensionMappings
    {
        get { yield return new Tuple<string, MediaRange>("foo", "foo/bar"); }
    }

    public ProcessorMatch CanProcess(MediaRange requestedMediaRange, dynamic model, NancyContext context)
    {
        // Indifferent on both axes; selection falls through to us.
        var match = new ProcessorMatch();
        match.RequestedContentTypeResult = MatchResult.DontCare;
        match.ModelResult = MatchResult.DontCare;
        return match;
    }

    public Response Process(MediaRange requestedMediaRange, dynamic model, NancyContext context)
    {
        // Relies on the implicit string -> Response conversion.
        var body = (string)model;
        return body;
    }
}
/// <summary>
/// Module used by the negotiation tests: one route whose model cannot be
/// matched to a view, and one route that negotiates per media range.
/// </summary>
public class NegotiationModule : NancyModule
{
    public NegotiationModule()
    {
        // The iterator model has no corresponding view, so view
        // resolution for this route is expected to fail.
        Get("/invalid-view-name", args => this.GetModel());

        // text/html redirects; application/json gets an anonymous model.
        Get("/negotiate", args => Negotiate
            .WithMediaRangeResponse("text/html", Response.AsRedirect("/"))
            .WithMediaRangeModel("application/json", new { Name = "Nancy" }));
    }

    private IEnumerable<Foo> GetModel()
    {
        yield return new Foo();
    }

    public class Foo
    {
    }
}
/// <summary>
/// Status-code handler that intercepts 404s and replaces the response
/// with a negotiated payload describing the original status.
/// </summary>
private class NotFoundStatusCodeHandler : IStatusCodeHandler
{
    private readonly IResponseNegotiator responseNegotiator;

    public NotFoundStatusCodeHandler(IResponseNegotiator responseNegotiator)
    {
        this.responseNegotiator = responseNegotiator;
    }

    public bool HandlesStatusCode(HttpStatusCode statusCode, NancyContext context)
    {
        // Only 404 responses are intercepted.
        return statusCode == HttpStatusCode.NotFound;
    }

    public void Handle(HttpStatusCode statusCode, NancyContext context)
    {
        var payload = new NotFoundStatusCodeHandlerResult { StatusCode = statusCode, Message = "Not Found." };
        context.Response = this.responseNegotiator.NegotiateResponse(payload, context);
    }
}
// DTO negotiated back to the client by NotFoundStatusCodeHandler:
// the intercepted status code plus a human-readable message.
private class NotFoundStatusCodeHandlerResult
{
    // The original status code (NotFound in these tests).
    public HttpStatusCode StatusCode { get; set; }
    // Human-readable description of the status.
    public string Message { get; set; }
}
}
}
| |
// ReSharper disable All
using System.Collections.Generic;
using System.Dynamic;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using Frapid.ApplicationState.Cache;
using Frapid.ApplicationState.Models;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Frapid.Account.DataAccess;
using Frapid.DataAccess;
using Frapid.DataAccess.Models;
using Frapid.Framework;
using Frapid.Framework.Extensions;
namespace Frapid.Account.Api
{
/// <summary>
/// Provides a direct HTTP access to perform various tasks such as adding, editing, and removing Registrations.
/// </summary>
/// <remarks>
/// Every action follows the same error contract: UnauthorizedException maps to
/// 403 Forbidden, DataAccessException to 500 with the message as body, and —
/// in release builds only (see the #if !DEBUG catch-alls) — any other
/// exception to a bare 500. In debug builds unexpected exceptions propagate.
/// NOTE(review): this looks like generated code; keep edits doc-only.
/// </remarks>
[RoutePrefix("api/v1.0/account/registration")]
public class RegistrationController : FrapidApiController
{
    /// <summary>
    /// The Registration repository.
    /// </summary>
    private readonly IRegistrationRepository RegistrationRepository;

    /// <summary>
    /// Default constructor: resolves login, user, office and catalog from the
    /// ambient application state and wires the concrete data-access repository.
    /// </summary>
    public RegistrationController()
    {
        this._LoginId = AppUsers.GetCurrent().View.LoginId.To<long>();
        this._UserId = AppUsers.GetCurrent().View.UserId.To<int>();
        this._OfficeId = AppUsers.GetCurrent().View.OfficeId.To<int>();
        this._Catalog = AppUsers.GetCatalog();

        this.RegistrationRepository = new Frapid.Account.DataAccess.Registration
        {
            _Catalog = this._Catalog,
            _LoginId = this._LoginId,
            _UserId = this._UserId
        };
    }

    /// <summary>
    /// Testing/DI constructor: takes an explicit repository, catalog and login view.
    /// </summary>
    public RegistrationController(IRegistrationRepository repository, string catalog, LoginView view)
    {
        this._LoginId = view.LoginId.To<long>();
        this._UserId = view.UserId.To<int>();
        this._OfficeId = view.OfficeId.To<int>();
        this._Catalog = catalog;

        this.RegistrationRepository = repository;
    }

    /// <summary>The id of the current login session.</summary>
    public long _LoginId { get; }
    /// <summary>The id of the current user.</summary>
    public int _UserId { get; private set; }
    /// <summary>The id of the current office.</summary>
    public int _OfficeId { get; private set; }
    /// <summary>The database catalog in effect for this request.</summary>
    public string _Catalog { get; }

    /// <summary>
    /// Creates meta information of "registration" entity.
    /// </summary>
    /// <returns>Returns the "registration" meta information to perform CRUD operation.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("meta")]
    [Route("~/api/account/registration/meta")]
    [Authorize]
    public EntityView GetEntityView()
    {
        // Unauthenticated callers get an empty view instead of the schema.
        if (this._LoginId == 0)
        {
            return new EntityView();
        }

        return new EntityView
        {
            PrimaryKey = "registration_id",
            Columns = new List<EntityColumn>()
            {
                new EntityColumn { ColumnName = "registration_id", PropertyName = "RegistrationId", DataType = "System.Guid", DbDataType = "uuid", IsNullable = false, IsPrimaryKey = true, IsSerial = false, Value = "", MaxLength = 0 },
                new EntityColumn { ColumnName = "name", PropertyName = "Name", DataType = "string", DbDataType = "varchar", IsNullable = true, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 100 },
                new EntityColumn { ColumnName = "email", PropertyName = "Email", DataType = "string", DbDataType = "varchar", IsNullable = false, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 100 },
                new EntityColumn { ColumnName = "phone", PropertyName = "Phone", DataType = "string", DbDataType = "varchar", IsNullable = true, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 100 },
                new EntityColumn { ColumnName = "password", PropertyName = "Password", DataType = "string", DbDataType = "text", IsNullable = true, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 0 },
                new EntityColumn { ColumnName = "browser", PropertyName = "Browser", DataType = "string", DbDataType = "text", IsNullable = true, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 0 },
                new EntityColumn { ColumnName = "ip_address", PropertyName = "IpAddress", DataType = "string", DbDataType = "varchar", IsNullable = true, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 50 },
                new EntityColumn { ColumnName = "registered_on", PropertyName = "RegisteredOn", DataType = "DateTime", DbDataType = "timestamptz", IsNullable = false, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 0 },
                new EntityColumn { ColumnName = "confirmed", PropertyName = "Confirmed", DataType = "bool", DbDataType = "bool", IsNullable = true, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 0 },
                new EntityColumn { ColumnName = "confirmed_on", PropertyName = "ConfirmedOn", DataType = "DateTime", DbDataType = "timestamptz", IsNullable = true, IsPrimaryKey = false, IsSerial = false, Value = "", MaxLength = 0 }
            }
        };
    }

    /// <summary>
    /// Counts the number of registrations.
    /// </summary>
    /// <returns>Returns the count of the registrations.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("count")]
    [Route("~/api/account/registration/count")]
    [Authorize]
    public long Count()
    {
        try
        {
            return this.RegistrationRepository.Count();
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        // Release builds mask any unexpected exception as a plain 500.
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Returns all collection of registration.
    /// </summary>
    /// <returns></returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("all")]
    [Route("~/api/account/registration/all")]
    [Authorize]
    public IEnumerable<Frapid.Account.Entities.Registration> GetAll()
    {
        try
        {
            return this.RegistrationRepository.GetAll();
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Returns collection of registration for export.
    /// </summary>
    /// <returns></returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("export")]
    [Route("~/api/account/registration/export")]
    [Authorize]
    public IEnumerable<dynamic> Export()
    {
        try
        {
            return this.RegistrationRepository.Export();
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Returns an instance of registration.
    /// </summary>
    /// <param name="registrationId">Enter RegistrationId to search for.</param>
    /// <returns></returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("{registrationId}")]
    [Route("~/api/account/registration/{registrationId}")]
    [Authorize]
    public Frapid.Account.Entities.Registration Get(System.Guid registrationId)
    {
        try
        {
            return this.RegistrationRepository.Get(registrationId);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Returns multiple instances of registration for the supplied ids.
    /// </summary>
    /// <param name="registrationIds">The RegistrationIds to look up, passed in the query string.</param>
    /// <returns></returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("get")]
    [Route("~/api/account/registration/get")]
    [Authorize]
    public IEnumerable<Frapid.Account.Entities.Registration> Get([FromUri] System.Guid[] registrationIds)
    {
        try
        {
            return this.RegistrationRepository.Get(registrationIds);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Returns the first instance of registration.
    /// </summary>
    /// <returns></returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("first")]
    [Route("~/api/account/registration/first")]
    [Authorize]
    public Frapid.Account.Entities.Registration GetFirst()
    {
        try
        {
            return this.RegistrationRepository.GetFirst();
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Returns the previous instance of registration.
    /// </summary>
    /// <param name="registrationId">Enter RegistrationId to search for.</param>
    /// <returns></returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("previous/{registrationId}")]
    [Route("~/api/account/registration/previous/{registrationId}")]
    [Authorize]
    public Frapid.Account.Entities.Registration GetPrevious(System.Guid registrationId)
    {
        try
        {
            return this.RegistrationRepository.GetPrevious(registrationId);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Returns the next instance of registration.
    /// </summary>
    /// <param name="registrationId">Enter RegistrationId to search for.</param>
    /// <returns></returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("next/{registrationId}")]
    [Route("~/api/account/registration/next/{registrationId}")]
    [Authorize]
    public Frapid.Account.Entities.Registration GetNext(System.Guid registrationId)
    {
        try
        {
            return this.RegistrationRepository.GetNext(registrationId);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Returns the last instance of registration.
    /// </summary>
    /// <returns></returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("last")]
    [Route("~/api/account/registration/last")]
    [Authorize]
    public Frapid.Account.Entities.Registration GetLast()
    {
        try
        {
            return this.RegistrationRepository.GetLast();
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Creates a paginated collection containing 10 registrations on each page, sorted by the property RegistrationId.
    /// </summary>
    /// <returns>Returns the first page from the collection.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("")]
    [Route("~/api/account/registration")]
    [Authorize]
    public IEnumerable<Frapid.Account.Entities.Registration> GetPaginatedResult()
    {
        try
        {
            return this.RegistrationRepository.GetPaginatedResult();
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Creates a paginated collection containing 10 registrations on each page, sorted by the property RegistrationId.
    /// </summary>
    /// <param name="pageNumber">Enter the page number to produce the resultset.</param>
    /// <returns>Returns the requested page from the collection.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("page/{pageNumber}")]
    [Route("~/api/account/registration/page/{pageNumber}")]
    [Authorize]
    public IEnumerable<Frapid.Account.Entities.Registration> GetPaginatedResult(long pageNumber)
    {
        try
        {
            return this.RegistrationRepository.GetPaginatedResult(pageNumber);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Counts the number of registrations using the supplied filter(s).
    /// </summary>
    /// <param name="filters">The list of filter conditions.</param>
    /// <returns>Returns the count of filtered registrations.</returns>
    [AcceptVerbs("POST")]
    [Route("count-where")]
    [Route("~/api/account/registration/count-where")]
    [Authorize]
    public long CountWhere([FromBody]JArray filters)
    {
        try
        {
            // Deserialize the posted JSON array into strongly-typed filters.
            List<Frapid.DataAccess.Models.Filter> f = filters.ToObject<List<Frapid.DataAccess.Models.Filter>>(JsonHelper.GetJsonSerializer());
            return this.RegistrationRepository.CountWhere(f);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Creates a filtered and paginated collection containing 10 registrations on each page, sorted by the property RegistrationId.
    /// </summary>
    /// <param name="pageNumber">Enter the page number to produce the resultset. If you provide a negative number, the result will not be paginated.</param>
    /// <param name="filters">The list of filter conditions.</param>
    /// <returns>Returns the requested page from the collection using the supplied filters.</returns>
    [AcceptVerbs("POST")]
    [Route("get-where/{pageNumber}")]
    [Route("~/api/account/registration/get-where/{pageNumber}")]
    [Authorize]
    public IEnumerable<Frapid.Account.Entities.Registration> GetWhere(long pageNumber, [FromBody]JArray filters)
    {
        try
        {
            List<Frapid.DataAccess.Models.Filter> f = filters.ToObject<List<Frapid.DataAccess.Models.Filter>>(JsonHelper.GetJsonSerializer());
            return this.RegistrationRepository.GetWhere(pageNumber, f);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Counts the number of registrations using the supplied filter name.
    /// </summary>
    /// <param name="filterName">The named filter.</param>
    /// <returns>Returns the count of filtered registrations.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("count-filtered/{filterName}")]
    [Route("~/api/account/registration/count-filtered/{filterName}")]
    [Authorize]
    public long CountFiltered(string filterName)
    {
        try
        {
            return this.RegistrationRepository.CountFiltered(filterName);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Creates a filtered and paginated collection containing 10 registrations on each page, sorted by the property RegistrationId.
    /// </summary>
    /// <param name="pageNumber">Enter the page number to produce the resultset. If you provide a negative number, the result will not be paginated.</param>
    /// <param name="filterName">The named filter.</param>
    /// <returns>Returns the requested page from the collection using the supplied filters.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("get-filtered/{pageNumber}/{filterName}")]
    [Route("~/api/account/registration/get-filtered/{pageNumber}/{filterName}")]
    [Authorize]
    public IEnumerable<Frapid.Account.Entities.Registration> GetFiltered(long pageNumber, string filterName)
    {
        try
        {
            return this.RegistrationRepository.GetFiltered(pageNumber, filterName);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Displayfield is a lightweight key/value collection of registrations.
    /// </summary>
    /// <returns>Returns an enumerable key/value collection of registrations.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("display-fields")]
    [Route("~/api/account/registration/display-fields")]
    [Authorize]
    public IEnumerable<Frapid.DataAccess.Models.DisplayField> GetDisplayFields()
    {
        try
        {
            return this.RegistrationRepository.GetDisplayFields();
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// A custom field is a user defined field for registrations.
    /// </summary>
    /// <returns>Returns an enumerable custom field collection of registrations.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("custom-fields")]
    [Route("~/api/account/registration/custom-fields")]
    [Authorize]
    public IEnumerable<Frapid.DataAccess.Models.CustomField> GetCustomFields()
    {
        try
        {
            // Null resource id: fetch custom fields without scoping to a resource.
            return this.RegistrationRepository.GetCustomFields(null);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// A custom field is a user defined field for registrations.
    /// </summary>
    /// <param name="resourceId">The resource id to scope the custom fields to.</param>
    /// <returns>Returns an enumerable custom field collection of registrations.</returns>
    [AcceptVerbs("GET", "HEAD")]
    [Route("custom-fields/{resourceId}")]
    [Route("~/api/account/registration/custom-fields/{resourceId}")]
    [Authorize]
    public IEnumerable<Frapid.DataAccess.Models.CustomField> GetCustomFields(string resourceId)
    {
        try
        {
            return this.RegistrationRepository.GetCustomFields(resourceId);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Adds or edits your instance of Registration class.
    /// </summary>
    /// <param name="form">A two-element JSON array: element 0 is the registration instance, element 1 is its list of custom fields.</param>
    [AcceptVerbs("POST")]
    [Route("add-or-edit")]
    [Route("~/api/account/registration/add-or-edit")]
    [Authorize]
    public object AddOrEdit([FromBody]Newtonsoft.Json.Linq.JArray form)
    {
        dynamic registration = form[0].ToObject<ExpandoObject>(JsonHelper.GetJsonSerializer());
        List<Frapid.DataAccess.Models.CustomField> customFields = form[1].ToObject<List<Frapid.DataAccess.Models.CustomField>>(JsonHelper.GetJsonSerializer());

        if (registration == null)
        {
            // A missing body is rejected before touching the repository.
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.MethodNotAllowed));
        }

        try
        {
            return this.RegistrationRepository.AddOrEdit(registration, customFields);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Adds your instance of Registration class.
    /// </summary>
    /// <param name="registration">Your instance of registrations class to add.</param>
    [AcceptVerbs("POST")]
    [Route("add/{registration}")]
    [Route("~/api/account/registration/add/{registration}")]
    [Authorize]
    public void Add(Frapid.Account.Entities.Registration registration)
    {
        if (registration == null)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.MethodNotAllowed));
        }

        try
        {
            this.RegistrationRepository.Add(registration);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Edits existing record with your instance of Registration class.
    /// </summary>
    /// <param name="registrationId">Enter the value for RegistrationId in order to find and edit the existing record.</param>
    /// <param name="registration">Your instance of Registration class to edit.</param>
    [AcceptVerbs("PUT")]
    [Route("edit/{registrationId}")]
    [Route("~/api/account/registration/edit/{registrationId}")]
    [Authorize]
    public void Edit(System.Guid registrationId, [FromBody] Frapid.Account.Entities.Registration registration)
    {
        if (registration == null)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.MethodNotAllowed));
        }

        try
        {
            this.RegistrationRepository.Update(registration, registrationId);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    // Deserializes the posted JSON array into a list of dynamic rows.
    private List<ExpandoObject> ParseCollection(JArray collection)
    {
        return JsonConvert.DeserializeObject<List<ExpandoObject>>(collection.ToString(), JsonHelper.GetJsonSerializerSettings());
    }

    /// <summary>
    /// Adds or edits multiple instances of Registration class.
    /// </summary>
    /// <param name="collection">Your collection of Registration class to bulk import.</param>
    /// <returns>Returns list of imported registrationIds. NOTE(review): returns null (not an empty list) when the posted collection is empty.</returns>
    /// <exception cref="DataAccessException">Thrown when your any Registration class in the collection is invalid or malformed.</exception>
    [AcceptVerbs("POST")]
    [Route("bulk-import")]
    [Route("~/api/account/registration/bulk-import")]
    [Authorize]
    public List<object> BulkImport([FromBody]JArray collection)
    {
        List<ExpandoObject> registrationCollection = this.ParseCollection(collection);

        if (registrationCollection == null || registrationCollection.Count.Equals(0))
        {
            return null;
        }

        try
        {
            return this.RegistrationRepository.BulkImport(registrationCollection);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }

    /// <summary>
    /// Deletes an existing instance of Registration class via RegistrationId.
    /// </summary>
    /// <param name="registrationId">Enter the value for RegistrationId in order to find and delete the existing record.</param>
    [AcceptVerbs("DELETE")]
    [Route("delete/{registrationId}")]
    [Route("~/api/account/registration/delete/{registrationId}")]
    [Authorize]
    public void Delete(System.Guid registrationId)
    {
        try
        {
            this.RegistrationRepository.Delete(registrationId);
        }
        catch (UnauthorizedException)
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.Forbidden));
        }
        catch (DataAccessException ex)
        {
            throw new HttpResponseException(new HttpResponseMessage
            {
                Content = new StringContent(ex.Message),
                StatusCode = HttpStatusCode.InternalServerError
            });
        }
        #if !DEBUG
        catch
        {
            throw new HttpResponseException(new HttpResponseMessage(HttpStatusCode.InternalServerError));
        }
        #endif
    }
}
}
| |
using System;
using System.Collections.Generic;
using ModestTree;
using System.Linq;
#if !NOT_UNITY3D
using UnityEngine;
#endif
using Zenject.Internal;
namespace Zenject
{
public abstract class FromBinder : ScopeArgConditionCopyNonLazyBinder
{
// Stores the finalizer wrapper so derived binders can install a concrete
// finalizer later via the SubFinalizer setter.
public FromBinder(
    BindInfo bindInfo,
    BindFinalizerWrapper finalizerWrapper)
    : base(bindInfo)
{
    FinalizerWrapper = finalizerWrapper;
}
// Wrapper whose SubFinalizer is filled in once one of the From* methods
// decides how this binding should be finalized.
protected BindFinalizerWrapper FinalizerWrapper
{
    get;
    private set;
}
// Convenience setter: forwards the chosen finalizer into the wrapper.
protected IBindingFinalizer SubFinalizer
{
    set { FinalizerWrapper.SubFinalizer = value; }
}
// Union of the contract types and the bound-to types for this binding.
protected IEnumerable<Type> AllParentTypes
{
    get { return BindInfo.ContractTypes.Concat(BindInfo.ToTypes); }
}
// The types that will actually be instantiated: the contract types
// themselves when binding to self, otherwise the explicit To() types
// (which must be non-empty in that case).
protected IEnumerable<Type> ConcreteTypes
{
    get
    {
        if (BindInfo.ToChoice == ToChoices.Self)
        {
            return BindInfo.ContractTypes;
        }

        Assert.IsNotEmpty(BindInfo.ToTypes);
        return BindInfo.ToTypes;
    }
}
// This is the default if nothing else is called.
// Only validates: plain construction is invalid for Unity components and
// for abstract types, so both are asserted against here.
public ScopeArgConditionCopyNonLazyBinder FromNew()
{
    BindingUtil.AssertTypesAreNotComponents(ConcreteTypes);
    BindingUtil.AssertTypesAreNotAbstract(ConcreteTypes);

    return this;
}
// Overload: resolve with no sub-identifier.
public ScopeConditionCopyNonLazyBinder FromResolve()
{
    return FromResolve(null);
}
public ScopeConditionCopyNonLazyBinder FromResolve(object subIdentifier)
{
BindInfo.RequireExplicitScope = false;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
SingletonTypes.FromResolve, subIdentifier,
(container, type) => new ResolveProvider(
type, container, subIdentifier, false, InjectSources.Any));
return new ScopeConditionCopyNonLazyBinder(BindInfo);
}
public SubContainerBinder FromSubContainerResolve()
{
return FromSubContainerResolve(null);
}
public SubContainerBinder FromSubContainerResolve(object subIdentifier)
{
// It's unlikely they will want to create the whole subcontainer with each binding
// (aka transient) which is the default so require that they specify it
BindInfo.RequireExplicitScope = true;
return new SubContainerBinder(
BindInfo, FinalizerWrapper, subIdentifier);
}
public ScopeArgConditionCopyNonLazyBinder FromFactory(Type factoryType)
{
Assert.That(factoryType.DerivesFrom<IFactory>());
BindInfo.RequireExplicitScope = true;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
SingletonTypes.FromFactory, factoryType,
(container, type) => new UntypedFactoryProvider(
factoryType, container, BindInfo.Arguments));
return new ScopeArgConditionCopyNonLazyBinder(BindInfo);
}
#if !NOT_UNITY3D
public ScopeArgConditionCopyNonLazyBinder FromNewComponentOn(GameObject gameObject)
{
BindingUtil.AssertIsValidGameObject(gameObject);
BindingUtil.AssertIsComponent(ConcreteTypes);
BindingUtil.AssertTypesAreNotAbstract(ConcreteTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo, SingletonTypes.FromComponentGameObject, gameObject,
(container, type) => new AddToExistingGameObjectComponentProvider(
gameObject, container, type, BindInfo.ConcreteIdentifier, BindInfo.Arguments));
return new ScopeArgConditionCopyNonLazyBinder(BindInfo);
}
public ScopeArgConditionCopyNonLazyBinder FromNewComponentOn(Func<InjectContext, GameObject> gameObjectGetter)
{
BindingUtil.AssertIsComponent(ConcreteTypes);
BindingUtil.AssertTypesAreNotAbstract(ConcreteTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo, SingletonTypes.FromComponentGameObject, gameObjectGetter,
(container, type) => new AddToExistingGameObjectComponentProviderGetter(
gameObjectGetter, container, type, BindInfo.ConcreteIdentifier, BindInfo.Arguments));
return new ScopeArgConditionCopyNonLazyBinder(BindInfo);
}
public ArgConditionCopyNonLazyBinder FromNewComponentSibling()
{
BindingUtil.AssertIsComponent(ConcreteTypes);
BindingUtil.AssertTypesAreNotAbstract(ConcreteTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new SingleProviderBindingFinalizer(
BindInfo, (container, type) => new AddToCurrentGameObjectComponentProvider(
container, type, BindInfo.ConcreteIdentifier, BindInfo.Arguments));
return new ArgConditionCopyNonLazyBinder(BindInfo);
}
public NameTransformScopeArgConditionCopyNonLazyBinder FromNewComponentOnNewGameObject()
{
return FromNewComponentOnNewGameObject(new GameObjectCreationParameters());
}
internal NameTransformScopeArgConditionCopyNonLazyBinder FromNewComponentOnNewGameObject(
GameObjectCreationParameters gameObjectInfo)
{
BindingUtil.AssertIsComponent(ConcreteTypes);
BindingUtil.AssertTypesAreNotAbstract(ConcreteTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo, SingletonTypes.FromGameObject, gameObjectInfo,
(container, type) => new AddToNewGameObjectComponentProvider(
container,
type,
BindInfo.ConcreteIdentifier,
BindInfo.Arguments,
gameObjectInfo));
return new NameTransformScopeArgConditionCopyNonLazyBinder(BindInfo, gameObjectInfo);
}
public NameTransformScopeArgConditionCopyNonLazyBinder FromNewComponentOnNewPrefabResource(string resourcePath)
{
return FromNewComponentOnNewPrefabResource(resourcePath, new GameObjectCreationParameters());
}
internal NameTransformScopeArgConditionCopyNonLazyBinder FromNewComponentOnNewPrefabResource(
string resourcePath, GameObjectCreationParameters gameObjectInfo)
{
BindingUtil.AssertIsValidResourcePath(resourcePath);
BindingUtil.AssertIsComponent(ConcreteTypes);
BindingUtil.AssertTypesAreNotAbstract(ConcreteTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new PrefabResourceBindingFinalizer(
BindInfo, gameObjectInfo, resourcePath,
(contractType, instantiator) => new InstantiateOnPrefabComponentProvider(contractType, instantiator));
return new NameTransformScopeArgConditionCopyNonLazyBinder(BindInfo, gameObjectInfo);
}
public NameTransformScopeArgConditionCopyNonLazyBinder FromNewComponentOnNewPrefab(UnityEngine.Object prefab)
{
return FromNewComponentOnNewPrefab(prefab, new GameObjectCreationParameters());
}
internal NameTransformScopeArgConditionCopyNonLazyBinder FromNewComponentOnNewPrefab(
UnityEngine.Object prefab, GameObjectCreationParameters gameObjectInfo)
{
BindingUtil.AssertIsValidPrefab(prefab);
BindingUtil.AssertIsComponent(ConcreteTypes);
BindingUtil.AssertTypesAreNotAbstract(ConcreteTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new PrefabBindingFinalizer(
BindInfo, gameObjectInfo, prefab,
(contractType, instantiator) => new InstantiateOnPrefabComponentProvider(contractType, instantiator));
return new NameTransformScopeArgConditionCopyNonLazyBinder(BindInfo, gameObjectInfo);
}
public NameTransformScopeArgConditionCopyNonLazyBinder FromComponentInNewPrefab(UnityEngine.Object prefab)
{
return FromComponentInNewPrefab(
prefab, new GameObjectCreationParameters());
}
internal NameTransformScopeArgConditionCopyNonLazyBinder FromComponentInNewPrefab(
UnityEngine.Object prefab, GameObjectCreationParameters gameObjectInfo)
{
BindingUtil.AssertIsValidPrefab(prefab);
BindingUtil.AssertIsInterfaceOrComponent(AllParentTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new PrefabBindingFinalizer(
BindInfo, gameObjectInfo, prefab,
(contractType, instantiator) => new GetFromPrefabComponentProvider(contractType, instantiator));
return new NameTransformScopeArgConditionCopyNonLazyBinder(BindInfo, gameObjectInfo);
}
public NameTransformScopeArgConditionCopyNonLazyBinder FromComponentInNewPrefabResource(string resourcePath)
{
return FromComponentInNewPrefabResource(resourcePath, new GameObjectCreationParameters());
}
internal NameTransformScopeArgConditionCopyNonLazyBinder FromComponentInNewPrefabResource(
string resourcePath, GameObjectCreationParameters gameObjectInfo)
{
BindingUtil.AssertIsValidResourcePath(resourcePath);
BindingUtil.AssertIsInterfaceOrComponent(AllParentTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new PrefabResourceBindingFinalizer(
BindInfo, gameObjectInfo, resourcePath,
(contractType, instantiator) => new GetFromPrefabComponentProvider(contractType, instantiator));
return new NameTransformScopeArgConditionCopyNonLazyBinder(BindInfo, gameObjectInfo);
}
public ScopeArgConditionCopyNonLazyBinder FromNewScriptableObjectResource(string resourcePath)
{
return FromScriptableObjectResourceInternal(resourcePath, true);
}
public ScopeArgConditionCopyNonLazyBinder FromScriptableObjectResource(string resourcePath)
{
return FromScriptableObjectResourceInternal(resourcePath, false);
}
ScopeArgConditionCopyNonLazyBinder FromScriptableObjectResourceInternal(
string resourcePath, bool createNew)
{
BindingUtil.AssertIsValidResourcePath(resourcePath);
BindingUtil.AssertIsInterfaceOrScriptableObject(AllParentTypes);
BindInfo.RequireExplicitScope = true;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
createNew ? SingletonTypes.FromNewScriptableObjectResource : SingletonTypes.FromScriptableObjectResource,
resourcePath.ToLower(),
(container, type) => new ScriptableObjectResourceProvider(
resourcePath, type, container, BindInfo.ConcreteIdentifier, BindInfo.Arguments, createNew));
return new ScopeArgConditionCopyNonLazyBinder(BindInfo);
}
public ScopeConditionCopyNonLazyBinder FromResource(string resourcePath)
{
BindingUtil.AssertDerivesFromUnityObject(ConcreteTypes);
BindInfo.RequireExplicitScope = false;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
SingletonTypes.FromResource,
resourcePath.ToLower(),
(_, type) => new ResourceProvider(resourcePath, type));
return new ScopeConditionCopyNonLazyBinder(BindInfo);
}
#endif
public ScopeArgConditionCopyNonLazyBinder FromMethodUntyped(Func<InjectContext, object> method)
{
BindInfo.RequireExplicitScope = false;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
SingletonTypes.FromMethod, new SingletonImplIds.ToMethod(method),
(container, type) => new MethodProviderUntyped(method, container));
return this;
}
protected ScopeArgConditionCopyNonLazyBinder FromMethodBase<TConcrete>(Func<InjectContext, TConcrete> method)
{
BindingUtil.AssertIsDerivedFromTypes(typeof(TConcrete), AllParentTypes);
BindInfo.RequireExplicitScope = false;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
SingletonTypes.FromMethod, new SingletonImplIds.ToMethod(method),
(container, type) => new MethodProvider<TConcrete>(method, container));
return this;
}
protected ScopeArgConditionCopyNonLazyBinder FromMethodMultipleBase<TConcrete>(Func<InjectContext, IEnumerable<TConcrete>> method)
{
BindingUtil.AssertIsDerivedFromTypes(typeof(TConcrete), AllParentTypes);
BindInfo.RequireExplicitScope = false;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
SingletonTypes.FromMethod, new SingletonImplIds.ToMethod(method),
(container, type) => new MethodProviderMultiple<TConcrete>(method, container));
return this;
}
protected ScopeArgConditionCopyNonLazyBinder FromFactoryBase<TConcrete, TFactory>()
where TFactory : IFactory<TConcrete>
{
BindingUtil.AssertIsDerivedFromTypes(typeof(TConcrete), AllParentTypes);
// This is kind of like a look up method like FromMethod so don't enforce specifying scope
BindInfo.RequireExplicitScope = false;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
SingletonTypes.FromFactory, typeof(TFactory),
(container, type) => new FactoryProvider<TConcrete, TFactory>(container, BindInfo.Arguments));
return new ScopeArgConditionCopyNonLazyBinder(BindInfo);
}
protected ScopeConditionCopyNonLazyBinder FromResolveGetterBase<TObj, TResult>(
object identifier, Func<TObj, TResult> method)
{
BindingUtil.AssertIsDerivedFromTypes(typeof(TResult), AllParentTypes);
BindInfo.RequireExplicitScope = false;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo,
SingletonTypes.FromGetter,
new SingletonImplIds.ToGetter(identifier, method),
(container, type) => new GetterProvider<TObj, TResult>(identifier, method, container));
return new ScopeConditionCopyNonLazyBinder(BindInfo);
}
protected ScopeConditionCopyNonLazyBinder FromInstanceBase(object instance)
{
BindingUtil.AssertInstanceDerivesFromOrEqual(instance, AllParentTypes);
BindInfo.RequireExplicitScope = false;
SubFinalizer = new ScopableBindingFinalizer(
BindInfo, SingletonTypes.FromInstance, instance,
(container, type) => new InstanceProvider(type, instance, container));
return new ScopeConditionCopyNonLazyBinder(BindInfo);
}
}
}
| |
using UnityEngine;
using UnityEngine.Events;
using UnityEngine.UI;
using System.Collections.Generic;
using System.Linq;
using System;
namespace UIWidgets
{
/// <summary>
/// Event raised when a tab is selected; the int argument is the index of the
/// selected tab within Tabs.TabObjects.
/// </summary>
[Serializable]
public class TabSelectEvent : UnityEvent<int>
{
}
/// <summary>
/// Tabs widget: shows one tab panel at a time, switched via generated toggle buttons.
/// http://ilih.ru/images/unity-assets/UIWidgets/Tabs.png
/// </summary>
[AddComponentMenu("UI/UIWidgets/Tabs")]
public class Tabs : MonoBehaviour
{
    /// <summary>
    /// The container for tab toggle buttons.
    /// </summary>
    [SerializeField]
    public Transform Container;

    /// <summary>
    /// The default (inactive) tab button template.
    /// </summary>
    [SerializeField]
    public Button DefaultTabButton;

    /// <summary>
    /// The active tab button template.
    /// </summary>
    [SerializeField]
    public Button ActiveTabButton;

    [SerializeField]
    Tab[] tabObjects = new Tab[]{};

    /// <summary>
    /// Gets or sets the tab objects. Setting this rebuilds the tab buttons.
    /// </summary>
    /// <value>The tab objects.</value>
    public Tab[] TabObjects {
        get {
            return tabObjects;
        }
        set {
            tabObjects = value;
            UpdateButtons();
        }
    }

    /// <summary>
    /// The name of the default tab.
    /// </summary>
    [SerializeField]
    [Tooltip("Tab name which will be active by default, if not specified will be opened first Tab.")]
    public string DefaultTabName = string.Empty;

    /// <summary>
    /// If true does not deactivate hidden tabs.
    /// </summary>
    [SerializeField]
    [Tooltip("If true does not deactivate hidden tabs.")]
    public bool KeepTabsActive = false;

    /// <summary>
    /// OnTabSelect event. Invoked with the index of the selected tab.
    /// </summary>
    [SerializeField]
    public TabSelectEvent OnTabSelect = new TabSelectEvent();

    /// <summary>
    /// Gets or sets the selected tab.
    /// </summary>
    /// <value>The selected tab.</value>
    public Tab SelectedTab {
        get;
        protected set;
    }

    // Instantiated button pairs; defaultButtons[i]/activeButtons[i] belong to tabObjects[i].
    List<Button> defaultButtons = new List<Button>();
    List<Button> activeButtons = new List<Button>();
    // Click callbacks registered on defaultButtons, kept so they can be unsubscribed later.
    List<UnityAction> callbacks = new List<UnityAction>();

    /// <summary>
    /// Start this instance: validates required references, hides the button
    /// templates and builds the tab buttons.
    /// </summary>
    public void Start()
    {
        if (Container==null)
        {
            throw new NullReferenceException("Container is null. Set object of type GameObject to Container.");
        }
        if (DefaultTabButton==null)
        {
            throw new NullReferenceException("DefaultTabButton is null. Set object of type GameObject to DefaultTabButton.");
        }
        if (ActiveTabButton==null)
        {
            throw new NullReferenceException("ActiveTabButton is null. Set object of type GameObject to ActiveTabButton.");
        }
        // Templates are only cloned, never shown directly.
        DefaultTabButton.gameObject.SetActive(false);
        ActiveTabButton.gameObject.SetActive(false);
        UpdateButtons();
    }

    /// <summary>
    /// Rebuilds the buttons for the current tabObjects and selects the default
    /// (or first) tab.
    /// </summary>
    void UpdateButtons()
    {
        if (tabObjects.Length==0)
        {
            throw new ArgumentException("TabObjects array is empty. Fill it.");
        }
        RemoveCallbacks();
        CreateButtons();
        AddCallbacks();
        if (DefaultTabName!="")
        {
            if (IsExistsTabName(DefaultTabName))
            {
                SelectTab(DefaultTabName);
            }
            else
            {
                Debug.LogWarning(string.Format("Tab with specified DefaultTabName \"{0}\" not found. Opened first Tab.", DefaultTabName), this);
                SelectTab(tabObjects[0].Name);
            }
        }
        else
        {
            SelectTab(tabObjects[0].Name);
        }
    }

    // True if a tab with the given name exists in tabObjects.
    bool IsExistsTabName(string tabName)
    {
        return tabObjects.Any(x => x.Name==tabName);
    }

    // Registers a click listener on the default button that selects the tab.
    void AddCallback(Tab tab, int index)
    {
        var tabName = tab.Name;
        UnityAction callback = () => SelectTab(tabName);
        callbacks.Add(callback);
        defaultButtons[index].onClick.AddListener(callbacks[index]);
    }

    void AddCallbacks()
    {
        tabObjects.ForEach(AddCallback);
    }

    // Unregisters the click listener added for the tab at the given index.
    void RemoveCallback(Tab tab, int index)
    {
        // Guard both lists: tabObjects may have been replaced with a different length
        // since the callbacks were registered.
        if ((tab!=null) && (index < callbacks.Count) && (index < defaultButtons.Count))
        {
            defaultButtons[index].onClick.RemoveListener(callbacks[index]);
        }
    }

    void RemoveCallbacks()
    {
        if (callbacks.Count > 0)
        {
            tabObjects.ForEach(RemoveCallback);
            callbacks.Clear();
        }
    }

    void OnDestroy()
    {
        RemoveCallbacks();
    }

    /// <summary>
    /// Selects the tab.
    /// </summary>
    /// <param name="tabName">Tab name.</param>
    public void SelectTab(string tabName)
    {
        var index = Array.FindIndex(tabObjects, x => x.Name==tabName);
        if (index==-1)
        {
            throw new ArgumentException(string.Format("Tab with name \"{0}\" not found.", tabName));
        }
        if (KeepTabsActive)
        {
            tabObjects[index].TabObject.transform.SetAsLastSibling();
        }
        else
        {
            tabObjects.ForEach(DeactivateTab);
            tabObjects[index].TabObject.SetActive(true);
        }
        // Show the "active" button for the selected tab, the "default" button for the rest.
        defaultButtons.ForEach(ActivateButton);
        defaultButtons[index].gameObject.SetActive(false);
        activeButtons.ForEach(DeactivateButton);
        activeButtons[index].gameObject.SetActive(true);
        SelectedTab = tabObjects[index];
        OnTabSelect.Invoke(index);
    }

    void DeactivateTab(Tab tab)
    {
        tab.TabObject.SetActive(false);
    }

    void ActivateButton(Button button)
    {
        button.gameObject.SetActive(true);
    }

    void DeactivateButton(Button button)
    {
        button.gameObject.SetActive(false);
    }

    /// <summary>
    /// Creates (or destroys) toggle buttons so one default/active pair exists per tab.
    /// </summary>
    void CreateButtons()
    {
        if (tabObjects.Length > defaultButtons.Count)
        {
            for (var i = defaultButtons.Count; i < tabObjects.Length; i++)
            {
                var defaultButton = Instantiate(DefaultTabButton) as Button;
                defaultButton.transform.SetParent(Container, false);
                defaultButtons.Add(defaultButton);
                var activeButton = Instantiate(ActiveTabButton) as Button;
                activeButton.transform.SetParent(Container, false);
                activeButtons.Add(activeButton);
            }
        }
        // Destroy surplus buttons if the tab count shrank.
        // BUGFIX: the original loop started at defaultButtons.Count and indexed one
        // element past the end of both lists (IndexOutOfRangeException); iterate from
        // the last valid index down to tabObjects.Length instead.
        if (tabObjects.Length < defaultButtons.Count)
        {
            for (var i = defaultButtons.Count - 1; i >= tabObjects.Length; i--)
            {
                Destroy(defaultButtons[i]);
                Destroy(activeButtons[i]);
                defaultButtons.RemoveAt(i);
                activeButtons.RemoveAt(i);
            }
        }
        defaultButtons.ForEach(SetButtonName);
        activeButtons.ForEach(SetButtonName);
    }

    /// <summary>
    /// Sets the name of the button.
    /// </summary>
    /// <param name="button">Button.</param>
    /// <param name="index">Index.</param>
    protected virtual void SetButtonName(Button button, int index)
    {
        var tab_button = button.GetComponent<TabButtonComponent>();
        if (tab_button==null)
        {
            button.gameObject.SetActive(true);
            button.GetComponentInChildren<Text>().text = TabObjects[index].Name;
        }
        else
        {
            tab_button.SetButtonData(TabObjects[index]);
        }
    }
}
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Linq;
using ASC.Common.Logging;
using ASC.Common.Utils;
using ASC.Mail.Core.Entities;
using ASC.Mail.Data.Contracts;
namespace ASC.Mail.Core.Engine
{
// Stores and retrieves mailbox connection settings: providers, their domains
// and the incoming/outgoing server definitions.
public class MailBoxSettingEngine
{
public ILog Log { get; private set; }
public MailBoxSettingEngine(ILog log = null)
{
Log = log ?? LogManager.GetLogger("ASC.Mail.MailBoxSettingEngine");
}
// Persists the given client config (provider, domains, servers) inside a single
// DB transaction. Returns false on any failure; the error is logged, not rethrown.
public bool SetMailBoxSettings(ClientConfig config, bool isUserData)
{
try
{
// Reject configs without a provider id, domains or server lists.
if (string.IsNullOrEmpty(config.EmailProvider.Id) ||
!config.EmailProvider.Domain.Any() ||
config.EmailProvider.IncomingServer == null ||
!config.EmailProvider.IncomingServer.Any() ||
config.EmailProvider.OutgoingServer == null ||
!config.EmailProvider.OutgoingServer.Any())
throw new Exception("Incorrect config");
using (var daoFactory = new DaoFactory())
{
using (var tx = daoFactory.DbManager.BeginTransaction())
{
// 1) Ensure the provider row exists.
var daoMbProvider = daoFactory.CreateMailboxProviderDao();
var provider = daoMbProvider.GetProvider(config.EmailProvider.Id);
if (provider == null)
{
provider = new MailboxProvider
{
Id = 0,
Name = config.EmailProvider.Id,
DisplayName = config.EmailProvider.DisplayName,
DisplayShortName = config.EmailProvider.DisplayShortName,
Url = config.EmailProvider.Documentation.Url
};
provider.Id = daoMbProvider.SaveProvider(provider);
if (provider.Id < 0)
{
tx.Rollback();
throw new Exception("id_provider not saved into DB");
}
}
// 2) Ensure every configured domain is linked to the provider.
var daoMbDomain = daoFactory.CreateMailboxDomainDao();
foreach (var domainName in config.EmailProvider.Domain)
{
var domain = daoMbDomain.GetDomain(domainName);
if (domain != null)
continue;
domain = new MailboxDomain
{
Id = 0,
ProviderId = provider.Id,
Name = domainName
};
domain.Id = daoMbDomain.SaveDomain(domain);
if (domain.Id < 0)
{
tx.Rollback();
throw new Exception("id_domain not saved into DB");
}
}
// 3) Upsert incoming and outgoing servers for the provider.
var daoMbServer = daoFactory.CreateMailboxServerDao();
var existingServers = daoMbServer.GetServers(provider.Id);
var newServers = config.EmailProvider
.IncomingServer
.ConvertAll(s => new MailboxServer
{
Id = 0,
Username = s.Username,
Type = s.Type,
ProviderId = provider.Id,
Hostname = s.Hostname,
Port = s.Port,
SocketType = s.SocketType,
Authentication = s.Authentication,
IsUserData = isUserData
});
newServers.AddRange(config.EmailProvider
.OutgoingServer
.ConvertAll(s => new MailboxServer
{
Id = 0,
Username = s.Username,
Type = s.Type,
ProviderId = provider.Id,
Hostname = s.Hostname,
Port = s.Port,
SocketType = s.SocketType,
Authentication = s.Authentication,
IsUserData = isUserData
}));
foreach (var s in newServers)
{
// A server matching on type/port/socket type is updated in place
// (by reusing its id); otherwise a new row is inserted.
var existing =
existingServers.FirstOrDefault(
es =>
es.Type.Equals(s.Type) && es.Port == s.Port &&
es.SocketType.Equals(s.SocketType));
if (existing != null)
{
if (existing.Equals(s))
continue;
s.Id = existing.Id;
}
s.Id = daoMbServer.SaveServer(s);
if (s.Id < 0)
{
tx.Rollback();
throw new Exception("id_server not saved into DB");
}
}
tx.Commit();
}
}
}
catch (Exception ex)
{
Log.Error("SetMailBoxSettings failed", ex);
return false;
}
return true;
}
// Returns settings stored in the DB for the host, falling back to a lookup
// of known business vendors via the domain's MX records.
public ClientConfig GetMailBoxSettings(string host)
{
var config = GetStoredMailBoxSettings(host);
return config ?? SearchBusinessVendorsSettings(host);
}
// Reconstructs a ClientConfig from the stored domain/provider/server rows.
// Returns null when any of the pieces is missing.
private static ClientConfig GetStoredMailBoxSettings(string host)
{
using (var daoFactory = new DaoFactory())
{
var daoMbDomain = daoFactory.CreateMailboxDomainDao();
var domain = daoMbDomain.GetDomain(host);
if (domain == null)
return null;
var daoMbProvider = daoFactory.CreateMailboxProviderDao();
var provider = daoMbProvider.GetProvider(domain.ProviderId);
if (provider == null)
return null;
var daoMbServer = daoFactory.CreateMailboxServerDao();
var existingServers = daoMbServer.GetServers(provider.Id);
if (!existingServers.Any())
return null;
var config = new ClientConfig();
config.EmailProvider.Domain.Add(host);
config.EmailProvider.Id = provider.Name;
config.EmailProvider.DisplayName = provider.DisplayName;
config.EmailProvider.DisplayShortName = provider.DisplayShortName;
config.EmailProvider.Documentation.Url = provider.Url;
existingServers.ForEach(serv =>
{
// Servers of type "smtp" are outgoing; everything else is incoming.
if (serv.Type == "smtp")
{
config.EmailProvider.OutgoingServer.Add(
new ClientConfigEmailProviderOutgoingServer
{
Type = serv.Type,
SocketType = serv.SocketType,
Hostname = serv.Hostname,
Port = serv.Port,
Username = serv.Username,
Authentication = serv.Authentication
});
}
else
{
config.EmailProvider.IncomingServer.Add(
new ClientConfigEmailProviderIncomingServer
{
Type = serv.Type,
SocketType = serv.SocketType,
Hostname = serv.Hostname,
Port = serv.Port,
Username = serv.Username,
Authentication = serv.Authentication
});
}
});
if (!config.EmailProvider.IncomingServer.Any() || !config.EmailProvider.OutgoingServer.Any())
return null;
return config;
}
}
// Resolves the domain's MX records and, when they match a known business
// vendor, returns that vendor's stored settings. Errors are logged and
// swallowed (best effort); returns null when nothing matches.
private ClientConfig SearchBusinessVendorsSettings(string domain)
{
ClientConfig settingsFromDb = null;
try
{
var dnsLookup = new DnsLookup();
var mxRecords = dnsLookup.GetDomainMxRecords(domain);
if (!mxRecords.Any())
{
return null;
}
var knownBusinessMxs =
Defines.MxToDomainBusinessVendorsList.Where(
mx =>
mxRecords.FirstOrDefault(
r => r.ExchangeDomainName.ToString().ToLowerInvariant().Contains(mx.Key.ToLowerInvariant())) != null)
.ToList();
foreach (var mxXdomain in knownBusinessMxs)
{
settingsFromDb = GetStoredMailBoxSettings(mxXdomain.Value);
if (settingsFromDb != null)
return settingsFromDb;
}
}
catch (Exception ex)
{
Log.Error("SearchBusinessVendorsSettings failed", ex);
}
return settingsFromDb;
}
}
}
| |
using System;
namespace Rock.Core.UnitTests.Extensions
{
public static class TemporalExtensions
{
    // Calendar approximations used by Years()/Months().
    private const int DaysPerYear = 365;
    private const int DaysPerMonth = 30;

    /// <summary>
    /// Returns the last millisecond (23:59:59.999) of the day that <paramref name="date"/> falls on.
    /// </summary>
    /// <param name="date"></param>
    /// <returns></returns>
    public static DateTime EndOfDay(this DateTime date)
    {
        var lastSecond = new DateTime(date.Year, date.Month, date.Day, 23, 59, 59);
        return lastSecond.AddMilliseconds(999);
    }

    /// <summary>
    /// Returns midnight (00:00:00.000) of the day that <paramref name="date"/> falls on.
    /// </summary>
    /// <param name="date"></param>
    /// <returns></returns>
    public static DateTime BeginningOfDay(this DateTime date)
    {
        return new DateTime(date.Year, date.Month, date.Day);
    }

    /// <summary>
    /// Returns the moment lying <paramref name="source"/> after <paramref name="origin"/>.
    /// <example>
    /// var finalDate = new TimeSpan(0, 1, 0, 0).From(DateTime.Now);
    /// </example>
    /// </summary>
    /// <param name="source"></param>
    /// <param name="origin"></param>
    /// <returns></returns>
    public static DateTime From(this TimeSpan source, DateTime origin)
    {
        return origin.Add(source);
    }

    /// <summary>
    /// Alias for <seealso cref="From"/>.
    /// </summary>
    /// <param name="source"></param>
    /// <param name="origin"></param>
    /// <seealso cref="From"/>
    /// <returns></returns>
    public static DateTime Since(this TimeSpan source, DateTime origin)
    {
        return source.From(origin);
    }

    /// <summary>
    /// Builds a <see cref="TimeSpan"/> of <paramref name="source"/> years (365 days per year).
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2233:OperationsShouldNotOverflow", MessageId = "source*365")]
    public static TimeSpan Years(this int source)
    {
        var totalDays = source*DaysPerYear;
        return new TimeSpan(totalDays, 0, 0, 0);
    }

    /// <summary>
    /// Builds a <see cref="TimeSpan"/> of <paramref name="source"/> months (30 days per month).
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2233:OperationsShouldNotOverflow", MessageId = "source*30")]
    public static TimeSpan Months(this int source)
    {
        var totalDays = source*DaysPerMonth;
        return new TimeSpan(totalDays, 0, 0, 0);
    }

    /// <summary>
    /// Builds a <see cref="TimeSpan"/> of <paramref name="source"/> minutes.
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    public static TimeSpan Minutes(this int source)
    {
        return TimeSpan.FromMinutes(source);
    }

    /// <summary>
    /// Builds a <see cref="TimeSpan"/> of <paramref name="source"/> hours.
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    public static TimeSpan Hours(this int source)
    {
        return new TimeSpan(0, source, 0, 0);
    }

    /// <summary>
    /// Builds a <see cref="TimeSpan"/> of <paramref name="source"/> days.
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    public static TimeSpan Days(this int source)
    {
        return new TimeSpan(source, 0, 0, 0);
    }

    /// <summary>
    /// Builds a <see cref="TimeSpan"/> of exactly one second; <paramref name="source"/> must be 1.
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    public static TimeSpan Second(this int source)
    {
        if (source == 1)
        {
            return TimeSpan.FromSeconds(1);
        }
        throw new ArgumentOutOfRangeException("source", source, "'source' must have value of '1'.");
    }

    /// <summary>
    /// Builds a <see cref="TimeSpan"/> of <paramref name="source"/> seconds.
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    public static TimeSpan Seconds(this int source)
    {
        return TimeSpan.FromSeconds(source);
    }

    /// <summary>
    /// Builds a <see cref="TimeSpan"/> of <paramref name="source"/> milliseconds.
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    public static TimeSpan Milliseconds(this int source)
    {
        return TimeSpan.FromMilliseconds(source);
    }

    /// <summary>
    /// Returns the moment that lies <paramref name="source"/> before now.
    /// <example>
    /// 50.Seconds().Ago();
    /// </example>
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    public static DateTime Ago(this TimeSpan source)
    {
        return DateTime.Now.Add(-source);
    }

    /// <summary>
    /// Doubles a <see cref="TimeSpan"/>.
    /// </summary>
    /// <param name="source"></param>
    /// <returns></returns>
    public static TimeSpan Double(this TimeSpan source)
    {
        return source.Add(source);
    }

    /// <summary>
    /// Rounds a <see cref="TimeSpan"/> to the nearest hour (>= 30 minutes rounds up).
    /// </summary>
    /// <param name="timeSpan"></param>
    /// <returns></returns>
    public static TimeSpan RoundToHours(this TimeSpan timeSpan)
    {
        var truncated = new TimeSpan(timeSpan.Days, timeSpan.Hours, 0, 0);
        return timeSpan.Minutes >= 30 ? truncated + 1.Hours() : truncated;
    }

    /// <summary>
    /// Rounds a <see cref="TimeSpan"/> to the nearest minute (>= 30 seconds rounds up).
    /// </summary>
    /// <param name="timeSpan"></param>
    /// <returns></returns>
    public static TimeSpan RoundToMinutes(this TimeSpan timeSpan)
    {
        var truncated = new TimeSpan(timeSpan.Days, timeSpan.Hours, timeSpan.Minutes, 0);
        return timeSpan.Seconds >= 30 ? truncated + 1.Minutes() : truncated;
    }

    /// <summary>
    /// Rounds a <see cref="TimeSpan"/> to the nearest second (>= 500 ms rounds up).
    /// </summary>
    /// <param name="timeSpan"></param>
    /// <returns></returns>
    public static TimeSpan RoundToSeconds(this TimeSpan timeSpan)
    {
        var truncated = new TimeSpan(timeSpan.Days, timeSpan.Hours, timeSpan.Minutes, timeSpan.Seconds);
        return timeSpan.Milliseconds >= 500 ? truncated + 1.Seconds() : truncated;
    }

    /// <summary>
    /// Returns <paramref name="value"/> million.
    /// </summary>
    /// <param name="value"></param>
    /// <returns></returns>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2233:OperationsShouldNotOverflow", MessageId = "value*1000000")]
    public static int Million(this int value)
    {
        return 1000000*value;
    }

    /// <summary>
    /// Returns <paramref name="value"/> thousand.
    /// </summary>
    /// <param name="value"></param>
    /// <returns></returns>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2233:OperationsShouldNotOverflow", MessageId = "value*1000")]
    public static int Thousand(this int value)
    {
        return 1000*value;
    }

    /// <summary>
    /// Formats a <see cref="TimeSpan"/> as a short English phrase using its two most
    /// significant rounded components.
    /// </summary>
    /// <param name="timeSpan"></param>
    /// <returns></returns>
    public static string ToEnglishString(this TimeSpan timeSpan)
    {
        if (timeSpan.TotalDays > 1)
        {
            var rounded = timeSpan.RoundToHours();
            return string.Format("{0} days and {1} hours", rounded.Days, rounded.Hours);
        }
        if (timeSpan.TotalHours > 1)
        {
            var rounded = timeSpan.RoundToMinutes();
            return string.Format("{0} hours and {1} minutes", rounded.Hours, rounded.Minutes);
        }
        if (timeSpan.TotalMinutes > 1)
        {
            var rounded = timeSpan.RoundToSeconds();
            return string.Format("{0} minutes and {1} seconds", rounded.Minutes, rounded.Seconds);
        }
        if (timeSpan.TotalSeconds > 1)
        {
            return string.Format("{0} seconds", timeSpan.TotalSeconds);
        }
        return string.Format("{0} milliseconds", timeSpan.Milliseconds);
    }
}
}
| |
// Created by Paul Gonzalez Becerra
using System;
using System.Runtime.InteropServices;
using Saserdote.Mathematics.Collision;
namespace Saserdote.Mathematics
{
[StructLayout(LayoutKind.Sequential)]
public struct Point2f
{
#region --- Field Variables ---
// Coordinates of the point (single precision).
public float x;
public float y;
// The origin point (0, 0).
public readonly static Point2f ORIGIN= new Point2f(0f);
#endregion // Field Variables
#region --- Constructors ---
// Creates a 2D point from the given x and y coordinates.
public Point2f(float pmX, float pmY)
{
x= pmX;
y= pmY;
}
internal Point2f(float all):this(all, all) {}
#endregion // Constructors
#region --- Methods ---
// Converts the point into an integer point, truncating each coordinate.
public Point2i toPoint2i()
{
return new Point2i((int)x, (int) y);
}
// Converts the 2d point into a 3d point with z = 0.
public Point3f toPoint3f()
{
return new Point3f(x, y, 0f);
}
// Converts the 2d point into a 3d integer point (z = 0), truncating each coordinate.
// (Original comment said "3d point into a 3d point", which was a typo.)
public Point3i toPoint3i()
{
return new Point3i((int)x, (int)y, 0);
}
// Converts the point into a 3d vector with z = 0.
public Vector3 toVector3()
{
return new Vector3(x, y, 0f);
}
// Converts the point into a 2d vector.
public Vector2 toVector2()
{
return new Vector2(x, y);
}
// Adds the vector to the point (x/y only; the vector's z is ignored).
public Point2f add(Vector3 vec)
{
return new Point2f(x+vec.x, y+vec.y);
}
// Adds the 2d vector to the point to get another point.
public Point2f add(Vector2 vec)
{
return new Point2f(x+vec.x, y+vec.y);
}
// Adds the point with a size to get another point
public Point2f add(Size3f size)
{
return new Point2f(x+size.width, y+size.height);
}
// Adds the point with a size to get another point
public Point2f add(Size3i size)
{
return new Point2f(x+(float)size.width, y+(float)size.height);
}
// Adds the point with a size to get another point
public Point2f add(Size2f size)
{
return new Point2f(x+size.width, y+size.height);
}
// Adds the point with a size to get another point
public Point2f add(Size2i size)
{
return new Point2f(x+(float)size.width, y+(float)size.height);
}
// Subtracts the point with the vector to get another point
public Point2f subtract(Vector3 vec)
{
return new Point2f(x-vec.x, y-vec.y);
}
// Subtracts the point with the vector to get another point
public Point2f subtract(Vector2 vec)
{
return new Point2f(x-vec.x, y-vec.y);
}
// Subtracts the point with a size to get another point
public Point2f subtract(Size3f size)
{
return new Point2f(x-size.width, y-size.height);
}
// Subtracts the point with a size to get another point
public Point2f subtract(Size3i size)
{
return new Point2f(x-(float)size.width, y-(float)size.height);
}
// Subtracts the point with a size to get another point
public Point2f subtract(Size2f size)
{
return new Point2f(x-size.width, y-size.height);
}
// Subtracts the point with a size to get another point
public Point2f subtract(Size2i size)
{
return new Point2f(x-(float)size.width, y-(float)size.height);
}
// Subtracts the two points to get a vector pointing in between both
public Vector2 subtract(Point2f pt)
{
return new Vector2(x-pt.x, y-pt.y);
}
// Subtracts the two points to get a vector pointing in between both
public Vector2 subtract(Point2i pt)
{
return new Vector2(x-(float)pt.x, y-(float)pt.y);
}
// Subtracts the two points to get a vector pointing in between both
public Vector3 subtract(Point3f pt)
{
return new Vector3(x-pt.x, y-pt.y, 0f-pt.z);
}
// Subtracts the two points to get a vector pointing in between both
public Vector3 subtract(Point3i pt)
{
return new Vector3(x-(float)pt.x, y-(float)pt.y, 0f-(float)pt.z);
}
// Gets the midpoint of the two points
public Point3f getMidpoint(Point3f pt)
{
return new Point3f((x+pt.x)/2f, (y+pt.y)/2f, (0f+pt.z)/2f);
}
// Gets the midpoint of the two points
public Point3f getMidpoint(Point3i pt)
{
return new Point3f((x+(float)pt.x)/2f, (y+(float)pt.y)/2f, (0f+(float)pt.z)/2f);
}
// Gets the midpoint of the two points
public Point2f getMidpoint(Point2f pt)
{
return new Point2f((x+pt.x)/2f, (y+pt.y)/2f);
}
// Gets the midpoint of the two points
public Point2f getMidpoint(Point2i pt)
{
return new Point2f((x+(float)pt.x)/2f, (y+(float)pt.y)/2f);
}
// Finds if the two points are equal
public bool equals(Point2f pt)
{
return (x== pt.x && y== pt.y);
}
// Finds if the two points are equal
public bool equals(Point2i pt)
{
return ((int)x== pt.x && (int)y== pt.y);
}
#endregion // Methods
#region --- Inherited Methods ---
// Finds out if the given object is equal to the point
public override bool Equals(object obj)
{
if(obj== null)
return false;
if(obj is Point2f)
return equals((Point2f)obj);
if(obj is Point2i)
return equals((Point2i)obj);
return false;
}
// Gets the hash code
public override int GetHashCode()
{
return ((int)x^(int)y);
}
// Prints out the contents of the point
public override string ToString()
{
return "X:"+x+",Y:"+y;
}
#endregion // Inherited Methods
#region --- Operators ---
// Equality operators
public static bool operator ==(Point2f left, Point2f right)
{
return left.equals(right);
}
public static bool operator ==(Point2f left, Point2i right)
{
return left.equals(right);
}
// Inequality operators
public static bool operator !=(Point2f left, Point2f right)
{
return !left.equals(right);
}
public static bool operator !=(Point2f left, Point2i right)
{
return !left.equals(right);
}
// Addition operators
public static Point2f operator +(Point2f left, Vector3 right)
{
return left.add(right);
}
public static Point2f operator +(Point2f left, Vector2 right)
{
return left.add(right);
}
public static Point2f operator +(Point2f left, Size3f right)
{
return left.add(right);
}
public static Point2f operator +(Point2f left, Size3i right)
{
return left.add(right);
}
public static Point2f operator +(Point2f left, Size2f right)
{
return left.add(right);
}
public static Point2f operator +(Point2f left, Size2i right)
{
return left.add(right);
}
// Subtration operators
public static Point2f operator -(Point2f left, Vector3 right)
{
return left.subtract(right);
}
public static Point2f operator -(Point2f left, Vector2 right)
{
return left.subtract(right);
}
public static Point2f operator -(Point2f left, Size3f right)
{
return left.subtract(right);
}
public static Point2f operator -(Point2f left, Size3i right)
{
return left.subtract(right);
}
public static Point2f operator -(Point2f left, Size2f right)
{
return left.subtract(right);
}
public static Point2f operator -(Point2f left, Size2i right)
{
return left.subtract(right);
}
public static Vector3 operator -(Point2f left, Point3f right)
{
return left.subtract(right);
}
public static Vector3 operator -(Point2f left, Point3i right)
{
return left.subtract(right);
}
public static Vector2 operator -(Point2f left, Point2f right)
{
return left.subtract(right);
}
public static Vector2 operator -(Point2f left, Point2i right)
{
return left.subtract(right);
}
// Multiplication operators
public static bool operator *(Point2f left, BoundingVolume right)
{
return right.contains(left);
}
// Unkown name operators
public static Point3f operator |(Point2f left, Point3f right)
{
return left.getMidpoint(right);
}
public static Point3f operator |(Point2f left, Point3i right)
{
return left.getMidpoint(right);
}
public static Point2f operator |(Point2f left, Point2f right)
{
return left.getMidpoint(right);
}
public static Point2f operator |(Point2f left, Point2i right)
{
return left.getMidpoint(right);
}
// Conversion operators
// [Point2f to Vector3]
public static explicit operator Vector3(Point2f castee)
{
return castee.toVector3();
}
// [Point2f to Vector2]
public static explicit operator Vector2(Point2f castee)
{
return castee.toVector2();
}
// [Point2f to Point3f]
public static explicit operator Point3f(Point2f castee)
{
return castee.toPoint3f();
}
// [Point2f to Point3i]
public static explicit operator Point3i(Point2f castee)
{
return castee.toPoint3i();
}
// [Point2f to Point2i]
public static implicit operator Point2i(Point2f castee)
{
return castee.toPoint2i();
}
#endregion // Operators
}
}
// End of File
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Drawing2D;
namespace Platform.Presentation.Forms
{
/// <summary>
/// Graphics helper class.
/// </summary>
public sealed class GraphicsHelper
{
    /// <summary>
    /// Initializes a new instance of the GraphicsHelper class.
    /// Private so the class acts purely as a holder of static helpers.
    /// </summary>
    private GraphicsHelper()
    {
    }

    /// <summary>
    /// Helper to tile fill an unscaled image vertically.
    /// </summary>
    /// <param name="graphics">Graphics context in which the image is to be tiled.</param>
    /// <param name="image">The image to tile.</param>
    /// <param name="rectangle">The rectangle to fill.</param>
    public static void TileFillUnscaledImageVertically(Graphics graphics, Image image, Rectangle rectangle)
    {
        // Tile while there is height left to fill.
        int fillWidth = Math.Min(image.Width, rectangle.Width);
        for (int y = rectangle.Y; y < rectangle.Bottom;)
        {
            // Calculate the fill height for this iteration (the last tile may be clipped).
            int fillHeight = Math.Min(image.Height, rectangle.Bottom - y);

            // Fill the fill height with the image.
            graphics.DrawImage(image,
                new Rectangle(rectangle.X, y, fillWidth, fillHeight),
                new Rectangle(0, 0, fillWidth, fillHeight),
                GraphicsUnit.Pixel);

            // Adjust the y position for the next loop iteration.
            y += fillHeight;
        }
    }

    /// <summary>
    /// Helper to tile fill an unscaled image vertically.
    /// </summary>
    /// <param name="graphics">Graphics context in which the image is to be tiled.</param>
    /// <param name="image">The image to tile.</param>
    /// <param name="srcRectangle">The source rectangle in the image.</param>
    /// <param name="destRectangle">The destination rectangle to fill.</param>
    [Obsolete("Slow. Use OpenLiveWriter.CoreServices.UI.BorderPaint instead", false)]
    public static void TileFillUnscaledImageVertically(Graphics graphics, Image image, Rectangle srcRectangle, Rectangle destRectangle)
    {
        // Tile while there is height left to fill.
        int fillWidth = Math.Min(srcRectangle.Width, destRectangle.Width);
        for (int y = destRectangle.Y; y < destRectangle.Bottom;)
        {
            // Calculate the fill height for this iteration (the last tile may be clipped).
            int fillHeight = Math.Min(srcRectangle.Height, destRectangle.Bottom - y);

            // Fill the fill height with the image.
            graphics.DrawImage(image,
                new Rectangle(destRectangle.X, y, fillWidth, fillHeight),
                new Rectangle(srcRectangle.X, srcRectangle.Y, fillWidth, fillHeight),
                GraphicsUnit.Pixel);

            // Adjust the y position for the next loop iteration.
            y += fillHeight;
        }
    }

    /// <summary>
    /// Helper to tile fill an unscaled image horizontally.
    /// </summary>
    /// <param name="graphics">Graphics context in which the image is to be tiled.</param>
    /// <param name="image">The image to tile.</param>
    /// <param name="rectangle">The rectangle to fill.</param>
    public static void TileFillUnscaledImageHorizontally(Graphics graphics, Image image, Rectangle rectangle)
    {
        // Tile while there is width left to fill.
        int fillHeight = Math.Min(image.Height, rectangle.Height);
        for (int x = rectangle.X; x < rectangle.Right;)
        {
            // Calculate the fill width for this iteration (the last tile may be clipped).
            int fillWidth = Math.Min(image.Width, rectangle.Right - x);

            // Fill the fill width with the image.
            graphics.DrawImage(image,
                new Rectangle(x, rectangle.Y, fillWidth, fillHeight),
                new Rectangle(0, 0, fillWidth, fillHeight),
                GraphicsUnit.Pixel);

            // Adjust the x position for the next loop iteration.
            x += fillWidth;
        }
    }

    /// <summary>
    /// Helper to tile fill an unscaled image horizontally.
    /// </summary>
    /// <param name="graphics">Graphics context in which the image is to be tiled.</param>
    /// <param name="image">The image to tile.</param>
    /// <param name="srcRectangle">The source rectangle in the image.</param>
    /// <param name="destRectangle">The destination rectangle to fill.</param>
    [Obsolete("Slow. Use OpenLiveWriter.CoreServices.UI.BorderPaint instead", false)]
    public static void TileFillUnscaledImageHorizontally(Graphics graphics, Image image, Rectangle srcRectangle, Rectangle destRectangle)
    {
        // Tile while there is width left to fill.
        int fillHeight = Math.Min(srcRectangle.Height, destRectangle.Height);
        for (int x = destRectangle.X; x < destRectangle.Right;)
        {
            // Calculate the fill width for this iteration (the last tile may be clipped).
            int fillWidth = Math.Min(srcRectangle.Width, destRectangle.Right - x);

            // Fill the fill width with the image.
            graphics.DrawImage(image,
                new Rectangle(x, destRectangle.Y, fillWidth, fillHeight),
                new Rectangle(srcRectangle.X, srcRectangle.Y, fillWidth, fillHeight),
                GraphicsUnit.Pixel);

            // Adjust the x position for the next loop iteration.
            x += fillWidth;
        }
    }

    /// <summary>
    /// Tile fills the rectangle horizontally, scaling each tile of the image
    /// to the rectangle's height (RTL-aware via BidiGraphics).
    /// </summary>
    /// <param name="graphics">Graphics context in which the image is to be tiled.</param>
    /// <param name="image">The image to tile.</param>
    /// <param name="rectangle">The rectangle to fill.</param>
    public static void TileFillScaledImageHorizontally(BidiGraphics graphics, Image image, Rectangle rectangle)
    {
        Rectangle imageRectangle = new Rectangle(Point.Empty, image.Size);
        TileFillScaledImageHorizontally(graphics, image, rectangle, imageRectangle);
    }

    private static void TileFillScaledImageHorizontally(BidiGraphics graphics, Image image, Rectangle rectangle, Rectangle srcRectangle)
    {
        // Each tile is drawn at the full rectangle height; the final tile is clipped in width.
        for (int x = rectangle.X; x < rectangle.Right; x += srcRectangle.Width)
            graphics.DrawImage(true,
                image,
                new Rectangle(x, rectangle.Y, Math.Min(srcRectangle.Width, rectangle.Right - x), rectangle.Height),
                srcRectangle,
                GraphicsUnit.Pixel);
    }

    /// <summary>
    /// Slices an image of the given size into a 9-element grid of rectangles using the
    /// two vertical (vert1, vert2) and two horizontal (horiz1, horiz2) cut positions.
    /// </summary>
    /// <returns>
    /// Nine rectangles in row-major order:
    /// [0] top-left, [1] top-center, [2] top-right,
    /// [3] left, [4] middle, [5] right,
    /// [6] bottom-left, [7] bottom-center, [8] bottom-right.
    /// </returns>
    public static Rectangle[] SliceCompositedImageBorder(Size imgSize, int vert1, int vert2, int horiz1, int horiz2)
    {
        int left = 0, center = vert1, right = vert2, x4 = imgSize.Width;
        int top = 0, middle = horiz1, bottom = horiz2, y4 = imgSize.Height;
        int leftWidth = center, centerWidth = right - center, rightWidth = x4 - right;
        int topHeight = middle, middleHeight = bottom - middle, bottomHeight = y4 - bottom;
        return new Rectangle[]
        {
            // top left
            new Rectangle(left, top, leftWidth, topHeight),
            // top center
            new Rectangle(center, top, centerWidth, topHeight),
            // top right
            new Rectangle(right, top, rightWidth, topHeight),
            // left
            new Rectangle(left, middle, leftWidth, middleHeight),
            // middle
            new Rectangle(center, middle, centerWidth, middleHeight),
            // right
            new Rectangle(right, middle, rightWidth, middleHeight),
            // bottom left
            new Rectangle(left, bottom, leftWidth, bottomHeight),
            // bottom center
            new Rectangle(center, bottom, centerWidth, bottomHeight),
            // bottom right
            new Rectangle(right, bottom, rightWidth, bottomHeight)
        };
    }

    // Slow. Use OpenLiveWriter.CoreServices.UI.BorderPaint if performance matters
    public static void DrawLeftCenterRightImageBorder(
        BidiGraphics graphics,
        Rectangle rectangle,
        Image image,
        Rectangle leftSlice,
        Rectangle centerSlice,
        Rectangle rightSlice)
    {
        GraphicsContainer graphicsContainer = graphics.Graphics.BeginContainer();

        // Have to remove this line because it messes with mirrored images.
        // Specifically, right-to-left drawing of the hover effect for the context menu dongle
        // hanging off "Save Draft" doesn't happen at all. Seems like a short-circuit happens
        // in Graphics when the image's location is outside the clipping area.
        //graphics.Graphics.SetClip(rectangle);

        graphics.Graphics.CompositingMode = CompositingMode.SourceOver;
        graphics.Graphics.CompositingQuality = CompositingQuality.HighQuality;

        // Left cap, stretched to the rectangle height.
        graphics.DrawImage(
            true,
            image,
            new Rectangle(rectangle.Left, rectangle.Top, leftSlice.Width, rectangle.Height),
            leftSlice.Left, leftSlice.Top, leftSlice.Width, leftSlice.Height,
            GraphicsUnit.Pixel
            );

        // Center fill between the caps.
        TileFillScaledImageHorizontally(
            graphics,
            image,
            new Rectangle(rectangle.Left + leftSlice.Width, rectangle.Top, Math.Max(0, rectangle.Width - leftSlice.Width - rightSlice.Width), rectangle.Height),
            centerSlice);

        // Right cap, stretched to the rectangle height.
        graphics.DrawImage(
            true,
            image,
            new Rectangle(rectangle.Right - rightSlice.Width, rectangle.Top, rightSlice.Width, rectangle.Height),
            rightSlice.Left, rightSlice.Top, rightSlice.Width, rightSlice.Height,
            GraphicsUnit.Pixel
            );

        graphics.Graphics.EndContainer(graphicsContainer);
    }

    /// <summary>
    /// Draws a composited image border from a slice array as produced by
    /// <see cref="SliceCompositedImageBorder"/>.
    /// </summary>
    [Obsolete("Slow. Use OpenLiveWriter.CoreServices.UI.BorderPaint instead", false)]
    public static void DrawCompositedImageBorder(
        Graphics graphics,
        Rectangle rectangle,
        Image image,
        Rectangle[] slices)
    {
        // The slices array layout (see SliceCompositedImageBorder) is:
        // [0] top-left, [1] top-center, [2] top-right,
        // [3] left,     [4] middle,     [5] right,
        // [6] bottom-left, [7] bottom-center, [8] bottom-right.
        // The border overload takes no middle (fill) slice, so index 4 is skipped.
        // FIX: previously this forwarded slices[0..7], which misassigned every
        // slice from the right-center onwards.
        DrawCompositedImageBorder(graphics, rectangle, image,
            slices[0],
            slices[1],
            slices[2],
            slices[3],
            slices[5],
            slices[6],
            slices[7],
            slices[8]);
    }

    /// <summary>
    /// Draws a composited image border.
    /// </summary>
    /// <remarks>
    /// Note that because it would be too computationally expensive, it is assumed that images
    /// will fit into the specified rectangle.
    /// </remarks>
    /// <param name="graphics">A graphics context into which the image-based border is to be drawn.</param>
    /// <param name="rectangle">The rectangle into which the image-based border is to be drawn.</param>
    /// <param name="image">The image the slice rectangles refer to.</param>
    /// <param name="topLeftRectangle">The top left rectangle.</param>
    /// <param name="topCenterRectangle">The top center (fill) rectangle.</param>
    /// <param name="topRightRectangle">The top right rectangle.</param>
    /// <param name="leftCenterRectangle">The left center (fill) rectangle.</param>
    /// <param name="rightCenterRectangle">The right center (fill) rectangle.</param>
    /// <param name="bottomLeftRectangle">The bottom left rectangle.</param>
    /// <param name="bottomCenterRectangle">The bottom center (fill) rectangle.</param>
    /// <param name="bottomRightRectangle">The bottom right rectangle.</param>
    [Obsolete("Slow. Use OpenLiveWriter.CoreServices.UI.BorderPaint instead", false)]
    public static void DrawCompositedImageBorder
    (
        Graphics graphics,
        Rectangle rectangle,
        Image image,
        Rectangle topLeftRectangle,
        Rectangle topCenterRectangle,
        Rectangle topRightRectangle,
        Rectangle leftCenterRectangle,
        Rectangle rightCenterRectangle,
        Rectangle bottomLeftRectangle,
        Rectangle bottomCenterRectangle,
        Rectangle bottomRightRectangle
    )
    {
        Rectangle fillRectangle;

        // Save a graphics container with the current state of the graphics object and open
        // and use a new, clipped graphics container.
        GraphicsContainer graphicsContainer = graphics.BeginContainer();
        graphics.SetClip(rectangle);
        graphics.CompositingMode = CompositingMode.SourceOver;
        graphics.CompositingQuality = CompositingQuality.HighQuality;

        if (HasArea(topLeftRectangle))
        {
            // Top left.
            graphics.DrawImage(image,
                new Rectangle(rectangle.X, rectangle.Y, topLeftRectangle.Width, topLeftRectangle.Height),
                topLeftRectangle,
                GraphicsUnit.Pixel);
        }

        if (HasArea(topCenterRectangle))
        {
            // Top center.
            fillRectangle = new Rectangle(rectangle.X + topLeftRectangle.Width,
                rectangle.Y,
                rectangle.Width - (topLeftRectangle.Width + topRightRectangle.Width),
                topCenterRectangle.Height);
            TileFillUnscaledImageHorizontally(graphics, image, topCenterRectangle, fillRectangle);
        }

        if (HasArea(topRightRectangle))
        {
            // Top right.
            graphics.DrawImage(image,
                new Rectangle(rectangle.Right - topRightRectangle.Width, rectangle.Y, topRightRectangle.Width, topRightRectangle.Height),
                topRightRectangle,
                GraphicsUnit.Pixel);
        }

        if (HasArea(leftCenterRectangle))
        {
            // Left center.
            fillRectangle = new Rectangle(rectangle.X,
                rectangle.Y + topLeftRectangle.Height,
                leftCenterRectangle.Width,
                rectangle.Height - (topLeftRectangle.Height + bottomLeftRectangle.Height));
            TileFillUnscaledImageVertically(graphics, image, leftCenterRectangle, fillRectangle);
        }

        if (HasArea(rightCenterRectangle))
        {
            // Right center.
            fillRectangle = new Rectangle(rectangle.Right - rightCenterRectangle.Width,
                rectangle.Y + topRightRectangle.Height,
                rightCenterRectangle.Width,
                rectangle.Height - (topRightRectangle.Height + bottomRightRectangle.Height));
            TileFillUnscaledImageVertically(graphics, image, rightCenterRectangle, fillRectangle);
        }

        if (HasArea(bottomLeftRectangle))
        {
            // Bottom left.
            graphics.DrawImage(image,
                new Rectangle(rectangle.X, rectangle.Bottom - bottomLeftRectangle.Height, bottomLeftRectangle.Width, bottomLeftRectangle.Height),
                bottomLeftRectangle,
                GraphicsUnit.Pixel);
        }

        if (HasArea(bottomCenterRectangle))
        {
            // Bottom center.
            fillRectangle = new Rectangle(rectangle.X + bottomLeftRectangle.Width,
                rectangle.Bottom - bottomCenterRectangle.Height,
                rectangle.Width - (bottomLeftRectangle.Width + bottomRightRectangle.Width),
                bottomCenterRectangle.Height);
            TileFillUnscaledImageHorizontally(graphics, image, bottomCenterRectangle, fillRectangle);
        }

        if (HasArea(bottomRightRectangle))
        {
            // Bottom right.
            graphics.DrawImage(image,
                new Rectangle(rectangle.Right - bottomRightRectangle.Width, rectangle.Bottom - bottomRightRectangle.Height, bottomRightRectangle.Width, bottomRightRectangle.Height),
                bottomRightRectangle,
                GraphicsUnit.Pixel);
        }

        // End the graphics container.
        graphics.EndContainer(graphicsContainer);
    }

    // True when the rectangle covers at least one pixel.
    private static bool HasArea(Rectangle rectangle)
    {
        return rectangle.Height > 0 && rectangle.Width > 0;
    }

    /// <summary>
    /// Draws a composited image border from eight separate images.
    /// </summary>
    /// <remarks>
    /// Note that because it would be too computationally expensive, it is assumed that images
    /// will fit into the specified rectangle without overlapping.
    /// </remarks>
    /// <param name="graphics">A graphics context into which the image-based border is to be drawn.</param>
    /// <param name="rectangle">The rectangle into which the image-based border is to be drawn.</param>
    /// <param name="topLeftImage">The top left image.</param>
    /// <param name="topCenterImage">The top center (fill) image.</param>
    /// <param name="topRightImage">The top right image.</param>
    /// <param name="leftCenterImage">The left center (fill) image.</param>
    /// <param name="rightCenterImage">The right center (fill) image.</param>
    /// <param name="bottomLeftImage">The bottom left image.</param>
    /// <param name="bottomCenterImage">The bottom center (fill) image.</param>
    /// <param name="bottomRightImage">The bottom right image.</param>
    public static void DrawCompositedImageBorder
    (
        Graphics graphics,
        Rectangle rectangle,
        Image topLeftImage,
        Image topCenterImage,
        Image topRightImage,
        Image leftCenterImage,
        Image rightCenterImage,
        Image bottomLeftImage,
        Image bottomCenterImage,
        Image bottomRightImage
    )
    {
        Rectangle fillRectangle;

        // Save a graphics container with the current state of the graphics object and open
        // and use a new, clipped graphics container.
        GraphicsContainer graphicsContainer = graphics.BeginContainer();
        graphics.SetClip(rectangle);

        // Top left.
        graphics.DrawImageUnscaled(topLeftImage, rectangle.X, rectangle.Y);

        // Top center.
        fillRectangle = new Rectangle(rectangle.X + topLeftImage.Width,
            rectangle.Y,
            rectangle.Width - (topLeftImage.Width + topRightImage.Width),
            topCenterImage.Height);
        TileFillUnscaledImageHorizontally(graphics, topCenterImage, fillRectangle);

        // Top right.
        graphics.DrawImageUnscaled(topRightImage, rectangle.Right - topRightImage.Width, rectangle.Y);

        // Left center.
        fillRectangle = new Rectangle(rectangle.X,
            rectangle.Y + topLeftImage.Height,
            leftCenterImage.Width,
            rectangle.Height - (topLeftImage.Height + bottomLeftImage.Height));
        TileFillUnscaledImageVertically(graphics, leftCenterImage, fillRectangle);

        // Right center.
        fillRectangle = new Rectangle(rectangle.Right - rightCenterImage.Width,
            rectangle.Y + topRightImage.Height,
            rightCenterImage.Width,
            rectangle.Height - (topRightImage.Height + bottomRightImage.Height));
        TileFillUnscaledImageVertically(graphics, rightCenterImage, fillRectangle);

        // Bottom left.
        graphics.DrawImageUnscaled(bottomLeftImage, rectangle.X, rectangle.Bottom - bottomLeftImage.Height);

        // Bottom center.
        fillRectangle = new Rectangle(rectangle.X + bottomLeftImage.Width,
            rectangle.Bottom - bottomCenterImage.Height,
            rectangle.Width - (bottomLeftImage.Width + bottomRightImage.Width),
            bottomCenterImage.Height);
        TileFillUnscaledImageHorizontally(graphics, bottomCenterImage, fillRectangle);

        // Bottom right.
        graphics.DrawImageUnscaled(bottomRightImage, rectangle.Right - bottomRightImage.Width, rectangle.Bottom - bottomRightImage.Height);

        // End the graphics container.
        graphics.EndContainer(graphicsContainer);
    }

    /// <summary>
    /// Converts an opacity percent between 0.0 and 100.0, inclusive, into an alpha component
    /// value between 0 and 255.
    /// </summary>
    /// <param name="opacity">Opacity percent between 0.0 and 100.0.</param>
    /// <returns>Alpha component value between 0 and 255; 255 for out-of-range input.</returns>
    public static int Opacity(double opacity)
    {
        Debug.Assert(opacity >= 0.0 && opacity <= 100.0, "Invalid opacity specified", "Specify opacity as a value between 0.0 and 100.0, inclusive.");
        if (opacity >= 0.0 && opacity <= 100.0)
            return Convert.ToInt32((255.0 * opacity) / 100.0);
        else
            return 255;
    }

    /// <summary>
    /// Translates the graphics context so that drawing at src coordinates lands at dest.
    /// Dispose the returned object to restore the previous transform.
    /// </summary>
    public static IDisposable Offset(Graphics g, Rectangle dest, Rectangle src)
    {
        Debug.Assert(dest.Size.Equals(src.Size), "Can't offset with rectangles of unequal sizes");
        return Offset(g, dest.Location.X - src.Location.X, dest.Location.Y - src.Location.Y);
    }

    /// <summary>
    /// Translates the graphics context by (x, y).
    /// Dispose the returned object to restore the previous transform.
    /// </summary>
    public static IDisposable Offset(Graphics g, int x, int y)
    {
        GraphicsState graphicsState = g.Save();
        g.TranslateTransform(x, y);
        return new GraphicsStateRestorer(g, graphicsState);
    }

    // Restores a saved GraphicsState on Dispose (idempotent).
    private class GraphicsStateRestorer : IDisposable
    {
        private readonly Graphics graphics;
        private readonly GraphicsState graphicsState;
        private bool disposed = false;

        public GraphicsStateRestorer(Graphics graphics, GraphicsState graphicsState)
        {
            this.graphics = graphics;
            this.graphicsState = graphicsState;
        }

        public void Dispose()
        {
            if (!disposed)
            {
                disposed = true;
                graphics.Restore(graphicsState);
            }
        }
    }
}
}
| |
/*
* Magix - A Web Application Framework for Humans
* Copyright 2010 - 2014 - thomas@magixilluminate.com
* Magix is licensed as MITx11, see enclosed License.txt File for Details.
*/
using System;
using Magix.Core;
using System.Globalization;
namespace Magix.execute
{
/*
 * if/else-if/else hyperlisp active events.
 * The three handlers coordinate through a "_state_if" node in the event
 * parameters: when an [if]/[else-if] body executes, the DNA of its parent
 * scope is pushed so following [else-if]/[else] siblings know to skip.
 */
public class IfElseCore : ActiveController
{
    /*
     * if implementation: runs its [code] block when the comparison
     * expression on the node evaluates to true
     */
    [ActiveEvent(Name = "magix.execute.if")]
    public static void magix_execute_if(object sender, ActiveEventArgs e)
    {
        Node ip = Ip(e.Params, true);
        if (ShouldInspect(ip))
        {
            // inspection mode: emit documentation and sample code instead of executing
            AppendInspectFromResource(
                ip["inspect"],
                "Magix.execute",
                "Magix.execute.hyperlisp.inspect.hl",
                "[magix.execute.if-dox].value");
            AppendCodeFromResource(
                ip,
                "Magix.execute",
                "Magix.execute.hyperlisp.inspect.hl",
                "[magix.execute.if-sample]");
            return;
        }
        IfElseIfImplementation(
            e.Params,
            "magix.execute.if");
    }

    /*
     * else-if implementation: runs only when the preceding [if]/[else-if]
     * in the same scope did not execute
     */
    [ActiveEvent(Name = "magix.execute.else-if")]
    public static void magix_execute_else_if(object sender, ActiveEventArgs e)
    {
        Node ip = Ip(e.Params, true);
        if (ShouldInspect(ip))
        {
            // inspection mode: emit documentation and sample code instead of executing
            AppendInspectFromResource(
                ip["inspect"],
                "Magix.execute",
                "Magix.execute.hyperlisp.inspect.hl",
                "[magix.execute.else-if-dox].value");
            AppendCodeFromResource(
                ip,
                "Magix.execute",
                "Magix.execute.hyperlisp.inspect.hl",
                "[magix.execute.else-if-sample]");
            return;
        }
        // checking syntax
        VerifySyntaxElseIf(ip);
        // making sure previous [if] or [else-if] didn't execute before we run comparison to see if we should execute body of [else-if]
        if (!CheckState(e.Params))
            IfElseIfImplementation(
                e.Params,
                "magix.execute.else-if");
        else
        {
            // checking to see if next keyword is [else] or [else-if], and if not, we remove signaling state ("_state_if") from state
            Node next = ip.Next();
            if (next == null || (next.Name != "else-if" && next.Name != "magix.execute.else-if"
                && next.Name != "else" && next.Name != "magix.execute.else"))
                PopState(e.Params, ip);
        }
    }

    /*
     * else implementation: runs only when no preceding [if]/[else-if]
     * in the chain executed
     */
    [ActiveEvent(Name = "magix.execute.else")]
    public static void magix_execute_else(object sender, ActiveEventArgs e)
    {
        Node ip = Ip(e.Params, true);
        if (ShouldInspect(ip))
        {
            // inspection mode: emit documentation and sample code instead of executing
            AppendInspectFromResource(
                ip["inspect"],
                "Magix.execute",
                "Magix.execute.hyperlisp.inspect.hl",
                "[magix.execute.else-dox].value");
            AppendCodeFromResource(
                ip,
                "Magix.execute",
                "Magix.execute.hyperlisp.inspect.hl",
                "[magix.execute.else-sample]");
            return;
        }
        // verifying an [else] is only preceded by an [if] or an [else-if]
        VerifySyntaxElse(ip);
        // saving state before we pop it to see if we should execute [else] body
        bool state = CheckState(e.Params);
        // removing signaling state ("_state_if") from state, since [else] always terminates the chain
        PopState(e.Params, ip);
        // checking to see if previous [if] or [else-if] executed, before we execute [else]
        if (!state)
            RaiseActiveEvent(
                "magix.execute",
                e.Params);
    }

    /*
     * verifies that [else-if] only comes after [if] or another [else-if]
     */
    private static void VerifySyntaxElseIf(Node ip)
    {
        Node previous = ip.Previous();
        if (previous == null || (previous.Name != "if" && previous.Name != "magix.execute.if" &&
            previous.Name != "else-if" && previous.Name != "magix.execute.else-if"))
            throw new HyperlispSyntaxErrorException("you cannot have an [else-if] statement without a matching [if]");
    }

    /*
     * verifies that [else] only comes after [if] or [else-if]
     */
    private static void VerifySyntaxElse(Node ip)
    {
        Node previous = ip.Previous();
        if (previous == null || (previous.Name != "if" && previous.Name != "magix.execute.if" &&
            previous.Name != "else-if" && previous.Name != "magix.execute.else-if"))
            throw new HyperlispSyntaxErrorException("you cannot have an [else] statement without a matching if");
    }

    /*
     * helper for executing [if]/[else-if]: validates the node layout,
     * evaluates the comparison, and on a match executes the [code] block
     * and records the scope as executed when an [else]/[else-if] follows
     */
    private static void IfElseIfImplementation(Node pars, string evt)
    {
        Node ip = Ip(pars);
        Node dp = Dp(pars);
        // verifying [if] or [else-if] has a [code] block beneath itself
        if (!ip.Contains("code"))
            throw new HyperlispSyntaxErrorException("you must supply a [code] node for your [" + evt + "] expressions");
        // verifying there's at least an [lhs] node beneath [if] or [else-if]
        if (!ip.Contains("lhs"))
            throw new HyperlispSyntaxErrorException("you must supply an [lhs] node for your [" + evt + "] expressions");
        // verifying there's an operator on [if] or [else-if]
        if (string.IsNullOrEmpty(ip.Get<string>()))
            throw new HyperlispSyntaxErrorException("you must supply an operator for your [" + evt + "] expressions as Value of [" + evt + "]");
        // checking statement to see if it's true, before we execute [code] block
        if (StatementHelper.CheckExpressions(ip, dp))
        {
            // yup, we've got a match, executing [code] block
            pars["_ip"].Value = ip["code"];
            RaiseActiveEvent(
                "magix.execute",
                pars);
            // checking to see if we should add state ("_state_if") to state such that no followup [else] or [else-if] gets executed
            Node next = ip.Next();
            if (next != null && (next.Name == "else-if" || next.Name == "magix.execute.else-if"
                || next.Name == "else" || next.Name == "magix.execute.else"))
                PushState(pars, ip);
        }
    }

    /*
     * checks to see if a previous [if]/[else-if] in the current scope
     * (identified by the parent node's DNA) has evaluated to true
     */
    private static bool CheckState(Node pars)
    {
        Node ip = Ip(pars);
        string currentScopeDna = ip.Parent.Dna;
        if (pars.Contains("_state_if") && pars["_state_if"].Contains(currentScopeDna))
            return true;
        return false;
    }

    /*
     * sets the state of the current scope of hyperlisp code to "executed", such that no following [else] or [else-if] executes
     */
    private static void PushState(Node pars, Node ip)
    {
        string currentScopeDna = ip.Parent.Dna;
        pars["_state_if"][currentScopeDna].Value = null;
    }

    /*
     * removes the state from the current scope of hyperlisp code,
     * dropping the "_state_if" container entirely once it is empty
     */
    private static void PopState(Node pars, Node ip)
    {
        string currentScopeDna = ip.Parent.Dna;
        pars["_state_if"][currentScopeDna].UnTie();
        if (pars["_state_if"].Count == 0)
            pars["_state_if"].UnTie();
    }
}
}
| |
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace MetroHash
{
/// <summary>
/// Metro Hash 128
/// </summary>
public sealed class MetroHash128
{
private const ulong K0 = 0xC83A91E1;
private const ulong K1 = 0x8648DBDB;
private const ulong K2 = 0x7BDEC03B;
private const ulong K3 = 0x2F5870A5;
private readonly byte[] _buffer;
private readonly ulong[] _firstTwoStates;
private readonly byte[] _result;
private int _bytes;
private ulong _fourthState;
private ulong _thirdState;
/// <summary>
/// Constructor for incremental version, call Update and FinalizeHash for full Hash
/// </summary>
/// <param name="seed">Seed</param>
public MetroHash128(ulong seed)
{
    _buffer = new byte[32];
    _result = new byte[16];

    // The first two 64-bit lanes are stored directly in the 16-byte result
    // buffer by reinterpreting it as a ulong[]. NOTE(review): Unsafe.As lies
    // about the array's element type; this works only because elements 0 and 1
    // (bytes 0-15) are ever accessed — confirm before changing the layout.
    _firstTwoStates = Unsafe.As<byte[], ulong[]>(ref _result);

    ref var firstState = ref _firstTwoStates[0];
    ref var secondState = ref _firstTwoStates[1];

    // Seed the four lanes. (The previous explicit `_thirdState = 0` /
    // `_fourthState = 0` assignments were removed: fields default to 0 and
    // are overwritten immediately below.)
    firstState = (seed - K0) * K3;
    secondState = (seed + K1) * K2;
    _thirdState = (seed + K0) * K2;
    _fourthState = (seed - K1) * K3;
}
/// <summary>
/// Guards an (input, offset, count) triple, throwing when the requested range
/// does not lie inside the array. Kept out-of-line so the throw paths do not
/// bloat the inlined callers.
/// </summary>
[MethodImpl(MethodImplOptions.NoInlining)]
private static void ValidateInput(byte[] input, int offset, int count)
{
    if (input == null)
        throw new ArgumentNullException(nameof(input));

    // The unsigned casts reject negative values and too-large values
    // with a single comparison each.
    if ((uint)offset > (uint)input.Length)
        throw new ArgumentOutOfRangeException(nameof(offset));

    if ((uint)count > (uint)(input.Length - offset))
        throw new ArgumentOutOfRangeException(nameof(count));
}
/// <summary>
/// Add data to hash
/// </summary>
/// <param name="input">data</param>
/// <param name="offset">offset</param>
/// <param name="count">count</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Update(byte[] input, int offset, int count)
{
    ValidateInput(input, offset, count);
    ref var firstState = ref _firstTwoStates[0];
    ref var secondState = ref _firstTwoStates[1];
    var end = offset + count;

    // If a previous Update left a partial (non-32-byte) block behind,
    // top the internal buffer up first.
    var bMod = _bytes & 31;
    if (bMod != 0)
    {
        var fill = 32 - bMod;
        if (fill > count)
        {
            fill = count;
        }
        Buffer.BlockCopy(input, offset, _buffer, bMod, fill);
        offset += fill;
        _bytes += fill;

        // Still short of a full block: nothing further to process yet.
        if ((_bytes & 31) != 0)
        {
            return;
        }

        // Buffer now holds exactly 32 bytes; consume it.
        var tempOffset = 0;
        BulkLoop(ref firstState, ref secondState, ref _thirdState, ref _fourthState, ref _buffer[0],
            ref tempOffset,
            32);
    }

    _bytes += end - offset;

    // FIX: guard against empty or fully-consumed input — taking `ref input[0]`
    // on an empty array throws IndexOutOfRangeException even though
    // (offset, count) = (0, 0) is valid per ValidateInput.
    if (offset < end)
    {
        // Consume whole 32-byte blocks directly from the input...
        BulkLoop(ref firstState, ref secondState, ref _thirdState, ref _fourthState, ref input[0], ref offset, end);

        // ...and stash any tail bytes for the next Update/FinalizeHash.
        if (offset < end)
        {
            Buffer.BlockCopy(input, offset, _buffer, 0, end - offset);
        }
    }
}
/// <summary>
/// Add data to hash
/// </summary>
/// <param name="input">data</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Update(ReadOnlySpan<byte> input)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
ref var firstState = ref _firstTwoStates[0];
ref var secondState = ref _firstTwoStates[1];
var count = input.Length;
var offset = 0;
var bMod = _bytes & 31;
if (bMod != 0)
{
var fill = 32 - bMod;
if (fill > count)
{
fill = count;
}
input.Slice(0, fill).CopyTo(_buffer.AsSpan().Slice(bMod));
_bytes += fill;
offset += fill;
if ((_bytes & 31) != 0)
{
return;
}
var tempOffset = 0;
BulkLoop(ref firstState, ref secondState, ref _thirdState, ref _fourthState, ref _buffer[0],
ref tempOffset, 32);
}
_bytes += count - offset;
ref var start = ref MemoryMarshal.GetReference(input);
BulkLoop(ref firstState, ref secondState, ref _thirdState, ref _fourthState, ref start, ref offset,
input.Length);
if (offset < count)
{
input.Slice(offset).CopyTo(_buffer.AsSpan());
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void BulkLoop(ref ulong firstState, ref ulong secondState, ref ulong thirdState,
ref ulong fourthState, ref byte b, ref int offset, int count)
{
// Create a local copy so that it remains in the CPU register.
int localOffset = offset; // workaround for dotnet/runtime#39349
while (localOffset <= count - 32)
{
firstState += Cast<ulong>(ref b, localOffset) * K0;
localOffset += 8;
firstState = RotateRight(firstState, 29) + thirdState;
secondState += Cast<ulong>(ref b, localOffset) * K1;
localOffset += 8;
secondState = RotateRight(secondState, 29) + fourthState;
thirdState += Cast<ulong>(ref b, localOffset) * K2;
localOffset += 8;
thirdState = RotateRight(thirdState, 29) + firstState;
fourthState += Cast<ulong>(ref b, localOffset) * K3;
localOffset += 8;
fourthState = RotateRight(fourthState, 29) + secondState;
}
// Return the final result of the local register.
offset = localOffset;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void FinalizeBulkLoop(ref ulong firstState, ref ulong secondState, ref ulong thirdState,
ref ulong fourthState)
{
thirdState ^= RotateRight((firstState + fourthState) * K0 + secondState, 21) * K1;
fourthState ^= RotateRight((secondState + thirdState) * K1 + firstState, 21) * K0;
firstState ^= RotateRight((firstState + thirdState) * K0 + fourthState, 21) * K1;
secondState ^= RotateRight((secondState + fourthState) * K1 + thirdState, 21) * K0;
}
private static void FinalizeHash(ref ulong firstState, ref ulong secondState, ref byte b, ref int offset,
int count)
{
var end = offset + (count & 31);
if (end - offset >= 16)
{
firstState += Cast<ulong>(ref b, offset) * K2;
offset += 8;
firstState = RotateRight(firstState, 33) * K3;
secondState += Cast<ulong>(ref b, offset) * K2;
offset += 8;
secondState = RotateRight(secondState, 33) * K3;
firstState ^= RotateRight(firstState * K2 + secondState, 45) * K1;
secondState ^= RotateRight(secondState * K3 + firstState, 45) * K0;
}
if (end - offset >= 8)
{
firstState += Cast<ulong>(ref b, offset) * K2;
offset += 8;
firstState = RotateRight(firstState, 33) * K3;
firstState ^= RotateRight(firstState * K2 + secondState, 27) * K1;
}
if (end - offset >= 4)
{
secondState += Cast<uint>(ref b, offset) * K2;
offset += 4;
secondState = RotateRight(secondState, 33) * K3;
secondState ^= RotateRight(secondState * K3 + firstState, 46) * K0;
}
if (end - offset >= 2)
{
firstState += Cast<ushort>(ref b, offset) * K2;
offset += 2;
firstState = RotateRight(firstState, 33) * K3;
firstState ^= RotateRight(firstState * K2 + secondState, 22) * K1;
}
if (end - offset >= 1)
{
secondState += Unsafe.Add(ref b, offset) * K2;
secondState = RotateRight(secondState, 33) * K3;
secondState ^= RotateRight(secondState * K3 + firstState, 58) * K0;
}
firstState += RotateRight(firstState * K0 + secondState, 13);
secondState += RotateRight(secondState * K1 + firstState, 37);
firstState += RotateRight(firstState * K2 + secondState, 13);
secondState += RotateRight(secondState * K3 + firstState, 37);
}
[MethodImpl((MethodImplOptions.AggressiveInlining))]
private static T Cast<T>(ref byte b, int offset)
{
return Unsafe.As<byte, T>(ref Unsafe.Add(ref b, offset));
}
/// <summary>
/// Finalizes the hash and returns the hash
/// </summary>
/// <returns>Hash</returns>
public byte[] FinalizeHash()
{
var offset = 0;
if (_bytes >= 32)
{
FinalizeBulkLoop(ref _firstTwoStates[0], ref _firstTwoStates[1], ref _thirdState, ref _fourthState);
}
FinalizeHash(ref _firstTwoStates[0], ref _firstTwoStates[1], ref _buffer[0],
ref offset, _bytes);
_bytes = 0;
return _result;
}
/// <summary>
/// Finalizes the hash and returns the hash
/// </summary>
/// <param name="output">Span to write to</param>
public void FinalizeHash(Span<byte> output)
{
if (output == null)
{
throw new ArgumentNullException(nameof(output));
}
var offset = 0;
if (_bytes >= 32)
{
FinalizeBulkLoop(ref _firstTwoStates[0], ref _firstTwoStates[1], ref _thirdState, ref _fourthState);
}
FinalizeHash(ref _firstTwoStates[0], ref _firstTwoStates[1], ref _buffer[0],
ref offset, _bytes);
_bytes = 0;
_result.CopyTo(output);
}
/// <summary>
/// MetroHash 128 hash method
/// Not cryptographically secure
/// </summary>
/// <param name="seed">Seed to initialize data</param>
/// <param name="input">Data you want to hash</param>
/// <param name="offset">Start of the data you want to hash</param>
/// <param name="count">Length of the data you want to hash</param>
/// <returns>Hash</returns>
public static byte[] Hash(ulong seed, byte[] input, int offset, int count)
{
ValidateInput(input, offset, count);
var result = new byte[16];
var end = offset + count;
var state = Unsafe.As<byte[], ulong[]>(ref result);
ref var firstState = ref state[0];
ref var secondState = ref state[1];
firstState = (seed - K0) * K3;
secondState = (seed + K1) * K2;
if (count >= 32)
{
var thirdState = (seed + K0) * K2;
var fourthState = (seed - K1) * K3;
BulkLoop(ref firstState, ref secondState, ref thirdState, ref fourthState, ref input[0], ref offset,
end);
FinalizeBulkLoop(ref firstState, ref secondState, ref thirdState, ref fourthState);
}
FinalizeHash(ref firstState, ref secondState, ref input[0], ref offset, count);
return result;
}
/// <summary>
/// MetroHash 128 hash method
/// Not cryptographically secure
/// </summary>
/// <param name="seed">Seed to initialize data</param>
/// <param name="input">Data you want to hash</param>
/// <param name="output">Span to write to</param>
public static void Hash(ulong seed, ReadOnlySpan<byte> input, Span<byte> output)
{
var state = MemoryMarshal.Cast<byte, ulong>(output);
ref var firstState = ref state[0];
ref var secondState = ref state[1];
firstState = (seed - K0) * K3;
secondState = (seed + K1) * K2;
var offset = 0;
var count = input.Length;
ref var start = ref MemoryMarshal.GetReference(input);
if (input.Length >= 32)
{
var thirdState = (seed + K0) * K2;
var fourthState = (seed - K1) * K3;
BulkLoop(ref firstState, ref secondState, ref thirdState, ref fourthState, ref start, ref offset,
input.Length);
FinalizeBulkLoop(ref firstState, ref secondState, ref thirdState, ref fourthState);
}
FinalizeHash(ref firstState, ref secondState, ref start, ref offset, count);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static ulong RotateRight(ulong x, int r)
{
return (x >> r) | (x << (64 - r));
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using Cysharp.Text;
using Microsoft.Extensions.Localization;
namespace OrchardCore.ContentManagement.Utilities
{
public static class StringExtensions
{
    /// <summary>
    /// Turns a camel-cased identifier into a friendlier display form by inserting a space
    /// before every upper-case character except the first ("CamelCase" -> "Camel Case").
    /// Returns "" for null/empty/whitespace input.
    /// </summary>
    public static string CamelFriendly(this string camel)
    {
        // optimize common cases
        if (string.IsNullOrWhiteSpace(camel))
        {
            return "";
        }
        using var sb = ZString.CreateStringBuilder();
        for (var i = 0; i < camel.Length; ++i)
        {
            var c = camel[i];
            if (i != 0 && char.IsUpper(c))
            {
                sb.Append(' ');
            }
            sb.Append(c);
        }
        return sb.ToString();
    }
    /// <summary>
    /// Truncates <paramref name="text"/> to at most <paramref name="characterCount"/> characters,
    /// appending a non-breaking space + ellipsis character.
    /// </summary>
    public static string Ellipsize(this string text, int characterCount)
    {
        return text.Ellipsize(characterCount, "\u00A0\u2026");
    }
    /// <summary>
    /// Truncates <paramref name="text"/> to at most <paramref name="characterCount"/> characters,
    /// backing up to the previous word boundary, and appends <paramref name="ellipsis"/>.
    /// </summary>
    /// <param name="wordBoundary">
    /// When true and the cut falls inside the first word, the result is only the ellipsis;
    /// otherwise the first word is kept whole (possibly exceeding the limit).
    /// </param>
    public static string Ellipsize(this string text, int characterCount, string ellipsis, bool wordBoundary = false)
    {
        if (string.IsNullOrWhiteSpace(text))
        {
            return "";
        }
        // A negative limit or text already within the limit is returned unchanged.
        if (characterCount < 0 || text.Length <= characterCount)
        {
            return text;
        }
        // search beginning of word
        var backup = characterCount;
        while (characterCount > 0 && text[characterCount - 1].IsLetter())
        {
            characterCount--;
        }
        // search previous word
        while (characterCount > 0 && text[characterCount - 1].IsSpace())
        {
            characterCount--;
        }
        // if it was the last word, recover it, unless boundary is requested
        if (characterCount == 0 && !wordBoundary)
        {
            characterCount = backup;
        }
        var trimmed = text.Substring(0, characterCount);
        return trimmed + ellipsis;
    }
    /// <summary>
    /// Converts arbitrary text into a lower-case, dash-separated token suitable for an HTML
    /// class attribute. Non-letter runs collapse into a single '-'; digits are kept except
    /// in the leading position.
    /// </summary>
    public static string HtmlClassify(this string text)
    {
        if (string.IsNullOrWhiteSpace(text))
        {
            return "";
        }
        var friendlier = text.CamelFriendly();
        var result = new char[friendlier.Length];
        var cursor = 0;
        var previousIsNotLetter = false;
        for (var i = 0; i < friendlier.Length; i++)
        {
            char current = friendlier[i];
            if (IsLetter(current) || (char.IsDigit(current) && cursor > 0))
            {
                if (previousIsNotLetter && i != 0 && cursor > 0)
                {
                    result[cursor++] = '-';
                }
                result[cursor++] = char.ToLowerInvariant(current);
                previousIsNotLetter = false;
            }
            else
            {
                previousIsNotLetter = true;
            }
        }
        return new string(result, 0, cursor);
    }
    /// <summary>
    /// Returns <paramref name="defaultValue"/> when <paramref name="text"/> is null or empty,
    /// otherwise wraps the text in a culture-less <see cref="LocalizedString"/>.
    /// </summary>
    public static LocalizedString OrDefault(this string text, LocalizedString defaultValue)
    {
        return string.IsNullOrEmpty(text)
            ? defaultValue
            : new LocalizedString(null, text);
    }
    /// <summary>
    /// Strips everything between '&lt;' and '&gt;' (inclusive) from <paramref name="html"/>.
    /// Optionally HTML-decodes the remaining text. Note: this is a character scan, not an
    /// HTML parser — it does not handle '&gt;' inside attribute values.
    /// </summary>
    public static string RemoveTags(this string html, bool htmlDecode = false)
    {
        if (String.IsNullOrEmpty(html))
        {
            return String.Empty;
        }
        var result = new char[html.Length];
        var cursor = 0;
        var inside = false;
        for (var i = 0; i < html.Length; i++)
        {
            char current = html[i];
            switch (current)
            {
                case '<':
                    inside = true;
                    continue;
                case '>':
                    inside = false;
                    continue;
            }
            if (!inside)
            {
                result[cursor++] = current;
            }
        }
        var stringResult = new string(result, 0, cursor);
        if (htmlDecode)
        {
            stringResult = WebUtility.HtmlDecode(stringResult);
        }
        return stringResult;
    }
    // not accounting for only \r (e.g. Apple OS 9 carriage return only new lines)
    /// <summary>
    /// Replaces every "\r\n" or "\n" new line with <paramref name="replacement"/> formatted
    /// with "\r\n" (the "\r\r" dance protects pre-existing "\r\n" pairs from double substitution).
    /// </summary>
    public static string ReplaceNewLinesWith(this string text, string replacement)
    {
        return String.IsNullOrWhiteSpace(text)
            ? String.Empty
            : text
                .Replace("\r\n", "\r\r")
                .Replace("\n", String.Format(replacement, "\r\n"))
                .Replace("\r\r", String.Format(replacement, "\r\n"));
    }
    // Characters that may not appear anywhere in a URL path segment.
    private static readonly char[] validSegmentChars = "/?#[]@\"^{}|`<>\t\r\n\f ".ToCharArray();
    /// <summary>
    /// Returns true when <paramref name="segment"/> contains none of the characters that are
    /// invalid in an RFC 3987 isegment.
    /// </summary>
    public static bool IsValidUrlSegment(this string segment)
    {
        // valid isegment from rfc3987 - http://tools.ietf.org/html/rfc3987#page-8
        // the relevant bits:
        // isegment = *ipchar
        // ipchar = iunreserved / pct-encoded / sub-delims / ":" / "@"
        // iunreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" / ucschar
        // pct-encoded = "%" HEXDIG HEXDIG
        // sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
        // ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD / %xD0000-DFFFD / %xE1000-EFFFD
        //
        // rough blacklist regex == m/^[^/?#[]@"^{}|\s`<>]+$/ (leaving off % to keep the regex simple)
        return !segment.Any(validSegmentChars);
    }
    /// <summary>
    /// Generates a valid technical name.
    /// </summary>
    /// <remarks>
    /// Uses a white list set of chars: diacritics are folded away, only A-Z letters and digits
    /// survive, the leading character must be a letter, and the result is capped at 128 chars.
    /// </remarks>
    public static string ToSafeName(this string name)
    {
        if (string.IsNullOrWhiteSpace(name))
        {
            return string.Empty;
        }
        name = RemoveDiacritics(name);
        name = name.Strip(c =>
            !c.IsLetter()
            && !char.IsDigit(c)
            );
        name = name.Trim();
        // don't allow non A-Z chars as first letter, as they are not allowed in prefixes
        while (name.Length > 0 && !IsLetter(name[0]))
        {
            name = name.Substring(1);
        }
        if (name.Length > 128)
        {
            name = name.Substring(0, 128);
        }
        return name;
    }
    // Property names of ContentItem that user-defined content fields must not shadow.
    private static HashSet<string> _reservedNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
    {
        nameof(ContentItem.Id),
        nameof(ContentItem.ContentItemId),
        nameof(ContentItem.ContentItemVersionId),
        nameof(ContentItem.ContentType),
        nameof(ContentItem.Published),
        nameof(ContentItem.Latest),
        nameof(ContentItem.ModifiedUtc),
        nameof(ContentItem.PublishedUtc),
        nameof(ContentItem.CreatedUtc),
        nameof(ContentItem.Owner),
        nameof(ContentItem.Author),
        nameof(ContentItem.DisplayText)
    };
    /// <summary>
    /// Returns true when <paramref name="name"/> collides (case-insensitively) with a built-in
    /// <see cref="ContentItem"/> property name.
    /// </summary>
    public static bool IsReservedContentName(this string name)
    {
        // Simplified from an if/return-true/return-false chain: the set lookup is the answer.
        return _reservedNames.Contains(name);
    }
    /// <summary>
    /// Whether the char is a letter between A and Z or not
    /// </summary>
    public static bool IsLetter(this char c)
    {
        return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z');
    }
    /// <summary>
    /// Whether the char is ASCII whitespace (\r, \n, \t, \f or space).
    /// </summary>
    public static bool IsSpace(this char c)
    {
        return (c == '\r' || c == '\n' || c == '\t' || c == '\f' || c == ' ');
    }
    /// <summary>
    /// Removes combining diacritical marks by decomposing to Unicode Form D, dropping
    /// non-spacing marks, and recomposing to Form C ("é" -> "e").
    /// </summary>
    public static string RemoveDiacritics(this string name)
    {
        string stFormD = name.Normalize(NormalizationForm.FormD);
        var sb = new StringBuilder();
        foreach (char t in stFormD)
        {
            UnicodeCategory uc = CharUnicodeInfo.GetUnicodeCategory(t);
            if (uc != UnicodeCategory.NonSpacingMark)
            {
                sb.Append(t);
            }
        }
        return (sb.ToString().Normalize(NormalizationForm.FormC));
    }
    /// <summary>
    /// Removes every occurrence of the given characters from <paramref name="subject"/>.
    /// Null/empty subject or an empty strip set returns the subject unchanged.
    /// </summary>
    public static string Strip(this string subject, params char[] stripped)
    {
        if (stripped == null || stripped.Length == 0 || string.IsNullOrEmpty(subject))
        {
            return subject;
        }
        var result = new char[subject.Length];
        var cursor = 0;
        for (var i = 0; i < subject.Length; i++)
        {
            char current = subject[i];
            if (Array.IndexOf(stripped, current) < 0)
            {
                result[cursor++] = current;
            }
        }
        return new string(result, 0, cursor);
    }
    /// <summary>
    /// Removes every character for which <paramref name="predicate"/> returns true.
    /// </summary>
    public static string Strip(this string subject, Func<char, bool> predicate)
    {
        var result = new char[subject.Length];
        var cursor = 0;
        for (var i = 0; i < subject.Length; i++)
        {
            char current = subject[i];
            if (!predicate(current))
            {
                result[cursor++] = current;
            }
        }
        return new string(result, 0, cursor);
    }
    /// <summary>
    /// Returns true when <paramref name="subject"/> contains at least one of the given characters.
    /// </summary>
    public static bool Any(this string subject, params char[] chars)
    {
        if (string.IsNullOrEmpty(subject) || chars == null || chars.Length == 0)
        {
            return false;
        }
        for (var i = 0; i < subject.Length; i++)
        {
            char current = subject[i];
            if (Array.IndexOf(chars, current) >= 0)
            {
                return true;
            }
        }
        return false;
    }
    /// <summary>
    /// Returns true when every character of <paramref name="subject"/> is one of the given
    /// characters. An empty subject returns true; an empty character set returns false.
    /// </summary>
    public static bool All(this string subject, params char[] chars)
    {
        if (string.IsNullOrEmpty(subject))
        {
            return true;
        }
        if (chars == null || chars.Length == 0)
        {
            return false;
        }
        for (var i = 0; i < subject.Length; i++)
        {
            char current = subject[i];
            if (Array.IndexOf(chars, current) < 0)
            {
                return false;
            }
        }
        return true;
    }
    /// <summary>
    /// Maps each character found in <paramref name="from"/> to the character at the same index
    /// in <paramref name="to"/> (like tr/// in Perl); other characters pass through unchanged.
    /// </summary>
    /// <exception cref="ArgumentNullException">
    /// Thrown when either array is null, or (historically) when their lengths differ.
    /// </exception>
    public static string Translate(this string subject, char[] from, char[] to)
    {
        if (string.IsNullOrEmpty(subject))
        {
            return subject;
        }
        if (from == null || to == null)
        {
            throw new ArgumentNullException();
        }
        if (from.Length != to.Length)
        {
            // NOTE: ArgumentException would be more accurate, but the exception type is kept
            // for backward compatibility with existing callers.
            throw new ArgumentNullException(nameof(from), "Parameters must have the same length");
        }
        var map = new Dictionary<char, char>(from.Length);
        for (var i = 0; i < from.Length; i++)
        {
            map[from[i]] = to[i];
        }
        var result = new char[subject.Length];
        for (var i = 0; i < subject.Length; i++)
        {
            var current = subject[i];
            // Single lookup instead of ContainsKey + indexer (same observable behavior).
            result[i] = map.TryGetValue(current, out var mapped) ? mapped : current;
        }
        return new string(result);
    }
    /// <summary>
    /// Applies all replacements in one regex pass. Keys are used verbatim as alternatives in
    /// the pattern, so regex metacharacters in keys are interpreted, not matched literally.
    /// </summary>
    public static string ReplaceAll(this string original, IDictionary<string, string> replacements)
    {
        var pattern = $"{string.Join("|", replacements.Keys)}";
        return Regex.Replace(original, pattern, match => replacements[match.Value]);
    }
    /// <summary>
    /// Removes <paramref name="trim"/> from the end of <paramref name="rough"/> when present
    /// (ordinal comparison); returns null for null input.
    /// </summary>
    public static string TrimEnd(this string rough, string trim = "")
    {
        if (rough == null)
        {
            return null;
        }
        return rough.EndsWith(trim, StringComparison.Ordinal)
            ? rough.Substring(0, rough.Length - trim.Length)
            : rough;
    }
    /// <summary>
    /// Replaces the last occurrence of <paramref name="find"/> in <paramref name="source"/>
    /// with <paramref name="replace"/>; returns the source unchanged when not found.
    /// </summary>
    public static string ReplaceLastOccurrence(this string source, string find, string replace)
    {
        int place = source.LastIndexOf(find, StringComparison.Ordinal);
        // Bug fix: LastIndexOf returns -1 when `find` is absent, and the original code then
        // called Remove(-1, ...) which threw ArgumentOutOfRangeException.
        if (place < 0)
        {
            return source;
        }
        return source.Remove(place, find.Length).Insert(place, replace);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#pragma warning disable RS0008 // Implement IEquatable<T> when overriding Object.Equals
using System;
using System.Collections.Immutable;
using System.ComponentModel;
using System.Diagnostics;
using System.Reflection.Metadata;
#if !SRM
using PrimitiveTypeCode = Microsoft.Cci.PrimitiveTypeCode;
#endif
#if SRM
namespace System.Reflection.Metadata.Ecma335.Blobs
#else
namespace Roslyn.Reflection.Metadata.Ecma335.Blobs
#endif
{
// TODO: arg validation
// TODO: can we hide useless inherited methods?
// TODO: debug metadata blobs
// TODO: revisit ctors (public vs internal)?
//[EditorBrowsable(EditorBrowsableState.Never)]
//public override bool Equals(object obj) => base.Equals(obj);
//[EditorBrowsable(EditorBrowsableState.Never)]
//public override int GetHashCode() => base.GetHashCode();
//[EditorBrowsable(EditorBrowsableState.Never)]
//public override string ToString() => base.ToString();
#if SRM
public
#endif
struct BlobEncoder
{
    // Underlying builder all encoders in this family append to.
    public BlobBuilder Builder { get; }
    public BlobEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Starts a field signature: writes the FIELD signature kind byte, then the field type follows.
    public SignatureTypeEncoder FieldSignature()
    {
        Builder.WriteByte((byte)SignatureKind.Field);
        return new SignatureTypeEncoder(Builder);
    }
    // Starts a MethodSpec signature: kind byte + compressed generic argument count,
    // then the caller encodes each type argument.
    public GenericTypeArgumentsEncoder MethodSpecificationSignature(int genericArgumentCount)
    {
        // TODO: arg validation
        Builder.WriteByte((byte)SignatureKind.MethodSpecification);
        Builder.WriteCompressedInteger(genericArgumentCount);
        return new GenericTypeArgumentsEncoder(Builder);
    }
    // Starts a method (def/ref) signature header. The generic parameter count is only
    // emitted when non-zero, per the ECMA-335 MethodDefSig layout.
    public MethodSignatureEncoder MethodSignature(
        SignatureCallingConvention convention = SignatureCallingConvention.Default,
        int genericParameterCount = 0,
        bool isInstanceMethod = false)
    {
        // TODO: arg validation
        var attributes =
            (genericParameterCount != 0 ? SignatureAttributes.Generic : 0) |
            (isInstanceMethod ? SignatureAttributes.Instance : 0);
        Builder.WriteByte(SignatureHeader(SignatureKind.Method, convention, attributes).RawValue);
        if (genericParameterCount != 0)
        {
            Builder.WriteCompressedInteger(genericParameterCount);
        }
        return new MethodSignatureEncoder(Builder, isVarArg: convention == SignatureCallingConvention.VarArgs);
    }
    // Starts a property signature (properties reuse the method-signature shape, never vararg).
    public MethodSignatureEncoder PropertySignature(bool isInstanceProperty = false)
    {
        Builder.WriteByte(SignatureHeader(SignatureKind.Property, SignatureCallingConvention.Default, (isInstanceProperty ? SignatureAttributes.Instance : 0)).RawValue);
        return new MethodSignatureEncoder(Builder, isVarArg: false);
    }
    // Starts a custom attribute blob: the mandatory 0x0001 prolog, then fixed args
    // followed by named args (caller must encode them in that order).
    public void CustomAttributeSignature(out FixedArgumentsEncoder fixedArguments, out CustomAttributeNamedArgumentsEncoder namedArguments)
    {
        Builder.WriteUInt16(0x0001);
        fixedArguments = new FixedArgumentsEncoder(Builder);
        namedArguments = new CustomAttributeNamedArgumentsEncoder(Builder);
    }
    // Starts a StandAloneSig local variable signature with the given variable count.
    public LocalVariablesEncoder LocalVariableSignature(int count)
    {
        Builder.WriteByte((byte)SignatureKind.LocalVariables);
        Builder.WriteCompressedInteger(count);
        return new LocalVariablesEncoder(Builder);
    }
    // TODO: TypeSpec is limited to structured types (doesn't have primitive types, TypeDefRefSpec, custom modifiers)
    public SignatureTypeEncoder TypeSpecificationSignature()
    {
        return new SignatureTypeEncoder(Builder);
    }
    // Starts a declarative security permission set blob: '.' marker + attribute count.
    public PermissionSetEncoder PermissionSetBlob(int attributeCount)
    {
        Builder.WriteByte((byte)'.');
        Builder.WriteCompressedInteger(attributeCount);
        return new PermissionSetEncoder(Builder);
    }
    // Writes the named-argument count for a single permission set attribute.
    public NamedArgumentsEncoder PermissionSetArguments(int argumentCount)
    {
        Builder.WriteCompressedInteger(argumentCount);
        return new NamedArgumentsEncoder(Builder);
    }
    // TODO: add ctor to SignatureHeader
    // Packs kind, calling convention and attributes into the single signature header byte.
    internal static SignatureHeader SignatureHeader(SignatureKind kind, SignatureCallingConvention convention, SignatureAttributes attributes)
    {
        return new SignatureHeader((byte)((int)kind | (int)convention | (int)attributes));
    }
}
#if SRM
public
#endif
// Encodes the parameter-count / return-type / parameters portion of a method signature.
struct MethodSignatureEncoder
{
    public BlobBuilder Builder { get; }
    // Whether the enclosing signature uses the VARARG calling convention; forwarded to
    // ParametersEncoder so it can emit the sentinel before the optional arguments.
    private readonly bool _isVarArg;
    public MethodSignatureEncoder(BlobBuilder builder, bool isVarArg)
    {
        Builder = builder;
        _isVarArg = isVarArg;
    }
    // Writes the compressed parameter count; the caller must then encode the return type
    // followed by exactly parameterCount parameters, in that order.
    public void Parameters(int parameterCount, out ReturnTypeEncoder returnType, out ParametersEncoder parameters)
    {
        Builder.WriteCompressedInteger(parameterCount);
        returnType = new ReturnTypeEncoder(Builder);
        parameters = new ParametersEncoder(Builder, allowVarArgs: _isVarArg);
    }
}
#if SRM
public
#endif
// Encodes the sequence of local variable types in a local signature.
struct LocalVariablesEncoder
{
    public BlobBuilder Builder { get; }
    public LocalVariablesEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Begins encoding the next local variable's type.
    public LocalVariableTypeEncoder AddVariable()
    {
        return new LocalVariableTypeEncoder(Builder);
    }
    // No terminator byte is required; present for API symmetry.
    public void EndVariables()
    {
    }
}
#if SRM
public
#endif
// Encodes a single local variable's type, including optional pinned/byref markers.
struct LocalVariableTypeEncoder
{
    public BlobBuilder Builder { get; }
    public LocalVariableTypeEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Optional custom modifiers must be encoded before the type itself.
    public CustomModifiersEncoder CustomModifiers()
    {
        return new CustomModifiersEncoder(Builder);
    }
    // Emits PINNED and/or BYREF prefixes (in that order) before the actual type.
    public SignatureTypeEncoder Type(bool isByRef = false, bool isPinned = false)
    {
        if (isPinned)
        {
            Builder.WriteByte((byte)SignatureTypeCode.Pinned);
        }
        if (isByRef)
        {
            Builder.WriteByte((byte)SignatureTypeCode.ByReference);
        }
        return new SignatureTypeEncoder(Builder);
    }
    // Encodes the special TYPEDBYREF local type (no further type follows).
    public void TypedReference()
    {
        Builder.WriteByte((byte)SignatureTypeCode.TypedReference);
    }
}
#if SRM
public
#endif
// Encodes a single parameter's type, with an optional BYREF marker.
struct ParameterTypeEncoder
{
    public BlobBuilder Builder { get; }
    public ParameterTypeEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Optional custom modifiers must be encoded before the type itself.
    public CustomModifiersEncoder CustomModifiers()
    {
        return new CustomModifiersEncoder(Builder);
    }
    // Emits the BYREF prefix when requested, then lets the caller encode the type.
    public SignatureTypeEncoder Type(bool isByRef = false)
    {
        if (isByRef)
        {
            Builder.WriteByte((byte)SignatureTypeCode.ByReference);
        }
        return new SignatureTypeEncoder(Builder);
    }
    // Encodes the special TYPEDBYREF parameter type (no further type follows).
    public void TypedReference()
    {
        Builder.WriteByte((byte)SignatureTypeCode.TypedReference);
    }
}
#if SRM
public
#endif
// Encodes the attributes of a declarative security permission set blob.
struct PermissionSetEncoder
{
    public BlobBuilder Builder { get; }
    public PermissionSetEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Appends one permission attribute: serialized type name, then the pre-built,
    // length-prefixed argument blob.
    public PermissionSetEncoder AddPermission(string typeName, BlobBuilder arguments)
    {
        Builder.WriteSerializedString(typeName);
        Builder.WriteCompressedInteger(arguments.Count);
        arguments.WriteContentTo(Builder);
        return new PermissionSetEncoder(Builder);
    }
    // No terminator byte is required; present for API symmetry.
    public void EndPermissions()
    {
    }
}
#if SRM
public
#endif
// Encodes the type-argument list of a generic instantiation or MethodSpec.
struct GenericTypeArgumentsEncoder
{
    public BlobBuilder Builder { get; }
    public GenericTypeArgumentsEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Begins encoding the next generic type argument.
    public SignatureTypeEncoder AddArgument()
    {
        return new SignatureTypeEncoder(Builder);
    }
    // No terminator byte is required; present for API symmetry.
    public void EndArguments()
    {
    }
}
#if SRM
public
#endif
// Encodes the fixed (positional) arguments of a custom attribute blob.
struct FixedArgumentsEncoder
{
    public BlobBuilder Builder { get; }
    public FixedArgumentsEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Begins encoding the next fixed argument value.
    public LiteralEncoder AddArgument()
    {
        return new LiteralEncoder(Builder);
    }
    // No terminator byte is required; present for API symmetry.
    public void EndArguments()
    {
    }
}
#if SRM
public
#endif
// Encodes a custom attribute literal value: a scalar, a vector (SZ array), or a
// tagged variant of either when the declared type is System.Object.
struct LiteralEncoder
{
    public BlobBuilder Builder { get; }
    public LiteralEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Encodes an array value whose element type is statically known.
    public VectorEncoder Vector()
    {
        return new VectorEncoder(Builder);
    }
    // Encodes an array value preceded by its element-type tag (used for object-typed args);
    // the caller must encode arrayType first, then the vector contents.
    public void TaggedVector(out CustomAttributeArrayTypeEncoder arrayType, out VectorEncoder vector)
    {
        arrayType = new CustomAttributeArrayTypeEncoder(Builder);
        vector = new VectorEncoder(Builder);
    }
    // Encodes a scalar value whose type is statically known.
    public ScalarEncoder Scalar()
    {
        return new ScalarEncoder(Builder);
    }
    // Encodes a scalar value preceded by its type tag (used for object-typed args);
    // the caller must encode type first, then the scalar.
    public void TaggedScalar(out CustomAttributeElementTypeEncoder type, out ScalarEncoder scalar)
    {
        type = new CustomAttributeElementTypeEncoder(Builder);
        scalar = new ScalarEncoder(Builder);
    }
}
#if SRM
public
#endif
// Encodes a single scalar custom attribute value.
struct ScalarEncoder
{
    public BlobBuilder Builder { get; }
    public ScalarEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Encodes a null array value as the 4-byte sentinel 0xFFFFFFFF.
    public void NullArray()
    {
        Builder.WriteInt32(-1);
    }
    // Encodes a boxed constant: strings (and null) go through the serialized-string path,
    // everything else is written by BlobBuilder.WriteConstant.
    public void Constant(object value)
    {
        string str = value as string;
        if (str != null || value == null)
        {
            String(str);
        }
        else
        {
            Builder.WriteConstant(value);
        }
    }
    // Encodes a System.Type value as its serialized (assembly-qualified) type name.
    public void SystemType(string serializedTypeName)
    {
        String(serializedTypeName);
    }
    private void String(string value)
    {
        Builder.WriteSerializedString(value);
    }
}
#if SRM
public
#endif
// Encodes the element sequence of a custom attribute array value.
struct LiteralsEncoder
{
    public BlobBuilder Builder { get; }
    public LiteralsEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Begins encoding the next array element.
    public LiteralEncoder AddLiteral()
    {
        return new LiteralEncoder(Builder);
    }
    // No terminator byte is required; present for API symmetry.
    public void EndLiterals()
    {
    }
}
#if SRM
public
#endif
// Encodes an SZ array value: a 4-byte element count followed by the elements.
struct VectorEncoder
{
    public BlobBuilder Builder { get; }
    public VectorEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Writes the element count (as uint32, per the custom attribute blob format);
    // the caller must then encode exactly that many literals.
    public LiteralsEncoder Count(int count)
    {
        Builder.WriteUInt32((uint)count);
        return new LiteralsEncoder(Builder);
    }
}
#if SRM
public
#endif
// Encodes a named-argument name as a serialized (length-prefixed UTF-8) string.
struct NameEncoder
{
    public BlobBuilder Builder { get; }
    public NameEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    public void Name(string name)
    {
        Builder.WriteSerializedString(name);
    }
}
#if SRM
public
#endif
// Encodes the NumNamed count prefix of a custom attribute's named-argument section.
struct CustomAttributeNamedArgumentsEncoder
{
    public BlobBuilder Builder { get; }
    public CustomAttributeNamedArgumentsEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Writes the 2-byte named-argument count; the caller must then encode exactly
    // `count` named arguments.
    public NamedArgumentsEncoder Count(int count)
    {
        // Bug fix: the original check `unchecked((ushort)count) > ushort.MaxValue` was
        // vacuously false (a ushort can never exceed its own maximum), so negative or
        // oversized counts were silently truncated by the cast below. Validate the full
        // int range instead: anything outside [0, 0xFFFF] is rejected.
        if (unchecked((uint)count) > ushort.MaxValue)
        {
            throw new ArgumentOutOfRangeException(nameof(count));
        }
        Builder.WriteUInt16((ushort)count);
        return new NamedArgumentsEncoder(Builder);
    }
}
#if SRM
public
#endif
// Encodes the named arguments (field/property assignments) of a custom attribute blob.
struct NamedArgumentsEncoder
{
    public BlobBuilder Builder { get; }
    public NamedArgumentsEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Writes the FIELD (0x53) or PROPERTY (0x54) marker; the caller must then encode
    // the argument's type, name, and value, in that order.
    public void AddArgument(bool isField, out NamedArgumentTypeEncoder typeEncoder, out NameEncoder name, out LiteralEncoder literal)
    {
        Builder.WriteByte(isField ? (byte)0x53 : (byte)0x54);
        typeEncoder = new NamedArgumentTypeEncoder(Builder);
        name = new NameEncoder(Builder);
        literal = new LiteralEncoder(Builder);
    }
    // No terminator byte is required; present for API symmetry.
    public void EndArguments()
    {
    }
}
#if SRM
public
#endif
// Encodes the declared type of a named custom attribute argument.
struct NamedArgumentTypeEncoder
{
    public BlobBuilder Builder { get; }
    public NamedArgumentTypeEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Encodes a simple scalar element type (primitive, string, enum, System.Type).
    public CustomAttributeElementTypeEncoder ScalarType()
    {
        return new CustomAttributeElementTypeEncoder(Builder);
    }
    // Encodes the boxed-object marker (0x51).
    public void Object()
    {
        Builder.WriteByte(0x51); // OBJECT
    }
    // Encodes an SZ array element type.
    public CustomAttributeArrayTypeEncoder SZArray()
    {
        return new CustomAttributeArrayTypeEncoder(Builder);
    }
}
#if SRM
public
#endif
// Encodes the element type of an SZ array appearing in a custom attribute blob.
struct CustomAttributeArrayTypeEncoder
{
    public BlobBuilder Builder { get; }
    public CustomAttributeArrayTypeEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    // Encodes object[]: SZARRAY marker followed by the boxed-object marker.
    public void ObjectArray()
    {
        Builder.WriteByte((byte)SignatureTypeCode.SZArray);
        Builder.WriteByte(0x51); // OBJECT
    }
    // Encodes the SZARRAY marker; the caller then encodes the element type.
    public CustomAttributeElementTypeEncoder ElementType()
    {
        Builder.WriteByte((byte)SignatureTypeCode.SZArray);
        return new CustomAttributeElementTypeEncoder(Builder);
    }
}
#if SRM
public
#endif
// Writes the single element-type byte for a custom attribute scalar value
// (primitives, string, System.Type, or an enum identified by its serialized name).
struct CustomAttributeElementTypeEncoder
{
    public BlobBuilder Builder { get; }
    public CustomAttributeElementTypeEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }
    private void WriteTypeCode(SignatureTypeCode value)
    {
        Builder.WriteByte((byte)value);
    }
    // One method per primitive element type; each emits its SignatureTypeCode byte.
    public void Boolean() => WriteTypeCode(SignatureTypeCode.Boolean);
    public void Char() => WriteTypeCode(SignatureTypeCode.Char);
    public void Int8() => WriteTypeCode(SignatureTypeCode.SByte);
    public void UInt8() => WriteTypeCode(SignatureTypeCode.Byte);
    public void Int16() => WriteTypeCode(SignatureTypeCode.Int16);
    public void UInt16() => WriteTypeCode(SignatureTypeCode.UInt16);
    public void Int32() => WriteTypeCode(SignatureTypeCode.Int32);
    public void UInt32() => WriteTypeCode(SignatureTypeCode.UInt32);
    public void Int64() => WriteTypeCode(SignatureTypeCode.Int64);
    public void UInt64() => WriteTypeCode(SignatureTypeCode.UInt64);
    public void Float32() => WriteTypeCode(SignatureTypeCode.Single);
    public void Float64() => WriteTypeCode(SignatureTypeCode.Double);
    public void String() => WriteTypeCode(SignatureTypeCode.String);
    public void IntPtr() => WriteTypeCode(SignatureTypeCode.IntPtr);
    public void UIntPtr() => WriteTypeCode(SignatureTypeCode.UIntPtr);
#if !SRM
    // Convenience dispatch from Cci's PrimitiveTypeCode to the methods above.
    public void PrimitiveType(PrimitiveTypeCode type)
    {
        switch (type)
        {
            case PrimitiveTypeCode.Boolean: Boolean(); return;
            case PrimitiveTypeCode.Char: Char(); return;
            case PrimitiveTypeCode.Int8: Int8(); return;
            case PrimitiveTypeCode.UInt8: UInt8(); return;
            case PrimitiveTypeCode.Int16: Int16(); return;
            case PrimitiveTypeCode.UInt16: UInt16(); return;
            case PrimitiveTypeCode.Int32: Int32(); return;
            case PrimitiveTypeCode.UInt32: UInt32(); return;
            case PrimitiveTypeCode.Int64: Int64(); return;
            case PrimitiveTypeCode.UInt64: UInt64(); return;
            case PrimitiveTypeCode.Float32: Float32(); return;
            case PrimitiveTypeCode.Float64: Float64(); return;
            case PrimitiveTypeCode.String: String(); return;
            case PrimitiveTypeCode.IntPtr: IntPtr(); return;
            case PrimitiveTypeCode.UIntPtr: UIntPtr(); return;
            default:
                throw new InvalidOperationException();
        }
    }
#endif
    // Encodes the System.Type element marker (0x50).
    public void SystemType()
    {
        Builder.WriteByte(0x50); // TYPE
    }
    // Encodes an enum element: the ENUM marker (0x55) followed by the enum's serialized type name.
    public void Enum(string enumTypeName)
    {
        Builder.WriteByte(0x55); // ENUM
        Builder.WriteSerializedString(enumTypeName);
    }
}
#if SRM
public
#endif
// Signature attribute combinations that are legal on a function pointer type.
// Per ECMA-335, EXPLICITTHIS may only appear together with HASTHIS.
enum FunctionPointerAttributes
{
    None = SignatureAttributes.None,
    HasThis = SignatureAttributes.Instance,
    HasExplicitThis = SignatureAttributes.Instance | SignatureAttributes.ExplicitThis
}
#if SRM
public
#endif
struct SignatureTypeEncoder
{
public BlobBuilder Builder { get; }
public SignatureTypeEncoder(BlobBuilder builder)
{
Builder = builder;
}
private void WriteTypeCode(SignatureTypeCode value)
{
Builder.WriteByte((byte)value);
}
private void ClassOrValue(bool isValueType)
{
Builder.WriteByte(isValueType ? (byte)0x11 : (byte)0x12); // CLASS|VALUETYPE
}
public void Boolean() => WriteTypeCode(SignatureTypeCode.Boolean);
public void Char() => WriteTypeCode(SignatureTypeCode.Char);
public void Int8() => WriteTypeCode(SignatureTypeCode.SByte);
public void UInt8() => WriteTypeCode(SignatureTypeCode.Byte);
public void Int16() => WriteTypeCode(SignatureTypeCode.Int16);
public void UInt16() => WriteTypeCode(SignatureTypeCode.UInt16);
public void Int32() => WriteTypeCode(SignatureTypeCode.Int32);
public void UInt32() => WriteTypeCode(SignatureTypeCode.UInt32);
public void Int64() => WriteTypeCode(SignatureTypeCode.Int64);
public void UInt64() => WriteTypeCode(SignatureTypeCode.UInt64);
public void Float32() => WriteTypeCode(SignatureTypeCode.Single);
public void Float64() => WriteTypeCode(SignatureTypeCode.Double);
public void String() => WriteTypeCode(SignatureTypeCode.String);
public void IntPtr() => WriteTypeCode(SignatureTypeCode.IntPtr);
public void UIntPtr() => WriteTypeCode(SignatureTypeCode.UIntPtr);
#if !SRM
public void PrimitiveType(PrimitiveTypeCode type)
{
switch (type)
{
case PrimitiveTypeCode.Boolean: Boolean(); return;
case PrimitiveTypeCode.Char: Char(); return;
case PrimitiveTypeCode.Int8: Int8(); return;
case PrimitiveTypeCode.UInt8: UInt8(); return;
case PrimitiveTypeCode.Int16: Int16(); return;
case PrimitiveTypeCode.UInt16: UInt16(); return;
case PrimitiveTypeCode.Int32: Int32(); return;
case PrimitiveTypeCode.UInt32: UInt32(); return;
case PrimitiveTypeCode.Int64: Int64(); return;
case PrimitiveTypeCode.UInt64: UInt64(); return;
case PrimitiveTypeCode.Float32: Float32(); return;
case PrimitiveTypeCode.Float64: Float64(); return;
case PrimitiveTypeCode.String: String(); return;
case PrimitiveTypeCode.IntPtr: IntPtr(); return;
case PrimitiveTypeCode.UIntPtr: UIntPtr(); return;
default:
throw new InvalidOperationException();
}
}
#endif
// System.Object as a signature element type.
public void Object() => WriteTypeCode(SignatureTypeCode.Object);
// Starts a multi-dimensional ARRAY production: caller first encodes the
// element type via `elementType`, then the shape via `arrayShape`.
// Note both out-encoders share this encoder's BlobBuilder.
public void Array(out SignatureTypeEncoder elementType, out ArrayShapeEncoder arrayShape)
{
Builder.WriteByte((byte)SignatureTypeCode.Array);
elementType = this;
arrayShape = new ArrayShapeEncoder(Builder);
}
// Writes CLASS or VALUETYPE followed by a compressed TypeDefOrRefOrSpec
// coded index for the given handle.
public void TypeDefOrRefOrSpec(bool isValueType, EntityHandle typeRefDefSpec)
{
ClassOrValue(isValueType);
Builder.WriteCompressedInteger(CodedIndex.ToTypeDefOrRefOrSpec(typeRefDefSpec));
}
// Begins a FNPTR signature: writes the method-signature header and, for a
// generic signature, the generic parameter count. Returns the encoder used
// to write the return type and parameters.
// Only None / HasThis / HasThis|ExplicitThis are legal attribute
// combinations (EXPLICITTHIS alone is invalid per the spec quoted below).
public MethodSignatureEncoder FunctionPointer(SignatureCallingConvention convention, FunctionPointerAttributes attributes, int genericParameterCount)
{
// Spec:
// The EXPLICITTHIS (0x40) bit can be set only in signatures for function pointers.
// If EXPLICITTHIS (0x40) in the signature is set, then HASTHIS (0x20) shall also be set.
if (attributes != FunctionPointerAttributes.None &&
attributes != FunctionPointerAttributes.HasThis &&
attributes != FunctionPointerAttributes.HasExplicitThis)
{
throw new ArgumentException(SR.InvalidSignature, nameof(attributes));
}
Builder.WriteByte((byte)SignatureTypeCode.FunctionPointer);
Builder.WriteByte(BlobEncoder.SignatureHeader(SignatureKind.Method, convention, (SignatureAttributes)attributes).RawValue);
// GENERICINST arity is only present for generic signatures.
if (genericParameterCount != 0)
{
Builder.WriteCompressedInteger(genericParameterCount);
}
return new MethodSignatureEncoder(Builder, isVarArg: convention == SignatureCallingConvention.VarArgs);
}
// Begins a GENERICINST production: generic type handle plus argument count;
// the caller encodes each type argument through the returned encoder.
public GenericTypeArgumentsEncoder GenericInstantiation(bool isValueType, EntityHandle typeRefDefSpec, int genericArgumentCount)
{
Builder.WriteByte((byte)SignatureTypeCode.GenericTypeInstance);
ClassOrValue(isValueType);
Builder.WriteCompressedInteger(CodedIndex.ToTypeDefOrRefOrSpec(typeRefDefSpec));
Builder.WriteCompressedInteger(genericArgumentCount);
return new GenericTypeArgumentsEncoder(Builder);
}
// MVAR: reference to the method's generic parameter at the given index.
public void GenericMethodTypeParameter(int parameterIndex)
{
Builder.WriteByte((byte)SignatureTypeCode.GenericMethodParameter);
Builder.WriteCompressedInteger(parameterIndex);
}
// VAR: reference to the declaring type's generic parameter at the given index.
public void GenericTypeParameter(int parameterIndex)
{
Builder.WriteByte((byte)SignatureTypeCode.GenericTypeParameter);
Builder.WriteCompressedInteger(parameterIndex);
}
// PTR followed by a pointee type (encode it via the returned encoder).
public SignatureTypeEncoder Pointer()
{
Builder.WriteByte((byte)SignatureTypeCode.Pointer);
return this;
}
// PTR VOID — a void* pointer; no further type follows.
public void VoidPointer()
{
Builder.WriteByte((byte)SignatureTypeCode.Pointer);
Builder.WriteByte((byte)SignatureTypeCode.Void);
}
// SZARRAY (single-dimensional, zero-based array); encode the element type
// via the returned encoder.
public SignatureTypeEncoder SZArray()
{
Builder.WriteByte((byte)SignatureTypeCode.SZArray);
return this;
}
// Starts an optional custom-modifier (modopt/modreq) list.
public CustomModifiersEncoder CustomModifiers()
{
return new CustomModifiersEncoder(Builder);
}
}
#if SRM
public
#endif
struct CustomModifiersEncoder
{
    // Target blob; every modifier entry is appended here.
    public BlobBuilder Builder { get; }

    public CustomModifiersEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }

    // Appends one CustomMod entry (ECMA-335 II.23.2.7): CMOD_OPT or
    // CMOD_REQD followed by a compressed TypeDefOrRefOrSpec coded index.
    // Returns the encoder so calls can be chained.
    public CustomModifiersEncoder AddModifier(bool isOptional, EntityHandle typeDefRefSpec)
    {
        var modifierCode = isOptional
            ? SignatureTypeCode.OptionalModifier
            : SignatureTypeCode.RequiredModifier;

        Builder.WriteByte((byte)modifierCode);
        Builder.WriteCompressedInteger(CodedIndex.ToTypeDefOrRefOrSpec(typeDefRefSpec));
        return this;
    }

    // Ends the modifier list. Intentionally writes nothing: the list is
    // terminated implicitly by the following element type.
    public void EndModifiers()
    {
    }
}
#if SRM
public
#endif
struct ArrayShapeEncoder
{
    // Target blob; the shape is appended here.
    public BlobBuilder Builder { get; }

    public ArrayShapeEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }

    // Writes the ArrayShape production (ECMA-335 II.23.2.13):
    // rank, NumSizes + sizes, NumLoBounds + lower bounds.
    // A default `lowerBounds` array is encoded as `rank` zero bounds.
    public void Shape(int rank, ImmutableArray<int> sizes, ImmutableArray<int> lowerBounds)
    {
        Builder.WriteCompressedInteger(rank);

        Builder.WriteCompressedInteger(sizes.Length);
        for (int i = 0; i < sizes.Length; i++)
        {
            Builder.WriteCompressedInteger(sizes[i]);
        }

        if (!lowerBounds.IsDefault)
        {
            Builder.WriteCompressedInteger(lowerBounds.Length);
            for (int i = 0; i < lowerBounds.Length; i++)
            {
                // Lower bounds may be negative, hence the signed encoding.
                Builder.WriteCompressedSignedInteger(lowerBounds[i]);
            }
        }
        else
        {
            Builder.WriteCompressedInteger(rank);
            for (int i = 0; i < rank; i++)
            {
                Builder.WriteCompressedSignedInteger(0);
            }
        }
    }
}
#if SRM
public
#endif
struct ReturnTypeEncoder
{
    // Target blob; the return-type encoding is appended here.
    public BlobBuilder Builder { get; }

    public ReturnTypeEncoder(BlobBuilder builder)
    {
        Builder = builder;
    }

    // Starts the optional custom-modifier list that precedes the return type.
    public CustomModifiersEncoder CustomModifiers() => new CustomModifiersEncoder(Builder);

    // Begins encoding the return type proper; emits BYREF first when requested.
    public SignatureTypeEncoder Type(bool isByRef = false)
    {
        if (isByRef)
        {
            Builder.WriteByte((byte)SignatureTypeCode.ByReference);
        }

        return new SignatureTypeEncoder(Builder);
    }

    // The return type is TYPEDBYREF (no further type follows).
    public void TypedReference() => Builder.WriteByte((byte)SignatureTypeCode.TypedReference);

    // The method returns void.
    public void Void() => Builder.WriteByte((byte)SignatureTypeCode.Void);
}
#if SRM
public
#endif
struct ParametersEncoder
{
    // Target blob; parameter encodings are appended here.
    public BlobBuilder Builder { get; }

    // True while a SENTINEL (vararg separator) may still be written.
    private readonly bool _allowOptional;

    public ParametersEncoder(BlobBuilder builder, bool allowVarArgs)
    {
        Builder = builder;
        _allowOptional = allowVarArgs;
    }

    // Begins encoding the next parameter.
    public ParameterTypeEncoder AddParameter() => new ParameterTypeEncoder(Builder);

    // Writes the SENTINEL marker that separates the fixed parameters from the
    // variadic ones. Legal only on a vararg signature, and only once — the
    // returned encoder has the sentinel permission cleared.
    public ParametersEncoder StartVarArgs()
    {
        if (!_allowOptional)
        {
            throw new InvalidOperationException();
        }

        Builder.WriteByte((byte)SignatureTypeCode.Sentinel);
        return new ParametersEncoder(Builder, allowVarArgs: false);
    }

    // Ends the parameter list. Intentionally writes nothing.
    public void EndParameters()
    {
    }
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="InflateBlocks.cs" company="XamlNinja">
// 2011 Richard Griffin and Ollie Riches
// </copyright>
// <summary>
// http://www.sharpgis.net/post/2011/08/28/GZIP-Compressed-Web-Requests-in-WP7-Take-2.aspx
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace WP7Contrib.Communications.Compression
{
using System;
internal sealed class InflateBlocks
{
// Bit masks for extracting 0..16 low-order bits: inflate_mask[n] == (1 << n) - 1.
private static readonly int[] inflate_mask = new int[17]
{
0,
1,
3,
7,
15,
31,
63,
(int) sbyte.MaxValue,
(int) byte.MaxValue,
511,
1023,
2047,
4095,
8191,
16383,
(int) short.MaxValue,
(int) ushort.MaxValue
};
// Order in which code-length code lengths are stored in a dynamic block
// (RFC 1951, section 3.2.7).
internal static readonly int[] border = new int[19]
{
16,
17,
18,
0,
8,
7,
9,
6,
10,
5,
11,
4,
12,
3,
13,
2,
14,
1,
15
};
// bb/tb: single-element boxes (decompiled ref-parameter style) holding the
// bit-length tree's bit depth and its root index into `hufts`.
internal int[] bb = new int[1];
internal int[] tb = new int[1];
internal InflateCodes codes = new InflateCodes();
internal InfTree inftree = new InfTree();
// State-machine constants; `mode` below takes these values.
private const int MANY = 1440;
private const int TYPE = 0;    // reading 3-bit block header
private const int LENS = 1;    // reading LEN/NLEN of a stored block
private const int STORED = 2;  // copying a stored block
private const int TABLE = 3;   // reading dynamic-block table lengths
private const int BTREE = 4;   // reading bit-length tree
private const int DTREE = 5;   // reading literal/distance code lengths
private const int CODES = 6;   // decoding compressed data via InflateCodes
private const int DRY = 7;     // flushing remaining window output
private const int DONE = 8;    // finished
private const int BAD = 9;     // data error
internal int mode;             // current state (one of the constants above)
internal int left;             // bytes left to copy in a stored block
internal int table;            // raw 14-bit HLIT/HDIST/HCLEN header value
internal int index;            // index into blens / border while reading lengths
internal int[] blens;          // code lengths being accumulated
internal int last;             // nonzero when the current block is the final one
internal ZlibCodec _codec;     // owning codec: input/output buffers and counters
internal int bitk;             // number of valid bits in the bit buffer
internal int bitb;             // bit buffer (input bits not yet consumed)
internal int[] hufts;          // shared Huffman tree work area (MANY * 3 ints)
internal byte[] window;        // circular output window
internal int end;              // window length (one past the last valid index)
internal int read;             // window read (flush) pointer
internal int write;            // window write pointer
internal object checkfn;       // non-null enables the Adler-32 running check
internal long check;           // running check value on decompressed output
// Explicit (empty) static constructor — decompiler artifact; its only effect
// is to remove the beforefieldinit flag so the static tables above are
// initialized eagerly.
static InflateBlocks()
{
}
// codec:   owning codec supplying input and receiving output.
// checkfn: pass non-null to enable the running Adler-32 check.
// w:       sliding-window size in bytes.
internal InflateBlocks(ZlibCodec codec, object checkfn, int w)
{
this._codec = codec;
this.hufts = new int[4320]; // MANY * 3 — Huffman tree work area
this.window = new byte[w];
this.end = w;
this.checkfn = checkfn;
this.mode = 0; // TYPE
this.Reset((long[])null);
}
/// <summary>
/// Resets the block decoder to its initial state (mode TYPE, empty bit
/// buffer and window) and re-seeds the Adler-32 check when enabled.
/// </summary>
/// <param name="c">Optional one-element array that receives the check value
/// accumulated so far, or null to discard it.</param>
internal void Reset(long[] c)
{
if (c != null)
c[0] = this.check;
// (Two decompiled empty conditionals on `mode` were removed here; their
// conditions only read fields and their bodies were empty, so they had
// no effect.)
this.mode = 0;
this.bitk = 0;
this.bitb = 0;
this.read = this.write = 0;
if (this.checkfn == null)
return;
// Re-seed the running Adler-32 with its initial value.
this._codec._Adler32 = this.check = Adler.Adler32(0L, (byte[])null, 0, 0);
}
// Decompression state machine for deflate blocks (RFC 1951). Consumes input
// from the codec, writes decoded bytes into the circular window, and returns
// a zlib status code via Flush. The hot state (bit buffer, pointers, counts)
// is copied into locals on entry and written back before every return:
//   sourceIndex = NextIn, num1 = AvailableBytesIn, number1 = bitb,
//   num2 = bitk, destinationIndex = write, num3 = free window space.
// Mode values: 0 TYPE, 1 LENS, 2 STORED, 3 TABLE, 4 BTREE, 5 DTREE,
// 6 CODES, 7 DRY, 8 DONE, 9 BAD. The trailing label_* sections are the
// decompiled exit paths (error reporting + state write-back).
internal int Process(int r)
{
int sourceIndex = this._codec.NextIn;
int num1 = this._codec.AvailableBytesIn;
int number1 = this.bitb;
int num2 = this.bitk;
int destinationIndex = this.write;
int num3 = destinationIndex < this.read ? this.read - destinationIndex - 1 : this.end - destinationIndex;
int num4;
int num5;
while (true)
{
switch (this.mode)
{
// TYPE: read the 3-bit block header (BFINAL flag + 2-bit block type).
case 0:
while (num2 < 3)
{
if (num1 != 0)
{
r = 0;
--num1;
number1 |= ((int)this._codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
num2 += 8;
}
else
{
// Out of input: write back state and flush what we have.
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
}
}
int number2 = number1 & 7;
this.last = number2 & 1;
switch (SharedUtils.URShift(number2, 1))
{
case 0:
// Stored block: skip to the next byte boundary, then read lengths.
int number3 = SharedUtils.URShift(number1, 3);
int num6 = num2 - 3;
int bits1 = num6 & 7;
number1 = SharedUtils.URShift(number3, bits1);
num2 = num6 - bits1;
this.mode = 1;
break;
case 1:
// Fixed Huffman codes: install the predefined trees.
int[] bl1 = new int[1];
int[] bd1 = new int[1];
int[][] tl1 = new int[1][];
int[][] td1 = new int[1][];
InfTree.inflate_trees_fixed(bl1, bd1, tl1, td1, this._codec);
this.codes.Init(bl1[0], bd1[0], tl1[0], 0, td1[0], 0);
number1 = SharedUtils.URShift(number1, 3);
num2 -= 3;
this.mode = 6;
break;
case 2:
// Dynamic Huffman codes: go read the table description.
number1 = SharedUtils.URShift(number1, 3);
num2 -= 3;
this.mode = 3;
break;
case 3:
// Block type 3 is invalid.
goto label_9;
}
break;
// LENS: read the 32-bit LEN/NLEN word of a stored block.
case 1:
while (num2 < 32)
{
if (num1 != 0)
{
r = 0;
--num1;
number1 |= ((int)this._codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
num2 += 8;
}
else
{
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
}
}
// NLEN must be the one's complement of LEN.
if ((SharedUtils.URShift(~number1, 16) & (int)ushort.MaxValue) == (number1 & (int)ushort.MaxValue))
{
this.left = number1 & (int)ushort.MaxValue;
number1 = num2 = 0;
this.mode = this.left != 0 ? 2 : (this.last != 0 ? 7 : 0);
break;
}
else
goto label_15;
// STORED: copy `left` literal bytes from input into the window.
case 2:
if (num1 != 0)
{
if (num3 == 0)
{
// Window full: try wrapping, then flushing, to make room.
if (destinationIndex == this.end && this.read != 0)
{
destinationIndex = 0;
num3 = destinationIndex < this.read ? this.read - destinationIndex - 1 : this.end - destinationIndex;
}
if (num3 == 0)
{
this.write = destinationIndex;
r = this.Flush(r);
destinationIndex = this.write;
num3 = destinationIndex < this.read ? this.read - destinationIndex - 1 : this.end - destinationIndex;
if (destinationIndex == this.end && this.read != 0)
{
destinationIndex = 0;
num3 = destinationIndex < this.read ? this.read - destinationIndex - 1 : this.end - destinationIndex;
}
if (num3 == 0)
goto label_26;
}
}
r = 0;
// Copy min(left, available input, window space) bytes.
int length = this.left;
if (length > num1)
length = num1;
if (length > num3)
length = num3;
Array.Copy((Array)this._codec.InputBuffer, sourceIndex, (Array)this.window, destinationIndex, length);
sourceIndex += length;
num1 -= length;
destinationIndex += length;
num3 -= length;
if ((this.left -= length) == 0)
{
this.mode = this.last != 0 ? 7 : 0;
break;
}
else
break;
}
else
goto label_18;
// TABLE: read the 14-bit HLIT/HDIST/HCLEN header of a dynamic block.
case 3:
while (num2 < 14)
{
if (num1 != 0)
{
r = 0;
--num1;
number1 |= ((int)this._codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
num2 += 8;
}
else
{
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
}
}
int num7;
this.table = num7 = number1 & 16383;
// HLIT and HDIST are limited to 29 extra symbols each.
if ((num7 & 31) <= 29 && (num7 >> 5 & 31) <= 29)
{
int length = 258 + (num7 & 31) + (num7 >> 5 & 31);
if (this.blens == null || this.blens.Length < length)
{
this.blens = new int[length];
}
else
{
for (int index = 0; index < length; ++index)
this.blens[index] = 0;
}
number1 = SharedUtils.URShift(number1, 14);
num2 -= 14;
this.index = 0;
this.mode = 4;
goto case 4;
}
else
goto label_39;
// BTREE: read the 3-bit code lengths of the bit-length (code-length) tree.
case 4:
while (this.index < 4 + SharedUtils.URShift(this.table, 10))
{
while (num2 < 3)
{
if (num1 != 0)
{
r = 0;
--num1;
number1 |= ((int)this._codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
num2 += 8;
}
else
{
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
}
}
this.blens[InflateBlocks.border[this.index++]] = number1 & 7;
number1 = SharedUtils.URShift(number1, 3);
num2 -= 3;
}
while (this.index < 19)
this.blens[InflateBlocks.border[this.index++]] = 0;
this.bb[0] = 7;
num4 = this.inftree.inflate_trees_bits(this.blens, this.bb, this.tb, this.hufts, this._codec);
if (num4 == 0)
{
this.index = 0;
this.mode = 5;
goto case 5;
}
else
goto label_55;
// DTREE: decode the literal/length and distance code lengths using the
// bit-length tree, expanding repeat codes 16/17/18.
case 5:
while (true)
{
int num8 = this.table;
if (this.index < 258 + (num8 & 31) + (num8 >> 5 & 31))
{
int index = this.bb[0];
while (num2 < index)
{
if (num1 != 0)
{
r = 0;
--num1;
number1 |= ((int)this._codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
num2 += 8;
}
else
{
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
}
}
// Decompiled empty conditional left in place (no effect).
if (this.tb[0] != -1)
{}
int bits2 = this.hufts[(this.tb[0] + (number1 & InflateBlocks.inflate_mask[index])) * 3 + 1];
int num9 = this.hufts[(this.tb[0] + (number1 & InflateBlocks.inflate_mask[bits2])) * 3 + 2];
if (num9 < 16)
{
// Literal code length 0..15.
number1 = SharedUtils.URShift(number1, bits2);
num2 -= bits2;
this.blens[this.index++] = num9;
}
else
{
// Repeat code: 16 repeats previous, 17/18 repeat zero.
int bits3 = num9 == 18 ? 7 : num9 - 14;
int num10 = num9 == 18 ? 11 : 3;
while (num2 < bits2 + bits3)
{
if (num1 != 0)
{
r = 0;
--num1;
number1 |= ((int)this._codec.InputBuffer[sourceIndex++] & (int)byte.MaxValue) << num2;
num2 += 8;
}
else
{
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
}
}
int number4 = SharedUtils.URShift(number1, bits2);
int num11 = num2 - bits2;
int num12 = num10 + (number4 & InflateBlocks.inflate_mask[bits3]);
number1 = SharedUtils.URShift(number4, bits3);
num2 = num11 - bits3;
int num13 = this.index;
int num14 = this.table;
if (num13 + num12 <= 258 + (num14 & 31) + (num14 >> 5 & 31) && (num9 != 16 || num13 >= 1))
{
int num15 = num9 == 16 ? this.blens[num13 - 1] : 0;
do
{
this.blens[num13++] = num15;
}
while (--num12 != 0);
this.index = num13;
}
else
goto label_73;
}
}
else
break;
}
// All lengths read: build the dynamic literal/distance trees.
this.tb[0] = -1;
int[] bl2 = new int[1]
{
9
};
int[] bd2 = new int[1]
{
6
};
int[] tl2 = new int[1];
int[] td2 = new int[1];
int num16 = this.table;
num5 = this.inftree.inflate_trees_dynamic(257 + (num16 & 31), 1 + (num16 >> 5 & 31), this.blens, bl2, bd2, tl2, td2, this.hufts, this._codec);
switch (num5)
{
case 0:
this.codes.Init(bl2[0], bd2[0], this.hufts, tl2[0], this.hufts, td2[0]);
this.mode = 6;
goto label_83;
case -3:
goto label_80;
default:
goto label_81;
}
// CODES: delegate symbol decoding to InflateCodes.
case 6:
label_83:
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
if ((r = this.codes.Process(this, r)) == 1)
{
// Block finished: re-load locals and continue (or dry out).
r = 0;
sourceIndex = this._codec.NextIn;
num1 = this._codec.AvailableBytesIn;
number1 = this.bitb;
num2 = this.bitk;
destinationIndex = this.write;
num3 = destinationIndex < this.read ? this.read - destinationIndex - 1 : this.end - destinationIndex;
if (this.last == 0)
{
this.mode = 0;
break;
}
else
goto label_87;
}
else
goto label_84;
case 7:
goto label_88;
case 8:
goto label_91;
case 9:
goto label_92;
default:
goto label_93;
}
}
// Exit paths below: each writes locals back to the instance/codec and flushes.
label_9:
// Invalid block type.
int num17 = SharedUtils.URShift(number1, 3);
int num18 = num2 - 3;
this.mode = 9;
this._codec.Message = "invalid block type";
r = -3;
this.bitb = num17;
this.bitk = num18;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_15:
// LEN/NLEN mismatch in a stored block.
this.mode = 9;
this._codec.Message = "invalid stored block lengths";
r = -3;
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_18:
// Out of input during a stored copy.
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_26:
// No window space and nothing flushable.
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_39:
// HLIT/HDIST out of range.
this.mode = 9;
this._codec.Message = "too many length or distance symbols";
r = -3;
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_55:
// Bit-length tree construction failed.
r = num4;
if (r == -3)
{
this.blens = (int[])null;
this.mode = 9;
}
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_73:
// Repeat code overran the length table (or 16 with no previous length).
this.blens = (int[])null;
this.mode = 9;
this._codec.Message = "invalid bit length repeat";
r = -3;
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_80:
// Dynamic tree data error (-3): discard lengths and mark BAD.
this.blens = (int[])null;
this.mode = 9;
label_81:
r = num5;
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_84:
// InflateCodes did not finish (needs more input/output).
return this.Flush(r);
label_87:
this.mode = 7;
label_88:
// DRY: flush the remaining window contents.
this.write = destinationIndex;
r = this.Flush(r);
destinationIndex = this.write;
int num19 = destinationIndex < this.read ? this.read - destinationIndex - 1 : this.end - destinationIndex;
if (this.read != this.write)
{
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
}
else
this.mode = 8;
label_91:
// DONE: report stream end (1).
r = 1;
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_92:
// BAD: data error (-3).
r = -3;
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
label_93:
// Unknown mode: stream error (-2).
r = -2;
this.bitb = number1;
this.bitk = num2;
this._codec.AvailableBytesIn = num1;
this._codec.TotalBytesIn += (long)(sourceIndex - this._codec.NextIn);
this._codec.NextIn = sourceIndex;
this.write = destinationIndex;
return this.Flush(r);
}
// Resets state and releases the window and Huffman work area for GC.
internal void Free()
{
this.Reset((long[])null);
this.window = (byte[])null;
this.hufts = (int[])null;
}
// Preloads the sliding window with `n` bytes of a preset dictionary
// starting at `start`, and positions both window pointers after it.
// NOTE(review): assumes n <= window length — confirm callers guarantee this.
internal void SetDictionary(byte[] d, int start, int n)
{
Array.Copy((Array)d, start, (Array)this.window, 0, n);
this.read = this.write = n;
}
// Returns 1 when the decoder sits at a stored-block length word
// (mode LENS, a byte-aligned point suitable for inflateSync), else 0.
internal int SyncPoint()
{
    if (this.mode == 1)
    {
        return 1;
    }

    return 0;
}
// Copies as much decoded data as fits from the circular window into the
// codec's output buffer, updating the running Adler-32 when enabled.
// The copy is done in up to two chunks because the readable region may
// wrap past `end`. Returns the (possibly upgraded) status code.
internal int Flush(int r)
{
int destinationIndex1 = this._codec.NextOut;
int num1 = this.read;
// Bytes readable up to `write` or to the physical end of the window.
int num2 = (num1 <= this.write ? this.write : this.end) - num1;
if (num2 > this._codec.AvailableBytesOut)
num2 = this._codec.AvailableBytesOut;
// -5 (Z_BUF_ERROR) is cleared once actual progress is possible.
if (num2 != 0 && r == -5)
r = 0;
this._codec.AvailableBytesOut -= num2;
this._codec.TotalBytesOut += (long)num2;
if (this.checkfn != null)
this._codec._Adler32 = this.check = Adler.Adler32(this.check, this.window, num1, num2);
Array.Copy((Array)this.window, num1, (Array)this._codec.OutputBuffer, destinationIndex1, num2);
int destinationIndex2 = destinationIndex1 + num2;
int num3 = num1 + num2;
if (num3 == this.end)
{
// Wrapped: copy the second chunk from the start of the window.
int num4 = 0;
if (this.write == this.end)
this.write = 0;
int num5 = this.write - num4;
if (num5 > this._codec.AvailableBytesOut)
num5 = this._codec.AvailableBytesOut;
if (num5 != 0 && r == -5)
r = 0;
this._codec.AvailableBytesOut -= num5;
this._codec.TotalBytesOut += (long)num5;
if (this.checkfn != null)
this._codec._Adler32 = this.check = Adler.Adler32(this.check, this.window, num4, num5);
Array.Copy((Array)this.window, num4, (Array)this._codec.OutputBuffer, destinationIndex2, num5);
destinationIndex2 += num5;
num3 = num4 + num5;
}
this._codec.NextOut = destinationIndex2;
this.read = num3;
return r;
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace Fabrikam.Module1.Uc1.Services.WebApi.v1.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
    // Fresh reference map per top-level call; it tracks already-created
    // instances so circular object graphs terminate.
    var createdObjectReferences = new Dictionary<Type, object>();
    return GenerateObject(type, createdObjectReferences);
}
// Recursive worker behind GenerateObject(Type). `createdObjectReferences`
// maps already-created types to their instances so circular references
// terminate. Any failure (throwing constructor, non-constructible type,
// reflection error) yields null rather than an exception, by design.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
// Primitives, strings, Guid, DateTime, etc.
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
// Generic types (collections, Nullable<T>, tuples, KeyValuePair, POCOs).
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
// Non-generic collection interfaces get concrete stand-ins.
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
// Fallback: general POCO, but only for publicly visible types.
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
// Non-public / unsupported type.
return null;
}
// Dispatches a constructed generic type to the appropriate generator:
// Nullable<T>, KeyValuePair<,>, Tuple family, collection/dictionary
// interfaces (instantiated with List<>/Dictionary<,>), IQueryable<T>,
// and finally general public POCOs. Returns null when unsupported.
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
// Single-argument collection interfaces are realized as List<T>.
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
// Concrete types implementing ICollection<T> are filled directly.
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
// IDictionary<K,V> is realized as Dictionary<K,V>.
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
// Generic POCO fallback for publicly visible types.
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
// Builds a System.Tuple instance, generating a value for each component.
// Returns null only when EVERY component failed to generate.
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
    Type[] componentTypes = type.GetGenericArguments();
    var componentValues = new object[componentTypes.Length];
    var generator = new ObjectGenerator();
    bool allComponentsNull = true;

    for (int i = 0; i < componentTypes.Length; i++)
    {
        object value = generator.GenerateObject(componentTypes[i], createdObjectReferences);
        componentValues[i] = value;
        allComponentsNull &= value == null;
    }

    return allComponentsNull ? null : Activator.CreateInstance(type, componentValues);
}
// True when the given open generic definition is one of the System.Tuple
// family (arity 1 through 8).
private static bool IsTuple(Type genericTypeDefinition)
{
    Type[] tupleDefinitions =
    {
        typeof(Tuple<>),
        typeof(Tuple<,>),
        typeof(Tuple<,,>),
        typeof(Tuple<,,,>),
        typeof(Tuple<,,,,>),
        typeof(Tuple<,,,,,>),
        typeof(Tuple<,,,,,,>),
        typeof(Tuple<,,,,,,,>),
    };

    return Array.IndexOf(tupleDefinitions, genericTypeDefinition) >= 0;
}
// Builds a KeyValuePair<K,V>. Fails (returns null) only when BOTH the key
// and the value could not be generated.
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
    Type[] genericArgs = keyValuePairType.GetGenericArguments();
    var generator = new ObjectGenerator();
    object key = generator.GenerateObject(genericArgs[0], createdObjectReferences);
    object value = generator.GenerateObject(genericArgs[1], createdObjectReferences);

    if (key == null && value == null)
    {
        // Failed to create key and values
        return null;
    }

    return Activator.CreateInstance(keyValuePairType, key, value);
}
// Builds an array of the requested element type and size, populating each
// slot. An array whose elements ALL failed to generate is treated as a
// failure (null) — note this also means a zero-length request yields null.
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
    Type elementType = arrayType.GetElementType();
    Array array = Array.CreateInstance(elementType, size);
    var generator = new ObjectGenerator();
    bool anyElementGenerated = false;

    for (int i = 0; i < size; i++)
    {
        object element = generator.GenerateObject(elementType, createdObjectReferences);
        array.SetValue(element, i);
        anyElementGenerated |= element != null;
    }

    return anyElementGenerated ? (object)array : null;
}
// Builds and fills a dictionary instance via reflection, using the type's
// Add/TryAdd and Contains/ContainsKey methods. Key/value types come from
// the generic arguments, or object for non-generic dictionaries.
// NOTE(review): assumes the type exposes at least one of each method pair;
// a type with neither would NullReferenceException here — the caller's
// catch-all turns that into a null result, but confirm this is intended.
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
// Skip duplicate keys instead of letting Add throw.
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
// Returns the first declared value of the enum, or null for an enum with
// no members.
private static object GenerateEnum(Type enumType)
{
    Array possibleValues = Enum.GetValues(enumType);
    return possibleValues.Length > 0 ? possibleValues.GetValue(0) : null;
}
// Builds an IQueryable by first generating a backing List<T> (generic) or
// object[] (non-generic) and then wrapping it with Queryable.AsQueryable.
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
// Invoke the generic AsQueryable<T>(IEnumerable<T>) overload via reflection.
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
// Builds and fills a collection instance via its Add method. The element
// type is the single generic argument, or object for non-generic types.
// A collection whose elements ALL failed to generate is treated as a
// failure (null).
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
    Type elementType = collectionType.IsGenericType
        ? collectionType.GetGenericArguments()[0]
        : typeof(object);

    object collection = Activator.CreateInstance(collectionType);
    MethodInfo addMethod = collectionType.GetMethod("Add");
    var generator = new ObjectGenerator();
    bool anyElementGenerated = false;

    for (int i = 0; i < size; i++)
    {
        object element = generator.GenerateObject(elementType, createdObjectReferences);
        addMethod.Invoke(collection, new object[] { element });
        anyElementGenerated |= element != null;
    }

    return anyElementGenerated ? collection : null;
}
// Nullable<T>: generate a plain T — boxing it yields a valid boxed
// Nullable value for the caller.
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
    Type underlyingType = nullableType.GetGenericArguments()[0];
    var generator = new ObjectGenerator();
    return generator.GenerateObject(underlyingType, createdObjectReferences);
}
// Builds a general POCO: value types via Activator, reference types via
// their public default constructor (null if absent). The instance is
// registered in `createdObjectReferences` BEFORE its members are populated
// so circular object graphs terminate.
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
// Populates every writable public instance property of `obj` with a
// generated value.
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
    var generator = new ObjectGenerator();
    foreach (PropertyInfo property in type.GetProperties(BindingFlags.Public | BindingFlags.Instance))
    {
        if (!property.CanWrite)
        {
            continue;
        }

        object value = generator.GenerateObject(property.PropertyType, createdObjectReferences);
        property.SetValue(obj, value, null);
    }
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
    // Populates every public instance field of obj with a generated value.
    var generator = new ObjectGenerator();
    foreach (FieldInfo field in type.GetFields(BindingFlags.Public | BindingFlags.Instance))
    {
        field.SetValue(obj, generator.GenerateObject(field.FieldType, createdObjectReferences));
    }
}
private class SimpleTypeObjectGenerator
{
    // Monotonically increasing seed; each generated value consumes the next index.
    private long _index = 0;

    // One factory per supported simple type, keyed by the exact Type.
    private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

    [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
    private static Dictionary<Type, Func<long, object>> InitializeGenerators()
    {
        var generators = new Dictionary<Type, Func<long, object>>();
        generators[typeof(Boolean)] = index => true;
        generators[typeof(Byte)] = index => (Byte)64;
        generators[typeof(Char)] = index => (Char)65;
        generators[typeof(DateTime)] = index => DateTime.Now;
        generators[typeof(DateTimeOffset)] = index => new DateTimeOffset(DateTime.Now);
        generators[typeof(DBNull)] = index => DBNull.Value;
        generators[typeof(Decimal)] = index => (Decimal)index;
        generators[typeof(Double)] = index => (Double)(index + 0.1);
        generators[typeof(Guid)] = index => Guid.NewGuid();
        generators[typeof(Int16)] = index => (Int16)(index % Int16.MaxValue);
        generators[typeof(Int32)] = index => (Int32)(index % Int32.MaxValue);
        generators[typeof(Int64)] = index => (Int64)index;
        generators[typeof(Object)] = index => new object();
        generators[typeof(SByte)] = index => (SByte)64;
        generators[typeof(Single)] = index => (Single)(index + 0.1);
        generators[typeof(String)] = index => String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
        generators[typeof(TimeSpan)] = index => TimeSpan.FromTicks(1234567);
        generators[typeof(UInt16)] = index => (UInt16)(index % UInt16.MaxValue);
        generators[typeof(UInt32)] = index => (UInt32)(index % UInt32.MaxValue);
        generators[typeof(UInt64)] = index => (UInt64)index;
        generators[typeof(Uri)] = index => new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
        return generators;
    }

    // True when a canned factory exists for the requested simple type.
    public static bool CanGenerateObject(Type type)
    {
        return DefaultGenerators.ContainsKey(type);
    }

    // Produces a sample value for the given simple type. The caller is expected
    // to have checked CanGenerateObject first; an unknown type throws
    // KeyNotFoundException via the dictionary indexer.
    public object GenerateObject(Type type)
    {
        return DefaultGenerators[type](++_index);
    }
}
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvr = Google.Ads.GoogleAds.V8.Resources;
using gax = Google.Api.Gax;
using sys = System;
namespace Google.Ads.GoogleAds.V8.Resources
{
/// <summary>Resource name for the <c>UserInterest</c> resource.</summary>
// NOTE: this type lives in a generated file ("Generated code. DO NOT EDIT!" in the
// file header); manual changes are liable to be overwritten by the generator.
public sealed partial class UserInterestName : gax::IResourceName, sys::IEquatable<UserInterestName>
{
/// <summary>The possible contents of <see cref="UserInterestName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern <c>customers/{customer_id}/userInterests/{user_interest_id}</c>.
/// </summary>
CustomerUserInterest = 1,
}
// Template for the single known pattern; used both to Expand (format) and TryParseName (parse).
private static gax::PathTemplate s_customerUserInterest = new gax::PathTemplate("customers/{customer_id}/userInterests/{user_interest_id}");
/// <summary>Creates a <see cref="UserInterestName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="UserInterestName"/> containing the provided
/// <paramref name="unparsedResourceName"/>.
/// </returns>
public static UserInterestName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new UserInterestName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="UserInterestName"/> with the pattern
/// <c>customers/{customer_id}/userInterests/{user_interest_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="userInterestId">The <c>UserInterest</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="UserInterestName"/> constructed from the provided ids.</returns>
public static UserInterestName FromCustomerUserInterest(string customerId, string userInterestId) =>
new UserInterestName(ResourceNameType.CustomerUserInterest, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), userInterestId: gax::GaxPreconditions.CheckNotNullOrEmpty(userInterestId, nameof(userInterestId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="UserInterestName"/> with pattern
/// <c>customers/{customer_id}/userInterests/{user_interest_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="userInterestId">The <c>UserInterest</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="UserInterestName"/> with pattern
/// <c>customers/{customer_id}/userInterests/{user_interest_id}</c>.
/// </returns>
public static string Format(string customerId, string userInterestId) =>
FormatCustomerUserInterest(customerId, userInterestId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="UserInterestName"/> with pattern
/// <c>customers/{customer_id}/userInterests/{user_interest_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="userInterestId">The <c>UserInterest</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="UserInterestName"/> with pattern
/// <c>customers/{customer_id}/userInterests/{user_interest_id}</c>.
/// </returns>
public static string FormatCustomerUserInterest(string customerId, string userInterestId) =>
s_customerUserInterest.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), gax::GaxPreconditions.CheckNotNullOrEmpty(userInterestId, nameof(userInterestId)));
/// <summary>Parses the given resource name string into a new <see cref="UserInterestName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>customers/{customer_id}/userInterests/{user_interest_id}</c></description></item>
/// </list>
/// </remarks>
/// <param name="userInterestName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="UserInterestName"/> if successful.</returns>
public static UserInterestName Parse(string userInterestName) => Parse(userInterestName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="UserInterestName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>customers/{customer_id}/userInterests/{user_interest_id}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="userInterestName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="UserInterestName"/> if successful.</returns>
public static UserInterestName Parse(string userInterestName, bool allowUnparsed) =>
TryParse(userInterestName, allowUnparsed, out UserInterestName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="UserInterestName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>customers/{customer_id}/userInterests/{user_interest_id}</c></description></item>
/// </list>
/// </remarks>
/// <param name="userInterestName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="UserInterestName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string userInterestName, out UserInterestName result) =>
TryParse(userInterestName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="UserInterestName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>customers/{customer_id}/userInterests/{user_interest_id}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="userInterestName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="UserInterestName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string userInterestName, bool allowUnparsed, out UserInterestName result)
{
gax::GaxPreconditions.CheckNotNull(userInterestName, nameof(userInterestName));
gax::TemplatedResourceName resourceName;
// Try the single known pattern first; resourceName[0]/[1] are the captured
// customer_id and user_interest_id segments, in template order.
if (s_customerUserInterest.TryParseName(userInterestName, out resourceName))
{
result = FromCustomerUserInterest(resourceName[0], resourceName[1]);
return true;
}
// Fall back to storing the raw, unparsed name only when the caller opted in.
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(userInterestName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
private UserInterestName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string customerId = null, string userInterestId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
CustomerId = customerId;
UserInterestId = userInterestId;
}
/// <summary>
/// Constructs a new instance of a <see cref="UserInterestName"/> class from the component parts of pattern
/// <c>customers/{customer_id}/userInterests/{user_interest_id}</c>
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="userInterestId">The <c>UserInterest</c> ID. Must not be <c>null</c> or empty.</param>
public UserInterestName(string customerId, string userInterestId) : this(ResourceNameType.CustomerUserInterest, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), userInterestId: gax::GaxPreconditions.CheckNotNullOrEmpty(userInterestId, nameof(userInterestId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string CustomerId { get; }
/// <summary>
/// The <c>UserInterest</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource
/// name.
/// </summary>
public string UserInterestId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
// Unparsed names round-trip verbatim; the known pattern is re-expanded from its IDs.
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.CustomerUserInterest: return s_customerUserInterest.Expand(CustomerId, UserInterestId);
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as UserInterestName);
/// <inheritdoc/>
public bool Equals(UserInterestName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(UserInterestName a, UserInterestName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(UserInterestName a, UserInterestName b) => !(a == b);
}
public partial class UserInterest
{
    /// <summary>
    /// <see cref="gagvr::UserInterestName"/>-typed view over the <see cref="ResourceName"/> resource name property.
    /// </summary>
    internal UserInterestName ResourceNameAsUserInterestName
    {
        get
        {
            return string.IsNullOrEmpty(ResourceName) ? null : gagvr::UserInterestName.Parse(ResourceName, allowUnparsed: true);
        }
        set
        {
            ResourceName = value?.ToString() ?? "";
        }
    }

    /// <summary>
    /// <see cref="gagvr::UserInterestName"/>-typed view over the <see cref="Name"/> resource name property.
    /// </summary>
    internal UserInterestName UserInterestName
    {
        get
        {
            return string.IsNullOrEmpty(Name) ? null : gagvr::UserInterestName.Parse(Name, allowUnparsed: true);
        }
        set
        {
            Name = value?.ToString() ?? "";
        }
    }

    /// <summary>
    /// <see cref="gagvr::UserInterestName"/>-typed view over the <see cref="UserInterestParent"/> resource name
    /// property.
    /// </summary>
    internal UserInterestName UserInterestParentAsUserInterestName
    {
        get
        {
            return string.IsNullOrEmpty(UserInterestParent) ? null : gagvr::UserInterestName.Parse(UserInterestParent, allowUnparsed: true);
        }
        set
        {
            UserInterestParent = value?.ToString() ?? "";
        }
    }
}
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/
using System.Collections.Generic;
namespace System.Management.Automation
{
//
// SpecialVariables contains the names and variable paths to any variable that PowerShell depends
// on in some way, either in that it is an automatic variable (created and updated automatically)
// or configuration variables that users may or may not set.
//
// The convention is to have a const string field with either the exact variable name, or if that's
// not possible, a suggestive name, such as Underbar. Having a field is preferred over explicit strings
// to make searching easier.
//
// The other convention is to have a VariablePath field with "VarPath" appended to the string
// field name. In general, it is preferred to use the VariablePath instead of the string
// because we'll end up creating a VariablePath anyway, so doing it once is faster.
//
internal static class SpecialVariables
{
    internal const string HistorySize = "MaximumHistoryCount";
    internal static readonly VariablePath HistorySizeVarPath = new VariablePath(HistorySize);
    internal const string MyInvocation = "MyInvocation";
    internal static readonly VariablePath MyInvocationVarPath = new VariablePath(MyInvocation);
    internal const string OFS = "OFS";
    internal static readonly VariablePath OFSVarPath = new VariablePath(OFS);
    internal const string OutputEncoding = "OutputEncoding";
    internal static readonly VariablePath OutputEncodingVarPath = new VariablePath(OutputEncoding);
    internal const string VerboseHelpErrors = "VerboseHelpErrors";
    internal static readonly VariablePath VerboseHelpErrorsVarPath = new VariablePath(VerboseHelpErrors);
    #region Logging Variables
    internal const string LogEngineHealthEvent = "LogEngineHealthEvent";
    internal static readonly VariablePath LogEngineHealthEventVarPath = new VariablePath(LogEngineHealthEvent);
    internal const string LogEngineLifecycleEvent = "LogEngineLifecycleEvent";
    internal static readonly VariablePath LogEngineLifecycleEventVarPath = new VariablePath(LogEngineLifecycleEvent);
    internal const string LogCommandHealthEvent = "LogCommandHealthEvent";
    internal static readonly VariablePath LogCommandHealthEventVarPath = new VariablePath(LogCommandHealthEvent);
    internal const string LogCommandLifecycleEvent = "LogCommandLifecycleEvent";
    internal static readonly VariablePath LogCommandLifecycleEventVarPath = new VariablePath(LogCommandLifecycleEvent);
    internal const string LogProviderHealthEvent = "LogProviderHealthEvent";
    internal static readonly VariablePath LogProviderHealthEventVarPath = new VariablePath(LogProviderHealthEvent);
    internal const string LogProviderLifecycleEvent = "LogProviderLifecycleEvent";
    internal static readonly VariablePath LogProviderLifecycleEventVarPath = new VariablePath(LogProviderLifecycleEvent);
    internal const string LogSettingsEvent = "LogSettingsEvent";
    internal static readonly VariablePath LogSettingsEventVarPath = new VariablePath(LogSettingsEvent);
    internal const string PSLogUserData = "PSLogUserData";
    internal static readonly VariablePath PSLogUserDataPath = new VariablePath(PSLogUserData);
    #endregion Logging Variables
    internal const string NestedPromptLevel = "NestedPromptLevel";
    internal static readonly VariablePath NestedPromptCounterVarPath = new VariablePath("global:" + NestedPromptLevel);
    internal const string CurrentlyExecutingCommand = "CurrentlyExecutingCommand";
    internal static readonly VariablePath CurrentlyExecutingCommandVarPath = new VariablePath(CurrentlyExecutingCommand);
    internal const string PSBoundParameters = "PSBoundParameters";
    internal static readonly VariablePath PSBoundParametersVarPath = new VariablePath(PSBoundParameters);
    internal const string Matches = "Matches";
    internal static readonly VariablePath MatchesVarPath = new VariablePath(Matches);
    internal const string LastExitCode = "LASTEXITCODE";
    internal static readonly VariablePath LastExitCodeVarPath = new VariablePath("global:" + LastExitCode);
    internal const string PSDebugContext = "PSDebugContext";
    internal static readonly VariablePath PSDebugContextVarPath = new VariablePath(PSDebugContext);
    internal const string StackTrace = "StackTrace";
    internal static readonly VariablePath StackTraceVarPath = new VariablePath("global:" + StackTrace);
    internal const string FirstToken = "^";
    internal static readonly VariablePath FirstTokenVarPath = new VariablePath("global:" + FirstToken);
    internal const string LastToken = "$";
    internal static readonly VariablePath LastTokenVarPath = new VariablePath("global:" + LastToken);
    internal static bool IsUnderbar(string name) { return name.Length == 1 && name[0] == '_'; }
    internal const string PSItem = "PSItem"; // simple alias for $_
    internal const string Underbar = "_";
    internal static readonly VariablePath UnderbarVarPath = new VariablePath(Underbar);
    internal const string Question = "?";
    internal static readonly VariablePath QuestionVarPath = new VariablePath(Question);
    internal const string Args = "args";
    internal static readonly VariablePath ArgsVarPath = new VariablePath("local:" + Args);
    internal const string This = "this";
    internal static readonly VariablePath ThisVarPath = new VariablePath(This);
    internal const string Input = "input";
    internal static readonly VariablePath InputVarPath = new VariablePath("local:" + Input);
    internal const string PSCmdlet = "PSCmdlet";
    internal static readonly VariablePath PSCmdletVarPath = new VariablePath(PSCmdlet);
    internal const string Error = "error";
    internal static readonly VariablePath ErrorVarPath = new VariablePath("global:" + Error);
    internal const string EventError = "error";
    internal static readonly VariablePath EventErrorVarPath = new VariablePath("script:" + EventError);
    internal const string PathExt = "env:PATHEXT";
    internal static readonly VariablePath PathExtVarPath = new VariablePath(PathExt);
    internal const string PSEmailServer = "PSEmailServer";
    internal static readonly VariablePath PSEmailServerVarPath = new VariablePath(PSEmailServer);
    internal const string PSDefaultParameterValues = "PSDefaultParameterValues";
    internal static readonly VariablePath PSDefaultParameterValuesVarPath = new VariablePath(PSDefaultParameterValues);
    internal const string PSScriptRoot = "PSScriptRoot";
    internal static readonly VariablePath PSScriptRootVarPath = new VariablePath(PSScriptRoot);
    internal const string PSCommandPath = "PSCommandPath";
    internal static readonly VariablePath PSCommandPathVarPath = new VariablePath(PSCommandPath);
    internal const string PSSenderInfo = "PSSenderInfo";
    internal static readonly VariablePath PSSenderInfoVarPath = new VariablePath(PSSenderInfo);
    internal const string @foreach = "foreach";
    internal static readonly VariablePath foreachVarPath = new VariablePath("local:" + @foreach);
    internal const string @switch = "switch";
    internal static readonly VariablePath switchVarPath = new VariablePath("local:" + @switch);
    // NOTE(review): the VarPath fields from here through the Platform Variables region
    // are not readonly, unlike every field above — confirm whether anything reassigns
    // them; if not, they should be made readonly for consistency.
    internal const string pwd = "PWD";
    internal static VariablePath PWDVarPath = new VariablePath("global:" + pwd);
    internal const string Null = "null";
    internal static VariablePath NullVarPath = new VariablePath(Null);
    internal const string True = "true";
    internal static VariablePath TrueVarPath = new VariablePath(True);
    internal const string False = "false";
    internal static VariablePath FalseVarPath = new VariablePath(False);
    internal const string PSModuleAutoLoading = "PSModuleAutoLoadingPreference";
    internal static VariablePath PSModuleAutoLoadingPreferenceVarPath = new VariablePath("global:" + PSModuleAutoLoading);
    #region Platform Variables
    internal const string IsLinux = "IsLinux";
    internal static VariablePath IsLinuxPath = new VariablePath(IsLinux);
    internal const string IsOSX = "IsOSX";
    internal static VariablePath IsOSXPath = new VariablePath(IsOSX);
    internal const string IsWindows = "IsWindows";
    internal static VariablePath IsWindowsPath = new VariablePath(IsWindows);
    internal const string IsCoreCLR = "IsCoreCLR";
    internal static VariablePath IsCoreCLRPath = new VariablePath(IsCoreCLR);
    #endregion
    #region Preference Variables
    internal const string DebugPreference = "DebugPreference";
    internal static readonly VariablePath DebugPreferenceVarPath = new VariablePath(DebugPreference);
    internal const string ErrorActionPreference = "ErrorActionPreference";
    internal static readonly VariablePath ErrorActionPreferenceVarPath = new VariablePath(ErrorActionPreference);
    internal const string ProgressPreference = "ProgressPreference";
    internal static readonly VariablePath ProgressPreferenceVarPath = new VariablePath(ProgressPreference);
    internal const string VerbosePreference = "VerbosePreference";
    internal static readonly VariablePath VerbosePreferenceVarPath = new VariablePath(VerbosePreference);
    internal const string WarningPreference = "WarningPreference";
    internal static readonly VariablePath WarningPreferenceVarPath = new VariablePath(WarningPreference);
    internal const string WhatIfPreference = "WhatIfPreference";
    internal static readonly VariablePath WhatIfPreferenceVarPath = new VariablePath(WhatIfPreference);
    internal const string ConfirmPreference = "ConfirmPreference";
    internal static readonly VariablePath ConfirmPreferenceVarPath = new VariablePath(ConfirmPreference);
    internal const string InformationPreference = "InformationPreference";
    internal static readonly VariablePath InformationPreferenceVarPath = new VariablePath(InformationPreference);
    #endregion Preference Variables
    internal const string ErrorView = "ErrorView";
    internal static readonly VariablePath ErrorViewVarPath = new VariablePath(ErrorView);
    /// <summary>
    /// shell environment variable
    /// </summary>
    internal const string PSSessionConfigurationName = "PSSessionConfigurationName";
    internal static readonly VariablePath PSSessionConfigurationNameVarPath = new VariablePath("global:" + PSSessionConfigurationName);
    /// <summary>
    /// environment variable that will define the default
    /// application name for the connection uri
    /// </summary>
    internal const string PSSessionApplicationName = "PSSessionApplicationName";
    internal static readonly VariablePath PSSessionApplicationNameVarPath = new VariablePath("global:" + PSSessionApplicationName);
    #region AllScope variables created in every session
    internal const string ConsoleFileName = "ConsoleFileName";
    internal const string ExecutionContext = "ExecutionContext";
    internal const string Home = "HOME";
    internal const string Host = "Host";
    internal const string PID = "PID";
    internal const string PSCulture = "PSCulture";
    internal const string PSHome = "PSHOME";
    internal const string PSUICulture = "PSUICulture";
    internal const string PSVersionTable = "PSVersionTable";
    internal const string PSEdition = "PSEdition";
    internal const string ShellId = "ShellId";
    internal static List<string> AllScopeSessionVariables = new List<string>
    {
        ConsoleFileName,
        ExecutionContext,
        Home,
        Host,
        PID,
        PSCulture,
        PSHome,
        PSUICulture,
        PSVersionTable,
        PSEdition,
        ShellId
    };
    #endregion AllScope variables created in every session
    // Order here must match the AutomaticVariable enum and AutomaticVariableTypes below.
    internal static readonly string[] AutomaticVariables = {
        SpecialVariables.Underbar,
        SpecialVariables.Args,
        SpecialVariables.This,
        SpecialVariables.Input,
        SpecialVariables.PSCmdlet,
        SpecialVariables.PSBoundParameters,
        SpecialVariables.MyInvocation,
        SpecialVariables.PSScriptRoot,
        SpecialVariables.PSCommandPath,
    };
    internal static readonly Type[] AutomaticVariableTypes = {
        /* Underbar */          typeof(object),
        /* Args */              typeof(object[]),
        /* This */              typeof(object),
        /* Input */             typeof(object),
        /* PSCmdlet */          typeof(PSScriptCmdlet),
        /* PSBoundParameters */ typeof(PSBoundParametersDictionary),
        /* MyInvocation */      typeof(InvocationInfo),
        /* PSScriptRoot */      typeof(string),
        /* PSCommandPath */     typeof(string),
    };
    // Order here must match the PreferenceVariable enum and PreferenceVariableTypes below.
    internal static readonly string[] PreferenceVariables = {
        SpecialVariables.DebugPreference,
        SpecialVariables.VerbosePreference,
        SpecialVariables.ErrorActionPreference,
        SpecialVariables.WhatIfPreference,
        SpecialVariables.WarningPreference,
        SpecialVariables.InformationPreference,
        SpecialVariables.ConfirmPreference,
    };
    internal static readonly Type[] PreferenceVariableTypes = {
        /* DebugPreference */       typeof(ActionPreference),
        /* VerbosePreference */     typeof(ActionPreference),
        /* ErrorPreference */       typeof(ActionPreference),
        /* WhatIfPreference */      typeof(SwitchParameter),
        /* WarningPreference */     typeof(ActionPreference),
        /* InformationPreference */ typeof(ActionPreference),
        /* ConfirmPreference */     typeof(ConfirmImpact),
    };
    // The following variables are created in every session w/ AllScope. We avoid creating local slots when we
    // see an assignment to any of these variables so that they get handled properly (either throwing an exception
    // because they are constant/readonly, or having the value persist in parent scopes where the allscope variable
    // also exists).
    internal static readonly string[] AllScopeVariables = {
        SpecialVariables.Question,
        SpecialVariables.ConsoleFileName,
        SpecialVariables.ExecutionContext,
        SpecialVariables.False,
        SpecialVariables.Home,
        SpecialVariables.Host,
        SpecialVariables.PID,
        SpecialVariables.PSCulture,
        SpecialVariables.PSHome,
        SpecialVariables.PSUICulture,
        SpecialVariables.PSVersionTable,
        SpecialVariables.PSEdition,
        SpecialVariables.ShellId,
        SpecialVariables.True,
    };
    // Implicit (automatic/global) variables that PowerShell class methods may read
    // without an explicit scope qualifier; compared case-insensitively.
    private static readonly HashSet<string> s_classMethodsAccessibleVariables = new HashSet<string>
    (
        new string[]
        {
            SpecialVariables.LastExitCode,
            SpecialVariables.Error,
            SpecialVariables.StackTrace,
            SpecialVariables.OutputEncoding,
            SpecialVariables.NestedPromptLevel,
            SpecialVariables.pwd,
            SpecialVariables.Matches,
        },
        StringComparer.OrdinalIgnoreCase
    );
    internal static bool IsImplicitVariableAccessibleInClassMethod(VariablePath variablePath)
    {
        return s_classMethodsAccessibleVariables.Contains(variablePath.UserPath);
    }
}
internal enum AutomaticVariable
{
// These values index into SpecialVariables.AutomaticVariables and
// SpecialVariables.AutomaticVariableTypes, so order and numbering must stay
// in sync with those arrays.
Underbar = 0,
Args = 1,
This = 2,
Input = 3,
PSCmdlet = 4,
PSBoundParameters = 5,
MyInvocation = 6,
PSScriptRoot = 7,
PSCommandPath = 8,
NumberOfAutomaticVariables // 1 + the last, used to initialize global scope.
}
internal enum PreferenceVariable
{
// Numbering continues where AutomaticVariable left off (NumberOfAutomaticVariables == 9),
// and the order matches SpecialVariables.PreferenceVariables/PreferenceVariableTypes.
Debug = 9,
Verbose = 10,
Error = 11,
WhatIf = 12,
Warning = 13,
Information = 14,
Confirm = 15,
}
}
| |
/*
* MindTouch Dream - a distributed REST framework
* Copyright (C) 2006-2011 MindTouch, Inc.
* www.mindtouch.com oss@mindtouch.com
*
* For community documentation and downloads visit wiki.developer.mindtouch.com;
* please review the licensing section.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using MindTouch.Collections;
using NUnit.Framework;
namespace MindTouch.Dream.Test {
[TestFixture]
public class BlockingQueueTests {
private static readonly log4net.ILog _log = LogUtils.CreateLog();
[Test]
public void Single_threaded_queue_dequeue() {
    const int itemCount = 10000;
    var expected = new List<string>();
    var queue = new BlockingQueue<string>();

    // Enqueue a batch of unique values, remembering their order.
    for(var i = 0; i < itemCount; i++) {
        var value = Guid.NewGuid().ToString();
        queue.Enqueue(value);
        expected.Add(value);
    }
    Assert.AreEqual(itemCount, queue.Count);

    // Dequeue must yield the values in FIFO order.
    for(var i = 0; i < itemCount; i++) {
        Assert.AreEqual(expected[i], queue.Dequeue());
    }
}
[Test]
[ExpectedException(typeof(InvalidOperationException))]
public void Queue_on_closed_queue_throws() {
    var queue = new BlockingQueue<string>();
    queue.Enqueue("foo");
    Assert.IsFalse(queue.IsClosed);

    // Once closed, any further enqueue must throw (the ExpectedException above).
    queue.Close();
    Assert.IsTrue(queue.IsClosed);
    queue.Enqueue("bar");
}
[Test]
[ExpectedException(typeof(QueueClosedException))]
public void Dequeue_on_closed_queue_throws() {
    var queue = new BlockingQueue<string>();
    queue.Enqueue("foo");
    Assert.IsFalse(queue.IsClosed);
    queue.Close();
    Assert.IsTrue(queue.IsClosed);

    // Items enqueued before Close() can still be drained...
    var first = queue.Dequeue();
    Assert.AreEqual("foo", first);

    // ...but dequeuing from the now-empty, closed queue must throw.
    first = queue.Dequeue();
}
[Test]
public void Dequeue_times_out_as_specified() {
    // Verifies that TryDequeue on an empty queue waits roughly the requested
    // timeout before giving up. Stopwatch is used instead of DateTime.Now
    // because it measures elapsed time monotonically and is not affected by
    // system clock adjustments or the coarse DateTime.Now resolution.
    BlockingQueue<string> q = new BlockingQueue<string>();
    var stopwatch = Stopwatch.StartNew();
    string x;
    Assert.IsFalse(q.TryDequeue(TimeSpan.FromSeconds(1), out x));
    stopwatch.Stop();
    Assert.IsNull(x);
    TimeSpan elapsed = stopwatch.Elapsed;
    Assert.GreaterOrEqual(elapsed.TotalSeconds, 0.95);
    Assert.LessOrEqual(elapsed.TotalSeconds, 1.1d);
}
[Test]
public void One_producer_one_consumer_loop_manually() {
    const int itemCount = 10000;
    var produced = new List<string>();
    var consumed = new List<string>();
    var queue = new BlockingQueue<string>();
    var done = new ManualResetEvent(false);
    var consumer = new Thread(SingleConsumerManualLoop) { IsBackground = true };
    consumer.Start(new Tuplet<int, IBlockingQueue<string>, List<string>, ManualResetEvent>(itemCount, queue, consumed, done));

    // Produce on the test thread while the consumer drains the queue concurrently.
    for(var i = 0; i < itemCount; i++) {
        var value = Guid.NewGuid().ToString();
        queue.Enqueue(value);
        produced.Add(value);
    }
    Assert.IsTrue(done.WaitOne(1000, true));
    Assert.AreEqual(itemCount, produced.Count);
    Assert.AreEqual(itemCount, consumed.Count);

    // FIFO ordering must be preserved across the thread boundary.
    for(var i = 0; i < itemCount; i++) {
        Assert.AreEqual(produced[i], consumed[i]);
    }
}
// Consumer helper: blocking-dequeues exactly Item1 values from Item2 into Item3,
// then signals Item4 so the producing test thread knows consumption finished.
private void SingleConsumerManualLoop(object obj) {
var state = (Tuplet<int, IBlockingQueue<string>, List<string>, ManualResetEvent>)obj;
for(int i = 0; i < state.Item1; i++) {
string guid = state.Item2.Dequeue();
if( guid == null) {
_log.WarnMethodCall("guid is null");
}
Assert.IsNotNull(guid);
state.Item3.Add(guid);
}
state.Item4.Set();
}
[Test]
public void One_producer_one_consumer_loop_with_foreach() {
// Same as One_producer_one_consumer_loop_manually, but the consumer iterates
// the queue with foreach (SingleConsumerForeachLoop) instead of calling Dequeue.
var n = 10000;
var enqueued = new List<string>();
var dequeued = new List<string>();
var q = new BlockingQueue<string>();
var consumer = new Thread(SingleConsumerForeachLoop);
consumer.IsBackground = true;
var reset = new ManualResetEvent(false);
consumer.Start(new Tuplet<int, IBlockingQueue<string>, List<string>, ManualResetEvent>(n, q, dequeued, reset));
for(int i = 0; i < n; i++) {
string guid = Guid.NewGuid().ToString();
q.Enqueue(guid);
enqueued.Add(guid);
}
Assert.IsTrue(reset.WaitOne(1000, true));
Assert.AreEqual(n, enqueued.Count);
Assert.AreEqual(n, dequeued.Count);
for(int i = 0; i < n; i++) {
Assert.AreEqual(enqueued[i], dequeued[i]);
}
}
// Consumer helper: iterates the queue with foreach (blocking enumeration) and
// collects items into Item3; breaks out after Item1 items, then signals Item4.
private void SingleConsumerForeachLoop(object obj) {
var state = (Tuplet<int, IBlockingQueue<string>, List<string>, ManualResetEvent>)obj;
int n = 0;
foreach(string guid in state.Item2) {
state.Item3.Add(guid);
if(guid == null) {
_log.WarnMethodCall("guid is null");
}
n++;
if(n >= state.Item1) {
// consumed the expected count; the queue is still open, so stop explicitly
break;
}
}
state.Item4.Set();
}
[Test]
public void One_producer_one_consumer_loop_with_foreach_and_stop() {
// Producer enqueues n values then closes the queue; the consumer's foreach
// enumeration (SingleConsumerForeachLoopAndStop) terminates on Close().
int n = 10000;
List<string> enqueued = new List<string>();
List<string> dequeued = new List<string>();
BlockingQueue<string> q = new BlockingQueue<string>();
Thread consumer = new Thread(SingleConsumerForeachLoopAndStop);

// Mark the consumer as a background thread, consistent with every other test in
// this fixture, so a failed Join cannot keep the test-runner process alive.
consumer.IsBackground = true;
consumer.Start(new Tuplet<IBlockingQueue<string>, List<string>>(q, dequeued));
for(int i = 0; i < n; i++) {
string guid = Guid.NewGuid().ToString();
q.Enqueue(guid);
enqueued.Add(guid);
}
q.Close();
Assert.IsTrue(consumer.Join(1000));
Assert.AreEqual(n, enqueued.Count);
Assert.AreEqual(n, dequeued.Count);
for(int i = 0; i < n; i++) {
Assert.AreEqual(enqueued[i], dequeued[i]);
}
}
// Consumer helper: drains the queue via foreach until the producer closes it
// (enumeration ends on Close), collecting every item into the shared list.
private void SingleConsumerForeachLoopAndStop(object obj) {
var args = (Tuplet<IBlockingQueue<string>, List<string>>)obj;
var queue = args.Item1;
var sink = args.Item2;
foreach(var item in queue) {
sink.Add(item);
}
}
[Test]
public void Many_consumers_with_timeouts() {
// Three consumers block on TryDequeue with a 1s timeout while exactly one item
// is enqueued: exactly one consumer must get it quickly, the other two must
// time out after ~1s with a null result.
BlockingQueue<string> q = new BlockingQueue<string>();
Thread c1 = new Thread(MultiConsumer);
Thread c2 = new Thread(MultiConsumer);
Thread c3 = new Thread(MultiConsumer);
c1.IsBackground = true;
c2.IsBackground = true;
c3.IsBackground = true;
// tuple slots: Item1 = queue, Item2 = dequeued value (out), Item3 = timeout in /
// elapsed time out, Item4 = done signal (see MultiConsumer)
Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v1
= new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false));
c1.Start(v1);
Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v2
= new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false));
c2.Start(v2);
Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v3
= new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false));
c3.Start(v3);
q.Enqueue("foo");
Assert.IsTrue(v1.Item4.WaitOne(2000, false), "thread 1 did not finish");
Assert.IsTrue(v2.Item4.WaitOne(2000, false), "thread 2 did not finish");
Assert.IsTrue(v3.Item4.WaitOne(2000, false), "thread 3 did not finish");
bool gotValue = false;
foreach(Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v in new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>[] { v1, v2, v3 }) {
if(v.Item2 == "foo") {
// the winning consumer should have returned well before the timeout
gotValue = true;
Assert.Less(v.Item3.TotalSeconds, 1);
} else {
// the losers timed out: no value, and roughly the full second elapsed
Assert.IsNull(v.Item2);
Assert.GreaterOrEqual(v.Item3.TotalSeconds, 0.95);
}
}
Assert.IsTrue(gotValue);
}
// Consumer helper for Many_consumers_with_timeouts: performs one timed TryDequeue.
// On return the tuple is reused as an out-parameter bag: Item2 holds the dequeued
// value (null on timeout) and Item3 is overwritten with the elapsed wait time,
// after which Item4 is signaled.
private void MultiConsumer(object state) {
Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v = (Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>)state;
DateTime start = DateTime.Now;
v.Item1.TryDequeue(v.Item3, out v.Item2);
v.Item3 = DateTime.Now.Subtract(start);
v.Item4.Set();
}
[Test]
public void One_producer_many_consumers_loop_with_foreach() {
// One producer (this thread) feeds three foreach-based consumers; verifies the
// work is spread across consumers and that the union of dequeued items equals
// the enqueued set (order across consumers is not deterministic).
int n = 500;
var enqueued = new List<string>();
var dequeued = new List<string>();
var q = new BlockingQueue<string>();
var c1 = new Thread(MultiConsumerForeachLoop) { IsBackground = true };
var c2 = new Thread(MultiConsumerForeachLoop) { IsBackground = true };
var c3 = new Thread(MultiConsumerForeachLoop) { IsBackground = true };
// Item3 counts items processed per consumer; each consumer has its own tuple
var v1 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
c1.Start(v1);
var v2 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
c2.Start(v2);
var v3 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
c3.Start(v3);
// give all consumers time to start blocking on the queue before producing
Thread.Sleep(1000);
for(int i = 0; i < n; i++) {
string guid = Guid.NewGuid().ToString();
q.Enqueue(guid);
enqueued.Add(guid);
}
// Close() ends each consumer's foreach enumeration
q.Close();
Assert.IsTrue(v1.Item4.WaitOne(10000, false), "thread 1 did not finish");
Assert.IsTrue(v2.Item4.WaitOne(10000, false), "thread 2 did not finish");
Assert.IsTrue(v3.Item4.WaitOne(10000, false), "thread 3 did not finish");
_log.DebugFormat("Thread 1 processed {0}", v1.Item3);
_log.DebugFormat("Thread 2 processed {0}", v2.Item3);
_log.DebugFormat("Thread 3 processed {0}", v3.Item3);
Console.WriteLine("Thread 1 processed {0}", v1.Item3);
Console.WriteLine("Thread 2 processed {0}", v2.Item3);
Console.WriteLine("Thread 3 processed {0}", v3.Item3);
// fairness check: each consumer should have handled at least a quarter of the load
Assert.GreaterOrEqual(v1.Item3, n / 4);
Assert.GreaterOrEqual(v2.Item3, n / 4);
Assert.GreaterOrEqual(v3.Item3, n / 4);
Assert.AreEqual(n, dequeued.Count);
// compare as sorted sets since interleaving across consumers is nondeterministic
Assert.AreEqual(dequeued.OrderBy(x => x).ToArray(), enqueued.OrderBy(x => x).ToArray());
}
[Test]
public void Many_producers_many_consumers_loop_with_foreach() {
// Three producers (MultiProducer) each enqueue n items while three consumers
// (MultiConsumerForeachLoop) drain the queue via foreach until it is closed.
int n = 200;
List<string> enqueued = new List<string>();
List<string> dequeued = new List<string>();
BlockingQueue<string> q = new BlockingQueue<string>();
Thread c1 = new Thread(MultiConsumerForeachLoop);
Thread c2 = new Thread(MultiConsumerForeachLoop);
Thread c3 = new Thread(MultiConsumerForeachLoop);
c1.IsBackground = true;
c2.IsBackground = true;
c3.IsBackground = true;
Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> v1
= new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
c1.Start(v1);
Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> v2
= new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
c2.Start(v2);
Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> v3
= new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
c3.Start(v3);
Thread p1 = new Thread(MultiProducer);
Thread p2 = new Thread(MultiProducer);
Thread p3 = new Thread(MultiProducer);
p1.IsBackground = true;
p2.IsBackground = true;
p3.IsBackground = true;
// all three producer tuples share the same 'enqueued' list; MultiProducer
// serializes its writes to that list
Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> p1v
= new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, enqueued, n, new ManualResetEvent(false));
p1.Start(p1v);
Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> p2v
= new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, enqueued, n, new ManualResetEvent(false));
p2.Start(p2v);
Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> p3v
= new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, enqueued, n, new ManualResetEvent(false));
p3.Start(p3v);
Assert.IsTrue(p1v.Item4.WaitOne(5000, false), "producer 1 did not finish");
Assert.IsTrue(p2v.Item4.WaitOne(5000, false), "producer 2 did not finish");
Assert.IsTrue(p3v.Item4.WaitOne(5000, false), "producer 3 did not finish");
q.Close();
Assert.IsTrue(v1.Item4.WaitOne(15000, false), "consumer 1 did not finish");
Assert.IsTrue(v2.Item4.WaitOne(15000, false), "consumer 2 did not finish");
Assert.IsTrue(v3.Item4.WaitOne(15000, false), "consumer 3 did not finish");
_log.DebugFormat("consumer 1 processed {0}", v1.Item3);
_log.DebugFormat("consumer 2 processed {0}", v2.Item3);
_log.DebugFormat("consumer 3 processed {0}", v3.Item3);
Assert.GreaterOrEqual(v1.Item3, n * 3 / 4);
Assert.GreaterOrEqual(v2.Item3, n * 3 / 4);
Assert.GreaterOrEqual(v3.Item3, n * 3 / 4);
Assert.AreEqual(enqueued.Count, dequeued.Count);

// BUGFIX: verify every dequeued item (3n in total), not just the first n;
// the previous loop bound of n left two thirds of the output unchecked.
for(int i = 0; i < dequeued.Count; i++) {
Assert.Contains(dequeued[i], enqueued);
}
}
// Producer helper: enqueues Item3 fresh guids into Item1, records them locally,
// then appends its batch to the shared Item2 list and signals Item4.
private void MultiProducer(object obj) {
var state = (Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>)obj;
List<string> enqueued = new List<string>();
for(int i = 0; i < state.Item3; i++) {
string guid = Guid.NewGuid().ToString();
state.Item1.Enqueue(guid);
enqueued.Add(guid);
}
_log.DebugFormat("production complete");

// BUGFIX: Item2 is the same List<string> shared by all producer threads and
// List<T> is not thread-safe; serialize the AddRange exactly as the consumer
// counterpart (MultiConsumerForeachLoop) already does.
lock(state.Item2) {
state.Item2.AddRange(enqueued);
}
state.Item4.Set();
}
// Consumer helper: drains Item1 via foreach until the queue is closed, counting
// items in Item3 and batching them locally; the batch is appended to the shared
// Item2 list under a lock before Item4 is signaled. The Sleep(10) slows each
// consumer down so work is spread across all consumers.
private void MultiConsumerForeachLoop(object obj) {
var state = (Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>)obj;
_log.DebugFormat("consumption started");
var dequeued = new List<string>();
foreach(string guid in state.Item1) {
dequeued.Add(guid);
state.Item3++;
Thread.Sleep(10);
}
_log.DebugFormat("consumption complete");
// Item2 is shared between consumers; List<T> requires external synchronization
lock(state.Item2) {
state.Item2.AddRange(dequeued);
}
state.Item4.Set();
}
}
}
| |
/*
* MindTouch Dream - a distributed REST framework
* Copyright (C) 2006-2014 MindTouch, Inc.
* www.mindtouch.com oss@mindtouch.com
*
* For community documentation and downloads visit mindtouch.com;
* please review the licensing section.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.Reflection;
using MindTouch.Xml;
namespace MindTouch.Data {
/// <summary>
/// Provides a a database query/stored procedure command builder.
/// </summary>
public class DataCommand : IDataCommand {
//--- Types ---
// Wraps a single [DataColumn]-decorated property or field so database values can
// be assigned to it uniformly via reflection (exactly one of _property/_field is set).
internal class DataColumnField {
//--- Fields ---
private readonly PropertyInfo _property;
private readonly FieldInfo _field;
private readonly Type _type;
//--- Constructors ---
// Wraps a property target; _type is the property's declared type.
internal DataColumnField(PropertyInfo info) {
if(info == null) {
throw new ArgumentNullException("info");
}
_property = info;
_type = info.PropertyType;
}
// Wraps a field target; _type is the field's declared type.
internal DataColumnField(FieldInfo info) {
if(info == null) {
throw new ArgumentNullException("info");
}
_field = info;
_type = info.FieldType;
}
//--- Methods ---
// Assigns a database cell value to the wrapped member on 'instance',
// handling DBNull, enum conversion, and type coercion via SysUtil.ChangeType.
internal void SetValue(object instance, object value) {
if((value == null) || (value is DBNull)) {
// null is only assignable to reference types and Nullable<T>;
// non-nullable value-type members keep their current value
if(!_type.IsValueType || (_type.IsGenericType && (_type.GetGenericTypeDefinition() == typeof(Nullable<>)))) {
if(_property != null) {
_property.SetValue(instance, null, null);
} else {
_field.SetValue(instance, null);
}
}
} else if(_type.IsEnum) {
switch(Convert.GetTypeCode(value)) {
case TypeCode.String:
// string column value: parse case-insensitively into the enum type
if(_property != null) {
_property.SetValue(instance, Enum.Parse(_type, (string)value, true), null);
} else {
_field.SetValue(instance, Enum.Parse(_type, (string)value, true));
}
break;
case TypeCode.Byte:
case TypeCode.UInt16:
case TypeCode.UInt32:
case TypeCode.UInt64:
case TypeCode.Int16:
case TypeCode.Int32:
case TypeCode.Int64:
case TypeCode.SByte:
// NOTE(review): for numeric column values this assigns the enum member's
// *name* (a string) to the member whose declared type is the enum itself;
// reflection SetValue with a mismatched type would throw at runtime —
// confirm whether Enum.ToObject(_type, value) was intended here.
string text = Enum.GetName(_type, value);
if(text != null) {
if(_property != null) {
_property.SetValue(instance, text, null);
} else {
_field.SetValue(instance, text);
}
}
break;
}
} else if(value.GetType() != _type) {
// type mismatch (e.g. long column into int member): coerce first
if(_property != null) {
_property.SetValue(instance, SysUtil.ChangeType(value, _type), null);
} else {
_field.SetValue(instance, SysUtil.ChangeType(value, _type));
}
} else {
// exact type match: assign directly
if(_property != null) {
_property.SetValue(instance, value, null);
} else {
_field.SetValue(instance, value);
}
}
}
}
//--- Class Fields ---
private static readonly log4net.ILog _log = LogUtils.CreateLog();
private static readonly Dictionary<Type, Dictionary<string, DataColumnField>> _typeCache = new Dictionary<Type, Dictionary<string, DataColumnField>>();
private static readonly TimeSpan SLOW_SQL = TimeSpan.FromSeconds(5);
//--- Class Methods ---
/// <summary>
/// Ensure that string is safe for use in SQL statements by escaping backslash,
/// NUL, newline, carriage return, quote characters and Ctrl-Z (the characters
/// with special meaning inside MySQL-style string literals).
/// </summary>
/// <param name="text">String to escape; null or empty is returned unchanged</param>
/// <returns>Escaped string</returns>
public static string MakeSqlSafe(string text) {
if(string.IsNullOrEmpty(text)) {
return text;
}
// ReplaceAll takes (search, replacement) pairs; backslash is listed first so
// replacement output is not re-escaped
text = text.ReplaceAll(
"\\", "\\\\",
"\0", "\\0",
"\n", "\\n",
"\r", "\\r",
"'", "\\'",
"\"", "\\\"",
"\x1a", "\\x1a"
);
return text;
}
// Builds (and caches per type, guarded by the cache lock) a case-insensitive map
// from column name to the [DataColumn]-decorated property/field it populates.
// Throws MissingFieldException when the type declares no DataColumn members.
private static Dictionary<string, DataColumnField> GetDataFields(Type type) {
Dictionary<string, DataColumnField> result;
lock(_typeCache) {
if(!_typeCache.TryGetValue(type, out result)) {
result = new Dictionary<string, DataColumnField>(StringComparer.OrdinalIgnoreCase);
// enumerate all properties of this type
foreach(PropertyInfo property in type.GetProperties(BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance)) {
foreach(DataColumnAttribute attribute in property.GetCustomAttributes(typeof(DataColumnAttribute), true)) {
// the attribute may override the column name; default to the member name
result.Add(attribute.Name ?? property.Name, new DataColumnField(property));
}
}
// enumerate all fields of this type
foreach(FieldInfo field in type.GetFields(BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance)) {
foreach(DataColumnAttribute attribute in field.GetCustomAttributes(typeof(DataColumnAttribute), true)) {
result.Add(attribute.Name ?? field.Name, new DataColumnField(field));
}
}
if(result.Count == 0) {
throw new MissingFieldException("Type does not have any properties decorated with DataColumn attribute");
}
_typeCache[type] = result;
}
}
return result;
}
// Copies the reader's current row into 'item': every result column whose name
// matches a mapped [DataColumn] member is assigned; unmapped columns are ignored.
private static T FillObject<T>(T item, Dictionary<string, DataColumnField> fields, IDataReader reader) {
int count = reader.FieldCount;
for(int column = 0; column < count; column++) {
DataColumnField target;
if(!fields.TryGetValue(reader.GetName(column), out target)) {
continue;
}
target.SetValue(item, reader.GetValue(column));
}
return item;
}
//--- Fields ---
private readonly DataFactory _factory;
// prototype command; never executed directly, cloned by CreateExecutableCommand
private readonly IDbCommand _command;
private readonly string _connection;
// per-query timer backing the ExecutionTime property and slow-SQL warnings
private readonly Stopwatch _stopWatch = new Stopwatch();
private readonly DataCatalog _catalog;
private readonly bool _slowSqlWarningEnabled;
//--- Constructors ---
// Created by DataCatalog/DataFactory; validates all dependencies up front so
// the execution paths can assume non-null state.
internal DataCommand(DataFactory factory, DataCatalog catalog, string connection, IDbCommand command, bool slowSqlWarningEnabled) {
if(factory == null) {
throw new ArgumentNullException("factory");
}
if(catalog == null) {
throw new ArgumentNullException("catalog");
}
if(connection == null) {
throw new ArgumentNullException("connection");
}
if(command == null) {
throw new ArgumentNullException("command");
}
_factory = factory;
_connection = connection;
_command = command;
_catalog = catalog;
_slowSqlWarningEnabled = slowSqlWarningEnabled;
}
//--- Properties ---
/// <summary>
/// <see langword="True"/> if this command is a stored procedure.
/// </summary>
public bool IsStoredProcedure { get { return _command.CommandType == CommandType.StoredProcedure; } }
/// <summary>
/// Execution time of the last query, as measured between QueryStart() and QueryFinished().
/// </summary>
public TimeSpan ExecutionTime { get { return _stopWatch.Elapsed; } }
//--- Methods ---
/// <summary>
/// Adds an input parameter to the command.
/// </summary>
/// <param name="key">Name of the parameter</param>
/// <param name="value">Value of the parameter</param>
/// <returns>Returns this command (fluent interface)</returns>
public DataCommand With(string key, object value) {
_command.Parameters.Add(_factory.CreateParameter(key, value, ParameterDirection.Input));
return this;
}
// explicit interface implementation forwarding to the typed fluent overload
IDataCommand IDataCommand.With(string key, object value) {
return With(key, value);
}
/// <summary>
/// Adds an input-output parameter to the command.
/// </summary>
/// <param name="key">Name of the parameter</param>
/// <param name="value">Value of the parameter</param>
/// <returns>Returns this command (fluent interface)</returns>
public DataCommand WithInOut(string key, object value) {
_command.Parameters.Add(_factory.CreateParameter(key, value, ParameterDirection.InputOutput));
return this;
}
// explicit interface implementation forwarding to the typed fluent overload
IDataCommand IDataCommand.WithInOut(string key, object value) {
return WithInOut(key, value);
}
/// <summary>
/// Adds an output parameter to the command; read it back after execution with At&lt;T&gt;().
/// </summary>
/// <param name="key">Name of the parameter</param>
/// <returns>Returns this command (fluent interface)</returns>
public DataCommand WithOutput(string key) {
_command.Parameters.Add(_factory.CreateParameter(key, null, ParameterDirection.Output));
return this;
}
// explicit interface implementation forwarding to the typed fluent overload
IDataCommand IDataCommand.WithOutput(string key) {
return WithOutput(key);
}
/// <summary>
/// Adds a return-value parameter to the command; read it back after execution with At&lt;T&gt;().
/// </summary>
/// <param name="key">Name of the parameter</param>
/// <returns>Returns this command (fluent interface)</returns>
public DataCommand WithReturn(string key) {
_command.Parameters.Add(_factory.CreateParameter(key, null, ParameterDirection.ReturnValue));
return this;
}
// explicit interface implementation forwarding to the typed fluent overload
IDataCommand IDataCommand.WithReturn(string key) {
return WithReturn(key);
}
/// <summary>
/// Retrieve an output/return value from the finished command.
/// </summary>
/// <typeparam name="T">Returned value type</typeparam>
/// <param name="key">Name of returned parameter (provided previously using 'WithOutput()' or 'WithInOut()' or 'WithReturn()')</param>
/// <returns>Converted value, or default(T) when the value is null or DBNull</returns>
public T At<T>(string key) {
return At(key, default(T));
}
/// <summary>
/// Retrieve an output/return value from the finished command.
/// </summary>
/// <typeparam name="T">Returned value type</typeparam>
/// <param name="key">Name of returned parameter (provided previously using 'WithOutput()' or 'WithInOut()' or 'WithReturn()')</param>
/// <param name="def">Value to return if returned value is null or DbNull</param>
/// <returns>Converted value</returns>
public T At<T>(string key, T def) {

// parameters are stored under their provider-prefixed name (e.g. "@key")
var parameter = (IDataParameter)_command.Parameters[_factory.ParameterChar + key];
object value = parameter.Value;
if((value == null) || (value is DBNull)) {
return def;
}

// pass through exact matches; otherwise coerce to the requested type
return (value is T) ? (T)value : (T)SysUtil.ChangeType(value, typeof(T));
}
/// <summary>
/// Execute command as a non-query (no result set); use for INSERT/UPDATE/DELETE/DDL.
/// A fresh connection is opened and disposed per call.
/// </summary>
public void Execute() {
_log.TraceMethodCall("Execute()", _command.CommandText);
QueryStart();
using(IDbConnection connection = _factory.OpenConnection(_connection)) {
// the prototype command is cloned onto the live connection (see CreateExecutableCommand)
using(IDbCommand command = CreateExecutableCommand(connection)) {
try {
command.ExecuteNonQuery();
} catch(Exception e) {
// log at debug and rethrow unchanged; callers decide how to handle failures
_log.DebugFormat(e, "Execute(): Text: '{0}', Type: {1}", _command.CommandText, _command.CommandType);
throw;
} finally {
// always stop timing and notify the catalog, even on failure
QueryFinished(command);
}
}
}
}
/// <summary>
/// Execute command and call handler with an open IDataReader on the result set.
/// IDataReader and connection will be automatically closed upon completion of the handler.
/// </summary>
/// <param name="handler">Handler to invoke; must not retain the reader past its return</param>
public void Execute(Action<IDataReader> handler) {
_log.TraceMethodCall("Execute(Action<IDataReader>)", _command.CommandText);
if(handler == null) {
throw new ArgumentNullException("handler");
}
QueryStart();
using(IDbConnection connection = _factory.OpenConnection(_connection)) {
using(IDbCommand command = CreateExecutableCommand(connection)) {
try {
using(IDataReader reader = command.ExecuteReader()) {
handler(reader);
}
} catch(Exception e) {
// log at debug and rethrow unchanged (includes exceptions thrown by the handler)
_log.DebugFormat(e, "Execute(handler): Text: '{0}', Type: {1}", _command.CommandText, _command.CommandType);
throw;
} finally {
QueryFinished(command);
}
}
}
}
/// <summary>
/// Execute command and return value from the first column in the first row,
/// converted to a string; null when the result is empty or NULL.
/// </summary>
/// <returns>Read value</returns>
public string Read() {
_log.TraceMethodCall("Read()", _command.CommandText);
QueryStart();
using(IDbConnection connection = _factory.OpenConnection(_connection)) {
using(IDbCommand command = CreateExecutableCommand(connection)) {
try {
object value = command.ExecuteScalar();
// no rows returned
if(value == null) {
return null;
}
// SQL NULL
if(value is DBNull) {
return null;
}
return (string)SysUtil.ChangeType(value, typeof(string));
} catch(Exception e) {
_log.DebugFormat(e, "Read(): Text: '{0}', Type: {1}", _command.CommandText, _command.CommandType);
throw;
} finally {
QueryFinished(command);
}
}
}
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public bool? ReadAsBool() {
return ReadAs<bool>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public byte? ReadAsByte() {
return ReadAs<byte>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public short? ReadAsShort() {
return ReadAs<short>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public ushort? ReadAsUShort() {
return ReadAs<ushort>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public int? ReadAsInt() {
return ReadAs<int>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public long? ReadAsLong() {
return ReadAs<long>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public uint? ReadAsUInt() {
return ReadAs<uint>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public ulong? ReadAsULong() {
return ReadAs<ulong>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// </summary>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
public DateTime? ReadAsDateTime() {
return ReadAs<DateTime>();
}
/// <summary>
/// Execute command and return value from the first column in the first row.
/// Shared implementation backing all the typed ReadAsXxx() wrappers.
/// </summary>
/// <typeparam name="T">Returned value type</typeparam>
/// <returns>Converted value, or null when the result is empty or NULL</returns>
private T? ReadAs<T>() where T : struct {
_log.TraceMethodCall("ReadAs<T>()", typeof(T).FullName, _command.CommandText);
QueryStart();
using(IDbConnection connection = _factory.OpenConnection(_connection)) {
using(IDbCommand command = CreateExecutableCommand(connection)) {
try {
object value = command.ExecuteScalar();
// no rows returned
if(value == null) {
return null;
}
// SQL NULL
if(value is DBNull) {
return null;
}
return (T)SysUtil.ChangeType(value, typeof(T));
} catch(Exception e) {
_log.DebugFormat(e, "ReadAs(): Text: '{0}', Type: {1}", _command.CommandText, _command.CommandType);
throw;
} finally {
QueryFinished(command);
}
}
}
}
// Clones the prototype command (_command) onto a live connection; the prototype
// itself is never executed, which lets a DataCommand be run more than once.
// Parameters are copied by value so execution cannot mutate the prototype's state.
private IDbCommand CreateExecutableCommand(IDbConnection connection) {
IDbCommand command = _factory.CreateQuery(_command.CommandText);
try {
command.CommandType = _command.CommandType;
command.Connection = connection;
foreach(IDataParameter parameter in _command.Parameters) {
IDataParameter parameterCopy = command.CreateParameter();
parameterCopy.ParameterName = parameter.ParameterName;
parameterCopy.Value = parameter.Value;
parameterCopy.Direction = parameter.Direction;
command.Parameters.Add(parameterCopy);
}
} catch {
if(command != null) {
// must dispose of command in case of failure
command.Dispose();
}
throw;
}
return command;
}
/// <summary>
/// Execute command and read result into a DataSet.
/// </summary>
/// <returns>Read DataSet object</returns>
public DataSet ReadAsDataSet() {
_log.TraceMethodCall("ReadAsDataSet()", _command.CommandText);
DataSet result = new DataSet();
using(IDbConnection connection = _factory.OpenConnection(_connection)) {
using(IDbCommand command = CreateExecutableCommand(connection)) {
try {
_factory.CreateAdapter(command).Fill(result);
} catch(Exception e) {
_log.DebugFormat(e, "ReadAsDataSet(): Text: '{0}', Type: {1}", _command.CommandText, _command.CommandType);
throw;
}
// NOTE(review): unlike the other execution paths, this method never calls
// QueryStart()/QueryFinished(), so ExecutionTime, slow-SQL warnings and the
// catalog's query-finished event are skipped here — confirm whether intended.
}
}
return result;
}
/// <summary>
/// Execute command and read result into an XDoc. A column name starting with '@'
/// is rendered as an XML attribute on the row element; all other columns become
/// child elements. NULL columns are omitted.
/// </summary>
/// <param name="table">Name of the root element</param>
/// <param name="row">Name of the element created for each row</param>
/// <returns>Read XDoc object</returns>
public XDoc ReadAsXDoc(string table, string row) {
_log.TraceMethodCall("ReadAsXDoc()", _command.CommandText);
XDoc result = new XDoc(table);
Execute(reader => {
// capture row columns; a leading '@' marks a column for attribute rendering
int count = reader.FieldCount;
string[] columns = new string[count];
bool[] attr = new bool[count];
for(int i = 0; i < count; ++i) {
columns[i] = reader.GetName(i);
if(columns[i].StartsWith("@")) {
attr[i] = true;
columns[i] = columns[i].Substring(1);
}
}
// read records
while(reader.Read()) {
result.Start(row);
for(int i = 0; i < count; ++i) {
if(!reader.IsDBNull(i)) {
string column = columns[i];

// BUGFIX: test the current column's flag (attr[i]); the previous
// attr[0] let the first column dictate attribute-vs-element
// rendering for every column in the row.
if(attr[i]) {
result.Attr(column, reader.GetValue(i).ToString());
} else {
result.Elem(column, reader.GetValue(i).ToString());
}
}
}
result.End();
}
});
return result;
}
/// <summary>
/// Execute command and convert the first row into an object via its
/// [DataColumn]-decorated members; returns default(T) when the result is empty.
/// </summary>
/// <typeparam name="T">Object type to create</typeparam>
/// <returns>Created object</returns>
public T ReadAsObject<T>() where T : new() {
_log.TraceMethodCall("ReadAsObject()", _command.CommandText);
Dictionary<string, DataColumnField> fields = GetDataFields(typeof(T));
// read item from database
T result = default(T);
Execute(reader => {
// only the first row is consumed; any additional rows are ignored
if(reader.Read()) {
result = FillObject(new T(), fields, reader);
}
});
return result;
}
/// <summary>
/// Execute command and convert all rows into a list of objects via their
/// [DataColumn]-decorated members; returns an empty list when the result is empty.
/// </summary>
/// <typeparam name="T">Object type to create</typeparam>
/// <returns>List of created objects</returns>
public List<T> ReadAsObjects<T>() where T : new() {
if(_log.IsTraceEnabled()) {

// BUGFIX: log the correct method name (was mislabeled "ReadAsObject<T>()",
// making traces indistinguishable from the single-row overload)
_log.TraceMethodCall("ReadAsObjects<T>()", typeof(T).FullName, _command.CommandText);
}
Dictionary<string, DataColumnField> fields = GetDataFields(typeof(T));
// read item from database
List<T> result = new List<T>();
Execute(reader => {
while(reader.Read()) {
result.Add(FillObject(new T(), fields, reader));
}
});
return result;
}
// Starts the per-query stopwatch that backs ExecutionTime and slow-SQL detection.
private void QueryStart() {
_stopWatch.Reset();
_stopWatch.Start();
}
// Stops timing, warns about queries slower than SLOW_SQL (when enabled), and
// notifies the owning catalog that this command finished a query.
private void QueryFinished(IDbCommand command) {
_stopWatch.Stop();
if(_slowSqlWarningEnabled && _stopWatch.Elapsed > SLOW_SQL) {
// format args: {0}=seconds, {1}=SQL text, {2}=database name (order is intentional)
_log.WarnFormat("SLOW SQL ({0:0.000}s, database: {2}): {1}", _stopWatch.Elapsed.TotalSeconds, command.CommandText, command.Connection.Database);
}
_catalog.FireQueryFinished(this);
}
}
}
| |
using System;
using System.Globalization;
using System.Text;
using System.Web;
using System.Web.Mvc;
using System.Web.WebPages;
using Umbraco.Core;
using Umbraco.Core.Configuration;
using Umbraco.Core.IO;
using Umbraco.Core.Models;
using Umbraco.Web.Models;
using Umbraco.Web.Routing;
using Umbraco.Web.Security;
namespace Umbraco.Web.Mvc
{
/// <summary>
/// The View that umbraco front-end views inherit from
/// </summary>
public abstract class UmbracoViewPage<TModel> : WebViewPage<TModel>
{
/// <summary>
/// Returns the current UmbracoContext
/// </summary>
public UmbracoContext UmbracoContext
{
get
{
//we should always try to return the context from the data tokens just in case its a custom context and not
//using the UmbracoContext.Current, we will fallback to the singleton if necessary.
var umbCtx = ViewContext.GetUmbracoContext()
//lastly, we will use the singleton; the only reason this should ever happen is if someone is rendering a page that inherits from this
//class and is rendering it outside of the normal Umbraco routing process. Very unlikely.
?? UmbracoContext.Current;
return umbCtx;
}
}
/// <summary>
/// Returns the current ApplicationContext, resolved via the UmbracoContext property above.
/// </summary>
public ApplicationContext ApplicationContext
{
get { return UmbracoContext.Application; }
}
/// <summary>
/// Returns the current PublishedContentRequest, checking (in order) this view's
/// route data tokens, the parent action's tokens for child actions, and finally
/// the UmbracoContext singleton.
/// </summary>
internal PublishedContentRequest PublishedContentRequest
{
get
{
//we should always try to return the object from the data tokens just in case its a custom object and not
//using the UmbracoContext.Current.
//we will fallback to the singleton if necessary.
if (ViewContext.RouteData.DataTokens.ContainsKey(Core.Constants.Web.PublishedDocumentRequestDataToken))
{
return (PublishedContentRequest)ViewContext.RouteData.DataTokens.GetRequiredObject(Core.Constants.Web.PublishedDocumentRequestDataToken);
}
//next check if it is a child action and see if the parent has it set in data tokens
if (ViewContext.IsChildAction)
{
if (ViewContext.ParentActionViewContext.RouteData.DataTokens.ContainsKey(Core.Constants.Web.PublishedDocumentRequestDataToken))
{
return (PublishedContentRequest)ViewContext.ParentActionViewContext.RouteData.DataTokens.GetRequiredObject(Core.Constants.Web.PublishedDocumentRequestDataToken);
}
}
//lastly, we will use the singleton; the only reason this should ever happen is if someone is rendering a page that inherits from this
//class and is rendering it outside of the normal Umbraco routing process. Very unlikely.
return UmbracoContext.Current.PublishedContentRequest;
}
}
// lazily-created helper instances, cached for the lifetime of this page
private UmbracoHelper _helper;
private MembershipHelper _membershipHelper;
/// <summary>
/// Gets an UmbracoHelper
/// </summary>
/// <remarks>
/// This constructs the UmbracoHelper with the content model of the page routed to
/// </remarks>
public virtual UmbracoHelper Umbraco
{
get
{
if (_helper == null)
{
// the view model may be the content itself or an IRenderModel wrapping it
var model = ViewData.Model;
var content = model as IPublishedContent;
if (content == null && model is IRenderModel)
content = ((IRenderModel) model).Content;
_helper = content == null
? new UmbracoHelper(UmbracoContext)
: new UmbracoHelper(UmbracoContext, content);
}
return _helper;
}
}
/// <summary>
/// Returns the MembershipHelper instance (created lazily on first access).
/// </summary>
public MembershipHelper Members
{
get { return _membershipHelper ?? (_membershipHelper = new MembershipHelper(UmbracoContext)); }
}
/// <summary>
/// Ensure that the current view context is added to the route data tokens so we can extract it if we like
/// </summary>
/// <remarks>
/// Currently this is required by mvc macro engines
/// </remarks>
protected override void InitializePage()
{
base.InitializePage();
// only the outermost (non-child-action) view registers its context
if (ViewContext.IsChildAction == false)
{
//this is used purely for partial view macros that contain forms
// and mostly just when rendered within the RTE - This should already be set with the
// EnsurePartialViewMacroViewContextFilterAttribute
if (ViewContext.RouteData.DataTokens.ContainsKey(Constants.DataTokenCurrentViewContext) == false)
{
ViewContext.RouteData.DataTokens.Add(Constants.DataTokenCurrentViewContext, ViewContext);
}
}
}
// maps the incoming view data's model to TModel before handing it to the base page
protected override void SetViewData(ViewDataDictionary viewData)
{
// capture the model before we tinker with the viewData
var viewDataModel = viewData.Model;
// map the view data (may change its type, may set model to null)
viewData = MapViewDataDictionary(viewData, typeof (TModel));
var culture = CultureInfo.CurrentCulture;
// bind the model (use context culture as default, if available)
if (UmbracoContext.PublishedContentRequest != null && UmbracoContext.PublishedContentRequest.Culture != null)
culture = UmbracoContext.PublishedContentRequest.Culture;
viewData.Model = RenderModelBinder.BindModel(viewDataModel, typeof (TModel), culture);
// set the view data
base.SetViewData(viewData);
}
// viewData is the ViewDataDictionary (maybe <TModel>) that we have
// modelType is the type of the model that we need to bind to
//
// figure out whether viewData can accept modelType else replace it
//
/// <summary>
/// Returns a ViewDataDictionary whose Model property can accept an instance of
/// <paramref name="modelType"/>, either the original dictionary (when compatible)
/// or a new ViewDataDictionary&lt;modelType&gt; seeded with the original's values.
/// </summary>
/// <param name="viewData">The incoming view data dictionary (possibly generic).</param>
/// <param name="modelType">The model type the view needs to bind to.</param>
/// <returns>A compatible view data dictionary; its Model is null when a new dictionary is created.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when viewData is generic but not a ViewDataDictionary&lt;T&gt;.
/// </exception>
private static ViewDataDictionary MapViewDataDictionary(ViewDataDictionary viewData, Type modelType)
{
    var viewDataType = viewData.GetType();

    // if viewData is not generic then it is a simple ViewDataDictionary instance and its
    // Model property is of type 'object' and will accept anything, so it is safe to use
    // viewData
    if (viewDataType.IsGenericType == false)
        return viewData;

    // ensure it is the proper generic type
    // (InvalidOperationException rather than bare Exception - see CA2201)
    var def = viewDataType.GetGenericTypeDefinition();
    if (def != typeof(ViewDataDictionary<>))
        throw new InvalidOperationException("Could not map viewData of type \"" + viewDataType.FullName + "\".");

    // get the viewData model type and compare with the actual view model type:
    // viewData is ViewDataDictionary<viewDataModelType> and we will want to assign an
    // object of type modelType to the Model property of type viewDataModelType, we
    // need to check whether that is possible
    var viewDataModelType = viewDataType.GenericTypeArguments[0];
    if (viewDataModelType.IsAssignableFrom(modelType))
        return viewData;

    // if not possible then we need to create a new ViewDataDictionary
    var nViewDataType = typeof(ViewDataDictionary<>).MakeGenericType(modelType);
    var tViewData = new ViewDataDictionary(viewData) { Model = null }; // temp view data to copy values
    var nViewData = (ViewDataDictionary)Activator.CreateInstance(nViewDataType, tViewData);
    return nViewData;
}
/// <summary>
/// This will detect the end /body tag and insert the preview badge if in preview mode
/// </summary>
/// <param name="value">The literal about to be written to the response; may be null.</param>
public override void WriteLiteral(object value)
{
    // a null literal has nothing to scan for the closing body tag; defer to the base
    // writer (calling value.ToString() below would otherwise throw on null)
    if (value == null)
    {
        base.WriteLiteral(value);
        return;
    }

    // filter / add preview banner
    if (Response.ContentType.InvariantEquals("text/html")) // ASP.NET default value
    {
        if (UmbracoContext.Current.IsDebug || UmbracoContext.Current.InPreviewMode)
        {
            var text = value.ToString();
            var pos = text.IndexOf("</body>", StringComparison.InvariantCultureIgnoreCase);
            if (pos > -1)
            {
                string markupToInject;
                if (UmbracoContext.Current.InPreviewMode)
                {
                    // creating previewBadge markup
                    markupToInject =
                        String.Format(UmbracoConfig.For.UmbracoSettings().Content.PreviewBadge,
                            IOHelper.ResolveUrl(SystemDirectories.Umbraco),
                            IOHelper.ResolveUrl(SystemDirectories.UmbracoClient),
                            Server.UrlEncode(UmbracoContext.Current.HttpContext.Request.Path));
                }
                else
                {
                    // creating mini-profiler markup
                    markupToInject = Html.RenderProfiler().ToHtmlString();
                }
                // inject the badge/profiler markup just before </body>
                var sb = new StringBuilder(text);
                sb.Insert(pos, markupToInject);
                base.WriteLiteral(sb.ToString());
                return;
            }
        }
    }
    base.WriteLiteral(value);
}
/// <summary>
/// Renders the named section, using the supplied callback to produce default contents;
/// delegates to WebViewPageExtensions.RenderSection.
/// </summary>
public HelperResult RenderSection(string name, Func<dynamic, HelperResult> defaultContents)
{
    return WebViewPageExtensions.RenderSection(this, name, defaultContents);
}

/// <summary>
/// Renders the named section with a HelperResult as default contents;
/// delegates to WebViewPageExtensions.RenderSection.
/// </summary>
public HelperResult RenderSection(string name, HelperResult defaultContents)
{
    return WebViewPageExtensions.RenderSection(this, name, defaultContents);
}

/// <summary>
/// Renders the named section with a plain string as default contents;
/// delegates to WebViewPageExtensions.RenderSection.
/// </summary>
public HelperResult RenderSection(string name, string defaultContents)
{
    return WebViewPageExtensions.RenderSection(this, name, defaultContents);
}

/// <summary>
/// Renders the named section with pre-encoded HTML as default contents;
/// delegates to WebViewPageExtensions.RenderSection.
/// </summary>
public HelperResult RenderSection(string name, IHtmlString defaultContents)
{
    return WebViewPageExtensions.RenderSection(this, name, defaultContents);
}
}
}
| |
using System;
using System.Collections.Generic;
using FluentNHibernate.Conventions.Inspections;
using FluentNHibernate.Mapping;
using FluentNHibernate.MappingModel.Identity;
using NHibernate.Id;
namespace FluentNHibernate.Conventions.Instances
{
/// <summary>
/// Convention-level wrapper around a GeneratorBuilder. Every strategy method selects
/// an id generation scheme, but only when the mapping does not already specify a
/// generator class explicitly - explicit configuration always wins over conventions.
/// </summary>
public class GeneratorInstance : GeneratorInspector, IGeneratorInstance
{
    private readonly GeneratorMapping mapping;
    private readonly GeneratorBuilder builder;

    public GeneratorInstance(GeneratorMapping mapping, Type type)
        : base(mapping)
    {
        this.mapping = mapping;
        builder = new GeneratorBuilder(mapping, type);
    }

    /// <summary>
    /// Runs <paramref name="setGenerator"/> unless a generator class has already been
    /// specified explicitly on the mapping.
    /// </summary>
    private void Apply(Action setGenerator)
    {
        if (mapping.IsSpecified("Class"))
            return;

        setGenerator();
    }

    /// <summary>
    /// Integral identifiers that are unique only while a single process inserts into
    /// the table. Do not use in a cluster.
    /// </summary>
    public void Increment()
    {
        Apply(() => builder.Increment());
    }

    /// <summary>
    /// Integral identifiers that are unique only while a single process inserts into
    /// the table. Do not use in a cluster.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void Increment(Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Increment(paramValues));
    }

    /// <summary>
    /// Native identity columns (DB2, MySQL, MS SQL Server, Sybase). The database value
    /// is converted to the property type with Convert.ChangeType.
    /// </summary>
    public void Identity()
    {
        Apply(() => builder.Identity());
    }

    /// <summary>
    /// Native identity columns (DB2, MySQL, MS SQL Server, Sybase). The database value
    /// is converted to the property type with Convert.ChangeType.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void Identity(Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Identity(paramValues));
    }

    /// <summary>
    /// Database sequence (DB2, PostgreSQL, Oracle) or generator (Firebird). The database
    /// value is converted to the property type with Convert.ChangeType.
    /// </summary>
    /// <param name="sequenceName">Name of the sequence</param>
    public void Sequence(string sequenceName)
    {
        Apply(() => builder.Sequence(sequenceName));
    }

    /// <summary>
    /// Database sequence (DB2, PostgreSQL, Oracle) or generator (Firebird). The database
    /// value is converted to the property type with Convert.ChangeType.
    /// </summary>
    /// <param name="sequenceName">Name of the sequence</param>
    /// <param name="paramValues">Params configuration</param>
    public void Sequence(string sequenceName, Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Sequence(sequenceName, paramValues));
    }

    /// <summary>
    /// Hi/lo algorithm using the given table and column as the source of "hi" values.
    /// Identifiers are unique only per database; do not use with a user-supplied connection.
    /// </summary>
    /// <param name="table">Table holding the next "hi" value</param>
    /// <param name="column">Column holding the next "hi" value</param>
    /// <param name="maxLo">Maximum "lo" value</param>
    public void HiLo(string table, string column, string maxLo)
    {
        Apply(() => builder.HiLo(table, column, maxLo));
    }

    /// <summary>
    /// Hi/lo algorithm using the given table and column as the source of "hi" values.
    /// Identifiers are unique only per database; do not use with a user-supplied connection.
    /// </summary>
    /// <param name="table">Table holding the next "hi" value</param>
    /// <param name="column">Column holding the next "hi" value</param>
    /// <param name="maxLo">Maximum "lo" value</param>
    /// <param name="paramValues">Params configuration</param>
    public void HiLo(string table, string column, string maxLo, Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.HiLo(table, column, maxLo, paramValues));
    }

    /// <summary>
    /// Hi/lo algorithm with the default table and column (hibernate_unique_key / next_hi).
    /// Identifiers are unique only per database; do not use with a user-supplied connection.
    /// </summary>
    /// <param name="maxLo">Maximum "lo" value</param>
    public void HiLo(string maxLo)
    {
        Apply(() => builder.HiLo(maxLo));
    }

    /// <summary>
    /// Hi/lo algorithm with the default table and column (hibernate_unique_key / next_hi).
    /// Identifiers are unique only per database; do not use with a user-supplied connection.
    /// </summary>
    /// <param name="maxLo">Maximum "lo" value</param>
    /// <param name="paramValues">Params configuration</param>
    public void HiLo(string maxLo, Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.HiLo(maxLo, paramValues));
    }

    /// <summary>
    /// Hi/lo algorithm fed from an Oracle-style sequence (where supported).
    /// </summary>
    /// <param name="sequence">Name of the sequence</param>
    /// <param name="maxLo">Maximum "lo" value</param>
    public void SeqHiLo(string sequence, string maxLo)
    {
        Apply(() => builder.SeqHiLo(sequence, maxLo));
    }

    /// <summary>
    /// Hi/lo algorithm fed from an Oracle-style sequence (where supported).
    /// </summary>
    /// <param name="sequence">Name of the sequence</param>
    /// <param name="maxLo">Maximum "lo" value</param>
    /// <param name="paramValues">Params configuration</param>
    public void SeqHiLo(string sequence, string maxLo, Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.SeqHiLo(sequence, maxLo, paramValues));
    }

    /// <summary>
    /// System.Guid rendered as a string via ToString(format); the string length depends
    /// on the configured format.
    /// </summary>
    /// <param name="format">http://msdn.microsoft.com/en-us/library/97af8hh4.aspx</param>
    public void UuidHex(string format)
    {
        Apply(() => builder.UuidHex(format));
    }

    /// <summary>
    /// System.Guid rendered as a string via ToString(format); the string length depends
    /// on the configured format.
    /// </summary>
    /// <param name="format">http://msdn.microsoft.com/en-us/library/97af8hh4.aspx</param>
    /// <param name="paramValues">Params configuration</param>
    public void UuidHex(string format, Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.UuidHex(format, paramValues));
    }

    /// <summary>
    /// A new System.Guid converted to a byte[] that is then turned into a string.
    /// </summary>
    public void UuidString()
    {
        Apply(() => builder.UuidString());
    }

    /// <summary>
    /// A new System.Guid converted to a byte[] that is then turned into a string.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void UuidString(Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.UuidString(paramValues));
    }

    /// <summary>
    /// A new System.Guid as the identifier.
    /// </summary>
    public void Guid()
    {
        Apply(() => builder.Guid());
    }

    /// <summary>
    /// A new System.Guid as the identifier.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void Guid(Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Guid(paramValues));
    }

    /// <summary>
    /// Recommended for Guid identifiers! Guid.Comb algorithm as described by Jimmy Nilsson
    /// in http://www.informit.com/articles/article.asp?p=25862.
    /// </summary>
    public void GuidComb()
    {
        Apply(() => builder.GuidComb());
    }

    /// <summary>
    /// Recommended for Guid identifiers! Guid.Comb algorithm as described by Jimmy Nilsson
    /// in http://www.informit.com/articles/article.asp?p=25862.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void GuidComb(Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.GuidComb(paramValues));
    }

    /// <summary>
    /// The application assigns the identifier before Save() is called.
    /// </summary>
    public void Assigned()
    {
        Apply(() => builder.Assigned());
    }

    /// <summary>
    /// The application assigns the identifier before Save() is called.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void Assigned(Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Assigned(paramValues));
    }

    /// <summary>
    /// Identity, sequence or hilo depending upon the capabilities of the underlying database.
    /// </summary>
    public void Native()
    {
        Apply(() => builder.Native());
    }

    /// <summary>
    /// Identity, sequence or hilo depending upon the capabilities of the underlying database.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void Native(Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Native(paramValues));
    }

    /// <summary>
    /// Identity, sequence or hilo depending upon the capabilities of the underlying database.
    /// </summary>
    /// <param name="sequenceName">Sequence name used when the database picks a sequence</param>
    public void Native(string sequenceName)
    {
        Apply(() => builder.Native(sequenceName));
    }

    /// <summary>
    /// Identity, sequence or hilo depending upon the capabilities of the underlying database.
    /// </summary>
    /// <param name="sequenceName">Sequence name used when the database picks a sequence</param>
    /// <param name="paramValues">Params configuration</param>
    public void Native(string sequenceName, Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Native(sequenceName, paramValues));
    }

    /// <summary>
    /// The identifier of another associated object; usually used with a one-to-one
    /// primary key association.
    /// </summary>
    /// <param name="property">Associated property</param>
    public void Foreign(string property)
    {
        Apply(() => builder.Foreign(property));
    }

    /// <summary>
    /// The identifier of another associated object; usually used with a one-to-one
    /// primary key association.
    /// </summary>
    /// <param name="property">Associated property</param>
    /// <param name="paramValues">Params configuration</param>
    public void Foreign(string property, Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Foreign(property, paramValues));
    }

    /// <summary>
    /// A user-provided IIdentifierGenerator implementation.
    /// </summary>
    public void Custom<T>() where T : IIdentifierGenerator
    {
        Custom(typeof(T));
    }

    /// <summary>
    /// A user-provided IIdentifierGenerator implementation.
    /// </summary>
    public void Custom(Type generator)
    {
        Custom(generator.AssemblyQualifiedName);
    }

    /// <summary>
    /// A user-provided IIdentifierGenerator implementation, by type name.
    /// </summary>
    public void Custom(string generator)
    {
        Apply(() => builder.Custom(generator));
    }

    /// <summary>
    /// A user-provided IIdentifierGenerator implementation.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void Custom<T>(Action<ParamBuilder> paramValues) where T : IIdentifierGenerator
    {
        Custom(typeof(T), paramValues);
    }

    /// <summary>
    /// A user-provided IIdentifierGenerator implementation.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void Custom(Type generator, Action<ParamBuilder> paramValues)
    {
        Custom(generator.AssemblyQualifiedName, paramValues);
    }

    /// <summary>
    /// A user-provided IIdentifierGenerator implementation, by type name.
    /// </summary>
    /// <param name="paramValues">Params configuration</param>
    public void Custom(string generator, Action<ParamBuilder> paramValues)
    {
        Apply(() => builder.Custom(generator, paramValues));
    }
}
}
| |
using System.Windows.Forms;
using System.Drawing;
using System.ComponentModel;
namespace WeifenLuo.WinFormsUI.Docking
{
[ToolboxItem(false)]
public partial class DockWindow : Panel, INestedPanesContainer, ISplitterDragSource
{
    private DockPanel m_dockPanel;
    private DockState m_dockState;
    // created only for the four edge dock states (see constructor)
    private SplitterControl m_splitter;
    private NestedPaneCollection m_nestedPanes;

    internal DockWindow(DockPanel dockPanel, DockState dockState)
    {
        m_nestedPanes = new NestedPaneCollection(this);
        m_dockPanel = dockPanel;
        m_dockState = dockState;
        Visible = false;
        SuspendLayout();
        // edge-docked windows get a splitter so the user can resize them
        if (DockState == DockState.DockLeft || DockState == DockState.DockRight ||
            DockState == DockState.DockTop || DockState == DockState.DockBottom)
        {
            m_splitter = new SplitterControl();
            Controls.Add(m_splitter);
        }
        // the splitter docks to the inner edge, opposite the docked side
        if (DockState == DockState.DockLeft)
        {
            Dock = DockStyle.Left;
            m_splitter.Dock = DockStyle.Right;
        }
        else if (DockState == DockState.DockRight)
        {
            Dock = DockStyle.Right;
            m_splitter.Dock = DockStyle.Left;
        }
        else if (DockState == DockState.DockTop)
        {
            Dock = DockStyle.Top;
            m_splitter.Dock = DockStyle.Bottom;
        }
        else if (DockState == DockState.DockBottom)
        {
            Dock = DockStyle.Bottom;
            m_splitter.Dock = DockStyle.Top;
        }
        else if (DockState == DockState.Document)
        {
            // document windows fill the remaining dock area and have no splitter
            Dock = DockStyle.Fill;
        }
        ResumeLayout();
    }

    // panes currently visible in this window (subset of NestedPanes)
    public VisibleNestedPaneCollection VisibleNestedPanes
    {
        get { return NestedPanes.VisibleNestedPanes; }
    }

    public NestedPaneCollection NestedPanes
    {
        get { return m_nestedPanes; }
    }

    public DockPanel DockPanel
    {
        get { return m_dockPanel; }
    }

    public DockState DockState
    {
        get { return m_dockState; }
    }

    public bool IsFloat
    {
        get { return DockState == DockState.Float; }
    }

    // first visible pane, or null when none are visible
    internal DockPane DefaultPane
    {
        get { return VisibleNestedPanes.Count == 0 ? null : VisibleNestedPanes[0]; }
    }

    /// <summary>
    /// The client rectangle with the document border and/or the splitter area excluded,
    /// depending on the dock state.
    /// </summary>
    public virtual Rectangle DisplayingRectangle
    {
        get
        {
            Rectangle rect = ClientRectangle;
            // if DockWindow is document, exclude the border
            if (DockState == DockState.Document)
            {
                rect.X += 1;
                rect.Y += 1;
                rect.Width -= 2;
                rect.Height -= 2;
            }
            // exclude the splitter
            else if (DockState == DockState.DockLeft)
                rect.Width -= Measures.SplitterSize;
            else if (DockState == DockState.DockRight)
            {
                rect.X += Measures.SplitterSize;
                rect.Width -= Measures.SplitterSize;
            }
            else if (DockState == DockState.DockTop)
                rect.Height -= Measures.SplitterSize;
            else if (DockState == DockState.DockBottom)
            {
                rect.Y += Measures.SplitterSize;
                rect.Height -= Measures.SplitterSize;
            }
            return rect;
        }
    }

    protected override void OnPaint(PaintEventArgs e)
    {
        // if DockWindow is document, draw the border
        if (DockState == DockState.Document)
            e.Graphics.DrawRectangle(SystemPens.ControlDark, ClientRectangle.X, ClientRectangle.Y, ClientRectangle.Width - 1, ClientRectangle.Height - 1);
        base.OnPaint(e);
    }

    protected override void OnLayout(LayoutEventArgs levent)
    {
        // keep the window's visibility in sync with whether it actually hosts visible panes
        VisibleNestedPanes.Refresh();
        if (VisibleNestedPanes.Count == 0)
        {
            if (Visible)
                Visible = false;
        }
        else if (!Visible)
        {
            Visible = true;
            // refresh again: becoming visible can change which panes are displayed
            VisibleNestedPanes.Refresh();
        }
        base.OnLayout (levent);
    }

    #region ISplitterDragSource Members

    void ISplitterDragSource.BeginDrag(Rectangle rectSplitter)
    {
    }

    void ISplitterDragSource.EndDrag()
    {
    }

    bool ISplitterDragSource.IsVertical
    {
        get { return (DockState == DockState.DockLeft || DockState == DockState.DockRight); }
    }

    // bounds the splitter may be dragged within, in screen coordinates;
    // holding Shift widens the limit to the whole dock area
    Rectangle ISplitterDragSource.DragLimitBounds
    {
        get
        {
            Rectangle rectLimit = DockPanel.DockArea;
            Point location;
            if ((Control.ModifierKeys & Keys.Shift) == 0)
                location = Location;
            else
                location = DockPanel.DockArea.Location;
            if (((ISplitterDragSource)this).IsVertical)
            {
                // keep at least MinSize on both sides of the splitter
                rectLimit.X += MeasurePane.MinSize;
                rectLimit.Width -= 2 * MeasurePane.MinSize;
                rectLimit.Y = location.Y;
                if ((Control.ModifierKeys & Keys.Shift) == 0)
                    rectLimit.Height = Height;
            }
            else
            {
                rectLimit.Y += MeasurePane.MinSize;
                rectLimit.Height -= 2 * MeasurePane.MinSize;
                rectLimit.X = location.X;
                if ((Control.ModifierKeys & Keys.Shift) == 0)
                    rectLimit.Width = Width;
            }
            return DockPanel.RectangleToScreen(rectLimit);
        }
    }

    void ISplitterDragSource.MoveSplitter(int offset)
    {
        if ((Control.ModifierKeys & Keys.Shift) != 0)
            SendToBack();
        Rectangle rectDockArea = DockPanel.DockArea;
        // portion values > 1 are absolute pixel sizes, otherwise a fraction of the dock area
        if (DockState == DockState.DockLeft && rectDockArea.Width > 0)
        {
            if (DockPanel.DockLeftPortion > 1)
                DockPanel.DockLeftPortion = Width + offset;
            else
                DockPanel.DockLeftPortion += ((double)offset) / (double)rectDockArea.Width;
        }
        else if (DockState == DockState.DockRight && rectDockArea.Width > 0)
        {
            // right/bottom grow when the splitter moves toward the center, hence the sign flip
            if (DockPanel.DockRightPortion > 1)
                DockPanel.DockRightPortion = Width - offset;
            else
                DockPanel.DockRightPortion -= ((double)offset) / (double)rectDockArea.Width;
        }
        else if (DockState == DockState.DockBottom && rectDockArea.Height > 0)
        {
            if (DockPanel.DockBottomPortion > 1)
                DockPanel.DockBottomPortion = Height - offset;
            else
                DockPanel.DockBottomPortion -= ((double)offset) / (double)rectDockArea.Height;
        }
        else if (DockState == DockState.DockTop && rectDockArea.Height > 0)
        {
            if (DockPanel.DockTopPortion > 1)
                DockPanel.DockTopPortion = Height + offset;
            else
                DockPanel.DockTopPortion += ((double)offset) / (double)rectDockArea.Height;
        }
    }

    #region IDragSource Members

    Control IDragSource.DragControl
    {
        get { return this; }
    }

    #endregion
    #endregion
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.CodeAnalysis.DocumentationCommentFormatting;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.MetadataAsSource
{
internal partial class AbstractMetadataAsSourceService
{
    /// <summary>
    /// Wraps an <see cref="INamedTypeSymbol"/>, exposing only the accessible
    /// (public/protected/protected-internal), supported members of displayable kinds,
    /// each wrapped in the matching Wrapped*Symbol type.
    /// </summary>
    private class WrappedNamedTypeSymbol : AbstractWrappedNamespaceOrTypeSymbol, INamedTypeSymbol
    {
        private readonly INamedTypeSymbol _symbol;
        private readonly ImmutableArray<ISymbol> _members;

        public WrappedNamedTypeSymbol(INamedTypeSymbol symbol, bool canImplementImplicitly, IDocumentationCommentFormattingService docCommentFormattingService)
            : base(symbol, canImplementImplicitly, docCommentFormattingService)
        {
            _symbol = symbol;

            // Keep only accessible, supported members of the kinds we can present,
            // wrapping each one.
            _members = _symbol.GetMembers()
                .Where(m => !m.HasUnsupportedMetadata)
                .Where(m => m.DeclaredAccessibility == Accessibility.Public ||
                            m.DeclaredAccessibility == Accessibility.Protected ||
                            m.DeclaredAccessibility == Accessibility.ProtectedOrInternal)
                .Where(m => m.Kind == SymbolKind.Event ||
                            m.Kind == SymbolKind.Field ||
                            m.Kind == SymbolKind.Method ||
                            m.Kind == SymbolKind.NamedType ||
                            m.Kind == SymbolKind.Property)
                .Select(m => WrapMember(m, canImplementImplicitly, docCommentFormattingService))
                .ToImmutableArray();
        }

        /// <summary>
        /// Wraps a single member in the Wrapped*Symbol type matching its kind.
        /// Callers guarantee the member is one of the five supported kinds.
        /// </summary>
        private static ISymbol WrapMember(ISymbol m, bool canImplementImplicitly, IDocumentationCommentFormattingService docCommentFormattingService)
        {
            switch (m.Kind)
            {
                case SymbolKind.Event:
                    return new WrappedEventSymbol((IEventSymbol)m, canImplementImplicitly, docCommentFormattingService);
                case SymbolKind.Field:
                    return new WrappedFieldSymbol((IFieldSymbol)m, docCommentFormattingService);
                case SymbolKind.Method:
                    return new WrappedMethodSymbol((IMethodSymbol)m, canImplementImplicitly, docCommentFormattingService);
                case SymbolKind.NamedType:
                    return new WrappedNamedTypeSymbol((INamedTypeSymbol)m, canImplementImplicitly, docCommentFormattingService);
                case SymbolKind.Property:
                    return new WrappedPropertySymbol((IPropertySymbol)m, canImplementImplicitly, docCommentFormattingService);
            }

            throw ExceptionUtilities.Unreachable;
        }

        // Simple pass-throughs to the underlying symbol.
        public int Arity => _symbol.Arity;

        public bool IsGenericType => _symbol.IsGenericType;

        public bool IsUnboundGenericType => _symbol.IsUnboundGenericType;

        public bool IsScriptClass => _symbol.IsScriptClass;

        public bool IsImplicitClass => _symbol.IsImplicitClass;

        public IEnumerable<string> MemberNames
        {
            get
            {
                throw new NotImplementedException();
            }
        }

        public ImmutableArray<ITypeParameterSymbol> TypeParameters => _symbol.TypeParameters;

        public ImmutableArray<ITypeSymbol> TypeArguments => _symbol.TypeArguments;

        public IMethodSymbol DelegateInvokeMethod => _symbol.DelegateInvokeMethod;

        public INamedTypeSymbol EnumUnderlyingType => _symbol.EnumUnderlyingType;

        public INamedTypeSymbol ConstructedFrom => _symbol.ConstructedFrom;

        public INamedTypeSymbol Construct(params ITypeSymbol[] typeArguments)
        {
            return _symbol.Construct(typeArguments);
        }

        public INamedTypeSymbol ConstructUnboundGenericType()
        {
            return _symbol.ConstructUnboundGenericType();
        }

        public ImmutableArray<IMethodSymbol> InstanceConstructors => _symbol.InstanceConstructors;

        public ImmutableArray<IMethodSymbol> StaticConstructors => _symbol.StaticConstructors;

        public ImmutableArray<IMethodSymbol> Constructors => _symbol.Constructors;

        public ISymbol AssociatedSymbol => _symbol.AssociatedSymbol;

        public TypeKind TypeKind => _symbol.TypeKind;

        public INamedTypeSymbol BaseType => _symbol.BaseType;

        public ImmutableArray<INamedTypeSymbol> Interfaces => _symbol.Interfaces;

        public ImmutableArray<INamedTypeSymbol> AllInterfaces => _symbol.AllInterfaces;

        public bool IsReferenceType => _symbol.IsReferenceType;

        public bool IsValueType => _symbol.IsValueType;

        public bool IsAnonymousType => _symbol.IsAnonymousType;

        ITypeSymbol ITypeSymbol.OriginalDefinition => _symbol.OriginalDefinition;

        public SpecialType SpecialType => _symbol.SpecialType;

        public ISymbol FindImplementationForInterfaceMember(ISymbol interfaceMember)
        {
            return _symbol.FindImplementationForInterfaceMember(interfaceMember);
        }

        // Returns the filtered, wrapped members computed in the constructor.
        public override ImmutableArray<ISymbol> GetMembers()
        {
            return _members;
        }

        public override ImmutableArray<ISymbol> GetMembers(string name)
        {
            throw new NotImplementedException();
        }

        public override ImmutableArray<INamedTypeSymbol> GetTypeMembers()
        {
            throw new NotImplementedException();
        }

        public override ImmutableArray<INamedTypeSymbol> GetTypeMembers(string name)
        {
            throw new NotImplementedException();
        }

        public override ImmutableArray<INamedTypeSymbol> GetTypeMembers(string name, int arity)
        {
            throw new NotImplementedException();
        }

        // The wrapper itself acts as its own original definition.
        public new INamedTypeSymbol OriginalDefinition => this;

        public bool MightContainExtensionMethods => _symbol.MightContainExtensionMethods;
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.Implementation.Debugging;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.LanguageServices.Implementation.Debugging;
using Microsoft.VisualStudio.LanguageServices.Implementation.Extensions;
using Microsoft.VisualStudio.LanguageServices.Implementation.Utilities;
using Microsoft.VisualStudio.Shell.Interop;
using Roslyn.Utilities;
using IVsDebugName = Microsoft.VisualStudio.TextManager.Interop.IVsDebugName;
using IVsEnumBSTR = Microsoft.VisualStudio.TextManager.Interop.IVsEnumBSTR;
using IVsTextBuffer = Microsoft.VisualStudio.TextManager.Interop.IVsTextBuffer;
using IVsTextLines = Microsoft.VisualStudio.TextManager.Interop.IVsTextLines;
using RESOLVENAMEFLAGS = Microsoft.VisualStudio.TextManager.Interop.RESOLVENAMEFLAGS;
using VsTextSpan = Microsoft.VisualStudio.TextManager.Interop.TextSpan;
namespace Microsoft.VisualStudio.LanguageServices.Implementation.LanguageService
{
internal abstract partial class AbstractLanguageService<TPackage, TLanguageService>
{
internal class VsLanguageDebugInfo : IVsLanguageDebugInfo
{
// language GUID handed back to the debugger from GetLanguageID
private readonly Guid _languageId;
private readonly TLanguageService _languageService;
// per-language debugging services; resolved via GetService, so any of these
// may be null when the language does not provide the service (callers null-check)
private readonly ILanguageDebugInfoService _languageDebugInfo;
private readonly IBreakpointResolutionService _breakpointService;
private readonly IProximityExpressionsService _proximityExpressionsService;
private readonly IWaitIndicator _waitIndicator;
private readonly CachedProximityExpressionsGetter _cachedProximityExpressionsGetter;

/// <summary>
/// Creates the IVsLanguageDebugInfo implementation for a language, resolving the
/// debugging-related services from the given language service provider.
/// </summary>
public VsLanguageDebugInfo(
    Guid languageId,
    TLanguageService languageService,
    HostLanguageServices languageServiceProvider,
    IWaitIndicator waitIndicator)
{
    Contract.ThrowIfNull(languageService);
    Contract.ThrowIfNull(languageServiceProvider);
    _languageId = languageId;
    _languageService = languageService;
    _languageDebugInfo = languageServiceProvider.GetService<ILanguageDebugInfoService>();
    _breakpointService = languageServiceProvider.GetService<IBreakpointResolutionService>();
    _proximityExpressionsService = languageServiceProvider.GetService<IProximityExpressionsService>();
    _cachedProximityExpressionsGetter = new CachedProximityExpressionsGetter(_proximityExpressionsService);
    _waitIndicator = waitIndicator;
}
/// <summary>
/// Forwards debug-mode transitions to the cached proximity-expressions getter.
/// </summary>
internal void OnDebugModeChanged(DebugMode debugMode)
{
    _cachedProximityExpressionsGetter.OnDebugModeChanged(debugMode);
}
/// <summary>
/// Returns this service's language GUID for any buffer position; always succeeds.
/// </summary>
public int GetLanguageID(IVsTextBuffer pBuffer, int iLine, int iCol, out Guid pguidLanguageID)
{
    pguidLanguageID = _languageId;
    return VSConstants.S_OK;
}
/// <summary>
/// Not implemented: mapping a name back to a document and span is not supported here.
/// </summary>
public int GetLocationOfName(string pszName, out string pbstrMkDoc, out VsTextSpan pspanLocation)
{
    pbstrMkDoc = null;
    pspanLocation = default(VsTextSpan);
    return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Resolves the name (and line offset within it) of the code location at the given
/// buffer position, blocking with a cancellable wait dialog while the language's
/// ILanguageDebugInfoService computes it. Returns S_FALSE when no name is found.
/// </summary>
public int GetNameOfLocation(IVsTextBuffer pBuffer, int iLine, int iCol, out string pbstrName, out int piLineOffset)
{
    using (Logger.LogBlock(FunctionId.Debugging_VsLanguageDebugInfo_GetNameOfLocation, CancellationToken.None))
    {
        // results are captured by the closure below and copied out after the wait
        string name = null;
        int lineOffset = 0;
        var succeeded = false;
        // _languageDebugInfo may be null when the language does not provide the service
        if (_languageDebugInfo != null)
        {
            _waitIndicator.Wait(
                title: ServicesVSResources.Debugger,
                message: ServicesVSResources.Determining_breakpoint_location,
                allowCancel: true,
                action: waitContext =>
                {
                    var cancellationToken = waitContext.CancellationToken;
                    var textBuffer = _languageService.EditorAdaptersFactoryService.GetDataBuffer(pBuffer);
                    if (textBuffer != null)
                    {
                        var point = textBuffer.CurrentSnapshot.GetPoint(iLine, iCol);
                        var document = point.Snapshot.GetOpenDocumentInCurrentContextWithChanges();
                        if (document != null)
                        {
                            // NOTE(cyrusn): We have to wait here because the debuggers'
                            // GetNameOfLocation is a blocking call. In the future, it
                            // would be nice if they could make it async.
                            var debugLocationInfo = _languageDebugInfo.GetLocationInfoAsync(document, point, cancellationToken).WaitAndGetResult(cancellationToken);
                            if (!debugLocationInfo.IsDefault)
                            {
                                succeeded = true;
                                name = debugLocationInfo.Name;
                                lineOffset = debugLocationInfo.LineOffset;
                            }
                        }
                    }
                });
            if (succeeded)
            {
                pbstrName = name;
                piLineOffset = lineOffset;
                return VSConstants.S_OK;
            }
        }
        // Note(DustinCa): Docs say that GetNameOfLocation should return S_FALSE if a name could not be found.
        // Also, that's what the old native code does, so we should do it here.
        pbstrName = null;
        piLineOffset = 0;
        return VSConstants.S_FALSE;
    }
}
/// <summary>
/// Computes the "autos" (proximity expressions) near the given buffer position,
/// blocking with a cancellable wait dialog. Returns E_FAIL when no expressions
/// could be computed.
/// </summary>
public int GetProximityExpressions(IVsTextBuffer pBuffer, int iLine, int iCol, int cLines, out IVsEnumBSTR ppEnum)
{
    // NOTE(cyrusn): cLines is ignored. This is to match existing dev10 behavior.
    using (Logger.LogBlock(FunctionId.Debugging_VsLanguageDebugInfo_GetProximityExpressions, CancellationToken.None))
    {
        // results are captured by the closure below and copied out after the wait
        VsEnumBSTR enumBSTR = null;
        var succeeded = false;
        _waitIndicator.Wait(
            title: ServicesVSResources.Debugger,
            message: ServicesVSResources.Determining_autos,
            allowCancel: true,
            action: waitContext =>
            {
                var textBuffer = _languageService.EditorAdaptersFactoryService.GetDataBuffer(pBuffer);
                if (textBuffer != null)
                {
                    var snapshot = textBuffer.CurrentSnapshot;
                    Document document = snapshot.GetOpenDocumentInCurrentContextWithChanges();
                    if (document != null)
                    {
                        var point = snapshot.GetPoint(iLine, iCol);
                        // blocking wait: the debugger calls this synchronously
                        var proximityExpressions = _proximityExpressionsService.GetProximityExpressionsAsync(document, point.Position, waitContext.CancellationToken).WaitAndGetResult(waitContext.CancellationToken);
                        if (proximityExpressions != null)
                        {
                            enumBSTR = new VsEnumBSTR(proximityExpressions);
                            succeeded = true;
                        }
                    }
                }
            });
        if (succeeded)
        {
            ppEnum = enumBSTR;
            return VSConstants.S_OK;
        }
        ppEnum = null;
        return VSConstants.E_FAIL;
    }
}
/// <summary>
/// Always reports not-implemented, so the caller (presumably the debugger) falls back to its
/// default handling for mapped locations.
/// </summary>
public int IsMappedLocation(IVsTextBuffer pBuffer, int iLine, int iCol)
{
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Resolves a name the user entered (e.g. a function breakpoint) to zero or more locations.
/// Only RNF_BREAKPOINT resolution is handled; any other flag yields E_NOTIMPL.
/// </summary>
public int ResolveName(string pszName, uint dwFlags, out IVsEnumDebugName ppNames)
{
using (Logger.LogBlock(FunctionId.Debugging_VsLanguageDebugInfo_ResolveName, CancellationToken.None))
{
// In VS, this method frequently gets called with an empty string to test if the language service
// supports this method (some language services, like F#, implement IVsLanguageDebugInfo but don't
// implement this method). In that scenario, there's no sense doing work, so we'll just return
// S_FALSE (as the old VB language service did).
if (string.IsNullOrEmpty(pszName))
{
ppNames = null;
return VSConstants.S_FALSE;
}
VsEnumDebugName enumName = null;
var succeeded = false;
_waitIndicator.Wait(
title: ServicesVSResources.Debugger,
message: ServicesVSResources.Resolving_breakpoint_location,
allowCancel: true,
action: waitContext =>
{
var cancellationToken = waitContext.CancellationToken;
// Only breakpoint-style name resolution is supported.
if (dwFlags == (uint)RESOLVENAMEFLAGS.RNF_BREAKPOINT)
{
var solution = _languageService.Workspace.CurrentSolution;
// NOTE(cyrusn): We have to wait here because the debuggers' ResolveName
// call is synchronous. In the future it would be nice to make it async.
if (_breakpointService != null)
{
var breakpoints = _breakpointService.ResolveBreakpointsAsync(solution, pszName, cancellationToken).WaitAndGetResult(cancellationToken);
var debugNames = breakpoints.Select(bp => CreateDebugName(bp, solution, cancellationToken)).WhereNotNull().ToList();
enumName = new VsEnumDebugName(debugNames);
succeeded = true;
}
}
});
if (succeeded)
{
ppNames = enumName;
return VSConstants.S_OK;
}
ppNames = null;
return VSConstants.E_NOTIMPL;
}
}
/// <summary>
/// Converts a resolved breakpoint into an IVsDebugName (display name + file path + span)
/// the debugger can consume.
/// </summary>
private IVsDebugName CreateDebugName(BreakpointResolutionResult breakpoint, Solution solution, CancellationToken cancellationToken)
{
var document = breakpoint.Document;
var filePath = _languageService.Workspace.GetFilePath(document.Id);
// Blocking wait: callers are already on a synchronous debugger code path.
var text = document.GetTextAsync(cancellationToken).WaitAndGetResult(cancellationToken);
var span = text.GetVsTextSpanForSpan(breakpoint.TextSpan);
// If we're inside an Venus code nugget, we need to map the span to the surface buffer.
// Otherwise, we'll just use the original span.
VsTextSpan mappedSpan;
if (!span.TryMapSpanFromSecondaryBufferToPrimaryBuffer(solution.Workspace, document.Id, out mappedSpan))
{
mappedSpan = span;
}
return new VsDebugName(breakpoint.LocationNameOpt, filePath, mappedSpan);
}
/// <summary>
/// Validates (and possibly adjusts) a candidate breakpoint location. The actual work is done
/// in ValidateBreakpointLocationWorker; this wrapper only runs it under a cancellable wait
/// dialog and logs the operation.
/// </summary>
public int ValidateBreakpointLocation(IVsTextBuffer pBuffer, int iLine, int iCol, VsTextSpan[] pCodeSpan)
{
using (Logger.LogBlock(FunctionId.Debugging_VsLanguageDebugInfo_ValidateBreakpointLocation, CancellationToken.None))
{
// Default to E_NOTIMPL in case the wait is cancelled before the worker runs.
int result = VSConstants.E_NOTIMPL;
_waitIndicator.Wait(
title: ServicesVSResources.Debugger,
message: ServicesVSResources.Validating_breakpoint_location,
allowCancel: true,
action: waitContext =>
{
result = ValidateBreakpointLocationWorker(pBuffer, iLine, iCol, pCodeSpan, waitContext.CancellationToken);
});
return result;
}
}
/// <summary>
/// Core breakpoint validation: maps the position into a Roslyn document, optionally snaps the
/// requested position to an existing breakpoint span, resolves the breakpoint via the
/// breakpoint service, and writes the adjusted span back into pCodeSpan.
/// </summary>
/// <returns>
/// S_OK when a breakpoint is valid here (pCodeSpan updated); E_NOTIMPL to defer to the
/// debugger (line breakpoints, or no usable buffer/document); E_FAIL when a breakpoint must
/// not be placed here.
/// </returns>
private int ValidateBreakpointLocationWorker(
IVsTextBuffer pBuffer,
int iLine,
int iCol,
VsTextSpan[] pCodeSpan,
CancellationToken cancellationToken)
{
if (_breakpointService == null)
{
return VSConstants.E_FAIL;
}
var textBuffer = _languageService.EditorAdaptersFactoryService.GetDataBuffer(pBuffer);
if (textBuffer != null)
{
var snapshot = textBuffer.CurrentSnapshot;
// Frozen partial semantics: a fast, possibly-incomplete semantic model is acceptable here.
Document document = snapshot.AsText().GetDocumentWithFrozenPartialSemanticsAsync(cancellationToken).WaitAndGetResult(cancellationToken);
if (document != null)
{
var point = snapshot.GetPoint(iLine, iCol);
var length = 0;
if (pCodeSpan != null && pCodeSpan.Length > 0)
{
// If we have a non-empty span then it means that the debugger is asking us to adjust an
// existing span. In Everett we didn't do this so we had some good and some bad
// behavior. For example if you had a breakpoint on: "int i;" and you changed it to "int
// i = 4;", then the breakpoint wouldn't adjust. That was bad. However, if you had the
// breakpoint on an open or close curly brace then it would always "stick" to that brace
// which was good.
//
// So we want to keep the best parts of both systems. We want to appropriately "stick"
// to tokens and we also want to adjust spans intelligently.
//
// However, it turns out the latter is hard to do when there are parse errors in the
// code. Things like missing name nodes cause a lot of havoc and make it difficult to
// track a closing curly brace.
//
// So the way we do this is that we default to not intelligently adjusting the spans
// while there are parse errors. But when there are no parse errors then the span is
// adjusted.
var initialBreakpointSpan = snapshot.GetSpan(pCodeSpan[0]);
if (initialBreakpointSpan.Length > 0 && document.SupportsSyntaxTree)
{
var tree = document.GetSyntaxTreeSynchronously(cancellationToken);
// Per the comment above: bail out of adjustment entirely when the tree has parse errors.
if (tree.GetDiagnostics(cancellationToken).Any(d => d.Severity == DiagnosticSeverity.Error))
{
return VSConstants.E_FAIL;
}
}
// If a span is provided, and the requested position falls in that span, then just
// move the requested position to the start of the span.
// Length will be used to determine if we need further analysis, which is only required when text spans multiple lines.
if (initialBreakpointSpan.Contains(point))
{
point = initialBreakpointSpan.Start;
length = pCodeSpan[0].iEndLine > pCodeSpan[0].iStartLine ? initialBreakpointSpan.Length : 0;
}
}
// NOTE(cyrusn): we need to wait here because ValidateBreakpointLocation is
// synchronous. In the future, it would be nice for the debugger to provide
// an async entry point for this.
var breakpoint = _breakpointService.ResolveBreakpointAsync(document, new CodeAnalysis.Text.TextSpan(point.Position, length), cancellationToken).WaitAndGetResult(cancellationToken);
if (breakpoint == null)
{
// There should *not* be a breakpoint here. E_FAIL to let the debugger know
// that.
return VSConstants.E_FAIL;
}
if (breakpoint.IsLineBreakpoint)
{
// Let the debugger take care of this. They'll put a line breakpoint
// here. This is useful for when the user does something like put a
// breakpoint in inactive code. We want to allow this as they might
// just have different defines during editing versus debugging.
// TODO(cyrusn): Do we need to set the pCodeSpan in this case?
return VSConstants.E_NOTIMPL;
}
// There should be a breakpoint at the location passed back.
if (pCodeSpan != null && pCodeSpan.Length > 0)
{
pCodeSpan[0] = breakpoint.TextSpan.ToSnapshotSpan(snapshot).ToVsTextSpan();
}
return VSConstants.S_OK;
}
}
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Computes DataTip (debugger hover) text for the given span by asking the language debug
/// info service for the tip expression/span, then delegating to the debugger for the value.
/// </summary>
// NOTE(review): pbstrText is a by-value parameter here, so the assignments below never reach
// the COM caller. IVsLanguageDebugInfo.GetDataTipText declares this as an `out` parameter —
// confirm whether the signature was altered in transit (an out parameter cannot be captured
// by the lambda, which would explain an assign-through-local pattern in the original).
public int GetDataTipText(IVsTextBuffer pBuffer, VsTextSpan[] pSpan, string pbstrText)
{
using (Logger.LogBlock(FunctionId.Debugging_VsLanguageDebugInfo_GetDataTipText, CancellationToken.None))
{
pbstrText = null;
if (pSpan == null || pSpan.Length != 1)
{
return VSConstants.E_INVALIDARG;
}
int result = VSConstants.E_FAIL;
_waitIndicator.Wait(
title: ServicesVSResources.Debugger,
message: ServicesVSResources.Getting_DataTip_text,
allowCancel: true,
action: waitContext =>
{
var debugger = _languageService.Debugger;
DBGMODE[] debugMode = new DBGMODE[1];
var cancellationToken = waitContext.CancellationToken;
// DataTips only apply while actually debugging (i.e. not in design mode).
if (ErrorHandler.Succeeded(debugger.GetMode(debugMode)) && debugMode[0] != DBGMODE.DBGMODE_Design)
{
var editorAdapters = _languageService.EditorAdaptersFactoryService;
var textSpan = pSpan[0];
var subjectBuffer = editorAdapters.GetDataBuffer(pBuffer);
var textSnapshot = subjectBuffer.CurrentSnapshot;
var document = textSnapshot.GetOpenDocumentInCurrentContextWithChanges();
if (document != null)
{
var spanOpt = textSnapshot.TryGetSpan(textSpan);
if (spanOpt.HasValue)
{
// Blocking wait: GetDataTipText is a synchronous debugger callback.
var dataTipInfo = _languageDebugInfo.GetDataTipInfoAsync(document, spanOpt.Value.Start, cancellationToken).WaitAndGetResult(cancellationToken);
if (!dataTipInfo.IsDefault)
{
var resultSpan = dataTipInfo.Span.ToSnapshotSpan(textSnapshot);
string textOpt = dataTipInfo.Text;
// Hand the (possibly adjusted) span back and let the debugger format the value.
pSpan[0] = resultSpan.ToVsTextSpan();
result = debugger.GetDataTipValue((IVsTextLines)pBuffer, pSpan, textOpt, out pbstrText);
}
}
}
}
});
return result;
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace invisible.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
// Seed with the default factory; callers may Insert(0, ...) overrides or Add(...) fallbacks.
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langref="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
// Directly-provided action samples (added above) take precedence over generated ones.
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
// May still be null if no factory produced an object; callers treat that as "no sample".
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
// An explicitly registered ActualHttpMessageType (exact parameters or "*" wildcard) wins.
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
// Otherwise fall back to what ApiExplorer discovered for the action.
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A <see cref="TextSample"/> on success, or an <see cref="InvalidSample"/> describing the failure.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
// Blocking wait is acceptable here: sample generation happens at help-page render time.
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
// Pretty-print known text formats based on the media type name.
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
/// <summary>
/// Unwraps an <see cref="AggregateException"/> (as thrown by <c>Task.Wait</c>) to its first
/// inner exception; any other exception is returned unchanged.
/// </summary>
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
/// <summary>
/// Default factory for sample objects: generates an instance of <paramref name="type"/>
/// using <see cref="ObjectGenerator"/>.
/// </summary>
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
/// <summary>
/// Pretty-prints a JSON string with indentation; returns the original string if it cannot be parsed.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
/// <summary>
/// Pretty-prints an XML string; returns the original string if it cannot be parsed.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
/// <summary>
/// Determines whether the formatter supports the type for the given direction:
/// readable for requests, writable for responses.
/// </summary>
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
/// <summary>
/// Enumerates entries in <see cref="ActionSamples"/> that match the controller, action,
/// parameter set (exact, ignoring order, or the "*" wildcard) and direction; name
/// comparisons are case-insensitive.
/// </summary>
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
/// <summary>
/// Wraps a raw string sample in a <see cref="TextSample"/> so it is rendered as text;
/// any other sample is returned unchanged.
/// </summary>
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
#region License
//-----------------------------------------------------------------------
// <copyright>
// The MIT License (MIT)
//
// Copyright (c) 2014 Kirk S Woll
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
//-----------------------------------------------------------------------
#endregion
using System;
using System.Linq;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp.Syntax;
namespace WootzJs.Compiler.JsAst
{
public static class Js
{
/// <summary>Wraps a variable declaration in a local-variable declaration statement.</summary>
public static JsLocalVariableDeclaration Local(JsVariableDeclaration declaration)
{
    var statement = new JsLocalVariableDeclaration(declaration);
    return statement;
}
/// <summary>Creates a local-variable declaration statement for the given declarators.</summary>
public static JsLocalVariableDeclaration Local(params JsVariableDeclarator[] declarators)
{
    var declaration = Declare(declarators);
    return Local(declaration);
}
/// <summary>Creates a local-variable declaration statement for one named variable, optionally initialized.</summary>
public static JsLocalVariableDeclaration Local(string name, JsExpression initializer = null)
{
    var declarator = Variable(name, initializer);
    return Local(Declare(declarator));
}
/// <summary>Builds a variable declaration containing the given declarators.</summary>
public static JsVariableDeclaration Declare(params JsVariableDeclarator[] declarators)
{
    var declaration = new JsVariableDeclaration();
    declaration.Declarations.AddRange(declarators);
    return declaration;
}
/// <summary>Builds a variable declaration for one named variable, optionally initialized.</summary>
public static JsVariableDeclaration Declare(string name, JsExpression initializer = null)
{
    var declarator = Variable(name, initializer);
    return Declare(declarator);
}
/// <summary>Creates a variable declarator with the given name and optional initializer.</summary>
public static JsVariableDeclarator Variable(string name, JsExpression initializer = null)
{
    var declarator = new JsVariableDeclarator(name, initializer);
    return declarator;
}
/// <summary>Creates an assignment expression: left = right.</summary>
public static JsBinaryExpression Assign(this JsExpression left, JsExpression right)
{
    var assignment = new JsBinaryExpression(JsBinaryOperator.Assign, left, right);
    return assignment;
}
/// <summary>Creates a binary expression with the given operator and operands.</summary>
public static JsBinaryExpression Binary(JsBinaryOperator @operator, JsExpression left, JsExpression right)
{
    var expression = new JsBinaryExpression(@operator, left, right);
    return expression;
}
/// <summary>Creates a unary expression with the given operator and operand.</summary>
public static JsUnaryExpression Unary(JsUnaryOperator @operator, JsExpression operand)
{
    var expression = new JsUnaryExpression(@operator, operand);
    return expression;
}
/// <summary>Creates a post-increment expression: expression++.</summary>
public static JsUnaryExpression Increment(this JsExpression expression)
{
    var incremented = Unary(JsUnaryOperator.PostIncrement, expression);
    return incremented;
}
/// <summary>Creates a reference to the variable with the given name.</summary>
public static JsVariableReferenceExpression Reference(string name)
{
    var reference = new JsVariableReferenceExpression(name);
    return reference;
}
/// <summary>
/// Creates a reference to a declared function by its name; throws for anonymous functions.
/// </summary>
public static JsVariableReferenceExpression Reference(JsFunctionDeclaration function)
{
    var name = function.Function.Name;
    if (name == null)
    {
        throw new Exception("Cannot reference anonymous function by name");
    }
    return new JsVariableReferenceExpression(name);
}
/// <summary>
/// Returns the type's namespace-qualified name (or bare identifier when not in a namespace).
/// </summary>
public static string GetFullName(this BaseTypeDeclarationSyntax typeDeclaration)
{
    string name;
    string ns;
    GetTypeInfo(typeDeclaration, out name, out ns);
    return name;
}
/// <summary>
/// Extracts the type's identifier and enclosing namespace name. When a namespace is found,
/// fullTypeName is "namespace.identifier"; otherwise @namespace is null and fullTypeName is
/// the bare identifier.
/// </summary>
public static void GetTypeInfo(this BaseTypeDeclarationSyntax typeDeclaration, out string fullTypeName, out string @namespace)
{
    // Walk up the syntax tree to the nearest enclosing namespace declaration, if any.
    NamespaceDeclarationSyntax namespaceDeclaration = null;
    SyntaxNode current = typeDeclaration;
    while (current != null)
    {
        namespaceDeclaration = current as NamespaceDeclarationSyntax;
        if (namespaceDeclaration != null)
        {
            break;
        }
        current = current.Parent;
    }
    @namespace = namespaceDeclaration == null ? null : namespaceDeclaration.Name.ToString();
    fullTypeName = typeDeclaration.Identifier.ValueText;
    if (@namespace != null)
    {
        fullTypeName = @namespace + "." + fullTypeName;
    }
}
/// <summary>Creates a member access on the target: target.name.</summary>
public static JsMemberReferenceExpression Member(this JsExpression target, string name)
{
    var member = new JsMemberReferenceExpression(target, name);
    return member;
}
/// <summary>
/// Builds a member-reference chain from a dotted path: "a.b.c" becomes a reference to
/// variable "a" with member accesses "b" then "c".
/// </summary>
/// <param name="path">Dot-separated path; must contain at least two parts.</param>
public static JsMemberReferenceExpression Member(string path)
{
    var parts = path.Split('.');
    if (parts.Length < 2)
        throw new Exception("Cannot construct path with only one part");
    var result = new JsMemberReferenceExpression();
    var current = result;
    // Build from the rightmost member inward; the leftmost part becomes the root variable.
    for (var i = parts.Length - 1; i >= 0; i--)
    {
        current.Name = parts[i];
        if (i == 1)
        {
            // Fix: only read the preceding part when it is actually needed; the original
            // computed parts[i - 1] on every iteration even though it was used only here.
            current.Target = Reference(parts[0]);
            return result;
        }
        else
        {
            current.Target = new JsMemberReferenceExpression();
            current = (JsMemberReferenceExpression)current.Target;
        }
    }
    // Unreachable: parts.Length >= 2, so the loop always returns when i reaches 1.
    throw new Exception("Should never get here");
}
/// <summary>Creates an object literal expression containing the given items.</summary>
public static JsObjectExpression Object(params JsObjectItem[] items)
{
    var result = new JsObjectExpression();
    result.Items.AddRange(items);
    return result;
}
/// <summary>Creates a name/value pair for use in an object literal.</summary>
public static JsObjectItem Item(string name, JsExpression value)
{
    var item = new JsObjectItem(name, value);
    return item;
}
/// <summary>Creates an invocation of the target with the given arguments.</summary>
public static JsInvocationExpression Invoke(this JsExpression target, params JsExpression[] arguments)
{
    var invocation = new JsInvocationExpression(target);
    invocation.AddArgumentRange(arguments);
    return invocation;
}
/// <summary>Wraps a function in a function declaration statement.</summary>
public static JsFunctionDeclaration Declare(JsFunction function)
{
    var declaration = new JsFunctionDeclaration(function);
    return declaration;
}
/// <summary>Creates a named function with the given parameters.</summary>
public static JsFunction NamedFunction(string name, params IJsDeclaration[] parameters)
{
    var result = new JsFunction(name);
    foreach (var p in parameters)
    {
        result.Parameters.Add(p);
    }
    return result;
}
/// <summary>Creates an anonymous function with the given parameters.</summary>
public static JsFunction Function(params IJsDeclaration[] parameters)
{
    var anonymous = NamedFunction(null, parameters);
    return anonymous;
}
/// <summary>Creates a function parameter with the given name.</summary>
public static JsParameter Parameter(string name)
{
    var parameter = new JsParameter(name);
    return parameter;
}
/// <summary>Creates a string literal expression.</summary>
public static JsPrimitiveExpression Primitive(string s)
{
    var literal = new JsPrimitiveExpression(s);
    return literal;
}
/// <summary>Creates an integer literal expression.</summary>
public static JsPrimitiveExpression Primitive(int value)
{
    var literal = new JsPrimitiveExpression(value);
    return literal;
}
/// <summary>Creates a boolean literal expression.</summary>
public static JsPrimitiveExpression Primitive(bool value)
{
    var literal = new JsPrimitiveExpression(value);
    return literal;
}
/// <summary>Wraps an expression in an expression statement.</summary>
public static JsExpressionStatement Express(this JsExpression expression)
{
    var statement = new JsExpressionStatement(expression);
    return statement;
}
/// <summary>Appends an expression (as a statement) to the function body; returns the function for chaining.</summary>
public static JsFunction Body(this JsFunction function, JsExpression expression)
{
    var statement = Express(expression);
    function.Body.Add(statement);
    return function;
}
// Appends a statement to the function's body and returns the function for chaining.
// NOTE(review): Body.Aggregate takes a JsStatement, so this is presumably a JsBlockStatement
// member that appends/merges statements — it cannot be LINQ's Enumerable.Aggregate, which
// requires a delegate. Confirm against JsBlockStatement's definition.
public static JsFunction Body(this JsFunction function, JsStatement statement)
{
function.Body.Aggregate(statement);
return function;
}
// Replaces the function's entire body with the given block; returns the function for chaining.
public static JsFunction Body(this JsFunction function, JsBlockStatement statement)
{
function.Body = statement;
return function;
}
/// <summary>Creates a bare return statement (no value).</summary>
public static JsReturnStatement Return()
{
    var statement = new JsReturnStatement();
    return statement;
}
/// <summary>Creates a return statement yielding the given expression.</summary>
public static JsReturnStatement Return(this JsExpression expression)
{
    var statement = new JsReturnStatement(expression);
    return statement;
}
/// <summary>Creates a `this` expression.</summary>
public static JsThisExpression This()
{
    var expression = new JsThisExpression();
    return expression;
}
/// <summary>Creates a null literal expression.</summary>
public static JsPrimitiveExpression Null()
{
    var literal = new JsPrimitiveExpression();
    return literal;
}
/// <summary>
/// Converts a compile-time constant into the corresponding JS expression: null, string,
/// bool, all numeric/char primitives, attribute TypedConstants (arrays expanded
/// element-by-element), and type symbols (emitted as a reference to their full name).
/// Throws for any other type.
/// </summary>
public static JsExpression Literal(object value)
{
if (value == null)
return new JsPrimitiveExpression();
if (value is string)
return new JsPrimitiveExpression((string)value);
if (value is bool)
return new JsPrimitiveExpression((bool)value);
if (value is int)
return new JsPrimitiveExpression((int)value);
if (value is uint)
return new JsPrimitiveExpression((uint)value);
if (value is long)
return new JsPrimitiveExpression((long)value);
if (value is ulong)
return new JsPrimitiveExpression((ulong)value);
if (value is float)
return new JsPrimitiveExpression((float)value);
if (value is double)
return new JsPrimitiveExpression((double)value);
if (value is byte)
return new JsPrimitiveExpression((byte)value);
if (value is short)
return new JsPrimitiveExpression((short)value);
if (value is sbyte)
return new JsPrimitiveExpression((sbyte)value);
if (value is ushort)
return new JsPrimitiveExpression((ushort)value);
if (value is char)
return new JsPrimitiveExpression((char)value);
// Attribute argument constants: recurse into the wrapped value (or each array element).
if (value is TypedConstant)
{
var typedConstant = (TypedConstant)value;
if (typedConstant.Type.Kind == SymbolKind.ArrayType)
{
return Array(typedConstant.Values.Select(x => Literal(x.Value)).ToArray());
}
else
{
return Literal(((TypedConstant)value).Value);
}
}
// typeof(...)-style constants: emit a reference to the type's fully-qualified name.
if (value is ITypeSymbol)
return Reference(((ITypeSymbol)value).GetFullName());
else
throw new Exception("Unexpected primitive type: " + value);
}
/*
public static JsNewExpression NakedNew(JsExpression expression)
{
return new JsNewExpression(expression.Invoke());
}
*/
/// <summary>Creates "new Type(args...)" by wrapping an invocation of the type expression.</summary>
public static JsNewExpression New(JsExpression type, params JsExpression[] arguments)
{
return new JsNewExpression(type.Invoke(arguments));
}
/// <summary>Creates an if/else statement from statement branches (else branch optional).</summary>
public static JsIfStatement If(JsExpression condition, JsStatement ifTrue, JsStatement ifFalse = null)
{
return new JsIfStatement(condition, ifTrue, ifFalse);
}
/// <summary>Creates an if/else statement from expression branches, wrapping each in an expression statement.</summary>
public static JsIfStatement If(JsExpression condition, JsExpression ifTrue, JsExpression ifFalse = null)
{
return new JsIfStatement(condition, Express(ifTrue), ifFalse != null ? Express(ifFalse) : null);
}
/// <summary>Emits raw JS source as a statement, verbatim.</summary>
public static JsNativeStatement Native(string code)
{
return new JsNativeStatement(code);
}
/// <summary>Emits raw JS source as an expression, verbatim.</summary>
public static JsNativeExpression NativeExpression(string code)
{
return new JsNativeExpression(code);
}
/// <summary>Wraps an expression in parentheses.</summary>
public static JsParentheticalExpression Parenthetical(this JsExpression expression)
{
return new JsParentheticalExpression(expression);
}
/// <summary>Creates "new Array(size)".</summary>
public static JsNewArrayExpression NewArray(JsExpression size)
{
return new JsNewArrayExpression(size);
}
/// <summary>Creates "for (var decl in target)" from an explicit declaration.</summary>
public static JsForInStatement ForIn(JsVariableDeclaration declaration, JsExpression target)
{
return new JsForInStatement(declaration, target);
}
/// <summary>Creates a for-in from a single declarator.</summary>
public static JsForInStatement ForIn(JsVariableDeclarator declarator, JsExpression target)
{
return ForIn(Declare(declarator), target);
}
/// <summary>Creates a for-in over a fresh variable of the given name.</summary>
public static JsForInStatement ForIn(string variableName, JsExpression target)
{
return ForIn(Variable(variableName), target);
}
/// <summary>Sets the body of a for-in statement; returns it for fluent chaining.</summary>
public static JsForInStatement Body(this JsForInStatement forInStatement, JsStatement statement)
{
forInStatement.Body = statement;
return forInStatement;
}
/// <summary>Creates a for statement with one initializer expression.</summary>
public static JsForStatement For(JsExpression initializer, JsExpression condition, params JsExpression[] incrementors)
{
var forStatement = new JsForStatement(condition);
forStatement.Initializers.Add(initializer);
forStatement.Incrementors.AddRange(incrementors);
return forStatement;
}
/// <summary>Creates a for statement with multiple initializer expressions.</summary>
public static JsForStatement For(JsExpression[] initializers, JsExpression condition, params JsExpression[] incrementors)
{
var forStatement = new JsForStatement(condition);
forStatement.Initializers.AddRange(initializers);
forStatement.Incrementors.AddRange(incrementors);
return forStatement;
}
/// <summary>Creates a for statement whose initializer is a variable declaration.</summary>
public static JsForStatement For(JsVariableDeclaration declaration, JsExpression condition, params JsExpression[] incrementors)
{
var forStatement = new JsForStatement(declaration, condition);
forStatement.Incrementors.AddRange(incrementors);
return forStatement;
}
/// <summary>Creates a for statement from a single declarator.</summary>
public static JsForStatement For(JsVariableDeclarator declarator, JsExpression condition, params JsExpression[] incrementors)
{
return For(Declare(declarator), condition, incrementors);
}
/// <summary>Creates a for statement declaring a named loop variable with an initial value.</summary>
public static JsForStatement For(string variableName, JsExpression initializer, JsExpression condition, params JsExpression[] incrementors)
{
return For(Variable(variableName, initializer), condition, incrementors);
}
/// <summary>Sets the body of a for statement; returns it for fluent chaining.</summary>
public static JsForStatement Body(this JsForStatement forStatement, JsStatement body)
{
forStatement.Body = body;
return forStatement;
}
/// <summary>Creates the indexing expression "target[index]".</summary>
public static JsIndexExpression Index(this JsExpression target, JsExpression index)
{
return new JsIndexExpression(target, index);
}
/// <summary>Creates a JS regex literal from the given pattern.</summary>
public static JsRegexExpression Regex(string pattern)
{
return new JsRegexExpression(pattern);
}
/// <summary>Creates an array literal "[e1, e2, ...]".</summary>
public static JsArrayExpression Array(params JsExpression[] elements)
{
var expression = new JsArrayExpression();
expression.Elements.AddRange(elements);
return expression;
}
/// <summary>Creates a "throw expr;" statement.</summary>
public static JsThrowStatement Throw(JsExpression expression)
{
return new JsThrowStatement(expression);
}
/// <summary>Creates a switch statement over the expression with the given sections.</summary>
public static JsSwitchStatement Switch(JsExpression expression, params JsSwitchSection[] sections)
{
var switchStatement = new JsSwitchStatement(expression);
switchStatement.Sections.AddRange(sections);
return switchStatement;
}
/// <summary>Creates the "default:" switch label.</summary>
public static JsSwitchLabel DefaultLabel()
{
return new JsSwitchLabel(true, null);
}
/// <summary>Creates a "case label:" switch label.</summary>
public static JsSwitchLabel CaseLabel(JsExpression label)
{
return new JsSwitchLabel(false, label);
}
/// <summary>Creates a switch section containing only the default label.</summary>
public static JsSwitchSection DefaultSection()
{
return Section(DefaultLabel());
}
/// <summary>Creates a switch section from case-label expressions.</summary>
public static JsSwitchSection Section(params JsExpression[] caseLabels)
{
return Section(caseLabels.Select(x => CaseLabel(x)).ToArray());
}
/// <summary>Creates a switch section from pre-built labels.</summary>
public static JsSwitchSection Section(params JsSwitchLabel[] caseLabels)
{
var switchSection = new JsSwitchSection();
foreach (var caseLabel in caseLabels)
{
switchSection.Labels.Add(caseLabel);
}
return switchSection;
}
/// <summary>Appends one statement to a switch section; returns it for fluent chaining.</summary>
public static JsSwitchSection Statement(this JsSwitchSection section, JsStatement statement)
{
section.Statements.Add(statement);
return section;
}
/// <summary>Appends several statements to a switch section; returns it for fluent chaining.</summary>
public static JsSwitchSection Statements(this JsSwitchSection section, params JsStatement[] statements)
{
section.Statements.AddRange(statements);
return section;
}
/// <summary>Creates "while (condition) statement".</summary>
public static JsWhileStatement While(JsExpression condition, JsStatement statement)
{
return new JsWhileStatement(condition, statement);
}
/// <summary>Creates "do statement while (condition);".</summary>
public static JsDoWhileStatement DoWhile(JsExpression condition, JsStatement statement)
{
return new JsDoWhileStatement(condition, statement);
}
/// <summary>Creates "break;" or "break label;" when a label is supplied.</summary>
public static JsBreakStatement Break(string label = null)
{
return new JsBreakStatement(label);
}
/// <summary>Creates the empty statement ";".</summary>
public static JsEmptyStatement Empty()
{
return new JsEmptyStatement();
}
/// <summary>Creates an empty try statement; attach Catch/Finally via the fluent extensions.</summary>
public static JsTryStatement Try()
{
var result = new JsTryStatement();
return result;
}
/// <summary>Creates a catch clause with no exception variable.</summary>
public static JsCatchClause Catch()
{
var catchClause = new JsCatchClause();
return catchClause;
}
/// <summary>Creates a catch clause binding the exception to the given declarator.</summary>
public static JsCatchClause Catch(JsVariableDeclarator declaration)
{
var catchClause = new JsCatchClause(declaration);
return catchClause;
}
/// <summary>Attaches a catch clause to the try statement; returns it for fluent chaining.</summary>
public static JsTryStatement Catch(this JsTryStatement tryStatement, JsCatchClause catchClause)
{
tryStatement.Catch = catchClause;
return tryStatement;
}
/// <summary>Attaches a catch clause (built from the declarator) to the try statement.</summary>
public static JsTryStatement Catch(this JsTryStatement tryStatement, JsVariableDeclarator declaration)
{
tryStatement.Catch = Catch(declaration);
return tryStatement;
}
/// <summary>Attaches an empty finally block to the try statement; returns it for fluent chaining.</summary>
public static JsTryStatement Finally(this JsTryStatement tryStatement)
{
tryStatement.Finally = new JsBlockStatement();
return tryStatement;
}
/// <summary>Creates the ternary "condition ? ifTrue : ifFalse".</summary>
public static JsConditionalExpression Conditional(JsExpression condition, JsExpression ifTrue, JsExpression ifFalse)
{
return new JsConditionalExpression(condition, ifTrue, ifFalse);
}
/// <summary>Appends the expression (as a statement) to the block.</summary>
public static void Express(this JsBlockStatement blockStatement, JsExpression expression)
{
blockStatement.Add(Express(expression));
}
// Block-mutating conveniences: each appends a newly built statement to the target block.
/// <summary>Appends a "var" declaration for a single declarator to the block.</summary>
public static void Local(this JsBlockStatement blockStatement, JsVariableDeclarator declarators)
{
blockStatement.Add(Local(declarators));
}
/// <summary>Appends a "var" declaration for several declarators to the block.</summary>
public static void Local(this JsBlockStatement blockStatement, params JsVariableDeclarator[] declarators)
{
blockStatement.Add(Local(declarators));
}
/// <summary>Declares and initializes a named local in the block; returns the declarator so callers can reference the variable.</summary>
public static JsVariableDeclarator Local(this JsBlockStatement blockStatement, string name, JsExpression initializer)
{
var variable = Variable(name, initializer);
blockStatement.Add(Local(variable));
return variable;
}
/// <summary>Appends an existing variable declaration to the block.</summary>
public static void Local(this JsBlockStatement blockStatement, JsVariableDeclaration declaration)
{
blockStatement.Add(Local(declaration));
}
/// <summary>Appends "return expr;" to the block.</summary>
public static void Return(this JsBlockStatement blockStatement, JsExpression expression)
{
blockStatement.Add(Return(expression));
}
/// <summary>Appends the assignment "left = right;" to the block.</summary>
public static void Assign(this JsBlockStatement blockStatement, JsExpression left, JsExpression right)
{
blockStatement.Express(Assign(left, right));
}
/// <summary>Appends an if/else (expression branches) to the block.</summary>
public static void If(this JsBlockStatement blockStatement, JsExpression condition, JsExpression ifTrue, JsExpression ifFalse = null)
{
blockStatement.Add(If(condition, ifTrue, ifFalse));
}
/// <summary>Appends an if/else (statement branches) to the block.</summary>
public static void If(this JsBlockStatement blockStatement, JsExpression condition, JsStatement ifTrue, JsStatement ifFalse = null)
{
blockStatement.Add(If(condition, ifTrue, ifFalse));
}
/// <summary>Appends the call "target(args...);" to the block.</summary>
public static void Invoke(this JsBlockStatement blockStatement, JsExpression target, params JsExpression[] arguments)
{
blockStatement.Express(Invoke(target, arguments));
}
/// <summary>Creates "delete target".</summary>
public static JsDeleteExpression Delete(this JsExpression target)
{
return new JsDeleteExpression(target);
}
/// <summary>Creates "typeof type".</summary>
public static JsTypeOfExpression TypeOf(JsExpression type)
{
return new JsTypeOfExpression(type);
}
/// <summary>Creates the logical negation "!expr".</summary>
public static JsUnaryExpression Not(JsExpression expression)
{
return new JsUnaryExpression(JsUnaryOperator.LogicalNot, expression);
}
/// <summary>Creates "continue;" or "continue label;" when a label is supplied.</summary>
public static JsContinueStatement Continue(string label = null)
{
return new JsContinueStatement(label);
}
/// <summary>
/// Walks to the "logical" receiver of an expression: for an invocation it recurses
/// through member-reference targets, for a member reference it returns the target,
/// and for anything else it returns null.
/// </summary>
public static JsExpression GetLogicalTarget(this JsExpression expression)
{
if (expression is JsInvocationExpression)
{
var invocation = (JsInvocationExpression)expression;
var target = invocation.Target;
if (target is JsMemberReferenceExpression)
{
// Recurse so chained calls like a.b().c() resolve down to the root receiver.
target = target.GetLogicalTarget();
}
return target;
}
else if (expression is JsMemberReferenceExpression)
{
var memberReference = (JsMemberReferenceExpression)expression;
return memberReference.Target;
}
else
{
return null;
}
}
// Binary-operator factories: each builds "left OP right" with the corresponding JS operator.
public static JsBinaryExpression BitwiseOr(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.BitwiseOr, left, right);
}
public static JsBinaryExpression BitwiseAnd(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.BitwiseAnd, left, right);
}
public static JsBinaryExpression LogicalAnd(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.LogicalAnd, left, right);
}
public static JsBinaryExpression LessThan(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.LessThan, left, right);
}
public static JsBinaryExpression GreaterThan(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.GreaterThan, left, right);
}
public static JsBinaryExpression LogicalOr(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.LogicalOr, left, right);
}
public static JsBinaryExpression NotEqualTo(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.NotEquals, left, right);
}
public static JsBinaryExpression EqualTo(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.Equals, left, right);
}
public static JsBinaryExpression Add(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.Add, left, right);
}
public static JsBinaryExpression Subtract(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.Subtract, left, right);
}
public static JsBinaryExpression Multiply(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.Multiply, left, right);
}
public static JsBinaryExpression Divide(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.Divide, left, right);
}
public static JsBinaryExpression Modulus(this JsExpression left, JsExpression right)
{
return new JsBinaryExpression(JsBinaryOperator.Modulus, left, right);
}
/// <summary>Creates a labeled statement "label: statement".</summary>
public static JsLabeledStatement Label(string label, JsStatement statement)
{
return new JsLabeledStatement(label, statement);
}
/// <summary>Creates "expression instanceof type".</summary>
public static JsInstanceOfExpression InstanceOf(JsExpression expression, JsExpression type)
{
return new JsInstanceOfExpression(expression, type);
}
/// <summary>Creates a block statement containing the given statements, in order.</summary>
public static JsBlockStatement Block(params JsStatement[] statements)
{
var block = new JsBlockStatement();
foreach (var statement in statements)
block.Add(statement);
return block;
}
/// <summary>Marks a node as compacted (rendered without extra whitespace) and returns it.</summary>
public static T Compact<T>(this T node) where T : JsNode
{
node.IsCompacted = true;
return node;
}
/// <summary>Creates the membership test "property in obj".</summary>
public static JsInExpression In(this JsExpression property, JsExpression obj)
{
return new JsInExpression(property, obj);
}
}
}
| |
using UnityEngine;
using System.Collections;
/// <summary>
/// Locomotion states for PickupController; the numeric values are serialized
/// over the network as a byte in OnPhotonSerializeView, so they must stay stable.
/// </summary>
public enum PickupCharacterState
{
Idle = 0,
Walking = 1,
Trotting = 2,
Running = 3,
Jumping = 4,
}
// Third-person character controller for the PUN "Pickup" demo: local input drives a
// CharacterController (walk/trot/run/jump with legacy Animation crossfades); remote
// instances lerp toward a network-received position instead of simulating input.
[RequireComponent(typeof(CharacterController))]
public class PickupController : MonoBehaviour, IPunObservable
{
// Legacy Animation clips; if any required clip is missing, animations are disabled in Awake.
public AnimationClip idleAnimation;
public AnimationClip walkAnimation;
public AnimationClip runAnimation;
public AnimationClip jumpPoseAnimation;
// Upper bounds for animation playback speed per state (local players scale by actual velocity).
public float walkMaxAnimationSpeed = 0.75f;
public float trotMaxAnimationSpeed = 1.0f;
public float runMaxAnimationSpeed = 1.0f;
public float jumpAnimationSpeed = 1.15f;
public float landAnimationSpeed = 1.0f;
private Animation _animation;
public PickupCharacterState _characterState;
// The speed when walking
public float walkSpeed = 2.0f;
// after trotAfterSeconds of walking we trot with trotSpeed
public float trotSpeed = 4.0f;
// when pressing "Fire3" button (cmd) we start running
public float runSpeed = 6.0f;
public float inAirControlAcceleration = 3.0f;
// How high do we jump when pressing jump and letting go immediately
public float jumpHeight = 0.5f;
// The gravity for the character
public float gravity = 20.0f;
// The gravity in controlled descent mode
public float speedSmoothing = 10.0f;
public float rotateSpeed = 500.0f;
public float trotAfterSeconds = 3.0f;
public bool canJump = false;
private float jumpRepeatTime = 0.05f;
private float jumpTimeout = 0.15f;
private float groundedTimeout = 0.25f;
// The camera doesnt start following the target immediately but waits for a split second to avoid too much waving around.
private float lockCameraTimer = 0.0f;
// The current move direction in x-z
private Vector3 moveDirection = Vector3.zero;
// The current vertical speed
private float verticalSpeed = 0.0f;
// The current x-z move speed
private float moveSpeed = 0.0f;
// The last collision flags returned from controller.Move
private CollisionFlags collisionFlags;
// Are we jumping? (Initiated with jump button and not grounded yet)
private bool jumping = false;
private bool jumpingReachedApex = false;
// Are we moving backwards (This locks the camera to not do a 180 degree spin)
private bool movingBack = false;
// Is the user pressing any keys?
private bool isMoving = false;
// When did the user start walking (Used for going into trot after a while)
private float walkTimeStart = 0.0f;
// Last time the jump button was clicked down
private float lastJumpButtonTime = -10.0f;
// Last time we performed a jump
private float lastJumpTime = -1.0f;
// the height we jumped from (Used to determine for how long to apply extra jump power after jumping.)
//private float lastJumpStartHeight = 0.0f;
private Vector3 inAirVelocity = Vector3.zero;
private float lastGroundedTime = 0.0f;
// Approximate world velocity, derived each frame from the position delta (see Update).
Vector3 velocity = Vector3.zero;
private Vector3 lastPos;
// Target position received from the network; Vector3.zero means "none received yet".
private Vector3 remotePosition;
public bool isControllable = false;
public bool DoRotate = true;
public float RemoteSmoothing = 5;
public bool AssignAsTagObject = true;
// Detects ownership via PhotonView, registers this GameObject as the player's TagObject,
// caches the Animation component and disables animations when required clips are missing.
void Awake()
{
// PUN: automatically determine isControllable, if this GO has a PhotonView
PhotonView pv = this.gameObject.GetComponent<PhotonView>();
if (pv != null)
{
isControllable = pv.isMine;
// The pickup demo assigns this GameObject as the PhotonPlayer.TagObject. This way, we can access this character (controller, position, etc) easily
if (this.AssignAsTagObject)
{
pv.owner.TagObject = this.gameObject;
}
// please note: we change this setting on ANY PickupController if "DoRotate" is off. not only locally when it's "our" GameObject!
if (pv.observed is Transform && !DoRotate)
{
pv.onSerializeTransformOption = OnSerializeTransform.OnlyPosition;
}
}
moveDirection = transform.TransformDirection(Vector3.forward);
_animation = GetComponent<Animation>();
if (!_animation)
Debug.Log("The character you would like to control doesn't have animations. Moving her might look weird.");
if (!idleAnimation)
{
_animation = null;
Debug.Log("No idle animation found. Turning off animations.");
}
if (!walkAnimation)
{
_animation = null;
Debug.Log("No walk animation found. Turning off animations.");
}
if (!runAnimation)
{
_animation = null;
Debug.Log("No run animation found. Turning off animations.");
}
if (!jumpPoseAnimation && canJump)
{
_animation = null;
Debug.Log("No jump animation found and the character has canJump enabled. Turning off animations.");
}
}
// Per-frame driver: local input -> movement -> animation -> rotation -> landing bookkeeping.
// Remote instances skip the input/physics part and only lerp toward remotePosition.
void Update()
{
if (isControllable)
{
if (Input.GetButtonDown("Jump"))
{
lastJumpButtonTime = Time.time;
}
UpdateSmoothedMovementDirection();
// Apply gravity
// - extra power jump modifies gravity
// - controlledDescent mode modifies gravity
ApplyGravity();
// Apply jumping logic
ApplyJumping();
// Calculate actual motion
Vector3 movement = moveDirection * moveSpeed + new Vector3(0, verticalSpeed, 0) + inAirVelocity;
movement *= Time.deltaTime;
//Debug.Log(movement.x.ToString("0.000") + ":" + movement.z.ToString("0.000"));
// Move the controller
CharacterController controller = GetComponent<CharacterController>();
collisionFlags = controller.Move(movement);
}
// PUN: if a remote position is known, we smooth-move to it (being late(r) but smoother)
if (this.remotePosition != Vector3.zero)
{
transform.position = Vector3.Lerp(transform.position, this.remotePosition, Time.deltaTime * this.RemoteSmoothing);
}
// Derived velocity from frame-to-frame movement; the factor 25 appears to scale
// the per-frame delta into a per-second-ish magnitude for animation speed clamping.
velocity = (transform.position - lastPos)*25;
// ANIMATION sector
if (_animation)
{
if (_characterState == PickupCharacterState.Jumping)
{
if (!jumpingReachedApex)
{
_animation[jumpPoseAnimation.name].speed = jumpAnimationSpeed;
_animation[jumpPoseAnimation.name].wrapMode = WrapMode.ClampForever;
_animation.CrossFade(jumpPoseAnimation.name);
}
else
{
// Past the apex: play the jump pose backwards as a landing animation.
_animation[jumpPoseAnimation.name].speed = -landAnimationSpeed;
_animation[jumpPoseAnimation.name].wrapMode = WrapMode.ClampForever;
_animation.CrossFade(jumpPoseAnimation.name);
}
}
else
{
if (_characterState == PickupCharacterState.Idle)
{
_animation.CrossFade(idleAnimation.name);
}
else if (_characterState == PickupCharacterState.Running)
{
_animation[runAnimation.name].speed = runMaxAnimationSpeed;
if (this.isControllable)
{
_animation[runAnimation.name].speed = Mathf.Clamp(velocity.magnitude, 0.0f, runMaxAnimationSpeed);
}
_animation.CrossFade(runAnimation.name);
}
else if (_characterState == PickupCharacterState.Trotting)
{
_animation[walkAnimation.name].speed = trotMaxAnimationSpeed;
if (this.isControllable)
{
_animation[walkAnimation.name].speed = Mathf.Clamp(velocity.magnitude, 0.0f, trotMaxAnimationSpeed);
}
_animation.CrossFade(walkAnimation.name);
}
else if (_characterState == PickupCharacterState.Walking)
{
_animation[walkAnimation.name].speed = walkMaxAnimationSpeed;
if (this.isControllable)
{
_animation[walkAnimation.name].speed = Mathf.Clamp(velocity.magnitude, 0.0f, walkMaxAnimationSpeed);
}
_animation.CrossFade(walkAnimation.name);
}
if (_characterState != PickupCharacterState.Running)
{
_animation[runAnimation.name].time = 0.0f;
}
}
}
// ANIMATION sector
// Set rotation to the move direction
if (IsGrounded())
{
// a specialty of this controller: you can disable rotation!
if (DoRotate)
{
transform.rotation = Quaternion.LookRotation(moveDirection);
}
}
else
{
/* This causes choppy behaviour when colliding with SIDES
* Vector3 xzMove = velocity;
xzMove.y = 0;
if (xzMove.sqrMagnitude > 0.001f)
{
transform.rotation = Quaternion.LookRotation(xzMove);
}*/
}
// We are in jump mode but just became grounded
if (IsGrounded())
{
lastGroundedTime = Time.time;
inAirVelocity = Vector3.zero;
if (jumping)
{
jumping = false;
SendMessage("DidLand", SendMessageOptions.DontRequireReceiver);
}
}
lastPos = transform.position;
}
// PUN serialization: writer sends position + state (as a byte); reader stores them,
// snapping straight to the first received position instead of lerping from origin.
public void OnPhotonSerializeView(PhotonStream stream, PhotonMessageInfo info)
{
if (stream.isWriting)
{
stream.SendNext(this.transform.position);
stream.SendNext((byte)this._characterState);
}
else
{
bool initialRemotePosition = (remotePosition == Vector3.zero);
remotePosition = (Vector3)stream.ReceiveNext();
this._characterState = (PickupCharacterState)((byte)stream.ReceiveNext());
if (initialRemotePosition)
{
// avoids lerping the character from "center" to the "current" position when this client joins
this.transform.position = remotePosition;
}
}
}
// Converts raw axis input into a camera-relative, smoothed move direction/speed and
// picks the locomotion state (idle/walk/trot/run). Also applies limited air control.
void UpdateSmoothedMovementDirection()
{
Transform cameraTransform = Camera.main.transform;
bool grounded = IsGrounded();
// Forward vector relative to the camera along the x-z plane
Vector3 forward = cameraTransform.TransformDirection(Vector3.forward);
forward.y = 0;
forward = forward.normalized;
// Right vector relative to the camera
// Always orthogonal to the forward vector
Vector3 right = new Vector3(forward.z, 0, -forward.x);
float v = Input.GetAxisRaw("Vertical");
float h = Input.GetAxisRaw("Horizontal");
// Are we moving backwards or looking backwards
if (v < -0.2f)
movingBack = true;
else
movingBack = false;
bool wasMoving = isMoving;
isMoving = Mathf.Abs(h) > 0.1f || Mathf.Abs(v) > 0.1f;
// Target direction relative to the camera
Vector3 targetDirection = h * right + v * forward;
// Debug.Log("targetDirection " + targetDirection);
// Grounded controls
if (grounded)
{
// Lock camera for short period when transitioning moving & standing still
lockCameraTimer += Time.deltaTime;
if (isMoving != wasMoving)
lockCameraTimer = 0.0f;
// We store speed and direction seperately,
// so that when the character stands still we still have a valid forward direction
// moveDirection is always normalized, and we only update it if there is user input.
if (targetDirection != Vector3.zero)
{
// If we are really slow, just snap to the target direction
if (moveSpeed < walkSpeed * 0.9f && grounded)
{
moveDirection = targetDirection.normalized;
}
// Otherwise smoothly turn towards it
else
{
moveDirection = Vector3.RotateTowards(moveDirection, targetDirection, rotateSpeed * Mathf.Deg2Rad * Time.deltaTime, 1000);
moveDirection = moveDirection.normalized;
}
}
// Smooth the speed based on the current target direction
float curSmooth = speedSmoothing * Time.deltaTime;
// Choose target speed
//* We want to support analog input but make sure you cant walk faster diagonally than just forward or sideways
float targetSpeed = Mathf.Min(targetDirection.magnitude, 1.0f);
_characterState = PickupCharacterState.Idle;
// Pick speed modifier
if ((Input.GetKey(KeyCode.LeftShift) | Input.GetKey(KeyCode.RightShift)) && isMoving)
{
targetSpeed *= runSpeed;
_characterState = PickupCharacterState.Running;
}
else if (Time.time - trotAfterSeconds > walkTimeStart)
{
targetSpeed *= trotSpeed;
_characterState = PickupCharacterState.Trotting;
}
else if (isMoving)
{
targetSpeed *= walkSpeed;
_characterState = PickupCharacterState.Walking;
}
moveSpeed = Mathf.Lerp(moveSpeed, targetSpeed, curSmooth);
// Reset walk time start when we slow down
if (moveSpeed < walkSpeed * 0.3f)
walkTimeStart = Time.time;
}
// In air controls
else
{
// Lock camera while in air
if (jumping)
lockCameraTimer = 0.0f;
if (isMoving)
inAirVelocity += targetDirection.normalized * Time.deltaTime * inAirControlAcceleration;
}
}
// Starts a jump when grounded, canJump is set, and the jump button was pressed recently.
void ApplyJumping()
{
// Prevent jumping too fast after each other
if (lastJumpTime + jumpRepeatTime > Time.time)
return;
if (IsGrounded())
{
// Jump
// - Only when pressing the button down
// - With a timeout so you can press the button slightly before landing
if (canJump && Time.time < lastJumpButtonTime + jumpTimeout)
{
verticalSpeed = CalculateJumpVerticalSpeed(jumpHeight);
SendMessage("DidJump", SendMessageOptions.DontRequireReceiver);
}
}
}
// Integrates gravity into verticalSpeed and fires DidJumpReachApex once per jump.
void ApplyGravity()
{
if (isControllable) // don't move player at all if not controllable.
{
// Apply gravity
//bool jumpButton = Input.GetButton("Jump");
// When we reach the apex of the jump we send out a message
if (jumping && !jumpingReachedApex && verticalSpeed <= 0.0f)
{
jumpingReachedApex = true;
SendMessage("DidJumpReachApex", SendMessageOptions.DontRequireReceiver);
}
if (IsGrounded())
verticalSpeed = 0.0f;
else
verticalSpeed -= gravity * Time.deltaTime;
}
}
float CalculateJumpVerticalSpeed(float targetJumpHeight)
{
// From the jump height and gravity we deduce the upwards speed
// for the character to reach at the apex.
return Mathf.Sqrt(2 * targetJumpHeight * gravity);
}
// SendMessage target invoked by ApplyJumping; resets per-jump state.
void DidJump()
{
jumping = true;
jumpingReachedApex = false;
lastJumpTime = Time.time;
//lastJumpStartHeight = transform.position.y;
lastJumpButtonTime = -10;
_characterState = PickupCharacterState.Jumping;
}
void OnControllerColliderHit(ControllerColliderHit hit)
{
// Debug.DrawRay(hit.point, hit.normal);
if (hit.moveDirection.y > 0.01f)
return;
}
// Simple accessors used by camera/game scripts.
public float GetSpeed()
{
return moveSpeed;
}
public bool IsJumping()
{
return jumping;
}
public bool IsGrounded()
{
return (collisionFlags & CollisionFlags.CollidedBelow) != 0;
}
public Vector3 GetDirection()
{
return moveDirection;
}
public bool IsMovingBackwards()
{
return movingBack;
}
public float GetLockCameraTimer()
{
return lockCameraTimer;
}
// NOTE: reads raw input directly rather than the cached isMoving flag.
public bool IsMoving()
{
return Mathf.Abs(Input.GetAxisRaw("Vertical")) + Mathf.Abs(Input.GetAxisRaw("Horizontal")) > 0.5f;
}
public bool HasJumpReachedApex()
{
return jumpingReachedApex;
}
// True if we were grounded within the last groundedTimeout seconds.
public bool IsGroundedWithTimeout()
{
return lastGroundedTime + groundedTimeout > Time.time;
}
public void Reset()
{
gameObject.tag = "Player";
}
}
| |
//
// Copyright (C) DataStax Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using Cassandra.IntegrationTests.TestClusterManagement;
using Cassandra.Tests;
using NUnit.Framework;
namespace Cassandra.IntegrationTests.TestBase
{
/// <summary>
/// A number of static fields/methods handy for tests.
/// </summary>
internal static class TestUtils
{
// Cassandra 4.0 version marker — presumably used by version-gated tests; verify against callers.
public static readonly Version Version40 = new Version(4, 0);
// Default wait between retry iterations (ms), used by WaitForMeta.
private const int DefaultSleepIterationMs = 1000;
// CQL templates; placeholders are filled via string.Format ({{ }} escapes literal braces).
public static readonly string CreateKeyspaceSimpleFormat =
"CREATE KEYSPACE \"{0}\" WITH replication = {{ 'class' : 'SimpleStrategy', 'replication_factor' : {1} }}";
public static readonly string CreateKeyspaceGenericFormat = "CREATE KEYSPACE {0} WITH replication = {{ 'class' : '{1}', {2} }}";
public static readonly string CreateTableSimpleFormat = "CREATE TABLE {0} (k text PRIMARY KEY, t text, i int, f float)";
public const string CreateTableAllTypes = @"
create table {0} (
id uuid primary key,
ascii_sample ascii,
text_sample text,
int_sample int,
bigint_sample bigint,
float_sample float,
double_sample double,
decimal_sample decimal,
blob_sample blob,
boolean_sample boolean,
timestamp_sample timestamp,
inet_sample inet,
timeuuid_sample timeuuid,
map_sample map<text, text>,
list_sample list<text>,
set_sample set<text>);
";
public const string CREATE_TABLE_TIME_SERIES = @"
create table {0} (
id uuid,
event_time timestamp,
text_sample text,
int_sample int,
bigint_sample bigint,
float_sample float,
double_sample double,
decimal_sample decimal,
blob_sample blob,
boolean_sample boolean,
timestamp_sample timestamp,
inet_sample inet,
PRIMARY KEY(id, event_time));
";
public static readonly string INSERT_FORMAT = "INSERT INTO {0} (k, t, i, f) VALUES ('{1}', '{2}', {3}, {4})";
public static readonly string SELECT_ALL_FORMAT = "SELECT * FROM {0}";
public static readonly string SELECT_WHERE_FORMAT = "SELECT * FROM {0} WHERE {1}";
/// <summary>Generates a random test-cluster name ("test_" + 12 alphanumerics).</summary>
public static string GetTestClusterNameBasedOnRandomString()
{
return "test_" + Randomm.RandomAlphaNum(12);
}
/// <summary>Generates a random, collision-unlikely keyspace name for a test.</summary>
public static string GetUniqueKeyspaceName()
{
return "TestKeySpace_" + Randomm.RandomAlphaNum(12);
}
/// <summary>Generates a random, collision-unlikely table name for a test.</summary>
public static string GetUniqueTableName()
{
return "TestTable_" + Randomm.RandomAlphaNum(12);
}
/// <summary>Drops the keyspace if it exists; no-op when the session is null.</summary>
public static void TryToDeleteKeyspace(ISession session, string keyspaceName)
{
if (session != null)
session.DeleteKeyspaceIfExists(keyspaceName);
}
/// <summary>
/// Probes for a table by selecting one row from it. Returns true if the query
/// succeeds; does NOT return false on absence — the Execute call propagates an
/// InvalidQueryException when the table/keyspace does not exist.
/// </summary>
public static bool TableExists(ISession session, string keyspaceName, string tableName, bool caseSensitive=false)
{
var cql = caseSensitive ? string.Format(@"SELECT * FROM ""{0}"".""{1}"" LIMIT 1", keyspaceName, tableName)
: string.Format("SELECT * FROM {0}.{1} LIMIT 1", keyspaceName, tableName);
//it will throw a InvalidQueryException if the table/keyspace does not exist
session.Execute(cql);
return true;
}
/// <summary>
/// Creates a pre-configured Cluster builder for tests: a longer connect timeout
/// (20s) when CCM runs under WSL, 10s otherwise, and beta protocol versions when
/// the test cluster manager requires them.
/// </summary>
public static Builder NewBuilder()
{
var builder = Cluster.Builder();
if (TestClusterManager.CcmUseWsl)
{
builder = builder.WithSocketOptions(new SocketOptions().SetConnectTimeoutMillis(20000));
}
else
{
builder = builder.WithSocketOptions(new SocketOptions().SetConnectTimeoutMillis(10000));
}
return TestClusterManager.ShouldEnableBetaProtocolVersion() ? builder.WithBetaProtocolVersions() : builder;
}
/// <summary>
/// Returns the raw in-memory bytes of <paramref name="str"/>: its UTF-16 code
/// units copied verbatim (two bytes per char, platform/little-endian order).
/// </summary>
public static byte[] GetBytes(string str)
{
char[] chars = str.ToCharArray();
var buffer = new byte[chars.Length * sizeof(char)];
// Raw memory copy — intentionally not Encoding.Unicode, which would
// substitute replacement characters for unpaired surrogates.
Buffer.BlockCopy(chars, 0, buffer, 0, buffer.Length);
return buffer;
}
/// <summary>
/// Validates that the bootstrapped node was added to the cluster and was queried.
/// Polls the cluster metadata (up to 120s) until the expected node count is reached,
/// then polls queries (up to another 120s) until the new host shows up as a queried
/// coordinator. Fails the test via NUnit asserts otherwise.
/// </summary>
public static void ValidateBootStrappedNodeIsQueried(ITestCluster testCluster, int expectedTotalNodeCount, string newlyBootstrappedHost)
{
var hostsQueried = new List<string>();
DateTime timeInTheFuture = DateTime.Now.AddSeconds(120);
// Wait for the metadata to reflect the expected node count, recording which
// coordinator answered each probe query along the way.
// (Use the Count property; the previous Count() extension-method call on the
// materialized list was a redundant enumeration-style call.)
while (testCluster.Cluster.Metadata.AllHosts().ToList().Count < expectedTotalNodeCount && DateTime.Now < timeInTheFuture)
{
var rs = testCluster.Session.Execute("SELECT key FROM system.local");
hostsQueried.Add(rs.Info.QueriedHost.Address.ToString());
Thread.Sleep(500);
}
Assert.That(testCluster.Cluster.Metadata.AllHosts().ToList().Count, Is.EqualTo(expectedTotalNodeCount));
timeInTheFuture = DateTime.Now.AddSeconds(120);
while (!hostsQueried.Contains(newlyBootstrappedHost) && DateTime.Now < timeInTheFuture)
{
var rs = testCluster.Session.Execute("SELECT key FROM system.local");
hostsQueried.Add(rs.Info.QueriedHost.Address.ToString());
Thread.Sleep(500);
}
// Validate host was queried (entries are already strings; no ToString needed).
Assert.True(hostsQueried.Any(ip => ip == newlyBootstrappedHost), "Newly bootstrapped node was not queried!");
}
/// <summary>
/// Determines if the test should use a remote ccm instance
/// </summary>
// Hard-coded to false: remote CCM support appears disabled; tests always run a local CCM.
public static bool UseRemoteCcm
{
get { return false; }
}
/// <summary>
/// Blocks until a TCP connection to nodeHost:nodePort succeeds (via IsNodeReachable),
/// retrying every 500ms. Throws after maxSecondsToKeepTrying seconds without success.
/// </summary>
public static void WaitForUp(string nodeHost, int nodePort, int maxSecondsToKeepTrying)
{
int msSleepPerIteration = 500;
DateTime futureDateTime = DateTime.Now.AddSeconds(maxSecondsToKeepTrying);
while (DateTime.Now < futureDateTime)
{
if (TestUtils.IsNodeReachable(IPAddress.Parse(nodeHost), nodePort))
{
return;
}
Trace.TraceInformation(
string.Format("Still waiting for node host: {0} to be available for connection, " +
" waiting another {1} MS ... ", nodeHost + ":" + nodePort, msSleepPerIteration));
Thread.Sleep(msSleepPerIteration);
}
throw new Exception("Could not connect to node: " + nodeHost + ":" + nodePort + " after " + maxSecondsToKeepTrying + " seconds!");
}
/// <summary>
/// Polls the cluster metadata up to maxTry times (1s apart) until nodeHost is
/// observed in the expected state: present-and-up when waitForUp is true, or
/// down/absent when false. Returns early if the whole cluster becomes unreachable.
/// Logs an error (does not throw) when the state is never reached.
/// </summary>
private static void WaitForMeta(string nodeHost, Cluster cluster, int maxTry, bool waitForUp)
{
string expectedFinalNodeState = "UP";
if (!waitForUp)
expectedFinalNodeState = "DOWN";
for (int i = 0; i < maxTry; ++i)
{
try
{
// Are all nodes in the cluster accounted for?
bool disconnected = !cluster.RefreshSchema();
if (disconnected)
{
string warnStr = "While waiting for host " + nodeHost + " to be " + expectedFinalNodeState + ", the cluster is now totally down, returning now ... ";
Trace.TraceWarning(warnStr);
return;
}
Metadata metadata = cluster.Metadata;
foreach (Host host in metadata.AllHosts())
{
// NOTE(review): hostFound is scoped to each iteration, so the "!hostFound"
// branch fires for every host that is not nodeHost — with waitForUp == false
// this returns on the first non-matching host. Looks suspect; confirm intent
// before changing, since tests may rely on the current early return.
bool hostFound = false;
if (host.Address.ToString() == nodeHost)
{
hostFound = true;
if (host.IsUp && waitForUp)
{
Trace.TraceInformation("Verified according to cluster meta that host " + nodeHost + " is " + expectedFinalNodeState + ", returning now ... ");
return;
}
Trace.TraceWarning("We're waiting for host " + nodeHost + " to be " + expectedFinalNodeState);
}
// Is the host even in the meta list?
if (!hostFound)
{
if (!waitForUp)
{
Trace.TraceInformation("Verified according to cluster meta that host " + host.Address + " is not available in the MetaData hosts list, returning now ... ");
return;
}
else
Trace.TraceWarning("We're waiting for host " + nodeHost + " to be " + expectedFinalNodeState + ", but this host was not found in the MetaData hosts list!");
}
}
}
catch (Exception e)
{
// A fully-unreachable cluster surfaces as NoHostAvailable; when we are waiting
// for DOWN that actually confirms the expected state.
if (e.Message.Contains("None of the hosts tried for query are available") && !waitForUp)
{
Trace.TraceInformation("Verified according to cluster meta that host " + nodeHost + " is not available in the MetaData hosts list, returning now ... ");
return;
}
Trace.TraceInformation("Exception caught while waiting for meta data: " + e.Message);
}
Trace.TraceWarning("Waiting for node host: " + nodeHost + " to be " + expectedFinalNodeState);
Thread.Sleep(TestUtils.DefaultSleepIterationMs);
}
string errStr = "Node host should have been " + expectedFinalNodeState + " but was not after " + maxTry + " tries!";
Trace.TraceError(errStr);
}
/// <summary>
/// Waits for the given node to be reported UP in the cluster metadata
/// (delegates to the private WaitFor with waitForDead = false).
/// </summary>
public static void WaitFor(string node, Cluster cluster, int maxTry)
{
    TestUtils.WaitFor(node, cluster, maxTry, false, false);
}
/// <summary>
/// Waits for the given node to be reported DOWN in the cluster metadata
/// (delegates to the private WaitFor with waitForDead = true).
/// </summary>
public static void WaitForDown(string node, Cluster cluster, int maxTry)
{
    TestUtils.WaitFor(node, cluster, maxTry, true, false);
}
/// <summary>
/// Waits for the given node to be decommissioned (delegates to the private WaitFor
/// with waitForDead = true and waitForOut = true).
/// NOTE(review): the camelCase name breaks the PascalCase convention of the other
/// public members, but renaming it would break existing callers, so it is kept.
/// </summary>
public static void waitForDecommission(string node, Cluster cluster, int maxTry)
{
    TestUtils.WaitFor(node, cluster, maxTry, true, true);
}
/// <summary>
/// Waits (up to 90 tries) for the node to be reported down, then sleeps an extra
/// <paramref name="waitTime"/> seconds to let the cluster settle.
/// </summary>
public static void WaitForDownWithWait(String node, Cluster cluster, int waitTime)
{
    TestUtils.WaitFor(node, cluster, 90, true, false);
    // FIXME: Once stop() works, remove this line
    try
    {
        Thread.Sleep(waitTime * 1000);
    }
    catch (ThreadInterruptedException e)
    {
        // Fix: Thread.Sleep can only throw ThreadInterruptedException; the previous
        // catch of InvalidQueryException was unreachable dead code.
        Debug.Write(e.StackTrace);
    }
}
/// <summary>
/// Core wait helper: waits for the node to be UP when waitForDead is false,
/// otherwise for it to be DOWN/absent from the metadata host list.
/// NOTE(review): the waitForOut parameter is not used by this implementation —
/// confirm whether decommission should be treated differently from a plain "down" wait.
/// </summary>
private static void WaitFor(string node, Cluster cluster, int maxTry, bool waitForDead, bool waitForOut)
{
    TestUtils.WaitForMeta(node, cluster, maxTry, !waitForDead);
}
/// <summary>
/// Spawns a new process (platform independent) with redirected stdout/stderr and
/// waits up to <paramref name="timeout"/> ms for it to finish.
/// </summary>
/// <param name="processName">Executable to launch.</param>
/// <param name="args">Command-line arguments.</param>
/// <param name="timeout">Milliseconds to wait for exit and for both output streams to drain.</param>
/// <returns>
/// A ProcessOutput whose OutputText accumulates BOTH stdout and stderr lines;
/// ExitCode is the process exit code, or -1 when the process or its streams timed out.
/// </returns>
public static ProcessOutput ExecuteProcess(string processName, string args, int timeout = 300000)
{
    var output = new ProcessOutput();
    using (var process = new Process())
    {
        process.StartInfo.FileName = processName;
        process.StartInfo.Arguments = args;
        process.StartInfo.RedirectStandardOutput = true;
        process.StartInfo.RedirectStandardError = true;
        //Hide the python window if possible
        process.StartInfo.UseShellExecute = false;
        process.StartInfo.CreateNoWindow = true;
        process.StartInfo.WindowStyle = ProcessWindowStyle.Hidden;
        // One wait handle per stream: a null Data event is the framework's
        // end-of-stream sentinel, at which point the handle is signalled so the
        // main thread knows the stream is fully drained.
        using (var outputWaitHandle = new AutoResetEvent(false))
        using (var errorWaitHandle = new AutoResetEvent(false))
        {
            process.OutputDataReceived += (sender, e) =>
            {
                if (e.Data == null)
                {
                    // End of stdout.
                    try
                    {
                        outputWaitHandle.Set();
                    }
                    catch
                    {
                        //probably is already disposed
                    }
                }
                else
                {
                    output.OutputText.AppendLine(e.Data);
                }
            };
            process.ErrorDataReceived += (sender, e) =>
            {
                if (e.Data == null)
                {
                    // End of stderr.
                    try
                    {
                        errorWaitHandle.Set();
                    }
                    catch
                    {
                        //probably is already disposed
                    }
                }
                else
                {
                    // stderr lines are appended to the SAME buffer as stdout.
                    output.OutputText.AppendLine(e.Data);
                }
            };
            process.Start();
            process.BeginOutputReadLine();
            process.BeginErrorReadLine();
            // Wait for exit AND for both streams to signal end-of-stream, so no
            // trailing output is lost.
            if (process.WaitForExit(timeout) &&
                outputWaitHandle.WaitOne(timeout) &&
                errorWaitHandle.WaitOne(timeout))
            {
                // Process completed.
                output.ExitCode = process.ExitCode;
            }
            else
            {
                // Timed out.
                output.ExitCode = -1;
            }
        }
    }
    return output;
}
/// <summary>
/// Runs a ccm command against the given config directory and returns its output.
/// On Windows the command is routed through cmd.exe ("ccm" on the PATH); elsewhere
/// the ccm binary at /usr/local/bin/ccm is invoked directly.
/// </summary>
public static ProcessOutput ExecuteLocalCcm(string ccmArgs, string ccmConfigDir, int timeout = 300000, bool throwOnProcessError = false)
{
    string escapedConfigDir = TestUtils.EscapePath(ccmConfigDir);
    string arguments = ccmArgs + " --config-dir=" + escapedConfigDir;
    Trace.TraceInformation("Executing ccm: " + ccmArgs);
    string executable;
    if (TestUtils.IsWin)
    {
        executable = "cmd.exe";
        arguments = "/c ccm " + arguments;
    }
    else
    {
        executable = "/usr/local/bin/ccm";
    }
    ProcessOutput result = TestUtils.ExecuteProcess(executable, arguments, timeout);
    if (throwOnProcessError)
    {
        TestUtils.ValidateOutput(result);
    }
    return result;
}
/// <summary>
/// True when running on Windows; delegates to TestHelper.IsWin.
/// </summary>
public static bool IsWin
{
    get { return TestHelper.IsWin; }
}
/// <summary>
/// Throws a TestInfrastructureException when the process output carries a
/// non-zero exit code; otherwise does nothing.
/// </summary>
private static void ValidateOutput(ProcessOutput output)
{
    if (output.ExitCode == 0)
    {
        return;
    }
    throw new TestInfrastructureException(string.Format("Process exited in error {0}", output.ToString()));
}
/// <summary>
/// Starts a Cassandra cluster with the name, version and amount of nodes provided.
/// </summary>
/// <param name="ccmConfigDir">Path to the location where the cluster will be created</param>
/// <param name="cassandraVersion">Cassandra version in the form of MAJOR.MINOR.PATCH semver</param>
/// <param name="nodeLength">amount of nodes in the cluster</param>
/// <param name="secondDcNodeLength">amount of nodes to add the second DC</param>
/// <param name="clusterName">name of the ccm cluster to create</param>
/// <returns>The output of the last ccm command executed; a non-zero ExitCode signals failure.</returns>
public static ProcessOutput ExecuteLocalCcmClusterStart(string ccmConfigDir, string cassandraVersion, int nodeLength = 1, int secondDcNodeLength = 0, string clusterName = "test")
{
    //Starting ccm cluster involves:
    // 1.- Getting the Apache Cassandra Distro
    // 2.- Compiling it
    // 3.- Fill the config files
    // 4.- Starting each node.
    //Considerations:
    // As steps 1 and 2 can take a while, try to fail fast (2 sec) by doing a "ccm list"
    // Also, the process can exit before the nodes are actually up: Execute ccm status until they are up
    var totalNodeLength = nodeLength + secondDcNodeLength;
    //Only if ccm list succedes, create the cluster and continue.
    var output = TestUtils.ExecuteLocalCcm("list", ccmConfigDir, 2000);
    if (output.ExitCode != 0)
    {
        return output;
    }
    var ccmCommand = string.Format("create {0} -v {1}", clusterName, cassandraVersion);
    //When creating a cluster, it could download the Cassandra binaries from the internet.
    //Give enough time = 3 minutes.
    var timeout = 180000;
    output = TestUtils.ExecuteLocalCcm(ccmCommand, ccmConfigDir, timeout);
    if (output.ExitCode != 0)
    {
        return output;
    }
    if (secondDcNodeLength > 0)
    {
        ccmCommand = string.Format("populate -n {0}:{1}", nodeLength, secondDcNodeLength);
    }
    else
    {
        ccmCommand = "populate -n " + nodeLength;
    }
    var populateOutput = TestUtils.ExecuteLocalCcm(ccmCommand, ccmConfigDir, 300000);
    if (populateOutput.ExitCode != 0)
    {
        return populateOutput;
    }
    output.OutputText.AppendLine(populateOutput.ToString());
    var startOutput = TestUtils.ExecuteLocalCcm("start", ccmConfigDir);
    if (startOutput.ExitCode != 0)
    {
        return startOutput;
    }
    output.OutputText.AppendLine(startOutput.ToString());
    //Nodes are starting, but we dont know for sure if they are have started.
    var allNodesAreUp = false;
    var safeCounter = 0;
    while (!allNodesAreUp && safeCounter < 10)
    {
        var statusOutput = TestUtils.ExecuteLocalCcm("status", ccmConfigDir, 1000);
        if (statusOutput.ExitCode != 0)
        {
            //Something went wrong
            output = statusOutput;
            break;
        }
        //Analyze the status output to see if all nodes are up
        if (Regex.Matches(statusOutput.OutputText.ToString(), "UP", RegexOptions.Multiline).Count == totalNodeLength)
        {
            //All nodes are up according to ccm; now wait until each node's log
            //shows it is actually accepting CQL clients.
            for (int x = 1; x <= totalNodeLength; x++)
            {
                var foundText = false;
                var sw = new Stopwatch();
                sw.Start();
                // Fix: build the log path with Path.Combine instead of a hard-coded
                // "\\" separator so the check also works on non-Windows hosts.
                var logFilePath = Path.Combine(ccmConfigDir, clusterName, "node" + x, "logs", "system.log");
                while (sw.ElapsedMilliseconds < 180000)
                {
                    var logFileText = TestUtils.TryReadAllTextNoLock(logFilePath);
                    if (Regex.IsMatch(logFileText, "listening for CQL clients", RegexOptions.Multiline))
                    {
                        foundText = true;
                        break;
                    }
                    // Fix: the previous version busy-spun re-reading the log file at
                    // 100% CPU for up to 3 minutes; poll at a gentle interval instead.
                    Thread.Sleep(500);
                }
                if (!foundText)
                {
                    throw new TestInfrastructureException(string.Format("node{0} did not properly start", x));
                }
            }
            allNodesAreUp = true;
        }
        safeCounter++;
    }
    return output;
}
/// <summary>
/// Stops the cluster and removes the config files.
/// </summary>
/// <returns>
/// The output of "ccm stop" when it fails, otherwise the output of "ccm remove".
/// </returns>
public static ProcessOutput ExecuteLocalCcmClusterRemove(string ccmConfigDir)
{
    var stopOutput = TestUtils.ExecuteLocalCcm("stop", ccmConfigDir);
    return stopOutput.ExitCode == 0
        ? TestUtils.ExecuteLocalCcm("remove", ccmConfigDir)
        : stopOutput;
}
/// <summary>
/// Reads a text file without taking an exclusive lock (FileShare.ReadWrite), so
/// it can read logs still being written by another process.
/// </summary>
/// <returns>The file contents, or an empty string when the file cannot be read.</returns>
public static string TryReadAllTextNoLock(string fileName)
{
    try
    {
        using (var stream = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (var reader = new StreamReader(stream))
        {
            return reader.ReadToEnd();
        }
    }
    catch
    {
        // Best effort: any failure (missing file, access denied, ...) yields "".
        return "";
    }
}
// Cache of File.Exists results, keyed by path. Entries are never invalidated,
// so a file created/deleted after the first query is not re-checked.
private static Dictionary<string, bool> _existsCache = new Dictionary<string, bool>();
/// <summary>
/// Checks that the file exists and caches the result in a static variable.
/// </summary>
public static bool FileExists(string path)
{
    bool exists;
    // Fix: single TryGetValue lookup instead of ContainsKey + two indexer reads.
    if (!_existsCache.TryGetValue(path, out exists))
    {
        exists = File.Exists(path);
        _existsCache[path] = exists;
    }
    return exists;
}
/// <summary>
/// Adds double quotes to the path in case it contains spaces.
/// </summary>
/// <exception cref="ArgumentNullException">When path is null.</exception>
public static string EscapePath(string path)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    // Quote only when a space is present; otherwise return the path untouched.
    return path.IndexOf(' ') >= 0 ? "\"" + path + "\"" : path;
}
/// <summary>
/// Create a temporary directory inside OS temp path and returns the name of path
/// of the newly created directory. The "ccm-" prefix plus a random file name keeps
/// parallel runs from colliding.
/// </summary>
public static string CreateTempDirectory()
{
    string path = Path.Combine(Path.GetTempPath(), "ccm-" + Path.GetRandomFileName());
    Directory.CreateDirectory(path);
    return path;
}
//public static void CcmBootstrapNode(CcmCluster ccmCluster, int node, string dc = null)
//{
// ProcessOutput output = null;
// if (dc == null)
// {
// output = ccmCluster.CcmBridge.ExecuteCcm(string.Format("add node{0} -i {1}{2} -j {3} -b", node, Options.Default.IP_PREFIX, node, 7000 + 100 * node));
// }
// else
// {
// output = ccmCluster.CcmBridge.ExecuteCcm(string.Format("add node{0} -i {1}{2} -j {3} -b -d {4}", node, Options.Default.IP_PREFIX, node, 7000 + 100 * node, dc));
// }
// if (output.ExitCode != 0)
// {
// throw new TestInfrastructureException("Local ccm could not add node: " + output.ToString());
// }
//}
/// <summary>
/// Runs "ccm node{N} decommission" against the cluster's config directory.
/// The command's output (and exit code) is discarded.
/// </summary>
public static void CcmDecommissionNode(CcmClusterInfo info, int node)
{
    TestUtils.ExecuteLocalCcm(string.Format("node{0} decommission", node), info.ConfigDir);
}
/// <summary>
/// Determines if a TCP connection can be made to the node at the given port
/// (default 9042, the CQL native protocol port).
/// </summary>
public static bool IsNodeReachable(IPAddress ip, int port = 9042)
{
    var endpoint = new IPEndPoint(ip, port);
    using (var socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
    {
        try
        {
            socket.Connect(endpoint);
        }
        catch
        {
            // Any connect failure (refused, unreachable, timeout) means "not reachable".
            return false;
        }
        return true;
    }
}
/// <summary>
/// Polls (every 500 ms, up to maxRetries times) until all nodes report a single
/// schema version. Returns immediately for a single-node cluster. When
/// ignoreDownNodes is true, each node that is not considerably up is allowed to
/// contribute one extra distinct schema version.
/// </summary>
/// <exception cref="Exception">
/// Thrown only when throwOnMaxRetries is true and agreement was never reached.
/// </exception>
public static void WaitForSchemaAgreement(
    ICluster cluster, bool ignoreDownNodes = true, bool throwOnMaxRetries = false, int maxRetries = 20)
{
    var hostsLength = cluster.AllHosts().Count;
    if (hostsLength == 1)
    {
        // A single node always agrees with itself.
        return;
    }
    var cc = cluster.Metadata.ControlConnection;
    var counter = 0;
    // Nodes counted as down once, before polling begins.
    var nodesDown = ignoreDownNodes ? cluster.AllHosts().Count(h => !h.IsConsiderablyUp) : 0;
    while (counter++ < maxRetries)
    {
        Trace.TraceInformation("Waiting for test schema agreement");
        Thread.Sleep(500);
        var schemaVersions = new List<Guid>();
        //peers
        schemaVersions.AddRange(cc.Query("SELECT peer, schema_version FROM system.peers").Select(r => r.GetValue<Guid>("schema_version")));
        //local
        schemaVersions.Add(cc.Query("SELECT schema_version FROM system.local").Select(r => r.GetValue<Guid>("schema_version")).First());
        var differentSchemas = schemaVersions.Distinct().Count();
        if (differentSchemas <= 1 + nodesDown)
        {
            //There is 1 schema version or 1 + nodes that are considered as down
            return;
        }
    }
    if (throwOnMaxRetries)
    {
        throw new Exception("Reached max attempts for obtaining a single schema version from all nodes.");
    }
}
/// <summary>
/// Convenience overload: waits for schema agreement on the cluster held by the
/// given CcmClusterInfo, using the default retry/ignore-down settings.
/// </summary>
public static void WaitForSchemaAgreement(CcmClusterInfo clusterInfo)
{
    TestUtils.WaitForSchemaAgreement(clusterInfo.Cluster);
}
/// <summary>
/// Connects a fresh cluster instance to the initial contact point and asserts that
/// every host in the metadata reports exactly the expected workloads
/// (order-insensitive, via NUnit CollectionAssert.AreEquivalent).
/// NOTE(review): TestUtils.NewBuilder is defined elsewhere — presumably a
/// preconfigured Cluster.Builder; confirm its defaults when relying on this check.
/// </summary>
public static void VerifyCurrentClusterWorkloads(string[] expectedWorkloads)
{
    using (var cluster = TestUtils.NewBuilder()
        .AddContactPoint(TestClusterManager.InitialContactPoint)
        .Build())
    {
        cluster.Connect();
        foreach (var host in cluster.Metadata.AllHosts())
        {
            CollectionAssert.AreEquivalent(expectedWorkloads, host.Workloads);
        }
    }
}
}
/// <summary>
/// Represents a result from executing an external process: its exit code plus the
/// accumulated stdout/stderr text.
/// </summary>
public class ProcessOutput
{
    // Exit code of the process; Int32.MinValue until one is recorded.
    public int ExitCode { get; set; }
    // Buffer that receives stdout/stderr lines as they arrive.
    public StringBuilder OutputText { get; set; }
    // Optional replacement text; when set it takes precedence over OutputText in ToString().
    private string Output { get; set; }

    public ProcessOutput()
    {
        this.ExitCode = Int32.MinValue;
        this.OutputText = new StringBuilder();
    }

    public override string ToString()
    {
        string text = this.Output;
        if (text == null)
        {
            text = this.OutputText.ToString();
        }
        return
            "Exit Code: " + this.ExitCode + Environment.NewLine +
            "Output Text: " + text + Environment.NewLine;
    }

    /// <summary>Overrides the buffered text used by ToString().</summary>
    public void SetOutput(string output)
    {
        this.Output = output;
    }
}
/// <summary>
/// Groups the handles needed to drive a ccm-backed test cluster: the driver
/// Cluster, an open Session, and the ccm config directory the cluster lives in.
/// </summary>
public class CcmClusterInfo
{
    public Cluster Cluster { get; set; }
    public ISession Session { get; set; }
    // Passed as --config-dir to every ccm invocation for this cluster.
    public string ConfigDir { get; set; }
}
}
| |
#if (UNITY_WINRT || UNITY_WP_8_1) && !UNITY_EDITOR && !UNITY_WP8
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.ComponentModel;
using System.Globalization;
using System.Reflection;
using System.Security;
using Newtonsoft.Json.Utilities;
using System.Linq;
using System.Runtime.Serialization;
namespace Newtonsoft.Json.Serialization
{
/// <summary>
/// Reflection helpers used by the serializer to discover serialization metadata:
/// Json*Attribute / DataContract / DataMember attributes and JsonConverter types,
/// with caching where lookups are hot.
/// </summary>
internal static class JsonTypeReflector
{
    private static bool? _dynamicCodeGeneration;
    private static bool? _fullyTrusted;

    // Reserved JSON property names used for reference and type preservation.
    public const string IdPropertyName = "$id";
    public const string RefPropertyName = "$ref";
    public const string TypePropertyName = "$type";
    public const string ValuePropertyName = "$value";
    public const string ArrayValuesPropertyName = "$values";

    // Naming conventions for conditional-serialization members
    // (e.g. ShouldSerializeFoo() / FooSpecified).
    public const string ShouldSerializePrefix = "ShouldSerialize";
    public const string SpecifiedPostfix = "Specified";

    // Caches the resolved JsonConverter type per attribute provider (type or member).
    private static readonly ThreadSafeStore<object, Type> JsonConverterTypeCache = new ThreadSafeStore<object, Type>(GetJsonConverterTypeFromAttribute);

    /// <summary>Cached lookup of the JsonContainerAttribute applied to a type.</summary>
    public static JsonContainerAttribute GetJsonContainerAttribute(Type type)
    {
        return CachedAttributeGetter<JsonContainerAttribute>.GetAttribute(type);
    }

    /// <summary>Container attribute narrowed to JsonObjectAttribute, or null.</summary>
    public static JsonObjectAttribute GetJsonObjectAttribute(Type type)
    {
        return GetJsonContainerAttribute(type) as JsonObjectAttribute;
    }

    /// <summary>Container attribute narrowed to JsonArrayAttribute, or null.</summary>
    public static JsonArrayAttribute GetJsonArrayAttribute(Type type)
    {
        return GetJsonContainerAttribute(type) as JsonArrayAttribute;
    }

    /// <summary>Container attribute narrowed to JsonDictionaryAttribute, or null.</summary>
    public static JsonDictionaryAttribute GetJsonDictionaryAttribute(Type type)
    {
        return GetJsonContainerAttribute(type) as JsonDictionaryAttribute;
    }

    /// <summary>
    /// Finds a DataContractAttribute on the type or any of its base types.
    /// </summary>
    public static DataContractAttribute GetDataContractAttribute(Type type)
    {
        // DataContractAttribute does not have inheritance
        // so walk the base-type chain manually.
        Type currentType = type;
        while (currentType != null)
        {
            DataContractAttribute result = CachedAttributeGetter<DataContractAttribute>.GetAttribute(currentType);
            if (result != null)
                return result;
            currentType = currentType.BaseType();
        }
        return null;
    }

    /// <summary>
    /// Finds a DataMemberAttribute on a field or property; for virtual properties
    /// the base-class declarations are also searched.
    /// </summary>
    public static DataMemberAttribute GetDataMemberAttribute(MemberInfo memberInfo)
    {
        // DataMemberAttribute does not have inheritance
        // can't override a field
        if (memberInfo.MemberType() == Newtonsoft.Json.Utilities.MemberTypes.Field)
            return CachedAttributeGetter<DataMemberAttribute>.GetAttribute(memberInfo);
        // search property and then search base properties if nothing is returned and the property is virtual
        PropertyInfo propertyInfo = (PropertyInfo)memberInfo;
        DataMemberAttribute result = CachedAttributeGetter<DataMemberAttribute>.GetAttribute(propertyInfo);
        if (result == null)
        {
            if (propertyInfo.IsVirtual())
            {
                Type currentType = propertyInfo.DeclaringType;
                while (result == null && currentType != null)
                {
                    PropertyInfo baseProperty = (PropertyInfo)ReflectionUtils.GetMemberInfoFromType(currentType, propertyInfo);
                    if (baseProperty != null && baseProperty.IsVirtual())
                        result = CachedAttributeGetter<DataMemberAttribute>.GetAttribute(baseProperty);
                    currentType = currentType.BaseType();
                }
            }
        }
        return result;
    }

    /// <summary>
    /// Resolves the member-serialization mode for a type. Precedence:
    /// JsonObjectAttribute.MemberSerialization, then DataContract (OptIn),
    /// then the OptOut default.
    /// NOTE(review): the ignoreSerializableAttribute parameter is not read in
    /// this (Unity) build of the method.
    /// </summary>
    public static MemberSerialization GetObjectMemberSerialization(Type objectType, bool ignoreSerializableAttribute)
    {
        JsonObjectAttribute objectAttribute = GetJsonObjectAttribute(objectType);
        if (objectAttribute != null)
            return objectAttribute.MemberSerialization;
        DataContractAttribute dataContractAttribute = GetDataContractAttribute(objectType);
        if (dataContractAttribute != null)
            return MemberSerialization.OptIn;
        // the default
        return MemberSerialization.OptOut;
    }

    /// <summary>Cached lookup of the converter type declared on a type/member.</summary>
    private static Type GetJsonConverterType(object attributeProvider)
    {
        return JsonConverterTypeCache.Get(attributeProvider);
    }

    /// <summary>
    /// Uncached factory backing JsonConverterTypeCache: reads the ConverterType
    /// from a JsonConverterAttribute, or null when none is present.
    /// </summary>
    private static Type GetJsonConverterTypeFromAttribute(object attributeProvider)
    {
        JsonConverterAttribute converterAttribute = GetAttribute<JsonConverterAttribute>(attributeProvider);
        return (converterAttribute != null)
            ? converterAttribute.ConverterType
            : null;
    }

    /// <summary>
    /// Instantiates the JsonConverter declared on the given type/member, or returns
    /// null when none is declared.
    /// NOTE(review): targetConvertedType is not used by this implementation.
    /// </summary>
    public static JsonConverter GetJsonConverter(object attributeProvider, Type targetConvertedType)
    {
        Type converterType = GetJsonConverterType(attributeProvider);
        if (converterType != null)
        {
            JsonConverter memberConverter = JsonConverterAttribute.CreateJsonConverterInstance(converterType);
            return memberConverter;
        }
        return null;
    }

    /// <summary>
    /// Attribute lookup on a type: checks the type itself (with inheritance),
    /// then each of its interfaces.
    /// </summary>
    private static T GetAttribute<T>(Type type) where T : System.Attribute
    {
        T attribute;
        attribute = ReflectionUtils.GetAttribute<T>(type, true);
        if (attribute != null)
            return attribute;
        foreach (Type typeInterface in type.GetInterfaces())
        {
            attribute = ReflectionUtils.GetAttribute<T>(typeInterface, true);
            if (attribute != null)
                return attribute;
        }
        return null;
    }

    /// <summary>
    /// Attribute lookup on a member: checks the member itself, then the matching
    /// member on each interface implemented by the declaring type.
    /// </summary>
    private static T GetAttribute<T>(MemberInfo memberInfo) where T : System.Attribute
    {
        T attribute;
        attribute = ReflectionUtils.GetAttribute<T>(memberInfo, true);
        if (attribute != null)
            return attribute;
        if (memberInfo.DeclaringType != null)
        {
            foreach (Type typeInterface in memberInfo.DeclaringType.GetInterfaces())
            {
                MemberInfo interfaceTypeMemberInfo = ReflectionUtils.GetMemberInfoFromType(typeInterface, memberInfo);
                if (interfaceTypeMemberInfo != null)
                {
                    attribute = ReflectionUtils.GetAttribute<T>(interfaceTypeMemberInfo, true);
                    if (attribute != null)
                        return attribute;
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Dispatches to the Type or MemberInfo overload depending on the runtime
    /// type of the provider; falls back to a direct attribute lookup otherwise.
    /// </summary>
    public static T GetAttribute<T>(object provider) where T : System.Attribute
    {
        Type type = provider as Type;
        if (type != null)
            return GetAttribute<T>(type);
        MemberInfo memberInfo = provider as MemberInfo;
        if (memberInfo != null)
            return GetAttribute<T>(memberInfo);
        return ReflectionUtils.GetAttribute<T>(provider, true);
    }

#if DEBUG
    // Test hooks: allow unit tests to force the cached capability flags.
    internal static void SetFullyTrusted(bool fullyTrusted)
    {
        _fullyTrusted = fullyTrusted;
    }

    internal static void SetDynamicCodeGeneration(bool dynamicCodeGeneration)
    {
        _dynamicCodeGeneration = dynamicCodeGeneration;
    }
#endif

    /// <summary>
    /// Whether IL/expression code generation is available. In this build it is
    /// lazily initialized to false (no dynamic code generation on these platforms).
    /// </summary>
    public static bool DynamicCodeGeneration
    {
        get
        {
            if (_dynamicCodeGeneration == null)
            {
                _dynamicCodeGeneration = false;
            }
            return _dynamicCodeGeneration.Value;
        }
    }

    /// <summary>
    /// Whether the AppDomain is fully trusted. In this build it is lazily
    /// initialized to false.
    /// </summary>
    public static bool FullyTrusted
    {
        get
        {
            if (_fullyTrusted == null)
            {
                _fullyTrusted = false;
            }
            return _fullyTrusted.Value;
        }
    }

    /// <summary>
    /// Factory used to build member accessors; this build always uses the
    /// expression-tree based factory.
    /// </summary>
    public static ReflectionDelegateFactory ReflectionDelegateFactory
    {
        get
        {
            return ExpressionReflectionDelegateFactory.Instance;
        }
    }
}
}
#endif
| |
using System;
using NUnit.Framework;
using SharpVectors.Dom;
using SharpVectors.Dom.Svg;
namespace SharpVectors.UnitTests.Svg.BasicTypesAndInterfaces
{
/// <summary>
/// NUnit tests for SvgLengthList.FromString: parsing of empty/null input,
/// whitespace handling, space/comma/mixed delimiters, and rejection of
/// consecutive commas. The "list" field under test comes from the SvgListTests
/// base class (created via makeList below).
/// </summary>
public class SvgLengthListTests : SvgListTests
{
    #region Fields
    // Monotonic counter so makeItem produces a distinct string on every call.
    private static int counter = 0;
    #endregion
    #region Additional SvgLengthList-specific tests
    // An empty string parses to an empty list.
    [Test]
    public void TestFromStringEmpty()
    {
        ((SvgLengthList) list).FromString("");
        Assert.AreEqual(0, list.NumberOfItems);
    }
    // Null input is tolerated and parses to an empty list.
    [Test]
    public void TestFromStringNull()
    {
        ((SvgLengthList) list).FromString(null);
        Assert.AreEqual(0, list.NumberOfItems);
    }
    // Leading whitespace before the first length is ignored.
    [Test]
    public void TestFromStringLeadingWhitespace()
    {
        SvgLengthList svgLengthList = (SvgLengthList) list;
        svgLengthList.FromString("  1px 2em 3in 4cm");
        Assert.AreEqual(1, svgLengthList.GetItem(0).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Px, svgLengthList.GetItem(0).UnitType);
        Assert.AreEqual(2, svgLengthList.GetItem(1).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Ems, svgLengthList.GetItem(1).UnitType);
        Assert.AreEqual(3, svgLengthList.GetItem(2).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.In, svgLengthList.GetItem(2).UnitType);
        Assert.AreEqual(4, svgLengthList.GetItem(3).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Cm, svgLengthList.GetItem(3).UnitType);
    }
    // Trailing whitespace after the last length is ignored.
    [Test]
    public void TestFromStringTrailingWhitespace()
    {
        SvgLengthList svgLengthList = (SvgLengthList) list;
        svgLengthList.FromString("1px 2em 3in 4cm  ");
        Assert.AreEqual(1, svgLengthList.GetItem(0).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Px, svgLengthList.GetItem(0).UnitType);
        Assert.AreEqual(2, svgLengthList.GetItem(1).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Ems, svgLengthList.GetItem(1).UnitType);
        Assert.AreEqual(3, svgLengthList.GetItem(2).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.In, svgLengthList.GetItem(2).UnitType);
        Assert.AreEqual(4, svgLengthList.GetItem(3).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Cm, svgLengthList.GetItem(3).UnitType);
    }
    // A single length with a unit parses to one item.
    [Test]
    public void TestFromStringOneValue()
    {
        SvgLengthList svgLengthList = (SvgLengthList) list;
        svgLengthList.FromString("1mm");
        Assert.AreEqual(1, svgLengthList.GetItem(0).ValueInSpecifiedUnits);
    }
    // Space is a valid delimiter between lengths.
    [Test]
    public void TestFromStringSpaceDelimited()
    {
        SvgLengthList svgLengthList = (SvgLengthList) list;
        svgLengthList.FromString("1px 2em 3in 4cm");
        Assert.AreEqual(1, svgLengthList.GetItem(0).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Px, svgLengthList.GetItem(0).UnitType);
        Assert.AreEqual(2, svgLengthList.GetItem(1).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Ems, svgLengthList.GetItem(1).UnitType);
        Assert.AreEqual(3, svgLengthList.GetItem(2).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.In, svgLengthList.GetItem(2).UnitType);
        Assert.AreEqual(4, svgLengthList.GetItem(3).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Cm, svgLengthList.GetItem(3).UnitType);
    }
    // Commas (with optional surrounding spaces) are valid delimiters.
    [Test]
    public void TestFromStringCommaDelimited()
    {
        SvgLengthList svgLengthList = (SvgLengthList) list;
        svgLengthList.FromString("1px,2em, 3in ,4cm , 5ex");
        Assert.AreEqual(1, svgLengthList.GetItem(0).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Px, svgLengthList.GetItem(0).UnitType);
        Assert.AreEqual(2, svgLengthList.GetItem(1).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Ems, svgLengthList.GetItem(1).UnitType);
        Assert.AreEqual(3, svgLengthList.GetItem(2).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.In, svgLengthList.GetItem(2).UnitType);
        Assert.AreEqual(4, svgLengthList.GetItem(3).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Cm, svgLengthList.GetItem(3).UnitType);
        Assert.AreEqual(5, svgLengthList.GetItem(4).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Exs, svgLengthList.GetItem(4).UnitType);
    }
    // Consecutive commas (with or without spaces between them) are invalid
    // and must raise a DomException.
    [Test]
    [ExpectedException(typeof(DomException))]
    public void TestFromStringMultipleCommas()
    {
        ((SvgLengthList) list).FromString("1px,,2em");
    }
    [Test]
    [ExpectedException(typeof(DomException))]
    public void TestFromStringMultipleCommas2()
    {
        ((SvgLengthList) list).FromString("1px, ,2em");
    }
    [Test]
    [ExpectedException(typeof(DomException))]
    public void TestFromStringMultipleCommas3()
    {
        ((SvgLengthList) list).FromString("1px , ,2em");
    }
    [Test]
    [ExpectedException(typeof(DomException))]
    public void TestFromStringMultipleCommas4()
    {
        ((SvgLengthList) list).FromString("1px ,  , 2em");
    }
    // Mixed space and comma delimiters in one string.
    [Test]
    public void TestFromStringMixed()
    {
        SvgLengthList svgLengthList = (SvgLengthList) list;
        svgLengthList.FromString("1px 2em 3in,4cm ,5ex , 6mm");
        Assert.AreEqual(1, svgLengthList.GetItem(0).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Px, svgLengthList.GetItem(0).UnitType);
        Assert.AreEqual(2, svgLengthList.GetItem(1).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Ems, svgLengthList.GetItem(1).UnitType);
        Assert.AreEqual(3, svgLengthList.GetItem(2).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.In, svgLengthList.GetItem(2).UnitType);
        Assert.AreEqual(4, svgLengthList.GetItem(3).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Cm, svgLengthList.GetItem(3).UnitType);
        Assert.AreEqual(5, svgLengthList.GetItem(4).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Exs, svgLengthList.GetItem(4).UnitType);
        Assert.AreEqual(6, svgLengthList.GetItem(5).ValueInSpecifiedUnits);
        Assert.AreEqual(SvgLengthType.Mm, svgLengthList.GetItem(5).UnitType);
    }
    #endregion
    #region Support Methods
    // Factory used by the base-class tests: the list under test is an SvgLengthList.
    protected override SvgList makeList()
    {
        return new SvgLengthList();
    }
    // Hides (does not override) the base makeItem.
    // NOTE(review): yields values like "string0px", which is not a valid length —
    // presumably intentional for base-class item handling; confirm against SvgListTests.
    protected new string makeItem()
    {
        return "string" + (counter++).ToString() + "px";
    }
    #endregion
}
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
#if !UNIX
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Eventing;
namespace System.Management.Automation.Tracing
{
/// <summary>
/// Tracer.
/// </summary>
public sealed partial class Tracer : System.Management.Automation.Tracing.EtwActivity
{
/// <summary>
/// Critical level.
/// </summary>
public const byte LevelCritical = 1;
/// <summary>
/// Error level.
/// </summary>
public const byte LevelError = 2;
/// <summary>
/// Warning level.
/// </summary>
public const byte LevelWarning = 3;
/// <summary>
/// Informational level.
/// </summary>
public const byte LevelInformational = 4;
/// <summary>
/// Verbose level.
/// </summary>
public const byte LevelVerbose = 5;
/// <summary>
/// Keyword all.
/// </summary>
public const long KeywordAll = 0xFFFFFFFF;
private static readonly Guid providerId = Guid.Parse("a0c1853b-5c40-4b15-8766-3cf1c58f985a");
private static readonly EventDescriptor WriteTransferEventEvent;
private static readonly EventDescriptor DebugMessageEvent;
private static readonly EventDescriptor M3PAbortingWorkflowExecutionEvent;
private static readonly EventDescriptor M3PActivityExecutionFinishedEvent;
private static readonly EventDescriptor M3PActivityExecutionQueuedEvent;
private static readonly EventDescriptor M3PActivityExecutionStartedEvent;
private static readonly EventDescriptor M3PBeginContainerParentJobExecutionEvent;
private static readonly EventDescriptor M3PBeginCreateNewJobEvent;
private static readonly EventDescriptor M3PBeginJobLogicEvent;
private static readonly EventDescriptor M3PBeginProxyChildJobEventHandlerEvent;
private static readonly EventDescriptor M3PBeginProxyJobEventHandlerEvent;
private static readonly EventDescriptor M3PBeginProxyJobExecutionEvent;
private static readonly EventDescriptor M3PBeginRunGarbageCollectionEvent;
private static readonly EventDescriptor M3PBeginStartWorkflowApplicationEvent;
private static readonly EventDescriptor M3PBeginWorkflowExecutionEvent;
private static readonly EventDescriptor M3PCancellingWorkflowExecutionEvent;
private static readonly EventDescriptor M3PChildWorkflowJobAdditionEvent;
private static readonly EventDescriptor M3PEndContainerParentJobExecutionEvent;
private static readonly EventDescriptor M3PEndCreateNewJobEvent;
private static readonly EventDescriptor M3PEndJobLogicEvent;
private static readonly EventDescriptor M3PEndpointDisabledEvent;
private static readonly EventDescriptor M3PEndpointEnabledEvent;
private static readonly EventDescriptor M3PEndpointModifiedEvent;
private static readonly EventDescriptor M3PEndpointRegisteredEvent;
private static readonly EventDescriptor M3PEndpointUnregisteredEvent;
private static readonly EventDescriptor M3PEndProxyChildJobEventHandlerEvent;
private static readonly EventDescriptor M3PEndProxyJobEventHandlerEvent;
private static readonly EventDescriptor M3PEndProxyJobExecutionEvent;
private static readonly EventDescriptor M3PEndRunGarbageCollectionEvent;
private static readonly EventDescriptor M3PEndStartWorkflowApplicationEvent;
private static readonly EventDescriptor M3PEndWorkflowExecutionEvent;
private static readonly EventDescriptor M3PErrorImportingWorkflowFromXamlEvent;
private static readonly EventDescriptor M3PForcedWorkflowShutdownErrorEvent;
private static readonly EventDescriptor M3PForcedWorkflowShutdownFinishedEvent;
private static readonly EventDescriptor M3PForcedWorkflowShutdownStartedEvent;
private static readonly EventDescriptor M3PImportedWorkflowFromXamlEvent;
private static readonly EventDescriptor M3PImportingWorkflowFromXamlEvent;
private static readonly EventDescriptor M3PJobCreationCompleteEvent;
private static readonly EventDescriptor M3PJobErrorEvent;
private static readonly EventDescriptor M3PJobRemovedEvent;
private static readonly EventDescriptor M3PJobRemoveErrorEvent;
private static readonly EventDescriptor M3PJobStateChangedEvent;
private static readonly EventDescriptor M3PLoadingWorkflowForExecutionEvent;
private static readonly EventDescriptor M3POutOfProcessRunspaceStartedEvent;
private static readonly EventDescriptor M3PParameterSplattingWasPerformedEvent;
private static readonly EventDescriptor M3PParentJobCreatedEvent;
private static readonly EventDescriptor M3PPersistenceStoreMaxSizeReachedEvent;
private static readonly EventDescriptor M3PPersistingWorkflowEvent;
private static readonly EventDescriptor M3PProxyJobRemoteJobAssociationEvent;
private static readonly EventDescriptor M3PRemoveJobStartedEvent;
private static readonly EventDescriptor M3PRunspaceAvailabilityChangedEvent;
// ETW event descriptors for the workflow events; each is assigned exactly once
// in the static constructor with its id/version/channel/level/opcode/task/keywords.
private static readonly EventDescriptor M3PRunspaceStateChangedEvent;
private static readonly EventDescriptor M3PTrackingGuidContainerParentJobCorrelationEvent;
private static readonly EventDescriptor M3PUnloadingWorkflowEvent;
private static readonly EventDescriptor M3PWorkflowActivityExecutionFailedEvent;
private static readonly EventDescriptor M3PWorkflowActivityValidatedEvent;
private static readonly EventDescriptor M3PWorkflowActivityValidationFailedEvent;
private static readonly EventDescriptor M3PWorkflowCleanupPerformedEvent;
private static readonly EventDescriptor M3PWorkflowDeletedFromDiskEvent;
private static readonly EventDescriptor M3PWorkflowEngineStartedEvent;
private static readonly EventDescriptor M3PWorkflowExecutionAbortedEvent;
private static readonly EventDescriptor M3PWorkflowExecutionCancelledEvent;
private static readonly EventDescriptor M3PWorkflowExecutionErrorEvent;
private static readonly EventDescriptor M3PWorkflowExecutionFinishedEvent;
private static readonly EventDescriptor M3PWorkflowExecutionStartedEvent;
private static readonly EventDescriptor M3PWorkflowJobCreatedEvent;
private static readonly EventDescriptor M3PWorkflowLoadedForExecutionEvent;
private static readonly EventDescriptor M3PWorkflowLoadedFromDiskEvent;
private static readonly EventDescriptor M3PWorkflowManagerCheckpointEvent;
private static readonly EventDescriptor M3PWorkflowPersistedEvent;
private static readonly EventDescriptor M3PWorkflowPluginRequestedToShutdownEvent;
private static readonly EventDescriptor M3PWorkflowPluginRestartedEvent;
private static readonly EventDescriptor M3PWorkflowPluginStartedEvent;
private static readonly EventDescriptor M3PWorkflowQuotaViolatedEvent;
private static readonly EventDescriptor M3PWorkflowResumedEvent;
private static readonly EventDescriptor M3PWorkflowResumingEvent;
private static readonly EventDescriptor M3PWorkflowRunspacePoolCreatedEvent;
private static readonly EventDescriptor M3PWorkflowStateChangedEvent;
private static readonly EventDescriptor M3PWorkflowUnloadedEvent;
private static readonly EventDescriptor M3PWorkflowValidationErrorEvent;
private static readonly EventDescriptor M3PWorkflowValidationFinishedEvent;
private static readonly EventDescriptor M3PWorkflowValidationStartedEvent;
/// <summary>
/// Static constructor: initializes every event descriptor with its
/// event id, version, channel, level, opcode, task and keyword mask.
/// This file is generated - do not hand-edit the constants.
/// </summary>
static Tracer()
{
// unchecked: keyword masks such as 0x8000000000000000 exceed long.MaxValue,
// so the (long) casts would throw OverflowException under checked arithmetic.
unchecked
{
WriteTransferEventEvent = new EventDescriptor(0x1f05, 0x1, 0x11, 0x5, 0x14, 0x0, (long)0x4000000000000000);
DebugMessageEvent = new EventDescriptor(0xc000, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PAbortingWorkflowExecutionEvent = new EventDescriptor(0xb038, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PActivityExecutionFinishedEvent = new EventDescriptor(0xb03f, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PActivityExecutionQueuedEvent = new EventDescriptor(0xb017, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PActivityExecutionStartedEvent = new EventDescriptor(0xb018, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PBeginContainerParentJobExecutionEvent = new EventDescriptor(0xb50c, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PBeginCreateNewJobEvent = new EventDescriptor(0xb503, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PBeginJobLogicEvent = new EventDescriptor(0xb506, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PBeginProxyChildJobEventHandlerEvent = new EventDescriptor(0xb512, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PBeginProxyJobEventHandlerEvent = new EventDescriptor(0xb510, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PBeginProxyJobExecutionEvent = new EventDescriptor(0xb50e, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PBeginRunGarbageCollectionEvent = new EventDescriptor(0xb514, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PBeginStartWorkflowApplicationEvent = new EventDescriptor(0xb501, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PBeginWorkflowExecutionEvent = new EventDescriptor(0xb508, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PCancellingWorkflowExecutionEvent = new EventDescriptor(0xb037, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PChildWorkflowJobAdditionEvent = new EventDescriptor(0xb50a, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndContainerParentJobExecutionEvent = new EventDescriptor(0xb50d, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndCreateNewJobEvent = new EventDescriptor(0xb504, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndJobLogicEvent = new EventDescriptor(0xb507, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndpointDisabledEvent = new EventDescriptor(0xb044, 0x1, 0x11, 0x5, 0x14, 0x9, (long)0x4000000000000200);
M3PEndpointEnabledEvent = new EventDescriptor(0xb045, 0x1, 0x11, 0x5, 0x14, 0x9, (long)0x4000000000000200);
M3PEndpointModifiedEvent = new EventDescriptor(0xb042, 0x1, 0x11, 0x5, 0x14, 0x9, (long)0x4000000000000200);
M3PEndpointRegisteredEvent = new EventDescriptor(0xb041, 0x1, 0x11, 0x5, 0x14, 0x9, (long)0x4000000000000200);
M3PEndpointUnregisteredEvent = new EventDescriptor(0xb043, 0x1, 0x11, 0x5, 0x14, 0x9, (long)0x4000000000000200);
M3PEndProxyChildJobEventHandlerEvent = new EventDescriptor(0xb513, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndProxyJobEventHandlerEvent = new EventDescriptor(0xb511, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndProxyJobExecutionEvent = new EventDescriptor(0xb50f, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndRunGarbageCollectionEvent = new EventDescriptor(0xb515, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndStartWorkflowApplicationEvent = new EventDescriptor(0xb502, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PEndWorkflowExecutionEvent = new EventDescriptor(0xb509, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PErrorImportingWorkflowFromXamlEvent = new EventDescriptor(0xb01b, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PForcedWorkflowShutdownErrorEvent = new EventDescriptor(0xb03c, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PForcedWorkflowShutdownFinishedEvent = new EventDescriptor(0xb03b, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PForcedWorkflowShutdownStartedEvent = new EventDescriptor(0xb03a, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PImportedWorkflowFromXamlEvent = new EventDescriptor(0xb01a, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PImportingWorkflowFromXamlEvent = new EventDescriptor(0xb019, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PJobCreationCompleteEvent = new EventDescriptor(0xb032, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PJobErrorEvent = new EventDescriptor(0xb02e, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PJobRemovedEvent = new EventDescriptor(0xb033, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PJobRemoveErrorEvent = new EventDescriptor(0xb034, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PJobStateChangedEvent = new EventDescriptor(0xb02d, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PLoadingWorkflowForExecutionEvent = new EventDescriptor(0xb035, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3POutOfProcessRunspaceStartedEvent = new EventDescriptor(0xb046, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PParameterSplattingWasPerformedEvent = new EventDescriptor(0xb047, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PParentJobCreatedEvent = new EventDescriptor(0xb031, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PPersistenceStoreMaxSizeReachedEvent = new EventDescriptor(0xb516, 0x1, 0x10, 0x3, 0x0, 0x0, (long)0x8000000000000000);
M3PPersistingWorkflowEvent = new EventDescriptor(0xb03d, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PProxyJobRemoteJobAssociationEvent = new EventDescriptor(0xb50b, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PRemoveJobStartedEvent = new EventDescriptor(0xb02c, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PRunspaceAvailabilityChangedEvent = new EventDescriptor(0xb022, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PRunspaceStateChangedEvent = new EventDescriptor(0xb023, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PTrackingGuidContainerParentJobCorrelationEvent = new EventDescriptor(0xb505, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000000);
M3PUnloadingWorkflowEvent = new EventDescriptor(0xb039, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowActivityExecutionFailedEvent = new EventDescriptor(0xb021, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowActivityValidatedEvent = new EventDescriptor(0xb01f, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowActivityValidationFailedEvent = new EventDescriptor(0xb020, 0x1, 0x11, 0x5, 0x14, 0x8, (long)0x4000000000000200);
M3PWorkflowCleanupPerformedEvent = new EventDescriptor(0xb028, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowDeletedFromDiskEvent = new EventDescriptor(0xb02a, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowEngineStartedEvent = new EventDescriptor(0xb048, 0x1, 0x11, 0x5, 0x14, 0x5, (long)0x4000000000000200);
M3PWorkflowExecutionAbortedEvent = new EventDescriptor(0xb027, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowExecutionCancelledEvent = new EventDescriptor(0xb026, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowExecutionErrorEvent = new EventDescriptor(0xb040, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowExecutionFinishedEvent = new EventDescriptor(0xb036, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowExecutionStartedEvent = new EventDescriptor(0xb008, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowJobCreatedEvent = new EventDescriptor(0xb030, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowLoadedForExecutionEvent = new EventDescriptor(0xb024, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowLoadedFromDiskEvent = new EventDescriptor(0xb029, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowManagerCheckpointEvent = new EventDescriptor(0xb049, 0x1, 0x12, 0x4, 0x0, 0x0, (long)0x2000000000000200);
M3PWorkflowPersistedEvent = new EventDescriptor(0xb03e, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowPluginRequestedToShutdownEvent = new EventDescriptor(0xb010, 0x1, 0x11, 0x5, 0x14, 0x5, (long)0x4000000000000200);
M3PWorkflowPluginRestartedEvent = new EventDescriptor(0xb011, 0x1, 0x11, 0x5, 0x14, 0x5, (long)0x4000000000000200);
M3PWorkflowPluginStartedEvent = new EventDescriptor(0xb007, 0x1, 0x11, 0x5, 0x14, 0x5, (long)0x4000000000000200);
M3PWorkflowQuotaViolatedEvent = new EventDescriptor(0xb013, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowResumedEvent = new EventDescriptor(0xb014, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowResumingEvent = new EventDescriptor(0xb012, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowRunspacePoolCreatedEvent = new EventDescriptor(0xb016, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowStateChangedEvent = new EventDescriptor(0xb009, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowUnloadedEvent = new EventDescriptor(0xb025, 0x1, 0x11, 0x5, 0x14, 0x6, (long)0x4000000000000200);
M3PWorkflowValidationErrorEvent = new EventDescriptor(0xb01e, 0x1, 0x11, 0x5, 0x14, 0x8, (long)0x4000000000000200);
M3PWorkflowValidationFinishedEvent = new EventDescriptor(0xb01d, 0x1, 0x11, 0x5, 0x14, 0x8, (long)0x4000000000000200);
M3PWorkflowValidationStartedEvent = new EventDescriptor(0xb01c, 0x1, 0x11, 0x5, 0x14, 0x8, (long)0x4000000000000200);
}
}
/// <summary>
/// Default constructor; the base-class default constructor is invoked implicitly.
/// </summary>
public Tracer() { }
/// <summary>
/// The ETW provider GUID under which all events of this tracer are written.
/// </summary>
protected override Guid ProviderId { get { return providerId; } }
/// <summary>
/// The descriptor used for activity-transfer events.
/// </summary>
protected override EventDescriptor TransferEvent { get { return WriteTransferEventEvent; } }
/// <summary>Writes the activity-transfer event (id 0x1f05).</summary>
[EtwEvent(0x1f05)]
public void WriteTransferEvent(Guid currentActivityId, Guid parentActivityId) { WriteEvent(WriteTransferEventEvent, currentActivityId, parentActivityId); }
/// <summary>Writes the DebugMessage event (id 0xc000).</summary>
[EtwEvent(0xc000)]
public void DebugMessage(string message) { WriteEvent(DebugMessageEvent, message); }
/// <summary>Writes the AbortingWorkflowExecution event (id 0xb038).</summary>
[EtwEvent(0xb038)]
public void AbortingWorkflowExecution(Guid workflowId, string reason) { WriteEvent(M3PAbortingWorkflowExecutionEvent, workflowId, reason); }
/// <summary>Writes the ActivityExecutionFinished event (id 0xb03f).</summary>
[EtwEvent(0xb03f)]
public void ActivityExecutionFinished(string activityName) { WriteEvent(M3PActivityExecutionFinishedEvent, activityName); }
/// <summary>Writes the ActivityExecutionQueued event (id 0xb017).</summary>
[EtwEvent(0xb017)]
public void ActivityExecutionQueued(Guid workflowId, string activityName) { WriteEvent(M3PActivityExecutionQueuedEvent, workflowId, activityName); }
/// <summary>Writes the ActivityExecutionStarted event (id 0xb018).</summary>
[EtwEvent(0xb018)]
public void ActivityExecutionStarted(string activityName, string activityTypeName) { WriteEvent(M3PActivityExecutionStartedEvent, activityName, activityTypeName); }
/// <summary>Writes the BeginContainerParentJobExecution event (id 0xb50c).</summary>
[EtwEvent(0xb50c)]
public void BeginContainerParentJobExecution(Guid containerParentJobInstanceId) { WriteEvent(M3PBeginContainerParentJobExecutionEvent, containerParentJobInstanceId); }
/// <summary>Writes the BeginCreateNewJob event (id 0xb503).</summary>
[EtwEvent(0xb503)]
public void BeginCreateNewJob(Guid trackingId) { WriteEvent(M3PBeginCreateNewJobEvent, trackingId); }
/// <summary>Writes the BeginJobLogic event (id 0xb506).</summary>
[EtwEvent(0xb506)]
public void BeginJobLogic(Guid workflowJobJobInstanceId) { WriteEvent(M3PBeginJobLogicEvent, workflowJobJobInstanceId); }
/// <summary>Writes the BeginProxyChildJobEventHandler event (id 0xb512).</summary>
[EtwEvent(0xb512)]
public void BeginProxyChildJobEventHandler(Guid proxyChildJobInstanceId) { WriteEvent(M3PBeginProxyChildJobEventHandlerEvent, proxyChildJobInstanceId); }
/// <summary>Writes the BeginProxyJobEventHandler event (id 0xb510).</summary>
[EtwEvent(0xb510)]
public void BeginProxyJobEventHandler(Guid proxyJobInstanceId) { WriteEvent(M3PBeginProxyJobEventHandlerEvent, proxyJobInstanceId); }
/// <summary>Writes the BeginProxyJobExecution event (id 0xb50e).</summary>
[EtwEvent(0xb50e)]
public void BeginProxyJobExecution(Guid proxyJobInstanceId) { WriteEvent(M3PBeginProxyJobExecutionEvent, proxyJobInstanceId); }
/// <summary>Writes the BeginRunGarbageCollection event (id 0xb514).</summary>
[EtwEvent(0xb514)]
public void BeginRunGarbageCollection() { WriteEvent(M3PBeginRunGarbageCollectionEvent); }
/// <summary>Writes the BeginStartWorkflowApplication event (id 0xb501).</summary>
[EtwEvent(0xb501)]
public void BeginStartWorkflowApplication(Guid trackingId) { WriteEvent(M3PBeginStartWorkflowApplicationEvent, trackingId); }
/// <summary>Writes the BeginWorkflowExecution event (id 0xb508).</summary>
[EtwEvent(0xb508)]
public void BeginWorkflowExecution(Guid workflowJobJobInstanceId) { WriteEvent(M3PBeginWorkflowExecutionEvent, workflowJobJobInstanceId); }
/// <summary>Writes the CancellingWorkflowExecution event (id 0xb037).</summary>
[EtwEvent(0xb037)]
public void CancellingWorkflowExecution(Guid workflowId) { WriteEvent(M3PCancellingWorkflowExecutionEvent, workflowId); }
/// <summary>Writes the ChildWorkflowJobAddition event (id 0xb50a).</summary>
[EtwEvent(0xb50a)]
public void ChildWorkflowJobAddition(Guid workflowJobInstanceId, Guid containerParentJobInstanceId) { WriteEvent(M3PChildWorkflowJobAdditionEvent, workflowJobInstanceId, containerParentJobInstanceId); }
/// <summary>Writes the EndContainerParentJobExecution event (id 0xb50d).</summary>
[EtwEvent(0xb50d)]
public void EndContainerParentJobExecution(Guid containerParentJobInstanceId) { WriteEvent(M3PEndContainerParentJobExecutionEvent, containerParentJobInstanceId); }
/// <summary>Writes the EndCreateNewJob event (id 0xb504).</summary>
[EtwEvent(0xb504)]
public void EndCreateNewJob(Guid trackingId) { WriteEvent(M3PEndCreateNewJobEvent, trackingId); }
/// <summary>Writes the EndJobLogic event (id 0xb507).</summary>
[EtwEvent(0xb507)]
public void EndJobLogic(Guid workflowJobJobInstanceId) { WriteEvent(M3PEndJobLogicEvent, workflowJobJobInstanceId); }
/// <summary>Writes the EndpointDisabled event (id 0xb044).</summary>
[EtwEvent(0xb044)]
public void EndpointDisabled(string endpointName, string disabledBy) { WriteEvent(M3PEndpointDisabledEvent, endpointName, disabledBy); }
/// <summary>Writes the EndpointEnabled event (id 0xb045).</summary>
[EtwEvent(0xb045)]
public void EndpointEnabled(string endpointName, string enabledBy) { WriteEvent(M3PEndpointEnabledEvent, endpointName, enabledBy); }
/// <summary>Writes the EndpointModified event (id 0xb042).</summary>
[EtwEvent(0xb042)]
public void EndpointModified(string endpointName, string modifiedBy) { WriteEvent(M3PEndpointModifiedEvent, endpointName, modifiedBy); }
/// <summary>Writes the EndpointRegistered event (id 0xb041).</summary>
[EtwEvent(0xb041)]
public void EndpointRegistered(string endpointName, string registeredBy) { WriteEvent(M3PEndpointRegisteredEvent, endpointName, registeredBy); }
/// <summary>Writes the EndpointUnregistered event (id 0xb043).</summary>
[EtwEvent(0xb043)]
public void EndpointUnregistered(string endpointName, string unregisteredBy) { WriteEvent(M3PEndpointUnregisteredEvent, endpointName, unregisteredBy); }
/// <summary>Writes the EndProxyChildJobEventHandler event (id 0xb513).</summary>
[EtwEvent(0xb513)]
public void EndProxyChildJobEventHandler(Guid proxyChildJobInstanceId) { WriteEvent(M3PEndProxyChildJobEventHandlerEvent, proxyChildJobInstanceId); }
/// <summary>Writes the EndProxyJobEventHandler event (id 0xb511).</summary>
[EtwEvent(0xb511)]
public void EndProxyJobEventHandler(Guid proxyJobInstanceId) { WriteEvent(M3PEndProxyJobEventHandlerEvent, proxyJobInstanceId); }
/// <summary>Writes the EndProxyJobExecution event (id 0xb50f).</summary>
[EtwEvent(0xb50f)]
public void EndProxyJobExecution(Guid proxyJobInstanceId) { WriteEvent(M3PEndProxyJobExecutionEvent, proxyJobInstanceId); }
/// <summary>Writes the EndRunGarbageCollection event (id 0xb515).</summary>
[EtwEvent(0xb515)]
public void EndRunGarbageCollection() { WriteEvent(M3PEndRunGarbageCollectionEvent); }
/// <summary>Writes the EndStartWorkflowApplication event (id 0xb502).</summary>
[EtwEvent(0xb502)]
public void EndStartWorkflowApplication(Guid trackingId) { WriteEvent(M3PEndStartWorkflowApplicationEvent, trackingId); }
/// <summary>Writes the EndWorkflowExecution event (id 0xb509).</summary>
[EtwEvent(0xb509)]
public void EndWorkflowExecution(Guid workflowJobJobInstanceId) { WriteEvent(M3PEndWorkflowExecutionEvent, workflowJobJobInstanceId); }
/// <summary>Writes the ErrorImportingWorkflowFromXaml event (id 0xb01b).</summary>
[EtwEvent(0xb01b)]
public void ErrorImportingWorkflowFromXaml(Guid workflowId, string errorDescription) { WriteEvent(M3PErrorImportingWorkflowFromXamlEvent, workflowId, errorDescription); }
/// <summary>Writes the ForcedWorkflowShutdownError event (id 0xb03c).</summary>
[EtwEvent(0xb03c)]
public void ForcedWorkflowShutdownError(Guid workflowId, string errorDescription) { WriteEvent(M3PForcedWorkflowShutdownErrorEvent, workflowId, errorDescription); }
/// <summary>Writes the ForcedWorkflowShutdownFinished event (id 0xb03b).</summary>
[EtwEvent(0xb03b)]
public void ForcedWorkflowShutdownFinished(Guid workflowId) { WriteEvent(M3PForcedWorkflowShutdownFinishedEvent, workflowId); }
/// <summary>Writes the ForcedWorkflowShutdownStarted event (id 0xb03a).</summary>
[EtwEvent(0xb03a)]
public void ForcedWorkflowShutdownStarted(Guid workflowId) { WriteEvent(M3PForcedWorkflowShutdownStartedEvent, workflowId); }
/// <summary>Writes the ImportedWorkflowFromXaml event (id 0xb01a).</summary>
[EtwEvent(0xb01a)]
public void ImportedWorkflowFromXaml(Guid workflowId, string xamlFile) { WriteEvent(M3PImportedWorkflowFromXamlEvent, workflowId, xamlFile); }
/// <summary>Writes the ImportingWorkflowFromXaml event (id 0xb019).</summary>
[EtwEvent(0xb019)]
public void ImportingWorkflowFromXaml(Guid workflowId, string xamlFile) { WriteEvent(M3PImportingWorkflowFromXamlEvent, workflowId, xamlFile); }
/// <summary>Writes the JobCreationComplete event (id 0xb032).</summary>
[EtwEvent(0xb032)]
public void JobCreationComplete(Guid jobId, Guid workflowId) { WriteEvent(M3PJobCreationCompleteEvent, jobId, workflowId); }
/// <summary>Writes the JobError event (id 0xb02e).</summary>
[EtwEvent(0xb02e)]
public void JobError(int jobId, Guid workflowId, string errorDescription) { WriteEvent(M3PJobErrorEvent, jobId, workflowId, errorDescription); }
/// <summary>Writes the JobRemoved event (id 0xb033).</summary>
[EtwEvent(0xb033)]
public void JobRemoved(Guid parentJobId, Guid childJobId, Guid workflowId) { WriteEvent(M3PJobRemovedEvent, parentJobId, childJobId, workflowId); }
/// <summary>Writes the JobRemoveError event (id 0xb034).</summary>
[EtwEvent(0xb034)]
public void JobRemoveError(Guid parentJobId, Guid childJobId, Guid workflowId, string error) { WriteEvent(M3PJobRemoveErrorEvent, parentJobId, childJobId, workflowId, error); }
/// <summary>Writes the JobStateChanged event (id 0xb02d).</summary>
[EtwEvent(0xb02d)]
public void JobStateChanged(int jobId, Guid workflowId, string newState, string oldState) { WriteEvent(M3PJobStateChangedEvent, jobId, workflowId, newState, oldState); }
/// <summary>Writes the LoadingWorkflowForExecution event (id 0xb035).</summary>
[EtwEvent(0xb035)]
public void LoadingWorkflowForExecution(Guid workflowId) { WriteEvent(M3PLoadingWorkflowForExecutionEvent, workflowId); }
/// <summary>Writes the OutOfProcessRunspaceStarted event (id 0xb046).</summary>
[EtwEvent(0xb046)]
public void OutOfProcessRunspaceStarted(string command) { WriteEvent(M3POutOfProcessRunspaceStartedEvent, command); }
/// <summary>Writes the ParameterSplattingWasPerformed event (id 0xb047).</summary>
[EtwEvent(0xb047)]
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly")]
public void ParameterSplattingWasPerformed(string parameters, string computers) { WriteEvent(M3PParameterSplattingWasPerformedEvent, parameters, computers); }
/// <summary>Writes the ParentJobCreated event (id 0xb031).</summary>
[EtwEvent(0xb031)]
public void ParentJobCreated(Guid jobId) { WriteEvent(M3PParentJobCreatedEvent, jobId); }
/// <summary>Writes the PersistenceStoreMaxSizeReached event (id 0xb516).</summary>
[EtwEvent(0xb516)]
public void PersistenceStoreMaxSizeReached() { WriteEvent(M3PPersistenceStoreMaxSizeReachedEvent); }
/// <summary>Writes the PersistingWorkflow event (id 0xb03d).</summary>
[EtwEvent(0xb03d)]
public void PersistingWorkflow(Guid workflowId, string persistPath) { WriteEvent(M3PPersistingWorkflowEvent, workflowId, persistPath); }
/// <summary>Writes the ProxyJobRemoteJobAssociation event (id 0xb50b).</summary>
[EtwEvent(0xb50b)]
public void ProxyJobRemoteJobAssociation(Guid proxyJobInstanceId, Guid containerParentJobInstanceId) { WriteEvent(M3PProxyJobRemoteJobAssociationEvent, proxyJobInstanceId, containerParentJobInstanceId); }
/// <summary>Writes the RemoveJobStarted event (id 0xb02c).</summary>
[EtwEvent(0xb02c)]
public void RemoveJobStarted(Guid jobId) { WriteEvent(M3PRemoveJobStartedEvent, jobId); }
/// <summary>Writes the RunspaceAvailabilityChanged event (id 0xb022).</summary>
[EtwEvent(0xb022)]
public void RunspaceAvailabilityChanged(string runspaceId, string availability) { WriteEvent(M3PRunspaceAvailabilityChangedEvent, runspaceId, availability); }
/// <summary>Writes the RunspaceStateChanged event (id 0xb023).</summary>
[EtwEvent(0xb023)]
public void RunspaceStateChanged(string runspaceId, string newState, string oldState) { WriteEvent(M3PRunspaceStateChangedEvent, runspaceId, newState, oldState); }
/// <summary>Writes the TrackingGuidContainerParentJobCorrelation event (id 0xb505).</summary>
[EtwEvent(0xb505)]
public void TrackingGuidContainerParentJobCorrelation(Guid trackingId, Guid containerParentJobInstanceId) { WriteEvent(M3PTrackingGuidContainerParentJobCorrelationEvent, trackingId, containerParentJobInstanceId); }
/// <summary>Writes the UnloadingWorkflow event (id 0xb039).</summary>
[EtwEvent(0xb039)]
public void UnloadingWorkflow(Guid workflowId) { WriteEvent(M3PUnloadingWorkflowEvent, workflowId); }
/// <summary>Writes the WorkflowActivityExecutionFailed event (id 0xb021).</summary>
[EtwEvent(0xb021)]
public void WorkflowActivityExecutionFailed(Guid workflowId, string activityName, string failureDescription) { WriteEvent(M3PWorkflowActivityExecutionFailedEvent, workflowId, activityName, failureDescription); }
/// <summary>Writes the WorkflowActivityValidated event (id 0xb01f).</summary>
[EtwEvent(0xb01f)]
public void WorkflowActivityValidated(Guid workflowId, string activityDisplayName, string activityType) { WriteEvent(M3PWorkflowActivityValidatedEvent, workflowId, activityDisplayName, activityType); }
/// <summary>Writes the WorkflowActivityValidationFailed event (id 0xb020).</summary>
[EtwEvent(0xb020)]
public void WorkflowActivityValidationFailed(Guid workflowId, string activityDisplayName, string activityType) { WriteEvent(M3PWorkflowActivityValidationFailedEvent, workflowId, activityDisplayName, activityType); }
/// <summary>Writes the WorkflowCleanupPerformed event (id 0xb028).</summary>
[EtwEvent(0xb028)]
public void WorkflowCleanupPerformed(Guid workflowId) { WriteEvent(M3PWorkflowCleanupPerformedEvent, workflowId); }
/// <summary>Writes the WorkflowDeletedFromDisk event (id 0xb02a).</summary>
[EtwEvent(0xb02a)]
public void WorkflowDeletedFromDisk(Guid workflowId, string path) { WriteEvent(M3PWorkflowDeletedFromDiskEvent, workflowId, path); }
/// <summary>Writes the WorkflowEngineStarted event (id 0xb048).</summary>
[EtwEvent(0xb048)]
public void WorkflowEngineStarted(string endpointName) { WriteEvent(M3PWorkflowEngineStartedEvent, endpointName); }
/// <summary>Writes the WorkflowExecutionAborted event (id 0xb027).</summary>
[EtwEvent(0xb027)]
public void WorkflowExecutionAborted(Guid workflowId) { WriteEvent(M3PWorkflowExecutionAbortedEvent, workflowId); }
/// <summary>Writes the WorkflowExecutionCancelled event (id 0xb026).</summary>
[EtwEvent(0xb026)]
public void WorkflowExecutionCancelled(Guid workflowId) { WriteEvent(M3PWorkflowExecutionCancelledEvent, workflowId); }
/// <summary>Writes the WorkflowExecutionError event (id 0xb040).</summary>
[EtwEvent(0xb040)]
public void WorkflowExecutionError(Guid workflowId, string errorDescription) { WriteEvent(M3PWorkflowExecutionErrorEvent, workflowId, errorDescription); }
/// <summary>Writes the WorkflowExecutionFinished event (id 0xb036).</summary>
[EtwEvent(0xb036)]
public void WorkflowExecutionFinished(Guid workflowId) { WriteEvent(M3PWorkflowExecutionFinishedEvent, workflowId); }
/// <summary>Writes the WorkflowExecutionStarted event (id 0xb008).</summary>
[EtwEvent(0xb008)]
public void WorkflowExecutionStarted(Guid workflowId, string managedNodes) { WriteEvent(M3PWorkflowExecutionStartedEvent, workflowId, managedNodes); }
/// <summary>Writes the WorkflowJobCreated event (id 0xb030).</summary>
[EtwEvent(0xb030)]
public void WorkflowJobCreated(Guid parentJobId, Guid childJobId, Guid childWorkflowId) { WriteEvent(M3PWorkflowJobCreatedEvent, parentJobId, childJobId, childWorkflowId); }
/// <summary>Writes the WorkflowLoadedForExecution event (id 0xb024).</summary>
[EtwEvent(0xb024)]
public void WorkflowLoadedForExecution(Guid workflowId) { WriteEvent(M3PWorkflowLoadedForExecutionEvent, workflowId); }
/// <summary>Writes the WorkflowLoadedFromDisk event (id 0xb029).</summary>
[EtwEvent(0xb029)]
public void WorkflowLoadedFromDisk(Guid workflowId, string path) { WriteEvent(M3PWorkflowLoadedFromDiskEvent, workflowId, path); }
/// <summary>Writes the WorkflowManagerCheckpoint event (id 0xb049).</summary>
[EtwEvent(0xb049)]
public void WorkflowManagerCheckpoint(string checkpointPath, string configProviderId, string userName, string path) { WriteEvent(M3PWorkflowManagerCheckpointEvent, checkpointPath, configProviderId, userName, path); }
/// <summary>Writes the WorkflowPersisted event (id 0xb03e).</summary>
[EtwEvent(0xb03e)]
public void WorkflowPersisted(Guid workflowId) { WriteEvent(M3PWorkflowPersistedEvent, workflowId); }
/// <summary>Writes the WorkflowPluginRequestedToShutdown event (id 0xb010).</summary>
[EtwEvent(0xb010)]
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly")]
public void WorkflowPluginRequestedToShutdown(string endpointName) { WriteEvent(M3PWorkflowPluginRequestedToShutdownEvent, endpointName); }
/// <summary>Writes the WorkflowPluginRestarted event (id 0xb011).</summary>
[EtwEvent(0xb011)]
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly")]
public void WorkflowPluginRestarted(string endpointName) { WriteEvent(M3PWorkflowPluginRestartedEvent, endpointName); }
/// <summary>Writes the WorkflowPluginStarted event (id 0xb007).</summary>
[EtwEvent(0xb007)]
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly")]
public void WorkflowPluginStarted(string endpointName, string user, string hostingMode, string protocol, string configuration) { WriteEvent(M3PWorkflowPluginStartedEvent, endpointName, user, hostingMode, protocol, configuration); }
/// <summary>Writes the WorkflowQuotaViolated event (id 0xb013).</summary>
[EtwEvent(0xb013)]
public void WorkflowQuotaViolated(string endpointName, string configName, string allowedValue, string valueInQuestion) { WriteEvent(M3PWorkflowQuotaViolatedEvent, endpointName, configName, allowedValue, valueInQuestion); }
/// <summary>Writes the WorkflowResumed event (id 0xb014).</summary>
[EtwEvent(0xb014)]
public void WorkflowResumed(Guid workflowId) { WriteEvent(M3PWorkflowResumedEvent, workflowId); }
/// <summary>Writes the WorkflowResuming event (id 0xb012).</summary>
[EtwEvent(0xb012)]
public void WorkflowResuming(Guid workflowId) { WriteEvent(M3PWorkflowResumingEvent, workflowId); }
/// <summary>Writes the WorkflowRunspacePoolCreated event (id 0xb016).</summary>
[EtwEvent(0xb016)]
public void WorkflowRunspacePoolCreated(Guid workflowId, string managedNode) { WriteEvent(M3PWorkflowRunspacePoolCreatedEvent, workflowId, managedNode); }
/// <summary>Writes the WorkflowStateChanged event (id 0xb009).</summary>
[EtwEvent(0xb009)]
public void WorkflowStateChanged(Guid workflowId, string newState, string oldState) { WriteEvent(M3PWorkflowStateChangedEvent, workflowId, newState, oldState); }
/// <summary>Writes the WorkflowUnloaded event (id 0xb025).</summary>
[EtwEvent(0xb025)]
public void WorkflowUnloaded(Guid workflowId) { WriteEvent(M3PWorkflowUnloadedEvent, workflowId); }
/// <summary>Writes the WorkflowValidationError event (id 0xb01e).</summary>
[EtwEvent(0xb01e)]
public void WorkflowValidationError(Guid workflowId) { WriteEvent(M3PWorkflowValidationErrorEvent, workflowId); }
/// <summary>Writes the WorkflowValidationFinished event (id 0xb01d).</summary>
[EtwEvent(0xb01d)]
public void WorkflowValidationFinished(Guid workflowId) { WriteEvent(M3PWorkflowValidationFinishedEvent, workflowId); }
/// <summary>Writes the WorkflowValidationStarted event (id 0xb01c).</summary>
[EtwEvent(0xb01c)]
public void WorkflowValidationStarted(Guid workflowId) { WriteEvent(M3PWorkflowValidationStartedEvent, workflowId); }
}
}
// This code was generated on 02/01/2012 19:52:32
#endif
| |
using System;
using Greatbone;
namespace Greatbone.Service
{
/// <summary>
/// A specialized string builder for generating SQL commands.
/// </summary>
public class DbSql : DynamicContent, ISink
{
// putting contexts: whether field names are emitted as a quoted column list,
// a parameter (@name) list, or a "name"=@name SET list (see Build below)
const sbyte CtxColumnList = 1, CtxParamList = 2, CtxSetList = 3;
// the active putting context (one of the Ctx* constants; 0 = none)
internal sbyte ctx;
// 1-based position of the next name emitted while generating a list
internal int ordinal;
// Seeds the builder with an initial SQL fragment.
// NOTE(review): base(false, 1024) presumably selects text (non-binary) content
// with a 1024 initial capacity - confirm against DynamicContent's constructor.
internal DbSql(string str) : base(false, 1024)
{
Add(str);
}
/// <summary>The content type of the generated SQL text.</summary>
public override string Type { get; set; } = "text/plain";
// Resets the builder for reuse: empties the buffer (count is inherited from
// the base class) and clears the list-generation state.
internal void Clear()
{
count = 0;
ctx = 0;
ordinal = 0;
}
/// <summary>Appends the short value when <paramref name="cond"/> holds; returns this for chaining.</summary>
public DbSql T(short v, bool cond = true)
{
    if (!cond) return this;
    Add(v);
    return this;
}
/// <summary>Appends the int value when <paramref name="cond"/> holds; returns this for chaining.</summary>
public DbSql T(int v, bool cond = true)
{
    if (!cond) return this;
    Add(v);
    return this;
}
/// <summary>Appends the raw (unquoted) text when <paramref name="cond"/> holds; returns this for chaining.</summary>
public DbSql T(string v, bool cond = true)
{
    if (!cond) return this;
    Add(v);
    return this;
}
/// <summary>
/// Appends the text wrapped in single quotes when <paramref name="cond"/> holds.
/// NOTE(review): the value is not escaped - only use with trusted text.
/// </summary>
public DbSql TT(string v, bool cond = true)
{
    if (!cond) return this;
    Add('\'');
    Add(v);
    Add('\'');
    return this;
}
/// <summary>
/// Appends the values as a comma-separated list (no surrounding parentheses).
/// A null array appends nothing (previously this threw NullReferenceException).
/// </summary>
public DbSql T(short[] vals)
{
    if (vals != null)
    {
        for (int i = 0; i < vals.Length; i++)
        {
            if (i > 0) Add(',');
            Add(vals[i]);
        }
    }
    return this;
}
/// <summary>
/// Appends the values as a comma-separated list (no surrounding parentheses).
/// A null array appends nothing (previously this threw NullReferenceException).
/// </summary>
public DbSql T(int[] vals)
{
    if (vals != null)
    {
        for (int i = 0; i < vals.Length; i++)
        {
            if (i > 0) Add(',');
            Add(vals[i]);
        }
    }
    return this;
}
/// <summary>
/// Appends the values as a comma-separated list (no surrounding parentheses).
/// A null array appends nothing (previously this threw NullReferenceException).
/// </summary>
public DbSql T(long[] vals)
{
    if (vals != null)
    {
        for (int i = 0; i < vals.Length; i++)
        {
            if (i > 0) Add(',');
            Add(vals[i]);
        }
    }
    return this;
}
/// <summary>
/// Appends the strings as a single-quoted list wrapped in " IN (...)".
/// A null array appends nothing (previously this threw NullReferenceException).
/// NOTE(review): unlike the numeric T(array) overloads, this one emits the
/// " IN (...)" wrapper itself, and the values are not escaped - confirm both
/// are intended before relying on this with untrusted input.
/// </summary>
public DbSql T(string[] vals)
{
    if (vals != null)
    {
        Add(" IN (");
        for (int i = 0; i < vals.Length; i++)
        {
            if (i > 0) Add(',');
            Add('\'');
            Add(vals[i]);
            Add('\'');
        }
        Add(')');
    }
    return this;
}
/// <summary>Lets the object write its fields as a SET list ("name"=@name, ...).</summary>
public DbSql setlst(IData obj, byte proj = 0x0f)
{
    ordinal = 1;
    ctx = CtxSetList;
    obj.Write(this, proj);
    return this;
}
/// <summary>Lets the object write its fields as a quoted column list ("name", ...).</summary>
public DbSql collst(IData obj, byte proj = 0x0f)
{
    ordinal = 1;
    ctx = CtxColumnList;
    obj.Write(this, proj);
    return this;
}
public DbSql paramlst(IData obj, byte proj = 0x0f)
{
ctx = CtxParamList;
ordinal = 1;
obj.Write(this, proj);
return this;
}
public DbSql _(IData obj, byte proj = 0x0f, string extra = null)
{
Add(" (");
collst(obj, proj);
if (extra != null)
{
Add(",");
Add(extra);
}
Add(")");
return this;
}
public DbSql _VALUES_(short n)
{
Add(" VALUES (");
for (short i = 1; i <= n; i++)
{
if (i > 1)
{
Add(',');
Add(' ');
}
Add('@');
Add(i);
}
Add(")");
return this;
}
public DbSql _VALUES_(IData obj, byte proj = 0x0f, string extra = null)
{
Add(" VALUES (");
paramlst(obj, proj);
if (extra != null)
{
Add(",");
Add(extra);
}
Add(")");
return this;
}
public DbSql _SET_(IData obj, byte proj = 0x0f, string extra = null)
{
Add(" SET ");
setlst(obj, proj);
if (extra != null)
{
Add(",");
Add(extra);
}
return this;
}
public DbSql _IN_(short[] vals)
{
Add(" IN (");
for (int i = 1; i <= vals.Length; i++)
{
if (i > 1) Add(',');
Add('@');
Add('v');
Add(i);
}
Add(')');
return this;
}
public DbSql _IN_(int[] vals)
{
Add(" IN (");
T(vals);
Add(')');
return this;
}
public DbSql _IN_(long[] vals)
{
Add(" IN (");
for (int i = 1; i <= vals.Length; i++)
{
if (i > 1) Add(',');
Add('@');
Add('v');
Add(i);
}
Add(')');
return this;
}
public DbSql _IN_(string[] vals)
{
Add(" IN (");
for (int i = 1; i <= vals.Length; i++)
{
if (i > 1) Add(',');
Add('@');
Add('v');
Add(i);
}
Add(')');
return this;
}
void Build(string name)
{
if (ordinal > 1) Add(", ");
switch (ctx)
{
case CtxColumnList:
Add('"');
Add(name);
Add('"');
break;
case CtxParamList:
Add("@");
Add(name);
break;
case CtxSetList:
Add('"');
Add(name);
Add('"');
Add("=@");
Add(name);
break;
}
ordinal++;
}
//
// SINK
//
public void PutNull(string name)
{
Build(name);
}
public void Put(string name, JNumber v)
{
if (name != null)
{
Build(name);
}
else
{
Add(v);
}
}
public void Put(string name, bool v)
{
if (name != null)
{
Build(name);
}
else
{
Add(v ? "TRUE" : "FALSE");
}
}
public void Put(string name, char v)
{
if (name != null)
{
Build(name);
}
else
{
Add('\'');
Add(v);
Add('\'');
}
}
public void Put(string name, short v)
{
if (name != null)
{
Build(name);
}
else
{
Add(v);
}
}
public void Put(string name, int v)
{
if (name != null)
{
Build(name);
}
else
{
Add(v);
}
}
public void Put(string name, long v)
{
if (name != null)
{
Build(name);
}
else
{
Add(v);
}
}
public void Put(string name, double v)
{
if (name != null)
{
Build(name);
}
else
{
Add(v);
}
}
public void Put(string name, decimal v)
{
if (name != null)
{
Build(name);
}
else
{
Add(v);
}
}
public void Put(string name, DateTime v)
{
if (name != null)
{
Build(name);
}
else
{
Add(v);
}
}
public void Put(string name, string v)
{
if (name != null)
{
Build(name);
}
else
{
Add('\'');
Add(v);
Add('\'');
}
}
public void Put(string name, ArraySegment<byte> v)
{
Build(name);
}
public void Put(string name, byte[] v)
{
Build(name);
}
public void Put(string name, short[] v)
{
if (name != null)
{
Build(name);
}
else
{
if (v == null)
{
Add("NULL");
}
else
{
Add("ARRAY[");
for (int i = 0; i < v.Length; i++)
{
if (i > 0) Add(',');
Add(v[i]);
}
Add(']');
if (v.Length == 0)
{
Add("::smallint[]");
}
}
}
}
public void Put(string name, int[] v)
{
if (name != null)
{
Build(name);
}
else
{
if (v == null)
{
Add("NULL");
}
else
{
Add("ARRAY[");
for (int i = 0; i < v.Length; i++)
{
if (i > 0) Add(',');
Add(v[i]);
}
Add(']');
if (v.Length == 0)
{
Add("::integer[]");
}
}
}
}
public void Put(string name, long[] v)
{
if (name != null)
{
Build(name);
}
else
{
if (v == null)
{
Add("NULL");
}
else
{
Add("ARRAY[");
for (int i = 0; i < v.Length; i++)
{
if (i > 0) Add(',');
Add(v[i]);
}
Add(']');
if (v.Length == 0)
{
Add("::bigint[]");
}
}
}
}
public void Put(string name, string[] v)
{
if (name != null)
{
Build(name);
}
else
{
if (v == null)
{
Add("NULL");
}
else
{
Add("ARRAY[");
for (int i = 0; i < v.Length; i++)
{
if (i > 0) Add(',');
Add('\'');
Add(v[i]);
Add('\'');
}
Add(']');
if (v.Length == 0)
{
Add("::varchar[]");
}
}
}
}
public void Put(string name, JObj v)
{
if (name != null)
{
Build(name);
}
else
{
throw new NotImplementedException();
}
}
public void Put(string name, JArr v)
{
if (name != null)
{
Build(name);
}
else
{
throw new NotImplementedException();
}
}
public void Put(string name, IData v, byte proj = 0x0f)
{
if (name != null)
{
Build(name);
}
else
{
if (v == null)
{
Add("NULL");
}
}
}
public void Put<D>(string name, D[] v, byte proj = 0x0f) where D : IData
{
Build(name);
}
public void PutFrom(ISource s)
{
throw new NotImplementedException();
}
public override string ToString()
{
return new string(charbuf, 0, count);
}
}
}
| |
using System.Collections.Generic;
using Pathfinding.Serialization;
using UnityEngine;
namespace Pathfinding {
/** Node used for the GridGraph.
 * Connections to the 8 axis-aligned/diagonal neighbours are packed into bits 0..7 of #gridFlags;
 * all neighbour lookups go through the owning GridGraph's neighbourOffsets table.
 */
public class GridNode : GridNodeBase {
	public GridNode (AstarPath astar) : base(astar) {
	}
	// Global registry mapping graph index -> grid graph, so nodes can reach their graph cheaply.
	private static GridGraph[] _gridGraphs = new GridGraph[0];
	public static GridGraph GetGridGraph (uint graphIndex) { return _gridGraphs[(int)graphIndex]; }
	// Registers a graph at the given index, growing the registry array if needed.
	public static void SetGridGraph (int graphIndex, GridGraph graph) {
		if (_gridGraphs.Length <= graphIndex) {
			var gg = new GridGraph[graphIndex+1];
			for (int i = 0; i < _gridGraphs.Length; i++) gg[i] = _gridGraphs[i];
			_gridGraphs = gg;
		}
		_gridGraphs[graphIndex] = graph;
	}
	/** Internal use only */
	internal ushort InternalGridFlags {
		get { return gridFlags; }
		set { gridFlags = value; }
	}
	// Bit layout inside gridFlags: bits 0..7 = connection per direction, bit 10 = edge-node flag.
	const int GridFlagsConnectionOffset = 0;
	const int GridFlagsConnectionBit0 = 1 << GridFlagsConnectionOffset;
	const int GridFlagsConnectionMask = 0xFF << GridFlagsConnectionOffset;
	const int GridFlagsEdgeNodeOffset = 10;
	const int GridFlagsEdgeNodeMask = 1 << GridFlagsEdgeNodeOffset;
	public override bool HasConnectionsToAllEightNeighbours {
		get {
			// True only when every one of the 8 connection bits is set
			return (InternalGridFlags & GridFlagsConnectionMask) == GridFlagsConnectionMask;
		}
	}
	/** True if the node has a connection in the specified direction.
	 * The dir parameter corresponds to directions in the grid as:
	 * \code
	 *         Z
	 *         |
	 *         |
	 *
	 *      6  2  5
	 *       \ | /
	 * --  3 - X - 1  ----- X
	 *       / | \
	 *      7  0  4
	 *
	 *         |
	 *         |
	 * \endcode
	 *
	 * \see SetConnectionInternal
	 */
	public bool HasConnectionInDirection (int dir) {
		return (gridFlags >> dir & GridFlagsConnectionBit0) != 0;
	}
	/** True if the node has a connection in the specified direction.
	 * \deprecated Use HasConnectionInDirection
	 */
	[System.Obsolete("Use HasConnectionInDirection")]
	public bool GetConnectionInternal (int dir) {
		return HasConnectionInDirection(dir);
	}
	/** Enables or disables a connection in a specified direction on the graph.
	 * \see HasConnectionInDirection
	 */
	public void SetConnectionInternal (int dir, bool value) {
		// Set bit number #dir to 1 or 0 depending on #value
		unchecked { gridFlags = (ushort)(gridFlags & ~((ushort)1 << GridFlagsConnectionOffset << dir) | (value ? (ushort)1 : (ushort)0) << GridFlagsConnectionOffset << dir); }
	}
	/** Sets the state of all grid connections.
	 * \param connections a bitmask of the connections (bit 0 is the first connection, bit 1 the second connection, etc.).
	 *
	 * \see SetConnectionInternal
	 */
	public void SetAllConnectionInternal (int connections) {
		unchecked { gridFlags = (ushort)((gridFlags & ~GridFlagsConnectionMask) | (connections << GridFlagsConnectionOffset)); }
	}
	/** Disables all grid connections from this node.
	 * \note Other nodes might still be able to get to this node.
	 * Therefore it is recommended to also disable the relevant connections on adjacent nodes.
	 */
	public void ResetConnectionsInternal () {
		unchecked {
			gridFlags = (ushort)(gridFlags & ~GridFlagsConnectionMask);
		}
	}
	/** Work in progress for a feature that required info about which nodes were at the border of the graph.
	 * \note This property is not functional at the moment.
	 */
	public bool EdgeNode {
		get {
			return (gridFlags & GridFlagsEdgeNodeMask) != 0;
		}
		set {
			unchecked { gridFlags = (ushort)(gridFlags & ~GridFlagsEdgeNodeMask | (value ? GridFlagsEdgeNodeMask : 0)); }
		}
	}
	// Returns the neighbour node in the given direction, or null when no connection exists there.
	public override GridNodeBase GetNeighbourAlongDirection (int direction) {
		if (HasConnectionInDirection(direction)) {
			GridGraph gg = GetGridGraph(GraphIndex);
			return gg.nodes[NodeInGridIndex+gg.neighbourOffsets[direction]];
		}
		return null;
	}
	public override void ClearConnections (bool alsoReverse) {
		if (alsoReverse) {
			// Note: This assumes that all connections are bidirectional
			// which should hold for all grid graphs unless some custom code has been added
			for (int i = 0; i < 8; i++) {
				var other = GetNeighbourAlongDirection(i) as GridNode;
				if (other != null) {
					// Remove reverse connection. See doc for GridGraph.neighbourOffsets to see which indices are used for what.
					other.SetConnectionInternal(i < 4 ? ((i + 2) % 4) : (((i-2) % 4) + 4), false);
				}
			}
		}
		ResetConnectionsInternal();
#if !ASTAR_GRID_NO_CUSTOM_CONNECTIONS
		base.ClearConnections(alsoReverse);
#endif
	}
	// Invokes the action for every connected grid neighbour (and any custom connections via base).
	public override void GetConnections (System.Action<GraphNode> action) {
		GridGraph gg = GetGridGraph(GraphIndex);
		int[] neighbourOffsets = gg.neighbourOffsets;
		GridNode[] nodes = gg.nodes;
		for (int i = 0; i < 8; i++) {
			if (HasConnectionInDirection(i)) {
				GridNode other = nodes[NodeInGridIndex + neighbourOffsets[i]];
				if (other != null) action(other);
			}
		}
#if !ASTAR_GRID_NO_CUSTOM_CONNECTIONS
		base.GetConnections(action);
#endif
	}
	// Clamps p to this node's 1x1 cell in graph space and transforms the result back to world space.
	public Vector3 ClosestPointOnNode (Vector3 p) {
		var gg = GetGridGraph(GraphIndex);
		// Convert to graph space
		p = gg.transform.InverseTransform(p);
		// Nodes are offset 0.5 graph space nodes
		float xf = position.x-0.5F;
		float zf = position.z-0.5f;
		// Calculate graph position of this node
		int x = NodeInGridIndex % gg.width;
		int z = NodeInGridIndex / gg.width;
		// Handle the y coordinate separately
		float y = gg.transform.InverseTransform((Vector3)position).y;
		var closestInGraphSpace = new Vector3(Mathf.Clamp(xf, x-0.5f, x+0.5f)+0.5f, y, Mathf.Clamp(zf, z-0.5f, z+0.5f)+0.5f);
		// Convert to world space
		return gg.transform.Transform(closestInGraphSpace);
	}
	// Computes the funnel portal (left/right points) shared with an adjacent node.
	// Axis-aligned neighbours (dirs 0..3) get a half-node-width portal; diagonal neighbours (dirs 4..7)
	// get a wider portal only on the sides whose adjacent axis-aligned cells are walkable and connected.
	public override bool GetPortal (GraphNode other, List<Vector3> left, List<Vector3> right, bool backwards) {
		if (backwards) return true;
		GridGraph gg = GetGridGraph(GraphIndex);
		int[] neighbourOffsets = gg.neighbourOffsets;
		GridNode[] nodes = gg.nodes;
		for (int i = 0; i < 4; i++) {
			if (HasConnectionInDirection(i) && other == nodes[NodeInGridIndex + neighbourOffsets[i]]) {
				Vector3 middle = ((Vector3)(position + other.position))*0.5f;
				Vector3 cross = Vector3.Cross(gg.collision.up, (Vector3)(other.position-position));
				cross.Normalize();
				cross *= gg.nodeSize*0.5f;
				left.Add(middle - cross);
				right.Add(middle + cross);
				return true;
			}
		}
		for (int i = 4; i < 8; i++) {
			if (HasConnectionInDirection(i) && other == nodes[NodeInGridIndex + neighbourOffsets[i]]) {
				bool rClear = false;
				bool lClear = false;
				if (HasConnectionInDirection(i-4)) {
					GridNode n2 = nodes[NodeInGridIndex + neighbourOffsets[i-4]];
					if (n2.Walkable && n2.HasConnectionInDirection((i-4+1)%4)) {
						rClear = true;
					}
				}
				if (HasConnectionInDirection((i-4+1)%4)) {
					GridNode n2 = nodes[NodeInGridIndex + neighbourOffsets[(i-4+1)%4]];
					if (n2.Walkable && n2.HasConnectionInDirection(i-4)) {
						lClear = true;
					}
				}
				Vector3 middle = ((Vector3)(position + other.position))*0.5f;
				Vector3 cross = Vector3.Cross(gg.collision.up, (Vector3)(other.position-position));
				cross.Normalize();
				// 1.4142 ~ sqrt(2): diagonal neighbours are sqrt(2) node sizes apart
				cross *= gg.nodeSize*1.4142f;
				left.Add(middle - (lClear ? cross : Vector3.zero));
				right.Add(middle + (rClear ? cross : Vector3.zero));
				return true;
			}
		}
		return false;
	}
	// Marks this node's connected neighbours with the given region and pushes newly claimed ones for further filling.
	public override void FloodFill (Stack<GraphNode> stack, uint region) {
		GridGraph gg = GetGridGraph(GraphIndex);
		int[] neighbourOffsets = gg.neighbourOffsets;
		GridNode[] nodes = gg.nodes;
		var index = NodeInGridIndex;
		for (int i = 0; i < 8; i++) {
			if (HasConnectionInDirection(i)) {
				GridNode other = nodes[index + neighbourOffsets[i]];
				if (other != null && other.Area != region) {
					other.Area = region;
					stack.Push(other);
				}
			}
		}
#if !ASTAR_GRID_NO_CUSTOM_CONNECTIONS
		base.FloodFill(stack, region);
#endif
	}
	// Recalculates G for this node and recursively propagates the change to neighbours
	// that were reached through this node during the current search (same path ID, parent == this).
	public override void UpdateRecursiveG (Path path, PathNode pathNode, PathHandler handler) {
		GridGraph gg = GetGridGraph(GraphIndex);
		int[] neighbourOffsets = gg.neighbourOffsets;
		GridNode[] nodes = gg.nodes;
		UpdateG(path, pathNode);
		handler.heap.Add(pathNode);
		ushort pid = handler.PathID;
		var index = NodeInGridIndex;
		for (int i = 0; i < 8; i++) {
			if (HasConnectionInDirection(i)) {
				GridNode other = nodes[index + neighbourOffsets[i]];
				PathNode otherPN = handler.GetPathNode(other);
				if (otherPN.parent == pathNode && otherPN.pathID == pid) other.UpdateRecursiveG(path, otherPN, handler);
			}
		}
#if !ASTAR_GRID_NO_CUSTOM_CONNECTIONS
		base.UpdateRecursiveG(path, pathNode, handler);
#endif
	}
	// A* expansion step: visits each traversable grid neighbour and either opens it
	// (first visit this search) or relaxes the better of the two path directions.
	public override void Open (Path path, PathNode pathNode, PathHandler handler) {
		GridGraph gg = GetGridGraph(GraphIndex);
		ushort pid = handler.PathID;
		{
			int[] neighbourOffsets = gg.neighbourOffsets;
			uint[] neighbourCosts = gg.neighbourCosts;
			GridNode[] nodes = gg.nodes;
			var index = NodeInGridIndex;
			for (int i = 0; i < 8; i++) {
				if (HasConnectionInDirection(i)) {
					GridNode other = nodes[index + neighbourOffsets[i]];
					if (!path.CanTraverse(other)) continue;
					PathNode otherPN = handler.GetPathNode(other);
					uint tmpCost = neighbourCosts[i];
					if (otherPN.pathID != pid) {
						// First time this node is encountered during this search: open it
						otherPN.parent = pathNode;
						otherPN.pathID = pid;
						otherPN.cost = tmpCost;
						otherPN.H = path.CalculateHScore(other);
						other.UpdateG(path, otherPN);
						//Debug.Log ("G " + otherPN.G + " F " + otherPN.F);
						handler.heap.Add(otherPN);
						//Debug.DrawRay ((Vector3)otherPN.node.Position, Vector3.up,Color.blue);
					} else {
						// Sorry for the huge number of #ifs
						//If not we can test if the path from the current node to this one is a better one then the one already used
#if ASTAR_NO_TRAVERSAL_COST
						if (pathNode.G+tmpCost < otherPN.G)
#else
						if (pathNode.G+tmpCost+path.GetTraversalCost(other) < otherPN.G)
#endif
						{
							//Debug.Log ("Path better from " + NodeIndex + " to " + otherPN.node.NodeIndex + " " + (pathNode.G+tmpCost+path.GetTraversalCost(other)) + " < " + otherPN.G);
							otherPN.cost = tmpCost;
							otherPN.parent = pathNode;
							other.UpdateRecursiveG(path, otherPN, handler);
							//Or if the path from this node ("other") to the current ("current") is better
						}
#if ASTAR_NO_TRAVERSAL_COST
						else if (otherPN.G+tmpCost < pathNode.G)
#else
						else if (otherPN.G+tmpCost+path.GetTraversalCost(this) < pathNode.G)
#endif
						{
							//Debug.Log ("Path better from " + otherPN.node.NodeIndex + " to " + NodeIndex + " " + (otherPN.G+tmpCost+path.GetTraversalCost (this)) + " < " + pathNode.G);
							pathNode.parent = otherPN;
							pathNode.cost = tmpCost;
							UpdateRecursiveG(path, pathNode, handler);
						}
					}
				}
			}
		}
#if !ASTAR_GRID_NO_CUSTOM_CONNECTIONS
		base.Open(path, pathNode, handler);
#endif
	}
	// Serializes base-class state plus this node's position and packed grid flags.
	public override void SerializeNode (GraphSerializationContext ctx) {
		base.SerializeNode(ctx);
		ctx.SerializeInt3(position);
		ctx.writer.Write(gridFlags);
	}
	// Must read fields in the exact order SerializeNode wrote them.
	public override void DeserializeNode (GraphSerializationContext ctx) {
		base.DeserializeNode(ctx);
		position = ctx.DeserializeInt3();
		gridFlags = ctx.reader.ReadUInt16();
	}
}
}
| |
namespace tomenglertde.Wax.Model.Wix
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using tomenglertde.Wax.Model.Mapping;
using tomenglertde.Wax.Model.Tools;
using tomenglertde.Wax.Model.VisualStudio;
/// <summary>
/// Wraps a WiX setup project: loads the .wax configuration item, enumerates the
/// project's .wxs/.wxi source files, and maintains the file/directory-to-id mappings.
/// </summary>
public class WixProject : Project
{
    // File extensions recognized as WiX sources.
    private static readonly string[] _wixFileExtensions = { ".wxs", ".wxi" };
    // MSI defines these as public properties; generated ids must not collide with them.
    private static readonly string[] _wellKnownPublicMsiProperties = { "x86", "x64" };
    private const string WaxConfigurationFileExtension = ".wax";
    // Project item holding the serialized ProjectConfiguration (created on demand).
    private readonly EnvDTE.ProjectItem _configurationFileProjectItem;
    private readonly ProjectConfiguration _configuration;
    private readonly IList<WixSourceFile> _sourceFiles;
    public WixProject(Solution solution, EnvDTE.Project project)
        : base(solution, project)
    {
        _configurationFileProjectItem = GetConfigurationFileProjectItem();
        var configurationText = _configurationFileProjectItem.GetContent();
        _configuration = configurationText.Deserialize<ProjectConfiguration>();
        Regex? excludedItemsFilter = null;
        try
        {
            if (!string.IsNullOrEmpty(_configuration.ExcludedProjectItems))
            {
                excludedItemsFilter = new Regex(_configuration.ExcludedProjectItems);
            }
        }
        catch
        {
            // filter is corrupt, go with no filter.
        }
        // Collect all .wxs/.wxi items not excluded by the user filter; .wxs files sort first
        // (descending extension order) so _sourceFiles.First() prefers a .wxs file.
        _sourceFiles = GetAllProjectItems()
            .Where(item => _wixFileExtensions.Contains(Path.GetExtension(item.Name) ?? string.Empty, StringComparer.OrdinalIgnoreCase))
            .Where(item => excludedItemsFilter == null || !excludedItemsFilter.IsMatch(item.Name))
            .OrderByDescending(item => Path.GetExtension(item.Name), StringComparer.OrdinalIgnoreCase)
            .Select(item => new WixSourceFile(this, item))
            .ToList().AsReadOnly();
    }
    public IEnumerable<WixSourceFile> SourceFiles => _sourceFiles;
    public IEnumerable<WixFileNode> FileNodes => _sourceFiles.SelectMany(sourceFile => sourceFile.FileNodes);
    public IEnumerable<WixDirectoryNode> DirectoryNodes => _sourceFiles.SelectMany(sourceFile => sourceFile.DirectoryNodes);
    public IEnumerable<WixFeatureNode> FeatureNodes => _sourceFiles.SelectMany(sourceFile => sourceFile.FeatureNodes);
    public IEnumerable<WixComponentNode> ComponentNodes => _sourceFiles.SelectMany(sourceFile => sourceFile.ComponentNodes);
    public IEnumerable<WixComponentGroupNode> ComponentGroupNodes => _sourceFiles.SelectMany(sourceFile => sourceFile.ComponentGroupNodes);
    /// <summary>
    /// Gets or sets the solution projects whose output is deployed by this setup;
    /// setting updates project references and persists the configuration.
    /// </summary>
    public IEnumerable<Project> DeployedProjects
    {
        get
        {
            return Solution.Projects.Where(project => _configuration.DeployedProjectNames.Contains(project.UniqueName, StringComparer.OrdinalIgnoreCase));
        }
        set
        {
            var projects = value.ToList().AsReadOnly();
            var removedProjects = DeployedProjects.Except(projects).ToList().AsReadOnly();
            _configuration.DeployedProjectNames = projects.Select(project => project.UniqueName).ToArray();
            RemoveProjectReferences(removedProjects);
            AddProjectReferences(projects);
            SaveProjectConfiguration();
        }
    }
    public bool DeploySymbols
    {
        get => _configuration.DeploySymbols;
        set => _configuration.DeploySymbols = value;
    }
    public bool DeployLocalizations
    {
        get => _configuration.DeployLocalizations;
        set => _configuration.DeployLocalizations = value;
    }
    public bool DeployExternalLocalizations
    {
        get => _configuration.DeployExternalLocalizations;
        set => _configuration.DeployExternalLocalizations = value;
    }
    // NOTE(review): non-short-circuit '|' evaluates both sides; '||' would suffice here — confirm not intentional.
    public bool HasChanges => HasConfigurationChanges | HasSourceFileChanges;
    public bool IsBootstrapper => WixExtensionReferences.Contains("WixBalExtension");
    // Returns the mapped id for a directory, falling back to the id derived from its path.
    public string GetDirectoryId(string directory)
    {
        return (_configuration.DirectoryMappings.TryGetValue(directory, out var value) && (value != null)) ? value : GetDefaultId(directory);
    }
    public void UnmapDirectory(string directory)
    {
        _configuration.DirectoryMappings.Remove(directory);
        SaveProjectConfiguration();
    }
    public void MapDirectory(string directory, WixDirectoryNode node)
    {
        MapElement(directory, node, _configuration.DirectoryMappings);
    }
    /// <summary>
    /// Adds a Directory node for the given path, recursively creating missing parents.
    /// When no parent exists at all, a placeholder parent id ("TODO:...") is emitted
    /// for the user to resolve manually.
    /// </summary>
    public WixDirectoryNode AddDirectoryNode(string directory)
    {
        var name = Path.GetFileName(directory);
        var id = GetDirectoryId(directory);
        var parentDirectoryName = Path.GetDirectoryName(directory);
        var parentId = string.IsNullOrEmpty(directory) ? string.Empty : GetDirectoryId(parentDirectoryName);
        var parent = DirectoryNodes.FirstOrDefault(node => node.Id.Equals(parentId));
        if (parent == null)
        {
            if (!string.IsNullOrEmpty(parentId))
            {
                // parent not declared yet — create it (recursion terminates at the path root)
                parent = AddDirectoryNode(parentDirectoryName);
            }
            else
            {
                parentId = "TODO:" + Guid.NewGuid();
                var sourceFile = _sourceFiles.First();
                return sourceFile.AddDirectory(id, name, parentId);
            }
        }
        return parent.AddSubDirectory(id, name);
    }
    public bool HasDefaultDirectoryId(DirectoryMapping directoryMapping)
    {
        var directory = directoryMapping.Directory;
        var id = GetDirectoryId(directory);
        var defaultId = GetDefaultId(directory);
        return id == defaultId;
    }
    // Returns the mapped id for a file, falling back to the id derived from its path.
    public string GetFileId(string filePath)
    {
        return (_configuration.FileMappings.TryGetValue(filePath, out var value) && value != null) ? value : GetDefaultId(filePath);
    }
    public void UnmapFile(string filePath)
    {
        _configuration.FileMappings.Remove(filePath);
        SaveProjectConfiguration();
    }
    public void MapFile(string filePath, WixFileNode node)
    {
        MapElement(filePath, node, _configuration.FileMappings);
    }
    /// <summary>
    /// Adds a file component for the mapping into the component group of its target
    /// directory, ensuring the group is referenced by a feature. Returns null when
    /// no component group could be created (no source files).
    /// </summary>
    public WixFileNode? AddFileNode(FileMapping fileMapping)
    {
        var targetName = fileMapping.TargetName;
        var name = Path.GetFileName(targetName);
        var id = GetFileId(targetName);
        var directoryName = Path.GetDirectoryName(targetName);
        var directoryId = GetDirectoryId(directoryName);
        var directory = DirectoryNodes.FirstOrDefault(node => node.Id.Equals(directoryId, StringComparison.OrdinalIgnoreCase));
        directoryId = directory?.Id ?? "TODO: unknown directory " + directoryName;
        var componentGroup = ForceComponentGroup(directoryId);
        if (componentGroup == null)
            return null;
        ForceFeatureRef(componentGroup.Id);
        return componentGroup.AddFileComponent(id, name, fileMapping);
    }
    // Derives a valid WiX identifier from a path: invalid chars become '_', and a leading
    // '_' is prepended when the id would start with a digit or clash with a public MSI property.
    private static string GetDefaultId(string path)
    {
        if (path.Length == 0)
            return "_";
        var s = new StringBuilder(path);
        for (var i = 0; i < s.Length; i++)
        {
            if (!IsValidForId(s[i]))
            {
                s[i] = '_';
            }
        }
        if (char.IsDigit(s[0]))
        {
            s.Insert(0, '_');
        }
        if (_wellKnownPublicMsiProperties.Contains(s.ToString(), StringComparer.OrdinalIgnoreCase))
        {
            s.Insert(0, '_');
        }
        return s.ToString();
    }
    // Finds the component group for a directory or creates one in the first source file.
    private WixComponentGroupNode? ForceComponentGroup(string directoryId)
    {
        return ComponentGroupNodes.FirstOrDefault(group => group.Directory == directoryId) ?? _sourceFiles.FirstOrDefault()?.AddComponentGroup(directoryId);
    }
    // Ensures some feature references the component group; adds the ref to the first feature if none does.
    private void ForceFeatureRef(string componentGroupId)
    {
        if (FeatureNodes.Any(feature => feature.ComponentGroupRefs.Contains(componentGroupId)))
            return;
        var firstFeature = FeatureNodes.FirstOrDefault();
        if (firstFeature == null)
            return;
        firstFeature.AddComponentGroupRef(componentGroupId);
        firstFeature.SourceFile.Save();
    }
    public bool HasDefaultFileId(FileMapping fileMapping)
    {
        var filePath = fileMapping.TargetName;
        var id = GetFileId(filePath);
        var defaultId = GetDefaultId(filePath);
        return id == defaultId;
    }
    // Records a custom id mapping, or removes the mapping when the node uses the default id.
    private void MapElement(string path, WixNode node, IDictionary<string, string> mappings)
    {
        if (node.Id.Equals(GetDefaultId(path)))
            mappings.Remove(path);
        else
            mappings[path] = node.Id;
        SaveProjectConfiguration();
    }
    // Valid id characters: ASCII letters/digits (<= 'z'), '_' and '.'.
    private static bool IsValidForId(char value)
    {
        return (value <= 'z') && (char.IsLetterOrDigit(value) || (value == '_') || (value == '.'));
    }
    private bool HasConfigurationChanges => (_configuration.Serialize() != _configurationFileProjectItem.GetContent());
    private bool HasSourceFileChanges => _sourceFiles.Any(sourceFile => sourceFile.HasChanges);
    // Writes the configuration back to the .wax item only when its text actually changed.
    private void SaveProjectConfiguration()
    {
        var configurationText = _configuration.Serialize();
        if (configurationText != _configurationFileProjectItem.GetContent())
            _configurationFileProjectItem.SetContent(configurationText);
    }
    // Finds the .wax project item, creating the file (with a default configuration) and adding it when missing.
    private EnvDTE.ProjectItem GetConfigurationFileProjectItem()
    {
        var configurationFileProjectItem = GetAllProjectItems().FirstOrDefault(item => WaxConfigurationFileExtension.Equals(Path.GetExtension(item.Name), StringComparison.OrdinalIgnoreCase));
        if (configurationFileProjectItem != null)
            return configurationFileProjectItem;
        var configurationFileName = Path.ChangeExtension(FullName, WaxConfigurationFileExtension);
        if (!File.Exists(configurationFileName))
            File.WriteAllText(configurationFileName, new ProjectConfiguration().Serialize());
        return AddItemFromFile(configurationFileName);
    }
}
}
| |
using Lucene.Net.Documents;
using Lucene.Net.Support;
using NUnit.Framework;
namespace Lucene.Net.Search
{
using Directory = Lucene.Net.Store.Directory;
using DirectoryReader = Lucene.Net.Index.DirectoryReader;
using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
using Document = Documents.Document;
using English = Lucene.Net.Util.English;
using Field = Field;
using Fields = Lucene.Net.Index.Fields;
using FieldType = FieldType;
using IndexReader = Lucene.Net.Index.IndexReader;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using IOUtils = Lucene.Net.Util.IOUtils;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
using OpenMode = Lucene.Net.Index.OpenMode;
using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
using Term = Lucene.Net.Index.Term;
using Terms = Lucene.Net.Index.Terms;
using TermsEnum = Lucene.Net.Index.TermsEnum;
using TextField = TextField;
// Tests term-vector storage/retrieval: a shared 1000-doc index mixing all four
// term-vector flavors (none/positions/offsets/both), plus force-merge scenarios.
public class TestTermVectors : LuceneTestCase
{
    // Shared across tests; built in BeforeClass, disposed in AfterClass.
    private static IndexReader Reader;
    private static Directory Directory;
    /// <summary>
    /// LUCENENET specific
    /// Is non-static because NewIndexWriterConfig is no longer static.
    /// </summary>
    [OneTimeSetUp]
    public override void BeforeClass()
    {
        base.BeforeClass();
        Directory = NewDirectory();
        RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true)).SetMergePolicy(NewLogMergePolicy()));
        //writer.setNoCFSRatio(1.0);
        //writer.infoStream = System.out;
        for (int i = 0; i < 1000; i++)
        {
            Document doc = new Document();
            FieldType ft = new FieldType(TextField.TYPE_STORED);
            // i mod 6 selects the term-vector flavor: both offsets+positions, positions only,
            // offsets only, or plain term vectors
            int mod3 = i % 3;
            int mod2 = i % 2;
            if (mod2 == 0 && mod3 == 0)
            {
                ft.StoreTermVectors = true;
                ft.StoreTermVectorOffsets = true;
                ft.StoreTermVectorPositions = true;
            }
            else if (mod2 == 0)
            {
                ft.StoreTermVectors = true;
                ft.StoreTermVectorPositions = true;
            }
            else if (mod3 == 0)
            {
                ft.StoreTermVectors = true;
                ft.StoreTermVectorOffsets = true;
            }
            else
            {
                ft.StoreTermVectors = true;
            }
            doc.Add(new Field("field", English.IntToEnglish(i), ft));
            //test no term vectors too
            doc.Add(new TextField("noTV", English.IntToEnglish(i), Field.Store.YES));
            writer.AddDocument(doc);
        }
        Reader = writer.Reader;
        writer.Dispose();
    }
    [OneTimeTearDown]
    public override void AfterClass()
    {
        // Release the shared fixtures and null them so a failed setup is detectable.
        Reader.Dispose();
        Directory.Dispose();
        Reader = null;
        Directory = null;
        base.AfterClass();
    }
    // In a single doc, for the same field, mix the term
    // vectors up
    // ("Vectros" typo is the upstream Lucene test name, kept for parity.)
    [Test]
    public virtual void TestMixedVectrosVectors()
    {
        RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true)).SetOpenMode(OpenMode.CREATE));
        Document doc = new Document();
        // Five instances of the same field value, each with different term-vector options
        FieldType ft2 = new FieldType(TextField.TYPE_STORED);
        ft2.StoreTermVectors = true;
        FieldType ft3 = new FieldType(TextField.TYPE_STORED);
        ft3.StoreTermVectors = true;
        ft3.StoreTermVectorPositions = true;
        FieldType ft4 = new FieldType(TextField.TYPE_STORED);
        ft4.StoreTermVectors = true;
        ft4.StoreTermVectorOffsets = true;
        FieldType ft5 = new FieldType(TextField.TYPE_STORED);
        ft5.StoreTermVectors = true;
        ft5.StoreTermVectorOffsets = true;
        ft5.StoreTermVectorPositions = true;
        doc.Add(NewTextField("field", "one", Field.Store.YES));
        doc.Add(NewField("field", "one", ft2));
        doc.Add(NewField("field", "one", ft3));
        doc.Add(NewField("field", "one", ft4));
        doc.Add(NewField("field", "one", ft5));
        writer.AddDocument(doc);
        IndexReader reader = writer.Reader;
        writer.Dispose();
        IndexSearcher searcher = NewSearcher(reader);
        Query query = new TermQuery(new Term("field", "one"));
        ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
        Assert.AreEqual(1, hits.Length);
        // The five field instances collapse into one vector with term freq 5
        Fields vectors = searcher.IndexReader.GetTermVectors(hits[0].Doc);
        Assert.IsNotNull(vectors);
        Assert.AreEqual(1, vectors.Count);
        Terms vector = vectors.GetTerms("field");
        Assert.IsNotNull(vector);
        Assert.AreEqual(1, vector.Count);
        TermsEnum termsEnum = vector.GetIterator(null);
        Assert.IsNotNull(termsEnum.Next());
        Assert.AreEqual("one", termsEnum.Term.Utf8ToString());
        Assert.AreEqual(5, termsEnum.TotalTermFreq);
        DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
        Assert.IsNotNull(dpEnum);
        Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
        Assert.AreEqual(5, dpEnum.Freq);
        for (int i = 0; i < 5; i++)
        {
            Assert.AreEqual(i, dpEnum.NextPosition());
        }
        // Offsets: "one" occupies 4-char slots ("one " repeated) -> [4i, 4i+3)
        dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
        Assert.IsNotNull(dpEnum);
        Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
        Assert.AreEqual(5, dpEnum.Freq);
        for (int i = 0; i < 5; i++)
        {
            dpEnum.NextPosition();
            Assert.AreEqual(4 * i, dpEnum.StartOffset);
            Assert.AreEqual(4 * i + 3, dpEnum.EndOffset);
        }
        reader.Dispose();
    }
    // Writer with maxBufferedDocs=2 so multiple added docs produce multiple segments.
    private IndexWriter CreateWriter(Directory dir)
    {
        return new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
    }
    // Populates dir with a single one-document index.
    private void CreateDir(Directory dir)
    {
        IndexWriter writer = CreateWriter(dir);
        writer.AddDocument(CreateDoc());
        writer.Dispose();
    }
    // A document with one field ("c"="aaa") storing full term vectors (offsets + positions).
    private Document CreateDoc()
    {
        Document doc = new Document();
        FieldType ft = new FieldType(TextField.TYPE_STORED);
        ft.StoreTermVectors = true;
        ft.StoreTermVectorOffsets = true;
        ft.StoreTermVectorPositions = true;
        doc.Add(NewField("c", "aaa", ft));
        return doc;
    }
    // Asserts every document in dir still has term vectors for field "c" after merging.
    private void VerifyIndex(Directory dir)
    {
        IndexReader r = DirectoryReader.Open(dir);
        int numDocs = r.NumDocs;
        for (int i = 0; i < numDocs; i++)
        {
            Assert.IsNotNull(r.GetTermVectors(i).GetTerms("c"), "term vectors should not have been null for document " + i);
        }
        r.Dispose();
    }
    [Test]
    public virtual void TestFullMergeAddDocs()
    {
        Directory target = NewDirectory();
        IndexWriter writer = CreateWriter(target);
        // with maxBufferedDocs=2, this results in two segments, so that forceMerge
        // actually does something.
        for (int i = 0; i < 4; i++)
        {
            writer.AddDocument(CreateDoc());
        }
        writer.ForceMerge(1);
        writer.Dispose();
        VerifyIndex(target);
        target.Dispose();
    }
    // Term vectors must survive AddIndexes(Directory[]) followed by a full merge.
    [Test]
    public virtual void TestFullMergeAddIndexesDir()
    {
        Directory[] input = new Directory[] { NewDirectory(), NewDirectory() };
        Directory target = NewDirectory();
        foreach (Directory dir in input)
        {
            CreateDir(dir);
        }
        IndexWriter writer = CreateWriter(target);
        writer.AddIndexes(input);
        writer.ForceMerge(1);
        writer.Dispose();
        VerifyIndex(target);
        IOUtils.Dispose(target, input[0], input[1]);
    }
    // Term vectors must survive AddIndexes(IndexReader) followed by a full merge.
    [Test]
    public virtual void TestFullMergeAddIndexesReader()
    {
        Directory[] input = new Directory[] { NewDirectory(), NewDirectory() };
        Directory target = NewDirectory();
        foreach (Directory dir in input)
        {
            CreateDir(dir);
        }
        IndexWriter writer = CreateWriter(target);
        foreach (Directory dir in input)
        {
            IndexReader r = DirectoryReader.Open(dir);
            writer.AddIndexes(r);
            r.Dispose();
        }
        writer.ForceMerge(1);
        writer.Dispose();
        VerifyIndex(target);
        IOUtils.Dispose(target, input[0], input[1]);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Orleans.GrainDirectory;
using Orleans.Providers;
using Orleans.Runtime.Configuration;
using Orleans.Runtime.GrainDirectory;
using Orleans.Runtime.Placement;
using Orleans.Runtime.Scheduler;
using Orleans.Storage;
namespace Orleans.Runtime
{
internal class Catalog : SystemTarget, ICatalog, IPlacementContext, ISiloStatusListener
{
/// <summary>
/// Exception to indicate that the activation would have been a duplicate so messages pending for it should be redirected.
/// </summary>
[Serializable]
internal class DuplicateActivationException : Exception
{
    /// <summary>The winning activation that pending messages should be redirected to.</summary>
    public ActivationAddress ActivationToUse { get; private set; }
    public SiloAddress PrimaryDirectoryForGrain { get; private set; } // for diagnostics only!
    public DuplicateActivationException() : base("DuplicateActivationException") { }
    public DuplicateActivationException(string msg) : base(msg) { }
    public DuplicateActivationException(string message, Exception innerException) : base(message, innerException) { }
    public DuplicateActivationException(ActivationAddress activationToUse)
        : base("DuplicateActivationException")
    {
        ActivationToUse = activationToUse;
    }
    public DuplicateActivationException(ActivationAddress activationToUse, SiloAddress primaryDirectoryForGrain)
        : base("DuplicateActivationException")
    {
        ActivationToUse = activationToUse;
        PrimaryDirectoryForGrain = primaryDirectoryForGrain;
    }
    // Implementation of exception serialization with custom properties according to:
    // http://stackoverflow.com/questions/94488/what-is-the-correct-way-to-make-a-custom-net-exception-serializable
    protected DuplicateActivationException(SerializationInfo info, StreamingContext context)
        : base(info, context)
    {
        if (info != null)
        {
            // Keys must match those written by GetObjectData below.
            ActivationToUse = (ActivationAddress) info.GetValue("ActivationToUse", typeof (ActivationAddress));
            PrimaryDirectoryForGrain = (SiloAddress) info.GetValue("PrimaryDirectoryForGrain", typeof (SiloAddress));
        }
    }
    public override void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        if (info != null)
        {
            info.AddValue("ActivationToUse", ActivationToUse, typeof (ActivationAddress));
            info.AddValue("PrimaryDirectoryForGrain", PrimaryDirectoryForGrain, typeof (SiloAddress));
        }
        // MUST call through to the base class to let it save its own state
        base.GetObjectData(info, context);
    }
}
/// <summary>
/// Thrown when a message targets an activation that does not exist on this silo
/// and no new placement was started for it.
/// </summary>
[Serializable]
internal class NonExistentActivationException : Exception
{
    /// <summary>Address of the activation that was not found.</summary>
    public ActivationAddress NonExistentActivation { get; private set; }

    /// <summary>True when the target grain uses stateless-worker placement.</summary>
    public bool IsStatelessWorker { get; private set; }

    public NonExistentActivationException()
        : base("NonExistentActivationException")
    {
    }

    public NonExistentActivationException(string msg)
        : base(msg)
    {
    }

    public NonExistentActivationException(string message, Exception innerException)
        : base(message, innerException)
    {
    }

    public NonExistentActivationException(string msg, ActivationAddress nonExistentActivation, bool isStatelessWorker)
        : base(msg)
    {
        NonExistentActivation = nonExistentActivation;
        IsStatelessWorker = isStatelessWorker;
    }

    protected NonExistentActivationException(SerializationInfo info, StreamingContext context)
        : base(info, context)
    {
        if (info != null)
        {
            NonExistentActivation = (ActivationAddress)info.GetValue("NonExistentActivation", typeof(ActivationAddress));
            IsStatelessWorker = (bool)info.GetValue("IsStatelessWorker", typeof(bool));
        }
    }

    public override void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        if (info != null)
        {
            info.AddValue("NonExistentActivation", NonExistentActivation, typeof(ActivationAddress));
            info.AddValue("IsStatelessWorker", IsStatelessWorker, typeof(bool));
        }

        // MUST call through to the base class to let it save its own state.
        base.GetObjectData(info, context);
    }
}
// Registry of known grain types and their placement/registration metadata.
public GrainTypeManager GrainTypeManager { get; private set; }
// Address of the silo this catalog runs on.
public SiloAddress LocalSilo { get; private set; }
// Injected after construction; consulted before creating new activations (e.g. when terminating).
internal ISiloStatusOracle SiloStatusOracle { get; set; }
// Tracks activation idleness for age-based collection.
internal readonly ActivationCollector ActivationCollector;
// Cluster-wide grain directory used to register/unregister activations.
private readonly ILocalGrainDirectory directory;
private readonly OrleansTaskScheduler scheduler;
// Local table of live activations; used as the lock object guarding activation lookup/registration.
private readonly ActivationDirectory activations;
// Set later via SetStorageManager; may be null until then.
private IStorageProviderManager storageProviderManager;
// Set later via the out-callback handed back from the constructor.
private Dispatcher dispatcher;
private readonly Logger logger;
// Monotonic id for collection passes (see CollectActivationsImpl).
private int collectionNumber;
// Monotonic id for destroy batches (see StartDestroyActivations).
private int destroyActivationsNumber;
// Periodic timer driving stale-activation collection; replaced on (re)Start.
private IDisposable gcTimer;
private readonly GlobalConfiguration config;
private readonly string localSiloName;
// Statistics counters/gauges registered in the constructor.
private readonly CounterStatistic activationsCreated;
private readonly CounterStatistic activationsDestroyed;
private readonly CounterStatistic activationsFailedToActivate;
private readonly IntValueStatistic inProcessRequests;
private readonly CounterStatistic collectionCounter;
// Factory for grain object instances (stateless and stateful).
private readonly GrainCreator grainCreator;
// Constructs the catalog system target for this silo, wiring the grain directory, scheduler,
// activation directory and statistics counters.
// 'setDispatcher' is an out-callback: the Dispatcher is created after the Catalog, so it is
// injected later through this setter to break the construction-order cycle.
internal Catalog(
GrainId grainId,
SiloAddress silo,
string siloName,
ILocalGrainDirectory grainDirectory,
GrainTypeManager typeManager,
OrleansTaskScheduler scheduler,
ActivationDirectory activationDirectory,
ClusterConfiguration config,
GrainCreator grainCreator,
out Action<Dispatcher> setDispatcher)
: base(grainId, silo)
{
LocalSilo = silo;
localSiloName = siloName;
directory = grainDirectory;
activations = activationDirectory;
this.scheduler = scheduler;
GrainTypeManager = typeManager;
collectionNumber = 0;
destroyActivationsNumber = 0;
this.grainCreator = grainCreator;
logger = LogManager.GetLogger("Catalog", Runtime.LoggerType.Runtime);
this.config = config.Globals;
setDispatcher = d => dispatcher = d;
ActivationCollector = new ActivationCollector(config);
GC.GetTotalMemory(true); // need to call once w/true to ensure false returns OK value
// Restart the collection timer whenever the activation configuration section changes.
config.OnConfigChange("Globals/Activation", () => scheduler.RunOrQueueAction(Start, SchedulingContext), false);
// Register catalog-level statistics. The in-process-requests gauge sums queued request
// counts across all activations, under the same lock used by activation lookup.
IntValueStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_COUNT, () => activations.Count);
activationsCreated = CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_CREATED);
activationsDestroyed = CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_DESTROYED);
activationsFailedToActivate = CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_FAILED_TO_ACTIVATE);
collectionCounter = CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_COLLECTION_NUMBER_OF_COLLECTIONS);
inProcessRequests = IntValueStatistic.FindOrCreate(StatisticNames.MESSAGING_PROCESSING_ACTIVATION_DATA_ALL, () =>
{
long counter = 0;
lock (activations)
{
foreach (var activation in activations)
{
ActivationData data = activation.Value;
counter += data.GetRequestCount();
}
}
return counter;
});
}
// Injects the storage provider manager used to load/persist grain state.
// Called after construction; stateful activations cannot be created until this is set.
internal void SetStorageManager(IStorageProviderManager storageManager)
{
storageProviderManager = storageManager;
}
/// <summary>
/// Starts (or restarts) the periodic activation-collection timer.
/// Any previously running timer is disposed first.
/// </summary>
internal void Start()
{
    gcTimer?.Dispose();

    var timer = GrainTimer.FromTaskCallback(OnTimer, null, TimeSpan.Zero, ActivationCollector.Quantum, "Catalog.GCTimer");
    timer.Start();
    gcTimer = timer;
}
// Timer callback: run a stale-scan collection pass.
private Task OnTimer(object _) => CollectActivationsImpl(true);
/// <summary>Collects all activations older than the given age limit (full scan, not just stale).</summary>
public Task CollectActivations(TimeSpan ageLimit) => CollectActivationsImpl(false, ageLimit);
// Runs one collection pass: scans for collectible activations (stale-only when scanStale,
// otherwise everything older than ageLimit) and deactivates them, logging memory and
// activation counts before and after.
private async Task CollectActivationsImpl(bool scanStale, TimeSpan ageLimit = default(TimeSpan))
{
var watch = new Stopwatch();
watch.Start();
var number = Interlocked.Increment(ref collectionNumber);
long memBefore = GC.GetTotalMemory(false) / (1024 * 1024);
logger.Info(ErrorCode.Catalog_BeforeCollection, "Before collection#{0}: memory={1}MB, #activations={2}, collector={3}.",
number, memBefore, activations.Count, ActivationCollector.ToString());
List<ActivationData> list = scanStale ? ActivationCollector.ScanStale() : ActivationCollector.ScanAll(ageLimit);
collectionCounter.Increment();
var count = 0;
if (list != null && list.Count > 0)
{
count = list.Count;
if (logger.IsVerbose) logger.Verbose("CollectActivations{0}", list.ToStrings(d => d.Grain.ToString() + d.ActivationId));
// Deactivation is awaited so the after-collection log reflects the completed pass.
await DeactivateActivationsFromCollector(list);
}
long memAfter = GC.GetTotalMemory(false) / (1024 * 1024);
watch.Stop();
logger.Info(ErrorCode.Catalog_AfterCollection, "After collection#{0}: memory={1}MB, #activations={2}, collected {3} activations, collector={4}, collection time={5}.",
number, memAfter, activations.Count, count, ActivationCollector.ToString(), watch.Elapsed);
}
/// <summary>
/// Returns, for every live grain on this silo, a (grain id, grain type name, activation count) tuple.
/// </summary>
public List<Tuple<GrainId, string, int>> GetGrainStatistics()
{
    var countsByType = new Dictionary<string, Dictionary<GrainId, int>>();
    lock (activations)
    {
        foreach (var entry in activations)
        {
            ActivationData data = entry.Value;
            if (data == null || data.GrainInstance == null) continue;

            // TODO: generic type expansion
            var typeName = TypeUtils.GetFullName(data.GrainInstanceType);

            Dictionary<GrainId, int> perGrain;
            if (!countsByType.TryGetValue(typeName, out perGrain))
            {
                perGrain = new Dictionary<GrainId, int>();
                countsByType.Add(typeName, perGrain);
            }

            // Missing key leaves 'current' at 0, so this handles first-seen and repeat cases uniformly.
            int current;
            perGrain.TryGetValue(data.Grain, out current);
            perGrain[data.Grain] = current + 1;
        }
    }

    var result = new List<Tuple<GrainId, string, int>>();
    foreach (var typePair in countsByType)
    {
        foreach (var grainPair in typePair.Value)
        {
            result.Add(Tuple.Create(grainPair.Key, typePair.Key, grainPair.Value));
        }
    }
    return result;
}
/// <summary>
/// Returns per-activation detail records for live grains, optionally filtered to the
/// given full type names. A null filter returns all grain types.
/// </summary>
public List<DetailedGrainStatistic> GetDetailedGrainStatistics(string[] types = null)
{
    var result = new List<DetailedGrainStatistic>();
    lock (activations)
    {
        foreach (var entry in activations)
        {
            ActivationData data = entry.Value;
            if (data == null || data.GrainInstance == null) continue;
            if (types != null && !types.Contains(TypeUtils.GetFullName(data.GrainInstanceType))) continue;

            result.Add(new DetailedGrainStatistic
            {
                GrainType = TypeUtils.GetFullName(data.GrainInstanceType),
                GrainIdentity = data.Grain,
                SiloAddress = data.Silo,
                Category = data.Grain.Category.ToString()
            });
        }
    }
    return result;
}
/// <summary>Returns per-grain-type activation counts from the activation directory.</summary>
public IEnumerable<KeyValuePair<string, long>> GetSimpleGrainStatistics() => activations.GetSimpleGrainStatistics();
/// <summary>
/// Builds a diagnostic report for a single grain: directory data, resolved class name,
/// and detailed strings for every local activation.
/// </summary>
public DetailedGrainReport GetDetailedGrainReport(GrainId grain)
{
    var report = new DetailedGrainReport
    {
        Grain = grain,
        SiloAddress = LocalSilo,
        SiloName = localSiloName,
        LocalCacheActivationAddresses = directory.GetLocalCacheData(grain),
        LocalDirectoryActivationAddresses = directory.GetLocalDirectoryData(grain).Addresses,
        PrimaryForGrain = directory.GetPrimaryForGrain(grain)
    };

    // Resolve the grain class from the type code; surface any failure inside the report itself.
    try
    {
        string grainClassName;
        PlacementStrategy unusedPlacement;
        MultiClusterRegistrationStrategy unusedRegistration;
        GrainTypeManager.GetTypeInfo(grain.GetTypeCode(), out grainClassName, out unusedPlacement, out unusedRegistration);
        report.GrainClassTypeName = grainClassName;
    }
    catch (Exception exc)
    {
        report.GrainClassTypeName = exc.ToString();
    }

    List<ActivationData> localActivations = activations.FindTargets(grain);
    if (localActivations != null)
    {
        report.LocalActivations = localActivations.Select(a => a.ToDetailedString()).ToList();
    }
    else
    {
        report.LocalActivations = new List<string>();
    }
    return report;
}
#region MessageTargets
/// <summary>
/// Register a new object to which messages can be delivered with the local lookup table and scheduler.
/// </summary>
/// <param name="activation">Activation that should start receiving messages.</param>
public void RegisterMessageTarget(ActivationData activation)
{
    scheduler.RegisterWorkContext(new SchedulingContext(activation));
    activations.RecordNewTarget(activation);
    activationsCreated.Increment();
}
/// <summary>
/// Unregister message target and stop delivering messages to it.
/// Removes the activation from the directory, cancels pending collection, unregisters its
/// scheduling context, and releases the grain object.
/// </summary>
/// <param name="activation">Activation to unregister.</param>
public void UnregisterMessageTarget(ActivationData activation)
{
activations.RemoveTarget(activation);
// this should be removed once we've refactored the deactivation code path. For now safe to keep.
ActivationCollector.TryCancelCollection(activation);
activationsDestroyed.Increment();
scheduler.UnregisterWorkContext(new SchedulingContext(activation));
// An activation that never got a grain instance has nothing further to clean up.
if (activation.GrainInstance == null) return;
var grainTypeName = TypeUtils.GetFullName(activation.GrainInstanceType);
activations.DecrementGrainCounter(grainTypeName);
// Drop the grain object reference so it can be garbage collected.
activation.SetGrainInstance(null);
}
/// <summary>
/// FOR TESTING PURPOSES ONLY!!
/// Unregisters every local activation of the given grain.
/// </summary>
/// <param name="grain">Grain whose local activations should be removed.</param>
/// <returns>The number of activations that were unregistered.</returns>
internal int UnregisterGrainForTesting(GrainId grain)
{
    var targets = activations.FindTargets(grain);
    if (targets == null) return 0;

    int count = targets.Count;
    foreach (var target in targets)
    {
        UnregisterMessageTarget(target);
    }
    return count;
}
#endregion
#region Grains
/// <summary>
/// True when the given activation exists, has a grain instance, and its grain type
/// is registered as reentrant.
/// </summary>
internal bool IsReentrantGrain(ActivationId running)
{
    ActivationData target;
    if (!TryGetActivationData(running, out target) || target.GrainInstance == null)
    {
        return false;
    }

    GrainTypeData typeData;
    return GrainTypeManager.TryGetData(TypeUtils.GetFullName(target.GrainInstanceType), out typeData)
        && typeData.IsReentrant;
}
// Passthrough to GrainTypeManager: resolves class name, placement strategy and
// multi-cluster registration strategy for a grain type code.
public void GetGrainTypeInfo(int typeCode, out string grainClass, out PlacementStrategy placement, out MultiClusterRegistrationStrategy activationStrategy, string genericArguments = null)
{
GrainTypeManager.GetTypeInfo(typeCode, out grainClass, out placement, out activationStrategy, genericArguments);
}
#endregion
#region Activations
/// <summary>Number of activations currently registered on this silo.</summary>
public int ActivationCount => activations.Count;
/// <summary>
/// If activation already exists, use it
/// Otherwise, create an activation of an existing grain by reading its state.
/// Return immediately using a dummy that will queue messages.
/// Concurrently start creating and initializing the real activation and replace it when it is ready.
/// </summary>
/// <param name="address">Grain's activation address</param>
/// <param name="newPlacement">Creation of new activation was requested by the placement director.</param>
/// <param name="grainType">The type of grain to be activated or created</param>
/// <param name="genericArguments">Specific generic type of grain to be activated or created</param>
/// <param name="requestContextData">Request context data.</param>
/// <param name="activatedPromise">Completes when the activation has been fully initialized.</param>
/// <returns>The existing or newly created (possibly still-initializing) activation.</returns>
public ActivationData GetOrCreateActivation(
ActivationAddress address,
bool newPlacement,
string grainType,
string genericArguments,
Dictionary<string, object> requestContextData,
out Task activatedPromise)
{
ActivationData result;
activatedPromise = TaskDone.Done;
PlacementStrategy placement;
lock (activations)
{
// Fast path: the activation already exists locally.
if (TryGetActivationData(address.Activation, out result))
{
return result;
}
int typeCode = address.Grain.GetTypeCode();
string actualGrainType = null;
MultiClusterRegistrationStrategy activationStrategy;
if (typeCode != 0)
{
GetGrainTypeInfo(typeCode, out actualGrainType, out placement, out activationStrategy, genericArguments);
}
else
{
// special case for Membership grain.
placement = SystemPlacement.Singleton;
activationStrategy = ClusterLocalRegistration.Singleton;
}
// Do not create new activations while the silo is shutting down.
if (newPlacement && !SiloStatusOracle.CurrentStatus.IsTerminating())
{
// create a dummy activation that will queue up messages until the real data arrives
if (string.IsNullOrEmpty(grainType))
{
grainType = actualGrainType;
}
// We want to do this (RegisterMessageTarget) under the same lock that we tested TryGetActivationData. They both access ActivationDirectory.
result = new ActivationData(
address,
genericArguments,
placement,
activationStrategy,
ActivationCollector,
config.Application.GetCollectionAgeLimit(grainType));
RegisterMessageTarget(result);
}
} // End lock
// Did not find and did not start placing new
if (result == null)
{
var msg = String.Format("Non-existent activation: {0}, grain type: {1}.",
address.ToFullString(), grainType);
if (logger.IsVerbose) logger.Verbose(ErrorCode.CatalogNonExistingActivation2, msg);
CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_NON_EXISTENT_ACTIVATIONS).Increment();
throw new NonExistentActivationException(msg, address, placement is StatelessWorkerPlacement);
}
// Kick off asynchronous initialization (directory registration, state load, OnActivateAsync)
// while the dummy activation queues incoming messages.
SetupActivationInstance(result, grainType, genericArguments);
activatedPromise = InitActivation(result, grainType, genericArguments, requestContextData);
return result;
}
// Creates the grain object for the activation exactly once, under the activation's lock.
private void SetupActivationInstance(ActivationData result, string grainType, string genericArguments)
{
lock (result)
{
if (result.GrainInstance == null)
{
CreateGrainInstance(grainType, result, genericArguments);
}
}
}
// Turns a freshly created "dummy" activation into a real one in three stages:
//   1. register in the grain directory, 2. load persistent state, 3. invoke OnActivateAsync.
// On failure, 'initStage' records how far we got so the catch block can undo exactly the
// work that succeeded (unregister, reroute queued messages) before rethrowing.
private async Task InitActivation(ActivationData activation, string grainType, string genericArguments, Dictionary<string, object> requestContextData)
{
// We've created a dummy activation, which we'll eventually return, but in the meantime we'll queue up (or perform promptly)
// the operations required to turn the "dummy" activation into a real activation
ActivationAddress address = activation.Address;
int initStage = 0;
// A chain of promises that will have to complete in order to complete the activation
// Register with the grain directory, register with the store if necessary and call the Activate method on the new activation.
try
{
initStage = 1;
await RegisterActivationInGrainDirectoryAndValidate(activation);
initStage = 2;
await SetupActivationState(activation, String.IsNullOrEmpty(genericArguments) ? grainType : $"{grainType}[{genericArguments}]");
initStage = 3;
await InvokeActivate(activation, requestContextData);
// Only a fully activated grain becomes eligible for idle collection.
ActivationCollector.ScheduleCollection(activation);
// Success!! Log the result, and start processing messages
if (logger.IsVerbose) logger.Verbose("InitActivation is done: {0}", address);
}
catch (Exception ex)
{
lock (activation)
{
activation.SetState(ActivationState.Invalid);
try
{
UnregisterMessageTarget(activation);
}
catch (Exception exc)
{
logger.Warn(ErrorCode.Catalog_UnregisterMessageTarget4, String.Format("UnregisterMessageTarget failed on {0}.", activation), exc);
}
switch (initStage)
{
case 1: // failed to RegisterActivationInGrainDirectory
ActivationAddress target = null;
Exception dupExc;
// Failure!! Could it be that this grain uses single activation placement, and there already was an activation?
if (Utils.TryFindException(ex, typeof (DuplicateActivationException), out dupExc))
{
target = ((DuplicateActivationException) dupExc).ActivationToUse;
CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_DUPLICATE_ACTIVATIONS)
.Increment();
}
activation.ForwardingAddress = target;
if (target != null)
{
var primary = ((DuplicateActivationException)dupExc).PrimaryDirectoryForGrain;
// If this was a duplicate, it's not an error, just a race.
// Forward on all of the pending messages, and then forget about this activation.
string logMsg = String.Format("Tried to create a duplicate activation {0}, but we'll use {1} instead. " +
"GrainInstanceType is {2}. " +
"{3}" +
"Full activation address is {4}. We have {5} messages to forward.",
address,
target,
activation.GrainInstanceType,
primary != null ? "Primary Directory partition for this grain is " + primary + ". " : String.Empty,
address.ToFullString(),
activation.WaitingCount);
if (activation.IsUsingGrainDirectory)
{
logger.Info(ErrorCode.Catalog_DuplicateActivation, logMsg);
}
else
{
if (logger.IsVerbose) logger.Verbose(ErrorCode.Catalog_DuplicateActivation, logMsg);
}
RerouteAllQueuedMessages(activation, target, "Duplicate activation", ex);
}
else
{
logger.Warn(ErrorCode.Runtime_Error_100064,
String.Format("Failed to RegisterActivationInGrainDirectory for {0}.",
activation), ex);
// Need to undo the registration we just did earlier
if (activation.IsUsingGrainDirectory)
{
scheduler.RunOrQueueTask(() => directory.UnregisterAsync(address),
SchedulingContext).Ignore();
}
RerouteAllQueuedMessages(activation, null,
"Failed RegisterActivationInGrainDirectory", ex);
}
break;
case 2: // failed to setup persistent state
logger.Warn(ErrorCode.Catalog_Failed_SetupActivationState,
String.Format("Failed to SetupActivationState for {0}.", activation), ex);
// Need to undo the registration we just did earlier
if (activation.IsUsingGrainDirectory)
{
scheduler.RunOrQueueTask(() => directory.UnregisterAsync(address),
SchedulingContext).Ignore();
}
RerouteAllQueuedMessages(activation, null, "Failed SetupActivationState", ex);
break;
case 3: // failed to InvokeActivate
logger.Warn(ErrorCode.Catalog_Failed_InvokeActivate,
String.Format("Failed to InvokeActivate for {0}.", activation), ex);
// Need to undo the registration we just did earlier
if (activation.IsUsingGrainDirectory)
{
scheduler.RunOrQueueTask(() => directory.UnregisterAsync(address),
SchedulingContext).Ignore();
}
RerouteAllQueuedMessages(activation, null, "Failed InvokeActivate", ex);
break;
}
}
// Preserve the original failure for the caller awaiting activatedPromise.
throw;
}
}
/// <summary>
/// Perform just the prompt, local part of creating an activation object
/// Caller is responsible for registering locally, registering with store and calling its activate routine
/// </summary>
/// <param name="grainTypeName">Grain interface/type name used to look up the implementation class.</param>
/// <param name="data">Activation record that will own the new grain instance.</param>
/// <param name="genericArguments">Generic type arguments, if the grain type is generic.</param>
private void CreateGrainInstance(string grainTypeName, ActivationData data, string genericArguments)
{
string grainClassName;
if (!GrainTypeManager.TryGetPrimaryImplementation(grainTypeName, out grainClassName))
{
// Lookup from grain type code
var typeCode = data.Grain.GetTypeCode();
if (typeCode != 0)
{
PlacementStrategy unused;
MultiClusterRegistrationStrategy unusedActivationStrategy;
GetGrainTypeInfo(typeCode, out grainClassName, out unused, out unusedActivationStrategy, genericArguments);
}
else
{
// Fall back to treating the requested name as the class name itself.
grainClassName = grainTypeName;
}
}
GrainTypeData grainTypeData = GrainTypeManager[grainClassName];
//Get the grain's type
Type grainType = grainTypeData.Type;
//Gets the type for the grain's state
Type stateObjectType = grainTypeData.StateObjectType;
lock (data)
{
Grain grain;
//Create a new instance of a stateless grain
if (stateObjectType == null)
{
//Create a new instance of the given grain type
grain = grainCreator.CreateGrainInstance(grainType, data.Identity);
}
//Create a new instance of a stateful grain
else
{
// Stateful grains need a storage provider resolved before construction.
SetupStorageProvider(grainType, data);
grain = grainCreator.CreateGrainInstance(grainType, data.Identity, stateObjectType, data.StorageProvider);
}
grain.Data = data;
data.SetGrainInstance(grain);
}
activations.IncrementGrainCounter(grainClassName);
if (logger.IsVerbose) logger.Verbose("CreateGrainInstance {0}{1}", data.Grain, data.ActivationId);
}
// Resolves and assigns the storage provider for a stateful grain type.
// Provider name comes from [StorageProvider] on the grain class, falling back to the
// configured default. Throws BadProviderConfigException when no usable provider exists.
private void SetupStorageProvider(Type grainType, ActivationData data)
{
var grainTypeName = grainType.FullName;
// Get the storage provider name, using the default if not specified.
var attr = grainType.GetTypeInfo().GetCustomAttributes<StorageProviderAttribute>(true).FirstOrDefault();
var storageProviderName = attr != null ? attr.ProviderName : Constants.DEFAULT_STORAGE_PROVIDER_NAME;
IStorageProvider provider;
if (storageProviderManager == null || storageProviderManager.GetNumLoadedProviders() == 0)
{
var errMsg = string.Format("No storage providers found loading grain type {0}", grainTypeName);
logger.Error(ErrorCode.Provider_CatalogNoStorageProvider_1, errMsg);
throw new BadProviderConfigException(errMsg);
}
if (string.IsNullOrWhiteSpace(storageProviderName))
{
// Use default storage provider
provider = storageProviderManager.GetDefaultProvider();
}
else
{
// Look for MemoryStore provider as special case name
bool caseInsensitive = Constants.MEMORY_STORAGE_PROVIDER_NAME.Equals(storageProviderName, StringComparison.OrdinalIgnoreCase);
storageProviderManager.TryGetProvider(storageProviderName, out provider, caseInsensitive);
if (provider == null)
{
var errMsg = string.Format(
"Cannot find storage provider with Name={0} for grain type {1}", storageProviderName,
grainTypeName);
logger.Error(ErrorCode.Provider_CatalogNoStorageProvider_2, errMsg);
throw new BadProviderConfigException(errMsg);
}
}
data.StorageProvider = provider;
if (logger.IsVerbose2)
{
string msg = string.Format("Assigned storage provider with Name={0} to grain type {1}",
storageProviderName, grainTypeName);
logger.Verbose2(ErrorCode.Provider_CatalogStorageProviderAllocated, msg);
}
}
// Loads persisted state for a stateful grain via its storage provider, on the
// activation's scheduling context. A missing record (KeyNotFoundException at the base of
// the failure) is treated as "no saved state": the original empty state object is restored.
// Any other read failure is recorded in statistics and rethrown.
private async Task SetupActivationState(ActivationData result, string grainType)
{
var statefulGrain = result.GrainInstance as IStatefulGrain;
if (statefulGrain == null)
{
// Stateless grain: nothing to load.
return;
}
var state = statefulGrain.GrainState;
if (result.StorageProvider != null && state != null)
{
var sw = Stopwatch.StartNew();
// Keep the pristine state object so it can be restored if the record does not exist yet.
var innerState = statefulGrain.GrainState.State;
// Populate state data
try
{
var grainRef = result.GrainReference;
await scheduler.RunOrQueueTask(() =>
result.StorageProvider.ReadStateAsync(grainType, grainRef, state),
new SchedulingContext(result));
sw.Stop();
StorageStatisticsGroup.OnStorageActivate(result.StorageProvider, grainType, result.GrainReference, sw.Elapsed);
}
catch (Exception ex)
{
StorageStatisticsGroup.OnStorageActivateError(result.StorageProvider, grainType, result.GrainReference);
sw.Stop();
if (!(ex.GetBaseException() is KeyNotFoundException))
throw;
statefulGrain.GrainState.State = innerState; // Just keep original empty state object
}
}
}
/// <summary>
/// Try to get runtime data for an activation.
/// System activations are never resolved through this path.
/// </summary>
/// <param name="activationId">Id of the activation to look up.</param>
/// <param name="data">Receives the activation data, or null when not found.</param>
/// <returns>True when a matching activation exists locally.</returns>
public bool TryGetActivationData(ActivationId activationId, out ActivationData data)
{
    if (activationId.IsSystem)
    {
        data = null;
        return false;
    }

    data = activations.FindTarget(activationId);
    return data != null;
}
/// <summary>
/// Deactivates a batch of activations selected by the activation collector:
/// marks each one as no longer accepting messages, then destroys them promptly.
/// </summary>
private Task DeactivateActivationsFromCollector(List<ActivationData> list)
{
    logger.Info(ErrorCode.Catalog_ShutdownActivations_1, "DeactivateActivationsFromCollector: total {0} to promptly Destroy.", list.Count);
    CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_SHUTDOWN_VIA_COLLECTION).IncrementBy(list.Count);

    foreach (var activation in list)
    {
        // Stop accepting new messages before tearing the activation down.
        lock (activation)
        {
            activation.PrepareForDeactivation();
        }
    }
    return DestroyActivations(list);
}
// To be called from within Activation context.
// Cannot be awaitable, since after DestroyActivation is done the activation is in Invalid state and cannot await any Task.
internal void DeactivateActivationOnIdle(ActivationData data)
{
    bool destroyPromptly = false;
    bool alreadyBeingDestroyed = false;
    lock (data)
    {
        if (data.State == ActivationState.Valid)
        {
            // Change the ActivationData state here, since we're about to give up the lock.
            data.PrepareForDeactivation(); // Don't accept any new messages
            ActivationCollector.TryCancelCollection(data);
            if (!data.IsCurrentlyExecuting)
            {
                destroyPromptly = true;
            }
            else
            {
                // Busy right now, so destroy once the activation becomes idle.
                data.AddOnInactive(() => DestroyActivationVoid(data));
            }
        }
        else if (data.State == ActivationState.Create)
        {
            throw new InvalidOperationException(String.Format(
                "Activation {0} has called DeactivateOnIdle from within a constructor, which is not allowed.",
                data.ToString()));
        }
        else if (data.State == ActivationState.Activating)
        {
            throw new InvalidOperationException(String.Format(
                "Activation {0} has called DeactivateOnIdle from within OnActivateAsync, which is not allowed.",
                data.ToString()));
        }
        else
        {
            alreadyBeingDestroyed = true;
        }
    }

    logger.Info(ErrorCode.Catalog_ShutdownActivations_2,
        "DeactivateActivationOnIdle: {0} {1}.", data.ToString(), destroyPromptly ? "promptly" : (alreadyBeingDestroyed ? "already being destroyed or invalid" : "later when become idle"));
    CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_SHUTDOWN_VIA_DEACTIVATE_ON_IDLE).Increment();

    if (destroyPromptly)
    {
        // Don't await or Ignore, since we are in this activation context and it may have already been destroyed!
        DestroyActivationVoid(data);
    }
}
/// <summary>
/// Gracefully deletes activations, putting it into a shutdown state to
/// complete and commit outstanding transactions before deleting it.
/// To be called not from within Activation context, so can be awaited.
/// </summary>
/// <param name="list">Activations to deactivate; null or empty is a no-op.</param>
/// <returns>Completes when all valid activations in the list have been destroyed.</returns>
internal async Task DeactivateActivations(List<ActivationData> list)
{
if (list == null || list.Count == 0) return;
if (logger.IsVerbose) logger.Verbose("DeactivateActivations: {0} activations.", list.Count);
// Partition the batch: idle activations are destroyed promptly; busy ones are destroyed
// when they become idle (tracked via completion sources); anything not Valid is skipped.
List<ActivationData> destroyNow = null;
List<MultiTaskCompletionSource> destroyLater = null;
int alreadyBeingDestroyed = 0;
foreach (var d in list)
{
var activationData = d; // capture
lock (activationData)
{
if (activationData.State == ActivationState.Valid)
{
// Change the ActivationData state here, since we're about to give up the lock.
activationData.PrepareForDeactivation(); // Don't accept any new messages
ActivationCollector.TryCancelCollection(activationData);
if (!activationData.IsCurrentlyExecuting)
{
if (destroyNow == null)
{
destroyNow = new List<ActivationData>();
}
destroyNow.Add(activationData);
}
else // busy, so destroy later.
{
if (destroyLater == null)
{
destroyLater = new List<MultiTaskCompletionSource>();
}
var tcs = new MultiTaskCompletionSource(1);
destroyLater.Add(tcs);
activationData.AddOnInactive(() => DestroyActivationAsync(activationData, tcs));
}
}
else
{
alreadyBeingDestroyed++;
}
}
}
int numDestroyNow = destroyNow == null ? 0 : destroyNow.Count;
int numDestroyLater = destroyLater == null ? 0 : destroyLater.Count;
logger.Info(ErrorCode.Catalog_ShutdownActivations_3,
"DeactivateActivations: total {0} to shutdown, out of them {1} promptly, {2} later when become idle and {3} are already being destroyed or invalid.",
list.Count, numDestroyNow, numDestroyLater, alreadyBeingDestroyed);
CounterStatistic.FindOrCreate(StatisticNames.CATALOG_ACTIVATION_SHUTDOWN_VIA_DIRECT_SHUTDOWN).IncrementBy(list.Count);
if (destroyNow != null && destroyNow.Count > 0)
{
await DestroyActivations(destroyNow);
}
// Wait for the deferred destroys to finish too before reporting completion.
if (destroyLater != null && destroyLater.Count > 0)
{
await Task.WhenAll(destroyLater.Select(t => t.Task).ToArray());
}
}
/// <summary>
/// Deactivates every local activation that is not exempt from collection
/// (used during silo shutdown).
/// </summary>
public Task DeactivateAllActivations()
{
    logger.Info(ErrorCode.Catalog_DeactivateAllActivations, "DeactivateAllActivations.");
    var collectible = activations
        .Where(kv => !kv.Value.IsExemptFromCollection)
        .Select(kv => kv.Value)
        .ToList();
    return DeactivateActivations(collectible);
}
/// <summary>
/// Deletes activation immediately regardless of active transactions etc.
/// For use by grain delete, transaction abort, etc.
/// Fire-and-forget: no completion source is attached, so the destroy is not awaited.
/// </summary>
/// <param name="activation">Activation to destroy.</param>
private void DestroyActivationVoid(ActivationData activation)
{
StartDestroyActivations(new List<ActivationData> { activation });
}
// Destroys a single activation, signaling the given completion source when done
// (used for deferred destroys scheduled via AddOnInactive).
private void DestroyActivationAsync(ActivationData activation, MultiTaskCompletionSource tcs)
{
StartDestroyActivations(new List<ActivationData> { activation }, tcs);
}
/// <summary>
/// Forcibly deletes activations now, without waiting for any outstanding transactions to complete.
/// Deletes activation immediately regardless of active transactions etc.
/// For use by grain delete, transaction abort, etc.
/// </summary>
/// <param name="list">Activations to destroy.</param>
/// <returns>Completes once every activation in the list has been destroyed.</returns>
// Overall code flow:
// Deactivating state was already set before, in the correct context under lock.
// that means no more new requests will be accepted into this activation and all timer were stopped (no new ticks will be delivered or enqueued)
// Wait for all already scheduled ticks to finish
// CallGrainDeactivate
// when AsyncDeactivate promise is resolved (NOT when all Deactivate turns are done, which may be orphan tasks):
// Unregister in the directory
// when all AsyncDeactivate turns are done (Dispatcher.OnActivationCompletedRequest):
// Set Invalid state
// UnregisterMessageTarget -> no new tasks will be enqueue (if an orphan task get enqueud, it is ignored and dropped on the floor).
// InvalidateCacheEntry
// Reroute pending
private Task DestroyActivations(List<ActivationData> list)
{
    var completion = new MultiTaskCompletionSource(list.Count);
    StartDestroyActivations(list, completion);
    return completion.Task;
}
// First half of the destroy pipeline (steps 1-2 of the flow documented above):
// wait for in-flight timer ticks, then run each grain's Deactivate on its own context.
// Completion of step 2 chains into FinishDestroyActivations via the batched continuation queue.
// Intentionally async void: this is fire-and-forget; callers that need completion pass a tcs.
// NOTE(review): destroyActivationsNumber is incremented without Interlocked - presumably all
// callers run on the catalog's scheduling context; confirm before relying on the batch numbers.
private async void StartDestroyActivations(List<ActivationData> list, MultiTaskCompletionSource tcs = null)
{
int number = destroyActivationsNumber;
destroyActivationsNumber++;
try
{
logger.Info(ErrorCode.Catalog_DestroyActivations, "Starting DestroyActivations #{0} of {1} activations", number, list.Count);
// step 1 - WaitForAllTimersToFinish
var tasks1 = new List<Task>();
foreach (var activation in list)
{
tasks1.Add(activation.WaitForAllTimersToFinish());
}
try
{
await Task.WhenAll(tasks1);
}
catch (Exception exc)
{
// Best-effort: a timer failure should not block the rest of the destroy pipeline.
logger.Warn(ErrorCode.Catalog_WaitForAllTimersToFinish_Exception, String.Format("WaitForAllTimersToFinish {0} failed.", list.Count), exc);
}
// step 2 - CallGrainDeactivate
var tasks2 = new List<Tuple<Task, ActivationData>>();
foreach (var activation in list)
{
var activationData = activation; // Capture loop variable
var task = scheduler.RunOrQueueTask(() => CallGrainDeactivateAndCleanupStreams(activationData), new SchedulingContext(activationData));
tasks2.Add(new Tuple<Task, ActivationData>(task, activationData));
}
var asyncQueue = new AsyncBatchedContinuationQueue<ActivationData>();
asyncQueue.Queue(tasks2, tupleList =>
{
FinishDestroyActivations(tupleList.Select(t => t.Item2).ToList(), number, tcs);
GC.KeepAlive(asyncQueue); // not sure about GC not collecting the asyncQueue local var prematuraly, so just want to capture it here to make sure. Just to be safe.
});
}
catch (Exception exc)
{
logger.Warn(ErrorCode.Catalog_DeactivateActivation_Exception, String.Format("StartDestroyActivations #{0} failed with {1} Activations.", number, list.Count), exc);
}
}
// Second half of the destroy pipeline (steps 3-5): bulk-unregister from the grain
// directory, invalidate/unregister each activation locally and reroute its queued
// messages, then resolve the waiting completion source.
// Intentionally async void: invoked as a continuation from StartDestroyActivations.
private async void FinishDestroyActivations(List<ActivationData> list, int number, MultiTaskCompletionSource tcs)
{
try
{
//logger.Info(ErrorCode.Catalog_DestroyActivations_Done, "Starting FinishDestroyActivations #{0} - with {1} Activations.", number, list.Count);
// step 3 - UnregisterManyAsync
try
{
// Only activations registered in the grain directory need directory cleanup.
List<ActivationAddress> activationsToDeactivate = list.
Where((ActivationData d) => d.IsUsingGrainDirectory).
Select((ActivationData d) => ActivationAddress.GetAddress(LocalSilo, d.Grain, d.ActivationId)).ToList();
if (activationsToDeactivate.Count > 0)
{
await scheduler.RunOrQueueTask(() =>
directory.UnregisterManyAsync(activationsToDeactivate),
SchedulingContext);
}
}
catch (Exception exc)
{
// Best-effort: continue local cleanup even if directory unregistration failed.
logger.Warn(ErrorCode.Catalog_UnregisterManyAsync, String.Format("UnregisterManyAsync {0} failed.", list.Count), exc);
}
// step 4 - UnregisterMessageTarget and OnFinishedGrainDeactivate
foreach (var activationData in list)
{
try
{
lock (activationData)
{
activationData.SetState(ActivationState.Invalid); // Deactivate calls on this activation are finished
}
UnregisterMessageTarget(activationData);
}
catch (Exception exc)
{
logger.Warn(ErrorCode.Catalog_UnregisterMessageTarget2, String.Format("UnregisterMessageTarget failed on {0}.", activationData), exc);
}
try
{
// IMPORTANT: no more awaits and .Ignore after that point.
// Just use this opportunity to invalidate local Cache Entry as well.
// If this silo is not the grain directory partition for this grain, it may have it in its cache.
directory.InvalidateCacheEntry(activationData.Address);
RerouteAllQueuedMessages(activationData, null, "Finished Destroy Activation");
}
catch (Exception exc)
{
logger.Warn(ErrorCode.Catalog_UnregisterMessageTarget3, String.Format("Last stage of DestroyActivations failed on {0}.", activationData), exc);
}
}
// step 5 - Resolve any waiting TaskCompletionSource
if (tcs != null)
{
tcs.SetMultipleResults(list.Count);
}
logger.Info(ErrorCode.Catalog_DestroyActivations_Done, "Done FinishDestroyActivations #{0} - Destroyed {1} Activations.", number, list.Count);
}
catch (Exception exc)
{
logger.Error(ErrorCode.Catalog_FinishDeactivateActivation_Exception, String.Format("FinishDestroyActivations #{0} failed with {1} Activations.", number, list.Count), exc);
}
}
/// <summary>
/// Drains every message still queued on the given activation (under its lock) and
/// hands them to the dispatcher, which reroutes or rejects requests targeted at an
/// invalid activation.
/// </summary>
/// <param name="activation">Activation whose waiting messages are drained.</param>
/// <param name="forwardingAddress">Address to forward the messages to; may be null.</param>
/// <param name="failedOperation">Description of the failed operation, for diagnostics.</param>
/// <param name="exc">Optional exception that triggered the reroute.</param>
private void RerouteAllQueuedMessages(ActivationData activation, ActivationAddress forwardingAddress, string failedOperation, Exception exc = null)
{
    lock (activation)
    {
        List<Message> msgs = activation.DequeueAllWaitingMessages();
        if (msgs == null || msgs.Count == 0) return;
        // Use the List<T>.Count property instead of the LINQ Count() extension (no enumerator needed).
        if (logger.IsVerbose) logger.Verbose(ErrorCode.Catalog_RerouteAllQueuedMessages, String.Format("RerouteAllQueuedMessages: {0} msgs from Invalid activation {1}.", msgs.Count, activation));
        dispatcher.ProcessRequestsToInvalidActivation(msgs, activation.Address, forwardingAddress, failedOperation, exc);
    }
}
/// <summary>
/// Invokes the grain's OnActivateAsync and, on success, transitions the activation
/// from Activating to Valid and restarts its message pump. On failure the activation
/// is marked Invalid and the exception is rethrown to the caller.
/// </summary>
/// <param name="activation">Activation being activated.</param>
/// <param name="requestContextData">Request context to import before calling user code.</param>
private async Task CallGrainActivate(ActivationData activation, Dictionary<string, object> requestContextData)
{
var grainTypeName = activation.GrainInstanceType.FullName;
// Note: This call is being made from within Scheduler.Queue wrapper, so we are already executing on worker thread
if (logger.IsVerbose) logger.Verbose(ErrorCode.Catalog_BeforeCallingActivate, "About to call {1} grain's OnActivateAsync() method {0}", activation, grainTypeName);
// Call OnActivateAsync inline, but within try-catch wrapper to safely capture any exceptions thrown from called function
try
{
RequestContext.Import(requestContextData);
await activation.GrainInstance.OnActivateAsync();
if (logger.IsVerbose) logger.Verbose(ErrorCode.Catalog_AfterCallingActivate, "Returned from calling {1} grain's OnActivateAsync() method {0}", activation, grainTypeName);
lock (activation)
{
// Only promote to Valid if nothing else changed the state while OnActivateAsync ran.
if (activation.State == ActivationState.Activating)
{
activation.SetState(ActivationState.Valid); // Activate calls on this activation are finished
}
if (!activation.IsCurrentlyExecuting)
{
activation.RunOnInactive();
}
// Run message pump to see if there is a new request is queued to be processed
dispatcher.RunMessagePump(activation);
}
}
catch (Exception exc)
{
logger.Error(ErrorCode.Catalog_ErrorCallingActivate,
string.Format("Error calling grain's OnActivateAsync() method - Grain type = {1} Activation = {0}", activation, grainTypeName), exc);
activation.SetState(ActivationState.Invalid); // Mark this activation as unusable
activationsFailedToActivate.Increment();
// Rethrow so the caller (InvokeActivate's queued task) observes the activation failure.
throw;
}
}
/// <summary>
/// Invokes the grain's OnDeactivateAsync (only if the activation is still present
/// and in the Deactivating state) and then releases its stream resources, if any.
/// All exceptions are logged and swallowed; the activation is always returned so
/// the destroy pipeline can continue with the remaining steps.
/// </summary>
/// <param name="activation">Activation being deactivated.</param>
/// <returns>The same activation, regardless of success or failure.</returns>
private async Task<ActivationData> CallGrainDeactivateAndCleanupStreams(ActivationData activation)
{
try
{
var grainTypeName = activation.GrainInstanceType.FullName;
// Note: This call is being made from within Scheduler.Queue wrapper, so we are already executing on worker thread
if (logger.IsVerbose) logger.Verbose(ErrorCode.Catalog_BeforeCallingDeactivate, "About to call {1} grain's OnDeactivateAsync() method {0}", activation, grainTypeName);
// Call OnDeactivateAsync inline, but within try-catch wrapper to safely capture any exceptions thrown from called function
try
{
// just check in case this activation data is already Invalid or not here at all.
ActivationData ignore;
if (TryGetActivationData(activation.ActivationId, out ignore) &&
activation.State == ActivationState.Deactivating)
{
RequestContext.Clear(); // Clear any previous RC, so it does not leak into this call by mistake.
await activation.GrainInstance.OnDeactivateAsync();
}
if (logger.IsVerbose) logger.Verbose(ErrorCode.Catalog_AfterCallingDeactivate, "Returned from calling {1} grain's OnDeactivateAsync() method {0}", activation, grainTypeName);
}
catch (Exception exc)
{
// User-code failure during deactivation is logged but deliberately not rethrown.
logger.Error(ErrorCode.Catalog_ErrorCallingDeactivate,
string.Format("Error calling grain's OnDeactivateAsync() method - Grain type = {1} Activation = {0}", activation, grainTypeName), exc);
}
if (activation.IsUsingStreams)
{
try
{
await activation.DeactivateStreamResources();
}
catch (Exception exc)
{
logger.Warn(ErrorCode.Catalog_DeactivateStreamResources_Exception, String.Format("DeactivateStreamResources Grain type = {0} Activation = {1} failed.", grainTypeName, activation), exc);
}
}
}
catch(Exception exc)
{
logger.Error(ErrorCode.Catalog_FinishGrainDeactivateAndCleanupStreams_Exception, String.Format("CallGrainDeactivateAndCleanupStreams Activation = {0} failed.", activation), exc);
}
return activation;
}
/// <summary>
/// Registers a new activation with the grain directory (single-activation grains)
/// or validates the local activation count (StatelessWorker grains).
/// Throws DuplicateActivationException when another activation wins the race or the
/// local StatelessWorker limit is exceeded.
/// </summary>
/// <param name="activation">Newly created activation to register/validate.</param>
/// <exception cref="DuplicateActivationException">Another activation already holds this grain, or the local limit was reached.</exception>
private async Task RegisterActivationInGrainDirectoryAndValidate(ActivationData activation)
{
ActivationAddress address = activation.Address;
// Currently, the only grain type that is not registered in the Grain Directory is StatelessWorker.
// Among those that are registered in the directory, we currently do not have any multi activations.
if (activation.IsUsingGrainDirectory)
{
var result = await scheduler.RunOrQueueTask(() => directory.RegisterAsync(address, singleActivation:true), this.SchedulingContext);
// If the directory returned our own address, registration succeeded.
if (address.Equals(result.Address)) return;
// Someone else won the registration race - report the winner and its primary directory silo.
SiloAddress primaryDirectoryForGrain = directory.GetPrimaryForGrain(address.Grain);
throw new DuplicateActivationException(result.Address, primaryDirectoryForGrain);
}
else
{
// NOTE(review): the 'as' cast is unchecked - this relies on every non-directory
// activation having been placed with StatelessWorkerPlacement (per the comment
// above); a different placement would cause a NullReferenceException here.
StatelessWorkerPlacement stPlacement = activation.PlacedUsing as StatelessWorkerPlacement;
int maxNumLocalActivations = stPlacement.MaxLocal;
lock (activations)
{
List<ActivationData> local;
if (!LocalLookup(address.Grain, out local) || local.Count <= maxNumLocalActivations)
return;
// Over the limit: direct the caller to a randomly chosen existing local activation.
var id = StatelessWorkerDirector.PickRandom(local).Address;
throw new DuplicateActivationException(id);
}
}
// We currently don't have any other case for multiple activations except for StatelessWorker.
}
#endregion
#region Activations - private
/// <summary>
/// Invoke the activate method on a newly created activation
/// </summary>
/// <param name="activation"></param>
/// <param name="requestContextData"></param>
/// <returns></returns>
private Task InvokeActivate(ActivationData activation, Dictionary<string, object> requestContextData)
{
// NOTE: This should only be called with the correct scheduling context for the activation to be invoked.
lock (activation)
{
activation.SetState(ActivationState.Activating);
}
return scheduler.QueueTask(() => CallGrainActivate(activation, requestContextData), new SchedulingContext(activation)); // Queued on the target grain's scheduler context
// ActivationData will transition out of ActivationState.Activating via Dispatcher.OnActivationCompletedRequest
}
#endregion
#region IPlacementContext
/// <summary>Logger exposed as part of the IPlacementContext implementation.</summary>
public Logger Logger => logger;
/// <summary>
/// Fast (local-cache-only) directory lookup for a grain's activation addresses.
/// Returns true only when the local directory cache has at least one address.
/// </summary>
public bool FastLookup(GrainId grain, out AddressesAndTag addresses)
{
return directory.LocalLookup(grain, out addresses) && addresses.Addresses != null && addresses.Addresses.Count > 0;
// NOTE: only check with the local directory cache.
// DO NOT check in the local activations TargetDirectory!!!
// The only source of truth about which activation is legit is the state of the distributed directory.
// Everyone should converge to that (that is the meaning of "eventual consistency" - eventually we converge to one truth).
// If we keep using the local activation, it may not be registered in the directory any more, but we will never know that and keep using it,
// thus violating the single-activation semantics and not converging even eventually!
}
/// <summary>
/// Full (cluster-wide) directory lookup for the given grain, executed on this
/// catalog's scheduling context.
/// </summary>
public Task<AddressesAndTag> FullLookup(GrainId grain)
    => scheduler.RunOrQueueTask(() => directory.LookupAsync(grain), this.SchedulingContext);
/// <summary>
/// Looks up the locally held activations for the given grain in the activation
/// directory. Returns true when a (possibly empty) target list was found.
/// </summary>
public bool LocalLookup(GrainId grain, out List<ActivationData> addresses)
{
    return (addresses = activations.FindTargets(grain)) != null;
}
/// <summary>
/// All silos currently considered active by the silo status oracle.
/// Falls back to just the local silo (with a warning) if the oracle returns nothing.
/// </summary>
public List<SiloAddress> AllActiveSilos
{
get
{
var result = SiloStatusOracle.GetApproximateSiloStatuses(true).Select(s => s.Key).ToList();
if (result.Count > 0) return result;
logger.Warn(ErrorCode.Catalog_GetApproximateSiloStatuses, "AllActiveSilos SiloStatusOracle.GetApproximateSiloStatuses empty");
return new List<SiloAddress> { LocalSilo };
}
}
#endregion
#region Implementation of ICatalog
/// <summary>
/// Creates (or finds) an activation for a system grain on the local silo and
/// returns the activation promise (or a completed task if none was produced).
/// </summary>
/// <param name="grainId">Identity of the system grain.</param>
/// <param name="grainType">Grain type name used for activation creation.</param>
public Task CreateSystemGrain(GrainId grainId, string grainType)
{
ActivationAddress target = ActivationAddress.NewActivationAddress(LocalSilo, grainId);
Task activatedPromise;
GetOrCreateActivation(target, true, grainType, null, null, out activatedPromise);
return activatedPromise ?? TaskDone.Done;
}
/// <summary>
/// Destroys the activations at the given addresses; addresses that no longer
/// resolve to a live activation are silently skipped.
/// </summary>
public Task DeleteActivations(List<ActivationAddress> addresses)
{
return DestroyActivations(TryGetActivationDatas(addresses));
}
/// <summary>
/// Resolves each activation address to its live ActivationData, if still present;
/// addresses without a matching activation are skipped.
/// </summary>
private List<ActivationData> TryGetActivationDatas(List<ActivationAddress> addresses)
{
    var result = new List<ActivationData>(addresses.Count);
    foreach (var address in addresses)
    {
        ActivationData data;
        if (TryGetActivationData(address.Activation, out data))
        {
            result.Add(data);
        }
    }
    return result;
}
#endregion
#region Implementation of ISiloStatusListener
/// <summary>
/// Reacts to a remote silo entering a terminating state by deactivating every local
/// activation whose grain's primary directory partition was owned by that silo.
/// Events for the local silo itself, and non-terminating status changes, are ignored.
/// </summary>
/// <param name="updatedSilo">Silo whose status changed.</param>
/// <param name="status">New status of that silo.</param>
public void SiloStatusChangeNotification(SiloAddress updatedSilo, SiloStatus status)
{
// ignore joining events and also events on myself.
if (updatedSilo.Equals(LocalSilo)) return;
// We deactivate those activations when silo goes either of ShuttingDown/Stopping/Dead states,
// since this is what Directory is doing as well. Directory removes a silo based on all those 3 statuses,
// thus it will only deliver a "remove" notification for a given silo once to us. Therefore, we need to react the first time we are notified.
// We may review the directory behaviour in the future and treat ShuttingDown differently ("drain only") and then this code will have to change as well.
if (!status.IsTerminating()) return;
if (status == SiloStatus.Dead)
{
// Fail fast any messages already in flight to the dead silo.
RuntimeClient.Current.BreakOutstandingMessagesToDeadSilo(updatedSilo);
}
var activationsToShutdown = new List<ActivationData>();
try
{
// scan all activations in activation directory and deactivate the ones that the removed silo is their primary partition owner.
lock (activations)
{
foreach (var activation in activations)
{
try
{
var activationData = activation.Value;
if (!activationData.IsUsingGrainDirectory) continue;
if (!directory.GetPrimaryForGrain(activationData.Grain).Equals(updatedSilo)) continue;
lock (activationData)
{
// adapted from InsideGrainClient.DeactivateOnIdle().
activationData.ResetKeepAliveRequest();
activationsToShutdown.Add(activationData);
}
}
catch (Exception exc)
{
// Per-activation failures must not stop the scan of the remaining activations.
logger.Error(ErrorCode.Catalog_SiloStatusChangeNotification_Exception,
String.Format("Catalog has thrown an exception while executing SiloStatusChangeNotification of silo {0}.", updatedSilo.ToStringWithHashCode()), exc);
}
}
}
logger.Info(ErrorCode.Catalog_SiloStatusChangeNotification,
String.Format("Catalog is deactivating {0} activations due to a failure of silo {1}, since it is a primary directory partiton to these grain ids.",
activationsToShutdown.Count, updatedSilo.ToStringWithHashCode()));
}
finally
{
// outside the lock.
if (activationsToShutdown.Count > 0)
{
DeactivateActivations(activationsToShutdown).Ignore();
}
}
}
#endregion
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using osu.Game.Replays;
using osu.Game.Rulesets.Replays;
namespace osu.Game.Tests.NonVisual
{
[TestFixture]
/// <summary>
/// Tests for FramedReplayInputHandler: frame seeking in both directions, behaviour
/// inside "important" frame sections, streaming (frames arriving late), equal-time
/// frame ordering, and sort stability of replay frames.
/// </summary>
public class FramedReplayInputHandlerTest
{
// Replay under test; frames are populated per-test.
private Replay replay;
// Handler wrapping the replay, recreated before each test.
private TestInputHandler handler;
[SetUp]
public void SetUp()
{
handler = new TestInputHandler(replay = new Replay
{
HasReceivedAllFrames = false
});
}
[Test]
public void TestNormalPlayback()
{
setReplayFrames();
setTime(0, 0);
confirmCurrentFrame(0);
confirmNextFrame(1);
// if we hit the first frame perfectly, time should progress to it.
setTime(1000, 1000);
confirmCurrentFrame(1);
confirmNextFrame(2);
// in between non-important frames should progress based on input.
setTime(1200, 1200);
confirmCurrentFrame(1);
setTime(1400, 1400);
confirmCurrentFrame(1);
// progressing beyond the next frame should force time to that frame once.
setTime(2200, 2000);
confirmCurrentFrame(2);
// second attempt should progress to input time
setTime(2200, 2200);
confirmCurrentFrame(2);
// entering important section
setTime(3000, 3000);
confirmCurrentFrame(3);
// cannot progress within
setTime(3500, null);
confirmCurrentFrame(3);
setTime(4000, 4000);
confirmCurrentFrame(4);
// still cannot progress
setTime(4500, null);
confirmCurrentFrame(4);
setTime(5200, 5000);
confirmCurrentFrame(5);
// important section AllowedImportantTimeSpan allowance
setTime(5200, 5200);
confirmCurrentFrame(5);
setTime(7200, 7000);
confirmCurrentFrame(6);
setTime(7200, null);
confirmCurrentFrame(6);
// exited important section
setTime(8200, 8000);
confirmCurrentFrame(7);
confirmNextFrame(null);
setTime(8200, 8200);
confirmCurrentFrame(7);
confirmNextFrame(null);
}
[Test]
public void TestIntroTime()
{
// before the first frame there is no current frame, only a pending next frame.
setReplayFrames();
setTime(-1000, -1000);
confirmCurrentFrame(null);
confirmNextFrame(0);
setTime(-500, -500);
confirmCurrentFrame(null);
confirmNextFrame(0);
setTime(0, 0);
confirmCurrentFrame(0);
confirmNextFrame(1);
}
[Test]
public void TestBasicRewind()
{
setReplayFrames();
setTime(2800, 0);
setTime(2800, 1000);
setTime(2800, 2000);
setTime(2800, 2800);
confirmCurrentFrame(2);
confirmNextFrame(3);
// pivot without crossing a frame boundary
setTime(2700, 2700);
confirmCurrentFrame(2);
confirmNextFrame(3);
// cross current frame boundary
setTime(1980, 2000);
confirmCurrentFrame(2);
confirmNextFrame(3);
setTime(1200, 1200);
confirmCurrentFrame(1);
confirmNextFrame(2);
// ensure each frame plays out until start
setTime(-500, 1000);
confirmCurrentFrame(1);
confirmNextFrame(2);
setTime(-500, 0);
confirmCurrentFrame(0);
confirmNextFrame(1);
setTime(-500, -500);
confirmCurrentFrame(null);
confirmNextFrame(0);
}
[Test]
public void TestRewindInsideImportantSection()
{
setReplayFrames();
fastForwardToPoint(3000);
setTime(4000, 4000);
confirmCurrentFrame(4);
confirmNextFrame(5);
setTime(3500, null);
confirmCurrentFrame(3);
confirmNextFrame(4);
setTime(3000, 3000);
confirmCurrentFrame(3);
confirmNextFrame(4);
setTime(3500, null);
confirmCurrentFrame(3);
confirmNextFrame(4);
setTime(4000, 4000);
confirmCurrentFrame(4);
confirmNextFrame(5);
setTime(4500, null);
confirmCurrentFrame(4);
confirmNextFrame(5);
setTime(4000, 4000);
confirmCurrentFrame(4);
confirmNextFrame(5);
setTime(3500, null);
confirmCurrentFrame(3);
confirmNextFrame(4);
setTime(3000, 3000);
confirmCurrentFrame(3);
confirmNextFrame(4);
}
[Test]
public void TestRewindOutOfImportantSection()
{
setReplayFrames();
fastForwardToPoint(3500);
confirmCurrentFrame(3);
confirmNextFrame(4);
setTime(3200, null);
confirmCurrentFrame(3);
confirmNextFrame(4);
setTime(3000, 3000);
confirmCurrentFrame(3);
confirmNextFrame(4);
setTime(2800, 2800);
confirmCurrentFrame(2);
confirmNextFrame(3);
}
[Test]
public void TestReplayStreaming()
{
// no frames are arrived yet
setTime(0, null);
setTime(1000, null);
Assert.IsTrue(handler.WaitingForFrame, "Should be waiting for the first frame");
replay.Frames.Add(new TestReplayFrame(0));
replay.Frames.Add(new TestReplayFrame(1000));
// should always play from beginning
setTime(1000, 0);
confirmCurrentFrame(0);
Assert.IsFalse(handler.WaitingForFrame, "Should not be waiting yet");
setTime(1000, 1000);
confirmCurrentFrame(1);
confirmNextFrame(null);
Assert.IsTrue(handler.WaitingForFrame, "Should be waiting");
// cannot seek beyond the last frame
setTime(1500, null);
confirmCurrentFrame(1);
setTime(-100, 0);
confirmCurrentFrame(0);
// can seek to the point before the first frame, however
setTime(-100, -100);
confirmCurrentFrame(null);
confirmNextFrame(0);
fastForwardToPoint(1000);
setTime(3000, null);
replay.Frames.Add(new TestReplayFrame(2000));
confirmCurrentFrame(1);
setTime(1000, 1000);
setTime(3000, 2000);
}
[Test]
public void TestMultipleFramesSameTime()
{
replay.Frames.Add(new TestReplayFrame(0));
replay.Frames.Add(new TestReplayFrame(0));
replay.Frames.Add(new TestReplayFrame(1000));
replay.Frames.Add(new TestReplayFrame(1000));
replay.Frames.Add(new TestReplayFrame(2000));
// forward direction is prioritized when multiple frames have the same time.
setTime(0, 0);
setTime(0, 0);
setTime(2000, 1000);
setTime(2000, 1000);
setTime(1000, 1000);
setTime(1000, 1000);
setTime(-100, 1000);
setTime(-100, 0);
setTime(-100, 0);
setTime(-100, -100);
}
[Test]
public void TestReplayFramesSortStability()
{
const double repeating_time = 5000;
// add a collection of frames in shuffled order time-wise; each frame also stores its original index to check stability later.
// data is hand-picked and breaks if the unstable List<T>.Sort() is used.
// in theory this can still return a false-positive with another unstable algorithm if extremely unlucky,
// but there is no conceivable fool-proof way to prevent that anyways.
replay.Frames.AddRange(new[]
{
repeating_time,
0,
3000,
repeating_time,
repeating_time,
6000,
9000,
repeating_time,
repeating_time,
1000,
11000,
21000,
4000,
repeating_time,
repeating_time,
8000,
2000,
7000,
repeating_time,
repeating_time,
10000
}.Select((time, index) => new TestReplayFrame(time, true, index)));
replay.HasReceivedAllFrames = true;
// create a new handler with the replay for the sort to be performed.
handler = new TestInputHandler(replay);
// ensure sort stability by checking that the frames with time == repeating_time are sorted in ascending frame index order themselves.
var repeatingTimeFramesData = replay.Frames
.Cast<TestReplayFrame>()
.Where(f => f.Time == repeating_time)
.Select(f => f.FrameIndex);
Assert.That(repeatingTimeFramesData, Is.Ordered.Ascending);
}
// Standard fixture: frames at 0..8000ms; frames 3000-7000 are "important".
private void setReplayFrames()
{
replay.Frames = new List<ReplayFrame>
{
new TestReplayFrame(0),
new TestReplayFrame(1000),
new TestReplayFrame(2000),
new TestReplayFrame(3000, true),
new TestReplayFrame(4000, true),
new TestReplayFrame(5000, true),
new TestReplayFrame(7000, true),
new TestReplayFrame(8000),
};
replay.HasReceivedAllFrames = true;
}
// Repeatedly seeks towards destination until the handler settles there (or gives up after 1000 steps).
private void fastForwardToPoint(double destination)
{
for (int i = 0; i < 1000; i++)
{
var time = handler.SetFrameFromTime(destination);
if (time == null || time == destination)
return;
}
throw new TimeoutException("Seek was never fulfilled");
}
// Seeks the handler to `set` and asserts the value it reports back equals `expect`.
private void setTime(double set, double? expect)
{
Assert.AreEqual(expect, handler.SetFrameFromTime(set), "Unexpected return value");
}
// Asserts the handler's current frame is the fixture frame at the given index (null = no frame).
private void confirmCurrentFrame(int? frame)
{
Assert.AreEqual(frame is int x ? replay.Frames[x].Time : (double?)null, handler.CurrentFrame?.Time, "Unexpected current frame");
}
// Asserts the handler's next frame is the fixture frame at the given index (null = no frame).
private void confirmNextFrame(int? frame)
{
Assert.AreEqual(frame is int x ? replay.Frames[x].Time : (double?)null, handler.NextFrame?.Time, "Unexpected next frame");
}
// Replay frame carrying an importance flag and its original insertion index (for sort-stability checks).
private class TestReplayFrame : ReplayFrame
{
public readonly bool IsImportant;
public readonly int FrameIndex;
public TestReplayFrame(double time, bool isImportant = false, int frameIndex = 0)
: base(time)
{
IsImportant = isImportant;
FrameIndex = frameIndex;
}
}
// Minimal concrete handler: 1000ms important-section allowance, frame-accurate playback.
private class TestInputHandler : FramedReplayInputHandler<TestReplayFrame>
{
public TestInputHandler(Replay replay)
: base(replay)
{
FrameAccuratePlayback = true;
}
protected override double AllowedImportantTimeSpan => 1000;
protected override bool IsImportant(TestReplayFrame frame) => frame.IsImportant;
}
}
}
| |
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using UnityEngine;
using System.Collections.Generic;
using System.Linq;
namespace TiltBrushToolkit {
/// <summary>
/// A point in space to teleport to.
/// </summary>
[AddComponentMenu("Tilt Brush/Story/Story Teleport Point")]
public class StoryTeleportPoint : MonoBehaviour {
// Tint used for the teleport icon material.
public static Color TELEPORT_MAINCOLOR = Color.cyan;
[Tooltip("Make all teleport points available from here")]
[SerializeField] public bool m_TeleportToAllPoints = false;
[Tooltip("Specify which points can be teleported to from here")]
[SerializeField] public StoryTeleportPoint[] m_Points = {};
[Tooltip("How far down is the floor")]
[SerializeField] private float m_FloorOffset = 1.1f;
[Tooltip("Teleport into this point when the scene starts?")]
[SerializeField] public bool m_TeleportOnStart = false;
[Tooltip("Place the player's eyes at this point?")]
[SerializeField] private bool m_MoveToSight = false;
[Tooltip("Icon to visualize the point (can be empty)")]
public Texture2D m_IconTexture;
public Material m_IconMaterial;
// Nearest enclosing StoryScene, resolved in Start() by walking up the hierarchy.
private StoryScene m_ParentScene;
// True while at least one laser pointer is aimed at this point.
public bool PointedAt { get { return m_PointedAt.Count > 0; } }
private List<uint> m_PointedAt = new List<uint>(); // which pointers are pointing at this
// Original scale, captured in Start() and used as the base for distance scaling.
private Vector3 m_LocalScale;
private GameObject m_IconObject;
// Effective target list: either m_Points or every other point (when m_TeleportToAllPoints).
private StoryTeleportPoint[] m_RuntimePoints;
// Outline of the forward-direction arrow drawn in gizmos (XZ plane).
static Vector3[] m_ArrowPoints = new Vector3[] {
new Vector3 (-0.06f, 0, -0.06f),
new Vector3 (0.06f, 0, -0.06f),
new Vector3 (0.06f, 0, 0.06f),
new Vector3 (0.12f, 0, 0.06f),
new Vector3 (0f, 0, 0.2f),
new Vector3 (-0.12f, 0, 0.06f),
new Vector3 (-0.06f, 0, 0.06f),
};
public StoryTeleportPoint[] ActivePoints { get { return m_RuntimePoints; } }
// Raised after a teleport to this point completes.
public static System.Action<StoryTeleportPoint> OnTeleported;
// Moves the player (VR play area) or the scene so the player ends up at this point,
// then shows only the points reachable from here.
public void TeleportHere() {
// Move the scene into this position
Vector3 targetPosition = transform.position;
#if TILTBRUSH_STEAMVRPRESENT
if (m_MoveToSight)
targetPosition -= VRInput.Instance.HeadPosition - VRInput.Instance.VR_PlayArea.transform.position;
else
targetPosition += Vector3.down * m_FloorOffset;
#endif
// NOTE(review): VRInput is referenced here outside the TILTBRUSH_STEAMVRPRESENT
// guard - confirm VRInput compiles without that define.
if (VRInput.Instance.IsSteamVRPresent) {
#if TILTBRUSH_STEAMVRPRESENT
VRInput.Instance.VR_PlayArea.transform.position = targetPosition;
#endif
} else {
// Non-VR: move the scene in the opposite direction instead of the player.
if (m_ParentScene)
m_ParentScene.transform.position = -targetPosition;
}
if (OnTeleported != null)
OnTeleported(this);
// Hide all other points, including myself
foreach(var p in Resources.FindObjectsOfTypeAll<StoryTeleportPoint>())
p.gameObject.SetActive(ActivePoints.Contains(p));
}
// Editor-time defaults: make the collider a trigger with a sensible radius.
void Reset() {
if (GetComponent<Collider>() != null) GetComponent<Collider>().isTrigger = true;
if (GetComponent<SphereCollider>() != null) GetComponent<SphereCollider>().radius = 0.2f;
}
// Caches scale, wires pointer callbacks, builds the optional icon quad, resolves
// the parent scene, computes the runtime target list, and optionally teleports.
void Start() {
if (GetComponent<Collider>() != null) GetComponent<Collider>().isTrigger = true;
m_LocalScale = transform.localScale;
#if TILTBRUSH_STEAMVRPRESENT
// Get callbacks from laser pointers
foreach(var pointer in Resources.FindObjectsOfTypeAll<SteamVR_LaserPointer>()) {
pointer.PointerIn += Pointer_PointerIn;
pointer.PointerOut += Pointer_PointerOut;
}
#endif
// Create an icon
if (m_IconTexture != null) {
if (m_IconMaterial == null) {
Debug.LogError("No material for the telporting icon defined, using default.", this);
m_IconMaterial = new Material(Shader.Find("Diffuse"));
}
m_IconObject = GameObject.CreatePrimitive(PrimitiveType.Quad);
Destroy(m_IconObject.GetComponent<Collider>());
m_IconObject.name = "Icon";
m_IconObject.transform.SetParent(transform);
m_IconObject.transform.localPosition = Vector3.zero;
m_IconObject.transform.localScale = Vector3.one * GetComponent<SphereCollider>().radius * 2f;
// NOTE(review): 'shader' is looked up but never assigned to the material below - confirm intent.
var shader = Shader.Find("Unlit/AlwaysVisible");
if (shader == null) shader = Shader.Find("Unlit/Color");
var renderer = m_IconObject.GetComponent<MeshRenderer>();
renderer.material = m_IconMaterial;
renderer.material.color = TELEPORT_MAINCOLOR;
renderer.material.mainTexture = m_IconTexture;
if (Camera.main)
m_IconObject.transform.LookAt(Camera.main.transform);
}
// Find the parent scene
var parent = transform.parent;
while (parent != null) {
m_ParentScene = parent.GetComponent<StoryScene> ();
if (m_ParentScene != null)
break;
parent = parent.parent;
}
// Get target points
if (m_TeleportToAllPoints) {
var allpoints = new List<StoryTeleportPoint>();
foreach(var p in Resources.FindObjectsOfTypeAll<StoryTeleportPoint>()) {
if (p != this)
allpoints.Add(p);
}
m_RuntimePoints = allpoints.ToArray();
} else {
m_RuntimePoints = m_Points;
}
// Teleport on start
if (m_TeleportOnStart)
TeleportHere();
}
#if TILTBRUSH_STEAMVRPRESENT
// Unsubscribe from laser pointer events to avoid dangling handlers.
void OnDestroy() {
foreach (var pointer in Resources.FindObjectsOfTypeAll<SteamVR_LaserPointer>()) {
pointer.PointerIn -= Pointer_PointerIn;
pointer.PointerOut -= Pointer_PointerOut;
}
}
private void Pointer_PointerIn(object sender, PointerEventArgs e) {
if (e.target == transform)
m_PointedAt.Add(e.controllerIndex);
}
private void Pointer_PointerOut(object sender, PointerEventArgs e) {
if(e.target == transform && m_PointedAt.Contains(e.controllerIndex))
m_PointedAt.Remove(e.controllerIndex);
}
#endif
// Per-frame: trigger teleport on VR trigger press, billboard the icon toward the
// camera, and scale the point with camera distance (enlarged while pointed at).
void Update() {
#if TILTBRUSH_STEAMVRPRESENT
foreach (var i in m_PointedAt) {
if (VRInput.Instance.IsTriggerPressedDown((int)i))
StoryManager.m_Instance.TransitionTo(this, StoryManager.TransitionType.Fade, 0.7f, Color.black);
}
#endif
if (Camera.main != null) {
if (m_IconObject != null && Camera.main)
m_IconObject.transform.LookAt(Camera.main.transform);
// Scale depending on distance to the camera
var scale = Mathf.Clamp01 ((transform.position - Camera.main.transform.position).magnitude / 4f);
scale = Mathf.Lerp (0.3f, 1f, scale) * (PointedAt ? 1.25f : 1f);
transform.localScale = Vector3.Lerp (transform.localScale, m_LocalScale * scale, 8.0f * Time.deltaTime);
}
}
void OnDrawGizmos() {
Gizmos.color = Color.cyan;
Gizmos.DrawWireSphere(transform.position, 0.2f);
}
// Selected-gizmo view: arrows to reachable points, player bounding boxes, floor, and a forward arrow.
void OnDrawGizmosSelected() {
var color = Color.cyan;
Gizmos.color = color;
Gizmos.DrawSphere(transform.position, 0.05f);
foreach (var p in (m_TeleportToAllPoints ? FindObjectsOfType<StoryTeleportPoint>() : m_Points)) {
if (p == null || p == this) continue;
DrawArrowGizmo(transform.position, p.transform.position);
Gizmos.DrawWireSphere(p.transform.position, 0.05f);
}
// draw player
Gizmos.color = new Color(1,1,1, 0.7f);
Gizmos.matrix = transform.localToWorldMatrix;
float standingHeight = 1.65f;
var offset = Vector3.down * Mathf.Max(m_FloorOffset, m_MoveToSight ? standingHeight - 0.1f : 0);
Gizmos.DrawWireCube(offset + Vector3.up * standingHeight * .5f, new Vector3(0.2f, standingHeight, 0.2f)); // player standing
if (!m_MoveToSight) {
float sittingHeight = 1.2f;
offset = Vector3.down * Mathf.Max(m_FloorOffset, m_MoveToSight ? sittingHeight - 0.1f : 0);
Gizmos.DrawWireCube(offset + Vector3.up * sittingHeight * .5f, new Vector3(0.4f, sittingHeight, 0.4f)); // player sitting
}
Gizmos.DrawWireCube(offset, new Vector3(1.5f, 0.01f, 1.5f)); // floor
Vector3 arrowoffset = Vector3.forward * 0.5f;
for (int i = 0; i < m_ArrowPoints.Length - 1; i++)
Gizmos.DrawLine (arrowoffset + m_ArrowPoints [i], arrowoffset + m_ArrowPoints [i+1]);
Gizmos.DrawLine (arrowoffset + m_ArrowPoints [0], arrowoffset + m_ArrowPoints [m_ArrowPoints.Length - 1]);
Gizmos.matrix = Matrix4x4.identity;
}
// Draws a gizmo line from pos to target with a two-sided arrow head at the target.
void DrawArrowGizmo(Vector3 pos, Vector3 target, float arrowHeadLength = 0.5f, float arrowHeadAngle = 20.0f) {
Gizmos.DrawLine(pos, target);
var direction = (target - pos).normalized;
Gizmos.DrawRay(pos, direction);
if ((target - pos).magnitude > 0) {
Vector3 right = Quaternion.LookRotation(direction) * Quaternion.Euler(0, 180 + arrowHeadAngle, 0) * new Vector3(0, 0, 1);
Vector3 left = Quaternion.LookRotation(direction) * Quaternion.Euler(0, 180 - arrowHeadAngle, 0) * new Vector3(0, 0, 1);
Gizmos.DrawRay(target, right * arrowHeadLength);
Gizmos.DrawRay(target, left * arrowHeadLength);
Gizmos.DrawLine(target + right * arrowHeadLength, target + left * arrowHeadLength);
}
}
}
} // namespace TiltBrushToolkit
| |
// /*
// * Copyright (c) 2016, Alachisoft. All Rights Reserved.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// */
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.ProtocolBuffers;
using pbc = global::Google.ProtocolBuffers.Collections;
using pbd = global::Google.ProtocolBuffers.Descriptors;
using scg = global::System.Collections.Generic;
namespace Alachisoft.NosDB.Common.Protobuf {
namespace Proto {
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
// Generated protocol-buffers descriptor holder for the (field-less) ESENTConfig
// message. Auto-generated code: do not edit by hand.
public static partial class ESENTConfig {
#region EXTENSION registration
public static void RegisterAllExtensions(pb::ExtensionRegistry registry) {
}
#endregion
#region Static variables
internal static pbd::MessageDescriptor internal__static_Alachisoft_NosDB_Common_Protobuf_ESENTConfig__Descriptor;
internal static pb::FieldAccess.FieldAccessorTable<global::Alachisoft.NosDB.Common.Protobuf.ESENTConfig, global::Alachisoft.NosDB.Common.Protobuf.ESENTConfig.Builder> internal__static_Alachisoft_NosDB_Common_Protobuf_ESENTConfig__FieldAccessorTable;
#endregion
#region Descriptor
public static pbd::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbd::FileDescriptor descriptor;
static ESENTConfig() {
// Base64-encoded serialized FileDescriptorProto for ESENTConfig.proto.
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"ChFFU0VOVENvbmZpZy5wcm90bxIgQWxhY2hpc29mdC5Ob3NEQi5Db21tb24u",
"UHJvdG9idWYiDQoLRVNFTlRDb25maWdCOwokY29tLmFsYWNoaXNvZnQubm9z",
"ZGIuY29tbW9uLnByb3RvYnVmQhNFU0VOVENvbmZpZ1Byb3RvY29s"));
pbd::FileDescriptor.InternalDescriptorAssigner assigner = delegate(pbd::FileDescriptor root) {
descriptor = root;
internal__static_Alachisoft_NosDB_Common_Protobuf_ESENTConfig__Descriptor = Descriptor.MessageTypes[0];
internal__static_Alachisoft_NosDB_Common_Protobuf_ESENTConfig__FieldAccessorTable =
new pb::FieldAccess.FieldAccessorTable<global::Alachisoft.NosDB.Common.Protobuf.ESENTConfig, global::Alachisoft.NosDB.Common.Protobuf.ESENTConfig.Builder>(internal__static_Alachisoft_NosDB_Common_Protobuf_ESENTConfig__Descriptor,
new string[] { });
return null;
};
pbd::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
new pbd::FileDescriptor[] {
}, assigner);
}
#endregion
}
}
#region Messages
// Generated message class for the field-less ESENTConfig proto message.
// Instances are immutable; construct or copy via CreateBuilder()/ToBuilder().
// Because the message declares no fields, only UnknownFields can carry data.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class ESENTConfig : pb::GeneratedMessage<ESENTConfig, ESENTConfig.Builder> {
private ESENTConfig() { }
// Shared immutable default instance; safe to share since there is no field state.
private static readonly ESENTConfig defaultInstance = new ESENTConfig().MakeReadOnly();
// Name/tag lookup tables used by the name-keyed merge path; empty — no fields declared.
private static readonly string[] _eSENTConfigFieldNames = new string[] { };
private static readonly uint[] _eSENTConfigFieldTags = new uint[] { };
public static ESENTConfig DefaultInstance {
get { return defaultInstance; }
}
public override ESENTConfig DefaultInstanceForType {
get { return DefaultInstance; }
}
protected override ESENTConfig ThisMessage {
get { return this; }
}
// Descriptor and accessor table are owned by the generated Proto.ESENTConfig holder class.
public static pbd::MessageDescriptor Descriptor {
get { return global::Alachisoft.NosDB.Common.Protobuf.Proto.ESENTConfig.internal__static_Alachisoft_NosDB_Common_Protobuf_ESENTConfig__Descriptor; }
}
protected override pb::FieldAccess.FieldAccessorTable<ESENTConfig, ESENTConfig.Builder> InternalFieldAccessors {
get { return global::Alachisoft.NosDB.Common.Protobuf.Proto.ESENTConfig.internal__static_Alachisoft_NosDB_Common_Protobuf_ESENTConfig__FieldAccessorTable; }
}
// Always initialized: the message has no required fields to validate.
public override bool IsInitialized {
get {
return true;
}
}
// Serializes the message; with no declared fields, only unknown fields are written.
public override void WriteTo(pb::ICodedOutputStream output) {
CalcSerializedSize();
string[] field_names = _eSENTConfigFieldNames;
UnknownFields.WriteTo(output);
}
// Cached wire size; -1 means "not computed yet".
private int memoizedSerializedSize = -1;
public override int SerializedSize {
get {
int size = memoizedSerializedSize;
if (size != -1) return size;
return CalcSerializedSize();
}
}
// Computes and memoizes the serialized size (unknown fields only).
private int CalcSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += UnknownFields.SerializedSize;
memoizedSerializedSize = size;
return size;
}
// Standard generated ParseFrom overloads: each creates a Builder, merges the
// input, and returns the finished immutable message via BuildParsed().
public static ESENTConfig ParseFrom(pb::ByteString data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static ESENTConfig ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static ESENTConfig ParseFrom(byte[] data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static ESENTConfig ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static ESENTConfig ParseFrom(global::System.IO.Stream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static ESENTConfig ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
// Delimited variants read a varint length prefix before the message payload.
public static ESENTConfig ParseDelimitedFrom(global::System.IO.Stream input) {
return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
}
public static ESENTConfig ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
}
public static ESENTConfig ParseFrom(pb::ICodedInputStream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static ESENTConfig ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
// No repeated fields to freeze, so making the instance read-only is the identity.
private ESENTConfig MakeReadOnly() {
return this;
}
public static Builder CreateBuilder() { return new Builder(); }
public override Builder ToBuilder() { return CreateBuilder(this); }
public override Builder CreateBuilderForType() { return new Builder(); }
public static Builder CreateBuilder(ESENTConfig prototype) {
return new Builder(prototype);
}
// Builder for ESENTConfig. Uses a copy-on-write scheme: it starts by sharing
// the read-only prototype and only clones it on the first mutation (PrepareBuilder).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class Builder : pb::GeneratedBuilder<ESENTConfig, Builder> {
protected override Builder ThisBuilder {
get { return this; }
}
public Builder() {
result = DefaultInstance;
resultIsReadOnly = true;
}
internal Builder(ESENTConfig cloneFrom) {
result = cloneFrom;
resultIsReadOnly = true;
}
// True while 'result' is a shared immutable instance that must not be mutated.
private bool resultIsReadOnly;
private ESENTConfig result;
// Copy-on-write: replace the shared read-only result with a fresh mutable
// copy before handing it out for mutation.
private ESENTConfig PrepareBuilder() {
if (resultIsReadOnly) {
ESENTConfig original = result;
result = new ESENTConfig();
resultIsReadOnly = false;
MergeFrom(original);
}
return result;
}
public override bool IsInitialized {
get { return result.IsInitialized; }
}
protected override ESENTConfig MessageBeingBuilt {
get { return PrepareBuilder(); }
}
// Reset by re-sharing the immutable default rather than clearing in place.
public override Builder Clear() {
result = DefaultInstance;
resultIsReadOnly = true;
return this;
}
public override Builder Clone() {
if (resultIsReadOnly) {
return new Builder(result);
} else {
return new Builder().MergeFrom(result);
}
}
public override pbd::MessageDescriptor DescriptorForType {
get { return global::Alachisoft.NosDB.Common.Protobuf.ESENTConfig.Descriptor; }
}
public override ESENTConfig DefaultInstanceForType {
get { return global::Alachisoft.NosDB.Common.Protobuf.ESENTConfig.DefaultInstance; }
}
// Freezes and returns the built message; subsequent mutations trigger a new copy.
public override ESENTConfig BuildPartial() {
if (resultIsReadOnly) {
return result;
}
resultIsReadOnly = true;
return result.MakeReadOnly();
}
public override Builder MergeFrom(pb::IMessage other) {
if (other is ESENTConfig) {
return MergeFrom((ESENTConfig) other);
} else {
base.MergeFrom(other);
return this;
}
}
// With no declared fields, merging another ESENTConfig only merges unknown fields.
public override Builder MergeFrom(ESENTConfig other) {
if (other == global::Alachisoft.NosDB.Common.Protobuf.ESENTConfig.DefaultInstance) return this;
PrepareBuilder();
this.MergeUnknownFields(other.UnknownFields);
return this;
}
public override Builder MergeFrom(pb::ICodedInputStream input) {
return MergeFrom(input, pb::ExtensionRegistry.Empty);
}
// Streaming merge. A tag of 0 with a non-null field_name indicates a
// name-keyed input format; the name is looked up in the (empty) field-name
// table, so here every named field falls through to unknown-field handling.
public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
PrepareBuilder();
pb::UnknownFieldSet.Builder unknownFields = null;
uint tag;
string field_name;
while (input.ReadTag(out tag, out field_name)) {
if(tag == 0 && field_name != null) {
int field_ordinal = global::System.Array.BinarySearch(_eSENTConfigFieldNames, field_name, global::System.StringComparer.Ordinal);
if(field_ordinal >= 0)
tag = _eSENTConfigFieldTags[field_ordinal];
else {
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
continue;
}
}
switch (tag) {
case 0: {
// Tag 0 with no resolvable name is malformed input.
throw pb::InvalidProtocolBufferException.InvalidTag();
}
default: {
// An end-group tag terminates this message when nested in a group.
if (pb::WireFormat.IsEndGroupTag(tag)) {
if (unknownFields != null) {
this.UnknownFields = unknownFields.Build();
}
return this;
}
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
break;
}
}
}
if (unknownFields != null) {
this.UnknownFields = unknownFields.Build();
}
return this;
}
}
// Touch the holder class's Descriptor so its static initializer (descriptor
// wiring) is guaranteed to have run before this type is used.
static ESENTConfig() {
object.ReferenceEquals(global::Alachisoft.NosDB.Common.Protobuf.Proto.ESENTConfig.Descriptor, null);
}
}
#endregion
}
#endregion Designer generated code
| |
using System;
using System.Linq;
using System.Reflection;
using System.Xml;
using Umbraco.Core;
using Umbraco.Web;
using umbraco.BusinessLogic.Utils;
using umbraco.cms.businesslogic.member;
using umbraco.IO;
namespace umbraco.presentation.umbracobase
{
[Obsolete]
/// <summary>
/// Legacy Umbraco "/base" REST extension descriptor. Given an extension alias
/// and a method name, resolves the target method either from the
/// restExtensions config file or from types decorated with the RestExtension
/// attribute, and evaluates whether the current member is allowed to call it.
/// On success the type/method/assembly/alias properties are populated and
/// isAllowed is true; otherwise isAllowed stays false.
/// </summary>
public class restExtension
{
private Type _type;
private MethodInfo _method;
private Assembly _assembly;
private string _alias;
private bool _isAllowed;
// Defaults to true: responses are returned as XML unless configured otherwise.
private bool _returnXml = true;
// Resolved declaring type of the extension method.
public Type type
{
get { return _type; }
set { _type = value; }
}
// Resolved extension method to invoke.
public MethodInfo method
{
get { return _method; }
set { _method = value; }
}
// Assembly the resolved type lives in.
public Assembly assembly
{
get { return _assembly; }
set { _assembly = value; }
}
// The extension alias this descriptor was resolved for.
public string alias
{
get { return _alias; }
set { _alias = value; }
}
// True when permission checks passed and a matching method was found.
public bool isAllowed
{
get { return _isAllowed; }
set { _isAllowed = value; }
}
// Whether the extension's result should be rendered as XML.
public bool returnXML
{
get { return _returnXml; }
set { _returnXml = value; }
}
public restExtension()
{ }
/// <summary>
/// Resolves the extension. Resolution order: (1) the restExtensions config
/// file is consulted for a matching ext/permission node; (2) if none is
/// found, assemblies are scanned for types carrying a RestExtension
/// attribute with the requested alias. Permission is granted by an
/// allowAll flag or by per-member authentication (group/type/id).
/// </summary>
public restExtension(string extensionAlias, string methodName)
{
bool allowed = false;
// Tracks which resolution path succeeded: true = config file, false = attribute.
bool fromFile = true;
XmlDocument baseDoc = new XmlDocument(); //RESTExtension document...
baseDoc.Load(IOHelper.MapPath(SystemFiles.RestextensionsConfig));
XmlNode baseExt = baseDoc.SelectSingleNode("/RestExtensions/ext [@alias='" + extensionAlias + "']/permission [@method='" + methodName + "']");
//if not there.. it's not allowed...
if (baseExt != null)
{
//Access for all ?
if (baseExt.Attributes["allowAll"] != null)
{
// NOTE(review): culture-sensitive ToLower(); presumably config values are ASCII "true"/"false".
if (baseExt.Attributes["allowAll"].Value.ToString().ToLower() == "true")
allowed = true;
}
if (!allowed)
{
//Member Based permissions.. check for group, type and ID...
Member currentMem = Member.GetCurrentMember();
//not basic.. and not logged in? - out..
if (currentMem == null)
{
allowed = false;
}
else //do member authentication stuff...
allowed = memberAuthentication(baseExt, currentMem);
}
}
else
{
//check for RestExtensionAttribute
var restExtensions = PluginManager.Current.ResolveRestExtensions();
foreach (var t in restExtensions)
{
var temp = t.GetCustomAttributes(typeof(RestExtension), false).OfType<RestExtension>();
if (temp.Any(x => x.GetAlias() == extensionAlias))
{
MethodInfo mi = t.GetMethod(methodName);
if (mi != null)
{
//check allowed
var attributes = mi.GetCustomAttributes(typeof (RestExtensionMethod), false)
.OfType<RestExtensionMethod>()
.ToArray();
//check to make sure the method was decorated properly
if (attributes.Any())
{
fromFile = false;
var attribute = attributes.First();
allowed = attribute.allowAll;
if (!allowed)
{
//Member Based permissions.. check for group, type and ID...
Member currentMem = Member.GetCurrentMember();
//not basic.. and not logged in? - out..
if (currentMem == null)
{
allowed = false;
}
else
{
//do member authentication stuff...
allowed = memberAuthentication(attribute, currentMem);
}
}
// Attribute path populates the descriptor inline; the file path
// populates it in the fromFile branch below.
if (allowed)
{
this.method = t.GetMethod(methodName);
this.isAllowed = this.method != null;
this.alias = extensionAlias;
this.assembly = t.Assembly;
this.type = t;
this.returnXML = attribute.returnXml;
}
}
}
}
}
}
if (allowed)
{
// 'allowed && fromFile' implies baseExt was found in the config file,
// so baseExt and extNode are non-null here.
if (fromFile)
{
XmlNode extNode = baseDoc.SelectSingleNode("/RestExtensions/ext [@alias='" + extensionAlias + "']");
// NOTE(review): assumes the ext node carries 'assembly' and 'type'
// attributes — a malformed config entry would throw an NRE here.
string asml = extNode.Attributes["assembly"].Value;
string assemblyPath = IOHelper.MapPath(string.Format("{0}/{1}.dll", SystemDirectories.Bin, asml.TrimStart('/')));
Assembly returnAssembly = System.Reflection.Assembly.LoadFrom(assemblyPath);
string returnTypeName = extNode.Attributes["type"].Value;
Type returnType = returnAssembly.GetType(returnTypeName);
// returnXml can be switched off on the permission node.
if (baseExt.Attributes["returnXml"] != null && baseExt.Attributes["returnXml"].Value.ToLower() == "false")
this.returnXML = false;
this.method = returnType.GetMethod(methodName);
this.isAllowed = this.method != null;
this.alias = extensionAlias;
this.assembly = returnAssembly;
this.type = returnType;
}
}
else
{
this.isAllowed = false;
}
}
/// <summary>
/// Attribute-based permission check: grants access when the current member
/// matches any allowed group, member type alias, or member id declared on
/// the RestExtensionMethod attribute. Checks short-circuit once granted.
/// </summary>
private static bool memberAuthentication(RestExtensionMethod baseExt, Member currentMem)
{
//Check group, type and ID
bool memberAccess = false;
if (!string.IsNullOrEmpty(baseExt.GetAllowGroup()))
{
//Groups array
string[] groupArray = baseExt.GetAllowGroup().Split(',');
foreach (MemberGroup mg in currentMem.Groups.Values)
{
foreach (string group in groupArray)
{
if (group == mg.Text)
memberAccess = true;
}
}
}
//Membertype allowed?
if (!string.IsNullOrEmpty(baseExt.GetAllowType()) && !memberAccess)
{
//Types array
string[] typeArray = baseExt.GetAllowType().Split(',');
foreach (string type in typeArray)
{
if (type == currentMem.ContentType.Alias)
memberAccess = true;
}
}
//Member ID allowed? should this work with loginName instead?
if (!string.IsNullOrEmpty(baseExt.GetAllowMember()) && !memberAccess)
{
// NOTE(review): int.Parse will throw on a non-numeric allowMember value — TODO confirm intended.
if (int.Parse((string)baseExt.GetAllowMember().Trim()) == currentMem.Id)
memberAccess = true;
}
return memberAccess;
}
/// <summary>
/// Config-file permission check: same group/type/id rules as the attribute
/// overload, but read from allowGroup/allowType/allowMember attributes on
/// the permission XML node.
/// </summary>
private static bool memberAuthentication(XmlNode baseExt, Member currentMem)
{
//Check group, type and ID
bool memberAccess = false;
if (baseExt.Attributes["allowGroup"] != null)
{
if (baseExt.Attributes["allowGroup"].Value != "")
{
//Groups array
string[] groupArray = baseExt.Attributes["allowGroup"].Value.Split(',');
foreach (MemberGroup mg in currentMem.Groups.Values)
{
foreach (string group in groupArray)
{
if (group == mg.Text)
memberAccess = true;
}
}
}
}
//Membertype allowed?
if (baseExt.Attributes["allowType"] != null && !memberAccess)
{
if (baseExt.Attributes["allowType"].Value != "")
{
//Types array
string[] typeArray = baseExt.Attributes["allowType"].Value.Split(',');
foreach (string type in typeArray)
{
if (type == currentMem.ContentType.Alias)
memberAccess = true;
}
}
}
//Member ID allowed? should this work with loginName instead?
if (baseExt.Attributes["allowMember"] != null && !memberAccess)
{
if (baseExt.Attributes["allowMember"].Value != "")
{
// NOTE(review): int.Parse will throw on a non-numeric allowMember value — TODO confirm intended.
if (int.Parse((string)baseExt.Attributes["allowMember"].Value.Trim()) == currentMem.Id)
memberAccess = true;
}
}
return memberAccess;
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.