context stringlengths 2.52k 185k | gt stringclasses 1 value |
|---|---|
namespace Volante
{
using System;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Double linked list.
/// </summary>
public class L2List<T> : PersistentCollection<T> where T : L2ListElem<T>
{
    T head;
    T tail;
    private int nElems;          // number of elements currently linked
    private int updateCounter;   // bumped on every mutation; live enumerators check it and fail fast

    /// <summary>
    /// Get list head element
    /// </summary>
    /// <returns>list head element or null if list is empty</returns>
    public T Head
    {
        get { return head; }
    }

    /// <summary>
    /// Get list tail element
    /// </summary>
    /// <returns>list tail element or null if list is empty</returns>
    public T Tail
    {
        get { return tail; }
    }

    /// <summary>
    /// Check whether the list contains the given element (reference identity).
    /// </summary>
    public override bool Contains(T obj)
    {
        foreach (T o in this)
        {
            if (o == obj)
                return true;
        }
        return false;
    }

    /// <summary>
    /// Make list empty.
    /// </summary>
    public override void Clear()
    {
        lock (this)  // NOTE(review): lock(this) matches the rest of this class; a private gate object would be safer if callers also lock the list
        {
            Modify();
            head = tail = null;
            nElems = 0;
            updateCounter += 1;
        }
    }

    /// <summary>
    /// Insert element at the beginning of the list
    /// </summary>
    public void Prepend(T elem)
    {
        lock (this)
        {
            Modify();
            elem.Modify();
            elem.next = head;
            elem.prev = null;
            if (head != null)
            {
                head.Modify();
                head.prev = elem;
            }
            else
                tail = elem;
            head = elem;
            nElems += 1;
            updateCounter += 1;
        }
    }

    /// <summary>
    /// Insert element at the end of the list
    /// </summary>
    public void Append(T elem)
    {
        lock (this)
        {
            Modify();
            elem.Modify();
            elem.next = null;
            elem.prev = tail;
            if (tail != null)
            {
                tail.Modify();
                tail.next = elem;
            }
            // Fix: the original assigned tail in an else-branch and then
            // unconditionally reassigned it; one assignment suffices.
            tail = elem;
            if (head == null)
                head = elem;
            nElems += 1;
            updateCounter += 1;
        }
    }

    /// <summary>
    /// Remove element from the list
    /// </summary>
    /// <returns>always true; the element is assumed to belong to this list</returns>
    public override bool Remove(T elem)
    {
        lock (this)
        {
            Modify();
            // elem's own links are mutated below, so it must be marked
            // modified as well (consistent with Prepend/Append).
            elem.Modify();
            // Capture both neighbours before unlinking. The original cleared
            // elem.prev before using it for the successor's back-link (leaving
            // next.prev null on a middle removal) and computed the new tail
            // from the already-cleared pointer (losing the tail when the tail
            // element of a multi-element list was removed).
            T prev = elem.prev;
            T next = elem.next;
            if (prev != null)
            {
                prev.Modify();
                prev.next = next;
            }
            else
                head = next;
            if (next != null)
            {
                next.Modify();
                next.prev = prev;
            }
            else
                tail = prev;
            elem.prev = null;
            elem.next = null;
            nElems -= 1;
            updateCounter += 1;
            return true;
        }
    }

    /// <summary>
    /// Add element to the list (appended at the end).
    /// </summary>
    public override void Add(T elem)
    {
        Append(elem);
    }

    /// <summary>Number of elements in the list.</summary>
    public override int Count
    {
        get { return nElems; }
    }

    // Forward-only enumerator; any mutation of the list invalidates it.
    class L2ListEnumerator : IEnumerator<T>
    {
        private T curr;           // current element; null before the first MoveNext and after the end
        private int counter;      // snapshot of list.updateCounter taken at Reset
        private L2List<T> list;
        private bool head;        // true until the first MoveNext after Reset

        internal L2ListEnumerator(L2List<T> list)
        {
            this.list = list;
            Reset();
        }

        public void Reset()
        {
            curr = null;
            counter = list.updateCounter;
            head = true;
        }

        public T Current
        {
            get
            {
                if (curr == null || counter != list.updateCounter)
                    throw new InvalidOperationException();
                return curr;
            }
        }

        object IEnumerator.Current
        {
            get { return Current; }
        }

        public void Dispose() { }

        public bool MoveNext()
        {
            if (counter != list.updateCounter)
                throw new InvalidOperationException();
            if (head)
            {
                curr = list.head;
                head = false;
            }
            else if (curr != null)
                curr = curr.next;
            return curr != null;
        }
    }

    public override IEnumerator<T> GetEnumerator()
    {
        return new L2ListEnumerator(this);
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.Text;
using Roslyn.Test.Utilities;
using Roslyn.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.UnitTests.MetadataAsSource
{
public abstract partial class AbstractMetadataAsSourceTests
{
// Metadata source compiled into a reference when a test supplies none.
public const string DefaultMetadataSource = "public class C {}";
// Metadata name of the type declared by DefaultMetadataSource.
public const string DefaultSymbolMetadataName = "C";
/// <summary>
/// Per-test harness: owns a TestWorkspace together with the
/// metadata-as-source and text-buffer-factory services, and cleans up any
/// generated files on Dispose.
/// </summary>
internal class TestContext : IDisposable
{
private readonly TestWorkspace _workspace;
private readonly IMetadataAsSourceFileService _metadataAsSourceService;
private readonly ITextBufferFactoryService _textBufferFactoryService;
/// <summary>
/// Builds a context whose workspace contains one project (C# unless
/// <paramref name="projectLanguage"/> says otherwise) referencing the given
/// metadata sources; DefaultMetadataSource is used when none are supplied.
/// </summary>
public static async Task<TestContext> CreateAsync(string projectLanguage = null, IEnumerable<string> metadataSources = null, bool includeXmlDocComments = false, string sourceWithSymbolReference = null)
{
projectLanguage = projectLanguage ?? LanguageNames.CSharp;
metadataSources = metadataSources ?? SpecializedCollections.EmptyEnumerable<string>();
metadataSources = !metadataSources.Any()
? new[] { AbstractMetadataAsSourceTests.DefaultMetadataSource }
: metadataSources;
var workspace = await CreateWorkspaceAsync(projectLanguage, metadataSources, includeXmlDocComments, sourceWithSymbolReference);
return new TestContext(workspace);
}
public TestContext(TestWorkspace workspace)
{
_workspace = workspace;
_metadataAsSourceService = _workspace.GetService<IMetadataAsSourceFileService>();
_textBufferFactoryService = _workspace.GetService<ITextBufferFactoryService>();
}
public Solution CurrentSolution
{
get { return _workspace.CurrentSolution; }
}
// The single project created by CreateWorkspaceAsync.
public Project DefaultProject
{
get { return this.CurrentSolution.Projects.First(); }
}
// Generates metadata-as-source for a symbol the caller has already resolved.
public Task<MetadataAsSourceFile> GenerateSourceAsync(ISymbol symbol, Project project = null)
{
project = project ?? this.DefaultProject;
// Generate and hold onto the result so it can be disposed of with this context
return _metadataAsSourceService.GetGeneratedFileAsync(project, symbol);
}
/// <summary>
/// Resolves <paramref name="symbolMetadataName"/> (DefaultSymbolMetadataName
/// when null) against the project's references, asserts the compilation is
/// diagnostic-free, and generates metadata-as-source for the symbol.
/// </summary>
public async Task<MetadataAsSourceFile> GenerateSourceAsync(string symbolMetadataName = null, Project project = null)
{
symbolMetadataName = symbolMetadataName ?? AbstractMetadataAsSourceTests.DefaultSymbolMetadataName;
project = project ?? this.DefaultProject;
// Get an ISymbol corresponding to the metadata name
var compilation = await project.GetCompilationAsync();
var diagnostics = compilation.GetDiagnostics().ToArray();
Assert.Equal(0, diagnostics.Length);
var symbol = await ResolveSymbolAsync(symbolMetadataName, compilation);
// Generate and hold onto the result so it can be disposed of with this context
var result = await _metadataAsSourceService.GetGeneratedFileAsync(project, symbol);
return result;
}
// Normalizes source text to single-space-separated tokens so comparisons
// ignore whitespace/layout differences.
private static string GetSpaceSeparatedTokens(string source)
{
var tokens = source.Split(new[] { ' ', '\r', '\n', '\t' }, StringSplitOptions.RemoveEmptyEntries).Select(s => s.Trim()).Where(s => s != string.Empty);
return string.Join(" ", tokens);
}
/// <summary>
/// Compares the generated file against <paramref name="expected"/>: token-wise
/// (whitespace-insensitive, identifier location marked with [| |]) when
/// <paramref name="compareTokens"/> is true, otherwise exact text plus an
/// exact identifier span taken from markup in <paramref name="expected"/>.
/// </summary>
public void VerifyResult(MetadataAsSourceFile file, string expected, bool compareTokens = true)
{
var actual = File.ReadAllText(file.FilePath).Trim();
var actualSpan = file.IdentifierLocation.SourceSpan;
if (compareTokens)
{
// Compare tokens and verify location relative to the generated tokens
expected = GetSpaceSeparatedTokens(expected);
actual = GetSpaceSeparatedTokens(actual.Insert(actualSpan.Start, "[|").Insert(actualSpan.End + 2, "|]"));
}
else
{
// Compare exact texts and verify that the location returned is exactly that
// indicated by expected
TextSpan expectedSpan;
MarkupTestFile.GetSpan(expected.TrimStart().TrimEnd(), out expected, out expectedSpan);
Assert.Equal(expectedSpan.Start, actualSpan.Start);
Assert.Equal(expectedSpan.End, actualSpan.End);
}
Assert.Equal(expected, actual);
}
// Convenience wrapper: generate source for the named symbol, then verify it.
public async Task GenerateAndVerifySourceAsync(string symbolMetadataName, string expected, bool compareTokens = true, Project project = null)
{
var result = await GenerateSourceAsync(symbolMetadataName, project);
VerifyResult(result, expected, compareTokens);
}
// Asserts both results refer to the same generated file instance
// (Assert.Same is reference equality on the path strings).
public void VerifyDocumentReused(MetadataAsSourceFile a, MetadataAsSourceFile b)
{
Assert.Same(a.FilePath, b.FilePath);
}
public void VerifyDocumentNotReused(MetadataAsSourceFile a, MetadataAsSourceFile b)
{
Assert.NotSame(a.FilePath, b.FilePath);
}
// Cleans up generated files first; the workspace is disposed even if
// cleanup throws.
public void Dispose()
{
try
{
_metadataAsSourceService.CleanupGeneratedFiles();
}
finally
{
_workspace.Dispose();
}
}
/// <summary>
/// Searches the compilation's references for the named type, or — when the
/// name has a member suffix (e.g. "C.M" or "C..ctor") — for that member of
/// the named type. Returns null if nothing matches.
/// </summary>
public async Task<ISymbol> ResolveSymbolAsync(string symbolMetadataName, Compilation compilation = null)
{
if (compilation == null)
{
compilation = await this.DefaultProject.GetCompilationAsync();
var diagnostics = compilation.GetDiagnostics().ToArray();
Assert.Equal(0, diagnostics.Length);
}
foreach (var reference in compilation.References)
{
var assemblySymbol = (IAssemblySymbol)compilation.GetAssemblyOrModuleSymbol(reference);
var namedTypeSymbol = assemblySymbol.GetTypeByMetadataName(symbolMetadataName);
if (namedTypeSymbol != null)
{
return namedTypeSymbol;
}
else
{
// The symbol name could possibly be referring to the member of a named
// type. Parse the member symbol name.
var lastDotIndex = symbolMetadataName.LastIndexOf('.');
if (lastDotIndex < 0)
{
// The symbol name is not a member name and the named type was not found
// in this assembly
continue;
}
// The member symbol name itself could contain a dot (e.g. '.ctor'), so make
// sure we don't cut that off
while (lastDotIndex > 0 && symbolMetadataName[lastDotIndex - 1] == '.')
{
--lastDotIndex;
}
var memberSymbolName = symbolMetadataName.Substring(lastDotIndex + 1);
var namedTypeName = symbolMetadataName.Substring(0, lastDotIndex);
namedTypeSymbol = assemblySymbol.GetTypeByMetadataName(namedTypeName);
if (namedTypeSymbol != null)
{
var memberSymbol = namedTypeSymbol.GetMembers()
.Where(member => member.MetadataName == memberSymbolName)
.FirstOrDefault();
if (memberSymbol != null)
{
return memberSymbol;
}
}
}
}
return null;
}
// Heuristic only: matches VB keywords as plain substrings, so C# source
// containing e.g. "Class" inside an identifier would also match.
private static bool ContainsVisualBasicKeywords(string input)
{
return
input.Contains("Class") ||
input.Contains("Structure") ||
input.Contains("Namespace") ||
input.Contains("Sub") ||
input.Contains("Function") ||
input.Contains("Dim");
}
// Guesses whether a metadata source snippet is VB or C# (defaults to C#).
private static string DeduceLanguageString(string input)
{
return ContainsVisualBasicKeywords(input)
? LanguageNames.VisualBasic : LanguageNames.CSharp;
}
// Builds the TestWorkspace XML: one project with a MetadataReferenceFromSource
// element per snippet, plus an optional SourceDocument holding a selected
// symbol reference.
private static Task<TestWorkspace> CreateWorkspaceAsync(string projectLanguage, IEnumerable<string> metadataSources, bool includeXmlDocComments, string sourceWithSymbolReference)
{
var xmlString = string.Concat(@"
<Workspace>
<Project Language=""", projectLanguage, @""" CommonReferences=""true"">");
metadataSources = metadataSources ?? new[] { AbstractMetadataAsSourceTests.DefaultMetadataSource };
foreach (var source in metadataSources)
{
var metadataLanguage = DeduceLanguageString(source);
xmlString = string.Concat(xmlString, string.Format(@"
<MetadataReferenceFromSource Language=""{0}"" CommonReferences=""true"" IncludeXmlDocComments=""{2}"">
<Document FilePath=""MetadataDocument"">
{1}
</Document>
</MetadataReferenceFromSource>",
metadataLanguage,
SecurityElement.Escape(source),
includeXmlDocComments.ToString()));
}
if (sourceWithSymbolReference != null)
{
xmlString = string.Concat(xmlString, string.Format(@"
<Document FilePath=""SourceDocument"">
{0}
</Document>",
sourceWithSymbolReference));
}
xmlString = string.Concat(xmlString, @"
</Project>
</Workspace>");
return TestWorkspace.CreateAsync(xmlString);
}
// Loads the generated file into a text buffer, adds it to the workspace,
// and returns the related Document.
internal Document GetDocument(MetadataAsSourceFile file)
{
using (var reader = new StreamReader(file.FilePath))
{
var textBuffer = _textBufferFactoryService.CreateTextBuffer(reader, _textBufferFactoryService.TextContentType);
Assert.True(_metadataAsSourceService.TryAddDocumentToWorkspace(file.FilePath, textBuffer));
return textBuffer.AsTextContainer().GetRelatedDocuments().Single();
}
}
// Resolves the symbol referenced by the selected span of "SourceDocument".
internal async Task<ISymbol> GetNavigationSymbolAsync()
{
var testDocument = _workspace.Documents.Single(d => d.FilePath == "SourceDocument");
var document = _workspace.CurrentSolution.GetDocument(testDocument.Id);
var syntaxRoot = await document.GetSyntaxRootAsync();
var semanticModel = await document.GetSemanticModelAsync();
return semanticModel.GetSymbolInfo(syntaxRoot.FindNode(testDocument.SelectedSpans.Single())).Symbol;
}
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace Spa.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
// Number of elements produced for arrays, collections and dictionaries.
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
return GenerateObject(type, new Dictionary<Type, object>());
}
// Dispatches on the runtime shape of "type" (simple, array, generic,
// dictionary, collection, queryable, enum, POCO — checked in that order).
// createdObjectReferences tracks complex objects already built so circular
// object graphs terminate.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
// Handles closed generic types: Nullable<>, KeyValuePair<,>, Tuple<...>,
// the common collection/dictionary interfaces, then falls back to treating
// the type as a public POCO.
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
// Builds a Tuple of any arity; counts as failed only when every component
// came back null.
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
// True for any of the eight Tuple<> arities.
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
// Builds a KeyValuePair<K,V>; fails only when both the key and the value
// could not be generated.
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
// Builds a size-element array of the element type; null when no element
// could be generated.
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
// Populates a dictionary via its Add/Contains(Key) methods, discovered by
// reflection. NOTE(review): if the type exposes neither pair of methods the
// Invoke below throws NullReferenceException, which the caller's catch-all
// converts to null — confirm this is the intended fallback.
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
// Returns the first declared enum value, or null for an empty enum.
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
// Builds a List<T> (or object[]) and wraps it with Queryable.AsQueryable.
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
// Populates a collection via its reflected Add method; null when every
// generated element was null.
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
// Generates the underlying value of a Nullable<T> (boxing makes it the
// same object as a plain T).
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
// Instantiates a POCO (default ctor required for reference types), records
// it in createdObjectReferences BEFORE filling members so circular
// references resolve to the same instance, then populates public
// properties and fields.
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
// Fills every writable public instance property with a generated value.
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
// Fills every public instance field with a generated value.
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
// Produces sample values for the built-in simple types; _index increments
// per generated value so successive samples differ where the factory uses it.
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
/*
* MessageProperties.cs - Implementation of the
* "System.Runtime.Remoting.Messaging.MessageProperties" class.
*
* Copyright (C) 2003 Southern Storm Software, Pty Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
namespace System.Runtime.Remoting.Messaging
{
#if CONFIG_SERIALIZATION
using System.Collections;
// Dictionary view over a remoting message's properties: a fixed set of
// "special" property names is read and written through the supplied
// IMessageDictionary accessors, while every other key falls through to the
// ordinary backing dictionary.
internal class MessageProperties : IDictionary
{
    // Internal state.
    private IMessageDictionary special;   // accessor for the special (reserved) properties
    private IDictionary dict;             // backing store for all other properties
    private String[] properties;          // names of the special properties

    // Constructor.
    public MessageProperties(IMessageDictionary special, IDictionary dict)
    {
        this.special = special;
        this.dict = dict;
        this.properties = special.SpecialProperties;
    }

    // Implement the IDictionary interface.
    void IDictionary.Add(Object key, Object value)
    {
        // Special property names are fixed: they may be assigned through the
        // indexer but never added or removed.
        if(Array.IndexOf(properties, key) != -1)
        {
            throw new ArgumentException
                (_("Remoting_InvalidKey"));
        }
        dict.Add(key, value);
    }
    void IDictionary.Clear()
    {
        // Only the ordinary properties are cleared; special ones always exist.
        dict.Clear();
    }
    bool IDictionary.Contains(Object key)
    {
        if(Array.IndexOf(properties, key) != -1)
        {
            return true;
        }
        return dict.Contains(key);
    }
    IDictionaryEnumerator IDictionary.GetEnumerator()
    {
        return new Enumerator(this);
    }
    void IDictionary.Remove(Object key)
    {
        if(Array.IndexOf(properties, key) != -1)
        {
            throw new ArgumentException
                (_("Remoting_InvalidKey"));
        }
        dict.Remove(key);
    }
    bool IDictionary.IsFixedSize
    {
        get
        {
            return false;
        }
    }
    bool IDictionary.IsReadOnly
    {
        get
        {
            return false;
        }
    }
    // Indexer: routes special names to the IMessageDictionary accessors.
    Object IDictionary.this[Object key]
    {
        get
        {
            if(Array.IndexOf(properties, key) != -1)
            {
                return special.GetSpecialProperty((String)key);
            }
            else
            {
                return dict[key];
            }
        }
        set
        {
            if(Array.IndexOf(properties, key) != -1)
            {
                special.SetSpecialProperty((String)key, value);
            }
            else
            {
                dict[key] = value;
            }
        }
    }
    // Special names first, then the ordinary keys.
    ICollection IDictionary.Keys
    {
        get
        {
            ArrayList list = new ArrayList();
            foreach(String name in properties)
            {
                list.Add(name);
            }
            list.AddRange(dict.Keys);
            return list;
        }
    }
    // Special values first, then the ordinary values (same order as Keys).
    ICollection IDictionary.Values
    {
        get
        {
            ArrayList list = new ArrayList();
            foreach(String name in properties)
            {
                list.Add(special.GetSpecialProperty(name));
            }
            // Bug fix: this previously appended dict.Keys, so the Values
            // collection returned the ordinary *keys* instead of their values.
            list.AddRange(dict.Values);
            return list;
        }
    }
    // Implement the ICollection interface.
    void ICollection.CopyTo(Array array, int index)
    {
        foreach(String name in properties)
        {
            array.SetValue(special.GetSpecialProperty(name), index);
            ++index;
        }
        dict.CopyTo(array, index);
    }
    int ICollection.Count
    {
        get
        {
            return properties.Length + dict.Count;
        }
    }
    bool ICollection.IsSynchronized
    {
        get
        {
            return false;
        }
    }
    Object ICollection.SyncRoot
    {
        get
        {
            return this;
        }
    }
    // Implement the IEnumerable interface.
    IEnumerator IEnumerable.GetEnumerator()
    {
        return new Enumerator(this);
    }
    // Enumerator for a "MessageProperties" dictionary: walks the special
    // properties by index first, then defers to the backing dictionary's
    // own enumerator.
    private class Enumerator : IDictionaryEnumerator
    {
        // Internal state.
        private MessageProperties properties;
        private int index;               // position within the special names; -1 before first MoveNext
        private IDictionaryEnumerator e; // non-null once we have switched to the backing dictionary

        // Constructor.
        public Enumerator(MessageProperties properties)
        {
            this.properties = properties;
            this.index = -1;
            this.e = null;
        }

        // Implement the IEnumerator interface.
        public bool MoveNext()
        {
            if(e != null)
            {
                return e.MoveNext();
            }
            else
            {
                ++index;
                if(index < properties.properties.Length)
                {
                    return true;
                }
                // Special names exhausted: switch to the backing dictionary.
                e = properties.dict.GetEnumerator();
                return e.MoveNext();
            }
        }
        public void Reset()
        {
            index = -1;
            e = null;
        }
        public Object Current
        {
            get
            {
                // NOTE(review): IDictionaryEnumerator.Current conventionally
                // yields a DictionaryEntry; this implementation yields the
                // bare Value — confirm no caller depends on this before
                // changing it to return Entry.
                return Value;
            }
        }
        // Implement the IDictionaryEnumerator interface.
        public DictionaryEntry Entry
        {
            get
            {
                return new DictionaryEntry(Key, Value);
            }
        }
        public Object Key
        {
            get
            {
                if(e != null)
                {
                    return e.Key;
                }
                else if(index >= 0 &&
                        index < properties.properties.Length)
                {
                    return properties.properties[index];
                }
                else
                {
                    throw new InvalidOperationException
                        (_("Invalid_BadEnumeratorPosition"));
                }
            }
        }
        public Object Value
        {
            get
            {
                if(e != null)
                {
                    return e.Value;
                }
                else if(index >= 0 &&
                        index < properties.properties.Length)
                {
                    return properties.special.GetSpecialProperty
                        (properties.properties[index]);
                }
                else
                {
                    throw new InvalidOperationException
                        (_("Invalid_BadEnumeratorPosition"));
                }
            }
        }
    }; // class Enumerator
}; // class MessageProperties
#endif // CONFIG_SERIALIZATION
}; // namespace System.Runtime.Remoting.Messaging
| |
// StreamManipulator.cs
// Copyright (C) 2001 Mike Krueger
//
// This file was translated from java, it was part of the GNU Classpath
// Copyright (C) 2001 Free Software Foundation, Inc.
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// Linking this library statically or dynamically with other modules is
// making a combined work based on this library. Thus, the terms and
// conditions of the GNU General Public License cover the whole
// combination.
//
// As a special exception, the copyright holders of this library give you
// permission to link this library with independent modules to produce an
// executable, regardless of the license terms of these independent
// modules, and to copy and distribute the resulting executable under
// terms of your choice, provided that you also meet, for each linked
// independent module, the terms and conditions of the license of that
// module. An independent module is a module which is not derived from
// or based on this library. If you modify this library, you may extend
// this exception to your version of the library, but you are not
// obligated to do so. If you do not wish to do so, delete this
// exception statement from your version.
#if CONFIG_COMPRESSION
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
/// <summary>
/// This class allows us to retrieve a specified amount of bits from
/// the input buffer, as well as copy big byte blocks.
///
/// It uses an int buffer to store up to 31 bits for direct
/// manipulation. This guarantees that we can get at least 16 bits,
/// but we only need at most 15, so this is all safe.
///
/// There are some optimizations in this class, for example, you must
/// never peek more than 8 bits more than needed, and you must first
/// peek bits before you may drop them. This is not a general purpose
/// class but optimized for the behaviour of the Inflater.
///
/// authors of the original java version : John Leuner, Jochen Hoenicke
/// </summary>
internal class StreamManipulator
{
	// Current input block set by SetInput; bytes in [window_start, window_end)
	// have not yet been moved into the bit buffer.
	private byte[] window;
	private int window_start = 0;
	private int window_end = 0;

	// Bit buffer: the next bits to deliver are the least significant bits.
	// Holds at most 31 valid bits; SetInput/CopyBytes keep the pending byte
	// count even so PeekBits can always refill 16 bits at a time.
	private uint buffer = 0;
	private int bits_in_buffer = 0;

	/// <summary>
	/// Get the next n bits but don't increase input pointer. n must be
	/// less or equal 16 and if this call succeeds, you must drop
	/// at least n-8 bits in the next call.
	/// </summary>
	/// <returns>
	/// the value of the bits, or -1 if not enough bits available.
	/// </returns>
	public int PeekBits(int n)
	{
		if (bits_in_buffer < n)
		{
			if (window_start == window_end)
			{
				return -1;
			}
			// Refill 16 bits at once; the invariant that an even number of
			// bytes is pending makes reading two bytes here safe.
			buffer |= (uint)((window[window_start++] & 0xff |
			                  (window[window_start++] & 0xff) << 8) << bits_in_buffer);
			bits_in_buffer += 16;
		}
		return (int)(buffer & ((1 << n) - 1));
	}

	/// <summary>
	/// Drops the next n bits from the input. You should have called peekBits
	/// with a bigger or equal n before, to make sure that enough bits are in
	/// the bit buffer.
	/// </summary>
	public void DropBits(int n)
	{
		buffer >>= n;
		bits_in_buffer -= n;
	}

	/// <summary>
	/// Gets the next n bits and increases input pointer. This is equivalent
	/// to peekBits followed by dropBits, except for correct error handling.
	/// </summary>
	/// <returns>
	/// the value of the bits, or -1 if not enough bits available.
	/// </returns>
	public int GetBits(int n)
	{
		int bits = PeekBits(n);
		if (bits >= 0)
		{
			DropBits(n);
		}
		return bits;
	}

	/// <summary>
	/// Gets the number of bits available in the bit buffer. This must be
	/// only called when a previous peekBits() returned -1.
	/// </summary>
	/// <returns>
	/// the number of bits available.
	/// </returns>
	public int AvailableBits
	{
		get
		{
			return bits_in_buffer;
		}
	}

	/// <summary>
	/// Gets the number of bytes available.
	/// </summary>
	/// <returns>
	/// the number of bytes available.
	/// </returns>
	public int AvailableBytes
	{
		get
		{
			// Unconsumed window bytes plus whole bytes still in the bit buffer.
			return window_end - window_start + (bits_in_buffer >> 3);
		}
	}

	/// <summary>
	/// Skips to the next byte boundary.
	/// </summary>
	public void SkipToByteBoundary()
	{
		buffer >>= (bits_in_buffer & 7);
		bits_in_buffer &= ~7;
	}

	public bool IsNeedingInput
	{
		get
		{
			return window_start == window_end;
		}
	}

	/// <summary>
	/// Copies length bytes from input buffer to output buffer starting
	/// at output[offset]. You have to make sure, that the buffer is
	/// byte aligned. If not enough bytes are available, copies fewer
	/// bytes.
	/// </summary>
	/// <param name="output">
	/// the buffer.
	/// </param>
	/// <param name="offset">
	/// the offset in the buffer.
	/// </param>
	/// <param name="length">
	/// the length to copy, 0 is allowed.
	/// </param>
	/// <returns>
	/// the number of bytes copied, 0 if no byte is available.
	/// </returns>
	public int CopyBytes(byte[] output, int offset, int length)
	{
		if (length < 0)
		{
			// BUGFIX: the single-string ArgumentOutOfRangeException constructor
			// takes a parameter name, not a message; the original passed
			// "length negative" as the parameter name. Use the two-argument
			// overload so both name and message come out correctly.
			throw new ArgumentOutOfRangeException("length", "length is negative");
		}
		if ((bits_in_buffer & 7) != 0)
		{
			/* bits_in_buffer may only be 0 or 8 */
			throw new InvalidOperationException("Bit buffer is not aligned!");
		}
		int count = 0;
		// First drain whole bytes already sitting in the bit buffer.
		while (bits_in_buffer > 0 && length > 0)
		{
			output[offset++] = (byte) buffer;
			buffer >>= 8;
			bits_in_buffer -= 8;
			length--;
			count++;
		}
		if (length == 0)
		{
			return count;
		}
		int avail = window_end - window_start;
		if (length > avail)
		{
			length = avail;
		}
		System.Array.Copy(window, window_start, output, offset, length);
		window_start += length;
		if (((window_start - window_end) & 1) != 0)
		{
			/* We always want an even number of bytes in input, see peekBits */
			buffer = (uint)(window[window_start++] & 0xff);
			bits_in_buffer = 8;
		}
		return count + length;
	}

	public StreamManipulator()
	{
	}

	/// <summary>
	/// Resets the bit buffer and the window pointers. Note that the window
	/// array reference itself is kept; it is replaced on the next SetInput.
	/// </summary>
	public void Reset()
	{
		buffer = (uint)(window_start = window_end = bits_in_buffer = 0);
	}

	/// <summary>
	/// Sets the input block to read from. The previous input must have been
	/// completely consumed first.
	/// </summary>
	public void SetInput(byte[] buf, int off, int len)
	{
		if (window_start < window_end)
		{
			throw new InvalidOperationException("Old input was not completely processed");
		}
		int end = off + len;
		/* We want to throw an ArrayIndexOutOfBoundsException early. The
		 * check is very tricky: it also handles integer wrap around.
		 */
		if (0 > off || off > end || end > buf.Length)
		{
			throw new ArgumentOutOfRangeException();
		}
		if ((len & 1) != 0)
		{
			/* We always want an even number of bytes in input, see peekBits */
			buffer |= (uint)((buf[off++] & 0xff) << bits_in_buffer);
			bits_in_buffer += 8;
		}
		window = buf;
		window_start = off;
		window_end = end;
	}
}
}
#endif // CONFIG_COMPRESSION
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting.Server;
using System.Text.Formatting;
namespace System.IO.Pipelines.Samples.Http
{
public partial class HttpConnection<TContext>
{
	private static readonly byte[] _http11Bytes = Encoding.UTF8.GetBytes("HTTP/1.1 ");
	private static readonly byte[] _chunkedEndBytes = Encoding.UTF8.GetBytes("0\r\n\r\n");
	private static readonly byte[] _endChunkBytes = Encoding.ASCII.GetBytes("\r\n");

	private readonly IPipeReader _input;
	private readonly IPipeWriter _output;
	private readonly IHttpApplication<TContext> _application;

	public RequestHeaderDictionary RequestHeaders => _parser.RequestHeaders;
	public ResponseHeaderDictionary ResponseHeaders { get; } = new ResponseHeaderDictionary();

	public ReadableBuffer HttpVersion => _parser.HttpVersion;
	public ReadableBuffer Path => _parser.Path;
	public ReadableBuffer Method => _parser.Method;

	// TODO: Check the http version
	public bool KeepAlive => true; //RequestHeaders.ContainsKey("Connection") && string.Equals(RequestHeaders["Connection"], "keep-alive");

	private bool HasContentLength => ResponseHeaders.ContainsKey("Content-Length");
	private bool HasTransferEncoding => ResponseHeaders.ContainsKey("Transfer-Encoding");

	private HttpRequestStream<TContext> _requestBody;
	private HttpResponseStream<TContext> _responseBody;

	// True when the response is written with chunked transfer encoding
	// (no explicit Content-Length/Transfer-Encoding header and keep-alive).
	private bool _autoChunk;

	private HttpRequestParser _parser = new HttpRequestParser();

	public HttpConnection(IHttpApplication<TContext> application, IPipeReader input, IPipeWriter output)
	{
		_application = application;
		_input = input;
		_output = output;
		_requestBody = new HttpRequestStream<TContext>(this);
		_responseBody = new HttpResponseStream<TContext>(this);
	}

	public IPipeReader Input => _input;

	public IPipeWriter Output => _output;

	public HttpRequestStream<TContext> RequestBody { get; set; }

	public HttpResponseStream<TContext> ResponseBody { get; set; }

	/// <summary>
	/// Reads and processes requests on this connection until the client
	/// disconnects, a request fails to parse, or keep-alive is disabled.
	/// </summary>
	public async Task ProcessAllRequests()
	{
		Reset();

		while (true)
		{
			var result = await _input.ReadAsync();
			var buffer = result.Buffer;
			var consumed = buffer.Start;
			var examined = buffer.Start;

			try
			{
				if (buffer.IsEmpty && result.IsCompleted)
				{
					// We're done with this connection
					return;
				}

				var parserResult = _parser.ParseRequest(buffer, out consumed, out examined);

				switch (parserResult)
				{
					case HttpRequestParser.ParseResult.Incomplete:
						if (result.IsCompleted)
						{
							// Didn't get the whole request and the connection ended
							throw new EndOfStreamException();
						}
						// Need more data
						continue;
					case HttpRequestParser.ParseResult.Complete:
						// Done
						break;
					case HttpRequestParser.ParseResult.BadRequest:
						// TODO: Don't throw here;
						throw new Exception();
					default:
						break;
				}
			}
			catch (Exception)
			{
				StatusCode = 400;
				await EndResponse();
				return;
			}
			finally
			{
				// Always tell the pipe how far we got, even on failure.
				_input.Advance(consumed, examined);
			}

			var context = _application.CreateContext(this);
			Exception error = null;

			try
			{
				await _application.ProcessRequestAsync(context);
			}
			catch (Exception ex)
			{
				error = ex;
				StatusCode = 500;
			}
			finally
			{
				// BUGFIX: the context was previously only disposed on the
				// exception path; dispose it after every request so the
				// application can release per-request resources on success too.
				_application.DisposeContext(context, error);
				await EndResponse();
			}

			if (!KeepAlive)
			{
				break;
			}

			Reset();
		}
	}

	// Writes any pending response headers and, when chunking, the terminating
	// zero-length chunk, then flushes the output pipe.
	private async Task EndResponse()
	{
		var buffer = _output.Alloc();

		if (!HasStarted)
		{
			WriteBeginResponseHeaders(buffer);
		}

		if (_autoChunk)
		{
			WriteEndResponse(buffer);
		}

		await buffer.FlushAsync();
	}

	// Resets per-request state so the connection can serve the next request.
	private void Reset()
	{
		RequestBody = _requestBody;
		ResponseBody = _responseBody;
		_parser.Reset();
		ResponseHeaders.Reset();
		HasStarted = false;
		StatusCode = 200;
		_autoChunk = false;
		_method = null;
		_path = null;
	}

	/// <summary>
	/// Writes a block of response body data, emitting headers first if the
	/// response has not started and chunk framing when auto-chunking.
	/// </summary>
	public Task WriteAsync(Span<byte> data)
	{
		var buffer = _output.Alloc();

		if (!HasStarted)
		{
			WriteBeginResponseHeaders(buffer);
		}

		if (_autoChunk)
		{
			// Chunk header: length in hex followed by CRLF, then data, then CRLF.
			buffer.Append(data.Length, SymbolTable.InvariantUtf8, 'x');
			buffer.Write(_endChunkBytes);
			buffer.Write(data);
			buffer.Write(_endChunkBytes);
		}
		else
		{
			buffer.Write(data);
		}

		return FlushAsync(buffer);
	}

	public async Task FlushAsync(WritableBuffer buffer)
	{
		await buffer.FlushAsync();
	}

	// Writes the status line and response headers exactly once per response.
	private void WriteBeginResponseHeaders(WritableBuffer buffer)
	{
		if (HasStarted)
		{
			return;
		}

		HasStarted = true;

		buffer.Write(_http11Bytes);
		var status = ReasonPhrases.ToStatusBytes(StatusCode);
		buffer.Write(status);

		_autoChunk = !HasContentLength && !HasTransferEncoding && KeepAlive;

		ResponseHeaders.CopyTo(_autoChunk, buffer);
	}

	// Writes the terminating zero-length chunk ("0\r\n\r\n").
	private void WriteEndResponse(WritableBuffer buffer)
	{
		buffer.Write(_chunkedEndBytes);
	}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="BaseDataReader.cs" company="Sirenix IVS">
// Copyright (c) 2018 Sirenix IVS
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
//-----------------------------------------------------------------------
namespace Stratus.OdinSerializer
{
using System;
using System.IO;
/// <summary>
/// Provides basic functionality and overridable abstract methods for implementing a data reader.
/// <para />
/// If you inherit this class, it is VERY IMPORTANT that you implement each abstract method to the *exact* specifications the documentation specifies.
/// </summary>
/// <seealso cref="BaseDataReaderWriter" />
/// <seealso cref="IDataReader" />
public abstract class BaseDataReader : BaseDataReaderWriter, IDataReader
{
private DeserializationContext context;
private Stream stream;
/// <summary>
/// Initializes a new instance of the <see cref="BaseDataReader" /> class.
/// </summary>
/// <param name="stream">The base stream of the reader. May be null; a null stream is
/// simply skipped here (a subclass may assign <see cref="Stream" /> later). Assigning a
/// non-null stream routes through the <see cref="Stream" /> property, which validates it.</param>
/// <param name="context">The deserialization context to use. May be null; a default
/// context is created lazily by the <see cref="Context" /> getter.</param>
/// <exception cref="System.ArgumentException">A non-null stream was supplied that cannot be read from
/// (thrown by the <see cref="Stream" /> property setter).</exception>
protected BaseDataReader(Stream stream, DeserializationContext context)
{
	this.context = context;
	if (stream != null)
	{
		this.Stream = stream;
	}
}
/// <summary>
/// Gets the id of the node currently being read. A value below zero means
/// the current node carries no id.
/// </summary>
/// <value>
/// The current node id.
/// </value>
public int CurrentNodeId
{
	get
	{
		return this.CurrentNode.Id;
	}
}

/// <summary>
/// Gets the depth of the current node, i.e. how many entries are on the
/// node stack right now.
/// </summary>
/// <value>
/// The current node depth.
/// </value>
public int CurrentNodeDepth
{
	get
	{
		return this.NodeDepth;
	}
}

/// <summary>
/// Gets the name of the node currently being read.
/// </summary>
/// <value>
/// The name of the current node.
/// </value>
public string CurrentNodeName
{
	get
	{
		return this.CurrentNode.Name;
	}
}
/// <summary>
/// Gets or sets the base stream of the reader.
/// </summary>
/// <value>
/// The base stream of the reader.
/// </value>
/// <exception cref="System.ArgumentNullException">value</exception>
/// <exception cref="System.ArgumentException">Cannot read from stream</exception>
public virtual Stream Stream
{
	get
	{
		return this.stream;
	}

	set
	{
		// Reject null and unreadable streams up front; everything else is accepted.
		if (value == null)
		{
			throw new ArgumentNullException("value");
		}

		if (!value.CanRead)
		{
			throw new ArgumentException("Cannot read from stream");
		}

		this.stream = value;
	}
}
/// <summary>
/// Gets the deserialization context. If none has been assigned yet, a
/// default context is created on first access.
/// </summary>
/// <value>
/// The deserialization context.
/// </value>
public DeserializationContext Context
{
	get
	{
		// Lazily create a default context the first time it is requested.
		return this.context ?? (this.context = new DeserializationContext());
	}

	set
	{
		this.context = value;
	}
}
/// <summary>
/// Tries to enter a node. This will succeed if the next entry is an <see cref="EntryType.StartOfNode"/>.
/// <para />
/// This call MUST (eventually) be followed by a corresponding call to <see cref="IDataReader.ExitNode(DeserializationContext)"/>
/// <para />
/// This call will change the values of the <see cref="IDataReader.IsInArrayNode"/>, <see cref="IDataReader.CurrentNodeName"/>, <see cref="IDataReader.CurrentNodeId"/> and <see cref="IDataReader.CurrentNodeDepth"/> properties to the correct values for the current node.
/// </summary>
/// <param name="type">The type of the node. This value will be null if there was no metadata, or if the reader's serialization binder failed to resolve the type name.</param>
/// <returns><c>true</c> if entering a node succeeded, otherwise <c>false</c></returns>
public abstract bool EnterNode(out Type type);
/// <summary>
/// Exits the current node. This method will keep skipping entries using <see cref="IDataReader.SkipEntry(DeserializationContext)"/> until an <see cref="EntryType.EndOfNode"/> is reached, or the end of the stream is reached.
/// <para />
/// This call MUST have been preceded by a corresponding call to <see cref="IDataReader.EnterNode(out Type)"/>.
/// <para />
/// This call will change the values of the <see cref="IDataReader.IsInArrayNode"/>, <see cref="IDataReader.CurrentNodeName"/>, <see cref="IDataReader.CurrentNodeId"/> and <see cref="IDataReader.CurrentNodeDepth"/> to the correct values for the node that was prior to the current node.
/// </summary>
/// <returns><c>true</c> if the method exited a node, <c>false</c> if it reached the end of the stream.</returns>
public abstract bool ExitNode();
/// <summary>
/// Tries to enters an array node. This will succeed if the next entry is an <see cref="EntryType.StartOfArray"/>.
/// <para />
/// This call MUST (eventually) be followed by a corresponding call to <see cref="IDataReader.ExitArray(DeserializationContext)"/>
/// <para />
/// This call will change the values of the <see cref="IDataReader.IsInArrayNode"/>, <see cref="IDataReader.CurrentNodeName"/>, <see cref="IDataReader.CurrentNodeId"/> and <see cref="IDataReader.CurrentNodeDepth"/> properties to the correct values for the current array node.
/// </summary>
/// <param name="length">The length of the array that was entered.</param>
/// <returns><c>true</c> if an array was entered, otherwise <c>false</c></returns>
public abstract bool EnterArray(out long length);
/// <summary>
/// Exits the closest array. This method will keep skipping entries using <see cref="IDataReader.SkipEntry(DeserializationContext)"/> until an <see cref="EntryType.EndOfArray"/> is reached, or the end of the stream is reached.
/// <para />
/// This call MUST have been preceded by a corresponding call to <see cref="IDataReader.EnterArray(out long)"/>.
/// <para />
/// This call will change the values of the <see cref="IDataReader.IsInArrayNode"/>, <see cref="IDataReader.CurrentNodeName"/>, <see cref="IDataReader.CurrentNodeId"/> and <see cref="IDataReader.CurrentNodeDepth"/> to the correct values for the node that was prior to the exited array node.
/// </summary>
/// <returns><c>true</c> if the method exited an array, <c>false</c> if it reached the end of the stream.</returns>
public abstract bool ExitArray();
/// <summary>
/// Reads a primitive array value. This call will succeed if the next entry is an <see cref="EntryType.PrimitiveArray"/>.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <typeparam name="T">The element type of the primitive array. Valid element types can be determined using <see cref="FormatterUtilities.IsPrimitiveArrayType(Type)"/>.</typeparam>
/// <param name="array">The resulting primitive array.</param>
/// <returns><c>true</c> if reading a primitive array succeeded, otherwise <c>false</c></returns>
public abstract bool ReadPrimitiveArray<T>(out T[] array) where T : struct;
/// <summary>
/// Peeks ahead and returns the type of the next entry in the stream.
/// </summary>
/// <param name="name">The name of the next entry, if it has one.</param>
/// <returns>The type of the next entry.</returns>
public abstract EntryType PeekEntry(out string name);
/// <summary>
/// Reads an internal reference id. This call will succeed if the next entry is an <see cref="EntryType.InternalReference"/>.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="id">The internal reference id.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadInternalReference(out int id);
/// <summary>
/// Reads an external reference index. This call will succeed if the next entry is an <see cref="EntryType.ExternalReferenceByIndex"/>.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="index">The external reference index.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadExternalReference(out int index);
/// <summary>
/// Reads an external reference guid. This call will succeed if the next entry is an <see cref="EntryType.ExternalReferenceByGuid"/>.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="guid">The external reference guid.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadExternalReference(out Guid guid);
/// <summary>
/// Reads an external reference string. This call will succeed if the next entry is an <see cref="EntryType.ExternalReferenceByString" />.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode" /> or an <see cref="EntryType.EndOfArray" />.
/// </summary>
/// <param name="id">The external reference string.</param>
/// <returns>
/// <c>true</c> if reading the value succeeded, otherwise <c>false</c>
/// </returns>
public abstract bool ReadExternalReference(out string id);
/// <summary>
/// Reads a <see cref="char"/> value. This call will succeed if the next entry is an <see cref="EntryType.String"/>.
/// <para />
/// If the string of the entry is longer than 1 character, the first character of the string will be taken as the result.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadChar(out char value);
/// <summary>
/// Reads a <see cref="string"/> value. This call will succeed if the next entry is an <see cref="EntryType.String"/>.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadString(out string value);
/// <summary>
/// Reads a <see cref="Guid"/> value. This call will succeed if the next entry is an <see cref="EntryType.Guid"/>.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadGuid(out Guid value);
/// <summary>
/// Reads an <see cref="sbyte"/> value. This call will succeed if the next entry is an <see cref="EntryType.Integer"/>.
/// <para />
/// If the value of the stored integer is smaller than <see cref="sbyte.MinValue"/> or larger than <see cref="sbyte.MaxValue"/>, the result will be default(<see cref="sbyte"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadSByte(out sbyte value);
/// <summary>
/// Reads a <see cref="short"/> value. This call will succeed if the next entry is an <see cref="EntryType.Integer"/>.
/// <para />
/// If the value of the stored integer is smaller than <see cref="short.MinValue"/> or larger than <see cref="short.MaxValue"/>, the result will be default(<see cref="short"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadInt16(out short value);
/// <summary>
/// Reads an <see cref="int"/> value. This call will succeed if the next entry is an <see cref="EntryType.Integer"/>.
/// <para />
/// If the value of the stored integer is smaller than <see cref="int.MinValue"/> or larger than <see cref="int.MaxValue"/>, the result will be default(<see cref="int"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadInt32(out int value);
/// <summary>
/// Reads a <see cref="long"/> value. This call will succeed if the next entry is an <see cref="EntryType.Integer"/>.
/// <para />
/// If the value of the stored integer is smaller than <see cref="long.MinValue"/> or larger than <see cref="long.MaxValue"/>, the result will be default(<see cref="long"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadInt64(out long value);
/// <summary>
/// Reads a <see cref="byte"/> value. This call will succeed if the next entry is an <see cref="EntryType.Integer"/>.
/// <para />
/// If the value of the stored integer is smaller than <see cref="byte.MinValue"/> or larger than <see cref="byte.MaxValue"/>, the result will be default(<see cref="byte"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadByte(out byte value);
/// <summary>
/// Reads an <see cref="ushort"/> value. This call will succeed if the next entry is an <see cref="EntryType.Integer"/>.
/// <para />
/// If the value of the stored integer is smaller than <see cref="ushort.MinValue"/> or larger than <see cref="ushort.MaxValue"/>, the result will be default(<see cref="ushort"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadUInt16(out ushort value);
/// <summary>
/// Reads an <see cref="uint"/> value. This call will succeed if the next entry is an <see cref="EntryType.Integer"/>.
/// <para />
/// If the value of the stored integer is smaller than <see cref="uint.MinValue"/> or larger than <see cref="uint.MaxValue"/>, the result will be default(<see cref="uint"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadUInt32(out uint value);
/// <summary>
/// Reads an <see cref="ulong"/> value. This call will succeed if the next entry is an <see cref="EntryType.Integer"/>.
/// <para />
/// If the value of the stored integer is smaller than <see cref="ulong.MinValue"/> or larger than <see cref="ulong.MaxValue"/>, the result will be default(<see cref="ulong"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadUInt64(out ulong value);
/// <summary>
/// Reads a <see cref="decimal"/> value. This call will succeed if the next entry is an <see cref="EntryType.FloatingPoint"/> or an <see cref="EntryType.Integer"/>.
/// <para />
/// If the stored integer or floating point value is smaller than <see cref="decimal.MinValue"/> or larger than <see cref="decimal.MaxValue"/>, the result will be default(<see cref="decimal"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadDecimal(out decimal value);
/// <summary>
/// Reads a <see cref="float"/> value. This call will succeed if the next entry is an <see cref="EntryType.FloatingPoint"/> or an <see cref="EntryType.Integer"/>.
/// <para />
/// If the stored integer or floating point value is smaller than <see cref="float.MinValue"/> or larger than <see cref="float.MaxValue"/>, the result will be default(<see cref="float"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadSingle(out float value);
/// <summary>
/// Reads a <see cref="double"/> value. This call will succeed if the next entry is an <see cref="EntryType.FloatingPoint"/> or an <see cref="EntryType.Integer"/>.
/// <para />
/// If the stored integer or floating point value is smaller than <see cref="double.MinValue"/> or larger than <see cref="double.MaxValue"/>, the result will be default(<see cref="double"/>).
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadDouble(out double value);
/// <summary>
/// Reads a <see cref="bool"/> value. This call will succeed if the next entry is an <see cref="EntryType.Boolean"/>.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <param name="value">The value that has been read.</param>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadBoolean(out bool value);
/// <summary>
/// Reads a <c>null</c> value. This call will succeed if the next entry is an <see cref="EntryType.Null"/>.
/// <para />
/// If the call fails (and returns <c>false</c>), it will skip the current entry value, unless that entry is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>.
/// </summary>
/// <returns><c>true</c> if reading the value succeeded, otherwise <c>false</c></returns>
public abstract bool ReadNull();
/// <summary>
/// Skips the next entry value, unless it is an <see cref="EntryType.EndOfNode"/> or an <see cref="EntryType.EndOfArray"/>. If the next entry value is an <see cref="EntryType.StartOfNode"/> or an <see cref="EntryType.StartOfArray"/>, all of its contents will be processed, deserialized and registered in the deserialization context, so that internal reference values are not lost to entries further down the stream.
/// </summary>
public virtual void SkipEntry()
{
	var peekedEntry = this.PeekEntry();

	if (peekedEntry == EntryType.StartOfNode)
	{
		Type type;
		bool exitNode = true;

		this.EnterNode(out type);

		try
		{
			if (type != null)
			{
				// We have the necessary metadata to read this type, and register all of its reference values (perhaps including itself) in the serialization context
				// Sadly, we have no choice but to risk boxing of structs here
				// Luckily, this is a rare case
				if (FormatterUtilities.IsPrimitiveType(type))
				{
					// It is a boxed primitive type; we read the value and register it
					var serializer = Serializer.Get(type);
					object value = serializer.ReadValueWeak(this);

					if (this.CurrentNodeId >= 0)
					{
						this.Context.RegisterInternalReference(this.CurrentNodeId, value);
					}
				}
				else
				{
					var formatter = FormatterLocator.GetFormatter(type, this.Context.Config.SerializationPolicy);
					object value = formatter.Deserialize(this);

					if (this.CurrentNodeId >= 0)
					{
						this.Context.RegisterInternalReference(this.CurrentNodeId, value);
					}
				}
			}
			else
			{
				// We have no metadata, and reference values might be lost
				// We must read until a node on the same level terminates
				while (true)
				{
					peekedEntry = this.PeekEntry();

					if (peekedEntry == EntryType.EndOfStream)
					{
						break;
					}
					else if (peekedEntry == EntryType.EndOfNode)
					{
						break;
					}
					else if (peekedEntry == EntryType.EndOfArray)
					{
						this.ReadToNextEntry(); // Consume end of arrays that we can potentially get stuck on
					}
					else
					{
						this.SkipEntry();
					}
				}
			}
		}
		catch (SerializationAbortException)
		{
			// BUGFIX: rethrow with "throw;" instead of "throw ex;" so the
			// original stack trace of the abort is preserved.
			exitNode = false;
			throw;
		}
		finally
		{
			if (exitNode)
			{
				this.ExitNode();
			}
		}
	}
	else if (peekedEntry == EntryType.StartOfArray)
	{
		// We must read until an array on the same level terminates
		this.ReadToNextEntry(); // Consume start of array

		while (true)
		{
			peekedEntry = this.PeekEntry();

			if (peekedEntry == EntryType.EndOfStream)
			{
				break;
			}
			else if (peekedEntry == EntryType.EndOfArray)
			{
				this.ReadToNextEntry(); // Consume end of array and break
				break;
			}
			else if (peekedEntry == EntryType.EndOfNode)
			{
				this.ReadToNextEntry(); // Consume end of nodes that we can potentially get stuck on
			}
			else
			{
				this.SkipEntry();
			}
		}
	}
	else if (peekedEntry != EntryType.EndOfArray && peekedEntry != EntryType.EndOfNode) // We can't skip end of arrays and end of nodes
	{
		this.ReadToNextEntry(); // We can just skip a single value entry
	}
}
/// <summary>
/// Disposes all resources and streams kept by the data reader.
/// </summary>
/// <remarks>Abstract: concrete readers release their own format-specific resources here.</remarks>
public abstract void Dispose();
/// <summary>
/// Tells the reader that a new serialization session is about to begin, and that it should clear all cached values left over from any prior serialization sessions.
/// This method is only relevant when the same reader is used to deserialize several different, unrelated values.
/// </summary>
public virtual void PrepareNewSerializationSession()
{
    // Drop cached node state from any previous session (ClearNodes is
    // declared elsewhere in this class - presumably resets the node stack).
    this.ClearNodes();
}
/// <summary>
/// Gets a dump of the data being read by the reader. The format of this dump varies, but should be useful for debugging purposes.
/// </summary>
public abstract string GetDataDump();
/// <summary>
/// Peeks the current entry.
/// </summary>
/// <remarks>Does not consume the entry; use <see cref="ReadToNextEntry"/> to advance past it.</remarks>
/// <returns>The peeked entry.</returns>
protected abstract EntryType PeekEntry();
/// <summary>
/// Consumes the current entry, and reads to the next one.
/// </summary>
/// <returns>The next entry.</returns>
protected abstract EntryType ReadToNextEntry();
}
}
| |
/*******************************************************************************
* Copyright 2008-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and
* limitations under the License.
* *****************************************************************************
* __ _ _ ___
* ( )( \/\/ )/ __)
* /__\ \ / \__ \
* (_)(_) \/\/ (___/
*
* AWS SDK for .NET
* API Version: 2009-04-15
*/
using System;
using System.Collections.Generic;
using System.Globalization;
namespace Amazon.SimpleDB.Util
{
/// <summary>
/// Provides a collection of static functions to:
/// 1. Convert various values into strings that may be compared lexicographically
/// 2. Decode a Base64 Encoded string
/// 3. Decode an Amazon SimpleDB Attribute's properties
/// 4. Decode an Amazon SimpleDB Item's properties and constituent Item(s)
/// </summary>
public static class AmazonSimpleDBUtil
{
    /// <summary>
    /// Date format String, e.g. 2007-12-06T10:32:43.141-08:00
    /// </summary>
    // These values never change at runtime, so declare them const.
    private const string dateFormat = "yyyy-MM-ddTHH:mm:ss.fffzzzz";
    private const string base64Str = "base64";

    /// <summary>
    /// Encodes positive integer value into a string by zero-padding it up to the specified number of digits.
    /// </summary>
    /// <remarks>
    /// For example, the integer 123 encoded with a 6 digit maximum would be represented as 000123
    /// </remarks>
    /// <param name="number">positive integer to be encoded</param>
    /// <param name="maxNumDigits">maximum number of digits in the largest value in the data set</param>
    /// <returns>A string representation of the zero-padded integer</returns>
    public static string EncodeZeroPadding(int number, int maxNumDigits)
    {
        return number.ToString(CultureInfo.InvariantCulture).PadLeft(maxNumDigits, '0');
    }

    /// <summary>
    /// Encodes positive single-precision floating point value into a string by zero-padding it to the specified number of digits.
    /// </summary>
    /// <remarks>
    /// This function only zero-pads digits to the left of the decimal point.
    ///
    /// For example, the value 123.456 encoded with a 6 digit maximum would be represented as 000123.456
    /// </remarks>
    /// <param name="number">positive floating point value to be encoded</param>
    /// <param name="maxNumDigits">maximum number of digits in the largest value in the data set</param>
    /// <returns>A string representation of the zero-padded floating point value</returns>
    public static string EncodeZeroPadding(float number, int maxNumDigits)
    {
        string fltStr = number.ToString(CultureInfo.InvariantCulture);
        int decPt = fltStr.IndexOf('.');
        if (decPt == -1)
        {
            return fltStr.PadLeft(maxNumDigits, '0');
        }
        else
        {
            // Pad to maxNumDigits plus the length of "." and the fractional part,
            // so only the integer portion is effectively zero-padded.
            return fltStr.PadLeft(maxNumDigits + (fltStr.Length - decPt), '0');
        }
    }

    /// <summary>
    /// Encodes real integer value into a string by offsetting and zero-padding
    /// number up to the specified number of digits. Use this encoding method if the data
    /// range set includes both positive and negative values.
    /// </summary>
    /// <remarks>
    /// For example, the integer value -123 offset by 1000 with a maximum of 6 digits would be:
    /// -123 + 1000, padded to 6 digits: 000877
    /// </remarks>
    /// <param name="number">integer to be encoded</param>
    /// <param name="maxNumDigits">maximum number of digits in the largest absolute value in the data set</param>
    /// <param name="offsetValue">offset value, has to be greater than absolute value of any negative number in the data set.</param>
    /// <returns>A string representation of the integer</returns>
    public static string EncodeRealNumberRange(int number, int maxNumDigits, int offsetValue)
    {
        return (number + offsetValue).ToString(CultureInfo.InvariantCulture).PadLeft(maxNumDigits, '0');
    }

    /// <summary>
    /// Encodes real float value into a string by offsetting and zero-padding
    /// number up to the specified number of digits. Use this encoding method if the data
    /// range set includes both positive and negative values.
    /// </summary>
    /// <remarks>
    /// For example, the floating point value -123.456 offset by 1000 with
    /// a maximum of 6 digits to the left, and 4 to the right would be:
    /// 0008765440
    /// </remarks>
    /// <param name="number">floating point value to be encoded</param>
    /// <param name="maxDigitsLeft">maximum number of digits left of the decimal point in the largest absolute value in the data set</param>
    /// <param name="maxDigitsRight">maximum number of digits right of the decimal point in the largest absolute value in the data set, i.e. precision</param>
    /// <param name="offsetValue">offset value, has to be greater than absolute value of any negative number in the data set.</param>
    /// <returns>A string representation of the integer</returns>
    public static string EncodeRealNumberRange(float number, int maxDigitsLeft, int maxDigitsRight, int offsetValue)
    {
        // Shift the offset value left by maxDigitsRight decimal places so the
        // fractional part is preserved in the integer encoding.
        long shiftMultiplier = (long)Math.Pow(10, maxDigitsRight);
        long shiftedNumber = (long)Math.Round((number + offsetValue) * shiftMultiplier);
        return shiftedNumber.ToString(CultureInfo.InvariantCulture).PadLeft(maxDigitsLeft + maxDigitsRight, '0');
    }

    /// <summary>
    /// Decodes zero-padded positive float value from the string representation
    /// </summary>
    /// <param name="value">zero-padded string representation of the float value</param>
    /// <returns>original float value</returns>
    public static float DecodeZeroPaddingFloat(string value)
    {
        return float.Parse(value, CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Decodes zero-padded positive integer value from the string representation
    /// </summary>
    /// <param name="value">zero-padded string representation of the integer</param>
    /// <returns>original integer value</returns>
    public static int DecodeZeroPaddingInt(string value)
    {
        return int.Parse(value, CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Decodes integer value from the string representation that was created by using EncodeRealNumberRange(..) function.
    /// (Doc fix: this overload decodes an integer, not a float.)
    /// </summary>
    /// <param name="value">string representation of the integer value</param>
    /// <param name="offsetValue">offset value that was used in the original encoding</param>
    /// <returns>original integer value</returns>
    public static int DecodeRealNumberRangeInt(string value, int offsetValue)
    {
        return (int)(long.Parse(value, CultureInfo.InvariantCulture) - offsetValue);
    }

    /// <summary>
    /// Decodes float value from the string representation that was created by using EncodeRealNumberRange(..) function.
    /// </summary>
    /// <param name="value">string representation of the integer value</param>
    /// <param name="maxDigitsRight">maximum number of digits right of the decimal point in the largest absolute
    /// value in the data set (must be the same as the one used for encoding).</param>
    /// <param name="offsetValue">offset value that was used in the original encoding</param>
    /// <returns>original float value</returns>
    public static float DecodeRealNumberRangeFloat(string value, int maxDigitsRight, int offsetValue)
    {
        return (float)(long.Parse(value, CultureInfo.InvariantCulture) / Math.Pow(10, maxDigitsRight) - offsetValue);
    }

    /// <summary>
    /// Encodes date value into string format that can be compared lexicographically
    /// </summary>
    /// <param name="date">date value to be encoded</param>
    /// <returns>string representation of the date value</returns>
    public static string EncodeDate(DateTime date)
    {
        return date.ToString(dateFormat, CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Decodes date value from the string representation created using EncodeDate(..) function.
    /// </summary>
    /// <param name="value">string representation of the date value</param>
    /// <returns>original date value</returns>
    public static DateTime DecodeDate(string value)
    {
        return DateTime.ParseExact(value, dateFormat, CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Gets the Current Date as an ISO8601 formatted Timestamp
    /// </summary>
    /// <returns>ISO8601 formatted current timestamp String</returns>
    public static string FormattedCurrentTimestamp
    {
        get
        {
            return Amazon.Util.AWSSDKUtils.FormattedCurrentTimestampISO8601;
        }
    }

    /// <summary>
    /// Decodes the base64 encoded properties of the Attribute.
    /// The Name and/or Value properties of an Attribute can be base64 encoded.
    /// </summary>
    /// <param name="inputAttribute">The properties of this Attribute will be decoded</param>
    /// <seealso cref="P:Amazon.SimpleDB.Model.Attribute.NameEncoding" />
    /// <seealso cref="P:Amazon.SimpleDB.Model.Attribute.ValueEncoding" />
    public static void DecodeAttribute(Amazon.SimpleDB.Model.Attribute inputAttribute)
    {
        if (null == inputAttribute)
        {
            throw new ArgumentNullException("inputAttribute", "The Attribute passed in was null");
        }
        string encoding = inputAttribute.AlternateNameEncoding;
        if (null != encoding)
        {
            if (string.Equals(encoding, base64Str, StringComparison.OrdinalIgnoreCase))
            {
                // The Name is base64 encoded
                inputAttribute.Name = AmazonSimpleDBUtil.DecodeBase64String(inputAttribute.Name);
                // Clear the encoding marker so the attribute is not decoded twice.
                inputAttribute.AlternateNameEncoding = "";
            }
        }
        encoding = inputAttribute.AlternateValueEncoding;
        if (null != encoding)
        {
            if (string.Equals(encoding, base64Str, StringComparison.OrdinalIgnoreCase))
            {
                // The Value is base64 encoded
                inputAttribute.Value = AmazonSimpleDBUtil.DecodeBase64String(inputAttribute.Value);
                inputAttribute.AlternateValueEncoding = "";
            }
        }
    }

    /// <summary>
    /// Decodes the base64 properties of every SimpleDB Attribute specified in
    /// list of attributes specified as input.
    /// </summary>
    /// <param name="attributes">The Attributes in this list will be decoded</param>
    /// <seealso cref="P:Amazon.SimpleDB.Model.Attribute.NameEncoding" />
    /// <seealso cref="P:Amazon.SimpleDB.Model.Attribute.ValueEncoding" />
    /// <seealso cref="P:Amazon.SimpleDB.Util.AmazonSimpleDBUtil.DecodeAttribute" />
    public static void DecodeAttributes(List<Amazon.SimpleDB.Model.Attribute> attributes)
    {
        if (attributes != null &&
            attributes.Count > 0)
        {
            foreach (Amazon.SimpleDB.Model.Attribute at in attributes)
            {
                AmazonSimpleDBUtil.DecodeAttribute(at);
            }
        }
    }

    /// <summary>
    /// Decodes the base64 encoded members of the Item if necessary.
    /// The Name property of an Item can be base64 encoded.
    /// This method also decodes any encoded properties of the Attributes
    /// associated with the Input Item.
    /// </summary>
    /// <param name="inputItem">The Item to be decoded</param>
    /// <seealso cref="P:Amazon.SimpleDB.Model.Item.NameEncoding" />
    /// <seealso cref="P:Amazon.SimpleDB.Util.AmazonSimpleDBUtil.DecodeAttributes" />
    public static void DecodeItem(Amazon.SimpleDB.Model.Item inputItem)
    {
        if (null == inputItem)
        {
            throw new ArgumentNullException("inputItem", "The Item passed in was null");
        }
        string encoding = inputItem.AlternateNameEncoding;
        if (null != encoding)
        {
            if (string.Equals(encoding, base64Str, StringComparison.OrdinalIgnoreCase))
            {
                // The Name is base64 encoded
                inputItem.Name = AmazonSimpleDBUtil.DecodeBase64String(inputItem.Name);
                inputItem.AlternateNameEncoding = "";
            }
        }
        AmazonSimpleDBUtil.DecodeAttributes(inputItem.Attributes);
    }

    /// <summary>
    /// Decodes the base64 encoded members of the Item List.
    /// </summary>
    /// <param name="inputItems">The Item List to be decoded</param>
    /// <seealso cref="P:Amazon.SimpleDB.Model.Item.NameEncoding" />
    /// <seealso cref="P:Amazon.SimpleDB.Util.AmazonSimpleDBUtil.DecodeAttributes" />
    /// <seealso cref="P:Amazon.SimpleDB.Util.AmazonSimpleDBUtil.DecodeItem" />
    public static void DecodeItems(List<Amazon.SimpleDB.Model.Item> inputItems)
    {
        if (inputItems != null &&
            inputItems.Count > 0)
        {
            foreach (Amazon.SimpleDB.Model.Item it in inputItems)
            {
                AmazonSimpleDBUtil.DecodeItem(it);
            }
        }
    }

    /// <summary>
    /// Returns the Base64 decoded version of the input string.
    /// </summary>
    /// <param name="encoded">The Base64 encoded string</param>
    /// <returns>Decoded version of the Base64 input string</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="encoded"/> is null.</exception>
    public static string DecodeBase64String(string encoded)
    {
        if (null == encoded)
        {
            throw new ArgumentNullException("encoded", "The Encoded String passed in was null");
        }
        byte[] encodedDataAsBytes = System.Convert.FromBase64String(encoded);
        // Idiom fix: Encoding.UTF8 rather than the UTF8Encoding.UTF8 alias.
        return System.Text.Encoding.UTF8.GetString(encodedDataAsBytes);
    }
}
| |
/*
Copyright 2006 - 2010 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Xml;
using System.Text;
using System.Threading;
using System.Reflection;
using System.Collections;
using System.Diagnostics;
using OpenSource.UPnP.AV;
using OpenSource.Utilities;
using System.Runtime.Serialization;
using OpenSource.UPnP.AV.CdsMetadata;
namespace OpenSource.UPnP.AV.MediaServer.DV
{
/// <summary>
/// <para>
/// This class inherits all basic metadata of a ContentDirectory media item entry
/// (for use in representing "object.item" and derived UPNP media classes),
/// for use in managing a content hierarchy intended for the MediaServer implementation
/// provided in the OpenSource.UPnP.AV.CdsMetadata namespace.
/// </para>
///
/// <para>
/// The <see cref="MediaServerDevice"/> class
/// owns a <see cref="DvRootContainer"/> instance.
/// Public programmers can add
/// <see cref="DvMediaItem"/> and
/// <see cref="DvMediaContainer"/>
/// objects to the root, thus building a content hierarchy.
/// </para>
///
/// <para>
/// Although some MediaServer implementations might choose to enforce a
/// rule where a reference item has identical metadata and resources
/// of an underlying item, the implementation for
/// <see cref="MediaServerDevice"/> does not make
/// such an assumption. However, the programmer can opt to enforce that in their
/// own incarnation of the content hierarchy.
/// </para>
/// </summary>
[Serializable()]
public sealed class DvMediaItem : MediaItem, IDvMedia, IDvItem
{
/// <summary>
/// Restores the link between a reference item and its underlying item after
/// deserialization.
///
/// <para>
/// A media item can point to another media item: the pointing item is the
/// "reference" item and the pointed-to item is the "underlying" item. The
/// standard binary serialization process destroys that object reference,
/// so this method re-establishes it.
/// </para>
/// </summary>
/// <param name="underlyingItem">The media item that is being referred to.</param>
/// <param name="refItem">The media item that is doing the referring.</param>
public static void AttachRefItem (DvMediaItem underlyingItem, DvMediaItem refItem)
{
    underlyingItem.LockReferenceList();

    if (underlyingItem.m_ReferringItems == null)
    {
        underlyingItem.m_ReferringItems = new ArrayList();
    }
    underlyingItem.m_ReferringItems.Add(refItem);

    // Naming follows the AV spec's "refID" convention: m_RefItem on the
    // referring item points at the underlying item.
    refItem.m_RefItem = underlyingItem;
    refItem.m_RefID = "";

    underlyingItem.UnlockReferenceList();
}
/// <summary>
/// Override for set - do not allow application layer to set the ID
/// directly. The CDS infrastructure must ensure that ID values are
/// unique, so this ability is blocked for public use.
/// <para>
/// The method also causes the MediaServer to event changes on UPNP network.
/// </para>
/// </summary>
public override string ID
{
    get
    {
        return base.ID;
    }
    set
    {
#if (DEBUG)
        this.CheckRuntimeBindings(new StackTrace());
#endif
        // BUGFIX: compare IDs ordinally. string.Compare's default overload is
        // culture-sensitive, which is wrong for opaque identifier strings and
        // can behave differently across locales.
        if (!string.Equals(value, base.m_ID, StringComparison.Ordinal))
        {
            base.m_ID = value;
            // Only event the change when the ID actually changed.
            this.NotifyRootOfChange();
        }
    }
}
/// <summary>
/// Override set:
/// Checks access rights first and then calls base class implementation for set.
/// The assigned value must be a <see cref="DvMediaContainer"/>.
/// </summary>
/// <exception cref="InvalidCastException">
/// Thrown if the set-operation could not cast the parent into
/// a DvMediaContainer object.
/// </exception>
public override IMediaContainer Parent
{
get
{
return base.Parent;
}
set
{
#if (DEBUG)
// pulling a stack trace is slow - only implement this
// for debug builds
this.CheckRuntimeBindings(new StackTrace());
#endif
// we do an explicit cast first to ensure that
// the value is a DvMediaContainer, before
// assigning to base.Parent because base.Parent
// will take anything that implements IMediaContainer.
DvMediaContainer p = (DvMediaContainer) value;
base.Parent = p;
// do not call NotifyRootOfChange() because this
// method is called internally by DvMediaContainer
// when adding child objects... there's no need to
// event the changes for this object since the
// parent will event changes.
}
}
/// <summary>
/// Re-parents this object under another container. It is strongly
/// recommended that the target container already share this object's
/// current hierarchy.
/// </summary>
/// <param name="diffParent">the container that should become the new parent</param>
/// <exception cref="InvalidCastException">
/// Thrown if <paramref name="diffParent"/> is not a <see cref="DvMediaContainer"/>.
/// </exception>
public void ChangeParent(IDvContainer diffParent)
{
    // Cast throws InvalidCastException for non-DvMediaContainer arguments.
    DvMediaContainer newParent = (DvMediaContainer) diffParent;
    DvMediaContainer.ChangeParent2(this, newParent);
}
/// <summary>
/// Override set: changing this value causes the object to event the
/// change on the UPNP network.
/// </summary>
public override bool IsRestricted
{
    get
    {
        return base.IsRestricted;
    }
    set
    {
        // Remember the previous value so we only event a real change.
        bool previous = this.m_Restricted;
        this.m_Restricted = value;
        if (previous != value)
        {
            this.NotifyRootOfChange();
        }
    }
}
/// <summary>
/// Calls DvMediaContainer.CheckProtection() if the item has a parent.
/// This method is used to prevent
/// public programmers from doing a set() on the Parent
/// property.
/// <para>
/// Note: the entire body is compiled only in DEBUG builds; in release
/// builds this method is a no-op.
/// </para>
/// </summary>
/// <param name="st">stack trace of the caller, used by the parent's check</param>
public override void CheckRuntimeBindings(StackTrace st)
{
#if (DEBUG)
if (this.Parent != null)
{
DvMediaContainer parent = (DvMediaContainer) this.Parent;
parent.CheckRuntimeBindings(st);
}
#endif
}
/// <summary>
/// <para>
/// Calls the parent container's
/// <see cref="DvMediaContainer.NotifyRootOfChange"/> method, then walks this
/// item's reference items and instructs each of their parents to do the same.
/// </para>
/// </summary>
/// <exception cref="InvalidCastException">
/// Thrown when a parent container is not a <see cref="DvMediaContainer"/> object.
/// </exception>
public void NotifyRootOfChange()
{
    DvMediaContainer parent = (DvMediaContainer) this.m_Parent;
    if (parent != null)
    {
        parent.NotifyRootOfChange();
    }

    // Hold the reference-list lock while enumerating referring items.
    this.LockReferenceList();
    if (this.m_ReferringItems != null)
    {
        foreach (IDvItem reference in this.m_ReferringItems)
        {
            DvMediaContainer refParent = (DvMediaContainer) reference.Parent;
            if (refParent != null)
            {
                refParent.NotifyRootOfChange();
            }
        }
    }
    this.UnlockReferenceList();
}
/// <summary>
/// Returns the item that this object points to. Will return
/// null if no refItem is found.
/// The setter performs a DEBUG-only access check and requires a
/// <see cref="DvMediaItem"/> value.
/// </summary>
/// <returns></returns>
/// <exception cref="InvalidCastException">
/// Thrown if the set() operation has a non-DvMediaItem instance as the parameter.
/// </exception>
public override IMediaItem RefItem
{
get
{
return this.m_RefItem;
}
set
{
#if (DEBUG)
this.CheckRuntimeBindings(new StackTrace());
#endif
this.m_RefItem = (DvMediaItem) value;
}
}
/// <summary>
/// Default constructor. No metadata is initialized in this
/// method. It is STRONGLY recommended that programmers
/// use the <see cref="DvMediaBuilder"/>.CreateXXX methods to instantiate
/// DvMediaObjects objects.
/// </summary>
public DvMediaItem()
{
}
/// <summary>
/// <para>
/// This constructor calls the base class constructor(XmlElement), and
/// if and only if the type of the instances is a DvMediaItem will
/// the constructor call the base class implementation of UpdateEverything().
/// Any derived classes that use this constructor will have to make the
/// calls to UpdateEverything() if appropriate.
/// </para>
/// </summary>
/// <param name="xmlElement">XmlElement representing a DIDL-Lite item element</param>
public DvMediaItem (XmlElement xmlElement)
: base (xmlElement)
{
}
/// <summary>
/// Makes a DvMediaItem instantiated from an XmlElement instantiate its child
/// resources as <see cref="DvMediaResource"/> objects (and child media objects
/// as DvMediaItem / DvMediaContainer).
///
/// <para>
/// Derived classes that expect different types for their resources and child
/// media objects need to override this method.
/// </para>
/// </summary>
/// <param name="xmlElement">DIDL-Lite element this instance was built from</param>
protected override void FinishInitFromXml(XmlElement xmlElement)
{
    ArrayList children;
    base.UpdateEverything(true, true, typeof(DvMediaResource), typeof(DvMediaItem), typeof(DvMediaContainer), xmlElement, out children);

    // IDs not minted by this runtime (missing the MediaBuilder.Seed prefix)
    // are replaced with a locally unique one.
    if (!this.m_ID.StartsWith(MediaBuilder.Seed))
    {
        this.m_ID = MediaBuilder.GetUniqueId();
    }
}
/// <summary>
/// Special ISerializable constructor.
/// Do basic initialization and then serialize from the info object.
/// Serialized MediaItem objects do not have their child objects
/// serialized with them. Ignore the compiler warning about a protected
/// member in a sealed class as the MSDN documentation says to make this
/// constructor protected.
/// </summary>
/// <param name="info">serialized state to restore from</param>
/// <param name="context">streaming context supplied by the formatter</param>
private DvMediaItem(SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
: base (info, context)
{
}
/// <summary>
/// Custom serializer - required for ISerializable.
/// Serializes all fields that are not marked as [NonSerialized()].
/// Some fields were originally marked as [NonSerialized()] because
/// this class did not implement ISerializable. I've continued to
/// use the attribute in the code.
///
/// Serialized DvMediaItem objects do not save any information
/// about an underlying item's reference items.
/// As a corollary, DvMediaItem instances do not save any pointers
/// to their underlying items.
/// </summary>
/// <param name="info">destination for the serialized state</param>
/// <param name="context">streaming context supplied by the formatter</param>
public override void GetObjectData(SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
{
// Only base-class state is written; reference relationships are
// re-established after deserialization via AttachRefItem.
base.GetObjectData(info, context);
}
/// <summary>
/// Calls the base class implementation of Init() and then resets this
/// class's fields to their defaults: no referring items, no underlying
/// item, not marked for deletion, and a fresh lock object.
/// </summary>
protected override void Init()
{
    base.Init();
    this.m_LockReferences = new object();
    this.m_RefItem = null;
    this.m_ReferringItems = null;
    this.m_Deleting = false;
}
/// <summary>
/// Checks that the resource is a <see cref="IDvResource"/> object,
/// delegates to the base class, and then events the change.
/// </summary>
/// <param name="newResource">the resource to add</param>
/// <exception cref="InvalidCastException">
/// Thrown if newResource is not a IDvResource.
/// </exception>
public override void AddResource(IMediaResource newResource)
{
    // Cast up front so a non-IDvResource argument fails fast.
    IDvResource resource = (IDvResource) newResource;
    base.AddResource(resource);
    this.NotifyRootOfChange();
}
/// <summary>
/// Checks that each resource in the collection is a <see cref="IDvResource"/> object.
/// Calls base class and notifies owner of change.
/// </summary>
/// <param name="newResources">
/// A collection of <see cref="IDvResource"/> objects to add as resources.
/// The new <see cref="OpenSource.UPnP.AV.CdsMetadata.MediaResource"/> objects
/// are best instantiated through the <see cref="DvResourceBuilder"/>.CreateDvXXX methods.
/// </param>
/// <exception cref="InvalidCastException">
/// Thrown if a resource is not a <see cref="IDvResource"/> object.
/// </exception>
public override void AddResources(ICollection newResources)
{
    // Validate every element before delegating: the foreach cast throws
    // InvalidCastException on the first element that is not an IDvResource.
    // The loop body is intentionally empty - enumeration performs the check.
    // (FIX: previously written as "foreach (...);" with a lone semicolon,
    // which reads like a bug and draws a compiler warning.)
    foreach (IDvResource res in newResources)
    {
    }
    base.AddResources(newResources);
    this.NotifyRootOfChange();
}
/// <summary>
/// Removes a resource from the media item.
/// <para>
/// Note: a change is always evented, even if the resource was not
/// actually present (ArrayList.Remove gives no success indication).
/// </para>
/// </summary>
/// <param name="removeThis">the resource to remove</param>
public override void RemoveResource(IMediaResource removeThis)
{
// ArrayList.Remove does not return a boolean to indicate
// that the resource was remove - so just assume that
// something was removed.
base.RemoveResource(removeThis);
this.NotifyRootOfChange();
}
/// <summary>
/// Calls base class and notifies owner of change.
/// A change is evented unconditionally, whether or not any of the
/// listed resources were actually present.
/// </summary>
/// <param name="removeThese">A collection of desired
/// <see cref="IDvResource"/> objects for removal.
/// </param>
public override void RemoveResources(ICollection removeThese)
{
base.RemoveResources(removeThese);
this.NotifyRootOfChange();
}
/// <summary>
/// Returns true if the object has been marked for removal from
/// a container. DvMediaItem instances have to instruct referring
/// DvMediaItem instances to delete themselves, so there can be a
/// window between when an item is marked for deletion and when it
/// is actually removed from its parent container.
/// </summary>
public bool IsDeletePending
{
    get
    {
        // Read under the reference lock; NotifyPendingDelete writes this
        // flag under the same lock.
        lock (this.m_LockReferences)
        {
            return this.m_Deleting;
        }
    }
}
/// <summary>
/// Returns a shallow-copy thread-safe listing of <see cref="IDvItem"/>
/// objects that point to this item. Returns an empty list when no
/// references exist.
/// </summary>
public IList ReferenceItems
{
    get
    {
        ArrayList refItems;
        this.LockReferenceList();
        // BUGFIX: m_ReferringItems is null until the first reference is
        // created (see Init) and again after NotifyPendingDelete; calling
        // Clone() on it would throw NullReferenceException. Return an
        // empty list in that case instead.
        if (this.m_ReferringItems != null)
        {
            refItems = (ArrayList) this.m_ReferringItems.Clone();
        }
        else
        {
            refItems = new ArrayList();
        }
        this.UnlockReferenceList();
        return refItems;
    }
}
/// <summary>
/// Call this method before calling CreateReference().
/// This helps to ensure thread-safety.
/// Acquires the reference-list monitor; must be balanced by a call to
/// <see cref="UnlockReferenceList"/>.
/// </summary>
public void LockReferenceList()
{
System.Threading.Monitor.Enter(this.m_LockReferences);
}
/// <summary>
/// Call this method after CreateReference() has been called
/// and the new item has been added to container that should
/// own it. This helps to ensure thread safety.
/// Releases the monitor acquired by <see cref="LockReferenceList"/>.
/// </summary>
public void UnlockReferenceList()
{
System.Threading.Monitor.Exit(this.m_LockReferences);
}
/// <summary>
/// Creates a <see cref="DvMediaReference"/> that points at this item and
/// records it in this item's referring-items list.
/// </summary>
/// <returns>a new DvMediaReference pointing at this item</returns>
/// <exception cref="Error_PendingDeleteException">
/// Thrown if this item has already been marked for deletion.
/// </exception>
public DvMediaReference CreateDvMediaReference ()
{
    lock (this.m_LockReferences)
    {
        // Guard clause: no new references once the item is being deleted.
        if (this.m_Deleting)
        {
            throw new Error_PendingDeleteException(this);
        }

        DvMediaReference newReference = new DvMediaReference(this);
        if (this.m_ReferringItems == null)
        {
            this.m_ReferringItems = new ArrayList(1);
        }
        this.m_ReferringItems.Add(newReference);
        return newReference;
    }
}
/// <summary>
/// This creates a new DvMediaItem instance that refers
/// to this instance, by simply specifying the intended ID.
/// Generally, public programmers should use the
/// <see cref="DvMediaContainer.AddReference"/> method
/// to prevent object ID collisions.
/// </summary>
/// <param name="id">the object ID to assign to the new reference item</param>
/// <returns>a new DvMediaItem instance that refers to this instance</returns>
/// <exception cref="Error_PendingDeleteException">
/// Thrown if this item is marked for deletion. Cannot create a reference
/// to an item that is pending removal from the content hierarchy.
/// </exception>
public IDvItem CreateReference (string id)
{
    lock (this.m_LockReferences)
    {
        // Guard clause: refuse once the item is marked for deletion.
        if (this.m_Deleting)
        {
            throw new Error_PendingDeleteException(this);
        }

        if (this.m_ReferringItems == null)
        {
            this.m_ReferringItems = new ArrayList(1);
        }

        DvMediaItem reference = new DvMediaItem();
        reference.m_ID = id;
        reference.m_RefItem = this;
        this.m_ReferringItems.Add(reference);

        // Copy the remaining base metadata from the underlying item.
        reference.m_Restricted = this.m_Restricted;
        reference.SetClass(this.Class.ToString(), this.Class.FriendlyName);
        reference.Title = this.Title;
        return reference;
    }
}
/// <summary>
/// This creates a new DvMediaItem instance that refers
/// to this instance, using a freshly generated unique ID.
/// Public programmers should take caution: it is possible to create an
/// item in a content hierarchy that points to another item that is not
/// in the content hierarchy.
/// </summary>
/// <returns>a new DvMediaItem instance</returns>
public IDvItem CreateReference()
{
    return this.CreateReference(MediaBuilder.GetUniqueId());
}
/// <summary>
/// <see cref="DvMediaContainer.RemoveObject"/>
/// and <see cref="DvMediaContainer.RemoveObjects"/>
/// call this method before removing it from its child list.
/// This will automatically remove items that refer to this item
/// from their respective parents.
/// </summary>
public void NotifyPendingDelete()
{
    lock (this.m_LockReferences)
    {
        // Mark first: once m_Deleting is set, CreateReference and
        // CreateDvMediaReference refuse to create new references.
        this.m_Deleting = true;
        ArrayList removeThese = new ArrayList();
        if (m_ReferringItems != null)
        {
            // Ask each referring item's parent to remove it; referring items
            // without a parent are collected and dropped from the list below.
            foreach (IDvItem referringItem in this.m_ReferringItems)
            {
                IDvContainer parent = (IDvContainer) referringItem.Parent;
                if (parent != null)
                {
                    parent.RemoveObject(referringItem);
                }
                else
                {
                    removeThese.Add(referringItem);
                }
            }
            foreach (IDvItem referringItem in removeThese)
            {
                this.m_ReferringItems.Remove(referringItem);
            }
        }
        // Sever both sides of the reference relationship.
        this.m_ReferringItems = null;
        this.m_RefItem = null;
    }
}
/// <summary>
/// Updates the metadata and resources of this instance to match
/// the information of the provided
/// <see cref="IDvMedia"/> instance.
/// </summary>
/// <param name="newObj">the media object whose metadata and resources are applied to this instance</param>
/// <exception cref="InvalidCastException">
/// Throws an exception if the provided object's
/// resources include non-<see cref="IDvResource"/> objects.
/// </exception>
public override void UpdateObject (IUPnPMedia newObj)
{
    // Validate before mutating any state: iterating with an IDvResource
    // loop variable casts each element, so any non-IDvResource resource
    // throws InvalidCastException here, prior to the base update.
    // (Previously this was a foreach terminated by a bare ";" — an empty
    // statement that read like an accidental bug; the explicit empty
    // body below makes the cast-check intent clear.)
    foreach (IDvResource res in newObj.Resources)
    {
        // intentional no-op: the iteration variable's type performs the check
    }
    base.UpdateObject(newObj);
    this.NotifyRootOfChange();
}
/// <summary>
/// Updates this item's metadata from the supplied XML. Implementation is:
/// <para>
/// <code>
/// ArrayList proposedChildren;
/// this.UpdateEverything(false, false, typeof(DvMediaResource), typeof(DvMediaItem), typeof(DvMediaContainer), xmlElement, out proposedChildren);
/// </code>
/// </para>
/// </summary>
/// <param name="xmlElement">XML element describing the new metadata</param>
public override void UpdateMetadata(XmlElement xmlElement)
{
    // First flag is false here (true in UpdateObject) — presumably it
    // restricts the update to metadata only; confirm against UpdateEverything.
    ArrayList unusedChildren;
    this.UpdateEverything(false, false, typeof(DvMediaResource), typeof(DvMediaItem), typeof(DvMediaContainer), xmlElement, out unusedChildren);
}
/// <summary>
/// Updates this item from the supplied XML. Implementation is:
/// <para>
/// <code>
/// ArrayList proposedChildren;
/// this.UpdateEverything(true, false, typeof(DvMediaResource), typeof(DvMediaItem), typeof(DvMediaContainer), xmlElement, out proposedChildren);
/// </code>
/// </para>
/// </summary>
/// <param name="xmlElement">XML element describing the new state of the object</param>
public override void UpdateObject(XmlElement xmlElement)
{
    // First flag is true here (false in UpdateMetadata) — presumably it
    // enables updating resources as well; confirm against UpdateEverything.
    ArrayList unusedChildren;
    this.UpdateEverything(true, false, typeof(DvMediaResource), typeof(DvMediaItem), typeof(DvMediaContainer), xmlElement, out unusedChildren);
}
/// <summary>
/// Override - writes the inner XML of this item. Each automapped resource
/// is printed once for each network interface.
/// </summary>
/// <param name="formatter">
/// A <see cref="ToXmlFormatter"/> object that
/// specifies method implementations for printing
/// media objects and metadata.
/// </param>
/// <param name="data">
/// This object should be a <see cref="ToXmlDataDv"/>
/// object that contains additional instructions used
/// by this implementation.
/// </param>
/// <param name="xmlWriter">
/// The <see cref="XmlTextWriter"/> object that
/// will format the representation in an XML
/// valid way.
/// </param>
/// <exception cref="InvalidCastException">
/// Thrown if the "data" argument is not a <see cref="ToXmlDataDv"/> object.
/// </exception>
public override void WriteInnerXml(ToXmlFormatter formatter, object data, XmlTextWriter xmlWriter)
{
    // To prevent constant updating of metadata for a media object,
    // there is no callback for updating item metadata here, unlike
    // DvMediaContainer: the parent container is relied upon to keep
    // this item's metadata current.
    ToXmlDataDv toXmlData = (ToXmlDataDv) data;

    // Name the delegates up front so the dispatch call below reads clearly.
    InnerXmlWriter.DelegateWriteProperties writeProperties =
        new InnerXmlWriter.DelegateWriteProperties(InnerXmlWriter.WriteInnerXmlProperties);
    InnerXmlWriter.DelegateShouldPrintResources shouldPrintResources =
        new InnerXmlWriter.DelegateShouldPrintResources(this.PrintResources);
    InnerXmlWriter.DelegateWriteResources writeResources =
        new InnerXmlWriter.DelegateWriteResources(InnerXmlWriterDv.WriteInnerXmlResources);
    InnerXmlWriter.DelegateWriteDescNodes writeDescNodes =
        new InnerXmlWriter.DelegateWriteDescNodes(InnerXmlWriter.WriteInnerXmlDescNodes);

    InnerXmlWriter.WriteInnerXml
        (
        this,
        writeProperties,
        shouldPrintResources,
        writeResources,
        writeDescNodes,
        formatter,
        toXmlData,
        xmlWriter
        );
}
/// <summary>
/// Method executes when m_Properties.OnMetadataChanged fires.
/// Calls the base class's UpdateCache() and then notifies the
/// root container that this item changed.
/// </summary>
/// <param name="sender">the MediaProperties instance that raised the event</param>
/// <param name="stateNumber">state number associated with the change (unused here)</param>
protected override void Sink_OnMediaPropertiesChanged (MediaProperties sender, int stateNumber)
{
    base.UpdateCache();
    this.NotifyRootOfChange();
}
/// <summary>
/// Returns the IDvResource object associated with the item.
/// </summary>
/// <param name="resourceID">the resource ID of the desired IDvResource</param>
/// <returns>The IDvResource instance, or null if it doesn't exist.</returns>
internal IDvResource GetResource(string resourceID)
{
    // -1 == wait indefinitely for the reader lock.
    this.m_LockResources.AcquireReaderLock(-1);
    try
    {
        if (this.m_Resources != null)
        {
            foreach (IDvResource res in this.m_Resources)
            {
                if (res.ResourceID == resourceID)
                {
                    return res;
                }
            }
        }
        return null;
    }
    finally
    {
        // Bug fix: the lock was previously released outside any finally
        // block, so an exception thrown during enumeration would leak the
        // reader lock and eventually starve writers.
        this.m_LockResources.ReleaseReaderLock();
    }
}
/// <summary>
/// Trims the base class's storage to its current size, and does the
/// same for the list of referring items when one exists.
/// </summary>
public override void TrimToSize()
{
    base.TrimToSize();
    lock (this.m_LockReferences)
    {
        ArrayList referringItems = this.m_ReferringItems;
        if (referringItems != null)
        {
            referringItems.TrimToSize();
        }
    }
}
/// <summary>
/// This locks the m_ReferringItems list for reading and writing.
/// </summary>
[NonSerialized()] private object m_LockReferences = new object();
/// <summary>
/// This keeps a listing of all DvMediaItems that were created
/// through the CreateReference() method.
/// </summary>
[NonSerialized()] private ArrayList m_ReferringItems = null;
/// <summary>
/// This flag indicates whether the item has been marked
/// for removal from the parent container.
/// </summary>
[NonSerialized()] private bool m_Deleting = false;
/// <summary>
/// Items can refer to other items. This class allows
/// us to keep a reference to the actual item that
/// is being referred.
/// </summary>
[NonSerialized()] internal MediaItem m_RefItem = null;
}
}
| |
/* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2021 the ZAP development team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Text;
/*
* This file was automatically generated.
*/
namespace OWASPZAPDotNetAPI.Generated
{
public class Autoupdate
{
    // Client used to issue every ZAP API call; set once in the constructor.
    private ClientApi api = null;

    public Autoupdate(ClientApi api)
    {
        this.api = api;
    }

    /// <summary>
    /// Invokes a parameterless view in the "autoupdate" component.
    /// Factored out to remove the duplicated "parameters = null" pattern
    /// repeated in every generated view method.
    /// </summary>
    private IApiResponse CallView(string viewName)
    {
        // Typed null preserves the overload resolution of the original code.
        Dictionary<string, string> parameters = null;
        return api.CallApi("autoupdate", "view", viewName, parameters);
    }

    /// <summary>
    /// Invokes an "autoupdate" action that takes a single named string parameter.
    /// </summary>
    private IApiResponse CallAction(string actionName, string parameterName, string parameterValue)
    {
        Dictionary<string, string> parameters = new Dictionary<string, string>();
        parameters.Add(parameterName, parameterValue);
        return api.CallApi("autoupdate", "action", actionName, parameters);
    }

    /// <summary>
    /// Invokes an "autoupdate" action that takes a single "Boolean" parameter.
    /// </summary>
    private IApiResponse CallBooleanAction(string actionName, bool value)
    {
        return CallAction(actionName, "Boolean", Convert.ToString(value));
    }

    /// <summary>
    ///Returns the latest version number
    /// </summary>
    /// <returns></returns>
    public IApiResponse latestVersionNumber()
    {
        return CallView("latestVersionNumber");
    }

    /// <summary>
    ///Returns 'true' if ZAP is on the latest version
    /// </summary>
    /// <returns></returns>
    public IApiResponse isLatestVersion()
    {
        return CallView("isLatestVersion");
    }

    /// <summary>
    ///Return a list of all of the installed add-ons
    /// </summary>
    /// <returns></returns>
    public IApiResponse installedAddons()
    {
        return CallView("installedAddons");
    }

    /// <summary>
    ///Returns a list with all local add-ons, installed or not.
    /// </summary>
    /// <returns></returns>
    public IApiResponse localAddons()
    {
        return CallView("localAddons");
    }

    /// <summary>
    ///Return a list of any add-ons that have been added to the Marketplace since the last check for updates
    /// </summary>
    /// <returns></returns>
    public IApiResponse newAddons()
    {
        return CallView("newAddons");
    }

    /// <summary>
    ///Return a list of any add-ons that have been changed in the Marketplace since the last check for updates
    /// </summary>
    /// <returns></returns>
    public IApiResponse updatedAddons()
    {
        return CallView("updatedAddons");
    }

    /// <summary>
    ///Return a list of all of the add-ons on the ZAP Marketplace (this information is read once and then cached)
    /// </summary>
    /// <returns></returns>
    public IApiResponse marketplaceAddons()
    {
        return CallView("marketplaceAddons");
    }

    /// <summary>
    ///Returns the configured add-on directories option.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionAddonDirectories()
    {
        return CallView("optionAddonDirectories");
    }

    /// <summary>
    ///Returns the day of the last check for updates.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionDayLastChecked()
    {
        return CallView("optionDayLastChecked");
    }

    /// <summary>
    ///Returns the day the user was last warned about pending installs.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionDayLastInstallWarned()
    {
        return CallView("optionDayLastInstallWarned");
    }

    /// <summary>
    ///Returns the day the user was last warned about pending updates.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionDayLastUpdateWarned()
    {
        return CallView("optionDayLastUpdateWarned");
    }

    /// <summary>
    ///Returns the configured download directory option.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionDownloadDirectory()
    {
        return CallView("optionDownloadDirectory");
    }

    /// <summary>
    ///Returns whether ZAP checks for add-on updates.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionCheckAddonUpdates()
    {
        return CallView("optionCheckAddonUpdates");
    }

    /// <summary>
    ///Returns whether ZAP checks for updates on start.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionCheckOnStart()
    {
        return CallView("optionCheckOnStart");
    }

    /// <summary>
    ///Returns whether new releases are downloaded automatically.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionDownloadNewRelease()
    {
        return CallView("optionDownloadNewRelease");
    }

    /// <summary>
    ///Returns whether add-on updates are installed automatically.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionInstallAddonUpdates()
    {
        return CallView("optionInstallAddonUpdates");
    }

    /// <summary>
    ///Returns whether scanner rules are installed automatically.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionInstallScannerRules()
    {
        return CallView("optionInstallScannerRules");
    }

    /// <summary>
    ///Returns whether alpha-quality add-ons are reported.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionReportAlphaAddons()
    {
        return CallView("optionReportAlphaAddons");
    }

    /// <summary>
    ///Returns whether beta-quality add-ons are reported.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionReportBetaAddons()
    {
        return CallView("optionReportBetaAddons");
    }

    /// <summary>
    ///Returns whether release-quality add-ons are reported.
    /// </summary>
    /// <returns></returns>
    public IApiResponse optionReportReleaseAddons()
    {
        return CallView("optionReportReleaseAddons");
    }

    /// <summary>
    ///Downloads the latest release, if any
    /// </summary>
    /// <returns></returns>
    public IApiResponse downloadLatestRelease()
    {
        Dictionary<string, string> parameters = null;
        return api.CallApi("autoupdate", "action", "downloadLatestRelease", parameters);
    }

    /// <summary>
    ///Installs or updates the specified add-on, returning when complete (i.e. not asynchronously)
    /// </summary>
    /// <returns></returns>
    public IApiResponse installAddon(string id)
    {
        return CallAction("installAddon", "id", id);
    }

    /// <summary>
    ///Installs the add-on from the specified local file.
    /// </summary>
    /// <returns></returns>
    public IApiResponse installLocalAddon(string file)
    {
        return CallAction("installLocalAddon", "file", file);
    }

    /// <summary>
    ///Uninstalls the specified add-on
    /// </summary>
    /// <returns></returns>
    public IApiResponse uninstallAddon(string id)
    {
        return CallAction("uninstallAddon", "id", id);
    }

    /// <summary>
    ///Sets whether ZAP checks for add-on updates.
    /// </summary>
    /// <returns></returns>
    public IApiResponse setOptionCheckAddonUpdates(bool boolean)
    {
        return CallBooleanAction("setOptionCheckAddonUpdates", boolean);
    }

    /// <summary>
    ///Sets whether ZAP checks for updates on start.
    /// </summary>
    /// <returns></returns>
    public IApiResponse setOptionCheckOnStart(bool boolean)
    {
        return CallBooleanAction("setOptionCheckOnStart", boolean);
    }

    /// <summary>
    ///Sets whether new releases are downloaded automatically.
    /// </summary>
    /// <returns></returns>
    public IApiResponse setOptionDownloadNewRelease(bool boolean)
    {
        return CallBooleanAction("setOptionDownloadNewRelease", boolean);
    }

    /// <summary>
    ///Sets whether add-on updates are installed automatically.
    /// </summary>
    /// <returns></returns>
    public IApiResponse setOptionInstallAddonUpdates(bool boolean)
    {
        return CallBooleanAction("setOptionInstallAddonUpdates", boolean);
    }

    /// <summary>
    ///Sets whether scanner rules are installed automatically.
    /// </summary>
    /// <returns></returns>
    public IApiResponse setOptionInstallScannerRules(bool boolean)
    {
        return CallBooleanAction("setOptionInstallScannerRules", boolean);
    }

    /// <summary>
    ///Sets whether alpha-quality add-ons are reported.
    /// </summary>
    /// <returns></returns>
    public IApiResponse setOptionReportAlphaAddons(bool boolean)
    {
        return CallBooleanAction("setOptionReportAlphaAddons", boolean);
    }

    /// <summary>
    ///Sets whether beta-quality add-ons are reported.
    /// </summary>
    /// <returns></returns>
    public IApiResponse setOptionReportBetaAddons(bool boolean)
    {
        return CallBooleanAction("setOptionReportBetaAddons", boolean);
    }

    /// <summary>
    ///Sets whether release-quality add-ons are reported.
    /// </summary>
    /// <returns></returns>
    public IApiResponse setOptionReportReleaseAddons(bool boolean)
    {
        return CallBooleanAction("setOptionReportReleaseAddons", boolean);
    }
}
}
| |
using System;
using System.Collections;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Reflection;
using CorDebugInterop;
using nanoFramework.Tools.Debugger;
namespace nanoFramework.Tools.VisualStudio.Debugger
{
/// <summary>
/// ICorDebugFunction/ICorDebugFunction2 implementation for a method in a
/// nanoFramework assembly. Wraps a declaring <see cref="CorDebugClass"/>
/// and, when symbols are available, its Pdbx method record; otherwise
/// only a raw metadata token is held.
/// </summary>
public class CorDebugFunction : ICorDebugFunction , ICorDebugFunction2
{
    // Declaring class; never null.
    CorDebugClass m_class;
    // Pdbx symbol record for this method; null when constructed symbolless.
    Pdbx.Method m_pdbxMethod;
    // Lazily created code wrappers (see GetCode).
    CorDebugCode m_codeNative;
    CorDebugCode m_codeIL;
    // Raw metadata token, used only when m_pdbxMethod is null.
    uint m_tkSymbolless;

    public CorDebugFunction(CorDebugClass cls, Pdbx.Method method)
    {
        m_class = cls;
        m_pdbxMethod = method;
    }

    // Symbolless variant: chains to the main constructor with a null
    // Pdbx record and records only the token.
    public CorDebugFunction (CorDebugClass cls, uint tkSymbolless) : this (cls, null)
    {
        m_tkSymbolless = tkSymbolless;
    }

    // Convenience casts to the COM interfaces this class implements.
    public ICorDebugFunction ICorDebugFunction
    {
        get { return (ICorDebugFunction)this; }
    }

    public ICorDebugFunction2 ICorDebugFunction2
    {
        get { return (ICorDebugFunction2)this; }
    }

    public CorDebugClass Class
    {
        [System.Diagnostics.DebuggerHidden]
        get { return m_class; }
    }

    public CorDebugAppDomain AppDomain
    {
        [System.Diagnostics.DebuggerHidden]
        get { return this.Class.AppDomain; }
    }

    public CorDebugProcess Process
    {
        [System.Diagnostics.DebuggerHidden]
        get { return this.Class.Process; }
    }

    public CorDebugAssembly Assembly
    {
        [System.Diagnostics.DebuggerHidden]
        get { return this.Class.Assembly; }
    }

    private Engine Engine
    {
        [System.Diagnostics.DebuggerHidden]
        get { return this.Class.Engine; }
    }

    // Lazily creates (and caches via the ref parameter) a code wrapper.
    [System.Diagnostics.DebuggerStepThrough]
    private CorDebugCode GetCode(ref CorDebugCode code)
    {
        if (code == null)
            code = new CorDebugCode(this);
        return code;
    }

    // True when a Pdbx symbol record is available for this method.
    public bool HasSymbols
    {
        get { return m_pdbxMethod != null; }
    }

    // Class-member index derived from the nanoCLR token (or the raw
    // symbolless token when no symbols are loaded).
    public uint MethodDef_Index
    {
        get
        {
            uint tk = HasSymbols ? m_pdbxMethod.Token.nanoCLR : m_tkSymbolless;
            return nanoCLR_TypeSystem.ClassMemberIndexFromnanoCLRToken (tk, this.m_class.Assembly);
        }
    }

    public Pdbx.Method PdbxMethod
    {
        [System.Diagnostics.DebuggerHidden]
        get {return m_pdbxMethod;}
    }

    // NOTE: the following metadata-backed properties dereference
    // m_pdbxMethod and therefore require HasSymbols to be true.
    public bool IsInternal
    {
        get {return MetaData.Helper.MethodIsInternal (this.Class.Assembly.MetaDataImport, this.m_pdbxMethod.Token.CLR); }
    }

    public bool IsInstance
    {
        get { return MetaData.Helper.MethodIsInstance(this.Class.Assembly.MetaDataImport, this.m_pdbxMethod.Token.CLR); }
    }

    public bool IsVirtual
    {
        get { return MetaData.Helper.MethodIsVirtual(this.Class.Assembly.MetaDataImport, this.m_pdbxMethod.Token.CLR); }
    }

    public uint NumArg
    {
        get {return MetaData.Helper.MethodGetNumArg (this.Class.Assembly.MetaDataImport, this.m_pdbxMethod.Token.CLR); }
    }

    /// <summary>
    /// Maps a nanoCLR IL offset to the corresponding CLR IL offset using
    /// the method's IL map. uint.MaxValue passes through unchanged.
    /// </summary>
    public uint GetILCLRFromILnanoCLR(uint ilnanoCLR)
    {
        uint ilCLR;
        //Special case for CatchHandlerFound and AppDomain transitions; possibly used elsewhere.
        if (ilnanoCLR == uint.MaxValue) return uint.MaxValue;
        ilCLR = ILComparer.Map(false, m_pdbxMethod.ILMap, ilnanoCLR);
        Debug.Assert(ilnanoCLR <= ilCLR);
        return ilCLR;
    }

    /// <summary>
    /// Maps a CLR IL offset to the corresponding nanoCLR IL offset using
    /// the method's IL map. uint.MaxValue passes through unchanged.
    /// </summary>
    public uint GetILnanoCLRFromILCLR(uint ilCLR)
    {
        //Special case for when CPDE wants to step to the end of the function?
        if (ilCLR == uint.MaxValue) return uint.MaxValue;
        uint ilnanoCLR = ILComparer.Map(true, m_pdbxMethod.ILMap, ilCLR);
        Debug.Assert(ilnanoCLR <= ilCLR);
        return ilnanoCLR;
    }

    /// <summary>
    /// Compares Pdbx.IL entries by either their CLR or nanoCLR offset,
    /// and translates offsets between the two domains via a sorted IL map.
    /// </summary>
    private class ILComparer : IComparer
    {
        // When true, compare/lookup on the CLR offset; otherwise nanoCLR.
        bool m_fCLR;

        private ILComparer(bool fCLR)
        {
            m_fCLR = fCLR;
        }

        // Reads the offset for the requested domain from an IL map entry.
        private static uint GetIL(bool fCLR, Pdbx.IL il)
        {
            return fCLR ? il.CLR : il.nanoCLR;
        }

        private uint GetIL(Pdbx.IL il)
        {
            return GetIL(m_fCLR, il);
        }

        // Writes the offset for the requested domain into an IL map entry.
        private static void SetIL(bool fCLR, Pdbx.IL il, uint offset)
        {
            if (fCLR)
                il.CLR = offset;
            else
                il.nanoCLR = offset;
        }

        private void SetIL(Pdbx.IL il, uint offset)
        {
            SetIL(m_fCLR, il, offset);
        }

        public int Compare(object o1, object o2)
        {
            return GetIL(o1 as Pdbx.IL).CompareTo(GetIL(o2 as Pdbx.IL));
        }

        /// <summary>
        /// Translates an IL offset from one domain to the other.
        /// Binary-searches ilMap (assumed sorted by the source domain's
        /// offsets) for the offset; an exact hit returns the paired offset,
        /// otherwise the offset is adjusted by the delta recorded in the
        /// nearest preceding map entry.
        /// </summary>
        public static uint Map(bool fCLR, Pdbx.IL [] ilMap, uint offset)
        {
            ILComparer ilComparer = new ILComparer(fCLR);
            Pdbx.IL il = new Pdbx.IL();
            ilComparer.SetIL(il, offset);
            int i = Array.BinarySearch(ilMap, il, ilComparer);
            uint ret = 0;
            if (i >= 0)
            {
                //Exact match
                ret = GetIL(!fCLR, ilMap[i]);
            }
            else
            {
                // ~i is the index of the first entry greater than offset.
                i = ~i;
                if (i == 0)
                {
                    //Before the IL diverges
                    ret = offset;
                }
                else
                {
                    //Somewhere in between
                    i--;
                    il = ilMap[i];
                    ret = offset - GetIL(fCLR, il) + GetIL(!fCLR, il);
                }
            }
            // NOTE(review): this assert is vacuous for an unsigned value —
            // presumably intended to guard against underflow in the
            // subtraction above; confirm.
            Debug.Assert(ret >= 0);
            return ret;
        }
    }

    #region ICorDebugFunction Members
    // Local variable signatures are not supported; reports E_NOTIMPL.
    int ICorDebugFunction.GetLocalVarSigToken( out uint pmdSig )
    {
        pmdSig = 0;
        return COM_HResults.E_NOTIMPL;
    }

    // Creates a breakpoint at IL offset 0 (the function entry).
    int ICorDebugFunction.CreateBreakpoint( out ICorDebugFunctionBreakpoint ppBreakpoint )
    {
        ppBreakpoint = new CorDebugFunctionBreakpoint( this, 0 );
        return COM_HResults.S_OK;
    }

    int ICorDebugFunction.GetILCode( out ICorDebugCode ppCode )
    {
        ppCode = GetCode( ref m_codeIL );
        return COM_HResults.S_OK;
    }

    // Delegates module lookup to the declaring class.
    int ICorDebugFunction.GetModule( out ICorDebugModule ppModule )
    {
        m_class.ICorDebugClass.GetModule( out ppModule );
        return COM_HResults.S_OK;
    }

    int ICorDebugFunction.GetNativeCode( out ICorDebugCode ppCode )
    {
        ppCode = GetCode( ref m_codeNative );
        return COM_HResults.S_OK;
    }

    // Returns the CLR metadata token, or the raw token when symbolless.
    int ICorDebugFunction.GetToken( out uint pMethodDef )
    {
        pMethodDef = HasSymbols ? m_pdbxMethod.Token.CLR : m_tkSymbolless;
        return COM_HResults.S_OK;
    }

    int ICorDebugFunction.GetClass( out ICorDebugClass ppClass )
    {
        ppClass = m_class;
        return COM_HResults.S_OK;
    }

    // Edit-and-continue versions are not tracked; always version 0 here.
    int ICorDebugFunction.GetCurrentVersionNumber( out uint pnCurrentVersion )
    {
        pnCurrentVersion = 0;
        return COM_HResults.S_OK;
    }
    #endregion

    #region ICorDebugFunction2 Members
    // Sets the Just-My-Code flag for this method. Enabling JMC requires
    // symbols; the device is informed first and the local Pdbx state is
    // updated only for non-framework assemblies.
    int ICorDebugFunction2.SetJMCStatus( int bIsJustMyCode )
    {
        bool fJMC = Boolean.IntToBool( bIsJustMyCode );
        Debug.Assert( Utility.FImplies( fJMC, this.HasSymbols ) );
        // Default to failure when enabling JMC (needs device cooperation),
        // success when disabling.
        int hres = fJMC ? COM_HResults.E_FAIL : COM_HResults.S_OK;
        if(this.HasSymbols)
        {
            if(fJMC != this.m_pdbxMethod.IsJMC && m_pdbxMethod.CanSetJMC)
            {
                // Synchronously push the new JMC state to the device.
                var setJMC = this.Engine.Info_SetJMCAsync(fJMC, ReflectionDefinition.Kind.REFLECTION_METHOD, this.MethodDef_Index);
                setJMC.Wait();
                if (setJMC.Result)
                {
                    if( !this.Assembly.IsFrameworkAssembly)
                    {
                        //now update the debugger JMC state...
                        this.m_pdbxMethod.IsJMC = fJMC;
                    }
                    hres = COM_HResults.S_OK;
                }
            }
        }
        return hres;
    }

    // Symbolless methods always report JMC == false.
    int ICorDebugFunction2.GetJMCStatus( out int pbIsJustMyCode )
    {
        pbIsJustMyCode = Boolean.BoolToInt( this.HasSymbols ? this.m_pdbxMethod.IsJMC : false );
        return COM_HResults.S_OK;
    }

    int ICorDebugFunction2.GetVersionNumber( out uint pnVersion )
    {
        // CorDebugFunction.GetVersionNumber is not implemented
        pnVersion = 1;
        return COM_HResults.S_OK;
    }

    // No native code enumeration is available; returns a null enumerator.
    int ICorDebugFunction2.EnumerateNativeCode( out ICorDebugCodeEnum ppCodeEnum )
    {
        ppCodeEnum = null;
        return COM_HResults.S_OK;
    }
    #endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// Central spin logic used across the entire code-base.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System.Diagnostics;
using Internal.Runtime.Augments;
namespace System.Threading
{
// SpinWait is just a little value type that encapsulates some common spinning
// logic. It ensures we always yield on single-proc machines (instead of using busy
// waits), and that we work well on HT. It encapsulates a good mixture of spinning
// and real yielding. It's a value type so that various areas of the engine can use
// one by allocating it on the stack w/out unnecessary GC allocation overhead, e.g.:
//
// void f() {
// SpinWait wait = new SpinWait();
// while (!p) { wait.SpinOnce(); }
// ...
// }
//
// Internally it just maintains a counter that is used to decide when to yield, etc.
//
// A common usage is to spin before blocking. In those cases, the NextSpinWillYield
// property allows a user to decide to fall back to waiting once it returns true:
//
// void f() {
// SpinWait wait = new SpinWait();
// while (!p) {
// if (wait.NextSpinWillYield) { /* block! */ }
// else { wait.SpinOnce(); }
// }
// ...
// }
/// <summary>
/// Provides support for spin-based waiting.
/// </summary>
/// <remarks>
/// <para>
/// <see cref="SpinWait"/> encapsulates common spinning logic. On single-processor machines, yields are
/// always used instead of busy waits, and on computers with Intel(R) processors employing Hyper-Threading
/// technology, it helps to prevent hardware thread starvation. SpinWait encapsulates a good mixture of
/// spinning and true yielding.
/// </para>
/// <para>
/// <see cref="SpinWait"/> is a value type, which means that low-level code can utilize SpinWait without
/// fear of unnecessary allocation overheads. SpinWait is not generally useful for ordinary applications.
/// In most cases, you should use the synchronization classes provided by the .NET Framework, such as
/// <see cref="System.Threading.Monitor"/>. For most purposes where spin waiting is required, however,
/// the <see cref="SpinWait"/> type should be preferred over the <see
/// cref="System.Threading.Thread.SpinWait"/> method.
/// </para>
/// <para>
/// While SpinWait is designed to be used in concurrent applications, it is not designed to be
/// used from multiple threads concurrently. SpinWait's members are not thread-safe. If multiple
/// threads must spin, each should use its own instance of SpinWait.
/// </para>
/// </remarks>
public struct SpinWait
{
// These constants determine the frequency of yields versus spinning. The
// numbers may seem fairly arbitrary, but were derived with at least some
// thought in the design document. I fully expect they will need to change
// over time as we gain more experience with performance.
internal const int YieldThreshold = 10; // When to switch over to a true yield.
private const int Sleep0EveryHowManyYields = 5; // After how many yields should we Sleep(0)?
internal const int DefaultSleep1Threshold = 20; // After how many yields should we Sleep(1) frequently?
/// <summary>
/// A suggested number of spin iterations before doing a proper wait, such as waiting on an event that becomes signaled
/// when the resource becomes available.
/// </summary>
/// <remarks>
/// These numbers were arrived at by experimenting with different numbers in various cases that currently use it. It's
/// only a suggested value and typically works well when the proper wait is something like an event.
///
/// Spinning less can lead to early waiting and more context switching, spinning more can decrease latency but may use
/// up some CPU time unnecessarily. Depends on the situation too, for instance SemaphoreSlim uses more iterations
/// because the waiting there is currently a lot more expensive (involves more spinning, taking a lock, etc.). It also
/// depends on the likelihood of the spin being successful and how long the wait would be but those are not accounted
/// for here.
/// </remarks>
internal static readonly int SpinCountforSpinBeforeWait = PlatformHelper.IsSingleProcessor ? 1 : 35;
internal const int Sleep1ThresholdForSpinBeforeWait = 40; // should be greater than SpinCountforSpinBeforeWait
// The number of times we've spun already.
private int _count;

/// <summary>
/// Gets the number of times <see cref="SpinOnce"/> has been called on this instance.
/// </summary>
public int Count
{
    get => _count;
    internal set
    {
        // Internal callers may seed or rewind the counter, but it must
        // never be set negative.
        Debug.Assert(value >= 0);
        _count = value;
    }
}
/// <summary>
/// Gets whether the next call to <see cref="SpinOnce"/> will yield the processor, triggering a
/// forced context switch.
/// </summary>
/// <value>Whether the next call to <see cref="SpinOnce"/> will yield the processor, triggering a
/// forced context switch.</value>
/// <remarks>
/// On a single-CPU machine, <see cref="SpinOnce"/> always yields the processor. On machines with
/// multiple CPUs, <see cref="SpinOnce"/> may yield after an unspecified number of calls.
/// </remarks>
// Single-processor machines always yield: busy-spinning there cannot let
// another thread make progress.
public bool NextSpinWillYield => _count >= YieldThreshold || PlatformHelper.IsSingleProcessor;
/// <summary>
/// Performs a single spin.
/// </summary>
/// <remarks>
/// Typically called in a loop; the behavior may change with the number of
/// times <see cref="SpinOnce"/> has already been called on this instance.
/// </remarks>
public void SpinOnce() => SpinOnce(DefaultSleep1Threshold);
// Core spin step: spins with YIELD/PAUSE early on, then interleaves
// Thread.Yield/Sleep(0), and finally falls back to Sleep(1) once _count
// reaches sleep1Threshold.
internal void SpinOnce(int sleep1Threshold)
{
    Debug.Assert(sleep1Threshold >= YieldThreshold || PlatformHelper.IsSingleProcessor); // so that NextSpinWillYield behaves as requested

    // (_count - YieldThreshold) % 2 == 0: The purpose of this check is to interleave Thread.Yield/Sleep(0) with
    // Thread.SpinWait. Otherwise, the following issues occur:
    //   - When there are no threads to switch to, Yield and Sleep(0) become no-op and it turns the spin loop into a
    //     busy-spin that may quickly reach the max spin count and cause the thread to enter a wait state, or may
    //     just busy-spin for longer than desired before a Sleep(1). Completing the spin loop too early can cause
    //     excessive context switcing if a wait follows, and entering the Sleep(1) stage too early can cause
    //     excessive delays.
    //   - If there are multiple threads doing Yield and Sleep(0) (typically from the same spin loop due to
    //     contention), they may switch between one another, delaying work that can make progress.
    if ((
            _count >= YieldThreshold &&
            (_count >= sleep1Threshold || (_count - YieldThreshold) % 2 == 0)
        ) ||
        PlatformHelper.IsSingleProcessor)
    {
        //
        // We must yield.
        //
        // We prefer to call Thread.Yield first, triggering a SwitchToThread. This
        // unfortunately doesn't consider all runnable threads on all OS SKUs. In
        // some cases, it may only consult the runnable threads whose ideal processor
        // is the one currently executing code. Thus we occasionally issue a call to
        // Sleep(0), which considers all runnable threads at equal priority. Even this
        // is insufficient since we may be spin waiting for lower priority threads to
        // execute; we therefore must call Sleep(1) once in a while too, which considers
        // all runnable threads, regardless of ideal processor and priority, but may
        // remove the thread from the scheduler's queue for 10+ms, if the system is
        // configured to use the (default) coarse-grained system timer.
        //

        if (_count >= sleep1Threshold)
        {
            RuntimeThread.Sleep(1);
        }
        else
        {
            // Iterations before YieldThreshold all count as yields (on this
            // path); after it, only every other iteration yields — hence the
            // division by 2.
            int yieldsSoFar = _count >= YieldThreshold ? (_count - YieldThreshold) / 2 : _count;
            if ((yieldsSoFar % Sleep0EveryHowManyYields) == (Sleep0EveryHowManyYields - 1))
            {
                RuntimeThread.Sleep(0);
            }
            else
            {
                RuntimeThread.Yield();
            }
        }
    }
    else
    {
        //
        // Otherwise, we will spin.
        //
        // We do this using the CLR's SpinWait API, which is just a busy loop that
        // issues YIELD/PAUSE instructions to ensure multi-threaded CPUs can react
        // intelligently to avoid starving. (These are NOOPs on other CPUs.) We
        // choose a number for the loop iteration count such that each successive
        // call spins for longer, to reduce cache contention. We cap the total
        // number of spins we are willing to tolerate to reduce delay to the caller,
        // since we expect most callers will eventually block anyway.
        //
        // Also, cap the maximum spin count to a value such that many thousands of CPU cycles would not be wasted doing
        // the equivalent of YieldProcessor(), as that that point SwitchToThread/Sleep(0) are more likely to be able to
        // allow other useful work to run. Long YieldProcessor() loops can help to reduce contention, but Sleep(1) is
        // usually better for that.
        //
        // RuntimeThread.OptimalMaxSpinWaitsPerSpinIteration:
        //   - See Thread::InitializeYieldProcessorNormalized(), which describes and calculates this value.
        //
        int n = RuntimeThread.OptimalMaxSpinWaitsPerSpinIteration;
        if (_count <= 30 && (1 << _count) < n)
        {
            // Exponential backoff: spin 2^_count iterations until that
            // exceeds the platform's optimal cap.
            n = 1 << _count;
        }
        RuntimeThread.SpinWait(n);
    }

    // Finally, increment our spin counter. On overflow, wrap back to
    // YieldThreshold (not 0) so the established yielding behavior persists.
    _count = (_count == int.MaxValue ? YieldThreshold : _count + 1);
}
/// <summary>
/// Resets the spin counter.
/// </summary>
/// <remarks>
/// This makes <see cref="SpinOnce"/> and <see cref="NextSpinWillYield"/> behave as though no calls
/// to <see cref="SpinOnce"/> had been issued on this instance. If a <see cref="SpinWait"/> instance
/// is reused many times, it may be useful to reset it to avoid yielding too soon.
/// </remarks>
public void Reset()
{
    // Zero is the initial state; SpinOnce will start from pure spinning again.
    _count = 0;
}
#region Static Methods
/// <summary>
/// Spins until the specified condition is satisfied.
/// </summary>
/// <param name="condition">A delegate to be executed over and over until it returns true.</param>
/// <exception cref="ArgumentNullException">The <paramref name="condition"/> argument is null.</exception>
public static void SpinUntil(Func<bool> condition)
{
// Delegate to the timed overload with an infinite timeout; with no timeout the
// call can only return once the condition has been observed to be true.
#if DEBUG
bool satisfied =
#endif
SpinUntil(condition, Timeout.Infinite);
#if DEBUG
Debug.Assert(satisfied);
#endif
}
/// <summary>
/// Spins until the specified condition is satisfied or until the specified timeout is expired.
/// </summary>
/// <param name="condition">A delegate to be executed over and over until it returns true.</param>
/// <param name="timeout">
/// A <see cref="TimeSpan"/> that represents the number of milliseconds to wait,
/// or a TimeSpan that represents -1 milliseconds to wait indefinitely.</param>
/// <returns>True if the condition is satisfied within the timeout; otherwise, false</returns>
/// <exception cref="ArgumentNullException">The <paramref name="condition"/> argument is null.</exception>
/// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="timeout"/> is a negative number
/// other than -1 milliseconds, which represents an infinite time-out -or- timeout is greater than
/// <see cref="System.Int32.MaxValue"/>.</exception>
public static bool SpinUntil(Func<bool> condition, TimeSpan timeout)
{
// Convert to whole milliseconds and reject anything outside [-1, int.MaxValue];
// -1 is the sentinel for an infinite wait.
long timeoutMs = (long)timeout.TotalMilliseconds;
if (timeoutMs < -1 || timeoutMs > int.MaxValue)
{
throw new System.ArgumentOutOfRangeException(
nameof(timeout), timeout, SR.SpinWait_SpinUntil_TimeoutWrong);
}
// Hand off to the millisecond-based overload, which does the actual spinning.
return SpinUntil(condition, (int)timeoutMs);
}
/// <summary>
/// Spins until the specified condition is satisfied or until the specified timeout is expired.
/// </summary>
/// <param name="condition">A delegate to be executed over and over until it returns true.</param>
/// <param name="millisecondsTimeout">The number of milliseconds to wait, or <see
/// cref="System.Threading.Timeout.Infinite"/> (-1) to wait indefinitely.</param>
/// <returns>True if the condition is satisfied within the timeout; otherwise, false</returns>
/// <exception cref="ArgumentNullException">The <paramref name="condition"/> argument is null.</exception>
/// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="millisecondsTimeout"/> is a
/// negative number other than -1, which represents an infinite time-out.</exception>
public static bool SpinUntil(Func<bool> condition, int millisecondsTimeout)
{
// Any negative value other than Timeout.Infinite (-1) is invalid.
if (millisecondsTimeout < Timeout.Infinite)
{
throw new ArgumentOutOfRangeException(
nameof(millisecondsTimeout), millisecondsTimeout, SR.SpinWait_SpinUntil_TimeoutWrong);
}
if (condition == null)
{
throw new ArgumentNullException(nameof(condition), SR.SpinWait_SpinUntil_ArgumentNull);
}
// Only read the clock when a finite, non-zero timeout actually has to be tracked;
// a zero timeout never spins and an infinite one never expires.
bool trackDeadline = millisecondsTimeout != 0 && millisecondsTimeout != Timeout.Infinite;
uint begin = trackDeadline ? TimeoutHelper.GetTime() : 0;
var waiter = new SpinWait();
for (; ; )
{
if (condition())
{
return true;
}
// With a zero timeout this is effectively a single non-blocking poll.
if (millisecondsTimeout == 0)
{
return false;
}
waiter.SpinOnce();
// Checking the clock is comparatively expensive, so only do it once the
// spinner is about to start yielding the thread anyway.
if (trackDeadline && waiter.NextSpinWillYield &&
millisecondsTimeout <= (TimeoutHelper.GetTime() - begin))
{
return false;
}
}
}
#endregion
}
/// <summary>
/// A helper class to get the number of processors, it updates the numbers of processors every sampling interval.
/// </summary>
internal static class PlatformHelper
{
// Refresh the cached processor count at most once per this many milliseconds.
private const int PROCESSOR_COUNT_REFRESH_INTERVAL_MS = 30000;
private static volatile int s_processorCount; // Last observed count; 0 means "never sampled".
private static volatile int s_lastProcessorCountRefreshTicks; // Environment.TickCount at the last refresh.
/// <summary>
/// Gets the number of available processors, re-sampling Environment.ProcessorCount
/// at most once per refresh interval.
/// </summary>
internal static int ProcessorCount
{
get
{
int tickNow = Environment.TickCount;
int cached = s_processorCount;
// Re-sample when nothing has been cached yet or the cache has gone stale.
// Tick subtraction is wraparound-safe for intervals under ~24.9 days.
if (cached == 0 || (tickNow - s_lastProcessorCountRefreshTicks) >= PROCESSOR_COUNT_REFRESH_INTERVAL_MS)
{
cached = Environment.ProcessorCount;
s_processorCount = cached;
s_lastProcessorCountRefreshTicks = tickNow;
}
Debug.Assert(cached > 0,
"Processor count should be greater than 0.");
return cached;
}
}
/// <summary>
/// Gets whether the current machine has only a single processor.
/// </summary>
/// <remarks>This typically does not change on a machine, so it's checked only once.</remarks>
internal static readonly bool IsSingleProcessor = ProcessorCount == 1;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System {
using System;
using System.Threading;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Runtime.Serialization;
using System.Security.Permissions;
using System.Diagnostics.Contracts;
// DateTimeOffset is a value type that consists of a DateTime and a time zone offset,
// i.e., how far away the time is from GMT. The DateTime is stored whole, and the offset
// is stored as an Int16 internally to save space, but presented as a TimeSpan.
//
// The range is constrained so that both the represented clock time and the represented
// UTC time fit within the boundaries of MaxValue. This gives it the same range as DateTime
// for actual UTC times, and a slightly constrained range on one end when an offset is
// present.
//
// This class should be substitutable for date time in most cases; so most operations
// effectively work on the clock time. However, the underlying UTC time is what counts
// for the purposes of identity, sorting and subtracting two instances.
//
//
// There are theoretically two date times stored, the UTC and the relative local representation
// or the 'clock' time. It actually does not matter which is stored in m_dateTime, so it is desirable
// for most methods to go through the helpers UtcDateTime and ClockDateTime both to abstract this
// out and for internal readability.
[StructLayout(LayoutKind.Auto)]
#if FEATURE_SERIALIZATION
[Serializable]
#endif
public struct DateTimeOffset : IComparable, IFormattable,
IComparable<DateTimeOffset>, IEquatable<DateTimeOffset>
#if FEATURE_SERIALIZATION
, ISerializable, IDeserializationCallback
#endif
{
// Constants
internal const Int64 MaxOffset = TimeSpan.TicksPerHour * 14;
internal const Int64 MinOffset = -MaxOffset;
private const long UnixEpochTicks = TimeSpan.TicksPerDay * DateTime.DaysTo1970; // 621,355,968,000,000,000
private const long UnixEpochSeconds = UnixEpochTicks / TimeSpan.TicksPerSecond; // 62,135,596,800
private const long UnixEpochMilliseconds = UnixEpochTicks / TimeSpan.TicksPerMillisecond; // 62,135,596,800,000
internal const long UnixMinSeconds = DateTime.MinTicks / TimeSpan.TicksPerSecond - UnixEpochSeconds;
internal const long UnixMaxSeconds = DateTime.MaxTicks / TimeSpan.TicksPerSecond - UnixEpochSeconds;
// Static Fields
public static readonly DateTimeOffset MinValue = new DateTimeOffset(DateTime.MinTicks, TimeSpan.Zero);
public static readonly DateTimeOffset MaxValue = new DateTimeOffset(DateTime.MaxTicks, TimeSpan.Zero);
// Instance Fields
private DateTime m_dateTime;
private Int16 m_offsetMinutes;
// Constructors
// Constructs a DateTimeOffset from a tick count and offset
public DateTimeOffset(long ticks, TimeSpan offset) {
m_offsetMinutes = ValidateOffset(offset);
// Let the DateTime constructor do the range checks
DateTime dateTime = new DateTime(ticks);
m_dateTime = ValidateDate(dateTime, offset);
}
// Constructs a DateTimeOffset from a DateTime. For Local and Unspecified kinds,
// extracts the local offset. For UTC, creates a UTC instance with a zero offset.
public DateTimeOffset(DateTime dateTime) {
TimeSpan offset;
if (dateTime.Kind != DateTimeKind.Utc) {
// Local and Unspecified are both treated as Local
offset = TimeZoneInfo.GetLocalUtcOffset(dateTime, TimeZoneInfoOptions.NoThrowOnInvalidTime);
}
else {
offset = new TimeSpan(0);
}
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(dateTime, offset);
}
// Constructs a DateTimeOffset from a DateTime. And an offset. Always makes the clock time
// consistent with the DateTime. For Utc ensures the offset is zero. For local, ensures that
// the offset corresponds to the local.
public DateTimeOffset(DateTime dateTime, TimeSpan offset) {
if (dateTime.Kind == DateTimeKind.Local) {
if (offset != TimeZoneInfo.GetLocalUtcOffset(dateTime, TimeZoneInfoOptions.NoThrowOnInvalidTime)) {
throw new ArgumentException(Environment.GetResourceString("Argument_OffsetLocalMismatch"), "offset");
}
}
else if (dateTime.Kind == DateTimeKind.Utc) {
if (offset != TimeSpan.Zero) {
throw new ArgumentException(Environment.GetResourceString("Argument_OffsetUtcMismatch"), "offset");
}
}
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(dateTime, offset);
}
// Constructs a DateTimeOffset from a given year, month, day, hour,
// minute, second and offset.
public DateTimeOffset(int year, int month, int day, int hour, int minute, int second, TimeSpan offset) {
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(new DateTime(year, month, day, hour, minute, second), offset);
}
// Constructs a DateTimeOffset from a given year, month, day, hour,
// minute, second, millsecond and offset
public DateTimeOffset(int year, int month, int day, int hour, int minute, int second, int millisecond, TimeSpan offset) {
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(new DateTime(year, month, day, hour, minute, second, millisecond), offset);
}
// Constructs a DateTimeOffset from a given year, month, day, hour,
// minute, second, millsecond, Calendar and offset.
public DateTimeOffset(int year, int month, int day, int hour, int minute, int second, int millisecond, Calendar calendar, TimeSpan offset) {
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(new DateTime(year, month, day, hour, minute, second, millisecond, calendar), offset);
}
// Returns a DateTimeOffset representing the current date and time. The
// resolution of the returned value depends on the system timer. For
// Windows NT 3.5 and later the timer resolution is approximately 10ms,
// for Windows NT 3.1 it is approximately 16ms, and for Windows 95 and 98
// it is approximately 55ms.
//
public static DateTimeOffset Now {
get {
return new DateTimeOffset(DateTime.Now);
}
}
public static DateTimeOffset UtcNow {
get {
return new DateTimeOffset(DateTime.UtcNow);
}
}
public DateTime DateTime {
get {
return ClockDateTime;
}
}
public DateTime UtcDateTime {
[Pure]
get {
Contract.Ensures(Contract.Result<DateTime>().Kind == DateTimeKind.Utc);
return DateTime.SpecifyKind(m_dateTime, DateTimeKind.Utc);
}
}
public DateTime LocalDateTime {
[Pure]
get {
Contract.Ensures(Contract.Result<DateTime>().Kind == DateTimeKind.Local);
return UtcDateTime.ToLocalTime();
}
}
// Adjust to a given offset with the same UTC time. Can throw ArgumentException
//
public DateTimeOffset ToOffset(TimeSpan offset) {
return new DateTimeOffset((m_dateTime + offset).Ticks, offset);
}
// Instance Properties
// The clock or visible time represented. This is just a wrapper around the internal date because this is
// the chosen storage mechanism. Going through this helper is good for readability and maintainability.
// This should be used for display but not identity.
private DateTime ClockDateTime {
get {
return new DateTime((m_dateTime + Offset).Ticks, DateTimeKind.Unspecified);
}
}
// Returns the date part of this DateTimeOffset. The resulting value
// corresponds to this DateTimeOffset with the time-of-day part set to
// zero (midnight).
//
public DateTime Date {
get {
return ClockDateTime.Date;
}
}
// Returns the day-of-month part of this DateTimeOffset. The returned
// value is an integer between 1 and 31.
//
public int Day {
get {
Contract.Ensures(Contract.Result<int>() >= 1);
Contract.Ensures(Contract.Result<int>() <= 31);
return ClockDateTime.Day;
}
}
// Returns the day-of-week part of this DateTimeOffset. The returned value
// is an integer between 0 and 6, where 0 indicates Sunday, 1 indicates
// Monday, 2 indicates Tuesday, 3 indicates Wednesday, 4 indicates
// Thursday, 5 indicates Friday, and 6 indicates Saturday.
//
public DayOfWeek DayOfWeek {
get {
Contract.Ensures(Contract.Result<DayOfWeek>() >= DayOfWeek.Sunday);
Contract.Ensures(Contract.Result<DayOfWeek>() <= DayOfWeek.Saturday);
return ClockDateTime.DayOfWeek;
}
}
// Returns the day-of-year part of this DateTimeOffset. The returned value
// is an integer between 1 and 366.
//
public int DayOfYear {
get {
Contract.Ensures(Contract.Result<int>() >= 1);
Contract.Ensures(Contract.Result<int>() <= 366); // leap year
return ClockDateTime.DayOfYear;
}
}
// Returns the hour part of this DateTimeOffset. The returned value is an
// integer between 0 and 23.
//
public int Hour {
get {
Contract.Ensures(Contract.Result<int>() >= 0);
Contract.Ensures(Contract.Result<int>() < 24);
return ClockDateTime.Hour;
}
}
// Returns the millisecond part of this DateTimeOffset. The returned value
// is an integer between 0 and 999.
//
public int Millisecond {
get {
Contract.Ensures(Contract.Result<int>() >= 0);
Contract.Ensures(Contract.Result<int>() < 1000);
return ClockDateTime.Millisecond;
}
}
// Returns the minute part of this DateTimeOffset. The returned value is
// an integer between 0 and 59.
//
public int Minute {
get {
Contract.Ensures(Contract.Result<int>() >= 0);
Contract.Ensures(Contract.Result<int>() < 60);
return ClockDateTime.Minute;
}
}
// Returns the month part of this DateTimeOffset. The returned value is an
// integer between 1 and 12.
//
public int Month {
get {
Contract.Ensures(Contract.Result<int>() >= 1);
return ClockDateTime.Month;
}
}
public TimeSpan Offset {
get {
return new TimeSpan(0, m_offsetMinutes, 0);
}
}
// Returns the second part of this DateTimeOffset. The returned value is
// an integer between 0 and 59.
//
public int Second {
get {
Contract.Ensures(Contract.Result<int>() >= 0);
Contract.Ensures(Contract.Result<int>() < 60);
return ClockDateTime.Second;
}
}
// Returns the tick count for this DateTimeOffset. The returned value is
// the number of 100-nanosecond intervals that have elapsed since 1/1/0001
// 12:00am.
//
public long Ticks {
get {
return ClockDateTime.Ticks;
}
}
public long UtcTicks {
get {
return UtcDateTime.Ticks;
}
}
// Returns the time-of-day part of this DateTimeOffset. The returned value
// is a TimeSpan that indicates the time elapsed since midnight.
//
public TimeSpan TimeOfDay {
get {
return ClockDateTime.TimeOfDay;
}
}
// Returns the year part of this DateTimeOffset. The returned value is an
// integer between 1 and 9999.
//
public int Year {
get {
Contract.Ensures(Contract.Result<int>() >= 1 && Contract.Result<int>() <= 9999);
return ClockDateTime.Year;
}
}
// Returns the DateTimeOffset resulting from adding the given
// TimeSpan to this DateTimeOffset.
//
public DateTimeOffset Add(TimeSpan timeSpan) {
return new DateTimeOffset(ClockDateTime.Add(timeSpan), Offset);
}
// Returns the DateTimeOffset resulting from adding a fractional number of
// days to this DateTimeOffset. The result is computed by rounding the
// fractional number of days given by value to the nearest
// millisecond, and adding that interval to this DateTimeOffset. The
// value argument is permitted to be negative.
//
public DateTimeOffset AddDays(double days) {
return new DateTimeOffset(ClockDateTime.AddDays(days), Offset);
}
// Returns the DateTimeOffset resulting from adding a fractional number of
// hours to this DateTimeOffset. The result is computed by rounding the
// fractional number of hours given by value to the nearest
// millisecond, and adding that interval to this DateTimeOffset. The
// value argument is permitted to be negative.
//
public DateTimeOffset AddHours(double hours) {
return new DateTimeOffset(ClockDateTime.AddHours(hours), Offset);
}
// Returns the DateTimeOffset resulting from the given number of
// milliseconds to this DateTimeOffset. The result is computed by rounding
// the number of milliseconds given by value to the nearest integer,
// and adding that interval to this DateTimeOffset. The value
// argument is permitted to be negative.
//
public DateTimeOffset AddMilliseconds(double milliseconds) {
return new DateTimeOffset(ClockDateTime.AddMilliseconds(milliseconds), Offset);
}
// Returns the DateTimeOffset resulting from adding a fractional number of
// minutes to this DateTimeOffset. The result is computed by rounding the
// fractional number of minutes given by value to the nearest
// millisecond, and adding that interval to this DateTimeOffset. The
// value argument is permitted to be negative.
//
public DateTimeOffset AddMinutes(double minutes) {
return new DateTimeOffset(ClockDateTime.AddMinutes(minutes), Offset);
}
public DateTimeOffset AddMonths(int months) {
return new DateTimeOffset(ClockDateTime.AddMonths(months), Offset);
}
// Returns the DateTimeOffset resulting from adding a fractional number of
// seconds to this DateTimeOffset. The result is computed by rounding the
// fractional number of seconds given by value to the nearest
// millisecond, and adding that interval to this DateTimeOffset. The
// value argument is permitted to be negative.
//
public DateTimeOffset AddSeconds(double seconds) {
return new DateTimeOffset(ClockDateTime.AddSeconds(seconds), Offset);
}
// Returns the DateTimeOffset resulting from adding the given number of
// 100-nanosecond ticks to this DateTimeOffset. The value argument
// is permitted to be negative.
//
public DateTimeOffset AddTicks(long ticks) {
return new DateTimeOffset(ClockDateTime.AddTicks(ticks), Offset);
}
// Returns the DateTimeOffset resulting from adding the given number of
// years to this DateTimeOffset. The result is computed by incrementing
// (or decrementing) the year part of this DateTimeOffset by value
// years. If the month and day of this DateTimeOffset is 2/29, and if the
// resulting year is not a leap year, the month and day of the resulting
// DateTimeOffset becomes 2/28. Otherwise, the month, day, and time-of-day
// parts of the result are the same as those of this DateTimeOffset.
//
public DateTimeOffset AddYears(int years) {
return new DateTimeOffset(ClockDateTime.AddYears(years), Offset);
}
// Compares two DateTimeOffset values, returning an integer that indicates
// their relationship.
//
public static int Compare(DateTimeOffset first, DateTimeOffset second) {
return DateTime.Compare(first.UtcDateTime, second.UtcDateTime);
}
// Compares this DateTimeOffset to a given object. This method provides an
// implementation of the IComparable interface. The object
// argument must be another DateTimeOffset, or otherwise an exception
// occurs. Null is considered less than any instance.
//
int IComparable.CompareTo(Object obj) {
if (obj == null) return 1;
if (!(obj is DateTimeOffset)) {
throw new ArgumentException(Environment.GetResourceString("Arg_MustBeDateTimeOffset"));
}
DateTime objUtc = ((DateTimeOffset)obj).UtcDateTime;
DateTime utc = UtcDateTime;
if (utc > objUtc) return 1;
if (utc < objUtc) return -1;
return 0;
}
public int CompareTo(DateTimeOffset other) {
DateTime otherUtc = other.UtcDateTime;
DateTime utc = UtcDateTime;
if (utc > otherUtc) return 1;
if (utc < otherUtc) return -1;
return 0;
}
// Checks if this DateTimeOffset is equal to a given object. Returns
// true if the given object is a boxed DateTimeOffset and its value
// is equal to the value of this DateTimeOffset. Returns false
// otherwise.
//
public override bool Equals(Object obj) {
if (obj is DateTimeOffset) {
return UtcDateTime.Equals(((DateTimeOffset)obj).UtcDateTime);
}
return false;
}
public bool Equals(DateTimeOffset other) {
return UtcDateTime.Equals(other.UtcDateTime);
}
public bool EqualsExact(DateTimeOffset other) {
//
// returns true when the ClockDateTime, Kind, and Offset match
//
// currently the Kind should always be Unspecified, but there is always the possibility that a future version
// of DateTimeOffset overloads the Kind field
//
return (ClockDateTime == other.ClockDateTime && Offset == other.Offset && ClockDateTime.Kind == other.ClockDateTime.Kind);
}
// Compares two DateTimeOffset values for equality. Returns true if
// the two DateTimeOffset values are equal, or false if they are
// not equal.
//
public static bool Equals(DateTimeOffset first, DateTimeOffset second) {
return DateTime.Equals(first.UtcDateTime, second.UtcDateTime);
}
// Creates a DateTimeOffset from a Windows filetime. A Windows filetime is
// a long representing the date and time as the number of
// 100-nanosecond intervals that have elapsed since 1/1/1601 12:00am.
//
public static DateTimeOffset FromFileTime(long fileTime) {
return new DateTimeOffset(DateTime.FromFileTime(fileTime));
}
public static DateTimeOffset FromUnixTimeSeconds(long seconds) {
if (seconds < UnixMinSeconds || seconds > UnixMaxSeconds) {
throw new ArgumentOutOfRangeException("seconds",
string.Format(Environment.GetResourceString("ArgumentOutOfRange_Range"), UnixMinSeconds, UnixMaxSeconds));
}
long ticks = seconds * TimeSpan.TicksPerSecond + UnixEpochTicks;
return new DateTimeOffset(ticks, TimeSpan.Zero);
}
public static DateTimeOffset FromUnixTimeMilliseconds(long milliseconds) {
const long MinMilliseconds = DateTime.MinTicks / TimeSpan.TicksPerMillisecond - UnixEpochMilliseconds;
const long MaxMilliseconds = DateTime.MaxTicks / TimeSpan.TicksPerMillisecond - UnixEpochMilliseconds;
if (milliseconds < MinMilliseconds || milliseconds > MaxMilliseconds) {
throw new ArgumentOutOfRangeException("milliseconds",
string.Format(Environment.GetResourceString("ArgumentOutOfRange_Range"), MinMilliseconds, MaxMilliseconds));
}
long ticks = milliseconds * TimeSpan.TicksPerMillisecond + UnixEpochTicks;
return new DateTimeOffset(ticks, TimeSpan.Zero);
}
// ----- SECTION: private serialization instance methods ----------------*
#if FEATURE_SERIALIZATION
void IDeserializationCallback.OnDeserialization(Object sender) {
try {
m_offsetMinutes = ValidateOffset(Offset);
m_dateTime = ValidateDate(ClockDateTime, Offset);
}
catch (ArgumentException e) {
throw new SerializationException(Environment.GetResourceString("Serialization_InvalidData"), e);
}
}
[System.Security.SecurityCritical] // auto-generated_required
void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) {
if (info == null) {
throw new ArgumentNullException("info");
}
Contract.EndContractBlock();
info.AddValue("DateTime", m_dateTime);
info.AddValue("OffsetMinutes", m_offsetMinutes);
}
DateTimeOffset(SerializationInfo info, StreamingContext context) {
if (info == null) {
throw new ArgumentNullException("info");
}
m_dateTime = (DateTime)info.GetValue("DateTime", typeof(DateTime));
m_offsetMinutes = (Int16)info.GetValue("OffsetMinutes", typeof(Int16));
}
#endif
// Returns the hash code for this DateTimeOffset.
//
public override int GetHashCode() {
return UtcDateTime.GetHashCode();
}
// Constructs a DateTimeOffset from a string. The string must specify a
// date and optionally a time in a culture-specific or universal format.
// Leading and trailing whitespace characters are allowed.
//
public static DateTimeOffset Parse(String input) {
TimeSpan offset;
DateTime dateResult = DateTimeParse.Parse(input,
DateTimeFormatInfo.CurrentInfo,
DateTimeStyles.None,
out offset);
return new DateTimeOffset(dateResult.Ticks, offset);
}
// Constructs a DateTimeOffset from a string. The string must specify a
// date and optionally a time in a culture-specific or universal format.
// Leading and trailing whitespace characters are allowed.
//
public static DateTimeOffset Parse(String input, IFormatProvider formatProvider) {
return Parse(input, formatProvider, DateTimeStyles.None);
}
public static DateTimeOffset Parse(String input, IFormatProvider formatProvider, DateTimeStyles styles) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult = DateTimeParse.Parse(input,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out offset);
return new DateTimeOffset(dateResult.Ticks, offset);
}
// Constructs a DateTimeOffset from a string. The string must specify a
// date and optionally a time in a culture-specific or universal format.
// Leading and trailing whitespace characters are allowed.
//
public static DateTimeOffset ParseExact(String input, String format, IFormatProvider formatProvider) {
return ParseExact(input, format, formatProvider, DateTimeStyles.None);
}
// Constructs a DateTimeOffset from a string. The string must specify a
// date and optionally a time in a culture-specific or universal format.
// Leading and trailing whitespace characters are allowed.
//
public static DateTimeOffset ParseExact(String input, String format, IFormatProvider formatProvider, DateTimeStyles styles) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult = DateTimeParse.ParseExact(input,
format,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out offset);
return new DateTimeOffset(dateResult.Ticks, offset);
}
public static DateTimeOffset ParseExact(String input, String[] formats, IFormatProvider formatProvider, DateTimeStyles styles) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult = DateTimeParse.ParseExactMultiple(input,
formats,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out offset);
return new DateTimeOffset(dateResult.Ticks, offset);
}
public TimeSpan Subtract(DateTimeOffset value) {
return UtcDateTime.Subtract(value.UtcDateTime);
}
public DateTimeOffset Subtract(TimeSpan value) {
return new DateTimeOffset(ClockDateTime.Subtract(value), Offset);
}
public long ToFileTime() {
return UtcDateTime.ToFileTime();
}
public long ToUnixTimeSeconds() {
// Truncate sub-second precision before offsetting by the Unix Epoch to avoid
// the last digit being off by one for dates that result in negative Unix times.
//
// For example, consider the DateTimeOffset 12/31/1969 12:59:59.001 +0
// ticks = 621355967990010000
// ticksFromEpoch = ticks - UnixEpochTicks = -9990000
// secondsFromEpoch = ticksFromEpoch / TimeSpan.TicksPerSecond = 0
//
// Notice that secondsFromEpoch is rounded *up* by the truncation induced by integer division,
// whereas we actually always want to round *down* when converting to Unix time. This happens
// automatically for positive Unix time values. Now the example becomes:
// seconds = ticks / TimeSpan.TicksPerSecond = 62135596799
// secondsFromEpoch = seconds - UnixEpochSeconds = -1
//
// In other words, we want to consistently round toward the time 1/1/0001 00:00:00,
// rather than toward the Unix Epoch (1/1/1970 00:00:00).
long seconds = UtcDateTime.Ticks / TimeSpan.TicksPerSecond;
return seconds - UnixEpochSeconds;
}
public long ToUnixTimeMilliseconds() {
// Truncate sub-millisecond precision before offsetting by the Unix Epoch to avoid
// the last digit being off by one for dates that result in negative Unix times
long milliseconds = UtcDateTime.Ticks / TimeSpan.TicksPerMillisecond;
return milliseconds - UnixEpochMilliseconds;
}
public DateTimeOffset ToLocalTime() {
return ToLocalTime(false);
}
internal DateTimeOffset ToLocalTime(bool throwOnOverflow)
{
return new DateTimeOffset(UtcDateTime.ToLocalTime(throwOnOverflow));
}
public override String ToString() {
Contract.Ensures(Contract.Result<String>() != null);
return DateTimeFormat.Format(ClockDateTime, null, DateTimeFormatInfo.CurrentInfo, Offset);
}
public String ToString(String format) {
Contract.Ensures(Contract.Result<String>() != null);
return DateTimeFormat.Format(ClockDateTime, format, DateTimeFormatInfo.CurrentInfo, Offset);
}
public String ToString(IFormatProvider formatProvider) {
Contract.Ensures(Contract.Result<String>() != null);
return DateTimeFormat.Format(ClockDateTime, null, DateTimeFormatInfo.GetInstance(formatProvider), Offset);
}
public String ToString(String format, IFormatProvider formatProvider) {
Contract.Ensures(Contract.Result<String>() != null);
return DateTimeFormat.Format(ClockDateTime, format, DateTimeFormatInfo.GetInstance(formatProvider), Offset);
}
public DateTimeOffset ToUniversalTime() {
return new DateTimeOffset(UtcDateTime);
}
public static Boolean TryParse(String input, out DateTimeOffset result) {
TimeSpan offset;
DateTime dateResult;
Boolean parsed = DateTimeParse.TryParse(input,
DateTimeFormatInfo.CurrentInfo,
DateTimeStyles.None,
out dateResult,
out offset);
result = new DateTimeOffset(dateResult.Ticks, offset);
return parsed;
}
public static Boolean TryParse(String input, IFormatProvider formatProvider, DateTimeStyles styles, out DateTimeOffset result) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult;
Boolean parsed = DateTimeParse.TryParse(input,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out dateResult,
out offset);
result = new DateTimeOffset(dateResult.Ticks, offset);
return parsed;
}
public static Boolean TryParseExact(String input, String format, IFormatProvider formatProvider, DateTimeStyles styles,
out DateTimeOffset result) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult;
Boolean parsed = DateTimeParse.TryParseExact(input,
format,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out dateResult,
out offset);
result = new DateTimeOffset(dateResult.Ticks, offset);
return parsed;
}
public static Boolean TryParseExact(String input, String[] formats, IFormatProvider formatProvider, DateTimeStyles styles,
out DateTimeOffset result) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult;
Boolean parsed = DateTimeParse.TryParseExactMultiple(input,
formats,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out dateResult,
out offset);
result = new DateTimeOffset(dateResult.Ticks, offset);
return parsed;
}
// Ensures the TimeSpan is valid to go in a DateTimeOffset.
private static Int16 ValidateOffset(TimeSpan offset) {
Int64 ticks = offset.Ticks;
if (ticks % TimeSpan.TicksPerMinute != 0) {
throw new ArgumentException(Environment.GetResourceString("Argument_OffsetPrecision"), "offset");
}
if (ticks < MinOffset || ticks > MaxOffset) {
throw new ArgumentOutOfRangeException("offset", Environment.GetResourceString("Argument_OffsetOutOfRange"));
}
return (Int16)(offset.Ticks / TimeSpan.TicksPerMinute);
}
// Ensures that the time and offset are in range.
private static DateTime ValidateDate(DateTime dateTime, TimeSpan offset) {
// The key validation is that both the UTC and clock times fit. The clock time is validated
// by the DateTime constructor.
Contract.Assert(offset.Ticks >= MinOffset && offset.Ticks <= MaxOffset, "Offset not validated.");
// This operation cannot overflow because offset should have already been validated to be within
// 14 hours and the DateTime instance is more than that distance from the boundaries of Int64.
Int64 utcTicks = dateTime.Ticks - offset.Ticks;
if (utcTicks < DateTime.MinTicks || utcTicks > DateTime.MaxTicks) {
throw new ArgumentOutOfRangeException("offset", Environment.GetResourceString("Argument_UTCOutOfRange"));
}
// make sure the Kind is set to Unspecified
//
return new DateTime(utcTicks, DateTimeKind.Unspecified);
}
// Validates parse styles for DateTimeOffset and strips the flags that are
// meaningless for it, returning the effective style set.
private static DateTimeStyles ValidateStyles(DateTimeStyles style, String parameterName) {
    if ((style & DateTimeFormatInfo.InvalidDateTimeStyles) != 0) {
        throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDateTimeStyles"), parameterName);
    }
    // AssumeLocal and AssumeUniversal are mutually exclusive.
    if (((style & (DateTimeStyles.AssumeLocal)) != 0) && ((style & (DateTimeStyles.AssumeUniversal)) != 0)) {
        throw new ArgumentException(Environment.GetResourceString("Argument_ConflictingDateTimeStyles"), parameterName);
    }
    // NoCurrentDateDefault never applies: a DateTimeOffset always has a date.
    if ((style & DateTimeStyles.NoCurrentDateDefault) != 0) {
        throw new ArgumentException(Environment.GetResourceString("Argument_DateTimeOffsetInvalidDateTimeStyles"), parameterName);
    }
    Contract.EndContractBlock();

    // RoundtripKind is ignored for backward compatibility with DateTime, and
    // AssumeLocal is ignored because it is already the DateTimeOffset.Parse default.
    return style & ~(DateTimeStyles.RoundtripKind | DateTimeStyles.AssumeLocal);
}
// Operators

// Implicit conversion from DateTime (see the DateTimeOffset(DateTime)
// constructor for how the offset is chosen).
public static implicit operator DateTimeOffset (DateTime dateTime) {
    return new DateTimeOffset(dateTime);
}

// Arithmetic adjusts the clock time and keeps the offset unchanged.
public static DateTimeOffset operator +(DateTimeOffset dateTimeOffset, TimeSpan timeSpan) {
    return new DateTimeOffset(dateTimeOffset.ClockDateTime + timeSpan, dateTimeOffset.Offset);
}
public static DateTimeOffset operator -(DateTimeOffset dateTimeOffset, TimeSpan timeSpan) {
    return new DateTimeOffset(dateTimeOffset.ClockDateTime - timeSpan, dateTimeOffset.Offset);
}

// Subtraction of two DateTimeOffsets, and every comparison below, is based on
// the absolute UTC instant (UtcDateTime) — not on the clock time or offset.
// Two values with different offsets but the same UTC instant compare equal.
public static TimeSpan operator -(DateTimeOffset left, DateTimeOffset right) {
    return left.UtcDateTime - right.UtcDateTime;
}
public static bool operator ==(DateTimeOffset left, DateTimeOffset right) {
    return left.UtcDateTime == right.UtcDateTime;
}
public static bool operator !=(DateTimeOffset left, DateTimeOffset right) {
    return left.UtcDateTime != right.UtcDateTime;
}
public static bool operator <(DateTimeOffset left, DateTimeOffset right) {
    return left.UtcDateTime < right.UtcDateTime;
}
public static bool operator <=(DateTimeOffset left, DateTimeOffset right) {
    return left.UtcDateTime <= right.UtcDateTime;
}
public static bool operator >(DateTimeOffset left, DateTimeOffset right) {
    return left.UtcDateTime > right.UtcDateTime;
}
public static bool operator >=(DateTimeOffset left, DateTimeOffset right) {
    return left.UtcDateTime >= right.UtcDateTime;
}
}
}
| |
using Microsoft.Xna.Framework.Graphics;
using System;
namespace Cocos2D
{
/// <summary>
/// A text label rendered with a TrueType font on top of CCSprite. Changing the
/// text, font name, font size, dimensions or either alignment re-renders the
/// label's texture (via updateTexture) when the current text is non-empty.
/// </summary>
public class CCLabelTTF : CCSprite, ICCLabelProtocol
{
    private float m_fFontSize;                    // font size in points; multiplied by the content scale factor when rendering
    private CCTextAlignment m_hAlignment;         // horizontal alignment used when rendering the texture
    private string m_pFontName;                   // font (family) name passed to the texture renderer
    protected string m_pString = String.Empty;    // current label text; kept non-null
    private CCSize m_tDimensions;                 // requested label size; zero lets the renderer compute width/height
    private CCVerticalTextAlignment m_vAlignment; // vertical alignment used when rendering the texture

    /// <summary>
    /// Creates an empty label with a default font ("Helvetica", size 12 — see Init).
    /// </summary>
    public CCLabelTTF ()
    {
        m_hAlignment = CCTextAlignment.Center;
        m_vAlignment = CCVerticalTextAlignment.Top;
        m_pFontName = string.Empty;
        m_fFontSize = 0.0f;
        // NOTE(review): Init() delegates to the 3-argument InitWithString, which
        // uses CCTextAlignment.Left — so the Center set above is overwritten.
        Init();
    }

    /// <summary>
    /// Creates a label with the given text, font and size; auto-sized,
    /// centered horizontally and top-aligned vertically.
    /// </summary>
    public CCLabelTTF (string text, string fontName, float fontSize) :
        this (text, fontName, fontSize, CCSize.Zero, CCTextAlignment.Center,
              CCVerticalTextAlignment.Top)
    { }

    /// <summary>
    /// Creates a label with explicit dimensions and horizontal alignment
    /// (vertical alignment defaults to Top).
    /// </summary>
    public CCLabelTTF (string text, string fontName, float fontSize, CCSize dimensions, CCTextAlignment hAlignment) :
        this (text, fontName, fontSize, dimensions, hAlignment, CCVerticalTextAlignment.Top)
    { }

    /// <summary>
    /// Creates a fully specified label.
    /// </summary>
    public CCLabelTTF (string text, string fontName, float fontSize, CCSize dimensions, CCTextAlignment hAlignment,
                       CCVerticalTextAlignment vAlignment)
    {
        InitWithString(text, fontName, fontSize, dimensions, hAlignment, vAlignment);
    }

    /// <summary>
    /// Font family name. Setting a new value re-renders the texture when the
    /// label currently has text.
    /// </summary>
    public string FontName
    {
        get { return m_pFontName; }
        set
        {
            if (m_pFontName != value)
            {
                m_pFontName = value;
                if (m_pString.Length > 0)
                {
                    Refresh();
                }
            }
        }
    }

    /// <summary>
    /// Font size in points. Setting a new value re-renders the texture when
    /// the label currently has text.
    /// </summary>
    public float FontSize
    {
        get { return m_fFontSize; }
        set
        {
            if (m_fFontSize != value)
            {
                m_fFontSize = value;
                if (m_pString.Length > 0)
                {
                    Refresh();
                }
            }
        }
    }

    /// <summary>
    /// Requested label dimensions; a zero size lets the renderer compute them.
    /// Setting a new value re-renders the texture when the label has text.
    /// </summary>
    public CCSize Dimensions
    {
        get { return m_tDimensions; }
        set
        {
            if (!m_tDimensions.Equals(value))
            {
                m_tDimensions = value;
                if (m_pString.Length > 0)
                {
                    Refresh();
                }
            }
        }
    }

    /// <summary>
    /// Vertical text alignment. Setting a new value re-renders the texture
    /// when the label has text.
    /// </summary>
    public CCVerticalTextAlignment VerticalAlignment
    {
        get { return m_vAlignment; }
        set
        {
            if (m_vAlignment != value)
            {
                m_vAlignment = value;
                if (m_pString.Length > 0)
                {
                    Refresh();
                }
            }
        }
    }

    /// <summary>
    /// Horizontal text alignment. Setting a new value re-renders the texture
    /// when the label has text.
    /// </summary>
    public CCTextAlignment HorizontalAlignment
    {
        get { return m_hAlignment; }
        set
        {
            if (m_hAlignment != value)
            {
                m_hAlignment = value;
                if (m_pString.Length > 0)
                {
                    Refresh();
                }
            }
        }
    }

    /// <summary>
    /// Re-renders the label texture, swallowing any rendering exception
    /// (best-effort: rendering can fail before the frame buffer is ready).
    /// </summary>
    internal void Refresh()
    {
        //
        // This can only happen when the frame buffer is ready...
        //
        try
        {
            updateTexture();
            Dirty = false;
        }
        catch (Exception)
        {
            // Intentionally ignored: keep the old texture if re-rendering fails.
        }
    }

    #region ICCLabelProtocol Members
    /*
     * This is where the texture should be created, but it messes with the drawing
     * of the object tree
     *
    public override void Draw()
    {
        if (Dirty)
        {
            updateTexture();
            Dirty = false;
        }
        base.Draw();
    }
    */

    /// <summary>
    /// The label's text. Setting a different value re-renders the texture
    /// immediately (not deferred to Draw) and clears the Dirty flag.
    /// </summary>
    public string Text
    {
        get { return m_pString; }
        set
        {
            // This is called in the update() call, so it should not do any drawing ...
            if (m_pString != value)
            {
                m_pString = value;
                updateTexture();
                Dirty = false;
            }
            // Dirty = true;
        }
    }

    [Obsolete("Use Label Property")]
    public void SetString(string label)
    {
        Text = label;
    }

    [Obsolete("Use Label Property")]
    public string GetString()
    {
        return Text;
    }
    #endregion

    public override string ToString()
    {
        return string.Format("FontName:{0}, FontSize:{1}", m_pFontName, m_fFontSize);
    }

    /// <summary>
    /// Initializes the label as empty with the default font.
    /// </summary>
    public override bool Init()
    {
        return InitWithString("", "Helvetica", 12);
    }

    public bool InitWithString(string label, string fontName, float fontSize, CCSize dimensions, CCTextAlignment alignment)
    {
        return InitWithString(label, fontName, fontSize, dimensions, alignment, CCVerticalTextAlignment.Top);
    }

    // NOTE(review): this overload left-aligns, while the (text, fontName,
    // fontSize) constructor centers — confirm the inconsistency is intended.
    public bool InitWithString(string label, string fontName, float fontSize)
    {
        return InitWithString(label, fontName, fontSize, CCSize.Zero, CCTextAlignment.Left,
                              CCVerticalTextAlignment.Top);
    }

    /// <summary>
    /// Stores all label settings and renders the initial texture (via the
    /// Text setter). Returns false when base sprite initialization fails.
    /// </summary>
    public bool InitWithString(string text, string fontName, float fontSize,
                               CCSize dimensions, CCTextAlignment hAlignment,
                               CCVerticalTextAlignment vAlignment)
    {
        if (base.Init())
        {
            // shader program
            //this->setShaderProgram(CCShaderCache::sharedShaderCache()->programForKey(SHADER_PROGRAM));

            m_tDimensions = new CCSize(dimensions.Width, dimensions.Height);
            m_hAlignment = hAlignment;
            m_vAlignment = vAlignment;
            m_pFontName = fontName;
            m_fFontSize = fontSize;

            // Assigning Text triggers the initial texture render.
            Text = (text);

            return true;
        }
        return false;
    }

    /// <summary>
    /// Disposes the old texture (if any), renders the current text into a new
    /// CCTexture2D and resizes the sprite's texture rect to match.
    /// </summary>
    private void updateTexture()
    {
        CCTexture2D tex;

        // Dump the old one
        if (Texture != null)
        {
            Texture.Dispose();
        }

        // let system compute label's width or height when its value is 0
        // refer to cocos2d-x issue #1430
        tex = new CCTexture2D();

        // NOTE(review): 'result' is ignored — a failed InitWithString still
        // installs 'tex' as the texture (see the disabled fallback below).
        var result = tex.InitWithString(m_pString,
                                        m_tDimensions.PointsToPixels(),
                                        m_hAlignment,
                                        m_vAlignment,
                                        m_pFontName,
                                        m_fFontSize * CCMacros.CCContentScaleFactor());

        //#if MACOS || IPHONE || IOS
        //            // There was a problem loading the text for some reason or another if result is not true
        //            // For MonoMac and IOS Applications we will try to create a Native Label automatically
        //            // If the font is not found then a default font will be selected by the device and used.
        //            if (!result && !string.IsNullOrEmpty(m_pString))
        //            {
        //                tex = CCLabelUtilities.CreateLabelTexture (m_pString,
        //                    CCMacros.CCSizePointsToPixels (m_tDimensions),
        //                    m_hAlignment,
        //                    m_vAlignment,
        //                    m_pFontName,
        //                    m_fFontSize * CCMacros.CCContentScaleFactor (),
        //                    new CCColor4B(Microsoft.Xna.Framework.Color.White) );
        //            }
        //#endif

        Texture = tex;

        // NOTE(review): m_pobTexture appears to be the backing field of the
        // Texture property set just above — confirm against CCSprite.
        CCRect rect = CCRect.Zero;
        rect.Size = m_pobTexture.ContentSize;
        SetTextureRect(rect);
    }
}
}
| |
using System;
using System.Globalization;
/// <summary>
/// Tests for Convert.ToString(System.Int32, System.IFormatProvider).
/// Each positive case checks that Convert.ToString produces the same text as
/// Int32.ToString(IFormatProvider) (or a known literal) for a given provider.
/// </summary>
public class ConvertToString14
{
    /// <summary>
    /// Test entry point; returns 100 on success and 0 on failure
    /// (the test-harness convention).
    /// </summary>
    public static int Main()
    {
        ConvertToString14 testObj = new ConvertToString14();
        TestLibrary.TestFramework.BeginTestCase("for method: Convert.ToString(System.Int32,System.IFormatProvider)");
        if (testObj.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    /// <summary>
    /// Runs every positive test; returns true only when all of them pass.
    /// </summary>
    public bool RunTests()
    {
        bool retVal = true;
        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;
        retVal = PosTest4() && retVal;
        retVal = PosTest5() && retVal;
        retVal = PosTest6() && retVal;
        return retVal;
    }

    #region Positive tests
    public bool PosTest1()
    {
        Int32 intValue = GetInt32(Int32.MinValue, Int32.MaxValue);
        IFormatProvider provider = null;
        return VerifyToString(
            "PosTest1: Verify value is a random Int32 and IFormatProvider is a null reference... ",
            "P001", "001", "002",
            intValue, provider, intValue.ToString(provider), true);
    }

    public bool PosTest2()
    {
        Int32 intValue = GetInt32(Int32.MinValue, Int32.MaxValue);
        IFormatProvider provider = new CultureInfo("en-US");
        return VerifyToString(
            "PosTest2: Verify value is a random Int32 and IFormatProvider is en-US CultureInfo... ",
            "P002", "003", "004",
            intValue, provider, intValue.ToString(provider), true);
    }

    public bool PosTest3()
    {
        Int32 intValue = GetInt32(Int32.MinValue, Int32.MaxValue);
        IFormatProvider provider = new CultureInfo("fr-FR");
        return VerifyToString(
            "PosTest3: Verify value is a random Int32 and IFormatProvider is fr-FR CultureInfo... ",
            "P003", "005", "006",
            intValue, provider, intValue.ToString(provider), true);
    }

    public bool PosTest4()
    {
        // Custom negative sign; the decimal separator is irrelevant for an
        // integer but is kept to match the original scenario.
        NumberFormatInfo numberFormatInfo = new NumberFormatInfo();
        numberFormatInfo.NegativeSign = "minus ";
        numberFormatInfo.NumberDecimalSeparator = " point ";
        // Description fixed: the old text claimed -32465641235, which is not
        // the value actually tested (and does not even fit in an Int32).
        return VerifyToString(
            "PosTest4: Verify value is -465641235 and IFormatProvider is user-defined NumberFormatInfo... ",
            "P004", "007", "008",
            -465641235, numberFormatInfo, "minus 465641235", true);
    }

    public bool PosTest5()
    {
        return VerifyToString(
            "PosTest5: Verify value is Int32.MaxValue and IFormatProvider is fr-FR CultureInfo... ",
            "P005", "009", "010",
            Int32.MaxValue, new CultureInfo("fr-FR"), "2147483647", false);
    }

    public bool PosTest6()
    {
        return VerifyToString(
            "PosTest6: Verify value is Int32.MinValue and IFormatProvider is fr-FR CultureInfo... ",
            "P006", "011", "012",
            Int32.MinValue, new CultureInfo("fr-FR"), "-2147483648", false);
    }
    #endregion

    #region HelpMethod
    /// <summary>
    /// Shared scenario body: calls Convert.ToString(intValue, provider) and
    /// compares the result against the expected string, logging failures and
    /// unexpected exceptions through the test framework.
    /// </summary>
    /// <param name="testDesc">scenario description logged via BeginScenario</param>
    /// <param name="testId">test id included in failure log entries</param>
    /// <param name="failId">log id used for a value mismatch</param>
    /// <param name="exceptionId">log id used for an unexpected exception</param>
    /// <param name="intValue">value under test</param>
    /// <param name="provider">format provider passed to Convert.ToString (may be null)</param>
    /// <param name="expectedValue">expected result string</param>
    /// <param name="logValueOnFailure">append the raw Int32 value to a mismatch message</param>
    private bool VerifyToString(string testDesc, string testId, string failId, string exceptionId,
                                Int32 intValue, IFormatProvider provider, string expectedValue,
                                bool logValueOnFailure)
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario(testDesc);
        try
        {
            String resValue = Convert.ToString(intValue, provider);
            if (expectedValue != resValue)
            {
                // Report actual vs. expected the right way around (the original
                // message had the two labels swapped).
                string errorDesc = "Convert.ToString returned " + resValue + "; expected " + expectedValue;
                if (logValueOnFailure)
                {
                    errorDesc += "\n Int32 value is " + intValue;
                }
                TestLibrary.TestFramework.LogError(failId + " TestId-" + testId, errorDesc);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError(exceptionId, "unexpected exception occurs :" + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>
    /// Returns a pseudo-random Int32 in [minValue, maxValue). Falls back to
    /// minValue when the range is empty or inverted.
    /// </summary>
    private Int32 GetInt32(Int32 minValue, Int32 maxValue)
    {
        if (minValue >= maxValue)
        {
            return minValue;
        }
        // Compute the range in 64-bit arithmetic: the old 32-bit subtraction
        // (maxValue - minValue) wraps to -1 for the full Int32 range, making
        // the modulus always 0 so the "random" value was always minValue.
        long range = (long)maxValue - minValue;
        long offset = TestLibrary.Generator.GetInt64(-55) % range;
        if (offset < 0)
        {
            // Keep the result inside [minValue, maxValue) even when the
            // generator yields a negative value.
            offset += range;
        }
        return (Int32)(minValue + offset);
    }
    #endregion
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Xml;
using System.Globalization;
namespace System.Runtime.Serialization.Json
{
/// <summary>
/// XmlWriterDelegator specialization for JSON output. Primitive writes emit a
/// JSON "type" attribute (number/boolean) before delegating to the base class,
/// and DateTime values are encoded either with a caller-supplied
/// DateTimeFormat or in the default "/Date(ms-since-epoch[offset])/" form.
/// </summary>
internal class JsonWriterDelegator : XmlWriterDelegator
{
    // Optional custom DateTime format; null selects the default encoding
    // (see WriteDateTimeInDefaultFormat).
    private DateTimeFormat _dateTimeFormat;

    public JsonWriterDelegator(XmlWriter writer)
        : base(writer)
    {
    }

    public JsonWriterDelegator(XmlWriter writer, DateTimeFormat dateTimeFormat)
        : this(writer)
    {
        _dateTimeFormat = dateTimeFormat;
    }

    // Chars are written as their string representation (JSON has no char type).
    internal override void WriteChar(char value)
    {
        WriteString(XmlConvert.ToString(value));
    }

    // A null array produces no output at all.
    internal override void WriteBase64(byte[] bytes)
    {
        if (bytes == null)
        {
            return;
        }

        ByteArrayHelperWithString.Instance.WriteArray(Writer, bytes, 0, bytes.Length);
    }

    // Qualified names are emitted as name + separator + namespace; the empty
    // QName produces no output.
    internal override void WriteQName(XmlQualifiedName value)
    {
        if (value != XmlQualifiedName.Empty)
        {
            writer.WriteString(value.Name);
            writer.WriteString(JsonGlobals.NameValueSeparatorString);
            writer.WriteString(value.Namespace);
        }
    }

    // ulong is routed through decimal so the full unsigned range survives.
    internal override void WriteUnsignedLong(ulong value)
    {
        WriteDecimal((decimal)value);
    }

    // Each numeric/boolean override below tags the current element with the
    // JSON "type" attribute before writing the value through the base class.
    internal override void WriteDecimal(decimal value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteDecimal(value);
    }

    internal override void WriteDouble(double value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteDouble(value);
    }

    internal override void WriteFloat(float value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteFloat(value);
    }

    internal override void WriteLong(long value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteLong(value);
    }

    internal override void WriteSignedByte(sbyte value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteSignedByte(value);
    }

    internal override void WriteUnsignedInt(uint value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteUnsignedInt(value);
    }

    internal override void WriteUnsignedShort(ushort value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteUnsignedShort(value);
    }

    internal override void WriteUnsignedByte(byte value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteUnsignedByte(value);
    }

    internal override void WriteShort(short value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteShort(value);
    }

    internal override void WriteBoolean(bool value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.booleanString);
        base.WriteBoolean(value);
    }

    internal override void WriteInt(int value)
    {
        writer.WriteAttributeString(JsonGlobals.typeString, JsonGlobals.numberString);
        base.WriteInt(value);
    }

    // The WriteJson*Array helpers write each element as its own item element
    // via the corresponding three-argument primitive overload.
    internal void WriteJsonBooleanArray(bool[] value, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
    {
        for (int i = 0; i < value.Length; i++)
        {
            WriteBoolean(value[i], itemName, itemNamespace);
        }
    }

    internal void WriteJsonDateTimeArray(DateTime[] value, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
    {
        for (int i = 0; i < value.Length; i++)
        {
            WriteDateTime(value[i], itemName, itemNamespace);
        }
    }

    internal void WriteJsonDecimalArray(decimal[] value, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
    {
        for (int i = 0; i < value.Length; i++)
        {
            WriteDecimal(value[i], itemName, itemNamespace);
        }
    }

    internal void WriteJsonInt32Array(int[] value, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
    {
        for (int i = 0; i < value.Length; i++)
        {
            WriteInt(value[i], itemName, itemNamespace);
        }
    }

    internal void WriteJsonInt64Array(long[] value, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
    {
        for (int i = 0; i < value.Length; i++)
        {
            WriteLong(value[i], itemName, itemNamespace);
        }
    }

    internal override void WriteDateTime(DateTime value)
    {
        if (_dateTimeFormat == null)
        {
            WriteDateTimeInDefaultFormat(value);
        }
        else
        {
            // Custom format supplied by the serializer settings.
            writer.WriteString(value.ToString(_dateTimeFormat.FormatString, _dateTimeFormat.FormatProvider));
        }
    }

    // Emits the default JSON date form: start guard, milliseconds since the
    // Unix epoch (UTC), an optional "+hhmm"/"-hhmm" local offset for
    // non-UTC kinds, and the end guard.
    private void WriteDateTimeInDefaultFormat(DateTime value)
    {
        // ToUniversalTime() truncates dates to DateTime.MaxValue or DateTime.MinValue instead of throwing
        // This will break round-tripping of these dates (see
        if (value.Kind != DateTimeKind.Utc)
        {
            // Reject values whose UTC projection falls outside DateTime's range
            // before ToUniversalTime silently clamps them.
            //long tickCount = value.Ticks - TimeZone.CurrentTimeZone.GetUtcOffset(value).Ticks;
            long tickCount = value.Ticks - TimeZoneInfo.Local.GetUtcOffset(value).Ticks;
            if ((tickCount > DateTime.MaxValue.Ticks) || (tickCount < DateTime.MinValue.Ticks))
            {
                throw XmlObjectSerializer.CreateSerializationException(SR.JsonDateTimeOutOfRange, new ArgumentOutOfRangeException(nameof(value)));
            }
        }

        writer.WriteString(JsonGlobals.DateTimeStartGuardReader);
        // 10000 ticks per millisecond.
        writer.WriteValue((value.ToUniversalTime().Ticks - JsonGlobals.unixEpochTicks) / 10000);

        switch (value.Kind)
        {
            case DateTimeKind.Unspecified:
            case DateTimeKind.Local:
                // Append the local UTC offset in "+hhmm"/"-hhmm" form.
                // +"zzzz";
                //TimeSpan ts = TimeZone.CurrentTimeZone.GetUtcOffset(value.ToLocalTime());
                TimeSpan ts = TimeZoneInfo.Local.GetUtcOffset(value.ToLocalTime());
                if (ts.Ticks < 0)
                {
                    writer.WriteString("-");
                }
                else
                {
                    writer.WriteString("+");
                }
                int hours = Math.Abs(ts.Hours);
                writer.WriteString((hours < 10) ? "0" + hours : hours.ToString(CultureInfo.InvariantCulture));
                int minutes = Math.Abs(ts.Minutes);
                writer.WriteString((minutes < 10) ? "0" + minutes : minutes.ToString(CultureInfo.InvariantCulture));
                break;
            case DateTimeKind.Utc:
                // UTC values carry no offset suffix.
                break;
        }

        writer.WriteString(JsonGlobals.DateTimeEndGuardReader);
    }

    internal void WriteJsonSingleArray(float[] value, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
    {
        for (int i = 0; i < value.Length; i++)
        {
            WriteFloat(value[i], itemName, itemNamespace);
        }
    }

    internal void WriteJsonDoubleArray(double[] value, XmlDictionaryString itemName, XmlDictionaryString itemNamespace)
    {
        for (int i = 0; i < value.Length; i++)
        {
            WriteDouble(value[i], itemName, itemNamespace);
        }
    }

    // An empty (non-null) local name is rewritten as an "item" element carrying
    // an "item" attribute with the original (empty) name.
    internal override void WriteStartElement(string prefix, string localName, string ns)
    {
        if (localName != null && localName.Length == 0)
        {
            base.WriteStartElement(JsonGlobals.itemString, JsonGlobals.itemString);
            base.WriteAttributeString(null, JsonGlobals.itemString, null, localName);
        }
        else
        {
            base.WriteStartElement(prefix, localName, ns);
        }
    }
}
}
| |
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
// <summary>
// Builds a path-planning graph from the scene's baked NavMesh triangulation.
// Nodes are placed at each triangle's centroid and at three points along every
// triangle side (the midpoint plus the two quarter points), and neighbor links
// are created between nodes that share a triangle. Item pickups from ItemsAI
// are inserted as extra nodes linked into the triangle that contains them.
// </summary>
public class GenerateGraph {
    private List<Node> nodes; // all side/centroid nodes (item nodes are linked in but not stored here)
    public Node endNode;      // node closest to the "FinishLine" object's position

    public GenerateGraph() {
        //get nav mesh characteristics from pre-made nav mesh. Will write script later that generates
        //a nav-mesh for any map.
        NavMeshTriangulation navmesh = NavMesh.CalculateTriangulation();
        //initialize triangles array
        Triangle[] meshTriangles = new Triangle[navmesh.indices.Length/3];
        //will contain mapping from a string containing the Vector3 pair side and the lane type of a node(ex: (1,2,3) and (4,5,6)
        //in "middle" will be represented as "1,2,3 - 4,5,6 middle" with the smaller Vector3 coming first) to the node on that
        //side with that lane type.
        Dictionary<string, Node> sideToNode = new Dictionary<string, Node>();
        //will contain mapping from a Node to the list of Triangles that contain that Node on a side
        Dictionary<Node, List<Triangle>> nodeToTriangles = new Dictionary<Node, List<Triangle>>();
        nodes = new List<Node>();
        //will contain a mapping from Vector3 coordinates (ex: (1,2,3) will be represented as "1,2,3") to
        //a Node
        Dictionary<string, Node> coordinatesToNode = new Dictionary<string, Node>();
        //Made sure nav mesh indices is a multiple of 3
        for (int i = 0; i < navmesh.indices.Length / 3; i++) {
            Vector3[] currentVectors = new Vector3[3];
            Vector3 v1 = navmesh.vertices[navmesh.indices[i*3]];
            Vector3 v2 = navmesh.vertices[navmesh.indices[i*3 + 1]];
            Vector3 v3 = navmesh.vertices[navmesh.indices[i*3 + 2]];
            meshTriangles[i] = new Triangle(v1, v2, v3, NavMesh.GetAreaCost(navmesh.areas[i]));
            List<Vector3Pair> trianglePairs = new List<Vector3Pair>();
            //Add the pair v1, v2 to trianglePairs
            trianglePairs.Add(new Vector3Pair(v1, v2));
            //Add the pair v2, v3 trianglePairs
            trianglePairs.Add(new Vector3Pair(v2, v3));
            //Add the pair v1, v3
            trianglePairs.Add(new Vector3Pair(v1, v3));
            //Calculate bisections. Needed to generate smoother paths
            foreach (Vector3Pair currentVector3Pair in trianglePairs) {
                Vector3 currentFirst = currentVector3Pair.first;
                Vector3 currentSecond = currentVector3Pair.second;
                // bisect1 is the midpoint of the side; bisect2 and bisect3 are the
                // quarter points nearer currentFirst and currentSecond respectively.
                Vector3 bisect1 = new Vector3((currentFirst.x + currentSecond.x)/2, (currentFirst.y + currentSecond.y)/2,
                                              (currentFirst.z + currentSecond.z)/2);
                Vector3 bisect2 = new Vector3((bisect1.x + currentFirst.x)/2, (bisect1.y + currentFirst.y)/2,
                                              (bisect1.z + currentFirst.z)/2);
                Vector3 bisect3 = new Vector3((bisect1.x + currentSecond.x)/2, (bisect1.y + currentSecond.y)/2,
                                              (bisect1.z + currentSecond.z)/2);
                Node bisect1Node = getNodeWithVectorCoordinates(ref coordinatesToNode, bisect1);
                Node bisect2Node = getNodeWithVectorCoordinates(ref coordinatesToNode, bisect2);
                Node bisect3Node = getNodeWithVectorCoordinates(ref coordinatesToNode, bisect3);
                AddToDictionary(ref nodeToTriangles, bisect1Node, meshTriangles[i]);
                AddToDictionary(ref nodeToTriangles, bisect2Node, meshTriangles[i]);
                AddToDictionary(ref nodeToTriangles, bisect3Node, meshTriangles[i]);
                sideToNode[GetPairString(currentFirst, currentSecond) + " middle"] = bisect1Node;
                sideToNode[GetPairString(currentFirst, currentSecond) + " outer1"] = bisect2Node;
                sideToNode[GetPairString(currentFirst, currentSecond) + " outer2"] = bisect3Node;
            }
            // The centroid is registered as a degenerate "side" keyed by the
            // centroid paired with itself, lane "middle".
            Vector3 currentCentroid = meshTriangles[i].Centroid ();
            Node centroidNode = getNodeWithVectorCoordinates(ref coordinatesToNode, currentCentroid);
            AddToDictionary(ref nodeToTriangles, centroidNode, meshTriangles[i]);
            sideToNode[GetPairString (currentCentroid, currentCentroid) + " middle"] = centroidNode;
        }
        //create list of item nodes
        List<Node> itemNodes = new List<Node>();
        foreach (GameObject item in ItemsAI.itemList) {
            Node currentItemNode = new Node (ItemsAI.objectToPosition [item]);
            itemNodes.Add(currentItemNode);
            PathPlanningDataStructures.nodeToCount[currentItemNode.position] = 0;
        }
        //set neighbors of each node
        foreach (var item in nodeToTriangles) {
            Node currentNode = item.Key;
            //iterate through all triangles that contain the currentNode on a side
            foreach (Triangle t in item.Value) {
                //centroid of the triangle
                Vector3 currentCentroid = t.Centroid();
                //list of sides of the triangle
                List<Vector3Pair> triangleSides = new List<Vector3Pair>();
                triangleSides.Add(new Vector3Pair(t.vertex1, t.vertex2));
                triangleSides.Add(new Vector3Pair(t.vertex2, t.vertex3));
                triangleSides.Add(new Vector3Pair(t.vertex1, t.vertex3));
                //iterate through each item node to check if it is contained within the current triangle; if so,
                //make the centroid of the triangle, currentNode, and the item neighbors of each other
                //bool[] nodeInTriangle = new bool[itemNodes.Count];
                for(int i = 0; i < itemNodes.Count; i++) {
                    Node currentItemNode = itemNodes [i];
                    if (t.PointInTriangle (currentItemNode.position)) {
                        addNodeNeighbor(sideToNode, ref currentItemNode, currentCentroid, currentCentroid, "middle");
                        itemNodes[i].neighbors.Add (currentNode);
                        currentNode.neighbors.Add (itemNodes [i]);
                    }
                }
                // Link currentNode (and any contained item nodes) to all three
                // lane nodes of every side of this triangle.
                foreach (Vector3Pair triangleSide in triangleSides) {
                    Vector3 currentFirst = triangleSide.first;
                    Vector3 currentSecond = triangleSide.second;
                    addNodeNeighbor(sideToNode, ref currentNode, currentFirst, currentSecond, "middle");
                    addNodeNeighbor(sideToNode, ref currentNode, currentFirst, currentSecond, "outer1");
                    addNodeNeighbor(sideToNode, ref currentNode, currentFirst, currentSecond, "outer2");
                    for (int i = 0; i < itemNodes.Count; i++) {
                        Node currentItemNode = itemNodes [i];
                        if (t.PointInTriangle (currentItemNode.position)) {
                            addNodeNeighbor(sideToNode, ref currentItemNode, currentFirst, currentSecond, "middle");
                            addNodeNeighbor(sideToNode, ref currentItemNode, currentFirst, currentSecond, "outer1");
                            addNodeNeighbor(sideToNode, ref currentItemNode, currentFirst, currentSecond, "outer2");
                        }
                    }
                }
                // Also link currentNode to the triangle's centroid node.
                addNodeNeighbor(sideToNode, ref currentNode, currentCentroid, currentCentroid, "middle");
            }
            nodes.Add(currentNode);
            PathPlanningDataStructures.nodeToCount[currentNode.position] = 0;
        }
        //set end node of the cars
        endNode = getClosestNode (GameObject.Find("FinishLine").transform.position);
    }

    // <summary>
    // Returns the node that corresponds to the coordinates of the given Vector3. Checks
    // for the node corresponding to the key constructed from the coordinates in the
    // coordinatesToNode dictionary; if not in the dictionary, creates the node and adds
    // it to the dictionary.
    // </summary>
    // <param name="coordinatesToNode">
    // dictionary containing mappings from the coordinates of a Vector3 to the corresponding
    // node
    // </param>
    // <param name="givenVector"> a Vector3</param>
    public Node getNodeWithVectorCoordinates(ref Dictionary<string, Node> coordinatesToNode,
                                             Vector3 givenVector) {
        string vectorKey = givenVector.x + "," + givenVector.y + "," + givenVector.z;
        Node nodeOfVector;
        if (coordinatesToNode.ContainsKey(vectorKey)) {
            nodeOfVector = coordinatesToNode[vectorKey];
        } else {
            nodeOfVector = new Node(givenVector);
            coordinatesToNode.Add(vectorKey, nodeOfVector);
        }
        return nodeOfVector;
    }

    // <summary>
    // Given a Vector3 pos, returns the Node in the list of Nodes that is closest to it
    // (by Euclidean distance). Returns null only if the graph has no nodes.
    // </summary>
    // <param name="pos"> a Vector3 </param>
    public Node getClosestNode(Vector3 pos) {
        float minimumDistance = Mathf.Infinity;
        Node closestNode = null;
        foreach (Node node in nodes) {
            float distance = Vector3.Distance(node.position, pos);
            if (distance < minimumDistance) {
                closestNode = node;
                minimumDistance = distance;
            }
        }
        return closestNode;
    }

    // <summary>
    // Adds a neighbor to a given Node. The value of the neighbor is constructed from the
    // sideToNode dictionary that is passed in; the dictionary requires a key specified
    // by two Vector3 points and a laneName. Does nothing when the key is absent, and
    // never links a node to itself.
    // </summary>
    // <param name="sideToNode">
    // Maps a key specified by two Vector3 points (to specify a side) and a
    // laneName ("middle", "outer1", or "outer2") to a Node
    // </param>
    // <param name="givenNode"> a Node to add a neighbor to </param>
    // <param name="first"> the first Vector3 point </param>
    // <param name="second"> the second Vector3 point </param>
    // <param name="laneName"> the lane name ("middle", "outer1", or "outer2") </param>
    public void addNodeNeighbor(Dictionary<string, Node> sideToNode, ref Node givenNode,
                                Vector3 first, Vector3 second, string laneName) {
        if (sideToNode.ContainsKey(GetPairString (first, second) + " " + laneName)) {
            Node neighbor = sideToNode[GetPairString (first, second) + " " + laneName];
            if (neighbor != givenNode) {
                givenNode.neighbors.Add (neighbor);
            }
        }
    }

    // <summary>
    // Given a dictionary that maps a Node to the list of Triangles that contain
    // that Node on a side, add the value to the list of Triangles that the key
    // currently maps to (creating a new single-element list for a new key).
    // </summary>
    // <param name="dict">
    // the dictionary that maps a Node to the list of Triangles that contain
    // that Node on a side
    // </param>
    // <param name="key"> a Node </param>
    // <param name="value"> a Triangle to add to the list of Triangles that the key currently maps to </param>
    public void AddToDictionary(ref Dictionary<Node, List<Triangle>> dict, Node key, Triangle value) {
        if (dict.ContainsKey (key)) {
            List<Triangle> currentNodes = dict[key];
            currentNodes.Add(value);
            dict[key] = currentNodes;
        } else {
            List<Triangle> newNodes = new List<Triangle>();
            newNodes.Add(value);
            dict.Add(key, newNodes);
        }
    }

    // <summary>
    // Given two Vector3 objects, creates a string representation of that pair with the
    // smaller Vector3 object coming first (ex: (1,2,3) and (4,5,6) will be represented
    // as "1,2,3 - 4,5,6" with the smaller Vector3 coming first). "Smaller" is decided
    // by lexicographic comparison of the x, y, z components, so the key is identical
    // for both orderings of the same side.
    // </summary>
    // <param name="v1"> the first given Vector3 </param>
    // <param name="v2"> the second given Vector3 </param>
    public string GetPairString(Vector3 v1, Vector3 v2) {
        float[] v1Components = new float[3];
        v1Components[0] = v1.x;
        v1Components[1] = v1.y;
        v1Components[2] = v1.z;
        float[] v2Components = new float[3];
        v2Components[0] = v2.x;
        v2Components[1] = v2.y;
        v2Components[2] = v2.z;
        Vector3 first = v1;
        Vector3 second = v2;
        for (int i = 0; i < 3; i++) {
            if (v1Components[i] > v2Components[i]) {
                Vector3 temp = second;
                second = first;
                first = temp;
                break;
            } else if(v1Components[i] < v2Components[i]) {
                break;
            }
        }
        return first.x + "," + first.y + "," + first.z + " - " + second.x + "," +
               second.y + "," + second.z;
    }

    // <summary>
    // Number of nodes in the graph (item nodes excluded).
    // </summary>
    public int Size() {
        return nodes.Count;
    }

    // <summary>
    // Returns a string representation of the positions of all the nodes
    // </summary>
    public override string ToString() {
        string return_string = "";
        foreach (Node node in nodes) {
            return_string += "\n" + (node.position.ToString());
        }
        return return_string;
    }

    // <summary>
    // Returns a string listing, for every node (by its index in the node list),
    // the indices of its neighbors.
    // </summary>
    public string ToStringWithNeighbors() {
        string return_string = "";
        Dictionary<Node, int> pairToNodes =
            new Dictionary<Node, int>();
        for (int i = 0; i < nodes.Count; i++) {
            pairToNodes.Add (nodes[i], i);
        }
        for (int i = 0; i < nodes.Count; i++) {
            return_string += "\n" + "Node: " + i + " has neighbors ";
            for (int j = 0; j < nodes[i].neighbors.Count; j++) {
                return_string += pairToNodes[nodes[i].neighbors[j]] + ", ";
            }
        }
        return return_string;
    }

    // <summary>
    // A simple ordered pair of Vector3 values (a triangle side or bisection pair).
    // </summary>
    public class Vector3Pair {
        public Vector3 first;  //represents the first Vector3 in the Vector3Pair
        public Vector3 second; //represents the second Vector3 in the Vector3Pair

        public Vector3Pair(Vector3 first, Vector3 second) {
            this.first = first;
            this.second = second;
        }
    }
}
| |
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
namespace Microsoft.Zelig.Debugger.ArmProcessor
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.IO;
using System.Windows.Forms;
using System.Threading;
using IR = Microsoft.Zelig.CodeGeneration.IR;
using RT = Microsoft.Zelig.Runtime;
using TS = Microsoft.Zelig.Runtime.TypeSystem;
using Cfg = Microsoft.Zelig.Configuration.Environment;
public partial class SessionManagerForm : Form
{
const string c_registry_RootKey = "SessionManager";
const string c_registry_LastSession = "LastSession";
const string c_file_Sessions = "Sessions";
const string c_file_Extension = "zeligsession";
const string c_file_SearchPattern = "*." + c_file_Extension;
//
// State
//
DebuggerMainForm m_owner;
List< Session > m_sessions;
Session m_defaultSession;
Session m_selectedSession;
//
// Constructor Methods
//
// Creates the session-manager dialog for the given debugger main form and
// populates the session list from the per-user session store.
public SessionManagerForm( DebuggerMainForm owner )
{
    m_owner = owner;

    // Build the designer-generated UI before any session data is shown.
    InitializeComponent();

    //--//

    m_sessions = new List< Session >();

    LoadSessions();
}
//
// Helper Methods
//
// Per-user data folder that holds the persisted session files.
private DirectoryInfo GetDataPath()
{
    string sessionsDirectory = Path.Combine( Application.UserAppDataPath, c_file_Sessions );

    return new DirectoryInfo( sessionsDirectory );
}
/// <summary>
/// Persists session state: records the default session's id in the per-user
/// registry key, then saves every dirty, non-temporary session — prompting
/// before overwriting a session that already has a settings file, and writing
/// id-named files into the per-user data folder otherwise. Save failures are
/// deliberately swallowed (saving is best-effort).
/// </summary>
public void SaveSessions()
{
    Win32.RegistryKey rootKey = Application.UserAppDataRegistry;
    Win32.RegistryKey nodeKey = rootKey.CreateSubKey( c_registry_RootKey );
    if(nodeKey != null)
    {
        if(m_defaultSession != null && m_defaultSession.IsTemporary == false)
        {
            nodeKey.SetValue( c_registry_LastSession, m_defaultSession.Id.ToString(), Microsoft.Win32.RegistryValueKind.String );
        }

        nodeKey.Close();
    }

    var di = GetDataPath();
    if(!di.Exists)
    {
        di.Create();
    }

    foreach(Session session in m_sessions)
    {
        if(session.IsTemporary == false)
        {
            if(session.Dirty)
            {
                if(session.SettingsFile != null)
                {
                    // The session came from an explicit file: ask before overwriting it.
                    if(MessageBox.Show( string.Format( "Save session '{0}'?", session.SettingsFile ), "Workplace", MessageBoxButtons.YesNo ) == DialogResult.Yes)
                    {
                        try
                        {
                            session.Save( session.SettingsFile, false );
                        }
                        catch
                        {
                            // Best-effort: ignore save failures.
                        }
                    }
                }
                else
                {
                    // No explicit file: store under the per-user data folder, named by session id.
                    try
                    {
                        string file = Path.Combine( di.FullName, string.Format( "{0}.{1}", session.Id, c_file_Extension ) );

                        session.Save( file, false );
                    }
                    catch
                    {
                        // Best-effort: ignore save failures.
                    }
                }
            }
        }
    }
}
private void LoadSessions()
{
string defaultSessionId = null;
Win32.RegistryKey rootKey = Application.UserAppDataRegistry;
Win32.RegistryKey nodeKey = rootKey.OpenSubKey( c_registry_RootKey );
if(nodeKey != null)
{
defaultSessionId = nodeKey.GetValue( c_registry_LastSession, null, Microsoft.Win32.RegistryValueOptions.DoNotExpandEnvironmentNames ) as string;
nodeKey.Close();
}
var di = GetDataPath();
if(di.Exists)
{
foreach(var fi in di.GetFiles( c_file_SearchPattern ))
{
try
{
AddSession( Session.Load( fi.FullName ), defaultSessionId );
}
catch
{
}
}
}
}
public Session LoadSession( string file )
{
Session session = Session.LoadAndSetOrigin( file );
InsertUniqueSession( session );
return session;
}
public void SelectSession( Session session )
{
m_selectedSession = session;
UpdateList();
}
public Session SelectSession( bool fSetDefault )
{
if(m_sessions.Count > 0)
{
if(fSetDefault)
{
SelectSession( m_defaultSession );
}
if(this.ShowDialog() != DialogResult.OK)
{
return null;
}
}
else
{
Session session = new Session();
InputForm form = new InputForm( "Enter Name For New Session", session.DisplayName );
if(form.ShowDialog() == DialogResult.OK)
{
session.DisplayName = form.Result;
if(m_owner.Action_EditConfiguration( session ) == DialogResult.OK)
{
InsertUniqueSession( session );
SelectSession( session );
}
}
}
m_defaultSession = m_selectedSession;
return m_defaultSession;
}
public Session FindSession( string name )
{
foreach(Session session in m_sessions)
{
if(session.DisplayName == name)
{
return session;
}
}
return null;
}
public Session LoadSession( string file ,
bool fTemporary )
{
Session session = Session.LoadAndSetOrigin( file );
session.IsTemporary = fTemporary;
InsertUniqueSession( session );
return session;
}
//--//
private void AddSession( Session session ,
string defaultSessionId )
{
InsertUniqueSession( session );
if(session.Id.ToString() == defaultSessionId)
{
m_defaultSession = session;
}
}
private void InsertUniqueSession( Session newSession )
{
for(int i = 0; i < m_sessions.Count; i++)
{
Session session = m_sessions[i];
if(session.Id == newSession.Id)
{
m_sessions[i] = newSession;
return;
}
}
m_sessions.Add( newSession );
}
private void UpdateList()
{
ListView.ListViewItemCollection items = listView1.Items;
ListViewItem itemToSelect = null;
items.Clear();
foreach(Session session in m_sessions)
{
string name = session.DisplayName;
if(string.IsNullOrEmpty( name ))
{
name = "<unnamed>";
}
var item = new ListViewItem( name );
item.Tag = session;
item.SubItems.Add( session.SelectedEngine .ToString() );
item.SubItems.Add( session.SelectedProduct.ToString() );
item.SubItems.Add( session.LastModified .ToString() );
item.SubItems.Add( session.ImageToLoad );
items.Add( item );
if(m_selectedSession == session)
{
itemToSelect = item;
}
}
if(itemToSelect != null)
{
itemToSelect.Selected = true;
listView1.Focus();
}
UpdateButtons();
}
private void UpdateButtons()
{
bool fEnable = (m_selectedSession != null);
buttonSelect.Enabled = fEnable;
buttonClone .Enabled = fEnable;
buttonRemove.Enabled = fEnable;
buttonSave .Enabled = fEnable;
}
//
// Access Methods
//
//
// Event Methods
//
private void SessionManagerForm_Load( object sender ,
EventArgs e )
{
UpdateList();
}
private void buttonSelect_Click( object sender ,
EventArgs e )
{
this.DialogResult = DialogResult.OK;
this.Close();
}
private void buttonNew_Click( object sender ,
EventArgs e )
{
Session session = new Session();
InputForm form = new InputForm( "Enter Name For Session", session.DisplayName );
if(form.ShowDialog() == DialogResult.OK)
{
session.DisplayName = form.Result;
if(m_owner.Action_EditConfiguration( session ) == DialogResult.OK)
{
InsertUniqueSession( session );
SelectSession( session );
}
}
}
private void buttonRename_Click( object sender ,
EventArgs e )
{
InputForm form = new InputForm( "Enter Name For Session", m_selectedSession.DisplayName );
if(form.ShowDialog() == DialogResult.OK)
{
m_selectedSession.DisplayName = form.Result;
UpdateList();
}
}
private void buttonClone_Click( object sender ,
EventArgs e )
{
Session session = new Session( m_selectedSession );
InsertUniqueSession( session );
SelectSession( session );
}
private void buttonRemove_Click( object sender ,
EventArgs e )
{
var session = m_selectedSession;
if(session != null)
{
if(session.SettingsFile != null)
{
if(MessageBox.Show( string.Format( "Delete session file '{0}'?", session.SettingsFile ), "Workplace", MessageBoxButtons.YesNo ) == DialogResult.Yes)
{
try
{
File.Delete( session.SettingsFile );
}
catch
{
}
}
}
try
{
var di = GetDataPath();
if(di.Exists)
{
string file = Path.Combine( di.FullName, string.Format( "{0}.{1}", session.Id, c_file_Extension ) );
File.Delete( file );
}
}
catch
{
}
//--//
m_sessions.Remove( session );
m_selectedSession = null;
}
UpdateList();
}
private void buttonSave_Click( object sender ,
EventArgs e )
{
if(m_selectedSession.Dirty)
{
string file = m_owner.Action_SelectSessionToSave( m_selectedSession.SettingsFile );
if(file != null)
{
m_selectedSession.Save( file, false );
UpdateList();
}
}
}
private void buttonExport_Click( object sender, EventArgs e )
{
if(m_selectedSession != null)
{
string file = m_owner.Action_SelectSessionToSave( null );
if(file != null)
{
m_selectedSession.Save( file, false );
UpdateList();
}
}
}
private void buttonBrowse_Click( object sender ,
EventArgs e )
{
string file = m_owner.Action_SelectSessionToLoad( null );
if(file != null)
{
SelectSession( LoadSession( file ) );
}
}
private void buttonCancel_Click( object sender ,
EventArgs e )
{
this.DialogResult = DialogResult.Cancel;
this.Close();
}
//--//
private void listView1_ItemSelectionChanged( object sender ,
ListViewItemSelectionChangedEventArgs e )
{
if(e.IsSelected)
{
m_selectedSession = (Session)e.Item.Tag;
}
else
{
m_selectedSession = null;
}
UpdateButtons();
}
private void listView1_DoubleClick( object sender ,
EventArgs e )
{
if(m_selectedSession != null)
{
this.DialogResult = DialogResult.OK;
this.Close();
}
}
}
}
| |
// Graph Engine
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
using System;
using System.Collections.Generic;
using System.Net;
using System.Text;
using Trinity;
using Trinity.Network.Messaging;
using Trinity.TSL.Lib;
namespace Trinity.Storage
{
/// <summary>
/// Represents an abstract storage class. It defines a set of cell accessing and manipulation interfaces.
/// </summary>
public unsafe abstract class Storage : IDisposable
{
// Ensure the Trinity configuration is loaded before any storage instance is used.
static Storage()
{
TrinityConfig.LoadTrinityConfig();
}
#region Key-value Store interfaces
/// <summary>
/// Determines whether there is a cell with the specified cell Id in Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <returns>true if a cell whose Id is cellId is found; otherwise, false.</returns>
public abstract bool Contains(long cellId);
/// <summary>
/// Gets the type of the cell with specified cell Id.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="cellType">The type of the cell specified by cellId.</param>
/// <returns>A Trinity error code. Possible values are E_SUCCESS and E_NOT_FOUND.</returns>
public abstract TrinityErrorCode GetCellType(long cellId, out ushort cellType);
/// <summary>
/// Adds a new cell to the Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="size">The size of the cell.</param>
/// <param name="cellType">Indicates the cell type.</param>
/// <returns>A Trinity error code: E_SUCCESS if adding succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode AddCell(long cellId, byte* buff, int size, ushort cellType);
/// <summary>
/// Adds a new cell to the Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="size">The size of the cell.</param>
/// <returns>A Trinity error code: E_SUCCESS if adding succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode AddCell(long cellId, byte* buff, int size);
/// <summary>
/// Adds a new cell to the Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <returns>A Trinity error code: E_SUCCESS if adding succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode AddCell(long cellId, byte[] buff);
/// <summary>
/// Adds a new cell to the Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="offset">The byte offset into the buff.</param>
/// <param name="size">The size of the cell.</param>
/// <returns>A Trinity error code: E_SUCCESS if adding succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode AddCell(long cellId, byte[] buff, int offset, int size);
/// <summary>
/// Adds a new cell to the Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="offset">The byte offset into the buff.</param>
/// <param name="size">The size of the cell.</param>
/// <param name="cellType">Indicates the cell type.</param>
/// <returns>A Trinity error code: E_SUCCESS if adding succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode AddCell(long cellId, byte[] buff, int offset, int size, ushort cellType);
/// <summary>
/// Updates an existing cell in the Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="size">The size of the cell.</param>
/// <returns>A Trinity error code: E_SUCCESS if updating succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode UpdateCell(long cellId, byte* buff, int size);
/// <summary>
/// Updates an existing cell in the Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <returns>A Trinity error code: E_SUCCESS if updating succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode UpdateCell(long cellId, byte[] buff);
/// <summary>
/// Updates an existing cell in the Trinity key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="offset">The byte offset into the buff.</param>
/// <param name="size">The size of the cell.</param>
/// <returns>A Trinity error code: E_SUCCESS if updating succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode UpdateCell(long cellId, byte[] buff, int offset, int size);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="size">The size of the cell.</param>
/// <returns>A Trinity error code: E_SUCCESS if saving succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode SaveCell(long cellId, byte* buff, int size);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// </summary>
/// <param name="cell_id">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <returns>A Trinity error code: E_SUCCESS if saving succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode SaveCell(long cell_id, byte[] buff);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// </summary>
/// <param name="cell_id">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="offset">The byte offset into the buff.</param>
/// <param name="size">The size of the cell.</param>
/// <returns>A Trinity error code: E_SUCCESS if saving succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode SaveCell(long cell_id, byte[] buff, int offset, int size);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="offset">The byte offset into the buff.</param>
/// <param name="size">The size of the cell.</param>
/// <param name="cellType">Indicates the cell type.</param>
/// <returns>A Trinity error code: E_SUCCESS if saving succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode SaveCell(long cellId, byte[] buff, int offset, int size, ushort cellType);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="buff">A memory buffer that contains the cell content.</param>
/// <param name="size">The size of the cell.</param>
/// <param name="cellType">Indicates the cell type.</param>
/// <returns>A Trinity error code: E_SUCCESS if saving succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode SaveCell(long cellId, byte* buff, int size, ushort cellType);
/// <summary>
/// Loads the bytes of the cell with the specified cell Id.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="cellBuff">The bytes of the cell. An empty byte array is returned if the cell is not found.</param>
/// <returns>A Trinity error code: E_SUCCESS if loading succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode LoadCell(long cellId, out byte[] cellBuff);
/// <summary>
/// Loads the bytes of the cell with the specified cell Id.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <param name="cellBuff">The bytes of the cell. An empty byte array is returned if the cell is not found.</param>
/// <param name="cellType">The type of the cell, represented with a 16-bit unsigned integer.</param>
/// <returns>A Trinity error code: E_SUCCESS if loading succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode LoadCell(long cellId, out byte[] cellBuff, out ushort cellType);
/// <summary>
/// Removes the cell with the specified cell Id from the key-value store.
/// </summary>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <returns>A Trinity error code: E_SUCCESS if removing succeeds; otherwise, an error code.</returns>
public abstract TrinityErrorCode RemoveCell(long cellId);
#endregion
#region Message Sending Interfaces
// One-way / round-trip message sends backed by a preassembled TrinityMessage.
internal abstract void SendMessage(TrinityMessage message);
internal abstract void SendMessage(TrinityMessage message, out TrinityResponse response);
/*------------------------------------------------------------------------------*/
// One-way / round-trip message sends from a raw buffer of the given size.
internal abstract void SendMessage(byte* message, int size);
internal abstract void SendMessage(byte* message, int size, out TrinityResponse response);
/*------------------------------------------------------------------------------*/
// One-way / round-trip gathered sends: 'count' buffers with per-buffer sizes.
internal abstract void SendMessage(byte** message, int* sizes, int count);
internal abstract void SendMessage(byte** message, int* sizes, int count, out TrinityResponse response);
#endregion
/// <summary>
/// Releases the resources used by the current storage instance.
/// </summary>
public abstract void Dispose();
}
/// <summary>
/// Exposes methods for generic cell manipulation.
/// </summary>
public interface IGenericCellOperations
{
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// Note that the generic cell will be saved as a strongly typed cell. It can then be loaded into either a strongly-typed cell or a generic cell.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <param name="cell">The cell to be saved.</param>
void SaveGenericCell(LocalMemoryStorage storage, ICell cell);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// Note that the generic cell will be saved as a strongly typed cell. It can then be loaded into either a strongly-typed cell or a generic cell.
/// The <paramref name="cellId"/> overrides the cell id in <paramref name="cell"/>.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <param name="cellId">A 64-bit cell id.</param>
/// <param name="cell">The cell to be saved.</param>
void SaveGenericCell(LocalMemoryStorage storage, long cellId, ICell cell);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// Note that the generic cell will be saved as a strongly typed cell. It can then be loaded into either a strongly-typed cell or a generic cell.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
/// <param name="cell">The cell to be saved.</param>
void SaveGenericCell(LocalMemoryStorage storage, CellAccessOptions writeAheadLogOptions, ICell cell);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// Note that the generic cell will be saved as a strongly typed cell. It can then be loaded into either a strongly-typed cell or a generic cell.
/// The <paramref name="cellId"/> overrides the cell id in <paramref name="cell"/>.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
/// <param name="cellId">A 64-bit cell id.</param>
/// <param name="cell">The cell to be saved.</param>
void SaveGenericCell(LocalMemoryStorage storage, CellAccessOptions writeAheadLogOptions, long cellId, ICell cell);
/// <summary>
/// Loads the content of the cell with the specified cell Id.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <returns>A generic cell instance that implements <see cref="Trinity.Storage.ICell"/> interfaces.</returns>
ICell LoadGenericCell(LocalMemoryStorage storage, long cellId);
/// <summary>
/// Instantiate a new generic cell with the specified type.
/// </summary>
/// <param name="cellType">The string representation of the cell type.</param>
/// <returns>The allocated generic cell.</returns>
ICell NewGenericCell(string cellType);
/// <summary>
/// Instantiate a new generic cell with the specified type and a cell ID.
/// </summary>
/// <param name="cellId">Cell Id.</param>
/// <param name="cellType">The string representation of the cell type.</param>
/// <returns>The allocated generic cell.</returns>
ICell NewGenericCell(long cellId, string cellType);
/// <summary>
/// Instantiate a new generic cell with the specified type and content.
/// </summary>
/// <param name="cellType">The string representation of the cell type.</param>
/// <param name="content">The json representation of the cell.</param>
/// <returns>The allocated generic cell.</returns>
ICell NewGenericCell(string cellType, string content);
/// <summary>
/// Allocate a generic cell accessor on the specified cell.
/// If <c><see cref="Trinity.TrinityConfig.ReadOnly"/> == false</c>,
/// on calling this method, it attempts to acquire the lock of the cell,
/// and blocks until it gets the lock.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <param name="cellId">The id of the specified cell.</param>
/// <returns>A <see cref="Trinity.Storage.ICellAccessor"/> instance.</returns>
ICellAccessor UseGenericCell(LocalMemoryStorage storage, long cellId);
/// <summary>
/// Allocate a generic cell accessor on the specified cell.
/// If <c><see cref="Trinity.TrinityConfig.ReadOnly"/> == false</c>,
/// on calling this method, it attempts to acquire the lock of the cell,
/// and blocks until it gets the lock.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <param name="cellId">The id of the specified cell.</param>
/// <param name="options">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
/// <returns>A <see cref="Trinity.Storage.ICellAccessor"/> instance.</returns>
ICellAccessor UseGenericCell(LocalMemoryStorage storage, long cellId, CellAccessOptions options);
/// <summary>
/// Allocate a generic cell accessor on the specified cell.
/// If <c><see cref="Trinity.TrinityConfig.ReadOnly"/> == false</c>,
/// on calling this method, it attempts to acquire the lock of the cell,
/// and blocks until it gets the lock.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <param name="cellId">The id of the specified cell.</param>
/// <param name="options">Cell access options.</param>
/// <param name="cellType">Specifies the type of cell to be created.</param>
/// <returns>A <see cref="Trinity.Storage.ICellAccessor"/> instance.</returns>
ICellAccessor UseGenericCell(LocalMemoryStorage storage, long cellId, CellAccessOptions options, string cellType);
/// <summary>
/// Enumerates all the typed cells within the local memory storage.
/// The cells without a type (where CellType == 0) are skipped.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <returns>All the typed cells within the local memory storage.</returns>
IEnumerable<ICell> EnumerateGenericCells(LocalMemoryStorage storage);
/// <summary>
/// Enumerates accessors of all the typed cells within the local memory storage.
/// The cells without a type (where CellType == 0) are skipped.
/// </summary>
/// <param name="storage">A <see cref="Trinity.Storage.LocalMemoryStorage"/> object.</param>
/// <returns>The accessors of all the typed cells within the local memory storage.</returns>
IEnumerable<ICellAccessor> EnumerateGenericCellAccessors(LocalMemoryStorage storage);
/// <summary>
/// Loads the content of the cell with the specified cell Id.
/// </summary>
/// <param name="storage">The cloud storage to load from.</param>
/// <param name="cellId">A 64-bit cell Id.</param>
/// <returns>A generic cell instance that implements <see cref="Trinity.Storage.ICell"/> interfaces.</returns>
ICell LoadGenericCell(MemoryCloud storage, long cellId);
/// <summary>
/// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
/// Note that the generic cell will be saved as a strongly typed cell. It can then be loaded into either a strongly-typed cell or a generic cell.
/// </summary>
/// <param name="storage">The cloud storage to save to.</param>
/// <param name="cell">The cell to be saved.</param>
void SaveGenericCell(MemoryCloud storage, ICell cell);
}
/// <summary>
/// Fallback schema used when no generated storage schema is registered:
/// it exposes no cell descriptors and cannot resolve cell types.
/// </summary>
internal class DefaultStorageSchema : IStorageSchema
{
    /// <summary>Always empty: the default schema declares no cell types.</summary>
    public IEnumerable<ICellDescriptor> CellDescriptors
    {
        get { return new ICellDescriptor[0]; }
    }

    /// <summary>Not supported by the default schema.</summary>
    public ushort GetCellType(string cellTypeString)
    {
        throw new NotImplementedException();
    }

    /// <summary>Always empty: the default schema declares no type signatures.</summary>
    public IEnumerable<string> CellTypeSignatures
    {
        get { return new string[0]; }
    }
}
/// <summary>
/// Fallback implementation used when no generated cell operations are
/// registered: every member throws <see cref="NotImplementedException"/>.
/// </summary>
internal class DefaultGenericCellOperations : IGenericCellOperations
{
    public void SaveGenericCell(LocalMemoryStorage storage, ICell cell) { throw new NotImplementedException(); }

    public void SaveGenericCell(LocalMemoryStorage storage, long cellId, ICell cell) { throw new NotImplementedException(); }

    public void SaveGenericCell(LocalMemoryStorage storage, CellAccessOptions writeAheadLogOptions, ICell cell) { throw new NotImplementedException(); }

    public void SaveGenericCell(LocalMemoryStorage storage, CellAccessOptions writeAheadLogOptions, long cellId, ICell cell) { throw new NotImplementedException(); }

    public ICell LoadGenericCell(LocalMemoryStorage storage, long cellId) { throw new NotImplementedException(); }

    public ICell NewGenericCell(string cellType) { throw new NotImplementedException(); }

    public ICell NewGenericCell(long cellId, string cellType) { throw new NotImplementedException(); }

    public ICell NewGenericCell(string cellType, string content) { throw new NotImplementedException(); }

    public ICellAccessor UseGenericCell(LocalMemoryStorage storage, long cellId) { throw new NotImplementedException(); }

    public ICellAccessor UseGenericCell(LocalMemoryStorage storage, long cellId, CellAccessOptions options) { throw new NotImplementedException(); }

    public ICellAccessor UseGenericCell(LocalMemoryStorage storage, long cellId, CellAccessOptions options, string cellType) { throw new NotImplementedException(); }

    public ICell LoadGenericCell(MemoryCloud storage, long cellId) { throw new NotImplementedException(); }

    public void SaveGenericCell(MemoryCloud storage, ICell cell) { throw new NotImplementedException(); }

    public IEnumerable<ICell> EnumerateGenericCells(LocalMemoryStorage storage) { throw new NotImplementedException(); }

    public IEnumerable<ICellAccessor> EnumerateGenericCellAccessors(LocalMemoryStorage storage) { throw new NotImplementedException(); }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using NUnit.Framework;
using Toscana.Exceptions;
namespace Toscana.Tests
{
[TestFixture]
public class ToscaServiceTemplateBuilderTests
{
/// <summary>
/// A derived node type must end up with both its own capabilities
/// and the ones inherited from its base node type.
/// </summary>
[Test]
public void Build_Capabilities_Of_Base_And_Derived_Node_Types_Are_Merged()
{
    // Arrange: a template carrying the base node type...
    var baseNodeType = new ToscaNodeType();
    baseNodeType.Capabilities.Add("base_capability1", "base_capability1_type");

    var baseTemplate = new ToscaServiceTemplate();
    baseTemplate.NodeTypes.Add("base_node", baseNodeType);

    // ...and another carrying a node type derived from it.
    var childNodeType = new ToscaNodeType
    {
        DerivedFrom = "base_node"
    };
    childNodeType.Capabilities.Add("capability1", "capability1_type");

    var childTemplate = new ToscaServiceTemplate();
    childTemplate.NodeTypes.Add("node1", childNodeType);

    // Act
    var combinedTemplate = new ToscaServiceTemplateBuilder()
        .Append(baseTemplate)
        .Append(childTemplate)
        .Build();

    // Assert: the derived node carries both capabilities after the merge.
    var mergedNodeType = combinedTemplate.NodeTypes["node1"];
    mergedNodeType.Capabilities.Should().HaveCount(2);
    mergedNodeType.Capabilities["capability1"].Type.Should().Be("capability1_type");
    mergedNodeType.Capabilities["base_capability1"].Type.Should().Be("base_capability1_type");
}
/// <summary>
/// Building a template whose node type derives from an undefined
/// base node type must fail validation.
/// </summary>
[Test]
public void Exception_Thrown_When_Base_NodeType_Is_Missing()
{
    // Arrange: a node type deriving from "base_node", which is never defined.
    var orphanNodeType = new ToscaNodeType
    {
        DerivedFrom = "base_node"
    };
    orphanNodeType.Capabilities.Add("capability1", "capability1_type");

    var template = new ToscaServiceTemplate();
    template.NodeTypes.Add("node1", orphanNodeType);

    // Act
    var builder = new ToscaServiceTemplateBuilder()
        .Append(template);
    Action buildAction = () => builder.Build();

    // Assert
    buildAction.ShouldThrow<ToscaValidationException>().WithMessage("Definition of Node Type base_node is missing");
}
/// <summary>
/// Declaring the same capability name on both a base node type and a
/// node type derived from it must fail validation.
/// </summary>
[Test]
public void Exception_Thrown_When_Duplicate_Capability_Appear_On_Base_And_Derived_Node_Type()
{
    // Arrange: base and derived node types both declare "capability1".
    var baseNodeType = new ToscaNodeType();
    baseNodeType.Capabilities.Add("capability1", "capability1_type");

    var baseTemplate = new ToscaServiceTemplate();
    baseTemplate.NodeTypes.Add("base_node", baseNodeType);

    var childNodeType = new ToscaNodeType
    {
        DerivedFrom = "base_node"
    };
    childNodeType.Capabilities.Add("capability1", "capability1_type");

    var childTemplate = new ToscaServiceTemplate();
    childTemplate.NodeTypes.Add("node1", childNodeType);

    // Act
    var builder = new ToscaServiceTemplateBuilder()
        .Append(baseTemplate)
        .Append(childTemplate);
    Action buildAction = () => builder.Build();

    // Assert
    buildAction.ShouldThrow<ToscaValidationException>()
        .WithMessage("Duplicate capability definition of capability capability1");
}
/// <summary>
/// Appending two templates that define the same node type name
/// must fail validation.
/// </summary>
[Test]
public void Exception_Thrown_When_Duplicate_Node_Types_Appear()
{
    // Arrange: two templates registering "duplicate_node".
    var sharedNodeType = new ToscaNodeType();

    var firstTemplate = new ToscaServiceTemplate();
    firstTemplate.NodeTypes.Add("duplicate_node", sharedNodeType);

    var secondTemplate = new ToscaServiceTemplate();
    secondTemplate.NodeTypes.Add("duplicate_node", sharedNodeType);

    // Act
    var builder = new ToscaServiceTemplateBuilder()
        .Append(firstTemplate)
        .Append(secondTemplate);
    Action buildAction = () => builder.Build();

    // Assert
    buildAction.ShouldThrow<ToscaValidationException>().WithMessage("Node type duplicate_node is duplicate");
}
/// <summary>
/// A derived node type must end up with both its own interfaces
/// and the ones inherited from its base node type.
/// </summary>
[Test]
public void Interfaces_Of_Base_And_Derived_Node_Types_Are_Merged()
{
    // Arrange: base node type with one interface...
    var baseNodeType = new ToscaNodeType();
    baseNodeType.Interfaces.Add("base_interface1", new Dictionary<string, object> {{"method1", "code"}});

    var baseTemplate = new ToscaServiceTemplate();
    baseTemplate.NodeTypes.Add("base_node", baseNodeType);

    // ...and a derived node type with a different interface.
    var childNodeType = new ToscaNodeType
    {
        DerivedFrom = "base_node"
    };
    childNodeType.Interfaces.Add("interface1", new Dictionary<string, object> {{"method2", "code"}});

    var childTemplate = new ToscaServiceTemplate();
    childTemplate.NodeTypes.Add("node1", childNodeType);

    // Act
    var combinedTemplate = new ToscaServiceTemplateBuilder()
        .Append(baseTemplate)
        .Append(childTemplate)
        .Build();

    // Assert: both interfaces are present on the merged node type.
    var mergedNodeType = combinedTemplate.NodeTypes["node1"];
    mergedNodeType.Interfaces.Should().HaveCount(2);
    mergedNodeType.Interfaces["base_interface1"]["method1"].Should().Be("code");
    mergedNodeType.Interfaces["interface1"]["method2"].Should().Be("code");
}
/// <summary>
/// Declaring the same interface name on both a base node type and a
/// node type derived from it must fail validation.
/// </summary>
[Test]
public void Exception_Thrown_When_Duplicate_Interface_Appears_On_Base_And_Derived_Node_Type()
{
    // Arrange: base and derived node types both declare "interface1".
    var baseNodeType = new ToscaNodeType();
    baseNodeType.Interfaces.Add("interface1", new Dictionary<string, object>());

    var baseTemplate = new ToscaServiceTemplate();
    baseTemplate.NodeTypes.Add("base_node", baseNodeType);

    var childNodeType = new ToscaNodeType
    {
        DerivedFrom = "base_node"
    };
    childNodeType.Interfaces.Add("interface1", new Dictionary<string, object>());

    var childTemplate = new ToscaServiceTemplate();
    childTemplate.NodeTypes.Add("node1", childNodeType);

    // Act
    var builder = new ToscaServiceTemplateBuilder()
        .Append(baseTemplate)
        .Append(childTemplate);
    Action buildAction = () => builder.Build();

    // Assert
    buildAction.ShouldThrow<ToscaValidationException>()
        .WithMessage("Duplicate interface definition of interface interface1");
}
/// <summary>
/// Inherited capabilities must propagate down a node type hierarchy in
/// which several leaves share the same base node type:
/// node_types:
///        Root
///         /
///        A
///       / \
///      B   C
/// </summary>
[Test]
public void Node_Types_Capabilities_Should_Be_Successfully_Parsed_With_Several_Leaves_With_Same_Base_Node_Types()
{
    // Arrange: one template holding the whole hierarchy.
    var hierarchyRoot = new ToscaNodeType();
    hierarchyRoot.Capabilities.Add("feature", "feature");

    var template = new ToscaServiceTemplate();
    template.NodeTypes.Add("root", hierarchyRoot);
    template.NodeTypes.Add("A", new ToscaNodeType {DerivedFrom = "root"});
    template.NodeTypes.Add("B", new ToscaNodeType {DerivedFrom = "A"});
    template.NodeTypes.Add("C", new ToscaNodeType {DerivedFrom = "A"});

    // Act
    var combinedTemplate = new ToscaServiceTemplateBuilder()
        .Append(template)
        .Build();

    // Assert: the four declared types plus the implicit tosca.nodes.Root.
    combinedTemplate.NodeTypes.Should().HaveCount(5);

    var toscaRoot = combinedTemplate.NodeTypes["tosca.nodes.Root"];
    toscaRoot.Attributes.Should().HaveCount(3);

    var mergedRoot = combinedTemplate.NodeTypes["root"];
    mergedRoot.Capabilities.Should().HaveCount(1);
    mergedRoot.Capabilities["feature"].Type.Should().Be("feature");

    // Every descendant inherits the "feature" capability.
    var mergedA = combinedTemplate.NodeTypes["A"];
    mergedA.Capabilities.Should().HaveCount(1);
    mergedA.Capabilities["feature"].Type.Should().Be("feature");

    var mergedB = combinedTemplate.NodeTypes["B"];
    mergedB.Capabilities.Should().HaveCount(1);
    mergedB.Capabilities["feature"].Type.Should().Be("feature");

    var mergedC = combinedTemplate.NodeTypes["C"];
    mergedC.Capabilities.Should().HaveCount(1);
    mergedC.Capabilities["feature"].Type.Should().Be("feature");
}
[Test]
public void Properties_Of_Base_And_Derived_Node_Types_Are_Merged()
{
    // Arrange: one property on the base type, a second on the derived type.
    var baseNodeType = new ToscaNodeType();
    baseNodeType.Properties.Add("base_property1", new ToscaPropertyDefinition { Type = "string" });
    var baseTemplate = new ToscaServiceTemplate();
    baseTemplate.NodeTypes.Add("base_node", baseNodeType);

    var derivedNodeType = new ToscaNodeType { DerivedFrom = "base_node" };
    derivedNodeType.Properties.Add("property1", new ToscaPropertyDefinition { Type = "int" });
    var derivedTemplate = new ToscaServiceTemplate();
    derivedTemplate.NodeTypes.Add("node1", derivedNodeType);

    // Act
    var combinedTemplate = new ToscaServiceTemplateBuilder()
        .Append(baseTemplate)
        .Append(derivedTemplate)
        .Build();

    // Assert: the built derived type exposes both properties with their own types.
    var mergedNodeType = combinedTemplate.NodeTypes["node1"];
    mergedNodeType.Properties.Should().HaveCount(2);
    mergedNodeType.Properties["base_property1"].Type.Should().Be("string");
    mergedNodeType.Properties["property1"].Type.Should().Be("int");
}
[Test]
public void Exception_Thrown_When_Duplicate_Property_Appears_On_Base_And_Derived_Node_Type()
{
    // Arrange: base and derived node types both declare "property1".
    var baseNodeType = new ToscaNodeType();
    baseNodeType.Properties.Add("property1", new ToscaPropertyDefinition { Type = "string" });
    var baseTemplate = new ToscaServiceTemplate();
    baseTemplate.NodeTypes.Add("base_node", baseNodeType);

    var derivedNodeType = new ToscaNodeType { DerivedFrom = "base_node" };
    derivedNodeType.Properties.Add("property1", new ToscaPropertyDefinition { Type = "string" });
    var derivedTemplate = new ToscaServiceTemplate();
    derivedTemplate.NodeTypes.Add("node1", derivedNodeType);

    var builder = new ToscaServiceTemplateBuilder()
        .Append(baseTemplate)
        .Append(derivedTemplate);

    // Act
    Action action = () => builder.Build();

    // Assert: the merge must reject the duplicate property definition.
    action.ShouldThrow<ToscaValidationException>()
        .WithMessage("Duplicate property definition of property property1");
}
[Test]
public void Requirements_Of_Base_And_Derived_Node_Types_Are_Merged()
{
    // Arrange: the base node type declares one requirement.
    var nodeType = new ToscaNodeType();
    nodeType.Requirements.Add(new Dictionary<string, ToscaRequirement>
    {
        {"base_requirement1", new ToscaRequirement {Capability = "attachment1"}}
    });
    var baseProfile = new ToscaServiceTemplate();
    baseProfile.NodeTypes.Add("base_node", nodeType);
    var derivedNodeType = new ToscaNodeType
    {
        DerivedFrom = "base_node"
    };
    // BUGFIX: the second requirement must be declared on the DERIVED node type.
    // The original added it to `nodeType` (the base), so the merge of the derived
    // type's own requirements was never actually exercised (compare the
    // Interfaces/Properties merge tests, which add to derivedNodeType).
    derivedNodeType.Requirements.Add(new Dictionary<string, ToscaRequirement>
    {
        {"requirement1", new ToscaRequirement {Capability = "attachment2"}}
    });
    var derivedProfile = new ToscaServiceTemplate();
    derivedProfile.NodeTypes.Add("node1", derivedNodeType);
    // Act
    var combinedToscaProfile = new ToscaServiceTemplateBuilder()
        .Append(baseProfile)
        .Append(derivedProfile)
        .Build();
    // Assert: the built derived type carries both the inherited and its own requirement.
    var combinedNodeType = combinedToscaProfile.NodeTypes["node1"];
    combinedNodeType.Requirements.Should().HaveCount(2);
    combinedNodeType.Requirements.Single(r => r.ContainsKey("base_requirement1"))["base_requirement1"]
        .Capability.Should().Be("attachment1");
    combinedNodeType.Requirements.Single(r => r.ContainsKey("requirement1"))["requirement1"].Capability.Should()
        .Be("attachment2");
}
[Test]
public void Exception_Thrown_When_Duplicate_Requirement_Appears_On_Base_And_Derived_Node_Type()
{
    // Arrange: base and derived node types both declare "requirement1".
    var nodeType = new ToscaNodeType();
    nodeType.Requirements.Add(new Dictionary<string, ToscaRequirement>{{"requirement1", new ToscaRequirement()}});
    var baseProfile = new ToscaServiceTemplate();
    baseProfile.NodeTypes.Add("base_node", nodeType);
    var derivedNodeType = new ToscaNodeType
    {
        DerivedFrom = "base_node"
    };
    // BUGFIX: the duplicate requirement must be declared on the DERIVED node type
    // to match the scenario in the test name. The original added both duplicates to
    // `nodeType` (the base), so base-vs-derived duplicate detection was never tested
    // (compare the duplicate-attribute test, which adds to derivedNodeType).
    derivedNodeType.Requirements.Add(new Dictionary<string, ToscaRequirement> { { "requirement1", new ToscaRequirement() } });
    var derivedProfile = new ToscaServiceTemplate();
    derivedProfile.NodeTypes.Add("node1", derivedNodeType);
    // Act
    var toscaSimpleProfileBuilder = new ToscaServiceTemplateBuilder()
        .Append(baseProfile)
        .Append(derivedProfile);
    Action action = () => toscaSimpleProfileBuilder.Build();
    // Assert
    action.ShouldThrow<ToscaValidationException>()
        .WithMessage("Duplicate requirement definition of requirement requirement1");
}
[Test]
public void Attributes_Of_Base_And_Derived_Node_Types_Are_Merged()
{
    // Arrange: one attribute on the base type, a second on the derived type.
    var nodeType = new ToscaNodeType();
    nodeType.Attributes.Add("base_attribute1", new ToscaAttributeDefinition { Type = "string" });
    var baseProfile = new ToscaServiceTemplate();
    baseProfile.NodeTypes.Add("base_node", nodeType);
    var derivedNodeType = new ToscaNodeType
    {
        DerivedFrom = "base_node"
    };
    // BUGFIX: "attribute1" must be declared on the DERIVED node type. The original
    // added it to `nodeType` (the base), so the test passed without ever merging an
    // attribute contributed by the derived type (compare the duplicate-attribute
    // test at the same granularity, which correctly uses derivedNodeType).
    derivedNodeType.Attributes.Add("attribute1", new ToscaAttributeDefinition { Type = "int" });
    var derivedProfile = new ToscaServiceTemplate();
    derivedProfile.NodeTypes.Add("node1", derivedNodeType);
    // Act
    var combinedToscaProfile = new ToscaServiceTemplateBuilder()
        .Append(baseProfile)
        .Append(derivedProfile)
        .Build();
    // Assert: the built derived type exposes both attributes with their own types.
    var combinedNodeType = combinedToscaProfile.NodeTypes["node1"];
    combinedNodeType.Attributes.Should().HaveCount(2);
    combinedNodeType.Attributes["base_attribute1"].Type.Should().Be("string");
    combinedNodeType.Attributes["attribute1"].Type.Should().Be("int");
}
[Test]
public void Exception_Thrown_When_Duplicate_Attribute_Appears_On_Base_And_Derived_Node_Type()
{
    // Arrange: base and derived node types both declare "attribute1".
    var baseNodeType = new ToscaNodeType();
    baseNodeType.Attributes.Add("attribute1", new ToscaAttributeDefinition { Type = "string" });
    var baseTemplate = new ToscaServiceTemplate();
    baseTemplate.NodeTypes.Add("base_node", baseNodeType);

    var derivedNodeType = new ToscaNodeType { DerivedFrom = "base_node" };
    derivedNodeType.Attributes.Add("attribute1", new ToscaAttributeDefinition { Type = "string" });
    var derivedTemplate = new ToscaServiceTemplate();
    derivedTemplate.NodeTypes.Add("node1", derivedNodeType);

    var builder = new ToscaServiceTemplateBuilder()
        .Append(baseTemplate)
        .Append(derivedTemplate);

    // Act
    Action action = () => builder.Build();

    // Assert: the merge must reject the duplicate attribute definition.
    action.ShouldThrow<ToscaValidationException>()
        .WithMessage("Duplicate attribute definition of attribute attribute1");
}
[Test]
public void Node_Types_Derives_From_Tosca_Root_Node_Should_Be_Properly_Build()
{
    // Arrange: a single node type deriving directly from the built-in root type.
    var template = new ToscaServiceTemplate();
    template.NodeTypes.Add("node1", new ToscaNodeType { DerivedFrom = "tosca.nodes.Root" });

    // Act
    var builtTemplate = new ToscaServiceTemplateBuilder().Append(template).Build();

    // Assert: the three standard attributes of tosca.nodes.Root are inherited.
    var attributes = builtTemplate.NodeTypes["node1"].Attributes;
    attributes.Should().HaveCount(3);
    attributes["tosca_id"].Type.Should().Be("string");
    attributes["tosca_name"].Type.Should().Be("string");
    attributes["state"].Type.Should().Be("string");
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
using Microsoft.Win32;
using System;
using System.Text;
using System.Threading;
namespace Microsoft.Win32.RegistryTests
{
/// <summary>
/// Tests for <see cref="RegistryKey.CreateSubKey(string)"/>. Each instance works
/// under a uniquely-named key below HKEY_CURRENT_USER and cleans it up on Dispose.
/// </summary>
public class RegistryKey_CreateSubKey_str : IDisposable
{
    // Registry keys under test; _rk1 is always HKEY_CURRENT_USER.
    private RegistryKey _rk1, _rk2;
    private string _str1;
    // Base key name; a per-instance counter suffix keeps concurrent tests from colliding.
    private string _testKeyName = "REG_TEST_1";
    private static int s_keyCount = 0;

    /// <summary>
    /// Makes the test key name unique and removes any leftover key from a previous run.
    /// </summary>
    public void TestInitialize()
    {
        var counter = Interlocked.Increment(ref s_keyCount);
        _testKeyName = _testKeyName + counter.ToString();
        _rk1 = Microsoft.Win32.Registry.CurrentUser;
        if (_rk1.OpenSubKey(_testKeyName) != null)
            _rk1.DeleteSubKeyTree(_testKeyName);
    }

    public RegistryKey_CreateSubKey_str()
    {
        TestInitialize();
    }

    [Fact]
    public void Test01()
    {
        // [] Passing in null should throw ArgumentNullException
        _rk1 = Microsoft.Win32.Registry.CurrentUser;
        Action a = () => { _rk2 = _rk1.CreateSubKey(null); };
        Assert.Throws<ArgumentNullException>(() => { a(); });
    }

    [Fact]
    public void Test02()
    {
        // [] Creating the empty subkey should return the parent key itself
        // (its path is the parent's path followed by a trailing backslash).
        try
        {
            _rk2 = _rk1.CreateSubKey(String.Empty);
            if (_rk2.ToString() != (_rk1.ToString() + @"\"))
            {
                Assert.False(true, "Error CreateSubKey returned some unexpected results... ");
            }
        }
        catch (Exception exc)
        {
            // Fixed garbled failure message ("Unexpected expected occured").
            Assert.False(true, "Error Unexpected exception occurred, got exc==" + exc.ToString());
        }
    }

    [Fact]
    public void Test03()
    {
        // [] Creating new SubKey and check that it exists
        _rk1 = Microsoft.Win32.Registry.CurrentUser;
        _rk1.CreateSubKey(_testKeyName);
        if (_rk1.OpenSubKey(_testKeyName) == null)
        {
            Assert.False(true, "Error SubKey does not exist.");
        }
        _rk1.DeleteSubKey(_testKeyName);
        if (_rk1.OpenSubKey(_testKeyName) != null)
        {
            Assert.False(true, "Error SubKey not removed properly");
        }
    }

    [Fact]
    public void Test04()
    {
        // [] Give subkey a value and get it back
        _rk1 = Microsoft.Win32.Registry.CurrentUser;
        _rk1.CreateSubKey(_testKeyName);
        _rk1.SetValue(_testKeyName, new Decimal(5));
        if (_rk1.OpenSubKey(_testKeyName) == null)
        {
            Assert.False(true, "Error Could not get subkey");
        }
        // Remember, this will be written as a string value.
        // (Removed an unused local that duplicated this GetValue call.)
        if (Convert.ToDecimal(_rk1.GetValue(_testKeyName)) != new Decimal(5))
        {
            Assert.False(true, "Error got value==" + _rk1.GetValue(_testKeyName));
        }
    }

    [Fact]
    public void Test05()
    {
        // [] CreateSubKey should open subkey if it already exists
        _rk2 = _rk1.CreateSubKey(_testKeyName);
        _rk2.CreateSubKey("BLAH");
        if (_rk1.OpenSubKey(_testKeyName).OpenSubKey("BLAH") == null)
        {
            Assert.False(true, "Error Expected get not returned");
        }
        _rk2.DeleteSubKey("BLAH");
    }

    [Fact]
    public void Test06()
    {
        // [] Create subkey and check GetSubKeyCount
        _rk2 = _rk1.CreateSubKey(_testKeyName);
        for (int i = 0; i < 10; i++)
            _rk2.CreateSubKey("BLAH_" + i.ToString());
        if (_rk2.SubKeyCount != 10)
        {
            Assert.False(true, "Error Incorrect number of subkeys , coun==" + _rk2.SubKeyCount);
        }
        for (int i = 0; i < 10; i++)
        {
            if (!_rk2.GetSubKeyNames()[i].Equals("BLAH_" + i.ToString()))
            {
                Assert.False(true, "Error" + i.ToString() + "! Incorrect name of subKey");
            }
        }
    }

    [Fact]
    public void Test07()
    {
        // [] Key names containing spaces, dots and slashes are legal.
        _rk2 = _rk1.CreateSubKey(_testKeyName);
        _str1 = "Dyalog APL/W 10.0";
        _rk2.CreateSubKey(_str1);
        if (_rk2.OpenSubKey(_str1) == null)
        {
            Assert.False(true, "Error SubKey does not exist.");
        }
    }

    [Fact]
    public void Test08()
    {
        //[]we should open keys with multiple \ in the name
        _rk2 = _rk1.CreateSubKey(_testKeyName);
        _str1 = @"a\b\c\d\e\f\g\h";
        _rk2.CreateSubKey(_str1);
        if (_rk2.OpenSubKey(_str1) == null)
        {
            Assert.False(true, "Error SubKey does not exist.");
        }
        _rk1.DeleteSubKeyTree(_testKeyName);
    }

    [Fact]
    public void Test09()
    {
        //[]play around with the \ and the / keys
        _rk2 = _rk1.CreateSubKey(_testKeyName);
        _str1 = @"a\b\c\/d\//e\f\g\h\//\\";
        _rk2.CreateSubKey(_str1);
        if (_rk2.OpenSubKey(_str1) == null)
        {
            Assert.False(true, "Error SubKey does not exist.");
        }
        _rk1.DeleteSubKeyTree(_testKeyName);
    }

    [Fact]
    public void Test10()
    {
        //[] how deep can we go with this
        _rk2 = _rk1.CreateSubKey(_testKeyName);
        _str1 = String.Empty;
        // Changed the number of times we repeat str1 from 100 to 30 in response to the Windows OS
        // There is a restriction of 255 characters for the keyname even if it is multikeys. Not worth to pursue as a bug
        // reduced further to allow for WoW64 changes to the string.
        for (int i = 0; i < 25 && _str1.Length < 230; i++)
            _str1 = _str1 + i.ToString() + @"\";
        _rk2.CreateSubKey(_str1);
        if (_rk2.OpenSubKey(_str1) == null)
        {
            Assert.False(true, "Error SubKey does not exist.");
        }
        // However, we are interested in ensuring that there are no buffer overflow issues with a deeply nested keys
        for (int i = 0; i < 3; i++)
        {
            _rk2 = _rk2.OpenSubKey(_str1, true);
            if (_rk2 == null)
            {
                Assert.False(true, "Err Wrong value returned, " + i);
                break;
            }
            _rk2.CreateSubKey(_str1);
        }
        _rk1.DeleteSubKeyTree(_testKeyName);
    }

    [Fact]
    public void Test11()
    {
        // [] Should throw ArgumentException if key name is too long
        StringBuilder sb = new StringBuilder("");
        for (int i = 0; i < 256; i++)
            sb.Append(",");
        Action a = () => { _rk1.CreateSubKey(sb.ToString()); };
        Assert.Throws<ArgumentException>(() => { a(); });
    }

    /// <summary>
    /// Removes the per-instance test key and any value of the same name from HKCU.
    /// </summary>
    public void Dispose()
    {
        _rk1 = Microsoft.Win32.Registry.CurrentUser;
        if (_rk1.OpenSubKey(_testKeyName) != null)
            _rk1.DeleteSubKeyTree(_testKeyName);
        if (_rk1.GetValue(_testKeyName) != null)
            _rk1.DeleteValue(_testKeyName);
    }
}
}
| |
// ***********************************************************************
// Copyright (c) 2007 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Xml;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal.Filters;
namespace NUnit.Framework.Internal
{
/// <summary>
/// Abstract base class for filters applied to tests.
/// The filter applies when running the test, after it has been
/// loaded, since this is the only time an ITest exists.
/// </summary>
#if !PORTABLE && !NETSTANDARD1_6
[Serializable]
#endif
public abstract class TestFilter : ITestFilter
{
    /// <summary>
    /// Unique Empty filter.
    /// </summary>
    public static readonly TestFilter Empty = new EmptyFilter();

    /// <summary>
    /// Indicates whether this is the EmptyFilter
    /// </summary>
    public bool IsEmpty
    {
        get { return this is TestFilter.EmptyFilter; }
    }

    /// <summary>
    /// Indicates whether this is a top-level filter,
    /// not contained in any other filter.
    /// </summary>
    public bool TopLevel { get; set; }

    /// <summary>
    /// Determine if a particular test passes the filter criteria. The default
    /// implementation checks the test itself, its parents and any descendants.
    ///
    /// Derived classes may override this method or any of the Match methods
    /// to change the behavior of the filter.
    /// </summary>
    /// <param name="test">The test to which the filter is applied</param>
    /// <returns>True if the test passes the filter, otherwise false</returns>
    public virtual bool Pass(ITest test)
    {
        return Match(test) || MatchParent(test) || MatchDescendant(test);
    }

    /// <summary>
    /// Determine if a test matches the filter explicitly. That is, it must
    /// be a direct match of the test itself or one of its children.
    /// </summary>
    /// <param name="test">The test to which the filter is applied</param>
    /// <returns>True if the test matches the filter explicitly, otherwise false</returns>
    public virtual bool IsExplicitMatch(ITest test)
    {
        return Match(test) || MatchDescendant(test);
    }

    /// <summary>
    /// Determine whether the test itself matches the filter criteria, without
    /// examining either parents or descendants. This is overridden by each
    /// different type of filter to perform the necessary tests.
    /// </summary>
    /// <param name="test">The test to which the filter is applied</param>
    /// <returns>True if the filter matches the test itself</returns>
    public abstract bool Match(ITest test);

    /// <summary>
    /// Determine whether any ancestor of the test matches the filter criteria
    /// </summary>
    /// <param name="test">The test to which the filter is applied</param>
    /// <returns>True if the filter matches an ancestor of the test</returns>
    public bool MatchParent(ITest test)
    {
        // Walks the parent chain recursively until a match is found or the root is reached.
        return test.Parent != null && (Match(test.Parent) || MatchParent(test.Parent));
    }

    /// <summary>
    /// Determine whether any descendant of the test matches the filter criteria.
    /// </summary>
    /// <param name="test">The test to be matched</param>
    /// <returns>True if at least one descendant matches the filter criteria</returns>
    protected virtual bool MatchDescendant(ITest test)
    {
        if (test.Tests == null)
            return false;

        foreach (ITest child in test.Tests)
        {
            if (Match(child) || MatchDescendant(child))
                return true;
        }

        return false;
    }

    /// <summary>
    /// Create a TestFilter instance from an xml representation.
    /// </summary>
    public static TestFilter FromXml(string xmlText)
    {
        TNode topNode = TNode.FromXml(xmlText);

        if (topNode.Name != "filter")
            throw new Exception("Expected filter element at top level");

        // An empty <filter/> means "match everything"; a single child is unwrapped,
        // while multiple children are implicitly combined with "and".
        int count = topNode.ChildNodes.Count;
        TestFilter filter = count == 0
            ? TestFilter.Empty
            : count == 1
                ? FromXml(topNode.FirstChild)
                : FromXml(topNode);

        filter.TopLevel = true;
        return filter;
    }

    /// <summary>
    /// Create a TestFilter from its TNode representation
    /// </summary>
    public static TestFilter FromXml(TNode node)
    {
        bool isRegex = node.Attributes["re"] == "1";

        switch (node.Name)
        {
            case "filter":
            case "and":
                var andFilter = new AndFilter();
                foreach (var childNode in node.ChildNodes)
                    andFilter.Add(FromXml(childNode));
                return andFilter;

            case "or":
                var orFilter = new OrFilter();
                foreach (var childNode in node.ChildNodes)
                    orFilter.Add(FromXml(childNode));
                return orFilter;

            case "not":
                return new NotFilter(FromXml(node.FirstChild));

            case "id":
                return new IdFilter(node.Value);

            case "test":
                return new FullNameFilter(node.Value) { IsRegex = isRegex };

            case "name":
                return new TestNameFilter(node.Value) { IsRegex = isRegex };

            case "method":
                return new MethodNameFilter(node.Value) { IsRegex = isRegex };

            case "class":
                return new ClassNameFilter(node.Value) { IsRegex = isRegex };

            case "cat":
                return new CategoryFilter(node.Value) { IsRegex = isRegex };

            case "prop":
                string name = node.Attributes["name"];
                if (name != null)
                    return new PropertyFilter(name, node.Value) { IsRegex = isRegex };
                break;
        }

        // BUGFIX: the paramName previously said "xmlNode", which does not match
        // the actual parameter name "node".
        throw new ArgumentException("Invalid filter element: " + node.Name, "node");
    }

    /// <summary>
    /// Nested class provides an empty filter - one that always
    /// returns true when called. It never matches explicitly.
    /// </summary>
#if !PORTABLE && !NETSTANDARD1_6
    [Serializable]
#endif
    private class EmptyFilter : TestFilter
    {
        public override bool Match( ITest test )
        {
            return true;
        }

        public override bool Pass( ITest test )
        {
            return true;
        }

        public override bool IsExplicitMatch( ITest test )
        {
            return false;
        }

        public override TNode AddToXml(TNode parentNode, bool recursive)
        {
            return parentNode.AddElement("filter");
        }
    }

    #region IXmlNodeBuilder Implementation

    /// <summary>
    /// Adds an XML node
    /// </summary>
    /// <param name="recursive">True if recursive</param>
    /// <returns>The added XML node</returns>
    public TNode ToXml(bool recursive)
    {
        return AddToXml(new TNode("dummy"), recursive);
    }

    /// <summary>
    /// Adds an XML node
    /// </summary>
    /// <param name="parentNode">Parent node</param>
    /// <param name="recursive">True if recursive</param>
    /// <returns>The added XML node</returns>
    public abstract TNode AddToXml(TNode parentNode, bool recursive);

    #endregion
}
}
| |
using System;
using System.Text;
using Lucene.Net.Documents;
using Lucene.Net.Index;
namespace Lucene.Net.Search
{
using System.IO;
using System.Runtime.CompilerServices;
using AtomicReader = Lucene.Net.Index.AtomicReader;
using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
using Bits = Lucene.Net.Util.Bits;
using BytesRef = Lucene.Net.Util.BytesRef;
using DocTermOrds = Lucene.Net.Index.DocTermOrds;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using DoubleField = DoubleField;
using FloatField = FloatField;
using IntField = IntField;
using LongField = LongField;
using NumericDocValuesField = NumericDocValuesField;
using NumericUtils = Lucene.Net.Util.NumericUtils;
using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
// javadocs
using SortedDocValues = Lucene.Net.Index.SortedDocValues;
using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
using Terms = Lucene.Net.Index.Terms;
using TermsEnum = Lucene.Net.Index.TermsEnum;
/// <summary>
/// Expert: Maintains caches of term values.
///
/// <p>Created: May 19, 2004 11:13:14 AM
///
/// @since lucene 1.4 </summary>
/// <seealso cref=Lucene.Net.Util.FieldCacheSanityChecker</seealso>
public interface IFieldCache
{
/// <summary>
/// Checks the internal cache for an appropriate entry, and if none is found,
/// reads the terms in <code>field</code> and returns a bit set at the size of
/// <code>reader.maxDoc()</code>, with turned on bits for each docid that
/// does have a value for this field.
/// </summary>
Bits GetDocsWithField(AtomicReader reader, string field);
/// <summary>
/// Checks the internal cache for an appropriate entry, and if none is
/// found, reads the terms in <code>field</code> as a single byte and returns an array
/// of size <code>reader.maxDoc()</code> of the value each document
/// has in the given field. </summary>
/// <param name="reader"> Used to get field values. </param>
/// <param name="field"> Which field contains the single byte values. </param>
/// <param name="setDocsWithField"> If true then <seealso cref="#getDocsWithField"/> will
/// also be computed and stored in the FieldCache. </param>
/// <returns> The values in the given field for each document. </returns>
/// <exception cref="IOException"> If any error occurs. </exception>
/// @deprecated (4.4) Index as a numeric field using <seealso cref="IntField"/> and then use <seealso cref="#getInts(AtomicReader, String, boolean)"/> instead.
FieldCache.Bytes GetBytes(AtomicReader reader, string field, bool setDocsWithField);
/// <summary>
/// Checks the internal cache for an appropriate entry, and if none is found,
/// reads the terms in <code>field</code> as bytes and returns an array of
/// size <code>reader.maxDoc()</code> of the value each document has in the
/// given field. </summary>
/// <param name="reader"> Used to get field values. </param>
/// <param name="field"> Which field contains the bytes. </param>
/// <param name="parser"> Computes byte for string values. </param>
/// <param name="setDocsWithField"> If true then <seealso cref="#getDocsWithField"/> will
/// also be computed and stored in the FieldCache. </param>
/// <returns> The values in the given field for each document. </returns>
/// <exception cref="IOException"> If any error occurs. </exception>
/// @deprecated (4.4) Index as a numeric field using <seealso cref="IntField"/> and then use <seealso cref="#getInts(AtomicReader, String, boolean)"/> instead.
FieldCache.Bytes GetBytes(AtomicReader reader, string field, FieldCache.IByteParser parser, bool setDocsWithField);
/// <summary>
/// Checks the internal cache for an appropriate entry, and if none is
/// found, reads the terms in <code>field</code> as shorts and returns an array
/// of size <code>reader.maxDoc()</code> of the value each document
/// has in the given field. </summary>
/// <param name="reader"> Used to get field values. </param>
/// <param name="field"> Which field contains the shorts. </param>
/// <param name="setDocsWithField"> If true then <seealso cref="#getDocsWithField"/> will
/// also be computed and stored in the FieldCache. </param>
/// <returns> The values in the given field for each document. </returns>
/// <exception cref="IOException"> If any error occurs. </exception>
/// @deprecated (4.4) Index as a numeric field using <seealso cref="IntField"/> and then use <seealso cref="#getInts(AtomicReader, String, boolean)"/> instead.
FieldCache.Shorts GetShorts(AtomicReader reader, string field, bool setDocsWithField);
/// <summary>
/// Checks the internal cache for an appropriate entry, and if none is found,
/// reads the terms in <code>field</code> as shorts and returns an array of
/// size <code>reader.maxDoc()</code> of the value each document has in the
/// given field. </summary>
/// <param name="reader"> Used to get field values. </param>
/// <param name="field"> Which field contains the shorts. </param>
/// <param name="parser"> Computes short for string values. </param>
/// <param name="setDocsWithField"> If true then <seealso cref="#getDocsWithField"/> will
/// also be computed and stored in the FieldCache. </param>
/// <returns> The values in the given field for each document. </returns>
/// <exception cref="IOException"> If any error occurs. </exception>
/// @deprecated (4.4) Index as a numeric field using <seealso cref="IntField"/> and then use <seealso cref="#getInts(AtomicReader, String, boolean)"/> instead.
FieldCache.Shorts GetShorts(AtomicReader reader, string field, FieldCache.IShortParser parser, bool setDocsWithField);
/// <summary>
/// Returns an <seealso cref="FieldCache.Ints"/> over the values found in documents in the given
/// field.
/// </summary>
/// <seealso cref= #getInts(AtomicReader, String, IntParser, boolean) </seealso>
FieldCache.Ints GetInts(AtomicReader reader, string field, bool setDocsWithField);
/// <summary>
/// Returns an <seealso cref="FieldCache.Ints"/> over the values found in documents in the given
/// field. If the field was indexed as <seealso cref="NumericDocValuesField"/>, it simply
/// uses <seealso cref="AtomicReader#getNumericDocValues(String)"/> to read the values.
/// Otherwise, it checks the internal cache for an appropriate entry, and if
/// none is found, reads the terms in <code>field</code> as ints and returns
/// an array of size <code>reader.maxDoc()</code> of the value each document
/// has in the given field.
/// </summary>
/// <param name="reader">
/// Used to get field values. </param>
/// <param name="field">
/// Which field contains the longs. </param>
/// <param name="parser">
/// Computes int for string values. May be {@code null} if the
/// requested field was indexed as <seealso cref="NumericDocValuesField"/> or
/// <seealso cref="IntField"/>. </param>
/// <param name="setDocsWithField">
/// If true then <seealso cref="#getDocsWithField"/> will also be computed and
/// stored in the FieldCache. </param>
/// <returns> The values in the given field for each document. </returns>
/// <exception cref="IOException">
/// If any error occurs. </exception>
FieldCache.Ints GetInts(AtomicReader reader, string field, FieldCache.IIntParser parser, bool setDocsWithField);
/// <summary>
/// Returns a <seealso cref="Floats"/> over the values found in documents in the given
/// field.
/// </summary>
/// <seealso cref= #getFloats(AtomicReader, String, FloatParser, boolean) </seealso>
FieldCache.Floats GetFloats(AtomicReader reader, string field, bool setDocsWithField);
/// <summary>
/// Returns a <seealso cref="Floats"/> over the values found in documents in the given
/// field. If the field was indexed as <seealso cref="NumericDocValuesField"/>, it simply
/// uses <seealso cref="AtomicReader#getNumericDocValues(String)"/> to read the values.
/// Otherwise, it checks the internal cache for an appropriate entry, and if
/// none is found, reads the terms in <code>field</code> as floats and returns
/// an array of size <code>reader.maxDoc()</code> of the value each document
/// has in the given field.
/// </summary>
/// <param name="reader">
/// Used to get field values. </param>
/// <param name="field">
/// Which field contains the floats. </param>
/// <param name="parser">
/// Computes float for string values. May be {@code null} if the
/// requested field was indexed as <seealso cref="NumericDocValuesField"/> or
/// <seealso cref="FloatField"/>. </param>
/// <param name="setDocsWithField">
/// If true then <seealso cref="#getDocsWithField"/> will also be computed and
/// stored in the FieldCache. </param>
/// <returns> The values in the given field for each document. </returns>
/// <exception cref="IOException">
/// If any error occurs. </exception>
FieldCache.Floats GetFloats(AtomicReader reader, string field, FieldCache.IFloatParser parser, bool setDocsWithField);
/// <summary>
/// Returns a <see cref="FieldCache.Longs"/> over the values found in documents in the given
/// field.
/// </summary>
/// <seealso cref="GetLongs(AtomicReader, string, FieldCache.ILongParser, bool)"/>
FieldCache.Longs GetLongs(AtomicReader reader, string field, bool setDocsWithField);
/// <summary>
/// Returns a <see cref="FieldCache.Longs"/> over the values found in documents in the given
/// field. If the field was indexed as <see cref="NumericDocValuesField"/>, it simply
/// uses <c>AtomicReader.GetNumericDocValues(String)</c> to read the values.
/// Otherwise, it checks the internal cache for an appropriate entry, and if
/// none is found, reads the terms in <paramref name="field"/> as longs and returns
/// an array of size <c>reader.maxDoc()</c> of the value each document
/// has in the given field.
/// </summary>
/// <param name="reader">Used to get field values.</param>
/// <param name="field">Which field contains the longs.</param>
/// <param name="parser">Computes a long for string values. May be <c>null</c> if the
/// requested field was indexed as <see cref="NumericDocValuesField"/> or
/// <see cref="LongField"/>.</param>
/// <param name="setDocsWithField">If true then <c>GetDocsWithField</c> will also be computed and
/// stored in the FieldCache.</param>
/// <returns>The values in the given field for each document.</returns>
/// <exception cref="IOException">If any error occurs.</exception>
FieldCache.Longs GetLongs(AtomicReader reader, string field, FieldCache.ILongParser parser, bool setDocsWithField);
/// <summary>
/// Returns a <see cref="FieldCache.Doubles"/> over the values found in documents in the given
/// field.
/// </summary>
/// <seealso cref="GetDoubles(AtomicReader, string, FieldCache.IDoubleParser, bool)"/>
FieldCache.Doubles GetDoubles(AtomicReader reader, string field, bool setDocsWithField);
/// <summary>
/// Returns a <see cref="FieldCache.Doubles"/> over the values found in documents in the given
/// field. If the field was indexed as <see cref="NumericDocValuesField"/>, it simply
/// uses <c>AtomicReader.GetNumericDocValues(String)</c> to read the values.
/// Otherwise, it checks the internal cache for an appropriate entry, and if
/// none is found, reads the terms in <paramref name="field"/> as doubles and returns
/// an array of size <c>reader.maxDoc()</c> of the value each document
/// has in the given field.
/// </summary>
/// <param name="reader">Used to get field values.</param>
/// <param name="field">Which field contains the doubles.</param>
/// <param name="parser">Computes a double for string values. May be <c>null</c> if the
/// requested field was indexed as <see cref="NumericDocValuesField"/> or
/// <see cref="DoubleField"/>.</param>
/// <param name="setDocsWithField">If true then <c>GetDocsWithField</c> will also be computed and
/// stored in the FieldCache.</param>
/// <returns>The values in the given field for each document.</returns>
/// <exception cref="IOException">If any error occurs.</exception>
FieldCache.Doubles GetDoubles(AtomicReader reader, string field, FieldCache.IDoubleParser parser, bool setDocsWithField);
/// <summary>
/// Checks the internal cache for an appropriate entry, and if none
/// is found, reads the term values in <paramref name="field"/>
/// and returns a <see cref="BinaryDocValues"/> instance, providing a
/// method to retrieve the term (as a <c>BytesRef</c>) per document.
/// </summary>
/// <param name="reader">Used to get field values.</param>
/// <param name="field">Which field contains the strings.</param>
/// <param name="setDocsWithField">If true then <c>GetDocsWithField</c> will
/// also be computed and stored in the FieldCache.</param>
/// <returns>The values in the given field for each document.</returns>
/// <exception cref="IOException">If any error occurs.</exception>
BinaryDocValues GetTerms(AtomicReader reader, string field, bool setDocsWithField);
/// <summary>
/// Expert: just like <see cref="GetTerms(AtomicReader, string, bool)"/>,
/// but you can specify whether more RAM should be consumed in exchange for
/// faster lookups (default is "true"). Note that the
/// first call for a given reader and field "wins":
/// subsequent calls will share the same cache entry.
/// </summary>
BinaryDocValues GetTerms(AtomicReader reader, string field, bool setDocsWithField, float acceptableOverheadRatio);
/// <summary>
/// Checks the internal cache for an appropriate entry, and if none
/// is found, reads the term values in <paramref name="field"/>
/// and returns a <see cref="SortedDocValues"/> instance,
/// providing methods to retrieve sort ordinals and terms
/// (as a <c>BytesRef</c>) per document.
/// </summary>
/// <param name="reader">Used to get field values.</param>
/// <param name="field">Which field contains the strings.</param>
/// <returns>The values in the given field for each document.</returns>
/// <exception cref="IOException">If any error occurs.</exception>
SortedDocValues GetTermsIndex(AtomicReader reader, string field);
/// <summary>
/// Expert: just like <see cref="GetTermsIndex(AtomicReader, string)"/>,
/// but you can specify whether more RAM should be consumed in exchange for
/// faster lookups (default is "true"). Note that the
/// first call for a given reader and field "wins":
/// subsequent calls will share the same cache entry.
/// </summary>
SortedDocValues GetTermsIndex(AtomicReader reader, string field, float acceptableOverheadRatio);
/// <summary>
/// Checks the internal cache for an appropriate entry, and if none is found, reads the term values
/// in <paramref name="field"/> and returns a <see cref="DocTermOrds"/> instance, providing a method to retrieve
/// the terms (as ords) per document.
/// </summary>
/// <param name="reader">Used to build a <see cref="DocTermOrds"/> instance.</param>
/// <param name="field">Which field contains the strings.</param>
/// <returns>A <see cref="SortedSetDocValues"/> over the term ordinals.</returns>
/// <exception cref="IOException">If any error occurs.</exception>
SortedSetDocValues GetDocTermOrds(AtomicReader reader, string field);
/// <summary>
/// EXPERT: Generates an array of <see cref="FieldCache.CacheEntry"/> objects representing all items
/// currently in the FieldCache.
/// <para>
/// NOTE: These CacheEntry objects maintain a strong reference to the
/// cached values. Maintaining a reference to a CacheEntry after the AtomicIndexReader
/// associated with it has been garbage collected will prevent the value itself
/// from being garbage collected when the cache drops the WeakReference.
/// </para>
/// @lucene.experimental
/// </summary>
FieldCache.CacheEntry[] CacheEntries { get; }
/// <summary>
/// <para>
/// EXPERT: Instructs the FieldCache to forcibly expunge all entries
/// from the underlying caches. This is intended only to be used for
/// test methods as a way to ensure a known base state of the cache
/// (without needing to rely on GC to free WeakReferences).
/// It should not be relied on for "cache maintenance" in general
/// application code.
/// </para>
/// @lucene.experimental
/// </summary>
void PurgeAllCaches();
/// <summary>
/// Expert: drops all cache entries associated with this
/// reader's core cache key. NOTE: this cache key must
/// precisely match the reader that the cache entry is
/// keyed on. If you pass a top-level reader, it usually
/// will have no effect as Lucene now caches at the segment
/// reader level.
/// </summary>
void PurgeByCacheKey(object coreCacheKey);
/// <summary>
/// If non-null, FieldCacheImpl will warn whenever
/// entries are created that are not sane according to
/// <c>Lucene.Net.Util.FieldCacheSanityChecker</c>.
/// </summary>
StreamWriter InfoStream { set; get; }
}
/// <summary>
/// Companion types for the field cache: per-type accessor base classes, term
/// parsers, and cache-introspection helpers used by <see cref="IFieldCache"/>.
/// </summary>
public static class FieldCache
{
    /// <summary>Field values as 8-bit signed integers, addressable by document id.</summary>
    public abstract class Bytes
    {
        /// <summary>Returns this field's value for the given document.</summary>
        public abstract sbyte Get(int docID);

        /// <summary>Shared instance that returns zero for every document.</summary>
        public static readonly Bytes EMPTY = new EmptyBytes();

        public sealed class EmptyBytes : Bytes
        {
            public override sbyte Get(int docID)
            {
                return 0;
            }
        }
    }

    /// <summary>Field values as 16-bit signed integers, addressable by document id.</summary>
    public abstract class Shorts
    {
        /// <summary>Returns this field's value for the given document.</summary>
        public abstract short Get(int docID);

        /// <summary>Shared instance that returns zero for every document.</summary>
        public static readonly Shorts EMPTY = new EmptyShorts();

        public sealed class EmptyShorts : Shorts
        {
            public override short Get(int docID)
            {
                return 0;
            }
        }
    }

    /// <summary>Field values as 32-bit signed integers, addressable by document id.</summary>
    public abstract class Ints
    {
        /// <summary>Returns this field's value for the given document.</summary>
        public abstract int Get(int docID);

        /// <summary>Shared instance that returns zero for every document.</summary>
        public static readonly Ints EMPTY = new EmptyInts();

        public sealed class EmptyInts : Ints
        {
            public override int Get(int docID)
            {
                return 0;
            }
        }
    }

    /// <summary>Field values as 64-bit signed integers, addressable by document id.</summary>
    public abstract class Longs
    {
        /// <summary>Returns this field's value for the given document.</summary>
        public abstract long Get(int docID);

        /// <summary>Shared instance that returns zero for every document.</summary>
        public static readonly Longs EMPTY = new EmptyLongs();

        public sealed class EmptyLongs : Longs
        {
            public override long Get(int docID)
            {
                return 0;
            }
        }
    }

    /// <summary>Field values as 32-bit floats, addressable by document id.</summary>
    public abstract class Floats
    {
        /// <summary>Returns this field's value for the given document.</summary>
        public abstract float Get(int docID);

        /// <summary>Shared instance that returns zero for every document.</summary>
        public static readonly Floats EMPTY = new EmptyFloats();

        public sealed class EmptyFloats : Floats
        {
            public override float Get(int docID)
            {
                return 0;
            }
        }
    }

    /// <summary>Field values as 64-bit doubles, addressable by document id.</summary>
    public abstract class Doubles
    {
        /// <summary>Returns this field's value for the given document.</summary>
        public abstract double Get(int docID);

        /// <summary>Shared instance that returns zero for every document.</summary>
        public static readonly Doubles EMPTY = new EmptyDoubles();

        public sealed class EmptyDoubles : Doubles
        {
            public override double Get(int docID)
            {
                return 0;
            }
        }
    }

    /// <summary>Placeholder stored in the cache while an entry is still being computed.</summary>
    public sealed class CreationPlaceholder
    {
        internal object Value;
    }

    /// <summary>
    /// Base parser contract: supplies the <see cref="TermsEnum"/> the cache iterates when
    /// filling an entry, which lets individual parsers pre-filter the terms.
    /// </summary>
    public interface IParser
    {
        TermsEnum TermsEnum(Terms terms);
    }

    /// <summary>Parses a term into a single sbyte.</summary>
    public interface IByteParser : IParser
    {
        sbyte ParseByte(BytesRef term);
    }

    /// <summary>Parses a term into a short.</summary>
    public interface IShortParser : IParser
    {
        short ParseShort(BytesRef term);
    }

    /// <summary>Parses a term into an int.</summary>
    public interface IIntParser : IParser
    {
        int ParseInt(BytesRef term);
    }

    /// <summary>Parses a term into a float.</summary>
    public interface IFloatParser : IParser
    {
        float ParseFloat(BytesRef term);
    }

    /// <summary>Parses a term into a long.</summary>
    public interface ILongParser : IParser
    {
        long ParseLong(BytesRef term);
    }

    /// <summary>Parses a term into a double.</summary>
    public interface IDoubleParser : IParser
    {
        double ParseDouble(BytesRef term);
    }

    // NOTE(review): intentionally left non-readonly — presumably so tests/apps can
    // substitute their own cache implementation; confirm before sealing.
    public static IFieldCache DEFAULT = new FieldCacheImpl();

    /// <summary>Default parser: decodes the term's UTF-8 string form as an sbyte.</summary>
    public static readonly IByteParser DEFAULT_BYTE_PARSER = new AnonymousByteParser();

    private sealed class AnonymousByteParser : IByteParser
    {
        public sbyte ParseByte(BytesRef term)
        {
            // TODO: would be far better to directly parse from
            // UTF8 bytes... but really users should use
            // IntField, instead, which already decodes
            // directly from byte[]
            // Invariant culture: index terms are locale-neutral, so parsing must not
            // depend on the machine's regional settings (matches Java's parse semantics).
            return sbyte.Parse(term.Utf8ToString(), System.Globalization.CultureInfo.InvariantCulture);
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".DEFAULT_BYTE_PARSER";
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return terms.Iterator(null);
        }
    }

    /// <summary>Default parser: decodes the term's UTF-8 string form as a short.</summary>
    public static readonly IShortParser DEFAULT_SHORT_PARSER = new AnonymousShortParser();

    private sealed class AnonymousShortParser : IShortParser
    {
        public short ParseShort(BytesRef term)
        {
            // TODO: would be far better to directly parse from
            // UTF8 bytes... but really users should use
            // IntField, instead, which already decodes
            // directly from byte[]
            // Invariant culture keeps parsing locale-independent.
            return short.Parse(term.Utf8ToString(), System.Globalization.CultureInfo.InvariantCulture);
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".DEFAULT_SHORT_PARSER";
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return terms.Iterator(null);
        }
    }

    /// <summary>Default parser: decodes the term's UTF-8 string form as an int.</summary>
    public static readonly IIntParser DEFAULT_INT_PARSER = new AnonymousIntParser();

    private sealed class AnonymousIntParser : IIntParser
    {
        public int ParseInt(BytesRef term)
        {
            // TODO: would be far better to directly parse from
            // UTF8 bytes... but really users should use
            // IntField, instead, which already decodes
            // directly from byte[]
            // Invariant culture keeps parsing locale-independent.
            return int.Parse(term.Utf8ToString(), System.Globalization.CultureInfo.InvariantCulture);
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return terms.Iterator(null);
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".DEFAULT_INT_PARSER";
        }
    }

    /// <summary>Default parser: decodes the term's UTF-8 string form as a float.</summary>
    public static readonly IFloatParser DEFAULT_FLOAT_PARSER = new AnonymousFloatParser();

    private sealed class AnonymousFloatParser : IFloatParser
    {
        public float ParseFloat(BytesRef term)
        {
            // TODO: would be far better to directly parse from
            // UTF8 bytes... but really users should use
            // FloatField, instead, which already decodes
            // directly from byte[]
            // Invariant culture: in locales where ',' is the decimal separator the
            // culture-sensitive overload would misparse values like "1.5".
            return float.Parse(term.Utf8ToString(), System.Globalization.CultureInfo.InvariantCulture);
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return terms.Iterator(null);
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".DEFAULT_FLOAT_PARSER";
        }
    }

    /// <summary>Default parser: decodes the term's UTF-8 string form as a long.</summary>
    public static readonly ILongParser DEFAULT_LONG_PARSER = new AnonymousLongParser();

    private sealed class AnonymousLongParser : ILongParser
    {
        public long ParseLong(BytesRef term)
        {
            // TODO: would be far better to directly parse from
            // UTF8 bytes... but really users should use
            // LongField, instead, which already decodes
            // directly from byte[]
            // Invariant culture keeps parsing locale-independent.
            return long.Parse(term.Utf8ToString(), System.Globalization.CultureInfo.InvariantCulture);
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return terms.Iterator(null);
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".DEFAULT_LONG_PARSER";
        }
    }

    /// <summary>Default parser: decodes the term's UTF-8 string form as a double.</summary>
    public static readonly IDoubleParser DEFAULT_DOUBLE_PARSER = new AnonymousDoubleParser();

    private sealed class AnonymousDoubleParser : IDoubleParser
    {
        public double ParseDouble(BytesRef term)
        {
            // TODO: would be far better to directly parse from
            // UTF8 bytes... but really users should use
            // DoubleField, instead, which already decodes
            // directly from byte[]
            // Invariant culture: in locales where ',' is the decimal separator the
            // culture-sensitive overload would misparse values like "1.5".
            return double.Parse(term.Utf8ToString(), System.Globalization.CultureInfo.InvariantCulture);
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return terms.Iterator(null);
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".DEFAULT_DOUBLE_PARSER";
        }
    }

    /// <summary>Parser for ints encoded with NumericUtils prefix coding.</summary>
    public static readonly IIntParser NUMERIC_UTILS_INT_PARSER = new AnonymousNumericUtilsIntParser();

    private sealed class AnonymousNumericUtilsIntParser : IIntParser
    {
        public int ParseInt(BytesRef term)
        {
            return NumericUtils.PrefixCodedToInt(term);
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            // Filter out the lower-precision helper terms that numeric fields also index.
            return NumericUtils.FilterPrefixCodedInts(terms.Iterator(null));
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".NUMERIC_UTILS_INT_PARSER";
        }
    }

    /// <summary>Parser for floats encoded with NumericUtils prefix coding.</summary>
    public static readonly IFloatParser NUMERIC_UTILS_FLOAT_PARSER = new AnonymousNumericUtilsFloatParser();

    private sealed class AnonymousNumericUtilsFloatParser : IFloatParser
    {
        public float ParseFloat(BytesRef term)
        {
            return NumericUtils.SortableIntToFloat(NumericUtils.PrefixCodedToInt(term));
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".NUMERIC_UTILS_FLOAT_PARSER";
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return NumericUtils.FilterPrefixCodedInts(terms.Iterator(null));
        }
    }

    /// <summary>Parser for longs encoded with NumericUtils prefix coding.</summary>
    public static readonly ILongParser NUMERIC_UTILS_LONG_PARSER = new AnonymousNumericUtilsLongParser();

    private sealed class AnonymousNumericUtilsLongParser : ILongParser
    {
        public long ParseLong(BytesRef term)
        {
            return NumericUtils.PrefixCodedToLong(term);
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".NUMERIC_UTILS_LONG_PARSER";
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return NumericUtils.FilterPrefixCodedLongs(terms.Iterator(null));
        }
    }

    /// <summary>Parser for doubles encoded with NumericUtils prefix coding.</summary>
    public static readonly IDoubleParser NUMERIC_UTILS_DOUBLE_PARSER = new AnonymousNumericUtilsDoubleParser();

    private sealed class AnonymousNumericUtilsDoubleParser : IDoubleParser
    {
        public double ParseDouble(BytesRef term)
        {
            return NumericUtils.SortableLongToDouble(NumericUtils.PrefixCodedToLong(term));
        }

        public override string ToString()
        {
            return typeof(IFieldCache).FullName + ".NUMERIC_UTILS_DOUBLE_PARSER";
        }

        public TermsEnum TermsEnum(Terms terms)
        {
            return NumericUtils.FilterPrefixCodedLongs(terms.Iterator(null));
        }
    }

    // .NET Port: skipping down to about line 681 of java version. The actual interface methods of FieldCache are in IFieldCache below.

    /// <summary>
    /// EXPERT: A unique identifier/description for each item in the FieldCache.
    /// Can be useful for logging/debugging.
    /// @lucene.experimental
    /// </summary>
    public sealed class CacheEntry
    {
        private readonly object readerKey;
        private readonly string fieldName;
        private readonly Type cacheType;
        private readonly object custom;
        private readonly object value;
        // Human-readable size estimate; null until EstimateSize() has been called.
        private string size;

        public CacheEntry(object readerKey, string fieldName,
                          Type cacheType,
                          object custom,
                          object value)
        {
            this.readerKey = readerKey;
            this.fieldName = fieldName;
            this.cacheType = cacheType;
            this.custom = custom;
            this.value = value;
        }

        public object ReaderKey
        {
            get { return readerKey; }
        }

        public string FieldName
        {
            get { return fieldName; }
        }

        public Type CacheType
        {
            get { return cacheType; }
        }

        public object Custom
        {
            get { return custom; }
        }

        public object Value
        {
            get { return value; }
        }

        /// <summary>Computes (and stores) the estimated size of the cached value.</summary>
        public void EstimateSize()
        {
            long bytesUsed = RamUsageEstimator.SizeOf(Value);
            size = RamUsageEstimator.HumanReadableUnits(bytesUsed);
        }

        /// <summary>
        /// The most recently estimated size of the value; null unless
        /// <see cref="EstimateSize"/> has been called.
        /// </summary>
        public string EstimatedSize
        {
            get { return size; }
        }

        public override string ToString()
        {
            StringBuilder b = new StringBuilder();
            b.Append("'").Append(ReaderKey).Append("'=>");
            b.Append("'").Append(FieldName).Append("',");
            b.Append(CacheType).Append(",").Append(Custom);
            b.Append("=>").Append(Value.GetType().FullName).Append("#");
            b.Append(RuntimeHelpers.GetHashCode(Value));
            string s = EstimatedSize;
            if (null != s)
            {
                b.Append(" (size =~ ").Append(s).Append(')');
            }
            return b.ToString();
        }
    }
}
/*LUCENE TO-DO refactoring because of enum nonsense
public static readonly FieldCache DEFAULT = new FieldCacheImpl();
private class FieldCache_ByteParserAnonymousInnerClassHelper : FieldCache_ByteParser
{
public FieldCache_ByteParserAnonymousInnerClassHelper()
{
}
public virtual sbyte ParseByte(BytesRef term)
{
// TODO: would be far better to directly parse from
// UTF8 bytes... but really users should use
// IntField, instead, which already decodes
// directly from byte[]
return (sbyte)Convert.ToByte(term.Utf8ToString());
}
public override string ToString()
{
return typeof(FieldCache).Name + ".DEFAULT_BYTE_PARSER";
}
public virtual TermsEnum TermsEnum(Terms terms)
{
return terms.Iterator(null);
}
}
private class FieldCache_ShortParserAnonymousInnerClassHelper : FieldCache_ShortParser
{
public FieldCache_ShortParserAnonymousInnerClassHelper()
{
}
public virtual short ParseShort(BytesRef term)
{
// TODO: would be far better to directly parse from
// UTF8 bytes... but really users should use
// IntField, instead, which already decodes
// directly from byte[]
return Convert.ToInt16(term.Utf8ToString());
}
public override string ToString()
{
return typeof(FieldCache).Name + ".DEFAULT_SHORT_PARSER";
}
public virtual TermsEnum TermsEnum(Terms terms)
{
return terms.Iterator(null);
}
}
private class FieldCache_IntParserAnonymousInnerClassHelper : FieldCache_IntParser
{
public FieldCache_IntParserAnonymousInnerClassHelper()
{
}
public virtual int ParseInt(BytesRef term)
{
// TODO: would be far better to directly parse from
// UTF8 bytes... but really users should use
// IntField, instead, which already decodes
// directly from byte[]
return Convert.ToInt32(term.Utf8ToString());
}
public virtual TermsEnum TermsEnum(Terms terms)
{
return terms.Iterator(null);
}
public override string ToString()
{
return typeof(FieldCache).Name + ".DEFAULT_INT_PARSER";
}
}
private class FieldCache_FloatParserAnonymousInnerClassHelper : FieldCache_FloatParser
{
public FieldCache_FloatParserAnonymousInnerClassHelper()
{
}
public virtual float ParseFloat(BytesRef term)
{
// TODO: would be far better to directly parse from
// UTF8 bytes... but really users should use
// FloatField, instead, which already decodes
// directly from byte[]
return Convert.ToSingle(term.Utf8ToString());
}
public virtual TermsEnum TermsEnum(Terms terms)
{
return terms.Iterator(null);
}
public override string ToString()
{
return typeof(FieldCache).Name + ".DEFAULT_FLOAT_PARSER";
}
}
private class FieldCache_LongParserAnonymousInnerClassHelper : FieldCache_LongParser
{
public FieldCache_LongParserAnonymousInnerClassHelper()
{
}
public virtual long ParseLong(BytesRef term)
{
// TODO: would be far better to directly parse from
// UTF8 bytes... but really users should use
// LongField, instead, which already decodes
// directly from byte[]
return Convert.ToInt64(term.Utf8ToString());
}
public virtual TermsEnum TermsEnum(Terms terms)
{
return terms.Iterator(null);
}
public override string ToString()
{
return typeof(FieldCache).Name + ".DEFAULT_LONG_PARSER";
}
}
private class FieldCache_DoubleParserAnonymousInnerClassHelper : FieldCache_DoubleParser
{
public FieldCache_DoubleParserAnonymousInnerClassHelper()
{
}
public virtual double ParseDouble(BytesRef term)
{
// TODO: would be far better to directly parse from
// UTF8 bytes... but really users should use
// DoubleField, instead, which already decodes
// directly from byte[]
return Convert.ToDouble(term.Utf8ToString());
}
public virtual TermsEnum TermsEnum(Terms terms)
{
return terms.Iterator(null);
}
public override string ToString()
{
return typeof(FieldCache).Name + ".DEFAULT_DOUBLE_PARSER";
}
}
private class FieldCache_IntParserAnonymousInnerClassHelper2 : FieldCache_IntParser
{
public FieldCache_IntParserAnonymousInnerClassHelper2()
{
}
public override int ParseInt(BytesRef term)
{
return NumericUtils.PrefixCodedToInt(term);
}
public override TermsEnum TermsEnum(Terms terms)
{
return NumericUtils.FilterPrefixCodedInts(terms.Iterator(null));
}
public override string ToString()
{
return typeof(FieldCache).Name + ".NUMERIC_UTILS_INT_PARSER";
}
}
private class FieldCache_FloatParserAnonymousInnerClassHelper2 : FieldCache_FloatParser
{
public FieldCache_FloatParserAnonymousInnerClassHelper2()
{
}
public override float ParseFloat(BytesRef term)
{
return NumericUtils.SortableIntToFloat(NumericUtils.PrefixCodedToInt(term));
}
public override string ToString()
{
return typeof(FieldCache).Name + ".NUMERIC_UTILS_FLOAT_PARSER";
}
public override TermsEnum TermsEnum(Terms terms)
{
return NumericUtils.FilterPrefixCodedInts(terms.Iterator(null));
}
}
private class FieldCache_LongParserAnonymousInnerClassHelper2 : FieldCache_LongParser
{
public FieldCache_LongParserAnonymousInnerClassHelper2()
{
}
public override long ParseLong(BytesRef term)
{
return NumericUtils.PrefixCodedToLong(term);
}
public override string ToString()
{
return typeof(FieldCache).Name + ".NUMERIC_UTILS_LONG_PARSER";
}
public override TermsEnum TermsEnum(Terms terms)
{
return NumericUtils.FilterPrefixCodedLongs(terms.Iterator(null));
}
}
private class FieldCache_DoubleParserAnonymousInnerClassHelper2 : FieldCache_DoubleParser
{
public FieldCache_DoubleParserAnonymousInnerClassHelper2()
{
}
public override double ParseDouble(BytesRef term)
{
return NumericUtils.SortableLongToDouble(NumericUtils.PrefixCodedToLong(term));
}
public override string ToString()
{
return typeof(FieldCache).Name + ".NUMERIC_UTILS_DOUBLE_PARSER";
}
public override TermsEnum TermsEnum(Terms terms)
{
return NumericUtils.FilterPrefixCodedLongs(terms.Iterator(null));
}
}
}
public abstract class FieldCache_Bytes
{
/// <summary>
/// Return a single Byte representation of this field's value. </summary>
public abstract sbyte Get(int docID);
/// <summary>
/// Zero value for every document </summary>
public static readonly FieldCache_Bytes EMPTY = new FieldCache_BytesAnonymousInnerClassHelper();
private class FieldCache_BytesAnonymousInnerClassHelper : FieldCache_Bytes
{
public FieldCache_BytesAnonymousInnerClassHelper()
{
}
public override sbyte Get(int docID)
{
return 0;
}
}
}
public abstract class FieldCache_Shorts
{
/// <summary>
/// Return a short representation of this field's value. </summary>
public abstract short Get(int docID);
/// <summary>
/// Zero value for every document </summary>
public static readonly FieldCache_Shorts EMPTY = new FieldCache_ShortsAnonymousInnerClassHelper();
private class FieldCache_ShortsAnonymousInnerClassHelper : FieldCache_Shorts
{
public FieldCache_ShortsAnonymousInnerClassHelper()
{
}
public override short Get(int docID)
{
return 0;
}
}
}
public abstract class FieldCache_Ints
{
/// <summary>
/// Return an integer representation of this field's value. </summary>
public abstract int Get(int docID);
/// <summary>
/// Zero value for every document </summary>
public static readonly FieldCache_Ints EMPTY = new FieldCache_IntsAnonymousInnerClassHelper();
private class FieldCache_IntsAnonymousInnerClassHelper : FieldCache_Ints
{
public FieldCache_IntsAnonymousInnerClassHelper()
{
}
public override int Get(int docID)
{
return 0;
}
}
}
public abstract class FieldCache_Longs
{
/// <summary>
/// Return an long representation of this field's value. </summary>
public abstract long Get(int docID);
/// <summary>
/// Zero value for every document </summary>
public static readonly FieldCache_Longs EMPTY = new FieldCache_LongsAnonymousInnerClassHelper();
private class FieldCache_LongsAnonymousInnerClassHelper : FieldCache_Longs
{
public FieldCache_LongsAnonymousInnerClassHelper()
{
}
public override long Get(int docID)
{
return 0;
}
}
}
public abstract class FieldCache_Floats
{
/// <summary>
/// Return an float representation of this field's value. </summary>
public abstract float Get(int docID);
/// <summary>
/// Zero value for every document </summary>
public static readonly FieldCache_Floats EMPTY = new FieldCache_FloatsAnonymousInnerClassHelper();
private class FieldCache_FloatsAnonymousInnerClassHelper : FieldCache_Floats
{
public FieldCache_FloatsAnonymousInnerClassHelper()
{
}
public override float Get(int docID)
{
return 0;
}
}
}
public abstract class FieldCache_Doubles
{
/// <summary>
/// Return an double representation of this field's value. </summary>
public abstract double Get(int docID);
/// <summary>
/// Zero value for every document </summary>
public static readonly FieldCache_Doubles EMPTY = new FieldCache_DoublesAnonymousInnerClassHelper();
private class FieldCache_DoublesAnonymousInnerClassHelper : FieldCache_Doubles
{
public FieldCache_DoublesAnonymousInnerClassHelper()
{
}
public override double Get(int docID)
{
return 0;
}
}
}
public sealed class FieldCache_CreationPlaceholder
{
internal object Value;
}
public interface FieldCache_Parser
{
/// <summary>
/// Pulls a <seealso cref="TermsEnum"/> from the given <seealso cref="Terms"/>. this method allows certain parsers
/// to filter the actual TermsEnum before the field cache is filled.
/// </summary>
/// <param name="terms"> the <seealso cref="Terms"/> instance to create the <seealso cref="TermsEnum"/> from. </param>
/// <returns> a possibly filtered <seealso cref="TermsEnum"/> instance, this method must not return <code>null</code>. </returns>
/// <exception cref="IOException"> if an <seealso cref="IOException"/> occurs </exception>
TermsEnum TermsEnum(Terms terms);
}
[Obsolete]
public interface FieldCache_ByteParser : FieldCache_Parser
{
/// <summary>
/// Return a single Byte representation of this field's value. </summary>
sbyte ParseByte(BytesRef term);
}
[Obsolete]
public interface FieldCache_ShortParser : FieldCache_Parser
{
/// <summary>
/// Return a short representation of this field's value. </summary>
short ParseShort(BytesRef term);
}
public interface FieldCache_IntParser : FieldCache_Parser
{
/// <summary>
/// Return an integer representation of this field's value. </summary>
int ParseInt(BytesRef term);
}
public interface FieldCache_FloatParser : FieldCache_Parser
{
/// <summary>
/// Return an float representation of this field's value. </summary>
float ParseFloat(BytesRef term);
}
public interface FieldCache_LongParser : FieldCache_Parser
{
/// <summary>
/// Return an long representation of this field's value. </summary>
long ParseLong(BytesRef term);
}
public interface FieldCache_DoubleParser : FieldCache_Parser
{
/// <summary>
/// Return an double representation of this field's value. </summary>
double ParseDouble(BytesRef term);
}
public sealed class FieldCache_CacheEntry
{
private readonly object readerKey;
private readonly string fieldName;
private readonly Type cacheType;
private readonly object custom;
private readonly object value;
private string Size;
public FieldCache_CacheEntry(object readerKey, string fieldName, Type cacheType, object custom, object value)
{
this.readerKey = readerKey;
this.fieldName = fieldName;
this.cacheType = cacheType;
this.custom = custom;
this.value = value;
}
public object ReaderKey
{
get
{
return readerKey;
}
}
public string FieldName
{
get
{
return fieldName;
}
}
public Type CacheType
{
get
{
return cacheType;
}
}
public object Custom
{
get
{
return custom;
}
}
public object Value
{
get
{
return value;
}
}
/// <summary>
/// Computes (and stores) the estimated size of the cache Value </summary>
/// <seealso cref= #getEstimatedSize </seealso>
public void EstimateSize()
{
long bytesUsed = RamUsageEstimator.SizeOf(Value);
Size = RamUsageEstimator.HumanReadableUnits(bytesUsed);
}
/// <summary>
/// The most recently estimated size of the value, null unless
/// estimateSize has been called.
/// </summary>
public string EstimatedSize
{
get
{
return Size;
}
}
public override string ToString()
{
StringBuilder b = new StringBuilder();
b.Append("'").Append(ReaderKey).Append("'=>");
b.Append("'").Append(FieldName).Append("',");
b.Append(CacheType).Append(",").Append(Custom);
b.Append("=>").Append(Value.GetType().Name).Append("#");
b.Append(System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(Value));
string s = EstimatedSize;
if (null != s)
{
b.Append(" (size =~ ").Append(s).Append(')');
}
return b.ToString();
}
}*/
}
| |
/*
*
* fuzzynet: Fuzzy Logic Library for Microsoft .NET
* Copyright (C) 2008 Dmitry Kaluzhny (kaluzhny_dmitrie@mail.ru)
*
* */
using System;
using System.Collections.Generic;
namespace AI.Fuzzy.Library
{
/// <summary>
/// Sugeno fuzzy inference system
/// </summary>
public class SugenoFuzzySystem : GenericFuzzySystem
{
List<SugenoVariable> _output = new List<SugenoVariable>();
List<SugenoFuzzyRule> _rules = new List<SugenoFuzzyRule>();
/// <summary>
/// Default constructor
/// </summary>
public SugenoFuzzySystem()
{}
/// <summary>
/// Output of the system
/// </summary>
public List<SugenoVariable> Output
{
get { return _output; }
}
/// <summary>
/// List of rules of the system
/// </summary>
public List<SugenoFuzzyRule> Rules
{
get { return _rules; }
}
/// <summary>
/// Get the output variable of the system by name
/// </summary>
/// <param name="name">Name of the variable</param>
/// <returns>Found variable</returns>
public SugenoVariable OutputByName(string name)
{
foreach (SugenoVariable var in _output)
{
if (var.Name == name)
{
return var;
}
}
throw new KeyNotFoundException();
}
/// <summary>
/// Use this method to create a linear function for the Sugeno fuzzy system
/// </summary>
/// <param name="name">Name of the function</param>
/// <param name="coeffs">List of coefficients. List length must be less or equal to the input lenght.</param>
/// <param name="constValue"></param>
/// <returns>Created function</returns>
public LinearSugenoFunction CreateSugenoFunction(string name, Dictionary<FuzzyVariable, double> coeffs, double constValue)
{
return new LinearSugenoFunction(name, this.Input, coeffs, constValue);
}
/// <summary>
/// Use this method to create a linear function for the Sugeno fuzzy system
/// </summary>
/// <param name="name">Name of the function</param>
/// <param name="coeffs">List of coefficients. List length must be less or equal to the input lenght.</param>
/// <returns>Created function</returns>
public LinearSugenoFunction CreateSugenoFunction(string name, double[] coeffs)
{
return new LinearSugenoFunction(name, this.Input, coeffs);
}
/// <summary>
/// Use this method to create an empty rule for the system
/// </summary>
/// <returns>Created rule</returns>
public SugenoFuzzyRule EmptyRule()
{
return new SugenoFuzzyRule();
}
/// <summary>
/// Use this method to create rule by its textual representation
/// </summary>
/// <param name="rule">Rule in text form</param>
/// <returns>Created rule</returns>
public SugenoFuzzyRule ParseRule(string rule)
{
return RuleParser<SugenoFuzzyRule, SugenoVariable, ISugenoFunction>.Parse(rule, EmptyRule(), Input, Output);
}
/// <summary>
/// Evaluate conditions
/// </summary>
/// <param name="fuzzifiedInput">Input in fuzzified form</param>
/// <returns>Result of evaluation</returns>
public Dictionary<SugenoFuzzyRule, double> EvaluateConditions(Dictionary<FuzzyVariable, Dictionary<FuzzyTerm, double>> fuzzifiedInput)
{
Dictionary<SugenoFuzzyRule, double> result = new Dictionary<SugenoFuzzyRule, double>();
foreach (SugenoFuzzyRule rule in Rules)
{
result.Add(rule, EvaluateCondition(rule.Condition, fuzzifiedInput));
}
return result;
}
/// <summary>
/// Calculate functions' results
/// </summary>
/// <param name="inputValues">Input values</param>
/// <returns>Results</returns>
public Dictionary<SugenoVariable, Dictionary<ISugenoFunction, double>> EvaluateFunctions(Dictionary<FuzzyVariable, double> inputValues)
{
Dictionary<SugenoVariable, Dictionary<ISugenoFunction, double>> result = new Dictionary<SugenoVariable, Dictionary<ISugenoFunction, double>>();
foreach (SugenoVariable var in Output)
{
Dictionary<ISugenoFunction, double> varResult = new Dictionary<ISugenoFunction, double>();
foreach (ISugenoFunction func in var.Functions)
{
varResult.Add(func, func.Evaluate(inputValues));
}
result.Add(var, varResult);
}
return result;
}
/// <summary>
/// Combines the per-rule firing strengths with the function outputs into one
/// crisp value per output variable (weighted average, Sugeno defuzzification).
/// </summary>
/// <param name="ruleWeights">Rule weights (results of condition evaluation)</param>
/// <param name="functionResults">Result of functions evaluation</param>
/// <returns>Result of calculations</returns>
public Dictionary<SugenoVariable, double> CombineResult(Dictionary<SugenoFuzzyRule, double> ruleWeights, Dictionary<SugenoVariable, Dictionary<ISugenoFunction, double>> functionResults)
{
    var numerators = new Dictionary<SugenoVariable, double>();
    var denominators = new Dictionary<SugenoVariable, double>();

    // Seed every output variable with zero so variables no rule fires for
    // still show up in the result.
    foreach (SugenoVariable outputVariable in Output)
    {
        numerators.Add(outputVariable, 0.0);
        denominators.Add(outputVariable, 0.0);
    }

    // Accumulate weighted function values (numerator) and weights (denominator).
    foreach (KeyValuePair<SugenoFuzzyRule, double> entry in ruleWeights)
    {
        SugenoFuzzyRule rule = entry.Key;
        double weight = entry.Value;
        SugenoVariable outputVariable = rule.Conclusion.Var;
        double functionValue = functionResults[outputVariable][rule.Conclusion.Term];
        numerators[outputVariable] += functionValue * weight;
        denominators[outputVariable] += weight;
    }

    // Weighted average per output; 0.0 when nothing fired (avoids division by zero).
    var results = new Dictionary<SugenoVariable, double>();
    foreach (SugenoVariable outputVariable in Output)
    {
        double denominator = denominators[outputVariable];
        results[outputVariable] = denominator == 0.0 ? 0.0 : numerators[outputVariable] / denominator;
    }
    return results;
}
/// <summary>
/// Calculate output of fuzzy system
/// </summary>
/// <param name="inputValues">Input values</param>
/// <returns>Output values</returns>
/// <exception cref="InvalidOperationException">Thrown when the rule base is empty.</exception>
public Dictionary<SugenoVariable, double> Calculate(Dictionary<FuzzyVariable, double> inputValues)
{
    //
    // There should be one rule as minimum
    //
    if (_rules.Count == 0)
    {
        // InvalidOperationException (rather than bare Exception) so callers can
        // distinguish a misconfigured rule base from unexpected failures.
        throw new InvalidOperationException("There should be one rule as minimum.");
    }
    //
    // Fuzzification step: crisp inputs -> membership degree of each term
    //
    Dictionary<FuzzyVariable, Dictionary<FuzzyTerm, double>> fuzzifiedInput =
        Fuzzify(inputValues);
    //
    // Evaluate the conditions (firing strength of each rule)
    //
    Dictionary<SugenoFuzzyRule, double> ruleWeights = EvaluateConditions(fuzzifiedInput);
    //
    // Functions evaluation on the crisp input
    //
    Dictionary<SugenoVariable, Dictionary<ISugenoFunction, double>> functionsResult = EvaluateFunctions(inputValues);
    //
    // Combine output: weighted average of function results by rule weights
    //
    Dictionary<SugenoVariable, double> result = CombineResult(ruleWeights, functionsResult);
    return result;
}
}
}
| |
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
// PARTICULAR PURPOSE.
//
// Copyright (c) Microsoft Corporation. All rights reserved
using System;
using System.Collections.Generic;
using Prism.Interfaces;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Navigation;
namespace Prism
{
/// <summary>
/// Abstracts the Windows.UI.Xaml.Controls.Frame object for use by apps that derive from the MvvmAppBase class.
/// </summary>
public class FrameFacadeAdapter : IFrameFacade
{
    private readonly Windows.UI.Xaml.Controls.Frame _frame;

    // Subscribers are tracked locally so the facade only hooks the underlying
    // Frame while at least one subscriber exists, and duplicate subscriptions
    // are ignored.
    private readonly List<EventHandler<MvvmNavigatedEventArgs>> _navigatedEventHandlers = new List<EventHandler<MvvmNavigatedEventArgs>>();
    private readonly List<EventHandler> _navigatingEventHandlers = new List<EventHandler>();

    /// <summary>
    /// Initializes a new instance of the <see cref="FrameFacadeAdapter"/> class.
    /// </summary>
    /// <param name="frame">The Frame that will be wrapped.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="frame"/> is null.</exception>
    public FrameFacadeAdapter(Windows.UI.Xaml.Controls.Frame frame)
    {
        if (frame == null)
            throw new ArgumentNullException("frame");
        _frame = frame;
    }

    /// <summary>
    /// Gets or sets the content of a ContentControl.
    /// </summary>
    ///
    /// <returns>
    /// An object that contains the control's content. The default is null.
    /// </returns>
    public object Content
    {
        get { return _frame.Content; }
        set { _frame.Content = value; }
    }

    /// <summary>
    /// Navigates to the most recent item in back navigation history, if a Frame manages its own navigation history.
    /// </summary>
    public void GoBack()
    {
        _frame.GoBack();
    }

    /// <summary>
    /// Serializes the Frame's navigation history.
    /// </summary>
    /// <returns>
    /// The string-form serialized navigation history. See Remarks.
    /// </returns>
    public string GetNavigationState()
    {
        var navigationState = _frame.GetNavigationState();
        return navigationState;
    }

    /// <summary>
    /// Reads and restores the navigation history of a Frame from a provided serialization string.
    /// </summary>
    /// <param name="navigationState">The serialization string that supplies the restore point for navigation history.</param>
    public void SetNavigationState(string navigationState)
    {
        _frame.SetNavigationState(navigationState);
    }

    /// <summary>
    /// Navigates to a page of the requested type.
    /// </summary>
    /// <param name="sourcePageType">The type of the page that will be navigated to.</param>
    /// <param name="parameter">The page's navigation parameter.</param>
    ///
    /// <returns>True if navigation was successful; false otherwise.</returns>
    public bool Navigate(Type sourcePageType, object parameter)
    {
        return _frame.Navigate(sourcePageType, parameter);
    }

    /// <summary>
    /// Gets the number of entries in the navigation back stack.
    /// </summary>
    ///
    /// <returns>
    /// The number of entries in the navigation back stack.
    /// </returns>
    public int BackStackDepth
    {
        get { return _frame.BackStackDepth; }
    }

    /// <summary>
    /// Gets a value that indicates whether there is at least one entry in back navigation history.
    /// </summary>
    ///
    /// <returns>
    /// True if there is at least one entry in back navigation history; false if there are no entries in back navigation history or the Frame does not own its own navigation history.
    /// </returns>
    public bool CanGoBack
    {
        get { return _frame.CanGoBack; }
    }

    /// <summary>
    /// Occurs when the content that is being navigated to has been found and is available from the Content property, although it may not have completed loading.
    /// </summary>
    public event EventHandler<MvvmNavigatedEventArgs> Navigated
    {
        add
        {
            if (_navigatedEventHandlers.Contains(value)) return;
            _navigatedEventHandlers.Add(value);
            // Hook the Frame only for the first subscriber.
            if (_navigatedEventHandlers.Count == 1)
            {
                _frame.Navigated += FacadeNavigatedEventHandler;
            }
        }
        remove
        {
            if (!_navigatedEventHandlers.Contains(value)) return;
            _navigatedEventHandlers.Remove(value);
            // Unhook the Frame when the last subscriber goes away.
            if (_navigatedEventHandlers.Count == 0)
            {
                _frame.Navigated -= FacadeNavigatedEventHandler;
            }
        }
    }

    /// <summary>
    /// Occurs when a new navigation is requested.
    /// </summary>
    public event EventHandler Navigating
    {
        add
        {
            if (_navigatingEventHandlers.Contains(value)) return;
            _navigatingEventHandlers.Add(value);
            if (_navigatingEventHandlers.Count == 1)
            {
                _frame.Navigating += FacadeNavigatingCancelEventHandler;
            }
        }
        remove
        {
            if (!_navigatingEventHandlers.Contains(value)) return;
            _navigatingEventHandlers.Remove(value);
            if (_navigatingEventHandlers.Count == 0)
            {
                _frame.Navigating -= FacadeNavigatingCancelEventHandler;
            }
        }
    }

    /// <summary>
    /// Returns the current effective value of a dependency property from a DependencyObject.
    /// </summary>
    ///
    /// <returns>
    /// Returns the current effective value.
    /// </returns>
    /// <param name="dependencyProperty">The DependencyProperty identifier of the property for which to retrieve the value.</param>
    public object GetValue(DependencyProperty dependencyProperty)
    {
        return _frame.GetValue(dependencyProperty);
    }

    /// <summary>
    /// Sets the local value of a dependency property on a DependencyObject.
    /// </summary>
    /// <param name="dependencyProperty">The identifier of the dependency property to set.</param><param name="value">The new local value.</param>
    public void SetValue(DependencyProperty dependencyProperty, object value)
    {
        _frame.SetValue(dependencyProperty, value);
    }

    /// <summary>
    /// Clears the local value of a dependency property.
    /// </summary>
    /// <param name="dependencyProperty">The DependencyProperty identifier of the property for which to clear the value.</param>
    public void ClearValue(DependencyProperty dependencyProperty)
    {
        _frame.ClearValue(dependencyProperty);
    }

    private void FacadeNavigatedEventHandler(object sender, NavigationEventArgs e)
    {
        // Snapshot the list: a handler may subscribe/unsubscribe during the
        // callback, which would invalidate an enumerator over the live list.
        foreach (var handler in _navigatedEventHandlers.ToArray())
        {
            var eventArgs = new MvvmNavigatedEventArgs()
            {
                NavigationMode = e.NavigationMode,
                Parameter = e.Parameter
            };
            handler(this, eventArgs);
        }
    }

    private void FacadeNavigatingCancelEventHandler(object sender, NavigatingCancelEventArgs e)
    {
        // Snapshot for the same re-entrancy reason as above; EventArgs.Empty
        // avoids allocating an empty args object per handler.
        foreach (var handler in _navigatingEventHandlers.ToArray())
        {
            handler(this, EventArgs.Empty);
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text;
using ILCompiler.DependencyAnalysisFramework;
using Internal.Text;
using Internal.TypeSystem;
using Internal.Runtime;
using Internal.IL;
using Internal.NativeFormat;
namespace ILCompiler.DependencyAnalysis
{
public abstract partial class NodeFactory
{
// Backing state captured at construction time.
private TargetDetails _target;                          // taken from context.Target
private CompilerTypeSystemContext _context;
private CompilationModuleGroup _compilationModuleGroup;
private bool _markingComplete;                          // set by SetMarkingComplete()

/// <summary>
/// Creates the factory. CreateNodeCaches installs the per-key node factory
/// delegates before any node accessor can run.
/// </summary>
public NodeFactory(CompilerTypeSystemContext context, CompilationModuleGroup compilationModuleGroup,
    MetadataManager metadataManager, NameMangler nameMangler)
{
    _target = context.Target;
    _context = context;
    _compilationModuleGroup = compilationModuleGroup;
    NameMangler = nameMangler;
    InteropStubManager = new InteropStubManager(compilationModuleGroup, context, new InteropStateManager(compilationModuleGroup.GeneratedAssembly));
    CreateNodeCaches();
    MetadataManager = metadataManager;
    ThreadStaticsRegion = new ThreadStaticsRegionNode(
        "__ThreadStaticRegionStart", "__ThreadStaticRegionEnd", null, _target.Abi);
}
/// <summary>Records that dependency-graph marking has finished.</summary>
public void SetMarkingComplete() => _markingComplete = true;
/// <summary>True once <see cref="SetMarkingComplete"/> has been called.</summary>
public bool MarkingComplete
{
    get { return _markingComplete; }
}
/// <summary>Target platform details this compilation is producing code for.</summary>
public TargetDetails Target => _target;
/// <summary>The module group that decides what is compiled locally vs. imported.</summary>
public CompilationModuleGroup CompilationModuleGroup => _compilationModuleGroup;
/// <summary>The type system context supplied at construction.</summary>
public CompilerTypeSystemContext TypeSystemContext => _context;
// Set once by the constructor; read-only afterwards.
public MetadataManager MetadataManager
{
    get;
}
// Produces mangled symbol names; set once by the constructor.
public NameMangler NameMangler
{
    get;
}
// Set once by the constructor.
public InteropStubManager InteropStubManager
{
    get;
}
// Temporary knob that lets CppCodegen disable features that are not fully
// implemented there yet; the base factory never opts in.
public virtual bool IsCppCodegenTemporaryWorkaround => false;
/// <summary>
/// Return true if the type is not permitted by the rules of the runtime to have an EEType.
/// The implementation here is not intended to be complete, but represents many conditions
/// which make a type ineligible to be an EEType. (This function is intended for use in assertions only)
/// </summary>
private static bool TypeCannotHaveEEType(TypeDesc type)
{
    return type.GetTypeDefinition() is INonEmittableType
        || type.IsRuntimeDeterminedSubtype
        || type.IsSignatureVariable
        || type.IsGenericParameter;
}
/// <summary>
/// Thread-safe memoizing wrapper: the supplied delegate creates the value for a
/// key the first time it is requested; subsequent lookups return the cached value.
/// </summary>
protected struct NodeCache<TKey, TValue>
{
    private Func<TKey, TValue> _factory;
    private ConcurrentDictionary<TKey, TValue> _entries;

    public NodeCache(Func<TKey, TValue> creator, IEqualityComparer<TKey> comparer)
    {
        _factory = creator;
        _entries = new ConcurrentDictionary<TKey, TValue>(comparer);
    }

    public NodeCache(Func<TKey, TValue> creator)
    {
        _factory = creator;
        _entries = new ConcurrentDictionary<TKey, TValue>();
    }

    public TValue GetOrAdd(TKey key) => _entries.GetOrAdd(key, _factory);
}
/// <summary>
/// Installs the factory delegate for every node cache. Runs once, from the
/// constructor; each delegate builds the dependency-graph node for a key the
/// first time that key is requested through the corresponding accessor.
/// </summary>
private void CreateNodeCaches()
{
    // --- EEType symbols ---------------------------------------------------
    _typeSymbols = new NodeCache<TypeDesc, IEETypeNode>((TypeDesc type) =>
    {
        Debug.Assert(!_compilationModuleGroup.ShouldReferenceThroughImportTable(type));
        if (_compilationModuleGroup.ContainsType(type))
        {
            if (type.IsGenericDefinition)
            {
                return new GenericDefinitionEETypeNode(this, type);
            }
            else if (type.IsCanonicalDefinitionType(CanonicalFormKind.Any))
            {
                return new CanonicalDefinitionEETypeNode(this, type);
            }
            else if (type.IsCanonicalSubtype(CanonicalFormKind.Any))
            {
                return new NecessaryCanonicalEETypeNode(this, type);
            }
            else
            {
                return new EETypeNode(this, type);
            }
        }
        else
        {
            // Type is compiled elsewhere; reference it by name.
            return new ExternEETypeSymbolNode(this, type);
        }
    });
    _constructedTypeSymbols = new NodeCache<TypeDesc, IEETypeNode>((TypeDesc type) =>
    {
        // Canonical definition types are *not* constructed types (call NecessaryTypeSymbol to get them)
        Debug.Assert(!type.IsCanonicalDefinitionType(CanonicalFormKind.Any));
        Debug.Assert(!_compilationModuleGroup.ShouldReferenceThroughImportTable(type));
        if (_compilationModuleGroup.ContainsType(type))
        {
            if (type.IsCanonicalSubtype(CanonicalFormKind.Any))
            {
                return new CanonicalEETypeNode(this, type);
            }
            else
            {
                return new ConstructedEETypeNode(this, type);
            }
        }
        else
        {
            return new ExternEETypeSymbolNode(this, type);
        }
    });
    _clonedTypeSymbols = new NodeCache<TypeDesc, IEETypeNode>((TypeDesc type) =>
    {
        // Only types that reside in other binaries should be cloned
        Debug.Assert(_compilationModuleGroup.ShouldReferenceThroughImportTable(type));
        return new ClonedConstructedEETypeNode(this, type);
    });
    _importedTypeSymbols = new NodeCache<TypeDesc, IEETypeNode>((TypeDesc type) =>
    {
        Debug.Assert(_compilationModuleGroup.ShouldReferenceThroughImportTable(type));
        return new ImportedEETypeSymbolNode(this, type);
    });
    // --- Static field bases: local node, import, or extern symbol depending
    // on where the owning type is compiled ---------------------------------
    _nonGCStatics = new NodeCache<MetadataType, ISymbolNode>((MetadataType type) =>
    {
        if (_compilationModuleGroup.ContainsType(type))
        {
            return new NonGCStaticsNode(type, this);
        }
        else if (_compilationModuleGroup.ShouldReferenceThroughImportTable(type))
        {
            return new ImportedNonGCStaticsNode(this, type);
        }
        else
        {
            return new ExternSymbolNode(NonGCStaticsNode.GetMangledName(type, NameMangler));
        }
    });
    _GCStatics = new NodeCache<MetadataType, ISymbolNode>((MetadataType type) =>
    {
        if (_compilationModuleGroup.ContainsType(type))
        {
            return new GCStaticsNode(type);
        }
        else if (_compilationModuleGroup.ShouldReferenceThroughImportTable(type))
        {
            return new ImportedGCStaticsNode(this, type);
        }
        else
        {
            return new ExternSymbolNode(GCStaticsNode.GetMangledName(type, NameMangler));
        }
    });
    _GCStaticIndirectionNodes = new NodeCache<MetadataType, EmbeddedObjectNode>((MetadataType type) =>
    {
        ISymbolNode gcStaticsNode = TypeGCStaticsSymbol(type);
        Debug.Assert(gcStaticsNode is GCStaticsNode);
        return GCStaticsRegion.NewNode((GCStaticsNode)gcStaticsNode);
    });
    _threadStatics = new NodeCache<MetadataType, ThreadStaticsNode>((MetadataType type) =>
    {
        return new ThreadStaticsNode(type, this);
    });
    _typeThreadStaticIndices = new NodeCache<MetadataType, TypeThreadStaticIndexNode>(type =>
    {
        return new TypeThreadStaticIndexNode(type);
    });
    _GCStaticEETypes = new NodeCache<GCPointerMap, GCStaticEETypeNode>((GCPointerMap gcMap) =>
    {
        return new GCStaticEETypeNode(Target, gcMap);
    });
    _readOnlyDataBlobs = new NodeCache<ReadOnlyDataBlobKey, BlobNode>(key =>
    {
        return new BlobNode(key.Name, ObjectNodeSection.ReadOnlyDataSection, key.Data, key.Alignment);
    });
    _externSymbols = new NodeCache<string, ExternSymbolNode>((string name) =>
    {
        return new ExternSymbolNode(name);
    });
    _pInvokeModuleFixups = new NodeCache<string, PInvokeModuleFixupNode>((string name) =>
    {
        return new PInvokeModuleFixupNode(name);
    });
    _pInvokeMethodFixups = new NodeCache<Tuple<string, string>, PInvokeMethodFixupNode>((Tuple<string, string> key) =>
    {
        return new PInvokeMethodFixupNode(key.Item1, key.Item2);
    });
    // --- Method code nodes; the concrete factories are codegen-specific ----
    _methodEntrypoints = new NodeCache<MethodDesc, IMethodNode>(CreateMethodEntrypointNode);
    _unboxingStubs = new NodeCache<MethodDesc, IMethodNode>(CreateUnboxingStubNode);
    _fatFunctionPointers = new NodeCache<MethodKey, FatFunctionPointerNode>(method =>
    {
        return new FatFunctionPointerNode(method.Method, method.IsUnboxingStub);
    });
    _gvmDependenciesNode = new NodeCache<MethodDesc, GVMDependenciesNode>(method =>
    {
        return new GVMDependenciesNode(method);
    });
    _gvmTableEntries = new NodeCache<TypeDesc, TypeGVMEntriesNode>(type =>
    {
        return new TypeGVMEntriesNode(type);
    });
    _reflectableMethods = new NodeCache<MethodDesc, ReflectableMethodNode>(method =>
    {
        return new ReflectableMethodNode(method);
    });
    _shadowConcreteMethods = new NodeCache<MethodKey, IMethodNode>(methodKey =>
    {
        MethodDesc canonMethod = methodKey.Method.GetCanonMethodTarget(CanonicalFormKind.Specific);
        if (methodKey.IsUnboxingStub)
        {
            return new ShadowConcreteUnboxingThunkNode(methodKey.Method, MethodEntrypoint(canonMethod, true));
        }
        else
        {
            return new ShadowConcreteMethodNode(methodKey.Method, MethodEntrypoint(canonMethod));
        }
    });
    _runtimeDeterminedMethods = new NodeCache<MethodDesc, IMethodNode>(method =>
    {
        return new RuntimeDeterminedMethodNode(method,
            MethodEntrypoint(method.GetCanonMethodTarget(CanonicalFormKind.Specific)));
    });
    _virtMethods = new NodeCache<MethodDesc, VirtualMethodUseNode>((MethodDesc method) =>
    {
        // We don't need to track virtual method uses for types that are producing full vtables.
        // It's a waste of CPU time and memory.
        Debug.Assert(!CompilationModuleGroup.ShouldProduceFullVTable(method.OwningType));
        return new VirtualMethodUseNode(method);
    });
    // --- Ready-to-run helpers and generic lookups --------------------------
    _readyToRunHelpers = new NodeCache<ReadyToRunHelperKey, ISymbolNode>(CreateReadyToRunHelperNode);
    _genericReadyToRunHelpersFromDict = new NodeCache<ReadyToRunGenericHelperKey, ISymbolNode>(data =>
    {
        return new ReadyToRunGenericLookupFromDictionaryNode(this, data.HelperId, data.Target, data.DictionaryOwner);
    });
    _genericReadyToRunHelpersFromType = new NodeCache<ReadyToRunGenericHelperKey, ISymbolNode>(data =>
    {
        return new ReadyToRunGenericLookupFromTypeNode(this, data.HelperId, data.Target, data.DictionaryOwner);
    });
    _indirectionNodes = new NodeCache<ISymbolNode, ISymbolNode>(indirectedNode =>
    {
        return new IndirectionNode(Target, indirectedNode, 0);
    });
    _frozenStringNodes = new NodeCache<string, FrozenStringNode>((string data) =>
    {
        return new FrozenStringNode(data, Target);
    });
    _interfaceDispatchCells = new NodeCache<DispatchCellKey, InterfaceDispatchCellNode>(callSiteCell =>
    {
        return new InterfaceDispatchCellNode(callSiteCell.Target, callSiteCell.CallsiteId);
    });
    _interfaceDispatchMaps = new NodeCache<TypeDesc, InterfaceDispatchMapNode>((TypeDesc type) =>
    {
        return new InterfaceDispatchMapNode(type);
    });
    _runtimeMethodHandles = new NodeCache<MethodDesc, RuntimeMethodHandleNode>((MethodDesc method) =>
    {
        return new RuntimeMethodHandleNode(method);
    });
    _runtimeFieldHandles = new NodeCache<FieldDesc, RuntimeFieldHandleNode>((FieldDesc field) =>
    {
        return new RuntimeFieldHandleNode(field);
    });
    _interfaceDispatchMapIndirectionNodes = new NodeCache<TypeDesc, EmbeddedObjectNode>((TypeDesc type) =>
    {
        var dispatchMap = InterfaceDispatchMap(type);
        // The dispatch map learns its index in the table once the table slot
        // is assigned; hence the callback.
        return DispatchMapTable.NewNodeWithSymbol(dispatchMap, (indirectionNode) =>
        {
            dispatchMap.SetDispatchMapIndex(this, DispatchMapTable.IndexOfEmbeddedObject(indirectionNode));
        });
    });
    _genericCompositions = new NodeCache<GenericCompositionDetails, GenericCompositionNode>((GenericCompositionDetails details) =>
    {
        return new GenericCompositionNode(details);
    });
    _eagerCctorIndirectionNodes = new NodeCache<MethodDesc, EmbeddedObjectNode>((MethodDesc method) =>
    {
        Debug.Assert(method.IsStaticConstructor);
        Debug.Assert(TypeSystemContext.HasEagerStaticConstructor((MetadataType)method.OwningType));
        return EagerCctorTable.NewNode(MethodEntrypoint(method));
    });
    _vTableNodes = new NodeCache<TypeDesc, VTableSliceNode>((TypeDesc type) =>
    {
        if (CompilationModuleGroup.ShouldProduceFullVTable(type))
            return new EagerlyBuiltVTableSliceNode(type);
        else
            return new LazilyBuiltVTableSliceNode(type);
    });
    _methodGenericDictionaries = new NodeCache<MethodDesc, ISymbolNode>(method =>
    {
        if (CompilationModuleGroup.ContainsMethod(method))
        {
            return new MethodGenericDictionaryNode(method);
        }
        else
        {
            return new ImportedMethodGenericDictionaryNode(this, method);
        }
    });
    _typeGenericDictionaries = new NodeCache<TypeDesc, ISymbolNode>(type =>
    {
        if (CompilationModuleGroup.ContainsType(type))
        {
            return new TypeGenericDictionaryNode(type);
        }
        else
        {
            return new ImportedTypeGenericDictionaryNode(this, type);
        }
    });
    _genericDictionaryLayouts = new NodeCache<TypeSystemEntity, DictionaryLayoutNode>(methodOrType =>
    {
        return new DictionaryLayoutNode(methodOrType);
    });
    _stringAllocators = new NodeCache<MethodDesc, IMethodNode>(constructor =>
    {
        return new StringAllocatorMethodNode(constructor);
    });
    NativeLayout = new NativeLayoutHelper(this);
}
// Factory hooks implemented by the codegen-specific NodeFactory subclass.
protected abstract IMethodNode CreateMethodEntrypointNode(MethodDesc method);
protected abstract IMethodNode CreateUnboxingStubNode(MethodDesc method);
protected abstract ISymbolNode CreateReadyToRunHelperNode(ReadyToRunHelperKey helperCall);

private NodeCache<TypeDesc, IEETypeNode> _typeSymbols;

/// <summary>
/// Returns the "necessary" EEType node for a type, redirecting to the imported
/// or constructed form when the compilation group requires it.
/// </summary>
public IEETypeNode NecessaryTypeSymbol(TypeDesc type)
{
    if (_compilationModuleGroup.ShouldReferenceThroughImportTable(type))
    {
        return ImportedEETypeSymbol(type);
    }
    if (_compilationModuleGroup.ShouldPromoteToFullType(type))
    {
        return ConstructedTypeSymbol(type);
    }
    Debug.Assert(!TypeCannotHaveEEType(type));
    return _typeSymbols.GetOrAdd(type);
}
private NodeCache<TypeDesc, IEETypeNode> _constructedTypeSymbols;

/// <summary>
/// Returns the constructed EEType node for a type, or its imported form when
/// the type lives in another binary.
/// </summary>
public IEETypeNode ConstructedTypeSymbol(TypeDesc type)
{
    if (_compilationModuleGroup.ShouldReferenceThroughImportTable(type))
    {
        return ImportedEETypeSymbol(type);
    }
    Debug.Assert(!TypeCannotHaveEEType(type));
    return _constructedTypeSymbols.GetOrAdd(type);
}

private NodeCache<TypeDesc, IEETypeNode> _clonedTypeSymbols;

// Cloned EETypes are local copies of constructed types that reside in other
// binaries (see the assert in the cache factory).
public IEETypeNode ConstructedClonedTypeSymbol(TypeDesc type)
{
    Debug.Assert(!TypeCannotHaveEEType(type));
    return _clonedTypeSymbols.GetOrAdd(type);
}

private NodeCache<TypeDesc, IEETypeNode> _importedTypeSymbols;

// EEType symbols referenced through another module's import table.
private IEETypeNode ImportedEETypeSymbol(TypeDesc type)
{
    Debug.Assert(_compilationModuleGroup.ShouldReferenceThroughImportTable(type));
    return _importedTypeSymbols.GetOrAdd(type);
}
private NodeCache<MetadataType, ISymbolNode> _nonGCStatics;

// Symbol for the non-GC static field base of a type.
public ISymbolNode TypeNonGCStaticsSymbol(MetadataType type)
{
    Debug.Assert(!TypeCannotHaveEEType(type));
    return _nonGCStatics.GetOrAdd(type);
}

private NodeCache<MetadataType, ISymbolNode> _GCStatics;

// Symbol for the GC static field base of a type.
public ISymbolNode TypeGCStaticsSymbol(MetadataType type)
{
    Debug.Assert(!TypeCannotHaveEEType(type));
    return _GCStatics.GetOrAdd(type);
}

private NodeCache<MetadataType, EmbeddedObjectNode> _GCStaticIndirectionNodes;

// Entry in the GC statics region pointing at the type's GC statics node.
public EmbeddedObjectNode GCStaticIndirection(MetadataType type)
{
    return _GCStaticIndirectionNodes.GetOrAdd(type);
}

private NodeCache<MetadataType, ThreadStaticsNode> _threadStatics;

public ThreadStaticsNode TypeThreadStaticsSymbol(MetadataType type)
{
    // This node is always used in the context of its index within the region.
    // We should never ask for this if the current compilation doesn't contain the
    // associated type.
    Debug.Assert(_compilationModuleGroup.ContainsType(type));
    return _threadStatics.GetOrAdd(type);
}

private NodeCache<MetadataType, TypeThreadStaticIndexNode> _typeThreadStaticIndices;

// Thread-static index for a type; resolved via an extern symbol when the type
// is compiled into a different module.
public ISymbolNode TypeThreadStaticIndex(MetadataType type)
{
    if (_compilationModuleGroup.ContainsType(type))
    {
        return _typeThreadStaticIndices.GetOrAdd(type);
    }
    else
    {
        return ExternSymbol("__TypeThreadStaticIndex_" + NameMangler.GetMangledTypeName(type));
    }
}
private NodeCache<DispatchCellKey, InterfaceDispatchCellNode> _interfaceDispatchCells;

// Dispatch cell for an interface call; cells are keyed per call site when a
// call-site identifier is supplied.
public InterfaceDispatchCellNode InterfaceDispatchCell(MethodDesc method, string callSite = null)
{
    return _interfaceDispatchCells.GetOrAdd(new DispatchCellKey(method, callSite));
}

private NodeCache<MethodDesc, RuntimeMethodHandleNode> _runtimeMethodHandles;

public RuntimeMethodHandleNode RuntimeMethodHandle(MethodDesc method)
{
    return _runtimeMethodHandles.GetOrAdd(method);
}

private NodeCache<FieldDesc, RuntimeFieldHandleNode> _runtimeFieldHandles;

public RuntimeFieldHandleNode RuntimeFieldHandle(FieldDesc field)
{
    return _runtimeFieldHandles.GetOrAdd(field);
}

private NodeCache<GCPointerMap, GCStaticEETypeNode> _GCStaticEETypes;

// EEType-like node keyed by the GC pointer map of a statics block.
public ISymbolNode GCStaticEEType(GCPointerMap gcMap)
{
    return _GCStaticEETypes.GetOrAdd(gcMap);
}

private NodeCache<ReadOnlyDataBlobKey, BlobNode> _readOnlyDataBlobs;

// Named blob emitted into the read-only data section.
public BlobNode ReadOnlyDataBlob(Utf8String name, byte[] blobData, int alignment)
{
    return _readOnlyDataBlobs.GetOrAdd(new ReadOnlyDataBlobKey(name, blobData, alignment));
}
private NodeCache<TypeDesc, InterfaceDispatchMapNode> _interfaceDispatchMaps;

internal InterfaceDispatchMapNode InterfaceDispatchMap(TypeDesc type)
{
    return _interfaceDispatchMaps.GetOrAdd(type);
}

private NodeCache<TypeDesc, EmbeddedObjectNode> _interfaceDispatchMapIndirectionNodes;

// Entry in the dispatch map table pointing at the type's dispatch map.
public EmbeddedObjectNode InterfaceDispatchMapIndirection(TypeDesc type)
{
    return _interfaceDispatchMapIndirectionNodes.GetOrAdd(type);
}

private NodeCache<GenericCompositionDetails, GenericCompositionNode> _genericCompositions;

internal ISymbolNode GenericComposition(GenericCompositionDetails details)
{
    return _genericCompositions.GetOrAdd(details);
}

private NodeCache<string, ExternSymbolNode> _externSymbols;

// Symbol defined outside the current object file, referenced by name only.
public ISymbolNode ExternSymbol(string name)
{
    return _externSymbols.GetOrAdd(name);
}

private NodeCache<string, PInvokeModuleFixupNode> _pInvokeModuleFixups;

public ISymbolNode PInvokeModuleFixup(string moduleName)
{
    return _pInvokeModuleFixups.GetOrAdd(moduleName);
}

private NodeCache<Tuple<string, string>, PInvokeMethodFixupNode> _pInvokeMethodFixups;

// Keyed by (module name, entry point name).
public PInvokeMethodFixupNode PInvokeMethodFixup(string moduleName, string entryPointName)
{
    return _pInvokeMethodFixups.GetOrAdd(new Tuple<string, string>(moduleName, entryPointName));
}

private NodeCache<TypeDesc, VTableSliceNode> _vTableNodes;

public VTableSliceNode VTable(TypeDesc type)
{
    return _vTableNodes.GetOrAdd(type);
}
private NodeCache<MethodDesc, ISymbolNode> _methodGenericDictionaries;

public ISymbolNode MethodGenericDictionary(MethodDesc method)
{
    return _methodGenericDictionaries.GetOrAdd(method);
}

private NodeCache<TypeDesc, ISymbolNode> _typeGenericDictionaries;

public ISymbolNode TypeGenericDictionary(TypeDesc type)
{
    return _typeGenericDictionaries.GetOrAdd(type);
}

private NodeCache<TypeSystemEntity, DictionaryLayoutNode> _genericDictionaryLayouts;

// Layout node for a method's or type's generic dictionary.
public virtual DictionaryLayoutNode GenericDictionaryLayout(TypeSystemEntity methodOrType)
{
    return _genericDictionaryLayouts.GetOrAdd(methodOrType);
}

private NodeCache<MethodDesc, IMethodNode> _stringAllocators;

// Allocator method node corresponding to a String constructor.
public IMethodNode StringAllocator(MethodDesc stringConstructor)
{
    return _stringAllocators.GetOrAdd(stringConstructor);
}

private NodeCache<MethodDesc, IMethodNode> _methodEntrypoints;
private NodeCache<MethodDesc, IMethodNode> _unboxingStubs;

// Code entrypoint for a method; the unboxing-stub variant is cached separately.
public IMethodNode MethodEntrypoint(MethodDesc method, bool unboxingStub = false)
{
    if (unboxingStub)
    {
        return _unboxingStubs.GetOrAdd(method);
    }
    return _methodEntrypoints.GetOrAdd(method);
}

private NodeCache<MethodKey, FatFunctionPointerNode> _fatFunctionPointers;

public IMethodNode FatFunctionPointer(MethodDesc method, bool isUnboxingStub = false)
{
    return _fatFunctionPointers.GetOrAdd(new MethodKey(method, isUnboxingStub));
}
/// <summary>
/// Callable address for a method: a fat function pointer when the method
/// differs from its canonical form, otherwise the regular entrypoint.
/// </summary>
public IMethodNode ExactCallableAddress(MethodDesc method, bool isUnboxingStub = false)
{
    MethodDesc canonMethod = method.GetCanonMethodTarget(CanonicalFormKind.Specific);
    if (method != canonMethod)
        return FatFunctionPointer(method, isUnboxingStub);
    else
        return MethodEntrypoint(method, isUnboxingStub);
}

/// <summary>
/// Entrypoint keyed by the concrete method: a shadow concrete method node when
/// the method differs from its canonical form, otherwise the regular entrypoint.
/// </summary>
public IMethodNode CanonicalEntrypoint(MethodDesc method, bool isUnboxingStub = false)
{
    MethodDesc canonMethod = method.GetCanonMethodTarget(CanonicalFormKind.Specific);
    if (method != canonMethod)
        return ShadowConcreteMethod(method, isUnboxingStub);
    else
        return MethodEntrypoint(method, isUnboxingStub);
}

private NodeCache<MethodDesc, GVMDependenciesNode> _gvmDependenciesNode;

// Dependency tracking node for a generic virtual method.
internal GVMDependenciesNode GVMDependencies(MethodDesc method)
{
    return _gvmDependenciesNode.GetOrAdd(method);
}

private NodeCache<TypeDesc, TypeGVMEntriesNode> _gvmTableEntries;

internal TypeGVMEntriesNode TypeGVMEntries(TypeDesc type)
{
    return _gvmTableEntries.GetOrAdd(type);
}

private NodeCache<MethodDesc, ReflectableMethodNode> _reflectableMethods;

internal ReflectableMethodNode ReflectableMethod(MethodDesc method)
{
    return _reflectableMethods.GetOrAdd(method);
}

private NodeCache<MethodKey, IMethodNode> _shadowConcreteMethods;

public IMethodNode ShadowConcreteMethod(MethodDesc method, bool isUnboxingStub = false)
{
    return _shadowConcreteMethods.GetOrAdd(new MethodKey(method, isUnboxingStub));
}

private NodeCache<MethodDesc, IMethodNode> _runtimeDeterminedMethods;

public IMethodNode RuntimeDeterminedMethod(MethodDesc method)
{
    return _runtimeDeterminedMethods.GetOrAdd(method);
}
// (namespace, type, method) triples for the lazily resolved helper entrypoints;
// array order must match the HelperEntrypoint values used as indices below.
private static readonly string[][] s_helperEntrypointNames = new string[][] {
    new string[] { "System.Runtime.CompilerServices", "ClassConstructorRunner", "CheckStaticClassConstructionReturnGCStaticBase" },
    new string[] { "System.Runtime.CompilerServices", "ClassConstructorRunner", "CheckStaticClassConstructionReturnNonGCStaticBase" },
    new string[] { "System.Runtime.CompilerServices", "ClassConstructorRunner", "CheckStaticClassConstructionReturnThreadStaticBase" },
    new string[] { "Internal.Runtime", "ThreadStatics", "GetThreadStaticBaseForType" }
};

private ISymbolNode[] _helperEntrypointSymbols;

// Resolves (and memoizes) the entrypoint symbol for a well-known runtime helper.
public ISymbolNode HelperEntrypoint(HelperEntrypoint entrypoint)
{
    if (_helperEntrypointSymbols == null)
        _helperEntrypointSymbols = new ISymbolNode[s_helperEntrypointNames.Length];
    int index = (int)entrypoint;
    ISymbolNode symbol = _helperEntrypointSymbols[index];
    if (symbol == null)
    {
        var entry = s_helperEntrypointNames[index];
        var type = _context.SystemModule.GetKnownType(entry[0], entry[1]);
        var method = type.GetKnownMethod(entry[2], null);
        symbol = MethodEntrypoint(method);
        _helperEntrypointSymbols[index] = symbol;
    }
    return symbol;
}
private MetadataType _systemArrayOfTClass;

// Lazily resolved System.Array`1 from the system module.
public MetadataType ArrayOfTClass
{
    get
    {
        if (_systemArrayOfTClass == null)
        {
            _systemArrayOfTClass = _context.SystemModule.GetKnownType("System", "Array`1");
        }
        return _systemArrayOfTClass;
    }
}

private TypeDesc _systemArrayOfTEnumeratorType;

// Lazily resolved nested ArrayEnumerator type of Array`1.
public TypeDesc ArrayOfTEnumeratorType
{
    get
    {
        if (_systemArrayOfTEnumeratorType == null)
        {
            _systemArrayOfTEnumeratorType = ArrayOfTClass.GetNestedType("ArrayEnumerator");
        }
        return _systemArrayOfTEnumeratorType;
    }
}

private TypeDesc _systemICastableType;

// Lazily resolved System.Runtime.CompilerServices.ICastable.
public TypeDesc ICastableInterface
{
    get
    {
        if (_systemICastableType == null)
        {
            _systemICastableType = _context.SystemModule.GetKnownType("System.Runtime.CompilerServices", "ICastable");
        }
        return _systemICastableType;
    }
}
private NodeCache<MethodDesc, VirtualMethodUseNode> _virtMethods;

// Marks a virtual method declaration as used. Not tracked for types producing
// full vtables (see the assert in the cache factory).
public DependencyNodeCore<NodeFactory> VirtualMethodUse(MethodDesc decl)
{
    return _virtMethods.GetOrAdd(decl);
}

private NodeCache<ReadyToRunHelperKey, ISymbolNode> _readyToRunHelpers;

public ISymbolNode ReadyToRunHelper(ReadyToRunHelperId id, Object target)
{
    return _readyToRunHelpers.GetOrAdd(new ReadyToRunHelperKey(id, target));
}

private NodeCache<ReadyToRunGenericHelperKey, ISymbolNode> _genericReadyToRunHelpersFromDict;

// Generic lookup helper whose context is a dictionary.
public ISymbolNode ReadyToRunHelperFromDictionaryLookup(ReadyToRunHelperId id, Object target, TypeSystemEntity dictionaryOwner)
{
    return _genericReadyToRunHelpersFromDict.GetOrAdd(new ReadyToRunGenericHelperKey(id, target, dictionaryOwner));
}

private NodeCache<ReadyToRunGenericHelperKey, ISymbolNode> _genericReadyToRunHelpersFromType;

// Generic lookup helper whose context is a type.
public ISymbolNode ReadyToRunHelperFromTypeLookup(ReadyToRunHelperId id, Object target, TypeSystemEntity dictionaryOwner)
{
    return _genericReadyToRunHelpersFromType.GetOrAdd(new ReadyToRunGenericHelperKey(id, target, dictionaryOwner));
}

private NodeCache<ISymbolNode, ISymbolNode> _indirectionNodes;

// Cell containing the address of the given symbol; symbols that already
// represent indirection cells are returned as-is.
public ISymbolNode Indirection(ISymbolNode symbol)
{
    if (symbol.RepresentsIndirectionCell)
    {
        return symbol;
    }
    else
    {
        return _indirectionNodes.GetOrAdd(symbol);
    }
}

private NodeCache<string, FrozenStringNode> _frozenStringNodes;

// String object preallocated in the frozen segment region.
public FrozenStringNode SerializedStringObject(string data)
{
    return _frozenStringNodes.GetOrAdd(data);
}

private NodeCache<MethodDesc, EmbeddedObjectNode> _eagerCctorIndirectionNodes;

// Entry in the eager cctor table for a static constructor.
public EmbeddedObjectNode EagerCctorIndirection(MethodDesc cctorMethod)
{
    return _eagerCctorIndirectionNodes.GetOrAdd(cctorMethod);
}
/// <summary>
/// Returns a read-only data blob holding the NUL-terminated UTF-8 encoding of
/// <paramref name="str"/>.
/// </summary>
public ISymbolNode ConstantUtf8String(string str)
{
    // One extra byte so the blob is NUL-terminated.
    byte[] utf8Bytes = new byte[Encoding.UTF8.GetByteCount(str) + 1];
    Encoding.UTF8.GetBytes(str, 0, str.Length, utf8Bytes, 0);
    return ReadOnlyDataBlob("__utf8str_" + NameMangler.GetMangledStringName(str), utf8Bytes, 1);
}
/// <summary>
/// Returns alternative symbol name that object writer should produce for given symbols
/// in addition to the regular one.
/// </summary>
public string GetSymbolAlternateName(ISymbolNode node)
{
    string alias;
    return NodeAliases.TryGetValue(node, out alias) ? alias : null;
}
// Emitted data regions and tables, each bracketed by start/end symbols.
public ArrayOfEmbeddedPointersNode<GCStaticsNode> GCStaticsRegion = new ArrayOfEmbeddedPointersNode<GCStaticsNode>(
    "__GCStaticRegionStart",
    "__GCStaticRegionEnd",
    null);
// Created in the constructor (needs the target ABI).
public ThreadStaticsRegionNode ThreadStaticsRegion;
public ArrayOfEmbeddedPointersNode<IMethodNode> EagerCctorTable = new ArrayOfEmbeddedPointersNode<IMethodNode>(
    "__EagerCctorStart",
    "__EagerCctorEnd",
    null);
public ArrayOfEmbeddedPointersNode<InterfaceDispatchMapNode> DispatchMapTable = new ArrayOfEmbeddedPointersNode<InterfaceDispatchMapNode>(
    "__DispatchMapTableStart",
    "__DispatchMapTableEnd",
    null);
public ArrayOfEmbeddedDataNode<FrozenStringNode> FrozenSegmentRegion = new ArrayOfFrozenObjectsNode<FrozenStringNode>(
    "__FrozenSegmentRegionStart",
    "__FrozenSegmentRegionEnd",
    null);
// Created in AttachToDependencyGraph.
public ReadyToRunHeaderNode ReadyToRunHeader;
// Alternate symbol names consumed by GetSymbolAlternateName.
public Dictionary<ISymbolNode, string> NodeAliases = new Dictionary<ISymbolNode, string>();
protected internal TypeManagerIndirectionNode TypeManagerIndirection = new TypeManagerIndirectionNode();
/// <summary>
/// Registers the always-generated root nodes with the dependency graph and
/// describes each emitted region in the ready-to-run header.
/// </summary>
public virtual void AttachToDependencyGraph(DependencyAnalyzerBase<NodeFactory> graph)
{
ReadyToRunHeader = new ReadyToRunHeaderNode(Target);
graph.AddRoot(ReadyToRunHeader, "ReadyToRunHeader is always generated");
graph.AddRoot(new ModulesSectionNode(Target), "ModulesSection is always generated");
graph.AddRoot(GCStaticsRegion, "GC StaticsRegion is always generated");
graph.AddRoot(ThreadStaticsRegion, "ThreadStaticsRegion is always generated");
graph.AddRoot(EagerCctorTable, "EagerCctorTable is always generated");
graph.AddRoot(TypeManagerIndirection, "TypeManagerIndirection is always generated");
graph.AddRoot(DispatchMapTable, "DispatchMapTable is always generated");
graph.AddRoot(FrozenSegmentRegion, "FrozenSegmentRegion is always generated");
// Each header entry records a section type, its node, and its delimiting symbol(s).
ReadyToRunHeader.Add(ReadyToRunSectionType.GCStaticRegion, GCStaticsRegion, GCStaticsRegion.StartSymbol, GCStaticsRegion.EndSymbol);
ReadyToRunHeader.Add(ReadyToRunSectionType.ThreadStaticRegion, ThreadStaticsRegion, ThreadStaticsRegion.StartSymbol, ThreadStaticsRegion.EndSymbol);
ReadyToRunHeader.Add(ReadyToRunSectionType.EagerCctor, EagerCctorTable, EagerCctorTable.StartSymbol, EagerCctorTable.EndSymbol);
ReadyToRunHeader.Add(ReadyToRunSectionType.TypeManagerIndirection, TypeManagerIndirection, TypeManagerIndirection);
// NOTE(review): only the start symbol is registered for the dispatch table,
// unlike the start/end pairs above — confirm the end symbol is intentionally omitted.
ReadyToRunHeader.Add(ReadyToRunSectionType.InterfaceDispatchTable, DispatchMapTable, DispatchMapTable.StartSymbol);
ReadyToRunHeader.Add(ReadyToRunSectionType.FrozenObjectRegion, FrozenSegmentRegion, FrozenSegmentRegion.StartSymbol, FrozenSegmentRegion.EndSymbol);
MetadataManager.AddToReadyToRunHeader(ReadyToRunHeader, this);
MetadataManager.AttachToDependencyGraph(graph);
}
/// <summary>
/// Cache key pairing a method with a flag selecting its unboxing stub variant.
/// </summary>
protected struct MethodKey : IEquatable<MethodKey>
{
    public readonly MethodDesc Method;
    public readonly bool IsUnboxingStub;

    public MethodKey(MethodDesc method, bool isUnboxingStub)
    {
        Method = method;
        IsUnboxingStub = isUnboxingStub;
    }

    public bool Equals(MethodKey other) => Method == other.Method && IsUnboxingStub == other.IsUnboxingStub;
    public override bool Equals(object obj) => obj is MethodKey && Equals((MethodKey)obj);

    // Fix: mix IsUnboxingStub into the hash. The original used only
    // Method.GetHashCode(), so the unboxing and non-unboxing keys for the same
    // method always collided in hashed caches (Equals distinguishes them, so
    // correctness was preserved but every such pair probed the same bucket).
    public override int GetHashCode() => Method.GetHashCode() ^ (IsUnboxingStub ? unchecked((int)0x80000000) : 0);
}
/// <summary>
/// Cache key for non-generic ready-to-run helpers: helper kind plus its target object.
/// </summary>
protected struct ReadyToRunHelperKey : IEquatable<ReadyToRunHelperKey>
{
public readonly object Target;
public readonly ReadyToRunHelperId HelperId;
public ReadyToRunHelperKey(ReadyToRunHelperId helperId, object target)
{
HelperId = helperId;
Target = target;
}
public bool Equals(ReadyToRunHelperKey other) => HelperId == other.HelperId && Target.Equals(other.Target);
public override bool Equals(object obj) => obj is ReadyToRunHelperKey && Equals((ReadyToRunHelperKey)obj);
public override int GetHashCode()
{
// Arbitrary odd multiplier/offset spread the small enum values apart
// before folding in the target's hash.
int hashCode = (int)HelperId * 0x5498341 + 0x832424;
hashCode = hashCode * 23 + Target.GetHashCode();
return hashCode;
}
}
/// <summary>
/// Cache key for generic ready-to-run helpers: helper kind, target object, and
/// the entity (type or method) whose generic dictionary supplies the lookup.
/// </summary>
protected struct ReadyToRunGenericHelperKey : IEquatable<ReadyToRunGenericHelperKey>
{
public readonly object Target;
public readonly TypeSystemEntity DictionaryOwner;
public readonly ReadyToRunHelperId HelperId;
public ReadyToRunGenericHelperKey(ReadyToRunHelperId helperId, object target, TypeSystemEntity dictionaryOwner)
{
HelperId = helperId;
Target = target;
DictionaryOwner = dictionaryOwner;
}
public bool Equals(ReadyToRunGenericHelperKey other)
=> HelperId == other.HelperId && DictionaryOwner == other.DictionaryOwner && Target.Equals(other.Target);
public override bool Equals(object obj) => obj is ReadyToRunGenericHelperKey && Equals((ReadyToRunGenericHelperKey)obj);
public override int GetHashCode()
{
// Same mixing scheme as ReadyToRunHelperKey, additionally folding in the owner.
int hashCode = (int)HelperId * 0x5498341 + 0x832424;
hashCode = hashCode * 23 + Target.GetHashCode();
hashCode = hashCode * 23 + DictionaryOwner.GetHashCode();
return hashCode;
}
}
/// <summary>
/// Cache key identifying an interface dispatch cell by its target method and an
/// optional callsite identifier (may be null).
/// </summary>
protected struct DispatchCellKey : IEquatable<DispatchCellKey>
{
    public readonly MethodDesc Target;
    public readonly string CallsiteId;

    public DispatchCellKey(MethodDesc target, string callsiteId)
    {
        Target = target;
        CallsiteId = callsiteId;
    }

    public bool Equals(DispatchCellKey other) => Target == other.Target && CallsiteId == other.CallsiteId;

    public override bool Equals(object obj) => obj is DispatchCellKey && Equals((DispatchCellKey)obj);

    public override int GetHashCode()
    {
        // A null callsite id contributes nothing to the hash.
        int hash = Target.GetHashCode();
        return CallsiteId == null ? hash : hash * 23 + CallsiteId.GetHashCode();
    }
}
/// <summary>
/// Cache key for read-only data blobs. Equality and hashing use only the symbol
/// name; Data and Alignment ride along for blob creation.
/// </summary>
protected struct ReadOnlyDataBlobKey : IEquatable<ReadOnlyDataBlobKey>
{
public readonly Utf8String Name;
public readonly byte[] Data;
public readonly int Alignment;
public ReadOnlyDataBlobKey(Utf8String name, byte[] data, int alignment)
{
Name = name;
Data = data;
Alignment = alignment;
}
// The assumption here is that the name of the blob is unique.
// We can't emit two blobs with the same name and different contents.
// The name is part of the symbolic name and we don't do any mangling on it.
public bool Equals(ReadOnlyDataBlobKey other) => Name.Equals(other.Name);
public override bool Equals(object obj) => obj is ReadOnlyDataBlobKey && Equals((ReadOnlyDataBlobKey)obj);
public override int GetHashCode() => Name.GetHashCode();
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Signum.Utilities;
using Signum.Engine.DynamicQuery;
using Signum.Entities.DynamicQuery;
using Signum.Engine.Maps;
using Signum.Entities.Basics;
using Signum.Entities;
using System.Globalization;
using Signum.Engine.Operations;
using Signum.Entities.Help;
using Signum.Engine.Basics;
using Newtonsoft.Json;
using DocumentFormat.OpenXml.EMMA;
using Signum.Entities.Reflection;
using Signum.Utilities.ExpressionTrees;
namespace Signum.Engine.Help
{
/// <summary>
/// Base class for help artifacts (namespaces, types, properties, operations,
/// queries) that are subject to access control.
/// </summary>
public abstract class BaseHelp
{
    /// <summary>Returns null when access is allowed; otherwise an error message.</summary>
    public abstract string? IsAllowed();

    /// <summary>Throws <see cref="UnauthorizedAccessException"/> when access is denied.</summary>
    public void AssertAllowed()
    {
        if (IsAllowed() is string error)
            throw new UnauthorizedAccessException(EngineMessage.UnauthorizedAccessTo0Because1.NiceToString().FormatWith(this.GetType(), error));
    }

    public abstract override string ToString();
}
/// <summary>
/// Help information for a namespace: display title, description, and the entity
/// types it groups, optionally backed by a stored <see cref="NamespaceHelpEntity"/>.
/// </summary>
public class NamespaceHelp : BaseHelp
{
    public readonly string Namespace;
    public readonly string? Before;
    public readonly string Title;
    public readonly string? Description;
    [JsonIgnore]
    public readonly CultureInfo Culture;
    [JsonIgnore]
    public readonly NamespaceHelpEntity? DBEntity;
    [JsonIgnore]
    public readonly Type[] Types;

    public NamespaceHelp(string @namespace, CultureInfo culture, NamespaceHelpEntity? entity, Type[] types)
    {
        Culture = culture;
        Namespace = @namespace;
        Types = types;
        // ".Entities" is stripped so titles read "MyApp.Orders" instead of "MyApp.Entities.Orders".
        var clean = @namespace.Replace(".Entities", "");
        // Prefer the stored title; otherwise fall back to the last namespace segment.
        Title = entity?.Let(a => a.Title.DefaultToNull()) ?? clean.TryAfterLast('.') ?? clean;
        Before = clean.TryBeforeLast('.');
        Description = entity?.Description;
        DBEntity = entity;
    }

    /// <summary>Materializes a (possibly new) entity carrying this help's data.</summary>
    public NamespaceHelpEntity Entity
    {
        get
        {
            var result = new NamespaceHelpEntity
            {
                Culture = this.Culture.ToCultureInfoEntity(),
                Name = this.Namespace,
            };
            if (DBEntity != null)
            {
                result.Title = DBEntity.Title;
                result.Description = DBEntity.Description;
                result.SetId(DBEntity.Id);
                result.SetIsNew(DBEntity.IsNew);
                result.Ticks = DBEntity.Ticks;
            }
            return result;
        }
    }

    /// <summary>Types of this namespace the current user may see in the UI.</summary>
    public EntityItem[] AllowedTypes
    {
        get
        {
            Schema s = Schema.Current;
            return Types.Where(t => s.IsAllowed(t, inUserInterface: true) == null).Select(t => new EntityItem(t)).ToArray();
        }
    }

    public override string? IsAllowed()
    {
        if (AllowedTypes.Any())
            return null;
        // Fix: the original message misspelled "namespace" as "nemespace".
        return "all the types in the namespace are not allowed";
    }

    public override string ToString()
    {
        return "Namespace " + Namespace;
    }
}
/// <summary>
/// Lightweight projection of an entity type for namespace help listings.
/// </summary>
public class EntityItem
{
// Type's clean name as registered in TypeLogic.
public string CleanName;
// True when a help entity exists for the type (per HelpLogic's cached TypeHelp).
public bool HasDescription;
public EntityItem(Type t)
{
CleanName = TypeLogic.GetCleanName(t);
HasDescription = HelpLogic.GetTypeHelp(t).HasEntity;
}
}
/// <summary>
/// Help information for an entity type: auto-generated info plus per-property,
/// per-operation and per-query help, optionally backed by a stored <see cref="TypeHelpEntity"/>.
/// </summary>
public class TypeHelp : BaseHelp
{
    public readonly Type Type;
    public readonly CultureInfo Culture;
    public readonly bool HasEntity;
    public TypeHelpEntity? DBEntity;
    public readonly string Info;
    public readonly Dictionary<PropertyRoute, PropertyHelp> Properties;
    public readonly Dictionary<OperationSymbol, OperationHelp> Operations;
    public readonly Dictionary<object, QueryHelp> Queries;

    public TypeHelp(Type type, CultureInfo culture, TypeHelpEntity? entity)
    {
        Type = type;
        Culture = culture;
        Info = HelpGenerator.GetEntityHelp(type);
        // Fix: assign DBEntity BEFORE reading it. The original read DBEntity
        // (still null at this point) to build props/opers and only assigned it
        // at the end of the constructor, so stored user descriptions were
        // silently dropped from Properties and Operations.
        DBEntity = entity;
        // Fix: HasEntity was declared readonly but never assigned (always false),
        // which made EntityItem.HasDescription always false.
        HasEntity = entity != null;
        var props = DBEntity?.Properties.ToDictionaryEx(a => a.Property.ToPropertyRoute(), a => a.Info);
        var opers = DBEntity?.Operations.ToDictionaryEx(a => a.Operation, a => a.Info);
        Properties = PropertyRoute.GenerateRoutes(type)
            .ToDictionary(pp => pp, pp => new PropertyHelp(pp, props?.TryGetC(pp)));
        Operations = OperationLogic.TypeOperations(type)
            .ToDictionary(op => op.OperationSymbol, op => new OperationHelp(op.OperationSymbol, type, opers?.TryGetC(op.OperationSymbol)));
        var allQueries = HelpLogic.CachedQueriesHelp();
        Queries = HelpLogic.TypeToQuery.Value.TryGetC(this.Type).EmptyIfNull().Select(a => allQueries.GetOrThrow(a)).ToDictionary(qh => qh.QueryName);
    }

    /// <summary>Materializes a (possibly new) entity with the allowed members' help.</summary>
    public TypeHelpEntity GetEntity()
    {
        var result = new TypeHelpEntity
        {
            Culture = this.Culture.ToCultureInfoEntity(),
            Type = this.Type.ToTypeEntity(),
            Description = DBEntity?.Description,
            Info = Info
        };
        result.Properties.AddRange(
            from pre in PropertyRouteLogic.RetrieveOrGenerateProperties(this.Type.ToTypeEntity())
            let pr = pre.ToPropertyRoute()
            // Skip getter-only properties backed by expression expansions.
            where !(pr.PropertyInfo != null && pr.PropertyInfo.SetMethod == null && ExpressionCleaner.HasExpansions(pr.PropertyInfo.DeclaringType!, pr.PropertyInfo))
            let ph = Properties.GetOrThrow(pre.ToPropertyRoute())
            where ph.IsAllowed() == null
            select new PropertyRouteHelpEmbedded
            {
                Property = pre,
                Info = ph.Info,
                Description = ph.UserDescription,
            });
        result.Operations.AddRange(
            from oh in Operations.Values
            where oh.IsAllowed() == null
            select new OperationHelpEmbedded
            {
                Operation = oh.OperationSymbol,
                Info = oh.Info,
                Description = oh.UserDescription,
            });
        result.Queries.AddRange(
            from qn in QueryLogic.Queries.GetTypeQueries(this.Type).Keys
            let qh = HelpLogic.GetQueryHelp(qn)
            where qh.IsAllowed() == null
            select qh.GetEntity());
        if (DBEntity != null)
        {
            result.SetId(DBEntity.Id);
            result.SetIsNew(DBEntity.IsNew);
            result.Ticks = DBEntity.Ticks;
        }
        return result;
    }

    public override string? IsAllowed()
    {
        return Schema.Current.IsAllowed(Type, inUserInterface: true);
    }

    public override string ToString()
    {
        return "Type " + TypeLogic.GetCleanName(Type);
    }
}
/// <summary>
/// Help information for a single property route of an entity type.
/// </summary>
public class PropertyHelp : BaseHelp
{
    public readonly string Info;
    public readonly PropertyRoute PropertyRoute;
    public readonly string? UserDescription;

    public PropertyHelp(PropertyRoute propertyRoute, string? userDescription)
    {
        if (propertyRoute.PropertyRouteType != PropertyRouteType.FieldOrProperty)
            throw new ArgumentException("propertyRoute should be of type Property");
        PropertyRoute = propertyRoute;
        Info = HelpGenerator.GetPropertyHelp(propertyRoute);
        UserDescription = userDescription;
    }

    public PropertyInfo PropertyInfo => PropertyRoute.PropertyInfo!;

    public override string? IsAllowed() => PropertyRoute.IsAllowed();

    public override string ToString() => "Property " + this.PropertyRoute.ToString();
}
/// <summary>
/// Help information for a single operation of an entity type.
/// </summary>
public class OperationHelp : BaseHelp
{
public readonly OperationSymbol OperationSymbol;
public readonly Type Type;
// Auto-generated help text for the operation.
public readonly string Info;
// Optional user-entered description (null when none stored).
public readonly string? UserDescription;
public OperationHelp(OperationSymbol operationSymbol, Type type, string? userDescription)
{
this.OperationSymbol = operationSymbol;
this.Type = type;
this.Info = HelpGenerator.GetOperationHelp(type, operationSymbol);
this.UserDescription = userDescription;
}
public override string? IsAllowed()
{
// Null means allowed; otherwise the standard "not authorized" message.
return OperationLogic.OperationAllowed(OperationSymbol, this.Type, inUserInterface: true) ? null :
OperationMessage.Operation01IsNotAuthorized.NiceToString(this.OperationSymbol.NiceToString(), this.OperationSymbol.Key);
}
public override string ToString()
{
return "Operation " + this.OperationSymbol.Key;
}
}
/// <summary>
/// Help information for a query: auto-generated info plus per-column help,
/// optionally backed by a stored <see cref="QueryHelpEntity"/>.
/// </summary>
public class QueryHelp : BaseHelp
{
    public readonly object QueryName;
    public readonly CultureInfo Culture;
    public readonly string Info;
    public readonly Dictionary<string, QueryColumnHelp> Columns;
    public readonly QueryHelpEntity? DBEntity;
    public readonly string? UserDescription;

    public QueryHelp(object queryName, CultureInfo ci, QueryHelpEntity? entity)
    {
        QueryName = queryName;
        Culture = ci;
        // Resolve the query once; the original evaluated
        // QueryLogic.Queries.GetQuery(queryName).Core.Value twice.
        var core = QueryLogic.Queries.GetQuery(queryName).Core.Value;
        Info = HelpGenerator.GetQueryHelp(core);
        var cols = entity?.Columns.ToDictionary(a => a.ColumnName, a => a.Description);
        Columns = core.StaticColumns.ToDictionary(
            cf => cf.Name,
            cf => new QueryColumnHelp(cf, cf.DisplayName(), HelpGenerator.GetQueryColumnHelp(cf), cols?.TryGetCN(cf.Name)));
        DBEntity = entity;
        UserDescription = entity?.Description;
    }

    /// <summary>Materializes a (possibly new) entity with the allowed columns' help.</summary>
    public QueryHelpEntity GetEntity()
    {
        var cd = DBEntity?.Columns.ToDictionary(a => a.ColumnName, a => a.Description);
        var result = new QueryHelpEntity
        {
            Culture = this.Culture.ToCultureInfoEntity(),
            Query = QueryLogic.GetQueryEntity(this.QueryName),
            Description = DBEntity?.Description,
            Info = Info,
            Columns = this.Columns.Values.Where(a => a.Column.IsAllowed() == null)
                .Select(c => new QueryColumnHelpEmbedded
                {
                    ColumnName = c.Column.Name,
                    Description = cd?.TryGetCN(c.Column.Name)!,
                    NiceName = c.NiceName,
                    Info = c.Info,
                }).ToMList()
        };
        if (DBEntity != null)
        {
            result.SetId(DBEntity.Id);
            result.SetIsNew(DBEntity.IsNew);
            result.Ticks = DBEntity.Ticks;
        }
        return result;
    }

    public override string ToString()
    {
        return "Query " + QueryUtils.GetKey(this.QueryName);
    }

    public override string? IsAllowed()
    {
        return QueryLogic.Queries.QueryAllowed(this.QueryName, false) ? null :
            "Access to query {0} not allowed".FormatWith(QueryUtils.GetKey(this.QueryName));
    }
}
/// <summary>
/// Help information for a single column of a query.
/// </summary>
public class QueryColumnHelp : BaseHelp
{
    public ColumnDescriptionFactory Column;
    public string NiceName;
    public string Info;
    public string? UserDescription;

    public QueryColumnHelp(ColumnDescriptionFactory column, string niceName, string info, string? userDescription)
    {
        Column = column;
        NiceName = niceName;
        Info = info;
        UserDescription = userDescription;
    }

    public override string? IsAllowed() => Column.IsAllowed();

    public override string ToString() => "Column " + Column.Name;
}
}
| |
#region License
// Copyright (c) 2010-2019, Mark Final
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of BuildAMation nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion // License
using Bam.Core;
using QtCommon.MocExtension;
using System.Linq;
namespace Qt5Test1
{
/// <summary>
/// GUI application module: moc's the headers, compiles the sources, and links
/// against the Qt Core and Widgets libraries (frameworks on macOS).
/// </summary>
sealed class Qt5Application :
C.Cxx.GUIApplication
{
protected override void
Init()
{
base.Init();
var mocHeaders = this.CreateHeaderCollection("$(packagedir)/source/*.h");
var source = this.CreateCxxSourceCollection("$(packagedir)/source/*.cpp");
// Run moc over each header and fold the generated sources into the build.
foreach (var mocHeader in mocHeaders.Children)
{
var myObjectMocTuple = source.MocHeader(mocHeader as C.HeaderFile);
// first item in Tuple is the generated moc source file
myObjectMocTuple.Item1.PrivatePatch(settings =>
{
var mocSettings = settings as QtCommon.IMocSettings;
mocSettings.PreprocessorDefinitions.Add("GENERATING_MOC");
});
// second item in Tuple is the C++ compilation of that generated source
myObjectMocTuple.Item2.PrivatePatch(settings =>
{
var preprocessor = settings as C.ICommonPreprocessorSettings;
preprocessor.PreprocessorDefines.Add("COMPILING_GENERATED_MOC");
});
}
// Per-toolchain compiler tweaks.
source.PrivatePatch(settings =>
{
if (settings is GccCommon.ICommonCompilerSettings gccCompiler)
{
// because Qt5.6.0/5.6/gcc_64/include/QtCore/qglobal.h:1090:4: error: #error "You must build your code with position independent code if Qt was built with -reduce-relocations. " "Compile your code with -fPIC (-fPIE is not enough)."
gccCompiler.PositionIndependentCode = true;
}
// NOTE(review): vcCompiler is only used as a type test, never dereferenced.
if (settings is VisualCCommon.ICommonCompilerSettings vcCompiler)
{
if (source.Compiler.Version.AtLeast(VisualCCommon.ToolchainVersion.VC2015))
{
var cxxCompiler = settings as C.ICxxOnlyCompilerSettings;
cxxCompiler.ExceptionHandler = C.Cxx.EExceptionHandler.Synchronous; // C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\iosfwd(343): warning C4577: 'noexcept' used with no exception handling mode specified; termination on exception is not guaranteed. Specify /EHsc
}
}
});
// Per-toolchain linker tweaks: locate Qt libraries relative to the executable.
this.PrivatePatch(settings =>
{
if (settings is GccCommon.ICommonLinkerSettings gccLinker)
{
gccLinker.CanUseOrigin = true;
gccLinker.RPath.AddUnique("$ORIGIN/../lib");
}
if (settings is ClangCommon.ICommonLinkerSettings clangLinker)
{
clangLinker.RPath.AddUnique("@executable_path/../Frameworks");
}
});
// macOS uses Qt frameworks; other platforms use plain Qt libraries.
if (this.BuildEnvironment.Platform.Includes(Bam.Core.EPlatform.OSX))
{
this.CompileAndLinkAgainst<Qt.CoreFramework>(source);
this.CompileAndLinkAgainst<Qt.WidgetsFramework>(source);
}
else
{
this.CompileAndLinkAgainst<Qt.Core>(source);
this.CompileAndLinkAgainst<Qt.Widgets>(source);
}
if (this.BuildEnvironment.Platform.Includes(Bam.Core.EPlatform.Windows))
{
this.CreateWinResourceCollection("$(packagedir)/resources/*.rc");
}
}
}
/// <summary>
/// Publishes the application plus its Qt runtime dependencies (platform plugin,
/// qt.conf, and — on Windows release builds — the VC runtime).
/// </summary>
sealed class Qt5Test1Runtime :
Publisher.Collation
{
protected override void
Init()
{
base.Init();
this.SetDefaultMacrosAndMappings(EPublishingType.WindowedApplication);
var appAnchor = this.Include<Qt5Application>(C.Cxx.GUIApplication.ExecutableKey);
// The Qt platform plugin must land in the "platforms" subdirectory of the plugin dir.
var qtPlatformPlugin = this.Find<QtCommon.PlatformPlugin>().First();
(qtPlatformPlugin as Publisher.CollatedObject).SetPublishingDirectory("$(0)/platforms", this.PluginDir);
if (this.BuildEnvironment.Platform.Includes(Bam.Core.EPlatform.OSX))
{
var collatedQtFrameworks = this.Find<QtCommon.CommonFramework>();
collatedQtFrameworks.ToList().ForEach(collatedFramework =>
// must be a public patch in order for the stripping mode to inherit the settings
(collatedFramework as Publisher.CollatedObject).PublicPatch((settings, appliedTo) =>
{
var rsyncSettings = settings as Publisher.IRsyncSettings;
rsyncSettings.Exclusions = (collatedFramework.SourceModule as QtCommon.CommonFramework).PublishingExclusions;
}));
this.IncludeFiles(
this.CreateTokenizedString("$(packagedir)/resources/osx/qt.conf"),
this.Macros["macOSAppBundleResourcesDir"],
appAnchor);
}
else if (this.BuildEnvironment.Platform.Includes(Bam.Core.EPlatform.Linux))
{
this.IncludeFiles(
this.CreateTokenizedString("$(packagedir)/resources/linux/qt.conf"),
this.ExecutableDir,
appAnchor);
}
else if (this.BuildEnvironment.Platform.Includes(Bam.Core.EPlatform.Windows))
{
this.IncludeFiles(
this.CreateTokenizedString("$(packagedir)/resources/windows/qt.conf"),
this.ExecutableDir,
appAnchor);
var app = appAnchor.SourceModule as Qt5Application;
// Release builds with the VC toolchain also ship the C/C++ runtime DLLs.
if (this.BuildEnvironment.Configuration != EConfiguration.Debug &&
app.Linker is VisualCCommon.LinkerBase)
{
var runtimeLibrary = Bam.Core.Graph.Instance.PackageMetaData<VisualCCommon.IRuntimeLibraryPathMeta>("VisualC");
this.IncludeFiles(runtimeLibrary.CRuntimePaths(app.BitDepth), this.ExecutableDir, appAnchor);
this.IncludeFiles(runtimeLibrary.CxxRuntimePaths(app.BitDepth), this.ExecutableDir, appAnchor);
}
}
else
{
throw new Bam.Core.Exception("Unknown platform");
}
}
}
// Non-debug configurations only: extract debug symbols from the published runtime.
[Bam.Core.ConfigurationFilter(Bam.Core.EConfiguration.NotDebug)]
sealed class Qt5Test1DebugSymbols :
Publisher.DebugSymbolCollation
{
protected override void
Init()
{
base.Init();
this.CreateSymbolsFrom<Qt5Test1Runtime>();
}
}
// Non-debug configurations only: strip the published binaries, pairing them
// with the symbols extracted by Qt5Test1DebugSymbols.
[Bam.Core.ConfigurationFilter(Bam.Core.EConfiguration.NotDebug)]
sealed class Qt5Test1Stripped :
Publisher.StrippedBinaryCollation
{
protected override void
Init()
{
base.Init();
this.StripBinariesFrom<Qt5Test1Runtime, Qt5Test1DebugSymbols>();
}
}
// Non-debug configurations only: package the stripped collation into a tarball.
[Bam.Core.ConfigurationFilter(Bam.Core.EConfiguration.NotDebug)]
sealed class TarBallInstaller :
Installer.TarBall
{
protected override void
Init()
{
base.Init();
this.SourceFolder<Qt5Test1Stripped>(Publisher.StrippedBinaryCollation.StripBinaryDirectoryKey);
}
}
}
| |
using System;
using NUnit.Framework;
using System.Collections.ObjectModel;
namespace OpenQA.Selenium
{
// TODO: Remove NeedsFreshDriver attribute when ChromeDriver moves
// to default of using W3C protocol dialect
[TestFixture]
[NeedsFreshDriver(IsCreatedAfterTest = true)]
public class ExecutingAsyncJavascriptTest : DriverTestFixture
{
// JavaScript-executor facet of the driver under test (assigned in SetUp).
private IJavaScriptExecutor executor;
// Async-script timeout captured before each test so TearDown can restore it.
private TimeSpan originalTimeout = TimeSpan.MinValue;
[SetUp]
public void SetUpEnvironment()
{
if (driver is IJavaScriptExecutor)
{
executor = (IJavaScriptExecutor)driver;
}
try
{
originalTimeout = driver.Manage().Timeouts().AsynchronousJavaScript;
}
catch (NotImplementedException)
{
// For driver implementations that do not support getting timeouts,
// just set a default 30-second timeout.
originalTimeout = TimeSpan.FromSeconds(30);
}
// Keep tests fast: scripts that never invoke the callback fail within 1 second.
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(1);
}
[TearDown]
public void TearDownEnvironment()
{
// Restore whatever async timeout the driver had before the test ran.
driver.Manage().Timeouts().AsynchronousJavaScript = originalTimeout;
}
[Test]
public void ShouldNotTimeoutIfCallbackInvokedImmediately()
{
// The last script argument is the completion callback; invoking it synchronously
// should return immediately with the supplied value.
driver.Url = ajaxyPage;
object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1](123);");
Assert.That(result, Is.InstanceOf<long>());
Assert.That((long)result, Is.EqualTo(123));
}
[Test]
public void ShouldBeAbleToReturnJavascriptPrimitivesFromAsyncScripts_NeitherNullNorUndefined()
{
// Numbers come back as long, strings as string, booleans as bool.
driver.Url = ajaxyPage;
Assert.That((long)executor.ExecuteAsyncScript("arguments[arguments.length - 1](123);"), Is.EqualTo(123));
driver.Url = ajaxyPage;
Assert.That(executor.ExecuteAsyncScript("arguments[arguments.length - 1]('abc');").ToString(), Is.EqualTo("abc"));
driver.Url = ajaxyPage;
Assert.That((bool)executor.ExecuteAsyncScript("arguments[arguments.length - 1](false);"), Is.False);
driver.Url = ajaxyPage;
Assert.That((bool)executor.ExecuteAsyncScript("arguments[arguments.length - 1](true);"), Is.True);
}
[Test]
public void ShouldBeAbleToReturnJavascriptPrimitivesFromAsyncScripts_NullAndUndefined()
{
// Both JS null and undefined (callback with no argument) map to .NET null.
driver.Url = ajaxyPage;
Assert.That(executor.ExecuteAsyncScript("arguments[arguments.length - 1](null);"), Is.Null);
Assert.That(executor.ExecuteAsyncScript("arguments[arguments.length - 1]();"), Is.Null);
}
[Test]
public void ShouldBeAbleToReturnAnArrayLiteralFromAnAsyncScript()
{
// An empty JS array literal maps to an empty ReadOnlyCollection<object>.
driver.Url = ajaxyPage;
object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1]([]);");
Assert.That(result, Is.Not.Null);
Assert.That(result, Is.InstanceOf<ReadOnlyCollection<object>>());
Assert.That((ReadOnlyCollection<object>)result, Has.Count.EqualTo(0));
}
[Test]
public void ShouldBeAbleToReturnAnArrayObjectFromAnAsyncScript()
{
// `new Array()` behaves the same as an array literal.
driver.Url = ajaxyPage;
object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1](new Array());");
Assert.That(result, Is.Not.Null);
Assert.That(result, Is.InstanceOf<ReadOnlyCollection<object>>());
Assert.That((ReadOnlyCollection<object>)result, Has.Count.EqualTo(0));
}
[Test]
public void ShouldBeAbleToReturnArraysOfPrimitivesFromAsyncScripts()
{
// Mixed-type arrays preserve element order and per-element type mapping.
driver.Url = ajaxyPage;
object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1]([null, 123, 'abc', true, false]);");
Assert.That(result, Is.Not.Null);
Assert.That(result, Is.InstanceOf<ReadOnlyCollection<object>>());
ReadOnlyCollection<object> resultList = result as ReadOnlyCollection<object>;
Assert.That(resultList.Count, Is.EqualTo(5));
Assert.That(resultList[0], Is.Null);
Assert.That((long)resultList[1], Is.EqualTo(123));
Assert.That(resultList[2].ToString(), Is.EqualTo("abc"));
Assert.That((bool)resultList[3], Is.True);
Assert.That((bool)resultList[4], Is.False);
}
[Test]
public void ShouldBeAbleToReturnWebElementsFromAsyncScripts()
{
// DOM nodes come back as IWebElement wrappers.
driver.Url = ajaxyPage;
object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1](document.body);");
Assert.That(result, Is.InstanceOf<IWebElement>());
Assert.That(((IWebElement)result).TagName.ToLower(), Is.EqualTo("body"));
}
[Test]
public void ShouldBeAbleToReturnArraysOfWebElementsFromAsyncScripts()
{
// The same DOM node returned twice yields two equal IWebElement references.
driver.Url = ajaxyPage;
object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1]([document.body, document.body]);");
Assert.That(result, Is.Not.Null);
Assert.That(result, Is.InstanceOf<ReadOnlyCollection<IWebElement>>());
ReadOnlyCollection<IWebElement> resultsList = (ReadOnlyCollection<IWebElement>)result;
Assert.That(resultsList, Has.Count.EqualTo(2));
Assert.That(resultsList[0], Is.InstanceOf<IWebElement>());
Assert.That(resultsList[1], Is.InstanceOf<IWebElement>());
Assert.That(((IWebElement)resultsList[0]).TagName.ToLower(), Is.EqualTo("body"));
Assert.That(((IWebElement)resultsList[0]), Is.EqualTo((IWebElement)resultsList[1]));
}
[Test]
public void ShouldTimeoutIfScriptDoesNotInvokeCallback()
{
// A plain `return` never invokes the async callback, so the 1s timeout fires.
driver.Url = ajaxyPage;
Assert.That(() => executor.ExecuteAsyncScript("return 1 + 2;"), Throws.InstanceOf<WebDriverTimeoutException>());
}
[Test]
public void ShouldTimeoutIfScriptDoesNotInvokeCallbackWithAZeroTimeout()
{
// Scheduling an empty function is not the same as invoking the callback.
driver.Url = ajaxyPage;
Assert.That(() => executor.ExecuteAsyncScript("window.setTimeout(function() {}, 0);"), Throws.InstanceOf<WebDriverTimeoutException>());
}
[Test]
public void ShouldNotTimeoutIfScriptCallsbackInsideAZeroTimeout()
{
// The callback is invoked asynchronously but well within the timeout.
driver.Url = ajaxyPage;
executor.ExecuteAsyncScript(
"var callback = arguments[arguments.length - 1];" +
"window.setTimeout(function() { callback(123); }, 0)");
}
[Test]
public void ShouldTimeoutIfScriptDoesNotInvokeCallbackWithLongTimeout()
{
// Callback is scheduled after (1500ms) the configured timeout (500ms).
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromMilliseconds(500);
driver.Url = ajaxyPage;
Assert.That(() => executor.ExecuteAsyncScript(
"var callback = arguments[arguments.length - 1];" +
"window.setTimeout(callback, 1500);"), Throws.InstanceOf<WebDriverTimeoutException>());
}
[Test]
public void ShouldDetectPageLoadsWhileWaitingOnAnAsyncScriptAndReturnAnError()
{
// Navigating away unloads the script's window, which must surface as an error.
driver.Url = ajaxyPage;
Assert.That(() => executor.ExecuteAsyncScript("window.location = '" + dynamicPage + "';"), Throws.InstanceOf<WebDriverException>());
}
[Test]
public void ShouldCatchErrorsWhenExecutingInitialScript()
{
driver.Url = ajaxyPage;
Assert.That(() => executor.ExecuteAsyncScript("throw Error('you should catch this!');"), Throws.InstanceOf<WebDriverException>());
}
[Test]
public void ShouldNotTimeoutWithMultipleCallsTheFirstOneBeingSynchronous()
{
// The timeout applies per script execution, not cumulatively across calls.
driver.Url = ajaxyPage;
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromMilliseconds(1000);
Assert.That((bool)executor.ExecuteAsyncScript("arguments[arguments.length - 1](true);"), Is.True);
Assert.That((bool)executor.ExecuteAsyncScript("var cb = arguments[arguments.length - 1]; window.setTimeout(function(){cb(true);}, 9);"), Is.True);
}
[Test]
[IgnoreBrowser(Browser.Chrome, ".NET language bindings do not properly parse JavaScript stack trace")]
[IgnoreBrowser(Browser.Firefox, ".NET language bindings do not properly parse JavaScript stack trace")]
[IgnoreBrowser(Browser.IE, ".NET language bindings do not properly parse JavaScript stack trace")]
[IgnoreBrowser(Browser.Edge, ".NET language bindings do not properly parse JavaScript stack trace")]
[IgnoreBrowser(Browser.Safari, ".NET language bindings do not properly parse JavaScript stack trace")]
public void ShouldCatchErrorsWithMessageAndStacktraceWhenExecutingInitialScript()
{
// The JS error message and the throwing function's name should be preserved.
driver.Url = ajaxyPage;
string js = "function functionB() { throw Error('errormessage'); };"
+ "function functionA() { functionB(); };"
+ "functionA();";
Exception ex = Assert.Catch(() => executor.ExecuteAsyncScript(js));
Assert.That(ex, Is.InstanceOf<WebDriverException>());
Assert.That(ex.Message.Contains("errormessage"));
Assert.That(ex.StackTrace.Contains("functionB"));
}
[Test]
public void ShouldBeAbleToExecuteAsynchronousScripts()
{
// End-to-end: the page's registerListener hands the typed value to the callback.
// Reset the timeout to the 30-second default instead of zero.
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(30);
driver.Url = ajaxyPage;
IWebElement typer = driver.FindElement(By.Name("typer"));
typer.SendKeys("bob");
Assert.AreEqual("bob", typer.GetAttribute("value"));
driver.FindElement(By.Id("red")).Click();
driver.FindElement(By.Name("submit")).Click();
Assert.AreEqual(1, GetNumberOfDivElements(), "There should only be 1 DIV at this point, which is used for the butter message");
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(10);
// NOTE(review): the `callback` local is declared but the callback is passed
// directly to registerListener — the declaration looks redundant; confirm.
string text = (string)executor.ExecuteAsyncScript(
"var callback = arguments[arguments.length - 1];"
+ "window.registerListener(arguments[arguments.length - 1]);");
Assert.AreEqual("bob", text);
Assert.AreEqual("", typer.GetAttribute("value"));
Assert.AreEqual(2, GetNumberOfDivElements(), "There should be 1 DIV (for the butter message) + 1 DIV (for the new label)");
}
[Test]
public void ShouldBeAbleToPassMultipleArgumentsToAsyncScripts()
{
// Extra arguments precede the trailing callback argument.
driver.Url = ajaxyPage;
long result = (long)executor.ExecuteAsyncScript("arguments[arguments.length - 1](arguments[0] + arguments[1]);", 1, 2);
Assert.AreEqual(3, result);
}
[Test]
public void ShouldBeAbleToMakeXMLHttpRequestsAndWaitForTheResponse()
{
// The async callback is only invoked once the XHR completes, so the driver
// waits for the 2-second server response within the 3-second timeout.
string script =
"var url = arguments[0];" +
"var callback = arguments[arguments.length - 1];" +
// Adapted from http://www.quirksmode.org/js/xmlhttp.html
"var XMLHttpFactories = [" +
" function () {return new XMLHttpRequest()}," +
" function () {return new ActiveXObject('Msxml2.XMLHTTP')}," +
" function () {return new ActiveXObject('Msxml3.XMLHTTP')}," +
" function () {return new ActiveXObject('Microsoft.XMLHTTP')}" +
"];" +
"var xhr = false;" +
"while (!xhr && XMLHttpFactories.length) {" +
" try {" +
" xhr = XMLHttpFactories.shift().call();" +
" } catch (e) {}" +
"}" +
"if (!xhr) throw Error('unable to create XHR object');" +
"xhr.open('GET', url, true);" +
"xhr.onreadystatechange = function() {" +
" if (xhr.readyState == 4) callback(xhr.responseText);" +
"};" +
"xhr.send();";
driver.Url = ajaxyPage;
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(3);
string response = (string)executor.ExecuteAsyncScript(script, sleepingPage + "?time=2");
Assert.AreEqual("<html><head><title>Done</title></head><body>Slept for 2s</body></html>", response.Trim());
}
[Test]
// [IgnoreBrowser(Browser.Chrome, "Driver does not handle async alerts in OSS protocol dialect mode")]
[IgnoreBrowser(Browser.Safari, "Does not alerts thrown during async JavaScript; driver hangs until alert dismissed")]
[IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
public void ThrowsIfScriptTriggersAlert()
{
driver.Url = simpleTestPage;
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
// The script completes normally (callback fires after 200ms) but leaves an
// alert pending from the earlier 50ms timer; the next WebDriver command is
// then expected to surface UnhandledAlertException.
((IJavaScriptExecutor)driver).ExecuteAsyncScript(
"setTimeout(arguments[0], 200) ; setTimeout(function() { window.alert('Look! An alert!'); }, 50);");
Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>());
// Shouldn't throw: the failed command above dismissed the alert.
string title = driver.Title;
}
[Test]
//[IgnoreBrowser(Browser.Chrome, "Driver does not handle async alerts in OSS protocol dialect mode")]
[IgnoreBrowser(Browser.Safari, "Does not alerts thrown during async JavaScript; driver hangs until alert dismissed")]
[IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
public void ThrowsIfAlertHappensDuringScript()
{
// slowLoadingAlertPage raises an alert on its own while the async script
// (which just delays its callback by 1s) is still running.
driver.Url = slowLoadingAlertPage;
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
((IJavaScriptExecutor)driver).ExecuteAsyncScript("setTimeout(arguments[0], 1000);");
Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>());
// Shouldn't throw
string title = driver.Title;
}
[Test]
//[IgnoreBrowser(Browser.Chrome, "Driver does not handle async alerts in OSS protocol dialect mode")]
[IgnoreBrowser(Browser.Safari, "Does not alerts thrown during async JavaScript; driver hangs until alert dismissed")]
[IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
public void ThrowsIfScriptTriggersAlertWhichTimesOut()
{
driver.Url = simpleTestPage;
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
// Unlike ThrowsIfScriptTriggersAlert, the callback is never invoked, so the
// script times out while the alert from the 50ms timer is still showing.
((IJavaScriptExecutor)driver)
.ExecuteAsyncScript("setTimeout(function() { window.alert('Look! An alert!'); }, 50);");
Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>());
// Shouldn't throw
string title = driver.Title;
}
[Test]
//[IgnoreBrowser(Browser.Chrome, "Driver does not handle async alerts in OSS protocol dialect mode")]
[IgnoreBrowser(Browser.Safari, "Does not alerts thrown during async JavaScript; driver hangs until alert dismissed")]
[IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
public void ThrowsIfAlertHappensDuringScriptWhichTimesOut()
{
// The empty script never calls its callback, so it times out while the
// page (slowLoadingAlertPage) raises an alert of its own.
driver.Url = slowLoadingAlertPage;
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
((IJavaScriptExecutor)driver).ExecuteAsyncScript("");
Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>());
// Shouldn't throw
string title = driver.Title;
}
[Test]
[IgnoreBrowser(Browser.Chrome, "Driver chooses not to return text from unhandled alert")]
[IgnoreBrowser(Browser.Edge, "Driver chooses not to return text from unhandled alert")]
[IgnoreBrowser(Browser.Firefox, "Driver chooses not to return text from unhandled alert")]
[IgnoreBrowser(Browser.Safari, "Does not alerts thrown during async JavaScript; driver hangs until alert dismissed")]
[IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
public void IncludesAlertTextInUnhandledAlertException()
{
driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
string alertText = "Look! An alert!";
// Same pattern as ThrowsIfScriptTriggersAlert, but additionally asserts that
// the thrown exception carries the alert's message in its AlertText property.
((IJavaScriptExecutor)driver).ExecuteAsyncScript(
"setTimeout(arguments[0], 200) ; setTimeout(function() { window.alert('" + alertText
+ "'); }, 50);");
Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>().With.Property("AlertText").EqualTo(alertText));
}
/// <summary>
/// Counts the DIV elements currently on the page via JavaScript.
/// </summary>
/// <returns>The number of DIV elements in the current document.</returns>
private long GetNumberOfDivElements()
{
    // Direct cast for consistency with the other call sites in this fixture;
    // the previous "as" cast with no null check would have produced a confusing
    // NullReferenceException instead of an InvalidCastException if the driver
    // did not implement IJavaScriptExecutor.
    IJavaScriptExecutor jsExecutor = (IJavaScriptExecutor)driver;

    // Selenium does not support "findElements" yet, so we have to do this through a script.
    return (long)jsExecutor.ExecuteScript("return document.getElementsByTagName('div').length;");
}
}
}
| |
// The MIT License (MIT)
//
// Copyright (c) Andrew Armstrong/FacticiusVir 2020
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// This file was automatically generated and should not be edited directly.
using System;
using System.Runtime.InteropServices;
namespace SharpVk
{
/// <summary>
/// Managed representation of the Vulkan VkRenderPassCreateInfo2 structure,
/// describing the attachments, subpasses, dependencies and correlated view
/// masks used to create a render pass. Generated code: the managed arrays are
/// marshalled to unmanaged memory in MarshalTo and read back in MarshalFrom.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public partial struct RenderPassCreateInfo2
{
/// <summary>
/// Optional creation flags; marshalled as default(RenderPassCreateFlags) when null.
/// </summary>
public SharpVk.RenderPassCreateFlags? Flags
{
get;
set;
}
/// <summary>
/// Attachment descriptions; may be null (marshalled as a null pointer with count 0).
/// </summary>
public SharpVk.AttachmentDescription2[] Attachments
{
get;
set;
}
/// <summary>
/// Subpass descriptions; may be null (marshalled as a null pointer with count 0).
/// </summary>
public SharpVk.SubpassDescription2[] Subpasses
{
get;
set;
}
/// <summary>
/// Subpass dependencies; may be null (marshalled as a null pointer with count 0).
/// </summary>
public SharpVk.SubpassDependency2[] Dependencies
{
get;
set;
}
/// <summary>
/// Correlated view masks for multiview rendering; may be null.
/// </summary>
public uint[] CorrelatedViewMasks
{
get;
set;
}
/// <summary>
/// Copies this structure's data into the given unmanaged interop structure,
/// allocating unmanaged arrays via Interop.HeapUtil for each non-null managed
/// array. NOTE(review): lifetime/ownership of the HeapUtil allocations is
/// managed outside this method — confirm against the interop layer.
/// </summary>
/// <param name="pointer">
/// Destination unmanaged structure to populate.
/// </param>
internal unsafe void MarshalTo(SharpVk.Interop.RenderPassCreateInfo2* pointer)
{
// Generated code assigns this structure-type tag; confirm the enum member
// against the SharpVk generator if it looks suspicious.
pointer->SType = StructureType.RenderPassCreateInfo2Version;
pointer->Next = null;
if (this.Flags != null)
{
pointer->Flags = this.Flags.Value;
}
else
{
pointer->Flags = default(SharpVk.RenderPassCreateFlags);
}
// Count is derived from the managed array (0 when null); the pointer is only
// populated when the array is non-null.
pointer->AttachmentCount = (uint)(Interop.HeapUtil.GetLength(this.Attachments));
if (this.Attachments != null)
{
var fieldPointer = (SharpVk.Interop.AttachmentDescription2*)(Interop.HeapUtil.AllocateAndClear<SharpVk.Interop.AttachmentDescription2>(this.Attachments.Length).ToPointer());
for(int index = 0; index < (uint)(this.Attachments.Length); index++)
{
this.Attachments[index].MarshalTo(&fieldPointer[index]);
}
pointer->Attachments = fieldPointer;
}
else
{
pointer->Attachments = null;
}
pointer->SubpassCount = (uint)(Interop.HeapUtil.GetLength(this.Subpasses));
if (this.Subpasses != null)
{
var fieldPointer = (SharpVk.Interop.SubpassDescription2*)(Interop.HeapUtil.AllocateAndClear<SharpVk.Interop.SubpassDescription2>(this.Subpasses.Length).ToPointer());
for(int index = 0; index < (uint)(this.Subpasses.Length); index++)
{
this.Subpasses[index].MarshalTo(&fieldPointer[index]);
}
pointer->Subpasses = fieldPointer;
}
else
{
pointer->Subpasses = null;
}
pointer->DependencyCount = (uint)(Interop.HeapUtil.GetLength(this.Dependencies));
if (this.Dependencies != null)
{
var fieldPointer = (SharpVk.Interop.SubpassDependency2*)(Interop.HeapUtil.AllocateAndClear<SharpVk.Interop.SubpassDependency2>(this.Dependencies.Length).ToPointer());
for(int index = 0; index < (uint)(this.Dependencies.Length); index++)
{
this.Dependencies[index].MarshalTo(&fieldPointer[index]);
}
pointer->Dependencies = fieldPointer;
}
else
{
pointer->Dependencies = null;
}
pointer->CorrelatedViewMaskCount = (uint)(Interop.HeapUtil.GetLength(this.CorrelatedViewMasks));
if (this.CorrelatedViewMasks != null)
{
// Plain value copy; uint needs no per-element MarshalTo.
var fieldPointer = (uint*)(Interop.HeapUtil.AllocateAndClear<uint>(this.CorrelatedViewMasks.Length).ToPointer());
for(int index = 0; index < (uint)(this.CorrelatedViewMasks.Length); index++)
{
fieldPointer[index] = this.CorrelatedViewMasks[index];
}
pointer->CorrelatedViewMasks = fieldPointer;
}
else
{
pointer->CorrelatedViewMasks = null;
}
}
/// <summary>
/// Builds a managed RenderPassCreateInfo2 from the given unmanaged interop
/// structure, copying each unmanaged array (using its associated count field)
/// into a new managed array. Null unmanaged pointers yield null arrays.
/// </summary>
/// <param name="pointer">
/// Source unmanaged structure to read from.
/// </param>
internal static unsafe RenderPassCreateInfo2 MarshalFrom(SharpVk.Interop.RenderPassCreateInfo2* pointer)
{
RenderPassCreateInfo2 result = default(RenderPassCreateInfo2);
result.Flags = pointer->Flags;
if (pointer->Attachments != null)
{
var fieldPointer = new SharpVk.AttachmentDescription2[(uint)(pointer->AttachmentCount)];
for(int index = 0; index < (uint)(pointer->AttachmentCount); index++)
{
fieldPointer[index] = SharpVk.AttachmentDescription2.MarshalFrom(&pointer->Attachments[index]);
}
result.Attachments = fieldPointer;
}
else
{
result.Attachments = null;
}
if (pointer->Subpasses != null)
{
var fieldPointer = new SharpVk.SubpassDescription2[(uint)(pointer->SubpassCount)];
for(int index = 0; index < (uint)(pointer->SubpassCount); index++)
{
fieldPointer[index] = SharpVk.SubpassDescription2.MarshalFrom(&pointer->Subpasses[index]);
}
result.Subpasses = fieldPointer;
}
else
{
result.Subpasses = null;
}
if (pointer->Dependencies != null)
{
var fieldPointer = new SharpVk.SubpassDependency2[(uint)(pointer->DependencyCount)];
for(int index = 0; index < (uint)(pointer->DependencyCount); index++)
{
fieldPointer[index] = SharpVk.SubpassDependency2.MarshalFrom(&pointer->Dependencies[index]);
}
result.Dependencies = fieldPointer;
}
else
{
result.Dependencies = null;
}
if (pointer->CorrelatedViewMasks != null)
{
var fieldPointer = new uint[(uint)(pointer->CorrelatedViewMaskCount)];
for(int index = 0; index < (uint)(pointer->CorrelatedViewMaskCount); index++)
{
fieldPointer[index] = pointer->CorrelatedViewMasks[index];
}
result.CorrelatedViewMasks = fieldPointer;
}
else
{
result.CorrelatedViewMasks = null;
}
return result;
}
}
}
| |
//! \file ImageAG.cs
//! \date Sun May 10 23:53:34 2015
//! \brief Masys Enhanced Game Unit image format.
//
// Copyright (C) 2015-2018 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System;
using System.ComponentModel.Composition;
using System.Windows.Media;
using System.IO;
namespace GameRes.Formats.Megu
{
[Export(typeof(ImageFormat))]
public class AgFormat : ImageFormat
{
    public override string Tag { get { return "ACG"; } }
    public override string Description { get { return "Masys image format"; } }
    public override uint Signature { get { return 0x00644741u; } } // 'AGd'

    // Header layout: +4 width, +8 height, +0x38 length of the alpha stream.
    public override ImageMetaData ReadMetaData (IBinaryStream file)
    {
        file.Position = 4;
        uint width  = file.ReadUInt32();
        uint height = file.ReadUInt32();
        file.Position = 0x38;
        int alpha_size = file.ReadInt32();
        // A non-empty alpha stream means the image decodes to 32bpp BGRA.
        return new ImageMetaData
        {
            Width  = width,
            Height = height,
            BPP    = alpha_size == 0 ? 24 : 32,
        };
    }

    public override ImageData Read (IBinaryStream stream, ImageMetaData info)
    {
        var decoder = new AgReader (stream, info);
        decoder.Unpack();
        return ImageData.Create (info, decoder.Format, null, decoder.Data);
    }

    // Encoding is not supported for this format.
    public override void Write (Stream file, ImageData image)
    {
        throw new NotImplementedException ("AgFormat.Write not implemented");
    }
}
internal class AgReader
{
// Five bit/byte streams that together encode the colour channels; see
// ReadColor for how they cooperate.
AgBitStream in1;
AgBitStream in2;
AgBitStream in3;
AgBitStream in4;
AgBitStream in5;
byte[] m_alpha;
byte[] m_output;
int m_width;
int m_height;
int m_pixel_size;
// Per-channel predictor (B, G, R) used by ReadColor; seeded from the header
// and updated as pixels are decoded.
byte[] m_first = new byte[3];
public byte[] Data { get { return m_output; } }
public PixelFormat Format { get; private set; }
// Reads the section table starting at offset 0x0C: six (offset, size) pairs
// followed by the 3 predictor seed bytes. Sections 1-5 are bit streams;
// section 6, when present, is an RLE-compressed alpha plane.
public AgReader (IBinaryStream input, ImageMetaData info)
{
m_width = (int)info.Width;
m_height = (int)info.Height;
input.Position = 0x0c;
uint offset1 = input.ReadUInt32();
int size1 = input.ReadInt32();
uint offset2 = input.ReadUInt32();
int size2 = input.ReadInt32();
uint offset3 = input.ReadUInt32();
int size3 = input.ReadInt32();
uint offset4 = input.ReadUInt32();
int size4 = input.ReadInt32();
uint offset5 = input.ReadUInt32();
int size5 = input.ReadInt32();
uint offset6 = input.ReadUInt32();
int size6 = input.ReadInt32();
input.Read (m_first, 0, 3);
if (size1 != 0)
in1 = new AgBitStream (input, offset1, size1);
if (size2 != 0)
in2 = new AgBitStream (input, offset2, size2);
if (size3 != 0)
in3 = new AgBitStream (input, offset3, size3);
if (size4 != 0)
in4 = new AgBitStream (input, offset4, size4);
if (size5 != 0)
in5 = new AgBitStream (input, offset5, size5);
if (size6 != 0)
{
// Alpha plane present: decode it now and emit 32bpp output.
input.Position = offset6;
m_alpha = new byte[m_height*m_width];
RleDecode (input, m_alpha);
Format = PixelFormats.Bgra32;
m_pixel_size = 4;
}
else
{
Format = PixelFormats.Bgr24;
m_pixel_size = 3;
}
m_output = new byte[m_width*m_height*m_pixel_size];
}
// Reads a section into a buffer with 4 bytes of trailing slack (presumably
// so bit readers can over-read safely — TODO confirm).
static internal byte[] ReadSection (IBinaryStream input, long offset, int size)
{
input.Position = offset;
var buf = new byte[size + 4];
if (size != input.Read (buf, 0, size))
throw new InvalidFormatException ("Unexpected end of file");
return buf;
}
// Decodes all pixels row by row. Within a row each decoded pixel becomes
// the predictor for the next; at the end of a row the predictor is reset to
// the first pixel of the row just decoded.
public void Unpack ()
{
int dst = 0;
int stride = m_width * m_pixel_size;
for (int y = 0; y < m_height; ++y)
{
for (int x = 0; x < stride; x += m_pixel_size)
{
byte B = ReadColor (0);
byte G = ReadColor (1);
byte R = ReadColor (2);
m_output[dst+x  ] = B;
m_output[dst+x+1] = G;
m_output[dst+x+2] = R;
m_first[0] = B;
m_first[1] = G;
m_first[2] = R;
}
m_first[0] = m_output[dst];
m_first[1] = m_output[dst+1];
m_first[2] = m_output[dst+2];
dst += stride;
}
if (m_alpha != null)
ApplyAlpha();
}
// Decodes one colour sample for the given channel (0=B, 1=G, 2=R):
//   in1 bit set  -> literal byte from in5;
//   in3 bit set  -> repeat the predictor value;
//   otherwise    -> delta of 1..16 from in4, subtracted from the predictor
//                   when the in2 bit is set, added otherwise.
private byte ReadColor (int channel)
{
byte c;
if (0 != in1.GetBit())
{
c = in5.GetByte();
}
else if (0 != in3.GetBit())
{
c = m_first[channel];
}
else
{
c = (byte)(in4.GetNibble() + 1);
if (0 != in2.GetBit())
c = (byte)(m_first[channel] - c);
else
c += m_first[channel];
}
return c;
}
// Writes the decoded alpha plane into every 4th output byte, rescaling the
// stored 0..0x40 range to 0..0xFF (clamped).
private void ApplyAlpha ()
{
int src = 0;
for (int i = 3; i < m_output.Length; i += 4)
{
int alpha = Math.Min (m_alpha[src++]*0xff/0x40, 0xff);
m_output[i] = (byte)alpha;
}
}
// Simple RLE: a byte with the high bit set is followed by a 16-bit repeat
// count for its low 7 bits; otherwise the byte is copied through verbatim.
private static void RleDecode (IBinaryStream src, byte[] dst_buf)
{
int remaining = dst_buf.Length;
int dst = 0;
while (remaining > 0)
{
byte v = src.ReadUInt8();
int count;
if (0 != (v & 0x80))
{
v &= 0x7F;
count = src.ReadUInt16();
for (int j = 0; j < count; ++j)
{
dst_buf[dst++] = v;
}
}
else
{
dst_buf[dst++] = v;
count = 1;
}
remaining -= count;
}
}
}
// LSB-first bit reader over one section of an AG file. The accumulator is
// primed with a 0x100 marker bit so that it collapses back to exactly 1 when
// the current byte has been fully consumed.
internal class AgBitStream
{
    byte[] m_input;
    int m_src = 0;
    int m_bits = 1;

    public AgBitStream (IBinaryStream input, long offset, int size)
    {
        m_input = AgReader.ReadSection (input, offset, size);
    }

    public int GetBit ()
    {
        if (m_bits == 1)
            m_bits = 0x100 | m_input[m_src++]; // refill from the next byte
        int result = m_bits & 1;
        m_bits >>= 1;
        return result;
    }

    public int GetNibble ()
    {
        if (m_bits == 1)
            m_bits = 0x100 | m_input[m_src++]; // refill from the next byte
        int result = m_bits & 0xF;
        m_bits >>= 4;
        return result;
    }

    // Raw byte fetch, bypassing the bit accumulator.
    public byte GetByte ()
    {
        return m_input[m_src++];
    }
}
}
| |
namespace six2015.Models
{
using System;
using System.Data.Entity;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
/// <summary>
/// Entity Framework 6 context bound to the "Model1" connection string.
/// All numeric keys are mapped as decimal(38,0) and all string columns as
/// non-Unicode (varchar), matching the underlying (presumably Oracle-style)
/// schema — confirm against the database before changing.
/// </summary>
public partial class Model1 : DbContext
{
public Model1()
: base("name=Model1")
{
}
public virtual DbSet<HISTORY> HISTORY { get; set; }
public virtual DbSet<MESSAGE> MESSAGE { get; set; }
public virtual DbSet<SIXUSER> SIXUSER { get; set; }
public virtual DbSet<ABSTUDY> ABSTUDY { get; set; }
public virtual DbSet<COUNT> COUNT { get; set; }
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
// HISTORY: decimal(38,0) key plus varchar columns.
modelBuilder.Entity<HISTORY>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<HISTORY>()
.Property(e => e.IDCARD)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.LICENCE)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.SREMARK)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.PHONENUMBER)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.DEDUCTPOINTS)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.ZHIDUINUMBER)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.ADDRESS)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.FILENAME)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.LICENCENUMBER)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.STATUS)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.PHOTO)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.PRINTED)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.PROCESSED)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.MESSAGED)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.STUDYLOG)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.FAILURE)
.IsUnicode(false);
modelBuilder.Entity<HISTORY>()
.Property(e => e.COUNTY)
.IsUnicode(false);
// MESSAGE: decimal(38,0) keys, varchar content.
modelBuilder.Entity<MESSAGE>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<MESSAGE>()
.Property(e => e.HISTORYID)
.HasPrecision(38, 0);
modelBuilder.Entity<MESSAGE>()
.Property(e => e.CONTENT)
.IsUnicode(false);
modelBuilder.Entity<MESSAGE>()
.Property(e => e.SENT)
.IsUnicode(false);
// SIXUSER: credentials as varchar; POWER stored as decimal(38,0).
modelBuilder.Entity<SIXUSER>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<SIXUSER>()
.Property(e => e.PASSWORD)
.IsUnicode(false);
modelBuilder.Entity<SIXUSER>()
.Property(e => e.POWER)
.HasPrecision(38, 0);
// ABSTUDY: varchar columns only.
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.IDCARD)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.SNAME)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.LICENCE)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.SREMARK)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.PHOTO)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.PHONENUMBER)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.DEDUCTPOINTS)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.LICENCENUMBER)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.FILENAME)
.IsUnicode(false);
modelBuilder.Entity<ABSTUDY>()
.Property(e => e.STATUS)
.IsUnicode(false);
// COUNT: per-district counters, all decimal(38,0).
modelBuilder.Entity<COUNT>()
.Property(e => e.PAGEVIEW)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.PAGEVIEWDAY)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.APPLICATION)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.APPLICATIONDAY)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.KAIFAQU)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.ZHIFUQU)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.FUSHANQU)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.MUPINGQU)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.LAISHANQU)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.LONGKOU)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.ZHAOYUAN)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.QIXIA)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.LAIZHOU)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.CHANGDAO)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.HAIYANG)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.LAIYANG)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.PENGLAI)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.GAOXINQU)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.OTHER)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.STARTLEARNINGVOLUME)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNT>()
.Property(e => e.STARTLEARNINGVOLUMETODAY)
.HasPrecision(38, 0);
}
}
}
| |
using Assimp;
using ImGuiNET;
using Microsoft.Xna.Framework.Input;
using OpenKh.Kh2;
using OpenKh.Tools.Common.CustomImGui;
using OpenKh.Tools.Kh2MapStudio.Windows;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Numerics;
using System.Windows;
using Xe.Tools.Wpf.Dialogs;
using static OpenKh.Tools.Common.CustomImGui.ImGuiEx;
using xna = Microsoft.Xna.Framework;
namespace OpenKh.Tools.Kh2MapStudio
{
class App : IDisposable
{
private static readonly List<FileDialogFilter> MapFilter =
FileDialogFilterComposer.Compose()
.AddExtensions("MAP file", "map")
.AddAllFiles();
private static readonly List<FileDialogFilter> ArdFilter =
FileDialogFilterComposer.Compose()
.AddExtensions("ARD file", "ard")
.AddAllFiles();
private static readonly List<FileDialogFilter> ModelFilter =
FileDialogFilterComposer.Compose()
.AddExtensions("glTF file (GL Transmission Format)", "gltf")
.AddExtensions("FBX file", "fbx")
.AddExtensions("DAE file (Collada) (might be unaccurate)", "dae")
.AddExtensions("OBJ file (Wavefront) (might lose some information)", "obj")
.AddAllFiles();
private readonly Vector4 BgUiColor = new Vector4(0.0f, 0.0f, 0.0f, 0.5f);
private readonly MonoGameImGuiBootstrap _bootstrap;
private bool _exitFlag = false;
private readonly Dictionary<Keys, Action> _keyMapping = new Dictionary<Keys, Action>();
private readonly MapRenderer _mapRenderer;
private string _gamePath;
private string _mapName;
private string _region;
private string _ardPath;
private string _mapPath;
private string _objPath;
private List<string> _mapList = new List<string>();
private ObjEntryController _objEntryController;
private xna.Point _previousMousePosition;
public string Title
{
get
{
var mapName = _mapName != null ? $"{_mapName}@" : string.Empty;
return $"{mapName}{_gamePath ?? "unloaded"} | {MonoGameImGuiBootstrap.ApplicationName}";
}
}
private string GamePath
{
get => _gamePath;
set
{
_gamePath = value;
UpdateTitle();
EnumerateMapList();
_objEntryController?.Dispose();
_objEntryController = new ObjEntryController(
_bootstrap.GraphicsDevice,
_objPath,
Path.Combine(_gamePath, "00objentry.bin"));
_mapRenderer.ObjEntryController = _objEntryController;
Settings.Default.GamePath = value;
Settings.Default.Save();
}
}
private string MapName
{
get => _mapName;
set
{
_mapName = value;
UpdateTitle();
_mapRenderer.Close();
_mapRenderer.OpenMap(Path.Combine(_mapPath, $"{_mapName}.map"));
_mapRenderer.OpenArd(Path.Combine(_ardPath, $"{_mapName}.ard"));
}
}
private bool IsGameOpen => !string.IsNullOrEmpty(_gamePath);
private bool IsMapOpen => !string.IsNullOrEmpty(_mapName);
private bool IsOpen => IsGameOpen && IsMapOpen;
public App(MonoGameImGuiBootstrap bootstrap, string gamePath = null)
{
_bootstrap = bootstrap;
_bootstrap.Title = Title;
_mapRenderer = new MapRenderer(bootstrap.Content, bootstrap.GraphicsDeviceManager);
AddKeyMapping(Keys.O, MenuFileOpen);
AddKeyMapping(Keys.S, MenuFileSave);
AddKeyMapping(Keys.Q, MenuFileUnload);
if (string.IsNullOrEmpty(gamePath))
gamePath = Settings.Default.GamePath;
if (!string.IsNullOrEmpty(gamePath))
OpenFolder(gamePath);
ImGui.PushStyleColor(ImGuiCol.MenuBarBg, BgUiColor);
}
public bool MainLoop()
{
_bootstrap.GraphicsDevice.Clear(xna.Color.CornflowerBlue);
ProcessKeyMapping();
if (!_bootstrap.ImGuiWantTextInput)
ProcessKeyboardInput(Keyboard.GetState(), 1f / 60);
if (!_bootstrap.ImGuiWantCaptureMouse)
ProcessMouseInput(Mouse.GetState());
ImGui.PushStyleColor(ImGuiCol.WindowBg, BgUiColor);
ForControl(ImGui.BeginMainMenuBar, ImGui.EndMainMenuBar, MainMenu);
MainWindow();
ForWindow("Tools", () =>
{
if (EditorSettings.ViewCamera)
CameraWindow.Run(_mapRenderer.Camera);
if (EditorSettings.ViewLayerControl)
LayerControllerWindow.Run(_mapRenderer);
if (EditorSettings.ViewSpawnPoint)
SpawnPointWindow.Run(_mapRenderer);
if (EditorSettings.ViewMeshGroup)
MeshGroupWindow.Run(_mapRenderer.MapMeshGroups);
if (EditorSettings.ViewBobDescriptor)
BobDescriptorWindow.Run(_mapRenderer.BobDescriptors, _mapRenderer.BobMeshGroups.Count);
if (EditorSettings.ViewSpawnScriptMap)
SpawnScriptWindow.Run("map", _mapRenderer.SpawnScriptMap);
if (EditorSettings.ViewSpawnScriptBattle)
SpawnScriptWindow.Run("btl", _mapRenderer.SpawnScriptBattle);
if (EditorSettings.ViewSpawnScriptEvent)
SpawnScriptWindow.Run("evt", _mapRenderer.SpawnScriptEvent);
});
ImGui.PopStyleColor();
return _exitFlag;
}
public void Dispose()
{
_objEntryController?.Dispose();
}
private void MainWindow()
{
if (!IsGameOpen)
{
ImGui.Text("Game content not loaded.");
return;
}
ForControl(() =>
{
var nextPos = ImGui.GetCursorPos();
var ret = ImGui.Begin("MapList",
ImGuiWindowFlags.NoDecoration |
ImGuiWindowFlags.NoCollapse |
ImGuiWindowFlags.NoMove);
ImGui.SetWindowPos(nextPos);
ImGui.SetWindowSize(new Vector2(64, 0));
return ret;
}, () => { }, () =>
{
foreach (var map in _mapList)
{
if (ImGui.Selectable(map, MapName == map))
{
MapName = map;
}
}
});
ImGui.SameLine();
if (!IsMapOpen)
{
ImGui.Text("Please select a map to edit.");
return;
}
_mapRenderer.Update(1f / 60);
_mapRenderer.Draw();
}
void MainMenu()
{
ForMenuBar(() =>
{
ForMenu("File", () =>
{
ForMenuItem("Open extracted game folder...", "CTRL+O", MenuFileOpen);
ForMenuItem("Unload current map+ard", "CTRL+Q", MenuFileUnload, IsOpen);
ForMenuItem("Import extern MAP file", MenuFileOpenMap, IsGameOpen);
ForMenuItem("Import extern ARD file", MenuFileOpenArd, IsGameOpen);
ForMenuItem("Save map+ard", "CTRL+S", MenuFileSave, IsOpen);
ForMenuItem("Save map as...", MenuFileSaveMapAs, IsOpen);
ForMenuItem("Save ard as...", MenuFileSaveArdAs, IsOpen);
ImGui.Separator();
ForMenu("Export", () =>
{
ForMenuItem("Map Collision", ExportMapCollision, _mapRenderer.ShowMapCollision.HasValue);
ForMenuItem("Camera Collision", ExportCameraCollision, _mapRenderer.ShowCameraCollision.HasValue);
ForMenuItem("Light Collision", ExportLightCollision, _mapRenderer.ShowLightCollision.HasValue);
});
ImGui.Separator();
ForMenu("Preferences", () =>
{
ForEdit("Movement speed", () => EditorSettings.MoveSpeed, x => EditorSettings.MoveSpeed = x);
ForEdit("Movement speed (shift)", () => EditorSettings.MoveSpeedShift, x => EditorSettings.MoveSpeedShift = x);
});
ImGui.Separator();
ForMenuItem("Exit", MenuFileExit);
});
ForMenu("View", () =>
{
ForMenuCheck("Camera", () => EditorSettings.ViewCamera, x => EditorSettings.ViewCamera = x);
ForMenuCheck("Layer control", () => EditorSettings.ViewLayerControl, x => EditorSettings.ViewLayerControl = x);
ForMenuCheck("Spawn points", () => EditorSettings.ViewSpawnPoint, x => EditorSettings.ViewSpawnPoint = x);
ForMenuCheck("BOB descriptors", () => EditorSettings.ViewBobDescriptor, x => EditorSettings.ViewBobDescriptor = x);
ForMenuCheck("Mesh group", () => EditorSettings.ViewMeshGroup, x => EditorSettings.ViewMeshGroup = x);
ForMenuCheck("Spawn script MAP", () => EditorSettings.ViewSpawnScriptMap, x => EditorSettings.ViewSpawnScriptMap = x);
ForMenuCheck("Spawn script BTL", () => EditorSettings.ViewSpawnScriptBattle, x => EditorSettings.ViewSpawnScriptBattle = x);
ForMenuCheck("Spawn script EVT", () => EditorSettings.ViewSpawnScriptEvent, x => EditorSettings.ViewSpawnScriptEvent = x);
});
ForMenu("Help", () =>
{
ForMenuItem("About", ShowAboutDialog);
});
});
}
private void MenuFileOpen() => FileDialog.OnFolder(OpenFolder);
private void MenuFileUnload() => _mapRenderer.Close();
private void MenuFileOpenMap() => FileDialog.OnOpen(_mapRenderer.OpenMap, MapFilter);
private void MenuFileOpenArd() => FileDialog.OnOpen(_mapRenderer.OpenArd, ArdFilter);
private void MenuFileSave()
{
_mapRenderer.SaveMap(Path.Combine(_mapPath, MapName + ".map"));
_mapRenderer.SaveArd(Path.Combine(_ardPath, MapName + ".ard"));
}
private void MenuFileSaveMapAs()
{
var defaultName = MapName + ".map";
FileDialog.OnSave(_mapRenderer.SaveMap, MapFilter, defaultName);
}
private void MenuFileSaveArdAs()
{
var defaultName = MapName + ".ard";
FileDialog.OnSave(_mapRenderer.SaveArd, ArdFilter, defaultName);
}
private void ExportMapCollision() => FileDialog.OnSave(fileName =>
{
ExportScene(fileName, _mapRenderer.MapCollision.Scene);
}, ModelFilter, $"{MapName}_map-collision.dae");
private void ExportCameraCollision() => FileDialog.OnSave(fileName =>
{
ExportScene(fileName, _mapRenderer.CameraCollision.Scene);
}, ModelFilter, $"{MapName}_camera-collision.dae");
private void ExportLightCollision() => FileDialog.OnSave(fileName =>
{
ExportScene(fileName, _mapRenderer.LightCollision.Scene);
}, ModelFilter, $"{MapName}_light-collision.dae");
private void MenuFileExit() => _exitFlag = true;
public void OpenFolder(string gamePath)
{
try
{
if (!Directory.Exists(_ardPath = Path.Combine(gamePath, "ard")) ||
!Directory.Exists(_mapPath = Path.Combine(gamePath, "map")) ||
!Directory.Exists(_objPath = Path.Combine(gamePath, "obj")))
throw new DirectoryNotFoundException(
"The specified directory must contain the full extracted copy of the game.");
GamePath = gamePath;
}
catch (Exception ex)
{
ShowError(ex.Message);
}
}
private void UpdateTitle()
{
_bootstrap.Title = Title;
}
private void EnumerateMapList()
{
var mapFiles = Array.Empty<string>();
foreach (var region in Constants.Regions)
{
var testPath = Path.Combine(_mapPath, region);
if (Directory.Exists(testPath))
{
mapFiles = Directory.GetFiles(testPath, "*.map");
if (mapFiles.Length != 0)
{
_mapPath = testPath;
_region = region;
break;
}
}
}
_mapList.Clear();
_mapList.AddRange(mapFiles.Select(Path.GetFileNameWithoutExtension));
}
private void AddKeyMapping(Keys key, Action action)
{
_keyMapping[key] = action;
}
private void ProcessKeyMapping()
{
var k = Keyboard.GetState();
if (k.IsKeyDown(Keys.LeftControl))
{
var keys = k.GetPressedKeys();
foreach (var key in keys)
{
if (_keyMapping.TryGetValue(key, out var action))
action();
}
}
}
private void ProcessKeyboardInput(KeyboardState keyboard, float deltaTime)
{
var speed = (float)(deltaTime * EditorSettings.MoveSpeed);
var moveSpeed = speed;
if (keyboard.IsKeyDown(Keys.LeftShift) || keyboard.IsKeyDown(Keys.RightShift))
moveSpeed = (float)(deltaTime * EditorSettings.MoveSpeedShift);
var camera = _mapRenderer.Camera;
if (keyboard.IsKeyDown(Keys.W))
camera.CameraPosition += Vector3.Multiply(camera.CameraLookAtX, moveSpeed * 5);
if (keyboard.IsKeyDown(Keys.S))
camera.CameraPosition -= Vector3.Multiply(camera.CameraLookAtX, moveSpeed * 5);
if (keyboard.IsKeyDown(Keys.D))
camera.CameraPosition -= Vector3.Multiply(camera.CameraLookAtY, moveSpeed * 5);
if (keyboard.IsKeyDown(Keys.A))
camera.CameraPosition += Vector3.Multiply(camera.CameraLookAtY, moveSpeed * 5);
if (keyboard.IsKeyDown(Keys.Q))
camera.CameraPosition += Vector3.Multiply(camera.CameraLookAtZ, moveSpeed * 5);
if (keyboard.IsKeyDown(Keys.E))
camera.CameraPosition -= Vector3.Multiply(camera.CameraLookAtZ, moveSpeed * 5);
if (keyboard.IsKeyDown(Keys.Up))
camera.CameraRotationYawPitchRoll += new Vector3(0, 0, 1 * speed);
if (keyboard.IsKeyDown(Keys.Down))
camera.CameraRotationYawPitchRoll -= new Vector3(0, 0, 1 * speed);
if (keyboard.IsKeyDown(Keys.Left))
camera.CameraRotationYawPitchRoll += new Vector3(1 * speed, 0, 0);
if (keyboard.IsKeyDown(Keys.Right))
camera.CameraRotationYawPitchRoll -= new Vector3(1 * speed, 0, 0);
}
private void ProcessMouseInput(MouseState mouse)
{
const float Speed = 0.25f;
if (mouse.LeftButton == ButtonState.Pressed)
{
var camera = _mapRenderer.Camera;
var xSpeed = (_previousMousePosition.X - mouse.Position.X) * Speed;
var ySpeed = (_previousMousePosition.Y - mouse.Position.Y) * Speed;
camera.CameraRotationYawPitchRoll += new Vector3(1 * -xSpeed, 0, 0);
camera.CameraRotationYawPitchRoll += new Vector3(0, 0, 1 * ySpeed);
}
_previousMousePosition = mouse.Position;
}
/// <summary>
/// Exports <paramref name="scene"/> to <paramref name="fileName"/>, selecting the
/// Assimp export format whose file extension matches the destination file's
/// extension. Shows an error dialog when no exporter supports the extension.
/// </summary>
/// <param name="fileName">Destination path; its extension picks the export format.</param>
/// <param name="scene">The Assimp scene to export.</param>
private static void ExportScene(string fileName, Scene scene)
{
    using var ctx = new AssimpContext();
    // ToLowerInvariant: extension matching must not depend on the user's
    // culture (under tr-TR, ToLower() maps 'I' to dotless 'ı' and would fail
    // to match e.g. ".FBX").
    var extension = Path.GetExtension(fileName).ToLowerInvariant();
    foreach (var format in ctx.GetSupportedExportFormats())
    {
        if ($".{format.FileExtension}" != extension)
            continue;
        // NOTE(review): an empty material is appended before every export —
        // presumably some exporters require at least one material; confirm.
        var material = new Material();
        material.Clear();
        scene.Materials.Add(material);
        ctx.ExportFile(scene, fileName, format.FormatId);
        return;
    }
    ShowError($"Unable to export with '{extension}' extension.");
}
/// <summary>Shows a modal error message box with an OK button and error icon.</summary>
/// <param name="message">Text displayed in the dialog body.</param>
/// <param name="title">Dialog title; defaults to "Error".</param>
public static void ShowError(string message, string title = "Error") =>
    MessageBox.Show(message, title, MessageBoxButton.OK, MessageBoxImage.Error);
/// <summary>Shows the application's "About" message box.</summary>
private void ShowAboutDialog() =>
    MessageBox.Show("OpenKH is amazing.");
}
}
| |
using System;
using System.Collections;
using System.Text;
namespace Rainbow.Framework.Helpers
{
/// <summary>
/// This struct stores custom parametes needed by
/// the search helper for do the search string.
/// This make the search string consistent and easy
/// to change without modify all the searchable modules
/// </summary>
public struct SearchDefinition
{
    // Prefix used to qualify item-table columns in the generated SQL.
    private const string strItm = "itm.";

    /// <summary>
    /// Initializes a new instance of the <see cref="T:SearchDefinition"/> struct
    /// using the default page/item id columns and no author/date columns.
    /// </summary>
    /// <param name="tableName">Name of the table holding the searchable items.</param>
    /// <param name="titleField">The title column.</param>
    /// <param name="abstractField">The abstract column.</param>
    /// <param name="searchField">Column to search; empty means title + abstract.</param>
    public SearchDefinition(string tableName, string titleField, string abstractField, string searchField)
        : this(tableName, "mod.TabID", "ItemID", titleField, abstractField, "''", "''", searchField)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="T:SearchDefinition"/> struct
    /// using the default page/item id columns.
    /// </summary>
    /// <param name="tableName">Name of the table holding the searchable items.</param>
    /// <param name="titleField">The title column.</param>
    /// <param name="abstractField">The abstract column.</param>
    /// <param name="createdByUserField">The created-by-user column (or a quoted SQL constant).</param>
    /// <param name="createdDateField">The created-date column (or a quoted SQL constant).</param>
    /// <param name="searchField">Column to search; empty means title + abstract.</param>
    public SearchDefinition(string tableName, string titleField, string abstractField, string createdByUserField,
                            string createdDateField, string searchField)
        : this(tableName, "mod.TabID", "ItemID", titleField, abstractField, createdByUserField, createdDateField,
               searchField)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="T:SearchDefinition"/> struct
    /// with every column specified explicitly.
    /// </summary>
    /// <param name="tableName">Name of the table holding the searchable items.</param>
    /// <param name="tabIDField">Column that yields the page (tab) id.</param>
    /// <param name="itemIDField">Column that yields the item id.</param>
    /// <param name="titleField">The title column.</param>
    /// <param name="abstractField">The abstract column.</param>
    /// <param name="createdByUserField">The created-by-user column (or a quoted SQL constant).</param>
    /// <param name="createdDateField">The created-date column (or a quoted SQL constant).</param>
    /// <param name="searchField">Column to search; empty means title + abstract.</param>
    public SearchDefinition(string tableName, string tabIDField, string itemIDField, string titleField,
                            string abstractField, string createdByUserField, string createdDateField,
                            string searchField)
    {
        TableName = tableName;
        PageIDField = tabIDField;
        ItemIDField = itemIDField;
        TitleField = titleField;
        AbstractField = abstractField;
        CreatedByUserField = createdByUserField;
        CreatedDateField = createdDateField;
        ArrSearchFields = BuildSearchFields(titleField, abstractField, searchField);
    }

    /// <summary>
    /// Builds the list of fully-qualified columns the search should test.
    /// A null/empty search field means "search title and abstract"; the literal
    /// "Title" maps to the configured title column. (Shared by all constructors;
    /// previously this logic was duplicated three times. Null is now treated
    /// like empty instead of producing a bogus "itm." entry.)
    /// </summary>
    private static ArrayList BuildSearchFields(string titleField, string abstractField, string searchField)
    {
        var fields = new ArrayList();
        if (string.IsNullOrEmpty(searchField))
        {
            fields.Add(strItm + titleField);
            fields.Add(strItm + abstractField);
        }
        else if (searchField == "Title")
        {
            fields.Add(strItm + titleField);
        }
        else
        {
            fields.Add(strItm + searchField);
        }
        return fields;
    }

    /// <summary>Name of the table holding the searchable items.</summary>
    public string TableName;

    /// <summary>Column that yields the page (tab) id; "mod.TabID" by default.</summary>
    public string PageIDField;

    /// <summary>Column that yields the item id; "ItemID" by default.</summary>
    public string ItemIDField;

    /// <summary>Column projected as the result title.</summary>
    public string TitleField;

    /// <summary>Column projected as the result abstract.</summary>
    public string AbstractField;

    /// <summary>Created-by-user column, or a quoted SQL constant such as "''".</summary>
    public string CreatedByUserField;

    /// <summary>Created-date column, or a quoted SQL constant such as "''".</summary>
    public string CreatedDateField;

    /// <summary>Fully-qualified columns tested by the generated WHERE clause.</summary>
    public ArrayList ArrSearchFields;

    /// <summary>
    /// Builds the search SELECT, assuming the item table has an item id column.
    /// </summary>
    /// <param name="portalID">The portal ID.</param>
    /// <param name="userID">The user ID.</param>
    /// <param name="searchStr">The raw search string.</param>
    /// <returns>The SQL SELECT statement.</returns>
    public string SearchSqlSelect(int portalID, int userID, string searchStr)
    {
        return SearchSqlSelect(portalID, userID, searchStr, true);
    }

    /// <summary>
    /// Strips a blacklist of SQL keywords and metacharacters from the search text.
    /// NOTE(review): blacklist filtering is a weak defense — the proper fix is a
    /// parameterized query; this only hardens the legacy string-built SQL below.
    /// </summary>
    /// <param name="toClean">Raw user input.</param>
    /// <returns>The input with blacklisted fragments removed.</returns>
    private string FilterString(string toClean)
    {
        string[] knownBad =
        {
            "select", "insert",
            "update", "delete", "drop",
            "--", "'", "char", ";"
        };
        string clean = toClean;
        foreach (string bad in knownBad)
        {
            // Case-insensitive, and repeated until no occurrence remains, so
            // that "SELECT" or "selselectect" cannot slip through (the old
            // single case-sensitive StringBuilder.Replace pass missed both).
            int index;
            while ((index = clean.IndexOf(bad, StringComparison.OrdinalIgnoreCase)) >= 0)
                clean = clean.Remove(index, bad.Length);
        }
        return clean;
    }

    /// <summary>
    /// Builds a SELECT query over the item table joined to the module/tab
    /// metadata tables, restricted by the (filtered) search string.
    /// </summary>
    /// <param name="portalID">The portal ID.</param>
    /// <param name="userID">The user ID.</param>
    /// <param name="searchStr">The raw search string.</param>
    /// <param name="hasItemID">if set to <c>true</c>, the item table has an item id column;
    /// otherwise the module id is projected as the item id.</param>
    /// <returns>The SQL SELECT statement.</returns>
    /// <exception cref="ArgumentException">Fewer than 3 valid characters remain after filtering.</exception>
    public string SearchSqlSelect(int portalID, int userID, string searchStr, bool hasItemID)
    {
        // Fall back to an empty SQL constant when no author/date column was configured.
        if (string.IsNullOrEmpty(CreatedByUserField))
            CreatedByUserField = "''";
        if (string.IsNullOrEmpty(CreatedDateField))
            CreatedDateField = "''";

        // SQL injection filter (see FilterString for its limitations).
        searchStr = FilterString(searchStr);
        if (searchStr.Length < 3)
            throw new ArgumentException(
                "Please use a word with at least 3 valid chars (invalid chars were removed).");

        // Special extended search feature (used by RSS/Community Service), format:
        // "AddExtraSQL:<extra sql>SearchString: <search text>".
        // Ordinal comparisons: these are protocol markers, not linguistic text.
        string extraSql = string.Empty;
        if (searchStr.StartsWith("AddExtraSQL:", StringComparison.Ordinal))
        {
            int posSS = searchStr.IndexOf("SearchString:", StringComparison.Ordinal);
            if (posSS > 0)
            {
                // Text between the two markers is the extra SQL (may be empty);
                // 12 == "AddExtraSQL:".Length.
                extraSql = posSS > 12 ? searchStr.Substring(12, posSS - 12).Trim() : string.Empty;
                // +14 skips "SearchString:" (13 chars) plus one more character —
                // presumably a separating space; TODO confirm callers always send one.
                searchStr = searchStr.Substring(posSS + 14).Trim();
            }
            else
            {
                // No search-string marker: everything after the prefix is extra SQL.
                extraSql = searchStr.Substring(12).Trim();
                searchStr = string.Empty;
            }
            // The fragment is spliced after a WHERE clause, so it must start with AND.
            if (extraSql.Length != 0 && !extraSql.StartsWith("AND", StringComparison.Ordinal))
                extraSql = "AND " + extraSql;
        }

        StringBuilder select = new StringBuilder();
        select.Append("SELECT TOP 50 ");
        select.Append("genModDef.FriendlyName AS ModuleName, ");
        select.Append("CAST (itm.");
        select.Append(TitleField);
        select.Append(" AS NVARCHAR(100)) AS Title, ");
        select.Append("CAST (itm.");
        select.Append(AbstractField);
        select.Append(" AS NVARCHAR(100)) AS Abstract, ");
        select.Append("itm.ModuleID AS ModuleID, ");
        if (hasItemID)
            select.Append(strItm + ItemIDField + " AS ItemID, ");
        else
            select.Append("itm.ModuleID AS ItemID, ");
        if (!CreatedByUserField.StartsWith("'", StringComparison.Ordinal))
            select.Append(strItm); // Add itm only if not a constant value
        select.Append(CreatedByUserField);
        select.Append(" AS CreatedByUser, ");
        if (!CreatedDateField.StartsWith("'", StringComparison.Ordinal))
            select.Append(strItm); // Add itm only if not a constant value
        select.Append(CreatedDateField);
        select.Append(" AS CreatedDate, ");
        select.Append(PageIDField + " AS TabID, ");
        select.Append("tab.TabName AS TabName, ");
        select.Append("genModDef.GeneralModDefID AS GeneralModDefID, ");
        select.Append("mod.ModuleTitle AS ModuleTitle ");
        select.Append("FROM ");
        select.Append(TableName);
        select.Append(" itm INNER JOIN ");
        select.Append("rb_Modules mod ON itm.ModuleID = mod.ModuleID INNER JOIN ");
        select.Append("rb_ModuleDefinitions modDef ON mod.ModuleDefID = modDef.ModuleDefID INNER JOIN ");
        select.Append("rb_Tabs tab ON mod.TabID = tab.TabID INNER JOIN ");
        select.Append("rb_GeneralModuleDefinitions genModDef ON modDef.GeneralModDefID = genModDef.GeneralModDefID ");
        // Former topic support, e.g.:
        //   if (topicName.Length != 0)
        //       select.Append("INNER JOIN rb_ModuleSettings modSet ON mod.ModuleID = modSet.ModuleID");
        // NOTE(review): literal placeholder appended to the SQL — presumably
        // substituted by the caller; confirm before removing.
        select.Append("%TOPIC_PLACEHOLDER_JOIN%");
        SearchHelper.AddSharedSQL(portalID, userID, ref select, TitleField);
        // Former topic support, e.g.:
        //   if (topicName.Length != 0)
        //       select.Append(" AND (modSet.SettingName = 'TopicName' AND modSet.SettingValue='" + topicName + "')");
        select.Append("%TOPIC_PLACEHOLDER%");
        if (searchStr.Length != 0)
            select.Append(" AND " + SearchHelper.CreateTestSQL(ArrSearchFields, searchStr, true));
        if (extraSql.Length != 0)
            select.Append(extraSql);
        return select.ToString();
    }
}
}
| |
using System.Diagnostics;
using Lucene.Net.Documents;
namespace Lucene.Net.Search
{
using NUnit.Framework;
using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
using Directory = Lucene.Net.Store.Directory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using Document = Documents.Document;
using Field = Field;
using IndexReader = Lucene.Net.Index.IndexReader;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
using Term = Lucene.Net.Index.Term;
/// <summary>
/// this class only tests some basic functionality in CSQ, the main parts are mostly
/// tested by MultiTermQuery tests, explanations seems to be tested in TestExplanations!
/// </summary>
[TestFixture]
public class TestConstantScoreQuery : LuceneTestCase
{
    [Test]
    public virtual void TestCSQ()
    {
        // Equality/hashCode contract: CSQs wrapping equal queries/filters are
        // equal to themselves, unequal to each other, and a CSQ is never equal
        // to the raw query it wraps.
        Query q1 = new ConstantScoreQuery(new TermQuery(new Term("a", "b")));
        Query q2 = new ConstantScoreQuery(new TermQuery(new Term("a", "c")));
        Query q3 = new ConstantScoreQuery(TermRangeFilter.NewStringRange("a", "b", "c", true, true));
        QueryUtils.Check(q1);
        QueryUtils.Check(q2);
        QueryUtils.CheckEqual(q1, q1);
        QueryUtils.CheckEqual(q2, q2);
        QueryUtils.CheckEqual(q3, q3);
        QueryUtils.CheckUnequal(q1, q2);
        QueryUtils.CheckUnequal(q2, q3);
        QueryUtils.CheckUnequal(q1, q3);
        QueryUtils.CheckUnequal(q1, new TermQuery(new Term("a", "b")));
    }

    // Runs the query and asserts: exactly one hit is collected, every hit
    // scores expectedScore, and the scorer (and optionally the inner scorer)
    // is implemented by the expected class.
    private void CheckHits(IndexSearcher searcher, Query q, float expectedScore, string scorerClassName, string innerScorerClassName)
    {
        int[] count = new int[1];
        searcher.Search(q, new CollectorAnonymousInnerClassHelper(this, expectedScore, scorerClassName, innerScorerClassName, count));
        Assert.AreEqual(1, count[0], "invalid number of results");
    }

    // Collector that checks scorer implementation classes in SetScorer and the
    // per-hit score in Collect; the hit count is reported back to the caller
    // through the shared one-element array.
    private class CollectorAnonymousInnerClassHelper : ICollector
    {
        private readonly TestConstantScoreQuery OuterInstance;
        private float ExpectedScore;
        private string ScorerClassName;
        private string InnerScorerClassName; // null = don't check the inner scorer
        private int[] Count; // single-element box so the caller can read the hit count

        public CollectorAnonymousInnerClassHelper(TestConstantScoreQuery outerInstance, float expectedScore, string scorerClassName, string innerScorerClassName, int[] count)
        {
            this.OuterInstance = outerInstance;
            this.ExpectedScore = expectedScore;
            this.ScorerClassName = scorerClassName;
            this.InnerScorerClassName = innerScorerClassName;
            this.Count = count;
        }

        private Scorer scorer;

        public virtual void SetScorer(Scorer scorer)
        {
            this.scorer = scorer;
            Assert.AreEqual(ScorerClassName, scorer.GetType().Name, "Scorer is implemented by wrong class");
            if (InnerScorerClassName != null && scorer is ConstantScoreQuery.ConstantScorer)
            {
                ConstantScoreQuery.ConstantScorer innerScorer = (ConstantScoreQuery.ConstantScorer)scorer;
                Assert.AreEqual(InnerScorerClassName, innerScorer.docIdSetIterator.GetType().Name, "inner Scorer is implemented by wrong class");
            }
        }

        public virtual void Collect(int doc)
        {
            // Delta of 0: constant scores must match exactly.
            Assert.AreEqual(ExpectedScore, this.scorer.GetScore(), 0, "Score differs from expected");
            Count[0]++;
        }

        public virtual void SetNextReader(AtomicReaderContext context)
        {
        }

        public virtual bool AcceptsDocsOutOfOrder
        {
            get { return true; }
        }
    }

    [Test]
    public virtual void TestWrapped2Times()
    {
        // Verifies that boosts survive double-wrapping in ConstantScoreQuery,
        // both standalone and inside a BooleanQuery.
        Directory directory = null;
        IndexReader reader = null;
        IndexSearcher searcher = null;
        try
        {
            directory = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
            Document doc = new Document();
            doc.Add(NewStringField("field", "term", Field.Store.NO));
            writer.AddDocument(doc);
            reader = writer.Reader;
            writer.Dispose();
            // we don't wrap with AssertingIndexSearcher in order to have the original scorer in setScorer.
            searcher = NewSearcher(reader, true, false);
            // set a similarity that does not normalize our boost away
            searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
            Query csq1 = new ConstantScoreQuery(new TermQuery(new Term("field", "term")));
            csq1.Boost = 2.0f;
            Query csq2 = new ConstantScoreQuery(csq1);
            csq2.Boost = 5.0f;
            BooleanQuery bq = new BooleanQuery();
            bq.Add(csq1, Occur.SHOULD);
            bq.Add(csq2, Occur.SHOULD);
            Query csqbq = new ConstantScoreQuery(bq);
            csqbq.Boost = 17.0f;
            CheckHits(searcher, csq1, csq1.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, null);
            CheckHits(searcher, csq2, csq2.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, typeof(ConstantScoreQuery.ConstantScorer).Name);
            // for the combined BQ, the scorer should always be BooleanScorer's BucketScorer, because our scorer supports out-of order collection!
            string bucketScorerClass = typeof(FakeScorer).Name;
            CheckHits(searcher, bq, csq1.Boost + csq2.Boost, bucketScorerClass, null);
            CheckHits(searcher, csqbq, csqbq.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, bucketScorerClass);
        }
        finally
        {
            // Dispose in reverse order of acquisition; either may be null if
            // setup failed part-way through.
            if (reader != null)
            {
                reader.Dispose();
            }
            if (directory != null)
            {
                directory.Dispose();
            }
        }
    }

    // Similarity whose query norm is always 1 so boosts are observable as-is.
    private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
    {
        private readonly TestConstantScoreQuery OuterInstance;

        public DefaultSimilarityAnonymousInnerClassHelper(TestConstantScoreQuery outerInstance)
        {
            this.OuterInstance = outerInstance;
        }

        public override float QueryNorm(float sumOfSquaredWeights)
        {
            return 1.0f;
        }
    }

    [Test]
    public virtual void TestConstantScoreQueryAndFilter()
    {
        // A filter-backed CSQ must still honor the additional filter passed to
        // IndexSearcher.Search.
        Directory d = NewDirectory();
        RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
        Document doc = new Document();
        doc.Add(NewStringField("field", "a", Field.Store.NO));
        w.AddDocument(doc);
        doc = new Document();
        doc.Add(NewStringField("field", "b", Field.Store.NO));
        w.AddDocument(doc);
        IndexReader r = w.Reader;
        w.Dispose();
        Filter filterB = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "b"))));
        Query query = new ConstantScoreQuery(filterB);
        IndexSearcher s = NewSearcher(r);
        Assert.AreEqual(1, s.Search(query, filterB, 1).TotalHits); // Query for field:b, Filter field:b
        Filter filterA = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "a"))));
        query = new ConstantScoreQuery(filterA);
        Assert.AreEqual(0, s.Search(query, filterB, 1).TotalHits); // Query field:b, Filter field:a
        r.Dispose();
        d.Dispose();
    }

    // LUCENE-5307
    // don't reuse the scorer of filters since they have been created with bulkScorer=false
    [Test]
    public virtual void TestQueryWrapperFilter()
    {
        Directory d = NewDirectory();
        RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
        Document doc = new Document();
        doc.Add(NewStringField("field", "a", Field.Store.NO));
        w.AddDocument(doc);
        IndexReader r = w.Reader;
        w.Dispose();
        Filter filter = new QueryWrapperFilter(AssertingQuery.Wrap(Random(), new TermQuery(new Term("field", "a"))));
        IndexSearcher s = NewSearcher(r);
        Debug.Assert(s is AssertingIndexSearcher);
        // this used to fail
        s.Search(new ConstantScoreQuery(filter), new TotalHitCountCollector());
        // check the rewrite: a filter-backed CSQ must rewrite to a CSQ that
        // still wraps the original (asserting) query.
        Query rewritten = (new ConstantScoreQuery(filter)).Rewrite(r);
        Assert.IsTrue(rewritten is ConstantScoreQuery);
        Assert.IsTrue(((ConstantScoreQuery)rewritten).Query is AssertingQuery);
        r.Dispose();
        d.Dispose();
    }
}
}
| |
namespace Xbehave.Test
{
using System;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using Xbehave.Sdk;
using Xbehave.Test.Infrastructure;
using Xunit;
using Xunit.Abstractions;
// In order to prevent bugs due to incorrect code
// As a developer
// I want to run automated acceptance tests describing each feature of my product using scenarios
public class ScenarioFeature : Feature
{
    // NOTE (adamralph): a plain xunit fact to prove that plain scenarios work in 2.x
    [Fact]
    public void ScenarioWithTwoPassingStepsAndOneFailingStepYieldsTwoPassesAndOneFail()
    {
        // arrange
        var feature = typeof(FeatureWithAScenarioWithTwoPassingStepsAndOneFailingStep);
        // act
        var results = this.Run<ITestResultMessage>(feature);
        // assert
        Assert.Equal(3, results.Length);
        Assert.All(results.Take(2), result => Assert.IsAssignableFrom<ITestPassed>(result));
        Assert.All(results.Skip(2), result => Assert.IsAssignableFrom<ITestFailed>(result));
    }

    // Each step of a scenario surfaces as its own xunit test result, and the
    // full message stream must follow the standard xunit lifecycle contract.
    [Scenario]
    public void ScenarioWithThreeSteps(Type feature, IMessageSinkMessage[] messages, ITestResultMessage[] results)
    {
        "Given a feature with a scenario with three steps"
            .x(() => feature = typeof(FeatureWithAScenarioWithThreeSteps));
        "When I run the scenarios"
            .x(() => results = (messages = this.Run<IMessageSinkMessage>(feature))
                .OfType<ITestResultMessage>().ToArray());
        "Then there should be three results"
            .x(() => Assert.Equal(3, results.Length));
        "And the first result should have a display name ending with 'Step 1'"
            .x(() => Assert.EndsWith("Step 1", results[0].Test.DisplayName));
        "And the second result should have a display name ending with 'Step 2'"
            .x(() => Assert.EndsWith("Step 2", results[1].Test.DisplayName));
        "And the third result should have a display name ending with 'Step 3'"
            .x(() => Assert.EndsWith("Step 3", results[2].Test.DisplayName));
        "And the messages should satisfy the xunit message contract"
            .x(() => Assert.Equal(
                new[]
                {
                    "TestCollectionStarting",
                    "TestClassStarting",
                    "TestMethodStarting",
                    "TestCaseStarting",
                    "TestStarting",
                    "TestPassed",
                    "TestFinished",
                    "TestStarting",
                    "TestPassed",
                    "TestFinished",
                    "TestStarting",
                    "TestPassed",
                    "TestFinished",
                    "TestCaseFinished",
                    "TestMethodFinished",
                    "TestClassFinished",
                    "TestCollectionFinished",
                },
                messages.Select(message => message.GetType().Name).SkipWhile(name => name == "TestAssemblyStarting").Take(17).ToArray()));
    }

    // Display names must preserve declaration order, so sorting by display
    // name reproduces the original (reverse-alphabetical) step order.
    [Scenario]
    public void OrderingStepsByDisplayName(Type feature, ITestResultMessage[] results)
    {
        "Given ten steps named alphabetically backwards starting with 'z'"
            .x(() => feature = typeof(TenStepsNamedAlphabeticallyBackwardsStartingWithZ));
        "When I run the scenarios"
            .x(() => results = this.Run<ITestResultMessage>(feature));
        "And I sort the results by their display name"
            .x(() => results = results.OrderBy(result => result.Test.DisplayName).ToArray());
        "Then a concatenation of the last character of each result display names should be 'zyxwvutsrq'"
            .x(() => Assert.Equal("zyxwvutsrq", new string(results.Select(result => result.Test.DisplayName.Last()).ToArray())));
    }

    [Scenario]
    public void ScenarioWithTwoPassingStepsAndOneFailingStep(Type feature, ITestResultMessage[] results)
    {
        "Given a feature with a scenario with two passing steps and one failing step"
            .x(() => feature = typeof(FeatureWithAScenarioWithTwoPassingStepsAndOneFailingStep));
        "When I run the scenarios"
            .x(() => results = this.Run<ITestResultMessage>(feature));
        "Then there should be three results"
            .x(() => Assert.Equal(3, results.Length));
        "And the first two results should be passes"
            .x(() => Assert.All(results.Take(2), result => Assert.IsAssignableFrom<ITestPassed>(result)));
        "And the third result should be a fail"
            .x(() => Assert.All(results.Skip(2), result => Assert.IsAssignableFrom<ITestFailed>(result)));
    }

    // An exception thrown while *declaring* steps must surface as failed
    // results, not as a thrown exception from the runner.
    [Scenario]
    public void ScenarioBodyThrowsAnException(Type feature, Exception exception, ITestResultMessage[] results)
    {
        "Given a feature with a scenario body which throws an exception"
            .x(() => feature = typeof(FeatureWithAScenarioBodyWhichThrowsAnException));
        "When I run the scenarios"
            .x(() => exception = Record.Exception(() =>
                results = this.Run<ITestResultMessage>(feature)));
        "Then no exception should be thrown"
            .x(() => Assert.Null(exception));
        "And the results should not be empty"
            .x(() => Assert.NotEmpty(results));
        "And each result should be a failure"
            .x(() => Assert.All(results, result => Assert.IsAssignableFrom<ITestFailed>(result)));
    }

    [Scenario]
    public void FeatureCannotBeConstructed(Type feature, Exception exception, ITestResultMessage[] results)
    {
        "Given a feature with a non-static scenario but no default constructor"
            .x(() => feature = typeof(FeatureWithANonStaticScenarioButNoDefaultConstructor));
        "When I run the scenarios"
            .x(() => exception = Record.Exception(() => results = this.Run<ITestResultMessage>(feature)));
        "Then no exception should be thrown"
            .x(() => Assert.Null(exception));
        "And the results should not be empty"
            .x(() => Assert.NotEmpty(results));
        "And each result should be a failure"
            .x(() => Assert.All(results, result => Assert.IsAssignableFrom<ITestFailed>(result)));
    }

    [Scenario]
    public void FeatureConstructionFails(Type feature, ITestFailed[] failures)
    {
        "Given a feature with a failing constructor"
            .x(() => feature = typeof(FeatureWithAFailingConstructor));
        "When I run the scenarios"
            .x(() => failures = this.Run<ITestFailed>(feature));
        "Then there should be one test failure"
            .x(() => Assert.Single(failures));
    }

    // When an early step fails, the remaining steps must be skipped and their
    // skip reasons must point back at the failed step.
    [Scenario]
    public void FailingStepThenPassingSteps(Type feature, ITestResultMessage[] results)
    {
        "Given a failing step and two passing steps named alphabetically backwards"
            .x(() => feature = typeof(AFailingStepAndTwoPassingStepsNamedAlphabeticallyBackwards));
        "When I run the scenario"
            .x(() => results = this.Run<ITestResultMessage>(feature));
        "And I sort the results by their display name"
            .x(() => results = results.OrderBy(result => result.Test.DisplayName).ToArray());
        "Then the there should be three results"
            .x(() => Assert.Equal(3, results.Length));
        "Then the first result should be a failure"
            .x(() => Assert.IsAssignableFrom<ITestFailed>(results[0]));
        "And the second and third results should be skips"
            .x(() => Assert.All(results.Skip(1), result => Assert.IsAssignableFrom<ITestSkipped>(result)));
        "And the second result should refer to the second step"
            .x(() => Assert.Contains("Step y", results[1].Test.DisplayName));
        "And the third result should refer to the third step"
            .x(() => Assert.Contains("Step x", results[2].Test.DisplayName));
        "And the second and third result messages should indicate that the first step failed"
            .x(() => Assert.All(
                results.Skip(1).Cast<ITestSkipped>(),
                result =>
                {
                    Assert.Contains("Failed to execute preceding step", result.Reason);
                    Assert.Contains("Step z", result.Reason);
                }));
    }

    [Scenario]
    public void ScenarioWithNoSteps(Type feature, ITestResultMessage[] results)
    {
        "Given a scenario with no steps"
            .x(() => feature = typeof(FeatureWithAScenarioWithNoSteps));
        "When I run the scenario"
            .x(() => results = this.Run<ITestResultMessage>(feature));
        "Then there should be one result"
            .x(() => Assert.Single(results));
        "And the result should be a pass"
            .x(() => Assert.IsAssignableFrom<ITestPassed>(results.Single()));
    }

    // The next three scenarios only need to not blow up when text or bodies are null.
    [Scenario]
    public void NullStepText() =>
        ((string)null)
            .x(() => { });

    [Scenario]
    public void NullStepBody() =>
        "Given a null body"
            .x((Action)null);

    [Scenario]
    public void NullContextualStepBody() =>
        "Given a null body"
            .x((Action<IStepContext>)null);

    // Declaring a step from inside another step is invalid and must fail.
    [Scenario]
    public void NestedStep(Type feature, ITestResultMessage[] results)
    {
        "Given a scenario with a nested step"
            .x(() => feature = typeof(ScenarioWithANestedStep));
        "When I run the scenario"
            .x(() => results = this.Run<ITestResultMessage>(feature));
        "Then there should be one result"
            .x(() => Assert.Single(results));
        "And the result should be a fail"
            .x(() => Assert.IsAssignableFrom<ITestFailed>(results.Single()));
    }

    // --- Fixture features exercised by the scenarios above ---

    private class FeatureWithAScenarioWithThreeSteps
    {
        [Scenario]
        public void Scenario()
        {
            "Step 1"
                .x(() => { });
            "Step 2"
                .x(() => { });
            "Step 3"
                .x(() => { });
        }
    }

    private class TenStepsNamedAlphabeticallyBackwardsStartingWithZ
    {
        [Scenario]
        public static void Scenario()
        {
            "z"
                .x(() => { });
            "y"
                .x(() => { });
            "x"
                .x(() => { });
            "w"
                .x(() => { });
            "v"
                .x(() => { });
            "u"
                .x(() => { });
            "t"
                .x(() => { });
            "s"
                .x(() => { });
            "r"
                .x(() => { });
            "q"
                .x(() => { });
        }
    }

    private class FeatureWithAScenarioWithTwoPassingStepsAndOneFailingStep
    {
        [Scenario]
        public static void Scenario()
        {
            var i = 0;
            "Given 1"
                .x(() => i = 1);
            "When I add 1"
                .x(() => i += 1);
            "Then I have 3"
                .x(() => Assert.Equal(3, i)); // deliberately wrong: 1 + 1 != 3
        }
    }

    private class FeatureWithAScenarioBodyWhichThrowsAnException
    {
        [Scenario]
        public static void Scenario() => throw new InvalidOperationException();
    }

    private class AFailingStepAndTwoPassingStepsNamedAlphabeticallyBackwards
    {
        [Scenario]
        public static void Scenario()
        {
            "Step z"
                .x(() => throw new NotImplementedException());
            "Step y"
                .x(() => { });
            "Step x"
                .x(() => { });
        }
    }

    private class FeatureWithANonStaticScenarioButNoDefaultConstructor
    {
#pragma warning disable IDE0060 // Remove unused parameter
        public FeatureWithANonStaticScenarioButNoDefaultConstructor(int ignored)
#pragma warning restore IDE0060 // Remove unused parameter
        {
        }

        [SuppressMessage("Microsoft.Performance", "CA1822:MarkMembersAsStatic", Justification = "Required for testing.")]
        [Scenario]
        public void Scenario() =>
            "Given something"
                .x(() => { });
    }

    private class FeatureWithAFailingConstructor
    {
        public FeatureWithAFailingConstructor() => throw new InvalidOperationException();

        [SuppressMessage("Microsoft.Performance", "CA1822:MarkMembersAsStatic", Justification = "Required for testing.")]
        [Scenario]
        public void Scenario() =>
            "Given something"
                .x(() => { });
    }

    private class FeatureWithAScenarioWithNoSteps
    {
        [Scenario]
        public void Scenario()
        {
        }
    }

    private class ScenarioWithANestedStep
    {
        [Scenario]
        public void Scenario()
        {
            "Given something".x(() => "With something nested".x(() => { }));
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//-----------------------------------------------------------------------------
//
// Description:
// This is a class for representing a PackageRelationshipCollection. This is an internal
// class for manipulating relationships associated with a part
//
// Details:
// This class handles serialization to/from relationship parts, creation of those parts
// and offers methods to create, delete and enumerate relationships. This code was
// moved from the PackageRelationshipCollection class.
//
//-----------------------------------------------------------------------------
using System.Collections;
using System.Collections.Generic;
using System.Xml; // for XmlReader/Writer
using System.Diagnostics;
namespace System.IO.Packaging
{
/// <summary>
/// Collection of all the relationships corresponding to a given source PackagePart
/// </summary>
internal class InternalRelationshipCollection : IEnumerable<PackageRelationship>
{
#region IEnumerable
/// <summary>
/// Returns a non-generic enumerator over all the relationships for a Package or a PackagePart.
/// </summary>
/// <returns>Enumerator over the relationship list.</returns>
IEnumerator IEnumerable.GetEnumerator() => _relationships.GetEnumerator();
/// <summary>
/// Returns a generic enumerator over all the relationships for a Package or a PackagePart.
/// </summary>
/// <returns>Enumerator over the relationship list.</returns>
IEnumerator<PackageRelationship> IEnumerable<PackageRelationship>.GetEnumerator() => _relationships.GetEnumerator();
/// <summary>
/// Returns a struct enumerator over all the relationships for a Package or a
/// PackagePart (allocation-free when used directly in foreach).
/// </summary>
/// <returns>The list's struct enumerator.</returns>
public List<PackageRelationship>.Enumerator GetEnumerator() => _relationships.GetEnumerator();
#endregion
#region Internal Methods
/// <summary>
/// Constructor
/// </summary>
/// <remarks>For use by PackagePart — delegates to the common constructor with
/// the part's owning package as the container.</remarks>
internal InternalRelationshipCollection(PackagePart part) : this(part.Package, part)
{
}
/// <summary>
/// Constructor
/// </summary>
/// <remarks>For use by Package — a null part means the relationships belong to
/// the package itself rather than to a specific part.</remarks>
internal InternalRelationshipCollection(Package package) : this(package, null)
{
}
/// <summary>
/// Add new relationship
/// </summary>
/// <param name="targetUri">target</param>
/// <param name="targetMode">Enumeration indicating the base uri for the target uri</param>
/// <param name="relationshipType">relationship type that uniquely defines the role of the relationship</param>
/// <param name="id">String that conforms to the xsd:ID datatype. Unique across the source's relationships.
/// Null OK (ID will be generated).</param>
/// <returns>The newly created relationship.</returns>
internal PackageRelationship Add(Uri targetUri, TargetMode targetMode, string relationshipType, string id)
    => Add(targetUri, targetMode, relationshipType, id, parsing: false);
/// <summary>
/// Return the relationship whose id is 'id', and null if not found.
/// </summary>
internal PackageRelationship GetRelationship(string id)
{
    int index = GetRelationshipIndex(id);
    return (index < 0) ? null : _relationships[index];
}
/// <summary>
/// Delete relationship with ID 'id'
/// </summary>
/// <param name="id">ID of the relationship to remove</param>
/// <remarks>Silently does nothing when no relationship with that ID exists.</remarks>
internal void Delete(String id)
{
    int index = GetRelationshipIndex(id);
    if (index >= 0)
    {
        _relationships.RemoveAt(index);
        _dirty = true;
    }
}
/// <summary>
/// Clear all the relationships in this collection
/// Today it is only used when the entire relationship part is being deleted
/// </summary>
/// <remarks>Marks the collection dirty; an empty, dirty collection causes Flush
/// to delete the backing relationship part.</remarks>
internal void Clear()
{
_relationships.Clear();
_dirty = true;
}
/// <summary>
/// Flush to stream (destructive)
/// </summary>
/// <remarks>
/// Persists any uncommitted changes: writes the relationship part when the
/// collection is non-empty, deletes the part when it is empty. No-op when clean.
/// </remarks>
internal void Flush()
{
    if (!_dirty)
        return;
    if (_relationships.Count > 0)
    {
        // Lazily create the relationship part if needed, then serialize.
        EnsureRelationshipPart();
        WriteRelationshipPart(_relationshipPart);
    }
    else
    {
        // No relationships left - remove the backing part entirely.
        if (_package.PartExists(_uri))
        {
            _package.DeletePart(_uri);
        }
        _relationshipPart = null;
    }
    _dirty = false;
}
/// <summary>
/// Throws ArgumentException when the relationship type is empty or whitespace-only.
/// </summary>
/// <param name="relationshipType">candidate relationship type; callers have already
/// rejected null before calling this helper</param>
internal static void ThrowIfInvalidRelationshipType(string relationshipType)
{
    // IsNullOrWhiteSpace covers the same Unicode whitespace set Trim() removes,
    // without allocating the trimmed intermediate string.
    if (string.IsNullOrWhiteSpace(relationshipType))
        throw new ArgumentException(SR.InvalidRelationshipType);
}
// If 'id' is not of the xsd type ID, throw an exception.
// An xsd:ID is an NCName; XmlConvert.VerifyNCName does the syntactic check and
// its XmlException is wrapped with a friendlier message.
internal static void ThrowIfInvalidXsdId(string id)
{
    Debug.Assert(id != null, "id should not be null");
    try
    {
        XmlConvert.VerifyNCName(id);
    }
    catch (XmlException exception)
    {
        throw new XmlException(SR.Format(SR.NotAValidXmlIdString, id), exception);
    }
}
#endregion Internal Methods
#region Private Methods
/// <summary>
/// Constructor
/// </summary>
/// <param name="package">package</param>
/// <param name="part">part will be null if package is the source of the relationships</param>
/// <remarks>Shared constructor</remarks>
private InternalRelationshipCollection(Package package, PackagePart part)
{
Debug.Assert(package != null, "package parameter passed should never be null");
_package = package;
_sourcePart = part;
//_sourcePart may be null representing that the relationships are at the package level
_uri = GetRelationshipPartUri(_sourcePart);
// small initial capacity - NOTE(review): presumably most sources have few relationships
_relationships = new List<PackageRelationship>(4);
// Load if available (not applicable to write-only mode).
if ((package.FileOpenAccess == FileAccess.Read ||
package.FileOpenAccess == FileAccess.ReadWrite) && package.PartExists(_uri))
{
_relationshipPart = package.GetPart(_uri);
ThrowIfIncorrectContentType(_relationshipPart.ValidatedContentType);
ParseRelationshipPart(_relationshipPart);
}
//Any initialization in the constructor should not set the dirty flag to true.
_dirty = false;
}
/// <summary>
/// Returns the associated RelationshipPart for this part
/// </summary>
/// <param name="part">may be null</param>
/// <returns>name of relationship part for the given part</returns>
private static Uri GetRelationshipPartUri(PackagePart part)
{
    // A null part means the relationship source is the package root.
    Uri sourceUri = (part == null) ? PackUriHelper.PackageRootUri : part.Uri;
    return PackUriHelper.GetRelationshipPartUri(sourceUri);
}
/// <summary>
/// Parse PackageRelationship Stream
/// </summary>
/// <param name="part">relationship part</param>
/// <exception cref="XmlException">Thrown if XML is malformed</exception>
/// <remarks>Expects exactly one root &lt;Relationships&gt; element containing zero or
/// more &lt;Relationship&gt; children; anything else at those depths is rejected.</remarks>
private void ParseRelationshipPart(PackagePart part)
{
//We can safely open the stream as FileAccess.Read, as this code
//should only be invoked if the Package has been opened in Read or ReadWrite mode.
Debug.Assert(_package.FileOpenAccess == FileAccess.Read || _package.FileOpenAccess == FileAccess.ReadWrite,
"This method should only be called when FileAccess is Read or ReadWrite");
using (Stream s = part.GetStream(FileMode.Open, FileAccess.Read))
{
// load from the relationship part associated with the given part
using (XmlReader baseReader = XmlReader.Create(s))
{
using (XmlCompatibilityReader reader = new XmlCompatibilityReader(baseReader, s_relationshipKnownNamespaces))
{
//This method expects the reader to be in ReadState.Initial.
//It will make the first read call.
PackagingUtilities.PerformInitialReadAndVerifyEncoding(baseReader);
//Note: After the previous method call the reader should be at the first tag in the markup.
//MoveToContent - Skips over the following - ProcessingInstruction, DocumentType, Comment, Whitespace, or SignificantWhitespace
//If the reader is currently at a content node then this function call is a no-op
reader.MoveToContent();
// look for our tag and namespace pair - throw if other elements are encountered
// Make sure that the current node read is an Element
if (reader.NodeType == XmlNodeType.Element
&& (reader.Depth == 0)
&& (String.CompareOrdinal(s_relationshipsTagName, reader.LocalName) == 0)
&& (String.CompareOrdinal(PackagingUtilities.RelationshipNamespaceUri, reader.NamespaceURI) == 0))
{
ThrowIfXmlBaseAttributeIsPresent(reader);
//There should be a namespace Attribute present at this level.
//Also any other attribute on the <Relationships> tag is an error including xml: and xsi: attributes
if (PackagingUtilities.GetNonXmlnsAttributeCount(reader) > 0)
throw new XmlException(SR.RelationshipsTagHasExtraAttributes, null, reader.LineNumber, reader.LinePosition);
// start tag encountered for Relationships
// now parse individual Relationship tags
while (reader.Read())
{
//Skips over the following - ProcessingInstruction, DocumentType, Comment, Whitespace, or SignificantWhitespace
//If the reader is currently at a content node then this function call is a no-op
reader.MoveToContent();
//If MoveToContent() takes us to the end of the content
if (reader.NodeType == XmlNodeType.None)
continue;
// Depth == 1 restricts acceptance to direct children of <Relationships>
if (reader.NodeType == XmlNodeType.Element
&& (reader.Depth == 1)
&& (String.CompareOrdinal(s_relationshipTagName, reader.LocalName) == 0)
&& (String.CompareOrdinal(PackagingUtilities.RelationshipNamespaceUri, reader.NamespaceURI) == 0))
{
ThrowIfXmlBaseAttributeIsPresent(reader);
// Target, Type and Id are required; TargetMode is optional, so its
// presence raises the expected attribute count by one.
int expectedAttributesCount = 3;
string targetModeAttributeValue = reader.GetAttribute(s_targetModeAttributeName);
if (targetModeAttributeValue != null)
expectedAttributesCount++;
//check if there are expected number of attributes.
//Also any other attribute on the <Relationship> tag is an error including xml: and xsi: attributes
if (PackagingUtilities.GetNonXmlnsAttributeCount(reader) == expectedAttributesCount)
{
ProcessRelationshipAttributes(reader);
//Skip the EndElement for Relationship
if (!reader.IsEmptyElement)
ProcessEndElementForRelationshipTag(reader);
}
else
{
throw new XmlException(SR.RelationshipTagDoesntMatchSchema, null, reader.LineNumber, reader.LinePosition);
}
}
else
// the closing </Relationships> tag is the only other node we accept here
if (!(String.CompareOrdinal(s_relationshipsTagName, reader.LocalName) == 0 && (reader.NodeType == XmlNodeType.EndElement)))
throw new XmlException(SR.UnknownTagEncountered, null, reader.LineNumber, reader.LinePosition);
}
}
else throw new XmlException(SR.ExpectedRelationshipsElementTag, null, reader.LineNumber, reader.LinePosition);
}
}
}
}
//This method processes the attributes that are present on the Relationship element
//and appends the parsed relationship to the collection (via Add with parsing:true,
//which leaves the dirty flag untouched).
private void ProcessRelationshipAttributes(XmlCompatibilityReader reader)
{
// Attribute : TargetMode
string targetModeAttributeValue = reader.GetAttribute(s_targetModeAttributeName);
//If the TargetMode attribute is missing in the underlying markup then we assume it to be internal
TargetMode relationshipTargetMode = TargetMode.Internal;
if (targetModeAttributeValue != null)
{
try
{
// ignoreCase: false - the markup value must match the enum name exactly
relationshipTargetMode = (TargetMode)(Enum.Parse(typeof(TargetMode), targetModeAttributeValue, ignoreCase: false));
}
catch (ArgumentNullException argNullEx)
{
ThrowForInvalidAttributeValue(reader, s_targetModeAttributeName, argNullEx);
}
catch (ArgumentException argEx)
{
//if the targetModeAttributeValue is not Internal|External then Argument Exception will be thrown.
ThrowForInvalidAttributeValue(reader, s_targetModeAttributeName, argEx);
}
}
// Attribute : Target
// create a new PackageRelationship
string targetAttributeValue = reader.GetAttribute(s_targetAttributeName);
if (string.IsNullOrEmpty(targetAttributeValue))
throw new XmlException(SR.Format(SR.RequiredRelationshipAttributeMissing, s_targetAttributeName), null, reader.LineNumber, reader.LinePosition);
Uri targetUri = new Uri(targetAttributeValue, UriKind.RelativeOrAbsolute);
// Attribute : Type
string typeAttributeValue = reader.GetAttribute(s_typeAttributeName);
if (string.IsNullOrEmpty(typeAttributeValue))
throw new XmlException(SR.Format(SR.RequiredRelationshipAttributeMissing, s_typeAttributeName), null, reader.LineNumber, reader.LinePosition);
// Attribute : Id
// Get the Id attribute (required attribute).
string idAttributeValue = reader.GetAttribute(s_idAttributeName);
if (string.IsNullOrEmpty(idAttributeValue))
throw new XmlException(SR.Format(SR.RequiredRelationshipAttributeMissing, s_idAttributeName), null, reader.LineNumber, reader.LinePosition);
// Add the relationship to the collection
Add(targetUri, relationshipTargetMode, typeAttributeValue, idAttributeValue, parsing: true);
}
// Consumes the closing tag of a non-empty Relationship element; anything other
// than </Relationship> at that position is a schema violation.
private void ProcessEndElementForRelationshipTag(XmlCompatibilityReader reader)
{
    Debug.Assert(!reader.IsEmptyElement, "This method should only be called if the Relationship Element is not empty");
    reader.Read();
    // Skip insignificant nodes (PI, DocumentType, Comment, Whitespace, SignificantWhitespace).
    reader.MoveToContent();
    bool atRelationshipEndTag = reader.NodeType == XmlNodeType.EndElement
        && String.CompareOrdinal(s_relationshipTagName, reader.LocalName) == 0;
    if (!atRelationshipEndTag)
        throw new XmlException(SR.Format(SR.ElementIsNotEmptyElement, s_relationshipTagName), null, reader.LineNumber, reader.LinePosition);
}
/// <summary>
/// Add new relationship to the Collection
/// </summary>
/// <param name="targetUri">target</param>
/// <param name="targetMode">Enumeration indicating the base uri for the target uri</param>
/// <param name="relationshipType">relationship type that uniquely defines the role of the relationship</param>
/// <param name="id">String that conforms to the xsd:ID datatype. Unique across the source's relationships.
/// Null OK (ID will be generated).</param>
/// <param name="parsing">Indicates whether the add call is made while parsing existing relationships
/// from a relationship part, or we are adding a new relationship</param>
/// <exception cref="ArgumentNullException">targetUri or relationshipType is null</exception>
/// <exception cref="ArgumentException">invalid type, absolute internal target, or target is a rels part</exception>
private PackageRelationship Add(Uri targetUri, TargetMode targetMode, string relationshipType, string id, bool parsing)
{
if (targetUri == null)
throw new ArgumentNullException(nameof(targetUri));
if (relationshipType == null)
throw new ArgumentNullException(nameof(relationshipType));
ThrowIfInvalidRelationshipType(relationshipType);
//Verify if the Enum value is valid
if (targetMode < TargetMode.Internal || targetMode > TargetMode.External)
throw new ArgumentOutOfRangeException(nameof(targetMode));
// don't accept absolute Uri's if targetMode is Internal.
if (targetMode == TargetMode.Internal && targetUri.IsAbsoluteUri)
throw new ArgumentException(SR.RelationshipTargetMustBeRelative, nameof(targetUri));
// don't allow relationships to relationships
// This check should be made for following cases
// 1. Uri is absolute and it is pack Uri
// 2. Uri is NOT absolute and its target mode is internal (or NOT external)
// Note: if the target is absolute uri and its not a pack scheme then we cannot determine if it is a rels part
// Note: if the target is relative uri and target mode is external, we cannot determine if it is a rels part
if ((!targetUri.IsAbsoluteUri && targetMode != TargetMode.External)
|| (targetUri.IsAbsoluteUri && targetUri.Scheme == PackUriHelper.UriSchemePack))
{
Uri resolvedUri = GetResolvedTargetUri(targetUri, targetMode);
//GetResolvedTargetUri returns a null if the target mode is external and the
//target Uri is a packUri with no "part" component, so in that case we know that
//its not a relationship part.
if (resolvedUri != null)
{
if (PackUriHelper.IsRelationshipPartUri(resolvedUri))
throw new ArgumentException(SR.RelationshipToRelationshipIllegal, nameof(targetUri));
}
}
// Generate an ID if id is null. Throw exception if neither null nor a valid unique xsd:ID.
if (id == null)
id = GenerateUniqueRelationshipId();
else
ValidateUniqueRelationshipId(id);
//Ensure the relationship part
EnsureRelationshipPart();
// create and add
PackageRelationship relationship = new PackageRelationship(_package, _sourcePart, targetUri, targetMode, relationshipType, id);
_relationships.Add(relationship);
//If we are adding relationships as a part of Parsing the underlying relationship part, we should not set
//the dirty flag to false.
_dirty = !parsing;
return relationship;
}
/// <summary>
/// Write PackageRelationship Stream
/// </summary>
/// <param name="part">part to persist to</param>
/// <remarks>Truncates the part's stream and serializes the whole collection as a
/// fresh Relationships document in UTF-8.</remarks>
private void WriteRelationshipPart(PackagePart part)
{
    using (IgnoreFlushAndCloseStream s = new IgnoreFlushAndCloseStream(part.GetStream()))
    {
        s.SetLength(0); // truncate to resolve PS 954048
        // use UTF-8 encoding by default
        XmlWriterSettings settings = new XmlWriterSettings { Encoding = System.Text.Encoding.UTF8 };
        using (XmlWriter writer = XmlWriter.Create(s, settings))
        {
            writer.WriteStartDocument();
            // <Relationships> root element
            writer.WriteStartElement(s_relationshipsTagName, PackagingUtilities.RelationshipNamespaceUri);
            // one <Relationship> child per collection member; TargetMode only
            // written when it differs from the markup default
            WriteRelationshipsAsXml(writer, _relationships, false /* do not systematically write target mode */);
            writer.WriteEndElement();
            writer.WriteEndDocument();
        }
    }
}
/// <summary>
/// Write one Relationship element for each member of relationships.
/// This method is used by XmlDigitalSignatureProcessor code as well
/// </summary>
/// <param name="writer">writer positioned inside the Relationships element</param>
/// <param name="relationships">relationships to serialize</param>
/// <param name="alwaysWriteTargetModeAttribute">when false, TargetMode is written only
/// for External relationships (Internal is the markup default)</param>
internal static void WriteRelationshipsAsXml(XmlWriter writer, IEnumerable<PackageRelationship> relationships, bool alwaysWriteTargetModeAttribute)
{
    foreach (PackageRelationship rel in relationships)
    {
        writer.WriteStartElement(s_relationshipTagName);
        writer.WriteAttributeString(s_typeAttributeName, rel.RelationshipType);
        // Persist the target exactly as supplied by the user (OriginalString) so
        // relative and absolute Uris round-trip consistently. The value is only
        // stored; nothing is resolved or fetched from it here.
        writer.WriteAttributeString(s_targetAttributeName, rel.TargetUri.OriginalString);
        // TargetMode is optional attribute in the markup and its default value is TargetMode="Internal"
        if (alwaysWriteTargetModeAttribute || rel.TargetMode == TargetMode.External)
            writer.WriteAttributeString(s_targetModeAttributeName, rel.TargetMode.ToString());
        writer.WriteAttributeString(s_idAttributeName, rel.Id);
        writer.WriteEndElement();
    }
}
/// <summary>
/// Ensures that the PackageRelationship PackagePart has been created - lazy init
/// </summary>
/// <remarks>Re-acquires or re-creates the part when it was never loaded or has
/// been deleted since.</remarks>
private void EnsureRelationshipPart()
{
    if (_relationshipPart != null && !_relationshipPart.IsDeleted)
        return;
    if (_package.PartExists(_uri))
    {
        _relationshipPart = _package.GetPart(_uri);
        ThrowIfIncorrectContentType(_relationshipPart.ValidatedContentType);
    }
    else
    {
        // Inherit the source part's compression; package-level rels parts are uncompressed.
        CompressionOption compressionOption = (_sourcePart == null)
            ? CompressionOption.NotCompressed
            : _sourcePart.CompressionOption;
        _relationshipPart = _package.CreatePart(_uri, PackagingUtilities.RelationshipPartContentType.ToString(), compressionOption);
    }
}
/// <summary>
/// Resolves the target uri in the relationship against the source part or the
/// package root. This resolved Uri is then used by the Add method to figure
/// out if a relationship is being created to another relationship part.
/// </summary>
/// <param name="target">PackageRelationship target uri</param>
/// <param name="targetMode"> Enum value specifying the interpretation of the base uri
/// for the relationship target uri</param>
/// <returns>Resolved part Uri; null for an external pack uri with no part component</returns>
private Uri GetResolvedTargetUri(Uri target, TargetMode targetMode)
{
    if (targetMode == TargetMode.Internal)
    {
        Debug.Assert(!target.IsAbsoluteUri, "Uri should be relative at this stage");
        if (_sourcePart == null) //indicates that the source is the package root
            return PackUriHelper.ResolvePartUri(PackUriHelper.PackageRootUri, target);
        else
            return PackUriHelper.ResolvePartUri(_sourcePart.Uri, target);
    }
    else
    {
        // BUGFIX: Add() also routes External targets here when they are absolute
        // pack uris; the old code asserted Internal/relative and then tried to
        // resolve the absolute uri. Per Add()'s contract, extract the part
        // component instead - GetPartUri returns null when the pack uri has no
        // part component, signalling "cannot be a relationship part".
        Debug.Assert(target.IsAbsoluteUri && target.Scheme == PackUriHelper.UriSchemePack,
            "External targets are only resolved for absolute pack uris");
        return PackUriHelper.GetPartUri(target);
    }
}
//Throws an exception if the relationship part does not have the correct content type
private void ThrowIfIncorrectContentType(ContentType contentType)
{
    bool isRelsContentType = contentType.AreTypeAndSubTypeEqual(PackagingUtilities.RelationshipPartContentType);
    if (!isRelsContentType)
        throw new FileFormatException(SR.RelationshipPartIncorrectContentType);
}
//Throws an exception if the xml:base attribute is present in the Relationships XML
private void ThrowIfXmlBaseAttributeIsPresent(XmlCompatibilityReader reader)
{
    // xml:base is not allowed anywhere in a relationship part.
    if (reader.GetAttribute(s_xmlBaseAttributeName) != null)
        throw new XmlException(SR.Format(SR.InvalidXmlBaseAttributePresent, s_xmlBaseAttributeName), null, reader.LineNumber, reader.LinePosition);
}
//Throws an XML exception if the attribute value is invalid
private void ThrowForInvalidAttributeValue(XmlCompatibilityReader reader, String attributeName, Exception ex)
{
    string message = SR.Format(SR.InvalidValueForTheAttribute, attributeName);
    throw new XmlException(message, ex, reader.LineNumber, reader.LinePosition);
}
// Generate a relationship ID that does not collide with any existing member.
private string GenerateUniqueRelationshipId()
{
    string id = GenerateRelationshipId();
    while (GetRelationship(id) != null)
    {
        id = GenerateRelationshipId();
    }
    return id;
}
// Build an ID string: the letter 'R' followed by the first s_timestampLength hex
// digits of a freshly generated GUID ("N" format, no dashes). The result is a
// valid NCName because it starts with a letter.
private string GenerateRelationshipId()
{
    string guidHex = Guid.NewGuid().ToString("N");
    return "R" + guidHex.Substring(0, s_timestampLength);
}
// If 'id' is not of the xsd type ID or is not unique for this collection, throw an exception.
private void ValidateUniqueRelationshipId(string id)
{
    // Must be a syntactically valid NCName...
    ThrowIfInvalidXsdId(id);
    // ...and not already used by another relationship in this collection.
    if (GetRelationshipIndex(id) != -1)
        throw new XmlException(SR.Format(SR.NotAUniqueRelationshipId, id));
}
// Retrieve a relationship's index in _relationships given its id.
// Returns -1 when not found (List<T>.FindIndex semantics).
private int GetRelationshipIndex(string id)
{
    return _relationships.FindIndex(r => string.Equals(r.Id, id, StringComparison.Ordinal));
}
#endregion
#region Private Properties
#endregion Private Properties
#region Private Members
private List<PackageRelationship> _relationships;
private bool _dirty; // true if we have uncommitted changes to _relationships
private Package _package; // our package - in case _sourcePart is null
private PackagePart _sourcePart; // owning part - null if package is the owner
private PackagePart _relationshipPart; // where our relationships are persisted
private Uri _uri; // the URI of our relationship part
//------------------------------------------------------
//
// Private Fields
//
//------------------------------------------------------
// number of GUID hex digits used when generating relationship IDs
// (see GenerateRelationshipId)
private static readonly int s_timestampLength = 16;
// XML element/attribute names used when reading and writing relationship markup
private static readonly string s_relationshipsTagName = "Relationships";
private static readonly string s_relationshipTagName = "Relationship";
private static readonly string s_targetAttributeName = "Target";
private static readonly string s_typeAttributeName = "Type";
private static readonly string s_idAttributeName = "Id";
private static readonly string s_xmlBaseAttributeName = "xml:base";
private static readonly string s_targetModeAttributeName = "TargetMode";
// namespaces the XmlCompatibilityReader accepts as known while parsing
private static readonly string[] s_relationshipKnownNamespaces
= new string[] { PackagingUtilities.RelationshipNamespaceUri };
#endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Text;
using System.Xml;
using VersionOne.Profile;
using VersionOne.SDK.APIClient;
using VersionOne.ServiceHost.Core.Logging;
using VersionOne.ServiceHost.Core.Services;
using VersionOne.ServiceHost.Eventing;
namespace VersionOne.ServiceHost.TestServices {
public class TestWriterService : V1WriterServiceBase {
// Asset types and attribute definitions this service requires in the target
// VersionOne instance; exposed through the NeededAssetTypes override and
// checked by VerifyMeta() during Initialize.
private static readonly NeededAssetType[] neededAssetTypes = {
new NeededAssetType("TestSuite", new[] {"Reference"}),
new NeededAssetType("TestRun", new[] {"Name", "Description", "Date", "Passed", "Failed", "NotRun", "Elapsed"}),
new NeededAssetType("Test",
new[]
{"Status", "Name", "AssetState", "Parent", "Parent.Number", "Scope", "Scope.Schedule", "Timebox"}),
new NeededAssetType("TestStatus", new[] {"Key"}),
new NeededAssetType("Defect", new[] {"AffectedPrimaryWorkitems", "Scope", "Timebox", "Name", "Description", "AssetState"}),
new NeededAssetType("Timebox", new[] {"AssetState", "ID", "Schedule", "EndDate"}),
new NeededAssetType("PrimaryWorkitem", new[] {"ID"}),
};
// TestRunState -> configured TestStatus Oid; populated by LoadOid in Initialize.
private readonly IDictionary<TestRun.TestRunState, Oid> states = new Dictionary<TestRun.TestRunState, Oid>();
// Cache of schedule Oid -> active timebox Oid (filled lazily by FindTimebox).
private readonly IDictionary timeboxes = new ListDictionary();
private string changeComment; // comment recorded with asset saves
private CreateDefect createDefect; // policy for creating defects on failed tests
private string descriptionsuffix; // appended to generated defect descriptions
private string testreferenceattributetoken; // Test attribute linking to the external test reference
// Convenience accessors for VersionOne meta types. Resolved through
// Services.Meta on each access (no caching at this level).
private IAssetType TestSuiteType {
get { return Services.Meta.GetAssetType("TestSuite"); }
}
private IAssetType TestRunType {
get { return Services.Meta.GetAssetType("TestRun"); }
}
private IAssetType TestType {
get { return Services.Meta.GetAssetType("Test"); }
}
private IAssetType PrimaryWorkitem {
get { return Services.Meta.GetAssetType("PrimaryWorkitem"); }
}
private IAssetType TestStatusType {
get { return Services.Meta.GetAssetType("TestStatus"); }
}
private IAssetType TimeboxType {
get { return Services.Meta.GetAssetType("Timebox"); }
}
// Attribute definitions on Test.
// The reference attribute token is configurable (see Initialize).
private IAttributeDefinition TestReferenceDef {
get { return TestType.GetAttributeDefinition(testreferenceattributetoken); }
}
private IAttributeDefinition TestStatusDef {
get { return TestType.GetAttributeDefinition("Status"); }
}
private IAttributeDefinition TestNameDef {
get { return TestType.GetAttributeDefinition("Name"); }
}
private IAttributeDefinition TestAssetStateDef {
get { return TestType.GetAttributeDefinition("AssetState"); }
}
private IAttributeDefinition TestParentDef {
get { return TestType.GetAttributeDefinition("Parent"); }
}
private IAttributeDefinition TestParentNumberDef {
get { return TestType.GetAttributeDefinition("Parent.Number"); }
}
private IAttributeDefinition TestScopeDef {
get { return TestType.GetAttributeDefinition("Scope"); }
}
private IAttributeDefinition TestScheduleDef {
get { return TestType.GetAttributeDefinition("Scope.Schedule"); }
}
private IAttributeDefinition TestTimeboxDef {
get { return TestType.GetAttributeDefinition("Timebox"); }
}
// Attribute definitions on Defect and Timebox.
private IAttributeDefinition DefectAffectedWorkitemsDef {
get { return DefectType.GetAttributeDefinition("AffectedPrimaryWorkitems"); }
}
private IAttributeDefinition DefectAssetStateDef {
get { return DefectType.GetAttributeDefinition("AssetState"); }
}
private IAttributeDefinition DefectScopeDef {
get { return DefectType.GetAttributeDefinition("Scope"); }
}
private IAttributeDefinition DefectTimeboxDef {
get { return DefectType.GetAttributeDefinition("Timebox"); }
}
private IAttributeDefinition TimeboxAssetStateDef {
get { return TimeboxType.GetAttributeDefinition("AssetState"); }
}
// Required meta, consumed by the base class's VerifyMeta.
protected override IEnumerable<NeededAssetType> NeededAssetTypes {
get { return neededAssetTypes; }
}
#region IHostedService Members
// Reads service configuration, validates the required TestStatus Oids and
// subscribes to the events this writer handles.
public override void Initialize(XmlElement config, IEventManager eventManager, IProfile profile) {
base.Initialize(config, eventManager, profile);
// NOTE(review): config["..."] returns null for a missing element, so absent
// TestReferenceAttribute/ChangeComment/DescriptionSuffix/CreateDefect elements
// would throw NullReferenceException here - confirm the config schema
// guarantees their presence.
testreferenceattributetoken = config["TestReferenceAttribute"].InnerText;
VerifyMeta();
// Passed/Failed pass a null default and are therefore mandatory (LoadOid throws
// when unresolved); NotRun falls back to Oid.Null and is optional.
LoadOid(TestRun.TestRunState.Passed, config, null);
LoadOid(TestRun.TestRunState.Failed, config, null);
LoadOid(TestRun.TestRunState.NotRun, config, Oid.Null);
changeComment = config["ChangeComment"].InnerText;
descriptionsuffix = config["DescriptionSuffix"].InnerText;
// Defect-creation policy defaults to All when the element is empty.
if(!string.IsNullOrEmpty(config["CreateDefect"].InnerText)) {
createDefect = (CreateDefect)Enum.Parse(typeof(CreateDefect), config["CreateDefect"].InnerText);
} else {
createDefect = CreateDefect.All;
}
eventManager.Subscribe(typeof(SuiteRun), SuiteRunSave);
eventManager.Subscribe(typeof(TestRun), TestRunSave);
eventManager.Subscribe(typeof(PartnerTestEvent), PartnerTestCreated);
}
#endregion
// Resolves the '<State>Oid' config element to a TestStatus Oid and stores it in
// the states map.
// def: fallback when the token is missing/unresolvable - null makes the state
// mandatory (throws), Oid.Null makes it optional.
private void LoadOid(TestRun.TestRunState state, XmlElement config, Oid def) {
var configkey = state.ToString() + "Oid";
var oidconfig = config[configkey];
var oidtoken = oidconfig != null ? oidconfig.InnerText : null;
var oid = def;
try {
oid = Services.GetOid(oidtoken);
} catch(OidException) {
// Deliberate best-effort: keep the fallback 'def' when the token cannot be resolved.
}
//the oid is null-null or its not oid-null and we can't find it in the V1 system. (Oid.Null is ok!)
if(oid == null) {
throw new InvalidOperationException(string.Format("Invalid Oid Token for {0}: {1}", configkey, oidtoken));
}
if(oid != Oid.Null) {
// Verify the configured Oid really is an existing TestStatus on the server.
if(oid.AssetType != TestStatusType) {
throw new InvalidOperationException(string.Format("Oid for {0} is not a TestStatus Type: {1}", configkey, oidtoken));
}
var q = new Query(oid.AssetType);
var term = new FilterTerm(oid.AssetType.GetAttributeDefinition("Key"));
term.Equal(oid.Key);
q.Filter = term;
var assetlist = Services.Retrieve(q).Assets;
if(assetlist.Count == 0) {
throw new InvalidOperationException(string.Format("TestStatus for {0} does not exist: {1}", configkey, oidtoken));
}
}
states.Add(state, oid);
}
// Handles a PartnerTestEvent: stamps the external reference on the V1 Test and
// marks it failed when the partner run was unsuccessful.
private void PartnerTestCreated(object pubobj) {
    var partnerEvent = (PartnerTestEvent)pubobj;
    Logger.Log(LogMessage.SeverityType.Debug,
        string.Format("Update V1 Test {0}: set {1}={2}", partnerEvent.Oid, testreferenceattributetoken, partnerEvent.Reference));
    var query = new Query(Oid.FromToken(partnerEvent.Oid, Services.Meta));
    var result = Services.Retrieve(query);
    // BUGFIX: guard the empty result instead of indexing Assets[0] blindly -
    // the Test may have been deleted between the event and this handler.
    // Matches the log-and-skip behavior of SuiteRunSave/TestRunSave.
    if(result.Assets.Count == 0) {
        Logger.Log(LogMessage.SeverityType.Debug, "No Test found by Oid: " + partnerEvent.Oid);
        return;
    }
    var test = result.Assets[0];
    test.SetAttributeValue(TestReferenceDef, partnerEvent.Reference);
    if(!partnerEvent.Successful) {
        test.SetAttributeValue(TestStatusDef, states[TestRun.TestRunState.Failed]);
    }
    Services.Save(test);
}
// Handles a SuiteRun event: records one TestRun asset under every TestSuite
// whose Reference matches the run's SuiteRef.
private void SuiteRunSave(object pubobj) {
var run = (SuiteRun)pubobj;
Logger.Log(LogMessage.SeverityType.Debug, run.ToString());
if(string.IsNullOrEmpty(run.SuiteRef)) {
Logger.Log(LogMessage.SeverityType.Debug, "Suite Reference is null or empty. Skipping...");
return;
}
// Find all TestSuites carrying this external reference.
var q = new Query(TestSuiteType);
var term = new FilterTerm(TestSuiteType.GetAttributeDefinition("Reference"));
term.Equal(run.SuiteRef);
q.Filter = term;
var r = Services.Retrieve(q);
if(r.Assets.Count == 0) {
Logger.Log(LogMessage.SeverityType.Debug, "No TestSuite found by reference: " + run.SuiteRef);
return;
}
// Create one TestRun per matching suite and save them in a single batch.
var save = new AssetList();
foreach(var testsuite in r.Assets) {
var testrun = Services.New(TestRunType, testsuite.Oid);
testrun.SetAttributeValue(TestRunType.GetAttributeDefinition("Name"), run.Name);
testrun.SetAttributeValue(TestRunType.GetAttributeDefinition("Description"), run.Description);
testrun.SetAttributeValue(TestRunType.GetAttributeDefinition("Date"), run.Stamp);
testrun.SetAttributeValue(TestRunType.GetAttributeDefinition("Passed"), run.Passed);
testrun.SetAttributeValue(TestRunType.GetAttributeDefinition("Failed"), run.Failed);
testrun.SetAttributeValue(TestRunType.GetAttributeDefinition("NotRun"), run.NotRun);
testrun.SetAttributeValue(TestRunType.GetAttributeDefinition("Elapsed"), run.Elapsed);
LogSuiteRun(testrun);
save.Add(testrun);
}
Services.Save(save);
}
// Writes the full attribute dump of a newly created TestRun asset to the log.
private void LogSuiteRun(Asset suiterun) {
    var dump = GetAssetText(suiterun);
    Logger.Log("Suite:\r\n" + dump);
}
// Renders every non-null attribute of the asset as "\t\t<token> = <value>\r\n".
private static string GetAssetText(Asset testrun) {
    var sb = new StringBuilder();
    foreach(var entry in testrun.Attributes) {
        var attrib = entry.Value;
        if(attrib == null) {
            continue;
        }
        sb.Append("\t\t").Append(attrib.Definition.Token).Append(" = ").Append(attrib.Value).Append("\r\n");
    }
    return sb.ToString();
}
// Handles a TestRun event: updates the status of every open V1 Test matching the
// run's reference; for closed tests that failed, raises a Defect instead.
private void TestRunSave(object pubobj) {
var run = (TestRun)pubobj;
Logger.Log(LogMessage.SeverityType.Debug, run.ToString());
if(string.IsNullOrEmpty(run.TestRef)) {
Logger.Log(LogMessage.SeverityType.Debug, "Test Reference is null or empty. Skipping...");
return;
}
// states holds entries for Passed/Failed/NotRun, all loaded in Initialize.
var newStatus = states[run.State];
var tests = GetRelatedTests(run);
if(tests.Assets.Count == 0) {
Logger.Log(LogMessage.SeverityType.Debug, "No Tests found by reference: " + run.TestRef);
}
foreach(var test in tests.Assets) {
var stateAttribute = test.GetAttribute(TestAssetStateDef);
if(((AssetState)stateAttribute.Value) == AssetState.Active) {
UpdateOpenTest(newStatus, test);
} else if(run.State == TestRun.TestRunState.Failed) {
// Closed test that failed: surface the regression as a Defect.
DefectForClosedTest(run, test);
}
}
}
// Creates and saves a Defect for a failed run against an already-closed test,
// subject to the configured CreateDefect policy.
private void DefectForClosedTest(TestRun run, Asset test) {
    if(!ShouldCreateDefect(test)) {
        return;
    }
    var newDescription = string.Format(
        "One or more acceptance tests failed at \"{0}\".<BR />{1}",
        run.Stamp, descriptionsuffix);
    var defect = CreateRelatedDefect(newDescription, test);
    Services.Save(defect, changeComment);
    Logger.Log(string.Format("Saving defect for test \"{0}\".", run.TestRef));
}
// Applies the CreateDefect policy; never duplicates an existing open defect.
private bool ShouldCreateDefect(Asset test) {
    if(RelatedDefectExists(test)) {
        return false;
    }
    if(createDefect == CreateDefect.All) {
        return true;
    }
    if(createDefect == CreateDefect.CurrentIteration) {
        return TimeboxIsCurrent(test);
    }
    // CreateDefect.None (and any unknown value) suppresses defect creation.
    return false;
}
// True when the test's timebox exists and is still active in VersionOne.
private bool TimeboxIsCurrent(Asset test) {
    var timeboxOid = test.GetAttribute(TestTimeboxDef).Value as Oid;
    if(timeboxOid == null || timeboxOid.IsNull) {
        return false;
    }
    var query = new Query(TimeboxType);
    query.Selection.Add(TimeboxAssetStateDef);
    var idTerm = new FilterTerm(TimeboxType.GetAttributeDefinition("ID"));
    idTerm.Equal(timeboxOid.Token);
    query.Filter = idTerm;
    var assets = Services.Retrieve(query).Assets;
    if(assets.Count == 0) {
        return false;
    }
    var state = (AssetState)assets[0].GetAttribute(TimeboxAssetStateDef).Value;
    return state == AssetState.Active;
}
// True when an active workitem already cites the test's parent as affected -
// prevents duplicate defects for the same failing test.
private bool RelatedDefectExists(Asset test) {
    var query = new Query(PrimaryWorkitem);
    var affectedTerm = new FilterTerm(DefectAffectedWorkitemsDef);
    affectedTerm.Equal(test.GetAttribute(TestParentDef).Value);
    var stateTerm = new FilterTerm(DefectAssetStateDef);
    stateTerm.Equal(AssetState.Active);
    query.Filter = new AndFilterTerm(affectedTerm, stateTerm);
    var matches = Services.Retrieve(query).Assets;
    return matches.Count > 0;
}
// Builds (without saving) a Defect asset linked to the failing test's parent
// workitem, scoped like the test and, when resolvable, assigned to a timebox.
private Asset CreateRelatedDefect(string newDescription, Asset test) {
var defect = Services.New(DefectType, Oid.Null);
defect.AddAttributeValue(DefectAffectedWorkitemsDef, test.GetAttribute(TestParentDef).Value);
defect.SetAttributeValue(DefectScopeDef, test.GetAttribute(TestScopeDef).Value);
var parent = (Oid)test.GetAttribute(TestParentDef).Value;
// Name includes the localized display name of the parent's asset type
// (e.g. Story/Defect) and its number.
defect.SetAttributeValue(DefectType.GetAttributeDefinition("Name"),
string.Format(
"{0} \"{1}\" has failing Acceptance Test(s)",
Services.Localization(parent.AssetType.DisplayName),
test.GetAttribute(TestParentNumberDef).Value));
defect.SetAttributeValue(DefectType.GetAttributeDefinition("Description"), newDescription);
var timeboxOid = FindTimebox(test);
// Oid.Null means no suitable timebox; leave the attribute unset in that case.
if(!timeboxOid.IsNull) {
defect.SetAttributeValue(DefectTimeboxDef, timeboxOid);
}
return defect;
}
// Persists a new status on the test, but only when it actually differs from
// the currently stored status (avoids no-op saves).
private void UpdateOpenTest(Oid newStatus, Asset test) {
    var currentStatus = (Oid)test.GetAttribute(TestStatusDef).Value;
    if(newStatus == currentStatus)
        return;
    test.SetAttributeValue(TestStatusDef, newStatus);
    Services.Save(test, changeComment);
    Logger.Log(string.Format("Updating status of Acceptance Test \"{0}\".", test.Oid.Token));
}
// Retrieves every Test asset whose external reference matches the given test
// run, selecting all attributes consumed later while processing the results.
private QueryResult GetRelatedTests(TestRun run) {
    var query = new Query(TestType);
    query.Selection.Add(TestStatusDef);
    query.Selection.Add(TestNameDef);
    query.Selection.Add(TestAssetStateDef);
    query.Selection.Add(TestParentDef);
    query.Selection.Add(TestScopeDef);
    query.Selection.Add(TestParentNumberDef);
    query.Selection.Add(TestScheduleDef);
    query.Selection.Add(TestTimeboxDef);
    // Match on the test's external reference field.
    var referenceTerm = new FilterTerm(TestReferenceDef);
    referenceTerm.Equal(run.TestRef);
    query.Filter = referenceTerm;
    return Services.Retrieve(query);
}
// Resolves the timebox for a new defect: the test's own iteration when the
// policy is CurrentIteration, otherwise the schedule's active timebox.
private Oid FindTimebox(Asset test) {
    return createDefect == CreateDefect.CurrentIteration
        ? test.GetAttribute(TestTimeboxDef).Value as Oid
        : FindTimebox(test.GetAttribute(TestScheduleDef).Value as Oid);
}
// Returns the Oid of the "current" timebox for the given schedule, caching
// lookups per schedule in the 'timeboxes' map. Returns Oid.Null for a
// null/Null schedule or when the schedule has no active timebox.
private Oid FindTimebox(Oid scheduleOid) {
if((scheduleOid == null) || (scheduleOid.IsNull)) {
return Oid.Null;
}
// Cache hit: a previous lookup stored either a real Oid or Oid.Null.
var timebox = (Oid)timeboxes[scheduleOid];
if(timebox == null) {
timebox = Oid.Null;
// "Current" = the active timebox in this schedule with the earliest end
// date (ascending EndDate sort, first page of size 1).
var q = new Query(TimeboxType);
var scheduleTerm = new FilterTerm(TimeboxType.GetAttributeDefinition("Schedule"));
scheduleTerm.Equal(scheduleOid);
var assetStateTerm = new FilterTerm(TimeboxType.GetAttributeDefinition("AssetState"));
assetStateTerm.Equal(AssetState.Active);
q.Filter = new AndFilterTerm(scheduleTerm, assetStateTerm);
q.OrderBy.MajorSort(TimeboxType.GetAttributeDefinition("EndDate"), OrderBy.Order.Ascending);
q.Paging = new Paging(0, 1);
var r = Services.Retrieve(q);
if(r.Assets.Count != 0) {
timebox = r.Assets[0].Oid;
}
// Negative results are cached too (as Oid.Null) to avoid repeated queries.
timeboxes[scheduleOid] = timebox;
}
return timebox;
}
/// <summary>
/// Policy controlling when a defect is automatically created for a failing test.
/// </summary>
private enum CreateDefect {
/// <summary>Create a defect for every qualifying failing test.</summary>
All,
/// <summary>Create a defect only when the test's timebox is currently active.</summary>
CurrentIteration,
/// <summary>Never create defects.</summary>
None
}
}
}
| |
/*
Copyright 2006-2017 Cryptany, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Text;
using System.Data.SqlClient;
using Cryptany.Core.DPO.MetaObjects;
using Cryptany.Core.DPO.MetaObjects.Attributes;
namespace Cryptany.Core.DPO.Sql
{
/// <summary>
/// Builds raw T-SQL INSERT / UPDATE / DELETE scripts for entities of the type
/// handled by the supplied <see cref="Mapper"/>.
/// </summary>
/// <remarks>
/// NOTE(review): statements are assembled by string concatenation; the only
/// escaping applied is in ValueToString. Do not feed untrusted input through
/// this builder without auditing that escaping.
/// </remarks>
public class SqlScriptBuilder
{
    private Mapper _mapper;
    private PersistentStorage _ps;

    // Statement templates. "@@TableName" is substituted once in the
    // constructor; the remaining placeholders are filled per statement.
    private string _insertTemplate = "INSERT INTO @@TableName ( @@FieldList ) @@Values";
    private string _updateTemplate = "UPDATE @@TableName SET @@NameValuePairs WHERE ID = @@ID";
    private string _updateInBatchTemplate = "UPDATE @@TableName SET @@NameValuePairs FROM (@@ValuesTable) AS temp WHERE @@TableName.ID = temp.ID";
    private string _deleteTemplate = "DELETE FROM @@TableName WHERE ID IN ( @@ID )";

    /// <summary>
    /// Creates a builder bound to <paramref name="mapper"/> (table/column
    /// mapping) and <paramref name="ps"/> (used for metadata lookups).
    /// </summary>
    public SqlScriptBuilder(Mapper mapper, PersistentStorage ps)
    {
        _ps = ps;
        _mapper = mapper;
        _insertTemplate = _insertTemplate.Replace("@@TableName", mapper.FullTableName);
        _updateTemplate = _updateTemplate.Replace("@@TableName", mapper.FullTableName);
        _updateInBatchTemplate = _updateInBatchTemplate.Replace("@@TableName", mapper.FullTableName);
        _deleteTemplate = _deleteTemplate.Replace("@@TableName", mapper.FullTableName);
    }

    /// <summary>
    /// Renders a CLR value as a T-SQL literal: NULL, quoted (and
    /// quote-escaped) strings, 1/0 for booleans, quoted GUIDs and dates,
    /// the related ID for entities, the numeric value for enums.
    /// </summary>
    private static string ValueToString(object value)
    {
        if ( value == null )
            return "NULL";
        if ( value is string )
            return "'" + (value as string).Replace("'", "''") + "'"; // escape embedded quotes
        if ( value is bool )
            return ((bool)value) ? "1" : "0";
        if ( value is Guid )
            return "'" + value.ToString() + "'";
        if ( value is DateTime )
            // "u" produces "yyyy-MM-dd HH:mm:ssZ"; the trailing Z is removed
            // so SQL Server parses the literal as a datetime.
            return "'" + ((DateTime)value).ToString("u").Replace("Z", "") + "'";
        if ( value is char )
            // NOTE(review): chars are emitted unquoted — confirm char-typed
            // properties map to numeric columns, otherwise quoting is needed.
            return ((char)value).ToString();
        if ( value as EntityBase != null )
            return ValueToString((value as EntityBase).ID);
        if ( value.GetType().IsEnum )
            return Convert.ToInt32(value).ToString();
        // BUG FIX: use the invariant culture for the fallback so fractional
        // numbers are not rendered with a locale-specific decimal separator
        // (e.g. "1,5"), which would corrupt the generated SQL.
        return Convert.ToString(value, System.Globalization.CultureInfo.InvariantCulture);
    }

    /// <summary>Storage this builder was created with.</summary>
    public PersistentStorage Ps
    {
        get
        {
            return _ps;
        }
    }

    /// <summary>Mapper providing the table and column names.</summary>
    public Mapper SqlMapper
    {
        get
        {
            return _mapper;
        }
    }

    /// <summary>
    /// Builds an UPDATE statement for a single entity. The id field,
    /// non-persistent properties and to-many relations are skipped;
    /// one-to-one relations store the related entity's key column.
    /// </summary>
    public string CreateUpdateStatement(EntityBase e)
    {
        ObjectDescription od = ClassFactory.GetObjectDescription(_mapper.EntityType, _ps);
        string nameValuePairs = "";
        foreach (PropertyDescription pd in od.Properties)
        {
            if ( pd == od.IdField || pd.IsNonPersistent || pd.IsOneToManyRelation || pd.IsManyToManyRelation )
                continue;
            if ( nameValuePairs != "" )
                nameValuePairs += ", ";
            if ( pd.IsOneToOneRelation )
                // NOTE(review): the related column value is emitted as-is,
                // without ValueToString quoting — confirm it is pre-formatted.
                nameValuePairs += SqlMapper[pd.Name] + " = " + (e[pd.Name] as EntityBase)[pd.RelationAttribute.RelatedColumn];
            else
                nameValuePairs += SqlMapper[pd.Name] + " = " + ValueToString(e[pd.Name]);
        }
        // BUG FIX: the previous version appended ", <id> AS <IdColumn>" here
        // (copy-pasted from the batch UPDATE's SELECT-row builder), which is
        // not valid syntax inside an UPDATE ... SET list.
        return _updateTemplate.Replace("@@NameValuePairs", nameValuePairs).Replace("@@ID", ValueToString(e.ID));
    }

    /// <summary>
    /// Comma-separated list of persistent column names (id column excluded).
    /// </summary>
    private string GetFieldList()
    {
        string list = "";
        ObjectDescription od = ClassFactory.GetObjectDescription(_mapper.EntityType, _ps);
        foreach (PropertyDescription pd in od.Properties)
        {
            if ( pd == od.IdField || pd.IsNonPersistent || pd.IsOneToManyRelation || pd.IsManyToManyRelation )
                continue;
            if ( list != "" )
                list += ", ";
            list += SqlMapper[pd.Name, true];
        }
        return list;
    }

    /// <summary>
    /// Comma-separated list of SQL literals for the entity's persistent
    /// properties, in the same order as <see cref="GetFieldList"/>.
    /// </summary>
    private string GetValueList(EntityBase e)
    {
        string list = "";
        ObjectDescription od = ClassFactory.GetObjectDescription(_mapper.EntityType, _ps);
        foreach (PropertyDescription pd in od.Properties)
        {
            if ( pd == od.IdField || pd.IsNonPersistent || pd.IsOneToManyRelation || pd.IsManyToManyRelation )
                continue;
            if ( list != "" )
                list += ", ";
            if ( pd.IsOneToOneRelation )
                // NOTE(review): emitted unquoted, same caveat as in CreateUpdateStatement.
                list += (e[pd.Name] as EntityBase)[pd.RelationAttribute.RelatedColumn];
            else
                list += ValueToString(e[pd.Name]);
        }
        return list;
    }

    /// <summary>
    /// Builds one batched UPDATE covering every entity in
    /// <paramref name="list"/>: a UNION ALL of SELECT rows (one per entity)
    /// joined back to the table on the id column.
    /// </summary>
    public string CreateUpdateStatement(IList<EntityBase> list)
    {
        string fieldList = "";
        string nameValuePairs = "";
        ObjectDescription od = ClassFactory.GetObjectDescription(_mapper.EntityType, _ps);
        foreach (PropertyDescription pd in od.Properties)
        {
            if ( pd == od.IdField || pd.IsNonPersistent || pd.IsOneToManyRelation || pd.IsManyToManyRelation )
                continue;
            if ( fieldList != "" )
                fieldList += ", ";
            fieldList += SqlMapper[pd.Name];
            if ( nameValuePairs != "" )
                nameValuePairs += ", ";
            nameValuePairs += _mapper[pd.Name, true] + " = temp." + _mapper[pd.Name, true];
        }
        string valuesTable = "";
        foreach ( EntityBase e in list)
        {
            string valuesRow = "";
            foreach (PropertyDescription pd in od.Properties)
            {
                if ( pd == od.IdField || pd.IsNonPersistent || pd.IsOneToManyRelation || pd.IsManyToManyRelation )
                    continue;
                if ( valuesRow != "" )
                    valuesRow += ", ";
                valuesRow += ValueToString(e[pd.Name]) + " AS " + _mapper[pd.Name];
            }
            // Each row also carries the entity id so the UPDATE can join on it.
            valuesRow += ", " + ValueToString(e.ID) + " AS " + _mapper[od.IdField.Name];
            valuesRow = "SELECT " + valuesRow;
            if ( valuesTable != "" )
                valuesTable += "\r\nUNION ALL\r\n";
            valuesTable += valuesRow;
        }
        // "@@FieldList" does not occur in the batch template; that Replace is a no-op
        // kept for symmetry with the insert template.
        return _updateInBatchTemplate.Replace("@@FieldList", fieldList).Replace("@@NameValuePairs", nameValuePairs).Replace("@@ValuesTable", valuesTable);
    }

    /// <summary>Builds a DELETE statement for a single entity.</summary>
    public string CreateDeleteStatement(EntityBase e)
    {
        return CreateDeleteStatement(new List<EntityBase>( new EntityBase[] { e } ));
    }

    /// <summary>
    /// Builds one DELETE covering every entity in <paramref name="list"/>.
    /// NOTE(review): an empty list yields "IN (  )", which is invalid SQL —
    /// callers are assumed to pass at least one entity.
    /// </summary>
    public string CreateDeleteStatement(IList<EntityBase> list)
    {
        string ids = "";
        foreach ( EntityBase e in list )
        {
            if ( ids != "" )
                ids += ", ";
            ids += ValueToString(e.ID);
        }
        return _deleteTemplate.Replace("@@ID", ids);
    }

    /// <summary>Builds an INSERT statement for a single entity.</summary>
    public string CreateInsertStatement(EntityBase e)
    {
        return CreateInsertStatement(new List<EntityBase>( new EntityBase[] { e } ));
    }

    /// <summary>
    /// Builds one INSERT ... SELECT ... UNION ALL ... covering every entity in
    /// <paramref name="list"/>. The id column always comes first.
    /// </summary>
    public string CreateInsertStatement(IList<EntityBase> list)
    {
        string fieldList = "";
        ObjectDescription od = ClassFactory.GetObjectDescription(_mapper.EntityType, _ps);
        fieldList += _mapper[od.IdField.Name, true];
        foreach (PropertyDescription pd in od.Properties)
        {
            if ( pd == od.IdField || pd.IsNonPersistent || pd.IsOneToManyRelation || pd.IsManyToManyRelation )
                continue;
            // fieldList already holds the id column, so a separator is always needed.
            fieldList += ", ";
            fieldList += SqlMapper[pd.Name];
        }
        string valuesTable = "";
        foreach ( EntityBase e in list)
        {
            string valuesRow = "";
            valuesRow += ValueToString(e.ID);
            foreach (PropertyDescription pd in od.Properties)
            {
                if ( pd == od.IdField || pd.IsNonPersistent || pd.IsOneToManyRelation || pd.IsManyToManyRelation )
                    continue;
                valuesRow += ", ";
                valuesRow += ValueToString(e[pd.Name]);
            }
            valuesRow = "SELECT " + valuesRow;
            if ( valuesTable != "" )
                valuesTable += "\r\nUNION ALL\r\n";
            valuesTable += valuesRow;
        }
        return _insertTemplate.Replace("@@FieldList", fieldList).Replace("@@Values", valuesTable);
    }

    /// <summary>
    /// Builds the INSERT populating a many-to-many relation table with one row
    /// per child of each entity in <paramref name="entities"/>. Returns an
    /// empty string when there is nothing to insert.
    /// </summary>
    /// <exception cref="Exception">When <paramref name="pd"/> is not a many-to-many relation.</exception>
    public static string CreateMtmInsert(PropertyDescription pd, PersistentStorage ps, IList<EntityBase> entities)
    {
        RelationAttribute attr = pd.GetAttribute<RelationAttribute>();
        if ( attr == null || attr.RelationType != RelationType.ManyToMany )
            throw new Exception("A many-to-many relationship expected");
        string script = "INSERT INTO " + (string.IsNullOrEmpty(attr.SchemaName) ? "" : attr.SchemaName + ".") +
            attr.MamyToManyRelationTable + "(" + attr.MtmRelationTableChildColumn + "," + attr.MtmRelationTableParentColumn + ")\r\n";
        string data = "";
        foreach ( EntityBase entity in entities )
            foreach ( EntityBase e in pd.GetValue<System.Collections.IList>(entity) )
                if ( string.IsNullOrEmpty(data) )
                    data = "SELECT " + ValueToString(e.ID) + ", " + ValueToString(entity.ID) + "\r\n";
                else
                    data += "UNION ALL\r\nSELECT " + ValueToString(e.ID) + ", " + ValueToString(entity.ID) + "\r\n";
        if ( string.IsNullOrEmpty(data) )
            return "";
        else
            script += data;
        return script;
    }

    /// <summary>
    /// Builds the DELETE clearing a many-to-many relation table of every row
    /// whose parent is one of <paramref name="entities"/>.
    /// NOTE(review): an empty entity list yields "IN ()", which is invalid SQL.
    /// </summary>
    /// <exception cref="Exception">When <paramref name="pd"/> is not a many-to-many relation.</exception>
    public static string CreateMtmDelete(PropertyDescription pd, PersistentStorage ps, IList<EntityBase> entities)
    {
        RelationAttribute attr = pd.GetAttribute<RelationAttribute>();
        if ( attr == null || attr.RelationType != RelationType.ManyToMany )
            throw new Exception("A many-to-many relationship expected");
        string script = "DELETE FROM " + (string.IsNullOrEmpty(attr.SchemaName) ? "" : attr.SchemaName + ".") +
            attr.MamyToManyRelationTable + "\r\nWHERE ";
        script += attr.MtmRelationTableParentColumn+" IN (";
        string data = "";
        foreach ( EntityBase entity in entities )
            if ( string.IsNullOrEmpty(data) )
                data = ValueToString(entity.ID);
            else
                data += ", " + ValueToString(entity.ID);
        script += data + ")\r\n";
        return script;
    }
}
}
| |
// ZipEntryFactory.cs
//
// Copyright 2006 John Reilly
//
// Copyright (C) 2001 Free Software Foundation, Inc.
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// Linking this library statically or dynamically with other modules is
// making a combined work based on this library. Thus, the terms and
// conditions of the GNU General Public License cover the whole
// combination.
//
// As a special exception, the copyright holders of this library give you
// permission to link this library with independent modules to produce an
// executable, regardless of the license terms of these independent
// modules, and to copy and distribute the resulting executable under
// terms of your choice, provided that you also meet, for each linked
// independent module, the terms and conditions of the license of that
// module. An independent module is a module which is not derived from
// or based on this library. If you modify this library, you may extend
// this exception to your version of the library, but you are not
// obligated to do so. If you do not wish to do so, delete this
// exception statement from your version.
// HISTORY
// 2012-11-29 Z-1684 Added MakeFileEntry(string fileName, string entryName, bool useFileSystem)
using System;
using System.IO;
using ICSharpCode.SharpZipLib.Core;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// Basic implementation of <see cref="IEntryFactory"></see>
/// </summary>
/// <remarks>NOTE(review): in this build the actual entry-creation logic in
/// <c>MakeFileEntry</c>/<c>MakeDirectoryEntry</c> is commented out and both
/// methods always return null — confirm whether this stubbing is intentional.</remarks>
public class ZipEntryFactory : IEntryFactory
{
#region Enumerations
/// <summary>
/// Defines the possible values to be used for the <see cref="ZipEntry.DateTime"/>.
/// </summary>
public enum TimeSetting
{
/// <summary>
/// Use the recorded LastWriteTime value for the file.
/// </summary>
LastWriteTime,
/// <summary>
/// Use the recorded LastWriteTimeUtc value for the file
/// </summary>
LastWriteTimeUtc,
/// <summary>
/// Use the recorded CreateTime value for the file.
/// </summary>
CreateTime,
/// <summary>
/// Use the recorded CreateTimeUtc value for the file.
/// </summary>
CreateTimeUtc,
/// <summary>
/// Use the recorded LastAccessTime value for the file.
/// </summary>
LastAccessTime,
/// <summary>
/// Use the recorded LastAccessTimeUtc value for the file.
/// </summary>
LastAccessTimeUtc,
/// <summary>
/// Use a fixed value.
/// </summary>
/// <remarks>The actual <see cref="DateTime"/> value used can be
/// specified via the <see cref="ZipEntryFactory(DateTime)"/> constructor or
/// using the <see cref="ZipEntryFactory(TimeSetting)"/> with the setting set
/// to <see cref="TimeSetting.Fixed"/> which will use the <see cref="DateTime"/> when this class was constructed.
/// The <see cref="FixedDateTime"/> property can also be used to set this value.</remarks>
Fixed,
}
#endregion
#region Constructors
/// <summary>
/// Initialise a new instance of the <see cref="ZipEntryFactory"/> class.
/// </summary>
/// <remarks>A default <see cref="INameTransform"/>, and the LastWriteTime for files is used.</remarks>
public ZipEntryFactory()
{
// timeSetting_ keeps its default enum value, i.e. TimeSetting.LastWriteTime.
nameTransform_ = new ZipNameTransform();
}
/// <summary>
/// Initialise a new instance of <see cref="ZipEntryFactory"/> using the specified <see cref="TimeSetting"/>
/// </summary>
/// <param name="timeSetting">The <see cref="TimeSetting">time setting</see> to use when creating <see cref="ZipEntry">Zip entries</see>.</param>
public ZipEntryFactory(TimeSetting timeSetting)
{
timeSetting_ = timeSetting;
nameTransform_ = new ZipNameTransform();
}
/// <summary>
/// Initialise a new instance of <see cref="ZipEntryFactory"/> using the specified <see cref="DateTime"/>
/// </summary>
/// <param name="time">The time to set all <see cref="ZipEntry.DateTime"/> values to.</param>
public ZipEntryFactory(DateTime time)
{
timeSetting_ = TimeSetting.Fixed;
// Assigned via the property so the Year < 1970 validation applies.
FixedDateTime = time;
nameTransform_ = new ZipNameTransform();
}
#endregion
#region Properties
/// <summary>
/// Get / set the <see cref="INameTransform"/> to be used when creating new <see cref="ZipEntry"/> values.
/// </summary>
/// <remarks>
/// Setting this property to null will cause a default <see cref="ZipNameTransform">name transform</see> to be used.
/// </remarks>
public INameTransform NameTransform
{
get { return nameTransform_; }
set
{
if (value == null) {
nameTransform_ = new ZipNameTransform();
}
else {
nameTransform_ = value;
}
}
}
/// <summary>
/// Get / set the <see cref="TimeSetting"/> in use.
/// </summary>
public TimeSetting Setting
{
get { return timeSetting_; }
set { timeSetting_ = value; }
}
/// <summary>
/// Get / set the <see cref="DateTime"/> value to use when <see cref="Setting"/> is set to <see cref="TimeSetting.Fixed"/>
/// </summary>
/// <exception cref="ArgumentException">Thrown when the value's year is before 1970.</exception>
public DateTime FixedDateTime
{
get { return fixedDateTime_; }
set
{
if (value.Year < 1970) {
throw new ArgumentException("Value is too old to be valid", "value");
}
fixedDateTime_ = value;
}
}
/// <summary>
/// A bitmask defining the attributes to be retrieved from the actual file.
/// </summary>
/// <remarks>The default is to get all possible attributes from the actual file.</remarks>
public int GetAttributes
{
get { return getAttributes_; }
set { getAttributes_ = value; }
}
/// <summary>
/// A bitmask defining which attributes are to be set on.
/// </summary>
/// <remarks>By default no attributes are set on.</remarks>
public int SetAttributes
{
get { return setAttributes_; }
set { setAttributes_ = value; }
}
/// <summary>
/// Get / set a value indicating whether unicode text should be used for created entries.
/// </summary>
public bool IsUnicodeText
{
get { return isUnicodeText_; }
set { isUnicodeText_ = value; }
}
#endregion
#region IEntryFactory Members
/// <summary>
/// Make a new <see cref="ZipEntry"/> for a file.
/// </summary>
/// <param name="fileName">The name of the file to create a new entry for.</param>
/// <returns>Returns a new <see cref="ZipEntry"/> based on the <paramref name="fileName"/>.</returns>
public ZipEntry MakeFileEntry(string fileName)
{
return MakeFileEntry(fileName, null, true);
}
/// <summary>
/// Make a new <see cref="ZipEntry"/> for a file.
/// </summary>
/// <param name="fileName">The name of the file to create a new entry for.</param>
/// <param name="useFileSystem">If true entry detail is retrieved from the file system if the file exists.</param>
/// <returns>Returns a new <see cref="ZipEntry"/> based on the <paramref name="fileName"/>.</returns>
public ZipEntry MakeFileEntry(string fileName, bool useFileSystem) {
return MakeFileEntry(fileName, null, useFileSystem);
}
/// <summary>
/// Make a new <see cref="ZipEntry"/> from a name.
/// </summary>
/// <param name="fileName">The name of the file to create a new entry for.</param>
/// <param name="entryName">An alternative name to be used for the new entry. Null if not applicable.</param>
/// <param name="useFileSystem">If true entry detail is retrieved from the file system if the file exists.</param>
/// <returns>Returns a new <see cref="ZipEntry"/> based on the <paramref name="fileName"/>.</returns>
public ZipEntry MakeFileEntry(string fileName, string entryName, bool useFileSystem)
{
// ZipEntry result = new ZipEntry(nameTransform_.TransformFile(entryName != null && entryName.Length > 0 ? entryName : fileName));
// result.IsUnicodeText = isUnicodeText_;
// int externalAttributes = 0;
// bool useAttributes = (setAttributes_ != 0);
// FileInfo fi = null;
// if (useFileSystem)
// {
// fi = new FileInfo(fileName);
// }
// if ((fi != null) && fi.Exists)
// {
// switch (timeSetting_)
// {
// case TimeSetting.CreateTime:
// result.DateTime = fi.CreationTime;
// break;
// case TimeSetting.CreateTimeUtc:
//#if NETCF_1_0 || NETCF_2_0 || UNITY_WINRT
// result.DateTime = fi.CreationTime.ToUniversalTime();
//#else
// result.DateTime = fi.CreationTimeUtc;
//#endif
// break;
// case TimeSetting.LastAccessTime:
// result.DateTime = fi.LastAccessTime;
// break;
// case TimeSetting.LastAccessTimeUtc:
//#if NETCF_1_0 || NETCF_2_0 || UNITY_WINRT
// result.DateTime = fi.LastAccessTime.ToUniversalTime();
//#else
// result.DateTime = fi.LastAccessTimeUtc;
//#endif
// break;
// case TimeSetting.LastWriteTime:
// result.DateTime = fi.LastWriteTime;
// break;
// case TimeSetting.LastWriteTimeUtc:
//#if NETCF_1_0 || NETCF_2_0 || UNITY_WINRT
// result.DateTime = fi.LastWriteTime.ToUniversalTime();
//#else
// result.DateTime = fi.LastWriteTimeUtc;
//#endif
// break;
// case TimeSetting.Fixed:
// result.DateTime = fixedDateTime_;
// break;
// default:
// throw new ZipException("Unhandled time setting in MakeFileEntry");
// }
// result.Size = fi.Length;
// useAttributes = true;
// externalAttributes = ((int)fi.Attributes & getAttributes_);
// }
// else
// {
// if (timeSetting_ == TimeSetting.Fixed)
// {
// result.DateTime = fixedDateTime_;
// }
// }
// if (useAttributes)
// {
// externalAttributes |= setAttributes_;
// result.ExternalFileAttributes = externalAttributes;
// }
// return result;
// NOTE(review): the original SharpZipLib implementation above is commented
// out in this build; callers currently always receive null.
return null;
}
/// <summary>
/// Make a new <see cref="ZipEntry"></see> for a directory.
/// </summary>
/// <param name="directoryName">The raw untransformed name for the new directory</param>
/// <returns>Returns a new <see cref="ZipEntry"></see> representing a directory.</returns>
public ZipEntry MakeDirectoryEntry(string directoryName)
{
return MakeDirectoryEntry(directoryName, true);
}
/// <summary>
/// Make a new <see cref="ZipEntry"></see> for a directory.
/// </summary>
/// <param name="directoryName">The raw untransformed name for the new directory</param>
/// <param name="useFileSystem">If true entry detail is retrieved from the file system if the file exists.</param>
/// <returns>Returns a new <see cref="ZipEntry"></see> representing a directory.</returns>
public ZipEntry MakeDirectoryEntry(string directoryName, bool useFileSystem)
{
// ZipEntry result = new ZipEntry(nameTransform_.TransformDirectory(directoryName));
// result.IsUnicodeText = isUnicodeText_;
// result.Size = 0;
// int externalAttributes = 0;
// DirectoryInfo di = null;
// if (useFileSystem)
// {
// di = new DirectoryInfo(directoryName);
// }
// if ((di != null) && di.Exists)
// {
// switch (timeSetting_)
// {
// case TimeSetting.CreateTime:
// result.DateTime = di.CreationTime;
// break;
// case TimeSetting.CreateTimeUtc:
//#if NETCF_1_0 || NETCF_2_0 || UNITY_WINRT
// result.DateTime = di.CreationTime.ToUniversalTime();
//#else
// result.DateTime = di.CreationTimeUtc;
//#endif
// break;
// case TimeSetting.LastAccessTime:
// result.DateTime = di.LastAccessTime;
// break;
// case TimeSetting.LastAccessTimeUtc:
//#if NETCF_1_0 || NETCF_2_0 || UNITY_WINRT
// result.DateTime = di.LastAccessTime.ToUniversalTime();
//#else
// result.DateTime = di.LastAccessTimeUtc;
//#endif
// break;
// case TimeSetting.LastWriteTime:
// result.DateTime = di.LastWriteTime;
// break;
// case TimeSetting.LastWriteTimeUtc:
//#if NETCF_1_0 || NETCF_2_0 || UNITY_WINRT
// result.DateTime = di.LastWriteTime.ToUniversalTime();
//#else
// result.DateTime = di.LastWriteTimeUtc;
//#endif
// break;
// case TimeSetting.Fixed:
// result.DateTime = fixedDateTime_;
// break;
// default:
// throw new ZipException("Unhandled time setting in MakeDirectoryEntry");
// }
// externalAttributes = ((int)di.Attributes & getAttributes_);
// }
// else
// {
// if (timeSetting_ == TimeSetting.Fixed)
// {
// result.DateTime = fixedDateTime_;
// }
// }
// // Always set directory attribute on.
// externalAttributes |= (setAttributes_ | 16);
// result.ExternalFileAttributes = externalAttributes;
// return result;
// NOTE(review): the original SharpZipLib implementation above is commented
// out in this build; callers currently always receive null.
return null;
}
#endregion
#region Instance Fields
// Transform applied to file/directory names before they become entry names.
INameTransform nameTransform_;
// NOTE(review): default fixed time is the local DateTime.Now captured at construction.
DateTime fixedDateTime_ = DateTime.Now;
// How ZipEntry.DateTime is chosen; default enum value is LastWriteTime.
TimeSetting timeSetting_;
bool isUnicodeText_;
// -1 = retrieve all file attributes by default.
int getAttributes_ = -1;
int setAttributes_;
#endregion
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using System.Threading;
using System.Timers;
using Timer = System.Timers.Timer;
using Nini.Config;
using NUnit.Framework;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Communications;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.ClientStack.Linden;
using OpenSim.Region.CoreModules.Framework.EntityTransfer;
using OpenSim.Region.CoreModules.World.Serialiser;
using OpenSim.Region.CoreModules.ServiceConnectorsOut.Simulation;
using OpenSim.Tests.Common;
using OpenSim.Tests.Common.Mock;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
namespace OpenSim.Region.Framework.Scenes.Tests
{
/// <summary>
/// Scene presence tests
/// </summary>
/// <remarks>NOTE(review): the commented-out fixture fields, Init() and the
/// T010/T011 tests below are legacy multi-scene setup kept for reference;
/// the active tests each build their own TestScene.</remarks>
[TestFixture]
public class ScenePresenceAgentTests : OpenSimTestCase
{
// public Scene scene, scene2, scene3;
// public UUID agent1, agent2, agent3;
// public static Random random;
// public ulong region1, region2, region3;
// public AgentCircuitData acd1;
// public TestClient testclient;
// [TestFixtureSetUp]
// public void Init()
// {
//// TestHelpers.InMethod();
////
//// SceneHelpers sh = new SceneHelpers();
////
//// scene = sh.SetupScene("Neighbour x", UUID.Random(), 1000, 1000);
//// scene2 = sh.SetupScene("Neighbour x+1", UUID.Random(), 1001, 1000);
//// scene3 = sh.SetupScene("Neighbour x-1", UUID.Random(), 999, 1000);
////
//// ISharedRegionModule interregionComms = new LocalSimulationConnectorModule();
//// interregionComms.Initialise(new IniConfigSource());
//// interregionComms.PostInitialise();
//// SceneHelpers.SetupSceneModules(scene, new IniConfigSource(), interregionComms);
//// SceneHelpers.SetupSceneModules(scene2, new IniConfigSource(), interregionComms);
//// SceneHelpers.SetupSceneModules(scene3, new IniConfigSource(), interregionComms);
//
//// agent1 = UUID.Random();
//// agent2 = UUID.Random();
//// agent3 = UUID.Random();
//
//// region1 = scene.RegionInfo.RegionHandle;
//// region2 = scene2.RegionInfo.RegionHandle;
//// region3 = scene3.RegionInfo.RegionHandle;
// }
/// <summary>
/// Adding an agent to a scene registers exactly one circuit and one root
/// (non-child) scene presence with the expected UUID.
/// </summary>
[Test]
public void TestCreateRootScenePresence()
{
TestHelpers.InMethod();
// TestHelpers.EnableLogging();
UUID spUuid = TestHelpers.ParseTail(0x1);
TestScene scene = new SceneHelpers().SetupScene();
SceneHelpers.AddScenePresence(scene, spUuid);
Assert.That(scene.AuthenticateHandler.GetAgentCircuitData(spUuid), Is.Not.Null);
Assert.That(scene.AuthenticateHandler.GetAgentCircuits().Count, Is.EqualTo(1));
ScenePresence sp = scene.GetScenePresence(spUuid);
Assert.That(sp, Is.Not.Null);
Assert.That(sp.IsChildAgent, Is.False);
Assert.That(sp.UUID, Is.EqualTo(spUuid));
Assert.That(scene.GetScenePresences().Count, Is.EqualTo(1));
}
/// <summary>
/// Adding the same agent twice must still leave a single circuit and a
/// single root scene presence (no duplicates).
/// </summary>
[Test]
public void TestCreateDuplicateRootScenePresence()
{
TestHelpers.InMethod();
// TestHelpers.EnableLogging();
UUID spUuid = TestHelpers.ParseTail(0x1);
TestScene scene = new SceneHelpers().SetupScene();
SceneHelpers.AddScenePresence(scene, spUuid);
SceneHelpers.AddScenePresence(scene, spUuid);
Assert.That(scene.AuthenticateHandler.GetAgentCircuitData(spUuid), Is.Not.Null);
Assert.That(scene.AuthenticateHandler.GetAgentCircuits().Count, Is.EqualTo(1));
ScenePresence sp = scene.GetScenePresence(spUuid);
Assert.That(sp, Is.Not.Null);
Assert.That(sp.IsChildAgent, Is.False);
Assert.That(sp.UUID, Is.EqualTo(spUuid));
}
/// <summary>
/// Closing an agent removes both its scene presence and its circuit data.
/// </summary>
[Test]
public void TestCloseAgent()
{
TestHelpers.InMethod();
// TestHelpers.EnableLogging();
TestScene scene = new SceneHelpers().SetupScene();
ScenePresence sp = SceneHelpers.AddScenePresence(scene, TestHelpers.ParseTail(0x1));
scene.IncomingCloseAgent(sp.UUID, false);
Assert.That(scene.GetScenePresence(sp.UUID), Is.Null);
Assert.That(scene.AuthenticateHandler.GetAgentCircuitData(sp.UUID), Is.Null);
Assert.That(scene.AuthenticateHandler.GetAgentCircuits().Count, Is.EqualTo(0));
// TestHelpers.DisableLogging();
}
/// <summary>
/// Establishing a child agent in two stages: creating the agent via the
/// simulation service registers a circuit but no presence; connecting the
/// client afterwards yields a child scene presence.
/// </summary>
[Test]
public void TestCreateChildScenePresence()
{
TestHelpers.InMethod();
// log4net.Config.XmlConfigurator.Configure();
LocalSimulationConnectorModule lsc = new LocalSimulationConnectorModule();
IConfigSource configSource = new IniConfigSource();
IConfig config = configSource.AddConfig("Modules");
config.Set("SimulationServices", "LocalSimulationConnectorModule");
SceneHelpers sceneHelpers = new SceneHelpers();
TestScene scene = sceneHelpers.SetupScene();
SceneHelpers.SetupSceneModules(scene, configSource, lsc);
UUID agentId = TestHelpers.ParseTail(0x01);
AgentCircuitData acd = SceneHelpers.GenerateAgentData(agentId);
acd.child = true;
GridRegion region = scene.GridService.GetRegionByName(UUID.Zero, scene.RegionInfo.RegionName);
string reason;
// *** This is the first stage, when a neighbouring region is told that a viewer is about to try and
// establish a child scene presence. We pass in the circuit code that the client has to connect with ***
// XXX: ViaLogin may not be correct here.
scene.SimulationService.CreateAgent(region, acd, (uint)TeleportFlags.ViaLogin, out reason);
Assert.That(scene.AuthenticateHandler.GetAgentCircuitData(agentId), Is.Not.Null);
Assert.That(scene.AuthenticateHandler.GetAgentCircuits().Count, Is.EqualTo(1));
// There's no scene presence yet since only an agent circuit has been established.
Assert.That(scene.GetScenePresence(agentId), Is.Null);
// *** This is the second stage, where the client established a child agent/scene presence using the
// circuit code given to the scene in stage 1 ***
TestClient client = new TestClient(acd, scene);
scene.AddNewClient(client, PresenceType.User);
Assert.That(scene.AuthenticateHandler.GetAgentCircuitData(agentId), Is.Not.Null);
Assert.That(scene.AuthenticateHandler.GetAgentCircuits().Count, Is.EqualTo(1));
ScenePresence sp = scene.GetScenePresence(agentId);
Assert.That(sp, Is.Not.Null);
Assert.That(sp.UUID, Is.EqualTo(agentId));
Assert.That(sp.IsChildAgent, Is.True);
}
/// <summary>
/// Test that if a root agent logs into a region, a child agent is also established in the neighbouring region
/// </summary>
/// <remarks>
/// Please note that unlike the other tests here, this doesn't rely on anything set up in the instance fields.
/// INCOMPLETE
/// </remarks>
[Test]
public void TestChildAgentEstablishedInNeighbour()
{
TestHelpers.InMethod();
// log4net.Config.XmlConfigurator.Configure();
// UUID agent1Id = UUID.Parse("00000000-0000-0000-0000-000000000001");
TestScene myScene1 = new SceneHelpers().SetupScene("Neighbour y", UUID.Random(), 1000, 1000);
TestScene myScene2 = new SceneHelpers().SetupScene("Neighbour y + 1", UUID.Random(), 1001, 1000);
IConfigSource configSource = new IniConfigSource();
IConfig config = configSource.AddConfig("Startup");
config.Set("serverside_object_permissions", true);
config.Set("EventQueue", true);
EntityTransferModule etm = new EntityTransferModule();
EventQueueGetModule eqgm1 = new EventQueueGetModule();
SceneHelpers.SetupSceneModules(myScene1, configSource, etm, eqgm1);
EventQueueGetModule eqgm2 = new EventQueueGetModule();
SceneHelpers.SetupSceneModules(myScene2, configSource, etm, eqgm2);
// SceneHelpers.AddScenePresence(myScene1, agent1Id);
// ScenePresence childPresence = myScene2.GetScenePresence(agent1);
//
// // TODO: Need to do a fair amount of work to allow synchronous establishment of child agents
// Assert.That(childPresence, Is.Not.Null);
// Assert.That(childPresence.IsChildAgent, Is.True);
}
// /// <summary>
// /// Test adding a root agent to a scene. Doesn't yet actually complete crossing the agent into the scene.
// /// </summary>
// [Test]
// public void T010_TestAddRootAgent()
// {
// TestHelpers.InMethod();
//
// string firstName = "testfirstname";
//
// AgentCircuitData agent = new AgentCircuitData();
// agent.AgentID = agent1;
// agent.firstname = firstName;
// agent.lastname = "testlastname";
// agent.SessionID = UUID.Random();
// agent.SecureSessionID = UUID.Random();
// agent.circuitcode = 123;
// agent.BaseFolder = UUID.Zero;
// agent.InventoryFolder = UUID.Zero;
// agent.startpos = Vector3.Zero;
// agent.CapsPath = GetRandomCapsObjectPath();
// agent.ChildrenCapSeeds = new Dictionary<ulong, string>();
// agent.child = true;
//
// scene.PresenceService.LoginAgent(agent.AgentID.ToString(), agent.SessionID, agent.SecureSessionID);
//
// string reason;
// scene.NewUserConnection(agent, (uint)TeleportFlags.ViaLogin, out reason);
// testclient = new TestClient(agent, scene);
// scene.AddNewClient(testclient);
//
// ScenePresence presence = scene.GetScenePresence(agent1);
//
// Assert.That(presence, Is.Not.Null, "presence is null");
// Assert.That(presence.Firstname, Is.EqualTo(firstName), "First name not same");
// acd1 = agent;
// }
//
// /// <summary>
// /// Test removing an uncrossed root agent from a scene.
// /// </summary>
// [Test]
// public void T011_TestRemoveRootAgent()
// {
// TestHelpers.InMethod();
//
// scene.RemoveClient(agent1);
//
// ScenePresence presence = scene.GetScenePresence(agent1);
//
// Assert.That(presence, Is.Null, "presence is not null");
// }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using Xunit;
// Exercises ThreadPoolBoundHandle end-to-end against real overlapped (asynchronous) file writes.
// NOTE(review): HandleFactory, DllImport and TestDirectory are supplied by other files of this
// partial class / test support code; DllImport appears to wrap the native WriteFile API and its
// error constants - confirm there.
public partial class ThreadPoolBoundHandleTests
{
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // ThreadPoolBoundHandle.BindHandle is not supported on Unix
public unsafe void SingleOperationOverSingleHandle()
{
// One bound handle, one overlapped write: the completion callback must report success
// (error code 0) and the full number of bytes written.
const int DATA_SIZE = 2;
SafeHandle handle = HandleFactory.CreateAsyncFileHandleForWrite(Path.Combine(TestDirectory, @"SingleOverlappedOverSingleHandle.tmp"));
ThreadPoolBoundHandle boundHandle = ThreadPoolBoundHandle.BindHandle(handle);
OverlappedContext result = new OverlappedContext();
byte[] data = new byte[DATA_SIZE];
data[0] = (byte)'A';
data[1] = (byte)'B';
// Associates the completion callback and the context object with the native overlapped struct.
NativeOverlapped* overlapped = boundHandle.AllocateNativeOverlapped(OnOverlappedOperationCompleted, result, data);
fixed (byte* p = data)
{
int retval = DllImport.WriteFile(boundHandle.Handle, p, DATA_SIZE, IntPtr.Zero, overlapped);
// A zero return is acceptable only when the write was queued asynchronously (ERROR_IO_PENDING);
// any other last error fails the assertion.
if (retval == 0)
{
Assert.Equal(DllImport.ERROR_IO_PENDING, Marshal.GetLastWin32Error());
}
// Wait for overlapped operation to complete
// (waiting inside the fixed block keeps the data buffer pinned until completion).
result.Event.WaitOne();
}
// Free the native overlapped only after completion has been signalled.
boundHandle.FreeNativeOverlapped(overlapped);
boundHandle.Dispose();
handle.Dispose();
Assert.Equal(0, result.ErrorCode);
Assert.Equal(DATA_SIZE, result.BytesWritten);
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // ThreadPoolBoundHandle.BindHandle is not supported on Unix
public unsafe void MultipleOperationsOverSingleHandle()
{
// Two concurrent overlapped writes through the same bound handle, to disjoint file
// offsets; each completion must independently report success and its own byte count.
const int DATA_SIZE = 2;
SafeHandle handle = HandleFactory.CreateAsyncFileHandleForWrite(Path.Combine(TestDirectory, @"MultipleOperationsOverSingleHandle.tmp"));
ThreadPoolBoundHandle boundHandle = ThreadPoolBoundHandle.BindHandle(handle);
OverlappedContext result1 = new OverlappedContext();
OverlappedContext result2 = new OverlappedContext();
byte[] data1 = new byte[DATA_SIZE];
data1[0] = (byte)'A';
data1[1] = (byte)'B';
byte[] data2 = new byte[DATA_SIZE];
data2[0] = (byte)'C';
data2[1] = (byte)'D';
NativeOverlapped* overlapped1 = boundHandle.AllocateNativeOverlapped(OnOverlappedOperationCompleted, result1, data1);
NativeOverlapped* overlapped2 = boundHandle.AllocateNativeOverlapped(OnOverlappedOperationCompleted, result2, data2);
fixed (byte* p1 = data1, p2 = data2)
{
int retval = DllImport.WriteFile(boundHandle.Handle, p1, DATA_SIZE, IntPtr.Zero, overlapped1);
if (retval == 0)
{
Assert.Equal(DllImport.ERROR_IO_PENDING, Marshal.GetLastWin32Error());
}
// Start the offset after the above write, so that it doesn't overwrite the previous write
overlapped2->OffsetLow = DATA_SIZE;
retval = DllImport.WriteFile(boundHandle.Handle, p2, DATA_SIZE, IntPtr.Zero, overlapped2);
if (retval == 0)
{
Assert.Equal(DllImport.ERROR_IO_PENDING, Marshal.GetLastWin32Error());
}
// Wait for overlapped operations to complete
WaitHandle.WaitAll(new WaitHandle[] { result1.Event, result2.Event });
}
boundHandle.FreeNativeOverlapped(overlapped1);
boundHandle.FreeNativeOverlapped(overlapped2);
boundHandle.Dispose();
handle.Dispose();
Assert.Equal(0, result1.ErrorCode);
Assert.Equal(0, result2.ErrorCode);
Assert.Equal(DATA_SIZE, result1.BytesWritten);
Assert.Equal(DATA_SIZE, result2.BytesWritten);
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // ThreadPoolBoundHandle.BindHandle is not supported on Unix
public unsafe void MultipleOperationsOverMultipleHandles()
{
// Repeatedly (10 iterations) reuses two PreAllocatedOverlapped instances across two
// different bound handles, re-allocating and freeing a NativeOverlapped each round.
const int DATA_SIZE = 2;
SafeHandle handle1 = HandleFactory.CreateAsyncFileHandleForWrite(Path.Combine(TestDirectory, @"MultipleOperationsOverMultipleHandle1.tmp"));
SafeHandle handle2 = HandleFactory.CreateAsyncFileHandleForWrite(Path.Combine(TestDirectory, @"MultipleOperationsOverMultipleHandle2.tmp"));
ThreadPoolBoundHandle boundHandle1 = ThreadPoolBoundHandle.BindHandle(handle1);
ThreadPoolBoundHandle boundHandle2 = ThreadPoolBoundHandle.BindHandle(handle2);
OverlappedContext result1 = new OverlappedContext();
OverlappedContext result2 = new OverlappedContext();
byte[] data1 = new byte[DATA_SIZE];
data1[0] = (byte)'A';
data1[1] = (byte)'B';
byte[] data2 = new byte[DATA_SIZE];
data2[0] = (byte)'C';
data2[1] = (byte)'D';
PreAllocatedOverlapped preAlloc1 = new PreAllocatedOverlapped(OnOverlappedOperationCompleted, result1, data1);
PreAllocatedOverlapped preAlloc2 = new PreAllocatedOverlapped(OnOverlappedOperationCompleted, result2, data2);
for (int i = 0; i < 10; i++)
{
NativeOverlapped* overlapped1 = boundHandle1.AllocateNativeOverlapped(preAlloc1);
NativeOverlapped* overlapped2 = boundHandle2.AllocateNativeOverlapped(preAlloc2);
fixed (byte* p1 = data1, p2 = data2)
{
int retval = DllImport.WriteFile(boundHandle1.Handle, p1, DATA_SIZE, IntPtr.Zero, overlapped1);
if (retval == 0)
{
Assert.Equal(DllImport.ERROR_IO_PENDING, Marshal.GetLastWin32Error());
}
retval = DllImport.WriteFile(boundHandle2.Handle, p2, DATA_SIZE, IntPtr.Zero, overlapped2);
if (retval == 0)
{
Assert.Equal(DllImport.ERROR_IO_PENDING, Marshal.GetLastWin32Error());
}
// Wait for overlapped operations to complete
WaitHandle.WaitAll(new WaitHandle[] { result1.Event, result2.Event });
}
boundHandle1.FreeNativeOverlapped(overlapped1);
boundHandle2.FreeNativeOverlapped(overlapped2);
// Reset the manual-reset events so the next iteration waits for its own completions.
result1.Event.Reset();
result2.Event.Reset();
Assert.Equal(0, result1.ErrorCode);
Assert.Equal(0, result2.ErrorCode);
Assert.Equal(DATA_SIZE, result1.BytesWritten);
Assert.Equal(DATA_SIZE, result2.BytesWritten);
}
boundHandle1.Dispose();
boundHandle2.Dispose();
preAlloc1.Dispose();
preAlloc2.Dispose();
handle1.Dispose();
handle2.Dispose();
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // ThreadPoolBoundHandle.BindHandle is not supported on Unix
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Active Issue dotnet/corefx#13343")]
public unsafe void FlowsAsyncLocalsToCallback()
{ // Makes sure that we flow async locals to callback
const int DATA_SIZE = 2;
SafeHandle handle = HandleFactory.CreateAsyncFileHandleForWrite(Path.Combine(TestDirectory, @"AsyncLocal.tmp"));
ThreadPoolBoundHandle boundHandle = ThreadPoolBoundHandle.BindHandle(handle);
OverlappedContext context = new OverlappedContext();
byte[] data = new byte[DATA_SIZE];
// The AsyncLocal value is set before the overlapped is allocated; if the execution context
// is flowed to the completion callback, the callback observes 10.
AsyncLocal<int> asyncLocal = new AsyncLocal<int>();
asyncLocal.Value = 10;
int? result = null;
IOCompletionCallback callback = (_, __, ___) => {
result = asyncLocal.Value;
OnOverlappedOperationCompleted(_, __, ___);
};
NativeOverlapped* overlapped = boundHandle.AllocateNativeOverlapped(callback, context, data);
fixed (byte* p = data)
{
int retval = DllImport.WriteFile(boundHandle.Handle, p, DATA_SIZE, IntPtr.Zero, overlapped);
if (retval == 0)
{
Assert.Equal(DllImport.ERROR_IO_PENDING, Marshal.GetLastWin32Error());
}
// Wait for overlapped operation to complete
context.Event.WaitOne();
}
boundHandle.FreeNativeOverlapped(overlapped);
boundHandle.Dispose();
handle.Dispose();
Assert.Equal(10, result);
}
// Shared completion callback: records the native error code and byte count into the
// OverlappedContext passed at allocation time, then signals the waiting test thread.
private static unsafe void OnOverlappedOperationCompleted(uint errorCode, uint numBytes, NativeOverlapped* overlapped)
{
OverlappedContext result = (OverlappedContext)ThreadPoolBoundHandle.GetNativeOverlappedState(overlapped);
result.ErrorCode = (int)errorCode;
result.BytesWritten = (int)numBytes;
// Signal original thread to indicate overlapped completed
result.Event.Set();
}
// Per-operation completion state shared between the test thread and the I/O callback.
private class OverlappedContext
{
// Signalled by the completion callback; reset between iterations when reused.
public readonly ManualResetEvent Event = new ManualResetEvent(false);
// Win32 error code reported to the completion callback (0 on success).
public int ErrorCode;
// Number of bytes the overlapped operation transferred.
public int BytesWritten;
}
}
| |
namespace Microsoft.Protocols.TestSuites.MS_OXORULE
{
using System;
using System.Collections.Generic;
using System.Net;
using Microsoft.Protocols.TestSuites.Common;
using Microsoft.Protocols.TestTools;
/// <summary>
/// The MapiHttpAdapter class contains the MAPIHTTP implements for the interfaces of IMS_OXNSPIAdapter.
/// </summary>
public class NspiMapiHttpAdapter
{
#region Variables
/// <summary>
/// The Site instance.
/// </summary>
private ITestSite site;
/// <summary>
/// The Mailbox userName which can be used by client to connect to the SUT.
/// </summary>
private string userName;
/// <summary>
/// The user password which can be used by client to access to the SUT.
/// </summary>
private string password;
/// <summary>
/// Define the name of domain where the server belongs to.
/// </summary>
private string domainName;
/// <summary>
/// The URL that a client can use to connect with a NSPI server through MAPI over HTTP.
/// </summary>
private string addressBookUrl;
#endregion
/// <summary>
/// Initializes a new instance of the <see cref="NspiMapiHttpAdapter" /> class.
/// </summary>
/// <param name="site">The Site instance.</param>
/// <param name="userName">The Mailbox userName which can be used by client to connect to the SUT.</param>
/// <param name="password">The user password which can be used by client to access to the SUT.</param>
/// <param name="domainName">Define the name of domain where the server belongs to.</param>
/// <param name="addressBookUrl">The URL that a client can use to connect with a NSPI server through MAPI over HTTP.</param>
public NspiMapiHttpAdapter(ITestSite site, string userName, string password, string domainName, string addressBookUrl)
{
this.site = site;
this.userName = userName;
this.password = password;
this.domainName = domainName;
this.addressBookUrl = addressBookUrl;
}
#region Instance interface
/// <summary>
/// The NspiBind method initiates a session between a client and the server.
/// </summary>
/// <param name="flags">A DWORD value that contains a set of bit flags.</param>
/// <param name="stat">A STAT block that describes a logical position in a specific address book container.</param>
/// <param name="serverGuid">The value NULL or a pointer to a GUID value that is associated with the specific server.</param>
/// <returns>Status of NSPI method.</returns>
public ErrorCodeValue Bind(uint flags, STAT stat, ref FlatUID_r? serverGuid)
{
ErrorCodeValue result;
BindRequestBody bindRequestBody = this.BuildBindRequestBody(stat, flags);
byte[] rawBuffer = null;
ChunkedResponse chunkedResponse = null;
BindResponseBody bindResponseBody = null;
// Send the execute HTTP request and get the response
HttpWebResponse response = MapiHttpAdapter.SendMAPIHttpRequest(this.site, this.addressBookUrl, this.userName, this.domainName, this.password, bindRequestBody, RequestType.Bind.ToString(), AdapterHelper.SessionContextCookies);
// Read the HTTP response buffer and parse the response to correct format
rawBuffer = MapiHttpAdapter.ReadHttpResponse(response);
// The transport-level status comes from the X-ResponseCode header; only when it reports
// success is the chunked body parsed and its own error code / server GUID used instead.
result = (ErrorCodeValue)int.Parse(response.Headers["X-ResponseCode"]);
if (result == ErrorCodeValue.Success)
{
chunkedResponse = ChunkedResponse.ParseChunkedResponse(rawBuffer);
bindResponseBody = BindResponseBody.Parse(chunkedResponse.ResponseBodyRawData);
result = (ErrorCodeValue)bindResponseBody.ErrorCode;
if (bindResponseBody.ServerGuid != null)
{
FlatUID_r newGuid = new FlatUID_r();
newGuid.Ab = bindResponseBody.ServerGuid.ToByteArray();
serverGuid = newGuid;
}
else
{
serverGuid = null;
}
}
response.GetResponseStream().Close();
// Persist the session context cookies so subsequent requests reuse the same session.
AdapterHelper.SessionContextCookies = response.Cookies;
return result;
}
/// <summary>
/// The NspiUnbind method destroys the context handle. No other action is taken.
/// </summary>
/// <param name="reserved">A DWORD [MS-DTYP] value reserved for future use. This property is ignored by the server.</param>
/// <returns>A DWORD value that specifies the return status of the method.</returns>
public uint Unbind(uint reserved)
{
uint result;
UnbindRequestBody unbindRequest = this.BuildUnbindRequestBody();
ChunkedResponse chunkedResponse = this.SendAddressBookRequest(unbindRequest, RequestType.Unbind);
// Unbind ends the session, so the stored session context cookies are discarded.
AdapterHelper.SessionContextCookies = new CookieCollection();
UnbindResponseBody unbindResponseBody = UnbindResponseBody.Parse(chunkedResponse.ResponseBodyRawData);
result = unbindResponseBody.ErrorCode;
return result;
}
/// <summary>
/// The NspiQueryRows method returns a number of rows from a specified table to the client.
/// </summary>
/// <param name="flags">A DWORD value that contains a set of bit flags.</param>
/// <param name="stat">A STAT block that describes a logical position in a specific address book container.</param>
/// <param name="tableCount">A DWORD value that contains the number values in the input parameter table.
/// This value is limited to 100,000.</param>
/// <param name="table">An array of DWORD values, representing an Explicit Table.</param>
/// <param name="count">A DWORD value that contains the number of rows the client is requesting.</param>
/// <param name="propTags">The value NULL or a reference to a PropertyTagArray_r value,
/// containing a list of the proptags of the properties that the client requires to be returned for each row returned.</param>
/// <param name="rows">A nullable PropertyRowSet_r value, it contains the address book container rows that the server returns in response to the request.</param>
/// <returns>Status of NSPI method.</returns>
public ErrorCodeValue QueryRows(uint flags, ref STAT stat, uint tableCount, uint[] table, uint count, PropertyTagArray_r? propTags, out PropertyRowSet_r? rows)
{
ErrorCodeValue result;
QueryRowsRequestBody queryRowsRequestBody = new QueryRowsRequestBody();
LargePropTagArray propetyTags = new LargePropTagArray();
if (propTags != null)
{
propetyTags.PropertyTagCount = propTags.Value.Values;
propetyTags.PropertyTags = new PropertyTag[propetyTags.PropertyTagCount];
for (int i = 0; i < propTags.Value.Values; i++)
{
// Split each proptag DWORD into its property id (high word) and property type (low word).
propetyTags.PropertyTags[i].PropertyId = (ushort)((propTags.Value.AulPropTag[i] & 0xFFFF0000) >> 16);
propetyTags.PropertyTags[i].PropertyType = (ushort)(propTags.Value.AulPropTag[i] & 0x0000FFFF);
}
queryRowsRequestBody.HasColumns = true;
queryRowsRequestBody.Columns = propetyTags;
}
queryRowsRequestBody.Flags = flags;
queryRowsRequestBody.HasState = true;
queryRowsRequestBody.State = stat;
queryRowsRequestBody.ExplicitTableCount = tableCount;
queryRowsRequestBody.ExplicitTable = table;
queryRowsRequestBody.RowCount = count;
byte[] auxIn = new byte[] { };
queryRowsRequestBody.AuxiliaryBuffer = auxIn;
queryRowsRequestBody.AuxiliaryBufferSize = (uint)auxIn.Length;
ChunkedResponse chunkedResponse = this.SendAddressBookRequest(queryRowsRequestBody, RequestType.QueryRows);
QueryRowsResponseBody queryRowsResponseBody = QueryRowsResponseBody.Parse(chunkedResponse.ResponseBodyRawData);
result = (ErrorCodeValue)queryRowsResponseBody.ErrorCode;
if (queryRowsResponseBody.RowCount != null)
{
PropertyRowSet_r newRows = AdapterHelper.ParsePropertyRowSet_r(queryRowsResponseBody.Columns.Value, queryRowsResponseBody.RowCount.Value, queryRowsResponseBody.RowData);
rows = newRows;
}
else
{
rows = null;
}
// The server may return an updated STAT block; propagate it back to the caller via ref.
if (queryRowsResponseBody.HasState)
{
stat = queryRowsResponseBody.State.Value;
}
return result;
}
#endregion
#region Private method
/// <summary>
/// Send the request to address book server endpoint.
/// </summary>
/// <param name="requestBody">The request body.</param>
/// <param name="requestType">The type of the request.</param>
/// <param name="cookieChange">Whether the session context cookie is changed.</param>
/// <returns>The returned chunked response.</returns>
private ChunkedResponse SendAddressBookRequest(IRequestBody requestBody, RequestType requestType, bool cookieChange = true)
{
byte[] rawBuffer = null;
ChunkedResponse chunkedResponse = null;
// Send the execute HTTP request and get the response
HttpWebResponse response = MapiHttpAdapter.SendMAPIHttpRequest(this.site, this.addressBookUrl, this.userName, this.domainName, this.password, requestBody, requestType.ToString(), AdapterHelper.SessionContextCookies);
rawBuffer = MapiHttpAdapter.ReadHttpResponse(response);
// Unlike Bind, this helper treats any non-zero X-ResponseCode as a test failure.
string responseCode = response.Headers["X-ResponseCode"];
this.site.Assert.AreEqual<uint>(0, uint.Parse(responseCode), "The request to the address book server should be executed successfully!");
// Read the HTTP response buffer and parse the response to correct format
chunkedResponse = ChunkedResponse.ParseChunkedResponse(rawBuffer);
response.GetResponseStream().Close();
if (cookieChange)
{
AdapterHelper.SessionContextCookies = response.Cookies;
}
return chunkedResponse;
}
/// <summary>
/// Initialize Bind request body.
/// </summary>
/// <param name="stat">A STAT block that describes a logical position in a specific address book container.</param>
/// <param name="flags">A set of bit flags that specify options to the server.</param>
/// <returns>An instance of the Bind request body.</returns>
private BindRequestBody BuildBindRequestBody(STAT stat, uint flags)
{
BindRequestBody bindRequestBody = new BindRequestBody();
bindRequestBody.State = stat;
bindRequestBody.Flags = flags;
bindRequestBody.HasState = true;
// An empty auxiliary buffer is still sent, with its size field set to zero.
byte[] auxIn = new byte[] { };
bindRequestBody.AuxiliaryBuffer = auxIn;
bindRequestBody.AuxiliaryBufferSize = (uint)auxIn.Length;
return bindRequestBody;
}
/// <summary>
/// Initialize the Unbind request body.
/// </summary>
/// <returns>The Unbind request body</returns>
private UnbindRequestBody BuildUnbindRequestBody()
{
UnbindRequestBody unbindRequest = new UnbindRequestBody();
unbindRequest.Reserved = 0x00000000;
byte[] auxIn = new byte[] { };
unbindRequest.AuxiliaryBuffer = auxIn;
unbindRequest.AuxiliaryBufferSize = (uint)auxIn.Length;
return unbindRequest;
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
namespace Greatbone
{
/// <summary>
/// A set of commonly-used array operations.
/// </summary>
public static class ArrayUtility
{
    /// <summary>
    /// Returns a new array with <paramref name="v"/> appended. When <paramref name="limit"/> is
    /// positive and the array already has at least that many elements, the result is capped at
    /// <paramref name="limit"/> elements, keeping the newest ones (a sliding window).
    /// </summary>
    public static E[] AddOf<E>(this E[] arr, E v, int limit = 0)
    {
        if (arr == null || arr.Length == 0)
        {
            return new[] {v};
        }
        int len = arr.Length;
        E[] alloc;
        if (limit > 0 && limit <= len)
        {
            // keep only the last (limit - 1) existing elements, then the new value
            alloc = new E[limit];
            Array.Copy(arr, len - limit + 1, alloc, 0, limit - 1);
            alloc[limit - 1] = v;
        }
        else
        {
            alloc = new E[len + 1];
            Array.Copy(arr, alloc, len);
            alloc[len] = v;
        }
        return alloc;
    }

    /// <summary>
    /// Returns a new array that is the concatenation of <paramref name="arr"/> and <paramref name="v"/>.
    /// Returns <paramref name="v"/> itself when <paramref name="arr"/> is null or empty.
    /// </summary>
    public static E[] AddOf<E>(this E[] arr, params E[] v)
    {
        if (arr == null || arr.Length == 0)
        {
            return v;
        }
        int len = arr.Length;
        int vlen = v.Length;
        E[] alloc = new E[len + vlen];
        Array.Copy(arr, alloc, len);
        Array.Copy(v, 0, alloc, len, vlen);
        return alloc;
    }

    /// <summary>
    /// Returns an array containing all of <paramref name="arr"/> plus those non-null elements of
    /// <paramref name="v"/> not already present in <paramref name="arr"/> (compared with
    /// <see cref="IEquatable{T}.Equals(T)"/>). Returns the original array instance when nothing
    /// new is added. Note: duplicates within <paramref name="v"/> itself are not removed.
    /// </summary>
    public static E[] MergeOf<E>(this E[] arr, params E[] v) where E : IEquatable<E>
    {
        if (arr == null || arr.Length == 0)
        {
            return v;
        }
        int len = arr.Length;
        int vlen = v.Length;
        // standard List<E> instead of the hand-rolled ValueList<E>
        List<E> additions = new List<E>();
        for (int i = 0; i < vlen; i++) // out loop
        {
            var t = v[i];
            if (t == null) continue;
            bool dup = false; // found duplicate
            for (int k = 0; k < len; k++) // match among arr elements
            {
                var a = arr[k];
                if (a == null) continue;
                if (a.Equals(t))
                {
                    dup = true;
                    break;
                }
            }
            if (!dup)
            {
                additions.Add(t);
            }
        }
        int count = additions.Count;
        if (count > 0)
        {
            E[] alloc = new E[len + count];
            Array.Copy(arr, alloc, len);
            // copy new elements
            for (int i = 0; i < count; i++)
            {
                alloc[len + i] = additions[i];
            }
            return alloc;
        }
        return arr;
    }

    /// <summary>
    /// Returns a new array with the element at <paramref name="index"/> removed.
    /// Returns the original array when the index is out of range, or null for a null input.
    /// </summary>
    public static E[] RemovedOf<E>(this E[] arr, int index)
    {
        if (arr == null) return null;
        int len = arr.Length;
        if (index >= len || index < 0) return arr;
        E[] alloc = new E[len - 1];
        Array.Copy(arr, 0, alloc, 0, index);
        int next = index + 1;
        Array.Copy(arr, next, alloc, index, len - next);
        return alloc;
    }

    /// <summary>
    /// Returns a new array with the FIRST element matching <paramref name="cond"/> removed.
    /// A single-element array whose element matches yields null; when nothing matches the
    /// original array instance is returned.
    /// </summary>
    public static E[] RemovedOf<E>(this E[] arr, Predicate<E> cond)
    {
        if (arr == null) return null;
        int len = arr.Length;
        if (len == 1 && cond(arr[0])) return null;
        for (int i = 0; i < len; i++)
        {
            E e = arr[i];
            if (cond(e))
            {
                E[] alloc = new E[len - 1];
                Array.Copy(arr, 0, alloc, 0, i);
                int next = i + 1;
                Array.Copy(arr, next, alloc, i, len - next);
                return alloc;
            }
        }
        return arr;
    }

    /// <summary>
    /// Returns the first element matching <paramref name="cond"/>, or default when none matches.
    /// </summary>
    public static E First<E>(this E[] arr, Predicate<E> cond)
    {
        if (arr != null)
        {
            int len = arr.Length;
            for (int i = 0; i < len; i++)
            {
                E e = arr[i];
                if (cond(e)) return e;
            }
        }
        return default;
    }

    /// <summary>
    /// Returns the last element matching <paramref name="cond"/>, or default when none matches.
    /// </summary>
    public static E Last<E>(this E[] arr, Predicate<E> cond)
    {
        if (arr != null)
        {
            int len = arr.Length;
            // FIX: was "i > 0", which skipped index 0 so the first element could never match
            for (int i = len - 1; i >= 0; i--)
            {
                E e = arr[i];
                if (cond(e)) return e;
            }
        }
        return default;
    }

    /// <summary>
    /// Returns the index of the first element matching <paramref name="cond"/>, or -1.
    /// </summary>
    public static int IndexOf<E>(this E[] arr, Predicate<E> cond)
    {
        if (arr != null)
        {
            int len = arr.Length;
            for (int i = 0; i < len; i++)
            {
                E e = arr[i];
                if (cond(e)) return i;
            }
        }
        return -1;
    }

    /// <summary>
    /// True when the array is null or has no elements.
    /// </summary>
    public static bool IsNullOrEmpty<E>(this E[] arr)
    {
        return arr == null || arr.Length == 0;
    }

    /// <summary>
    /// True when <paramref name="v"/> (non-null) occurs in the array.
    /// </summary>
    public static bool Contains<V>(this V[] arr, V v)
    {
        if (v != null && arr != null)
        {
            for (int i = 0; i < arr.Length; i++)
            {
                // FIX: call Equals on v (known non-null here) so a null array element
                // no longer throws NullReferenceException
                if (v.Equals(arr[i])) return true;
            }
        }
        return false;
    }

    /// <summary>
    /// Element-wise equality of two arrays; two nulls are considered the same.
    /// </summary>
    public static bool IsSameAs<E>(this E[] arr, E[] another)
    {
        if (arr == null && another == null)
        {
            return true;
        }
        if (arr != null && another != null && arr.Length == another.Length)
        {
            int len = arr.Length;
            for (int i = 0; i < len; i++)
            {
                // FIX: static object.Equals tolerates null elements on either side
                if (!Equals(arr[i], another[i])) return false;
            }
            return true;
        }
        return false;
    }
}
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
namespace ASC.Mail.Net
{
#region usings
using System;
using System.Net;
using System.Security.Cryptography.X509Certificates;
#endregion
/// <summary>
/// Holds IP bind info.
/// </summary>
public class IPBindInfo
{
    #region Members

    private readonly string m_HostName = "";
    private readonly X509Certificate2 m_pCertificate;
    private readonly IPEndPoint m_pEndPoint;
    private readonly BindInfoProtocol m_Protocol = BindInfoProtocol.TCP;
    private readonly SslMode m_SslMode = SslMode.None;

    #endregion

    #region Constructor

    /// <summary>
    /// Default constructor.
    /// </summary>
    /// <param name="hostName">Host name.</param>
    /// <param name="protocol">Bind protocol.</param>
    /// <param name="ip">IP address to listen.</param>
    /// <param name="port">Port to listen.</param>
    /// <exception cref="ArgumentNullException">Is raised when <b>ip</b> is null.</exception>
    public IPBindInfo(string hostName, BindInfoProtocol protocol, IPAddress ip, int port)
    {
        if (ip == null)
        {
            throw new ArgumentNullException("ip");
        }
        m_HostName = hostName;
        m_Protocol = protocol;
        m_pEndPoint = new IPEndPoint(ip, port);
    }

    /// <summary>
    /// Default constructor. Binds over TCP.
    /// </summary>
    /// <param name="hostName">Host name.</param>
    /// <param name="ip">IP address to listen.</param>
    /// <param name="port">Port to listen.</param>
    /// <param name="sslMode">Specifies SSL mode.</param>
    /// <param name="sslCertificate">Certificate to use for SSL connections.</param>
    /// <exception cref="ArgumentNullException">Is raised when <b>ip</b> is null.</exception>
    public IPBindInfo(string hostName,
                      IPAddress ip,
                      int port,
                      SslMode sslMode,
                      X509Certificate2 sslCertificate)
        : this(hostName, BindInfoProtocol.TCP, ip, port, sslMode, sslCertificate) {}

    /// <summary>
    /// Default constructor.
    /// </summary>
    /// <param name="hostName">Host name.</param>
    /// <param name="protocol">Bind protocol.</param>
    /// <param name="ip">IP address to listen.</param>
    /// <param name="port">Port to listen.</param>
    /// <param name="sslMode">Specifies SSL mode.</param>
    /// <param name="sslCertificate">Certificate to use for SSL connections.</param>
    /// <exception cref="ArgumentNullException">Is raised when <b>ip</b> is null.</exception>
    /// <exception cref="ArgumentException">Is raised when any of the arguments has invalid value.</exception>
    public IPBindInfo(string hostName,
                      BindInfoProtocol protocol,
                      IPAddress ip,
                      int port,
                      SslMode sslMode,
                      X509Certificate2 sslCertificate)
    {
        if (ip == null)
        {
            throw new ArgumentNullException("ip");
        }
        // Validate before assigning any state so a half-constructed object is never observable.
        if ((sslMode == SslMode.SSL || sslMode == SslMode.TLS) && sslCertificate == null)
        {
            throw new ArgumentException("SSL requested, but argument 'sslCertificate' is not provided.");
        }
        m_HostName = hostName;
        m_Protocol = protocol;
        m_pEndPoint = new IPEndPoint(ip, port);
        m_SslMode = sslMode;
        m_pCertificate = sslCertificate;
    }

    #endregion

    #region Properties

    /// <summary>
    /// Gets SSL certificate.
    /// </summary>
    public X509Certificate2 Certificate
    {
        get { return m_pCertificate; }
    }

    /// <summary>
    /// Gets IP end point.
    /// </summary>
    public IPEndPoint EndPoint
    {
        get { return m_pEndPoint; }
    }

    /// <summary>
    /// Gets host name.
    /// </summary>
    public string HostName
    {
        get { return m_HostName; }
    }

    /// <summary>
    /// Gets IP address.
    /// </summary>
    public IPAddress IP
    {
        get { return m_pEndPoint.Address; }
    }

    /// <summary>
    /// Gets port.
    /// </summary>
    public int Port
    {
        get { return m_pEndPoint.Port; }
    }

    /// <summary>
    /// Gets protocol.
    /// </summary>
    public BindInfoProtocol Protocol
    {
        get { return m_Protocol; }
    }

    /// <summary>
    /// Gets SSL certificate.
    /// </summary>
    [Obsolete("Use property Certificate instead.")]
    public X509Certificate2 SSL_Certificate
    {
        get { return m_pCertificate; }
    }

    /// <summary>
    /// Gets SSL mode.
    /// </summary>
    public SslMode SslMode
    {
        get { return m_SslMode; }
    }

    /// <summary>
    /// Gets or sets user data. This is used internally don't use it !!!.
    /// </summary>
    public object Tag { get; set; }

    #endregion

    #region Methods

    /// <summary>
    /// Compares the current instance with another object of the same type.
    /// </summary>
    /// <param name="obj">An object to compare with this instance.</param>
    /// <returns>Returns true if two objects are equal.</returns>
    public override bool Equals(object obj)
    {
        if (obj == null)
        {
            return false;
        }
        if (!(obj is IPBindInfo))
        {
            return false;
        }
        IPBindInfo bInfo = (IPBindInfo) obj;
        if (bInfo.HostName != m_HostName)
        {
            return false;
        }
        if (bInfo.Protocol != m_Protocol)
        {
            return false;
        }
        if (!bInfo.EndPoint.Equals(m_pEndPoint))
        {
            return false;
        }
        if (bInfo.SslMode != m_SslMode)
        {
            return false;
        }
        if (!Equals(bInfo.Certificate, m_pCertificate))
        {
            return false;
        }
        return true;
    }

    /// <summary>
    /// Returns the hash code.
    /// </summary>
    /// <returns>Returns the hash code.</returns>
    public override int GetHashCode()
    {
        // FIX: previously returned base.GetHashCode(), which violated the Equals/GetHashCode
        // contract (instances considered equal by Equals could hash differently). Combine the
        // same fields Equals compares.
        unchecked
        {
            int hash = 17;
            hash = (hash * 31) + (m_HostName ?? "").GetHashCode();
            hash = (hash * 31) + m_Protocol.GetHashCode();
            hash = (hash * 31) + m_pEndPoint.GetHashCode();
            hash = (hash * 31) + m_SslMode.GetHashCode();
            hash = (hash * 31) + (m_pCertificate == null ? 0 : m_pCertificate.GetHashCode());
            return hash;
        }
    }

    #endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Globalization;
using Xunit;
public static class Int64Tests
{
[Fact]
public static void TestCtor()
{
    // A default-constructed Int64 is zero; assignment stores the given value.
    long value = new long();
    Assert.True(value == 0);

    value = 41;
    Assert.True(value == 41);
}
[Fact]
public static void TestMaxValue()
{
    // MaxValue is the largest signed 64-bit value.
    long maximum = long.MaxValue;
    Assert.True(maximum == (long)0x7FFFFFFFFFFFFFFF);
}
[Fact]
public static void TestMinValue()
{
    // MinValue is the smallest signed 64-bit value (sign bit set, all others clear).
    long minimum = long.MinValue;
    Assert.True(minimum == unchecked((long)0x8000000000000000));
}
[Fact]
public static void TestCompareToObject()
{
    long value = 234;
    IComparable comparable = value;

    // null sorts before everything; an equal value compares as zero
    Assert.Equal(1, comparable.CompareTo(null));
    Assert.Equal(0, comparable.CompareTo((long)234));

    // smaller arguments compare positive, larger arguments compare negative
    Assert.True(comparable.CompareTo(long.MinValue) > 0);
    Assert.True(comparable.CompareTo((long)0) > 0);
    Assert.True(comparable.CompareTo((long)(-123)) > 0);
    Assert.True(comparable.CompareTo((long)123) > 0);
    Assert.True(comparable.CompareTo((long)456) < 0);
    Assert.True(comparable.CompareTo(long.MaxValue) < 0);

    // a non-Int64 argument is rejected
    Assert.Throws<ArgumentException>(() => comparable.CompareTo("a"));
}
[Fact]
public static void TestCompareTo()
{
    long value = 234;

    // equal value compares as zero
    Assert.Equal(0, value.CompareTo((long)234));

    // smaller arguments compare positive, larger arguments compare negative
    Assert.True(value.CompareTo(long.MinValue) > 0);
    Assert.True(value.CompareTo((long)0) > 0);
    Assert.True(value.CompareTo((long)(-123)) > 0);
    Assert.True(value.CompareTo((long)123) > 0);
    Assert.True(value.CompareTo((long)456) < 0);
    Assert.True(value.CompareTo(long.MaxValue) < 0);
}
[Fact]
public static void TestEqualsObject()
{
    long value = 789;

    // boxed Int64 of the same value is equal
    object sameValue = (long)789;
    Assert.True(value.Equals(sameValue));

    // different values are not equal
    object negatedValue = (long)(-789);
    Assert.True(!value.Equals(negatedValue));

    object zeroValue = (long)0;
    Assert.True(!value.Equals(zeroValue));
}
[Fact]
public static void TestEquals()
{
    // Strongly-typed Equals(Int64): equal only for the identical value.
    long value = -911;
    Assert.True(value.Equals((long)(-911)));
    Assert.True(!value.Equals((long)911));
    Assert.True(!value.Equals((long)0));
}
[Fact]
public static void TestGetHashCode()
{
    long first = 123;
    long second = 654;

    // Hash codes derive from the value: non-zero here, and distinct for distinct values.
    Assert.NotEqual(0, first.GetHashCode());
    Assert.NotEqual(first.GetHashCode(), second.GetHashCode());
}
[Fact]
public static void TestToString()
{
    // Default ToString renders plain decimal, with a leading '-' for negatives.
    Int64 i1 = 6310;
    Assert.Equal("6310", i1.ToString());

    Int64 i2 = -8249;
    Assert.Equal("-8249", i2.ToString());

    // FIX: the expected literal goes first in xunit's Assert.Equal(expected, actual);
    // the two asserts below had the arguments reversed, producing confusing failure output.
    Assert.Equal("9223372036854775807", Int64.MaxValue.ToString());
    Assert.Equal("-9223372036854775808", Int64.MinValue.ToString());
}
[Fact]
public static void TestToStringFormatProvider()
{
    var numberFormat = new System.Globalization.NumberFormatInfo();

    long positive = 6310;
    Assert.Equal("6310", positive.ToString(numberFormat));

    long negative = -8249;
    Assert.Equal("-8249", negative.ToString(numberFormat));

    // Changing the negative pattern doesn't do anything without also passing in a format string
    numberFormat.NumberNegativePattern = 0;
    long patterned = -2468;
    Assert.Equal("-2468", patterned.ToString(numberFormat));
}
[Fact]
public static void TestToStringFormat()
{
    // "G"/"g" (general), "N" (number with group separators), and "x"/"X" (hex) formats.
    Int64 i1 = 6310;
    Assert.Equal("6310", i1.ToString("G"));

    Int64 i2 = -8249;
    Assert.Equal("-8249", i2.ToString("g"));

    Int64 i3 = -2468;
    Assert.Equal("-2,468.00", i3.ToString("N"));

    Int64 i4 = 0x248;
    Assert.Equal("248", i4.ToString("x"));

    // FIX: the expected literal goes first in xunit's Assert.Equal(expected, actual);
    // the two asserts below had the arguments reversed, producing confusing failure output.
    Assert.Equal("8000000000000000", Int64.MinValue.ToString("X"));
    Assert.Equal("7FFFFFFFFFFFFFFF", Int64.MaxValue.ToString("X"));
}
[Fact]
public static void TestToStringFormatFormatProvider()
{
    var numberFormat = new System.Globalization.NumberFormatInfo();

    long plain = 6310;
    Assert.Equal("6310", plain.ToString("G", numberFormat));

    long negative = -8249;
    Assert.Equal("-8249", negative.ToString("g", numberFormat));

    // With the "N" format and pattern 0, negatives render in parentheses using the
    // custom group separator; the (trashed) negative sign must not appear.
    numberFormat.NegativeSign = "xx"; // setting it to trash to make sure it doesn't show up
    numberFormat.NumberGroupSeparator = "*";
    numberFormat.NumberNegativePattern = 0;
    long grouped = -2468;
    Assert.Equal("(2*468.00)", grouped.ToString("N", numberFormat));
}
[Fact]
public static void TestParse()
{
    // Plain decimal strings, with and without a leading sign.
    Assert.Equal(123L, long.Parse("123"));
    Assert.Equal(-123L, long.Parse("-123"));
    //TODO: Negative tests once we get better exceptions
}
[Fact]
public static void TestParseNumberStyle()
{
    // HexNumber interprets the digits as base 16; AllowThousands accepts group separators.
    Assert.Equal(0x123L, long.Parse("123", NumberStyles.HexNumber));
    Assert.Equal(1000L, long.Parse("1,000", NumberStyles.AllowThousands));
    //TODO: Negative tests once we get better exceptions
}
[Fact]
public static void TestParseFormatProvider()
{
    // A fresh NumberFormatInfo behaves like the default for plain decimal parsing.
    var formatInfo = new NumberFormatInfo();
    Assert.Equal(123L, long.Parse("123", formatInfo));
    Assert.Equal(-123L, long.Parse("-123", formatInfo));
    //TODO: Negative tests once we get better exceptions
}
[Fact]
public static void TestParseNumberStyleFormatProvider()
{
    var provider = new NumberFormatInfo();

    // Hex parsing with an explicit provider.
    Assert.Equal(0x123, Int64.Parse("123", NumberStyles.HexNumber, provider));

    // Currency parsing honors the provider's currency symbol.
    provider.CurrencySymbol = "$";
    Assert.Equal(1000, Int64.Parse("$1,000", NumberStyles.Currency, provider));
    //TODO: Negative tests once we get better exception support
}
[Fact]
public static void TestTryParse()
{
    // Defaults NumberStyles.Integer = NumberStyles.AllowLeadingWhite | NumberStyles.AllowTrailingWhite | NumberStyles.AllowLeadingSign
    Int64 result;

    // Inputs accepted by the default style.
    Assert.True(Int64.TryParse("123", out result)); // Simple
    Assert.Equal(123, result);
    Assert.True(Int64.TryParse("-385", out result)); // LeadingSign
    Assert.Equal(-385, result);
    Assert.True(Int64.TryParse(" 678 ", out result)); // Leading/Trailing whitespace
    Assert.Equal(678, result);

    // Anything requiring a style beyond NumberStyles.Integer must be rejected.
    Assert.False(Int64.TryParse("$1000", out result)); // Currency
    Assert.False(Int64.TryParse("1,000", out result)); // Thousands
    Assert.False(Int64.TryParse("abc", out result)); // Hex digits
    Assert.False(Int64.TryParse("678.90", out result)); // Decimal
    Assert.False(Int64.TryParse("(135)", out result)); // Parentheses
    Assert.False(Int64.TryParse("1E23", out result)); // Exponent
}
[Fact]
public static void TestTryParseNumberStyleFormatProvider()
{
    Int64 result;
    var provider = new NumberFormatInfo();

    Assert.True(Int64.TryParse("123", NumberStyles.Any, provider, out result)); // Simple positive
    Assert.Equal(123, result);

    Assert.True(Int64.TryParse("123", NumberStyles.HexNumber, provider, out result)); // Simple hex
    Assert.Equal(0x123, result);

    // Currency parsing with a customized currency symbol.
    provider.CurrencySymbol = "$";
    Assert.True(Int64.TryParse("$1,000", NumberStyles.Currency, provider, out result)); // Currency/thousands positive
    Assert.Equal(1000, result);

    // Hex digits are rejected without HexNumber, accepted with it.
    Assert.False(Int64.TryParse("abc", NumberStyles.None, provider, out result));
    Assert.True(Int64.TryParse("abc", NumberStyles.HexNumber, provider, out result));
    Assert.Equal(0xabc, result);

    Assert.False(Int64.TryParse("678.90", NumberStyles.Integer, provider, out result)); // Decimal
    Assert.False(Int64.TryParse(" 678 ", NumberStyles.None, provider, out result)); // Whitespace rejected by None
    Assert.True(Int64.TryParse("(135)", NumberStyles.AllowParentheses, provider, out result)); // Parentheses positive
    Assert.Equal(-135, result);
}
}
| |
using Avalonia;
using Avalonia.Controls;
using Avalonia.Controls.ApplicationLifetimes;
using DynamicData;
using DynamicData.Binding;
using NBitcoin;
using Newtonsoft.Json.Linq;
using ReactiveUI;
using Splat;
using System;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Reactive;
using System.Reactive.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using WalletWasabi.Blockchain.Keys;
using WalletWasabi.Extensions;
using WalletWasabi.Gui.Controls.WalletExplorer;
using WalletWasabi.Gui.Helpers;
using WalletWasabi.Gui.Models.StatusBarStatuses;
using WalletWasabi.Gui.ViewModels;
using WalletWasabi.Helpers;
using WalletWasabi.Hwi;
using WalletWasabi.Hwi.Models;
using WalletWasabi.Logging;
namespace WalletWasabi.Gui.Tabs.WalletManager.HardwareWallets
{
/// <summary>
/// Category view model for the "Hardware Wallet" tab of the wallet manager.
/// Enumerates connected hardware wallets via HWI, loads (or first sets up / PIN-unlocks)
/// the selected device as a watch-only wallet, and supports importing a Coldcard
/// skeleton file from disk.
/// </summary>
public class ConnectHardwareWalletViewModel : CategoryViewModel
{
private ObservableCollection<HardwareWalletViewModel> _wallets;
private HardwareWalletViewModel _selectedWallet;
// True while a wallet load is in progress (drives button text and command gating).
private bool _isBusy;
// True while the hardware device itself is busy (e.g. during device setup).
private bool _isHardwareBusy;
private string _loadButtonText;
private bool _isHwWalletSearchTextVisible;
public ConnectHardwareWalletViewModel(WalletManagerViewModel owner) : base("Hardware Wallet")
{
Global = Locator.Current.GetService<Global>();
WalletManager = Global.WalletManager;
Owner = owner;
_wallets = new ObservableCollection<HardwareWalletViewModel>();
IsHwWalletSearchTextVisible = false;
// When the selection is cleared, fall back to the first enumerated wallet
// and refresh the load-button caption.
this.WhenAnyValue(x => x.SelectedWallet)
.Where(x => x is null)
.ObserveOn(RxApp.MainThreadScheduler)
.Subscribe(_ =>
{
SelectedWallet = Wallets.FirstOrDefault();
SetLoadButtonText();
});
// Auto-select the first wallet as soon as the collection becomes non-empty.
Wallets
.ToObservableChangeSet()
.ToCollection()
.Where(items => items.Any() && SelectedWallet is null)
.Select(items => items.First())
.ObserveOn(RxApp.MainThreadScheduler)
.Subscribe(x => SelectedWallet = x);
// Keep the load-button caption in sync with both busy flags.
this.WhenAnyValue(x => x.IsBusy, x => x.IsHardwareBusy)
.ObserveOn(RxApp.MainThreadScheduler)
.Subscribe(_ => SetLoadButtonText());
// Enabled only while a wallet is selected and no load is already running.
LoadCommand = ReactiveCommand.CreateFromTask(LoadWalletAsync, this.WhenAnyValue(x => x.SelectedWallet, x => x.IsBusy).Select(x => x.Item1 is { } && !x.Item2));
ImportColdcardCommand = ReactiveCommand.CreateFromTask(async () =>
{
var ofd = new OpenFileDialog
{
AllowMultiple = false,
Title = "Import Coldcard"
};
// Start browsing where the Coldcard SD card is most likely mounted on this OS.
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
{
ofd.Directory = Path.Combine("/media", Environment.UserName);
}
else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
{
ofd.Directory = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
}
var window = ((IClassicDesktopStyleApplicationLifetime)Application.Current.ApplicationLifetime).MainWindow;
var selected = await ofd.ShowAsync(window, fallBack: true);
if (selected is { } && selected.Any())
{
var path = selected.First();
// The skeleton file is JSON carrying the xpub and the master fingerprint.
var jsonString = await File.ReadAllTextAsync(path);
var json = JObject.Parse(jsonString);
var xpubString = json["ExtPubKey"].ToString();
var mfpString = json["MasterFingerprint"].ToString();
// https://github.com/zkSNACKs/WalletWasabi/pull/1663#issuecomment-508073066
// Coldcard 2.1.0 improperly implemented Wasabi skeleton fingerprint at first, so we must reverse byte order.
// The solution was to add a ColdCardFirmwareVersion json field from 2.1.1 and correct the one generated by 2.1.0.
var coldCardVersionString = json["ColdCardFirmwareVersion"]?.ToString();
var reverseByteOrder = false;
if (coldCardVersionString is null)
{
reverseByteOrder = true;
}
else
{
Version coldCardVersion = new Version(coldCardVersionString);
if (coldCardVersion == new Version("2.1.0")) // Should never happen though.
{
reverseByteOrder = true;
}
}
var bytes = ByteHelpers.FromHex(Guard.NotNullOrEmptyOrWhitespace(nameof(mfpString), mfpString, trim: true));
HDFingerprint mfp = reverseByteOrder ? new HDFingerprint(bytes.Reverse().ToArray()) : new HDFingerprint(bytes);
ExtPubKey extPubKey = NBitcoinHelpers.BetterParseExtPubKey(xpubString);
Logger.LogInfo("Creating a new wallet file.");
var walletName = WalletManager.WalletDirectories.GetNextWalletName("Coldcard");
var walletFullPath = WalletManager.WalletDirectories.GetWalletFilePaths(walletName).walletFilePath;
var keyManager = KeyManager.CreateNewHardwareWalletWatchOnly(mfp, extPubKey, walletFullPath);
WalletManager.AddWallet(keyManager);
owner.SelectLoadWallet(keyManager);
}
});
EnumerateHardwareWalletsCommand = ReactiveCommand.CreateFromTask(async () => await EnumerateIfHardwareWalletsAsync());
OpenBrowserCommand = ReactiveCommand.CreateFromTask<string>(IoHelpers.OpenBrowserAsync);
// Funnel every command failure into a single log + user-notification pipeline.
Observable
.Merge(LoadCommand.ThrownExceptions)
.Merge(OpenBrowserCommand.ThrownExceptions)
.Merge(ImportColdcardCommand.ThrownExceptions)
.Merge(EnumerateHardwareWalletsCommand.ThrownExceptions)
.ObserveOn(RxApp.TaskpoolScheduler)
.Subscribe(ex =>
{
Logger.LogError(ex);
NotificationHelpers.Error(ex.ToUserFriendlyString());
});
}
// Whether the "searching for hardware wallets" hint is currently shown.
public bool IsHwWalletSearchTextVisible
{
get => _isHwWalletSearchTextVisible;
set => this.RaiseAndSetIfChanged(ref _isHwWalletSearchTextVisible, value);
}
// Hardware wallets found by the last enumeration.
public ObservableCollection<HardwareWalletViewModel> Wallets
{
get => _wallets;
set => this.RaiseAndSetIfChanged(ref _wallets, value);
}
public HardwareWalletViewModel SelectedWallet
{
get => _selectedWallet;
set => this.RaiseAndSetIfChanged(ref _selectedWallet, value);
}
// Caption of the load button; recomputed by SetLoadButtonText().
public string LoadButtonText
{
get => _loadButtonText;
set => this.RaiseAndSetIfChanged(ref _loadButtonText, value);
}
public bool IsBusy
{
get => _isBusy;
set => this.RaiseAndSetIfChanged(ref _isBusy, value);
}
public bool IsHardwareBusy
{
get => _isHardwareBusy;
set => this.RaiseAndSetIfChanged(ref _isHardwareBusy, value);
}
public ReactiveCommand<Unit, Unit> LoadCommand { get; }
public ReactiveCommand<Unit, Unit> ImportColdcardCommand { get; set; }
public ReactiveCommand<Unit, Unit> EnumerateHardwareWalletsCommand { get; set; }
public ReactiveCommand<string, Unit> OpenBrowserCommand { get; }
public string UDevRulesLink => "https://github.com/bitcoin-core/HWI/tree/master/hwilib/udev";
public bool IsLinux => RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
private Global Global { get; }
private Wallets.WalletManager WalletManager { get; }
private WalletManagerViewModel Owner { get; }
/// <summary>
/// Recomputes the load-button caption from the busy flags and the selected device state.
/// </summary>
public void SetLoadButtonText()
{
var text = "Load Wallet";
if (IsHardwareBusy)
{
text = "Waiting for Hardware Wallet...";
}
else if (IsBusy)
{
text = "Loading...";
}
else
{
// If the hardware wallet was not initialized, then make the button say Setup, not Load.
// If pin is needed, then make the button say Send Pin instead.
if (SelectedWallet?.HardwareWalletInfo is { })
{
if (!SelectedWallet.HardwareWalletInfo.IsInitialized())
{
text = "Setup Wallet";
}
if (SelectedWallet.HardwareWalletInfo.NeedsPinSent is true)
{
text = "Send PIN";
}
}
}
LoadButtonText = text;
}
/// <summary>
/// Resolves the selected hardware wallet into a <see cref="KeyManager"/>:
/// runs device setup when uninitialized, unlocks via PIN when needed, fetches the xpub,
/// and creates a new watch-only wallet file when the device was never used here before.
/// Returns null (after notifying the user) on any recoverable failure.
/// </summary>
public async Task<KeyManager?> LoadKeyManagerAsync()
{
try
{
var selectedWallet = SelectedWallet;
if (selectedWallet is null)
{
NotificationHelpers.Warning("No wallet selected.");
return null;
}
var walletName = selectedWallet.WalletName;
var client = new HwiClient(Global.Network);
if (selectedWallet.HardwareWalletInfo is null)
{
NotificationHelpers.Warning("No hardware wallet detected.");
return null;
}
if (!selectedWallet.HardwareWalletInfo.IsInitialized())
{
try
{
IsHardwareBusy = true;
MainWindowViewModel.Instance.StatusBar.TryAddStatus(StatusType.SettingUpHardwareWallet);
// Setup may take a while for users to write down stuff.
using var ctsSetup = new CancellationTokenSource(TimeSpan.FromMinutes(21));
// Trezor T doesn't require interactive mode.
if (selectedWallet.HardwareWalletInfo.Model is HardwareWalletModels.Trezor_T
or HardwareWalletModels.Trezor_T_Simulator)
{
await client.SetupAsync(selectedWallet.HardwareWalletInfo.Model, selectedWallet.HardwareWalletInfo.Path, false, ctsSetup.Token);
}
else
{
await client.SetupAsync(selectedWallet.HardwareWalletInfo.Model, selectedWallet.HardwareWalletInfo.Path, true, ctsSetup.Token);
}
MainWindowViewModel.Instance.StatusBar.TryAddStatus(StatusType.ConnectingToHardwareWallet);
await EnumerateIfHardwareWalletsAsync();
}
finally
{
IsHardwareBusy = false;
MainWindowViewModel.Instance.StatusBar.TryRemoveStatus(StatusType.SettingUpHardwareWallet, StatusType.ConnectingToHardwareWallet);
}
// Setup finished; retry the whole load with the now-initialized device.
return await LoadKeyManagerAsync();
}
else if (selectedWallet.HardwareWalletInfo.NeedsPinSent is true)
{
await PinPadViewModel.UnlockAsync(selectedWallet.HardwareWalletInfo);
// Re-enumerate and find the same physical device again by model and path.
var p = selectedWallet.HardwareWalletInfo.Path;
var t = selectedWallet.HardwareWalletInfo.Model;
await EnumerateIfHardwareWalletsAsync();
selectedWallet = Wallets.FirstOrDefault(x => x.HardwareWalletInfo.Model == t && x.HardwareWalletInfo.Path == p);
if (selectedWallet is null)
{
NotificationHelpers.Warning("Could not find the hardware wallet. Did you disconnect it?");
return null;
}
else
{
SelectedWallet = selectedWallet;
}
if (!selectedWallet.HardwareWalletInfo.IsInitialized())
{
NotificationHelpers.Warning("Hardware wallet is not initialized.");
return null;
}
if (selectedWallet.HardwareWalletInfo.NeedsPinSent is true)
{
NotificationHelpers.Warning("Hardware wallet needs a PIN to be sent.");
return null;
}
}
ExtPubKey extPubKey;
using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(3));
try
{
MainWindowViewModel.Instance.StatusBar.TryAddStatus(StatusType.AcquiringXpubFromHardwareWallet);
extPubKey = await client.GetXpubAsync(selectedWallet.HardwareWalletInfo.Model, selectedWallet.HardwareWalletInfo.Path, KeyManager.DefaultAccountKeyPath, cts.Token);
}
finally
{
MainWindowViewModel.Instance.StatusBar.TryRemoveStatus(StatusType.AcquiringXpubFromHardwareWallet);
}
// Reuse an existing wallet whose xpub matches; otherwise create a fresh wallet file.
walletName = WalletManager.GetWallets(true).FirstOrDefault(w => w.KeyManager.ExtPubKey == extPubKey)?.WalletName;
if (walletName is null)
{
var prefix = selectedWallet.HardwareWalletInfo?.Model.FriendlyName() ?? HardwareWalletModels.Unknown.FriendlyName();
walletName = WalletManager.WalletDirectories.GetNextWalletName(prefix);
Logger.LogInfo($"Hardware wallet was not used previously on this computer. New wallet '{walletName}' was created.");
var path = WalletManager.WalletDirectories.GetWalletFilePaths(walletName).walletFilePath;
// Get xpub should had triggered passphrase request, so the fingerprint should be available here.
if (!selectedWallet.HardwareWalletInfo.Fingerprint.HasValue)
{
await EnumerateIfHardwareWalletsAsync();
selectedWallet = Wallets.FirstOrDefault(x => x.HardwareWalletInfo.Model == selectedWallet.HardwareWalletInfo.Model && x.HardwareWalletInfo.Path == selectedWallet.HardwareWalletInfo.Path);
}
if (!selectedWallet.HardwareWalletInfo.Fingerprint.HasValue)
{
throw new InvalidOperationException("Hardware wallet did not provide fingerprint.");
}
WalletManager.AddWallet(KeyManager.CreateNewHardwareWalletWatchOnly(selectedWallet.HardwareWalletInfo.Fingerprint.Value, extPubKey, path));
}
KeyManager keyManager = WalletManager.GetWalletByName(walletName).KeyManager;
return keyManager;
}
catch (Exception ex)
{
// Refresh the device list so the UI reflects reality after a failure.
try
{
await EnumerateIfHardwareWalletsAsync();
}
catch (Exception ex2)
{
Logger.LogError(ex2);
}
// Initialization failed.
Logger.LogError(ex);
NotificationHelpers.Error(ex.ToUserFriendlyString());
return null;
}
finally
{
SetLoadButtonText();
}
}
/// <summary>
/// Loads the selected hardware wallet and starts it; closes the owner tab on success.
/// Errors are logged and surfaced as notifications, never thrown to the caller.
/// </summary>
public async Task LoadWalletAsync()
{
try
{
IsBusy = true;
var keyManager = await LoadKeyManagerAsync();
if (keyManager is null)
{
return;
}
try
{
var wallet = await Task.Run(async () => await WalletManager.StartWalletAsync(keyManager));
// Successfully initialized.
Owner.OnClose();
}
catch (OperationCanceledException ex)
{
Logger.LogTrace(ex);
}
catch (Exception ex)
{
Logger.LogError(ex);
NotificationHelpers.Error(ex.ToUserFriendlyString());
}
}
finally
{
IsBusy = false;
}
}
/// <summary>
/// Queries HWI for connected devices (3-minute timeout) and replaces the
/// <see cref="Wallets"/> collection with the result. Shows the search hint while running.
/// </summary>
protected async Task EnumerateIfHardwareWalletsAsync()
{
using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(3));
IsHwWalletSearchTextVisible = true;
try
{
var client = new HwiClient(Global.Network);
var devices = await client.EnumerateAsync(cts.Token);
Wallets.Clear();
foreach (var dev in devices)
{
var walletEntry = new HardwareWalletViewModel(dev);
Wallets.Add(walletEntry);
}
}
finally
{
IsHwWalletSearchTextVisible = false;
}
}
}
}
| |
//Sony Computer Entertainment Confidential
using System;
using System.ComponentModel.Composition;
using System.Drawing;
using System.Threading;
using System.Windows.Forms;
using Sce.Atf.Controls;
using Timer = System.Threading.Timer;
namespace Sce.Atf.Applications
{
/// <summary>
/// Service that provides status UI</summary>
[Export(typeof(IStatusService))]
[Export(typeof(IInitializable))]
[Export(typeof(StatusService))]
[PartCreationPolicy(CreationPolicy.Shared)]
public class StatusService : IStatusService, IInitializable
{
/// <summary>
/// Constructor</summary>
/// <param name="mainForm">Application Main Form</param>
[ImportingConstructor]
public StatusService(Form mainForm)
{
m_mainForm = mainForm;
m_statusStrip = new StatusStrip();
m_statusStrip.Name = "StatusBar";
m_statusStrip.Dock = DockStyle.Bottom;
// main status text
m_mainPanel = new ToolStripStatusLabel();
m_mainPanel.Width = 256;
m_mainPanel.AutoSize = true;
m_mainPanel.Spring = true;
m_mainPanel.TextAlign = ContentAlignment.MiddleLeft;
m_statusStrip.Items.Add(m_mainPanel);
// The timer starts disabled (Timeout.Infinite); BeginProgress enables it.
m_progressTimer = new Timer(
new TimerCallback(progressCallback), this, Timeout.Infinite, ProgressInterval);
m_progressDialog = new ThreadSafeProgressDialog(false, true);
m_progressDialog.Cancelled += new EventHandler(progressDialog_Cancelled);
}
#region IInitializable Members
void IInitializable.Initialize()
{
// first choice is a ToolStripContainer to hold the status strip; otherwise,
// add as first child of main Form.
foreach (Control control in m_mainForm.Controls)
{
m_toolStripContainer = control as ToolStripContainer;
if (m_toolStripContainer != null)
{
m_toolStripContainer.BottomToolStripPanel.Controls.Add(m_statusStrip);
break;
}
}
if (m_toolStripContainer == null)
{
m_mainForm.Controls.Add(m_statusStrip);
}
ShowStatus(Localizer.Localize("Ready", "Application is ready"));
}
#endregion
#region IStatusService members
/// <summary>
/// Shows a status message in the main panel</summary>
/// <param name="status">Status message</param>
public void ShowStatus(string status)
{
m_mainPanel.Text = status;
}
/// <summary>
/// Adds a new text status panel
/// </summary>
/// <param name="width">Width of panel</param>
/// <returns>Text panel</returns>
public IStatusText AddText(int width)
{
TextPanel textPanel = new TextPanel(width);
// Unique name so multiple status panels can coexist on the strip.
textPanel.Name = "$Status" + (s_controlCount++).ToString();
m_statusStrip.Items.Add(textPanel);
return textPanel;
}
/// <summary>
/// Adds a new image status panel
/// </summary>
/// <returns>Image status panel</returns>
public IStatusImage AddImage()
{
ImagePanel imagePanel = new ImagePanel();
imagePanel.Name = "$Status" + (s_controlCount++).ToString();
m_statusStrip.Items.Add(imagePanel);
return imagePanel;
}
/// <summary>
/// Begins progress display where client manually updates progress.
/// The cancel button appears and is enabled.</summary>
/// <param name="message">Message to display with progress meter</param>
public void BeginProgress(string message)
{
BeginProgress(message, true);
}
/// <summary>
/// Begins progress display where progress is updated automatically.
/// The cancel button appears and is enabled.</summary>
/// <param name="message">Message to display with progress meter</param>
/// <param name="expectedDuration">Expected length of operation, in milliseconds</param>
public void BeginProgress(string message, int expectedDuration)
{
BeginProgress(message, expectedDuration, true);
}
/// <summary>
/// Begins progress display where client manually updates progress</summary>
/// <param name="message">Message to display with progress meter</param>
/// <param name="canCancel">Should the cancel button appear and be enabled?</param>
public void BeginProgress(string message, bool canCancel)
{
// Zero auto-increment: progress only advances via ShowProgress calls.
m_progress = 0.0;
m_autoIncrement = 0.0;
m_progressDialog.IsCanceled = false;
m_progressDialog.Show();
m_progressDialog.CanCancel = canCancel;
m_progressDialog.Description = message;
m_progressTimer.Change(0, ProgressInterval);
}
/// <summary>
/// Begins progress display where progress is updated automatically.</summary>
/// <param name="message">Message to display with progress meter</param>
/// <param name="expectedDuration">Expected length of operation, in milliseconds</param>
/// <param name="canCancel">Should the cancel button appear and be enabled?</param>
public void BeginProgress(string message, int expectedDuration, bool canCancel)
{
m_progress = 0.0;
// Advance automatically so the bar completes after roughly expectedDuration ms.
m_autoIncrement = (double)ProgressInterval / (double)expectedDuration;
m_progressDialog.IsCanceled = false;
m_progressDialog.Show();
m_progressDialog.CanCancel = canCancel;
m_progressDialog.Description = message;
m_progressTimer.Change(0, ProgressInterval);
}
/// <summary>
/// Shows progress</summary>
/// <param name="progress">Progress, in the interval [0..1]</param>
public void ShowProgress(double progress)
{
m_progress = progress;
}
/// <summary>
/// Ends progress display</summary>
public void EndProgress()
{
// Disable the timer and hide the dialog.
m_progress = 0.0;
m_progressTimer.Change(Timeout.Infinite, ProgressInterval);
m_progressDialog.Close();
}
/// <summary>
/// Event that is raised when progress dialog is cancelled</summary>
public event EventHandler ProgressCancelled;
#endregion
// Runs on a thread-pool thread every ProgressInterval ms while progress is active.
// NOTE(review): locking on 'this' is visible to external code; a private lock
// object would be safer — confirm no external callers lock this instance before changing.
private void progressCallback(object state)
{
lock (this)
{
if (m_progress < 1.0)
{
// Clamp to [0, 100] before pushing to the dialog.
int percent = (int)(100 * m_progress);
if (percent < 0)
percent = 0;
else if (percent > 100)
percent = 100;
m_progressDialog.Percent = percent;
m_progress += m_autoIncrement;
}
else
{
// Progress reached 100%: tear down the dialog and stop the timer.
EndProgress();
}
}
}
private void progressDialog_Cancelled(object sender, EventArgs e)
{
Event.Raise(ProgressCancelled, this, e);
}
#region Private Classes
// Text status panel shown on the status strip; fixed width, bordered.
private class TextPanel : ToolStripStatusLabel, IStatusText
{
public TextPanel(int width)
{
DisplayStyle = ToolStripItemDisplayStyle.Text;
Width = width;
AutoSize = false;
TextAlign = ContentAlignment.MiddleLeft;
BorderSides = ToolStripStatusLabelBorderSides.All;
}
}
// Image-only status panel shown on the status strip.
private class ImagePanel : ToolStripStatusLabel, IStatusImage
{
public ImagePanel()
{
DisplayStyle = ToolStripItemDisplayStyle.Image;
AutoSize = false;
}
}
#endregion
private Form m_mainForm;
private ToolStripContainer m_toolStripContainer;
// NOTE(review): this initializer is redundant — the constructor always assigns
// a new StatusStrip, so this instance is discarded immediately.
private StatusStrip m_statusStrip = new StatusStrip();
private ToolStripStatusLabel m_mainPanel;
// Current progress in [0..1]; shared with the timer callback under the lock.
private double m_progress;
private double m_autoIncrement;
private Timer m_progressTimer;
private ThreadSafeProgressDialog m_progressDialog;
private const int ProgressInterval = 250; // Timer resolution, in ms
private static int s_controlCount;
}
}
| |
//
// Copyright (c) 2004-2017 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
#if !SILVERLIGHT && !__ANDROID__ && !__IOS__
// Unfortunately, Xamarin Android and Xamarin iOS don't support mutexes (see https://github.com/mono/mono/blob/3a9e18e5405b5772be88bfc45739d6a350560111/mcs/class/corlib/System.Threading/Mutex.cs#L167) so the BaseFileAppender class now throws an exception in the constructor.
#define SupportsMutex
#endif
namespace NLog.Internal.FileAppenders
{
using System;
using System.IO;
using System.Threading;
using NLog.Common;
/// <summary>
/// Maintains a collection of file appenders usually associated with file targets.
/// </summary>
internal sealed class FileAppenderCache : IDisposable
{
// Appender slots kept packed at the front of the array in MRU order; first null ends the list.
private readonly BaseFileAppender[] _appenders;
// Fires AutoClosingTimerCallback; also kicked by the archive watcher to force a cleanup pass.
private Timer _autoClosingTimer;
#if !SILVERLIGHT && !__IOS__ && !__ANDROID__
// Pattern whose directory is watched to detect external archiving of the log file.
private string _archiveFilePatternToWatch = null;
private readonly MultiFileWatcher _externalFileArchivingWatcher = new MultiFileWatcher(NotifyFilters.DirectoryName | NotifyFilters.FileName);
// Set by the watcher callback when the active log file appears to have been archived.
private bool _logFileWasArchived = false;
#endif
/// <summary>
/// An "empty" instance of the <see cref="FileAppenderCache"/> class with zero size and empty list of appenders.
/// </summary>
public static readonly FileAppenderCache Empty = new FileAppenderCache();
/// <summary>
/// Initializes a new "empty" instance of the <see cref="FileAppenderCache"/> class with zero size and empty
/// list of appenders.
/// </summary>
private FileAppenderCache() : this(0, null, null) { }
/// <summary>
/// Initializes a new instance of the <see cref="FileAppenderCache"/> class.
/// </summary>
/// <remarks>
/// The size of the list should be positive. No validations are performed during initialisation as it is an
/// internal class.
/// </remarks>
/// <param name="size">Total number of appenders allowed in list.</param>
/// <param name="appenderFactory">Factory used to create each appender.</param>
/// <param name="createFileParams">Parameters used for creating a file.</param>
public FileAppenderCache(int size, IFileAppenderFactory appenderFactory, ICreateFileParameters createFileParams)
{
Size = size;
Factory = appenderFactory;
CreateFileParameters = createFileParams;
_appenders = new BaseFileAppender[Size];
// Timer is created disabled; callers arm it when monitoring is needed.
_autoClosingTimer = new Timer(AutoClosingTimerCallback, null, Timeout.Infinite, Timeout.Infinite);
#if !SILVERLIGHT && !__IOS__ && !__ANDROID__
_externalFileArchivingWatcher.FileChanged += ExternalFileArchivingWatcher_OnFileChanged;
#endif
}
#if !SILVERLIGHT && !__IOS__ && !__ANDROID__
// Runs on a FileSystemWatcher thread; only sets a flag and nudges the timer,
// never touches the appender list directly.
private void ExternalFileArchivingWatcher_OnFileChanged(object sender, FileSystemEventArgs e)
{
// Already flagged, nobody listening, or already disposed: nothing to do.
if (_logFileWasArchived || CheckCloseAppenders == null || _autoClosingTimer == null)
{
return;
}
if (FileAppenderFolderChanged(e.FullPath))
{
if ((e.ChangeType & (WatcherChangeTypes.Deleted | WatcherChangeTypes.Renamed)) != 0)
_logFileWasArchived = true; // File Appender file deleted/renamed
}
else
{
if ((e.ChangeType & WatcherChangeTypes.Created) == WatcherChangeTypes.Created)
_logFileWasArchived = true; // Something was created in the archive folder
}
// Schedule a near-immediate cleanup pass (timer re-checked: may be disposed concurrently).
if (_logFileWasArchived && _autoClosingTimer != null)
{
_autoClosingTimer.Change(50, Timeout.Infinite);
}
}
/// <summary>
/// Determines whether <paramref name="fullPath"/> lies outside the configured
/// archive folder, i.e. the change concerns an active file-appender folder.
/// </summary>
/// <param name="fullPath">Full path reported by the file watcher; may be null or empty.</param>
/// <returns>
/// True when the path is outside the archive folder (or no archive folder can be
/// determined); false for a null/empty path.
/// </returns>
private bool FileAppenderFolderChanged(string fullPath)
{
    if (string.IsNullOrEmpty(fullPath))
        return false;

    // No archive pattern configured: treat every change as an appender-folder change.
    if (string.IsNullOrEmpty(_archiveFilePatternToWatch))
        return true;

    string archiveFolderPath = Path.GetDirectoryName(_archiveFilePatternToWatch);
    if (string.IsNullOrEmpty(archiveFolderPath))
        return true;

    string changedFolderPath = Path.GetDirectoryName(fullPath);
    return !string.Equals(archiveFolderPath, changedFolderPath, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// The archive file path pattern that is used to detect when archiving occurs.
/// Assigning a new pattern stops watching the previous archive folder and resets
/// the archived flag.
/// </summary>
public string ArchiveFilePatternToWatch
{
    get { return _archiveFilePatternToWatch; }
    set
    {
        if (_archiveFilePatternToWatch != value)
        {
            if (!string.IsNullOrEmpty(_archiveFilePatternToWatch))
            {
                string directoryPath = Path.GetDirectoryName(_archiveFilePatternToWatch);
                // BUGFIX: the condition was inverted (StopWatching was only called when
                // directoryPath was null/empty). Stop watching the OLD archive folder
                // only when the pattern actually resolves to a directory.
                if (!string.IsNullOrEmpty(directoryPath))
                    _externalFileArchivingWatcher.StopWatching(directoryPath);
            }
            _archiveFilePatternToWatch = value;
            _logFileWasArchived = false;
        }
    }
}
/// <summary>
/// Invalidates appenders for all files that were archived.
/// </summary>
public void InvalidateAppendersForInvalidFiles()
{
    // Nothing was archived since the last pass; keep the appenders open.
    if (!_logFileWasArchived)
        return;

    _logFileWasArchived = false;
    CloseAppenders("Cleanup Archive");
}
#endif
/// <summary>
/// Timer callback that notifies subscribers to check whether any appenders should be closed.
/// </summary>
/// <param name="state">Unused timer state.</param>
private void AutoClosingTimerCallback(object state)
{
    // Copy the delegate to guard against unsubscription between the check and the call.
    EventHandler handler = CheckCloseAppenders;
    if (handler == null)
    {
        return;
    }
    handler(this, EventArgs.Empty);
}
/// <summary>
/// Gets the parameters which will be used for creating a file.
/// </summary>
public ICreateFileParameters CreateFileParameters { get; private set; }
/// <summary>
/// Gets the file appender factory used by all the appenders in this list.
/// </summary>
public IFileAppenderFactory Factory { get; private set; }
/// <summary>
/// Gets the number of appenders which the list can hold.
/// </summary>
public int Size { get; private set; }
/// <summary>
/// Subscribe to background monitoring of active file appenders
/// (raised by the auto-closing timer so subscribers can close stale appenders).
/// </summary>
public event EventHandler CheckCloseAppenders;
/// <summary>
/// It allocates the first slot in the list when the file name does not already in the list and clean up any
/// unused slots.
/// </summary>
/// <param name="fileName">File name associated with a single appender.</param>
/// <returns>The allocated appender.</returns>
/// <exception cref="NullReferenceException">
/// Thrown when <see cref="M:AllocateAppender"/> is called on an <c>Empty</c><see cref="FileAppenderCache"/> instance.
/// </exception>
public BaseFileAppender AllocateAppender(string fileName)
{
//
// BaseFileAppender.Write is the most expensive operation here
// so the in-memory data structure doesn't have to be
// very sophisticated. It's a table-based LRU, where we move
// the used element to become the first one.
// The number of items is usually very limited so the
// performance should be equivalent to the one of the hashtable.
//
BaseFileAppender appenderToWrite = null;
int freeSpot = _appenders.Length - 1;
for (int i = 0; i < _appenders.Length; ++i)
{
// Use empty slot in recent appender list, if there is one.
if (_appenders[i] == null)
{
freeSpot = i;
break;
}
if (string.Equals(_appenders[i].FileName, fileName, StringComparison.OrdinalIgnoreCase))
{
// found it, move it to the first place on the list
// (MRU)
BaseFileAppender app = _appenders[i];
if (i > 0)
{
// file open has a chance of failure
// if it fails in the constructor, we won't modify any data structures
for (int j = i; j > 0; --j)
{
_appenders[j] = _appenders[j - 1];
}
_appenders[0] = app;
}
appenderToWrite = app;
break;
}
}
if (appenderToWrite == null)
{
try
{
InternalLogger.Debug("Creating file appender: {0}", fileName);
BaseFileAppender newAppender = Factory.Open(fileName, CreateFileParameters);
// Cache full: evict the appender in the chosen slot before inserting.
if (_appenders[freeSpot] != null)
{
CloseAppender(_appenders[freeSpot], "Stale", false);
_appenders[freeSpot] = null;
}
// Shift entries down one slot and insert the new appender at the MRU position.
for (int j = freeSpot; j > 0; --j)
{
_appenders[j] = _appenders[j - 1];
}
_appenders[0] = newAppender;
appenderToWrite = newAppender;
if (CheckCloseAppenders != null)
{
#if !SILVERLIGHT && !__IOS__ && !__ANDROID__
if (freeSpot == 0)
_logFileWasArchived = false;
if (!string.IsNullOrEmpty(_archiveFilePatternToWatch))
{
string directoryPath = Path.GetDirectoryName(_archiveFilePatternToWatch);
if (!Directory.Exists(directoryPath))
Directory.CreateDirectory(directoryPath);
_externalFileArchivingWatcher.Watch(_archiveFilePatternToWatch); // Always monitor the archive-folder
}
_externalFileArchivingWatcher.Watch(appenderToWrite.FileName); // Monitor the active file-appender
#endif
}
}
catch (Exception ex)
{
InternalLogger.Warn(ex, "Failed to create file appender: {0}", fileName);
throw;
}
}
return appenderToWrite;
}
/// <summary>
/// Close all the allocated appenders.
/// </summary>
/// <param name="reason">Reason passed through to each close operation.</param>
public void CloseAppenders(string reason)
{
    if (_appenders == null)
        return;

    // Appenders are packed at the front of the array; the first null ends the list.
    for (int slot = 0; slot < _appenders.Length; ++slot)
    {
        BaseFileAppender appender = _appenders[slot];
        if (appender == null)
            break;

        CloseAppender(appender, reason, true);
        _appenders[slot] = null;
    }
}
/// <summary>
/// Close the allocated appenders initialised before the supplied time.
/// </summary>
/// <param name="expireTime">The time which prior the appenders considered expired</param>
public void CloseAppenders(DateTime expireTime)
{
#if !SILVERLIGHT && !__IOS__ && !__ANDROID__
// An external archive event invalidates every appender regardless of age.
if (_logFileWasArchived)
{
_logFileWasArchived = false;
CloseAppenders("Cleanup Timer");
}
else
#endif
{
// DateTime.MinValue acts as the "no expiry" sentinel.
if (expireTime != DateTime.MinValue)
{
for (int i = 0; i < this._appenders.Length; ++i)
{
if (this._appenders[i] == null)
{
break;
}
// Close the first appender opened before expireTime together with
// every appender after it in the (packed) list.
if (this._appenders[i].OpenTimeUtc < expireTime)
{
for (int j = i; j < this._appenders.Length; ++j)
{
if (this._appenders[j] == null)
{
break;
}
CloseAppender(this._appenders[j], "Expired", i == 0);
this._appenders[j] = null;
}
break;
}
}
}
}
}
/// <summary>
/// Flush all the allocated appenders.
/// </summary>
public void FlushAppenders()
{
    // Appenders are packed; the first empty slot terminates the list.
    for (int slot = 0; slot < _appenders.Length; ++slot)
    {
        BaseFileAppender appender = _appenders[slot];
        if (appender == null)
        {
            break;
        }
        appender.Flush();
    }
}
/// <summary>
/// Looks up the allocated appender for <paramref name="fileName"/>, if any.
/// </summary>
/// <param name="fileName">File name to search for (compared case-insensitively).</param>
/// <returns>The matching appender, or null when the file has no open appender.</returns>
private BaseFileAppender GetAppender(string fileName)
{
    foreach (BaseFileAppender appender in _appenders)
    {
        // Packed list: the first null terminates the scan.
        if (appender == null)
            break;

        if (string.Equals(appender.FileName, fileName, StringComparison.OrdinalIgnoreCase))
            return appender;
    }

    return null;
}
#if SupportsMutex
/// <summary>
/// Returns the archive mutex of the appender currently open for
/// <paramref name="fileName"/>, or null when there is no such appender
/// or it does not support mutexes.
/// </summary>
public Mutex GetArchiveMutex(string fileName)
{
    BaseMutexFileAppender mutexAppender = GetAppender(fileName) as BaseMutexFileAppender;
    if (mutexAppender == null)
    {
        return null;
    }
    return mutexAppender.ArchiveMutex;
}
#endif
/// <summary>
/// Gets the creation time for the file tracked by an active appender, optionally
/// falling back to the file system when no appender matches.
/// </summary>
/// <param name="filePath">File to look up.</param>
/// <param name="fallback">When true, read the timestamp from disk if no appender result is available.</param>
/// <returns>Creation time (appender's CreationTimeSource when cached), or null when unknown.</returns>
public DateTime? GetFileCreationTimeSource(string filePath, bool fallback)
{
    var appender = GetAppender(filePath);
    DateTime? result = null;
    if (appender != null)
    {
        try
        {
            result = FileCharacteristicsHelper.ValidateFileCreationTime(appender, (f) => f.GetFileCreationTimeUtc(), (f) => f.CreationTimeUtc, (f) => f.GetFileLastWriteTimeUtc());
            if (result.HasValue)
            {
                // Check if cached value is still valid, and update if not (Will automatically update CreationTimeSource)
                DateTime cachedTimeUtc = appender.CreationTimeUtc;
                if (result.Value != cachedTimeUtc)
                {
                    appender.CreationTimeUtc = result.Value;
                }
                return appender.CreationTimeSource;
            }
        }
        catch (Exception ex)
        {
            // The appender is unusable once its file access fails; drop it and rethrow.
            InternalLogger.Error(ex, "Failed to get file creation time for file '{0}'.", appender.FileName);
            InvalidateAppender(appender.FileName);
            throw;
        }
    }
    if (result == null && fallback)
    {
        // No appender (or it produced no value): fall back to the file system.
        var fileInfo = new FileInfo(filePath);
        if (fileInfo.Exists)
        {
            result = FileCharacteristicsHelper.ValidateFileCreationTime(fileInfo, (f) => f.GetCreationTimeUtc(), (f) => f.GetLastWriteTimeUtc()).Value;
            // Convert the raw system time into the configured time source's frame.
            return Time.TimeSource.Current.FromSystemTime(result.Value);
        }
    }
    return result;
}
/// <summary>
/// Gets the last-write time (UTC) of the file tracked by an active appender,
/// optionally falling back to the file system when no appender matches.
/// </summary>
/// <param name="filePath">File to look up.</param>
/// <param name="fallback">When true, read the timestamp from disk if no appender result is available.</param>
/// <returns>Last write time in UTC, or null when unknown.</returns>
public DateTime? GetFileLastWriteTimeUtc(string filePath, bool fallback)
{
    DateTime? lastWriteTime = null;
    var appender = GetAppender(filePath);
    if (appender != null)
    {
        try
        {
            lastWriteTime = appender.GetFileLastWriteTimeUtc();
        }
        catch (Exception ex)
        {
            // A failing appender is unusable; drop it and let the caller see the error.
            InternalLogger.Error(ex, "Failed to get last write time for file '{0}'.", appender.FileName);
            InvalidateAppender(appender.FileName);
            throw;
        }
    }

    if (lastWriteTime == null && fallback)
    {
        var fileInfo = new FileInfo(filePath);
        if (fileInfo.Exists)
            return fileInfo.GetLastWriteTimeUtc();
    }
    return lastWriteTime;
}
/// <summary>
/// Gets the length of the file tracked by an active appender, optionally
/// falling back to the file system when no appender matches.
/// </summary>
/// <param name="filePath">File to look up.</param>
/// <param name="fallback">When true, read the length from disk if no appender result is available.</param>
/// <returns>File length in bytes, or null when unknown.</returns>
public long? GetFileLength(string filePath, bool fallback)
{
    long? length = null;
    var appender = GetAppender(filePath);
    if (appender != null)
    {
        try
        {
            length = appender.GetFileLength();
        }
        catch (Exception ex)
        {
            // A failing appender is unusable; drop it and let the caller see the error.
            InternalLogger.Error(ex, "Failed to get length for file '{0}'.", appender.FileName);
            InvalidateAppender(appender.FileName);
            throw;
        }
    }

    if (length == null && fallback)
    {
        var fileInfo = new FileInfo(filePath);
        if (fileInfo.Exists)
            return fileInfo.Length;
    }
    return length;
}
/// <summary>
/// Closes the specified appender and removes it from the list.
/// </summary>
/// <param name="filePath">File name of the appender to be closed.</param>
public void InvalidateAppender(string filePath)
{
    for (int i = 0; i < _appenders.Length; ++i)
    {
        var appender = _appenders[i];
        if (appender == null)
            break;  // active appenders are contiguous at the front

        if (!string.Equals(appender.FileName, filePath, StringComparison.OrdinalIgnoreCase))
            continue;

        // Compact the array so active appenders remain contiguous.
        // Array.Copy handles the overlapping source/destination ranges.
        Array.Copy(_appenders, i + 1, _appenders, i, _appenders.Length - i - 1);
        _appenders[_appenders.Length - 1] = null;

        // After compaction, a null first slot means no appenders remain.
        CloseAppender(appender, "Invalidate", _appenders[0] == null);
        break;
    }
}
// Closes a single appender. When lastAppender is true, also shuts down the
// background machinery (close-timer, external archive watcher) that only makes
// sense while at least one appender is open. Note the #if spans the if/else so
// that on platforms without the file watcher the else branch compiles empty.
private void CloseAppender(BaseFileAppender appender, string reason, bool lastAppender)
{
    InternalLogger.Debug("FileAppender Closing {0} - {1}", reason, appender.FileName);
    if (lastAppender)
    {
        // No active appenders, deactivate background tasks
        _autoClosingTimer.Change(Timeout.Infinite, Timeout.Infinite);
#if !SILVERLIGHT && !__IOS__ && !__ANDROID__
        _externalFileArchivingWatcher.StopWatching();
        _logFileWasArchived = false;
    }
    else
    {
        // Other appenders stay active: stop watching only this appender's file.
        _externalFileArchivingWatcher.StopWatching(appender.FileName);
#endif
    }
    appender.Close();
}
// Releases the watcher and the auto-close timer. Unhooks the callback first so
// no further timer ticks can observe partially-disposed state.
public void Dispose()
{
    CheckCloseAppenders = null;
#if !SILVERLIGHT && !__IOS__ && !__ANDROID__
    _externalFileArchivingWatcher.Dispose();
    _logFileWasArchived = false;
#endif
    var currentTimer = _autoClosingTimer;
    if (currentTimer != null)
    {
        _autoClosingTimer = null;
        // TimeSpan.Zero: presumably "do not block waiting for an in-flight
        // callback" — NOTE(review): confirm against WaitForDispose's contract.
        currentTimer.WaitForDispose(TimeSpan.Zero);
    }
}
}
}
| |
//------------------------------------------------------------------------------
// Microsoft Avalon
// Copyright (c) Microsoft Corporation, 2001
//
// File: KeySpline.cs
//------------------------------------------------------------------------------
// Allow suppression of certain presharp messages
#pragma warning disable 1634, 1691
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using SR=MS.Internal.PresentationCore.SR;
using SRID=MS.Internal.PresentationCore.SRID;
namespace System.Windows.Media.Animation
{
/// <summary>
/// This class is used by a SplineKeyFrame to define a key frame animation.
/// </summary>
/// <remarks>
/// Example of a SplineKeyFrame class: <see cref="System.Windows.Media.Animation.SplineDoubleKeyFrame">SplineDoubleKeyFrame</see>.
/// Example of a key frame animation class: <see cref="System.Windows.Media.Animation.DoubleAnimationUsingKeyFrames">DoubleAnimationUsingKeyFrames</see>.
/// </remarks>
[TypeConverter(typeof(KeySplineConverter))]
[Localizability(LocalizationCategory.None, Readability=Readability.Unreadable)]
public class KeySpline : Freezable, IFormattable
{
    #region Constructors

    /// <summary>
    /// Creates a new KeySpline.
    /// </summary>
    /// <remarks>
    /// Default values for control points are (0,0) and (1,1) which will
    /// have no effect on the progress of an animation or key frame.
    /// </remarks>
    public KeySpline()
        : base()
    {
        // Identity spline. _isDirty stays false here: GetSplineProgress skips
        // Build() and, with _isSpecified false, returns the input unchanged,
        // which is the correct result for these control points.
        _controlPoint1 = new Point(0.0, 0.0);
        _controlPoint2 = new Point(1.0, 1.0);
    }

    /// <summary>
    /// Double constructor
    /// </summary>
    /// <param name="x1">x value for the 0,0 endpoint's control point</param>
    /// <param name="y1">y value for the 0,0 endpoint's control point</param>
    /// <param name="x2">x value for the 1,1 endpoint's control point</param>
    /// <param name="y2">y value for the 1,1 endpoint's control point</param>
    public KeySpline(double x1, double y1, double x2, double y2)
        : this(new Point(x1, y1), new Point(x2, y2))
    {
    }

    /// <summary>
    /// Point constructor
    /// </summary>
    /// <param name="controlPoint1">the control point for the 0,0 endpoint</param>
    /// <param name="controlPoint2">the control point for the 1,1 endpoint</param>
    /// <exception cref="ArgumentException">
    /// Thrown when a control point's X coordinate lies outside [0,1].
    /// </exception>
    public KeySpline(Point controlPoint1, Point controlPoint2)
        : base()
    {
        if (!IsValidControlPoint(controlPoint1))
        {
            throw new ArgumentException(SR.Get(
                SRID.Animation_KeySpline_InvalidValue,
                "controlPoint1",
                controlPoint1));
        }
        if (!IsValidControlPoint(controlPoint2))
        {
            throw new ArgumentException(SR.Get(
                SRID.Animation_KeySpline_InvalidValue,
                "controlPoint2",
                controlPoint2));
        }
        _controlPoint1 = controlPoint1;
        _controlPoint2 = controlPoint2;
        // Coefficients will be computed lazily on first GetSplineProgress call.
        _isDirty = true;
    }

    #endregion Constructors

    #region Freezable

    /// <summary>
    /// Implementation of <see cref="System.Windows.Freezable.CreateInstanceCore">Freezable.CreateInstanceCore</see>.
    /// </summary>
    /// <returns>The new Freezable.</returns>
    protected override Freezable CreateInstanceCore()
    {
        return new KeySpline();
    }

    /// <summary>
    /// Implementation of <see cref="System.Windows.Freezable.CloneCore(System.Windows.Freezable)">Freezable.CloneCore</see>.
    /// </summary>
    /// <param name="sourceFreezable">The KeySpline to copy.</param>
    protected override void CloneCore(Freezable sourceFreezable)
    {
        KeySpline sourceKeySpline = (KeySpline) sourceFreezable;
        base.CloneCore(sourceFreezable);
        CloneCommon(sourceKeySpline);
    }

    /// <summary>
    /// Implementation of <see cref="System.Windows.Freezable.CloneCurrentValueCore(System.Windows.Freezable)">Freezable.CloneCurrentValueCore</see>.
    /// </summary>
    /// <param name="sourceFreezable">The KeySpline to copy.</param>
    protected override void CloneCurrentValueCore(Freezable sourceFreezable)
    {
        KeySpline sourceKeySpline = (KeySpline) sourceFreezable;
        base.CloneCurrentValueCore(sourceFreezable);
        CloneCommon(sourceKeySpline);
    }

    /// <summary>
    /// Implementation of <see cref="System.Windows.Freezable.GetAsFrozenCore(System.Windows.Freezable)">Freezable.GetAsFrozenCore</see>.
    /// </summary>
    /// <param name="sourceFreezable">The KeySpline to copy.</param>
    protected override void GetAsFrozenCore(Freezable sourceFreezable)
    {
        KeySpline sourceKeySpline = (KeySpline) sourceFreezable;
        base.GetAsFrozenCore(sourceFreezable);
        CloneCommon(sourceKeySpline);
    }

    /// <summary>
    /// Implementation of <see cref="System.Windows.Freezable.GetCurrentValueAsFrozenCore(System.Windows.Freezable)">Freezable.GetCurrentValueAsFrozenCore</see>.
    /// </summary>
    /// <param name="sourceFreezable">The KeySpline to copy.</param>
    protected override void GetCurrentValueAsFrozenCore(Freezable sourceFreezable)
    {
        KeySpline sourceKeySpline = (KeySpline)sourceFreezable;
        base.GetCurrentValueAsFrozenCore(sourceFreezable);
        CloneCommon(sourceKeySpline);
    }

    /// <summary>
    /// Implementation of <see cref="System.Windows.Freezable.OnChanged">Freezable.OnChanged</see>.
    /// </summary>
    protected override void OnChanged()
    {
        // Any property change invalidates the cached Bezier coefficients.
        _isDirty = true;
        base.OnChanged();
    }

    #endregion

    #region Public

    /// <summary>
    /// The control point for the (0,0) endpoint of the spline.
    /// Its X coordinate must lie in [0,1]; Y is not constrained by the setter.
    /// </summary>
    public Point ControlPoint1
    {
        get
        {
            ReadPreamble();
            return _controlPoint1;
        }
        set
        {
            WritePreamble();
            if (value != _controlPoint1)
            {
                if (!IsValidControlPoint(value))
                {
                    throw new ArgumentException(SR.Get(
                        SRID.Animation_KeySpline_InvalidValue,
                        "ControlPoint1",
                        value));
                }
                _controlPoint1 = value;
                WritePostscript();
            }
        }
    }

    /// <summary>
    /// The control point for the (1,1) endpoint of the spline.
    /// Its X coordinate must lie in [0,1]; Y is not constrained by the setter.
    /// </summary>
    public Point ControlPoint2
    {
        get
        {
            ReadPreamble();
            return _controlPoint2;
        }
        set
        {
            WritePreamble();
            if (value != _controlPoint2)
            {
                if (!IsValidControlPoint(value))
                {
                    throw new ArgumentException(SR.Get(
                        SRID.Animation_KeySpline_InvalidValue,
                        "ControlPoint2",
                        value));
                }
                _controlPoint2 = value;
                WritePostscript();
            }
        }
    }

    /// <summary>
    /// Calculates spline progress from a linear progress.
    /// </summary>
    /// <param name="linearProgress">the linear progress</param>
    /// <returns>the spline progress</returns>
    public double GetSplineProgress(double linearProgress)
    {
        ReadPreamble();
        if (_isDirty)
        {
            Build();
        }
        if (!_isSpecified)
        {
            // Identity spline (control points (0,0)/(1,1)): no remapping needed.
            return linearProgress;
        }
        else
        {
            // Solve X(t) = linearProgress for t, then evaluate Y(t).
            SetParameterFromX(linearProgress);
            return GetBezierValue(_By, _Cy, _parameter);
        }
    }

    #endregion

    #region Private

    // Only the X coordinate is constrained to [0,1]; Y values outside the
    // interval are accepted (the check below deliberately ignores point.Y).
    private bool IsValidControlPoint(Point point)
    {
        return point.X >= 0.0
            && point.X <= 1.0;
    }

    /// <summary>
    /// Compute cached coefficients.
    /// </summary>
    private void Build()
    {
        Debug.Assert(_isDirty);
        if ( _controlPoint1 == new Point(0, 0)
            && _controlPoint2 == new Point(1, 1))
        {
            // This KeySpline would have no effect on the progress.
            _isSpecified = false;
        }
        else
        {
            _isSpecified = true;
            _parameter = 0;
            // X coefficients
            _Bx = 3 * _controlPoint1.X;
            _Cx = 3 * _controlPoint2.X;
            _Cx_Bx = 2 * (_Cx - _Bx);
            _three_Cx = 3 - _Cx;
            // Y coefficients
            _By = 3 * _controlPoint1.Y;
            _Cy = 3 * _controlPoint2.Y;
        }
        _isDirty = false;
    }

    /// <summary>
    /// Get an X or Y value with the Bezier formula.
    /// </summary>
    /// <param name="b">the second Bezier coefficient</param>
    /// <param name="c">the third Bezier coefficient</param>
    /// <param name="t">the parameter value to evaluate at</param>
    /// <returns>the value of the Bezier function at the given parameter</returns>
    static private double GetBezierValue(double b, double c, double t)
    {
        double s = 1.0 - t;
        double t2 = t * t;
        return b * t * s * s + c * t2 * s + t2 * t;
    }

    /// <summary>
    /// Get X and dX/dt at a given parameter
    /// </summary>
    /// <param name="t">the parameter value to evaluate at</param>
    /// <param name="x">the value of x there</param>
    /// <param name="dx">the value of dx/dt there</param>
    private void GetXAndDx(double t, out double x, out double dx)
    {
        Debug.Assert(_isSpecified);
        double s = 1.0 - t;
        double t2 = t * t;
        double s2 = s * s;
        x = _Bx * t * s2 + _Cx * t2 * s + t2 * t;
        dx = _Bx * s2 + _Cx_Bx * s * t + _three_Cx * t2;
    }

    /// <summary>
    /// Compute the parameter value that corresponds to a given X value, using a modified
    /// clamped Newton-Raphson algorithm to solve the equation X(t) - time = 0. We make
    /// use of some known properties of this particular function:
    /// * We are only interested in solutions in the interval [0,1]
    /// * X(t) is increasing, so we can assume that if X(t) > time t > solution. We use
    ///   that to clamp down the search interval with every probe.
    /// * The derivative of X and Y are between 0 and 3.
    /// </summary>
    /// <param name="time">the time, scaled to fit in [0,1]</param>
    private void SetParameterFromX(double time)
    {
        Debug.Assert(_isSpecified);
        // Dynamic search interval to clamp with
        double bottom = 0;
        double top = 1;
        if (time == 0)
        {
            _parameter = 0;
        }
        else if (time == 1)
        {
            _parameter = 1;
        }
        else
        {
            // Loop while improving the guess. Note: _parameter retains its value
            // from the previous call, which serves as the starting guess.
            while (top - bottom > fuzz)
            {
                double x, dx, absdx;
                // Get x and dx/dt at the current parameter
                GetXAndDx(_parameter, out x, out dx);
                absdx = Math.Abs(dx);
                // Clamp down the search interval, relying on the monotonicity of X(t)
                if (x > time)
                {
                    top = _parameter; // because parameter > solution
                }
                else
                {
                    bottom = _parameter; // because parameter < solution
                }
                // The desired accuracy is in ultimately in y, not in x, so the
                // accuracy needs to be multiplied by dx/dy = (dx/dt) / (dy/dt).
                // But dy/dt <=3, so we omit that
                if (Math.Abs(x - time) < accuracy * absdx)
                {
                    break; // We're there
                }
                if (absdx > fuzz)
                {
                    // Nonzero derivative, use Newton-Raphson to obtain the next guess
                    double next = _parameter - (x - time) / dx;
                    // If next guess is out of the search interval then clamp it in
                    if (next >= top)
                    {
                        _parameter = (_parameter + top) / 2;
                    }
                    else if (next <= bottom)
                    {
                        _parameter = (_parameter + bottom) / 2;
                    }
                    else
                    {
                        // Next guess is inside the search interval, accept it
                        _parameter = next;
                    }
                }
                else // Zero derivative, halve the search interval
                {
                    _parameter = (bottom + top) / 2;
                }
            }
        }
    }

    /// <summary>
    /// Copy the common fields for the various Clone methods
    /// </summary>
    /// <param name="sourceKeySpline">The KeySpline to copy.</param>
    private void CloneCommon(KeySpline sourceKeySpline)
    {
        _controlPoint1 = sourceKeySpline._controlPoint1;
        _controlPoint2 = sourceKeySpline._controlPoint2;
        // Force a rebuild of the cached coefficients on next use.
        _isDirty = true;
    }

    #endregion

    #region IFormattable

    /// <summary>
    /// Creates a string representation of this KeySpline based on the current culture.
    /// </summary>
    /// <returns>
    /// A string representation of this KeySpline.
    /// </returns>
    public override string ToString()
    {
        ReadPreamble();
        return InternalConvertToString(null, null);
    }

    /// <summary>
    /// Creates a string representation of this KeySpline based on the IFormatProvider
    /// passed in.
    /// </summary>
    /// <param name="formatProvider">
    /// The format provider to use.  If the provider is null, the CurrentCulture is used.
    /// </param>
    /// <returns>
    /// A string representation of this KeySpline.
    /// </returns>
    public string ToString(IFormatProvider formatProvider)
    {
        ReadPreamble();
        return InternalConvertToString(null, formatProvider);
    }

    /// <summary>
    /// Creates a string representation of this KeySpline based on the IFormatProvider
    /// passed in.
    /// </summary>
    /// <param name="format">
    /// The format string to use.
    /// </param>
    /// <param name="formatProvider">
    /// The format provider to use.  If the provider is null, the CurrentCulture is used.
    /// </param>
    /// <returns>
    /// A string representation of this KeySpline.
    /// </returns>
    string IFormattable.ToString(string format, IFormatProvider formatProvider)
    {
        ReadPreamble();
        return InternalConvertToString(format, formatProvider);
    }

    /// <summary>
    /// Creates a string representation of this KeySpline based on the IFormatProvider
    /// passed in.
    /// </summary>
    /// <param name="format">
    /// The format string to use.  Note: not applied to the points themselves; the
    /// output is "point1{separator}point2" using the points' default formatting.
    /// </param>
    /// <param name="formatProvider">
    /// The format provider to use.  If the provider is null, the CurrentCulture is used.
    /// </param>
    /// <returns>
    /// A string representation of this KeySpline.
    /// </returns>
    internal string InternalConvertToString(string format, IFormatProvider formatProvider)
    {
        // Helper to get the numeric list separator for a given culture.
        char separator = MS.Internal.TokenizerHelper.GetNumericListSeparator(formatProvider);
        return String.Format(
            formatProvider,
            "{1}{0}{2}",
            separator,
            _controlPoint1,
            _controlPoint2);
    }

    #endregion

    #region Data

    //
    // Control points
    private Point _controlPoint1;
    private Point _controlPoint2;
    // True when the spline is not the identity and the coefficients below apply.
    private bool _isSpecified;
    // True when the cached coefficients must be recomputed by Build().
    private bool _isDirty;
    // The parameter that corresponds to the most recent time
    private double _parameter;
    // Cached coefficients
    private double _Bx;        // 3*points[0].X
    private double _Cx;        // 3*points[1].X
    private double _Cx_Bx;     // 2*(Cx - Bx)
    private double _three_Cx;  // 3 - Cx
    private double _By;        // 3*points[0].Y
    private double _Cy;        // 3*points[1].Y
    // constants
    private const double accuracy = .001;   // 1/3 the desired accuracy in X
    private const double fuzz = .000001;    // computational zero

    #endregion
}
}
| |
namespace Epi.Windows.Analysis.Dialogs
{
/// <summary>
/// Dialog to build Keys used in RELATE and MERGE
/// </summary>
partial class BuildKeyDialog
{
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.IContainer components = null;

    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing && (components != null))
        {
            components.Dispose();
        }
        base.Dispose(disposing);
    }

    #region Windows Form Designer generated code

    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(BuildKeyDialog));
        this.lblKeyComponent = new System.Windows.Forms.Label();
        this.txtKeyComponent = new System.Windows.Forms.TextBox();
        this.lblAvailableVariables = new System.Windows.Forms.Label();
        this.lbxCurrentTableFields = new System.Windows.Forms.ListBox();
        this.lbxRelatedTableFields = new System.Windows.Forms.ListBox();
        this.btnOK = new System.Windows.Forms.Button();
        this.btnCancel = new System.Windows.Forms.Button();
        this.label1 = new System.Windows.Forms.Label();
        this.label2 = new System.Windows.Forms.Label();
        this.AddCommandButton = new System.Windows.Forms.Button();
        this.AddLabel = new System.Windows.Forms.Label();
        this.lblBuildKeyInstructions = new System.Windows.Forms.Label();
        this.btnHelp = new System.Windows.Forms.Button();
        this.btnClear = new System.Windows.Forms.Button();
        this.SuspendLayout();
        //
        // baseImageList
        //
        this.baseImageList.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("baseImageList.ImageStream")));
        this.baseImageList.Images.SetKeyName(0, "");
        this.baseImageList.Images.SetKeyName(1, "");
        this.baseImageList.Images.SetKeyName(2, "");
        this.baseImageList.Images.SetKeyName(3, "");
        this.baseImageList.Images.SetKeyName(4, "");
        this.baseImageList.Images.SetKeyName(5, "");
        this.baseImageList.Images.SetKeyName(6, "");
        this.baseImageList.Images.SetKeyName(7, "");
        this.baseImageList.Images.SetKeyName(8, "");
        this.baseImageList.Images.SetKeyName(9, "");
        this.baseImageList.Images.SetKeyName(10, "");
        this.baseImageList.Images.SetKeyName(11, "");
        this.baseImageList.Images.SetKeyName(12, "");
        this.baseImageList.Images.SetKeyName(13, "");
        this.baseImageList.Images.SetKeyName(14, "");
        this.baseImageList.Images.SetKeyName(15, "");
        this.baseImageList.Images.SetKeyName(16, "");
        this.baseImageList.Images.SetKeyName(17, "");
        this.baseImageList.Images.SetKeyName(18, "");
        this.baseImageList.Images.SetKeyName(19, "");
        this.baseImageList.Images.SetKeyName(20, "");
        this.baseImageList.Images.SetKeyName(21, "");
        this.baseImageList.Images.SetKeyName(22, "");
        this.baseImageList.Images.SetKeyName(23, "");
        this.baseImageList.Images.SetKeyName(24, "");
        this.baseImageList.Images.SetKeyName(25, "");
        this.baseImageList.Images.SetKeyName(26, "");
        this.baseImageList.Images.SetKeyName(27, "");
        this.baseImageList.Images.SetKeyName(28, "");
        this.baseImageList.Images.SetKeyName(29, "");
        this.baseImageList.Images.SetKeyName(30, "");
        this.baseImageList.Images.SetKeyName(31, "");
        this.baseImageList.Images.SetKeyName(32, "");
        this.baseImageList.Images.SetKeyName(33, "");
        this.baseImageList.Images.SetKeyName(34, "");
        this.baseImageList.Images.SetKeyName(35, "");
        this.baseImageList.Images.SetKeyName(36, "");
        this.baseImageList.Images.SetKeyName(37, "");
        this.baseImageList.Images.SetKeyName(38, "");
        this.baseImageList.Images.SetKeyName(39, "");
        this.baseImageList.Images.SetKeyName(40, "");
        this.baseImageList.Images.SetKeyName(41, "");
        this.baseImageList.Images.SetKeyName(42, "");
        this.baseImageList.Images.SetKeyName(43, "");
        this.baseImageList.Images.SetKeyName(44, "");
        this.baseImageList.Images.SetKeyName(45, "");
        this.baseImageList.Images.SetKeyName(46, "");
        this.baseImageList.Images.SetKeyName(47, "");
        this.baseImageList.Images.SetKeyName(48, "");
        this.baseImageList.Images.SetKeyName(49, "");
        this.baseImageList.Images.SetKeyName(50, "");
        this.baseImageList.Images.SetKeyName(51, "");
        this.baseImageList.Images.SetKeyName(52, "");
        this.baseImageList.Images.SetKeyName(53, "");
        this.baseImageList.Images.SetKeyName(54, "");
        this.baseImageList.Images.SetKeyName(55, "");
        this.baseImageList.Images.SetKeyName(56, "");
        this.baseImageList.Images.SetKeyName(57, "");
        this.baseImageList.Images.SetKeyName(58, "");
        this.baseImageList.Images.SetKeyName(59, "");
        this.baseImageList.Images.SetKeyName(60, "");
        this.baseImageList.Images.SetKeyName(61, "");
        this.baseImageList.Images.SetKeyName(62, "");
        this.baseImageList.Images.SetKeyName(63, "");
        this.baseImageList.Images.SetKeyName(64, "");
        this.baseImageList.Images.SetKeyName(65, "");
        this.baseImageList.Images.SetKeyName(66, "");
        this.baseImageList.Images.SetKeyName(67, "");
        this.baseImageList.Images.SetKeyName(68, "");
        this.baseImageList.Images.SetKeyName(69, "");
        this.baseImageList.Images.SetKeyName(70, "");
        this.baseImageList.Images.SetKeyName(71, "");
        this.baseImageList.Images.SetKeyName(72, "");
        this.baseImageList.Images.SetKeyName(73, "");
        this.baseImageList.Images.SetKeyName(74, "");
        this.baseImageList.Images.SetKeyName(75, "");
        this.baseImageList.Images.SetKeyName(76, "");
        this.baseImageList.Images.SetKeyName(77, "");
        this.baseImageList.Images.SetKeyName(78, "");
        this.baseImageList.Images.SetKeyName(79, "");
        //
        // lblKeyComponent
        //
        this.lblKeyComponent.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
        this.lblKeyComponent.Location = new System.Drawing.Point(15, 167);
        this.lblKeyComponent.Name = "lblKeyComponent";
        this.lblKeyComponent.Size = new System.Drawing.Size(500, 13);
        this.lblKeyComponent.TabIndex = 0;
        this.lblKeyComponent.Text = "Key Component";
        //
        // txtKeyComponent
        //
        this.txtKeyComponent.Location = new System.Drawing.Point(18, 183);
        this.txtKeyComponent.Multiline = true;
        this.txtKeyComponent.Name = "txtKeyComponent";
        this.txtKeyComponent.ReadOnly = true;
        this.txtKeyComponent.Size = new System.Drawing.Size(676, 106);
        this.txtKeyComponent.TabIndex = 1;
        this.txtKeyComponent.TabStop = false;
        //
        // lblAvailableVariables
        //
        this.lblAvailableVariables.Location = new System.Drawing.Point(15, 292);
        this.lblAvailableVariables.Name = "lblAvailableVariables";
        this.lblAvailableVariables.Size = new System.Drawing.Size(338, 13);
        this.lblAvailableVariables.TabIndex = 1;
        this.lblAvailableVariables.Text = "Current Data Variables";
        //
        // lbxCurrentTableFields
        //
        this.lbxCurrentTableFields.FormattingEnabled = true;
        this.lbxCurrentTableFields.HorizontalScrollbar = true;
        this.lbxCurrentTableFields.Location = new System.Drawing.Point(18, 308);
        this.lbxCurrentTableFields.Name = "lbxCurrentTableFields";
        this.lbxCurrentTableFields.Size = new System.Drawing.Size(335, 95);
        this.lbxCurrentTableFields.TabIndex = 2;
        this.lbxCurrentTableFields.SelectedIndexChanged += new System.EventHandler(this.lbxCurrentTableFields_SelectedIndexChanged);
        //
        // lbxRelatedTableFields
        //
        this.lbxRelatedTableFields.FormattingEnabled = true;
        this.lbxRelatedTableFields.HorizontalScrollbar = true;
        this.lbxRelatedTableFields.Location = new System.Drawing.Point(359, 308);
        this.lbxRelatedTableFields.Name = "lbxRelatedTableFields";
        this.lbxRelatedTableFields.Size = new System.Drawing.Size(335, 95);
        this.lbxRelatedTableFields.TabIndex = 5;
        this.lbxRelatedTableFields.SelectedIndexChanged += new System.EventHandler(this.lbxRelatedTableFields_SelectedIndexChanged);
        //
        // btnOK
        //
        this.btnOK.Location = new System.Drawing.Point(144, 438);
        this.btnOK.Name = "btnOK";
        this.btnOK.Size = new System.Drawing.Size(133, 23);
        this.btnOK.TabIndex = 9;
        this.btnOK.Text = "OK";
        this.btnOK.UseVisualStyleBackColor = true;
        //
        // btnCancel
        //
        this.btnCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
        this.btnCancel.Location = new System.Drawing.Point(283, 438);
        this.btnCancel.Name = "btnCancel";
        this.btnCancel.Size = new System.Drawing.Size(133, 23);
        this.btnCancel.TabIndex = 8;
        this.btnCancel.Text = "Cancel";
        this.btnCancel.UseVisualStyleBackColor = true;
        //
        // label1
        //
        this.label1.Location = new System.Drawing.Point(356, 292);
        this.label1.Name = "label1";
        this.label1.Size = new System.Drawing.Size(338, 13);
        this.label1.TabIndex = 4;
        this.label1.Text = "Related Table Variables";
        //
        // label2
        //
        this.label2.Location = new System.Drawing.Point(257, 314);
        this.label2.Name = "label2";
        this.label2.Size = new System.Drawing.Size(20, 17);
        this.label2.TabIndex = 3;
        this.label2.Text = " = ";
        //
        // AddCommandButton
        //
        this.AddCommandButton.Location = new System.Drawing.Point(561, 409);
        this.AddCommandButton.Name = "AddCommandButton";
        this.AddCommandButton.Size = new System.Drawing.Size(133, 23);
        this.AddCommandButton.TabIndex = 7;
        this.AddCommandButton.Text = "Add Relation";
        this.AddCommandButton.UseVisualStyleBackColor = true;
        this.AddCommandButton.Click += new System.EventHandler(this.AddCommandButton_Click);
        //
        // AddLabel
        //
        this.AddLabel.AutoSize = true;
        this.AddLabel.Location = new System.Drawing.Point(18, 409);
        this.AddLabel.Name = "AddLabel";
        this.AddLabel.Size = new System.Drawing.Size(35, 13);
        this.AddLabel.TabIndex = 6;
        this.AddLabel.Text = "label3";
        //
        // lblBuildKeyInstructions
        //
        this.lblBuildKeyInstructions.Location = new System.Drawing.Point(15, 9);
        this.lblBuildKeyInstructions.Name = "lblBuildKeyInstructions";
        this.lblBuildKeyInstructions.Size = new System.Drawing.Size(679, 143);
        this.lblBuildKeyInstructions.TabIndex = 10;
        //
        // btnHelp
        //
        this.btnHelp.FlatStyle = System.Windows.Forms.FlatStyle.System;
        this.btnHelp.ImeMode = System.Windows.Forms.ImeMode.NoControl;
        this.btnHelp.Location = new System.Drawing.Point(561, 438);
        this.btnHelp.Name = "btnHelp";
        this.btnHelp.Size = new System.Drawing.Size(133, 23);
        this.btnHelp.TabIndex = 19;
        this.btnHelp.Text = "Help";
        this.btnHelp.Click += new System.EventHandler(this.btnHelp_Click);
        //
        // btnClear
        //
        this.btnClear.FlatStyle = System.Windows.Forms.FlatStyle.System;
        this.btnClear.ImeMode = System.Windows.Forms.ImeMode.NoControl;
        this.btnClear.Location = new System.Drawing.Point(422, 438);
        this.btnClear.Name = "btnClear";
        this.btnClear.Size = new System.Drawing.Size(133, 23);
        this.btnClear.TabIndex = 20;
        this.btnClear.Text = "Clear";
        this.btnClear.Click += new System.EventHandler(this.btnClear_Click);
        //
        // BuildKeyDialog
        //
        this.AcceptButton = this.btnOK;
        this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
        this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
        this.CancelButton = this.btnCancel;
        this.ClientSize = new System.Drawing.Size(706, 473);
        this.Controls.Add(this.btnClear);
        this.Controls.Add(this.btnHelp);
        this.Controls.Add(this.lblBuildKeyInstructions);
        this.Controls.Add(this.AddLabel);
        this.Controls.Add(this.AddCommandButton);
        this.Controls.Add(this.label1);
        this.Controls.Add(this.btnCancel);
        this.Controls.Add(this.btnOK);
        this.Controls.Add(this.lbxRelatedTableFields);
        this.Controls.Add(this.lbxCurrentTableFields);
        this.Controls.Add(this.lblAvailableVariables);
        this.Controls.Add(this.txtKeyComponent);
        this.Controls.Add(this.lblKeyComponent);
        this.Controls.Add(this.label2);
        this.Name = "BuildKeyDialog";
        this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
        this.Text = "Build Key Dialog";
        this.Load += new System.EventHandler(this.BuildKeyDialog_Load);
        this.ResumeLayout(false);
        this.PerformLayout();
    }

    #endregion

    // Designer-managed control fields; instantiated and configured in
    // InitializeComponent. Do not assign these manually.
    private System.Windows.Forms.Label lblKeyComponent;
    private System.Windows.Forms.TextBox txtKeyComponent;
    private System.Windows.Forms.Label lblAvailableVariables;
    private System.Windows.Forms.ListBox lbxCurrentTableFields;
    private System.Windows.Forms.ListBox lbxRelatedTableFields;
    private System.Windows.Forms.Button btnOK;
    private System.Windows.Forms.Button btnCancel;
    private System.Windows.Forms.Label label1;
    private System.Windows.Forms.Label label2;
    private System.Windows.Forms.Button AddCommandButton;
    private System.Windows.Forms.Label AddLabel;
    private System.Windows.Forms.Label lblBuildKeyInstructions;
    private System.Windows.Forms.Button btnHelp;
    private System.Windows.Forms.Button btnClear;
}
}
| |
using System;
using System.Text;
using Renci.SshNet.Security.Org.BouncyCastle.Math;
namespace Renci.SshNet.Security.Org.BouncyCastle.Utilities
{
/// <summary> General array utilities.</summary>
internal abstract class Arrays
{
// Shared immutable empty-array singletons to avoid repeated zero-length allocations.
public static readonly byte[] EmptyBytes = new byte[0];
public static readonly int[] EmptyInts = new int[0];
// Returns true when buf[off .. off+len-1] contains only zero bytes.
// Uses a single OR-accumulator so the loop always scans the full range.
public static bool AreAllZeroes(byte[] buf, int off, int len)
{
    uint acc = 0;
    int end = off + len;
    for (int i = off; i < end; ++i)
    {
        acc |= buf[i];
    }
    return acc == 0;
}
// Element-wise equality for bool arrays. Two nulls compare equal; a null and a
// non-null do not. (Comparison inlined rather than delegated to a helper.)
public static bool AreEqual(bool[] a, bool[] b)
{
    if (ReferenceEquals(a, b))
        return true;
    if (a == null || b == null)
        return false;
    if (a.Length != b.Length)
        return false;
    for (int i = 0; i < a.Length; ++i)
    {
        if (a[i] != b[i])
            return false;
    }
    return true;
}
// Element-wise equality for char arrays. Two nulls compare equal; a null and a
// non-null do not. (Comparison inlined rather than delegated to a helper.)
public static bool AreEqual(char[] a, char[] b)
{
    if (ReferenceEquals(a, b))
        return true;
    if (a == null || b == null)
        return false;
    if (a.Length != b.Length)
        return false;
    for (int i = 0; i < a.Length; ++i)
    {
        if (a[i] != b[i])
            return false;
    }
    return true;
}
/// <summary>
/// Are two arrays equal.
/// </summary>
/// <param name="a">Left side.</param>
/// <param name="b">Right side.</param>
/// <returns>True if equal (two nulls compare equal; null vs non-null does not).</returns>
public static bool AreEqual(byte[] a, byte[] b)
{
    if (ReferenceEquals(a, b))
        return true;
    if (a == null || b == null)
        return false;
    if (a.Length != b.Length)
        return false;
    for (int i = 0; i < a.Length; ++i)
    {
        if (a[i] != b[i])
            return false;
    }
    return true;
}
// Legacy alias retained for backwards compatibility; simply forwards to AreEqual.
[Obsolete("Use 'AreEqual' method instead")]
public static bool AreSame(
    byte[] a,
    byte[] b)
{
    return AreEqual(a, b);
}
/// <summary>
/// A constant time equals comparison - does not terminate early if
/// test will fail.
/// </summary>
/// <param name="a">first array</param>
/// <param name="b">second array</param>
/// <returns>true if arrays equal, false otherwise.</returns>
/// <remarks>
/// Throws NullReferenceException if either argument is null; the length check
/// returns early, so only the contents (not the lengths) are compared in
/// constant time. Do not "optimize" the loop — its fixed-iteration shape is
/// the point.
/// </remarks>
public static bool ConstantTimeAreEqual(
    byte[] a,
    byte[] b)
{
    int i = a.Length;
    if (i != b.Length)
        return false;
    int cmp = 0;
    while (i != 0)
    {
        --i;
        // Accumulate differences with XOR/OR so timing is data-independent.
        cmp |= (a[i] ^ b[i]);
    }
    return cmp == 0;
}
// Element-wise equality for int arrays. Two nulls compare equal; a null and a
// non-null do not. (Comparison inlined rather than delegated to a helper.)
public static bool AreEqual(int[] a, int[] b)
{
    if (ReferenceEquals(a, b))
        return true;
    if (a == null || b == null)
        return false;
    if (a.Length != b.Length)
        return false;
    for (int i = 0; i < a.Length; ++i)
    {
        if (a[i] != b[i])
            return false;
    }
    return true;
}
// Element-wise equality for uint arrays. Two nulls compare equal; a null and a
// non-null do not. (Comparison inlined rather than delegated to a helper.)
public static bool AreEqual(uint[] a, uint[] b)
{
    if (ReferenceEquals(a, b))
        return true;
    if (a == null || b == null)
        return false;
    if (a.Length != b.Length)
        return false;
    for (int i = 0; i < a.Length; ++i)
    {
        if (a[i] != b[i])
            return false;
    }
    return true;
}
// Compares bool arrays element-by-element. Assumes both arguments are non-null
// (callers have already handled null).
private static bool HaveSameContents(bool[] a, bool[] b)
{
    if (a.Length != b.Length)
        return false;
    for (int n = 0; n < a.Length; ++n)
    {
        if (a[n] != b[n])
            return false;
    }
    return true;
}
// Compares char arrays element-by-element. Assumes both arguments are non-null
// (callers have already handled null).
private static bool HaveSameContents(char[] a, char[] b)
{
    if (a.Length != b.Length)
        return false;
    for (int n = 0; n < a.Length; ++n)
    {
        if (a[n] != b[n])
            return false;
    }
    return true;
}
// Compares byte arrays element-by-element. Assumes both arguments are non-null
// (callers have already handled null).
private static bool HaveSameContents(byte[] a, byte[] b)
{
    if (a.Length != b.Length)
        return false;
    for (int n = 0; n < a.Length; ++n)
    {
        if (a[n] != b[n])
            return false;
    }
    return true;
}
// Compares int arrays element-by-element. Assumes both arguments are non-null
// (callers have already handled null).
private static bool HaveSameContents(int[] a, int[] b)
{
    if (a.Length != b.Length)
        return false;
    for (int n = 0; n < a.Length; ++n)
    {
        if (a[n] != b[n])
            return false;
    }
    return true;
}
// Compares uint arrays element-by-element. Assumes both arguments are non-null
// (callers have already handled null).
private static bool HaveSameContents(uint[] a, uint[] b)
{
    if (a.Length != b.Length)
        return false;
    for (int n = 0; n < a.Length; ++n)
    {
        if (a[n] != b[n])
            return false;
    }
    return true;
}
/// <summary>
/// Renders the array as "[elem0, elem1, ...]" ("[]" when empty).
/// Throws NullReferenceException when <paramref name="a"/> is null.
/// </summary>
/// <param name="a">Array whose elements are stringified via StringBuilder.Append.</param>
/// <returns>Bracketed, comma-separated representation.</returns>
public static string ToString(
    object[] a)
{
    // BUG FIX: the original used new StringBuilder('[') — the char implicitly
    // converts to int, selecting the StringBuilder(int capacity) constructor
    // (capacity 91), so the opening '[' was never appended and the output was
    // e.g. "1, 2]" instead of "[1, 2]". The string overload appends it.
    StringBuilder sb = new StringBuilder("[");
    if (a.Length > 0)
    {
        sb.Append(a[0]);
        for (int index = 1; index < a.Length; ++index)
        {
            sb.Append(", ").Append(a[index]);
        }
    }
    sb.Append(']');
    return sb.ToString();
}
// Order-dependent FNV-like hash over the whole array (multiplier 257, XOR fold),
// seeded with length + 1; returns 0 for null.
public static int GetHashCode(byte[] data)
{
    if (data == null)
    {
        return 0;
    }
    int hc = data.Length + 1;
    for (int i = data.Length - 1; i >= 0; --i)
    {
        hc = (hc * 257) ^ data[i];
    }
    return hc;
}
public static int GetHashCode(byte[] data, int off, int len)
{
if (data == null)
{
return 0;
}
int i = len;
int hc = i + 1;
while (--i >= 0)
{
hc *= 257;
hc ^= data[off + i];
}
return hc;
}
public static int GetHashCode(int[] data)
{
if (data == null)
return 0;
int i = data.Length;
int hc = i + 1;
while (--i >= 0)
{
hc *= 257;
hc ^= data[i];
}
return hc;
}
public static int GetHashCode(int[] data, int off, int len)
{
if (data == null)
return 0;
int i = len;
int hc = i + 1;
while (--i >= 0)
{
hc *= 257;
hc ^= data[off + i];
}
return hc;
}
public static int GetHashCode(uint[] data)
{
if (data == null)
return 0;
int i = data.Length;
int hc = i + 1;
while (--i >= 0)
{
hc *= 257;
hc ^= (int)data[i];
}
return hc;
}
public static int GetHashCode(uint[] data, int off, int len)
{
if (data == null)
return 0;
int i = len;
int hc = i + 1;
while (--i >= 0)
{
hc *= 257;
hc ^= (int)data[off + i];
}
return hc;
}
/// <summary>Order-sensitive hash of a ulong array; null hashes to 0.</summary>
public static int GetHashCode(ulong[] data)
{
if (data == null)
return 0;
// Seed with length + 1; mix each 64-bit word as two 32-bit halves
// (low half first), walking from the end of the array.
int hc = data.Length + 1;
for (int i = data.Length - 1; i >= 0; --i)
{
ulong word = data[i];
hc *= 257;
hc ^= (int)word;
hc *= 257;
hc ^= (int)(word >> 32);
}
return hc;
}
public static int GetHashCode(ulong[] data, int off, int len)
{
if (data == null)
return 0;
int i = len;
int hc = i + 1;
while (--i >= 0)
{
ulong di = data[off + i];
hc *= 257;
hc ^= (int)di;
hc *= 257;
hc ^= (int)(di >> 32);
}
return hc;
}
public static byte[] Clone(
byte[] data)
{
return data == null ? null : (byte[])data.Clone();
}
/// <summary>
/// Copies <paramref name="data"/> into <paramref name="existing"/> when the
/// buffer is reusable (non-null and same length); otherwise allocates a copy.
/// Returns null when <paramref name="data"/> is null.
/// </summary>
public static byte[] Clone(
byte[] data,
byte[] existing)
{
// Nothing to copy.
if (data == null)
return null;
// No reusable buffer of the right size -> allocate a fresh copy.
if (existing == null || existing.Length != data.Length)
return (byte[])data.Clone();
// Reuse the caller-supplied buffer.
Array.Copy(data, 0, existing, 0, existing.Length);
return existing;
}
public static int[] Clone(
int[] data)
{
return data == null ? null : (int[])data.Clone();
}
internal static uint[] Clone(uint[] data)
{
return data == null ? null : (uint[])data.Clone();
}
public static long[] Clone(long[] data)
{
return data == null ? null : (long[])data.Clone();
}
public static ulong[] Clone(
ulong[] data)
{
return data == null ? null : (ulong[]) data.Clone();
}
public static ulong[] Clone(
ulong[] data,
ulong[] existing)
{
if (data == null)
{
return null;
}
if ((existing == null) || (existing.Length != data.Length))
{
return Clone(data);
}
Array.Copy(data, 0, existing, 0, existing.Length);
return existing;
}
public static bool Contains(byte[] a, byte n)
{
for (int i = 0; i < a.Length; ++i)
{
if (a[i] == n)
return true;
}
return false;
}
public static bool Contains(short[] a, short n)
{
for (int i = 0; i < a.Length; ++i)
{
if (a[i] == n)
return true;
}
return false;
}
public static bool Contains(int[] a, int n)
{
for (int i = 0; i < a.Length; ++i)
{
if (a[i] == n)
return true;
}
return false;
}
public static void Fill(
byte[] buf,
byte b)
{
int i = buf.Length;
while (i > 0)
{
buf[--i] = b;
}
}
public static void Fill(byte[] buf, int from, int to, byte b)
{
for (int i = from; i < to; ++i)
{
buf[i] = b;
}
}
public static byte[] CopyOf(byte[] data, int newLength)
{
byte[] tmp = new byte[newLength];
Array.Copy(data, 0, tmp, 0, System.Math.Min(newLength, data.Length));
return tmp;
}
public static char[] CopyOf(char[] data, int newLength)
{
char[] tmp = new char[newLength];
Array.Copy(data, 0, tmp, 0, System.Math.Min(newLength, data.Length));
return tmp;
}
public static int[] CopyOf(int[] data, int newLength)
{
int[] tmp = new int[newLength];
Array.Copy(data, 0, tmp, 0, System.Math.Min(newLength, data.Length));
return tmp;
}
public static long[] CopyOf(long[] data, int newLength)
{
long[] tmp = new long[newLength];
Array.Copy(data, 0, tmp, 0, System.Math.Min(newLength, data.Length));
return tmp;
}
public static BigInteger[] CopyOf(BigInteger[] data, int newLength)
{
BigInteger[] tmp = new BigInteger[newLength];
Array.Copy(data, 0, tmp, 0, System.Math.Min(newLength, data.Length));
return tmp;
}
/**
* Make a copy of a range of bytes from the passed in data array. The range can
* extend beyond the end of the input array, in which case the return array will
* be padded with zeroes.
*
* @param data the array from which the data is to be copied.
* @param from the start index at which the copying should take place.
* @param to the final index of the range (exclusive).
*
* @return a new byte array containing the range given.
*/
public static byte[] CopyOfRange(byte[] data, int from, int to)
{
int newLength = GetLength(from, to);
byte[] tmp = new byte[newLength];
Array.Copy(data, from, tmp, 0, System.Math.Min(newLength, data.Length - from));
return tmp;
}
public static int[] CopyOfRange(int[] data, int from, int to)
{
int newLength = GetLength(from, to);
int[] tmp = new int[newLength];
Array.Copy(data, from, tmp, 0, System.Math.Min(newLength, data.Length - from));
return tmp;
}
public static long[] CopyOfRange(long[] data, int from, int to)
{
int newLength = GetLength(from, to);
long[] tmp = new long[newLength];
Array.Copy(data, from, tmp, 0, System.Math.Min(newLength, data.Length - from));
return tmp;
}
public static BigInteger[] CopyOfRange(BigInteger[] data, int from, int to)
{
int newLength = GetLength(from, to);
BigInteger[] tmp = new BigInteger[newLength];
Array.Copy(data, from, tmp, 0, System.Math.Min(newLength, data.Length - from));
return tmp;
}
// Size of the half-open range [from, to); a negative span is a caller bug.
private static int GetLength(int from, int to)
{
int count = to - from;
if (count < 0)
throw new ArgumentException(from + " > " + to);
return count;
}
/// <summary>Returns a new array: the contents of a followed by b.
/// A null input is treated as empty.</summary>
public static byte[] Append(byte[] a, byte b)
{
// Appending to null yields a single-element array.
if (a == null)
return new byte[] { b };
// Copy the original and tack the new value onto the end.
byte[] grown = new byte[a.Length + 1];
Array.Copy(a, 0, grown, 0, a.Length);
grown[a.Length] = b;
return grown;
}
public static short[] Append(short[] a, short b)
{
if (a == null)
return new short[] { b };
int length = a.Length;
short[] result = new short[length + 1];
Array.Copy(a, 0, result, 0, length);
result[length] = b;
return result;
}
public static int[] Append(int[] a, int b)
{
if (a == null)
return new int[] { b };
int length = a.Length;
int[] result = new int[length + 1];
Array.Copy(a, 0, result, 0, length);
result[length] = b;
return result;
}
/// <summary>Returns a || b as a new array; a null side contributes nothing,
/// and the result is null only when both sides are null.</summary>
public static byte[] Concatenate(byte[] a, byte[] b)
{
// With one side missing, the result is just a copy of the other
// (clone semantics inlined from the single-argument Clone helper).
if (a == null)
return b == null ? null : (byte[])b.Clone();
if (b == null)
return (byte[])a.Clone();
// Allocate once and copy both halves in order.
int aLen = a.Length;
byte[] joined = new byte[aLen + b.Length];
Array.Copy(a, 0, joined, 0, aLen);
Array.Copy(b, 0, joined, aLen, b.Length);
return joined;
}
/// <summary>Concatenates all non-null arrays in order into one new array;
/// null entries are skipped.</summary>
public static byte[] ConcatenateAll(params byte[][] vs)
{
// First pass: collect the non-null segments and their total size.
byte[][] parts = new byte[vs.Length][];
int partCount = 0;
int total = 0;
foreach (byte[] v in vs)
{
if (v == null)
continue;
parts[partCount++] = v;
total += v.Length;
}
// Second pass: copy each segment into the result, back to back.
byte[] result = new byte[total];
int pos = 0;
for (int j = 0; j < partCount; ++j)
{
byte[] part = parts[j];
Array.Copy(part, 0, result, pos, part.Length);
pos += part.Length;
}
return result;
}
public static int[] Concatenate(int[] a, int[] b)
{
if (a == null)
return Clone(b);
if (b == null)
return Clone(a);
int[] rv = new int[a.Length + b.Length];
Array.Copy(a, 0, rv, 0, a.Length);
Array.Copy(b, 0, rv, a.Length, b.Length);
return rv;
}
/// <summary>Returns a new array: b followed by the contents of a.
/// A null input is treated as empty.</summary>
public static byte[] Prepend(byte[] a, byte b)
{
// Prepending to null yields a single-element array.
if (a == null)
return new byte[] { b };
// Shift the original contents right by one and install b at the front.
byte[] grown = new byte[a.Length + 1];
Array.Copy(a, 0, grown, 1, a.Length);
grown[0] = b;
return grown;
}
public static short[] Prepend(short[] a, short b)
{
if (a == null)
return new short[] { b };
int length = a.Length;
short[] result = new short[length + 1];
Array.Copy(a, 0, result, 1, length);
result[0] = b;
return result;
}
public static int[] Prepend(int[] a, int b)
{
if (a == null)
return new int[] { b };
int length = a.Length;
int[] result = new int[length + 1];
Array.Copy(a, 0, result, 1, length);
result[0] = b;
return result;
}
/// <summary>Returns a reversed copy of the array, or null for null input.</summary>
public static byte[] Reverse(byte[] a)
{
if (a == null)
return null;
// Write a[0] into the last slot, a[1] into the one before it, and so on.
int n = a.Length;
byte[] result = new byte[n];
for (int src = 0; src < n; ++src)
{
result[n - 1 - src] = a[src];
}
return result;
}
public static int[] Reverse(int[] a)
{
if (a == null)
return null;
int p1 = 0, p2 = a.Length;
int[] result = new int[p2];
while (--p2 >= 0)
{
result[p2] = a[p1++];
}
return result;
}
}
}
| |
// String.cs
// Script#/Libraries/CoreLib
// This source code is subject to terms and conditions of the Apache License, Version 2.0.
//
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.CompilerServices;
using System.Text.RegularExpressions;
namespace System {
/// <summary>
/// Equivalent to the String type in Javascript.
/// </summary>
[IgnoreNamespace]
[Imported(ObeysTypeSystem = true)]
public sealed class String : IComparable<String>, IEquatable<String> {
[ScriptName("")]
public String() {}
[ScriptName("")]
public String(String other) {}
[InlineCode("{$System.Script}.stringFromChar({$System.String}.fromCharCode({ch}), {count})")]
public String(char ch, int count) {}
[InlineCode("{$System.String}.fromCharCode.apply(null, {value})")]
public String(char[] value) {}
[InlineCode("{$System.String}.fromCharCode.apply(null, {value}.slice({startIndex}, {startIndex} + {length}))")]
public String(char[] value, int startIndex, int length) {}
[IndexerName("Chars")]
public char this[int index] { [InlineCode("{this}.charCodeAt({index})")] get { return '\0'; } }
[NonScriptable]
public IEnumerator<char> GetEnumerator() { return null; }
/// <summary>
/// An empty zero-length string.
/// </summary>
[InlineConstant]
public const String Empty = "";
/// <summary>
/// The number of characters in the string.
/// </summary>
[IntrinsicProperty]
public int Length {
get {
return 0;
}
}
/// <summary>
/// Retrieves the character at the specified position.
/// </summary>
/// <param name="index">The specified 0-based position.</param>
/// <returns>The character within the string.</returns>
public string CharAt(int index) {
return null;
}
/// <summary>
/// Retrieves the character code of the character at the specified position.
/// </summary>
/// <param name="index">The specified 0-based position.</param>
/// <returns>The character code of the character within the string.</returns>
public char CharCodeAt(int index) {
return '\0';
}
[InlineCode("{$System.Script}.compareStrings({s1}, {s2})")]
public static int Compare(string s1, string s2) {
return 0;
}
[InlineCode("{$System.Script}.compareStrings({s1}, {s2}, {ignoreCase})")]
public static int Compare(string s1, string s2, bool ignoreCase) {
return 0;
}
[InlineCode("{$System.Script}.compareStrings({this}, {s}, {ignoreCase})")]
public int CompareTo(string s, bool ignoreCase) {
return 0;
}
[InlineCode("[{s1}, {s2}].join('')")]
public static string Concat(string s1, string s2) {
return null;
}
[InlineCode("[{s1}, {s2}, {s3}].join('')")]
public static string Concat(string s1, string s2, string s3) {
return null;
}
[InlineCode("[{s1}, {s2}, {s3}, {s4}].join('')")]
public static string Concat(string s1, string s2, string s3, string s4) {
return null;
}
/// <summary>
/// Concatenates a set of individual strings into a single string.
/// </summary>
/// <param name="strings">The sequence of strings</param>
/// <returns>The concatenated string.</returns>
[InlineCode("[{*strings}].join('')")]
public static string Concat(params string[] strings) {
return null;
}
[InlineCode("[{o1}, {o2}].join('')")]
public static string Concat(object o1, object o2) {
return null;
}
[InlineCode("[{o1}, {o2}, {o3}].join('')")]
public static string Concat(object o1, object o2, object o3) {
return null;
}
[InlineCode("[{o1}, {o2}, {o3}, {o4}].join('')")]
public static string Concat(object o1, object o2, object o3, object o4) {
return null;
}
[InlineCode("[{*o}].join('')")]
public static string Concat(params object[] o) {
return null;
}
[InlineCode("[{o}].join('')")]
public static string Concat(object o) {
return null;
}
/// <summary>
/// Returns the unencoded version of a complete encoded URI.
/// </summary>
/// <returns>The unencoded string.</returns>
[ScriptAlias("decodeURI")]
public static string DecodeUri(string s) {
return null;
}
/// <summary>
/// Returns the unencoded version of a single part or component of an encoded URI.
/// </summary>
/// <returns>The unencoded string.</returns>
[ScriptAlias("decodeURIComponent")]
public static string DecodeUriComponent(string s) {
return null;
}
/// <summary>
/// Encodes the complete URI.
/// </summary>
/// <returns>The encoded string.</returns>
[ScriptAlias("encodeURI")]
public static string EncodeUri(string s) {
return null;
}
/// <summary>
/// Encodes a single part or component of a URI.
/// </summary>
/// <returns>The encoded string.</returns>
[ScriptAlias("encodeURIComponent")]
public static string EncodeUriComponent(string s) {
return null;
}
/// <summary>
/// Determines if the string ends with the specified character.
/// </summary>
/// <param name="ch">The character to test for.</param>
/// <returns>true if the string ends with the character; false otherwise.</returns>
[InlineCode("{$System.Script}.endsWithString({this}, {$System.String}.fromCharCode({ch}))")]
public bool EndsWith(char ch) {
return false;
}
/// <summary>
/// Determines if the string ends with the specified substring or suffix.
/// </summary>
/// <param name="suffix">The string to test for.</param>
/// <returns>true if the string ends with the suffix; false otherwise.</returns>
[InlineCode("{$System.Script}.endsWithString({this}, {suffix})")]
public bool EndsWith(string suffix) {
return false;
}
/// <summary>
/// Determines if the strings are equal.
/// </summary>
/// <returns>true if the string s1 = s2; false otherwise.</returns>
// BUG FIX: the template previously ended with "=== 0)" — an unbalanced
// trailing parenthesis that made the generated JavaScript a syntax error at
// every call site. The whole comparison is now parenthesized consistently.
[InlineCode("({$System.Script}.compareStrings({s1}, {s2}, {ignoreCase}) === 0)")]
public static bool Equals(string s1, string s2, bool ignoreCase) {
return false;
}
/// <summary>
/// Encodes a string by replacing punctuation, spaces etc. with their escaped equivalents.
/// </summary>
/// <returns>The escaped string.</returns>
[ScriptAlias("escape") ]
public static string Escape(string s) {
return null;
}
[InlineCode("{$System.Script}.formatString({format}, {*values})")]
public static string Format(string format, params object[] values) {
return null;
}
[ExpandParams]
public static string FromCharCode(params char[] charCode) {
return null;
}
[InlineCode("{$System.Script}.htmlDecode({this})")]
public string HtmlDecode() {
return null;
}
[InlineCode("{$System.Script}.htmlEncode({this})")]
public string HtmlEncode() {
return null;
}
[InlineCode("{this}.indexOf({$System.String}.fromCharCode({ch}))")]
public int IndexOf(char ch) {
return 0;
}
public int IndexOf(string subString) {
return 0;
}
[InlineCode("{this}.indexOf({$System.String}.fromCharCode({ch}), {startIndex})")]
public int IndexOf(char ch, int startIndex) {
return 0;
}
public int IndexOf(string ch, int startIndex) {
return 0;
}
[InlineCode("{$System.Script}.indexOfString({this}, {$System.String}.fromCharCode({ch}), {startIndex}, {count})")]
public int IndexOf(char ch, int startIndex, int count)
{
return 0;
}
[InlineCode("{$System.Script}.indexOfString({this}, {ch}, {startIndex}, {count})")]
public int IndexOf(string ch, int startIndex, int count)
{
return 0;
}
[InlineCode("{$System.Script}.indexOfAnyString({this}, {ch})")]
public int IndexOfAny(params char[] ch) {
return 0;
}
[InlineCode("{$System.Script}.indexOfAnyString({this}, {ch}, {startIndex})")]
public int IndexOfAny(char[] ch, int startIndex) {
return 0;
}
[InlineCode("{$System.Script}.indexOfAnyString({this}, {ch}, {startIndex}, {count})")]
public int IndexOfAny(char[] ch, int startIndex, int count) {
return 0;
}
[InlineCode("{$System.Script}.insertString({this}, {index}, {value})")]
public string Insert(int index, string value) {
return null;
}
[InlineCode("{$System.Script}.isNullOrEmptyString({s})")]
public static bool IsNullOrEmpty(string s) {
return false;
}
[InlineCode("{this}.lastIndexOf({$System.String}.fromCharCode({ch}))")]
public int LastIndexOf(char ch) {
return 0;
}
public int LastIndexOf(string subString) {
return 0;
}
public int LastIndexOf(string subString, int startIndex) {
return 0;
}
[InlineCode("{$System.Script}.lastIndexOfString({this}, {$System.String}.fromCharCode({ch}), {startIndex}, {count})")]
public int LastIndexOf(char ch, int startIndex, int count)
{
return 0;
}
[InlineCode("{$System.Script}.lastIndexOfString({this}, {subString}, {startIndex}, {count})")]
public int LastIndexOf(string subString, int startIndex, int count)
{
return 0;
}
[InlineCode("{this}.lastIndexOf({$System.String}.fromCharCode({ch}), {startIndex})")]
public int LastIndexOf(char ch, int startIndex) {
return 0;
}
[InlineCode("{$System.Script}.lastIndexOfAnyString({this}, {ch})")]
public int LastIndexOfAny(params char[] ch) {
return 0;
}
[InlineCode("{$System.Script}.lastIndexOfAnyString({this}, {ch}, {startIndex})")]
public int LastIndexOfAny(char[] ch, int startIndex) {
return 0;
}
[InlineCode("{$System.Script}.lastIndexOfAnyString({this}, {ch}, {startIndex}, {count})")]
public int LastIndexOfAny(char[] ch, int startIndex, int count) {
return 0;
}
public int LocaleCompare(string string2) {
return 0;
}
[ExpandParams]
public static string LocaleFormat(string format, params object[] values) {
return null;
}
public string[] Match(Regex regex) {
return null;
}
[InlineCode("{$System.Script}.padLeftString({this}, {totalWidth})")]
public string PadLeft(int totalWidth) {
return null;
}
[InlineCode("{$System.Script}.padLeftString({this}, {totalWidth}, {ch})")]
public string PadLeft(int totalWidth, char ch) {
return null;
}
[InlineCode("{$System.Script}.padRightString({this}, {totalWidth})")]
public string PadRight(int totalWidth) {
return null;
}
[InlineCode("{$System.Script}.padRightString({this}, {totalWidth}, {ch})")]
public string PadRight(int totalWidth, char ch) {
return null;
}
[InlineCode("{$System.Script}.removeString({this}, {index})")]
public string Remove(int index) {
return null;
}
[InlineCode("{$System.Script}.removeString({this}, {index}, {count})")]
public string Remove(int index, int count) {
return null;
}
[InlineCode("{$System.Script}.replaceAllString({this}, {oldText}, {replaceText})")]
public string Replace(string oldText, string replaceText) {
return null;
}
[InlineCode("{$System.Script}.replaceAllString({this}, {$System.String}.fromCharCode({oldChar}), {$System.String}.fromCharCode({replaceChar}))")]
public string Replace(char oldChar, char replaceChar)
{
return null;
}
[ScriptName("replace")]
public string ReplaceFirst(string oldText, string replaceText) {
return null;
}
[ScriptName("replace")]
public string Replace(Regex regex, string replaceText) {
return null;
}
[ScriptName("replace")]
public string Replace(Regex regex, StringReplaceCallback callback) {
return null;
}
public int Search(Regex regex) {
return 0;
}
public string[] Split(string separator) {
return null;
}
[InlineCode("{this}.split({$System.String}.fromCharCode({separator}))")]
public string[] Split(char separator) {
return null;
}
public string[] Split(string separator, int limit) {
return null;
}
[InlineCode("{this}.split({$System.String}.fromCharCode({separator}), {limit})")]
public string[] Split(char separator, int limit) {
return null;
}
public string[] Split(Regex regex) {
return null;
}
public string[] Split(Regex regex, int limit) {
return null;
}
[InlineCode("{$System.Script}.startsWithString({this}, {$System.String}.fromCharCode({ch}))")]
public bool StartsWith(char ch) {
return false;
}
[InlineCode("{$System.Script}.startsWithString({this}, {prefix})")]
public bool StartsWith(string prefix) {
return false;
}
public string Substr(int startIndex) {
return null;
}
public string Substr(int startIndex, int length) {
return null;
}
public string Substring(int startIndex) {
return null;
}
[ScriptName("substr")]
public string Substring(int startIndex, int length) {
return null;
}
[ScriptName("substring")]
public string JsSubstring(int startIndex, int end) {
return null;
}
public string ToLocaleLowerCase() {
return null;
}
public string ToLocaleUpperCase() {
return null;
}
public string ToLowerCase() {
return null;
}
[ScriptName("toLowerCase")]
public string ToLower() {
return null;
}
public string ToUpperCase() {
return null;
}
[ScriptName("toUpperCase")]
public string ToUpper() {
return null;
}
public string Trim() {
return null;
}
[InlineCode("{$System.Script}.trimString({this}, {values})")]
public string Trim(params char[] values)
{
return null;
}
[InlineCode("{$System.Script}.trimStartString({this}, {values})")]
public string TrimStart(params char[] values)
{
return null;
}
[InlineCode("{$System.Script}.trimEndString({this}, {values})")]
public string TrimEnd(params char[] values)
{
return null;
}
[InlineCode("{$System.Script}.trimStartString({this})")]
public string TrimStart() {
return null;
}
[InlineCode("{$System.Script}.trimEndString({this})")]
public string TrimEnd() {
return null;
}
/// <summary>
/// Decodes a string by replacing escaped parts with their equivalent textual representation.
/// </summary>
/// <returns>The unescaped string.</returns>
[ScriptAlias("unescape")]
public static string Unescape(string s) {
return null;
}
[IntrinsicOperator]
public static bool operator ==(string s1, string s2) {
return false;
}
[IntrinsicOperator]
public static bool operator !=(string s1, string s2) {
return false;
}
[InlineCode("{$System.Script}.compare({this}, {other})")]
public int CompareTo(string other) {
return 0;
}
[InlineCode("{$System.Script}.equalsT({this}, {other})")]
public bool Equals(string other) {
return false;
}
[InlineCode("{$System.Script}.equalsT({a}, {b})")]
public static bool Equals(string a, string b)
{
return false;
}
[InlineCode("{args}.join({separator})")]
public static string Join(string separator, params string[] args)
{
return null;
}
[InlineCode("{args}.join({separator})")]
public static string Join(string separator, params Object[] args)
{
return null;
}
[InlineCode("{$System.Script}.arrayFromEnumerable({args}).join({separator})")]
public static string Join(string separator, IEnumerable<string> args)
{
return null;
}
[InlineCode("{$System.Script}.arrayFromEnumerable({args}).join({separator})")]
public static string Join<T>(string separator, IEnumerable<T> args)
{
return null;
}
[InlineCode("{args}.slice({startIndex}, {startIndex} + {count}).join({separator})")]
public static string Join(string separator, string[] args, int startIndex, int count)
{
return null;
}
[InlineCode("({this}.indexOf({value}) !== -1)")]
public bool Contains(string value)
{
return false;
}
}
}
| |
using OpenKh.Tools.Common.Wpf;
using OpenKh.Common;
using OpenKh.Kh2;
using OpenKh.Kh2.Contextes;
using OpenKh.Kh2.Extensions;
using OpenKh.Tools.Kh2TextEditor.Types;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Windows;
using Xe.Tools;
using Xe.Tools.Wpf.Commands;
using Xe.Tools.Wpf.Dialogs;
using OpenKh.Tools.Kh2TextEditor.Services;
using System.Text;
using OpenKh.Engine.Renders;
using OpenKh.Engine.Extensions;
namespace OpenKh.Tools.Kh2TextEditor.ViewModels
{
public class MainViewModel : BaseNotifyPropertyChanged
{
private const string DefaultName = "FAKE";
private const string GuideUrl = "https://openkh.dev/kh2/tool/Kh2TextEditor/OpenKh.Tools.Kh2TextEditor";
private static string ApplicationName = Utilities.GetApplicationName();
private string _fileName;
private string _barEntryName;
private FontContext _fontContext = new FontContext();
private FontType _fontType;
private EncodingType _encodingType;
private static readonly List<FileDialogFilter> MessageFilters = FileDialogFilterComposer
.Compose()
.AddExtensions("Message files", "bar", "msg", "bin")
.AddAllFiles();
private static readonly List<FileDialogFilter> FontImageFilters = FileDialogFilterComposer
.Compose()
.AddExtensions("fontimage.bar", "bar")
.AddAllFiles();
private static readonly List<FileDialogFilter> FontInfoFilters = FileDialogFilterComposer
.Compose()
.AddExtensions("fontinfo.bar", "bar")
.AddAllFiles();
private static readonly List<FileDialogFilter> ExportFilters = FileDialogFilterComposer
.Compose()
.Concat(TextExporters.GetAll().Select(x => FileDialogFilter.ByExtensions(x.Filter().Item1, x.Filter().Item2)))
.ToList();
private static readonly List<FileDialogFilter> ImportFilters = FileDialogFilterComposer
.Compose()
.Concat(TextImporters.GetAll().Select(x => FileDialogFilter.ByExtensions(x.Filter().Item1, x.Filter().Item2)))
.ToList();
public string Title => $"{_barEntryName ?? DefaultName} | {FileName ?? "untitled"} | {ApplicationName}";
private string FileName
{
get => _fileName;
set
{
_fileName = value;
OnPropertyChanged(nameof(Title));
}
}
private Window Window => Application.Current.Windows.OfType<Window>().FirstOrDefault(x => x.IsActive);
public RelayCommand OpenCommand { get; }
public RelayCommand SaveCommand { get; }
public RelayCommand SaveAsCommand { get; }
public RelayCommand ExportMessageAsCommand { get; }
public RelayCommand ImportMessageFromCommand { get; }
public RelayCommand ExitCommand { get; }
public RelayCommand GuideCommand { get; }
public RelayCommand AboutCommand { get; }
public RelayCommand OpenFontImageCommand { get; }
public RelayCommand SaveFontImageCommand { get; }
public RelayCommand EditFontImageCommand { get; }
public RelayCommand OpenFontInfoCommand { get; }
public RelayCommand SaveFontInfoCommand { get; }
public RelayCommand EditFontInfoCommand { get; }
public TextEditorViewModel TextEditor { get; private set; }
public bool OptimizeOnSave { get; set; }
public FontType FontType
{
get => _fontType;
set
{
_fontType = value;
InvalidateFontContext();
}
}
public EncodingType EncodingType
{
get => _encodingType;
set
{
_encodingType = value;
InvalidateFontContext();
}
}
public MainViewModel()
{
OpenCommand = new RelayCommand(x =>
{
FileDialog.OnOpen(fileName =>
{
OpenFile(fileName);
}, MessageFilters);
}, x => true);
SaveCommand = new RelayCommand(x =>
{
if (!string.IsNullOrEmpty(FileName))
{
SaveFile(FileName, FileName);
}
else
{
SaveAsCommand.Execute(x);
}
}, x => true);
SaveAsCommand = new RelayCommand(x =>
{
FileDialog.OnSave(fileName =>
{
SaveFile(FileName, fileName);
FileName = fileName;
}, MessageFilters);
}, x => true);
ExportMessageAsCommand = new RelayCommand(x =>
{
FileDialog.OnSave(fileName =>
{
var selectedExtension = $"{Path.GetExtension(fileName).TrimStart('.')}";
ExportMessageAsFile(
fileName: fileName,
textExporter: TextExporters.FindFromFile(fileName)
);
}, ExportFilters);
}, x => true);
ImportMessageFromCommand = new RelayCommand(x =>
{
FileDialog.OnOpen(fileName =>
{
var selectedExtension = $"{Path.GetExtension(fileName).TrimStart('.')}";
var textImporter = TextImporters.FindFromFile(fileName);
if (textImporter != null)
{
ImportMessageFromFile(
fileName: fileName,
textImporter
);
}
else
{
MessageBox.Show($"Failed to match text decoder for your file:\n{fileName}");
}
}, ImportFilters);
}, x => true);
ExitCommand = new RelayCommand(x =>
{
Window.Close();
}, x => true);
OpenFontImageCommand = new RelayCommand(x =>
{
FileDialog.OnOpen(fileName =>
{
OpenFontImageFile(fileName);
}, FontImageFilters);
}, x => true);
SaveFontImageCommand = new RelayCommand(x =>
{
FileDialog.OnSave(fileName =>
{
SaveFontImageFile(fileName);
}, FontImageFilters);
}, x => true);
OpenFontInfoCommand = new RelayCommand(x =>
{
FileDialog.OnOpen(fileName =>
{
OpenFontInfoFile(fileName);
}, FontInfoFilters);
}, x => true);
SaveFontInfoCommand = new RelayCommand(x =>
{
FileDialog.OnSave(fileName =>
{
SaveFontInfoFile(fileName);
}, FontInfoFilters);
}, x => true);
GuideCommand = new RelayCommand(x =>
{
Process.Start(new ProcessStartInfo(GuideUrl));
}, x => true);
AboutCommand = new RelayCommand(x =>
{
new AboutDialog(Assembly.GetExecutingAssembly()).ShowDialog();
}, x => true);
TextEditor = new TextEditorViewModel();
FontType = FontType.System;
}
public bool OpenFile(string fileName) => File.OpenRead(fileName).Using(stream =>
{
_barEntryName = null;
if (!TryReadMsg(stream) && !TryReadMsgAsBar(stream))
{
MessageBox.Show(Window, "Invalid or not existing Message data found.", "Error", MessageBoxButton.OK, MessageBoxImage.Error);
return false;
}
if (_barEntryName == null)
{
_barEntryName = Path.GetFileNameWithoutExtension(fileName);
if (_barEntryName.Length > 4)
_barEntryName = _barEntryName.Substring(0, 4);
_barEntryName = _barEntryName.ToLower();
}
FileName = fileName;
LoadSupportFiles(Path.GetDirectoryName(fileName));
AutodetectRegion();
return true;
});
public void SaveFile(string previousFileName, string fileName)
{
if (File.Exists(previousFileName))
{
bool isBar = false;
List<Bar.Entry> entries;
entries = File.OpenRead(previousFileName).Using(stream =>
{
isBar = Bar.IsValid(stream);
return isBar ? Bar.Read(stream) : null;
});
if (isBar)
File.Create(fileName).Using(stream => WriteBar(entries, stream));
else
File.Create(fileName).Using(WriteMsg);
}
else
{
File.Create(fileName).Using(WriteMsg);
}
}
public void ExportMessageAsFile(string fileName, ITextExporter textExporter)
{
new StreamWriter(fileName, false, Encoding.UTF8).Using(
writer => textExporter.Export(
TextEditor.Messages
.Select(
source => new ExchangeableMessage
{
Id = source.Id,
Text = source.Text,
}
),
writer
)
);
}
public void ImportMessageFromFile(string fileName, ITextImporter textImporter)
{
var importedMessages = new StreamReader(fileName, Encoding.UTF8).Using(
reader => textImporter.Import(reader)
.ToArray() // make sure to import all messages from file before closing StreamReader!
);
foreach (var importMessage in importedMessages)
{
var found = TextEditor.Messages.SingleOrDefault(it => it.Id == importMessage.Id);
if (found != null)
{
found.Text = importMessage.Text;
}
}
}
public void OpenFontImageFile(string fileName) => File.OpenRead(fileName).Using(stream =>
{
if (Bar.IsValid(stream))
{
_fontContext.Read(Bar.Read(stream));
InvalidateFontContext();
}
});
private void SaveFontImageFile(string fileName)
{
throw new NotImplementedException();
}
public void OpenFontInfoFile(string fileName) => File.OpenRead(fileName).Using(stream =>
{
if (Bar.IsValid(stream))
{
_fontContext.Read(Bar.Read(stream));
InvalidateFontContext();
}
});
private void SaveFontInfoFile(string fileName)
{
throw new NotImplementedException();
}
private void InvalidateFontContext()
{
RenderingMessageContext context;
switch (EncodingType)
{
case EncodingType.European:
switch (FontType)
{
case FontType.System:
context = _fontContext.ToKh2EuSystemTextContext();
break;
case FontType.Event:
context = _fontContext.ToKh2EuEventTextContext();
break;
default:
context = null;
break;
}
break;
case EncodingType.Japanese:
switch (FontType)
{
case FontType.System:
context = _fontContext.ToKh2JpSystemTextContext();
break;
case FontType.Event:
context = _fontContext.ToKh2JpEventTextContext();
break;
default:
context = null;
break;
}
break;
case EncodingType.Turkish:
switch (FontType)
{
case FontType.System:
context = _fontContext.ToKh2TRSystemTextContext();
break;
case FontType.Event:
context = _fontContext.ToKh2TREventTextContext();
break;
default:
context = null;
break;
}
break;
default:
context = null;
break;
}
TextEditor.TextContext = context;
}
private bool TryReadMsg(Stream stream)
{
if (!Msg.IsValid(stream))
return false;
TextEditor.MessageEntries = Msg.Read(stream);
return true;
}
private bool TryReadMsgAsBar(Stream stream)
{
if (!Bar.IsValid(stream))
return false;
var msgEntry = Bar.Read(stream)
.FirstOrDefault(x => x.Type == Bar.EntryType.List);
if (msgEntry == null)
return false;
_barEntryName = msgEntry.Name;
return TryReadMsg(msgEntry.Stream);
}
private void WriteMsg(Stream stream)
{
if (OptimizeOnSave)
Msg.WriteOptimized(stream, TextEditor.MessageEntries);
else
Msg.Write(stream, TextEditor.MessageEntries);
stream.SetLength(stream.Position);
}
private void WriteBar(List<Bar.Entry> entries, Stream stream)
{
var newEntries = entries
.ForEntry(Bar.EntryType.List, _barEntryName, 0, entry => WriteMsg(entry.Stream));
Bar.Write(stream, newEntries);
}
/// <summary>
/// Loads the optional font support files (fontimage.bar and fontinfo.bar)
/// that sit next to the opened message file, when they exist.
/// </summary>
/// <param name="basePath">Directory of the message file just opened.</param>
private void LoadSupportFiles(string basePath)
{
const string FontImageFileName = "fontimage.bar";
var fontImageFileName = Path.Combine(basePath, FontImageFileName);
if (File.Exists(fontImageFileName))
OpenFontImageFile(fontImageFileName);
const string FontInfoFileName = "fontinfo.bar";
var fontInfoFileName = Path.Combine(basePath, FontInfoFileName);
if (File.Exists(fontInfoFileName))
// BUG FIX: this previously called OpenFontImageFile, so fontinfo.bar
// was loaded as a font *image* and the font info was never read.
OpenFontInfoFile(fontInfoFileName);
}
// Tri-state autodetection: true => Japanese, false => European,
// null (undetectable) => leave the current encoding unchanged.
private void AutodetectRegion()
{
    bool? isJapanese = IsMsgJapanese();
    if (isJapanese == true)
        EncodingType = EncodingType.Japanese;
    else if (isJapanese == false)
        EncodingType = EncodingType.European;
}
// Probes a specific message entry whose byte length differs between the
// Japanese and non-Japanese game data; returns null when it cannot decide.
private bool? IsMsgJapanese()
{
    const ushort FakeTextId = 0x0ADC;
    var entries = TextEditor?.MessageEntries;
    if (entries == null)
        return null;
    var probe = entries.FirstOrDefault(x => x.Id == FakeTextId);
    var data = probe?.Data;
    if (data == null || data.Length == 0)
        return null;
    // Assumes the probe entry is exactly 5 bytes only in the non-Japanese
    // dump — TODO confirm against game data.
    return data.Length != 5;
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Globalization;
using System.IO;
using System.Text;
using System.Net;
using System.Diagnostics;
using System.Collections;
using OpenLiveWriter.BlogClient;
using OpenLiveWriter.BlogClient.Detection;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.CoreServices.Settings;
using OpenLiveWriter.PostEditor;
using System.Text.RegularExpressions;
namespace OpenLiveWriter.PostEditor.PostHtmlEditing
{
/// <summary>
/// Summary description for PostHtmlEditingSettings.
/// </summary>
public class PostHtmlEditingSettings : IDisposable
{
// Id of the weblog this settings instance is scoped to.
private string _blogId;
public PostHtmlEditingSettings(string blogId)
{
    _blogId = blogId;
    // Only the "EditorTemplate" sub-key is kept alive for the lifetime of this
    // object; the parent weblog settings key is disposed immediately after the
    // sub-key has been obtained.
    using (SettingsPersisterHelper blogSettings = BlogSettings.GetWeblogSettingsKey(blogId))
    {
        _editorTemplateSettings = blogSettings.GetSubSettings("EditorTemplate");
    }
}
/// <summary>
/// Name of the editing view last used for this blog; empty string when never set.
/// </summary>
public string LastEditingView
{
    get { return _editorTemplateSettings.GetString(LAST_EDITING_VIEW, String.Empty); }
    set { _editorTemplateSettings.SetString(LAST_EDITING_VIEW, value); }
}
private const string LAST_EDITING_VIEW = "LastEditingView";
/// <summary>
/// True when an explicit EditUsingStyles preference has been persisted.
/// </summary>
public bool EditUsingBlogStylesIsSet
{
    get { return _editorTemplateSettings.HasValue(EDIT_USING_STYLES); }
}
/// <summary>
/// Whether to edit using the blog's own styles. When no value is persisted,
/// defaults to true unless the last editing view was the Normal view.
/// </summary>
public bool EditUsingBlogStyles
{
    get { return _editorTemplateSettings.GetBoolean(EDIT_USING_STYLES, LastEditingView != EditingViews.Normal); }
    set { _editorTemplateSettings.SetBoolean(EDIT_USING_STYLES, value); }
}
private const string EDIT_USING_STYLES = "EditUsingStyles";
/// <summary>
/// Whether the web-layout warning dialog should still be shown; defaults to true.
/// </summary>
public bool DisplayWebLayoutWarning
{
    get { return _editorTemplateSettings.GetBoolean(DISPLAY_WEB_LAYOUT_WARNING, true); }
    set { _editorTemplateSettings.SetBoolean(DISPLAY_WEB_LAYOUT_WARNING, value); }
}
private const string DISPLAY_WEB_LAYOUT_WARNING = "DisplayWebLayoutWarning";
/// <summary>
/// The cached editing-template files (one per template type) for this blog,
/// persisted under the "templates" sub-key (as relative paths when possible).
/// </summary>
public BlogEditingTemplateFile[] EditorTemplateHtmlFiles
{
    get
    {
        // Dispose the sub-settings key when done (matches the using-pattern in
        // CleanupUnusedTemplates; the previous getter leaked it).
        using (SettingsPersisterHelper templates = _editorTemplateSettings.GetSubSettings(EDITOR_TEMPLATES_KEY))
        {
            string[] templateTypes = templates.GetNames();
            BlogEditingTemplateFile[] templateFiles = new BlogEditingTemplateFile[templateTypes.Length];
            for (int i = 0; i < templateTypes.Length; i++)
            {
                string templateTypeStr = templateTypes[i];
                string templateFile = templates.GetString(templateTypeStr, BlogEditingTemplate.GetBlogTemplateDir(_blogId));
                BlogEditingTemplateType templateType =
                    (BlogEditingTemplateType)Enum.Parse(typeof(BlogEditingTemplateType), templateTypeStr);
                templateFiles[i] = new BlogEditingTemplateFile(templateType, templateFile);
            }
            return templateFiles;
        }
    }
    set
    {
        // Replace the whole sub-tree with the new set of mappings.
        if (_editorTemplateSettings.HasSubSettings(EDITOR_TEMPLATES_KEY))
            _editorTemplateSettings.UnsetSubsettingTree(EDITOR_TEMPLATES_KEY);
        // BUGFIX: the sub-settings key was previously re-opened on every loop
        // iteration and never disposed; open it once and dispose it.
        using (SettingsPersisterHelper templates = _editorTemplateSettings.GetSubSettings(EDITOR_TEMPLATES_KEY))
        {
            for (int i = 0; i < value.Length; i++)
            {
                BlogEditingTemplateFile templateFile = value[i];
                templates.SetString(templateFile.TemplateType.ToString(), MakeRelative(templateFile.TemplateFile));
            }
        }
    }
}
private const string EDITOR_TEMPLATES_KEY = "templates";
/// <summary>
/// Returns the cached editing-template HTML for the given template type,
/// rewriting stale absolute path references (recorded in a sibling ".path"
/// file) and injecting an IE=edge meta tag when it is missing. Falls back to
/// the built-in default template when no cached file exists on disk.
/// </summary>
public string GetEditorTemplateHtml(BlogEditingTemplateType templateType, bool forceRTL)
{
    // BUGFIX: dispose the sub-settings key once the path has been read
    // (it was previously leaked; CleanupUnusedTemplates already uses "using").
    string templateHtmlFile;
    using (SettingsPersisterHelper templates = _editorTemplateSettings.GetSubSettings(EDITOR_TEMPLATES_KEY))
        templateHtmlFile = templates.GetString(templateType.ToString(), null);
    // Sometimes templateHtmlFile is relative, sometimes it is already absolute (from older builds).
    templateHtmlFile = MakeAbsolute(templateHtmlFile);
    if (templateHtmlFile != null && File.Exists(templateHtmlFile))
    {
        string templateHtml;
        using (StreamReader reader = new StreamReader(templateHtmlFile, Encoding.UTF8))
            templateHtml = reader.ReadToEnd();
        // The ".path" file records where the template was originally captured;
        // point those references at the template's current directory instead.
        if (File.Exists(templateHtmlFile + ".path"))
        {
            string origPath = File.ReadAllText(templateHtmlFile + ".path");
            string newPath = Path.Combine(Path.GetDirectoryName(templateHtmlFile), Path.GetFileName(origPath));
            string newUri = UrlHelper.SafeToAbsoluteUri(new Uri(newPath));
            templateHtml = templateHtml.Replace(origPath, newUri);
        }
        /*Parse meta tags in order to set CSS3 compatibility*/
        Regex metatag = new Regex(@"<(?i:meta)(\s)+(?i:http-equiv)(\s)*=""(?:X-UA-Compatible)""(\s)+(?i:content)(\s)*=""(?i:IE=edge)""(\s)*/>");
        Match match = metatag.Match(templateHtml);
        if (!match.Success)
        {
            // prepend the metatag to make css3 compatible at least on edge (Windows 8+)
            int i = templateHtml.IndexOf("<HEAD>", StringComparison.OrdinalIgnoreCase);
            if (i > 0)
            {
                templateHtml = templateHtml.Substring(0, i + 6)
                    + "<meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\" />"
                    + templateHtml.Substring(i + 6);
            }
        }
        return templateHtml;
    }
    else
    {
        // No cached file: synthesize the default template.
        return BlogEditingTemplate.GetDefaultTemplateHtml(forceRTL, templateType != BlogEditingTemplateType.Normal);
    }
}
// Null stays null; relative paths are rooted under the blog's template
// directory; already-absolute paths pass through unchanged.
private string MakeAbsolute(string templateHtmlFile)
{
    if (templateHtmlFile == null)
        return null;
    return Path.IsPathRooted(templateHtmlFile)
        ? templateHtmlFile
        : Path.Combine(BlogEditingTemplate.GetBlogTemplateDir(_blogId), templateHtmlFile);
}
// Inverse of MakeAbsolute: strips a rooted path down to its bare file name
// when the file actually lives in the blog's template directory; otherwise
// logs a failure and returns the path as given.
private string MakeRelative(string templateHtmlFile)
{
    if (templateHtmlFile == null)
        return null;
    if (!Path.IsPathRooted(templateHtmlFile))
        return templateHtmlFile;
    string fileName = Path.GetFileName(templateHtmlFile);
    string candidate = Path.Combine(BlogEditingTemplate.GetBlogTemplateDir(_blogId), fileName);
    if (File.Exists(candidate))
        return fileName;
    Trace.Fail("Failed to make relative path: " + templateHtmlFile);
    return templateHtmlFile;
}
// Deletes cached *.htm template files (and their supporting directories) that
// are no longer referenced by the persisted template settings. Best-effort:
// any failure is logged and swallowed.
internal void CleanupUnusedTemplates()
{
    try
    {
        using (SettingsPersisterHelper templates = _editorTemplateSettings.GetSubSettings(EDITOR_TEMPLATES_KEY))
        {
            // get the list of templates which are legit
            // (normalized to absolute, trimmed, lower-cased for comparison below)
            ArrayList templatesInUse = new ArrayList();
            foreach (string key in templates.GetNames())
                templatesInUse.Add(MakeAbsolute(templates.GetString(key, String.Empty)).Trim().ToLower(CultureInfo.CurrentCulture));
            // delete each of the template files in the directory which
            // are not contained in our list of valid templates
            if (templatesInUse.Count > 0)
            {
                // All templates live in the same directory; derive it from the first entry.
                string templateDirectory = Path.GetDirectoryName((string)templatesInUse[0]);
                if (Directory.Exists(templateDirectory))
                {
                    string[] templateFiles = Directory.GetFiles(templateDirectory, "*.htm");
                    foreach (string templateFile in templateFiles)
                    {
                        // Same normalization as above so the Contains check matches.
                        string templateFileNormalized = templateFile.Trim().ToLower(CultureInfo.CurrentCulture);
                        if (!templatesInUse.Contains(templateFileNormalized))
                            CleanupTemplateAndSupportingFiles(templateFile);
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        Trace.Fail("Error occurred cleaning up unused templates: " + ex.ToString());
    }
}
// Deletes a single cached template file plus the supporting-file directory it
// references (found by scanning the template HTML for a URL that points back
// into the template directory). Best-effort: failures are logged and swallowed.
private void CleanupTemplateAndSupportingFiles(string templateFile)
{
    try
    {
        // determine the name of the supporting file directory
        string templateContents;
        using (StreamReader reader = new StreamReader(templateFile, Encoding.UTF8))
            templateContents = reader.ReadToEnd().ToLower(CultureInfo.CurrentCulture);
        // determine the template path
        string templateDirectory = Path.GetDirectoryName(templateFile);
        string templatePathReference = UrlHelper.InsureTrailingSlash(UrlHelper.CreateUrlFromPath(templateDirectory)).Replace("%20", " ");
        int pathRefIndex = templateContents.IndexOf(templatePathReference.ToLower(CultureInfo.CurrentCulture));
        // if there are references to the template path within the file then
        // use it to derive the supporting file directory and delete it
        if (pathRefIndex != -1)
        {
            int endPathRefIndex = pathRefIndex + templatePathReference.Length;
            // NOTE(review): if no '/' follows the path reference, nextSlashIndex
            // is -1, length goes negative and Substring throws; the exception is
            // swallowed by the catch below, leaving the template file undeleted.
            int nextSlashIndex = templateContents.IndexOf('/', endPathRefIndex);
            int length = nextSlashIndex - endPathRefIndex;
            Trace.Assert(length > 0);
            string supportingFilePath = templateContents.Substring(endPathRefIndex, length);
            // delete the supporting file directory
            Directory.Delete(Path.Combine(templateDirectory, supportingFilePath), true);
        }
        // delete the template file
        File.Delete(templateFile);
    }
    catch (Exception ex)
    {
        Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "Error occurred cleaning up template {0}: {1}", templateFile, ex.ToString()));
    }
}
// Idempotent: releases the settings key once and clears the field so a
// second Dispose call is a no-op.
public void Dispose()
{
    if (_editorTemplateSettings == null)
        return;
    _editorTemplateSettings.Dispose();
    _editorTemplateSettings = null;
}
// The "EditorTemplate" sub-key opened in the constructor; null after Dispose.
private SettingsPersisterHelper _editorTemplateSettings;
}
}
| |
using Geofence.Plugin.Abstractions;
#if __UNIFIED__
using CoreLocation;
using UIKit;
using Foundation;
#else
using MonoTouch.CoreLocation;
using MonoTouch.UIKit;
using MonoTouch.Foundation;
#endif
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Collections.ObjectModel;
namespace Geofence.Plugin
{
/// <summary>
/// Implementation for Geofence
/// </summary>
public class GeofenceImplementation : IGeofence
{
// Single CLLocationManager instance driving all region monitoring.
CLLocationManager locationManager;
// Region definitions, seeded from persistent storage at startup.
private Dictionary<string, GeofenceCircularRegion> mRegions=GeofenceStore.SharedInstance.GetAll();
// Latest transition state per region id.
private Dictionary<string, GeofenceResult> mGeofenceResults;
/// <summary>
/// Monitored regions
/// </summary>
public IReadOnlyDictionary<string, GeofenceCircularRegion> Regions { get { return mRegions; } }
/// <summary>
/// Geofence results
/// </summary>
public IReadOnlyDictionary<string, GeofenceResult> GeofenceResults { get { return mGeofenceResults; } }
/// <summary>
/// Checks if is monitoring regions
/// </summary>
public bool IsMonitoring { get { return mRegions.Count > 0; } }
/// <summary>
/// This checks if we are currently prompting for location permissions to avoid the double prompt from multiple simultaneous regions
/// </summary>
bool isPromptingLocationPermission;
// Backing store for LastKnownLocation; null until the first fix arrives.
private GeofenceLocation lastKnownGeofenceLocation;
/// <summary>
/// Set this flag to false if the application is already asking the user for permissions to send Notifications.
/// </summary>
/// <value><c>true</c> if request notification permission; otherwise, <c>false</c>.</value>
public bool RequestNotificationPermission { get; set; }
/// <summary>
/// Set this flag to false if the application is already prompting the user to use Location Services.
/// </summary>
/// <value><c>true</c> if request location permission; otherwise, <c>false</c>.</value>
public bool RequestLocationPermission { get; set; }
// Action label used for local notifications raised by this plugin.
private const string ViewAction = "View";
/// <summary>
/// Last known location
/// </summary>
public GeofenceLocation LastKnownLocation { get { return lastKnownGeofenceLocation; } }
/// <summary>
/// Geofence plugin iOS implementation. Wires up the CLLocationManager,
/// applies the configured accuracy/displacement, and reconciles regions
/// already monitored by the OS with the persisted region store.
/// </summary>
public GeofenceImplementation()
{
    mGeofenceResults = new Dictionary<string, GeofenceResult>();
    // CLLocationManager and its event subscriptions are set up on the main
    // thread, as required by UIKit/CoreLocation.
    using (var pool = new NSAutoreleasePool())
    {
        pool.InvokeOnMainThread(() => {
            locationManager = new CLLocationManager();
            locationManager.DidStartMonitoringForRegion += DidStartMonitoringForRegion;
            locationManager.RegionEntered += RegionEntered;
            locationManager.RegionLeft += RegionLeft;
            locationManager.Failed += OnFailure;
            locationManager.DidDetermineState += DidDetermineState;
            locationManager.LocationsUpdated += LocationsUpdated;
        });
    }
    // Map the cross-platform priority setting onto a CoreLocation accuracy.
    string priorityType = "Balanced Power";
    switch(CrossGeofence.GeofencePriority)
    {
        case GeofencePriority.HighAccuracy:
            priorityType = "High Accuracy";
            locationManager.DesiredAccuracy = CLLocation.AccuracyBest;
            break;
        case GeofencePriority.AcceptableAccuracy:
            priorityType = "Acceptable Accuracy";
            locationManager.DesiredAccuracy = CLLocation.AccuracyNearestTenMeters;
            break;
        case GeofencePriority.MediumAccuracy:
            priorityType = "Medium Accuracy";
            locationManager.DesiredAccuracy = CLLocation.AccuracyHundredMeters;
            break;
        case GeofencePriority.LowAccuracy:
            priorityType = "Low Accuracy";
            locationManager.DesiredAccuracy = CLLocation.AccuracyKilometer;
            break;
        case GeofencePriority.LowestAccuracy:
            priorityType = "Lowest Accuracy";
            locationManager.DesiredAccuracy = CLLocation.AccuracyThreeKilometers;
            break;
        default:
            // "Balanced Power" default (note: misspelled member name is the
            // actual Xamarin binding for kCLLocationAccuracyBestForNavigation).
            locationManager.DesiredAccuracy = CLLocation.AccurracyBestForNavigation;
            break;
    }
    System.Diagnostics.Debug.WriteLine(string.Format("{0} - {1}: {2}", CrossGeofence.Id, "Location priority set to", priorityType));
    if(CrossGeofence.SmallestDisplacement>0)
    {
        locationManager.DistanceFilter = CrossGeofence.SmallestDisplacement;
        System.Diagnostics.Debug.WriteLine(string.Format("{0} - {1}: {2} meters", CrossGeofence.Id, "Location smallest displacement set to", CrossGeofence.SmallestDisplacement));
    }
    // Startup reconciliation: drop OS-monitored regions that are no longer
    // persisted, and re-request state for those that are.
    if (locationManager.MonitoredRegions.Count > 0 && IsMonitoring)
    {
        NSSet monitoredRegions = locationManager.MonitoredRegions;
        foreach (CLCircularRegion region in monitoredRegions)
        {
            //If not on regions remove on startup since that region was set not persistent
            if (!Regions.ContainsKey(region.Identifier))
            {
                locationManager.StopMonitoring(region);
            }
            else
            {
                locationManager.RequestState(region);
            }
        }
        locationManager.StartMonitoringSignificantLocationChanges();
        string message = string.Format("{0} - {1} {2} region(s)", CrossGeofence.Id, "Actually monitoring", locationManager.MonitoredRegions.Count);
        System.Diagnostics.Debug.WriteLine(message);
    }
    SetLastKnownLocation(locationManager.Location);
}
// Copies a CoreLocation fix into the cross-platform GeofenceLocation and
// notifies the listener; null fixes are ignored.
void SetLastKnownLocation(CLLocation location)
{
    if (location == null)
        return;
    if (lastKnownGeofenceLocation == null)
        lastKnownGeofenceLocation = new GeofenceLocation();
    lastKnownGeofenceLocation.Latitude = location.Coordinate.Latitude;
    lastKnownGeofenceLocation.Longitude = location.Coordinate.Longitude;
    lastKnownGeofenceLocation.Accuracy = location.HorizontalAccuracy;
    // NSDate timestamps count seconds from the 2001-01-01 reference date.
    DateTime referenceDate = TimeZone.CurrentTimeZone.ToLocalTime(new DateTime(2001, 1, 1, 0, 0, 0));
    lastKnownGeofenceLocation.Date = referenceDate.AddSeconds(location.Timestamp.SecondsSinceReferenceDate);
    CrossGeofence.GeofenceListener.OnLocationChanged(lastKnownGeofenceLocation);
}
// Significant-location-change callback: record the newest fix, then either
// re-pick the nearest regions (when over the iOS 20-region cap) or drop any
// OS-monitored region that is no longer persisted.
void LocationsUpdated(object sender, CLLocationsUpdatedEventArgs e)
{
    // The last element of the batch is the most recent fix.
    CLLocation newestLocation = e.Locations[e.Locations.Length - 1];
    SetLastKnownLocation(newestLocation);
    bool atIosRegionCap = Regions.Count > 20 && locationManager.MonitoredRegions.Count == 20;
    if (atIosRegionCap)
    {
        RecalculateRegions();
    }
    else
    {
        //Check any current monitored regions not in loaded persistent regions and stop monitoring them
        foreach (CLCircularRegion monitored in locationManager.MonitoredRegions)
        {
            if (Regions.ContainsKey(monitored.Identifier))
                continue;
            locationManager.StopMonitoring(monitored);
            System.Diagnostics.Debug.WriteLine(string.Format("{0} - Stopped monitoring region {1} wasn't in persistent loaded regions", CrossGeofence.Id, monitored.Identifier));
        }
    }
    System.Diagnostics.Debug.WriteLine(string.Format("{0} - {1}: {2},{3}", CrossGeofence.Id, "Location update", newestLocation.Coordinate.Latitude, newestLocation.Coordinate.Longitude));
}
/// <summary>
/// Haversine great-circle distance between two WGS-ish coordinates, in meters.
/// </summary>
/// <param name="lat1">First latitude in degrees.</param>
/// <param name="lon1">First longitude in degrees.</param>
/// <param name="lat2">Second latitude in degrees.</param>
/// <param name="lon2">Second longitude in degrees.</param>
/// <returns>Distance in meters.</returns>
public double CalculateDistance(double lat1,double lon1,double lat2,double lon2)
{
    const double EarthRadiusKm = 6372.8;
    double dLat = Math.PI * (lat2 - lat1) / 180.0;
    double dLon = Math.PI * (lon2 - lon1) / 180.0;
    double rLat1 = Math.PI * (lat1) / 180.0;
    double rLat2 = Math.PI * (lat2) / 180.0;
    double sinHalfLat = Math.Sin(dLat / 2);
    double sinHalfLon = Math.Sin(dLon / 2);
    double haversine = sinHalfLat * sinHalfLat + sinHalfLon * sinHalfLon * Math.Cos(rLat1) * Math.Cos(rLat2);
    double centralAngle = 2 * Math.Asin(Math.Sqrt(haversine));
    return (EarthRadiusKm * centralAngle) * 1000; //meters
}
// Initial region-state callback (requested after StartMonitoring): routes to
// the same handlers used for live enter/exit events.
void DidDetermineState(object sender, CLRegionStateDeterminedEventArgs e)
{
    if (e.State == CLRegionState.Inside)
    {
        System.Diagnostics.Debug.WriteLine(string.Format("{0} - {1}", CrossGeofence.Id, "InsideRegion: " + e.Region));
        OnRegionEntered(e.Region);
    }
    else if (e.State == CLRegionState.Outside)
    {
        System.Diagnostics.Debug.WriteLine(string.Format("{0} - {1}", CrossGeofence.Id, "OutsideRegion: " + e.Region));
        OnRegionLeft(e.Region);
    }
    else
    {
        string message = string.Format("{0} - {1}", CrossGeofence.Id, "Unknown region state");
        System.Diagnostics.Debug.WriteLine(message);
    }
}
// A location-manager failure invalidates monitoring; tear everything down
// before surfacing the error to the listener.
void OnFailure(object sender, NSErrorEventArgs e)
{
    if (IsMonitoring)
        StopMonitoringAllRegions();
    CrossGeofence.GeofenceListener.OnError(e.Error.LocalizedDescription);
}
// Live RegionEntered event from CoreLocation; delegates to the shared handler.
void RegionEntered(object sender, CLRegionEventArgs e)
{
    OnRegionEntered(e.Region);
}
// Records an "entered" transition for the region, notifies listeners and
// raises the entry notification when configured, then kicks off the async
// stayed-in check. Duplicate enter events are suppressed.
void OnRegionEntered(CLRegion region)
{
    string id = region.Identifier;
    GeofenceResult result;
    if (mGeofenceResults.TryGetValue(id, out result))
    {
        // Already inside: nothing to do.
        if (result.Transition == GeofenceTransition.Entered)
            return;
    }
    else
    {
        result = new GeofenceResult()
        {
            RegionId = id
        };
        mGeofenceResults.Add(id, result);
    }
    // Prefer the device's last fix; fall back to the region's own geometry.
    GeofenceLocation known = LastKnownLocation;
    if (known != null)
    {
        result.Latitude = known.Latitude;
        result.Longitude = known.Longitude;
        result.Accuracy = known.Accuracy;
    }
    else
    {
        result.Latitude = region.Center.Latitude;
        result.Longitude = region.Center.Longitude;
        result.Accuracy = region.Radius;
    }
    result.LastEnterTime = DateTime.Now;
    result.LastExitTime = null;
    result.Transition = GeofenceTransition.Entered;
    // Mirror the configured entry flag onto the native region.
    GeofenceCircularRegion configured;
    if (mRegions.TryGetValue(id, out configured))
        region.NotifyOnEntry = configured.NotifyOnEntry;
    if (region.NotifyOnEntry)
    {
        CrossGeofence.GeofenceListener.OnRegionStateChanged(result);
        if (configured != null && configured.ShowNotification && configured.ShowEntryNotification)
        {
            CreateNotification(ViewAction, string.IsNullOrEmpty(configured.NotificationEntryMessage) ? result.ToString() : configured.NotificationEntryMessage);
        }
    }
    Task.Factory.StartNew(async () =>
    {
        // Checks asynchronously whether the device stays inside the region.
        await CheckIfStayed(id);
    });
}
/// <summary>
/// Checks if has passed to stayed state: after the region's configured
/// stay threshold elapses, promotes an Entered transition to Stayed
/// (and optionally raises a notification).
/// </summary>
/// <param name="regionId">Identifier of the region to check.</param>
/// <returns>Task completing after the threshold delay and state update.</returns>
public async Task CheckIfStayed(string regionId)
{
    // A threshold of 0 means stay-detection is disabled for this region.
    if (GeofenceRegionExists(regionId) && CrossGeofence.Current.Regions[regionId].NotifyOnStay && CrossGeofence.Current.GeofenceResults[regionId].Transition == GeofenceTransition.Entered && CrossGeofence.Current.Regions[regionId].StayedInThresholdDuration.TotalMilliseconds != 0)
    {
        await Task.Delay((int)CrossGeofence.Current.Regions[regionId].StayedInThresholdDuration.TotalMilliseconds);
        // Deliberately re-read all state after the delay: the device may have
        // exited, the region may have been removed, or another check may have
        // already promoted the transition in the meantime.
        if (GeofenceRegionExists(regionId) && CrossGeofence.Current.GeofenceResults[regionId].LastExitTime == null && CrossGeofence.Current.GeofenceResults[regionId].Transition != GeofenceTransition.Stayed)
        {
            CrossGeofence.Current.GeofenceResults[regionId].Transition = GeofenceTransition.Stayed;
            CrossGeofence.GeofenceListener.OnRegionStateChanged(CrossGeofence.Current.GeofenceResults[regionId]);
            if (CrossGeofence.Current.Regions[regionId].ShowNotification && CrossGeofence.Current.Regions[regionId].ShowStayNotification)
            {
                CreateNotification(ViewAction, string.IsNullOrEmpty(CrossGeofence.Current.Regions[regionId].NotificationStayMessage) ? CrossGeofence.Current.GeofenceResults[regionId].ToString() : CrossGeofence.Current.Regions[regionId].NotificationStayMessage);
            }
        }
    }
}
/// <summary>
/// Checks whether a region is known both as a tracked result and as a
/// configured region — both are required for stay-detection.
/// </summary>
/// <returns><c>true</c>, if it exists, <c>false</c> otherwise.</returns>
/// <param name="regionId">Region identifier.</param>
bool GeofenceRegionExists(string regionId)
{
    var current = CrossGeofence.Current;
    return current.GeofenceResults.ContainsKey(regionId)
        && current.Regions.ContainsKey(regionId);
}
// Live RegionLeft event from CoreLocation; delegates to the shared handler.
void RegionLeft(object sender, CLRegionEventArgs e)
{
    OnRegionLeft(e.Region);
}
// Records an "exited" transition for the region, notifies listeners, and
// raises the exit notification when configured. Duplicate exit events are
// suppressed.
void OnRegionLeft(CLRegion region)
{
    string id = region.Identifier;
    GeofenceResult result;
    if (mGeofenceResults.TryGetValue(id, out result))
    {
        // Already outside: nothing to do.
        if (result.Transition == GeofenceTransition.Exited)
            return;
    }
    else
    {
        result = new GeofenceResult()
        {
            RegionId = id
        };
        mGeofenceResults.Add(id, result);
    }
    // Prefer the device's last fix; fall back to the region's own geometry.
    GeofenceLocation known = LastKnownLocation;
    if (known != null)
    {
        result.Latitude = known.Latitude;
        result.Longitude = known.Longitude;
        result.Accuracy = known.Accuracy;
    }
    else
    {
        result.Latitude = region.Center.Latitude;
        result.Longitude = region.Center.Longitude;
        result.Accuracy = region.Radius;
    }
    result.LastExitTime = DateTime.Now;
    result.Transition = GeofenceTransition.Exited;
    CrossGeofence.GeofenceListener.OnRegionStateChanged(result);
    GeofenceCircularRegion configured;
    if (Regions.TryGetValue(id, out configured) && configured.ShowNotification && configured.ShowExitNotification)
    {
        CreateNotification(ViewAction, string.IsNullOrEmpty(configured.NotificationExitMessage) ? result.ToString() : configured.NotificationExitMessage);
    }
}
// CoreLocation confirms monitoring has begun; forward the id to the listener.
void DidStartMonitoringForRegion(object sender, CLRegionEventArgs e)
{
    CrossGeofence.GeofenceListener.OnMonitoringStarted(e.Region.Identifier);
}
/// <summary>
/// Checks if region monitoring is available: requests Always authorization,
/// verifies Location Services are enabled and authorized, and (optionally)
/// registers for local-notification permission. Errors are reported to the
/// listener rather than thrown.
/// </summary>
/// <returns>True when monitoring can proceed.</returns>
public bool AvailableForMonitoring()
{
    bool retVal = false;
    RequestAlwaysAuthorization();
    if (!CLLocationManager.LocationServicesEnabled)
    {
        string message = string.Format("{0} - {1}", CrossGeofence.Id, "You need to enable Location Services");
        System.Diagnostics.Debug.WriteLine(message);
        CrossGeofence.GeofenceListener.OnError(message);
    }
    else if (CLLocationManager.Status == CLAuthorizationStatus.Denied || CLLocationManager.Status == CLAuthorizationStatus.Restricted)
    {
        string message = string.Format("{0} - {1}", CrossGeofence.Id, "You need to authorize Location Services");
        System.Diagnostics.Debug.WriteLine(message);
        CrossGeofence.GeofenceListener.OnError(message);
    }
    else if (CLLocationManager.IsMonitoringAvailable(typeof(CLRegion)))
    {
        // Host apps that manage notification permission themselves can opt out
        // via RequestNotificationPermission.
        if (RequestNotificationPermission)
        {
            // UIKit registration must happen on the main thread.
            using (var pool = new NSAutoreleasePool())
            {
                pool.InvokeOnMainThread(() => {
                    var settings = UIUserNotificationSettings.GetSettingsForTypes(
                        UIUserNotificationType.Alert
                        | UIUserNotificationType.Badge
                        | UIUserNotificationType.Sound,
                        new NSSet());
                    UIApplication.SharedApplication.RegisterUserNotificationSettings(settings);
                });
            }
        }
        retVal = true;
    }
    else
    {
        string message = string.Format("{0} - {1}", CrossGeofence.Id, "Not available for monitoring");
        System.Diagnostics.Debug.WriteLine(message);
        CrossGeofence.GeofenceListener.OnError(message);
    }
    return retVal;
}
/// <summary>
/// Starts monitoring a single region: upserts and persists its definition,
/// then registers it with CoreLocation (re-picking the nearest regions when
/// the iOS 20-region limit is saturated).
/// </summary>
/// <param name="region">Region to monitor.</param>
public void StartMonitoring(GeofenceCircularRegion region)
{
    if (!AvailableForMonitoring())
        return;
    // Indexer assignment covers both the add and the update case.
    mRegions[region.Id] = region;
    GeofenceStore.SharedInstance.Save(region);
    if (Regions.Count > 20 && locationManager.MonitoredRegions.Count == 20)
        RecalculateRegions();
    else
        AddRegion(region);
    locationManager.StartMonitoringSignificantLocationChanges();
}
// Drops every currently monitored region and re-registers only the (up to)
// 20 nearest to the device, respecting the iOS monitoring limit.
void RecalculateRegions()
{
    IList<GeofenceCircularRegion> allRegions = Regions.Values.ToList();
    foreach (CLCircularRegion monitored in locationManager.MonitoredRegions)
    {
        locationManager.StopMonitoring(monitored);
    }
    foreach (GeofenceCircularRegion nearest in GetCurrentRegions(allRegions))
    {
        AddRegion(nearest);
    }
    string message = string.Format("{0} - {1}", CrossGeofence.Id, "Restarted monitoring to nearest 20 regions");
    System.Diagnostics.Debug.WriteLine(message);
}
// Registers one region with CoreLocation, clamping the radius to the OS
// maximum and scheduling a delayed state request.
void AddRegion(GeofenceCircularRegion region)
{
    double radius = (region.Radius > locationManager.MaximumRegionMonitoringDistance)
        ? locationManager.MaximumRegionMonitoringDistance
        : region.Radius;
    var center = new CLLocationCoordinate2D(region.Latitude, region.Longitude);
    // CLCircularRegion is only available from iOS 7 onwards.
    CLRegion cRegion = UIDevice.CurrentDevice.CheckSystemVersion(7, 0)
        ? new CLCircularRegion(center, radius, region.Id)
        : new CLRegion(center, radius, region.Id);
    // Stay detection piggybacks on the entry notification.
    cRegion.NotifyOnEntry = region.NotifyOnEntry || region.NotifyOnStay;
    cRegion.NotifyOnExit = region.NotifyOnExit;
    locationManager.StartMonitoring(cRegion);
    // Request state for this region, putting request behind a timer per thread: http://stackoverflow.com/questions/24543814/diddeterminestate-not-always-called
    Task.Run(async () =>
    {
        await Task.Delay(TimeSpan.FromSeconds(2));
        locationManager.RequestState(cRegion);
    });
}
/// <summary>
/// Starts monitoring a batch of regions: upserts and persists every
/// definition first, then registers them with CoreLocation (re-picking the
/// nearest regions when the iOS 20-region limit is saturated).
/// </summary>
/// <param name="regions">Regions to monitor.</param>
public void StartMonitoring(IList<GeofenceCircularRegion> regions)
{
    if (!AvailableForMonitoring())
        return;
    foreach (var region in regions)
    {
        // Indexer assignment covers both the add and the update case.
        mRegions[region.Id] = region;
        GeofenceStore.SharedInstance.Save(region);
    }
    if (Regions.Count > 20 && locationManager.MonitoredRegions.Count == 20)
    {
        RecalculateRegions();
    }
    else
    {
        foreach (var region in regions)
            AddRegion(region);
    }
    locationManager.StartMonitoringSignificantLocationChanges();
}
/// <summary>
/// Trims a region list down to the (up to) 20 regions nearest the device's
/// current location; when no fix is available, keeps the first 20.
/// </summary>
/// <param name="regions">Candidate regions.</param>
/// <returns>At most 20 regions to monitor.</returns>
public IList<GeofenceCircularRegion> GetCurrentRegions(IList<GeofenceCircularRegion> regions)
{
    // At or under the limit: nothing to trim.
    if (regions.Count <= 20)
        return regions;
    CLLocation location = locationManager.Location;
    if (location == null)
        return regions.Take(20).ToList();
    return regions
        .OrderBy(r => CalculateDistance(location.Coordinate.Latitude, location.Coordinate.Longitude, r.Latitude, r.Longitude))
        .Take(20)
        .ToList();
}
/// <summary>
/// Stops monitoring all regions: clears persisted definitions, unhooks every
/// region from CoreLocation, and resets in-memory state.
/// </summary>
public void StopMonitoringAllRegions()
{
    if (!AvailableForMonitoring())
        return;
    GeofenceStore.SharedInstance.RemoveAll();
    foreach (CLCircularRegion monitored in locationManager.MonitoredRegions)
    {
        locationManager.StopMonitoring(monitored);
    }
    locationManager.StopMonitoringSignificantLocationChanges();
    mRegions.Clear();
    mGeofenceResults.Clear();
    CrossGeofence.GeofenceListener.OnMonitoringStopped();
}
/// <summary>
/// Stops monitoring a single region by identifier; fires the global
/// stopped callback once the last region is gone.
/// </summary>
/// <param name="regionIdentifier">Identifier of the region to stop.</param>
public void StopMonitoring(string regionIdentifier)
{
    if (!CLLocationManager.IsMonitoringAvailable(typeof(CLRegion)))
        return;
    RemoveRegionMonitoring(regionIdentifier);
    CrossGeofence.GeofenceListener.OnMonitoringStopped(regionIdentifier);
    if (mRegions.Count == 0)
        CrossGeofence.GeofenceListener.OnMonitoringStopped();
}
/// <summary>
/// Stops monitoring a batch of regions by identifier.
/// </summary>
/// <param name="regionIdentifiers">Identifiers of the regions to stop.</param>
public void StopMonitoring(IList<string> regionIdentifiers)
{
    if (!AvailableForMonitoring())
        return;
    foreach (string regionIdentifier in regionIdentifiers)
        StopMonitoring(regionIdentifier);
}
// Removes a region from every store: in-memory definition and result maps,
// persistent storage, and CoreLocation monitoring.
private void RemoveRegionMonitoring(string regionIdentifier)
{
    // Dictionary.Remove is a no-op when the key is absent.
    mRegions.Remove(regionIdentifier);
    mGeofenceResults.Remove(regionIdentifier);
    GeofenceStore.SharedInstance.Remove(regionIdentifier);
    CLRegion monitored = GetRegion(regionIdentifier);
    if (monitored != null)
        locationManager.StopMonitoring(monitored);
}
// Finds the OS-monitored region with the given identifier, or null.
private CLRegion GetRegion(string identifier)
{
    foreach (CLRegion candidate in locationManager.MonitoredRegions)
    {
        if (candidate.Identifier.Equals(identifier, StringComparison.Ordinal))
            return candidate;
    }
    return null;
}
// Presents a local notification with the given action label and body.
// Skipped entirely when the app has no notification permission.
void CreateNotification(string title,string message)
{
    // Do nothing if we have no notification permission at this time, or we will get a buildup of stale notifications
    if (UIApplication.SharedApplication.CurrentUserNotificationSettings.Types == UIUserNotificationType.None)
        return;
    // Create notifications on main thread, this method can be invoked from a background thread
    using (var pool = new NSAutoreleasePool())
    {
        pool.InvokeOnMainThread(() =>
        {
            var notification = new UILocalNotification();
            notification.AlertAction = title;
            notification.AlertBody = message;
            notification.HasAction = true;
            notification.SoundName = UILocalNotification.DefaultSoundName;
#if __UNIFIED__
            UIApplication.SharedApplication.PresentLocalNotificationNow(notification);
#else
            UIApplication.SharedApplication.PresentLocationNotificationNow(notification);
#endif
        });
    }
}
// Prompts for "Always" location authorization. When the user has denied
// access or only granted when-in-use, shows an alert offering to open the
// app's Settings page; when undetermined, triggers the system prompt.
// Guarded so multiple simultaneous regions don't stack prompts.
void RequestAlwaysAuthorization()
{
    // Honor the host app's opt-out.
    if (!RequestLocationPermission)
        return;
    if (isPromptingLocationPermission)
        return;
    isPromptingLocationPermission = true;
    CLAuthorizationStatus status = CLLocationManager.Status;
    if (status == CLAuthorizationStatus.AuthorizedWhenInUse || status == CLAuthorizationStatus.Denied)
    {
        // UIKit work must run on the main thread.
        using (var pool = new NSAutoreleasePool())
        {
            pool.InvokeOnMainThread(() => {
                UIAlertView alertView = new UIAlertView()
                {
                    Title = (status == CLAuthorizationStatus.Denied) ? "Location services are off" : "Background location is not enabled",
                    Message = "To use background location you must turn on 'Always' in the Location Services Settings"
                };
                alertView.AddButton("OK");
                alertView.Clicked += (sender, buttonArgs) =>
                {
                    // BUGFIX: "OK" is the only button and UIAlertView indexes
                    // buttons from 0, so the previous check (== 1) could never
                    // match and the Settings page never opened.
                    if (buttonArgs.ButtonIndex == 0)
                    {
                        // Send the user to the Settings for this app
                        NSUrl settingsUrl = new NSUrl(UIApplication.OpenSettingsUrlString);
                        UIApplication.SharedApplication.OpenUrl(settingsUrl);
                    }
                    isPromptingLocationPermission = false;
                };
                alertView.Show();
            });
        }
    }
    else if (status == CLAuthorizationStatus.NotDetermined)
    {
        locationManager.RequestAlwaysAuthorization();
    }
}
/// <summary>
/// Reports whether location access is effectively enabled for geofencing.
/// On iOS only the AuthorizedAlways status counts as enabled.
/// </summary>
/// <param name="returnAction">Callback receiving the result.</param>
public void IsLocationEnabled(Action<bool> returnAction)
{
    bool authorizedAlways = CLLocationManager.Status == CLAuthorizationStatus.AuthorizedAlways;
    returnAction(authorizedAlways);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.MultiCluster;
using Orleans.Runtime.MembershipService;
namespace Orleans.Runtime.MultiClusterNetwork
{
internal class MultiClusterOracle : SystemTarget, IMultiClusterOracle, ISiloStatusListener, IMultiClusterGossipService
{
// Factory for the gossip channels this oracle publishes/receives on.
private readonly MultiClusterGossipChannelFactory channelFactory;
// as a backup measure, current local active status is sent occasionally
public static readonly TimeSpan ResendActiveStatusAfter = TimeSpan.FromMinutes(10);
// time after which this gateway removes other gateways in this same cluster that are known to be gone
public static readonly TimeSpan CleanupSilentGoneGatewaysAfter = TimeSpan.FromSeconds(30);
// Local view of the multi-cluster gossip data.
private readonly MultiClusterOracleData localData;
private readonly ILogger logger;
private readonly SafeRandom random;
// Id of the cluster this silo belongs to.
private readonly string clusterId;
private readonly IReadOnlyList<string> defaultMultiCluster;
private readonly bool multiClusterActive;
private readonly int maxMultiClusterGateways;
// Jittered in the constructor to avoid convoying across silos.
private readonly TimeSpan backgroundGossipInterval;
// Per-silo jittered copy of ResendActiveStatusAfter.
private TimeSpan resendActiveStatusAfter;
// Guards updates to cachedSnapshot/gatewayCache.
private readonly object gatewayCacheUpdateLock = new object();
private MembershipTableSnapshot cachedSnapshot;
private List<SiloAddress> gatewayCache = new List<SiloAddress>();
private List<IGossipChannel> gossipChannels;
private IGrainTimer timer;
private readonly ISiloStatusOracle siloStatusOracle;
private readonly MembershipTableManager tableManager;
private readonly IInternalGrainFactory grainFactory;
private MultiClusterConfiguration injectedConfig;
private readonly ILoggerFactory loggerFactory;
/// <summary>
/// Creates the multi-cluster gossip oracle for this silo: captures collaborating
/// services, reads the multi-cluster options snapshot, and derives the (slightly
/// randomized) gossip timing parameters.
/// </summary>
public MultiClusterOracle(
    ILocalSiloDetails siloDetails,
    MultiClusterGossipChannelFactory channelFactory,
    ISiloStatusOracle siloStatusOracle,
    MembershipTableManager tableManager,
    IInternalGrainFactory grainFactory,
    ILoggerFactory loggerFactory,
    IOptions<MultiClusterOptions> multiClusterOptions)
    : base(Constants.MultiClusterOracleId, siloDetails.SiloAddress, loggerFactory)
{
    this.loggerFactory = loggerFactory;
    this.channelFactory = channelFactory;
    this.siloStatusOracle = siloStatusOracle;
    this.tableManager = tableManager;
    this.grainFactory = grainFactory;
    logger = loggerFactory.CreateLogger<MultiClusterOracle>();
    localData = new MultiClusterOracleData(logger, grainFactory);
    clusterId = siloDetails.ClusterId;
    var multiClusterOptionsSnapshot = multiClusterOptions.Value;
    // may be null when no default multi-cluster is configured
    defaultMultiCluster = multiClusterOptionsSnapshot.DefaultMultiCluster?.ToList();
    this.multiClusterActive = multiClusterOptionsSnapshot.HasMultiClusterNetwork;
    this.maxMultiClusterGateways = multiClusterOptionsSnapshot.MaxMultiClusterGateways;
    random = new SafeRandom();
    // to avoid convoying, each silo varies these period intervals a little
    backgroundGossipInterval = RandomizeTimespanSlightly(multiClusterOptionsSnapshot.BackgroundGossipInterval);
    resendActiveStatusAfter = RandomizeTimespanSlightly(ResendActiveStatusAfter);
}
// Randomize a timespan a little (add between 0% and 5%) so that silos with
// identical configuration do not all gossip in lock-step.
private TimeSpan RandomizeTimespanSlightly(TimeSpan value)
{
    var jitterFactor = 1 + (random.NextDouble() * 0.05);
    return TimeSpan.FromMilliseconds(value.TotalMilliseconds * jitterFactor);
}
// True when the given silo is currently known as an Active gateway.
public bool IsFunctionalClusterGateway(SiloAddress siloAddress)
{
    GatewayEntry entry;
    if (!localData.Current.Gateways.TryGetValue(siloAddress, out entry))
        return false;
    return entry.Status == GatewayStatus.Active;
}
// Names of all clusters that currently have at least one active gateway.
public IEnumerable<string> GetActiveClusters() => localData.ActiveGatewaysByCluster.Keys;
// All gateway entries (of any status) currently known to this oracle.
public IEnumerable<GatewayEntry> GetGateways() => localData.Current.Gateways.Values;
// Picks a random active gateway of the given cluster, or null when the
// cluster has no active gateways.
public SiloAddress GetRandomClusterGateway(string cluster)
{
    List<SiloAddress> gateways;
    if (localData.ActiveGatewaysByCluster.TryGetValue(cluster, out gateways))
    {
        return gateways[random.Next(gateways.Count)];
    }
    return null;
}
// The currently applied multi-cluster configuration (may be null before any
// configuration has been learned or injected).
public MultiClusterConfiguration GetMultiClusterConfiguration() => localData.Current.Configuration;
/// <summary>
/// Applies an administrator-supplied multi-cluster configuration and pushes it
/// out via gossip. Waits for the gossip-channel workers to finish their current
/// work and surfaces any channel failures as an AggregateException.
/// </summary>
public async Task InjectMultiClusterConfiguration(MultiClusterConfiguration config)
{
    this.injectedConfig = config;
    logger.Info("Starting MultiClusterConfiguration Injection, configuration={0} ", config);
    // PublishChanges picks up injectedConfig (via InjectConfiguration) and queues it on all workers
    PublishChanges();
    // wait for the gossip channel tasks and aggregate exceptions
    var currentChannelTasks = this.channelWorkers.Values.ToList();
    await Task.WhenAll(currentChannelTasks.Select(ct => ct.WaitForCurrentWorkToBeServiced()));
    var exceptions = currentChannelTasks
        .Where(ct => ct.LastException != null)
        .Select(ct => ct.LastException)
        .ToList();
    logger.Info("Completed MultiClusterConfiguration Injection, {0} exceptions", exceptions.Count);
    if (exceptions.Count > 0)
        throw new AggregateException(exceptions);
}
// Membership listener callback: re-assess and publish gateway changes on the
// oracle's own scheduler, swallowing (but logging via SafeExecute) any errors.
public void SiloStatusChangeNotification(SiloAddress updatedSilo, SiloStatus status)
{
    // any status change can cause changes in gateway list
    this.ScheduleTask(() => Utils.SafeExecute(() => this.PublishChanges())).Ignore();
}
// Registers a listener for configuration-change notifications; delegates to the local data store.
public bool SubscribeToMultiClusterConfigurationEvents(IMultiClusterConfigurationListener listener)
    => localData.SubscribeToMultiClusterConfigurationEvents(listener);
// Removes a previously registered configuration-change listener; delegates to the local data store.
public bool UnSubscribeFromMultiClusterConfigurationEvents(IMultiClusterConfigurationListener listener)
    => localData.UnSubscribeFromMultiClusterConfigurationEvents(listener);
/// <inheritdoc/>
// NOTE(review): not referenced anywhere in this file; presumably consumed by
// log-consistency protocol plumbing elsewhere — verify before removing.
public Func<ILogConsistencyProtocolMessage, bool> ProtocolMessageFilterForTesting { get; set; }
/// <summary>
/// Starts the oracle: creates the gossip channels, pulls initial state from
/// them, injects the default multi-cluster configuration if none was found,
/// subscribes to silo status events, and starts the periodic gossip timer.
/// </summary>
public async Task Start()
{
    logger.Info(ErrorCode.MultiClusterNetwork_Starting, "MultiClusterOracle starting on {0}", Silo);
    try
    {
        if (string.IsNullOrEmpty(clusterId))
            throw new OrleansException("Internal Error: missing cluster id");
        gossipChannels = await this.channelFactory.CreateGossipChannels();
        if (gossipChannels.Count == 0)
            logger.Warn(ErrorCode.MultiClusterNetwork_NoChannelsConfigured, "No gossip channels are configured.");
        // startup: pull all the info from the tables, then inject default multi cluster if none found
        foreach (var ch in gossipChannels)
        {
            GetChannelWorker(ch).Synchronize();
        }
        await Task.WhenAll(this.channelWorkers.Select(kvp => kvp.Value.WaitForCurrentWorkToBeServiced()));
        if (GetMultiClusterConfiguration() == null && defaultMultiCluster != null)
        {
            // stored in injectedConfig; the PublishChanges call below applies and gossips it
            this.injectedConfig = new MultiClusterConfiguration(DateTime.UtcNow, defaultMultiCluster, "DefaultMultiCluster");
            logger.Info("No configuration found. Using default configuration {0} ", this.injectedConfig);
        }
        this.siloStatusOracle.SubscribeToSiloStatusEvents(this);
        PublishChanges();
        StartTimer(); // for periodic full bulk gossip
        logger.Info(ErrorCode.MultiClusterNetwork_Starting, "MultiClusterOracle started on {0} ", Silo);
    }
    catch (Exception exc)
    {
        logger.Error(ErrorCode.MultiClusterNetwork_FailedToStart, "MultiClusterOracle failed to start {0}", exc);
        throw;
    }
}
// (Re)starts the periodic background gossip timer, disposing any previous one.
private void StartTimer()
{
    // null-conditional dispose replaces the explicit null check
    timer?.Dispose();
    timer = GrainTimer.FromTimerCallback(
        this.RuntimeClient.Scheduler,
        this.loggerFactory.CreateLogger<GrainTimer>(),
        this.OnGossipTimerTick,
        null,
        this.backgroundGossipInterval,
        this.backgroundGossipInterval,
        "MultiCluster.GossipTimer");
    timer.Start();
}
/// <summary>
/// Returns the silos of this cluster designated as multi-cluster gateways,
/// derived from the current membership snapshot. The result is cached per
/// snapshot instance; recomputation happens under a lock only when the
/// snapshot reference changes.
/// </summary>
public List<SiloAddress> GetApproximateMultiClusterGateways()
{
    // fast path: reference-equality check against the snapshot processed last
    if (ReferenceEquals(this.cachedSnapshot, this.tableManager.MembershipTableSnapshot))
    {
        return this.gatewayCache;
    }
    lock (this.gatewayCacheUpdateLock)
    {
        // double-checked: another thread may have refreshed the cache already
        var currentMembership = this.tableManager.MembershipTableSnapshot;
        if (ReferenceEquals(this.cachedSnapshot, currentMembership))
        {
            return this.gatewayCache;
        }
        // collect all active silos (entries kept in parallel with addresses)
        var activeSilos = new List<SiloAddress>();
        var activeEntries = new List<MembershipEntry>();
        foreach (var entry in currentMembership.Entries)
        {
            var silo = entry.Key;
            var status = entry.Value.Status;
            if (status == SiloStatus.Active)
            {
                activeEntries.Add(entry.Value);
                activeSilos.Add(entry.Key);
            }
        }
        List<SiloAddress> result;
        // take all the active silos if their count does not exceed the desired number of gateways
        if (activeEntries.Count <= this.maxMultiClusterGateways)
        {
            result = activeSilos;
        }
        else
        {
            // otherwise choose a deterministic balanced subset, so that every
            // silo computing this list independently arrives at the same answer
            result = MembershipHelper.DeterministicBalancedChoice(
                activeSilos,
                this.maxMultiClusterGateways,
                (SiloAddress a) => new UpdateFaultCombo(currentMembership.Entries[a]),
                logger);
        }
        if (logger.IsEnabled(LogLevel.Debug))
        {
            var gateways = string.Join(", ", result.Select(silo => silo.ToString()));
            logger.Debug($"-DetermineMultiClusterGateways {gateways}");
        }
        // publish the refreshed cache; the snapshot reference is the cache key
        Interlocked.Exchange(ref this.cachedSnapshot, currentMembership);
        this.gatewayCache = result;
        return result;
    }
}
// Timer callback: on every tick, publish any pending local changes and then
// do one round of random full background gossip.
private void OnGossipTimerTick(object _)
{
    logger.Trace("-timer");
    PublishChanges();
    PeriodicBackgroundGossip();
}
// called in response to changed status, and periodically
/// <summary>
/// Computes what changed locally (own gateway status, injected configuration,
/// stale gateway demotions) and, if anything did, publishes the deltas to all
/// remote clusters, all local silos, and all gossip channels.
/// </summary>
private void PublishChanges()
{
    logger.Debug("--- PublishChanges: assess");
    var activeLocalGateways = this.GetApproximateMultiClusterGateways();
    var iAmGateway = activeLocalGateways.Contains(Silo);
    // collect deltas that need to be published to all other gateways.
    // Most of the time, this will contain just zero or one change.
    var deltas = new MultiClusterData();
    // Determine local status, and add to deltas if it changed
    InjectLocalStatus(iAmGateway, ref deltas);
    // Determine if admin has injected a new configuration, and add to deltas if that is the case
    InjectConfiguration(ref deltas);
    // Determine if there are some stale gateway entries of this cluster that should be demoted,
    // and add those demotions to deltas
    if (iAmGateway)
        DemoteLocalGateways(activeLocalGateways, ref deltas);
    if (logger.IsEnabled(LogLevel.Debug))
        logger.Debug("--- PublishChanges: found activeGateways={0} iAmGateway={1} publish={2}",
            string.Join(",", activeLocalGateways), iAmGateway, deltas);
    if (!deltas.IsEmpty)
    {
        // Now we do the actual publishing. Note that we publish deltas only once and
        // simply log any errors without retrying. To handle problems
        // caused by lost messages we rely instead on the periodic background gossip:
        // each node periodically does full two-way gossip (Synchronize) with
        // some random other node or channel. This ensures all information
        // eventually gets everywhere.
        // publish deltas to all remote clusters
        foreach (var x in this.AllClusters().Where(x => x != this.clusterId))
        {
            GetClusterWorker(x).Publish(deltas);
        }
        // publish deltas to all local silos
        var activeLocalClusterSilos = this.GetApproximateOtherActiveSilos();
        foreach (var activeLocalClusterSilo in activeLocalClusterSilos)
        {
            GetSiloWorker(activeLocalClusterSilo).Publish(deltas);
        }
        // publish deltas to all gossip channels
        foreach (var ch in gossipChannels)
        {
            GetChannelWorker(ch).Publish(deltas);
        }
    }
    if (deltas.Gateways.ContainsKey(this.Silo) && deltas.Gateways[this.Silo].Status == GatewayStatus.Active)
    {
        // Fully synchronize with channels if we just went active, which helps with initial startup time.
        // Note: doing a partial publish just before this full synchronize is by design, so that it reduces stabilization
        // time when several Silos are starting up at the same time, and there already is information about each other
        // before they attempt the full gossip
        foreach (var ch in gossipChannels)
        {
            GetChannelWorker(ch).Synchronize();
        }
    }
    logger.Debug("--- PublishChanges: done");
}
// All silos of this cluster other than this one whose status is Active,
// according to the membership oracle's approximate view.
private IEnumerable<SiloAddress> GetApproximateOtherActiveSilos()
{
    foreach (var kvp in this.siloStatusOracle.GetApproximateSiloStatuses())
    {
        if (kvp.Value == SiloStatus.Active && !kvp.Key.Equals(this.Silo))
            yield return kvp.Key;
    }
}
// Performs one round of periodic background gossip: picks one random target
// (an active gateway or a configured gossip channel) and fully synchronizes
// with it, then logs communication problems and retires idle workers.
private void PeriodicBackgroundGossip()
{
    logger.Debug("--- PeriodicBackgroundGossip");
    // pick random target for full gossip
    var gateways = localData.Current.Gateways.Values
        .Where(gw => !gw.SiloAddress.Equals(this.Silo) && gw.Status == GatewayStatus.Active)
        .ToList();
    var targetCount = gateways.Count + gossipChannels.Count;
    // guard against an empty target set: random.Next(0) returns 0, and the
    // unguarded indexing would throw on an empty gossipChannels list
    if (targetCount > 0)
    {
        var pick = random.Next(targetCount);
        if (pick < gateways.Count)
        {
            var cluster = gateways[pick].ClusterId;
            GetClusterWorker(cluster).Synchronize();
        }
        else
        {
            var address = gossipChannels[pick - gateways.Count];
            GetChannelWorker(address).Synchronize();
        }
    }
    // report summary of encountered communication problems in log
    WarnIfUnreachable("Gossip Communication: cannot reach clusters {0}", this.clusterWorkers);
    WarnIfUnreachable("Gossip Communication: cannot reach silos {0}", this.siloWorkers);
    WarnIfUnreachable("Gossip Communication: cannot reach channels {0}", this.channelWorkers);
    // discard workers that have not been used for a while and are idle
    RemoveIdleWorkers(this.clusterWorkers);
    RemoveIdleWorkers(this.siloWorkers);
    logger.Debug("--- PeriodicBackgroundGossip: done");
}
// Logs a single warning listing all workers in the dictionary whose last
// gossip attempt failed; format's {0} receives the "key(ExceptionType)" list.
private void WarnIfUnreachable<K, W>(string format, Dictionary<K, W> workers) where W : GossipWorker
{
    var unreachable = string.Join(",", workers
        .Where(kvp => kvp.Value.LastException != null)
        .Select(kvp => string.Format("{0}({1})", kvp.Key, kvp.Value.LastException.GetType().Name)));
    if (!string.IsNullOrEmpty(unreachable))
        logger.Warn(ErrorCode.MultiClusterNetwork_GossipCommunicationFailure, format, unreachable);
}
// The set of all known clusters: every cluster with a gateway entry, plus
// every cluster named in the current configuration (if there is one).
private IEnumerable<string> AllClusters()
{
    var clusters = new HashSet<string>(localData.Current.Gateways.Values.Select(gw => gw.ClusterId));
    var configuration = localData.Current.Configuration;
    if (configuration != null)
    {
        clusters.UnionWith(configuration.Clusters);
    }
    return clusters;
}
// Drops workers that have been unused for more than 2.5x the resend period,
// so the worker dictionaries do not grow without bound.
private void RemoveIdleWorkers<K, T>(Dictionary<K, T> dict) where T : GossipWorker
{
    var cutoffMs = 2.5 * this.resendActiveStatusAfter.TotalMilliseconds;
    var now = DateTime.UtcNow;
    var idleKeys = new List<K>();
    foreach (var kvp in dict)
    {
        if ((now - kvp.Value.LastUse).TotalMilliseconds > cutoffMs)
            idleKeys.Add(kvp.Key);
    }
    foreach (var key in idleKeys)
        dict.Remove(key);
}
// called by remote nodes that publish changes
/// <summary>
/// Gossip entry point: applies incoming data to local state and, when
/// forwardLocally is set (the data originated outside this cluster), forwards
/// the resulting delta to all other active silos of this cluster.
/// </summary>
public Task Publish(IMultiClusterGossipData gossipData, bool forwardLocally)
{
    logger.Debug("--- Publish: receive {0} data {1}", forwardLocally ? "remote" : "local", gossipData);
    var data = (MultiClusterData)gossipData;
    // merge into local state; delta contains only what was actually new to us
    var delta = localData.ApplyDataAndNotify(data);
    // forward changes to all local silos
    if (forwardLocally)
    {
        foreach (var activeSilo in this.GetApproximateOtherActiveSilos())
            GetSiloWorker(activeSilo).Publish(delta);
    }
    // push our own active status to any gateways we just learned about
    PublishMyStatusToNewDestinations(delta);
    logger.Debug("--- Publish: done");
    return Task.CompletedTask;
}
// called by remote nodes' full background gossip
/// <summary>
/// Two-way gossip entry point: merges the caller's data into local state and
/// returns the delta (what was new to us) as the answer.
/// </summary>
public Task<IMultiClusterGossipData> Synchronize(IMultiClusterGossipData gossipData)
{
    logger.Debug("--- Synchronize: gossip {0}", gossipData);
    var data = (MultiClusterData)gossipData;
    var delta = this.localData.ApplyDataAndNotify(data);
    // push our own active status to any gateways we just learned about
    PublishMyStatusToNewDestinations(delta);
    logger.Debug("--- Synchronize: done, answer={0}", delta);
    return Task.FromResult((IMultiClusterGossipData)delta);
}
// initiate a search for lagging silos, contacting other silos and clusters
/// <summary>
/// Checks the whole multi-cluster for silos that have not yet adopted the
/// expected configuration: this cluster directly, and each other active
/// cluster through one of its gateways.
/// </summary>
public async Task<List<SiloAddress>> FindLaggingSilos(MultiClusterConfiguration expected)
{
    var tasks = new List<Task<List<SiloAddress>>>();
    // check this cluster for lagging silos
    tasks.Add(FindLaggingSilos(expected, true));
    // check all other clusters for lagging silos
    foreach (var cluster in GetActiveClusters())
    {
        if (cluster != this.clusterId)
        {
            var silo = GetRandomClusterGateway(cluster);
            if (silo == null)
                throw new OrleansException("no gateway for cluster " + cluster);
            var remoteOracle = this.grainFactory.GetSystemTarget<IMultiClusterGossipService>(Constants.MultiClusterOracleId, silo);
            tasks.Add(remoteOracle.FindLaggingSilos(expected, true));
        }
    }
    // This function is called during manual admin operations through
    // IManagementGrain (change configuration, or check stability).
    // Users are going to want to see the exception details to figure out
    // what is going on.
    await Task.WhenAll(tasks);
    return tasks.SelectMany(t => t.Result).ToList();
}
// receive a remote request for finding lagging silos in this cluster or on this silo
/// <summary>
/// Checks whether this silo (and, when forwardLocally is set, every other
/// active silo in this cluster) has caught up to the expected configuration,
/// returning the addresses of silos that are lagging.
/// </summary>
public async Task<List<SiloAddress>> FindLaggingSilos(MultiClusterConfiguration expected, bool forwardLocally)
{
    logger.Debug("--- FindLaggingSilos: {0}, {1}", forwardLocally ? "remote" : "local", expected);
    var result = new List<SiloAddress>();
    // check if this silo is lagging
    if (!MultiClusterConfiguration.Equals(localData.Current.Configuration, expected))
        result.Add(this.Silo);
    if (forwardLocally)
    {
        // contact all other active silos in this cluster
        var tasks = new List<Task<List<SiloAddress>>>();
        foreach (var activeSilo in this.GetApproximateOtherActiveSilos())
        {
            var remoteOracle = this.grainFactory.GetSystemTarget<IMultiClusterGossipService>(Constants.MultiClusterOracleId, activeSilo);
            tasks.Add(remoteOracle.FindLaggingSilos(expected, false));
        }
        await Task.WhenAll(tasks);
        // flatten the per-silo results into the combined list (was a manual foreach/Add loop)
        result.AddRange(tasks.SelectMany(t => t.Result));
    }
    logger.Debug("--- FindLaggingSilos: done, found {0}", result.Count);
    return result;
}
// For quicker convergence: when a delta reveals a destination gateway that
// does not yet know our active status, immediately push our own entry to it.
private void PublishMyStatusToNewDestinations(IMultiClusterGossipData delta)
{
    GatewayEntry myEntry;
    // don't do this if we are not an active gateway ourselves
    if (!localData.Current.Gateways.TryGetValue(this.Silo, out myEntry)
        || myEntry.Status != GatewayStatus.Active)
        return;
    foreach (var gateway in delta.Gateways.Values)
    {
        // local-cluster gateways are contacted directly; remote clusters go
        // through their per-cluster worker
        // (removed the unused local 'destinationCluster')
        var gossipworker = (gateway.ClusterId == this.clusterId) ?
            GetSiloWorker(gateway.SiloAddress) : GetClusterWorker(gateway.ClusterId);
        if (!gossipworker.KnowsMe)
            gossipworker.Publish(new MultiClusterData(myEntry));
    }
}
// gossip workers, by category
private readonly Dictionary<SiloAddress, SiloGossipWorker> siloWorkers = new Dictionary<SiloAddress, SiloGossipWorker>();
private readonly Dictionary<string, SiloGossipWorker> clusterWorkers = new Dictionary<string, SiloGossipWorker>();
private readonly Dictionary<IGossipChannel, ChannelGossipWorker> channelWorkers = new Dictionary<IGossipChannel, ChannelGossipWorker>();
// numbering for tasks (helps when analyzing logs)
private int idCounter;
// Gets (or lazily creates) the gossip worker for a specific silo of the local cluster.
private SiloGossipWorker GetSiloWorker(SiloAddress silo)
{
    if (silo == null) throw new ArgumentNullException(nameof(silo)); // nameof is refactor-safe
    SiloGossipWorker worker;
    if (!this.siloWorkers.TryGetValue(silo, out worker))
        this.siloWorkers[silo] = worker = new SiloGossipWorker(this, silo, this.grainFactory);
    return worker;
}
// Gets (or lazily creates) the gossip worker for a remote cluster.
private SiloGossipWorker GetClusterWorker(string cluster)
{
    if (cluster == null) throw new ArgumentNullException(nameof(cluster)); // nameof is refactor-safe
    SiloGossipWorker worker;
    if (!this.clusterWorkers.TryGetValue(cluster, out worker))
        this.clusterWorkers[cluster] = worker = new SiloGossipWorker(this, cluster, this.grainFactory);
    return worker;
}
// Gets (or lazily creates) the gossip worker for an external gossip channel.
private ChannelGossipWorker GetChannelWorker(IGossipChannel channel)
{
    if (channel == null) throw new ArgumentNullException(nameof(channel)); // nameof is refactor-safe
    ChannelGossipWorker worker;
    if (!this.channelWorkers.TryGetValue(channel, out worker))
        this.channelWorkers[channel] = worker = new ChannelGossipWorker(this, channel);
    return worker;
}
// superclass for gossip workers.
// a gossip worker queues (push) and (synchronize) requests,
// and services them using a single async worker, so at most one gossip
// operation per destination is in flight at a time.
private abstract class GossipWorker : BatchWorker
{
    public GossipWorker(MultiClusterOracle oracle)
    {
        this.oracle = oracle;
    }
    protected MultiClusterOracle oracle;
    // add all data to be published into this variable; merged until serviced
    protected MultiClusterData toPublish = new MultiClusterData();
    // set this flag to request a full gossip (synchronize)
    protected bool doSynchronize = false;
    // queue data for publication; the actual send happens in Work()
    public void Publish(IMultiClusterGossipData data)
    {
        // add the data to the data waiting to be published
        toPublish = toPublish.Merge(data);
        if (oracle.logger.IsEnabled(LogLevel.Debug))
            LogQueuedPublish(toPublish);
        Notify();
    }
    // request a full two-way gossip; the actual exchange happens in Work()
    public void Synchronize()
    {
        doSynchronize = true;
        Notify();
    }
    // last communication failure, if any (reset to null after a success)
    public Exception LastException;
    // timestamp of last activity, used for idle-worker cleanup
    public DateTime LastUse = DateTime.UtcNow;
    protected override async Task Work()
    {
        // publish data that has been queued
        var data = toPublish;
        if (!data.IsEmpty)
        {
            toPublish = new MultiClusterData(); // clear queued data
            int id = ++oracle.idCounter; // for correlating log entries
            LastUse = DateTime.UtcNow;
            await Publish(id, data);
            LastUse = DateTime.UtcNow;
        } // (removed a stray empty statement that followed this block)
        // do a full synchronize if flag is set
        if (doSynchronize)
        {
            doSynchronize = false; // clear flag
            int id = ++oracle.idCounter;
            LastUse = DateTime.UtcNow;
            await Synchronize(id);
            LastUse = DateTime.UtcNow;
        }
    }
    protected abstract Task Publish(int id, MultiClusterData data);
    protected abstract Task Synchronize(int id);
    protected abstract void LogQueuedPublish(MultiClusterData data);
}
// A worker for gossiping with silos: either a specific silo in the local
// cluster, or a (randomly re-picked) gateway of a remote cluster.
private class SiloGossipWorker : GossipWorker
{
    // current target silo; for remote clusters this is re-picked as needed
    public SiloAddress Silo;
    private readonly IInternalGrainFactory grainFactory;
    // remote cluster id, or null when targeting a silo of the local cluster
    public string Cluster;
    public bool TargetsRemoteCluster { get { return Cluster != null; } }
    public bool KnowsMe; // used for optimizing pushes
    // worker for a specific silo in the local cluster
    public SiloGossipWorker(MultiClusterOracle oracle, SiloAddress silo, IInternalGrainFactory grainFactory)
        : base(oracle)
    {
        this.Cluster = null; // only local cluster
        this.Silo = silo;
        this.grainFactory = grainFactory;
    }
    // worker for a remote cluster; the gateway silo is picked lazily
    public SiloGossipWorker(MultiClusterOracle oracle, string cluster, IInternalGrainFactory grainFactory)
        : base(oracle)
    {
        this.Cluster = cluster;
        this.Silo = null;
        this.grainFactory = grainFactory;
    }
    protected override void LogQueuedPublish(MultiClusterData data)
    {
        if (TargetsRemoteCluster)
            oracle.logger.Debug("enqueued publish to cluster {0}, cumulative: {1}", Cluster, data);
        else
            oracle.logger.Debug("enqueued publish to silo {0}, cumulative: {1}", Silo, data);
    }
    protected async override Task Publish(int id, MultiClusterData data)
    {
        // optimization: can skip publish to local clusters if we are doing a full synchronize anyway
        if (!TargetsRemoteCluster && doSynchronize)
            return;
        // for remote clusters, pick a random gateway if we don't already have one, or it is not active anymore
        if (TargetsRemoteCluster && (Silo == null
            || !oracle.localData.Current.IsActiveGatewayForCluster(Silo, Cluster)))
        {
            Silo = oracle.GetRandomClusterGateway(Cluster);
        }
        // if the cluster has no gateways reporting, skip
        if (Silo == null)
        {
            return;
        }
        oracle.logger.Debug("-{0} Publish to silo {1} ({2}) {3}", id, Silo, Cluster ?? "local", data);
        try
        {
            // publish to the remote system target
            var remoteOracle = this.grainFactory.GetSystemTarget<IMultiClusterGossipService>(Constants.MultiClusterOracleId, Silo);
            await remoteOracle.Publish(data, TargetsRemoteCluster);
            LastException = null;
            // record whether the destination has now seen our active status
            // (single TryGetValue instead of ContainsKey + indexer)
            GatewayEntry myEntry;
            if (data.Gateways.TryGetValue(oracle.Silo, out myEntry))
                KnowsMe = myEntry.Status == GatewayStatus.Active;
            oracle.logger.Debug("-{0} Publish to silo successful", id);
        }
        catch (Exception e)
        {
            oracle.logger.Warn(ErrorCode.MultiClusterNetwork_GossipCommunicationFailure,
                $"-{id} Publish to silo {Silo} ({Cluster ?? "local"}) failed", e);
            if (TargetsRemoteCluster)
                Silo = null; // pick a different gateway next time
            LastException = e;
        }
    }
    protected async override Task Synchronize(int id)
    {
        // for remote clusters, always pick another random gateway
        if (TargetsRemoteCluster)
            Silo = oracle.GetRandomClusterGateway(Cluster);
        oracle.logger.Debug("-{0} Synchronize with silo {1} ({2})", id, Silo, Cluster ?? "local");
        try
        {
            var remoteOracle = this.grainFactory.GetSystemTarget<IMultiClusterGossipService>(Constants.MultiClusterOracleId, Silo);
            // snapshot taken before the exchange; KnowsMe below is based on what we sent
            var data = oracle.localData.Current;
            var answer = (MultiClusterData)await remoteOracle.Synchronize(oracle.localData.Current);
            // apply what we have learnt
            var delta = oracle.localData.ApplyDataAndNotify(answer);
            LastException = null;
            // single TryGetValue instead of ContainsKey + indexer
            GatewayEntry myEntry;
            if (data.Gateways.TryGetValue(oracle.Silo, out myEntry))
                KnowsMe = myEntry.Status == GatewayStatus.Active;
            oracle.logger.Debug("-{0} Synchronize with silo successful, answer={1}", id, answer);
            oracle.PublishMyStatusToNewDestinations(delta);
        }
        catch (Exception e)
        {
            oracle.logger.Warn(ErrorCode.MultiClusterNetwork_GossipCommunicationFailure,
                string.Format("-{0} Synchronize with silo {1} ({2}) failed", id, Silo, Cluster ?? "local"), e);
            if (TargetsRemoteCluster)
                Silo = null; // pick a different gateway next time
            LastException = e;
        }
    }
}
// A worker for gossiping with channels (external stores such as tables)
private class ChannelGossipWorker : GossipWorker
{
    IGossipChannel channel;
    public ChannelGossipWorker(MultiClusterOracle oracle, IGossipChannel channel)
        : base(oracle)
    {
        this.channel = channel;
    }
    protected override void LogQueuedPublish(MultiClusterData data)
    {
        oracle.logger.Debug("enqueue publish to channel {0}, cumulative: {1}", channel.Name, data);
    }
    // one-way push of queued data to the channel
    protected async override Task Publish(int id, MultiClusterData data)
    {
        oracle.logger.Debug("-{0} Publish to channel {1} {2}", id, channel.Name, data);
        try
        {
            await channel.Publish(data);
            LastException = null;
            // fixed log text: a one-way publish has no answer; log what was sent
            oracle.logger.Debug("-{0} Publish to channel successful, sent={1}", id, data);
        }
        catch (Exception e)
        {
            oracle.logger.Warn(ErrorCode.MultiClusterNetwork_GossipCommunicationFailure,
                string.Format("-{0} Publish to channel {1} failed", id, channel.Name), e);
            LastException = e;
        }
    }
    // full two-way exchange with the channel
    protected async override Task Synchronize(int id)
    {
        oracle.logger.Debug("-{0} Synchronize with channel {1}", id, channel.Name);
        try
        {
            var answer = await channel.Synchronize(oracle.localData.Current);
            // apply what we have learnt
            var delta = oracle.localData.ApplyDataAndNotify(answer);
            LastException = null;
            oracle.logger.Debug("-{0} Synchronize with channel successful", id);
            oracle.PublishMyStatusToNewDestinations(delta);
        }
        catch (Exception e)
        {
            oracle.logger.Warn(ErrorCode.MultiClusterNetwork_GossipCommunicationFailure,
                string.Format("-{0} Synchronize with channel {1} failed", id, channel.Name), e);
            LastException = e;
        }
    }
}
// If the administrator injected a configuration, apply it to local state and
// merge any resulting change into the outgoing deltas.
private void InjectConfiguration(ref MultiClusterData deltas)
{
    var config = this.injectedConfig;
    if (config == null)
        return;
    this.injectedConfig = null; // consume the injected configuration
    var data = new MultiClusterData(config);
    if (logger.IsEnabled(LogLevel.Debug))
        logger.Debug("-InjectConfiguration {0}", data.Configuration.ToString());
    var delta = this.localData.ApplyDataAndNotify(data);
    if (!delta.IsEmpty)
        deltas = deltas.Merge(delta);
}
// Computes this silo's own gateway entry and merges it into the outgoing
// deltas when the status changed, or when we are active and have not
// re-announced it for longer than resendActiveStatusAfter.
private void InjectLocalStatus(bool isGateway, ref MultiClusterData deltas)
{
    var myStatus = new GatewayEntry()
    {
        ClusterId = clusterId,
        SiloAddress = Silo,
        Status = isGateway ? GatewayStatus.Active : GatewayStatus.Inactive,
        HeartbeatTimestamp = DateTime.UtcNow,
    };
    GatewayEntry existingEntry;
    // do not update if we are reporting inactive status and entry is not already there
    if (!this.localData.Current.Gateways.TryGetValue(Silo, out existingEntry) && !isGateway)
        return;
    // send if status is changed, or we are active and haven't said so in a while
    if (existingEntry == null
        || existingEntry.Status != myStatus.Status
        || (myStatus.Status == GatewayStatus.Active
            && myStatus.HeartbeatTimestamp - existingEntry.HeartbeatTimestamp > this.resendActiveStatusAfter))
    {
        logger.Info($"Report as {myStatus}");
        // update current data with status
        var delta = this.localData.ApplyDataAndNotify(new MultiClusterData(myStatus));
        if (!delta.IsEmpty)
            deltas = deltas.Merge(delta);
    }
}
// Marks gateways of this cluster as Inactive when they have gone silent for
// longer than CleanupSilentGoneGatewaysAfter AND are no longer designated
// gateways per the membership table, merging the demotions into deltas.
private void DemoteLocalGateways(IReadOnlyList<SiloAddress> activeGateways, ref MultiClusterData deltas)
{
    var now = DateTime.UtcNow;
    // mark gateways as inactive if they have not recently advertised their existence,
    // and if they are not designated gateways as per membership table
    var toBeUpdated = this.localData.Current.Gateways.Values
        .Where(g => g.ClusterId == clusterId
            && g.Status == GatewayStatus.Active
            && (now - g.HeartbeatTimestamp > CleanupSilentGoneGatewaysAfter)
            && !activeGateways.Contains(g.SiloAddress))
        .Select(g => new GatewayEntry()
        {
            ClusterId = g.ClusterId,
            SiloAddress = g.SiloAddress,
            Status = GatewayStatus.Inactive,
            // timestamp advanced so the demotion wins over the stale heartbeat
            HeartbeatTimestamp = g.HeartbeatTimestamp + CleanupSilentGoneGatewaysAfter,
        }).ToList();
    if (toBeUpdated.Count == 0)
        return;
    var data = new MultiClusterData(toBeUpdated);
    if (logger.IsEnabled(LogLevel.Debug))
        logger.Debug("-DemoteLocalGateways {0}", data.ToString());
    var delta = this.localData.ApplyDataAndNotify(data);
    if (!delta.IsEmpty)
    {
        deltas = deltas.Merge(delta);
    }
}
}
}
| |
/*!
* CSharpVerbalExpressions v0.1
* https://github.com/VerbalExpressions/CSharpVerbalExpressions
*
* @psoholt
*
* Date: 2013-07-26
*
* Additions and Refactoring
* @alexpeta
*
* Date: 2013-08-06
*/
using System;
using System.Text;
using System.Linq;
using System.Text.RegularExpressions;
namespace CSharpVerbalExpressions
{
public class VerbalExpressions
{
#region Statics
/// <summary>
/// Returns a new default instance of VerbalExpressions on each access,
/// having the Multiline option enabled
/// </summary>
public static VerbalExpressions DefaultExpression
{
    get { return new VerbalExpressions(); }
}
#endregion Statics
#region Private Members
private readonly StringBuilder _prefixes = new StringBuilder();
private readonly StringBuilder _source = new StringBuilder();
private readonly StringBuilder _suffixes = new StringBuilder();
private RegexOptions _modifiers = RegexOptions.Multiline;
#endregion Private Members
#region Private Properties
// The full pattern text: prefixes, then source, then suffixes.
private string RegexString
{
    get { return string.Concat(_prefixes, _source, _suffixes); }
}
// A Regex for the current pattern and modifiers, served via the cache.
private Regex PatternRegex
{
    get
    {
        var cache = new RegexCache();
        return cache.Get(this.RegexString, _modifiers);
    }
}
#endregion Private Properties
#region Public Methods
#region Helpers
/// <summary>
/// Escapes regex metacharacters in the given value so it matches literally.
/// </summary>
public string Sanitize(string value)
{
    if (string.IsNullOrEmpty(value))
        throw new ArgumentNullException("value");
    return Regex.Escape(value);
}
// Alias for IsMatch: true when the current expression matches toTest.
public bool Test(string toTest)
{
    return IsMatch(toTest);
}
// True when the compiled expression matches toTest.
public bool IsMatch(string toTest)
{
    Regex regex = PatternRegex;
    return regex.IsMatch(toTest);
}
// Exposes the underlying compiled Regex for direct use.
public Regex ToRegex()
{
    return PatternRegex;
}
// The regex pattern text this expression currently represents.
public override string ToString()
{
    Regex regex = PatternRegex;
    return regex.ToString();
}
/// <summary>
/// Returns the value of the named capture group for the first match in
/// toTest, or null when the expression does not match.
/// </summary>
public string Capture(string toTest, string groupName)
{
    // match once instead of Test() followed by Match(), which ran the regex twice
    var match = PatternRegex.Match(toTest);
    if (!match.Success)
        return null;
    return match.Groups[groupName].Value;
}
#endregion Helpers
#region Expression Modifiers
// Appends a predefined common pattern without sanitizing it.
public VerbalExpressions Add(CommonRegex commonRegex)
{
    return Add(commonRegex.Name, false);
}
/// <summary>
/// Appends a value to the expression source.
/// </summary>
/// <param name="value">Text or raw pattern to append.</param>
/// <param name="sanitize">When true, regex metacharacters in value are escaped.</param>
public VerbalExpressions Add(string value, bool sanitize = true)
{
    if (value == null)
        // was new ArgumentNullException("value must be provided"), which put
        // the message into the ParamName slot; use the (paramName, message) overload
        throw new ArgumentNullException("value", "value must be provided");
    value = sanitize ? Sanitize(value) : value;
    _source.Append(value);
    return this;
}
// Anchors the expression at the start of a line when enable is true.
public VerbalExpressions StartOfLine(bool enable = true)
{
    if (enable)
    {
        _prefixes.Append("^");
    }
    return this;
}
// Anchors the expression at the end of a line when enable is true.
public VerbalExpressions EndOfLine(bool enable = true)
{
    if (enable)
    {
        _suffixes.Append("$");
    }
    return this;
}
// Appends a capturing group that matches value.
public VerbalExpressions Then(string value, bool sanitize = true)
{
    string body = sanitize ? Sanitize(value) : value;
    return Add("(" + body + ")", false);
}
// Appends a capturing group around a predefined common pattern (not sanitized).
public VerbalExpressions Then(CommonRegex commonRegex)
{
    return Then(commonRegex.Name, false);
}
// Alias for Then(value) with sanitizing enabled.
public VerbalExpressions Find(string value)
{
    return Then(value);
}
// Appends an optional capturing group matching value.
public VerbalExpressions Maybe(string value, bool sanitize = true)
{
    string body = sanitize ? Sanitize(value) : value;
    return Add("(" + body + ")?", false);
}
// Appends an optional group around a predefined common pattern (not sanitized).
public VerbalExpressions Maybe(CommonRegex commonRegex)
{
    return Maybe(commonRegex.Name, sanitize: false);
}
// Appends a group capturing zero or more of any character.
public VerbalExpressions Anything()
{
    return this.Add("(.*)", sanitize: false);
}
// Appends a group capturing zero or more characters NOT in value.
public VerbalExpressions AnythingBut(string value, bool sanitize = true)
{
    string body = sanitize ? Sanitize(value) : value;
    return Add("([^" + body + "]*)", false);
}
// Appends a group capturing one or more of any character.
public VerbalExpressions Something()
{
    return this.Add("(.+)", sanitize: false);
}
/// <summary>
/// Appends a group matching one or more characters NOT in value.
/// </summary>
public VerbalExpressions SomethingBut(string value, bool sanitize = true)
{
    value = sanitize ? Sanitize(value) : value;
    // was string.Format("([^" + value + "]+)") with no format arguments, which
    // threw FormatException whenever value contained '{' or '}'
    value = string.Format("([^{0}]+)", value);
    return Add(value, false);
}
// Replaces every occurrence of the complete current pattern text inside
// _source with the given value.
// NOTE(review): it compares against the full pattern (including prefixes and
// suffixes) but rewrites only _source — presumably intended as a crude
// "swap the whole expression" helper; verify against callers.
public VerbalExpressions Replace(string value)
{
    string whereToReplace = PatternRegex.ToString();
    if (whereToReplace.Length != 0)
    {
        _source.Replace(whereToReplace, value);
    }
    return this;
}
// Appends a pattern matching a Unix (\n) or Windows (\r\n) line break.
public VerbalExpressions LineBreak()
{
    return this.Add(@"(\n|(\r\n))", sanitize: false);
}
// Short alias for LineBreak().
public VerbalExpressions Br()
{
    return LineBreak();
}
/// <summary>
/// Appends a pattern matching a tab character.
/// </summary>
public VerbalExpressions Tab()
{
    // was Add(@"\t") with the default sanitize:true, which escaped the
    // backslash into \\t — matching a literal backslash followed by 't'
    // instead of a tab character
    return Add(@"\t", false);
}
// Appends a pattern matching one or more word characters.
public VerbalExpressions Word()
{
    return this.Add(@"\w+", sanitize: false);
}
// Appends a character class matching any single character in value.
public VerbalExpressions AnyOf(string value, bool sanitize = true)
{
    if (string.IsNullOrEmpty(value))
    {
        throw new ArgumentNullException("value");
    }
    string body = sanitize ? Sanitize(value) : value;
    return Add("[" + body + "]", false);
}
// Alias for AnyOf(value) with sanitizing enabled.
public VerbalExpressions Any(string value)
{
    return AnyOf(value);
}
// Appends a character-class range built from pairs of arguments, e.g.
// Range('a','z','0','9') yields [a-z0-9]. With an odd count, the last
// (sorted) argument is appended as an alternative: [a-z]|x.
// NOTE(review): arguments are sorted before pairing and at most 3 non-empty
// values are accepted — presumably to keep "from" below "to"; verify this
// matches the intended public contract before relying on it.
public VerbalExpressions Range(params object[] arguments)
{
    if (object.ReferenceEquals(arguments, null))
    {
        throw new ArgumentNullException("arguments");
    }
    if (arguments.Length == 1)
    {
        throw new ArgumentOutOfRangeException("arguments");
    }
    // stringify, escape, drop empties, and sort the endpoints
    string[] sanitizedStrings = arguments.Select(argument =>
    {
        if (object.ReferenceEquals(argument, null))
        {
            return string.Empty;
        }
        string casted = argument.ToString();
        if (string.IsNullOrEmpty(casted))
        {
            return string.Empty;
        }
        else
        {
            return Sanitize(casted);
        }
    })
    .Where(sanitizedString =>
        !string.IsNullOrEmpty(sanitizedString))
    .OrderBy(s => s)
    .ToArray();
    if (sanitizedStrings.Length > 3)
    {
        throw new ArgumentOutOfRangeException("arguments");
    }
    if (!sanitizedStrings.Any())
    {
        return this;
    }
    // an odd count leaves one unpaired value, emitted as an alternative below
    bool hasOddNumberOfParams = (sanitizedStrings.Length % 2) > 0;
    StringBuilder sb = new StringBuilder("[");
    for (int _from = 0; _from < sanitizedStrings.Length; _from += 2)
    {
        int _to = _from + 1;
        if (sanitizedStrings.Length <= _to)
        {
            break;
        }
        sb.AppendFormat("{0}-{1}", sanitizedStrings[_from], sanitizedStrings[_to]);
    }
    sb.Append("]");
    if (hasOddNumberOfParams)
    {
        sb.AppendFormat("|{0}", sanitizedStrings.Last());
    }
    return Add(sb.ToString(), false);
}
public VerbalExpressions Multiple(string value, bool sanitize = true)
{
    // One or more repetitions of the given token, wrapped in a capture group.
    if (string.IsNullOrEmpty(value))
    {
        throw new ArgumentNullException("value");
    }
    string token = sanitize ? Sanitize(value) : value;
    return Add("(" + token + ")+", false);
}
public VerbalExpressions Or(CommonRegex commonRegex)
{
    // Guard against null so the failure surfaces here as ArgumentNullException
    // (consistent with AnyOf/Multiple) rather than as a NullReferenceException
    // when commonRegex.Name is dereferenced.
    if (commonRegex == null)
    {
        throw new ArgumentNullException("commonRegex");
    }
    // CommonRegex.Name carries a prebuilt pattern, so it must not be sanitized.
    return Or(commonRegex.Name, false);
}
// Starts an alternative branch: everything built so far is wrapped in a group
// (prefixes gain "(", suffixes gain ")") and ")|(" splits the source so the
// final pattern reads (A)|(B). The new value is then appended as the B branch.
public VerbalExpressions Or(string value, bool sanitize = true)
{
    _prefixes.Append("(");
    _suffixes.Insert(0, ")");
    _source.Append(")|(");
    return Add(value, sanitize);
}
public VerbalExpressions BeginCapture()
{
    // Open an anonymous capture group; pair with EndCapture().
    const string open = "(";
    return Add(open, false);
}
public VerbalExpressions BeginCapture(string groupName)
{
    // Open a named capture group "(?<name>"; the group name itself is sanitized.
    Add("(?<", false);
    Add(groupName, true);
    return Add(">", false);
}
public VerbalExpressions EndCapture()
{
    // Close the capture group most recently opened by BeginCapture.
    const string close = ")";
    return Add(close, false);
}
public VerbalExpressions RepeatPrevious(int n)
{
    // Exact repetition quantifier "{n}" applied to the preceding token.
    return Add(string.Concat("{", n, "}"), false);
}
public VerbalExpressions RepeatPrevious(int n, int m)
{
    // Bounded repetition quantifier "{n,m}" applied to the preceding token.
    return Add(string.Concat("{", n, ",", m, "}"), false);
}
#endregion Expression Modifiers
#region Expression Options Modifiers
public VerbalExpressions AddModifier(char modifier)
{
    // Map the Perl-style modifier character to its RegexOptions flag and set it.
    // Unknown characters are silently ignored.
    if (modifier == 'i')
        _modifiers |= RegexOptions.IgnoreCase;
    else if (modifier == 'x')
        _modifiers |= RegexOptions.IgnorePatternWhitespace;
    else if (modifier == 'm')
        _modifiers |= RegexOptions.Multiline;
    else if (modifier == 's')
        _modifiers |= RegexOptions.Singleline;
    return this;
}
public VerbalExpressions RemoveModifier(char modifier)
{
    // Clear the RegexOptions flag matching the Perl-style modifier character.
    // Unknown characters are silently ignored.
    if (modifier == 'i')
        _modifiers &= ~RegexOptions.IgnoreCase;
    else if (modifier == 'x')
        _modifiers &= ~RegexOptions.IgnorePatternWhitespace;
    else if (modifier == 'm')
        _modifiers &= ~RegexOptions.Multiline;
    else if (modifier == 's')
        _modifiers &= ~RegexOptions.Singleline;
    return this;
}
public VerbalExpressions WithAnyCase(bool enable = true)
{
    // 'i' toggles RegexOptions.IgnoreCase; both helpers return this builder.
    return enable ? AddModifier('i') : RemoveModifier('i');
}
public VerbalExpressions UseOneLineSearchOption(bool enable)
{
    // One-line search means multiline matching ('m') is disabled, and vice versa.
    return enable ? RemoveModifier('m') : AddModifier('m');
}
public VerbalExpressions WithOptions(RegexOptions options)
{
    // Replace the accumulated modifiers wholesale with the caller's options.
    _modifiers = options;
    return this;
}
#endregion Expression Options Modifiers
#endregion Public Methods
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyComplex
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Models;
/// <summary>
/// Test Infrastructure for AutoRest
/// </summary>
// NOTE: generated by AutoRest (see file header) — manual edits here are lost
// when the client is regenerated.
public partial class AutoRestComplexTestService : ServiceClient<AutoRestComplexTestService>, IAutoRestComplexTestService
{
/// <summary>
/// The base URI of the service.
/// </summary>
public Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// API ID.
/// </summary>
public string ApiVersion { get; private set; }
/// <summary>
/// Gets the IBasicOperations.
/// </summary>
public virtual IBasicOperations BasicOperations { get; private set; }
/// <summary>
/// Gets the IPrimitive.
/// </summary>
public virtual IPrimitive Primitive { get; private set; }
/// <summary>
/// Gets the IArray.
/// </summary>
public virtual IArray Array { get; private set; }
/// <summary>
/// Gets the IDictionary.
/// </summary>
public virtual IDictionary Dictionary { get; private set; }
/// <summary>
/// Gets the IInheritance.
/// </summary>
public virtual IInheritance Inheritance { get; private set; }
/// <summary>
/// Gets the IPolymorphism.
/// </summary>
public virtual IPolymorphism Polymorphism { get; private set; }
/// <summary>
/// Gets the IPolymorphicrecursive.
/// </summary>
public virtual IPolymorphicrecursive Polymorphicrecursive { get; private set; }
/// <summary>
/// Gets the IReadonlyproperty.
/// </summary>
public virtual IReadonlyproperty Readonlyproperty { get; private set; }
/// <summary>
/// Initializes a new instance of the AutoRestComplexTestService class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public AutoRestComplexTestService(params DelegatingHandler[] handlers) : base(handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the AutoRestComplexTestService class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public AutoRestComplexTestService(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the AutoRestComplexTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public AutoRestComplexTestService(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
// Overrides the "http://localhost" default that Initialize() (run by the
// chained constructor) has already set.
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the AutoRestComplexTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public AutoRestComplexTestService(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// An optional partial-method to perform custom initialization.
///</summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
// Wire every operation group to this client instance.
this.BasicOperations = new BasicOperations(this);
this.Primitive = new Primitive(this);
this.Array = new Array(this);
this.Dictionary = new Dictionary(this);
this.Inheritance = new Inheritance(this);
this.Polymorphism = new Polymorphism(this);
this.Polymorphicrecursive = new Polymorphicrecursive(this);
this.Readonlyproperty = new Readonlyproperty(this);
// Defaults; BaseUri can be replaced by the Uri-taking constructors.
this.BaseUri = new Uri("http://localhost");
this.ApiVersion = "2014-04-01-preview";
// Serialization and deserialization settings mirror each other except that
// serialization also indents its output.
SerializationSettings = new JsonSerializerSettings
{
Formatting = Formatting.Indented,
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
DeserializationSettings = new JsonSerializerSettings
{
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
// Polymorphic (de)serialization of the Fish hierarchy is keyed on "fishtype".
SerializationSettings.Converters.Add(new PolymorphicSerializeJsonConverter<Fish>("fishtype"));
DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter<Fish>("fishtype"));
// Hook for hand-written partial-class customization.
CustomInitialize();
}
}
}
| |
//
// X509CRL.cs: Handles X.509 certificates revocation lists.
//
// Author:
// Sebastien Pouliot <sebastien@ximian.com>
//
// (C) 2004 Novell (http://www.novell.com)
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections;
using System.Globalization;
using System.IO;
using System.Security.Cryptography;
using Mono.Security.X509.Extensions;
namespace Mono.Security.X509 {
/*
* CertificateList ::= SEQUENCE {
* tbsCertList TBSCertList,
* signatureAlgorithm AlgorithmIdentifier,
* signature BIT STRING
* }
*
* TBSCertList ::= SEQUENCE {
* version Version OPTIONAL,
* -- if present, MUST be v2
* signature AlgorithmIdentifier,
* issuer Name,
* thisUpdate Time,
* nextUpdate Time OPTIONAL,
* revokedCertificates SEQUENCE OF SEQUENCE {
* userCertificate CertificateSerialNumber,
* revocationDate Time,
* crlEntryExtensions Extensions OPTIONAL
* -- if present, MUST be v2
* } OPTIONAL,
* crlExtensions [0] Extensions OPTIONAL }
* -- if present, MUST be v2
*/
// X.509 Certificate Revocation List (RFC 3280 CertificateList), parsed from its
// DER encoding. Exposes the revoked entries, CRL extensions, validity window and
// signature verification against the issuing CA certificate.
#if INSIDE_CORLIB
internal
#else
public
#endif
class X509Crl {

    // One revokedCertificates element: serial number, revocation date and
    // optional per-entry extensions.
    public class X509CrlEntry {

        private byte[] sn;
        private DateTime revocationDate;
        private X509ExtensionCollection extensions;

        internal X509CrlEntry (byte[] serialNumber, DateTime revocationDate, X509ExtensionCollection extensions)
        {
            sn = serialNumber;
            this.revocationDate = revocationDate;
            // never keep a null collection; an empty one keeps callers simple
            if (extensions == null)
                this.extensions = new X509ExtensionCollection ();
            else
                this.extensions = extensions;
        }

        internal X509CrlEntry (ASN1 entry)
        {
            sn = entry [0].Value;
            // reversed so Compare/GetCrlEntry can match X509Certificate.SerialNumber
            // byte ordering — see GetCrlEntry (X509Certificate) below.
            Array.Reverse (sn);
            revocationDate = ASN1Convert.ToDateTime (entry [1]);
            // NOTE(review): crlEntryExtensions is OPTIONAL per the ASN.1 above;
            // this assumes X509ExtensionCollection tolerates a missing element.
            extensions = new X509ExtensionCollection (entry [2]);
        }

        // Returns a copy so callers cannot mutate the internal buffer.
        public byte[] SerialNumber {
            get { return (byte[]) sn.Clone (); }
        }

        public DateTime RevocationDate {
            get { return revocationDate; }
        }

        public X509ExtensionCollection Extensions {
            get { return extensions; }
        }

        // Re-encodes this entry as a DER SEQUENCE { serial, revocationDate[, extensions] }.
        public byte[] GetBytes ()
        {
            ASN1 sequence = new ASN1 (0x30);
            sequence.Add (new ASN1 (0x02, sn));
            sequence.Add (ASN1Convert.FromDateTime (revocationDate));
            if (extensions.Count > 0)
                sequence.Add (new ASN1 (extensions.GetBytes ()));
            return sequence.GetBytes ();
        }
    }

    private string issuer;
    private byte version;
    private DateTime thisUpdate;
    private DateTime nextUpdate;          // DateTime.MinValue when absent (OPTIONAL field)
    private ArrayList entries;
    private string signatureOID;
    private byte[] signature;
    private X509ExtensionCollection extensions;
    private byte[] encoded;               // original DER bytes, kept for GetBytes/GetHash

    public X509Crl (byte[] crl)
    {
        if (crl == null)
            throw new ArgumentNullException ("crl");
        encoded = (byte[]) crl.Clone ();
        Parse (encoded);
    }

    // Walks the CertificateList structure (see the ASN.1 comment above the class)
    // and populates all fields. Any structural problem surfaces as a
    // CryptographicException.
    private void Parse (byte[] crl)
    {
        string e = "Input data cannot be coded as a valid CRL.";
        try {
            // CertificateList ::= SEQUENCE {
            ASN1 encodedCRL = new ASN1 (encoded);
            if ((encodedCRL.Tag != 0x30) || (encodedCRL.Count != 3))
                throw new CryptographicException (e);
            // CertificateList / TBSCertList,
            ASN1 toBeSigned = encodedCRL [0];
            if ((toBeSigned.Tag != 0x30) || (toBeSigned.Count < 3))
                throw new CryptographicException (e);
            int n = 0;
            // CertificateList / TBSCertList / Version OPTIONAL, -- if present, MUST be v2
            if (toBeSigned [n].Tag == 0x02) {
                // encoded value is 0-based (v2 == 1); exposed Version is 1-based
                version = (byte) (toBeSigned [n++].Value [0] + 1);
            }
            else
                version = 1; // DEFAULT
            // CertificateList / TBSCertList / AlgorithmIdentifier,
            signatureOID = ASN1Convert.ToOid (toBeSigned [n++][0]);
            // CertificateList / TBSCertList / Name,
            issuer = X501.ToString (toBeSigned [n++]);
            // CertificateList / TBSCertList / Time,
            thisUpdate = ASN1Convert.ToDateTime (toBeSigned [n++]);
            // CertificateList / TBSCertList / Time OPTIONAL,
            ASN1 next = toBeSigned [n++];
            if ((next.Tag == 0x17) || (next.Tag == 0x18)) {
                nextUpdate = ASN1Convert.ToDateTime (next);
                next = toBeSigned [n++];
            }
            // CertificateList / TBSCertList / revokedCertificates SEQUENCE OF SEQUENCE {
            entries = new ArrayList ();
            ASN1 revokedCertificates = next;
            for (int i=0; i < revokedCertificates.Count; i++) {
                entries.Add (new X509CrlEntry (revokedCertificates [i]));
            }
            // CertificateList / TBSCertList / crlExtensions [0] Extensions OPTIONAL }
            ASN1 extns = toBeSigned [n];
            if ((extns != null) && (extns.Tag == 0xA0) && (extns.Count == 1))
                extensions = new X509ExtensionCollection (extns [0]);
            else
                extensions = new X509ExtensionCollection (null); // result in a read only object
            // CertificateList / AlgorithmIdentifier
            string signatureAlgorithm = ASN1Convert.ToOid (encodedCRL [1][0]);
            if (signatureOID != signatureAlgorithm)
                throw new CryptographicException (e + " [Non-matching signature algorithms in CRL]");
            // CertificateList / BIT STRING
            byte[] bitstring = encodedCRL [2].Value;
            // first byte contains unused bits in first byte
            signature = new byte [bitstring.Length - 1];
            Buffer.BlockCopy (bitstring, 1, signature, 0, signature.Length);
        }
        catch (CryptographicException) {
            // BUG FIX: re-throw as-is instead of re-wrapping, which used to
            // discard the more detailed message (e.g. the non-matching
            // signature algorithm case above).
            throw;
        }
        catch (Exception ex) {
            // BUG FIX: keep the original failure as the inner exception instead
            // of silently swallowing it.
            throw new CryptographicException (e, ex);
        }
    }

    // Read-only view; mutate via parsing only.
    public ArrayList Entries {
        get { return ArrayList.ReadOnly (entries); }
    }

    public X509CrlEntry this [int index] {
        get { return (X509CrlEntry) entries [index]; }
    }

    // Lookup by serial number; null when the serial is not revoked.
    public X509CrlEntry this [byte[] serialNumber] {
        get { return GetCrlEntry (serialNumber); }
    }

    public X509ExtensionCollection Extensions {
        get { return extensions; }
    }

    public string IssuerName {
        get { return issuer; }
    }

    public DateTime NextUpdate {
        get { return nextUpdate; }
    }

    public DateTime ThisUpdate {
        get { return thisUpdate; }
    }

    public string SignatureAlgorithm {
        get { return signatureOID; }
    }

    // Copy of the raw signature bytes (unused-bits byte already stripped).
    public byte[] Signature {
        get {
            if (signature == null)
                return null;
            return (byte[]) signature.Clone ();
        }
    }

    public byte Version {
        get { return version; }
    }

    public bool IsCurrent {
        get { return WasCurrent (DateTime.UtcNow); }
    }

    // True when 'instant' falls in [thisUpdate, nextUpdate]; an absent
    // nextUpdate (MinValue) means the CRL never expires.
    public bool WasCurrent (DateTime instant)
    {
        if (nextUpdate == DateTime.MinValue)
            return (instant >= thisUpdate);
        else
            return ((instant >= thisUpdate) && (instant <= nextUpdate));
    }

    public byte[] GetBytes ()
    {
        return (byte[]) encoded.Clone ();
    }

    // Constant-length byte-array equality (not timing-safe; serials aren't secret).
    private bool Compare (byte[] array1, byte[] array2)
    {
        if ((array1 == null) && (array2 == null))
            return true;
        if ((array1 == null) || (array2 == null))
            return false;
        if (array1.Length != array2.Length)
            return false;
        for (int i=0; i < array1.Length; i++) {
            if (array1 [i] != array2 [i])
                return false;
        }
        return true;
    }

    public X509CrlEntry GetCrlEntry (X509Certificate x509)
    {
        if (x509 == null)
            throw new ArgumentNullException ("x509");
        return GetCrlEntry (x509.SerialNumber);
    }

    // Linear scan — CRLs are typically small; returns null when not revoked.
    public X509CrlEntry GetCrlEntry (byte[] serialNumber)
    {
        if (serialNumber == null)
            throw new ArgumentNullException ("serialNumber");
        for (int i=0; i < entries.Count; i++) {
            X509CrlEntry entry = (X509CrlEntry) entries [i];
            if (Compare (serialNumber, entry.SerialNumber))
                return entry;
        }
        return null;
    }

    public bool VerifySignature (X509Certificate x509)
    {
        if (x509 == null)
            throw new ArgumentNullException ("x509");
        // 1. x509 certificate must be a CA certificate (unknown for v1 or v2 certs)
        if (x509.Version >= 3) {
            // 1.1. Check for "cRLSign" bit in KeyUsage extension
            X509Extension ext = x509.Extensions ["2.5.29.15"];
            if (ext != null) {
                KeyUsageExtension keyUsage = new KeyUsageExtension (ext);
                if (!keyUsage.Support (KeyUsages.cRLSign))
                    return false;
            }
            // 1.2. Check for ca = true in BasicConstraint
            ext = x509.Extensions ["2.5.29.19"];
            if (ext != null) {
                BasicConstraintsExtension basicConstraints = new BasicConstraintsExtension (ext);
                if (!basicConstraints.CertificateAuthority)
                    return false;
            }
        }
        // 2. CRL issuer must match CA subject name
        if (issuer != x509.SubjectName)
            return false;
        // 3. Check the CRL signature with the CA certificate public key
        switch (signatureOID) {
        case "1.2.840.10040.4.3":
            return VerifySignature (x509.DSA);
        default:
            return VerifySignature (x509.RSA);
        }
    }

    // Hash of the DER-encoded TBSCertList (the signed portion of the CRL).
    private byte[] GetHash (string hashName)
    {
        ASN1 encodedCRL = new ASN1 (encoded);
        byte[] toBeSigned = encodedCRL [0].GetBytes ();
        // BUG FIX: dispose the HashAlgorithm instead of leaking it.
        using (HashAlgorithm ha = HashAlgorithm.Create (hashName)) {
            return ha.ComputeHash (toBeSigned);
        }
    }

    internal bool VerifySignature (DSA dsa)
    {
        if (signatureOID != "1.2.840.10040.4.3")
            throw new CryptographicException ("Unsupported hash algorithm: " + signatureOID);
        DSASignatureDeformatter v = new DSASignatureDeformatter (dsa);
        // only SHA-1 is supported
        string hashName = "SHA1";
        v.SetHashAlgorithm (hashName);
        ASN1 sign = new ASN1 (signature);
        if ((sign == null) || (sign.Count != 2))
            return false;
        // parts may be less than 20 bytes (i.e. first bytes were 0x00)
        // so each part is right-aligned into its 20-byte half of the signature
        byte[] part1 = sign [0].Value;
        byte[] part2 = sign [1].Value;
        byte[] sig = new byte [40];
        Buffer.BlockCopy (part1, 0, sig, (20 - part1.Length), part1.Length);
        Buffer.BlockCopy (part2, 0, sig, (40 - part2.Length), part2.Length);
        return v.VerifySignature (GetHash (hashName), sig);
    }

    internal bool VerifySignature (RSA rsa)
    {
        RSAPKCS1SignatureDeformatter v = new RSAPKCS1SignatureDeformatter (rsa);
        string hashName = null;
        switch (signatureOID) {
        // MD2 with RSA encryption
        case "1.2.840.113549.1.1.2":
            // maybe someone installed MD2 ?
            hashName = "MD2";
            break;
        // MD5 with RSA encryption
        case "1.2.840.113549.1.1.4":
            hashName = "MD5";
            break;
        // SHA-1 with RSA Encryption
        case "1.2.840.113549.1.1.5":
            hashName = "SHA1";
            break;
        default:
            throw new CryptographicException ("Unsupported hash algorithm: " + signatureOID);
        }
        v.SetHashAlgorithm (hashName);
        return v.VerifySignature (GetHash (hashName), signature);
    }

    public bool VerifySignature (AsymmetricAlgorithm aa)
    {
        if (aa == null)
            throw new ArgumentNullException ("aa");
        // only validate the signature (in case we don't have the CA certificate)
        if (aa is RSA)
            return VerifySignature (aa as RSA);
        else if (aa is DSA)
            return VerifySignature (aa as DSA);
        else
            throw new NotSupportedException ("Unknown Asymmetric Algorithm " + aa.ToString ());
    }

    // Loads and parses a DER-encoded CRL from disk (shared-read, no writes).
    static public X509Crl CreateFromFile (string filename)
    {
        byte[] crl = null;
        using (FileStream fs = File.Open (filename, FileMode.Open, FileAccess.Read, FileShare.Read)) {
            crl = new byte [fs.Length];
            fs.Read (crl, 0, crl.Length);
            fs.Close ();
        }
        return new X509Crl (crl);
    }
}
}
| |
namespace Azure.AI.Language.Conversations
{
// NOTE(review): generated public-API surface listing (ApiView/ref-assembly style);
// member bodies are placeholders ("throw null") and are not executable code.
public partial class AnalysisParameters
{
public AnalysisParameters() { }
public string ApiVersion { get { throw null; } set { } }
}
public partial class AnalyzeConversationOptions
{
public AnalyzeConversationOptions() { }
public string DirectTarget { get { throw null; } set { } }
public bool? IsLoggingEnabled { get { throw null; } set { } }
public string Language { get { throw null; } set { } }
public System.Collections.Generic.IDictionary<string, Azure.AI.Language.Conversations.AnalysisParameters> Parameters { get { throw null; } }
public bool? Verbose { get { throw null; } set { } }
}
// Result types below: internal constructors and get-only properties, i.e.
// constructed only by the library (or via ConversationsModelFactory).
public partial class AnalyzeConversationResult
{
internal AnalyzeConversationResult() { }
public string DetectedLanguage { get { throw null; } }
public Azure.AI.Language.Conversations.BasePrediction Prediction { get { throw null; } }
public string Query { get { throw null; } }
}
public partial class AnswerSpan
{
internal AnswerSpan() { }
public double? Confidence { get { throw null; } }
public int? Length { get { throw null; } }
public int? Offset { get { throw null; } }
public string Text { get { throw null; } }
}
public partial class BasePrediction
{
internal BasePrediction() { }
public Azure.AI.Language.Conversations.ProjectKind ProjectKind { get { throw null; } set { } }
public string TopIntent { get { throw null; } }
}
// Service client surface: sync/async AnalyzeConversation pair plus key-credential
// constructors (protected parameterless ctor enables mocking).
public partial class ConversationAnalysisClient
{
protected ConversationAnalysisClient() { }
public ConversationAnalysisClient(System.Uri endpoint, Azure.AzureKeyCredential credential) { }
public ConversationAnalysisClient(System.Uri endpoint, Azure.AzureKeyCredential credential, Azure.AI.Language.Conversations.ConversationAnalysisClientOptions options) { }
public virtual System.Uri Endpoint { get { throw null; } }
public virtual Azure.Response<Azure.AI.Language.Conversations.AnalyzeConversationResult> AnalyzeConversation(string utterance, Azure.AI.Language.Conversations.ConversationsProject project, Azure.AI.Language.Conversations.AnalyzeConversationOptions options = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.Language.Conversations.AnalyzeConversationResult>> AnalyzeConversationAsync(string utterance, Azure.AI.Language.Conversations.ConversationsProject project, Azure.AI.Language.Conversations.AnalyzeConversationOptions options = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
// Client options expose a single service version (2021-11-01-preview).
public partial class ConversationAnalysisClientOptions : Azure.Core.ClientOptions
{
public ConversationAnalysisClientOptions(Azure.AI.Language.Conversations.ConversationAnalysisClientOptions.ServiceVersion version = Azure.AI.Language.Conversations.ConversationAnalysisClientOptions.ServiceVersion.V2021_11_01_Preview) { }
public enum ServiceVersion
{
V2021_11_01_Preview = 1,
}
}
// Mutable input options (public ctor + settable properties) followed by
// read-only result types (internal ctor + get-only properties).
public partial class ConversationCallingOptions
{
public ConversationCallingOptions() { }
public bool? IsLoggingEnabled { get { throw null; } set { } }
public string Language { get { throw null; } set { } }
public bool? Verbose { get { throw null; } set { } }
}
public partial class ConversationEntity
{
internal ConversationEntity() { }
public string Category { get { throw null; } }
public float Confidence { get { throw null; } }
public int Length { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> ListKeys { get { throw null; } }
public int Offset { get { throw null; } }
public string Text { get { throw null; } }
}
public partial class ConversationIntent
{
internal ConversationIntent() { }
public string Category { get { throw null; } }
public float Confidence { get { throw null; } }
}
public partial class ConversationParameters : Azure.AI.Language.Conversations.AnalysisParameters
{
public ConversationParameters() { }
public Azure.AI.Language.Conversations.ConversationCallingOptions CallingOptions { get { throw null; } set { } }
}
public partial class ConversationPrediction : Azure.AI.Language.Conversations.BasePrediction
{
internal ConversationPrediction() { }
public System.Collections.Generic.IReadOnlyList<Azure.AI.Language.Conversations.ConversationEntity> Entities { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.AI.Language.Conversations.ConversationIntent> Intents { get { throw null; } }
}
public partial class ConversationResult
{
internal ConversationResult() { }
public string DetectedLanguage { get { throw null; } }
public Azure.AI.Language.Conversations.ConversationPrediction Prediction { get { throw null; } }
public string Query { get { throw null; } }
}
// Model factory: static helpers to construct the internal-ctor result models
// above (for mocking/testing), mirroring each model's property set.
public static partial class ConversationsModelFactory
{
public static Azure.AI.Language.Conversations.AnalyzeConversationResult AnalyzeConversationResult(string query = null, string detectedLanguage = null, Azure.AI.Language.Conversations.BasePrediction prediction = null) { throw null; }
public static Azure.AI.Language.Conversations.AnswerSpan AnswerSpan(string text = null, double? confidence = default(double?), int? offset = default(int?), int? length = default(int?)) { throw null; }
public static Azure.AI.Language.Conversations.BasePrediction BasePrediction(Azure.AI.Language.Conversations.ProjectKind projectKind = default(Azure.AI.Language.Conversations.ProjectKind), string topIntent = null) { throw null; }
public static Azure.AI.Language.Conversations.ConversationEntity ConversationEntity(string category = null, string text = null, int offset = 0, int length = 0, float confidence = 0f, System.Collections.Generic.IEnumerable<string> listKeys = null) { throw null; }
public static Azure.AI.Language.Conversations.ConversationIntent ConversationIntent(string category = null, float confidence = 0f) { throw null; }
public static Azure.AI.Language.Conversations.ConversationPrediction ConversationPrediction(Azure.AI.Language.Conversations.ProjectKind projectKind = default(Azure.AI.Language.Conversations.ProjectKind), string topIntent = null, System.Collections.Generic.IEnumerable<Azure.AI.Language.Conversations.ConversationIntent> intents = null, System.Collections.Generic.IEnumerable<Azure.AI.Language.Conversations.ConversationEntity> entities = null) { throw null; }
public static Azure.AI.Language.Conversations.ConversationResult ConversationResult(string query = null, string detectedLanguage = null, Azure.AI.Language.Conversations.ConversationPrediction prediction = null) { throw null; }
public static Azure.AI.Language.Conversations.ConversationTargetIntentResult ConversationTargetIntentResult(Azure.AI.Language.Conversations.TargetKind targetKind = default(Azure.AI.Language.Conversations.TargetKind), string apiVersion = null, double confidence = 0, Azure.AI.Language.Conversations.ConversationResult result = null) { throw null; }
public static Azure.AI.Language.Conversations.KnowledgeBaseAnswer KnowledgeBaseAnswer(System.Collections.Generic.IEnumerable<string> questions = null, string answer = null, double? confidence = default(double?), int? id = default(int?), string source = null, System.Collections.Generic.IReadOnlyDictionary<string, string> metadata = null, Azure.AI.Language.Conversations.KnowledgeBaseAnswerDialog dialog = null, Azure.AI.Language.Conversations.AnswerSpan answerSpan = null) { throw null; }
public static Azure.AI.Language.Conversations.KnowledgeBaseAnswerDialog KnowledgeBaseAnswerDialog(bool? isContextOnly = default(bool?), System.Collections.Generic.IEnumerable<Azure.AI.Language.Conversations.KnowledgeBaseAnswerPrompt> prompts = null) { throw null; }
public static Azure.AI.Language.Conversations.KnowledgeBaseAnswerPrompt KnowledgeBaseAnswerPrompt(int? displayOrder = default(int?), int? qnaId = default(int?), string displayText = null) { throw null; }
public static Azure.AI.Language.Conversations.KnowledgeBaseAnswers KnowledgeBaseAnswers(System.Collections.Generic.IEnumerable<Azure.AI.Language.Conversations.KnowledgeBaseAnswer> answers = null) { throw null; }
public static Azure.AI.Language.Conversations.LuisTargetIntentResult LuisTargetIntentResult(Azure.AI.Language.Conversations.TargetKind targetKind = default(Azure.AI.Language.Conversations.TargetKind), string apiVersion = null, double confidenceScore = 0, object result = null) { throw null; }
public static Azure.AI.Language.Conversations.NoneLinkedTargetIntentResult NoneLinkedTargetIntentResult(Azure.AI.Language.Conversations.TargetKind targetKind = default(Azure.AI.Language.Conversations.TargetKind), string apiVersion = null, double confidence = 0, Azure.AI.Language.Conversations.ConversationResult result = null) { throw null; }
public static Azure.AI.Language.Conversations.OrchestratorPrediction OrchestratorPrediction(Azure.AI.Language.Conversations.ProjectKind projectKind = default(Azure.AI.Language.Conversations.ProjectKind), string topIntent = null, System.Collections.Generic.IReadOnlyDictionary<string, Azure.AI.Language.Conversations.TargetIntentResult> intents = null) { throw null; }
public static Azure.AI.Language.Conversations.QuestionAnsweringTargetIntentResult QuestionAnsweringTargetIntentResult(Azure.AI.Language.Conversations.TargetKind targetKind = default(Azure.AI.Language.Conversations.TargetKind), string apiVersion = null, double confidence = 0, Azure.AI.Language.Conversations.KnowledgeBaseAnswers result = null) { throw null; }
public static Azure.AI.Language.Conversations.TargetIntentResult TargetIntentResult(Azure.AI.Language.Conversations.TargetKind targetKind = default(Azure.AI.Language.Conversations.TargetKind), string apiVersion = null, double confidence = 0) { throw null; }
}
public partial class ConversationsProject
{
public ConversationsProject(string projectName, string deploymentName) { }
public string DeploymentName { get { throw null; } }
public string ProjectName { get { throw null; } }
}
public partial class ConversationTargetIntentResult : Azure.AI.Language.Conversations.TargetIntentResult
{
internal ConversationTargetIntentResult() { }
public Azure.AI.Language.Conversations.ConversationResult Result { get { throw null; } }
}
public partial class KnowledgeBaseAnswer
{
internal KnowledgeBaseAnswer() { }
public string Answer { get { throw null; } }
public Azure.AI.Language.Conversations.AnswerSpan AnswerSpan { get { throw null; } }
public double? Confidence { get { throw null; } }
public Azure.AI.Language.Conversations.KnowledgeBaseAnswerDialog Dialog { get { throw null; } }
public int? Id { get { throw null; } }
public System.Collections.Generic.IReadOnlyDictionary<string, string> Metadata { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> Questions { get { throw null; } }
public string Source { get { throw null; } }
}
public partial class KnowledgeBaseAnswerDialog
{
internal KnowledgeBaseAnswerDialog() { }
public bool? IsContextOnly { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.AI.Language.Conversations.KnowledgeBaseAnswerPrompt> Prompts { get { throw null; } }
}
public partial class KnowledgeBaseAnswerPrompt
{
internal KnowledgeBaseAnswerPrompt() { }
public int? DisplayOrder { get { throw null; } }
public string DisplayText { get { throw null; } }
public int? QnaId { get { throw null; } }
}
public partial class KnowledgeBaseAnswers
{
internal KnowledgeBaseAnswers() { }
public System.Collections.Generic.IReadOnlyList<Azure.AI.Language.Conversations.KnowledgeBaseAnswer> Answers { get { throw null; } }
}
public partial class LuisCallingOptions
{
public LuisCallingOptions() { }
public string BingSpellCheckSubscriptionKey { get { throw null; } set { } }
public bool? Log { get { throw null; } set { } }
public bool? ShowAllIntents { get { throw null; } set { } }
public bool? SpellCheck { get { throw null; } set { } }
public float? TimezoneOffset { get { throw null; } set { } }
public bool? Verbose { get { throw null; } set { } }
}
public partial class LuisParameters : Azure.AI.Language.Conversations.AnalysisParameters
{
public LuisParameters() { }
public System.Collections.Generic.IDictionary<string, object> AdditionalProperties { get { throw null; } }
public Azure.AI.Language.Conversations.LuisCallingOptions CallingOptions { get { throw null; } set { } }
public string Query { get { throw null; } set { } }
}
public partial class LuisTargetIntentResult : Azure.AI.Language.Conversations.TargetIntentResult
{
internal LuisTargetIntentResult() { }
public System.BinaryData Result { get { throw null; } }
}
/// <summary>
/// Target-intent result whose payload is a <see cref="Azure.AI.Language.Conversations.ConversationResult"/>.
/// API-shape listing: bodies throw.
/// </summary>
public partial class NoneLinkedTargetIntentResult : Azure.AI.Language.Conversations.TargetIntentResult
{
internal NoneLinkedTargetIntentResult() { }
public Azure.AI.Language.Conversations.ConversationResult Result { get { throw null; } }
}
/// <summary>
/// Prediction variant exposing a read-only map of intent name to <see cref="Azure.AI.Language.Conversations.TargetIntentResult"/>.
/// API-shape listing: bodies throw.
/// </summary>
public partial class OrchestratorPrediction : Azure.AI.Language.Conversations.BasePrediction
{
internal OrchestratorPrediction() { }
public System.Collections.Generic.IReadOnlyDictionary<string, Azure.AI.Language.Conversations.TargetIntentResult> Intents { get { throw null; } }
}
/// <summary>
/// Extensible-enum struct over string values: readonly, equatable, implicitly convertible from string,
/// with well-known values Conversation and Workflow. API-shape listing: bodies throw.
/// </summary>
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct ProjectKind : System.IEquatable<Azure.AI.Language.Conversations.ProjectKind>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public ProjectKind(string value) { throw null; }
public static Azure.AI.Language.Conversations.ProjectKind Conversation { get { throw null; } }
public static Azure.AI.Language.Conversations.ProjectKind Workflow { get { throw null; } }
public bool Equals(Azure.AI.Language.Conversations.ProjectKind other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.AI.Language.Conversations.ProjectKind left, Azure.AI.Language.Conversations.ProjectKind right) { throw null; }
public static implicit operator Azure.AI.Language.Conversations.ProjectKind (string value) { throw null; }
public static bool operator !=(Azure.AI.Language.Conversations.ProjectKind left, Azure.AI.Language.Conversations.ProjectKind right) { throw null; }
public override string ToString() { throw null; }
}
/// <summary>
/// Parameters variant with a loosely-typed (object) CallingOptions property. API-shape listing: bodies throw.
/// </summary>
public partial class QuestionAnsweringParameters : Azure.AI.Language.Conversations.AnalysisParameters
{
public QuestionAnsweringParameters() { }
public object CallingOptions { get { throw null; } set { } }
}
/// <summary>
/// Target-intent result whose payload is <see cref="Azure.AI.Language.Conversations.KnowledgeBaseAnswers"/>.
/// API-shape listing: bodies throw.
/// </summary>
public partial class QuestionAnsweringTargetIntentResult : Azure.AI.Language.Conversations.TargetIntentResult
{
internal QuestionAnsweringTargetIntentResult() { }
public Azure.AI.Language.Conversations.KnowledgeBaseAnswers Result { get { throw null; } }
}
/// <summary>
/// Base type for target-intent results. ApiVersion and Confidence are read-only;
/// TargetKind is the one settable member. API-shape listing: bodies throw.
/// </summary>
public partial class TargetIntentResult
{
internal TargetIntentResult() { }
public string ApiVersion { get { throw null; } }
public double Confidence { get { throw null; } }
public Azure.AI.Language.Conversations.TargetKind TargetKind { get { throw null; } set { } }
}
/// <summary>
/// Extensible-enum struct over string values: readonly, equatable, implicitly convertible from string,
/// with well-known values Conversation, Luis, NonLinked and QuestionAnswering. API-shape listing: bodies throw.
/// </summary>
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct TargetKind : System.IEquatable<Azure.AI.Language.Conversations.TargetKind>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public TargetKind(string value) { throw null; }
public static Azure.AI.Language.Conversations.TargetKind Conversation { get { throw null; } }
public static Azure.AI.Language.Conversations.TargetKind Luis { get { throw null; } }
public static Azure.AI.Language.Conversations.TargetKind NonLinked { get { throw null; } }
public static Azure.AI.Language.Conversations.TargetKind QuestionAnswering { get { throw null; } }
public bool Equals(Azure.AI.Language.Conversations.TargetKind other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.AI.Language.Conversations.TargetKind left, Azure.AI.Language.Conversations.TargetKind right) { throw null; }
public static implicit operator Azure.AI.Language.Conversations.TargetKind (string value) { throw null; }
public static bool operator !=(Azure.AI.Language.Conversations.TargetKind left, Azure.AI.Language.Conversations.TargetKind right) { throw null; }
public override string ToString() { throw null; }
}
}
namespace Microsoft.Extensions.Azure
{
/// <summary>
/// Extension methods that register <see cref="Azure.AI.Language.Conversations.ConversationAnalysisClient"/>
/// with an Azure client factory builder — one overload taking endpoint + key credential, one driven
/// entirely by a configuration object. API-shape listing: bodies throw.
/// </summary>
public static partial class ConversationAnalysisClientExtensions
{
public static Azure.Core.Extensions.IAzureClientBuilder<Azure.AI.Language.Conversations.ConversationAnalysisClient, Azure.AI.Language.Conversations.ConversationAnalysisClientOptions> AddConversationAnalysisClient<TBuilder>(this TBuilder builder, System.Uri endpoint, Azure.AzureKeyCredential credential) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; }
public static Azure.Core.Extensions.IAzureClientBuilder<Azure.AI.Language.Conversations.ConversationAnalysisClient, Azure.AI.Language.Conversations.ConversationAnalysisClientOptions> AddConversationAnalysisClient<TBuilder, TConfiguration>(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration<TConfiguration> { throw null; }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using System.Reflection.Emit;
using System.Windows.Forms;
namespace GodLesZ.Library.Controls {
/// <summary>
/// A TypedObjectListView is a type-safe wrapper around an ObjectListView.
/// </summary>
/// <remarks>
/// <para>C# does not support generics on controls. It can be faked to some degree, but it
/// cannot be completely overcome. In our case in particular, there is no way to create
/// the custom OLVColumn's that we need to truly be generic. So this wrapper is an
/// experiment in providing some type-safe access in a way that is useful and available today.</para>
/// <para>A TypedObjectListView is not more efficient than a normal ObjectListView.
/// Underneath, the same number of casts are performed. But it is easier to use since you
/// do not have to write the casts yourself.
/// </para>
/// </remarks>
/// <typeparam name="T">The class of model object that the list will manage</typeparam>
/// <example>
/// To use a TypedObjectListView, you write code like this:
/// <code>
/// TypedObjectListView<Person> tlist = new TypedObjectListView<Person>(this.listView1);
/// tlist.CheckStateGetter = delegate(Person x) { return x.IsActive; };
/// tlist.GetColumn(0).AspectGetter = delegate(Person x) { return x.Name; };
/// ...
/// </code>
/// To iterate over the selected objects, you can write something elegant like this:
/// <code>
/// foreach (Person x in tlist.SelectedObjects) {
/// x.GrantSalaryIncrease();
/// }
/// </code>
/// </example>
public class TypedObjectListView<T> where T : class {

    /// <summary>
    /// Creates a typed facade over the given (untyped) ObjectListView.
    /// </summary>
    /// <param name="olv">The listview to be wrapped</param>
    public TypedObjectListView(ObjectListView olv) {
        this.wrappedList = olv;
    }

    //--------------------------------------------------------------------------------------
    // Properties

    /// <summary>
    /// The single checked model object. When zero rows, or more than one row,
    /// are checked, this is null.
    /// </summary>
    public virtual T CheckedObject {
        get { return (T)this.wrappedList.CheckedObject; }
    }

    /// <summary>
    /// All currently checked model objects, as a strongly typed list.
    /// </summary>
    public virtual IList<T> CheckedObjects {
        get {
            IList raw = this.wrappedList.CheckedObjects;
            List<T> typed = new List<T>(raw.Count);
            foreach (object model in raw)
                typed.Add((T)model);
            return typed;
        }
        set { this.wrappedList.CheckedObjects = (IList)value; }
    }

    /// <summary>
    /// The underlying untyped ObjectListView being wrapped.
    /// </summary>
    public virtual ObjectListView ListView {
        get { return this.wrappedList; }
        set { this.wrappedList = value; }
    }
    private ObjectListView wrappedList;

    /// <summary>
    /// Gets or sets the complete collection of model objects shown in the list.
    /// </summary>
    public virtual IList<T> Objects {
        get {
            int rowCount = this.wrappedList.GetItemCount();
            List<T> typed = new List<T>(rowCount);
            for (int row = 0; row < rowCount; row++)
                typed.Add(this.GetModelObject(row));
            return typed;
        }
        set { this.wrappedList.SetObjects(value); }
    }

    /// <summary>
    /// The single selected model object. When zero rows, or more than one row,
    /// are selected, this is null.
    /// </summary>
    public virtual T SelectedObject {
        get { return (T)this.wrappedList.GetSelectedObject(); }
        set { this.wrappedList.SelectObject(value, true); }
    }

    /// <summary>
    /// The currently selected model objects, as a strongly typed list.
    /// </summary>
    public virtual IList<T> SelectedObjects {
        get {
            List<T> typed = new List<T>(this.wrappedList.SelectedIndices.Count);
            foreach (int row in this.wrappedList.SelectedIndices)
                typed.Add((T)this.wrappedList.GetModelObject(row));
            return typed;
        }
        set { this.wrappedList.SelectObjects((IList)value); }
    }

    //--------------------------------------------------------------------------------------
    // Accessors

    /// <summary>
    /// Returns a typed wrapper around the column at the given index.
    /// </summary>
    /// <param name="i">The index of the column</param>
    /// <returns>A typed column wrapper</returns>
    public virtual TypedColumn<T> GetColumn(int i) {
        return new TypedColumn<T>(this.wrappedList.GetColumn(i));
    }

    /// <summary>
    /// Returns a typed wrapper around the column with the given name.
    /// </summary>
    /// <param name="name">The name of the column</param>
    /// <returns>A typed column wrapper</returns>
    public virtual TypedColumn<T> GetColumn(string name) {
        return new TypedColumn<T>(this.wrappedList.GetColumn(name));
    }

    /// <summary>
    /// Returns the model object at the given row index, cast to T.
    /// </summary>
    /// <param name="index">The index of the model object</param>
    /// <returns>The model object or null</returns>
    public virtual T GetModelObject(int index) {
        return (T)this.wrappedList.GetModelObject(index);
    }

    //--------------------------------------------------------------------------------------
    // Delegates

    /// <summary>
    /// Typed variant of the check-state getter: maps a row object to its CheckState.
    /// </summary>
    /// <param name="rowObject"></param>
    /// <returns></returns>
    public delegate CheckState TypedCheckStateGetterDelegate(T rowObject);

    /// <summary>
    /// Gets or sets the typed check state getter. The wrapped list is given an
    /// untyped adapter that casts each row object to T before delegating.
    /// </summary>
    public virtual TypedCheckStateGetterDelegate CheckStateGetter {
        get { return this.typedCheckStateGetter; }
        set {
            this.typedCheckStateGetter = value;
            if (value == null) {
                this.wrappedList.CheckStateGetter = null;
                return;
            }
            // The adapter reads the field (not 'value') so it always reflects
            // the most recently assigned typed delegate.
            this.wrappedList.CheckStateGetter =
                (object model) => this.typedCheckStateGetter((T)model);
        }
    }
    private TypedCheckStateGetterDelegate typedCheckStateGetter;

    /// <summary>
    /// Typed variant of the boolean check-state getter.
    /// </summary>
    /// <param name="rowObject"></param>
    /// <returns></returns>
    public delegate bool TypedBooleanCheckStateGetterDelegate(T rowObject);

    /// <summary>
    /// Sets the typed boolean check state getter (write-only; the delegate is
    /// captured directly by the untyped adapter).
    /// </summary>
    public virtual TypedBooleanCheckStateGetterDelegate BooleanCheckStateGetter {
        set {
            if (value == null) {
                this.wrappedList.BooleanCheckStateGetter = null;
                return;
            }
            TypedBooleanCheckStateGetterDelegate typedDelegate = value;
            this.wrappedList.BooleanCheckStateGetter =
                (object model) => typedDelegate((T)model);
        }
    }

    /// <summary>
    /// Typed variant of the check-state putter: receives the row object and the
    /// requested state, and returns the state that was actually applied.
    /// </summary>
    /// <param name="rowObject"></param>
    /// <param name="newValue"></param>
    /// <returns></returns>
    public delegate CheckState TypedCheckStatePutterDelegate(T rowObject, CheckState newValue);

    /// <summary>
    /// Gets or sets the typed check state putter.
    /// </summary>
    public virtual TypedCheckStatePutterDelegate CheckStatePutter {
        get { return this.typedCheckStatePutter; }
        set {
            this.typedCheckStatePutter = value;
            if (value == null) {
                this.wrappedList.CheckStatePutter = null;
                return;
            }
            this.wrappedList.CheckStatePutter =
                (object model, CheckState requested) => this.typedCheckStatePutter((T)model, requested);
        }
    }
    private TypedCheckStatePutterDelegate typedCheckStatePutter;

    /// <summary>
    /// Typed variant of the boolean check-state putter.
    /// </summary>
    /// <param name="rowObject"></param>
    /// <param name="newValue"></param>
    /// <returns></returns>
    public delegate bool TypedBooleanCheckStatePutterDelegate(T rowObject, bool newValue);

    /// <summary>
    /// Sets the typed boolean check state putter (write-only).
    /// </summary>
    public virtual TypedBooleanCheckStatePutterDelegate BooleanCheckStatePutter {
        set {
            if (value == null) {
                this.wrappedList.BooleanCheckStatePutter = null;
                return;
            }
            TypedBooleanCheckStatePutterDelegate typedDelegate = value;
            this.wrappedList.BooleanCheckStatePutter =
                (object model, bool requested) => typedDelegate((T)model, requested);
        }
    }

    /// <summary>
    /// Typed variant of the cell tool-tip getter.
    /// </summary>
    /// <param name="column"></param>
    /// <param name="modelObject"></param>
    /// <returns></returns>
    public delegate String TypedCellToolTipGetterDelegate(OLVColumn column, T modelObject);

    /// <summary>
    /// Sets the typed cell tooltip getter (write-only).
    /// </summary>
    public virtual TypedCellToolTipGetterDelegate CellToolTipGetter {
        set {
            if (value == null) {
                this.wrappedList.CellToolTipGetter = null;
                return;
            }
            TypedCellToolTipGetterDelegate typedDelegate = value;
            this.wrappedList.CellToolTipGetter =
                (OLVColumn column, object model) => typedDelegate(column, (T)model);
        }
    }

    /// <summary>
    /// Gets or sets the header tool tip getter. No typing is needed here, so the
    /// call is forwarded to the wrapped list unchanged.
    /// </summary>
    public virtual HeaderToolTipGetterDelegate HeaderToolTipGetter {
        get { return this.wrappedList.HeaderToolTipGetter; }
        set { this.wrappedList.HeaderToolTipGetter = value; }
    }

    //--------------------------------------------------------------------------------------
    // Commands

    /// <summary>
    /// Generates an AspectGetter for every column that has an AspectName.
    /// </summary>
    public virtual void GenerateAspectGetters() {
        for (int columnIndex = 0; columnIndex < this.ListView.Columns.Count; columnIndex++)
            this.GetColumn(columnIndex).GenerateAspectGetter();
    }
}
/// <summary>
/// A type-safe wrapper around an OLVColumn
/// </summary>
/// <typeparam name="T"></typeparam>
public class TypedColumn<T> where T : class {
/// <summary>
/// Creates a TypedColumn wrapping the given untyped column.
/// </summary>
/// <param name="column">The OLVColumn being wrapped</param>
public TypedColumn(OLVColumn column) {
this.column = column;
}
private OLVColumn column;
/// <summary>
/// Typed aspect getter: extracts the value this column displays from a row object.
/// </summary>
/// <param name="rowObject">The model object for a row</param>
/// <returns>The value to display</returns>
public delegate Object TypedAspectGetterDelegate(T rowObject);
/// <summary>
/// Typed aspect putter: writes an edited value back into a row object.
/// </summary>
/// <param name="rowObject">The model object for a row</param>
/// <param name="newValue">The value to store</param>
public delegate void TypedAspectPutterDelegate(T rowObject, Object newValue);
/// <summary>
/// Typed group key getter: returns the value the column uses to group this row.
/// </summary>
/// <param name="rowObject">The model object for a row</param>
/// <returns>The grouping key</returns>
public delegate Object TypedGroupKeyGetterDelegate(T rowObject);
/// <summary>
/// Typed image getter: returns the value the underlying column uses to select
/// the row's image (semantics are defined by OLVColumn.ImageGetter).
/// </summary>
/// <param name="rowObject">The model object for a row</param>
/// <returns>The image selector</returns>
public delegate Object TypedImageGetterDelegate(T rowObject);
/// <summary>
/// Gets or sets the typed aspect getter. The untyped adapter installed on the
/// column reads the private field, so it always calls the most recently
/// assigned typed delegate.
/// </summary>
public TypedAspectGetterDelegate AspectGetter {
get { return this.aspectGetter; }
set {
this.aspectGetter = value;
if (value == null)
this.column.AspectGetter = null;
else
this.column.AspectGetter = delegate(object x) {
return this.aspectGetter((T)x);
};
}
}
private TypedAspectGetterDelegate aspectGetter;
/// <summary>
/// Gets or sets the typed aspect putter.
/// </summary>
public TypedAspectPutterDelegate AspectPutter {
get { return aspectPutter; }
set {
this.aspectPutter = value;
if (value == null)
this.column.AspectPutter = null;
else
this.column.AspectPutter = delegate(object x, object newValue) {
this.aspectPutter((T)x, newValue);
};
}
}
private TypedAspectPutterDelegate aspectPutter;
/// <summary>
/// Gets or sets the typed image getter.
/// </summary>
public TypedImageGetterDelegate ImageGetter {
get { return imageGetter; }
set {
this.imageGetter = value;
if (value == null)
this.column.ImageGetter = null;
else
this.column.ImageGetter = delegate(object x) {
return this.imageGetter((T)x);
};
}
}
private TypedImageGetterDelegate imageGetter;
/// <summary>
/// Gets or sets the typed group key getter.
/// </summary>
public TypedGroupKeyGetterDelegate GroupKeyGetter {
get { return groupKeyGetter; }
set {
this.groupKeyGetter = value;
if (value == null)
this.column.GroupKeyGetter = null;
else
this.column.GroupKeyGetter = delegate(object x) {
return this.groupKeyGetter((T)x);
};
}
}
private TypedGroupKeyGetterDelegate groupKeyGetter;
#region Dynamic methods
/// <summary>
/// Generate an aspect getter that does the same thing as the AspectName,
/// except without using reflection.
/// </summary>
/// <remarks>
/// <para>
/// If you have an AspectName of "Owner.Address.Postcode", this will generate
/// the equivalent of: <code>this.AspectGetter = delegate (object x) {
///     return x.Owner.Address.Postcode;
/// }
/// </code>
/// </para>
/// <para>
/// If AspectName is empty, this method will do nothing, otherwise
/// this will replace any existing AspectGetter.
/// </para>
/// </remarks>
public void GenerateAspectGetter() {
if (!String.IsNullOrEmpty(this.column.AspectName))
this.AspectGetter = this.GenerateAspectGetter(typeof(T), this.column.AspectName);
}
/// <summary>
/// Generates an aspect getter method dynamically. The method will execute
/// the given dotted chain of selectors against a model object given at runtime.
/// </summary>
/// <param name="type">The type of model object to be passed to the generated method</param>
/// <param name="path">A dotted chain of selectors. Each selector can be the name of a
/// field, property or parameter-less method.</param>
/// <returns>A typed delegate</returns>
private TypedAspectGetterDelegate GenerateAspectGetter(Type type, string path) {
DynamicMethod getter = new DynamicMethod(String.Empty,
typeof(Object), new Type[] { type }, type, true);
this.GenerateIL(type, path, getter.GetILGenerator());
return (TypedAspectGetterDelegate)getter.CreateDelegate(typeof(TypedAspectGetterDelegate));
}
/// <summary>
/// This method generates the actual IL for the method: it loads the argument,
/// emits one member access per dotted path part, boxes a value-type result,
/// and returns whatever is left on the stack.
/// </summary>
/// <param name="type">Static type of the model object argument</param>
/// <param name="path">Dotted chain of member names</param>
/// <param name="il">Generator for the dynamic method being built</param>
private void GenerateIL(Type type, string path, ILGenerator il) {
// Push our model object onto the stack
il.Emit(OpCodes.Ldarg_0);
// Generate the IL to access each part of the dotted chain.
// GeneratePart returns null on failure, in which case the generated method
// returns the error string that GeneratePart left on the stack.
string[] parts = path.Split('.');
for (int i = 0; i < parts.Length; i++) {
type = this.GeneratePart(il, type, parts[i], (i == parts.Length - 1));
if (type == null)
break;
}
// If the object to be returned is a value type (e.g. int, bool), it
// must be boxed, since the delegate returns an Object.
// (The !typeof(T).IsValueType test is always true here because of the
// 'where T : class' constraint.)
if (type != null && type.IsValueType && !typeof(T).IsValueType)
il.Emit(OpCodes.Box, type);
il.Emit(OpCodes.Ret);
}
// Emits the IL for one selector in the dotted chain and returns the static type
// that access leaves on the stack, or null (after emitting an error-string load)
// when no usable member with that name exists.
private Type GeneratePart(ILGenerator il, Type type, string pathPart, bool isLastPart) {
// TODO: Generate check for null
// Find the first member with the given name that is a field, property, or parameter-less method
List<MemberInfo> infos = new List<MemberInfo>(type.GetMember(pathPart));
MemberInfo info = infos.Find(delegate(MemberInfo x) {
if (x.MemberType == MemberTypes.Field || x.MemberType == MemberTypes.Property)
return true;
if (x.MemberType == MemberTypes.Method)
return ((MethodInfo)x).GetParameters().Length == 0;
else
return false;
});
// If we couldn't find anything with that name, pop the current result and return an error
// (the error message string becomes the generated method's return value)
if (info == null) {
il.Emit(OpCodes.Pop);
il.Emit(OpCodes.Ldstr, String.Format("'{0}' is not a parameter-less method, property or field of type '{1}'", pathPart, type.FullName));
return null;
}
// Generate the correct IL to access the member. We remember the type of object that is going to be returned
// so that we can do a method lookup on it at the next iteration
Type resultType = null;
switch (info.MemberType) {
case MemberTypes.Method:
MethodInfo mi = (MethodInfo)info;
if (mi.IsVirtual)
il.Emit(OpCodes.Callvirt, mi);
else
il.Emit(OpCodes.Call, mi);
resultType = mi.ReturnType;
break;
case MemberTypes.Property:
// NOTE(review): properties are invoked with Call (not Callvirt), so a
// virtual getter override may not be dispatched — confirm this is intended.
PropertyInfo pi = (PropertyInfo)info;
il.Emit(OpCodes.Call, pi.GetGetMethod());
resultType = pi.PropertyType;
break;
case MemberTypes.Field:
FieldInfo fi = (FieldInfo)info;
il.Emit(OpCodes.Ldfld, fi);
resultType = fi.FieldType;
break;
}
// If the method returned a value type, and something is going to call a method on that value,
// we need to load its address onto the stack, rather than the object itself.
if (resultType.IsValueType && !isLastPart) {
LocalBuilder lb = il.DeclareLocal(resultType);
il.Emit(OpCodes.Stloc, lb);
il.Emit(OpCodes.Ldloca, lb);
}
return resultType;
}
#endregion
}
}
| |
/*
* Copyright (c) 2015, InWorldz Halcyon Developers
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of halcyon nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenMetaverse;
using OpenSim.Framework;
using OpenMetaverse.StructuredData;
namespace InWorldz.Testing
{
public class MockClientAPI : IClientAPI
{
// Stub implementations: each member returns a fixed, neutral value (or ignores writes)
// so code under test can interact with an IClientAPI without a real viewer connection.
public OpenMetaverse.Vector3 StartPos
{
get
{
return OpenMetaverse.Vector3.Zero;
}
set
{
}
}
// Identity values are settable so a test can impersonate a specific agent/session.
public OpenMetaverse.UUID AgentId { get; set; }
public OpenMetaverse.UUID SessionId { get; set; }
public OpenMetaverse.UUID SecureSessionId
{
get { return OpenMetaverse.UUID.Zero; }
}
// Group state: always "no active group" with zero powers.
public OpenMetaverse.UUID ActiveGroupId
{
get { return OpenMetaverse.UUID.Zero; }
}
public string ActiveGroupName
{
get { return String.Empty; }
}
public ulong ActiveGroupPowers
{
get { return 0; }
}
public ulong GetGroupPowers(OpenMetaverse.UUID groupID)
{
return 0;
}
public ulong? GetGroupPowersOrNull(OpenMetaverse.UUID groupID)
{
return null;
}
// Always reports membership, regardless of the group asked about.
public bool IsGroupMember(OpenMetaverse.UUID GroupID)
{
return true;
}
// Fixed mock identity: "Mock User".
public string FirstName
{
get { return "Mock"; }
}
public string LastName
{
get { return "User"; }
}
public IScene Scene
{
get { return null; }
}
public int NextAnimationSequenceNumber
{
get { return 0; }
}
public string Name
{
get { return "Mock User"; }
}
// The mock client always presents itself as active/connected.
public bool IsActive
{
get { return true; }
}
public bool SendLogoutPacketWhenClosing
{
set { }
}
public bool DebugCrossings
{
get { return false; }
set { }
}
// Fixed neighbors range of 2; setter is a no-op.
public uint NeighborsRange
{
get { return 2; }
set { }
}
public uint CircuitCode { get; set; }
#pragma warning disable 0067 // disable "X is never used"
public event Action<int> OnSetThrottles;
public event GenericMessage OnGenericMessage;
public event ImprovedInstantMessage OnInstantMessage;
public event ChatMessage OnChatFromClient;
public event TextureRequest OnRequestTexture;
public event RezObject OnRezObject;
public event RestoreObject OnRestoreObject;
public event ModifyTerrain OnModifyTerrain;
public event BakeTerrain OnBakeTerrain;
public event EstateChangeInfo OnEstateChangeInfo;
public event SimWideDeletesDelegate OnSimWideDeletes;
public event SetAppearance OnSetAppearance;
public event AvatarNowWearing OnAvatarNowWearing;
public event RezSingleAttachmentFromInv OnRezSingleAttachmentFromInv;
public event RezMultipleAttachmentsFromInv OnRezMultipleAttachmentsFromInv;
public event UUIDNameRequest OnDetachAttachmentIntoInv;
public event ObjectAttach OnObjectAttach;
public event ObjectDeselect OnObjectDetach;
public event ObjectDrop OnObjectDrop;
public event StartAnim OnStartAnim;
public event StopAnim OnStopAnim;
public event LinkObjects OnLinkObjects;
public event DelinkObjects OnDelinkObjects;
public event RequestMapBlocks OnRequestMapBlocks;
public event RequestMapName OnMapNameRequest;
public event TeleportLocationRequest OnTeleportLocationRequest;
public event DisconnectUser OnDisconnectUser;
public event RequestAvatarProperties OnRequestAvatarProperties;
public event RequestAvatarInterests OnRequestAvatarInterests;
public event SetAlwaysRun OnSetAlwaysRun;
public event TeleportLandmarkRequest OnTeleportLandmarkRequest;
public event DeRezObjects OnDeRezObjects;
public event Action<IClientAPI> OnRegionHandShakeReply;
public event GenericCall2 OnRequestWearables;
public event GenericCall2 OnCompleteMovementToRegion;
public event UpdateAgent OnAgentUpdate;
public event AgentRequestSit OnAgentRequestSit;
public event AgentSit OnAgentSit;
public event AvatarPickerRequest OnAvatarPickerRequest;
public event Action<IClientAPI> OnRequestAvatarsData;
public event AddNewPrim OnAddPrim;
public event FetchInventory OnAgentDataUpdateRequest;
public event TeleportLocationRequest OnSetStartLocationRequest;
public event RequestGodlikePowers OnRequestGodlikePowers;
public event GodKickUser OnGodKickUser;
public event ObjectDuplicate OnObjectDuplicate;
public event ObjectDuplicateOnRay OnObjectDuplicateOnRay;
public event GrabObject OnGrabObject;
public event DeGrabObject OnDeGrabObject;
public event MoveObject OnGrabUpdate;
public event SpinStart OnSpinStart;
public event SpinObject OnSpinUpdate;
public event SpinStop OnSpinStop;
public event UpdateShape OnUpdatePrimShape;
public event ObjectExtraParams OnUpdateExtraParams;
public event ObjectRequest OnObjectRequest;
public event ObjectSelect OnObjectSelect;
public event ObjectDeselect OnObjectDeselect;
public event GenericCall7 OnObjectDescription;
public event GenericCall7 OnObjectName;
public event GenericCall7 OnObjectClickAction;
public event GenericCall7 OnObjectMaterial;
public event RequestObjectPropertiesFamily OnRequestObjectPropertiesFamily;
public event UpdatePrimFlags OnUpdatePrimFlags;
public event UpdatePrimTexture OnUpdatePrimTexture;
public event UpdateVectorWithUndoSupport OnUpdatePrimGroupPosition;
public event UpdateVectorWithUndoSupport OnUpdatePrimSinglePosition;
public event UpdatePrimRotation OnUpdatePrimGroupRotation;
public event UpdatePrimSingleRotation OnUpdatePrimSingleRotation;
public event UpdatePrimSingleRotationPosition OnUpdatePrimSingleRotationPosition;
public event UpdatePrimGroupRotation OnUpdatePrimGroupMouseRotation;
public event UpdateVector OnUpdatePrimScale;
public event UpdateVector OnUpdatePrimGroupScale;
public event StatusChange OnChildAgentStatus;
public event GenericCall2 OnStopMovement;
public event Action<OpenMetaverse.UUID> OnRemoveAvatar;
public event ObjectPermissions OnObjectPermissions;
public event CreateNewInventoryItem OnCreateNewInventoryItem;
public event LinkInventoryItem OnLinkInventoryItem;
public event CreateInventoryFolder OnCreateNewInventoryFolder;
public event UpdateInventoryFolder OnUpdateInventoryFolder;
public event MoveInventoryFolder OnMoveInventoryFolder;
public event FetchInventoryDescendents OnFetchInventoryDescendents;
public event PurgeInventoryDescendents OnPurgeInventoryDescendents;
public event FetchInventory OnFetchInventory;
public event RequestTaskInventory OnRequestTaskInventory;
public event UpdateInventoryItem OnUpdateInventoryItem;
public event CopyInventoryItem OnCopyInventoryItem;
public event MoveInventoryItem OnMoveInventoryItem;
public event RemoveInventoryFolder OnRemoveInventoryFolder;
public event RemoveInventoryItem OnRemoveInventoryItem;
public event RemoveInventoryItem OnPreRemoveInventoryItem;
public event UDPAssetUploadRequest OnAssetUploadRequest;
public event XferReceive OnXferReceive;
public event RequestXfer OnRequestXfer;
public event ConfirmXfer OnConfirmXfer;
public event AbortXfer OnAbortXfer;
public event RezScript OnRezScript;
public event UpdateTaskInventory OnUpdateTaskInventory;
public event MoveTaskInventory OnMoveTaskItem;
public event RemoveTaskInventory OnRemoveTaskItem;
public event RequestAsset OnRequestAsset;
public event UUIDNameRequest OnNameFromUUIDRequest;
public event ParcelAccessListRequest OnParcelAccessListRequest;
public event ParcelAccessListUpdateRequest OnParcelAccessListUpdateRequest;
public event ParcelPropertiesRequest OnParcelPropertiesRequest;
public event ParcelDivideRequest OnParcelDivideRequest;
public event ParcelJoinRequest OnParcelJoinRequest;
public event ParcelPropertiesUpdateRequest OnParcelPropertiesUpdateRequest;
public event ParcelSelectObjects OnParcelSelectObjects;
public event ParcelObjectOwnerRequest OnParcelObjectOwnerRequest;
public event ParcelAbandonRequest OnParcelAbandonRequest;
public event ParcelGodForceOwner OnParcelGodForceOwner;
public event ParcelReclaim OnParcelReclaim;
public event ParcelReturnObjectsRequest OnParcelReturnObjectsRequest;
public event ParcelDeedToGroup OnParcelDeedToGroup;
public event RegionInfoRequest OnRegionInfoRequest;
public event EstateCovenantRequest OnEstateCovenantRequest;
public event FriendActionDelegate OnApproveFriendRequest;
public event FriendActionDelegate OnDenyFriendRequest;
public event FriendshipTermination OnTerminateFriendship;
public event MoneyTransferRequest OnMoneyTransferRequest;
public event EconomyDataRequest OnEconomyDataRequest;
public event MoneyBalanceRequest OnMoneyBalanceRequest;
public event UpdateAvatarProperties OnUpdateAvatarProperties;
public event AvatarInterestsUpdate OnAvatarInterestsUpdate;
public event ParcelBuy OnParcelBuy;
public event RequestPayPrice OnRequestPayPrice;
public event ObjectSaleInfo OnObjectSaleInfo;
public event ObjectBuy OnObjectBuy;
public event BuyObjectInventory OnBuyObjectInventory;
public event RequestTerrain OnRequestTerrain;
public event RequestTerrain OnUploadTerrain;
public event ObjectIncludeInSearch OnObjectIncludeInSearch;
public event UUIDNameRequest OnTeleportHomeRequest;
public event ScriptAnswer OnScriptAnswer;
public event AgentSit OnUndo;
public event AgentSit OnRedo;
public event LandUndo OnLandUndo;
// -- IClientAPI event surface ----------------------------------------------
// These events exist only to satisfy the IClientAPI interface; the mock
// never raises them, hence the matching "event is never used" warning
// suppression restored below (CS0067).
public event ForceReleaseControls OnForceReleaseControls;
public event GodLandStatRequest OnLandStatRequest;
public event DetailedEstateDataRequest OnDetailedEstateDataRequest;
public event SetEstateFlagsRequest OnSetEstateFlagsRequest;
public event SetEstateTerrainBaseTexture OnSetEstateTerrainBaseTexture;
public event SetEstateTerrainDetailTexture OnSetEstateTerrainDetailTexture;
public event SetEstateTerrainTextureHeights OnSetEstateTerrainTextureHeights;
public event CommitEstateTerrainTextureRequest OnCommitEstateTerrainTextureRequest;
public event SetRegionTerrainSettings OnSetRegionTerrainSettings;
public event EstateRestartSimRequest OnEstateRestartSimRequest;
public event EstateChangeCovenantRequest OnEstateChangeCovenantRequest;
public event UpdateEstateAccessDeltaRequest OnUpdateEstateAccessDeltaRequest;
public event SimulatorBlueBoxMessageRequest OnSimulatorBlueBoxMessageRequest;
public event EstateBlueBoxMessageRequest OnEstateBlueBoxMessageRequest;
public event EstateDebugRegionRequest OnEstateDebugRegionRequest;
public event EstateTeleportOneUserHomeRequest OnEstateTeleportOneUserHomeRequest;
public event EstateTeleportAllUsersHomeRequest OnEstateTeleportAllUsersHomeRequest;
public event UUIDNameRequest OnUUIDGroupNameRequest;
public event RegionHandleRequest OnRegionHandleRequest;
public event ParcelInfoRequest OnParcelInfoRequest;
public event RequestObjectPropertiesFamily OnObjectGroupRequest;
public event ScriptReset OnScriptReset;
public event GetScriptRunning OnGetScriptRunning;
public event SetScriptRunning OnSetScriptRunning;
public event UpdateVector OnAutoPilotGo;
public event TerrainUnacked OnUnackedTerrain;
public event ActivateGestures OnActivateGestures;
public event DeactivateGestures OnDeactivateGestures;
public event ObjectOwner OnObjectOwner;
public event DirPlacesQuery OnDirPlacesQuery;
public event DirFindQuery OnDirFindQuery;
public event DirLandQuery OnDirLandQuery;
public event DirPopularQuery OnDirPopularQuery;
public event DirClassifiedQuery OnDirClassifiedQuery;
public event EventInfoRequest OnEventInfoRequest;
public event ParcelSetOtherCleanTime OnParcelSetOtherCleanTime;
public event MapItemRequest OnMapItemRequest;
public event OfferCallingCard OnOfferCallingCard;
public event AcceptCallingCard OnAcceptCallingCard;
public event DeclineCallingCard OnDeclineCallingCard;
public event SoundTrigger OnSoundTrigger;
public event StartLure OnStartLure;
public event TeleportLureRequest OnTeleportLureRequest;
public event NetworkStats OnNetworkStatsUpdate;
public event ClassifiedInfoRequest OnClassifiedInfoRequest;
public event ClassifiedInfoUpdate OnClassifiedInfoUpdate;
public event ClassifiedDelete OnClassifiedDelete;
public event ClassifiedDelete OnClassifiedGodDelete;
public event EventNotificationAddRequest OnEventNotificationAddRequest;
public event EventNotificationRemoveRequest OnEventNotificationRemoveRequest;
public event EventGodDelete OnEventGodDelete;
public event ParcelDwellRequest OnParcelDwellRequest;
public event UserInfoRequest OnUserInfoRequest;
public event UpdateUserInfo OnUpdateUserInfo;
public event RetrieveInstantMessages OnRetrieveInstantMessages;
public event PickDelete OnPickDelete;
public event PickGodDelete OnPickGodDelete;
public event PickInfoUpdate OnPickInfoUpdate;
public event AvatarNotesUpdate OnAvatarNotesUpdate;
public event MuteListRequest OnMuteListRequest;
public event MuteListEntryUpdate OnUpdateMuteListEntry;
public event MuteListEntryRemove OnRemoveMuteListEntry;
public event PlacesQuery OnPlacesQuery;
public event GrantUserRights OnGrantUserRights;
public event FreezeUserUpdate OnParcelFreezeUser;
public event EjectUserUpdate OnParcelEjectUser;
public event GroupVoteHistoryRequest OnGroupVoteHistoryRequest;
public event GroupAccountDetailsRequest OnGroupAccountDetailsRequest;
public event GroupAccountSummaryRequest OnGroupAccountSummaryRequest;
public event GroupAccountTransactionsRequest OnGroupAccountTransactionsRequest;
public event AgentCachedTextureRequest OnAgentCachedTextureRequest;
public event ActivateGroup OnActivateGroup;
public event GodlikeMessage OnGodlikeMessage;
public event GodlikeMessage OnEstateTelehubRequest;
#pragma warning restore 0067
// The mock always reports a fixed loopback endpoint; a fresh IPEndPoint
// is constructed on every read, exactly as before.
public System.Net.IPEndPoint RemoteEndPoint =>
    new System.Net.IPEndPoint(System.Net.IPAddress.Parse("127.0.0.1"), 18374);
// Mock clients are never "logging out"; the setter is accepted and ignored.
public bool IsLoggingOut
{
    get => false;
    set { }
}
// Debug-level and inbound-packet hooks: the mock ignores both.
public void SetDebugPacketLevel(int newDebug)
{
}
public void ProcessInPacket(OpenMetaverse.Packets.Packet NewPack)
{
}
// Closing the mock connection only notifies OnConnectionClosed subscribers.
// The null-conditional invoke snapshots the delegate, matching the original
// copy-then-check pattern.
public void Close()
{
    OnConnectionClosed?.Invoke(this);
}
// -- No-op outbound stubs --------------------------------------------------
// The mock client discards everything the simulator tries to send it.
public void Kick(string message)
{
}
public void Start()
{
}
public void SendWearables(AvatarWearable[] wearables, int serial)
{
}
public void SendAppearance(AvatarAppearance app, Vector3 hover)
{
}
public void SendStartPingCheck(byte seq)
{
}
public void SendKillObject(ulong regionHandle, uint localID)
{
}
public void SendKillObjects(ulong regionHandle, uint[] localIDs)
{
}
public void SendNonPermanentKillObject(ulong regionHandle, uint localID)
{
}
public void SendNonPermanentKillObjects(ulong regionHandle, uint[] localIDs)
{
}
public void SendAnimations(OpenMetaverse.UUID[] animID, int[] seqs, OpenMetaverse.UUID sourceAgentId, OpenMetaverse.UUID[] objectIDs)
{
}
public void SendRegionHandshake(RegionInfo regionInfo, RegionHandshakeArgs args)
{
}
public void SendChatMessage(string message, byte type, OpenMetaverse.Vector3 fromPos, string fromName, OpenMetaverse.UUID fromAgentID, OpenMetaverse.UUID ownerID, byte source, byte audible)
{
}
public void SendInstantMessage(GridInstantMessage im)
{
}
public void SendGenericMessage(string method, List<string> message)
{
}
public void SendLayerData(float[] map)
{
}
public void SendLayerData(int px, int py, float[] map)
{
}
public void SendWindData(OpenMetaverse.Vector2[] windSpeeds)
{
}
public void SendCloudData(float[] cloudCover)
{
}
public void MoveAgentIntoRegion(RegionInfo regInfo, OpenMetaverse.Vector3 pos, OpenMetaverse.Vector3 look)
{
}
public void InformClientOfNeighbour(ulong neighbourHandle, System.Net.IPEndPoint neighbourExternalEndPoint)
{
}
// Fabricates circuit data describing the fake "Mock User" client.
// Appearance, BaseFolder and InventoryFolder are deliberately left unset,
// as in the original.
public AgentCircuitData RequestClientInfo()
{
    return new AgentCircuitData
    {
        AgentID = AgentId,
        CapsPath = OpenMetaverse.UUID.Random().ToString(),
        child = false,
        CircuitCode = 100,
        ClientVersion = "Test Client",
        FirstName = "Mock",
        LastName = "User",
        SecureSessionID = OpenMetaverse.UUID.Random(),
        SessionID = OpenMetaverse.UUID.Random()
    };
}
// -- More no-op outbound stubs (map, teleport, avatar, inventory, money,
// sound, UI) ---------------------------------------------------------------
public void SendMapBlock(List<MapBlockData> mapBlocks, uint flag)
{
}
public void SendLocalTeleport(OpenMetaverse.Vector3 position, OpenMetaverse.Vector3 lookAt, uint flags)
{
}
public void SendTeleportFailed(string reason)
{
}
public void SendTeleportLocationStart()
{
}
public void SendPayPrice(OpenMetaverse.UUID objectID, int[] payPrice)
{
}
public void SendAvatarData(ulong regionHandle, string firstName, string lastName, string grouptitle, OpenMetaverse.UUID avatarID, uint avatarLocalID,
OpenMetaverse.Vector3 Pos, byte[] textureEntry, uint parentID, OpenMetaverse.Quaternion rotation, OpenMetaverse.Vector4 collisionPlane,
OpenMetaverse.Vector3 velocity, bool immediate)
{
}
public void SendAvatarTerseUpdate(ulong regionHandle, ushort timeDilation, uint localID, OpenMetaverse.Vector3 position, OpenMetaverse.Vector3 velocity, OpenMetaverse.Vector3 acceleration, OpenMetaverse.Quaternion rotation, OpenMetaverse.UUID agentid, OpenMetaverse.Vector4 collisionPlane)
{
}
public void SendCoarseLocationUpdate(List<OpenMetaverse.UUID> users, List<OpenMetaverse.Vector3> CoarseLocations)
{
}
public void AttachObject(uint localID, OpenMetaverse.Quaternion rotation, byte attachPoint, OpenMetaverse.UUID ownerID)
{
}
public void SetChildAgentThrottle(byte[] throttle)
{
}
public void SendPrimitiveToClient(object sop, uint clientFlags, OpenMetaverse.Vector3 lpos, PrimUpdateFlags updateFlags)
{
}
public void SendPrimitiveToClientImmediate(object sop, uint clientFlags, OpenMetaverse.Vector3 lpos)
{
}
public void SendPrimTerseUpdate(object sop)
{
}
public void SendInventoryFolderDetails(OpenMetaverse.UUID ownerID, InventoryFolderBase folder, List<InventoryItemBase> items, List<InventoryFolderBase> folders, bool fetchFolders, bool fetchItems)
{
}
public void FlushPrimUpdates()
{
}
public void SendInventoryItemDetails(OpenMetaverse.UUID ownerID, InventoryItemBase item)
{
}
public void SendInventoryItemCreateUpdate(InventoryItemBase Item, uint callbackId)
{
}
public void SendRemoveInventoryItem(OpenMetaverse.UUID itemID)
{
}
public void SendTakeControls(int controls, bool TakeControls, bool passToAgent)
{
}
public void SendTakeControls2(int controls1, bool takeControls1, bool passToAgent1,
int controls2, bool takeControls2, bool passToAgent2)
{
}
public void SendTaskInventory(OpenMetaverse.UUID taskID, short serial, byte[] fileName)
{
}
public void SendBulkUpdateInventory(InventoryNodeBase node)
{
}
public void SendXferPacket(ulong xferID, uint packet, byte[] data)
{
}
public void SendEconomyData(float EnergyEfficiency, int ObjectCapacity, int ObjectCount, int PriceEnergyUnit, int PriceGroupCreate, int PriceObjectClaim, float PriceObjectRent, float PriceObjectScaleFactor, int PriceParcelClaim, float PriceParcelClaimFactor, int PriceParcelRent, int PricePublicObjectDecay, int PricePublicObjectDelete, int PriceRentLight, int PriceUpload, int TeleportMinPrice, float TeleportPriceExponent)
{
}
public void SendAvatarPickerReply(AvatarPickerReplyAgentDataArgs AgentData, List<AvatarPickerReplyDataArgs> Data)
{
}
public void SendAgentDataUpdate(OpenMetaverse.UUID agentid, OpenMetaverse.UUID activegroupid, string firstname, string lastname, ulong grouppowers, string groupname, string grouptitle)
{
}
public void SendPreLoadSound(OpenMetaverse.UUID objectID, OpenMetaverse.UUID ownerID, OpenMetaverse.UUID soundID)
{
}
public void SendPlayAttachedSound(OpenMetaverse.UUID soundID, OpenMetaverse.UUID objectID, OpenMetaverse.UUID ownerID, float gain, byte flags)
{
}
public void SendTriggeredSound(OpenMetaverse.UUID soundID, OpenMetaverse.UUID ownerID, OpenMetaverse.UUID objectID, OpenMetaverse.UUID parentID, ulong handle, OpenMetaverse.Vector3 position, float gain)
{
}
public void SendAttachedSoundGainChange(OpenMetaverse.UUID objectID, float gain)
{
}
public void SendNameReply(OpenMetaverse.UUID profileId, string firstname, string lastname)
{
}
public void SendAlertMessage(string message)
{
}
public void SendAlertMessage(string message, string infoMessage, OSD extraParams)
{
/* no op */
}
public void SendAgentAlertMessage(string message, bool modal)
{
}
public void SendLoadURL(string objectname, OpenMetaverse.UUID objectID, OpenMetaverse.UUID ownerID, bool groupOwned, string message, string url)
{
}
public void SendDialog(string objectname, OpenMetaverse.UUID objectID, OpenMetaverse.UUID ownerID, string ownerFirstname, string ownerLastname, string msg, OpenMetaverse.UUID textureID, int ch, string[] buttonlabels)
{
}
// Mock economy: every money transaction succeeds unconditionally.
public bool AddMoney(int debit) => true;
// Environment/viewer-effect stubs: all ignored by the mock.
public void SendSunPos(OpenMetaverse.Vector3 sunPos, OpenMetaverse.Vector3 sunVel, ulong CurrentTime, uint SecondsPerSunCycle, uint SecondsPerYear, float OrbitalPosition)
{
}
public void SendViewerEffect(OpenMetaverse.Packets.ViewerEffectPacket.EffectBlock[] effectBlocks)
{
}
public void SendViewerTime(int phase)
{
}
// The mock knows no animations; every lookup yields the zero UUID.
public OpenMetaverse.UUID GetDefaultAnimation(string name) => OpenMetaverse.UUID.Zero;
// -- Profile, estate, land, asset-transfer and group stubs: all no-ops -----
public void SendAvatarProperties(OpenMetaverse.UUID avatarID, string aboutText, string bornOn, byte[] charterMember, string flAbout, uint flags, OpenMetaverse.UUID flImageID, OpenMetaverse.UUID imageID, string profileURL, OpenMetaverse.UUID partnerID)
{
}
public void SendAvatarInterests(OpenMetaverse.UUID avatarID, uint skillsMask, string skillsText, uint wantToMask, string wantToText, string languagesText)
{
}
public void SendScriptQuestion(OpenMetaverse.UUID taskID, string taskName, string ownerName, OpenMetaverse.UUID itemID, int question)
{
}
public void SendHealth(float health)
{
}
public void SendEstateUUIDList(OpenMetaverse.UUID invoice, int whichList, OpenMetaverse.UUID[] UUIDList, uint estateID)
{
}
public void SendBannedUserList(OpenMetaverse.UUID invoice, EstateBan[] banlist, uint estateID)
{
}
public void SendRegionInfoToEstateMenu(RegionInfoForEstateMenuArgs args)
{
}
public void SendEstateCovenantInformation(OpenMetaverse.UUID covenant, uint lastUpdated)
{
}
public void SendDetailedEstateData(OpenMetaverse.UUID invoice, string estateName, uint estateID, uint parentEstate, uint estateFlags, uint sunPosition, OpenMetaverse.UUID covenant, uint covenantLastUpdated, string abuseEmail, OpenMetaverse.UUID estateOwner)
{
}
public void SendLandProperties(int sequence_id, bool snap_selection, int request_result, LandData landData, float simObjectBonusFactor, int parcelObjectCapacity, int simObjectCapacity, uint regionFlags)
{
}
public void SendLandAccessListData(List<OpenMetaverse.UUID> avatars, uint accessFlag, int localLandID)
{
}
public void SendForceClientSelectObjects(List<uint> objectIDs)
{
}
public void SendLandObjectOwners(LandData land, List<OpenMetaverse.UUID> groups, Dictionary<OpenMetaverse.UUID, int> ownersAndCount)
{
}
public void SendLandParcelOverlay(byte[] data, int sequence_id)
{
}
public void SendParcelMediaCommand(uint flags, ParcelMediaCommandEnum command, float time)
{
}
public void SendParcelMediaUpdate(string mediaUrl, OpenMetaverse.UUID mediaTextureID, byte autoScale, string mediaType, string mediaDesc, int mediaWidth, int mediaHeight, byte mediaLoop)
{
}
public void SendAssetUploadCompleteMessage(sbyte AssetType, bool Success, OpenMetaverse.UUID AssetFullID)
{
}
public void SendConfirmXfer(ulong xferID, uint PacketID)
{
}
public void SendXferRequest(ulong XferID, short AssetType, OpenMetaverse.UUID vFileID, byte FilePath, byte[] FileName)
{
}
public void SendInitiateDownload(string simFileName, string clientFileName)
{
}
public void SendImageFirstPart(ushort numParts, OpenMetaverse.UUID ImageUUID, uint ImageSize, byte[] ImageData, byte imageCodec)
{
}
public void SendImageNextPart(ushort partNumber, OpenMetaverse.UUID imageUuid, byte[] imageData)
{
}
public void SendImageNotFound(OpenMetaverse.UUID imageid)
{
}
public void SendDisableSimulator()
{
}
public void SendSimStats(SimStats stats)
{
}
public void SendObjectPropertiesFamilyData(uint RequestFlags, OpenMetaverse.UUID ObjectUUID, OpenMetaverse.UUID OwnerID, OpenMetaverse.UUID GroupID, uint BaseMask, uint OwnerMask, uint GroupMask, uint EveryoneMask, uint NextOwnerMask, int OwnershipCost, byte SaleType, int SalePrice, uint Category, OpenMetaverse.UUID LastOwnerID, string ObjectName, string Description)
{
}
public void SendObjectPropertiesReply(OpenMetaverse.UUID ItemID, ulong CreationDate, OpenMetaverse.UUID CreatorUUID, OpenMetaverse.UUID FolderUUID, OpenMetaverse.UUID FromTaskUUID, OpenMetaverse.UUID GroupUUID, short InventorySerial, OpenMetaverse.UUID LastOwnerUUID, OpenMetaverse.UUID ObjectUUID, OpenMetaverse.UUID OwnerUUID, string TouchTitle, byte[] TextureID, string SitTitle, string ItemName, string ItemDescription, uint OwnerMask, uint NextOwnerMask, uint GroupMask, uint EveryoneMask, uint BaseMask, uint FoldedOwnerMask, uint FoldedNextOwnerMask, byte saleType, int salePrice)
{
}
public void SendAgentOffline(OpenMetaverse.UUID[] agentIDs)
{
}
public void SendAgentOnline(OpenMetaverse.UUID[] agentIDs)
{
}
public void SendSitResponse(OpenMetaverse.UUID TargetID, OpenMetaverse.Vector3 OffsetPos, OpenMetaverse.Quaternion SitOrientation, bool autopilot, OpenMetaverse.Vector3 CameraAtOffset, OpenMetaverse.Vector3 CameraEyeOffset, bool ForceMouseLook)
{
}
public void SendAdminResponse(OpenMetaverse.UUID Token, uint AdminLevel)
{
}
public void SendGroupMembership(GroupMembershipData[] GroupMembership)
{
}
public void SendGroupNameReply(OpenMetaverse.UUID groupLLUID, string GroupName)
{
}
public void SendJoinGroupReply(OpenMetaverse.UUID groupID, bool success)
{
}
public void SendEjectGroupMemberReply(OpenMetaverse.UUID agentID, OpenMetaverse.UUID groupID, bool success)
{
}
public void SendLeaveGroupReply(OpenMetaverse.UUID groupID, bool success)
{
}
public void SendCreateGroupReply(OpenMetaverse.UUID groupID, bool success, string message)
{
}
public void SendLandStatReply(uint reportType, uint requestFlags, uint resultCount, List<LandStatReportItem> lsrpl)
{
}
public void SendScriptRunningReply(OpenMetaverse.UUID objectID, OpenMetaverse.UUID itemID, bool running)
{
}
public void SendAsset(AssetBase asset, AssetRequestInfo req)
{
}
public void SendTexture(AssetBase TextureAsset)
{
}
/// <summary>
/// Returns the packed throttle settings; the mock has none, so an empty
/// buffer. Array.Empty avoids allocating a fresh byte[0] on every call
/// (empty arrays are immutable, so sharing one instance is safe).
/// </summary>
public byte[] GetThrottlesPacked(float multiplier)
{
    return Array.Empty<byte>();
}
// Remaining event-surface members. OnConnectionClosed is the only one the
// mock actually raises (from Close()).
public event ViewerEffectEventHandler OnViewerEffect;
public event Action<IClientAPI> OnLogout;
public event Action<IClientAPI> OnConnectionClosed;
// Messaging / client-option stubs. Options are write-ignored and always
// read back as the empty string.
public void SendBlueBoxMessage(OpenMetaverse.UUID FromAvatarID, string FromAvatarName, string Message)
{
}
public void SendLogoutPacket()
{
}
public void SetClientInfo(ClientInfo info)
{
}
public void SetClientOption(string option, string value)
{
}
public string GetClientOption(string option)
{
return String.Empty;
}
// -- Camera, search, calling-card, classified, pick and mute stubs ---------
public void SendSetFollowCamProperties(OpenMetaverse.UUID objectID, Dictionary<int, float> parameters)
{
}
public void SendClearFollowCamProperties(OpenMetaverse.UUID objectID)
{
}
// NOTE(review): parameter name "regoinID" looks like a typo for "regionID";
// left as-is because renaming would break named-argument callers.
public void SendRegionHandle(OpenMetaverse.UUID regoinID, ulong handle)
{
}
public void SendParcelInfo(RegionInfo info, LandData land, OpenMetaverse.UUID parcelID, uint x, uint y)
{
}
public void SendScriptTeleportRequest(string objName, string simName, OpenMetaverse.Vector3 pos, OpenMetaverse.Vector3 lookAt)
{
}
public void SendDirPlacesReply(OpenMetaverse.UUID queryID, DirPlacesReplyData[] data)
{
}
public void SendDirPeopleReply(OpenMetaverse.UUID queryID, DirPeopleReplyData[] data)
{
}
public void SendDirEventsReply(OpenMetaverse.UUID queryID, DirEventsReplyData[] data)
{
}
public void SendDirGroupsReply(OpenMetaverse.UUID queryID, DirGroupsReplyData[] data)
{
}
public void SendDirClassifiedReply(OpenMetaverse.UUID queryID, DirClassifiedReplyData[] data)
{
}
public void SendDirLandReply(OpenMetaverse.UUID queryID, DirLandReplyData[] data)
{
}
public void SendDirPopularReply(OpenMetaverse.UUID queryID, DirPopularReplyData[] data)
{
}
public void SendEventInfoReply(EventData info)
{
}
public void SendMapItemReply(mapItemReply[] replies, uint mapitemtype, uint flags)
{
}
public void SendAvatarGroupsReply(OpenMetaverse.UUID avatarID, GroupMembershipData[] data)
{
}
public void SendOfferCallingCard(OpenMetaverse.UUID srcID, OpenMetaverse.UUID transactionID)
{
}
public void SendAcceptCallingCard(OpenMetaverse.UUID transactionID)
{
}
public void SendDeclineCallingCard(OpenMetaverse.UUID transactionID)
{
}
public void SendTerminateFriend(OpenMetaverse.UUID exFriendID)
{
}
public void SendAvatarClassifiedReply(OpenMetaverse.UUID targetID, OpenMetaverse.UUID[] classifiedID, string[] name)
{
}
public void SendAvatarInterestsReply(OpenMetaverse.UUID avatarID, uint skillsMask, string skillsText, uint wantToMask, string wantToTask, string languagesText)
{
}
public void SendClassifiedInfoReply(OpenMetaverse.UUID classifiedID, OpenMetaverse.UUID creatorID, uint creationDate, uint expirationDate, uint category, string name, string description, OpenMetaverse.UUID parcelID, uint parentEstate, OpenMetaverse.UUID snapshotID, string simName, OpenMetaverse.Vector3 globalPos, string parcelName, byte classifiedFlags, int price)
{
}
public void SendAgentDropGroup(OpenMetaverse.UUID groupID)
{
}
public void RefreshGroupMembership()
{
}
public void SendAvatarNotesReply(OpenMetaverse.UUID targetID, string text)
{
}
public void SendAvatarPicksReply(OpenMetaverse.UUID targetID, Dictionary<OpenMetaverse.UUID, string> picks)
{
}
public void SendPickInfoReply(OpenMetaverse.UUID pickID, OpenMetaverse.UUID creatorID, bool topPick, OpenMetaverse.UUID parcelID, string name, string desc, OpenMetaverse.UUID snapshotID, string user, string originalName, string simName, OpenMetaverse.Vector3 posGlobal, int sortOrder, bool enabled)
{
}
public void SendAvatarClassifiedReply(OpenMetaverse.UUID targetID, Dictionary<OpenMetaverse.UUID, string> classifieds)
{
}
public void SendParcelDwellReply(int localID, OpenMetaverse.UUID parcelID, float dwell)
{
}
public void SendUserInfoReply(bool imViaEmail, bool visible, string email)
{
}
public void SendUseCachedMuteList()
{
}
public void SendMuteListUpdate(string filename)
{
}
public void KillEndDone()
{
}
// The mock "accepts" every generic packet handler registration but never
// stores or invokes it.
public bool AddGenericPacketHandler(string MethodName, GenericMessage handler) => true;
// -- Rights, xfer, attachment and telehub stubs ----------------------------
public void SendChangeUserRights(OpenMetaverse.UUID agent, OpenMetaverse.UUID agentRelated, int relatedRights)
{
}
public void SendTextBoxRequest(string message, int chatChannel, string objectname, OpenMetaverse.UUID ownerID, string firstName, string lastName, OpenMetaverse.UUID objectId)
{
}
public void FreezeMe(uint flags, OpenMetaverse.UUID whoKey, string who)
{
}
public void SendAbortXfer(ulong id, int result)
{
}
// NOTE(review): the supplied action is dropped, not executed — presumably
// intentional for the mock; confirm callers don't rely on it running.
public void RunAttachmentOperation(Action action)
{
}
public void SendAgentCachedTexture(List<CachedAgentArgs> args)
{
}
public void SendTelehubInfo(OpenMetaverse.Vector3 TelehubPos, OpenMetaverse.Quaternion TelehubRot, List<OpenMetaverse.Vector3> SpawnPoint, OpenMetaverse.UUID ObjectID, string nameT)
{
}
// NOTE(review): like RunAttachmentOperation, the handler is discarded.
public void HandleWithInventoryWriteThread(Action toHandle)
{
}
/// <summary>
/// Nothing to pause or flush in the mock. Returns an already-completed
/// task rather than null: the original returned null, which throws a
/// NullReferenceException in any caller that awaits the result.
/// </summary>
public Task PauseUpdatesAndFlush()
{
    return Task.CompletedTask;
}
// Update/lifecycle and group-power stubs. The mock tracks no group state,
// so GetAllGroupPowers returns a fresh empty list each call.
public void ResumeUpdates(IEnumerable<uint> excludeObjectIds)
{
}
public void WaitForClose()
{
}
public void AfterAttachedToConnection(OpenSim.Framework.AgentCircuitData c)
{
}
public void SendMoneyBalance(OpenMetaverse.UUID transaction, bool success, string description, int balance, OpenMetaverse.Packets.MoneyBalanceReplyPacket.TransactionInfoBlock transInfo)
{
}
public List<AgentGroupData> GetAllGroupPowers()
{
return new List<AgentGroupData>();
}
public void SetGroupPowers(IEnumerable<AgentGroupData> groupPowers)
{
}
// No throttling in the mock client.
public int GetThrottleTotal() => 0;
// Active-group changes are ignored by the mock.
public void SetActiveGroupInfo(AgentGroupData activeGroup)
{
}
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using Newtonsoft.Json.Schema;
#if !(NET20 || NET35 || PORTABLE40 || PORTABLE) || NETSTANDARD1_3 || NETSTANDARD2_0
using System.Numerics;
#endif
using System.Runtime.Serialization;
using System.Text;
#if !(NET20 || NET35)
using System.Threading.Tasks;
#endif
using System.Xml;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Serialization;
using Newtonsoft.Json.Tests.Serialization;
using Newtonsoft.Json.Tests.TestObjects;
using Newtonsoft.Json.Tests.TestObjects.Organization;
using Newtonsoft.Json.Utilities;
#if DNXCORE50
using Xunit;
using Test = Xunit.FactAttribute;
using Assert = Newtonsoft.Json.Tests.XUnitAssert;
#else
using NUnit.Framework;
#endif
namespace Newtonsoft.Json.Tests
{
[TestFixture]
public class JsonConvertTest : TestFixtureBase
{
// Verifies escaping when the value mixes a non-ASCII char with an
// escapable ASCII control char (regression for escape-buffer sizing).
[Test]
public void ToStringEnsureEscapedArrayLength()
{
const char nonAsciiChar = (char)257;
const char escapableNonQuoteAsciiChar = '\0';
string value = nonAsciiChar + @"\" + escapableNonQuoteAsciiChar;
string convertedValue = JsonConvert.ToString((object)value);
Assert.AreEqual(@"""" + nonAsciiChar + @"\\\u0000""", convertedValue);
}
// Minimal target type for the PopulateObject tests below.
public class PopulateTestObject
{
public decimal Prop { get; set; }
}
// PopulateObject must skip a single leading line comment before the object.
[Test]
public void PopulateObjectWithHeaderComment()
{
string json = @"// file header
{
""prop"": 1.0
}";
PopulateTestObject o = new PopulateTestObject();
JsonConvert.PopulateObject(json, o);
Assert.AreEqual(1m, o.Prop);
}
// ...and multiple consecutive leading comments.
[Test]
public void PopulateObjectWithMultipleHeaderComment()
{
string json = @"// file header
// another file header?
{
""prop"": 1.0
}";
PopulateTestObject o = new PopulateTestObject();
JsonConvert.PopulateObject(json, o);
Assert.AreEqual(1m, o.Prop);
}
// Empty input must fail with a descriptive serialization exception.
[Test]
public void PopulateObjectWithNoContent()
{
ExceptionAssert.Throws<JsonSerializationException>(() =>
{
string json = @"";
PopulateTestObject o = new PopulateTestObject();
JsonConvert.PopulateObject(json, o);
}, "No JSON content found. Path '', line 0, position 0.");
}
// Comment-only input is still "no content" — note the advanced position.
[Test]
public void PopulateObjectWithOnlyComment()
{
ExceptionAssert.Throws<JsonSerializationException>(() =>
{
string json = @"// file header";
PopulateTestObject o = new PopulateTestObject();
JsonConvert.PopulateObject(json, o);
}, "No JSON content found. Path '', line 1, position 14.");
}
// JsonConvert.DefaultSettings is process-global state; each test that sets
// it restores null in a finally block so other tests aren't affected.
[Test]
public void DefaultSettings()
{
try
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings
{
Formatting = Formatting.Indented
};
string json = JsonConvert.SerializeObject(new { test = new[] { 1, 2, 3 } });
StringAssert.AreEqual(@"{
""test"": [
1,
2,
3
]
}", json);
}
finally
{
JsonConvert.DefaultSettings = null;
}
}
// Simple carrier type for the NameTable round-trip below.
public class NameTableTestClass
{
public string Value { get; set; }
}
// Converter that asserts the reader's NameTable is populated while the
// serializer is mid-deserialization (it is attached during Deserialize).
public class NameTableTestClassConverter : JsonConverter
{
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
reader.Read();
reader.Read();
JsonTextReader jsonTextReader = (JsonTextReader)reader;
Assert.IsNotNull(jsonTextReader.NameTable);
string s = serializer.Deserialize<string>(reader);
Assert.AreEqual("hi", s);
Assert.IsNotNull(jsonTextReader.NameTable);
NameTableTestClass o = new NameTableTestClass
{
Value = s
};
return o;
}
public override bool CanConvert(Type objectType)
{
return objectType == typeof(NameTableTestClass);
}
}
// NameTable is null before and after Deserialize; non-null only inside
// (asserted by the converter above).
[Test]
public void NameTableTest()
{
StringReader sr = new StringReader("{'property':'hi'}");
JsonTextReader jsonTextReader = new JsonTextReader(sr);
Assert.IsNull(jsonTextReader.NameTable);
JsonSerializer serializer = new JsonSerializer();
serializer.Converters.Add(new NameTableTestClassConverter());
NameTableTestClass o = serializer.Deserialize<NameTableTestClass>(jsonTextReader);
Assert.IsNull(jsonTextReader.NameTable);
Assert.AreEqual("hi", o.Value);
}
// Documentation-example test: default settings apply camel-casing and
// indentation to a serialized Employee.
[Test]
public void DefaultSettings_Example()
{
try
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings
{
Formatting = Formatting.Indented,
ContractResolver = new CamelCasePropertyNamesContractResolver()
};
Employee e = new Employee
{
FirstName = "Eric",
LastName = "Example",
BirthDate = new DateTime(1980, 4, 20, 0, 0, 0, DateTimeKind.Utc),
Department = "IT",
JobTitle = "Web Dude"
};
string json = JsonConvert.SerializeObject(e);
// {
//   "firstName": "Eric",
//   "lastName": "Example",
//   "birthDate": "1980-04-20T00:00:00Z",
//   "department": "IT",
//   "jobTitle": "Web Dude"
// }
StringAssert.AreEqual(@"{
""firstName"": ""Eric"",
""lastName"": ""Example"",
""birthDate"": ""1980-04-20T00:00:00Z"",
""department"": ""IT"",
""jobTitle"": ""Web Dude""
}", json);
}
finally
{
JsonConvert.DefaultSettings = null;
}
}
// Settings passed directly to SerializeObject take precedence over
// DefaultSettings (None beats the default Indented here).
[Test]
public void DefaultSettings_Override()
{
try
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings
{
Formatting = Formatting.Indented
};
string json = JsonConvert.SerializeObject(new { test = new[] { 1, 2, 3 } }, new JsonSerializerSettings
{
Formatting = Formatting.None
});
Assert.AreEqual(@"{""test"":[1,2,3]}", json);
}
finally
{
JsonConvert.DefaultSettings = null;
}
}
// Converters supplied in the per-call settings run before those from
// DefaultSettings: the JavaScriptDateTimeConverter wins (new Date(...)).
[Test]
public void DefaultSettings_Override_JsonConverterOrder()
{
try
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings
{
Formatting = Formatting.Indented,
Converters = { new IsoDateTimeConverter { DateTimeFormat = "yyyy" } }
};
string json = JsonConvert.SerializeObject(new[] { new DateTime(2000, 12, 12, 4, 2, 4, DateTimeKind.Utc) }, new JsonSerializerSettings
{
Formatting = Formatting.None,
Converters =
{
// should take precedence
new JavaScriptDateTimeConverter(),
new IsoDateTimeConverter { DateTimeFormat = "dd" }
}
});
Assert.AreEqual(@"[new Date(976593724000)]", json);
}
finally
{
JsonConvert.DefaultSettings = null;
}
}
// CreateDefault picks up DefaultSettings (indented output); a serializer
// from `new JsonSerializer()` or `Create()` ignores them (compact output).
[Test]
public void DefaultSettings_Create()
{
try
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings
{
Formatting = Formatting.Indented
};
IList<int> l = new List<int> { 1, 2, 3 };
StringWriter sw = new StringWriter();
JsonSerializer serializer = JsonSerializer.CreateDefault();
serializer.Serialize(sw, l);
StringAssert.AreEqual(@"[
1,
2,
3
]", sw.ToString());
sw = new StringWriter();
serializer.Formatting = Formatting.None;
serializer.Serialize(sw, l);
Assert.AreEqual(@"[1,2,3]", sw.ToString());
sw = new StringWriter();
serializer = new JsonSerializer();
serializer.Serialize(sw, l);
Assert.AreEqual(@"[1,2,3]", sw.ToString());
sw = new StringWriter();
serializer = JsonSerializer.Create();
serializer.Serialize(sw, l);
Assert.AreEqual(@"[1,2,3]", sw.ToString());
}
finally
{
JsonConvert.DefaultSettings = null;
}
}
// CreateDefault(settings) merges the explicit settings (IntConverter
// doubles each value) on top of DefaultSettings (indentation); clearing
// the converters afterwards restores the plain values.
[Test]
public void DefaultSettings_CreateWithSettings()
{
try
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings
{
Formatting = Formatting.Indented
};
IList<int> l = new List<int> { 1, 2, 3 };
StringWriter sw = new StringWriter();
JsonSerializer serializer = JsonSerializer.CreateDefault(new JsonSerializerSettings
{
Converters = { new IntConverter() }
});
serializer.Serialize(sw, l);
StringAssert.AreEqual(@"[
2,
4,
6
]", sw.ToString());
sw = new StringWriter();
serializer.Converters.Clear();
serializer.Serialize(sw, l);
StringAssert.AreEqual(@"[
1,
2,
3
]", sw.ToString());
sw = new StringWriter();
serializer = JsonSerializer.Create(new JsonSerializerSettings { Formatting = Formatting.Indented });
serializer.Serialize(sw, l);
StringAssert.AreEqual(@"[
1,
2,
3
]", sw.ToString());
}
finally
{
JsonConvert.DefaultSettings = null;
}
}
// Test converter: doubles every int on write; reading is unsupported.
public class IntConverter : JsonConverter
{
    public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
    {
        // Same unbox-then-double as before, without the temporary local.
        writer.WriteValue((int)value * 2);
    }

    public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
    {
        throw new NotImplementedException();
    }

    public override bool CanConvert(Type objectType) => objectType == typeof(int);
}
// Untyped deserialization: empty string yields null, a bare integer
// yields a boxed Int64.
[Test]
public void DeserializeObject_EmptyString()
{
object result = JsonConvert.DeserializeObject(string.Empty);
Assert.IsNull(result);
}
[Test]
public void DeserializeObject_Integer()
{
object result = JsonConvert.DeserializeObject("1");
Assert.AreEqual(1L, result);
}
// Nullable targets deserialize the empty string to null, not an error.
[Test]
public void DeserializeObject_Integer_EmptyString()
{
int? value = JsonConvert.DeserializeObject<int?>("");
Assert.IsNull(value);
}
[Test]
public void DeserializeObject_Decimal_EmptyString()
{
decimal? value = JsonConvert.DeserializeObject<decimal?>("");
Assert.IsNull(value);
}
[Test]
public void DeserializeObject_DateTime_EmptyString()
{
DateTime? value = JsonConvert.DeserializeObject<DateTime?>("");
Assert.IsNull(value);
}
[Test]
public void EscapeJavaScriptString()
{
string result;
result = JavaScriptUtils.ToEscapedJavaScriptString("How now brown cow?", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""How now brown cow?""", result);
result = JavaScriptUtils.ToEscapedJavaScriptString("How now 'brown' cow?", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""How now 'brown' cow?""", result);
result = JavaScriptUtils.ToEscapedJavaScriptString("How now <brown> cow?", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""How now <brown> cow?""", result);
result = JavaScriptUtils.ToEscapedJavaScriptString("How \r\nnow brown cow?", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""How \r\nnow brown cow?""", result);
result = JavaScriptUtils.ToEscapedJavaScriptString("\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007""", result);
result =
JavaScriptUtils.ToEscapedJavaScriptString("\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013""", result);
result =
JavaScriptUtils.ToEscapedJavaScriptString(
"\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f ", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f """, result);
result =
JavaScriptUtils.ToEscapedJavaScriptString(
"!\"#$%&\u0027()*+,-./0123456789:;\u003c=\u003e?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""!\""#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]""", result);
result = JavaScriptUtils.ToEscapedJavaScriptString("^_`abcdefghijklmnopqrstuvwxyz{|}~", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(@"""^_`abcdefghijklmnopqrstuvwxyz{|}~""", result);
string data =
"\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&\u0027()*+,-./0123456789:;\u003c=\u003e?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
string expected =
@"""\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\""#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~""";
result = JavaScriptUtils.ToEscapedJavaScriptString(data, '"', true, StringEscapeHandling.Default);
Assert.AreEqual(expected, result);
result = JavaScriptUtils.ToEscapedJavaScriptString("Fred's cat.", '\'', true, StringEscapeHandling.Default);
Assert.AreEqual(result, @"'Fred\'s cat.'");
result = JavaScriptUtils.ToEscapedJavaScriptString(@"""How are you gentlemen?"" said Cats.", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(result, @"""\""How are you gentlemen?\"" said Cats.""");
result = JavaScriptUtils.ToEscapedJavaScriptString(@"""How are' you gentlemen?"" said Cats.", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(result, @"""\""How are' you gentlemen?\"" said Cats.""");
result = JavaScriptUtils.ToEscapedJavaScriptString(@"Fred's ""cat"".", '\'', true, StringEscapeHandling.Default);
Assert.AreEqual(result, @"'Fred\'s ""cat"".'");
result = JavaScriptUtils.ToEscapedJavaScriptString("\u001farray\u003caddress", '"', true, StringEscapeHandling.Default);
Assert.AreEqual(result, @"""\u001farray<address""");
}
[Test]
public void EscapeJavaScriptString_UnicodeLinefeeds()
{
    // NEL (U+0085), LINE SEPARATOR (U+2028) and PARAGRAPH SEPARATOR (U+2029)
    // are line terminators in JavaScript, so even under
    // StringEscapeHandling.Default they must be \uXXXX-escaped to keep the
    // output a valid single-line JS string literal.
    foreach (char lineTerminator in new[] { '\u0085', '\u2028', '\u2029' })
    {
        string escaped = JavaScriptUtils.ToEscapedJavaScriptString("before" + lineTerminator + "after", '"', true, StringEscapeHandling.Default);
        string expected = @"""before\u" + ((int)lineTerminator).ToString("x4") + @"after""";
        Assert.AreEqual(expected, escaped);
    }
}
[Test]
public void ToStringInvalid()
{
    // Version has no primitive JSON representation, so JsonConvert.ToString
    // rejects it and directs callers to the full serializer instead.
    ExceptionAssert.Throws<ArgumentException>(
        () => JsonConvert.ToString(new Version(1, 0)),
        "Unsupported type: System.Version. Use the JsonSerializer class to get the object's JSON representation.");
}
[Test]
public void GuidToString()
{
    // Guids serialize as lowercase "D"-format strings regardless of input casing.
    string json = JsonConvert.ToString(new Guid("BED7F4EA-1A96-11d2-8F08-00A0C9A6186D"));
    Assert.AreEqual(@"""bed7f4ea-1a96-11d2-8f08-00a0c9a6186d""", json);
}
[Test]
public void EnumToString()
{
    // Without a StringEnumConverter, enums serialize as their underlying
    // numeric value (CurrentCultureIgnoreCase == 1), not their name.
    string serialized = JsonConvert.ToString(StringComparison.CurrentCultureIgnoreCase);
    Assert.AreEqual("1", serialized);
}
[Test]
public void ObjectToString()
{
// JsonConvert.ToString(object) dispatches on the boxed value's runtime type.
object value;
// All integral and floating-point primitives serialize as bare numbers.
value = 1;
Assert.AreEqual("1", JsonConvert.ToString(value));
value = 1.1;
Assert.AreEqual("1.1", JsonConvert.ToString(value));
value = 1.1m;
Assert.AreEqual("1.1", JsonConvert.ToString(value));
value = (float)1.1;
Assert.AreEqual("1.1", JsonConvert.ToString(value));
value = (short)1;
Assert.AreEqual("1", JsonConvert.ToString(value));
value = (long)1;
Assert.AreEqual("1", JsonConvert.ToString(value));
value = (byte)1;
Assert.AreEqual("1", JsonConvert.ToString(value));
value = (uint)1;
Assert.AreEqual("1", JsonConvert.ToString(value));
value = (ushort)1;
Assert.AreEqual("1", JsonConvert.ToString(value));
value = (sbyte)1;
Assert.AreEqual("1", JsonConvert.ToString(value));
value = (ulong)1;
Assert.AreEqual("1", JsonConvert.ToString(value));
// Dates: ISO 8601 by default, Microsoft "\/Date(ticks)\/" format on request.
value = new DateTime(DateTimeUtils.InitialJavaScriptDateTicks, DateTimeKind.Utc);
Assert.AreEqual(@"""1970-01-01T00:00:00Z""", JsonConvert.ToString(value));
value = new DateTime(DateTimeUtils.InitialJavaScriptDateTicks, DateTimeKind.Utc);
Assert.AreEqual(@"""\/Date(0)\/""", JsonConvert.ToString((DateTime)value, DateFormatHandling.MicrosoftDateFormat, DateTimeZoneHandling.RoundtripKind));
#if !NET20
value = new DateTimeOffset(DateTimeUtils.InitialJavaScriptDateTicks, TimeSpan.Zero);
Assert.AreEqual(@"""1970-01-01T00:00:00+00:00""", JsonConvert.ToString(value));
value = new DateTimeOffset(DateTimeUtils.InitialJavaScriptDateTicks, TimeSpan.Zero);
Assert.AreEqual(@"""\/Date(0+0000)\/""", JsonConvert.ToString((DateTimeOffset)value, DateFormatHandling.MicrosoftDateFormat));
#endif
// Null and DBNull both serialize to the JSON literal null.
value = null;
Assert.AreEqual("null", JsonConvert.ToString(value));
#if !(PORTABLE || DNXCORE50 || PORTABLE40)
value = DBNull.Value;
Assert.AreEqual("null", JsonConvert.ToString(value));
#endif
value = "I am a string";
Assert.AreEqual(@"""I am a string""", JsonConvert.ToString(value));
value = true;
Assert.AreEqual("true", JsonConvert.ToString(value));
// A char serializes as a single-character JSON string.
value = 'c';
Assert.AreEqual(@"""c""", JsonConvert.ToString(value));
}
[Test]
public void TestInvalidStrings()
{
    ExceptionAssert.Throws<JsonReaderException>(() =>
    {
        // Serialize a quoted string, then corrupt it by stripping the
        // backslashes from the escaped inner quotes — the premature closing
        // quote leaves trailing text after the JSON value.
        string original = @"this is a string ""that has quotes"" ";
        string corrupted = JsonConvert.SerializeObject(original).Replace(@"\""", "\"");
        JsonConvert.DeserializeObject<string>(corrupted);
    }, "Additional text encountered after finished reading JSON content: t. Path '', line 1, position 19.");
}
[Test]
public void DeserializeValueObjects()
{
// Bare JSON values deserialize directly into value types.
int i = JsonConvert.DeserializeObject<int>("1");
Assert.AreEqual(1, i);
#if !NET20
// Negative Microsoft-format tick count: year 100 expressed relative to the Unix epoch.
DateTimeOffset d = JsonConvert.DeserializeObject<DateTimeOffset>(@"""\/Date(-59011455539000+0000)\/""");
Assert.AreEqual(new DateTimeOffset(new DateTime(100, 1, 1, 1, 1, 1, DateTimeKind.Utc)), d);
#endif
bool b = JsonConvert.DeserializeObject<bool>("true");
Assert.AreEqual(true, b);
object n = JsonConvert.DeserializeObject<object>("null");
Assert.AreEqual(null, n);
// The non-standard JavaScript literal 'undefined' also maps to null for object targets.
object u = JsonConvert.DeserializeObject<object>("undefined");
Assert.AreEqual(null, u);
}
[Test]
public void FloatToString()
{
// Fractional digits are preserved exactly as the shortest round-trippable form.
Assert.AreEqual("1.1", JsonConvert.ToString(1.1));
Assert.AreEqual("1.11", JsonConvert.ToString(1.11));
Assert.AreEqual("1.111", JsonConvert.ToString(1.111));
Assert.AreEqual("1.1111", JsonConvert.ToString(1.1111));
Assert.AreEqual("1.11111", JsonConvert.ToString(1.11111));
Assert.AreEqual("1.111111", JsonConvert.ToString(1.111111));
// Whole-number doubles keep a trailing ".0" so they read back as floating point.
Assert.AreEqual("1.0", JsonConvert.ToString(1.0));
Assert.AreEqual("1.0", JsonConvert.ToString(1d));
Assert.AreEqual("-1.0", JsonConvert.ToString(-1d));
Assert.AreEqual("1.01", JsonConvert.ToString(1.01));
Assert.AreEqual("1.001", JsonConvert.ToString(1.001));
// Non-finite values use the library's configured literal constants.
Assert.AreEqual(JsonConvert.PositiveInfinity, JsonConvert.ToString(Double.PositiveInfinity));
Assert.AreEqual(JsonConvert.NegativeInfinity, JsonConvert.ToString(Double.NegativeInfinity));
Assert.AreEqual(JsonConvert.NaN, JsonConvert.ToString(Double.NaN));
}
[Test]
public void DecimalToString()
{
// Decimals keep their stated scale where possible.
Assert.AreEqual("1.1", JsonConvert.ToString(1.1m));
Assert.AreEqual("1.11", JsonConvert.ToString(1.11m));
Assert.AreEqual("1.111", JsonConvert.ToString(1.111m));
Assert.AreEqual("1.1111", JsonConvert.ToString(1.1111m));
Assert.AreEqual("1.11111", JsonConvert.ToString(1.11111m));
Assert.AreEqual("1.111111", JsonConvert.ToString(1.111111m));
// Whole-number decimals are normalized to a single trailing ".0",
// even when written without a decimal point (1m, -1m).
Assert.AreEqual("1.0", JsonConvert.ToString(1.0m));
Assert.AreEqual("-1.0", JsonConvert.ToString(-1.0m));
Assert.AreEqual("-1.0", JsonConvert.ToString(-1m));
Assert.AreEqual("1.0", JsonConvert.ToString(1m));
Assert.AreEqual("1.01", JsonConvert.ToString(1.01m));
Assert.AreEqual("1.001", JsonConvert.ToString(1.001m));
// The 28-29 significant digit extremes also get the ".0" suffix.
Assert.AreEqual("79228162514264337593543950335.0", JsonConvert.ToString(Decimal.MaxValue));
Assert.AreEqual("-79228162514264337593543950335.0", JsonConvert.ToString(Decimal.MinValue));
}
[Test]
public void StringEscaping()
{
    // CR/LF become \r\n and embedded double quotes are backslash-escaped;
    // the apostrophe needs no escaping inside a double-quoted JSON string.
    string escaped = JsonConvert.ToString("It's a good day\r\n\"sunshine\"");
    Assert.AreEqual(@"""It's a good day\r\n\""sunshine\""""", escaped);
}
[Test]
public void ToStringStringEscapeHandling()
{
string v = "<b>hi " + '\u20AC' + "</b>";
// Default: HTML characters and non-ASCII both pass through unescaped.
string json = JsonConvert.ToString(v, '"');
Assert.AreEqual(@"""<b>hi " + '\u20AC' + @"</b>""", json);
// EscapeHtml: angle brackets become \u003c / \u003e; the euro sign is untouched.
json = JsonConvert.ToString(v, '"', StringEscapeHandling.EscapeHtml);
Assert.AreEqual(@"""\u003cb\u003ehi " + '\u20AC' + @"\u003c/b\u003e""", json);
// EscapeNonAscii: the euro sign becomes \u20ac; the HTML is untouched.
json = JsonConvert.ToString(v, '"', StringEscapeHandling.EscapeNonAscii);
Assert.AreEqual(@"""<b>hi \u20ac</b>""", json);
}
[Test]
public void WriteDateTime()
{
// Exercises every DateTime/DateTimeOffset serialization combination produced by
// TestDateTime and asserts the exact text for both the ISO 8601 and the
// Microsoft "\/Date(ticks[offset])\/" formats. Local-kind expectations are
// built with GetOffset so the test passes in any time zone.
DateTimeResult result = null;
// DateTime.MaxValue: the upper bound of both formats.
result = TestDateTime("DateTime Max", DateTime.MaxValue);
Assert.AreEqual("9999-12-31T23:59:59.9999999", result.IsoDateRoundtrip);
Assert.AreEqual("9999-12-31T23:59:59.9999999" + GetOffset(DateTime.MaxValue, DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("9999-12-31T23:59:59.9999999", result.IsoDateUnspecified);
Assert.AreEqual("9999-12-31T23:59:59.9999999Z", result.IsoDateUtc);
Assert.AreEqual(@"\/Date(253402300799999)\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(253402300799999" + GetOffset(DateTime.MaxValue, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(253402300799999)\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(253402300799999)\/", result.MsDateUtc);
// Local kind, second precision: ISO output carries the machine's UTC offset.
DateTime year2000local = new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Local);
string localToUtcDate = year2000local.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.FFFFFFFK");
result = TestDateTime("DateTime Local", year2000local);
Assert.AreEqual("2000-01-01T01:01:01" + GetOffset(year2000local, DateFormatHandling.IsoDateFormat), result.IsoDateRoundtrip);
Assert.AreEqual("2000-01-01T01:01:01" + GetOffset(year2000local, DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("2000-01-01T01:01:01", result.IsoDateUnspecified);
Assert.AreEqual(localToUtcDate, result.IsoDateUtc);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(year2000local) + GetOffset(year2000local, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(year2000local) + GetOffset(year2000local, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(year2000local) + GetOffset(year2000local, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(year2000local) + @")\/", result.MsDateUtc);
// Local kind with millisecond precision.
DateTime millisecondsLocal = new DateTime(2000, 1, 1, 1, 1, 1, 999, DateTimeKind.Local);
localToUtcDate = millisecondsLocal.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.FFFFFFFK");
result = TestDateTime("DateTime Local with milliseconds", millisecondsLocal);
Assert.AreEqual("2000-01-01T01:01:01.999" + GetOffset(millisecondsLocal, DateFormatHandling.IsoDateFormat), result.IsoDateRoundtrip);
Assert.AreEqual("2000-01-01T01:01:01.999" + GetOffset(millisecondsLocal, DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("2000-01-01T01:01:01.999", result.IsoDateUnspecified);
Assert.AreEqual(localToUtcDate, result.IsoDateUtc);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(millisecondsLocal) + GetOffset(millisecondsLocal, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(millisecondsLocal) + GetOffset(millisecondsLocal, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(millisecondsLocal) + GetOffset(millisecondsLocal, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(millisecondsLocal) + @")\/", result.MsDateUtc);
// Local kind with full 100ns tick precision (7 fractional digits in ISO).
DateTime ticksLocal = new DateTime(634663873826822481, DateTimeKind.Local);
localToUtcDate = ticksLocal.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.FFFFFFFK");
result = TestDateTime("DateTime Local with ticks", ticksLocal);
Assert.AreEqual("2012-03-03T16:03:02.6822481" + GetOffset(ticksLocal, DateFormatHandling.IsoDateFormat), result.IsoDateRoundtrip);
Assert.AreEqual("2012-03-03T16:03:02.6822481" + GetOffset(ticksLocal, DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("2012-03-03T16:03:02.6822481", result.IsoDateUnspecified);
Assert.AreEqual(localToUtcDate, result.IsoDateUtc);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(ticksLocal) + GetOffset(ticksLocal, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(ticksLocal) + GetOffset(ticksLocal, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(ticksLocal) + GetOffset(ticksLocal, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(ticksLocal) + @")\/", result.MsDateUtc);
// Unspecified kind: Roundtrip/Unspecified emit no designator; Utc handling appends "Z".
DateTime year2000Unspecified = new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Unspecified);
result = TestDateTime("DateTime Unspecified", year2000Unspecified);
Assert.AreEqual("2000-01-01T01:01:01", result.IsoDateRoundtrip);
Assert.AreEqual("2000-01-01T01:01:01" + GetOffset(year2000Unspecified, DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("2000-01-01T01:01:01", result.IsoDateUnspecified);
Assert.AreEqual("2000-01-01T01:01:01Z", result.IsoDateUtc);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(year2000Unspecified) + GetOffset(year2000Unspecified, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(year2000Unspecified) + GetOffset(year2000Unspecified, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(year2000Unspecified) + GetOffset(year2000Unspecified, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(year2000Unspecified.ToLocalTime()) + @")\/", result.MsDateUtc);
// Utc kind.
DateTime year2000Utc = new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc);
string utcTolocalDate = year2000Utc.ToLocalTime().ToString("yyyy-MM-ddTHH:mm:ss");
result = TestDateTime("DateTime Utc", year2000Utc);
Assert.AreEqual("2000-01-01T01:01:01Z", result.IsoDateRoundtrip);
Assert.AreEqual(utcTolocalDate + GetOffset(year2000Utc, DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("2000-01-01T01:01:01", result.IsoDateUnspecified);
Assert.AreEqual("2000-01-01T01:01:01Z", result.IsoDateUtc);
Assert.AreEqual(@"\/Date(946688461000)\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(946688461000" + GetOffset(year2000Utc, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(DateTime.SpecifyKind(year2000Utc, DateTimeKind.Unspecified)) + GetOffset(year2000Utc, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(946688461000)\/", result.MsDateUtc);
// The Unix epoch serializes as tick value 0 in the Microsoft format.
DateTime unixEpoc = new DateTime(621355968000000000, DateTimeKind.Utc);
utcTolocalDate = unixEpoc.ToLocalTime().ToString("yyyy-MM-ddTHH:mm:ss");
result = TestDateTime("DateTime Unix Epoc", unixEpoc);
Assert.AreEqual("1970-01-01T00:00:00Z", result.IsoDateRoundtrip);
Assert.AreEqual(utcTolocalDate + GetOffset(unixEpoc, DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("1970-01-01T00:00:00", result.IsoDateUnspecified);
Assert.AreEqual("1970-01-01T00:00:00Z", result.IsoDateUtc);
Assert.AreEqual(@"\/Date(0)\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(0" + GetOffset(unixEpoc, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(" + DateTimeUtils.ConvertDateTimeToJavaScriptTicks(DateTime.SpecifyKind(unixEpoc, DateTimeKind.Unspecified)) + GetOffset(unixEpoc, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(0)\/", result.MsDateUtc);
// DateTime.MinValue and default(DateTime) are the same instant and serialize identically.
result = TestDateTime("DateTime Min", DateTime.MinValue);
Assert.AreEqual("0001-01-01T00:00:00", result.IsoDateRoundtrip);
Assert.AreEqual("0001-01-01T00:00:00" + GetOffset(DateTime.MinValue, DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("0001-01-01T00:00:00", result.IsoDateUnspecified);
Assert.AreEqual("0001-01-01T00:00:00Z", result.IsoDateUtc);
Assert.AreEqual(@"\/Date(-62135596800000)\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(-62135596800000" + GetOffset(DateTime.MinValue, DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(-62135596800000)\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(-62135596800000)\/", result.MsDateUtc);
result = TestDateTime("DateTime Default", default(DateTime));
Assert.AreEqual("0001-01-01T00:00:00", result.IsoDateRoundtrip);
Assert.AreEqual("0001-01-01T00:00:00" + GetOffset(default(DateTime), DateFormatHandling.IsoDateFormat), result.IsoDateLocal);
Assert.AreEqual("0001-01-01T00:00:00", result.IsoDateUnspecified);
Assert.AreEqual("0001-01-01T00:00:00Z", result.IsoDateUtc);
Assert.AreEqual(@"\/Date(-62135596800000)\/", result.MsDateRoundtrip);
Assert.AreEqual(@"\/Date(-62135596800000" + GetOffset(default(DateTime), DateFormatHandling.MicrosoftDateFormat) + @")\/", result.MsDateLocal);
Assert.AreEqual(@"\/Date(-62135596800000)\/", result.MsDateUnspecified);
Assert.AreEqual(@"\/Date(-62135596800000)\/", result.MsDateUtc);
#if !NET20
// DateTimeOffset carries its own offset, so only the Roundtrip fields apply.
result = TestDateTime("DateTimeOffset TimeSpan Zero", new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
Assert.AreEqual("2000-01-01T01:01:01+00:00", result.IsoDateRoundtrip);
Assert.AreEqual(@"\/Date(946688461000+0000)\/", result.MsDateRoundtrip);
result = TestDateTime("DateTimeOffset TimeSpan 1 hour", new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.FromHours(1)));
Assert.AreEqual("2000-01-01T01:01:01+01:00", result.IsoDateRoundtrip);
Assert.AreEqual(@"\/Date(946684861000+0100)\/", result.MsDateRoundtrip);
result = TestDateTime("DateTimeOffset TimeSpan 1.5 hour", new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.FromHours(1.5)));
Assert.AreEqual("2000-01-01T01:01:01+01:30", result.IsoDateRoundtrip);
Assert.AreEqual(@"\/Date(946683061000+0130)\/", result.MsDateRoundtrip);
result = TestDateTime("DateTimeOffset TimeSpan 13 hour", new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.FromHours(13)));
Assert.AreEqual("2000-01-01T01:01:01+13:00", result.IsoDateRoundtrip);
Assert.AreEqual(@"\/Date(946641661000+1300)\/", result.MsDateRoundtrip);
result = TestDateTime("DateTimeOffset TimeSpan with ticks", new DateTimeOffset(634663873826822481, TimeSpan.Zero));
Assert.AreEqual("2012-03-03T16:03:02.6822481+00:00", result.IsoDateRoundtrip);
Assert.AreEqual(@"\/Date(1330790582682+0000)\/", result.MsDateRoundtrip);
result = TestDateTime("DateTimeOffset Min", DateTimeOffset.MinValue);
Assert.AreEqual("0001-01-01T00:00:00+00:00", result.IsoDateRoundtrip);
Assert.AreEqual(@"\/Date(-62135596800000+0000)\/", result.MsDateRoundtrip);
result = TestDateTime("DateTimeOffset Max", DateTimeOffset.MaxValue);
Assert.AreEqual("9999-12-31T23:59:59.9999999+00:00", result.IsoDateRoundtrip);
Assert.AreEqual(@"\/Date(253402300799999+0000)\/", result.MsDateRoundtrip);
result = TestDateTime("DateTimeOffset Default", default(DateTimeOffset));
Assert.AreEqual("0001-01-01T00:00:00+00:00", result.IsoDateRoundtrip);
Assert.AreEqual(@"\/Date(-62135596800000+0000)\/", result.MsDateRoundtrip);
#endif
}
// Serialized text captured by TestDateTime for each combination of
// DateFormatHandling (Iso*/Ms* prefix) and DateTimeZoneHandling (suffix).
// Only the Roundtrip fields are populated for DateTimeOffset values.
public class DateTimeResult
{
public string IsoDateRoundtrip { get; set; }
public string IsoDateLocal { get; set; }
public string IsoDateUnspecified { get; set; }
public string IsoDateUtc { get; set; }
public string MsDateRoundtrip { get; set; }
public string MsDateLocal { get; set; }
public string MsDateUnspecified { get; set; }
public string MsDateUtc { get; set; }
}
// Serializes a DateTime or DateTimeOffset value in every supported combination of
// DateFormatHandling and DateTimeZoneHandling, logging each representation and
// returning them for the caller to assert. Also cross-checks against
// IsoDateTimeConverter, XmlConvert and (where available) DataContractSerializer.
private DateTimeResult TestDateTime<T>(string name, T value)
{
Console.WriteLine(name);
DateTimeResult result = new DateTimeResult()
{
IsoDateRoundtrip = TestDateTimeFormat(value, DateFormatHandling.IsoDateFormat, DateTimeZoneHandling.RoundtripKind)
};
// Zone-handling variants only make sense for DateTime; DateTimeOffset already
// carries its own explicit offset.
if (value is DateTime)
{
result.IsoDateLocal = TestDateTimeFormat(value, DateFormatHandling.IsoDateFormat, DateTimeZoneHandling.Local);
result.IsoDateUnspecified = TestDateTimeFormat(value, DateFormatHandling.IsoDateFormat, DateTimeZoneHandling.Unspecified);
result.IsoDateUtc = TestDateTimeFormat(value, DateFormatHandling.IsoDateFormat, DateTimeZoneHandling.Utc);
}
result.MsDateRoundtrip = TestDateTimeFormat(value, DateFormatHandling.MicrosoftDateFormat, DateTimeZoneHandling.RoundtripKind);
if (value is DateTime)
{
result.MsDateLocal = TestDateTimeFormat(value, DateFormatHandling.MicrosoftDateFormat, DateTimeZoneHandling.Local);
result.MsDateUnspecified = TestDateTimeFormat(value, DateFormatHandling.MicrosoftDateFormat, DateTimeZoneHandling.Unspecified);
result.MsDateUtc = TestDateTimeFormat(value, DateFormatHandling.MicrosoftDateFormat, DateTimeZoneHandling.Utc);
}
// Round-trip through the converter as an additional sanity check (no return value).
TestDateTimeFormat(value, new IsoDateTimeConverter());
// Log the BCL's own XML representation for manual comparison.
if (value is DateTime)
{
Console.WriteLine(XmlConvert.ToString((DateTime)(object)value, XmlDateTimeSerializationMode.RoundtripKind));
}
else
{
Console.WriteLine(XmlConvert.ToString((DateTimeOffset)(object)value));
}
#if !NET20
// Log what DataContractSerializer emits for the same value, for comparison.
MemoryStream ms = new MemoryStream();
DataContractSerializer s = new DataContractSerializer(typeof(T));
s.WriteObject(ms, value);
string json = Encoding.UTF8.GetString(ms.ToArray(), 0, Convert.ToInt32(ms.Length));
Console.WriteLine(json);
#endif
Console.WriteLine();
return result;
}
// Serializes a DateTime or DateTimeOffset with the given format and zone handling,
// re-parses RoundtripKind output to confirm it round-trips, and returns the
// serialized text with its surrounding quotes stripped.
private static string TestDateTimeFormat<T>(T value, DateFormatHandling format, DateTimeZoneHandling timeZoneHandling)
{
string date = null;
if (value is DateTime)
{
date = JsonConvert.ToString((DateTime)(object)value, format, timeZoneHandling);
}
else
{
// NOTE(review): on NET20 this branch leaves 'date' null, but T cannot be
// DateTimeOffset there since the type doesn't exist — so it is unreachable.
#if !NET20
date = JsonConvert.ToString((DateTimeOffset)(object)value, format);
#endif
}
Console.WriteLine(format.ToString("g") + "-" + timeZoneHandling.ToString("g") + ": " + date);
if (timeZoneHandling == DateTimeZoneHandling.RoundtripKind)
{
T parsed = JsonConvert.DeserializeObject<T>(date);
if (!value.Equals(parsed))
{
// The Microsoft date format only stores millisecond precision, so compare
// after truncating the original to whole milliseconds (10000 ticks = 1 ms).
long valueTicks = GetTicks(value);
long parsedTicks = GetTicks(parsed);
valueTicks = (valueTicks / 10000) * 10000;
Assert.AreEqual(valueTicks, parsedTicks);
}
}
return date.Trim('"');
}
// Round-trips the value through the given converter's WriteJson/ReadJson pair
// and asserts equality, falling back to millisecond-precision comparison when
// the converter's format cannot represent full tick precision.
private static void TestDateTimeFormat<T>(T value, JsonConverter converter)
{
string date = Write(value, converter);
Console.WriteLine(converter.GetType().Name + ": " + date);
T parsed = Read<T>(date, converter);
try
{
Assert.AreEqual(value, parsed);
}
catch (Exception)
{
// JavaScript ticks aren't as precise, recheck after rounding
// (truncate to whole milliseconds: 10000 ticks = 1 ms).
long valueTicks = GetTicks(value);
long parsedTicks = GetTicks(parsed);
valueTicks = (valueTicks / 10000) * 10000;
Assert.AreEqual(valueTicks, parsedTicks);
}
}
/// <summary>
/// Returns the tick count of a boxed <see cref="DateTime"/> or
/// <see cref="DateTimeOffset"/>. Any other type throws InvalidCastException,
/// matching the original's behavior.
/// </summary>
public static long GetTicks(object value)
{
    if (value is DateTime)
    {
        return ((DateTime)value).Ticks;
    }
    return ((DateTimeOffset)value).Ticks;
}
/// <summary>
/// Runs the converter's WriteJson against an in-memory writer and returns the
/// raw JSON text it produced.
/// </summary>
public static string Write(object value, JsonConverter converter)
{
    StringWriter stringWriter = new StringWriter();
    JsonTextWriter jsonWriter = new JsonTextWriter(stringWriter);
    converter.WriteJson(jsonWriter, value, null);
    jsonWriter.Flush();
    return stringWriter.ToString();
}
/// <summary>
/// Feeds the given JSON text to the converter's ReadJson and returns the parsed
/// value. ReadAsString advances the reader onto the string token first, mirroring
/// how the serializer positions the reader before invoking a converter.
/// </summary>
public static T Read<T>(string text, JsonConverter converter)
{
    JsonTextReader jsonReader = new JsonTextReader(new StringReader(text));
    jsonReader.ReadAsString();
    return (T)converter.ReadJson(jsonReader, typeof(T), null, null);
}
[Test]
public void SerializeObjectDateTimeZoneHandling()
{
    // An Unspecified-kind DateTime is treated as UTC when
    // DateTimeZoneHandling.Utc is configured, so a "Z" designator is emitted.
    JsonSerializerSettings settings = new JsonSerializerSettings
    {
        DateTimeZoneHandling = DateTimeZoneHandling.Utc
    };
    DateTime unspecified = new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Unspecified);
    string json = JsonConvert.SerializeObject(unspecified, settings);
    Assert.AreEqual(@"""2000-01-01T01:01:01Z""", json);
}
[Test]
public void DeserializeObject()
{
// Lenient parsing: single-quoted names/values and a non-zero-padded month/day
// in the date are still accepted by the reader.
string json = @"{
'Name': 'Bad Boys',
'ReleaseDate': '1995-4-7T00:00:00',
'Genres': [
'Action',
'Comedy'
]
}";
Movie m = JsonConvert.DeserializeObject<Movie>(json);
string name = m.Name;
// Bad Boys
Assert.AreEqual("Bad Boys", m.Name);
}
#if !NET20
[Test]
public void TestJsonDateTimeOffsetRoundtrip()
{
    // Round-trip a DateTimeOffset through SerializeObject + PopulateObject and
    // verify the value (including its offset) survives when DateParseHandling
    // is set to DateTimeOffset with RoundtripKind zone handling.
    var now = DateTimeOffset.Now;
    var dict = new Dictionary<string, object> { { "foo", now } };
    var settings = new JsonSerializerSettings()
    {
        DateFormatHandling = DateFormatHandling.IsoDateFormat,
        DateParseHandling = DateParseHandling.DateTimeOffset,
        DateTimeZoneHandling = DateTimeZoneHandling.RoundtripKind
    };
    var json = JsonConvert.SerializeObject(dict, settings);
    var newDict = new Dictionary<string, object>();
    JsonConvert.PopulateObject(json, newDict, settings);
    var date = newDict["foo"];
    // Fix: Assert.AreEqual takes (expected, actual); the original reversed them,
    // which produces a misleading failure message and is inconsistent with the
    // rest of this file.
    Assert.AreEqual(now, date);
}
[Test]
public void MaximumDateTimeOffsetLength()
{
    // Longest possible ISO 8601 DateTimeOffset rendering: seven fractional
    // digits plus a non-zero hour-and-minute offset.
    DateTimeOffset value = new DateTimeOffset(2000, 12, 31, 20, 59, 59, new TimeSpan(0, 11, 33, 0, 0)).AddTicks(9999999);
    StringWriter sw = new StringWriter();
    JsonTextWriter writer = new JsonTextWriter(sw);
    writer.WriteValue(value);
    writer.Flush();
    Assert.AreEqual(@"""2000-12-31T20:59:59.9999999+11:33""", sw.ToString());
}
#endif
[Test]
public void MaximumDateTimeLength()
{
// Smoke test: writing a maximum-precision local DateTime (7 fractional digits
// plus the machine's UTC offset) must not throw or overflow the writer's
// internal date buffer. The exact output depends on the local time zone, so
// nothing is asserted.
DateTime dt = new DateTime(2000, 12, 31, 20, 59, 59, DateTimeKind.Local);
dt = dt.AddTicks(9999999);
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw);
writer.WriteValue(dt);
writer.Flush();
}
[Test]
public void MaximumDateTimeMicrosoftDateFormatLength()
{
// Smoke test: DateTime.MaxValue written in the Microsoft "\/Date()\/" format
// must not throw or overflow the writer's buffer; the output is not asserted.
DateTime dt = DateTime.MaxValue;
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw)
{
DateFormatHandling = DateFormatHandling.MicrosoftDateFormat
};
writer.WriteValue(dt);
writer.Flush();
}
#if !(NET20 || NET35 || PORTABLE40 || PORTABLE) || NETSTANDARD1_3 || NETSTANDARD2_0
[Test]
public void IntegerLengthOverflows()
{
// Maximum javascript number length (in characters) is 380
// A 380-digit integer still parses, materializing as a BigInteger JValue.
JObject o = JObject.Parse(@"{""biginteger"":" + new String('9', 380) + "}");
JValue v = (JValue)o["biginteger"];
Assert.AreEqual(JTokenType.Integer, v.Type);
Assert.AreEqual(typeof(BigInteger), v.Value.GetType());
Assert.AreEqual(BigInteger.Parse(new String('9', 380)), (BigInteger)v.Value);
// One digit more exceeds the reader's length cap and must fail with JsonReaderException.
ExceptionAssert.Throws<JsonReaderException>(() => JObject.Parse(@"{""biginteger"":" + new String('9', 381) + "}"), "JSON integer " + new String('9', 381) + " is too large to parse. Path 'biginteger', line 1, position 395.");
}
#endif
[Test]
public void ParseIsoDate()
{
    // By default an ISO 8601 string (even with an offset) is materialized as a
    // DateTime, not a DateTimeOffset or a plain string.
    JsonReader jsonReader = new JsonTextReader(new StringReader(@"""2014-02-14T14:25:02-13:00"""));
    Assert.IsTrue(jsonReader.Read());
    Assert.AreEqual(typeof(DateTime), jsonReader.ValueType);
}
#if DNXCORE50
[Test(Skip = "Don't run with other unit tests")]
#else
[Ignore("Don't run with other unit tests")]
[Test]
#endif
public void StackOverflowTest()
{
    // Build 900 levels of nested objects around a deliberately invalid payload;
    // deserialization should surface a parse error rather than overflow the stack.
    const int depth = 900;
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < depth; i++)
    {
        sb.Append("{'A':");
    }
    // invalid json
    sb.Append("{***}");
    for (int i = 0; i < depth; i++)
    {
        sb.Append("}");
    }
    JsonSerializer serializer = new JsonSerializer();
    serializer.Deserialize<Nest>(new JsonTextReader(new StringReader(sb.ToString())));
}
// Self-referencing type used by StackOverflowTest to model arbitrarily deep nesting.
public class Nest
{
public Nest A { get; set; }
}
[Test]
public void ParametersPassedToJsonConverterConstructor()
{
    // Each [JsonConverter] attribute's extra arguments select a
    // ClobberingJsonConverter constructor: "Three" exercises the one-argument
    // overload (int defaults to 1337) and "Four" has no converter at all.
    ClobberMyProperties clobber = new ClobberMyProperties
    {
        One = "Red",
        Two = "Green",
        Three = "Yellow",
        Four = "Black"
    };
    string serialized = JsonConvert.SerializeObject(clobber);
    Assert.AreEqual("{\"One\":\"Uno-1-Red\",\"Two\":\"Dos-2-Green\",\"Three\":\"Tres-1337-Yellow\",\"Four\":\"Black\"}", serialized);
}
// Each property binds ClobberingJsonConverter through a different constructor overload.
public class ClobberMyProperties
{
// (string, int) overload.
[JsonConverter(typeof(ClobberingJsonConverter), "Uno", 1)]
public string One { get; set; }
[JsonConverter(typeof(ClobberingJsonConverter), "Dos", 2)]
public string Two { get; set; }
// Single-argument overload; the int falls back to 1337.
[JsonConverter(typeof(ClobberingJsonConverter), "Tres")]
public string Three { get; set; }
// No converter - serialized verbatim.
public string Four { get; set; }
}
// Write-only converter whose output embeds its constructor arguments, letting
// tests verify which [JsonConverter] attribute parameters were bound.
public class ClobberingJsonConverter : JsonConverter
{
    public string ClobberValueString { get; private set; }

    public int ClobberValueInt { get; private set; }

    public ClobberingJsonConverter(string clobberValueString, int clobberValueInt)
    {
        ClobberValueString = clobberValueString;
        ClobberValueInt = clobberValueInt;
    }

    // Single-argument overload uses a recognizable sentinel for the int.
    public ClobberingJsonConverter(string clobberValueString)
        : this(clobberValueString, 1337)
    {
    }

    public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
    {
        // Prefix the value with both constructor arguments so the serialized
        // text reveals which constructor ran and with what parameters.
        writer.WriteValue(string.Concat(ClobberValueString, "-", ClobberValueInt.ToString(), "-", value.ToString()));
    }

    public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
    {
        throw new NotImplementedException();
    }

    public override bool CanConvert(Type objectType)
    {
        return objectType == typeof(string);
    }
}
[Test]
public void WrongParametersPassedToJsonConvertConstructorShouldThrow()
{
    // IncorrectJsonConvertParameters supplies (string, string) attribute arguments,
    // which match no ClobberingJsonConverter constructor — serialization must fail.
    IncorrectJsonConvertParameters value = new IncorrectJsonConvertParameters { One = "Boom" };
    ExceptionAssert.Throws<JsonException>(() => JsonConvert.SerializeObject(value));
}
public class IncorrectJsonConvertParameters
{
/// <summary>
/// We deliberately use the wrong number/type of arguments for ClobberingJsonConverter to ensure an
/// exception is thrown.
/// </summary>
// ClobberingJsonConverter only declares (string) and (string, int) constructors,
// so the (string, string) argument list below cannot be bound.
[JsonConverter(typeof(ClobberingJsonConverter), "Uno", "Blammo")]
public string One { get; set; }
}
// Converter with many constructor overloads; WriteJson emits the name of the
// overload that was chosen, so tests can assert how [JsonConverter] attribute
// arguments are bound to constructors.
// NOTE(review): "Converterer" is misspelled, but renaming it would break every
// attribute reference below, so it is left as-is.
public class OverloadsJsonConverterer : JsonConverter
{
// Name of the constructor overload that was invoked.
private readonly string _type;
// constructor with Type argument
public OverloadsJsonConverterer(Type typeParam)
{
_type = "Type";
}
// Catch-all overload; records the runtime type of the argument it received.
public OverloadsJsonConverterer(object objectParam)
{
_type = string.Format("object({0})", objectParam.GetType().FullName);
}
// primitive type conversions
public OverloadsJsonConverterer(byte byteParam)
{
_type = "byte";
}
public OverloadsJsonConverterer(short shortParam)
{
_type = "short";
}
public OverloadsJsonConverterer(int intParam)
{
_type = "int";
}
public OverloadsJsonConverterer(long longParam)
{
_type = "long";
}
public OverloadsJsonConverterer(double doubleParam)
{
_type = "double";
}
// params argument
public OverloadsJsonConverterer(params int[] intParams)
{
_type = "int[]";
}
public OverloadsJsonConverterer(bool[] intParams)
{
_type = "bool[]";
}
// closest type resolution
public OverloadsJsonConverterer(IEnumerable<string> iEnumerableParam)
{
_type = "IEnumerable<string>";
}
public OverloadsJsonConverterer(IList<string> iListParam)
{
_type = "IList<string>";
}
// Emits the name of the chosen overload in place of the actual value.
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
writer.WriteValue(_type);
}
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override bool CanConvert(Type objectType)
{
return objectType == typeof(int);
}
}
public class OverloadWithTypeParameter
{
// typeof(int) should bind to OverloadsJsonConverterer's Type constructor.
[JsonConverter(typeof(OverloadsJsonConverterer), typeof(int))]
public int Overload { get; set; }
}
[Test]
public void JsonConverterConstructor_OverloadWithTypeParam()
{
    // typeof(int) in the attribute should select the Type constructor overload.
    string serialized = JsonConvert.SerializeObject(new OverloadWithTypeParameter());
    Assert.AreEqual("{\"Overload\":\"Type\"}", serialized);
}
public class OverloadWithUnhandledParameter
{
// No string-specific constructor exists, so binding should fall back to the
// object overload (see JsonConverterConstructor_OverloadWithUnhandledParam_FallbackToObject).
[JsonConverter(typeof(OverloadsJsonConverterer), "str")]
public int Overload { get; set; }
}
[Test]
public void JsonConverterConstructor_OverloadWithUnhandledParam_FallbackToObject()
{
    // A string argument matches no specific overload, so construction falls
    // back to the object constructor, which records the argument's runtime type.
    string serialized = JsonConvert.SerializeObject(new OverloadWithUnhandledParameter());
    Assert.AreEqual("{\"Overload\":\"object(System.String)\"}", serialized);
}
// One holder class per primitive attribute-argument type. Each passes a literal
// of a different type to OverloadsJsonConverterer so tests can observe which
// constructor overload the attribute argument binds to.
public class OverloadWithIntParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), 1)]
public int Overload { get; set; }
}
public class OverloadWithUIntParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), 1U)]
public int Overload { get; set; }
}
public class OverloadWithLongParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), 1L)]
public int Overload { get; set; }
}
public class OverloadWithULongParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), 1UL)]
public int Overload { get; set; }
}
public class OverloadWithShortParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), (short)1)]
public int Overload { get; set; }
}
public class OverloadWithUShortParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), (ushort)1)]
public int Overload { get; set; }
}
public class OverloadWithSByteParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), (sbyte)1)]
public int Overload { get; set; }
}
public class OverloadWithByteParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), (byte)1)]
public int Overload { get; set; }
}
public class OverloadWithCharParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), 'a')]
public int Overload { get; set; }
}
public class OverloadWithBoolParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), true)]
public int Overload { get; set; }
}
public class OverloadWithFloatParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), 1.5f)]
public int Overload { get; set; }
}
public class OverloadWithDoubleParameter
{
[JsonConverter(typeof(OverloadsJsonConverterer), 1.5)]
public int Overload { get; set; }
}
[Test]
public void JsonConverterConstructor_OverloadsWithPrimitiveParams()
{
// Each holder hands a differently-typed primitive to the converter
// attribute; the serialized value records which constructor overload won.
{
var json = JsonConvert.SerializeObject(new OverloadWithIntParameter());
Assert.AreEqual("{\"Overload\":\"int\"}", json);
}
{
// uint widens to long
var json = JsonConvert.SerializeObject(new OverloadWithUIntParameter());
Assert.AreEqual("{\"Overload\":\"long\"}", json);
}
{
var json = JsonConvert.SerializeObject(new OverloadWithLongParameter());
Assert.AreEqual("{\"Overload\":\"long\"}", json);
}
{
// ulong widens to double
var json = JsonConvert.SerializeObject(new OverloadWithULongParameter());
Assert.AreEqual("{\"Overload\":\"double\"}", json);
}
{
var json = JsonConvert.SerializeObject(new OverloadWithShortParameter());
Assert.AreEqual("{\"Overload\":\"short\"}", json);
}
{
// ushort widens to int
var json = JsonConvert.SerializeObject(new OverloadWithUShortParameter());
Assert.AreEqual("{\"Overload\":\"int\"}", json);
}
{
// sbyte widens to short
var json = JsonConvert.SerializeObject(new OverloadWithSByteParameter());
Assert.AreEqual("{\"Overload\":\"short\"}", json);
}
{
var json = JsonConvert.SerializeObject(new OverloadWithByteParameter());
Assert.AreEqual("{\"Overload\":\"byte\"}", json);
}
{
// char widens to int
var json = JsonConvert.SerializeObject(new OverloadWithCharParameter());
Assert.AreEqual("{\"Overload\":\"int\"}", json);
}
{
// bool binds to the object fallback
var json = JsonConvert.SerializeObject(new OverloadWithBoolParameter());
Assert.AreEqual("{\"Overload\":\"object(System.Boolean)\"}", json);
}
{
// float widens to double
var json = JsonConvert.SerializeObject(new OverloadWithFloatParameter());
Assert.AreEqual("{\"Overload\":\"double\"}", json);
}
{
var json = JsonConvert.SerializeObject(new OverloadWithDoubleParameter());
Assert.AreEqual("{\"Overload\":\"double\"}", json);
}
}
// Holder: passes array arguments to the converter attribute; the test
// below expects them to bind to int[] and bool[] overloads respectively.
public class OverloadWithArrayParameters
{
[JsonConverter(typeof(OverloadsJsonConverterer), new int[] { 1, 2, 3 })]
public int WithParams { get; set; }
[JsonConverter(typeof(OverloadsJsonConverterer), new bool[] { true, false })]
public int WithoutParams { get; set; }
}
[Test]
public void JsonConverterConstructor_OverloadsWithArrayParams()
{
// Array attribute arguments bind to the matching array-typed overloads.
var json = JsonConvert.SerializeObject(new OverloadWithArrayParameters());
Assert.AreEqual("{\"WithParams\":\"int[]\",\"WithoutParams\":\"bool[]\"}", json);
}
// Holder: passes a string[]; the test below expects it to bind to the
// IList<string> overload rather than IEnumerable<string>.
public class OverloadWithBaseType
{
[JsonConverter(typeof(OverloadsJsonConverterer), new object[] { new string[] { "a", "b", "c" } })]
public int Overload { get; set; }
}
[Test]
public void JsonConverterConstructor_OverloadsWithBaseTypes()
{
// string[] should pick the IList<string> overload over IEnumerable<string>.
var json = JsonConvert.SerializeObject(new OverloadWithBaseType());
Assert.AreEqual("{\"Overload\":\"IList<string>\"}", json);
}
[Test]
public void CustomDoubleRounding()
{
// Item-level and member-level RoundingJsonConverter instances apply their
// configured precision and midpoint-rounding settings on serialization.
var source = new Measurements
{
Loads = new List<double> { 23283.567554707258, 23224.849899771067, 23062.5, 22846.272519910868, 22594.281246368635 },
Positions = new List<double> { 57.724227689317019, 60.440934405753069, 63.444192925248643, 66.813119113482557, 70.4496501404433 },
Gain = 12345.67895111213
};
var json = JsonConvert.SerializeObject(source);
Assert.AreEqual("{\"Positions\":[57.72,60.44,63.44,66.81,70.45],\"Loads\":[23284.0,23225.0,23062.0,22846.0,22594.0],\"Gain\":12345.679}", json);
}
// Fixture: exercises RoundingJsonConverter three ways — item converter
// with defaults, item converter with explicit parameters, and a member
// converter with a precision argument.
public class Measurements
{
[JsonProperty(ItemConverterType = typeof(RoundingJsonConverter))]
public List<double> Positions { get; set; }
[JsonProperty(ItemConverterType = typeof(RoundingJsonConverter), ItemConverterParameters = new object[] { 0, MidpointRounding.ToEven })]
public List<double> Loads { get; set; }
[JsonConverter(typeof(RoundingJsonConverter), 4)]
public double Gain { get; set; }
}
// Rounds double values on serialization. Precision and midpoint behaviour
// come from the JsonConverter attribute's constructor arguments; the
// parameterless form defaults to 2 digits, away-from-zero.
public class RoundingJsonConverter : JsonConverter
{
private readonly int _digits;
private readonly MidpointRounding _mode;
public RoundingJsonConverter()
: this(2)
{
}
public RoundingJsonConverter(int precision)
: this(precision, MidpointRounding.AwayFromZero)
{
}
public RoundingJsonConverter(int precision, MidpointRounding rounding)
{
_digits = precision;
_mode = rounding;
}
// Write-only converter (CanRead == false); ReadJson is never invoked.
public override bool CanRead => false;
public override bool CanConvert(Type objectType) => objectType == typeof(double);
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) => throw new NotImplementedException();
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) => writer.WriteValue(Math.Round((double)value, _digits, _mode));
}
[Test]
public void GenericBaseClassSerialization()
{
// The property overridden through the generic hierarchy serializes once.
var json = JsonConvert.SerializeObject(new NonGenericChildClass());
Assert.AreEqual("{\"Data\":null}", json);
}
// Three-level hierarchy: open generic base, partially-closed intermediate
// (T fixed to string) overriding Data, and a fully-closed leaf.
public class GenericBaseClass<O, T>
{
public virtual T Data { get; set; }
}
public class GenericIntermediateClass<O> : GenericBaseClass<O, string>
{
public override string Data { get; set; }
}
public class NonGenericChildClass : GenericIntermediateClass<int>
{
}
[Test]
public void ShouldNotPopulateReadOnlyEnumerableObjectWithNonDefaultConstructor()
{
// Deserialization must not try to populate the converter-backed get-only property.
var actual = JsonConvert.DeserializeObject<HasReadOnlyEnumerableObject>("{\"foo\":{}}");
Assert.IsNotNull(actual);
}
[Test]
public void ShouldNotPopulateReadOnlyEnumerableObjectWithDefaultConstructor()
{
// Same as above, but with a parameterless [JsonConstructor].
var actual = JsonConvert.DeserializeObject<HasReadOnlyEnumerableObjectAndDefaultConstructor>("{\"foo\":{}}");
Assert.IsNotNull(actual);
}
[Test]
public void ShouldNotPopulateContructorArgumentEnumerableObject()
{
// The enumerable arrives via a constructor parameter instead of a property.
var actual = JsonConvert.DeserializeObject<AcceptsEnumerableObjectToConstructor>("{\"foo\":{}}");
Assert.IsNotNull(actual);
}
[Test]
public void ShouldNotPopulateEnumerableObjectProperty()
{
// A settable property should be replaced via the converter, not populated.
var actual = JsonConvert.DeserializeObject<HasEnumerableObject>("{\"foo\":{}}");
Assert.IsNotNull(actual);
}
#if !(NET40 || NET35 || NET20 || PORTABLE40)
// IReadOnlyDictionary<,> requires .NET 4.5+, hence the conditional block.
[Test]
public void ShouldNotPopulateReadOnlyDictionaryObjectWithNonDefaultConstructor()
{
// The get-only read-only dictionary must not be populated during deserialization.
object actual = JsonConvert.DeserializeObject<HasReadOnlyDictionary>("{\"foo\":{'key':'value'}}");
Assert.IsNotNull(actual);
}
public sealed class HasReadOnlyDictionary
{
[JsonProperty("foo")]
public IReadOnlyDictionary<string, string> Foo { get; } = new ReadOnlyDictionary<string, string>(new Dictionary<string, string>());
[JsonConstructor]
public HasReadOnlyDictionary([JsonProperty("bar")] int bar)
{
}
}
#endif
// Get-only converter-backed property plus a non-default [JsonConstructor].
public sealed class HasReadOnlyEnumerableObject
{
[JsonProperty("foo")]
public EnumerableWithConverter Foo { get; } = new EnumerableWithConverter();
[JsonConstructor]
public HasReadOnlyEnumerableObject([JsonProperty("bar")] int bar)
{
}
}
// Same shape, but with a parameterless [JsonConstructor].
public sealed class HasReadOnlyEnumerableObjectAndDefaultConstructor
{
[JsonProperty("foo")]
public EnumerableWithConverter Foo { get; } = new EnumerableWithConverter();
[JsonConstructor]
public HasReadOnlyEnumerableObjectAndDefaultConstructor()
{
}
}
// The enumerable is supplied purely through constructor parameters.
public sealed class AcceptsEnumerableObjectToConstructor
{
[JsonConstructor]
public AcceptsEnumerableObjectToConstructor
(
[JsonProperty("foo")] EnumerableWithConverter foo,
[JsonProperty("bar")] int bar
)
{
}
}
// Settable converter-backed property plus a non-default [JsonConstructor].
public sealed class HasEnumerableObject
{
[JsonProperty("foo")]
public EnumerableWithConverter Foo { get; set; } = new EnumerableWithConverter();
[JsonConstructor]
public HasEnumerableObject([JsonProperty("bar")] int bar)
{
}
}
// An always-empty enumerable whose (de)serialization is handled entirely
// by the nested converter, which writes and reads an empty JSON object.
[JsonConverter(typeof(Converter))]
public sealed class EnumerableWithConverter : IEnumerable<int>
{
public sealed class Converter : JsonConverter
{
public override bool CanConvert(Type objectType)
{
return objectType == typeof(Foo);
}
public override object ReadJson
(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
// Consume the (empty) object and hand back a fresh instance.
reader.Skip();
return new EnumerableWithConverter();
}
public override void WriteJson
(JsonWriter writer, object value, JsonSerializer serializer)
{
writer.WriteStartObject();
writer.WriteEndObject();
}
}
// The sequence itself yields nothing.
public IEnumerator<int> GetEnumerator()
{
yield break;
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
}
[Test]
public void ShouldNotRequireIgnoredPropertiesWithItemsRequired()
{
// ItemRequired = Required.Always on the target type must not demand
// values for [JsonIgnore]d members; only "exp" and "active" are supplied.
string json = @"{
""exp"": 1483228800,
""active"": true
}";
ItemsRequiredObjectWithIgnoredProperty value = JsonConvert.DeserializeObject<ItemsRequiredObjectWithIgnoredProperty>(json);
Assert.IsNotNull(value);
// 1483228800 seconds after the Unix epoch is 2017-01-01T00:00:00Z.
Assert.AreEqual(value.Expiration, new DateTime(2017, 1, 1, 0, 0, 0, DateTimeKind.Utc));
Assert.AreEqual(value.Active, true);
}
// ItemRequired = Required.Always applies to serialized members only; the
// [JsonIgnore]d Expiration is surfaced on the wire as Unix seconds ("exp").
[JsonObject(ItemRequired = Required.Always)]
public sealed class ItemsRequiredObjectWithIgnoredProperty
{
private static readonly DateTime s_unixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
// Wire representation of Expiration as whole seconds since the epoch.
[JsonProperty("exp")]
private int _expiration
{
get => (int)(Expiration - s_unixEpoch).TotalSeconds;
set => Expiration = s_unixEpoch.AddSeconds(value);
}
public bool Active { get; set; }
[JsonIgnore]
public DateTime Expiration { get; set; }
}
}
}
| |
using System;
using System.Text;
using UnityEditor;
using UnityEngine;
namespace GitHub.Unity
{
[Serializable]
// Editor settings sub-view for configuring the git and git-lfs executable
// paths: manual entry/browse, reset to the bundled git, or locating the
// system git. Changes are only persisted via the Save button, which runs
// validation asynchronously through GitInstaller.
class GitPathView : Subview
{
// UI labels and user-facing error strings.
private const string GitInstallTitle = "Git installation";
private const string PathToGit = "Path to Git";
private const string PathToGitLfs = "Path to Git LFS";
private const string GitPathSaveButton = "Save";
private const string SetToBundledGitButton = "Set to bundled git";
private const string FindSystemGitButton = "Find system git";
private const string BrowseButton = "...";
private const string GitInstallBrowseTitle = "Select executable";
private const string ErrorInvalidPathMessage = "Invalid Path.";
private const string ErrorInstallingInternalGit = "Error installing portable git.";
private const string ErrorValidatingGitPath = "Error validating Git Path.";
private const string ErrorGitNotFoundMessage = "Git not found.";
private const string ErrorGitLfsNotFoundMessage = "Git LFS not found.";
private const string ErrorMinimumGitVersionMessageFormat = "Git version {0} found. Git version {1} is required.";
private const string ErrorMinimumGitLfsVersionMessageFormat = "Git LFS version {0} found. Git LFS version {1} is required.";
// Serialized so the edited values and pending-action flags survive
// Unity domain reloads.
[SerializeField] private string gitPath;
[SerializeField] private string gitLfsPath;
[SerializeField] private string errorMessage;
[SerializeField] private bool resetToBundled;
[SerializeField] private bool resetToSystem;
[SerializeField] private bool changingManually;
// Transient state, rebuilt after a reload.
[NonSerialized] private bool isBusy;
[NonSerialized] private bool refresh;
[NonSerialized] private GitInstaller.GitInstallationState installationState;
[NonSerialized] private GitInstaller.GitInstallDetails installDetails;
public override void InitializeView(IView parent)
{
base.InitializeView(parent);
}
public override void OnEnable()
{
base.OnEnable();
// Re-pull installation state on the next data update.
refresh = true;
}
public override void OnDataUpdate()
{
base.OnDataUpdate();
MaybeUpdateData();
}
// Copies the persisted installation state into the editable fields, once
// per requested refresh.
private void MaybeUpdateData()
{
if (refresh)
{
installationState = Environment.GitInstallationState;
gitPath = installationState.GitExecutablePath;
gitLfsPath = installationState.GitLfsExecutablePath;
installDetails = new GitInstaller.GitInstallDetails(Environment.UserCachePath, Environment.IsWindows);
refresh = false;
}
}
public override void OnGUI()
{
// Install path
GUILayout.Label(GitInstallTitle, EditorStyles.boldLabel);
// Everything is disabled while this view or its parent is busy.
EditorGUI.BeginDisabledGroup(IsBusy || Parent.IsBusy);
{
GUILayout.BeginVertical();
{
// Git executable path: text field + browse button.
GUILayout.BeginHorizontal();
{
EditorGUI.BeginChangeCheck();
{
gitPath = EditorGUILayout.TextField(PathToGit, gitPath);
gitPath = gitPath != null ? gitPath.Trim() : gitPath;
if (GUILayout.Button(BrowseButton, EditorStyles.miniButton, GUILayout.Width(Styles.BrowseButtonWidth)))
{
GUI.FocusControl(null);
var newPath = EditorUtility.OpenFilePanel(GitInstallBrowseTitle,
!String.IsNullOrEmpty(gitPath) ? gitPath.ToNPath().Parent : "",
Environment.ExecutableExtension.TrimStart('.'));
if (!string.IsNullOrEmpty(newPath))
{
gitPath = newPath.ToNPath().ToString();
}
}
}
if (EditorGUI.EndChangeCheck())
{
// NOTE(review): unlike the LFS field below, a git-path edit does
// not clear errorMessage here — confirm whether intentional.
changingManually = ViewHasChanges;
}
}
GUILayout.EndHorizontal();
// Git LFS executable path: text field + browse button.
GUILayout.BeginHorizontal();
{
EditorGUI.BeginChangeCheck();
{
gitLfsPath = EditorGUILayout.TextField(PathToGitLfs, gitLfsPath);
gitLfsPath = gitLfsPath != null ? gitLfsPath.Trim() : gitLfsPath;
if (GUILayout.Button(BrowseButton, EditorStyles.miniButton, GUILayout.Width(Styles.BrowseButtonWidth)))
{
GUI.FocusControl(null);
var newPath = EditorUtility.OpenFilePanel(GitInstallBrowseTitle,
!String.IsNullOrEmpty(gitLfsPath) ? gitLfsPath.ToNPath().Parent : "",
Environment.ExecutableExtension.TrimStart('.'));
if (!string.IsNullOrEmpty(newPath))
{
gitLfsPath = newPath.ToNPath().ToString();
}
}
}
if (EditorGUI.EndChangeCheck())
{
changingManually = ViewHasChanges;
errorMessage = "";
}
}
GUILayout.EndHorizontal();
}
GUILayout.EndVertical();
GUILayout.Space(EditorGUIUtility.standardVerticalSpacing);
GUILayout.BeginHorizontal();
{
// Save is enabled only when something is pending (manual edit or reset).
EditorGUI.BeginDisabledGroup(!changingManually && !resetToBundled && !resetToSystem);
{
if (GUILayout.Button(GitPathSaveButton, GUILayout.ExpandWidth(false)))
{
GUI.FocusControl(null);
isBusy = true;
ValidateAndSetGitInstallPath();
}
}
EditorGUI.EndDisabledGroup();
// disable the button if the paths are already pointing to the bundled git
// both on windows, only lfs on mac
EditorGUI.BeginDisabledGroup(
(!Environment.IsWindows || gitPath == installDetails.GitExecutablePath) &&
gitLfsPath == installDetails.GitLfsExecutablePath);
{
if (GUILayout.Button(SetToBundledGitButton, GUILayout.ExpandWidth(false)))
{
GUI.FocusControl(null);
// Only Windows bundles git itself; every platform bundles git-lfs.
if (Environment.IsWindows)
gitPath = installDetails.GitExecutablePath;
gitLfsPath = installDetails.GitLfsExecutablePath;
resetToBundled = ViewHasChanges;
resetToSystem = false;
changingManually = false;
errorMessage = "";
}
}
EditorGUI.EndDisabledGroup();
//Find button - for attempting to locate a new install
if (GUILayout.Button(FindSystemGitButton, GUILayout.ExpandWidth(false)))
{
GUI.FocusControl(null);
isBusy = true;
// Search runs off the UI thread; fields are updated back on the UI thread.
new FuncTask<GitInstaller.GitInstallationState>(TaskManager.Token, () =>
{
var gitInstaller = new GitInstaller(Environment, Manager.ProcessManager, TaskManager.Token);
return gitInstaller.FindSystemGit(new GitInstaller.GitInstallationState());
})
{ Message = "Locating git..." }
.FinallyInUI((success, ex, state) =>
{
if (success)
{
if (state.GitIsValid)
{
gitPath = state.GitExecutablePath;
}
if (state.GitLfsIsValid)
{
gitLfsPath = state.GitLfsExecutablePath;
}
}
else
{
Logger.Error(ex);
}
isBusy = false;
resetToBundled = false;
resetToSystem = ViewHasChanges;
changingManually = false;
errorMessage = "";
Redraw();
})
.Start();
}
}
GUILayout.EndHorizontal();
if (!String.IsNullOrEmpty(errorMessage))
{
GUILayout.BeginHorizontal();
{
GUILayout.Label(errorMessage, Styles.ErrorLabel);
}
GUILayout.EndHorizontal();
}
}
EditorGUI.EndDisabledGroup();
}
// Applies the pending change asynchronously: either re-installs the
// bundled git (resetToBundled) or validates the manually/system-chosen
// paths, surfacing version errors in errorMessage. Clears the pending
// flags and isBusy when done.
private void ValidateAndSetGitInstallPath()
{
if (resetToBundled)
{
new FuncTask<GitInstaller.GitInstallationState>(TaskManager.Token, () =>
{
var gitInstaller = new GitInstaller(Environment, Manager.ProcessManager, TaskManager.Token);
var state = new GitInstaller.GitInstallationState();
state = gitInstaller.SetDefaultPaths(state);
// on non-windows we only bundle git-lfs
if (!Environment.IsWindows)
{
state.GitExecutablePath = installationState.GitExecutablePath;
state.GitInstallationPath = installationState.GitInstallationPath;
}
state = gitInstaller.SetupGitIfNeeded(state);
if (state.GitIsValid && state.GitLfsIsValid)
{
Manager.SetupGit(state);
Manager.RestartRepository();
}
return state;
})
{ Message = "Setting up git... " }
.FinallyInUI((success, exception, state) =>
{
if (!success)
{
Logger.Error(exception, ErrorInstallingInternalGit);
errorMessage = ErrorValidatingGitPath;
}
else
{
// Re-read the persisted state on the next data update.
refresh = true;
}
isBusy = false;
resetToBundled = false;
resetToSystem = false;
changingManually = false;
Redraw();
}).Start();
}
else
{
// Manual or system-located paths: validate and install if needed.
var newState = new GitInstaller.GitInstallationState();
newState.GitExecutablePath = gitPath.ToNPath();
newState.GitLfsExecutablePath = gitLfsPath.ToNPath();
var installer = new GitInstaller(Environment, Manager.ProcessManager, TaskManager.Token);
installer.Progress.OnProgress += UpdateProgress;
new FuncTask<GitInstaller.GitInstallationState>(TaskManager.Token, () =>
{
return installer.SetupGitIfNeeded(newState);
})
.Then((success, state) =>
{
if (state.GitIsValid && state.GitLfsIsValid)
{
Manager.SetupGit(state);
Manager.RestartRepository();
}
return state;
})
.FinallyInUI((success, ex, state) =>
{
installer.Progress.OnProgress -= UpdateProgress;
if (!success)
{
// NOTE(review): early return leaves isBusy/pending flags set on
// failure — confirm whether intentional.
Logger.Error(ex, ErrorValidatingGitPath);
return;
}
if (!state.GitIsValid || !state.GitLfsIsValid)
{
// Build a user-facing message describing which tool is missing
// or below the minimum supported version.
var errorMessageStringBuilder = new StringBuilder();
Logger.Warning(
"Software versions do not meet minimums Git:{0} (Minimum:{1}) GitLfs:{2} (Minimum:{3})",
state.GitVersion, Constants.MinimumGitVersion, state.GitLfsVersion,
Constants.MinimumGitLfsVersion);
if (state.GitVersion == TheVersion.Default)
{
errorMessageStringBuilder.Append(ErrorGitNotFoundMessage);
}
else if (state.GitLfsVersion == TheVersion.Default)
{
errorMessageStringBuilder.Append(ErrorGitLfsNotFoundMessage);
}
else
{
if (state.GitVersion < Constants.MinimumGitVersion)
{
errorMessageStringBuilder.AppendFormat(ErrorMinimumGitVersionMessageFormat,
state.GitVersion, Constants.MinimumGitVersion);
}
if (state.GitLfsVersion < Constants.MinimumGitLfsVersion)
{
if (errorMessageStringBuilder.Length > 0)
{
errorMessageStringBuilder.Append(Environment.NewLine);
}
errorMessageStringBuilder.AppendFormat(ErrorMinimumGitLfsVersionMessageFormat,
state.GitLfsVersion, Constants.MinimumGitLfsVersion);
}
}
errorMessage = errorMessageStringBuilder.ToString();
}
else
{
Logger.Trace("Software versions meet minimums Git:{0} GitLfs:{1}",
state.GitVersion,
state.GitLfsVersion);
refresh = true;
}
isBusy = false;
resetToBundled = false;
resetToSystem = false;
changingManually = false;
Redraw();
}).Start();
}
}
// True when either edited path differs from the persisted installation state.
public bool ViewHasChanges
{
get
{
return gitPath != installationState.GitExecutablePath || gitLfsPath != installationState.GitLfsExecutablePath;
}
}
public override bool IsBusy
{
get { return isBusy; }
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace NStorage.Test.Util
{
using System;
using System.Text;
using NStorage.Utility;
using NUnit.Framework;
/**
 * Unit tests for StringUtil.
 *
 * @author Marc Johnson (mjohnson at apache dot org)
 * @author Glen Stampoultzis (glens at apache.org)
 * @author Sergei Kozello (sergeikozello at mail.ru)
 */
[TestFixture]
public class TestStringUtil
{
/**
 * Creates a new TestStringUtil.
 */
public TestStringUtil()
{
}
/**
 * Test the simple form of GetFromUnicodeBE (big-endian UTF-16 decoding).
 */
[Test]
public void TestSimpleGetFromUnicode()
{
// 16 big-endian UTF-16 code units: 'a'..'p'
byte[] Test_data = new byte[32];
int index = 0;
for (int k = 0; k < 16; k++)
{
Test_data[index++] = (byte)0;
Test_data[index++] = (byte)('a' + k);
}
Assert.AreEqual("abcdefghijklmnop",
StringUtil.GetFromUnicodeBE(Test_data));
}
/**
 * Test GetFromUnicodeBE with symbols whose codes are both below and above 127.
 */
[Test]
public void TestGetFromUnicodeSymbolsWithCodesMoreThan127()
{
byte[] Test_data = new byte[]{0x04, 0x22,
0x04, 0x35,
0x04, 0x41,
0x04, 0x42,
0x00, 0x20,
0x00, 0x74,
0x00, 0x65,
0x00, 0x73,
0x00, 0x74,
};
Assert.AreEqual("\u0422\u0435\u0441\u0442 test",
StringUtil.GetFromUnicodeBE(Test_data));
}
/**
 * Test GetFromUnicodeLE with symbols whose codes are both below and above 127.
 */
[Test]
public void TestGetFromUnicodeHighSymbolsWithCodesMoreThan127()
{
byte[] Test_data = new byte[]{0x22, 0x04,
0x35, 0x04,
0x41, 0x04,
0x42, 0x04,
0x20, 0x00,
0x74, 0x00,
0x65, 0x00,
0x73, 0x00,
0x74, 0x00,
};
Assert.AreEqual("\u0422\u0435\u0441\u0442 test",
StringUtil.GetFromUnicodeLE(Test_data));
}
/**
 * Test the offset/length form of GetFromUnicodeBE, including rejection
 * of invalid offsets and lengths.
 */
[Test]
public void TestComplexGetFromUnicode()
{
byte[] Test_data = new byte[32];
int index = 0;
for (int k = 0; k < 16; k++)
{
Test_data[index++] = (byte)0;
Test_data[index++] = (byte)('a' + k);
}
Assert.AreEqual("abcdefghijklmno",
StringUtil.GetFromUnicodeBE(Test_data, 0, 15));
Assert.AreEqual("bcdefghijklmnop",
StringUtil.GetFromUnicodeBE(Test_data, 2, 15));
// Offsets outside the buffer are rejected.
Assert.Throws<IndexOutOfRangeException>(
() => StringUtil.GetFromUnicodeBE(Test_data, -1, 16));
Assert.Throws<IndexOutOfRangeException>(
() => StringUtil.GetFromUnicodeBE(Test_data, 32, 16));
// Lengths that overrun the buffer, or are negative, are rejected.
Assert.Throws<ArgumentException>(
() => StringUtil.GetFromUnicodeBE(Test_data, 1, 16));
Assert.Throws<ArgumentException>(
() => StringUtil.GetFromUnicodeBE(Test_data, 1, -1));
}
/**
 * Test PutCompressedUnicode at the start, at the end, and past the end
 * of the destination buffer.
 */
[Test]
public void TestPutCompressedUnicode()
{
byte[] outPut = new byte[100];
byte[] expected_outPut =
{
(byte) 'H', (byte) 'e', (byte) 'l', (byte) 'l',
(byte) 'o', (byte) ' ', (byte) 'W', (byte) 'o',
(byte) 'r', (byte) 'l', (byte) 'd', (byte) 0xAE
};
// Decode the expected bytes with the preferred 8-bit encoding so the
// round trip is byte-exact (0xAE is outside ASCII).
String inPut = Encoding.GetEncoding(StringUtil.GetPreferredEncoding()).GetString(expected_outPut);
StringUtil.PutCompressedUnicode(inPut, outPut, 0);
for (int j = 0; j < expected_outPut.Length; j++)
{
Assert.AreEqual(expected_outPut[j],
outPut[j], "Testing offset " + j);
}
StringUtil.PutCompressedUnicode(inPut, outPut,
100 - expected_outPut.Length);
for (int j = 0; j < expected_outPut.Length; j++)
{
Assert.AreEqual(expected_outPut[j],
outPut[100 + j - expected_outPut.Length], "Testing offset " + j);
}
// Writing one byte past the end of the buffer must fail.
Assert.Throws<ArgumentException>(
() => StringUtil.PutCompressedUnicode(inPut, outPut,
101 - expected_outPut.Length));
}
/**
 * Test PutUnicodeLE at the start, at the end, and past the end of the
 * destination buffer.
 */
[Test]
public void TestPutUncompressedUnicode()
{
byte[] outPut = new byte[100];
String inPut = "Hello World";
byte[] expected_outPut =
{
(byte) 'H', (byte) 0, (byte) 'e', (byte) 0, (byte) 'l',
(byte) 0, (byte) 'l', (byte) 0, (byte) 'o', (byte) 0,
(byte) ' ', (byte) 0, (byte) 'W', (byte) 0, (byte) 'o',
(byte) 0, (byte) 'r', (byte) 0, (byte) 'l', (byte) 0,
(byte) 'd', (byte) 0
};
StringUtil.PutUnicodeLE(inPut, outPut, 0);
for (int j = 0; j < expected_outPut.Length; j++)
{
Assert.AreEqual(expected_outPut[j],
outPut[j], "Testing offset " + j);
}
StringUtil.PutUnicodeLE(inPut, outPut,
100 - expected_outPut.Length);
for (int j = 0; j < expected_outPut.Length; j++)
{
Assert.AreEqual(expected_outPut[j],
outPut[100 + j - expected_outPut.Length], "Testing offset " + j);
}
// Writing past the end of the buffer must fail.
Assert.Throws<ArgumentException>(
() => StringUtil.PutUnicodeLE(inPut, outPut,
101 - expected_outPut.Length));
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System.Collections.Generic;
using System.Collections.ObjectModel;
using Dbg = System.Management.Automation.Diagnostics;
using System.Management.Automation.Internal;
using Microsoft.Management.Infrastructure;
namespace System.Management.Automation.Runspaces
{
/// <summary>
/// Defines a Command object which can be added to <see cref="Pipeline"/> object
/// for invocation.
/// </summary>
public sealed class Command
{
#region constructors
/// <summary>
/// Initializes a new instance of Command class using specified command parameter.
/// Delegates to the (command, isScript, useLocalScope) overload with
/// isScript = false and no explicit scope preference.
/// </summary>
/// <param name="command">Name of the command or script contents.</param>
/// <exception cref="ArgumentNullException">Command is null.</exception>
public Command(string command)
: this(command, false, null)
{
}
/// <summary>
/// Initializes a new instance of Command class using specified command parameter.
/// Delegates to the (command, isScript, useLocalScope) overload with no
/// explicit scope preference.
/// </summary>
/// <param name="command">The command name or script contents.</param>
/// <param name="isScript">True if this command represents a script, otherwise; false.</param>
/// <exception cref="ArgumentNullException">Command is null.</exception>
public Command(string command, bool isScript)
: this(command, isScript, null)
{
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="command">The command name or script contents.</param>
/// <param name="isScript">True if this command represents a script, otherwise; false.</param>
/// <param name="useLocalScope">If true local scope is used to run the script command.</param>
/// <exception cref="ArgumentNullException">Command is null.</exception>
public Command(string command, bool isScript, bool useLocalScope)
// Delegate to the nullable-scope overload instead of duplicating its
// null-check and assignment logic.
: this(command, isScript, (bool?)useLocalScope)
{
}
/// <summary>
/// Initializes a command with an optional scope preference; a null
/// <paramref name="useLocalScope"/> means "no explicit preference"
/// (see <see cref="UseLocalScopeNullable"/>).
/// </summary>
/// <exception cref="ArgumentNullException">Command is null.</exception>
internal Command(string command, bool isScript, bool? useLocalScope)
{
IsEndOfStatement = false;
if (command == null)
{
throw PSTraceSource.NewArgumentNullException("command");
}
CommandText = command;
IsScript = isScript;
_useLocalScope = useLocalScope;
}
/// <summary>
/// Initializes a command and optionally marks it as the merge point for
/// unclaimed error results of previous commands (Output | Error).
/// </summary>
internal Command(string command, bool isScript, bool? useLocalScope, bool mergeUnclaimedPreviousErrorResults)
: this(command, isScript, useLocalScope)
{
if (mergeUnclaimedPreviousErrorResults)
{
_mergeUnclaimedPreviousCommandResults = PipelineResultTypes.Error | PipelineResultTypes.Output;
}
}
/// <summary>
/// Creates a command from resolved command info, treated as a non-script command.
/// </summary>
internal Command(CommandInfo commandInfo)
: this(commandInfo, false)
{
}
/// <summary>
/// Creates a command from resolved command info; CommandText mirrors the
/// command's name.
/// </summary>
internal Command(CommandInfo commandInfo, bool isScript)
{
IsEndOfStatement = false;
CommandInfo = commandInfo;
CommandText = CommandInfo.Name;
IsScript = isScript;
}
/// <summary>
/// Copy constructor for clone operations.
/// </summary>
/// <param name="command">The source <see cref="Command"/> instance.</param>
/// <remarks>
/// Copies text, scope, merge settings and parameters. Note that
/// <see cref="CommandInfo"/> and <see cref="CommandOrigin"/> are NOT
/// copied, and the MergeInstructions array is shared by reference with
/// the source — NOTE(review): confirm both are intentional.
/// </remarks>
internal Command(Command command)
{
IsScript = command.IsScript;
_useLocalScope = command._useLocalScope;
CommandText = command.CommandText;
MergeInstructions = command.MergeInstructions;
MergeMyResult = command.MergeMyResult;
MergeToResult = command.MergeToResult;
_mergeUnclaimedPreviousCommandResults = command._mergeUnclaimedPreviousCommandResults;
IsEndOfStatement = command.IsEndOfStatement;
// Parameters get fresh CommandParameter instances so the clone's list
// can be mutated independently.
foreach (CommandParameter param in command.Parameters)
{
Parameters.Add(new CommandParameter(param.Name, param.Value));
}
}
#endregion constructors
#region Properties
/// <summary>
/// Gets the set of parameters for this command.
/// </summary>
/// <remarks>
/// This property is used to add positional or named parameters to the command.
/// </remarks>
public CommandParameterCollection Parameters { get; } = new CommandParameterCollection();
/// <summary>
/// Access the command string.
/// </summary>
/// <value>The command name, if <see cref="Command.IsScript"/> is false; otherwise, the script contents.</value>
public string CommandText { get; } = string.Empty;
/// <summary>
/// Access the commandInfo.
/// </summary>
/// <value>The command info object; null when the command was built from a string.</value>
internal CommandInfo CommandInfo { get; }
/// <summary>
/// Access the value indicating if this <see cref="Command"/> represents a script.
/// </summary>
public bool IsScript { get; }
/// <summary>
/// Access the value indicating if LocalScope is to be used for running
/// this script command.
/// </summary>
/// <value>True if this command is a script and localScope is
/// used for executing the script</value>
/// <remarks>This value is always false for non-script commands.
/// An unset (null) preference reads as false here; use
/// <see cref="UseLocalScopeNullable"/> to distinguish.</remarks>
public bool UseLocalScope
{
get { return _useLocalScope ?? false; }
}
/// <summary>
/// Gets or sets the command origin for this command. A command origin
/// of 'Runspace' (the default) applies Runspace restrictions to this command.
/// A command origin of 'Internal' does not apply runspace restrictions.
/// </summary>
public CommandOrigin CommandOrigin { get; set; } = CommandOrigin.Runspace;
/// <summary>
/// Access the actual value indicating if LocalScope is to be used for running
/// this script command. Needed for serialization in remoting.
/// </summary>
internal bool? UseLocalScopeNullable
{
get { return _useLocalScope; }
}
/// <summary>
/// Checks if the current command marks the end of a statement (see PowerShell.AddStatement())
/// </summary>
public bool IsEndOfStatement { get; internal set; }
#endregion Properties
#region Methods
/// <summary>
/// Creates a new <see cref="Command"/> that is a copy of the current instance.
/// </summary>
/// <returns>A new <see cref="Command"/> that is a copy of this instance.</returns>
internal Command Clone() => new Command(this);
/// <summary>
/// For diagnostic purposes.
/// </summary>
/// <returns>The command name or script contents.</returns>
public override string ToString() => CommandText;
#endregion Methods
#region Merge
private PipelineResultTypes _mergeUnclaimedPreviousCommandResults =
PipelineResultTypes.None;
/// <summary>
/// Sets this command as the mergepoint for previous unclaimed
/// commands' results.
/// </summary>
/// <value></value>
/// <remarks>
/// Currently only supported operation is to merge
/// Output and Error.
/// </remarks>
/// <exception cref="NotSupportedException">
/// Currently only supported operation is to merge Output and Error.
/// Attempt to set the property to something other than
/// PipelineResultTypes.Error | PipelineResultTypes.Output results
/// in this exception.
/// </exception>
public PipelineResultTypes MergeUnclaimedPreviousCommandResults
{
get => _mergeUnclaimedPreviousCommandResults;
set
{
// Only "no merge" (None) or the single supported Output|Error
// combination may be stored; everything else is rejected up front.
if (value != PipelineResultTypes.None &&
value != (PipelineResultTypes.Error | PipelineResultTypes.Output))
{
throw PSTraceSource.NewNotSupportedException();
}
_mergeUnclaimedPreviousCommandResults = value;
}
}
//
// These properties are kept for backwards compatibility for V2
// over the wire, which allows merging only for Error stream.
//
internal PipelineResultTypes MergeMyResult { get; private set; } = PipelineResultTypes.None;
internal PipelineResultTypes MergeToResult { get; private set; } = PipelineResultTypes.None;
//
// For V3 we allow merging from all streams except Output.
//
// Indexes into MergeInstructions: one slot per redirectable stream.
internal enum MergeType
{
Error = 0,
Warning = 1,
Verbose = 2,
Debug = 3,
Information = 4
}
// Number of redirectable streams (size of MergeInstructions).
internal const int MaxMergeType = (int)(MergeType.Information + 1);
/// <summary>
/// Internal accessor for _mergeInstructions. It is used by serialization
/// code. Each element records the destination stream for the MergeType
/// at that index, or None when not redirected.
/// </summary>
internal PipelineResultTypes[] MergeInstructions { get; set; } = new PipelineResultTypes[MaxMergeType];
/// <summary>
/// Merges this command's results.
/// </summary>
/// <param name="myResult">
/// Pipeline stream to be redirected.
/// </param>
/// <param name="toResult">
/// Pipeline stream in to which myResult is merged
/// </param>
/// <exception cref="ArgumentException">
/// myResult parameter is not PipelineResultTypes.Error or
/// toResult parameter is not PipelineResultTypes.Output
/// </exception>
/// <remarks>
/// Passing None for both parameters clears all merge instructions.
/// Error may only be merged to Output; the other streams (and All)
/// may be merged to Output or Null.
/// </remarks>
public void MergeMyResults(PipelineResultTypes myResult, PipelineResultTypes toResult)
{
    if (myResult == PipelineResultTypes.None && toResult == PipelineResultTypes.None)
    {
        // For V2 backwards compatibility.
        MergeMyResult = myResult;
        MergeToResult = toResult;
        // Also reset every V3 per-stream merge instruction.
        for (int i = 0; i < MaxMergeType; ++i)
        {
            MergeInstructions[i] = PipelineResultTypes.None;
        }
        return;
    }
    // Validate parameters: the source stream may not be None or Output.
    if (myResult == PipelineResultTypes.None || myResult == PipelineResultTypes.Output)
    {
        throw PSTraceSource.NewArgumentException("myResult", RunspaceStrings.InvalidMyResultError);
    }
    // The Error stream may only be merged into Output.
    if (myResult == PipelineResultTypes.Error && toResult != PipelineResultTypes.Output)
    {
        throw PSTraceSource.NewArgumentException("toResult", RunspaceStrings.InvalidValueToResultError);
    }
    // All other streams may target Output or Null only.
    if (toResult != PipelineResultTypes.Output && toResult != PipelineResultTypes.Null)
    {
        throw PSTraceSource.NewArgumentException("toResult", RunspaceStrings.InvalidValueToResult);
    }
    // For V2 backwards compatibility.
    if (myResult == PipelineResultTypes.Error)
    {
        MergeMyResult = myResult;
        MergeToResult = toResult;
    }
    // Set internal merge instructions; All fans out to every stream below.
    if (myResult == PipelineResultTypes.Error || myResult == PipelineResultTypes.All)
    {
        MergeInstructions[(int)MergeType.Error] = toResult;
    }
    if (myResult == PipelineResultTypes.Warning || myResult == PipelineResultTypes.All)
    {
        MergeInstructions[(int)MergeType.Warning] = toResult;
    }
    if (myResult == PipelineResultTypes.Verbose || myResult == PipelineResultTypes.All)
    {
        MergeInstructions[(int)MergeType.Verbose] = toResult;
    }
    if (myResult == PipelineResultTypes.Debug || myResult == PipelineResultTypes.All)
    {
        MergeInstructions[(int)MergeType.Debug] = toResult;
    }
    if (myResult == PipelineResultTypes.Information || myResult == PipelineResultTypes.All)
    {
        MergeInstructions[(int)MergeType.Information] = toResult;
    }
}
/// <summary>
/// Copies this command's merge settings onto the given command processor.
/// </summary>
/// <param name="commandProcessor">Processor to configure; caller guarantees non-null.</param>
private
void
SetMergeSettingsOnCommandProcessor(CommandProcessorBase commandProcessor)
{
    Dbg.Assert(commandProcessor != null, "caller should validate the parameter");
    MshCommandRuntime mcr = commandProcessor.Command.commandRuntime as MshCommandRuntime;
    if (mcr == null)
    {
        // Fix: the original null-checked mcr only for the first assignment
        // but dereferenced it unconditionally below, which could throw
        // NullReferenceException when the command runtime is not an
        // MshCommandRuntime. No merge settings can be applied without one.
        return;
    }
    if (_mergeUnclaimedPreviousCommandResults != PipelineResultTypes.None)
    {
        // Currently only merging previous unclaimed error and output is supported.
        mcr.MergeUnclaimedPreviousErrorResults = true;
    }
    // Error merge: currently only merging error with output is supported.
    if (MergeInstructions[(int)MergeType.Error] == PipelineResultTypes.Output)
    {
        mcr.ErrorMergeTo = MshCommandRuntime.MergeDataStream.Output;
    }
    // Warning merge.
    PipelineResultTypes toType = MergeInstructions[(int)MergeType.Warning];
    if (toType != PipelineResultTypes.None)
    {
        mcr.WarningOutputPipe = GetRedirectionPipe(toType, mcr);
    }
    // Verbose merge.
    toType = MergeInstructions[(int)MergeType.Verbose];
    if (toType != PipelineResultTypes.None)
    {
        mcr.VerboseOutputPipe = GetRedirectionPipe(toType, mcr);
    }
    // Debug merge.
    toType = MergeInstructions[(int)MergeType.Debug];
    if (toType != PipelineResultTypes.None)
    {
        mcr.DebugOutputPipe = GetRedirectionPipe(toType, mcr);
    }
    // Information merge.
    toType = MergeInstructions[(int)MergeType.Information];
    if (toType != PipelineResultTypes.None)
    {
        mcr.InformationOutputPipe = GetRedirectionPipe(toType, mcr);
    }
}
/// <summary>
/// Resolves a merge destination to a concrete pipe: Output maps to the
/// runtime's output pipe; anything else is discarded via a null pipe.
/// </summary>
private Pipe GetRedirectionPipe(
    PipelineResultTypes toType,
    MshCommandRuntime mcr)
{
    if (toType == PipelineResultTypes.Output)
    {
        return mcr.OutputPipe;
    }
    var nullPipe = new Pipe { NullPipe = true };
    return nullPipe;
}
#endregion Merge
/// <summary>
/// Create a CommandProcessorBase for this Command.
/// </summary>
/// <param name="executionContext">Engine execution context to bind the processor to; must not be null.</param>
/// <param name="addToHistory">True if a script command's text should be added to history.</param>
/// <param name="origin">Origin of the command (Runspace/Internal/...); drives language-mode checks.</param>
/// <returns>A processor for either the script text or the named command.</returns>
internal
CommandProcessorBase
CreateCommandProcessor
(
    ExecutionContext executionContext,
    bool addToHistory,
    CommandOrigin origin
)
{
    Dbg.Assert(executionContext != null, "Caller should verify the parameters");
    CommandProcessorBase commandProcessorBase;
    if (IsScript)
    {
        // NoLanguage mode rejects script text that originates outside the host.
        if ((executionContext.LanguageMode == PSLanguageMode.NoLanguage) &&
            (origin == Automation.CommandOrigin.Runspace))
        {
            throw InterpreterError.NewInterpreterException(CommandText, typeof(ParseException),
                null, "ScriptsNotAllowed", ParserStrings.ScriptsNotAllowed);
        }
        ScriptBlock scriptBlock = executionContext.Engine.ParseScriptBlock(CommandText, addToHistory);
        // Internally-originated scripts run with full trust.
        if (origin == Automation.CommandOrigin.Internal)
        {
            scriptBlock.LanguageMode = PSLanguageMode.FullLanguage;
        }
        // If running in restricted language mode, verify that the parse tree represents on legitimate
        // constructions...
        switch (scriptBlock.LanguageMode)
        {
            case PSLanguageMode.RestrictedLanguage:
                scriptBlock.CheckRestrictedLanguage(null, null, false);
                break;
            case PSLanguageMode.FullLanguage:
                // Interactive script commands are permitted in this mode.
                break;
            case PSLanguageMode.ConstrainedLanguage:
                // Constrained Language is checked at runtime.
                break;
            default:
                // This should never happen...
                Diagnostics.Assert(false, "Invalid language mode was set when building a ScriptCommandProcessor");
                throw new InvalidOperationException("Invalid language mode was set when building a ScriptCommandProcessor");
        }
        // Cmdlet-binding scripts go through the full parameter binder via a FunctionInfo.
        if (scriptBlock.UsesCmdletBinding)
        {
            FunctionInfo functionInfo = new FunctionInfo(string.Empty, scriptBlock, executionContext);
            commandProcessorBase = new CommandProcessor(functionInfo, executionContext,
                _useLocalScope ?? false, fromScriptFile: false, sessionState: executionContext.EngineSessionState);
        }
        else
        {
            commandProcessorBase = new DlrScriptCommandProcessor(scriptBlock,
                executionContext, _useLocalScope ?? false,
                origin,
                executionContext.EngineSessionState);
        }
    }
    else
    {
        // RestrictedLanguage / NoLanguage do not support dot-sourcing when CommandOrigin is Runspace
        if ((_useLocalScope.HasValue) && (!_useLocalScope.Value))
        {
            switch (executionContext.LanguageMode)
            {
                case PSLanguageMode.RestrictedLanguage:
                case PSLanguageMode.NoLanguage:
                    string message = StringUtil.Format(RunspaceStrings.UseLocalScopeNotAllowed,
                        "UseLocalScope",
                        PSLanguageMode.RestrictedLanguage.ToString(),
                        PSLanguageMode.NoLanguage.ToString());
                    throw new RuntimeException(message);
                case PSLanguageMode.FullLanguage:
                    // Interactive script commands are permitted in this mode...
                    break;
            }
        }
        commandProcessorBase = executionContext.CommandDiscovery.LookupCommandProcessor(CommandText, origin, _useLocalScope);
    }
    // Forward the public parameter collection onto the processor.
    CommandParameterCollection parameters = Parameters;
    if (parameters != null)
    {
        bool isNativeCommand = commandProcessorBase is NativeCommandProcessor;
        foreach (CommandParameter publicParameter in parameters)
        {
            CommandParameterInternal internalParameter = CommandParameter.ToCommandParameterInternal(publicParameter, isNativeCommand);
            commandProcessorBase.AddParameter(internalParameter);
        }
    }
    // A help request (e.g. -?) replaces the processor with a Get-Help invocation.
    string helpTarget;
    HelpCategory helpCategory;
    if (commandProcessorBase.IsHelpRequested(out helpTarget, out helpCategory))
    {
        commandProcessorBase = CommandProcessorBase.CreateGetHelpCommandProcessor(
            executionContext,
            helpTarget,
            helpCategory);
    }
    // Set the merge settings
    SetMergeSettingsOnCommandProcessor(commandProcessorBase);
    return commandProcessorBase;
}
#region Private fields
/// <summary>
/// This is used for script commands (i.e. _isScript is true). If
/// _useLocalScope is true, script is run in LocalScope. If
/// null, it was unspecified and a suitable default is used (true
/// for non-script, false for script). Note that the public
/// property is bool, not bool? (from V1), so it should probably
/// be deprecated, at least for internal use.
/// Exposed unchanged via the internal UseLocalScopeNullable property.
/// </summary>
private bool? _useLocalScope;
#endregion Private fields
#region Serialization / deserialization for remoting
/// <summary>
/// Creates a Command object from a PSObject property bag.
/// PSObject has to be in the format returned by ToPSObjectForRemoting method.
/// </summary>
/// <param name="commandAsPSObject">PSObject to rehydrate.</param>
/// <returns>
/// Command rehydrated from a PSObject property bag
/// </returns>
/// <exception cref="ArgumentNullException">
/// Thrown if the PSObject is null.
/// </exception>
/// <exception cref="System.Management.Automation.Remoting.PSRemotingDataStructureException">
/// Thrown when the PSObject is not in the expected format
/// </exception>
internal static Command FromPSObjectForRemoting(PSObject commandAsPSObject)
{
    if (commandAsPSObject == null)
    {
        throw PSTraceSource.NewArgumentNullException("commandAsPSObject");
    }
    // Mandatory properties: command text, script flag, nullable local-scope flag.
    string commandText = RemotingDecoder.GetPropertyValue<string>(commandAsPSObject, RemoteDataNameStrings.CommandText);
    bool isScript = RemotingDecoder.GetPropertyValue<bool>(commandAsPSObject, RemoteDataNameStrings.IsScript);
    bool? useLocalScopeNullable = RemotingDecoder.GetPropertyValue<bool?>(commandAsPSObject, RemoteDataNameStrings.UseLocalScopeNullable);
    Command command = new Command(commandText, isScript, useLocalScopeNullable);
    // For V2 backwards compatibility.
    PipelineResultTypes mergeMyResult = RemotingDecoder.GetPropertyValue<PipelineResultTypes>(commandAsPSObject, RemoteDataNameStrings.MergeMyResult);
    PipelineResultTypes mergeToResult = RemotingDecoder.GetPropertyValue<PipelineResultTypes>(commandAsPSObject, RemoteDataNameStrings.MergeToResult);
    command.MergeMyResults(mergeMyResult, mergeToResult);
    command.MergeUnclaimedPreviousCommandResults = RemotingDecoder.GetPropertyValue<PipelineResultTypes>(commandAsPSObject, RemoteDataNameStrings.MergeUnclaimedPreviousCommandResults);
    // V3 merge instructions will not be returned by V2 server and this is expected.
    // Each per-stream instruction is therefore optional in the property bag.
    if (commandAsPSObject.Properties[RemoteDataNameStrings.MergeError] != null)
    {
        command.MergeInstructions[(int)MergeType.Error] = RemotingDecoder.GetPropertyValue<PipelineResultTypes>(commandAsPSObject, RemoteDataNameStrings.MergeError);
    }
    if (commandAsPSObject.Properties[RemoteDataNameStrings.MergeWarning] != null)
    {
        command.MergeInstructions[(int)MergeType.Warning] = RemotingDecoder.GetPropertyValue<PipelineResultTypes>(commandAsPSObject, RemoteDataNameStrings.MergeWarning);
    }
    if (commandAsPSObject.Properties[RemoteDataNameStrings.MergeVerbose] != null)
    {
        command.MergeInstructions[(int)MergeType.Verbose] = RemotingDecoder.GetPropertyValue<PipelineResultTypes>(commandAsPSObject, RemoteDataNameStrings.MergeVerbose);
    }
    if (commandAsPSObject.Properties[RemoteDataNameStrings.MergeDebug] != null)
    {
        command.MergeInstructions[(int)MergeType.Debug] = RemotingDecoder.GetPropertyValue<PipelineResultTypes>(commandAsPSObject, RemoteDataNameStrings.MergeDebug);
    }
    if (commandAsPSObject.Properties[RemoteDataNameStrings.MergeInformation] != null)
    {
        command.MergeInstructions[(int)MergeType.Information] = RemotingDecoder.GetPropertyValue<PipelineResultTypes>(commandAsPSObject, RemoteDataNameStrings.MergeInformation);
    }
    // Rehydrate the parameter list.
    foreach (PSObject parameterAsPSObject in RemotingDecoder.EnumerateListProperty<PSObject>(commandAsPSObject, RemoteDataNameStrings.Parameters))
    {
        command.Parameters.Add(CommandParameter.FromPSObjectForRemoting(parameterAsPSObject));
    }
    return command;
}
/// <summary>
/// Returns this object as a PSObject property bag
/// that can be used in a remoting protocol data object.
/// </summary>
/// <param name="psRPVersion">PowerShell remoting protocol version.</param>
/// <returns>This object as a PSObject property bag.</returns>
internal PSObject ToPSObjectForRemoting(Version psRPVersion)
{
    PSObject commandAsPSObject = RemotingEncoder.CreateEmptyPSObject();
    commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.CommandText, this.CommandText));
    commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.IsScript, this.IsScript));
    commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.UseLocalScopeNullable, this.UseLocalScopeNullable));
    // For V2 backwards compatibility.
    commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeMyResult, this.MergeMyResult));
    commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeToResult, this.MergeToResult));
    commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeUnclaimedPreviousCommandResults, this.MergeUnclaimedPreviousCommandResults));
    if (psRPVersion != null &&
        psRPVersion >= RemotingConstants.ProtocolVersionWin10RTM)
    {
        // V5 merge instructions: all five streams are supported on the wire.
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeError, MergeInstructions[(int)MergeType.Error]));
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeWarning, MergeInstructions[(int)MergeType.Warning]));
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeVerbose, MergeInstructions[(int)MergeType.Verbose]));
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeDebug, MergeInstructions[(int)MergeType.Debug]));
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeInformation, MergeInstructions[(int)MergeType.Information]));
    }
    else if (psRPVersion != null &&
        psRPVersion >= RemotingConstants.ProtocolVersionWin8RTM)
    {
        // V3 merge instructions: Information is not supported on the wire.
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeError, MergeInstructions[(int)MergeType.Error]));
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeWarning, MergeInstructions[(int)MergeType.Warning]));
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeVerbose, MergeInstructions[(int)MergeType.Verbose]));
        commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.MergeDebug, MergeInstructions[(int)MergeType.Debug]));
        // If they've explicitly redirected the Information stream, generate an error. Don't
        // generate an error if they've done "*", as that makes any new stream a breaking change.
        // NOTE(review): MergeInstructions defaults to an array of exactly MaxMergeType
        // entries, so "Length != MaxMergeType" is only ever true if serialization code
        // replaced the array via the internal setter — confirm this condition is intended.
        if ((MergeInstructions[(int)MergeType.Information] == PipelineResultTypes.Output) &&
            (MergeInstructions.Length != MaxMergeType))
        {
            throw new RuntimeException(
                StringUtil.Format(RunspaceStrings.InformationRedirectionNotSupported));
        }
    }
    else
    {
        // If they've explicitly redirected an unsupported stream, generate an error. Don't
        // generate an error if they've done "*", as that makes any new stream a breaking change.
        // NOTE(review): same always-false concern as above for the Length comparison.
        if (MergeInstructions.Length != MaxMergeType)
        {
            if (MergeInstructions[(int)MergeType.Warning] == PipelineResultTypes.Output)
            {
                throw new RuntimeException(
                    StringUtil.Format(RunspaceStrings.WarningRedirectionNotSupported));
            }
            if (MergeInstructions[(int)MergeType.Verbose] == PipelineResultTypes.Output)
            {
                throw new RuntimeException(
                    StringUtil.Format(RunspaceStrings.VerboseRedirectionNotSupported));
            }
            if (MergeInstructions[(int)MergeType.Debug] == PipelineResultTypes.Output)
            {
                throw new RuntimeException(
                    StringUtil.Format(RunspaceStrings.DebugRedirectionNotSupported));
            }
            if (MergeInstructions[(int)MergeType.Information] == PipelineResultTypes.Output)
            {
                throw new RuntimeException(
                    StringUtil.Format(RunspaceStrings.InformationRedirectionNotSupported));
            }
        }
    }
    // Serialize the parameter list.
    List<PSObject> parametersAsListOfPSObjects = new List<PSObject>(this.Parameters.Count);
    foreach (CommandParameter parameter in this.Parameters)
    {
        parametersAsListOfPSObjects.Add(parameter.ToPSObjectForRemoting());
    }
    commandAsPSObject.Properties.Add(new PSNoteProperty(RemoteDataNameStrings.Parameters, parametersAsListOfPSObjects));
    return commandAsPSObject;
}
#endregion
#region Win Blue Extensions
#if !CORECLR // PSMI Not Supported On CSS
/// <summary>
/// Serializes this command (text, script flag, and any parameters) into
/// a "PS_Command" CIM instance.
/// </summary>
internal CimInstance ToCimInstance()
{
    CimInstance commandInstance = InternalMISerializer.CreateCimInstance("PS_Command");
    commandInstance.CimInstanceProperties.Add(
        InternalMISerializer.CreateCimProperty("CommandText",
                                               this.CommandText,
                                               Microsoft.Management.Infrastructure.CimType.String));
    commandInstance.CimInstanceProperties.Add(
        InternalMISerializer.CreateCimProperty("IsScript",
                                               this.IsScript,
                                               Microsoft.Management.Infrastructure.CimType.Boolean));
    if (this.Parameters != null && this.Parameters.Count > 0)
    {
        List<CimInstance> parameterInstances = new List<CimInstance>();
        foreach (var parameter in this.Parameters)
        {
            parameterInstances.Add(parameter.ToCimInstance());
        }
        if (parameterInstances.Count > 0)
        {
            commandInstance.CimInstanceProperties.Add(
                InternalMISerializer.CreateCimProperty("Parameters",
                                                       parameterInstances.ToArray(),
                                                       Microsoft.Management.Infrastructure.CimType.ReferenceArray));
        }
    }
    return commandInstance;
}
#endif
#endregion Win Blue Extensions
}
/// <summary>
/// Enum defining the types of streams coming out of a pipeline.
/// </summary>
// NOTE(review): although marked [Flags], the members use implicit sequential
// values (Output=1, Error=2, Warning=3, ...), so bitwise combinations collide
// with other members (e.g. Error | Output == Warning). These values are
// serialized by ToPSObjectForRemoting, so they must not be renumbered.
[Flags]
public enum PipelineResultTypes
{
    /// <summary>
    /// Default streaming behavior.
    /// </summary>
    None,
    /// <summary>
    /// Success output.
    /// </summary>
    Output,
    /// <summary>
    /// Error output.
    /// </summary>
    Error,
    /// <summary>
    /// Warning information stream.
    /// </summary>
    Warning,
    /// <summary>
    /// Verbose information stream.
    /// </summary>
    Verbose,
    /// <summary>
    /// Debug information stream.
    /// </summary>
    Debug,
    /// <summary>
    /// Information information stream.
    /// </summary>
    Information,
    /// <summary>
    /// All streams.
    /// </summary>
    All,
    /// <summary>
    /// Redirect to nothing.
    /// </summary>
    Null
}
/// <summary>
/// Defines a collection of Commands. This collection is used by <see cref="Pipeline"/> to define
/// elements of pipeline.
/// </summary>
public sealed class CommandCollection : Collection<Command>
{
    /// <summary>
    /// Make the default constructor internal.
    /// </summary>
    internal CommandCollection()
    {
    }
    /// <summary>
    /// Adds a new command for given string.
    /// </summary>
    /// <exception cref="System.ArgumentNullException">
    /// command is null.
    /// </exception>
    public void Add(string command)
    {
        // out-default additionally claims the unmerged results of earlier commands.
        if (string.Equals(command, "out-default", StringComparison.OrdinalIgnoreCase))
        {
            this.Add(command, true);
            return;
        }
        this.Add(new Command(command));
    }
    internal void Add(string command, bool mergeUnclaimedPreviousCommandError)
        => this.Add(new Command(command, false, false, mergeUnclaimedPreviousCommandError));
    /// <summary>
    /// Adds a new script command.
    /// </summary>
    /// <param name="scriptContents">Script contents.</param>
    /// <exception cref="System.ArgumentNullException">
    /// scriptContents is null.
    /// </exception>
    public void AddScript(string scriptContents)
        => this.Add(new Command(scriptContents, true));
    /// <summary>
    /// Adds a new script command for given script.
    /// </summary>
    /// <param name="scriptContents">Script contents.</param>
    /// <param name="useLocalScope">If true local scope is used to run the script command.</param>
    /// <exception cref="System.ArgumentNullException">
    /// scriptContents is null.
    /// </exception>
    public void AddScript(string scriptContents, bool useLocalScope)
        => this.Add(new Command(scriptContents, true, useLocalScope));
    /// <summary>
    /// Gets the string representation of the command collection to be used for history.
    /// </summary>
    /// <returns>
    /// string representing the command(s)
    /// </returns>
    internal string GetCommandStringForHistory()
    {
        Diagnostics.Assert(this.Count != 0, "this is called when there is at least one element in the collection");
        // History records only the first command's text.
        return this[0].CommandText;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Text;
namespace System.IO
{
// Provides methods for processing file system strings in a cross-platform manner.
// Most of the methods don't do a complete parsing (such as examining a UNC hostname),
// but they will handle most string operations.
public static partial class Path
{
// Public static readonly variant of the separators. The Path implementation itself is using
// internal const variant of the separators for better performance.
public static readonly char DirectorySeparatorChar = PathInternal.DirectorySeparatorChar;
public static readonly char AltDirectorySeparatorChar = PathInternal.AltDirectorySeparatorChar;
public static readonly char VolumeSeparatorChar = PathInternal.VolumeSeparatorChar;
public static readonly char PathSeparator = PathInternal.PathSeparator;
// For generating random file names
// 8 random bytes provides 12 chars in our encoding for the 8.3 name.
private const int KeyLength = 8;
// Kept only for backwards compatibility; superseded by the Get* methods below.
[Obsolete("Please use GetInvalidPathChars or GetInvalidFileNameChars instead.")]
public static readonly char[] InvalidPathChars = GetInvalidPathChars();
// Changes the extension of a file path to the given extension, which may be
// supplied with or without the leading period. A null path yields null; a
// null extension removes any existing extension; a path with no extension
// simply has the new extension appended.
public static string ChangeExtension(string path, string extension)
{
    if (path == null)
        return null;
    PathInternal.CheckInvalidPathChars(path);
    // Strip any existing extension: scan right-to-left, stopping at the
    // first '.' (extension found) or separator (no extension present).
    string result = path;
    for (int i = path.Length - 1; i >= 0; i--)
    {
        char c = path[i];
        if (c == '.')
        {
            result = path.Substring(0, i);
            break;
        }
        if (PathInternal.IsDirectoryOrVolumeSeparator(c))
            break;
    }
    if (extension != null && path.Length != 0)
    {
        result = (extension.Length == 0 || extension[0] != '.')
            ? result + "." + extension
            : result + extension;
    }
    return result;
}
// Returns the directory path of a file path. This method effectively
// removes the last element of the given file path, i.e. it returns a
// string consisting of all characters up to but not including the last
// backslash ("\") in the file path. The returned value is null if the file
// path is null or if the file path denotes a root (such as "\", "C:", or
// "\\server\share").
public static string GetDirectoryName(string path)
{
    if (PathInternal.IsEffectivelyEmpty(path))
    {
        // Historical contract: null in => null out; empty/whitespace throws.
        if (path == null) return null;
        throw new ArgumentException(SR.Arg_PathEmpty, nameof(path));
    }
    PathInternal.CheckInvalidPathChars(path);
    path = PathInternal.NormalizeDirectorySeparators(path);
    // Never cut into the root portion ("C:\", "\\server\share", "/").
    int root = PathInternal.GetRootLength(path);
    int i = path.Length;
    if (i > root)
    {
        // Walk backwards to the last separator; everything before it is
        // the directory name. (The empty-statement loop decrements i.)
        while (i > root && !PathInternal.IsDirectorySeparator(path[--i])) ;
        return path.Substring(0, i);
    }
    // The path is itself a root.
    return null;
}
// Returns the extension of the given path, including the leading period
// (e.g. ".exe"). A terminal period yields string.Empty. Returns null only
// when the given path is null; a path without an extension yields
// string.Empty.
[Pure]
public static string GetExtension(string path)
{
    if (path == null)
        return null;
    PathInternal.CheckInvalidPathChars(path);
    // Scan right-to-left for a '.', stopping at the first separator.
    for (int i = path.Length - 1; i >= 0; i--)
    {
        char c = path[i];
        if (c == '.')
        {
            return i == path.Length - 1
                ? string.Empty          // terminal period: no extension text
                : path.Substring(i);
        }
        if (PathInternal.IsDirectoryOrVolumeSeparator(c))
            break;
    }
    return string.Empty;
}
// Returns the name and extension parts of the given path: the characters
// that follow the last separator. Null in => null out.
[Pure]
public static string GetFileName(string path)
{
    if (path == null)
        return null;
    int start = PathInternal.FindFileNameIndex(path);
    return path.Substring(start);
}
// Returns the file name portion of the path with its extension (final
// '.' and everything after it) removed. Null in => null out.
[Pure]
public static string GetFileNameWithoutExtension(string path)
{
    if (path == null)
        return null;
    int start = PathInternal.FindFileNameIndex(path);
    // Look for the last '.' within the file-name portion only.
    int dot = path.LastIndexOf('.', path.Length - 1, path.Length - start);
    return dot == -1
        ? path.Substring(start)             // no extension present
        : path.Substring(start, dot - start);
}
// Returns a cryptographically strong random 8.3 string that can be
// used as either a folder name or a file name.
public static unsafe string GetRandomFileName()
{
    // 8 random bytes expand to 12 base-32 characters: 8 name chars,
    // a '.', and 3 extension chars (see Populate83FileNameFromRandomBytes).
    byte* pKey = stackalloc byte[KeyLength];
    Interop.GetRandomBytes(pKey, KeyLength);
    const int RandomFileNameLength = 12;
    char* pRandomFileName = stackalloc char[RandomFileNameLength];
    Populate83FileNameFromRandomBytes(pKey, KeyLength, pRandomFileName, RandomFileNameLength);
    return new string(pRandomFileName, 0, RandomFileNameLength);
}
// Tests if a path includes a file extension: true when the characters
// after the last directory/volume separator contain a non-terminal
// period. Null or extension-less paths yield false.
[Pure]
public static bool HasExtension(string path)
{
    if (path == null)
        return false;
    PathInternal.CheckInvalidPathChars(path);
    for (int i = path.Length - 1; i >= 0; i--)
    {
        char c = path[i];
        if (c == '.')
        {
            // A terminal period does not count as an extension.
            return i != path.Length - 1;
        }
        if (PathInternal.IsDirectoryOrVolumeSeparator(c))
            break;
    }
    return false;
}
// Combines two path fragments, validating each for invalid characters.
public static string Combine(string path1, string path2)
{
    if (path1 == null)
        throw new ArgumentNullException(nameof(path1));
    if (path2 == null)
        throw new ArgumentNullException(nameof(path2));
    Contract.EndContractBlock();
    PathInternal.CheckInvalidPathChars(path1);
    PathInternal.CheckInvalidPathChars(path2);
    return CombineNoChecks(path1, path2);
}
// Combines three path fragments, validating each for invalid characters.
public static string Combine(string path1, string path2, string path3)
{
    if (path1 == null)
        throw new ArgumentNullException(nameof(path1));
    if (path2 == null)
        throw new ArgumentNullException(nameof(path2));
    if (path3 == null)
        throw new ArgumentNullException(nameof(path3));
    Contract.EndContractBlock();
    PathInternal.CheckInvalidPathChars(path1);
    PathInternal.CheckInvalidPathChars(path2);
    PathInternal.CheckInvalidPathChars(path3);
    return CombineNoChecks(path1, path2, path3);
}
// Combines four path fragments, validating each for invalid characters.
public static string Combine(string path1, string path2, string path3, string path4)
{
    if (path1 == null)
        throw new ArgumentNullException(nameof(path1));
    if (path2 == null)
        throw new ArgumentNullException(nameof(path2));
    if (path3 == null)
        throw new ArgumentNullException(nameof(path3));
    if (path4 == null)
        throw new ArgumentNullException(nameof(path4));
    Contract.EndContractBlock();
    PathInternal.CheckInvalidPathChars(path1);
    PathInternal.CheckInvalidPathChars(path2);
    PathInternal.CheckInvalidPathChars(path3);
    PathInternal.CheckInvalidPathChars(path4);
    return CombineNoChecks(path1, path2, path3, path4);
}
// Combines an arbitrary number of path fragments. A rooted fragment
// discards everything before it; empty fragments are skipped.
public static string Combine(params string[] paths)
{
    if (paths == null)
    {
        throw new ArgumentNullException(nameof(paths));
    }
    Contract.EndContractBlock();
    int finalSize = 0;
    int firstComponent = 0;
    // We have two passes, the first calculates how large a buffer to allocate and does some precondition
    // checks on the paths passed in. The second actually does the combination.
    for (int i = 0; i < paths.Length; i++)
    {
        if (paths[i] == null)
        {
            throw new ArgumentNullException(nameof(paths));
        }
        if (paths[i].Length == 0)
        {
            continue;
        }
        PathInternal.CheckInvalidPathChars(paths[i]);
        // A rooted fragment restarts the result from this index.
        if (IsPathRooted(paths[i]))
        {
            firstComponent = i;
            finalSize = paths[i].Length;
        }
        else
        {
            finalSize += paths[i].Length;
        }
        // Reserve room for a separator after this fragment if it lacks one
        // (an over-estimate for the final fragment is harmless capacity).
        char ch = paths[i][paths[i].Length - 1];
        if (!PathInternal.IsDirectoryOrVolumeSeparator(ch))
            finalSize++;
    }
    // Second pass: append from the last rooted fragment onwards, inserting
    // separators only where needed.
    StringBuilder finalPath = StringBuilderCache.Acquire(finalSize);
    for (int i = firstComponent; i < paths.Length; i++)
    {
        if (paths[i].Length == 0)
        {
            continue;
        }
        if (finalPath.Length == 0)
        {
            finalPath.Append(paths[i]);
        }
        else
        {
            char ch = finalPath[finalPath.Length - 1];
            if (!PathInternal.IsDirectoryOrVolumeSeparator(ch))
            {
                finalPath.Append(PathInternal.DirectorySeparatorChar);
            }
            finalPath.Append(paths[i]);
        }
    }
    return StringBuilderCache.GetStringAndRelease(finalPath);
}
// Joins two already-validated fragments, inserting a separator only when
// path1 does not end with one. A rooted path2 replaces path1 entirely.
private static string CombineNoChecks(string path1, string path2)
{
    if (path2.Length == 0)
        return path1;
    if (path1.Length == 0 || IsPathRooted(path2))
        return path2;
    bool endsWithSeparator =
        PathInternal.IsDirectoryOrVolumeSeparator(path1[path1.Length - 1]);
    return endsWithSeparator
        ? path1 + path2
        : path1 + PathInternal.DirectorySeparatorCharAsString + path2;
}
// Joins three already-validated fragments, inserting separators only where
// the preceding fragment lacks one. A rooted fragment discards everything
// before it; empty fragments delegate to the two-argument form.
private static string CombineNoChecks(string path1, string path2, string path3)
{
    if (path1.Length == 0)
        return CombineNoChecks(path2, path3);
    if (path2.Length == 0)
        return CombineNoChecks(path1, path3);
    if (path3.Length == 0)
        return CombineNoChecks(path1, path2);
    if (IsPathRooted(path3))
        return path3;
    if (IsPathRooted(path2))
        return CombineNoChecks(path2, path3);
    bool hasSep1 = PathInternal.IsDirectoryOrVolumeSeparator(path1[path1.Length - 1]);
    bool hasSep2 = PathInternal.IsDirectoryOrVolumeSeparator(path2[path2.Length - 1]);
    if (hasSep1 && hasSep2)
    {
        return path1 + path2 + path3;
    }
    else if (hasSep1)
    {
        return path1 + path2 + PathInternal.DirectorySeparatorCharAsString + path3;
    }
    else if (hasSep2)
    {
        return path1 + PathInternal.DirectorySeparatorCharAsString + path2 + path3;
    }
    else
    {
        // string.Concat only has string-based overloads up to four arguments; after that requires allocating
        // a params string[]. Instead, try to use a cached StringBuilder.
        StringBuilder sb = StringBuilderCache.Acquire(path1.Length + path2.Length + path3.Length + 2);
        sb.Append(path1)
          .Append(PathInternal.DirectorySeparatorChar)
          .Append(path2)
          .Append(PathInternal.DirectorySeparatorChar)
          .Append(path3);
        return StringBuilderCache.GetStringAndRelease(sb);
    }
}
// Joins four already-validated fragments with the same rules as the
// smaller overloads: empty fragments delegate down, a rooted fragment
// discards everything before it, and separators are inserted only where
// the preceding fragment lacks one.
private static string CombineNoChecks(string path1, string path2, string path3, string path4)
{
    if (path1.Length == 0)
        return CombineNoChecks(path2, path3, path4);
    if (path2.Length == 0)
        return CombineNoChecks(path1, path3, path4);
    if (path3.Length == 0)
        return CombineNoChecks(path1, path2, path4);
    if (path4.Length == 0)
        return CombineNoChecks(path1, path2, path3);
    if (IsPathRooted(path4))
        return path4;
    if (IsPathRooted(path3))
        return CombineNoChecks(path3, path4);
    if (IsPathRooted(path2))
        return CombineNoChecks(path2, path3, path4);
    bool hasSep1 = PathInternal.IsDirectoryOrVolumeSeparator(path1[path1.Length - 1]);
    bool hasSep2 = PathInternal.IsDirectoryOrVolumeSeparator(path2[path2.Length - 1]);
    bool hasSep3 = PathInternal.IsDirectoryOrVolumeSeparator(path3[path3.Length - 1]);
    if (hasSep1 && hasSep2 && hasSep3)
    {
        // Use string.Concat overload that takes four strings
        return path1 + path2 + path3 + path4;
    }
    else
    {
        // string.Concat only has string-based overloads up to four arguments; after that requires allocating
        // a params string[]. Instead, try to use a cached StringBuilder.
        StringBuilder sb = StringBuilderCache.Acquire(path1.Length + path2.Length + path3.Length + path4.Length + 3);
        sb.Append(path1);
        if (!hasSep1)
        {
            sb.Append(PathInternal.DirectorySeparatorChar);
        }
        sb.Append(path2);
        if (!hasSep2)
        {
            sb.Append(PathInternal.DirectorySeparatorChar);
        }
        sb.Append(path3);
        if (!hasSep3)
        {
            sb.Append(PathInternal.DirectorySeparatorChar);
        }
        sb.Append(path4);
        return StringBuilderCache.GetStringAndRelease(sb);
    }
}
// 32-character alphabet ('a'-'z', '0'-'5') used to encode 5 bits per
// character when building random 8.3 file names.
private static readonly char[] s_base32Char = {
        'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
        'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p',
        'q', 'r', 's', 't', 'u', 'v', 'w', 'x',
        'y', 'z', '0', '1', '2', '3', '4', '5'};
// Encodes 8 random bytes into a 12-character 8.3-style file name of the form
// "xxxxxxxx.xxx" using the base-32 alphabet above. All 40 bits of the first
// 5 bytes are consumed for the 8-character name; only the low 5 bits of each
// of the last 3 bytes are used for the extension.
private static unsafe void Populate83FileNameFromRandomBytes(byte* bytes, int byteCount, char* chars, int charCount)
{
    Debug.Assert(bytes != null);
    Debug.Assert(chars != null);
    // This method requires bytes of length 8 and chars of length 12.
    Debug.Assert(byteCount == 8, $"Unexpected {nameof(byteCount)}");
    Debug.Assert(charCount == 12, $"Unexpected {nameof(charCount)}");
    byte b0 = bytes[0];
    byte b1 = bytes[1];
    byte b2 = bytes[2];
    byte b3 = bytes[3];
    byte b4 = bytes[4];
    // Consume the 5 least significant bits of the first 5 bytes.
    chars[0] = s_base32Char[b0 & 0x1F];
    chars[1] = s_base32Char[b1 & 0x1F];
    chars[2] = s_base32Char[b2 & 0x1F];
    chars[3] = s_base32Char[b3 & 0x1F];
    chars[4] = s_base32Char[b4 & 0x1F];
    // Consume the 3 high bits of b0/b1 (mask 0xE0) combined with bits 5-6
    // of b3/b4 (mask 0x60) to form two more 5-bit indices.
    chars[5] = s_base32Char[(
            ((b0 & 0xE0) >> 5) |
            ((b3 & 0x60) >> 2))];
    chars[6] = s_base32Char[(
            ((b1 & 0xE0) >> 5) |
            ((b4 & 0x60) >> 2))];
    // Consume the 3 high bits of b2 plus the top bit (0x80) of b3 and b4,
    // reusing b2 as the accumulator for the final 5-bit index.
    b2 >>= 5;
    Debug.Assert(((b2 & 0xF8) == 0), "Unexpected set bits");
    if ((b3 & 0x80) != 0)
        b2 |= 0x08;
    if ((b4 & 0x80) != 0)
        b2 |= 0x10;
    chars[7] = s_base32Char[b2];
    // Set the file extension separator.
    chars[8] = '.';
    // Consume the 5 least significant bits of the remaining 3 bytes for the extension.
    chars[9] = s_base32Char[(bytes[5] & 0x1F)];
    chars[10] = s_base32Char[(bytes[6] & 0x1F)];
    chars[11] = s_base32Char[(bytes[7] & 0x1F)];
}
/// <summary>
/// Create a relative path from one path to another. Paths will be resolved before calculating the difference.
/// Default path comparison for the active platform will be used (OrdinalIgnoreCase for Windows or Mac, Ordinal for Unix).
/// </summary>
/// <param name="relativeTo">The source path the output should be relative to. This path is always considered to be a directory.</param>
/// <param name="path">The destination path.</param>
/// <returns>The relative path or <paramref name="path"/> if the paths don't share the same root.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="relativeTo"/> or <paramref name="path"/> is <c>null</c> or an empty string.</exception>
// Delegates to the comparison-aware overload using the platform default comparison.
public static string GetRelativePath(string relativeTo, string path) =>
    GetRelativePath(relativeTo, path, StringComparison);
// Computes the relative path from relativeTo (treated as a directory) to path
// using the given ordinal comparison. Returns path unchanged when the two do
// not share a root, and "." when they are effectively the same path.
private static string GetRelativePath(string relativeTo, string path, StringComparison comparisonType)
{
    // NOTE(review): an empty (not just null) input also raises ArgumentNullException
    // here rather than ArgumentException — preserved as-is; confirm intended.
    if (string.IsNullOrEmpty(relativeTo)) throw new ArgumentNullException(nameof(relativeTo));
    if (PathInternal.IsEffectivelyEmpty(path)) throw new ArgumentNullException(nameof(path));
    Debug.Assert(comparisonType == StringComparison.Ordinal || comparisonType == StringComparison.OrdinalIgnoreCase);
    // Normalize both inputs to full paths before comparing.
    relativeTo = GetFullPath(relativeTo);
    path = GetFullPath(path);
    // Need to check if the roots are different- if they are we need to return the "to" path.
    if (!PathInternal.AreRootsEqual(relativeTo, path, comparisonType))
        return path;
    int commonLength = PathInternal.GetCommonPathLength(relativeTo, path, ignoreCase: comparisonType == StringComparison.OrdinalIgnoreCase);
    // If there is nothing in common they can't share the same root, return the "to" path as is.
    if (commonLength == 0)
        return path;
    // Trailing separators aren't significant for comparison
    int relativeToLength = relativeTo.Length;
    if (PathInternal.EndsInDirectorySeparator(relativeTo))
        relativeToLength--;
    bool pathEndsInSeparator = PathInternal.EndsInDirectorySeparator(path);
    int pathLength = path.Length;
    if (pathEndsInSeparator)
        pathLength--;
    // If we have effectively the same path, return "."
    if (relativeToLength == pathLength && commonLength >= relativeToLength) return ".";
    // We have the same root, we need to calculate the difference now using the
    // common Length and Segment count past the length.
    //
    // Some examples:
    //
    //  C:\Foo C:\Bar L3, S1 -> ..\Bar
    //  C:\Foo C:\Foo\Bar L6, S0 -> Bar
    //  C:\Foo\Bar C:\Bar\Bar L3, S2 -> ..\..\Bar\Bar
    //  C:\Foo\Foo C:\Foo\Bar L7, S1 -> ..\Bar
    StringBuilder sb = StringBuilderCache.Acquire(Math.Max(relativeTo.Length, path.Length));
    // Add parent segments for segments past the common on the "from" path
    if (commonLength < relativeToLength)
    {
        // One ".." for the first uncommon segment, then one more per separator.
        sb.Append(PathInternal.ParentDirectoryPrefix);
        for (int i = commonLength; i < relativeToLength; i++)
        {
            if (PathInternal.IsDirectorySeparator(relativeTo[i]))
            {
                sb.Append(PathInternal.ParentDirectoryPrefix);
            }
        }
    }
    else if (PathInternal.IsDirectorySeparator(path[commonLength]))
    {
        // No parent segments and we need to eat the initial separator
        //  (C:\Foo C:\Foo\Bar case)
        commonLength++;
    }
    // Now add the rest of the "to" path, adding back the trailing separator
    int count = pathLength - commonLength;
    if (pathEndsInSeparator)
        count++;
    sb.Append(path, commonLength, count);
    return StringBuilderCache.GetStringAndRelease(sb);
}
// StringComparison and IsCaseSensitive are also available in PathInternal.CaseSensitivity but we are
// too low in System.Runtime.Extensions to use it (no FileStream, etc.)
/// <summary>Returns a comparison that can be used to compare file and directory names for equality.</summary>
internal static StringComparison StringComparison =>
    IsCaseSensitive ? StringComparison.Ordinal : StringComparison.OrdinalIgnoreCase;
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Threading;
using Azure.Core;
using Azure.Core.Pipeline;
namespace Azure.IoT.TimeSeriesInsights
{
/// <summary>
/// A client that can be used to query for events, series and aggregate series on Time Series Insights.
/// </summary>
public class TimeSeriesInsightsQueries
{
private readonly QueryRestClient _queryRestClient;
private readonly ClientDiagnostics _clientDiagnostics;
/// <summary>
/// Initializes a new instance of TimeSeriesInsightsQueries. This constructor should only be used for mocking purposes.
/// </summary>
protected TimeSeriesInsightsQueries()
{
    // Intentionally empty: this parameterless constructor exists solely so the
    // type can be subclassed/mocked in tests; no client state is initialized.
}
// Validates and captures the REST client and diagnostics pipeline this
// query client delegates to.
internal TimeSeriesInsightsQueries(QueryRestClient queryRestClient, ClientDiagnostics clientDiagnostics)
{
    Argument.AssertNotNull(queryRestClient, nameof(queryRestClient));
    _queryRestClient = queryRestClient;

    Argument.AssertNotNull(clientDiagnostics, nameof(clientDiagnostics));
    _clientDiagnostics = clientDiagnostics;
}
/// <summary>
/// Creates a query analyzer for retrieving raw events for a given Time Series Id asynchronously.
/// </summary>
/// <param name="timeSeriesId">The Time Series Id to retrieve raw events for.</param>
/// <param name="startTime">Start timestamp of the time range. Events that have this timestamp are included.</param>
/// <param name="endTime">End timestamp of the time range. Events that match this timestamp are excluded.</param>
/// <param name="options">Optional parameters to use when querying for events.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The <see cref="QueryAnalyzer"/> object that can be used to retrieve the pageable list <see cref="AsyncPageable{TimeSeriesPoint}"/>.</returns>
/// <example>
/// <code snippet="Snippet:TimeSeriesInsightsSampleQueryEvents">
/// Console.WriteLine("\n\nQuery for raw temperature events over the past 10 minutes.\n");
///
/// // Get events from the last 10 minutes
/// DateTimeOffset endTime = DateTime.UtcNow;
/// DateTimeOffset startTime = endTime.AddMinutes(-10);
///
/// QueryAnalyzer temperatureEventsQueryAnalyzer = client.Queries.CreateEventsQueryAnalyzer(tsId, startTime, endTime);
/// await foreach (TimeSeriesPoint point in temperatureEventsQueryAnalyzer.GetResultsAsync())
/// {
/// TimeSeriesValue temperatureValue = point.GetValue("Temperature");
///
/// // Figure out what is the underlying type for the time series value. Since you know your Time Series Insights
/// // environment best, you probably do not need this logic and you can skip to directly casting to the proper
/// // type. This logic demonstrates how you can figure out what type to cast to in the case where you are not
/// // too familiar with the property type.
/// if (temperatureValue.Type == typeof(double?))
/// {
/// Console.WriteLine($"{point.Timestamp} - Temperature: {(double?)temperatureValue}");
/// }
/// else if (temperatureValue.Type == typeof(int?))
/// {
/// Console.WriteLine($"{point.Timestamp} - Temperature: {(int?)temperatureValue}");
/// }
/// else
/// {
/// Console.WriteLine("The type of the Time Series value for Temperature is not numeric.");
/// }
/// }
/// </code>
/// </example>
public virtual QueryAnalyzer CreateEventsQueryAnalyzer(
    TimeSeriesId timeSeriesId,
    DateTimeOffset startTime,
    DateTimeOffset endTime,
    QueryEventsRequestOptions options = null,
    CancellationToken cancellationToken = default)
{
    // Name the diagnostics scope after this public method, consistent with the
    // CreateAggregateSeriesQueryAnalyzer overloads. The previous nameof(GetEvents)
    // referred to the request payload type, not the API being invoked.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(TimeSeriesInsightsClient)}.{nameof(CreateEventsQueryAnalyzer)}");
    scope.Start();
    try
    {
        // Events at startTime are included; events at endTime are excluded.
        var searchSpan = new DateTimeRange(startTime, endTime);
        var queryRequest = new QueryRequest
        {
            GetEvents = new GetEvents(timeSeriesId, searchSpan)
        };
        // Copy optional filter/projection/take settings into the request.
        BuildEventsRequestOptions(options, queryRequest);
        return new QueryAnalyzer(_queryRestClient, queryRequest, options?.StoreType?.ToString(), cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a query analyzer for retrieving raw events for a given Time Series Id over a specified time interval asynchronously.
/// </summary>
/// <param name="timeSeriesId">The Time Series Id to retrieve raw events for.</param>
/// <param name="timeSpan">The time interval over which to query data.</param>
/// <param name="endTime">End timestamp of the time range. Events that match this timestamp are excluded. If null is provided, <c>DateTimeOffset.UtcNow</c> is used.</param>
/// <param name="options">Optional parameters to use when querying for events.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The <see cref="QueryAnalyzer"/> object that can be used to retrieve the pageable list <see cref="AsyncPageable{TimeSeriesPoint}"/>.</returns>
/// <example>
/// <code snippet="Snippet:TimeSeriesInsightsSampleQueryEventsUsingTimeSpan">
/// Console.WriteLine("\n\nQuery for raw humidity events over the past 30 seconds.\n");
///
/// QueryAnalyzer humidityEventsQueryAnalyzer = client.Queries.CreateEventsQueryAnalyzer(tsId, TimeSpan.FromSeconds(30));
/// await foreach (TimeSeriesPoint point in humidityEventsQueryAnalyzer.GetResultsAsync())
/// {
/// TimeSeriesValue humidityValue = point.GetValue("Humidity");
///
/// // Figure out what is the underlying type for the time series value. Since you know your Time Series Insights
/// // environment best, you probably do not need this logic and you can skip to directly casting to the proper
/// // type. This logic demonstrates how you can figure out what type to cast to in the case where you are not
/// // too familiar with the property type.
/// if (humidityValue.Type == typeof(double?))
/// {
/// Console.WriteLine($"{point.Timestamp} - Humidity: {(double?)humidityValue}");
/// }
/// else if (humidityValue.Type == typeof(int?))
/// {
/// Console.WriteLine($"{point.Timestamp} - Humidity: {(int?)humidityValue}");
/// }
/// else
/// {
/// Console.WriteLine("The type of the Time Series value for Humidity is not numeric.");
/// }
/// }
/// </code>
/// </example>
public virtual QueryAnalyzer CreateEventsQueryAnalyzer(
    TimeSeriesId timeSeriesId,
    TimeSpan timeSpan,
    DateTimeOffset? endTime = null,
    QueryEventsRequestOptions options = null,
    CancellationToken cancellationToken = default)
{
    // Name the diagnostics scope after this public method, consistent with the
    // CreateAggregateSeriesQueryAnalyzer overloads. The previous nameof(GetEvents)
    // referred to the request payload type, not the API being invoked.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(TimeSeriesInsightsClient)}.{nameof(CreateEventsQueryAnalyzer)}");
    scope.Start();
    try
    {
        // Anchor the search span at endTime (defaulting to "now") and look back timeSpan.
        DateTimeOffset rangeEndTime = endTime ?? DateTimeOffset.UtcNow;
        DateTimeOffset rangeStartTime = rangeEndTime - timeSpan;
        var searchSpan = new DateTimeRange(rangeStartTime, rangeEndTime);
        var queryRequest = new QueryRequest
        {
            GetEvents = new GetEvents(timeSeriesId, searchSpan)
        };
        // Copy optional filter/projection/take settings into the request.
        BuildEventsRequestOptions(options, queryRequest);
        return new QueryAnalyzer(_queryRestClient, queryRequest, options?.StoreType?.ToString(), cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a query analyzer for retrieving series events for a given Time Series Id asynchronously.
/// </summary>
/// <param name="timeSeriesId">The Time Series Id to retrieve series events for.</param>
/// <param name="startTime">Start timestamp of the time range. Events that have this timestamp are included.</param>
/// <param name="endTime">End timestamp of the time range. Events that match this timestamp are excluded.</param>
/// <param name="options">Optional parameters to use when querying for series events.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The <see cref="QueryAnalyzer"/> object that can be used to retrieve the pageable list <see cref="AsyncPageable{TimeSeriesPoint}"/>.</returns>
/// <example>
/// <code snippet="Snippet:TimeSeriesInsightsSampleQuerySeries">
/// Console.WriteLine($"\n\nQuery for temperature series in Celsius and Fahrenheit over the past 10 minutes. " +
/// $"The Time Series instance belongs to a type that has predefined numeric variable that represents the temperature " +
/// $"in Celsius, and a predefined numeric variable that represents the temperature in Fahrenheit.\n");
///
/// DateTimeOffset endTime = DateTime.UtcNow;
/// DateTimeOffset startTime = endTime.AddMinutes(-10);
/// QueryAnalyzer seriesQueryAnalyzer = client.Queries.CreateSeriesQueryAnalyzer(
/// tsId,
/// startTime,
/// endTime);
///
/// await foreach (TimeSeriesPoint point in seriesQueryAnalyzer.GetResultsAsync())
/// {
/// double? tempInCelsius = (double?)point.GetValue(celsiusVariableName);
/// double? tempInFahrenheit = (double?)point.GetValue(fahrenheitVariableName);
///
/// Console.WriteLine($"{point.Timestamp} - Average temperature in Celsius: {tempInCelsius}. " +
/// $"Average temperature in Fahrenheit: {tempInFahrenheit}.");
/// }
/// </code>
/// </example>
public virtual QueryAnalyzer CreateSeriesQueryAnalyzer(
    TimeSeriesId timeSeriesId,
    DateTimeOffset startTime,
    DateTimeOffset endTime,
    QuerySeriesRequestOptions options = null,
    CancellationToken cancellationToken = default)
{
    // Name the diagnostics scope after this public method, consistent with the
    // CreateAggregateSeriesQueryAnalyzer overloads. The previous nameof(GetSeries)
    // referred to the request payload type, not the API being invoked.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(TimeSeriesInsightsClient)}.{nameof(CreateSeriesQueryAnalyzer)}");
    scope.Start();
    try
    {
        // Events at startTime are included; events at endTime are excluded.
        var searchSpan = new DateTimeRange(startTime, endTime);
        var queryRequest = new QueryRequest
        {
            GetSeries = new GetSeries(timeSeriesId, searchSpan)
        };
        // Copy optional filter/projection/inline-variable settings into the request.
        BuildSeriesRequestOptions(options, queryRequest);
        return new QueryAnalyzer(_queryRestClient, queryRequest, options?.StoreType?.ToString(), cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a query analyzer for retrieving series events for a given Time Series Id over a specified time interval asynchronously.
/// </summary>
/// <param name="timeSeriesId">The Time Series Id to retrieve series events for.</param>
/// <param name="timeSpan">The time interval over which to query data.</param>
/// <param name="endTime">End timestamp of the time range. Events that match this timestamp are excluded. If null is provided, <c>DateTimeOffset.UtcNow</c> is used.</param>
/// <param name="options">Optional parameters to use when querying for series events.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The <see cref="QueryAnalyzer"/> object that can be used to retrieve the pageable list <see cref="AsyncPageable{TimeSeriesPoint}"/>.</returns>
/// <example>
/// <code snippet="Snippet:TimeSeriesInsightsSampleQuerySeriesWithInlineVariables">
/// Console.WriteLine("\n\nQuery for temperature series in Celsius and Fahrenheit over the past 10 minutes.\n");
///
/// var celsiusVariable = new NumericVariable(
/// new TimeSeriesExpression("$event.Temperature"),
/// new TimeSeriesExpression("avg($value)"));
/// var fahrenheitVariable = new NumericVariable(
/// new TimeSeriesExpression("$event.Temperature * 1.8 + 32"),
/// new TimeSeriesExpression("avg($value)"));
///
/// var querySeriesRequestOptions = new QuerySeriesRequestOptions();
/// querySeriesRequestOptions.InlineVariables["TemperatureInCelsius"] = celsiusVariable;
/// querySeriesRequestOptions.InlineVariables["TemperatureInFahrenheit"] = fahrenheitVariable;
///
/// QueryAnalyzer seriesQueryAnalyzer = client.Queries.CreateSeriesQueryAnalyzer(
/// tsId,
/// TimeSpan.FromMinutes(10),
/// null,
/// querySeriesRequestOptions);
///
/// await foreach (TimeSeriesPoint point in seriesQueryAnalyzer.GetResultsAsync())
/// {
/// double? tempInCelsius = (double?)point.GetValue("TemperatureInCelsius");
/// double? tempInFahrenheit = (double?)point.GetValue("TemperatureInFahrenheit");
///
/// Console.WriteLine($"{point.Timestamp} - Average temperature in Celsius: {tempInCelsius}. Average temperature in Fahrenheit: {tempInFahrenheit}.");
/// }
/// </code>
/// </example>
public virtual QueryAnalyzer CreateSeriesQueryAnalyzer(
    TimeSeriesId timeSeriesId,
    TimeSpan timeSpan,
    DateTimeOffset? endTime = null,
    QuerySeriesRequestOptions options = null,
    CancellationToken cancellationToken = default)
{
    // Name the diagnostics scope after this public method, consistent with the
    // CreateAggregateSeriesQueryAnalyzer overloads. The previous nameof(GetSeries)
    // referred to the request payload type, not the API being invoked.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(TimeSeriesInsightsClient)}.{nameof(CreateSeriesQueryAnalyzer)}");
    scope.Start();
    try
    {
        // Anchor the search span at endTime (defaulting to "now") and look back timeSpan.
        DateTimeOffset rangeEndTime = endTime ?? DateTimeOffset.UtcNow;
        DateTimeOffset rangeStartTime = rangeEndTime - timeSpan;
        var searchSpan = new DateTimeRange(rangeStartTime, rangeEndTime);
        var queryRequest = new QueryRequest
        {
            GetSeries = new GetSeries(timeSeriesId, searchSpan)
        };
        // Copy optional filter/projection/inline-variable settings into the request.
        BuildSeriesRequestOptions(options, queryRequest);
        return new QueryAnalyzer(_queryRestClient, queryRequest, options?.StoreType?.ToString(), cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a query analyzer for retrieving aggregated time series from events for a given Time Series Id asynchronously.
/// </summary>
/// <param name="timeSeriesId">The Time Series Id to retrieve series events for.</param>
/// <param name="startTime">Start timestamp of the time range. Events that have this timestamp are included.</param>
/// <param name="endTime">End timestamp of the time range. Events that match this timestamp are excluded.</param>
/// <param name="interval">Interval size used to group events by.</param>
/// <param name="options">Optional parameters to use when querying for aggregated series events.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The <see cref="QueryAnalyzer"/> object that can be used to retrieve the pageable list <see cref="AsyncPageable{TimeSeriesPoint}"/>.</returns>
/// <example>
/// <code snippet="Snippet:TimeSeriesInsightsSampleQueryAggregateSeriesWithAggregateVariable">
/// Console.WriteLine("\n\nCount the number of temperature events over the past 3 minutes, in 1-minute time slots.\n");
///
/// // Get the count of events in 60-second time slots over the past 3 minutes
/// DateTimeOffset endTime = DateTime.UtcNow;
/// DateTimeOffset startTime = endTime.AddMinutes(-3);
///
/// var aggregateVariable = new AggregateVariable(
/// new TimeSeriesExpression("count()"));
///
/// var countVariableName = "Count";
///
/// var aggregateSeriesRequestOptions = new QueryAggregateSeriesRequestOptions();
/// aggregateSeriesRequestOptions.InlineVariables[countVariableName] = aggregateVariable;
/// aggregateSeriesRequestOptions.ProjectedVariables.Add(countVariableName);
///
/// QueryAnalyzer aggregateSeriesQueryAnalyzer = client.Queries.CreateAggregateSeriesQueryAnalyzer(
/// tsId,
/// startTime,
/// endTime,
/// TimeSpan.FromSeconds(60),
/// aggregateSeriesRequestOptions);
///
/// await foreach (TimeSeriesPoint point in aggregateSeriesQueryAnalyzer.GetResultsAsync())
/// {
/// long? temperatureCount = (long?)point.GetValue(countVariableName);
/// Console.WriteLine($"{point.Timestamp} - Temperature count: {temperatureCount}");
/// }
/// </code>
/// </example>
// Builds a QueryAnalyzer for an aggregate-series request over an explicit
// start/end range, bucketing events by the supplied interval.
public virtual QueryAnalyzer CreateAggregateSeriesQueryAnalyzer(
    TimeSeriesId timeSeriesId,
    DateTimeOffset startTime,
    DateTimeOffset endTime,
    TimeSpan interval,
    QueryAggregateSeriesRequestOptions options = null,
    CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(TimeSeriesInsightsClient)}.{nameof(CreateAggregateSeriesQueryAnalyzer)}");
    scope.Start();
    try
    {
        // Group events into fixed-size buckets of `interval` across the span.
        var span = new DateTimeRange(startTime, endTime);
        var request = new QueryRequest
        {
            AggregateSeries = new AggregateSeries(timeSeriesId, span, interval)
        };
        BuildAggregateSeriesRequestOptions(options, request);
        return new QueryAnalyzer(_queryRestClient, request, options?.StoreType?.ToString(), cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a query analyzer for retrieving aggregated time series from events for a given Time Series Id over a specified time interval asynchronously.
/// </summary>
/// <param name="timeSeriesId">The Time Series Id to retrieve series events for.</param>
/// <param name="interval">Interval size used to group events by.</param>
/// <param name="timeSpan">The time interval over which to query data.</param>
/// <param name="endTime">End timestamp of the time range. Events that match this timestamp are excluded. If null is provided, <c>DateTimeOffset.UtcNow</c> is used.</param>
/// <param name="options">Optional parameters to use when querying for aggregated series events.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The <see cref="QueryAnalyzer"/> object that can be used to retrieve the pageable list <see cref="AsyncPageable{TimeSeriesPoint}"/>.</returns>
/// <example>
/// <code snippet="Snippet:TimeSeriesInsightsSampleQueryAggregateSeriesWithNumericVariable">
/// Console.WriteLine("\n\nQuery for the average temperature over the past 30 seconds, in 2-second time slots.\n");
///
/// var numericVariable = new NumericVariable(
/// new TimeSeriesExpression("$event.Temperature"),
/// new TimeSeriesExpression("avg($value)"));
///
/// var requestOptions = new QueryAggregateSeriesRequestOptions();
/// requestOptions.InlineVariables["Temperature"] = numericVariable;
/// requestOptions.ProjectedVariables.Add("Temperature");
///
/// QueryAnalyzer queryAggregateSeriesAnalyzer = client.Queries.CreateAggregateSeriesQueryAnalyzer(
/// tsId,
/// TimeSpan.FromSeconds(2),
/// TimeSpan.FromSeconds(30),
/// null,
/// requestOptions);
///
/// await foreach (TimeSeriesPoint point in queryAggregateSeriesAnalyzer.GetResultsAsync())
/// {
/// double? averageTemperature = (double?)point.GetValue("Temperature");
/// if (averageTemperature != null)
/// {
/// Console.WriteLine($"{point.Timestamp} - Average temperature: {averageTemperature}.");
/// }
/// }
/// </code>
/// </example>
// Builds a QueryAnalyzer for an aggregate-series request over a look-back
// window ending at endTime (or "now"), bucketing events by the supplied interval.
public virtual QueryAnalyzer CreateAggregateSeriesQueryAnalyzer(
    TimeSeriesId timeSeriesId,
    TimeSpan interval,
    TimeSpan timeSpan,
    DateTimeOffset? endTime = null,
    QueryAggregateSeriesRequestOptions options = null,
    CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(TimeSeriesInsightsClient)}.{nameof(CreateAggregateSeriesQueryAnalyzer)}");
    scope.Start();
    try
    {
        // Anchor the window at endTime (defaulting to "now") and look back timeSpan.
        DateTimeOffset windowEnd = endTime ?? DateTimeOffset.UtcNow;
        DateTimeOffset windowStart = windowEnd - timeSpan;
        var span = new DateTimeRange(windowStart, windowEnd);
        var request = new QueryRequest
        {
            AggregateSeries = new AggregateSeries(timeSeriesId, span, interval)
        };
        BuildAggregateSeriesRequestOptions(options, request);
        return new QueryAnalyzer(_queryRestClient, request, options?.StoreType?.ToString(), cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
// Copies optional caller settings (filter, projected properties, take) into
// the GetEvents payload of the query request. A null options is a no-op.
private static void BuildEventsRequestOptions(QueryEventsRequestOptions options, QueryRequest queryRequest)
{
    if (options == null)
    {
        return;
    }
    if (options.Filter != null)
    {
        queryRequest.GetEvents.Filter = new TimeSeriesExpression(options.Filter);
    }
    if (options.ProjectedProperties != null)
    {
        foreach (EventProperty property in options.ProjectedProperties)
        {
            queryRequest.GetEvents.ProjectedProperties.Add(property);
        }
    }
    queryRequest.GetEvents.Take = options.MaximumNumberOfEvents;
}
// Copies optional caller settings (filter, projected variables, inline
// variables, take) into the GetSeries payload. A null options is a no-op.
private static void BuildSeriesRequestOptions(QuerySeriesRequestOptions options, QueryRequest queryRequest)
{
    if (options == null)
    {
        return;
    }
    if (options.Filter != null)
    {
        queryRequest.GetSeries.Filter = new TimeSeriesExpression(options.Filter);
    }
    if (options.ProjectedVariables != null)
    {
        foreach (string variableName in options.ProjectedVariables)
        {
            queryRequest.GetSeries.ProjectedVariables.Add(variableName);
        }
    }
    if (options.InlineVariables != null)
    {
        // Later entries overwrite earlier ones with the same key.
        foreach (var inlineVariable in options.InlineVariables)
        {
            queryRequest.GetSeries.InlineVariables[inlineVariable.Key] = inlineVariable.Value;
        }
    }
    queryRequest.GetSeries.Take = options.MaximumNumberOfEvents;
}
// Copies optional caller settings (filter, projected variables, inline
// variables) into the AggregateSeries payload. Unlike the events/series
// builders there is deliberately no Take to copy. A null options is a no-op.
private static void BuildAggregateSeriesRequestOptions(QueryAggregateSeriesRequestOptions options, QueryRequest queryRequest)
{
    if (options == null)
    {
        return;
    }
    if (options.Filter != null)
    {
        queryRequest.AggregateSeries.Filter = new TimeSeriesExpression(options.Filter);
    }
    if (options.ProjectedVariables != null)
    {
        foreach (string variableName in options.ProjectedVariables)
        {
            queryRequest.AggregateSeries.ProjectedVariables.Add(variableName);
        }
    }
    if (options.InlineVariables != null)
    {
        // Later entries overwrite earlier ones with the same key.
        foreach (var inlineVariable in options.InlineVariables)
        {
            queryRequest.AggregateSeries.InlineVariables[inlineVariable.Key] = inlineVariable.Value;
        }
    }
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.Azure;
using Microsoft.Azure.Management.BackupServices;
using Microsoft.Azure.Management.BackupServices.Models;
using Newtonsoft.Json.Linq;
namespace Microsoft.Azure.Management.BackupServices
{
/// <summary>
/// Definition of Protection Policy operations for the Azure Backup
/// extension.
/// </summary>
internal partial class CSMProtectionPolicyOperations : IServiceOperations<BackupServicesManagementClient>, ICSMProtectionPolicyOperations
{
/// <summary>
/// Initializes a new instance of the CSMProtectionPolicyOperations
/// class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal CSMProtectionPolicyOperations(BackupServicesManagementClient client)
{
    // Capture the owning management client; surfaced via the Client property.
    _client = client;
}
private BackupServicesManagementClient _client;
/// <summary>
/// Gets a reference to the
/// Microsoft.Azure.Management.BackupServices.BackupServicesManagementClient.
/// </summary>
public BackupServicesManagementClient Client
{
    // Read-only view of the client supplied at construction time.
    get { return _client; }
}
/// <summary>
/// Create new Protection Policy.
/// </summary>
/// <param name='resourceGroupName'>
/// Required.
/// </param>
/// <param name='resourceName'>
/// Required.
/// </param>
/// <param name='policyName'>
/// Required. The protection policy Name to be updated.
/// </param>
/// <param name='cSMAddProtectionPolicyRequest'>
/// Required. The protection policy creation request.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async Task<AzureOperationResponse> AddAsync(string resourceGroupName, string resourceName, string policyName, CSMAddProtectionPolicyRequest cSMAddProtectionPolicyRequest, CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (resourceName == null)
{
throw new ArgumentNullException("resourceName");
}
if (policyName == null)
{
throw new ArgumentNullException("policyName");
}
if (cSMAddProtectionPolicyRequest == null)
{
throw new ArgumentNullException("cSMAddProtectionPolicyRequest");
}
if (cSMAddProtectionPolicyRequest.Properties != null)
{
if (cSMAddProtectionPolicyRequest.Properties.BackupSchedule != null)
{
if (cSMAddProtectionPolicyRequest.Properties.BackupSchedule.BackupType == null)
{
throw new ArgumentNullException("cSMAddProtectionPolicyRequest.Properties.BackupSchedule.BackupType");
}
if (cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRun == null)
{
throw new ArgumentNullException("cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRun");
}
if (cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunTimes == null)
{
throw new ArgumentNullException("cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunTimes");
}
}
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("resourceName", resourceName);
tracingParameters.Add("policyName", policyName);
tracingParameters.Add("cSMAddProtectionPolicyRequest", cSMAddProtectionPolicyRequest);
tracingParameters.Add("customRequestHeaders", customRequestHeaders);
TracingAdapter.Enter(invocationId, this, "AddAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + "/Subscriptions/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/resourceGroups/";
url = url + Uri.EscapeDataString(resourceGroupName);
url = url + "/providers/";
url = url + "Microsoft.Backup";
url = url + "/";
url = url + "BackupVault";
url = url + "/";
url = url + Uri.EscapeDataString(resourceName);
url = url + "/protectionPolicies/";
url = url + Uri.EscapeDataString(policyName);
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2014-09-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Put;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("Accept-Language", "en-us");
httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Serialize Request
string requestContent = null;
JToken requestDoc = null;
JObject cSMAddProtectionPolicyRequestValue = new JObject();
requestDoc = cSMAddProtectionPolicyRequestValue;
if (cSMAddProtectionPolicyRequest.Properties != null)
{
JObject propertiesValue = new JObject();
cSMAddProtectionPolicyRequestValue["properties"] = propertiesValue;
if (cSMAddProtectionPolicyRequest.Properties.WorkloadType != null)
{
propertiesValue["WorkloadType"] = cSMAddProtectionPolicyRequest.Properties.WorkloadType;
}
if (cSMAddProtectionPolicyRequest.Properties.PolicyName != null)
{
propertiesValue["PolicyName"] = cSMAddProtectionPolicyRequest.Properties.PolicyName;
}
if (cSMAddProtectionPolicyRequest.Properties.BackupSchedule != null)
{
JObject backupScheduleValue = new JObject();
propertiesValue["BackupSchedule"] = backupScheduleValue;
backupScheduleValue["backupType"] = cSMAddProtectionPolicyRequest.Properties.BackupSchedule.BackupType;
backupScheduleValue["scheduleRun"] = cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRun;
if (cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunDays != null)
{
JArray scheduleRunDaysArray = new JArray();
foreach (DayOfWeek scheduleRunDaysItem in cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunDays)
{
scheduleRunDaysArray.Add(scheduleRunDaysItem.ToString());
}
backupScheduleValue["scheduleRunDays"] = scheduleRunDaysArray;
}
if (cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunTimes != null)
{
JArray scheduleRunTimesArray = new JArray();
foreach (DateTime scheduleRunTimesItem in cSMAddProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunTimes)
{
scheduleRunTimesArray.Add(scheduleRunTimesItem);
}
backupScheduleValue["scheduleRunTimes"] = scheduleRunTimesArray;
}
}
if (cSMAddProtectionPolicyRequest.Properties.RetentionPolicy != null)
{
JObject retentionPolicyValue = new JObject();
propertiesValue["RetentionPolicy"] = retentionPolicyValue;
retentionPolicyValue["retentionType"] = cSMAddProtectionPolicyRequest.Properties.RetentionPolicy.RetentionType.ToString();
retentionPolicyValue["retentionDuration"] = cSMAddProtectionPolicyRequest.Properties.RetentionPolicy.RetentionDuration;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy != null)
{
JObject lTRRetentionPolicyValue = new JObject();
propertiesValue["LTRRetentionPolicy"] = lTRRetentionPolicyValue;
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule != null)
{
JObject dailyScheduleValue = new JObject();
lTRRetentionPolicyValue["DailySchedule"] = dailyScheduleValue;
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.RetentionTimes != null)
{
JArray retentionTimesArray = new JArray();
foreach (DateTime retentionTimesItem in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.RetentionTimes)
{
retentionTimesArray.Add(retentionTimesItem);
}
dailyScheduleValue["RetentionTimes"] = retentionTimesArray;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.CSMRetentionDuration != null)
{
JObject cSMRetentionDurationValue = new JObject();
dailyScheduleValue["CSMRetentionDuration"] = cSMRetentionDurationValue;
cSMRetentionDurationValue["Count"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.CSMRetentionDuration.Count;
cSMRetentionDurationValue["DurationType"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.CSMRetentionDuration.DurationType.ToString();
}
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule != null)
{
JObject weeklyScheduleValue = new JObject();
lTRRetentionPolicyValue["WeeklySchedule"] = weeklyScheduleValue;
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.DaysOfTheWeek != null)
{
JArray daysOfTheWeekArray = new JArray();
foreach (DayOfWeek daysOfTheWeekItem in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.DaysOfTheWeek)
{
daysOfTheWeekArray.Add(daysOfTheWeekItem.ToString());
}
weeklyScheduleValue["DaysOfTheWeek"] = daysOfTheWeekArray;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.RetentionTimes != null)
{
JArray retentionTimesArray2 = new JArray();
foreach (DateTime retentionTimesItem2 in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.RetentionTimes)
{
retentionTimesArray2.Add(retentionTimesItem2);
}
weeklyScheduleValue["RetentionTimes"] = retentionTimesArray2;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.CSMRetentionDuration != null)
{
JObject cSMRetentionDurationValue2 = new JObject();
weeklyScheduleValue["CSMRetentionDuration"] = cSMRetentionDurationValue2;
cSMRetentionDurationValue2["Count"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.CSMRetentionDuration.Count;
cSMRetentionDurationValue2["DurationType"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.CSMRetentionDuration.DurationType.ToString();
}
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule != null)
{
JObject monthlyScheduleValue = new JObject();
lTRRetentionPolicyValue["MonthlySchedule"] = monthlyScheduleValue;
monthlyScheduleValue["RetentionScheduleType"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleType.ToString();
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleDaily != null)
{
JObject retentionScheduleDailyValue = new JObject();
monthlyScheduleValue["RetentionScheduleDaily"] = retentionScheduleDailyValue;
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleDaily.DaysOfTheMonth != null)
{
JArray daysOfTheMonthArray = new JArray();
foreach (Day daysOfTheMonthItem in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleDaily.DaysOfTheMonth)
{
JObject dayValue = new JObject();
daysOfTheMonthArray.Add(dayValue);
dayValue["Date"] = daysOfTheMonthItem.Date;
dayValue["IsLast"] = daysOfTheMonthItem.IsLast;
}
retentionScheduleDailyValue["DaysOfTheMonth"] = daysOfTheMonthArray;
}
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly != null)
{
JObject retentionScheduleWeeklyValue = new JObject();
monthlyScheduleValue["RetentionScheduleWeekly"] = retentionScheduleWeeklyValue;
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly.DaysOfTheWeek != null)
{
JArray daysOfTheWeekArray2 = new JArray();
foreach (DayOfWeek daysOfTheWeekItem2 in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly.DaysOfTheWeek)
{
daysOfTheWeekArray2.Add(daysOfTheWeekItem2.ToString());
}
retentionScheduleWeeklyValue["DaysOfTheWeek"] = daysOfTheWeekArray2;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly.WeeksOfTheMonth != null)
{
JArray weeksOfTheMonthArray = new JArray();
foreach (WeekNumber weeksOfTheMonthItem in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly.WeeksOfTheMonth)
{
weeksOfTheMonthArray.Add(weeksOfTheMonthItem.ToString());
}
retentionScheduleWeeklyValue["WeeksOfTheMonth"] = weeksOfTheMonthArray;
}
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionTimes != null)
{
JArray retentionTimesArray3 = new JArray();
foreach (DateTime retentionTimesItem3 in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionTimes)
{
retentionTimesArray3.Add(retentionTimesItem3);
}
monthlyScheduleValue["RetentionTimes"] = retentionTimesArray3;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.CSMRetentionDuration != null)
{
JObject cSMRetentionDurationValue3 = new JObject();
monthlyScheduleValue["CSMRetentionDuration"] = cSMRetentionDurationValue3;
cSMRetentionDurationValue3["Count"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.CSMRetentionDuration.Count;
cSMRetentionDurationValue3["DurationType"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.CSMRetentionDuration.DurationType.ToString();
}
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule != null)
{
JObject yearlyScheduleValue = new JObject();
lTRRetentionPolicyValue["YearlySchedule"] = yearlyScheduleValue;
yearlyScheduleValue["RetentionScheduleType"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleType.ToString();
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.MonthsOfYear != null)
{
JArray monthsOfYearArray = new JArray();
foreach (Month monthsOfYearItem in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.MonthsOfYear)
{
monthsOfYearArray.Add(monthsOfYearItem.ToString());
}
yearlyScheduleValue["MonthsOfYear"] = monthsOfYearArray;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleDaily != null)
{
JObject retentionScheduleDailyValue2 = new JObject();
yearlyScheduleValue["RetentionScheduleDaily"] = retentionScheduleDailyValue2;
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleDaily.DaysOfTheMonth != null)
{
JArray daysOfTheMonthArray2 = new JArray();
foreach (Day daysOfTheMonthItem2 in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleDaily.DaysOfTheMonth)
{
JObject dayValue2 = new JObject();
daysOfTheMonthArray2.Add(dayValue2);
dayValue2["Date"] = daysOfTheMonthItem2.Date;
dayValue2["IsLast"] = daysOfTheMonthItem2.IsLast;
}
retentionScheduleDailyValue2["DaysOfTheMonth"] = daysOfTheMonthArray2;
}
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly != null)
{
JObject retentionScheduleWeeklyValue2 = new JObject();
yearlyScheduleValue["RetentionScheduleWeekly"] = retentionScheduleWeeklyValue2;
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly.DaysOfTheWeek != null)
{
JArray daysOfTheWeekArray3 = new JArray();
foreach (DayOfWeek daysOfTheWeekItem3 in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly.DaysOfTheWeek)
{
daysOfTheWeekArray3.Add(daysOfTheWeekItem3.ToString());
}
retentionScheduleWeeklyValue2["DaysOfTheWeek"] = daysOfTheWeekArray3;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly.WeeksOfTheMonth != null)
{
JArray weeksOfTheMonthArray2 = new JArray();
foreach (WeekNumber weeksOfTheMonthItem2 in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly.WeeksOfTheMonth)
{
weeksOfTheMonthArray2.Add(weeksOfTheMonthItem2.ToString());
}
retentionScheduleWeeklyValue2["WeeksOfTheMonth"] = weeksOfTheMonthArray2;
}
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionTimes != null)
{
JArray retentionTimesArray4 = new JArray();
foreach (DateTime retentionTimesItem4 in cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionTimes)
{
retentionTimesArray4.Add(retentionTimesItem4);
}
yearlyScheduleValue["RetentionTimes"] = retentionTimesArray4;
}
if (cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.CSMRetentionDuration != null)
{
JObject cSMRetentionDurationValue4 = new JObject();
yearlyScheduleValue["CSMRetentionDuration"] = cSMRetentionDurationValue4;
cSMRetentionDurationValue4["Count"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.CSMRetentionDuration.Count;
cSMRetentionDurationValue4["DurationType"] = cSMAddProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.CSMRetentionDuration.DurationType.ToString();
}
}
}
}
if (cSMAddProtectionPolicyRequest.PolicyName != null)
{
cSMAddProtectionPolicyRequestValue["PolicyName"] = cSMAddProtectionPolicyRequest.PolicyName;
}
requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json");
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
AzureOperationResponse result = null;
// Deserialize Response
result = new AzureOperationResponse();
result.StatusCode = statusCode;
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Delete a Protection Policy.
/// </summary>
/// <param name='resourceGroupName'>
/// Required.
/// </param>
/// <param name='resourceName'>
/// Required.
/// </param>
/// <param name='policyName'>
/// Required. The protection policy Name to be deleted.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async Task<AzureOperationResponse> DeleteAsync(string resourceGroupName, string resourceName, string policyName, CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
{
    // Guard clauses: all three identifiers are mandatory.
    if (resourceGroupName == null)
    {
        throw new ArgumentNullException("resourceGroupName");
    }
    if (resourceName == null)
    {
        throw new ArgumentNullException("resourceName");
    }
    if (policyName == null)
    {
        throw new ArgumentNullException("policyName");
    }
    // Emit an Enter trace record when the tracing adapter is switched on.
    bool isTracing = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (isTracing)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> traceParameters = new Dictionary<string, object>
        {
            { "resourceGroupName", resourceGroupName },
            { "resourceName", resourceName },
            { "policyName", policyName },
            { "customRequestHeaders", customRequestHeaders }
        };
        TracingAdapter.Enter(invocationId, this, "DeleteAsync", traceParameters);
    }
    // Build the relative request path segment by segment, escaping each
    // caller-supplied identifier.
    string url = "/Subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url += Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url += "/resourceGroups/" + Uri.EscapeDataString(resourceGroupName);
    url += "/providers/" + "Microsoft.Backup" + "/" + "BackupVault" + "/";
    url += Uri.EscapeDataString(resourceName);
    url += "/protectionPolicies/" + Uri.EscapeDataString(policyName);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2014-09-01");
    if (queryParameters.Count > 0)
    {
        url += "?" + string.Join("&", queryParameters);
    }
    // Join base address and relative path with exactly one '/' between them.
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Build and dispatch the HTTP DELETE request.
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage
        {
            Method = HttpMethod.Delete,
            RequestUri = new Uri(url)
        };
        // Standard headers plus the caller-supplied client request id.
        httpRequest.Headers.Add("Accept-Language", "en-us");
        httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
        // Let the credentials object sign/authorize the request.
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        HttpResponseMessage httpResponse = null;
        try
        {
            if (isTracing)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (isTracing)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Only 204 NoContent is treated as success for a policy delete;
            // anything else is surfaced as a CloudException.
            if (statusCode != HttpStatusCode.NoContent)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (isTracing)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Package the status code into the standard operation response.
            AzureOperationResponse result = new AzureOperationResponse();
            result.StatusCode = statusCode;
            if (isTracing)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Dispose the response even when an exception is in flight.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        // Dispose the request even when an exception is in flight.
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Get the list of all Protection Policy.
/// </summary>
/// <param name='resourceGroupName'>
/// Required.
/// </param>
/// <param name='resourceName'>
/// Required.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The definition of a CSMProtectionPolicyListOperationResponse.
/// </returns>
public async Task<CSMProtectionPolicyListOperationResponse> ListAsync(string resourceGroupName, string resourceName, CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (resourceName == null)
{
throw new ArgumentNullException("resourceName");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("resourceName", resourceName);
tracingParameters.Add("customRequestHeaders", customRequestHeaders);
TracingAdapter.Enter(invocationId, this, "ListAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + "/Subscriptions/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId.ToString());
}
url = url + "/resourceGroups/";
url = url + Uri.EscapeDataString(resourceGroupName);
url = url + "/providers/";
url = url + "Microsoft.Backup";
url = url + "/";
url = url + "BackupVault";
url = url + "/";
url = url + Uri.EscapeDataString(resourceName);
url = url + "/protectionPolicies";
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2014-09-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("Accept-Language", "en-us");
httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
CSMProtectionPolicyListOperationResponse result = null;
// Deserialize Response
if (statusCode == HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new CSMProtectionPolicyListOperationResponse();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
CSMProtectionPolicyListResponse cSMProtectionPolicyListResponseInstance = new CSMProtectionPolicyListResponse();
result.CSMProtectionPolicyListResponse = cSMProtectionPolicyListResponseInstance;
JToken valueArray = responseDoc["value"];
if (valueArray != null && valueArray.Type != JTokenType.Null)
{
foreach (JToken valueValue in ((JArray)valueArray))
{
CSMProtectionPolicyResponse cSMProtectionPolicyResponseInstance = new CSMProtectionPolicyResponse();
cSMProtectionPolicyListResponseInstance.Value.Add(cSMProtectionPolicyResponseInstance);
JToken propertiesValue = valueValue["properties"];
if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
{
CSMProtectionPolicyProperties propertiesInstance = new CSMProtectionPolicyProperties();
cSMProtectionPolicyResponseInstance.Properties = propertiesInstance;
JToken workloadTypeValue = propertiesValue["workloadType"];
if (workloadTypeValue != null && workloadTypeValue.Type != JTokenType.Null)
{
string workloadTypeInstance = ((string)workloadTypeValue);
propertiesInstance.WorkloadType = workloadTypeInstance;
}
JToken policyNameValue = propertiesValue["policyName"];
if (policyNameValue != null && policyNameValue.Type != JTokenType.Null)
{
string policyNameInstance = ((string)policyNameValue);
propertiesInstance.PolicyName = policyNameInstance;
}
JToken backupScheduleValue = propertiesValue["backupSchedule"];
if (backupScheduleValue != null && backupScheduleValue.Type != JTokenType.Null)
{
CSMBackupSchedule backupScheduleInstance = new CSMBackupSchedule();
propertiesInstance.BackupSchedule = backupScheduleInstance;
JToken backupTypeValue = backupScheduleValue["backupType"];
if (backupTypeValue != null && backupTypeValue.Type != JTokenType.Null)
{
string backupTypeInstance = ((string)backupTypeValue);
backupScheduleInstance.BackupType = backupTypeInstance;
}
JToken scheduleRunValue = backupScheduleValue["scheduleRun"];
if (scheduleRunValue != null && scheduleRunValue.Type != JTokenType.Null)
{
string scheduleRunInstance = ((string)scheduleRunValue);
backupScheduleInstance.ScheduleRun = scheduleRunInstance;
}
JToken scheduleRunDaysArray = backupScheduleValue["scheduleRunDays"];
if (scheduleRunDaysArray != null && scheduleRunDaysArray.Type != JTokenType.Null)
{
foreach (JToken scheduleRunDaysValue in ((JArray)scheduleRunDaysArray))
{
backupScheduleInstance.ScheduleRunDays.Add(((DayOfWeek)Enum.Parse(typeof(DayOfWeek), ((string)scheduleRunDaysValue), true)));
}
}
JToken scheduleRunTimesArray = backupScheduleValue["scheduleRunTimes"];
if (scheduleRunTimesArray != null && scheduleRunTimesArray.Type != JTokenType.Null)
{
foreach (JToken scheduleRunTimesValue in ((JArray)scheduleRunTimesArray))
{
backupScheduleInstance.ScheduleRunTimes.Add(((DateTime)scheduleRunTimesValue));
}
}
}
JToken retentionPolicyValue = propertiesValue["retentionPolicy"];
if (retentionPolicyValue != null && retentionPolicyValue.Type != JTokenType.Null)
{
CSMRetentionPolicy retentionPolicyInstance = new CSMRetentionPolicy();
propertiesInstance.RetentionPolicy = retentionPolicyInstance;
JToken retentionTypeValue = retentionPolicyValue["retentionType"];
if (retentionTypeValue != null && retentionTypeValue.Type != JTokenType.Null)
{
RetentionDurationType retentionTypeInstance = ((RetentionDurationType)Enum.Parse(typeof(RetentionDurationType), ((string)retentionTypeValue), true));
retentionPolicyInstance.RetentionType = retentionTypeInstance;
}
JToken retentionDurationValue = retentionPolicyValue["retentionDuration"];
if (retentionDurationValue != null && retentionDurationValue.Type != JTokenType.Null)
{
int retentionDurationInstance = ((int)retentionDurationValue);
retentionPolicyInstance.RetentionDuration = retentionDurationInstance;
}
}
JToken ltrRetentionPolicyValue = propertiesValue["ltrRetentionPolicy"];
if (ltrRetentionPolicyValue != null && ltrRetentionPolicyValue.Type != JTokenType.Null)
{
CSMLongTermRetentionPolicy ltrRetentionPolicyInstance = new CSMLongTermRetentionPolicy();
propertiesInstance.LtrRetentionPolicy = ltrRetentionPolicyInstance;
JToken dailyScheduleValue = ltrRetentionPolicyValue["DailySchedule"];
if (dailyScheduleValue != null && dailyScheduleValue.Type != JTokenType.Null)
{
CSMDailyRetentionSchedule dailyScheduleInstance = new CSMDailyRetentionSchedule();
ltrRetentionPolicyInstance.DailySchedule = dailyScheduleInstance;
JToken retentionTimesArray = dailyScheduleValue["RetentionTimes"];
if (retentionTimesArray != null && retentionTimesArray.Type != JTokenType.Null)
{
foreach (JToken retentionTimesValue in ((JArray)retentionTimesArray))
{
dailyScheduleInstance.RetentionTimes.Add(((DateTime)retentionTimesValue));
}
}
JToken cSMRetentionDurationValue = dailyScheduleValue["CSMRetentionDuration"];
if (cSMRetentionDurationValue != null && cSMRetentionDurationValue.Type != JTokenType.Null)
{
CSMRetentionDuration cSMRetentionDurationInstance = new CSMRetentionDuration();
dailyScheduleInstance.CSMRetentionDuration = cSMRetentionDurationInstance;
JToken countValue = cSMRetentionDurationValue["Count"];
if (countValue != null && countValue.Type != JTokenType.Null)
{
int countInstance = ((int)countValue);
cSMRetentionDurationInstance.Count = countInstance;
}
JToken durationTypeValue = cSMRetentionDurationValue["DurationType"];
if (durationTypeValue != null && durationTypeValue.Type != JTokenType.Null)
{
RetentionDurationType durationTypeInstance = ((RetentionDurationType)Enum.Parse(typeof(RetentionDurationType), ((string)durationTypeValue), true));
cSMRetentionDurationInstance.DurationType = durationTypeInstance;
}
}
}
JToken weeklyScheduleValue = ltrRetentionPolicyValue["WeeklySchedule"];
if (weeklyScheduleValue != null && weeklyScheduleValue.Type != JTokenType.Null)
{
CSMWeeklyRetentionSchedule weeklyScheduleInstance = new CSMWeeklyRetentionSchedule();
ltrRetentionPolicyInstance.WeeklySchedule = weeklyScheduleInstance;
JToken daysOfTheWeekArray = weeklyScheduleValue["DaysOfTheWeek"];
if (daysOfTheWeekArray != null && daysOfTheWeekArray.Type != JTokenType.Null)
{
foreach (JToken daysOfTheWeekValue in ((JArray)daysOfTheWeekArray))
{
weeklyScheduleInstance.DaysOfTheWeek.Add(((DayOfWeek)Enum.Parse(typeof(DayOfWeek), ((string)daysOfTheWeekValue), true)));
}
}
JToken retentionTimesArray2 = weeklyScheduleValue["RetentionTimes"];
if (retentionTimesArray2 != null && retentionTimesArray2.Type != JTokenType.Null)
{
foreach (JToken retentionTimesValue2 in ((JArray)retentionTimesArray2))
{
weeklyScheduleInstance.RetentionTimes.Add(((DateTime)retentionTimesValue2));
}
}
JToken cSMRetentionDurationValue2 = weeklyScheduleValue["CSMRetentionDuration"];
if (cSMRetentionDurationValue2 != null && cSMRetentionDurationValue2.Type != JTokenType.Null)
{
CSMRetentionDuration cSMRetentionDurationInstance2 = new CSMRetentionDuration();
weeklyScheduleInstance.CSMRetentionDuration = cSMRetentionDurationInstance2;
JToken countValue2 = cSMRetentionDurationValue2["Count"];
if (countValue2 != null && countValue2.Type != JTokenType.Null)
{
int countInstance2 = ((int)countValue2);
cSMRetentionDurationInstance2.Count = countInstance2;
}
JToken durationTypeValue2 = cSMRetentionDurationValue2["DurationType"];
if (durationTypeValue2 != null && durationTypeValue2.Type != JTokenType.Null)
{
RetentionDurationType durationTypeInstance2 = ((RetentionDurationType)Enum.Parse(typeof(RetentionDurationType), ((string)durationTypeValue2), true));
cSMRetentionDurationInstance2.DurationType = durationTypeInstance2;
}
}
}
JToken monthlyScheduleValue = ltrRetentionPolicyValue["MonthlySchedule"];
if (monthlyScheduleValue != null && monthlyScheduleValue.Type != JTokenType.Null)
{
CSMMonthlyRetentionSchedule monthlyScheduleInstance = new CSMMonthlyRetentionSchedule();
ltrRetentionPolicyInstance.MonthlySchedule = monthlyScheduleInstance;
JToken retentionScheduleTypeValue = monthlyScheduleValue["RetentionScheduleType"];
if (retentionScheduleTypeValue != null && retentionScheduleTypeValue.Type != JTokenType.Null)
{
RetentionScheduleFormat retentionScheduleTypeInstance = ((RetentionScheduleFormat)Enum.Parse(typeof(RetentionScheduleFormat), ((string)retentionScheduleTypeValue), true));
monthlyScheduleInstance.RetentionScheduleType = retentionScheduleTypeInstance;
}
JToken retentionScheduleDailyValue = monthlyScheduleValue["RetentionScheduleDaily"];
if (retentionScheduleDailyValue != null && retentionScheduleDailyValue.Type != JTokenType.Null)
{
CSMDailyRetentionFormat retentionScheduleDailyInstance = new CSMDailyRetentionFormat();
monthlyScheduleInstance.RetentionScheduleDaily = retentionScheduleDailyInstance;
JToken daysOfTheMonthArray = retentionScheduleDailyValue["DaysOfTheMonth"];
if (daysOfTheMonthArray != null && daysOfTheMonthArray.Type != JTokenType.Null)
{
foreach (JToken daysOfTheMonthValue in ((JArray)daysOfTheMonthArray))
{
Day dayInstance = new Day();
retentionScheduleDailyInstance.DaysOfTheMonth.Add(dayInstance);
JToken dateValue = daysOfTheMonthValue["Date"];
if (dateValue != null && dateValue.Type != JTokenType.Null)
{
int dateInstance = ((int)dateValue);
dayInstance.Date = dateInstance;
}
JToken isLastValue = daysOfTheMonthValue["IsLast"];
if (isLastValue != null && isLastValue.Type != JTokenType.Null)
{
bool isLastInstance = ((bool)isLastValue);
dayInstance.IsLast = isLastInstance;
}
}
}
}
JToken retentionScheduleWeeklyValue = monthlyScheduleValue["RetentionScheduleWeekly"];
if (retentionScheduleWeeklyValue != null && retentionScheduleWeeklyValue.Type != JTokenType.Null)
{
CSMWeeklyRetentionFormat retentionScheduleWeeklyInstance = new CSMWeeklyRetentionFormat();
monthlyScheduleInstance.RetentionScheduleWeekly = retentionScheduleWeeklyInstance;
JToken daysOfTheWeekArray2 = retentionScheduleWeeklyValue["DaysOfTheWeek"];
if (daysOfTheWeekArray2 != null && daysOfTheWeekArray2.Type != JTokenType.Null)
{
foreach (JToken daysOfTheWeekValue2 in ((JArray)daysOfTheWeekArray2))
{
retentionScheduleWeeklyInstance.DaysOfTheWeek.Add(((DayOfWeek)Enum.Parse(typeof(DayOfWeek), ((string)daysOfTheWeekValue2), true)));
}
}
JToken weeksOfTheMonthArray = retentionScheduleWeeklyValue["WeeksOfTheMonth"];
if (weeksOfTheMonthArray != null && weeksOfTheMonthArray.Type != JTokenType.Null)
{
foreach (JToken weeksOfTheMonthValue in ((JArray)weeksOfTheMonthArray))
{
retentionScheduleWeeklyInstance.WeeksOfTheMonth.Add(((WeekNumber)Enum.Parse(typeof(WeekNumber), ((string)weeksOfTheMonthValue), true)));
}
}
}
JToken retentionTimesArray3 = monthlyScheduleValue["RetentionTimes"];
if (retentionTimesArray3 != null && retentionTimesArray3.Type != JTokenType.Null)
{
foreach (JToken retentionTimesValue3 in ((JArray)retentionTimesArray3))
{
monthlyScheduleInstance.RetentionTimes.Add(((DateTime)retentionTimesValue3));
}
}
JToken cSMRetentionDurationValue3 = monthlyScheduleValue["CSMRetentionDuration"];
if (cSMRetentionDurationValue3 != null && cSMRetentionDurationValue3.Type != JTokenType.Null)
{
CSMRetentionDuration cSMRetentionDurationInstance3 = new CSMRetentionDuration();
monthlyScheduleInstance.CSMRetentionDuration = cSMRetentionDurationInstance3;
JToken countValue3 = cSMRetentionDurationValue3["Count"];
if (countValue3 != null && countValue3.Type != JTokenType.Null)
{
int countInstance3 = ((int)countValue3);
cSMRetentionDurationInstance3.Count = countInstance3;
}
JToken durationTypeValue3 = cSMRetentionDurationValue3["DurationType"];
if (durationTypeValue3 != null && durationTypeValue3.Type != JTokenType.Null)
{
RetentionDurationType durationTypeInstance3 = ((RetentionDurationType)Enum.Parse(typeof(RetentionDurationType), ((string)durationTypeValue3), true));
cSMRetentionDurationInstance3.DurationType = durationTypeInstance3;
}
}
}
JToken yearlyScheduleValue = ltrRetentionPolicyValue["YearlySchedule"];
if (yearlyScheduleValue != null && yearlyScheduleValue.Type != JTokenType.Null)
{
CSMYearlyRetentionSchedule yearlyScheduleInstance = new CSMYearlyRetentionSchedule();
ltrRetentionPolicyInstance.YearlySchedule = yearlyScheduleInstance;
JToken retentionScheduleTypeValue2 = yearlyScheduleValue["RetentionScheduleType"];
if (retentionScheduleTypeValue2 != null && retentionScheduleTypeValue2.Type != JTokenType.Null)
{
RetentionScheduleFormat retentionScheduleTypeInstance2 = ((RetentionScheduleFormat)Enum.Parse(typeof(RetentionScheduleFormat), ((string)retentionScheduleTypeValue2), true));
yearlyScheduleInstance.RetentionScheduleType = retentionScheduleTypeInstance2;
}
JToken monthsOfYearArray = yearlyScheduleValue["MonthsOfYear"];
if (monthsOfYearArray != null && monthsOfYearArray.Type != JTokenType.Null)
{
foreach (JToken monthsOfYearValue in ((JArray)monthsOfYearArray))
{
yearlyScheduleInstance.MonthsOfYear.Add(((Month)Enum.Parse(typeof(Month), ((string)monthsOfYearValue), true)));
}
}
JToken retentionScheduleDailyValue2 = yearlyScheduleValue["RetentionScheduleDaily"];
if (retentionScheduleDailyValue2 != null && retentionScheduleDailyValue2.Type != JTokenType.Null)
{
CSMDailyRetentionFormat retentionScheduleDailyInstance2 = new CSMDailyRetentionFormat();
yearlyScheduleInstance.RetentionScheduleDaily = retentionScheduleDailyInstance2;
JToken daysOfTheMonthArray2 = retentionScheduleDailyValue2["DaysOfTheMonth"];
if (daysOfTheMonthArray2 != null && daysOfTheMonthArray2.Type != JTokenType.Null)
{
foreach (JToken daysOfTheMonthValue2 in ((JArray)daysOfTheMonthArray2))
{
Day dayInstance2 = new Day();
retentionScheduleDailyInstance2.DaysOfTheMonth.Add(dayInstance2);
JToken dateValue2 = daysOfTheMonthValue2["Date"];
if (dateValue2 != null && dateValue2.Type != JTokenType.Null)
{
int dateInstance2 = ((int)dateValue2);
dayInstance2.Date = dateInstance2;
}
JToken isLastValue2 = daysOfTheMonthValue2["IsLast"];
if (isLastValue2 != null && isLastValue2.Type != JTokenType.Null)
{
bool isLastInstance2 = ((bool)isLastValue2);
dayInstance2.IsLast = isLastInstance2;
}
}
}
}
JToken retentionScheduleWeeklyValue2 = yearlyScheduleValue["RetentionScheduleWeekly"];
if (retentionScheduleWeeklyValue2 != null && retentionScheduleWeeklyValue2.Type != JTokenType.Null)
{
CSMWeeklyRetentionFormat retentionScheduleWeeklyInstance2 = new CSMWeeklyRetentionFormat();
yearlyScheduleInstance.RetentionScheduleWeekly = retentionScheduleWeeklyInstance2;
JToken daysOfTheWeekArray3 = retentionScheduleWeeklyValue2["DaysOfTheWeek"];
if (daysOfTheWeekArray3 != null && daysOfTheWeekArray3.Type != JTokenType.Null)
{
foreach (JToken daysOfTheWeekValue3 in ((JArray)daysOfTheWeekArray3))
{
retentionScheduleWeeklyInstance2.DaysOfTheWeek.Add(((DayOfWeek)Enum.Parse(typeof(DayOfWeek), ((string)daysOfTheWeekValue3), true)));
}
}
JToken weeksOfTheMonthArray2 = retentionScheduleWeeklyValue2["WeeksOfTheMonth"];
if (weeksOfTheMonthArray2 != null && weeksOfTheMonthArray2.Type != JTokenType.Null)
{
foreach (JToken weeksOfTheMonthValue2 in ((JArray)weeksOfTheMonthArray2))
{
retentionScheduleWeeklyInstance2.WeeksOfTheMonth.Add(((WeekNumber)Enum.Parse(typeof(WeekNumber), ((string)weeksOfTheMonthValue2), true)));
}
}
}
JToken retentionTimesArray4 = yearlyScheduleValue["RetentionTimes"];
if (retentionTimesArray4 != null && retentionTimesArray4.Type != JTokenType.Null)
{
foreach (JToken retentionTimesValue4 in ((JArray)retentionTimesArray4))
{
yearlyScheduleInstance.RetentionTimes.Add(((DateTime)retentionTimesValue4));
}
}
JToken cSMRetentionDurationValue4 = yearlyScheduleValue["CSMRetentionDuration"];
if (cSMRetentionDurationValue4 != null && cSMRetentionDurationValue4.Type != JTokenType.Null)
{
CSMRetentionDuration cSMRetentionDurationInstance4 = new CSMRetentionDuration();
yearlyScheduleInstance.CSMRetentionDuration = cSMRetentionDurationInstance4;
JToken countValue4 = cSMRetentionDurationValue4["Count"];
if (countValue4 != null && countValue4.Type != JTokenType.Null)
{
int countInstance4 = ((int)countValue4);
cSMRetentionDurationInstance4.Count = countInstance4;
}
JToken durationTypeValue4 = cSMRetentionDurationValue4["DurationType"];
if (durationTypeValue4 != null && durationTypeValue4.Type != JTokenType.Null)
{
RetentionDurationType durationTypeInstance4 = ((RetentionDurationType)Enum.Parse(typeof(RetentionDurationType), ((string)durationTypeValue4), true));
cSMRetentionDurationInstance4.DurationType = durationTypeInstance4;
}
}
}
}
}
JToken idValue = valueValue["id"];
if (idValue != null && idValue.Type != JTokenType.Null)
{
string idInstance = ((string)idValue);
cSMProtectionPolicyResponseInstance.Id = idInstance;
}
JToken nameValue = valueValue["name"];
if (nameValue != null && nameValue.Type != JTokenType.Null)
{
string nameInstance = ((string)nameValue);
cSMProtectionPolicyResponseInstance.Name = nameInstance;
}
JToken typeValue = valueValue["type"];
if (typeValue != null && typeValue.Type != JTokenType.Null)
{
string typeInstance = ((string)typeValue);
cSMProtectionPolicyResponseInstance.Type = typeInstance;
}
}
}
JToken nextLinkValue = responseDoc["nextLink"];
if (nextLinkValue != null && nextLinkValue.Type != JTokenType.Null)
{
string nextLinkInstance = ((string)nextLinkValue);
cSMProtectionPolicyListResponseInstance.NextLink = nextLinkInstance;
}
JToken idValue2 = responseDoc["id"];
if (idValue2 != null && idValue2.Type != JTokenType.Null)
{
string idInstance2 = ((string)idValue2);
cSMProtectionPolicyListResponseInstance.Id = idInstance2;
}
JToken nameValue2 = responseDoc["name"];
if (nameValue2 != null && nameValue2.Type != JTokenType.Null)
{
string nameInstance2 = ((string)nameValue2);
cSMProtectionPolicyListResponseInstance.Name = nameInstance2;
}
JToken typeValue2 = responseDoc["type"];
if (typeValue2 != null && typeValue2.Type != JTokenType.Null)
{
string typeInstance2 = ((string)typeValue2);
cSMProtectionPolicyListResponseInstance.Type = typeInstance2;
}
}
}
result.StatusCode = statusCode;
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Update Protection Policy.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group that contains the backup vault.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the backup vault resource.
/// </param>
/// <param name='policyName'>
/// Required. The protection policy Name to be updated.
/// </param>
/// <param name='cSMUpdateProtectionPolicyRequest'>
/// Required. The protection policy creation request.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The definition of a Operation Response.
/// </returns>
public async Task<OperationResponse> UpdateAsync(string resourceGroupName, string resourceName, string policyName, CSMUpdateProtectionPolicyRequest cSMUpdateProtectionPolicyRequest, CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
{
    // Validate required arguments up front so callers get ArgumentNullException
    // with the offending name, not a NullReferenceException mid-serialization.
    if (resourceGroupName == null)
    {
        throw new ArgumentNullException("resourceGroupName");
    }
    if (resourceName == null)
    {
        throw new ArgumentNullException("resourceName");
    }
    if (policyName == null)
    {
        throw new ArgumentNullException("policyName");
    }
    if (cSMUpdateProtectionPolicyRequest == null)
    {
        throw new ArgumentNullException("cSMUpdateProtectionPolicyRequest");
    }
    if (cSMUpdateProtectionPolicyRequest.Properties == null)
    {
        throw new ArgumentNullException("cSMUpdateProtectionPolicyRequest.Properties");
    }
    if (cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule != null)
    {
        // BackupSchedule itself is optional, but when present its core fields are mandatory.
        if (cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.BackupType == null)
        {
            throw new ArgumentNullException("cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.BackupType");
        }
        if (cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRun == null)
        {
            throw new ArgumentNullException("cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRun");
        }
        if (cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunTimes == null)
        {
            throw new ArgumentNullException("cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunTimes");
        }
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("resourceName", resourceName);
        tracingParameters.Add("policyName", policyName);
        tracingParameters.Add("cSMUpdateProtectionPolicyRequest", cSMUpdateProtectionPolicyRequest);
        tracingParameters.Add("customRequestHeaders", customRequestHeaders);
        TracingAdapter.Enter(invocationId, this, "UpdateAsync", tracingParameters);
    }
    // Construct URL:
    // /Subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.Backup/BackupVault/{vault}/protectionPolicies/{policy}
    string url = "";
    url = url + "/Subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/resourceGroups/";
    url = url + Uri.EscapeDataString(resourceGroupName);
    url = url + "/providers/";
    url = url + "Microsoft.Backup";
    url = url + "/";
    url = url + "BackupVault";
    url = url + "/";
    url = url + Uri.EscapeDataString(resourceName);
    url = url + "/protectionPolicies/";
    url = url + Uri.EscapeDataString(policyName);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2014-09-01");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = new HttpMethod("PATCH");
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        httpRequest.Headers.Add("Accept-Language", "en-us");
        if (customRequestHeaders != null)
        {
            // customRequestHeaders is documented as optional; guard against null
            // before dereferencing ClientRequestId (previously threw NullReferenceException).
            httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
        }
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Serialize Request body as JSON. Only non-null properties are emitted.
        string requestContent = null;
        JToken requestDoc = null;
        JObject cSMUpdateProtectionPolicyRequestValue = new JObject();
        requestDoc = cSMUpdateProtectionPolicyRequestValue;
        JObject propertiesValue = new JObject();
        cSMUpdateProtectionPolicyRequestValue["properties"] = propertiesValue;
        if (cSMUpdateProtectionPolicyRequest.Properties.PolicyName != null)
        {
            propertiesValue["policyName"] = cSMUpdateProtectionPolicyRequest.Properties.PolicyName;
        }
        if (cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule != null)
        {
            JObject backupScheduleValue = new JObject();
            propertiesValue["backupSchedule"] = backupScheduleValue;
            backupScheduleValue["backupType"] = cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.BackupType;
            backupScheduleValue["scheduleRun"] = cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRun;
            if (cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunDays != null)
            {
                JArray scheduleRunDaysArray = new JArray();
                foreach (DayOfWeek scheduleRunDaysItem in cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunDays)
                {
                    // Enum values are serialized as their string names.
                    scheduleRunDaysArray.Add(scheduleRunDaysItem.ToString());
                }
                backupScheduleValue["scheduleRunDays"] = scheduleRunDaysArray;
            }
            if (cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunTimes != null)
            {
                JArray scheduleRunTimesArray = new JArray();
                foreach (DateTime scheduleRunTimesItem in cSMUpdateProtectionPolicyRequest.Properties.BackupSchedule.ScheduleRunTimes)
                {
                    scheduleRunTimesArray.Add(scheduleRunTimesItem);
                }
                backupScheduleValue["scheduleRunTimes"] = scheduleRunTimesArray;
            }
        }
        if (cSMUpdateProtectionPolicyRequest.Properties.RetentionPolicy != null)
        {
            JObject retentionPolicyValue = new JObject();
            propertiesValue["retentionPolicy"] = retentionPolicyValue;
            retentionPolicyValue["retentionType"] = cSMUpdateProtectionPolicyRequest.Properties.RetentionPolicy.RetentionType.ToString();
            retentionPolicyValue["retentionDuration"] = cSMUpdateProtectionPolicyRequest.Properties.RetentionPolicy.RetentionDuration;
        }
        if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy != null)
        {
            // Long-term retention policy: Daily/Weekly/Monthly/Yearly schedules,
            // each optional and serialized with PascalCase keys (service contract).
            JObject ltrRetentionPolicyValue = new JObject();
            propertiesValue["ltrRetentionPolicy"] = ltrRetentionPolicyValue;
            if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule != null)
            {
                JObject dailyScheduleValue = new JObject();
                ltrRetentionPolicyValue["DailySchedule"] = dailyScheduleValue;
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.RetentionTimes != null)
                {
                    JArray retentionTimesArray = new JArray();
                    foreach (DateTime retentionTimesItem in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.RetentionTimes)
                    {
                        retentionTimesArray.Add(retentionTimesItem);
                    }
                    dailyScheduleValue["RetentionTimes"] = retentionTimesArray;
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.CSMRetentionDuration != null)
                {
                    JObject cSMRetentionDurationValue = new JObject();
                    dailyScheduleValue["CSMRetentionDuration"] = cSMRetentionDurationValue;
                    cSMRetentionDurationValue["Count"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.CSMRetentionDuration.Count;
                    cSMRetentionDurationValue["DurationType"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.DailySchedule.CSMRetentionDuration.DurationType.ToString();
                }
            }
            if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule != null)
            {
                JObject weeklyScheduleValue = new JObject();
                ltrRetentionPolicyValue["WeeklySchedule"] = weeklyScheduleValue;
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.DaysOfTheWeek != null)
                {
                    JArray daysOfTheWeekArray = new JArray();
                    foreach (DayOfWeek daysOfTheWeekItem in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.DaysOfTheWeek)
                    {
                        daysOfTheWeekArray.Add(daysOfTheWeekItem.ToString());
                    }
                    weeklyScheduleValue["DaysOfTheWeek"] = daysOfTheWeekArray;
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.RetentionTimes != null)
                {
                    JArray retentionTimesArray2 = new JArray();
                    foreach (DateTime retentionTimesItem2 in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.RetentionTimes)
                    {
                        retentionTimesArray2.Add(retentionTimesItem2);
                    }
                    weeklyScheduleValue["RetentionTimes"] = retentionTimesArray2;
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.CSMRetentionDuration != null)
                {
                    JObject cSMRetentionDurationValue2 = new JObject();
                    weeklyScheduleValue["CSMRetentionDuration"] = cSMRetentionDurationValue2;
                    cSMRetentionDurationValue2["Count"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.CSMRetentionDuration.Count;
                    cSMRetentionDurationValue2["DurationType"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.WeeklySchedule.CSMRetentionDuration.DurationType.ToString();
                }
            }
            if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule != null)
            {
                JObject monthlyScheduleValue = new JObject();
                ltrRetentionPolicyValue["MonthlySchedule"] = monthlyScheduleValue;
                monthlyScheduleValue["RetentionScheduleType"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleType.ToString();
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleDaily != null)
                {
                    JObject retentionScheduleDailyValue = new JObject();
                    monthlyScheduleValue["RetentionScheduleDaily"] = retentionScheduleDailyValue;
                    if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleDaily.DaysOfTheMonth != null)
                    {
                        JArray daysOfTheMonthArray = new JArray();
                        foreach (Day daysOfTheMonthItem in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleDaily.DaysOfTheMonth)
                        {
                            JObject dayValue = new JObject();
                            daysOfTheMonthArray.Add(dayValue);
                            dayValue["Date"] = daysOfTheMonthItem.Date;
                            dayValue["IsLast"] = daysOfTheMonthItem.IsLast;
                        }
                        retentionScheduleDailyValue["DaysOfTheMonth"] = daysOfTheMonthArray;
                    }
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly != null)
                {
                    JObject retentionScheduleWeeklyValue = new JObject();
                    monthlyScheduleValue["RetentionScheduleWeekly"] = retentionScheduleWeeklyValue;
                    if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly.DaysOfTheWeek != null)
                    {
                        JArray daysOfTheWeekArray2 = new JArray();
                        foreach (DayOfWeek daysOfTheWeekItem2 in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly.DaysOfTheWeek)
                        {
                            daysOfTheWeekArray2.Add(daysOfTheWeekItem2.ToString());
                        }
                        retentionScheduleWeeklyValue["DaysOfTheWeek"] = daysOfTheWeekArray2;
                    }
                    if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly.WeeksOfTheMonth != null)
                    {
                        JArray weeksOfTheMonthArray = new JArray();
                        foreach (WeekNumber weeksOfTheMonthItem in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionScheduleWeekly.WeeksOfTheMonth)
                        {
                            weeksOfTheMonthArray.Add(weeksOfTheMonthItem.ToString());
                        }
                        retentionScheduleWeeklyValue["WeeksOfTheMonth"] = weeksOfTheMonthArray;
                    }
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionTimes != null)
                {
                    JArray retentionTimesArray3 = new JArray();
                    foreach (DateTime retentionTimesItem3 in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.RetentionTimes)
                    {
                        retentionTimesArray3.Add(retentionTimesItem3);
                    }
                    monthlyScheduleValue["RetentionTimes"] = retentionTimesArray3;
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.CSMRetentionDuration != null)
                {
                    JObject cSMRetentionDurationValue3 = new JObject();
                    monthlyScheduleValue["CSMRetentionDuration"] = cSMRetentionDurationValue3;
                    cSMRetentionDurationValue3["Count"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.CSMRetentionDuration.Count;
                    cSMRetentionDurationValue3["DurationType"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.MonthlySchedule.CSMRetentionDuration.DurationType.ToString();
                }
            }
            if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule != null)
            {
                JObject yearlyScheduleValue = new JObject();
                ltrRetentionPolicyValue["YearlySchedule"] = yearlyScheduleValue;
                yearlyScheduleValue["RetentionScheduleType"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleType.ToString();
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.MonthsOfYear != null)
                {
                    JArray monthsOfYearArray = new JArray();
                    foreach (Month monthsOfYearItem in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.MonthsOfYear)
                    {
                        monthsOfYearArray.Add(monthsOfYearItem.ToString());
                    }
                    yearlyScheduleValue["MonthsOfYear"] = monthsOfYearArray;
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleDaily != null)
                {
                    JObject retentionScheduleDailyValue2 = new JObject();
                    yearlyScheduleValue["RetentionScheduleDaily"] = retentionScheduleDailyValue2;
                    if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleDaily.DaysOfTheMonth != null)
                    {
                        JArray daysOfTheMonthArray2 = new JArray();
                        foreach (Day daysOfTheMonthItem2 in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleDaily.DaysOfTheMonth)
                        {
                            JObject dayValue2 = new JObject();
                            daysOfTheMonthArray2.Add(dayValue2);
                            dayValue2["Date"] = daysOfTheMonthItem2.Date;
                            dayValue2["IsLast"] = daysOfTheMonthItem2.IsLast;
                        }
                        retentionScheduleDailyValue2["DaysOfTheMonth"] = daysOfTheMonthArray2;
                    }
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly != null)
                {
                    JObject retentionScheduleWeeklyValue2 = new JObject();
                    yearlyScheduleValue["RetentionScheduleWeekly"] = retentionScheduleWeeklyValue2;
                    if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly.DaysOfTheWeek != null)
                    {
                        JArray daysOfTheWeekArray3 = new JArray();
                        foreach (DayOfWeek daysOfTheWeekItem3 in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly.DaysOfTheWeek)
                        {
                            daysOfTheWeekArray3.Add(daysOfTheWeekItem3.ToString());
                        }
                        retentionScheduleWeeklyValue2["DaysOfTheWeek"] = daysOfTheWeekArray3;
                    }
                    if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly.WeeksOfTheMonth != null)
                    {
                        JArray weeksOfTheMonthArray2 = new JArray();
                        foreach (WeekNumber weeksOfTheMonthItem2 in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionScheduleWeekly.WeeksOfTheMonth)
                        {
                            weeksOfTheMonthArray2.Add(weeksOfTheMonthItem2.ToString());
                        }
                        retentionScheduleWeeklyValue2["WeeksOfTheMonth"] = weeksOfTheMonthArray2;
                    }
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionTimes != null)
                {
                    JArray retentionTimesArray4 = new JArray();
                    foreach (DateTime retentionTimesItem4 in cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.RetentionTimes)
                    {
                        retentionTimesArray4.Add(retentionTimesItem4);
                    }
                    yearlyScheduleValue["RetentionTimes"] = retentionTimesArray4;
                }
                if (cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.CSMRetentionDuration != null)
                {
                    JObject cSMRetentionDurationValue4 = new JObject();
                    yearlyScheduleValue["CSMRetentionDuration"] = cSMRetentionDurationValue4;
                    cSMRetentionDurationValue4["Count"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.CSMRetentionDuration.Count;
                    cSMRetentionDurationValue4["DurationType"] = cSMUpdateProtectionPolicyRequest.Properties.LtrRetentionPolicy.YearlySchedule.CSMRetentionDuration.DurationType.ToString();
                }
            }
        }
        requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
        httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
        httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json");
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // 200 (completed) and 202 (accepted for async processing) are the only success codes.
            if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            OperationResponse result = null;
            // Deserialize Response: the body, when present, is a JSON string holding
            // the tracking operation id (a GUID).
            if (statusCode == HttpStatusCode.OK || statusCode == HttpStatusCode.Accepted)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new OperationResponse();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }
                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    Guid operationIdInstance = Guid.Parse(((string)responseDoc));
                    result.OperationId = operationIdInstance;
                }
            }
            result.StatusCode = statusCode;
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always release the response even when an exception is in flight.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        // Always release the request even when an exception is in flight.
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: Org.Apache.Http.Conn.Scheme.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 1717
namespace Org.Apache.Http.Conn.Scheme
{
/// <summary>
/// <para>The default class for creating sockets.</para><para><para> </para><simplesectsep></simplesectsep><para>Michael Becke </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/conn/scheme/PlainSocketFactory
/// </java-name>
// NOTE(review): generated Dot42 binding. The [DexImport] attributes map each member
// onto the Dex class/method named in the attribute; the C# bodies below are
// compile-time placeholders (they return defaults) and are presumably replaced by
// the imported implementation at Dex compile time — do not add logic here.
[Dot42.DexImport("org/apache/http/conn/scheme/PlainSocketFactory", AccessFlags = 49)]
public sealed partial class PlainSocketFactory : global::Org.Apache.Http.Conn.Scheme.ISocketFactory
/* scope: __dot42__ */
{
        // Constructor taking a custom host-name resolver.
        [Dot42.DexImport("<init>", "(Lorg/apache/http/conn/scheme/HostNameResolver;)V", AccessFlags = 1)]
        public PlainSocketFactory(global::Org.Apache.Http.Conn.Scheme.IHostNameResolver nameResolver) /* MethodBuilder.Create */ 
        {
        }
        // Default constructor.
        [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
        public PlainSocketFactory() /* MethodBuilder.Create */ 
        {
        }
        /// <summary>
        ///  <para>Gets the singleton instance of this class. </para>
        /// </summary>
        /// <returns>
        ///  <para>the one and only plain socket factory </para>
        /// </returns>
        /// <java-name>
        ///  getSocketFactory
        /// </java-name>
        [Dot42.DexImport("getSocketFactory", "()Lorg/apache/http/conn/scheme/PlainSocketFactory;", AccessFlags = 9)]
        public static global::Org.Apache.Http.Conn.Scheme.PlainSocketFactory GetSocketFactory() /* MethodBuilder.Create */ 
        {
            // Placeholder body only; the bound Dex method supplies the real singleton.
            return default(global::Org.Apache.Http.Conn.Scheme.PlainSocketFactory);
        }
        /// <summary>
        ///  <para>Creates a new, unconnected socket. The socket should subsequently be passed to connectSocket.</para><para></para>
        /// </summary>
        /// <returns>
        ///  <para>a new socket</para>
        /// </returns>
        /// <java-name>
        ///  createSocket
        /// </java-name>
        [Dot42.DexImport("createSocket", "()Ljava/net/Socket;", AccessFlags = 1)]
        public global::Java.Net.Socket CreateSocket() /* MethodBuilder.Create */ 
        {
            return default(global::Java.Net.Socket);
        }
        /// <summary>
        ///  <para>Connects a socket to the given host.</para><para></para>
        /// </summary>
        /// <returns>
        ///  <para>the connected socket. The returned object may be different from the <code>sock</code> argument if this factory supports a layered protocol.</para>
        /// </returns>
        /// <java-name>
        ///  connectSocket
        /// </java-name>
        [Dot42.DexImport("connectSocket", "(Ljava/net/Socket;Ljava/lang/String;ILjava/net/InetAddress;ILorg/apache/http/para" +
"ms/HttpParams;)Ljava/net/Socket;", AccessFlags = 1)]
        public global::Java.Net.Socket ConnectSocket(global::Java.Net.Socket sock, string host, int port, global::Java.Net.InetAddress localAddress, int localPort, global::Org.Apache.Http.Params.IHttpParams @params) /* MethodBuilder.Create */ 
        {
            return default(global::Java.Net.Socket);
        }
        /// <summary>
        ///  <para>Checks whether a socket connection is secure. This factory creates plain socket connections which are not considered secure.</para><para></para>
        /// </summary>
        /// <returns>
        ///  <para><code>false</code></para>
        /// </returns>
        /// <java-name>
        ///  isSecure
        /// </java-name>
        [Dot42.DexImport("isSecure", "(Ljava/net/Socket;)Z", AccessFlags = 17)]
        public bool IsSecure(global::Java.Net.Socket sock) /* MethodBuilder.Create */ 
        {
            return default(bool);
        }
        /// <summary>
        ///  <para>Compares this factory with an object. There is only one instance of this class.</para><para></para>
        /// </summary>
        /// <returns>
        ///  <para>iff the argument is this object </para>
        /// </returns>
        /// <java-name>
        ///  equals
        /// </java-name>
        [Dot42.DexImport("equals", "(Ljava/lang/Object;)Z", AccessFlags = 1)]
        public override bool Equals(object obj) /* MethodBuilder.Create */ 
        {
            return default(bool);
        }
        /// <summary>
        ///  <para>Obtains a hash code for this object. All instances of this class have the same hash code. There is only one instance of this class. </para>
        /// </summary>
        /// <java-name>
        ///  hashCode
        /// </java-name>
        [Dot42.DexImport("hashCode", "()I", AccessFlags = 1)]
        public override int GetHashCode() /* MethodBuilder.Create */ 
        {
            return default(int);
        }
        /// <summary>
        ///  <para>Gets the singleton instance of this class. </para>
        /// </summary>
        /// <returns>
        ///  <para>the one and only plain socket factory </para>
        /// </returns>
        /// <java-name>
        ///  getSocketFactory
        /// </java-name>
        public static global::Org.Apache.Http.Conn.Scheme.PlainSocketFactory SocketFactory
        {
        // Property wrapper over GetSocketFactory(); the getter carries the same Dex binding.
        [Dot42.DexImport("getSocketFactory", "()Lorg/apache/http/conn/scheme/PlainSocketFactory;", AccessFlags = 9)]
        get{ return GetSocketFactory(); }
        }
}
/// <summary>
/// <para>A factory for creating and connecting sockets. The factory encapsulates the
/// logic for establishing a socket connection. Both Object.equals() and
/// Object.hashCode() must be overridden for the correct operation of some
/// connection managers.</para>
/// </summary>
/// <java-name>
/// org/apache/http/conn/scheme/SocketFactory
/// </java-name>
[Dot42.DexImport("org/apache/http/conn/scheme/SocketFactory", AccessFlags = 1537)]
public partial interface ISocketFactory
/* scope: __dot42__ */
{
    /// <summary>
    /// <para>Creates a new, unconnected socket. The socket should subsequently be passed to connectSocket.</para>
    /// </summary>
    /// <returns>
    /// <para>a new socket</para>
    /// </returns>
    /// <java-name>
    /// createSocket
    /// </java-name>
    [Dot42.DexImport("createSocket", "()Ljava/net/Socket;", AccessFlags = 1025)]
    global::Java.Net.Socket CreateSocket() /* MethodBuilder.Create */ ;
    /// <summary>
    /// <para>Connects a socket to the given host.</para>
    /// </summary>
    /// <returns>
    /// <para>the connected socket. The returned object may be different from the <code>sock</code> argument if this factory supports a layered protocol.</para>
    /// </returns>
    /// <java-name>
    /// connectSocket
    /// </java-name>
    [Dot42.DexImport("connectSocket", "(Ljava/net/Socket;Ljava/lang/String;ILjava/net/InetAddress;ILorg/apache/http/para" +
    "ms/HttpParams;)Ljava/net/Socket;", AccessFlags = 1025)]
    global::Java.Net.Socket ConnectSocket(global::Java.Net.Socket sock, string host, int port, global::Java.Net.InetAddress localAddress, int localPort, global::Org.Apache.Http.Params.IHttpParams @params) /* MethodBuilder.Create */ ;
    /// <summary>
    /// <para>Checks whether a socket provides a secure connection. The socket must be
    /// connected by this factory. The factory will <b>not</b> perform I/O operations
    /// in this method.</para>
    /// <para>As a rule of thumb, plain sockets are not secure and TLS/SSL sockets are
    /// secure. However, there may be application specific deviations. For example, a
    /// plain socket to a host in the same intranet ("trusted zone") could be
    /// considered secure. On the other hand, a TLS/SSL socket could be considered
    /// insecure based on the cypher suite chosen for the connection.</para>
    /// </summary>
    /// <returns>
    /// <para><code>true</code> if the connection of the socket should be considered secure, or <code>false</code> if it should not</para>
    /// </returns>
    /// <java-name>
    /// isSecure
    /// </java-name>
    [Dot42.DexImport("isSecure", "(Ljava/net/Socket;)Z", AccessFlags = 1025)]
    bool IsSecure(global::Java.Net.Socket sock) /* MethodBuilder.Create */ ;
}
/// <summary>
/// <para>A set of supported protocol schemes. Schemes are identified by lowercase names.</para>
/// </summary>
/// <java-name>
/// org/apache/http/conn/scheme/SchemeRegistry
/// </java-name>
[Dot42.DexImport("org/apache/http/conn/scheme/SchemeRegistry", AccessFlags = 49)]
public sealed partial class SchemeRegistry
/* scope: __dot42__ */
{
    /// <summary>
    /// <para>Creates a new, empty scheme registry. </para>
    /// </summary>
    [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
    public SchemeRegistry() /* MethodBuilder.Create */
    {
    }
    /// <summary>
    /// <para>Obtains a scheme by name.</para>
    /// <para>NOTE(review): the generated doc for this overload ("Convenience method for
    /// getScheme(host.getSchemeName())") was copied from the HttpHost overload and does
    /// not apply to a string argument; per the Apache HttpClient javadoc,
    /// getScheme(String) fails for unregistered names whereas get(String) returns
    /// null — confirm against the javadoc for the bound version.</para>
    /// </summary>
    /// <returns>
    /// <para>the scheme for the given name, never <code>null</code></para>
    /// </returns>
    /// <java-name>
    /// getScheme
    /// </java-name>
    [Dot42.DexImport("getScheme", "(Ljava/lang/String;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
    public global::Org.Apache.Http.Conn.Scheme.Scheme GetScheme(string host) /* MethodBuilder.Create */
    {
        return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
    }
    /// <summary>
    /// <para>Obtains the scheme for a host. Convenience method for <code>getScheme(host.getSchemeName())</code>.</para>
    /// </summary>
    /// <returns>
    /// <para>the scheme for the given host, never <code>null</code></para>
    /// </returns>
    /// <java-name>
    /// getScheme
    /// </java-name>
    [Dot42.DexImport("getScheme", "(Lorg/apache/http/HttpHost;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
    public global::Org.Apache.Http.Conn.Scheme.Scheme GetScheme(global::Org.Apache.Http.HttpHost host) /* MethodBuilder.Create */
    {
        return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
    }
    /// <summary>
    /// <para>Obtains a scheme by name, if registered.</para>
    /// </summary>
    /// <returns>
    /// <para>the scheme, or <code>null</code> if there is none by this name </para>
    /// </returns>
    /// <java-name>
    /// get
    /// </java-name>
    [Dot42.DexImport("get", "(Ljava/lang/String;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
    public global::Org.Apache.Http.Conn.Scheme.Scheme Get(string name) /* MethodBuilder.Create */
    {
        return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
    }
    /// <summary>
    /// <para>Registers a scheme. The scheme can later be retrieved by its name using getScheme or get.</para>
    /// </summary>
    /// <returns>
    /// <para>the scheme previously registered with that name, or <code>null</code> if none was registered </para>
    /// </returns>
    /// <java-name>
    /// register
    /// </java-name>
    [Dot42.DexImport("register", "(Lorg/apache/http/conn/scheme/Scheme;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
    public global::Org.Apache.Http.Conn.Scheme.Scheme Register(global::Org.Apache.Http.Conn.Scheme.Scheme sch) /* MethodBuilder.Create */
    {
        return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
    }
    /// <summary>
    /// <para>Unregisters a scheme.</para>
    /// </summary>
    /// <returns>
    /// <para>the unregistered scheme, or <code>null</code> if there was none </para>
    /// </returns>
    /// <java-name>
    /// unregister
    /// </java-name>
    [Dot42.DexImport("unregister", "(Ljava/lang/String;)Lorg/apache/http/conn/scheme/Scheme;", AccessFlags = 49)]
    public global::Org.Apache.Http.Conn.Scheme.Scheme Unregister(string name) /* MethodBuilder.Create */
    {
        return default(global::Org.Apache.Http.Conn.Scheme.Scheme);
    }
    /// <summary>
    /// <para>Obtains the names of the registered schemes in their default order.</para>
    /// </summary>
    /// <returns>
    /// <para>List containing registered scheme names. </para>
    /// </returns>
    /// <java-name>
    /// getSchemeNames
    /// </java-name>
    [Dot42.DexImport("getSchemeNames", "()Ljava/util/List;", AccessFlags = 49, Signature = "()Ljava/util/List<Ljava/lang/String;>;")]
    public global::Java.Util.IList<string> GetSchemeNames() /* MethodBuilder.Create */
    {
        return default(global::Java.Util.IList<string>);
    }
    /// <summary>
    /// <para>Populates the internal collection of registered protocol schemes with the content of the map passed as a parameter.</para>
    /// </summary>
    /// <java-name>
    /// setItems
    /// </java-name>
    [Dot42.DexImport("setItems", "(Ljava/util/Map;)V", AccessFlags = 33, Signature = "(Ljava/util/Map<Ljava/lang/String;Lorg/apache/http/conn/scheme/Scheme;>;)V")]
    public void SetItems(global::Java.Util.IMap<string, global::Org.Apache.Http.Conn.Scheme.Scheme> map) /* MethodBuilder.Create */
    {
    }
    /// <summary>
    /// <para>Obtains the names of the registered schemes in their default order.
    /// Property wrapper over <see cref="GetSchemeNames"/> generated by the binding tool.</para>
    /// </summary>
    /// <returns>
    /// <para>List containing registered scheme names. </para>
    /// </returns>
    /// <java-name>
    /// getSchemeNames
    /// </java-name>
    public global::Java.Util.IList<string> SchemeNames
    {
        [Dot42.DexImport("getSchemeNames", "()Ljava/util/List;", AccessFlags = 49, Signature = "()Ljava/util/List<Ljava/lang/String;>;")]
        get{ return GetSchemeNames(); }
    }
}
/// <summary>
/// <para>A SocketFactory for layered sockets (SSL/TLS). See there for things to
/// consider when implementing a socket factory.</para>
/// </summary>
/// <java-name>
/// org/apache/http/conn/scheme/LayeredSocketFactory
/// </java-name>
[Dot42.DexImport("org/apache/http/conn/scheme/LayeredSocketFactory", AccessFlags = 1537)]
public partial interface ILayeredSocketFactory : global::Org.Apache.Http.Conn.Scheme.ISocketFactory
/* scope: __dot42__ */
{
    /// <summary>
    /// <para>Returns a socket connected to the given host that is layered over an existing socket. Used primarily for creating secure sockets through proxies.</para>
    /// </summary>
    /// <returns>
    /// <para>Socket a new socket</para>
    /// </returns>
    /// <java-name>
    /// createSocket
    /// </java-name>
    [Dot42.DexImport("createSocket", "(Ljava/net/Socket;Ljava/lang/String;IZ)Ljava/net/Socket;", AccessFlags = 1025)]
    global::Java.Net.Socket CreateSocket(global::Java.Net.Socket socket, string host, int port, bool autoClose) /* MethodBuilder.Create */ ;
}
/// <summary>
/// <para>Resolves host names to network addresses.</para>
/// <para>NOTE(review): the binding generator emitted no documentation for this type;
/// the description is inferred from the method signature — confirm against the
/// Apache HttpClient javadoc for org.apache.http.conn.scheme.HostNameResolver.</para>
/// </summary>
/// <java-name>
/// org/apache/http/conn/scheme/HostNameResolver
/// </java-name>
[Dot42.DexImport("org/apache/http/conn/scheme/HostNameResolver", AccessFlags = 1537)]
public partial interface IHostNameResolver
/* scope: __dot42__ */
{
    /// <summary>
    /// <para>Resolves the given host name to an address.</para>
    /// </summary>
    /// <java-name>
    /// resolve
    /// </java-name>
    [Dot42.DexImport("resolve", "(Ljava/lang/String;)Ljava/net/InetAddress;", AccessFlags = 1025)]
    global::Java.Net.InetAddress Resolve(string hostname) /* MethodBuilder.Create */ ;
}
/// <summary>
/// <para>Encapsulates specifics of a protocol scheme such as "http" or "https".
/// Schemes are identified by lowercase names. Supported schemes are typically
/// collected in a SchemeRegistry.</para>
/// <para>For example, to configure support for "https://" URLs, you could write
/// code like the following: </para><para><pre>
/// Scheme https = new Scheme("https", new MySecureSocketFactory(), 443);
/// SchemeRegistry.DEFAULT.register(https);
/// </pre></para>
/// </summary>
/// <java-name>
/// org/apache/http/conn/scheme/Scheme
/// </java-name>
[Dot42.DexImport("org/apache/http/conn/scheme/Scheme", AccessFlags = 49)]
public sealed partial class Scheme
/* scope: __dot42__ */
{
    /// <summary>
    /// <para>Creates a new scheme. Whether the created scheme allows for layered connections depends on the class of <code>factory</code>.</para>
    /// </summary>
    [Dot42.DexImport("<init>", "(Ljava/lang/String;Lorg/apache/http/conn/scheme/SocketFactory;I)V", AccessFlags = 1)]
    public Scheme(string name, global::Org.Apache.Http.Conn.Scheme.ISocketFactory factory, int port) /* MethodBuilder.Create */
    {
    }
    /// <summary>
    /// <para>Obtains the default port.</para>
    /// </summary>
    /// <returns>
    /// <para>the default port for this scheme </para>
    /// </returns>
    /// <java-name>
    /// getDefaultPort
    /// </java-name>
    [Dot42.DexImport("getDefaultPort", "()I", AccessFlags = 17)]
    public int GetDefaultPort() /* MethodBuilder.Create */
    {
        return default(int);
    }
    /// <summary>
    /// <para>Obtains the socket factory. If this scheme is layered, the factory implements LayeredSocketFactory.</para>
    /// </summary>
    /// <returns>
    /// <para>the socket factory for this scheme </para>
    /// </returns>
    /// <java-name>
    /// getSocketFactory
    /// </java-name>
    [Dot42.DexImport("getSocketFactory", "()Lorg/apache/http/conn/scheme/SocketFactory;", AccessFlags = 17)]
    public global::Org.Apache.Http.Conn.Scheme.ISocketFactory GetSocketFactory() /* MethodBuilder.Create */
    {
        return default(global::Org.Apache.Http.Conn.Scheme.ISocketFactory);
    }
    /// <summary>
    /// <para>Obtains the scheme name.</para>
    /// </summary>
    /// <returns>
    /// <para>the name of this scheme, in lowercase </para>
    /// </returns>
    /// <java-name>
    /// getName
    /// </java-name>
    [Dot42.DexImport("getName", "()Ljava/lang/String;", AccessFlags = 17)]
    public string GetName() /* MethodBuilder.Create */
    {
        return default(string);
    }
    /// <summary>
    /// <para>Indicates whether this scheme allows for layered connections.</para>
    /// </summary>
    /// <returns>
    /// <para><code>true</code> if layered connections are possible, <code>false</code> otherwise </para>
    /// </returns>
    /// <java-name>
    /// isLayered
    /// </java-name>
    [Dot42.DexImport("isLayered", "()Z", AccessFlags = 17)]
    public bool IsLayered() /* MethodBuilder.Create */
    {
        return default(bool);
    }
    /// <summary>
    /// <para>Resolves the correct port for this scheme. Returns the given port if it is valid, the default port otherwise.</para>
    /// </summary>
    /// <returns>
    /// <para>the given port or the defaultPort </para>
    /// </returns>
    /// <java-name>
    /// resolvePort
    /// </java-name>
    [Dot42.DexImport("resolvePort", "(I)I", AccessFlags = 17)]
    public int ResolvePort(int port) /* MethodBuilder.Create */
    {
        return default(int);
    }
    /// <summary>
    /// <para>Return a string representation of this object.</para>
    /// </summary>
    /// <returns>
    /// <para>a human-readable string description of this scheme </para>
    /// </returns>
    /// <java-name>
    /// toString
    /// </java-name>
    [Dot42.DexImport("toString", "()Ljava/lang/String;", AccessFlags = 17)]
    public override string ToString() /* MethodBuilder.Create */
    {
        return default(string);
    }
    /// <summary>
    /// <para>Compares this scheme to an object.</para>
    /// </summary>
    /// <returns>
    /// <para><code>true</code> iff the argument is equal to this scheme </para>
    /// </returns>
    /// <java-name>
    /// equals
    /// </java-name>
    [Dot42.DexImport("equals", "(Ljava/lang/Object;)Z", AccessFlags = 17)]
    public override bool Equals(object obj) /* MethodBuilder.Create */
    {
        return default(bool);
    }
    /// <summary>
    /// <para>Obtains a hash code for this scheme.</para>
    /// </summary>
    /// <returns>
    /// <para>the hash code </para>
    /// </returns>
    /// <java-name>
    /// hashCode
    /// </java-name>
    [Dot42.DexImport("hashCode", "()I", AccessFlags = 1)]
    public override int GetHashCode() /* MethodBuilder.Create */
    {
        return default(int);
    }
    // Parameterless constructor added by the binding generator (see the
    // TypeBuilder.AddDefaultConstructor marker); it is not part of the Java API
    // and is hidden from IntelliSense via EditorBrowsableState.Never.
    [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
    internal Scheme() /* TypeBuilder.AddDefaultConstructor */
    {
    }
    /// <summary>
    /// <para>Obtains the default port. Property wrapper over <see cref="GetDefaultPort"/>.</para>
    /// </summary>
    /// <returns>
    /// <para>the default port for this scheme </para>
    /// </returns>
    /// <java-name>
    /// getDefaultPort
    /// </java-name>
    public int DefaultPort
    {
        [Dot42.DexImport("getDefaultPort", "()I", AccessFlags = 17)]
        get{ return GetDefaultPort(); }
    }
    /// <summary>
    /// <para>Obtains the socket factory. If this scheme is layered, the factory
    /// implements LayeredSocketFactory. Property wrapper over <see cref="GetSocketFactory"/>.</para>
    /// </summary>
    /// <returns>
    /// <para>the socket factory for this scheme </para>
    /// </returns>
    /// <java-name>
    /// getSocketFactory
    /// </java-name>
    public global::Org.Apache.Http.Conn.Scheme.ISocketFactory SocketFactory
    {
        [Dot42.DexImport("getSocketFactory", "()Lorg/apache/http/conn/scheme/SocketFactory;", AccessFlags = 17)]
        get{ return GetSocketFactory(); }
    }
    /// <summary>
    /// <para>Obtains the scheme name. Property wrapper over <see cref="GetName"/>.</para>
    /// </summary>
    /// <returns>
    /// <para>the name of this scheme, in lowercase </para>
    /// </returns>
    /// <java-name>
    /// getName
    /// </java-name>
    public string Name
    {
        [Dot42.DexImport("getName", "()Ljava/lang/String;", AccessFlags = 17)]
        get{ return GetName(); }
    }
}
}
| |
using System;
using UnityEngine;
namespace SpeedrunTimerMod
{
public static class Utils
{
    /// <summary>
    /// Formats a duration as "[-][H:]M:SS[.ff]". Hours are omitted when zero, and
    /// minutes are only zero-padded when hours are shown.
    /// </summary>
    /// <param name="timespan">The duration to format; negative values get a leading "-".</param>
    /// <param name="decimals">Number of fractional-second digits to append (0 for none).</param>
    /// <returns>The formatted time string, e.g. "1:01.50".</returns>
    public static string FormatTime(TimeSpan timespan, int decimals = 2)
    {
        timespan = TimeSpan.FromSeconds(timespan.TotalSeconds); // rounds to nearest millisecond

        var sign = string.Empty;
        if (timespan < TimeSpan.Zero)
        {
            sign = "-";
            timespan = timespan.Negate();
        }

        // Compute the fractional part before splitting out hours/minutes/seconds so
        // that rounding overflow can be carried into the whole seconds. Previously,
        // e.g. 0.999s with decimals=2 rounded the fraction to 100 and produced the
        // malformed "0:00.100" instead of "0:01.00".
        var millisecondsStr = string.Empty;
        if (decimals > 0)
        {
            var scale = (long)Math.Pow(10, decimals);
            var totalSeconds = timespan.TotalSeconds;
            // Math.Floor instead of an int cast: safe for durations beyond int.MaxValue seconds.
            var fraction = (long)Math.Round((totalSeconds - Math.Floor(totalSeconds)) * scale);
            if (fraction >= scale)
            {
                // The fraction rounded up to a full second; carry it over.
                fraction = 0;
                timespan = TimeSpan.FromSeconds(Math.Floor(totalSeconds) + 1);
            }
            millisecondsStr = "." + fraction.ToString().PadLeft(decimals, '0');
        }

        var hours = (int)timespan.TotalHours;
        var hoursStr = hours > 0
            ? hours.ToString() + ":"
            : string.Empty;

        var minutes = timespan.Minutes;
        var minutesStr = hours == 0 && minutes < 10
            ? minutes.ToString()
            : minutes.ToString().PadLeft(2, '0');

        var secondsStr = timespan.Seconds.ToString().PadLeft(2, '0');

        return $"{sign}{hoursStr}{minutesStr}:{secondsStr}{millisecondsStr}";
    }

    /// <summary>
    /// Formats a duration given in seconds; see <see cref="FormatTime(TimeSpan, int)"/>.
    /// </summary>
    public static string FormatTime(double totalSeconds, int decimals = 2)
    {
        return FormatTime(TimeSpan.FromSeconds(totalSeconds), decimals);
    }

    /// <summary>
    /// Formats a version as "major.minor[.build]", omitting the build component when
    /// it is zero or unset (negative). Returns null when <paramref name="ver"/> is null.
    /// </summary>
    public static string FormatVersion(Version ver)
    {
        if (ver == null)
            return null;
        var build = ver.Build > 0 ? $".{ver.Build}" : "";
        return $"{ver.Major}.{ver.Minor}{build}";
    }

    /// <summary>Stores a bool in PlayerPrefs as 1 (true) or 0 (false).</summary>
    public static void PlayerPrefsSetBool(string key, bool value)
    {
        PlayerPrefs.SetInt(key, value ? 1 : 0);
    }

    /// <summary>
    /// Reads a bool stored by <see cref="PlayerPrefsSetBool"/>, returning
    /// <paramref name="def"/> when the key is absent.
    /// </summary>
    public static bool PlayerPrefsGetBool(string key, bool def = default(bool))
    {
        // PlayerPrefs.GetInt already falls back to the supplied default when the key
        // does not exist, so the previous HasKey pre-check was a redundant lookup.
        return PlayerPrefs.GetInt(key, def ? 1 : 0) != 0;
    }

    /// <summary>
    /// Converts an HSL color (components in [0,1]) plus alpha to an RGBA Color.
    /// </summary>
    // https://stackoverflow.com/questions/2353211/hsl-to-rgb-color-conversion
    public static Color HslToRgba(float h, float s, float l, float a)
    {
        float r, g, b;

        if (s == 0.0f)
        {
            r = g = b = l; // zero saturation: achromatic (gray)
        }
        else
        {
            var q = l < 0.5f ? l * (1.0f + s) : l + s - l * s;
            var p = 2.0f * l - q;
            r = HueToRgb(p, q, h + 1.0f / 3.0f);
            g = HueToRgb(p, q, h);
            b = HueToRgb(p, q, h - 1.0f / 3.0f);
        }

        return new Color(r, g, b, a);
    }

    /// <summary>
    /// Helper for <see cref="HslToRgba"/>: maps one hue-shifted channel to its RGB value.
    /// </summary>
    static float HueToRgb(float p, float q, float t)
    {
        // Wrap t into [0,1] before sampling the piecewise hue ramp.
        if (t < 0.0f) t += 1.0f;
        if (t > 1.0f) t -= 1.0f;
        if (t < 1.0f / 6.0f) return p + (q - p) * 6.0f * t;
        if (t < 1.0f / 2.0f) return q;
        if (t < 2.0f / 3.0f) return p + (q - p) * (2.0f / 3.0f - t) * 6.0f;
        return p;
    }

    /// <summary>
    /// Returns the four quarter-note positions within a 16-step bar that share the
    /// given beat's phase: the beat itself plus offsets of 4, 8 and 12, wrapped at 16.
    /// </summary>
    public static int[] GetSimilarQuarterBeats(int beatIndex)
    {
        var beats = new int[4];
        var beat = beatIndex;
        for (var i = 0; i < 4; i++)
        {
            beats[i] = beat;
            beat += 4;
            if (beat > 15)
                beat -= 16; // wrap around the 16-step bar
        }
        return beats;
    }

    /// <summary>
    /// An IMGUI text label with optional outline and an optional auto-hide timer.
    /// </summary>
    internal class Label
    {
        /// <summary>Current screen rectangle, re-evaluated from the delegate on each access.</summary>
        public Rect Position => positionDelegate();

        /// <summary>
        /// Whether the label is drawn. Enabling a disabled label also resets the
        /// auto-hide timer.
        /// </summary>
        public bool Enabled
        {
            get { return enabled; }
            set
            {
                if (value)
                    Enable();
                else
                    Disable();
            }
        }

        public int fontSize;
        public Func<Rect> positionDelegate;
        public GUIStyle style;
        public string text;
        public bool enableOutline;
        public Color outlineColor;
        // Seconds to keep the label visible; <= 0 means show indefinitely.
        public float displayTime = -1;

        bool enabled = true;
        float timer; // seconds accumulated since the label was (re-)enabled

        /// <summary>Flips the enabled state (resetting the timer when turning on).</summary>
        public void Toggle()
        {
            Enabled = !enabled;
        }

        /// <summary>Restarts the auto-hide countdown.</summary>
        public void ResetTimer()
        {
            timer = 0;
        }

        /// <summary>
        /// Per-frame IMGUI hook: optionally replaces the text, rescales the font,
        /// advances the auto-hide timer and draws the label when enabled.
        /// </summary>
        public void OnGUI(string newText = null)
        {
            if (newText != null)
                text = newText;

            // Font size is kept in sync even while hidden so the label is correctly
            // sized the moment it is re-enabled.
            style.fontSize = UI.ScaleVertical(fontSize);

            if (!enabled)
                return;

            if (displayTime > 0)
            {
                if (timer >= displayTime)
                {
                    Disable();
                    return;
                }
                timer += Time.deltaTime;
            }

            Draw();
        }

        void Enable()
        {
            if (!enabled)
                ResetTimer();
            enabled = true;
        }

        void Disable()
        {
            enabled = false;
        }

        void Draw()
        {
            if (enableOutline)
                DrawOutline();
            GUI.Label(Position, text, style);
        }

        // Fakes an outline by drawing the text four times, offset by one pixel in
        // each direction, in the outline color; Draw() then paints the real text on top.
        void DrawOutline()
        {
            var position = Position;
            var oldColor = style.normal.textColor;
            style.normal.textColor = outlineColor;
            position.x--;
            GUI.Label(position, text, style); // left
            position.x += 2;
            GUI.Label(position, text, style); // right
            position.x--;
            position.y--;
            GUI.Label(position, text, style); // above (GUI y axis points down)
            position.y += 2;
            GUI.Label(position, text, style); // below
            position.y--;
            style.normal.textColor = oldColor;
        }
    }
}
}
| |
namespace AjaxControlToolkit {
public static class Constants {
public const string
CdnPrefix = "//ajax.aspnetcdn.com/ajax/act/act_version/";
internal const string
CdnScriptDebugPrefix = CdnPrefix + "Scripts/AjaxControlToolkit/Debug/",
CdnScriptReleasePrefix = CdnPrefix + "Scripts/AjaxControlToolkit/Release/",
JsPostfix = ".js",
DebugJsPostfix = ".debug" + JsPostfix,
CssPostfix = ".css",
MinCssPostfix = ".min" + CssPostfix,
IconPostfix = ".bmp",
UploadTempFileExtension = ".tmp",
ContentVirtualPath = "~/Content/AjaxControlToolkit/",
StylesVirtualPath = ContentVirtualPath + "Styles/",
ImagesVirtualPath = ContentVirtualPath + "Images/",
ScriptsVirtualPath = "~/Scripts/AjaxControlToolkit/",
ScriptsDebugVirtualPath = ScriptsVirtualPath + "Debug/",
ScriptsReleaseVirtualPath = ScriptsVirtualPath + "Release/",
ImageResourcePrefix = "AjaxControlToolkit.Images.",
StyleResourcePrefix = "AjaxControlToolkit.Styles.",
BackgroundStylesName = "Backgrounds",
BaseScriptName = "BaseScripts",
CommonScriptName = "Common",
CompatDragDropScriptName = "Compat.DragDrop",
CompatTimerScriptName = "Compat.Timer",
ComponentSetName = "ComponentSet",
DateTimeScriptName = "Common.DateTime",
LocalizationScriptName = "Localization.Resources",
ThreadingScriptName = "Common.Threading",
AccordionName = "Accordion",
AjaxFileUploadName = "AjaxFileUpload",
AlwaysVisibleControlName = "AlwaysVisibleControl",
AnimationName = "Animation",
AnimationScriptsName = "AnimationScripts",
AreaChartName = "AreaChart",
AsyncFileUploadName = "AsyncFileUpload",
AutoCompleteName = "AutoComplete",
BalloonPopupName = "BalloonPopup",
BarChartName = "BarChart",
BubbleChartName = "BubbleChart",
CalendarName = "Calendar",
CascadingDropDownName = "CascadingDropDown",
CollapsiblePanelName = "CollapsiblePanel",
ColorPickerName = "ColorPicker",
ConfirmButtonName = "ConfirmButton",
ComboBoxName = "ComboBox",
DynamicPopulateName = "DynamicPopulate",
DraggableListItemName = "DraggableListItem",
DragPanelName = "DragPanel",
DropDownName = "DropDown",
DropShadowName = "DropShadow",
DropWatcherName = "DropWatcher",
FilteredTextBoxName = "FilteredTextBox",
FloatingBehaviorName = "FloatingBehavior",
GravatarName = "Gravatar",
HoverMenuName = "HoverMenu",
HoverName = "Hover",
HtmlEditorExtenderName = "HtmlEditorExtender",
LineChartName = "LineChart",
ListSearchName = "ListSearch",
MaskedEditName = "MaskedEdit",
MaskedEditValidatorName = "MaskedEditValidator",
ModalPopup = "ModalPopup",
MultiHandleSliderName = "MultiHandleSlider",
MutuallyExclusiveCheckBoxName = "MutuallyExclusiveCheckBox",
NoBotName = "NoBot",
NumericUpDownName = "NumericUpDown",
PagingBulletedListName = "PagingBulletedList",
PasswordStrengthName = "PasswordStrength",
PieChartName = "PieChart",
PopupControlName = "PopupControl",
PopupName = "Popup",
RatingName = "Rating",
ReorderListName = "ReorderList",
ResizableControlName = "ResizableControl",
RoundedCornersName = "RoundedCorners",
SeadragonName = "Seadragon",
SliderName = "Slider",
SlideShowName = "SlideShow",
TabsName = "Tabs",
TextBoxWatermarkName = "TextBoxWatermark",
ToggleButtonName = "ToggleButton",
TwitterName = "Twitter",
UpdatePanelAnimationName = "UpdatePanelAnimation",
ValidatorCalloutName = "ValidatorCallout",
AsyncFileUploadImage = AsyncFileUploadName + ".Button.png",
HtmlEditorName = "HtmlEditor",
HtmlEditorPopupsName = HtmlEditorName + ".Popups",
HtmlEditorToolbarButtonsName = HtmlEditorName + ".ToolbarButtons",
HtmlEditorHtmlEditorName = HtmlEditorName + ".HtmlEditor",
HtmlEditorEditorName = HtmlEditorName + ".Editor",
HtmlEditorToolbarName = HtmlEditorName + ".Toolbar",
HtmlEditorEnumsName = HtmlEditorName + ".Enums",
HtmlEditorEventsName = HtmlEditorName + ".Events",
HtmlEditorPreviewPanelName = HtmlEditorName + ".PreviewPanel",
HtmlEditorModePanelName = HtmlEditorName + ".ModePanel",
HtmlEditorEditPanelName = HtmlEditorName + ".EditPanel",
HtmlEditorHtmlPanelName = HtmlEditorName + ".HtmlPanel",
HtmlEditorDocumentName = HtmlEditorName + ".Document",
HtmlEditorDesignPanelName = HtmlEditorName + ".DesignPanel",
HtmlEditorExecCommandEmulationName = HtmlEditorName + ".ExecCommandEmulation",
HtmlEditorDesignPanelEventHandlerName = HtmlEditorName + ".DesignPanelEventHandler",
HtmlEditorPopupName = HtmlEditorPopupsName + ".Popup",
HtmlEditorAttachedTemplatePopupName = HtmlEditorPopupsName + ".AttachedTemplatePopup",
HtmlEditorAttachedPopupName = HtmlEditorPopupsName + ".AttachedPopup",
HtmlEditorPopupCommandButtonName = HtmlEditorPopupsName + ".PopupCommonButton",
HtmlEditorPopupBoxButtonName = HtmlEditorPopupsName + ".PopupBoxButton",
HtmlEditorPopupBGIButtonName = HtmlEditorPopupsName + ".PopupBGIButton",
HtmlEditorOkCancelAttachedTemplatePopupName = HtmlEditorPopupsName + ".OkCancelAttachedTemplatePopup",
HtmlEditorLinkPropertiesName = HtmlEditorPopupsName + ".LinkProperties",
HtmlEditorBaseColorsPopupName = HtmlEditorPopupsName + ".BaseColorsPopup",
HtmlEditorBackColorClearButtonName = HtmlEditorToolbarButtonsName + ".BackColorClear",
HtmlEditorBackColorSelectorButtonName = HtmlEditorToolbarButtonsName + ".BackColorSelector",
HtmlEditorBoldButtonName = HtmlEditorToolbarButtonsName + ".Bold",
HtmlEditorBoxButtonName = HtmlEditorToolbarButtonsName + ".BoxButton",
HtmlEditorBulletedListButtonName = HtmlEditorToolbarButtonsName + ".BulletedList",
HtmlEditorColorButtonName = HtmlEditorToolbarButtonsName + ".ColorButton",
HtmlEditorColorSelectorName = HtmlEditorToolbarButtonsName + ".ColorSelector",
HtmlEditorCommonButtonName = HtmlEditorToolbarButtonsName + ".CommonButton",
HtmlEditorCopyButtonName = HtmlEditorToolbarButtonsName + ".Copy",
HtmlEditorCutButtonName = HtmlEditorToolbarButtonsName + ".Cut",
HtmlEditorDecreaseIndentButtonName = HtmlEditorToolbarButtonsName + ".DecreaseIndent",
HtmlEditorDesignModeButtonName = HtmlEditorToolbarButtonsName + ".DesignMode",
HtmlEditorDesignModeBoxButtonName = HtmlEditorToolbarButtonsName + ".DesignModeBoxButton",
HtmlEditorDesignModeImageButtonName = HtmlEditorToolbarButtonsName + ".DesignModeImageButton",
HtmlEditorDesignModePopupImageButtonName = HtmlEditorToolbarButtonsName + ".DesignModePopupImageButton",
HtmlEditorDesignModeSelectButtonName = HtmlEditorToolbarButtonsName + ".DesignModeSelectButton",
HtmlEditorToggleButtonName = HtmlEditorToolbarButtonsName + ".EditorToggleButton",
HtmlEditorFixedBackColorButtonName = HtmlEditorToolbarButtonsName + ".FixedBackColor",
HtmlEditorFixedColorButtonName = HtmlEditorToolbarButtonsName + ".FixedColorButton",
HtmlEditorFixedForeColorButtonName = HtmlEditorToolbarButtonsName + ".FixedForeColor",
HtmlEditorFontNameButtonName = HtmlEditorToolbarButtonsName + ".FontName",
HtmlEditorFontSizeButtonName = HtmlEditorToolbarButtonsName + ".FontSize",
HtmlEditorForeColorButtonName = HtmlEditorToolbarButtonsName + ".ForeColor",
HtmlEditorForeColorClearButtonName = HtmlEditorToolbarButtonsName + ".ForeColorClear",
HtmlEditorForeColorSelectorButtonName = HtmlEditorToolbarButtonsName + ".ForeColorSelector",
HtmlEditorHorizontalSepearatorButtonName = HtmlEditorToolbarButtonsName + ".HorizontalSeparator",
HtmlEditorHtmlModeButtonName = HtmlEditorToolbarButtonsName + ".HtmlMode",
HtmlEditorImageButtonName = HtmlEditorToolbarButtonsName + ".ImageButton",
HtmlEditorIncreaseIndentButtonName = HtmlEditorToolbarButtonsName + ".IncreaseIndent",
HtmlEditorInsertHRButtonName = HtmlEditorToolbarButtonsName + ".InsertHR",
HtmlEditorInsertLinkButtonName = HtmlEditorToolbarButtonsName + ".InsertLink",
HtmlEditorItalicButtonName = HtmlEditorToolbarButtonsName + ".Italic",
HtmlEditorJustifyCenterButtonName = HtmlEditorToolbarButtonsName + ".JustifyCenter",
HtmlEditorJustifyFullButtonName = HtmlEditorToolbarButtonsName + ".JustifyFull",
HtmlEditorJustifyRightButtonName = HtmlEditorToolbarButtonsName + ".JustifyRight",
HtmlEditorJustifyLeftButtonName = HtmlEditorToolbarButtonsName + ".JustifyLeft",
HtmlEditorLtrButtonName = HtmlEditorToolbarButtonsName + ".Ltr",
HtmlEditorMethodButtonName = HtmlEditorToolbarButtonsName + ".MethodButton",
HtmlEditorModeButtonName = HtmlEditorToolbarButtonsName + ".ModeButton",
HtmlEditorOkCancelPopupButtonName = HtmlEditorToolbarButtonsName + ".OkCancelPopupButton",
HtmlEditorOrderedListButtonName = HtmlEditorToolbarButtonsName + ".OrderedList",
HtmlEditorParagraphButtonName = HtmlEditorToolbarButtonsName + ".Paragraph",
HtmlEditorPasteButtonName = HtmlEditorToolbarButtonsName + ".Paste",
HtmlEditorPasteTextButtonName = HtmlEditorToolbarButtonsName + ".PasteText",
HtmlEditorPasteWordButtonName = HtmlEditorToolbarButtonsName + ".PasteWord",
HtmlEditorPreviewModeButtonName = HtmlEditorToolbarButtonsName + ".PreviewMode",
HtmlEditorRedoButtonMode = HtmlEditorToolbarButtonsName + ".Redo",
HtmlEditorRemoveAlignmentButtonName = HtmlEditorToolbarButtonsName + ".RemoveAlignment",
HtmlEditorRemoveLinkButtonName = HtmlEditorToolbarButtonsName + ".RemoveLink",
HtmlEditorRemoveStylesButtonName = HtmlEditorToolbarButtonsName + ".RemoveStyles",
HtmlEditorRtlButtonName = HtmlEditorToolbarButtonsName + ".Rtl",
HtmlEditorSelectButtonName = HtmlEditorToolbarButtonsName + ".SelectButton",
HtmlEditorSelectorButtonName = HtmlEditorToolbarButtonsName + ".Selector",
HtmlEditorStrikeThroughButtonName = HtmlEditorToolbarButtonsName + ".StrikeThrough",
HtmlEditorSubScriptButtonName = HtmlEditorToolbarButtonsName + ".SubScript",
HtmlEditorSuperScriptButtonName = HtmlEditorToolbarButtonsName + ".SuperScript",
HtmlEditorUnderlineButtonName = HtmlEditorToolbarButtonsName + ".Underline",
HtmlEditorUndoButtonName = HtmlEditorToolbarButtonsName + ".Undo",
HtmlEditorBgiButtonImage = HtmlEditorName + ".BgiButton.gif",
HtmlEditorEdColorBgClearNImage = HtmlEditorName + ".Ed-ColorBgClear-Inactive.gif",
HtmlEditorEdColorBgClearAImage = HtmlEditorName + ".Ed-ColorBgClear-Active.gif",
HtmlEditorEdFormatBoldAImage = HtmlEditorName + ".Ed-FormatBold-Active.gif",
HtmlEditorEdFormatBoldNImage = HtmlEditorName + ".Ed-FormatBold-Inactive.gif",
HtmlEditorEdListBulletAImage = HtmlEditorName + ".Ed-ListBullet-Active.gif",
HtmlEditorEdListBulletNImage = HtmlEditorName + ".Ed-ListBullet-Inactive.gif",
HtmlEditorEdCopyAImage = HtmlEditorName + ".Ed-Copy-Active.gif",
HtmlEditorEdCopyNImage = HtmlEditorName + ".Ed-Copy-Inactive.gif",
HtmlEditorEdCutAImage = HtmlEditorName + ".Ed-Cut-Active.gif",
HtmlEditorEdCutNImage = HtmlEditorName + ".Ed-Cut-Inactive.gif",
HtmlEditorEdIndentLessAImage = HtmlEditorName + ".Ed-IndentLess-Active.gif",
HtmlEditorEdIndentLessNImage = HtmlEditorName + ".Ed-IndentLess-Inactive.gif",
HtmlEditorEdDesignAImage = HtmlEditorName + ".Ed-Design-Active.gif",
HtmlEditorEdDesignNImage = HtmlEditorName + ".Ed-Design-Inactive.gif",
HtmlEditorEdBackColorAImage = HtmlEditorName + ".Ed-BackColor-Active.gif",
HtmlEditorEdBackColorNImage = HtmlEditorName + ".Ed-BackColor-Inactive.gif",
HtmlEditorEdForeColorAImage = HtmlEditorName + ".Ed-ForeColor-Active.gif",
HtmlEditorEdForeColorNImage = HtmlEditorName + ".Ed-ForeColor-Inactive.gif",
HtmlEditorEdColorFgAImage = HtmlEditorName + ".Ed-ColorFg-Active.gif",
HtmlEditorEdColorFgNImage = HtmlEditorName + ".Ed-ColorFg-Inactive.gif",
HtmlEditorEdColorFgClearAImage = HtmlEditorName + ".Ed-ColorFgClear-Active.gif",
HtmlEditorEdColorFgClearNImage = HtmlEditorName + ".Ed-ColorFgClear-Inactive.gif",
HtmlEditorEdSepImage = HtmlEditorName + ".Ed-Separator.gif",
HtmlEditorEdHtmlAImage = HtmlEditorName + ".Ed-Html-Active.gif",
HtmlEditorEdHtmlNImage = HtmlEditorName + ".Ed-Html-Inactive.gif",
HtmlEditorEdIndentMoreAImage = HtmlEditorName + ".Ed-IndentMore-Active.gif",
HtmlEditorEdIndentMoreNImage = HtmlEditorName + ".Ed-IndentMore-Inactive.gif",
HtmlEditorEdRuleAImage = HtmlEditorName + ".Ed-Rule-Active.gif",
HtmlEditorEdRuleNImage = HtmlEditorName + ".Ed-Rule-Inactive.gif",
HtmlEditorEdLinkAImage = HtmlEditorName + ".Ed-Link-Active.gif",
HtmlEditorEdLinkNImage = HtmlEditorName + ".Ed-Link-Inactive.gif",
HtmlEditorEdFormatItalicAImage = HtmlEditorName + ".Ed-FormatItalic-Active.gif",
HtmlEditorEdFormatItalicNImage = HtmlEditorName + ".Ed-FormatItalic-Inactive.gif",
HtmlEditorEdAlingCenterAImage = HtmlEditorName + ".Ed-AlignCenter-Active.gif",
HtmlEditorEdAlingCenterNImage = HtmlEditorName + ".Ed-AlignCenter-Inactive.gif",
HtmlEditorEdAlignJustifyAImage = HtmlEditorName + ".Ed-AlignJustify-Active.gif",
HtmlEditorEdAlignJustifyNImage = HtmlEditorName + ".Ed-AlignJustify-Inactive.gif",
HtmlEditorEdAlignLeftAImage = HtmlEditorName + ".Ed-AlignLeft-Active.gif",
HtmlEditorEdAlignLeftNImage = HtmlEditorName + ".Ed-AlignLeft-Inactive.gif",
HtmlEditorEdAlignRightAImage = HtmlEditorName + ".Ed-AlignRight-Active.gif",
HtmlEditorEdAlignRightNImage = HtmlEditorName + ".Ed-AlignRight-Inactive.gif",
HtmlEditorEdFormatLtrAImage = HtmlEditorName + ".Ed-FormatLtr-Active.gif",
HtmlEditorEdFormatLtrNImage = HtmlEditorName + ".Ed-FormatLtr-Inactive.gif",
HtmlEditorEdListNumAImage = HtmlEditorName + ".Ed-ListNum-Active.gif",
HtmlEditorEdListNumNImage = HtmlEditorName + ".Ed-ListNum-Inactive.gif",
HtmlEditorEdFormatParagraphAImage = HtmlEditorName + ".Ed-FormatParagraph-Active.gif",
HtmlEditorEdFormatParagraphNImage = HtmlEditorName + ".Ed-FormatParagraph-Inactive.gif",
HtmlEditorEdPasteAImage = HtmlEditorName + ".Ed-Paste-Active.gif",
HtmlEditorEdPasteNImage = HtmlEditorName + ".Ed-Paste-Inactive.gif",
HtmlEditorEdPasteTextAImage = HtmlEditorName + ".Ed-PasteText-Active.gif",
HtmlEditorEdPasteTextNImage = HtmlEditorName + ".Ed-PasteText-Inactive.gif",
HtmlEditorEdPasteWordAImage = HtmlEditorName + ".Ed-PasteWord-Active.gif",
HtmlEditorEdPasteWordNImage = HtmlEditorName + ".Ed-PasteWord-Inactive.gif",
HtmlEditorEdPreviewAImage = HtmlEditorName + ".Ed-Preview-Active.gif",
HtmlEditorEdPreviewNImage = HtmlEditorName + ".Ed-Preview-Inactive.gif",
HtmlEditorEdRedoAImage = HtmlEditorName + ".Ed-Redo-Active.gif",
HtmlEditorEdRedoNImage = HtmlEditorName + ".Ed-Redo-Inactive.gif",
HtmlEditorEdRemoveAlignAImage = HtmlEditorName + ".Ed-AlignRemove-Active.gif",
HtmlEditorEdRemoveAlignNImage = HtmlEditorName + ".Ed-AlignRemove-Inactive.gif",
HtmlEditorEdUnlinkAImage = HtmlEditorName + ".Ed-Unlink-Active.gif",
HtmlEditorEdUnlinkNImage = HtmlEditorName + ".Ed-Unlink-Inactive.gif",
HtmlEditorEdUnformatAImage = HtmlEditorName + ".Ed-Unformat-Active.gif",
HtmlEditorEdUnformatNImage = HtmlEditorName + ".Ed-Unformat-Inactive.gif",
HtmlEditorEdFormatRtlAImage = HtmlEditorName + ".Ed-FormatRtl-Active.gif",
HtmlEditorEdFormatRtlNImage = HtmlEditorName + ".Ed-FormatRtl-Inactive.gif",
HtmlEditorEdSelectorAImage = HtmlEditorName + ".Ed-Selector-Active.gif",
HtmlEditorEdSelectorNImage = HtmlEditorName + ".Ed-Selector-Inactive.gif",
HtmlEditorEdFormatStrikeAImage = HtmlEditorName + ".Ed-FormatStrike-Active.gif",
HtmlEditorEdFormatStrikeNImage = HtmlEditorName + ".Ed-FormatStrike-Inactive.gif",
HtmlEditorEdFormatSubAImage = HtmlEditorName + ".Ed-FormatSub-Active.gif",
HtmlEditorEdFormatSubNImage = HtmlEditorName + ".Ed-FormatSub-Inactive.gif",
HtmlEditorEdFormatSupAImage = HtmlEditorName + ".Ed-FormatSup-Active.gif",
HtmlEditorEdFormatSupNImage = HtmlEditorName + ".Ed-FormatSup-Inactive.gif",
HtmlEditorEdFormatUnderlineAImage = HtmlEditorName + ".Ed-FormatUnderline-Active.gif",
HtmlEditorEdFormatUnderlineNImage = HtmlEditorName + ".Ed-FormatUnderline-Inactive.gif",
HtmlEditorEdUndoAImage = HtmlEditorName + ".Ed-Undo-Active.gif",
HtmlEditorEdUndoNImage = HtmlEditorName + ".Ed-Undo-Inactive.gif",
HtmlEditorEd1x1Image = HtmlEditorName + ".Ed-1x1.gif",
HtmlEditorEdAnchorImage = HtmlEditorName + ".Ed-Anchor.gif",
HtmlEditorEdFlashImage = HtmlEditorName + ".Ed-Flash.gif",
HtmlEditorEdMediaImage = HtmlEditorName + ".Ed-Media.gif",
HtmlEditorEdPlaceHolderImage = HtmlEditorName + ".Ed-Placeholder.gif",
HtmlEditorExtenderButtonsImage = HtmlEditorName + "Extender.Buttons.png",
BalloonPopupCloudGifSprite = BalloonPopupName + ".CloudSprite.gif",
BalloonPopupCloudPngSprite = BalloonPopupName + ".CloudSprite.png",
BalloonPopupRectangleGifSprite = BalloonPopupName + ".RectangleSprite.gif",
BalloonPopupRectanglePngSprite = BalloonPopupName + ".RectangleSprite.png",
CalendarArrowLeftImage = CalendarName + ".Arrow-Left.gif",
CalendarArrowRightImage = CalendarName + ".Arrow-Right.gif",
ComboBoxArrowDownImage = ComboBoxName + ".Arrow-Down.gif",
DropDownDropArrowImage = DropDownName + ".DropArrow.gif",
GravatarAnonymousImage = GravatarName + ".Anonymous.jpg",
GravatarGImage = GravatarName + ".G.jpg",
GravatarPGImage = GravatarName + ".PG.jpg",
GravatarRImage = GravatarName + ".R.jpg",
GravatarXImage = GravatarName + ".X.jpg",
MultiHandleSliderHHandleImage = MultiHandleSliderName + ".Handle-Horizontal.gif",
MultiHandleSliderHHandleDownImage = MultiHandleSliderName + ".HandleDown-Horizontal.gif",
MultiHandleSliderHHandleHoverImage = MultiHandleSliderName + ".HandleHover-Horizontal.gif",
MultiHandleSliderHRailImage = MultiHandleSliderName + ".Rail-Horizontal.gif",
MultiHandleSliderHRailOuterImage = MultiHandleSliderName + ".RailOuter-Horizontal.gif",
MultiHandleSliderVHandleImage = MultiHandleSliderName + ".Handle-Vertical.gif",
MultiHandleSliderVHandleDownImage = MultiHandleSliderName + ".HandleDown-Vertical.gif",
MultiHandleSliderVHandleHoverImage = MultiHandleSliderName + ".HandleHover-Vertical.gif",
MultiHandleSliderVRailImage = MultiHandleSliderName + ".Rail-Vertical.gif",
MultiHandleSliderVRailOuterImage = MultiHandleSliderName + ".RailOuter-Vertical.gif",
SeadragonFullpageGrouphoverImage = SeadragonName + ".Fullscreen-Grouphover.png",
SeadragonFullpageHoverImage = SeadragonName + ".Fullscreen-Hover.png",
SeadragonFullpagePressedImage = SeadragonName + ".Fullscreen-Pressed.png",
SeadragonFullpageRestImage = SeadragonName + ".Fullscreen-Rest.png",
SeadragonHomeGrouphoverImage = SeadragonName + ".Home-Grouphover.png",
SeadragonHomeHoverImage = SeadragonName + ".Home-Hover.png",
SeadragonHomePressedImage = SeadragonName + ".Home-Pressed.png",
SeadragonHomeRestImage = SeadragonName + ".Home-Rest.png",
SeadragonZoominGrouphoverImage = SeadragonName + ".ZoomIn-Grouphover.png",
SeadragonZoominHoverImage = SeadragonName + ".ZoomIn-Hover.png",
SeadragonZoominPressedImage = SeadragonName + ".ZoomIn-Pressed.png",
SeadragonZoominRestImage = SeadragonName + ".ZoomIn-Rest.png",
SeadragonZoomoutGrouphoverImage = SeadragonName + ".ZoomOut-Grouphover.png",
SeadragonZoomoutHoverImage = SeadragonName + ".ZoomOut-Hover.png",
SeadragonZoomoutPressedImage = SeadragonName + ".ZoomOut-Pressed.png",
SeadragonZoomoutRestImage = SeadragonName + ".ZoomOut-Rest.png",
SliderVerticalRailImage = SliderName + ".Rail-Vertical.gif",
SliderHorizontalRailImage = SliderName + ".Rail-Horizontal.gif",
SliderVerticalHandleImage = SliderName + ".Handle-Vertical.gif",
SliderHorizontalHandleImage = SliderName + ".Handle-Horizontal.gif",
TabsImage = TabsName + ".Background.gif",
TabsActiveLeftVerticalleftImage = TabsName + ".ActiveLeft-VerticalLeft.gif",
TabsActiveLeftVerticalrightImage = TabsName + ".ActiveLeft-VerticalRight.gif",
TabsActiveLeftImage = TabsName + ".ActiveLeft.gif",
TabsActiveRightVerticallefImage = TabsName + ".ActiveRight-VerticalLeft.gif",
TabsActiveRightVerticalrightImage = TabsName + ".ActiveRight-VerticalRight.gif",
TabsActiveRightImage = TabsName + ".ActiveRight.gif",
TabsActiveVerticalleftImage = TabsName + ".Active-VerticalLeft.gif",
TabsActiveVerticalrightImage = TabsName + ".Active-VerticalRight.gif",
TabsActiveImage = TabsName + ".Active.gif",
TabsBottomActiveLeftImage = TabsName + ".Bottom-ActiveLeft.gif",
TabsBottomActiveRightImage = TabsName + ".Bottom-ActiveRight.gif",
TabsBottomActiveImage = TabsName + ".Bottom-Active.gif",
TabsBottomHoverLeftImage = TabsName + ".Bottom-HoverLeft.gif",
TabsBottomHoverRightImage = TabsName + ".Bottom-HoverRight.gif",
TabsBottomHoverImage = TabsName + ".Bottom-Hover.gif",
TabsBottomLeftImage = TabsName + ".Bottom-Left.gif",
TabsBottomRightImage = TabsName + ".Bottom-Right.gif",
TabsBottomImage = TabsName + ".Bottom.gif",
TabsHoverLeftVerticalleftImage = TabsName + ".HoverLeft-VerticalLeft.gif",
TabsHoverLeftVerticalrightImage = TabsName + ".HoverLeft-VerticalRight.gif",
TabsHoverLeftImage = TabsName + ".HoverLeft.gif",
TabsHoverRightVerticalleftImage = TabsName + ".HoverRight-VerticalLeft.gif",
TabsHoverRightVerticalrightImage = TabsName + ".HoverRight-VerticalRight.gif",
TabsHoverRightImage = TabsName + ".HoverRight.gif",
TabsHoverVerticalleftImage = TabsName + ".Hover-VerticalLeft.gif",
TabsHoverVerticalrightImage = TabsName + ".Hover-VerticalRight.gif",
TabsHoverImage = TabsName + ".Hover.gif",
TabsLeftVerticalleftImage = TabsName + ".Left-VerticalLeft.gif",
TabsLeftVerticalrightImage = TabsName + ".Left-VerticalRight.gif",
TabsLeftImage = TabsName + ".Left.gif",
TabsLineImage = TabsName + ".Line.gif",
TabsRightVerticalleftImage = TabsName + ".Right-VerticalLeft.gif",
TabsRightVerticalrightImage = TabsName + ".Right-VerticalRight.gif",
TabsRightImage = TabsName + ".Right.gif",
TabsVerticalleftImage = TabsName + ".VerticalLeft.gif",
TabsVerticalrightImage = TabsName + ".VerticalRight.gif",
Twitter24Image = TwitterName + ".24.png",
Twitter32Image = TwitterName + ".32.png",
ValidatorCalloutAlertLargeImage = ValidatorCalloutName + ".Alert-Large.gif",
ValidatorCalloutAlertSmallImage = ValidatorCalloutName + ".Alert-Small.gif",
ValidatorCalloutCloseImage = ValidatorCalloutName + ".Close.gif";
}
}
| |
//
// https://github.com/ServiceStack/ServiceStack.Text
// ServiceStack.Text: .NET C# POCO JSON, JSV and CSV Text Serializers.
//
// Authors:
// Demis Bellot (demis.bellot@gmail.com)
//
// Copyright 2012 ServiceStack Ltd.
//
// Licensed under the same terms of ServiceStack: new BSD license.
//
using System;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml;
using ServiceStack.Text.Json;
namespace ServiceStack.Text.Common
{
/// <summary>
/// Serialization/deserialization helpers for <see cref="DateTime"/>, <see cref="DateTimeOffset"/>
/// and <see cref="TimeSpan"/>, supporting XSD/ISO8601 formats and the WCF JSON
/// <c>/Date(unixts+0000)/</c> wire format.
/// </summary>
public static class DateTimeSerializer
{
    public const string ShortDateTimeFormat = "yyyy-MM-dd"; //11
    public const string DefaultDateTimeFormat = "dd/MM/yyyy HH:mm:ss"; //20
    public const string DefaultDateTimeFormatWithFraction = "dd/MM/yyyy HH:mm:ss.fff"; //24
    public const string XsdDateTimeFormat = "yyyy-MM-ddTHH:mm:ss.fffffffZ"; //29
    public const string XsdDateTimeFormat3F = "yyyy-MM-ddTHH:mm:ss.fffZ"; //25
    public const string XsdDateTimeFormatSeconds = "yyyy-MM-ddTHH:mm:ssZ"; //21
    public const string DateTimeFormatSecondsUtcOffset = "yyyy-MM-ddTHH:mm:sszzz"; //22
    public const string DateTimeFormatTicksUtcOffset = "yyyy-MM-ddTHH:mm:ss.fffffffzzz"; //30
    public const string EscapedWcfJsonPrefix = "\\/Date(";
    public const string EscapedWcfJsonSuffix = ")\\/";
    public const string WcfJsonPrefix = "/Date(";
    public const char WcfJsonSuffix = ')';
    public const string UnspecifiedOffset = "-0000";

    /// <summary>
    /// If AlwaysUseUtc is set to true then convert all DateTime to UTC.
    /// Otherwise, values parsed as UTC are converted back to local time.
    /// </summary>
    /// <param name="dateTime"></param>
    /// <param name="parsedAsUtc">True when the caller parsed the value with UTC semantics.</param>
    /// <returns></returns>
    private static DateTime Prepare(this DateTime dateTime, bool parsedAsUtc = false)
    {
        if (JsConfig.AlwaysUseUtc)
        {
            return dateTime.Kind != DateTimeKind.Utc ? dateTime.ToStableUniversalTime() : dateTime;
        }
        return parsedAsUtc ? dateTime.ToLocalTime() : dateTime;
    }

    /// <summary>
    /// Nullable wrapper around <see cref="ParseShortestXsdDateTime"/>; null in, null out.
    /// </summary>
    public static DateTime? ParseShortestNullableXsdDateTime(string dateTimeStr)
    {
        if (dateTimeStr == null)
            return null;

        return ParseShortestXsdDateTime(dateTimeStr);
    }

    /// <summary>
    /// Parses a date-time string, dispatching on its shape: WCF JSON dates,
    /// the default dd/MM/yyyy formats, XSD seconds format, XSD fractional formats
    /// ending in 'Z', and finally a general <see cref="DateTime.Parse(string)"/> fallback.
    /// Returns <see cref="DateTime.MinValue"/> for null/empty input.
    /// </summary>
    public static DateTime ParseShortestXsdDateTime(string dateTimeStr)
    {
        if (string.IsNullOrEmpty(dateTimeStr))
            return DateTime.MinValue;

        if (dateTimeStr.StartsWith(EscapedWcfJsonPrefix, StringComparison.Ordinal) || dateTimeStr.StartsWith(WcfJsonPrefix, StringComparison.Ordinal))
            return ParseWcfJsonDate(dateTimeStr).Prepare();

        // Length-based dispatch: the default formats have fixed lengths (20 and 24).
        if (dateTimeStr.Length == DefaultDateTimeFormat.Length
            || dateTimeStr.Length == DefaultDateTimeFormatWithFraction.Length)
        {
            return DateTime.Parse(dateTimeStr, CultureInfo.InvariantCulture).Prepare();
        }

        if (dateTimeStr.Length == XsdDateTimeFormatSeconds.Length)
            return DateTime.ParseExact(dateTimeStr, XsdDateTimeFormatSeconds, null, DateTimeStyles.AdjustToUniversal).Prepare(parsedAsUtc: true);

        if (dateTimeStr.Length >= XsdDateTimeFormat3F.Length
            && dateTimeStr.Length <= XsdDateTimeFormat.Length
            && dateTimeStr.EndsWith("Z"))
        {
#if NETFX_CORE
            // NOTE(review): both arms of this ternary are identical, making the
            // JsonDateHandler check a no-op here — looks like a leftover; confirm intent.
            var dateTimeType = JsConfig.DateHandler != JsonDateHandler.ISO8601
                ? "yyyy-MM-ddTHH:mm:sszzzzzzz"
                : "yyyy-MM-ddTHH:mm:sszzzzzzz";

            return XmlConvert.ToDateTimeOffset(dateTimeStr, dateTimeType).DateTime.Prepare();
#else
            // Mono's XmlConvert has known parsing quirks, so prefer the manual parser there.
            var dateTime = Env.IsMono ? ParseManual(dateTimeStr) : null;
            if (dateTime != null)
                return dateTime.Value;

            return XmlConvert.ToDateTime(dateTimeStr, XmlDateTimeSerializationMode.Utc).Prepare();
#endif
        }

        try
        {
            return DateTime.Parse(dateTimeStr, null, DateTimeStyles.AssumeLocal).Prepare();
        }
        catch (FormatException)
        {
            // Last resort: hand-rolled parser for shapes DateTime.Parse rejects.
            var manualDate = ParseManual(dateTimeStr);
            if (manualDate != null)
                return manualDate.Value;

            throw;
        }
    }

    /// <summary>
    /// Hand-rolled ISO8601-ish parser: splits on 'T' (or first space), parses the
    /// date, optional time with fractional seconds, and an optional +/- UTC offset.
    /// Returns null when the string is too short or has no time component offsetting logic to apply.
    /// </summary>
    public static DateTime? ParseManual(string dateTimeStr)
    {
        if (dateTimeStr == null || dateTimeStr.Length < "YYYY-MM-DD".Length)
            return null;

        var dateKind = DateTimeKind.Utc;
        if (dateTimeStr.EndsWith("Z"))
        {
            // Trailing 'Z' marks UTC; strip it before splitting.
            dateTimeStr = dateTimeStr.Substring(0, dateTimeStr.Length - 1);
        }

        var parts = dateTimeStr.Split('T');
        if (parts.Length == 1)
            parts = dateTimeStr.SplitOnFirst(' ');

        var dateParts = parts[0].Split('-');
        int hh = 0, min = 0, ss = 0, ms = 0;
        double subMs = 0;
        int offsetMultiplier = 0;

        if (parts.Length == 2)
        {
            // Determine offset sign: "+hh:mm" means the local time is ahead of UTC,
            // so we must subtract the offset (multiplier -1) to normalize, and vice versa.
            var timeStringParts = parts[1].Split('+');
            if (timeStringParts.Length == 2)
            {
                offsetMultiplier = -1;
            }
            else
            {
                timeStringParts = parts[1].Split('-');
                if (timeStringParts.Length == 2)
                {
                    offsetMultiplier = 1;
                }
            }

            var timeOffset = timeStringParts.Length == 2 ? timeStringParts[1] : null;
            var timeParts = timeStringParts[0].Split(':');
            if (timeParts.Length == 3)
            {
                int.TryParse(timeParts[0], out hh);
                int.TryParse(timeParts[1], out min);

                var secParts = timeParts[2].Split('.');
                int.TryParse(secParts[0], out ss);
                if (secParts.Length == 2)
                {
                    // First 3 fraction digits are whole milliseconds; anything beyond
                    // becomes a fractional-millisecond remainder (subMs).
                    var msStr = secParts[1].PadRight(3, '0');
                    ms = int.Parse(msStr.Substring(0, 3));

                    if (msStr.Length > 3)
                    {
                        var subMsStr = msStr.Substring(3);
                        subMs = double.Parse(subMsStr) / Math.Pow(10, subMsStr.Length);
                    }
                }
            }

            var dateTime = new DateTime(int.Parse(dateParts[0]), int.Parse(dateParts[1]), int.Parse(dateParts[2]), hh, min, ss, ms, dateKind);
            if (subMs != 0)
            {
                // BUGFIX: DateTime is immutable — AddMilliseconds returns a new value.
                // The original discarded the result, silently dropping sub-ms precision.
                dateTime = dateTime.AddMilliseconds(subMs);
            }

            if (offsetMultiplier != 0 && timeOffset != null)
            {
                // Offset may be "hh:mm" or compact "hhmm".
                timeParts = timeOffset.Split(':');
                if (timeParts.Length == 2)
                {
                    hh = int.Parse(timeParts[0]);
                    min = int.Parse(timeParts[1]);
                }
                else
                {
                    hh = int.Parse(timeOffset.Substring(0, 2));
                    min = int.Parse(timeOffset.Substring(2));
                }

                dateTime = dateTime.AddHours(offsetMultiplier * hh);
                dateTime = dateTime.AddMinutes(offsetMultiplier * min);
            }

            return dateTime.ToLocalTime().Prepare();
        }

        return null;
    }

    /// <summary>
    /// Formats a DateTime as a full-precision XSD UTC string ("yyyy-MM-ddTHH:mm:ss.fffffffZ").
    /// </summary>
    public static string ToDateTimeString(DateTime dateTime)
    {
        return dateTime.ToStableUniversalTime().ToString(XsdDateTimeFormat);
    }

    /// <summary>
    /// Strictly parses the full-precision XSD format produced by <see cref="ToDateTimeString"/>.
    /// </summary>
    public static DateTime ParseDateTime(string dateTimeStr)
    {
        return DateTime.ParseExact(dateTimeStr, XsdDateTimeFormat, null);
    }

    /// <summary>
    /// Parses a DateTimeOffset from either a WCF JSON date or an ISO8601 string.
    /// Assumes UTC when no offset is present.
    /// </summary>
    public static DateTimeOffset ParseDateTimeOffset(string dateTimeOffsetStr)
    {
        if (string.IsNullOrEmpty(dateTimeOffsetStr)) return default(DateTimeOffset);

        // for interop, do not assume format based on config
        // format: prefer TimestampOffset, DCJSCompatible
        if (dateTimeOffsetStr.StartsWith(EscapedWcfJsonPrefix, StringComparison.Ordinal) ||
            dateTimeOffsetStr.StartsWith(WcfJsonPrefix, StringComparison.Ordinal))
        {
            return ParseWcfJsonDateOffset(dateTimeOffsetStr);
        }

        // format: next preference ISO8601
        // assume utc when no offset specified
        // (index < 10 means any '+'/'-' found belongs to the date part, not an offset)
        if (dateTimeOffsetStr.LastIndexOfAny(TimeZoneChars) < 10)
        {
            if (!dateTimeOffsetStr.EndsWith("Z")) dateTimeOffsetStr += "Z";
#if __MonoCS__
            // Without that Mono uses a Local timezone))
            dateTimeOffsetStr = dateTimeOffsetStr.Substring(0, dateTimeOffsetStr.Length - 1) + "+00:00";
#endif
        }

        return DateTimeOffset.Parse(dateTimeOffsetStr, CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Formats a DateTime as an XSD UTC string via XmlConvert.
    /// </summary>
    public static string ToXsdDateTimeString(DateTime dateTime)
    {
#if NETFX_CORE
        return XmlConvert.ToString(dateTime.ToStableUniversalTime(), XsdDateTimeFormat);
#else
        return XmlConvert.ToString(dateTime.ToStableUniversalTime(), XmlDateTimeSerializationMode.Utc);
#endif
    }

    /// <summary>
    /// Formats a TimeSpan as an XSD duration (e.g. "PT1H30M").
    /// </summary>
    public static string ToXsdTimeSpanString(TimeSpan timeSpan)
    {
        var r = XmlConvert.ToString(timeSpan);
#if __MonoCS__
        // Mono returns DT even if time is 00:00:00
        if (r.EndsWith("DT")) return r.Substring(0, r.Length - 1);
#endif
        return r;
    }

    /// <summary>
    /// Nullable overload of <see cref="ToXsdTimeSpanString(TimeSpan)"/>; null in, null out.
    /// </summary>
    public static string ToXsdTimeSpanString(TimeSpan? timeSpan)
    {
        return (timeSpan != null) ? ToXsdTimeSpanString(timeSpan.Value) : null;
    }

    /// <summary>
    /// Parses an XSD date-time string with UTC semantics via XmlConvert.
    /// </summary>
    public static DateTime ParseXsdDateTime(string dateTimeStr)
    {
#if NETFX_CORE
        return XmlConvert.ToDateTimeOffset(dateTimeStr).DateTime;
#else
        return XmlConvert.ToDateTime(dateTimeStr, XmlDateTimeSerializationMode.Utc);
#endif
    }

    /// <summary>
    /// Parses a TimeSpan from either an XSD duration ("P..."/"-P...") or .NET's own format.
    /// </summary>
    public static TimeSpan ParseTimeSpan(string dateTimeStr)
    {
        return dateTimeStr.StartsWith("P", StringComparison.Ordinal) || dateTimeStr.StartsWith("-P", StringComparison.Ordinal)
            ? ParseXsdTimeSpan(dateTimeStr)
            : TimeSpan.Parse(dateTimeStr);
    }

    /// <summary>
    /// Parses an XSD duration string into a TimeSpan.
    /// </summary>
    public static TimeSpan ParseXsdTimeSpan(string dateTimeStr)
    {
        return XmlConvert.ToTimeSpan(dateTimeStr);
    }

    /// <summary>
    /// Nullable wrapper around <see cref="ParseTimeSpan"/>; null/empty in, null out.
    /// </summary>
    public static TimeSpan? ParseNullableTimeSpan(string dateTimeStr)
    {
        return string.IsNullOrEmpty(dateTimeStr)
            ? (TimeSpan?)null
            : ParseTimeSpan(dateTimeStr);
    }

    /// <summary>
    /// Nullable wrapper around the XSD duration parser; null/empty in, null out.
    /// </summary>
    public static TimeSpan? ParseXsdNullableTimeSpan(string dateTimeStr)
    {
        return String.IsNullOrEmpty(dateTimeStr) ?
            null :
            new TimeSpan?(XmlConvert.ToTimeSpan(dateTimeStr));
    }

    /// <summary>
    /// Formats a DateTime using the shortest lossless XSD-style representation:
    /// date-only, seconds precision, or full tick precision; UTC values get the
    /// 'Z' forms while non-UTC values carry an explicit offset.
    /// </summary>
    public static string ToShortestXsdDateTimeString(DateTime dateTime)
    {
        var timeOfDay = dateTime.TimeOfDay;

        if (timeOfDay.Ticks == 0)
            return dateTime.ToString(ShortDateTimeFormat);

        if (timeOfDay.Milliseconds == 0)
            return dateTime.Kind != DateTimeKind.Utc
                ? dateTime.ToString(DateTimeFormatSecondsUtcOffset)
                : dateTime.ToStableUniversalTime().ToString(XsdDateTimeFormatSeconds);

        return dateTime.Kind != DateTimeKind.Utc
            ? dateTime.ToString(DateTimeFormatTicksUtcOffset)
            : ToXsdDateTimeString(dateTime);
    }

    // Characters that can introduce a timezone offset in the WCF time portion.
    static readonly char[] TimeZoneChars = new[] { '+', '-' };

    /// <summary>
    /// WCF Json format: /Date(unixts+0000)/
    /// </summary>
    /// <param name="wcfJsonDate"></param>
    /// <returns></returns>
    public static DateTimeOffset ParseWcfJsonDateOffset(string wcfJsonDate)
    {
        if (wcfJsonDate[0] == '\\')
        {
            wcfJsonDate = wcfJsonDate.Substring(1);
        }

        var suffixPos = wcfJsonDate.IndexOf(WcfJsonSuffix);
        var timeString = (suffixPos < 0) ? wcfJsonDate : wcfJsonDate.Substring(WcfJsonPrefix.Length, suffixPos - WcfJsonPrefix.Length);

        // for interop, do not assume format based on config
        if (!wcfJsonDate.StartsWith(WcfJsonPrefix, StringComparison.Ordinal))
        {
            return DateTimeOffset.Parse(timeString, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
        }

        var timeZonePos = timeString.LastIndexOfAny(TimeZoneChars);
        var timeZone = timeZonePos <= 0 ? string.Empty : timeString.Substring(timeZonePos);
        var unixTimeString = timeString.Substring(0, timeString.Length - timeZone.Length);

        var unixTime = long.Parse(unixTimeString);

        if (timeZone == string.Empty)
        {
            // when no timezone offset is supplied, then treat the time as UTC
            return unixTime.FromUnixTimeMs();
        }

        // DCJS ignores the offset and considers it local time if any offset exists
        // REVIEW: DCJS shoves offset in a separate field 'offsetMinutes', we have the offset in the format, so shouldn't we use it?
        if (JsConfig.DateHandler == JsonDateHandler.DCJSCompatible || timeZone == UnspecifiedOffset)
        {
            return unixTime.FromUnixTimeMs().ToLocalTime();
        }

        var offset = timeZone.FromTimeOffsetString();
        var date = unixTime.FromUnixTimeMs();
        return new DateTimeOffset(date.Ticks, offset);
    }

    /// <summary>
    /// WCF Json format: /Date(unixts+0000)/
    /// </summary>
    /// <param name="wcfJsonDate"></param>
    /// <returns></returns>
    public static DateTime ParseWcfJsonDate(string wcfJsonDate)
    {
        if (wcfJsonDate[0] == JsonUtils.EscapeChar)
        {
            wcfJsonDate = wcfJsonDate.Substring(1);
        }

        // NOTE(review): unlike ParseWcfJsonDateOffset, a missing ')' suffix is not
        // guarded here, so a malformed/non-WCF string would make this Substring throw
        // — confirm whether callers always pre-validate the prefix.
        var suffixPos = wcfJsonDate.IndexOf(WcfJsonSuffix);
        var timeString = wcfJsonDate.Substring(WcfJsonPrefix.Length, suffixPos - WcfJsonPrefix.Length);

        // for interop, do not assume format based on config
        if (!wcfJsonDate.StartsWith(WcfJsonPrefix, StringComparison.Ordinal))
        {
            return DateTime.Parse(timeString, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
        }

        var timeZonePos = timeString.LastIndexOfAny(TimeZoneChars);
        var timeZone = timeZonePos <= 0 ? string.Empty : timeString.Substring(timeZonePos);
        var unixTimeString = timeString.Substring(0, timeString.Length - timeZone.Length);

        var unixTime = long.Parse(unixTimeString);

        if (timeZone == string.Empty)
        {
            // when no timezone offset is supplied, then treat the time as UTC
            return unixTime.FromUnixTimeMs();
        }

        // DCJS ignores the offset and considers it local time if any offset exists
        if (JsConfig.DateHandler == JsonDateHandler.DCJSCompatible || timeZone == UnspecifiedOffset)
        {
            return unixTime.FromUnixTimeMs().ToLocalTime();
        }

        var offset = timeZone.FromTimeOffsetString();
        var date = unixTime.FromUnixTimeMs(offset);
        return new DateTimeOffset(date, offset).DateTime;
    }

    // Cached local timezone, used to stamp offsets onto non-UTC WCF JSON dates.
    private static TimeZoneInfo LocalTimeZone = TimeZoneInfo.Local;

    /// <summary>
    /// Writes a DateTime in the configured JSON wire format: ISO8601 round-trip ("o")
    /// when JsConfig.DateHandler is ISO8601, otherwise the escaped WCF form
    /// "\/Date(ms[+offset])\/" with an offset for non-UTC kinds.
    /// </summary>
    public static void WriteWcfJsonDate(TextWriter writer, DateTime dateTime)
    {
        if (JsConfig.AssumeUtc && dateTime.Kind == DateTimeKind.Unspecified)
        {
            dateTime = DateTime.SpecifyKind(dateTime, DateTimeKind.Utc);
        }

        if (JsConfig.DateHandler == JsonDateHandler.ISO8601)
        {
            writer.Write(dateTime.ToString("o", CultureInfo.InvariantCulture));
            return;
        }

        var timestamp = dateTime.ToUnixTimeMs();
        string offset = null;
        if (dateTime.Kind != DateTimeKind.Utc)
        {
            // "-0000" flags an Unspecified kind under TimestampOffset so it round-trips.
            if (JsConfig.DateHandler == JsonDateHandler.TimestampOffset && dateTime.Kind == DateTimeKind.Unspecified)
                offset = UnspecifiedOffset;
            else
                offset = LocalTimeZone.GetUtcOffset(dateTime).ToTimeOffsetString();
        }

        writer.Write(EscapedWcfJsonPrefix);
        writer.Write(timestamp);
        if (offset != null)
        {
            writer.Write(offset);
        }
        writer.Write(EscapedWcfJsonSuffix);
    }

    /// <summary>
    /// String-returning convenience wrapper over <see cref="WriteWcfJsonDate"/>.
    /// </summary>
    public static string ToWcfJsonDate(DateTime dateTime)
    {
        var sb = new StringBuilder();
        using (var writer = new StringWriter(sb))
        {
            WriteWcfJsonDate(writer, dateTime);
            return sb.ToString();
        }
    }

    /// <summary>
    /// Writes a DateTimeOffset in the configured JSON wire format; zero offsets are
    /// omitted from the WCF form.
    /// </summary>
    public static void WriteWcfJsonDateTimeOffset(TextWriter writer, DateTimeOffset dateTimeOffset)
    {
        if (JsConfig.DateHandler == JsonDateHandler.ISO8601)
        {
            writer.Write(dateTimeOffset.ToString("o", CultureInfo.InvariantCulture));
            return;
        }

        var timestamp = dateTimeOffset.Ticks.ToUnixTimeMs();
        var offset = dateTimeOffset.Offset == TimeSpan.Zero
            ? null
            : dateTimeOffset.Offset.ToTimeOffsetString();

        writer.Write(EscapedWcfJsonPrefix);
        writer.Write(timestamp);
        if (offset != null)
        {
            writer.Write(offset);
        }
        writer.Write(EscapedWcfJsonSuffix);
    }

    /// <summary>
    /// String-returning convenience wrapper over <see cref="WriteWcfJsonDateTimeOffset"/>.
    /// </summary>
    public static string ToWcfJsonDateTimeOffset(DateTimeOffset dateTimeOffset)
    {
        var sb = new StringBuilder();
        using (var writer = new StringWriter(sb))
        {
            WriteWcfJsonDateTimeOffset(writer, dateTimeOffset);
            return sb.ToString();
        }
    }
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Diagnostics;
using osu.Framework.Caching;
using osu.Framework.Input;
using osu.Framework.Input.Bindings;
using osu.Framework.Input.Events;
using osu.Framework.Utils;
using osuTK;
using osuTK.Input;
namespace osu.Framework.Graphics.Containers
{
public abstract class ScrollContainer<T> : Container<T>, DelayedLoadWrapper.IOnScreenOptimisingContainer, IKeyBindingHandler<PlatformAction>
where T : Drawable
{
/// <summary>
/// Determines whether the scroll dragger appears on the left side. If not, then it always appears on the right side.
/// </summary>
public Anchor ScrollbarAnchor
{
get => Scrollbar.Anchor;
set
{
// Keep Origin in sync with Anchor so the bar hugs the chosen edge,
// then recompute padding (matters when the bar doesn't overlap content).
Scrollbar.Anchor = value;
Scrollbar.Origin = value;
updatePadding();
}
}
private bool scrollbarVisible = true;
/// <summary>
/// Whether the scrollbar is visible.
/// </summary>
public bool ScrollbarVisible
{
get => scrollbarVisible;
set
{
scrollbarVisible = value;
scrollbarCache.Invalidate();
}
}
protected readonly ScrollbarContainer Scrollbar;
private bool scrollbarOverlapsContent = true;
/// <summary>
/// Whether the scrollbar overlaps the content or resides in its own padded space.
/// </summary>
public bool ScrollbarOverlapsContent
{
get => scrollbarOverlapsContent;
set
{
scrollbarOverlapsContent = value;
updatePadding();
}
}
/// <summary>
/// Size of available content (i.e. everything that can be scrolled to) in the scroll direction.
/// </summary>
public float AvailableContent => ScrollContent.DrawSize[ScrollDim];
/// <summary>
/// Size of the viewport in the scroll direction.
/// </summary>
public float DisplayableContent => ChildSize[ScrollDim];
/// <summary>
/// Controls the distance scrolled per unit of mouse scroll.
/// </summary>
public float ScrollDistance = 80;
/// <summary>
/// This limits how far out of clamping bounds we allow the target position to be at most.
/// Effectively, larger values result in bouncier behavior as the scroll boundaries are approached
/// with high velocity.
/// </summary>
public float ClampExtension = 500;
/// <summary>
/// This corresponds to the clamping force. A larger value means more aggressive clamping. Default is 0.012.
/// </summary>
private const double distance_decay_clamping = 0.012;
/// <summary>
/// Controls the rate with which the target position is approached after ending a drag. Default is 0.0035.
/// </summary>
public double DistanceDecayDrag = 0.0035;
/// <summary>
/// Controls the rate with which the target position is approached after scrolling. Default is 0.01
/// </summary>
public double DistanceDecayScroll = 0.01;
/// <summary>
/// Controls the rate with which the target position is approached after jumping to a specific location. Default is 0.01.
/// </summary>
public double DistanceDecayJump = 0.01;
/// <summary>
/// Controls the rate with which the target position is approached. It is automatically set after
/// dragging or scrolling.
/// </summary>
private double distanceDecay;
/// <summary>
/// The current scroll position.
/// </summary>
public float Current { get; private set; }
/// <summary>
/// The target scroll position which is exponentially approached by current via a rate of distanceDecay.
/// </summary>
protected float Target { get; private set; }
/// <summary>
/// The maximum distance that can be scrolled in the scroll direction.
/// </summary>
public float ScrollableExtent => Math.Max(AvailableContent - DisplayableContent, 0);
/// <summary>
/// The maximum distance that the scrollbar can move in the scroll direction.
/// </summary>
public float ScrollbarMovementExtent => Math.Max(DrawSize[ScrollDim] - Scrollbar.DrawSize[ScrollDim], 0);
/// <summary>
/// Clamp a value to the available scroll range.
/// </summary>
/// <param name="position">The value to clamp.</param>
/// <param name="extension">An extension value beyond the normal extent.</param>
/// <returns></returns>
protected float Clamp(float position, float extension = 0) => Math.Max(Math.Min(position, ScrollableExtent + extension), -extension);
protected override Container<T> Content => ScrollContent;
/// <summary>
/// Whether we are currently scrolled as far as possible into the scroll direction.
/// </summary>
/// <param name="lenience">How close to the extent we need to be.</param>
public bool IsScrolledToEnd(float lenience = Precision.FLOAT_EPSILON) => Precision.AlmostBigger(Target, ScrollableExtent, lenience);
/// <summary>
/// The container holding all children which are getting scrolled around.
/// </summary>
public Container<T> ScrollContent { get; }
protected virtual bool IsDragging { get; private set; }
// Whether this container should respond to keyboard scrolling (e.g. PageUp/PageDown).
public bool IsHandlingKeyboardScrolling
{
get
{
// Direct hover always qualifies.
if (IsHovered)
return true;
// Otherwise accept keyboard scrolling while the mouse is within our
// input-receiving area (e.g. when a child consumed the hover).
InputManager inputManager = GetContainingInputManager();
return inputManager != null && ReceivePositionalInputAt(inputManager.CurrentState.Mouse.Position);
}
}
/// <summary>
/// The direction in which scrolling is supported.
/// </summary>
protected readonly Direction ScrollDirection;
/// <summary>
/// The direction in which scrolling is supported, converted to an int for array index lookups.
/// </summary>
protected int ScrollDim => ScrollDirection == Direction.Horizontal ? 0 : 1;
/// <summary>
/// Creates a scroll container.
/// </summary>
/// <param name="scrollDirection">The direction in which should be scrolled. Can be vertical or horizontal. Default is vertical.</param>
protected ScrollContainer(Direction scrollDirection = Direction.Vertical)
{
ScrollDirection = scrollDirection;
// Clip children so content outside the viewport is not drawn.
Masking = true;
Axes scrollAxis = scrollDirection == Direction.Horizontal ? Axes.X : Axes.Y;
AddRangeInternal(new Drawable[]
{
// Content auto-sizes along the scroll axis and stretches across the other axis.
ScrollContent = new Container<T>
{
RelativeSizeAxes = Axes.Both & ~scrollAxis,
AutoSizeAxes = scrollAxis,
},
Scrollbar = CreateScrollbar(scrollDirection)
});
// The scrollbar starts hidden; it is shown once there is content to scroll.
Scrollbar.Hide();
Scrollbar.Dragged = onScrollbarMovement;
ScrollbarAnchor = scrollDirection == Direction.Vertical ? Anchor.TopRight : Anchor.BottomLeft;
}
private float lastUpdateDisplayableContent = -1;
private float lastAvailableContent = -1;
// Invalidates the scrollbar only when the content or viewport size actually changed,
// avoiding needless transform churn on every update frame.
private void updateSize()
{
    float available = AvailableContent;
    float displayable = DisplayableContent;

    if (lastAvailableContent == available && lastUpdateDisplayableContent == displayable)
        return;

    lastAvailableContent = available;
    lastUpdateDisplayableContent = displayable;
    scrollbarCache.Invalidate();
}
private readonly Cached scrollbarCache = new Cached();
// Recomputes the content padding which reserves space for a non-overlapping scrollbar.
private void updatePadding()
{
    // No padding needed when the bar floats over the content, or nothing scrolls.
    if (scrollbarOverlapsContent || AvailableContent <= DisplayableContent)
    {
        ScrollContent.Padding = new MarginPadding();
        return;
    }

    bool anchoredTopLeft = ScrollbarAnchor == Anchor.TopLeft;

    // Pad the edge the scrollbar occupies by its size plus its margin on that side.
    ScrollContent.Padding = ScrollDirection == Direction.Vertical
        ? (anchoredTopLeft
            ? new MarginPadding { Left = Scrollbar.Width + Scrollbar.Margin.Left }
            : new MarginPadding { Right = Scrollbar.Width + Scrollbar.Margin.Right })
        : (anchoredTopLeft
            ? new MarginPadding { Top = Scrollbar.Height + Scrollbar.Margin.Top }
            : new MarginPadding { Bottom = Scrollbar.Height + Scrollbar.Margin.Bottom });
}
protected override bool OnDragStart(DragStartEvent e)
{
    // Only begin a drag with the left button, when not already dragging,
    // and when there is alive content to scroll.
    bool accepted = !IsDragging && e.Button == MouseButton.Left && Content.AliveInternalChildren.Count != 0;

    if (!accepted)
        return false;

    // Reset velocity tracking for the new drag.
    lastDragTime = Time.Current;
    averageDragTime = 0;
    averageDragDelta = 0;

    IsDragging = true;
    dragButtonManager = GetContainingInputManager().GetButtonEventManagerFor(e.Button);

    return true;
}
protected override bool OnKeyDown(KeyDownEvent e)
{
    // Page up/down scrolls by one viewport, but never mid-drag.
    if (IsHandlingKeyboardScrolling && !IsDragging)
    {
        if (e.Key == Key.PageUp)
        {
            OnUserScroll(Target - DisplayableContent);
            return true;
        }

        if (e.Key == Key.PageDown)
        {
            OnUserScroll(Target + DisplayableContent);
            return true;
        }
    }

    return base.OnKeyDown(e);
}
protected override bool OnMouseDown(MouseDownEvent e)
{
    if (IsDragging)
        return false;

    if (e.Button != MouseButton.Left)
        return false;

    // Halt any in-flight smooth scroll: continue from where we currently are scrolled to.
    Target = Current;
    return true;
}
// We keep track of this because input events may happen at different intervals than update frames
// and we are interested in the time difference between drag _input_ events.
private double lastDragTime;
// These keep track of a sliding average (w.r.t. time) of the time between drag events
// and the delta of drag events. Both of these moving averages are decayed at the same
// rate and thus the velocity remains constant across time. The overall magnitude
// of averageDragTime and averageDragDelta simply decreases such that more recent movements
// have a larger weight.
private double averageDragTime;
private double averageDragDelta;
// Button event manager of the active drag's button; provides ClickDragDistance.
private MouseButtonEventManager dragButtonManager;
// Set once a drag has moved far enough along the scroll axis to suppress a click.
private bool dragBlocksClick;
public override bool DragBlocksClick => dragBlocksClick;
protected override void OnDrag(DragEvent e)
{
Trace.Assert(IsDragging, "We should never receive OnDrag if we are not dragging.");
double currentTime = Time.Current;
double timeDelta = currentTime - lastDragTime;
// Exponentially decay the running averages so recent motion dominates the
// velocity estimate used for the flick on drag end.
double decay = Math.Pow(0.95, timeDelta);
averageDragTime = averageDragTime * decay + timeDelta;
averageDragDelta = averageDragDelta * decay - e.Delta[ScrollDim];
lastDragTime = currentTime;
// Mouse movement along the scroll axis in local space; negated because dragging
// content downwards scrolls upwards.
Vector2 childDelta = ToLocalSpace(e.ScreenSpaceMousePosition) - ToLocalSpace(e.ScreenSpaceLastMousePosition);
float scrollOffset = -childDelta[ScrollDim];
float clampedScrollOffset = Clamp(Target + scrollOffset) - Clamp(Target);
Debug.Assert(Precision.AlmostBigger(Math.Abs(scrollOffset), clampedScrollOffset * Math.Sign(scrollOffset)));
// If we are dragging past the extent of the scrollable area, half the offset
// such that the user can feel it.
scrollOffset = clampedScrollOffset + (scrollOffset - clampedScrollOffset) / 2;
// similar calculation to what is already done in MouseButtonEventManager.HandlePositionChange
// handles the case where a drag was triggered on an axis we are not interested in.
// can be removed if/when drag events are split out per axis or contain direction information.
dragBlocksClick |= Math.Abs(e.MouseDownPosition[ScrollDim] - e.MousePosition[ScrollDim]) > dragButtonManager.ClickDragDistance;
scrollByOffset(scrollOffset, false);
}
protected override void OnDragEnd(DragEndEvent e)
{
Trace.Assert(IsDragging, "We should never receive OnDragEnd if we are not dragging.");
// Reset per-drag state before computing the flick.
dragBlocksClick = false;
dragButtonManager = null;
IsDragging = false;
// No timing information was accumulated during the drag; nothing to flick.
if (averageDragTime <= 0.0)
return;
double velocity = averageDragDelta / averageDragTime;
// Detect whether we halted at the end of the drag and in fact should _not_
// perform a flick event.
const double velocity_cutoff = 0.1;
if (Math.Abs(Math.Pow(0.95, Time.Current - lastDragTime) * velocity) < velocity_cutoff)
velocity = 0;
// Differentiate f(t) = distance * (1 - exp(-t)) w.r.t. "t" to obtain
// velocity w.r.t. time. Then rearrange to solve for distance given velocity.
double distance = velocity / (1 - Math.Exp(-DistanceDecayDrag));
scrollByOffset((float)distance, true, DistanceDecayDrag);
}
protected override bool OnScroll(ScrollEvent e)
{
    // Nothing to scroll when there is no alive content.
    if (Content.AliveInternalChildren.Count == 0)
        return false;

    Vector2 delta = e.ScrollDelta;

    // Use the horizontal wheel axis when scrolling horizontally and it carries movement;
    // otherwise fall back to the vertical axis.
    float axisDelta = ScrollDirection == Direction.Horizontal && delta.X != 0 ? delta.X : delta.Y;

    bool precise = e.IsPrecise;
    scrollByOffset((precise ? 10 : 80) * -axisDelta, true, precise ? 0.05 : DistanceDecayScroll);
    return true;
}
// Invoked as the scrollbar is dragged: map the scrollbar position back to a content
// position and jump there without animation.
private void onScrollbarMovement(float value)
{
    float contentPosition = fromScrollbarPosition(value);
    scrollTo(Clamp(contentPosition), false);
}
/// <summary>
/// Immediately shifts both the current and the target scroll position by the given amount.
/// </summary>
/// <param name="offset">The scroll offset to apply.</param>
public void OffsetScrollPosition(float offset)
{
    Target = Target + offset;
    Current = Current + offset;
}
// Scrolls by an offset relative to the current target, routed through OnUserScroll so
// subclasses can intercept user-initiated scrolls.
private void scrollByOffset(float value, bool animated, double distanceDecay = float.PositiveInfinity)
{
    OnUserScroll(Target + value, animated, distanceDecay);
}
/// <summary>
/// Scroll to the start of available content.
/// </summary>
/// <param name="animated">Whether to animate the movement.</param>
/// <param name="allowDuringDrag">Whether we should interrupt a user's active drag.</param>
public void ScrollToStart(bool animated = true, bool allowDuringDrag = false)
{
    // Don't fight an in-progress drag unless explicitly allowed.
    if (IsDragging && !allowDuringDrag)
        return;

    scrollTo(0, animated, DistanceDecayJump);
}
/// <summary>
/// Scroll to the end of available content.
/// </summary>
/// <param name="animated">Whether to animate the movement.</param>
/// <param name="allowDuringDrag">Whether we should interrupt a user's active drag.</param>
public void ScrollToEnd(bool animated = true, bool allowDuringDrag = false)
{
    // Don't fight an in-progress drag unless explicitly allowed.
    if (IsDragging && !allowDuringDrag)
        return;

    scrollTo(ScrollableExtent, animated, DistanceDecayJump);
}
/// <summary>
/// Scrolls to a new position relative to the current scroll target.
/// </summary>
/// <param name="offset">The amount by which we should scroll.</param>
/// <param name="animated">Whether to animate the movement.</param>
public void ScrollBy(float offset, bool animated = true)
{
    scrollTo(Target + offset, animated);
}
/// <summary>
/// Handle a scroll to an absolute position originating from user input.
/// Subclasses may override this to intercept or modify user scrolls.
/// </summary>
/// <param name="value">The position to scroll to.</param>
/// <param name="animated">Whether to animate the movement.</param>
/// <param name="distanceDecay">Controls the rate with which the target position is approached after jumping to a specific location. Default is <see cref="DistanceDecayJump"/>.</param>
protected virtual void OnUserScroll(float value, bool animated = true, double? distanceDecay = null)
{
    ScrollTo(value, animated, distanceDecay);
}
/// <summary>
/// Scrolls to an absolute position.
/// </summary>
/// <param name="value">The position to scroll to.</param>
/// <param name="animated">Whether to animate the movement.</param>
/// <param name="distanceDecay">Controls the rate with which the target position is approached after jumping to a specific location. Default is <see cref="DistanceDecayJump"/>.</param>
public void ScrollTo(float value, bool animated = true, double? distanceDecay = null)
{
    double decay = distanceDecay ?? DistanceDecayJump;
    scrollTo(value, animated, decay);
}
// Core scroll implementation: clamps the requested position (allowing ClampExtension
// of overshoot) and either animates towards it or snaps immediately.
private void scrollTo(float value, bool animated, double distanceDecay = float.PositiveInfinity)
{
    Target = Clamp(value, ClampExtension);

    if (!animated)
    {
        Current = Target;
        return;
    }

    this.distanceDecay = distanceDecay;
}
/// <summary>
/// Scrolls a <see cref="Drawable"/> to the top.
/// </summary>
/// <param name="d">The <see cref="Drawable"/> to scroll to.</param>
/// <param name="animated">Whether to animate the movement.</param>
public void ScrollTo(Drawable d, bool animated = true)
{
    ScrollTo(GetChildPosInContent(d), animated);
}
/// <summary>
/// Scrolls a <see cref="Drawable"/> into view.
/// </summary>
/// <param name="d">The <see cref="Drawable"/> to scroll into view.</param>
/// <param name="animated">Whether to animate the movement.</param>
public void ScrollIntoView(Drawable d, bool animated = true)
{
    // Positions of the child's two edges along the scroll axis, in content space.
    float posA = GetChildPosInContent(d);
    float posB = GetChildPosInContent(d, d.DrawSize);

    float startPos = Math.Min(posA, posB);
    float endPos = Math.Max(posA, posB);

    bool tooLargeToFit = d.DrawSize[ScrollDim] > DisplayableContent;

    // Align to the start edge when it is above the viewport, or when the child is taller
    // than the viewport; otherwise align to the end edge if it is below the viewport.
    if (startPos < Current || (startPos > Current && tooLargeToFit))
        ScrollTo(startPos, animated);
    else if (endPos > Current + DisplayableContent)
        ScrollTo(endPos - DisplayableContent, animated);
}
/// <summary>
/// Determines the position of a child in the content.
/// </summary>
/// <param name="d">The child to get the position from.</param>
/// <param name="offset">Positional offset in the child's space.</param>
/// <returns>The position of the child along the scroll axis.</returns>
public float GetChildPosInContent(Drawable d, Vector2 offset)
{
    Vector2 posInContent = d.ToSpaceOfOtherDrawable(offset, ScrollContent);
    return posInContent[ScrollDim];
}
/// <summary>
/// Determines the position of a child in the content.
/// </summary>
/// <param name="d">The child to get the position from.</param>
/// <returns>The position of the child along the scroll axis.</returns>
public float GetChildPosInContent(Drawable d)
{
    return GetChildPosInContent(d, Vector2.Zero);
}
/// <summary>
/// Advances <see cref="Current"/> towards <see cref="Target"/> each frame via exponential
/// interpolation, applying a clamping force when the position is out of bounds.
/// </summary>
private void updatePosition()
{
double localDistanceDecay = distanceDecay;
// If we are not currently dragging the content, and we have scrolled out of bounds,
// then we should handle the clamping force. Note, that if the target is _within_
// acceptable bounds, then we do not need special handling of the clamping force, as
// we will naturally scroll back into acceptable bounds.
if (!IsDragging && Current != Clamp(Current) && Target != Clamp(Target, -0.01f))
{
// Firstly, we want to limit how far out the target may go to limit overly bouncy
// behaviour with extreme scroll velocities.
Target = Clamp(Target, ClampExtension);
// Secondly, we would like to quickly approach the target while we are out of bounds.
// This is simulating a "strong" clamping force towards the target.
if (Current < Target && Target < 0 || Current > Target && Target > ScrollableExtent)
localDistanceDecay = distance_decay_clamping * 2;
// Lastly, we gradually nudge the target towards valid bounds.
Target = (float)Interpolation.Lerp(Clamp(Target), Target, Math.Exp(-distance_decay_clamping * Time.Elapsed));
// Snap the target exactly onto the bound once it is close enough, to terminate the nudge.
float clampedTarget = Clamp(Target);
if (Precision.AlmostEquals(clampedTarget, Target))
Target = clampedTarget;
}
// Exponential interpolation between the target and our current scroll position.
Current = (float)Interpolation.Lerp(Target, Current, Math.Exp(-localDistanceDecay * Time.Elapsed));
// This prevents us from entering the de-normalized range of floating point numbers when approaching target closely.
if (Precision.AlmostEquals(Current, Target))
Current = Target;
}
protected override void UpdateAfterChildren()
{
base.UpdateAfterChildren();
// Refresh size bookkeeping and advance the scroll position before laying out the scrollbar.
updateSize();
updatePosition();
// Scrollbar size/visibility is refreshed lazily, only when the cache has been invalidated.
if (!scrollbarCache.IsValid)
{
var size = ScrollDirection == Direction.Horizontal ? DrawWidth : DrawHeight;
if (size > 0)
Scrollbar.ResizeTo(Math.Clamp(AvailableContent > 0 ? DisplayableContent / AvailableContent : 0, Math.Min(Scrollbar.MinimumDimSize / size, 1), 1), 200, Easing.OutQuint);
Scrollbar.FadeTo(ScrollbarVisible && AvailableContent - 1 > DisplayableContent ? 1 : 0, 200);
updatePadding();
scrollbarCache.Validate();
}
// Position the scrollbar and the content along the scrolling axis.
if (ScrollDirection == Direction.Horizontal)
{
Scrollbar.X = toScrollbarPosition(Current);
ScrollContent.X = -Current + ScrollableExtent * ScrollContent.RelativeAnchorPosition.X;
}
else
{
Scrollbar.Y = toScrollbarPosition(Current);
ScrollContent.Y = -Current + ScrollableExtent * ScrollContent.RelativeAnchorPosition.Y;
}
}
/// <summary>
/// Converts a scroll position to a scrollbar position.
/// </summary>
/// <param name="scrollPosition">The absolute scroll position (e.g. <see cref="Current"/>).</param>
/// <returns>The scrollbar position.</returns>
private float toScrollbarPosition(float scrollPosition)
{
    // Guard against division by a (near-)zero extent when there is nothing to scroll.
    if (Precision.AlmostEquals(0, ScrollableExtent))
        return 0;

    float progress = scrollPosition / ScrollableExtent;
    return ScrollbarMovementExtent * progress;
}
/// <summary>
/// Converts a scrollbar position to a scroll position.
/// </summary>
/// <param name="scrollbarPosition">The scrollbar position.</param>
/// <returns>The absolute scroll position.</returns>
private float fromScrollbarPosition(float scrollbarPosition)
{
    // Guard against division by a (near-)zero extent when the scrollbar cannot move.
    if (Precision.AlmostEquals(0, ScrollbarMovementExtent))
        return 0;

    float progress = scrollbarPosition / ScrollbarMovementExtent;
    return ScrollableExtent * progress;
}
/// <summary>
/// Creates the scrollbar for this <see cref="ScrollContainer{T}"/>.
/// </summary>
/// <param name="direction">The scrolling direction.</param>
/// <returns>The scrollbar.</returns>
protected abstract ScrollbarContainer CreateScrollbar(Direction direction);
protected internal abstract class ScrollbarContainer : Container
{
// Offset between the mouse position and the scrollbar's own position at drag start,
// so that dragging does not snap the scrollbar's origin onto the cursor.
private float dragOffset;
/// <summary>
/// Invoked with the desired scrollbar position along the scrolling axis as the user
/// presses or drags the scrollbar.
/// </summary>
internal Action<float> Dragged;
// The axis along which this scrollbar moves.
protected readonly Direction ScrollDirection;
/// <summary>
/// The minimum size of this <see cref="ScrollbarContainer"/>. Defaults to the size in the non-scrolling direction.
/// </summary>
protected internal virtual float MinimumDimSize => Size[ScrollDirection == Direction.Vertical ? 0 : 1];
protected ScrollbarContainer(Direction direction)
{
ScrollDirection = direction;
// Fill the parent along the scrolling axis; size across it is left to subclasses.
RelativeSizeAxes = direction == Direction.Horizontal ? Axes.X : Axes.Y;
}
/// <summary>
/// Resizes the scrollbar along the scrolling axis.
/// </summary>
public abstract void ResizeTo(float val, int duration = 0, Easing easing = Easing.None);
// Handle clicks so they are consumed by the scrollbar rather than passed on.
protected override bool OnClick(ClickEvent e) => true;
protected override bool OnDragStart(DragStartEvent e)
{
if (e.Button != MouseButton.Left) return false;
// Remember where within the scrollbar the drag started.
dragOffset = e.MousePosition[(int)ScrollDirection] - Position[(int)ScrollDirection];
return true;
}
protected override bool OnMouseDown(MouseDownEvent e)
{
if (e.Button != MouseButton.Left) return false;
// A plain press reports the scrollbar's current position immediately.
dragOffset = Position[(int)ScrollDirection];
Dragged?.Invoke(dragOffset);
return true;
}
protected override void OnDrag(DragEvent e)
{
// Report the new desired position, preserving the in-scrollbar grab offset.
Dragged?.Invoke(e.MousePosition[(int)ScrollDirection] - dragOffset);
}
}
// Handles platform-level keyboard actions (Home/End equivalents) when keyboard
// scrolling is enabled. Returns true when the action was consumed.
public bool OnPressed(PlatformAction action)
{
    if (!IsHandlingKeyboardScrolling)
        return false;

    switch (action.ActionType)
    {
        case PlatformActionType.LineStart:
            ScrollToStart();
            return true;

        case PlatformActionType.LineEnd:
            ScrollToEnd();
            return true;
    }

    return false;
}
public void OnReleased(PlatformAction action)
{
// Intentionally empty: no handling is required when a platform action key is released.
}
}
}
| |
/*
* @(#)Coefficients.cs 3.0.0 2016-05-07
*
* You may use this software under the condition of "Simplified BSD License"
*
* Copyright 2010-2016 MARIUSZ GROMADA. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY <MARIUSZ GROMADA> ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of MARIUSZ GROMADA.
*
* Some parts of the Coefficients class were adopted from Math.NET Numerics project
* Copyright (c) 2002-2015 Math.NET http://numerics.mathdotnet.com/
* http://numerics.mathdotnet.com/License.html
*
* If you have any questions/bugs feel free to contact:
*
* Mariusz Gromada
* mariuszgromada.org@gmail.com
* http://mathparser.org
* http://mathspace.pl
* http://janetsudoku.mariuszgromada.org
* http://github.com/mariuszgromada/MathParser.org-mXparser
* http://mariuszgromada.github.io/MathParser.org-mXparser
* http://mxparser.sourceforge.net
* http://bitbucket.org/mariuszgromada/mxparser
* http://mxparser.codeplex.com
* http://github.com/mariuszgromada/Janet-Sudoku
* http://janetsudoku.codeplex.com
* http://sourceforge.net/projects/janetsudoku
* http://bitbucket.org/mariuszgromada/janet-sudoku
* http://github.com/mariuszgromada/MathParser.org-mXparser
*
* Asked if he believes in one God, a mathematician answered:
* "Yes, up to isomorphism."
*/
namespace org.mariuszgromada.math.mxparser.mathcollection {
/**
* Coefficients - various coefficients supporting numerical computation.
*
* @author <b>Mariusz Gromada</b><br>
* <a href="mailto:mariuszgromada.org@gmail.com">mariuszgromada.org@gmail.com</a><br>
* <a href="http://mathspace.pl" target="_blank">MathSpace.pl</a><br>
* <a href="http://mathparser.org" target="_blank">MathParser.org - mXparser project page</a><br>
* <a href="http://github.com/mariuszgromada/MathParser.org-mXparser" target="_blank">mXparser on GitHub</a><br>
* <a href="http://mxparser.sourceforge.net" target="_blank">mXparser on SourceForge</a><br>
* <a href="http://bitbucket.org/mariuszgromada/mxparser" target="_blank">mXparser on Bitbucket</a><br>
* <a href="http://mxparser.codeplex.com" target="_blank">mXparser on CodePlex</a><br>
* <a href="http://janetsudoku.mariuszgromada.org" target="_blank">Janet Sudoku - project web page</a><br>
* <a href="http://github.com/mariuszgromada/Janet-Sudoku" target="_blank">Janet Sudoku on GitHub</a><br>
* <a href="http://janetsudoku.codeplex.com" target="_blank">Janet Sudoku on CodePlex</a><br>
* <a href="http://sourceforge.net/projects/janetsudoku" target="_blank">Janet Sudoku on SourceForge</a><br>
* <a href="http://bitbucket.org/mariuszgromada/janet-sudoku" target="_blank">Janet Sudoku on BitBucket</a><br>
*
* @version 3.0.0
*/
internal sealed class Coefficients {
// NOTE(review): the "ervInv*" field prefix (rather than "erfInv*") appears to be a
// historical typo in the identifiers; preserved as-is since other code references them.
/*
* --------------------------------------
* COEFFICIENTS FOR METHOD erfImp
* --------------------------------------
*/
/**
* Polynomial coefficients for a numerator of erfImp
* calculation for erf(x) in the interval [1e-10, 0.5].
*/
internal static double[] erfImpAn = { 0.00337916709551257388990745, -0.00073695653048167948530905, -0.374732337392919607868241, 0.0817442448733587196071743, -0.0421089319936548595203468, 0.0070165709512095756344528, -0.00495091255982435110337458, 0.000871646599037922480317225 };
/**
* Polynomial coefficients for a denominator of erfImp
* calculation for erf(x) in the interval [1e-10, 0.5].
*/
internal static double[] erfImpAd = { 1, -0.218088218087924645390535, 0.412542972725442099083918, -0.0841891147873106755410271, 0.0655338856400241519690695, -0.0120019604454941768171266, 0.00408165558926174048329689, -0.000615900721557769691924509 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [0.5, 0.75].
*/
internal static double[] erfImpBn = { -0.0361790390718262471360258, 0.292251883444882683221149, 0.281447041797604512774415, 0.125610208862766947294894, 0.0274135028268930549240776, 0.00250839672168065762786937 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [0.5, 0.75].
*/
internal static double[] erfImpBd = { 1, 1.8545005897903486499845, 1.43575803037831418074962, 0.582827658753036572454135, 0.124810476932949746447682, 0.0113724176546353285778481 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [0.75, 1.25].
*/
internal static double[] erfImpCn = { -0.0397876892611136856954425, 0.153165212467878293257683, 0.191260295600936245503129, 0.10276327061989304213645, 0.029637090615738836726027, 0.0046093486780275489468812, 0.000307607820348680180548455 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [0.75, 1.25].
*/
internal static double[] erfImpCd = { 1, 1.95520072987627704987886, 1.64762317199384860109595, 0.768238607022126250082483, 0.209793185936509782784315, 0.0319569316899913392596356, 0.00213363160895785378615014 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [1.25, 2.25].
*/
internal static double[] erfImpDn = { -0.0300838560557949717328341, 0.0538578829844454508530552, 0.0726211541651914182692959, 0.0367628469888049348429018, 0.00964629015572527529605267, 0.00133453480075291076745275, 0.778087599782504251917881e-4 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [1.25, 2.25].
*/
internal static double[] erfImpDd = { 1, 1.75967098147167528287343, 1.32883571437961120556307, 0.552528596508757581287907, 0.133793056941332861912279, 0.0179509645176280768640766, 0.00104712440019937356634038, -0.106640381820357337177643e-7 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [2.25, 3.5].
*/
internal static double[] erfImpEn = { -0.0117907570137227847827732, 0.014262132090538809896674, 0.0202234435902960820020765, 0.00930668299990432009042239, 0.00213357802422065994322516, 0.00025022987386460102395382, 0.120534912219588189822126e-4 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [2.25, 3.5].
*/
internal static double[] erfImpEd = { 1, 1.50376225203620482047419, 0.965397786204462896346934, 0.339265230476796681555511, 0.0689740649541569716897427, 0.00771060262491768307365526, 0.000371421101531069302990367 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [3.5, 5.25].
*/
internal static double[] erfImpFn = { -0.00546954795538729307482955, 0.00404190278731707110245394, 0.0054963369553161170521356, 0.00212616472603945399437862, 0.000394984014495083900689956, 0.365565477064442377259271e-4, 0.135485897109932323253786e-5 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [3.5, 5.25].
*/
internal static double[] erfImpFd = { 1, 1.21019697773630784832251, 0.620914668221143886601045, 0.173038430661142762569515, 0.0276550813773432047594539, 0.00240625974424309709745382, 0.891811817251336577241006e-4, -0.465528836283382684461025e-11 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [5.25, 8].
*/
internal static double[] erfImpGn = { -0.00270722535905778347999196, 0.0013187563425029400461378, 0.00119925933261002333923989, 0.00027849619811344664248235, 0.267822988218331849989363e-4, 0.923043672315028197865066e-6 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [5.25, 8].
*/
internal static double[] erfImpGd = { 1, 0.814632808543141591118279, 0.268901665856299542168425, 0.0449877216103041118694989, 0.00381759663320248459168994, 0.000131571897888596914350697, 0.404815359675764138445257e-11 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [8, 11.5].
*/
internal static double[] erfImpHn = { -0.00109946720691742196814323, 0.000406425442750422675169153, 0.000274499489416900707787024, 0.465293770646659383436343e-4, 0.320955425395767463401993e-5, 0.778286018145020892261936e-7 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [8, 11.5].
*/
internal static double[] erfImpHd = { 1, 0.588173710611846046373373, 0.139363331289409746077541, 0.0166329340417083678763028, 0.00100023921310234908642639, 0.24254837521587225125068e-4 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [11.5, 17].
*/
internal static double[] erfImpIn = { -0.00056907993601094962855594, 0.000169498540373762264416984, 0.518472354581100890120501e-4, 0.382819312231928859704678e-5, 0.824989931281894431781794e-7 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [11.5, 17].
*/
internal static double[] erfImpId = { 1, 0.339637250051139347430323, 0.043472647870310663055044, 0.00248549335224637114641629, 0.535633305337152900549536e-4, -0.117490944405459578783846e-12 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [17, 24].
*/
internal static double[] erfImpJn = { -0.000241313599483991337479091, 0.574224975202501512365975e-4, 0.115998962927383778460557e-4, 0.581762134402593739370875e-6, 0.853971555085673614607418e-8 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [17, 24].
*/
internal static double[] erfImpJd = { 1, 0.233044138299687841018015, 0.0204186940546440312625597, 0.000797185647564398289151125, 0.117019281670172327758019e-4 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [24, 38].
*/
internal static double[] erfImpKn = { -0.000146674699277760365803642, 0.162666552112280519955647e-4, 0.269116248509165239294897e-5, 0.979584479468091935086972e-7, 0.101994647625723465722285e-8 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [24, 38].
*/
internal static double[] erfImpKd = { 1, 0.165907812944847226546036, 0.0103361716191505884359634, 0.000286593026373868366935721, 0.298401570840900340874568e-5 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [38, 60].
*/
internal static double[] erfImpLn = { -0.583905797629771786720406e-4, 0.412510325105496173512992e-5, 0.431790922420250949096906e-6, 0.993365155590013193345569e-8, 0.653480510020104699270084e-10 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [38, 60].
*/
internal static double[] erfImpLd = { 1, 0.105077086072039915406159, 0.00414278428675475620830226, 0.726338754644523769144108e-4, 0.477818471047398785369849e-6 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [60, 85].
*/
internal static double[] erfImpMn = { -0.196457797609229579459841e-4, 0.157243887666800692441195e-5, 0.543902511192700878690335e-7, 0.317472492369117710852685e-9 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [60, 85].
*/
internal static double[] erfImpMd = { 1, 0.052803989240957632204885, 0.000926876069151753290378112, 0.541011723226630257077328e-5, 0.535093845803642394908747e-15 };
/**
* Polynomial coefficients for a numerator in erfImp
* calculation for erfc(x) in the interval [85, 110].
*/
internal static double[] erfImpNn = { -0.789224703978722689089794e-5, 0.622088451660986955124162e-6, 0.145728445676882396797184e-7, 0.603715505542715364529243e-10 };
/**
* Polynomial coefficients for a denominator in erfImp
* calculation for erfc(x) in the interval [85, 110].
*/
internal static double[] erfImpNd = { 1, 0.0375328846356293715248719, 0.000467919535974625308126054, 0.193847039275845656900547e-5 };
/*
*
* --------------------------------------
* COEFFICIENTS FOR METHOD erfInvImp
* --------------------------------------
*/
/**
* Polynomial coefficients for a numerator of erfInvImp
* calculation for erf^-1(z) in the interval [0, 0.5].
*/
internal static double[] ervInvImpAn = { -0.000508781949658280665617, -0.00836874819741736770379, 0.0334806625409744615033, -0.0126926147662974029034, -0.0365637971411762664006, 0.0219878681111168899165, 0.00822687874676915743155, -0.00538772965071242932965 };
/**
* Polynomial coefficients for a denominator of erfInvImp
* calculation for erf^-1(z) in the interval [0, 0.5].
*/
internal static double[] ervInvImpAd = { 1, -0.970005043303290640362, -1.56574558234175846809, 1.56221558398423026363, 0.662328840472002992063, -0.71228902341542847553, -0.0527396382340099713954, 0.0795283687341571680018, -0.00233393759374190016776, 0.000886216390456424707504 };
/**
* Polynomial coefficients for a numerator of erfInvImp
* calculation for erf^-1(z) in the interval [0.5, 0.75].
*/
internal static double[] ervInvImpBn = { -0.202433508355938759655, 0.105264680699391713268, 8.37050328343119927838, 17.6447298408374015486, -18.8510648058714251895, -44.6382324441786960818, 17.445385985570866523, 21.1294655448340526258, -3.67192254707729348546 };
/**
* Polynomial coefficients for a denominator of erfInvImp
* calculation for erf^-1(z) in the interval [0.5, 0.75].
*/
internal static double[] ervInvImpBd = { 1, 6.24264124854247537712, 3.9713437953343869095, -28.6608180499800029974, -20.1432634680485188801, 48.5609213108739935468, 10.8268667355460159008, -22.6436933413139721736, 1.72114765761200282724 };
/**
* Polynomial coefficients for a numerator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x less than 3.
*/
internal static double[] ervInvImpCn = { -0.131102781679951906451, -0.163794047193317060787, 0.117030156341995252019, 0.387079738972604337464, 0.337785538912035898924, 0.142869534408157156766, 0.0290157910005329060432, 0.00214558995388805277169, -0.679465575181126350155e-6, 0.285225331782217055858e-7, -0.681149956853776992068e-9 };
/**
* Polynomial coefficients for a denominator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x less than 3.
*/
internal static double[] ervInvImpCd = { 1, 3.46625407242567245975, 5.38168345707006855425, 4.77846592945843778382, 2.59301921623620271374, 0.848854343457902036425, 0.152264338295331783612, 0.01105924229346489121 };
/**
* Polynomial coefficients for a numerator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x between 3 and 6.
*/
internal static double[] ervInvImpDn = { -0.0350353787183177984712, -0.00222426529213447927281, 0.0185573306514231072324, 0.00950804701325919603619, 0.00187123492819559223345, 0.000157544617424960554631, 0.460469890584317994083e-5, -0.230404776911882601748e-9, 0.266339227425782031962e-11 };
/**
* Polynomial coefficients for a denominator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x between 3 and 6.
*/
internal static double[] ervInvImpDd = { 1, 1.3653349817554063097, 0.762059164553623404043, 0.220091105764131249824, 0.0341589143670947727934, 0.00263861676657015992959, 0.764675292302794483503e-4 };
/**
* Polynomial coefficients for a numerator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x between 6 and 18.
*/
internal static double[] ervInvImpEn = { -0.0167431005076633737133, -0.00112951438745580278863, 0.00105628862152492910091, 0.000209386317487588078668, 0.149624783758342370182e-4, 0.449696789927706453732e-6, 0.462596163522878599135e-8, -0.281128735628831791805e-13, 0.99055709973310326855e-16 };
/**
* Polynomial coefficients for a denominator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x between 6 and 18.
*/
internal static double[] ervInvImpEd = { 1, 0.591429344886417493481, 0.138151865749083321638, 0.0160746087093676504695, 0.000964011807005165528527, 0.275335474764726041141e-4, 0.282243172016108031869e-6 };
/**
* Polynomial coefficients for a numerator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x between 18 and 44.
*/
internal static double[] ervInvImpFn = { -0.0024978212791898131227, -0.779190719229053954292e-5, 0.254723037413027451751e-4, 0.162397777342510920873e-5, 0.396341011304801168516e-7, 0.411632831190944208473e-9, 0.145596286718675035587e-11, -0.116765012397184275695e-17 };
/**
* Polynomial coefficients for a denominator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x between 18 and 44.
*/
internal static double[] ervInvImpFd = { 1, 0.207123112214422517181, 0.0169410838120975906478, 0.000690538265622684595676, 0.145007359818232637924e-4, 0.144437756628144157666e-6, 0.509761276599778486139e-9 };
/**
* Polynomial coefficients for a numerator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x greater than 44.
*/
internal static double[] ervInvImpGn = { -0.000539042911019078575891, -0.28398759004727721098e-6, 0.899465114892291446442e-6, 0.229345859265920864296e-7, 0.225561444863500149219e-9, 0.947846627503022684216e-12, 0.135880130108924861008e-14, -0.348890393399948882918e-21 };
/**
* Polynomial coefficients for a denominator of erfInvImp
* calculation for erf^-1(z) in the interval [0.75, 1] with x greater than 44.
*/
internal static double[] ervInvImpGd = { 1, 0.0845746234001899436914, 0.00282092984726264681981, 0.468292921940894236786e-4, 0.399968812193862100054e-6, 0.161809290887904476097e-8, 0.231558608310259605225e-11 };
/**
* Supporting coefficients used in the Exponential integral
* function Ei(x) calculation.
*/
internal static double[] EI = {
1.915047433355013959531e2, 4.403798995348382689974e2,
1.037878290717089587658e3, 2.492228976241877759138e3,
6.071406374098611507965e3, 1.495953266639752885229e4,
3.719768849068903560439e4, 9.319251363396537129882e4,
2.349558524907683035782e5, 5.955609986708370018502e5,
1.516637894042516884433e6, 3.877904330597443502996e6,
9.950907251046844760026e6, 2.561565266405658882048e7,
6.612718635548492136250e7, 1.711446713003636684975e8,
4.439663698302712208698e8, 1.154115391849182948287e9,
3.005950906525548689841e9, 7.842940991898186370453e9,
2.049649711988081236484e10, 5.364511859231469415605e10,
1.405991957584069047340e11, 3.689732094072741970640e11,
9.694555759683939661662e11, 2.550043566357786926147e12,
6.714640184076497558707e12, 1.769803724411626854310e13,
4.669055014466159544500e13, 1.232852079912097685431e14,
3.257988998672263996790e14, 8.616388199965786544948e14,
2.280446200301902595341e15, 6.039718263611241578359e15,
1.600664914324504111070e16, 4.244796092136850759368e16,
1.126348290166966760275e17, 2.990444718632336675058e17,
7.943916035704453771510e17, 2.111342388647824195000e18,
5.614329680810343111535e18, 1.493630213112993142255e19,
3.975442747903744836007e19, 1.058563689713169096306e20
};
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Apis.WorkflowExecutions.v1beta
{
/// <summary>The WorkflowExecutions Service.</summary>
public class WorkflowExecutionsService : Google.Apis.Services.BaseClientService
{
/// <summary>The API version.</summary>
public const string Version = "v1beta";
/// <summary>The discovery version used to generate this service.</summary>
// Generated code: mirrors the discovery document this client was generated from.
public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0;
/// <summary>Constructs a new service.</summary>
public WorkflowExecutionsService() : this(new Google.Apis.Services.BaseClientService.Initializer())
{
}
/// <summary>Constructs a new service.</summary>
/// <param name="initializer">The service initializer.</param>
public WorkflowExecutionsService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer)
{
Projects = new ProjectsResource(this);
}
/// <summary>Gets the service supported features.</summary>
public override System.Collections.Generic.IList<string> Features => new string[0];
/// <summary>Gets the service name.</summary>
public override string Name => "workflowexecutions";
/// <summary>Gets the service base URI.</summary>
public override string BaseUri =>
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NET45
BaseUriOverride ?? "https://workflowexecutions.googleapis.com/";
#else
"https://workflowexecutions.googleapis.com/";
#endif
/// <summary>Gets the service base path.</summary>
public override string BasePath => "";
#if !NET40
/// <summary>Gets the batch base URI; <c>null</c> if unspecified.</summary>
public override string BatchUri => "https://workflowexecutions.googleapis.com/batch";
/// <summary>Gets the batch base path; <c>null</c> if unspecified.</summary>
public override string BatchPath => "batch";
#endif
/// <summary>Available OAuth 2.0 scopes for use with the Workflow Executions API.</summary>
public class Scope
{
/// <summary>
/// See, edit, configure, and delete your Google Cloud data and see the email address for your Google
/// Account.
/// </summary>
public static string CloudPlatform = "https://www.googleapis.com/auth/cloud-platform";
}
/// <summary>Available OAuth 2.0 scope constants for use with the Workflow Executions API.</summary>
public static class ScopeConstants
{
/// <summary>
/// See, edit, configure, and delete your Google Cloud data and see the email address for your Google
/// Account.
/// </summary>
public const string CloudPlatform = "https://www.googleapis.com/auth/cloud-platform";
}
/// <summary>Gets the Projects resource.</summary>
public virtual ProjectsResource Projects { get; }
}
/// <summary>A base abstract class for WorkflowExecutions requests.</summary>
/// <remarks>
/// NOTE(review): generated code — declares the standard query parameters shared by every request of
/// this service; concrete request classes add their own path/query parameters on top in their
/// InitParameters overrides.
/// </remarks>
public abstract class WorkflowExecutionsBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse>
{
    /// <summary>Constructs a new WorkflowExecutionsBaseServiceRequest instance.</summary>
    protected WorkflowExecutionsBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service)
    {
    }
    /// <summary>V1 error format.</summary>
    [Google.Apis.Util.RequestParameterAttribute("$.xgafv", Google.Apis.Util.RequestParameterType.Query)]
    public virtual System.Nullable<XgafvEnum> Xgafv { get; set; }
    /// <summary>V1 error format.</summary>
    public enum XgafvEnum
    {
        /// <summary>v1 error format</summary>
        [Google.Apis.Util.StringValueAttribute("1")]
        Value1 = 0,
        /// <summary>v2 error format</summary>
        [Google.Apis.Util.StringValueAttribute("2")]
        Value2 = 1,
    }
    /// <summary>OAuth access token.</summary>
    [Google.Apis.Util.RequestParameterAttribute("access_token", Google.Apis.Util.RequestParameterType.Query)]
    public virtual string AccessToken { get; set; }
    /// <summary>Data format for response.</summary>
    [Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)]
    public virtual System.Nullable<AltEnum> Alt { get; set; }
    /// <summary>Data format for response.</summary>
    public enum AltEnum
    {
        /// <summary>Responses with Content-Type of application/json</summary>
        [Google.Apis.Util.StringValueAttribute("json")]
        Json = 0,
        /// <summary>Media download with context-dependent Content-Type</summary>
        [Google.Apis.Util.StringValueAttribute("media")]
        Media = 1,
        /// <summary>Responses with Content-Type of application/x-protobuf</summary>
        [Google.Apis.Util.StringValueAttribute("proto")]
        Proto = 2,
    }
    /// <summary>JSONP</summary>
    [Google.Apis.Util.RequestParameterAttribute("callback", Google.Apis.Util.RequestParameterType.Query)]
    public virtual string Callback { get; set; }
    /// <summary>Selector specifying which fields to include in a partial response.</summary>
    [Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)]
    public virtual string Fields { get; set; }
    /// <summary>
    /// API key. Your API key identifies your project and provides you with API access, quota, and reports. Required
    /// unless you provide an OAuth 2.0 token.
    /// </summary>
    [Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)]
    public virtual string Key { get; set; }
    /// <summary>OAuth 2.0 token for the current user.</summary>
    [Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)]
    public virtual string OauthToken { get; set; }
    /// <summary>Returns response with indentations and line breaks.</summary>
    [Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)]
    public virtual System.Nullable<bool> PrettyPrint { get; set; }
    /// <summary>
    /// Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a
    /// user, but should not exceed 40 characters.
    /// </summary>
    [Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)]
    public virtual string QuotaUser { get; set; }
    /// <summary>Legacy upload protocol for media (e.g. "media", "multipart").</summary>
    [Google.Apis.Util.RequestParameterAttribute("uploadType", Google.Apis.Util.RequestParameterType.Query)]
    public virtual string UploadType { get; set; }
    /// <summary>Upload protocol for media (e.g. "raw", "multipart").</summary>
    [Google.Apis.Util.RequestParameterAttribute("upload_protocol", Google.Apis.Util.RequestParameterType.Query)]
    public virtual string UploadProtocol { get; set; }
    /// <summary>Initializes WorkflowExecutions parameter list.</summary>
    // Registers the shared query parameters above with the request's parameter table.
    // All are optional; only "alt" (default "json") and "prettyPrint" (default "true")
    // carry non-null defaults.
    protected override void InitParameters()
    {
        base.InitParameters();
        RequestParameters.Add("$.xgafv", new Google.Apis.Discovery.Parameter
        {
            Name = "$.xgafv",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
        RequestParameters.Add("access_token", new Google.Apis.Discovery.Parameter
        {
            Name = "access_token",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
        RequestParameters.Add("alt", new Google.Apis.Discovery.Parameter
        {
            Name = "alt",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = "json",
            Pattern = null,
        });
        RequestParameters.Add("callback", new Google.Apis.Discovery.Parameter
        {
            Name = "callback",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
        RequestParameters.Add("fields", new Google.Apis.Discovery.Parameter
        {
            Name = "fields",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
        RequestParameters.Add("key", new Google.Apis.Discovery.Parameter
        {
            Name = "key",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
        RequestParameters.Add("oauth_token", new Google.Apis.Discovery.Parameter
        {
            Name = "oauth_token",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
        RequestParameters.Add("prettyPrint", new Google.Apis.Discovery.Parameter
        {
            Name = "prettyPrint",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = "true",
            Pattern = null,
        });
        RequestParameters.Add("quotaUser", new Google.Apis.Discovery.Parameter
        {
            Name = "quotaUser",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
        RequestParameters.Add("uploadType", new Google.Apis.Discovery.Parameter
        {
            Name = "uploadType",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
        RequestParameters.Add("upload_protocol", new Google.Apis.Discovery.Parameter
        {
            Name = "upload_protocol",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    }
}
/// <summary>The "projects" collection of methods.</summary>
/// <remarks>
/// NOTE(review): generated code — the nested resource classes mirror the REST path hierarchy
/// projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution}
/// (see the RestPath and Pattern values in the request classes below).
/// </remarks>
public class ProjectsResource
{
    private const string Resource = "projects";
    /// <summary>The service which this resource belongs to.</summary>
    private readonly Google.Apis.Services.IClientService service;
    /// <summary>Constructs a new resource.</summary>
    public ProjectsResource(Google.Apis.Services.IClientService service)
    {
        this.service = service;
        Locations = new LocationsResource(service);
    }
    /// <summary>Gets the Locations resource.</summary>
    public virtual LocationsResource Locations { get; }
    /// <summary>The "locations" collection of methods.</summary>
    public class LocationsResource
    {
        private const string Resource = "locations";
        /// <summary>The service which this resource belongs to.</summary>
        private readonly Google.Apis.Services.IClientService service;
        /// <summary>Constructs a new resource.</summary>
        public LocationsResource(Google.Apis.Services.IClientService service)
        {
            this.service = service;
            Workflows = new WorkflowsResource(service);
        }
        /// <summary>Gets the Workflows resource.</summary>
        public virtual WorkflowsResource Workflows { get; }
        /// <summary>The "workflows" collection of methods.</summary>
        public class WorkflowsResource
        {
            private const string Resource = "workflows";
            /// <summary>The service which this resource belongs to.</summary>
            private readonly Google.Apis.Services.IClientService service;
            /// <summary>Constructs a new resource.</summary>
            public WorkflowsResource(Google.Apis.Services.IClientService service)
            {
                this.service = service;
                Executions = new ExecutionsResource(service);
            }
            /// <summary>Gets the Executions resource.</summary>
            public virtual ExecutionsResource Executions { get; }
            /// <summary>The "executions" collection of methods.</summary>
            public class ExecutionsResource
            {
                private const string Resource = "executions";
                /// <summary>The service which this resource belongs to.</summary>
                private readonly Google.Apis.Services.IClientService service;
                /// <summary>Constructs a new resource.</summary>
                public ExecutionsResource(Google.Apis.Services.IClientService service)
                {
                    this.service = service;
                }
                /// <summary>Cancels an execution of the given name.</summary>
                /// <param name="body">The body of the request.</param>
                /// <param name="name">
                /// Required. Name of the execution to be cancelled. Format:
                /// projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution}
                /// </param>
                public virtual CancelRequest Cancel(Google.Apis.WorkflowExecutions.v1beta.Data.CancelExecutionRequest body, string name)
                {
                    return new CancelRequest(service, body, name);
                }
                /// <summary>Cancels an execution of the given name.</summary>
                // POST v1beta/{+name}:cancel
                public class CancelRequest : WorkflowExecutionsBaseServiceRequest<Google.Apis.WorkflowExecutions.v1beta.Data.Execution>
                {
                    /// <summary>Constructs a new Cancel request.</summary>
                    public CancelRequest(Google.Apis.Services.IClientService service, Google.Apis.WorkflowExecutions.v1beta.Data.CancelExecutionRequest body, string name) : base(service)
                    {
                        Name = name;
                        Body = body;
                        InitParameters();
                    }
                    /// <summary>
                    /// Required. Name of the execution to be cancelled. Format:
                    /// projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution}
                    /// </summary>
                    [Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Path)]
                    public virtual string Name { get; private set; }
                    /// <summary>Gets or sets the body of this request.</summary>
                    Google.Apis.WorkflowExecutions.v1beta.Data.CancelExecutionRequest Body { get; set; }
                    /// <summary>Returns the body of the request.</summary>
                    protected override object GetBody() => Body;
                    /// <summary>Gets the method name.</summary>
                    public override string MethodName => "cancel";
                    /// <summary>Gets the HTTP method.</summary>
                    public override string HttpMethod => "POST";
                    /// <summary>Gets the REST path.</summary>
                    public override string RestPath => "v1beta/{+name}:cancel";
                    /// <summary>Initializes Cancel parameter list.</summary>
                    protected override void InitParameters()
                    {
                        base.InitParameters();
                        RequestParameters.Add("name", new Google.Apis.Discovery.Parameter
                        {
                            Name = "name",
                            IsRequired = true,
                            ParameterType = "path",
                            DefaultValue = null,
                            Pattern = @"^projects/[^/]+/locations/[^/]+/workflows/[^/]+/executions/[^/]+$",
                        });
                    }
                }
                /// <summary>Creates a new execution using the latest revision of the given workflow.</summary>
                /// <param name="body">The body of the request.</param>
                /// <param name="parent">
                /// Required. Name of the workflow for which an execution should be created. Format:
                /// projects/{project}/locations/{location}/workflows/{workflow} The latest revision of the workflow
                /// will be used.
                /// </param>
                public virtual CreateRequest Create(Google.Apis.WorkflowExecutions.v1beta.Data.Execution body, string parent)
                {
                    return new CreateRequest(service, body, parent);
                }
                /// <summary>Creates a new execution using the latest revision of the given workflow.</summary>
                // POST v1beta/{+parent}/executions
                public class CreateRequest : WorkflowExecutionsBaseServiceRequest<Google.Apis.WorkflowExecutions.v1beta.Data.Execution>
                {
                    /// <summary>Constructs a new Create request.</summary>
                    public CreateRequest(Google.Apis.Services.IClientService service, Google.Apis.WorkflowExecutions.v1beta.Data.Execution body, string parent) : base(service)
                    {
                        Parent = parent;
                        Body = body;
                        InitParameters();
                    }
                    /// <summary>
                    /// Required. Name of the workflow for which an execution should be created. Format:
                    /// projects/{project}/locations/{location}/workflows/{workflow} The latest revision of the
                    /// workflow will be used.
                    /// </summary>
                    [Google.Apis.Util.RequestParameterAttribute("parent", Google.Apis.Util.RequestParameterType.Path)]
                    public virtual string Parent { get; private set; }
                    /// <summary>Gets or sets the body of this request.</summary>
                    Google.Apis.WorkflowExecutions.v1beta.Data.Execution Body { get; set; }
                    /// <summary>Returns the body of the request.</summary>
                    protected override object GetBody() => Body;
                    /// <summary>Gets the method name.</summary>
                    public override string MethodName => "create";
                    /// <summary>Gets the HTTP method.</summary>
                    public override string HttpMethod => "POST";
                    /// <summary>Gets the REST path.</summary>
                    public override string RestPath => "v1beta/{+parent}/executions";
                    /// <summary>Initializes Create parameter list.</summary>
                    protected override void InitParameters()
                    {
                        base.InitParameters();
                        RequestParameters.Add("parent", new Google.Apis.Discovery.Parameter
                        {
                            Name = "parent",
                            IsRequired = true,
                            ParameterType = "path",
                            DefaultValue = null,
                            Pattern = @"^projects/[^/]+/locations/[^/]+/workflows/[^/]+$",
                        });
                    }
                }
                /// <summary>Returns an execution of the given name.</summary>
                /// <param name="name">
                /// Required. Name of the execution to be retrieved. Format:
                /// projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution}
                /// </param>
                public virtual GetRequest Get(string name)
                {
                    return new GetRequest(service, name);
                }
                /// <summary>Returns an execution of the given name.</summary>
                // GET v1beta/{+name}
                public class GetRequest : WorkflowExecutionsBaseServiceRequest<Google.Apis.WorkflowExecutions.v1beta.Data.Execution>
                {
                    /// <summary>Constructs a new Get request.</summary>
                    public GetRequest(Google.Apis.Services.IClientService service, string name) : base(service)
                    {
                        Name = name;
                        InitParameters();
                    }
                    /// <summary>
                    /// Required. Name of the execution to be retrieved. Format:
                    /// projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution}
                    /// </summary>
                    [Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Path)]
                    public virtual string Name { get; private set; }
                    /// <summary>
                    /// Optional. A view defining which fields should be filled in the returned execution. The API
                    /// will default to the FULL view.
                    /// </summary>
                    [Google.Apis.Util.RequestParameterAttribute("view", Google.Apis.Util.RequestParameterType.Query)]
                    public virtual System.Nullable<ViewEnum> View { get; set; }
                    /// <summary>
                    /// Optional. A view defining which fields should be filled in the returned execution. The API
                    /// will default to the FULL view.
                    /// </summary>
                    public enum ViewEnum
                    {
                        /// <summary>The default / unset value.</summary>
                        [Google.Apis.Util.StringValueAttribute("EXECUTION_VIEW_UNSPECIFIED")]
                        EXECUTIONVIEWUNSPECIFIED = 0,
                        /// <summary>
                        /// Includes only basic metadata about the execution. Following fields are returned: name,
                        /// start_time, end_time, state and workflow_revision_id.
                        /// </summary>
                        [Google.Apis.Util.StringValueAttribute("BASIC")]
                        BASIC = 1,
                        /// <summary>Includes all data.</summary>
                        [Google.Apis.Util.StringValueAttribute("FULL")]
                        FULL = 2,
                    }
                    /// <summary>Gets the method name.</summary>
                    public override string MethodName => "get";
                    /// <summary>Gets the HTTP method.</summary>
                    public override string HttpMethod => "GET";
                    /// <summary>Gets the REST path.</summary>
                    public override string RestPath => "v1beta/{+name}";
                    /// <summary>Initializes Get parameter list.</summary>
                    protected override void InitParameters()
                    {
                        base.InitParameters();
                        RequestParameters.Add("name", new Google.Apis.Discovery.Parameter
                        {
                            Name = "name",
                            IsRequired = true,
                            ParameterType = "path",
                            DefaultValue = null,
                            Pattern = @"^projects/[^/]+/locations/[^/]+/workflows/[^/]+/executions/[^/]+$",
                        });
                        RequestParameters.Add("view", new Google.Apis.Discovery.Parameter
                        {
                            Name = "view",
                            IsRequired = false,
                            ParameterType = "query",
                            DefaultValue = null,
                            Pattern = null,
                        });
                    }
                }
                /// <summary>
                /// Returns a list of executions which belong to the workflow with the given name. The method
                /// returns executions of all workflow revisions. Returned executions are ordered by their start
                /// time (newest first).
                /// </summary>
                /// <param name="parent">
                /// Required. Name of the workflow for which the executions should be listed. Format:
                /// projects/{project}/locations/{location}/workflows/{workflow}
                /// </param>
                public virtual ListRequest List(string parent)
                {
                    return new ListRequest(service, parent);
                }
                /// <summary>
                /// Returns a list of executions which belong to the workflow with the given name. The method
                /// returns executions of all workflow revisions. Returned executions are ordered by their start
                /// time (newest first).
                /// </summary>
                // GET v1beta/{+parent}/executions — paginated via PageSize/PageToken.
                public class ListRequest : WorkflowExecutionsBaseServiceRequest<Google.Apis.WorkflowExecutions.v1beta.Data.ListExecutionsResponse>
                {
                    /// <summary>Constructs a new List request.</summary>
                    public ListRequest(Google.Apis.Services.IClientService service, string parent) : base(service)
                    {
                        Parent = parent;
                        InitParameters();
                    }
                    /// <summary>
                    /// Required. Name of the workflow for which the executions should be listed. Format:
                    /// projects/{project}/locations/{location}/workflows/{workflow}
                    /// </summary>
                    [Google.Apis.Util.RequestParameterAttribute("parent", Google.Apis.Util.RequestParameterType.Path)]
                    public virtual string Parent { get; private set; }
                    /// <summary>
                    /// Maximum number of executions to return per call. Max supported value depends on the selected
                    /// Execution view: it's 10000 for BASIC and 100 for FULL. The default value used if the field
                    /// is not specified is 100, regardless of the selected view. Values greater than the max value
                    /// will be coerced down to it.
                    /// </summary>
                    [Google.Apis.Util.RequestParameterAttribute("pageSize", Google.Apis.Util.RequestParameterType.Query)]
                    public virtual System.Nullable<int> PageSize { get; set; }
                    /// <summary>
                    /// A page token, received from a previous `ListExecutions` call. Provide this to retrieve the
                    /// subsequent page. When paginating, all other parameters provided to `ListExecutions` must
                    /// match the call that provided the page token.
                    /// </summary>
                    [Google.Apis.Util.RequestParameterAttribute("pageToken", Google.Apis.Util.RequestParameterType.Query)]
                    public virtual string PageToken { get; set; }
                    /// <summary>
                    /// Optional. A view defining which fields should be filled in the returned executions. The API
                    /// will default to the BASIC view.
                    /// </summary>
                    [Google.Apis.Util.RequestParameterAttribute("view", Google.Apis.Util.RequestParameterType.Query)]
                    public virtual System.Nullable<ViewEnum> View { get; set; }
                    /// <summary>
                    /// Optional. A view defining which fields should be filled in the returned executions. The API
                    /// will default to the BASIC view.
                    /// </summary>
                    public enum ViewEnum
                    {
                        /// <summary>The default / unset value.</summary>
                        [Google.Apis.Util.StringValueAttribute("EXECUTION_VIEW_UNSPECIFIED")]
                        EXECUTIONVIEWUNSPECIFIED = 0,
                        /// <summary>
                        /// Includes only basic metadata about the execution. Following fields are returned: name,
                        /// start_time, end_time, state and workflow_revision_id.
                        /// </summary>
                        [Google.Apis.Util.StringValueAttribute("BASIC")]
                        BASIC = 1,
                        /// <summary>Includes all data.</summary>
                        [Google.Apis.Util.StringValueAttribute("FULL")]
                        FULL = 2,
                    }
                    /// <summary>Gets the method name.</summary>
                    public override string MethodName => "list";
                    /// <summary>Gets the HTTP method.</summary>
                    public override string HttpMethod => "GET";
                    /// <summary>Gets the REST path.</summary>
                    public override string RestPath => "v1beta/{+parent}/executions";
                    /// <summary>Initializes List parameter list.</summary>
                    protected override void InitParameters()
                    {
                        base.InitParameters();
                        RequestParameters.Add("parent", new Google.Apis.Discovery.Parameter
                        {
                            Name = "parent",
                            IsRequired = true,
                            ParameterType = "path",
                            DefaultValue = null,
                            Pattern = @"^projects/[^/]+/locations/[^/]+/workflows/[^/]+$",
                        });
                        RequestParameters.Add("pageSize", new Google.Apis.Discovery.Parameter
                        {
                            Name = "pageSize",
                            IsRequired = false,
                            ParameterType = "query",
                            DefaultValue = null,
                            Pattern = null,
                        });
                        RequestParameters.Add("pageToken", new Google.Apis.Discovery.Parameter
                        {
                            Name = "pageToken",
                            IsRequired = false,
                            ParameterType = "query",
                            DefaultValue = null,
                            Pattern = null,
                        });
                        RequestParameters.Add("view", new Google.Apis.Discovery.Parameter
                        {
                            Name = "view",
                            IsRequired = false,
                            ParameterType = "query",
                            DefaultValue = null,
                            Pattern = null,
                        });
                    }
                }
            }
        }
    }
}
namespace Google.Apis.WorkflowExecutions.v1beta.Data
{
/// <summary>Request for the CancelExecution method.</summary>
// Intentionally carries no payload fields: cancellation is addressed entirely by the
// execution name in the request path (see CancelRequest.RestPath).
public class CancelExecutionRequest : Google.Apis.Requests.IDirectResponseSchema
{
    /// <summary>The ETag of the item.</summary>
    public virtual string ETag { get; set; }
}
/// <summary>Error describes why the execution was abnormally terminated.</summary>
// Generated JSON data model; property-to-wire mapping is given by the JsonProperty attributes.
public class Error : Google.Apis.Requests.IDirectResponseSchema
{
    /// <summary>Human-readable stack trace string.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("context")]
    public virtual string Context { get; set; }
    /// <summary>Error message and data returned represented as a JSON string.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("payload")]
    public virtual string Payload { get; set; }
    /// <summary>Stack trace with detailed information of where error was generated.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("stackTrace")]
    public virtual StackTrace StackTrace { get; set; }
    /// <summary>The ETag of the item.</summary>
    // Satisfies IDirectResponseSchema; note it has no JsonProperty mapping of its own.
    public virtual string ETag { get; set; }
}
/// <summary>
/// A running instance of a [Workflow](/workflows/docs/reference/rest/v1beta/projects.locations.workflows).
/// </summary>
// Generated JSON data model used as both the Create request body and the response type of
// Cancel/Create/Get; "Output only" fields are populated by the service.
public class Execution : Google.Apis.Requests.IDirectResponseSchema
{
    /// <summary>
    /// Input parameters of the execution represented as a JSON string. The size limit is 32KB. *Note*: If you are
    /// using the REST API directly to run your workflow, you must escape any JSON string value of `argument`.
    /// Example: `'{"argument":"{\"firstName\":\"FIRST\",\"lastName\":\"LAST\"}"}'`
    /// </summary>
    [Newtonsoft.Json.JsonPropertyAttribute("argument")]
    public virtual string Argument { get; set; }
    /// <summary>The call logging level associated to this execution.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("callLogLevel")]
    public virtual string CallLogLevel { get; set; }
    /// <summary>Output only. Marks the end of execution, successful or not.</summary>
    // NOTE(review): declared as object in the generated code — presumably a timestamp on the
    // wire; confirm expected representation against the client library's DateTime handling.
    [Newtonsoft.Json.JsonPropertyAttribute("endTime")]
    public virtual object EndTime { get; set; }
    /// <summary>
    /// Output only. The error which caused the execution to finish prematurely. The value is only present if the
    /// execution's state is `FAILED` or `CANCELLED`.
    /// </summary>
    [Newtonsoft.Json.JsonPropertyAttribute("error")]
    public virtual Error Error { get; set; }
    /// <summary>
    /// Output only. The resource name of the execution. Format:
    /// projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution}
    /// </summary>
    [Newtonsoft.Json.JsonPropertyAttribute("name")]
    public virtual string Name { get; set; }
    /// <summary>
    /// Output only. Output of the execution represented as a JSON string. The value can only be present if the
    /// execution's state is `SUCCEEDED`.
    /// </summary>
    [Newtonsoft.Json.JsonPropertyAttribute("result")]
    public virtual string Result { get; set; }
    /// <summary>Output only. Marks the beginning of execution.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("startTime")]
    public virtual object StartTime { get; set; }
    /// <summary>Output only. Current state of the execution.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("state")]
    public virtual string State { get; set; }
    /// <summary>Output only. Revision of the workflow this execution is using.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("workflowRevisionId")]
    public virtual string WorkflowRevisionId { get; set; }
    /// <summary>The ETag of the item.</summary>
    public virtual string ETag { get; set; }
}
/// <summary>Response for the ListExecutions method.</summary>
// One page of results; pass NextPageToken back as ListRequest.PageToken to fetch the next page.
public class ListExecutionsResponse : Google.Apis.Requests.IDirectResponseSchema
{
    /// <summary>The executions which match the request.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("executions")]
    public virtual System.Collections.Generic.IList<Execution> Executions { get; set; }
    /// <summary>
    /// A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted, there are no
    /// subsequent pages.
    /// </summary>
    [Newtonsoft.Json.JsonPropertyAttribute("nextPageToken")]
    public virtual string NextPageToken { get; set; }
    /// <summary>The ETag of the item.</summary>
    public virtual string ETag { get; set; }
}
/// <summary>
/// Position contains source position information about the stack trace element such as line number, column number
/// and length of the code block in bytes.
/// </summary>
public class Position : Google.Apis.Requests.IDirectResponseSchema
{
    /// <summary>The source code column position (of the line) the current instruction was generated from.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("column")]
    public virtual System.Nullable<long> Column { get; set; }
    /// <summary>The number of bytes of source code making up this stack trace element.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("length")]
    public virtual System.Nullable<long> Length { get; set; }
    /// <summary>The source code line number the current instruction was generated from.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("line")]
    public virtual System.Nullable<long> Line { get; set; }
    /// <summary>The ETag of the item.</summary>
    public virtual string ETag { get; set; }
}
/// <summary>A collection of stack elements (frames) where an error occurred.</summary>
// Referenced by Error.StackTrace; each element pairs a step/routine with its source Position.
public class StackTrace : Google.Apis.Requests.IDirectResponseSchema
{
    /// <summary>An array of stack elements.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("elements")]
    public virtual System.Collections.Generic.IList<StackTraceElement> Elements { get; set; }
    /// <summary>The ETag of the item.</summary>
    public virtual string ETag { get; set; }
}
/// <summary>A single stack element (frame) where an error occurred.</summary>
public class StackTraceElement : Google.Apis.Requests.IDirectResponseSchema
{
    /// <summary>The source position information of the stack trace element.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("position")]
    public virtual Position Position { get; set; }
    /// <summary>The routine where the error occurred.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("routine")]
    public virtual string Routine { get; set; }
    /// <summary>The step the error occurred at.</summary>
    [Newtonsoft.Json.JsonPropertyAttribute("step")]
    public virtual string Step { get; set; }
    /// <summary>The ETag of the item.</summary>
    public virtual string ETag { get; set; }
}
}
| |
/******************************************************************************
* Spine Runtimes Software License v2.5
*
* Copyright (c) 2013-2016, Esoteric Software
* All rights reserved.
*
* You are granted a perpetual, non-exclusive, non-sublicensable, and
* non-transferable license to use, install, execute, and perform the Spine
* Runtimes software and derivative works solely for personal or internal
* use. Without the written permission of Esoteric Software (see Section 2 of
* the Spine Software License Agreement), you may not (a) modify, translate,
* adapt, or develop new applications using the Spine Runtimes or otherwise
* create derivative works or improvements of the Spine Runtimes or (b) remove,
* delete, alter, or obscure any trademarks or any copyright, trademark, patent,
* or other intellectual property or proprietary rights notices on or in the
* Software, including any copy thereof. Redistributions in binary or source
* form must include this license and terms.
*
* THIS SOFTWARE IS PROVIDED BY ESOTERIC SOFTWARE "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL ESOTERIC SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, BUSINESS INTERRUPTION, OR LOSS OF
* USE, DATA, OR PROFITS) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************/
using System;
using System.IO;
using UnityEngine;
using Spine;
namespace Spine.Unity {
public class SkeletonDataAsset : ScriptableObject {
#region Inspector
// Atlases providing the texture regions for the skeleton's attachments.
public AtlasAsset[] atlasAssets = new AtlasAsset[0];
#if SPINE_TK2D
// 2D Toolkit sprite collection used instead of Spine atlases when SPINE_TK2D is defined.
public tk2dSpriteCollectionData spriteCollection;
public float scale = 1f;
#else
// Scale passed to the skeleton loader in GetSkeletonData.
public float scale = 0.01f;
#endif
// Source skeleton data; treated as binary when the asset name contains ".skel",
// otherwise parsed as JSON (see GetSkeletonData).
public TextAsset skeletonJSON;
// fromAnimation/toAnimation/duration are parallel arrays of animation-pair mix settings;
// presumably consumed by FillStateData (defined elsewhere in this class) — confirm there.
[SpineAnimation(includeNone: false)]
public string[] fromAnimation = new string[0];
[SpineAnimation(includeNone: false)]
public string[] toAnimation = new string[0];
public float[] duration = new float[0];
// Default mix (crossfade) duration applied when no explicit pair is listed above.
public float defaultMix;
public RuntimeAnimatorController controller;
// True once GetSkeletonData has successfully parsed and cached the skeleton data.
public bool IsLoaded { get { return this.skeletonData != null; } }
// NOTE(review): presumably invoked by Unity when the asset is reset in the editor — confirm;
// simply drops the cached runtime data.
void Reset () {
	Clear();
}
#endregion
// Runtime caches populated by GetSkeletonData/InitializeWithData; cleared by Clear().
SkeletonData skeletonData;
AnimationStateData stateData;
#region Runtime Instantiation
/// <summary>Creates a runtime SkeletonDataAsset from a single atlas asset.</summary>
public static SkeletonDataAsset CreateRuntimeInstance (TextAsset skeletonDataFile, AtlasAsset atlasAsset, bool initialize, float scale = 0.01f) {
	// Wrap the single atlas in an array and defer to the multi-atlas overload.
	AtlasAsset[] singleAtlasArray = new AtlasAsset[] { atlasAsset };
	return CreateRuntimeInstance(skeletonDataFile, singleAtlasArray, initialize, scale);
}
/// <summary>
/// Creates a runtime SkeletonDataAsset from a skeleton data file and a set of atlases.
/// When <paramref name="initialize"/> is true, the skeleton data is parsed and cached immediately.
/// </summary>
public static SkeletonDataAsset CreateRuntimeInstance (TextAsset skeletonDataFile, AtlasAsset[] atlasAssets, bool initialize, float scale = 0.01f) {
	var asset = ScriptableObject.CreateInstance<SkeletonDataAsset>();
	asset.Clear();
	// Wire up the sources the loader will read from.
	asset.skeletonJSON = skeletonDataFile;
	asset.atlasAssets = atlasAssets;
	asset.scale = scale;
	if (initialize) {
		// Eagerly parse and cache; errors are suppressed (quiet == true).
		asset.GetSkeletonData(true);
	}
	return asset;
}
#endregion
/// <summary>Drops the cached skeleton and state data so the next GetSkeletonData call reloads from source.</summary>
public void Clear () {
	this.skeletonData = null;
	this.stateData = null;
}
/// <summary>
/// Parses (or returns the cached) SkeletonData for this asset.
/// </summary>
/// <param name="quiet">When true, suppresses the Debug.LogError calls on missing JSON or parse failure.</param>
/// <returns>The cached or freshly loaded SkeletonData, or null when skeletonJSON is unset or parsing fails.</returns>
public SkeletonData GetSkeletonData (bool quiet) {
	if (skeletonJSON == null) {
		if (!quiet)
			Debug.LogError("Skeleton JSON file not set for SkeletonData asset: " + name, this);
		Clear();
		return null;
	}
	// Disabled to support attachmentless/skinless SkeletonData.
	//			if (atlasAssets == null) {
	//				atlasAssets = new AtlasAsset[0];
	//				if (!quiet)
	//					Debug.LogError("Atlas not set for SkeletonData asset: " + name, this);
	//				Clear();
	//				return null;
	//			}
	//			#if !SPINE_TK2D
	//			if (atlasAssets.Length == 0) {
	//				Clear();
	//				return null;
	//			}
	//			#else
	//			if (atlasAssets.Length == 0 && spriteCollection == null) {
	//				Clear();
	//				return null;
	//			}
	//			#endif
	// Fast path: already parsed and cached.
	if (skeletonData != null)
		return skeletonData;
	AttachmentLoader attachmentLoader;
	float skeletonDataScale;
	Atlas[] atlasArray = this.GetAtlasArray();
	#if !SPINE_TK2D
	attachmentLoader = new AtlasAttachmentLoader(atlasArray);
	skeletonDataScale = scale;
	#else
	// SPINE_TK2D: prefer the 2D Toolkit sprite collection when present; the scale is derived
	// from the collection's ortho size and target height.
	if (spriteCollection != null) {
		attachmentLoader = new Spine.Unity.TK2D.SpriteCollectionAttachmentLoader(spriteCollection);
		skeletonDataScale = (1.0f / (spriteCollection.invOrthoSize * spriteCollection.halfTargetHeight) * scale);
	} else {
		if (atlasArray.Length == 0) {
			Reset();
			if (!quiet) Debug.LogError("Atlas not set for SkeletonData asset: " + name, this);
			return null;
		}
		attachmentLoader = new AtlasAttachmentLoader(atlasArray);
		skeletonDataScale = scale;
	}
	#endif
	// ".skel" in the asset name marks binary-format data; anything else is parsed as JSON.
	bool isBinary = skeletonJSON.name.ToLower().Contains(".skel");
	SkeletonData loadedSkeletonData;
	try {
		if (isBinary)
			loadedSkeletonData = SkeletonDataAsset.ReadSkeletonData(skeletonJSON.bytes, attachmentLoader, skeletonDataScale);
		else
			loadedSkeletonData = SkeletonDataAsset.ReadSkeletonData(skeletonJSON.text, attachmentLoader, skeletonDataScale);
	} catch (Exception ex) {
		if (!quiet)
			Debug.LogError("Error reading skeleton JSON file for SkeletonData asset: " + name + "\n" + ex.Message + "\n" + ex.StackTrace, this);
		return null;
	}
	// Cache the parsed data and build the AnimationStateData from it.
	this.InitializeWithData(loadedSkeletonData);
	return skeletonData;
}
internal void InitializeWithData (SkeletonData sd) {
this.skeletonData = sd;
this.stateData = new AnimationStateData(skeletonData);
FillStateData();
}
internal Atlas[] GetAtlasArray () {
var returnList = new System.Collections.Generic.List<Atlas>(atlasAssets.Length);
for (int i = 0; i < atlasAssets.Length; i++) {
var aa = atlasAssets[i];
if (aa == null) continue;
var a = aa.GetAtlas();
if (a == null) continue;
returnList.Add(a);
}
return returnList.ToArray();
}
internal static SkeletonData ReadSkeletonData (byte[] bytes, AttachmentLoader attachmentLoader, float scale) {
var input = new MemoryStream(bytes);
var binary = new SkeletonBinary(attachmentLoader) {
Scale = scale
};
return binary.ReadSkeletonData(input);
}
internal static SkeletonData ReadSkeletonData (string text, AttachmentLoader attachmentLoader, float scale) {
var input = new StringReader(text);
var json = new SkeletonJson(attachmentLoader) {
Scale = scale
};
return json.ReadSkeletonData(input);
}
public void FillStateData () {
if (stateData != null) {
stateData.defaultMix = defaultMix;
for (int i = 0, n = fromAnimation.Length; i < n; i++) {
if (fromAnimation[i].Length == 0 || toAnimation[i].Length == 0)
continue;
stateData.SetMix(fromAnimation[i], toAnimation[i], duration[i]);
}
}
}
public AnimationStateData GetAnimationStateData () {
if (stateData != null)
return stateData;
GetSkeletonData(false);
return stateData;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
//
// Description:
// Class that serializes and deserializes Styles.
//
using System;
using System.ComponentModel;
using System.ComponentModel.Design.Serialization;
using System.Diagnostics;
using System.Collections;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Text;
using System.Xml;
using MS.Utility;
#if !PBTCOMPILER
using System.Windows.Data;
#endif
#if PBTCOMPILER
namespace MS.Internal.Markup
#else
namespace System.Windows.Markup
#endif
{
/// <summary>
/// Class that knows how to serialize and deserialize Style objects
/// </summary>
internal class XamlStyleSerializer : XamlSerializer
{
#if PBTCOMPILER
    #region Construction
    /// <summary>
    /// Constructor for XamlStyleSerializer
    /// </summary>
    public XamlStyleSerializer() : base()
    {
    }
    internal XamlStyleSerializer(ParserHooks parserHooks) : base()
    {
        _parserHooks = parserHooks;
    }
    // Parser callbacks supplied by the compiler host; forwarded to the style parser.
    private ParserHooks _parserHooks = null;
    #endregion Construction
    /// <summary>
    /// Convert from Xaml read by a token reader into baml being written
    /// out by a record writer. The context gives mapping information.
    /// </summary>
    internal override void ConvertXamlToBaml (
        XamlReaderHelper tokenReader,
        ParserContext context,
        XamlNode xamlNode,
        BamlRecordWriter bamlWriter)
    {
        StyleXamlParser styleParser = new StyleXamlParser(tokenReader, context);
        styleParser.BamlRecordWriter = bamlWriter;
        styleParser.ParserHooks = _parserHooks;
        // Process the xamlNode that is passed in so that the <Style> element is written to baml
        styleParser.WriteElementStart((XamlElementStartNode)xamlNode);
        // Parse the entire Style section now, writing everything out directly to BAML.
        styleParser.Parse();
    }
#else
    /// <summary>
    /// If the Style represented by a group of baml records is stored in a dictionary, this
    /// method will extract the key used for this dictionary from the passed
    /// collection of baml records. For Style, this is the type of the first element record
    /// in the record collection, skipping over the Style element itself.
    /// </summary>
    internal override object GetDictionaryKey(BamlRecord startRecord, ParserContext parserContext)
    {
        // Fall back to Style's default target type unless a more specific key is found.
        Type styleTargetType = Style.DefaultTargetType;
        bool styleTargetTypeSet = false;
        object targetType = null;
        int numberOfElements = 0;
        BamlRecord record = startRecord;
        short ownerTypeId = 0;
        // Walk the record list until a key source is found or the Style body begins.
        while (record != null)
        {
            if (record.RecordType == BamlRecordType.ElementStart)
            {
                BamlElementStartRecord elementStart = record as BamlElementStartRecord;
                if (++numberOfElements == 1)
                {
                    // save the type ID of the first element (i.e. <Style>)
                    ownerTypeId = elementStart.TypeId;
                }
                else if (numberOfElements == 2)
                {
                    // The second element start is the styled element; its type is the key.
                    styleTargetType = parserContext.MapTable.GetTypeFromId(elementStart.TypeId);
                    styleTargetTypeSet = true;
                    break;
                }
            }
            else if (record.RecordType == BamlRecordType.Property && numberOfElements == 1)
            {
                // look for the TargetType property on the <Style> element
                BamlPropertyRecord propertyRecord = record as BamlPropertyRecord;
                if (parserContext.MapTable.DoesAttributeMatch(propertyRecord.AttributeId, ownerTypeId, TargetTypePropertyName))
                {
                    targetType = parserContext.XamlTypeMapper.GetDictionaryKey(propertyRecord.Value, parserContext);
                }
            }
            else if (record.RecordType == BamlRecordType.PropertyComplexStart ||
                record.RecordType == BamlRecordType.PropertyIListStart)
            {
                // We didn't find the second element before a complex property like
                // Style.Triggers, so return the default style target type: FrameworkElement.
                break;
            }
            record = record.Next;
        }
        if (targetType == null)
        {
            // No explicit TargetType attribute was found; a usable key only exists
            // if a second element start supplied one.
            if (!styleTargetTypeSet)
            {
                ThrowException(SRID.StyleNoDictionaryKey,
                parserContext.LineNumber,
                parserContext.LinePosition);
            }
            return styleTargetType;
        }
        else
            return targetType;
    }
    // Helper to insert line and position numbers into message, if they are present
    void ThrowException(
        string id,
        int lineNumber,
        int linePosition)
    {
        string message = SR.Get(id);
        XamlParseException parseException;
        // Throw the appropriate execption. If we have line numbers, then we are
        // parsing a xaml file, so throw a xaml exception. Otherwise were are
        // parsing a baml file.
        if (lineNumber > 0)
        {
            message += " ";
            message += SR.Get(SRID.ParserLineAndOffset,
                lineNumber.ToString(CultureInfo.CurrentCulture),
                linePosition.ToString(CultureInfo.CurrentCulture));
            parseException = new XamlParseException(message, lineNumber, linePosition);
        }
        else
        {
            parseException = new XamlParseException(message);
        }
        throw parseException;
    }
#endif // !PBTCOMPILER
    #region Data
    // Constants used for emitting specific properties and attributes for a Style
    internal const string StyleTagName = "Style";
    internal const string TargetTypePropertyName = "TargetType";
    internal const string BasedOnPropertyName = "BasedOn";
    internal const string VisualTriggersPropertyName = "Triggers";
    internal const string ResourcesPropertyName = "Resources";
    internal const string SettersPropertyName = "Setters";
    internal const string VisualTriggersFullPropertyName = StyleTagName + "." + VisualTriggersPropertyName;
    internal const string SettersFullPropertyName = StyleTagName + "." + SettersPropertyName;
    internal const string ResourcesFullPropertyName = StyleTagName + "." + ResourcesPropertyName;
    internal const string PropertyTriggerPropertyName = "Property";
    internal const string PropertyTriggerValuePropertyName = "Value";
    internal const string PropertyTriggerSourceName = "SourceName";
    internal const string PropertyTriggerEnterActions = "EnterActions";
    internal const string PropertyTriggerExitActions = "ExitActions";
    internal const string DataTriggerBindingPropertyName = "Binding";
    internal const string EventTriggerEventName = "RoutedEvent";
    internal const string EventTriggerSourceName = "SourceName";
    internal const string EventTriggerActions = "Actions";
    internal const string MultiPropertyTriggerConditionsPropertyName = "Conditions";
    internal const string SetterTagName = "Setter";
    internal const string SetterPropertyAttributeName = "Property";
    internal const string SetterValueAttributeName = "Value";
    internal const string SetterTargetAttributeName = "TargetName";
    internal const string SetterEventAttributeName = "Event";
    internal const string SetterHandlerAttributeName = "Handler";
#if HANDLEDEVENTSTOO
    internal const string SetterHandledEventsTooAttributeName = "HandledEventsToo";
#endif
    #endregion Data
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using Term = Lucene.Net.Index.Term;
using PriorityQueue = Lucene.Net.Util.PriorityQueue;
namespace Lucene.Net.Search
{
/// <summary>Implements parallel search over a set of <code>Searchables</code>.
///
/// <p>Applications usually need only call the inherited {@link #Search(Query)}
/// or {@link #search(Query,Filter)} methods.
/// </summary>
public class ParallelMultiSearcher : MultiSearcher
{
    /// <summary>Wraps a caller-supplied collector and shifts each doc id by the
    /// sub-searcher's starting offset so ids are valid in the merged index space.</summary>
    private class AnonymousClassHitCollector1 : HitCollector
    {
        public AnonymousClassHitCollector1(Lucene.Net.Search.HitCollector results, int start, ParallelMultiSearcher enclosingInstance)
        {
            InitBlock(results, start, enclosingInstance);
        }
        private void InitBlock(Lucene.Net.Search.HitCollector results, int start, ParallelMultiSearcher enclosingInstance)
        {
            this.results = results;
            this.start = start;
            this.enclosingInstance = enclosingInstance;
        }
        private Lucene.Net.Search.HitCollector results;
        private int start;
        private ParallelMultiSearcher enclosingInstance;
        public ParallelMultiSearcher Enclosing_Instance
        {
            get
            {
                return enclosingInstance;
            }
        }
        public override void Collect(int doc, float score)
        {
            results.Collect(doc + start, score);
        }
    }

    private Lucene.Net.Search.Searchable[] searchables;
    private int[] starts; // starts[i] = first merged doc id belonging to searchables[i]

    /// <summary>Creates a searcher which searches <i>searchables</i>. </summary>
    public ParallelMultiSearcher(Lucene.Net.Search.Searchable[] searchables) : base(searchables)
    {
        this.searchables = searchables;
        this.starts = GetStarts();
    }

    /// <summary> TODO: parallelize this one too</summary>
    public override int DocFreq(Term term)
    {
        return base.DocFreq(term);
    }

    /// <summary> A search implementation which spans a new thread for each
    /// Searchable, waits for each search to complete and merge
    /// the results back together.
    /// </summary>
    public override TopDocs Search(Weight weight, Filter filter, int nDocs)
    {
        HitQueue hq = new HitQueue(nDocs);
        int totalHits = 0;
        MultiSearcherThread[] msta = new MultiSearcherThread[searchables.Length];
        for (int i = 0; i < searchables.Length; i++)
        {
            // search each searcher
            // Assume not too many searchables and cost of creating a thread is by far inferior to a search
            msta[i] = new MultiSearcherThread(searchables[i], weight, filter, nDocs, hq, i, starts, "MultiSearcher thread #" + (i + 1));
            msta[i].Start();
        }
        for (int i = 0; i < searchables.Length; i++)
        {
            try
            {
                msta[i].Join();
            }
            catch (System.Threading.ThreadInterruptedException)
            {
                ; // TODO: what should we do with this???
            }
            System.IO.IOException ioe = msta[i].GetIOException();
            if (ioe == null)
            {
                totalHits += msta[i].Hits();
            }
            else
            {
                // if one search produced an IOException, rethrow it
                throw ioe;
            }
        }
        ScoreDoc[] scoreDocs = new ScoreDoc[hq.Size()];
        for (int i = hq.Size() - 1; i >= 0; i--)
            // put docs in array, best hit first
            scoreDocs[i] = (ScoreDoc) hq.Pop();
        // BUG FIX: guard on the result array itself rather than on totalHits.
        // With nDocs == 0 the queue is empty even though totalHits can be
        // positive, and scoreDocs[0] would have thrown IndexOutOfRangeException.
        float maxScore = (scoreDocs.Length == 0) ? System.Single.NegativeInfinity : scoreDocs[0].score;
        return new TopDocs(totalHits, scoreDocs, maxScore);
    }

    /// <summary> A search implementation allowing sorting which spans a new thread for each
    /// Searchable, waits for each search to complete and merges
    /// the results back together.
    /// </summary>
    public override TopFieldDocs Search(Weight weight, Filter filter, int nDocs, Sort sort)
    {
        // don't specify the fields - we'll wait to do this until we get results
        FieldDocSortedHitQueue hq = new FieldDocSortedHitQueue(null, nDocs);
        int totalHits = 0;
        MultiSearcherThread[] msta = new MultiSearcherThread[searchables.Length];
        for (int i = 0; i < searchables.Length; i++)
        {
            // search each searcher
            // Assume not too many searchables and cost of creating a thread is by far inferior to a search
            msta[i] = new MultiSearcherThread(searchables[i], weight, filter, nDocs, hq, sort, i, starts, "MultiSearcher thread #" + (i + 1));
            msta[i].Start();
        }
        float maxScore = System.Single.NegativeInfinity;
        for (int i = 0; i < searchables.Length; i++)
        {
            try
            {
                msta[i].Join();
            }
            catch (System.Threading.ThreadInterruptedException)
            {
                ; // TODO: what should we do with this???
            }
            System.IO.IOException ioe = msta[i].GetIOException();
            if (ioe == null)
            {
                totalHits += msta[i].Hits();
                maxScore = System.Math.Max(maxScore, msta[i].GetMaxScore());
            }
            else
            {
                // if one search produced an IOException, rethrow it
                throw ioe;
            }
        }
        ScoreDoc[] scoreDocs = new ScoreDoc[hq.Size()];
        for (int i = hq.Size() - 1; i >= 0; i--)
            // put docs in array
            scoreDocs[i] = (ScoreDoc) hq.Pop();
        return new TopFieldDocs(totalHits, scoreDocs, hq.GetFields(), maxScore);
    }

    /// <summary>Lower-level search API.
    ///
    /// <p>{@link HitCollector#Collect(int,float)} is called for every matching document.
    ///
    /// <p>Applications should only use this if they need <i>all</i> of the
    /// matching documents. The high-level search API ({@link
    /// Searcher#Search(Query)}) is usually more efficient, as it skips
    /// non-high-scoring hits.
    ///
    /// </summary>
    /// <param name="weight">to match documents
    /// </param>
    /// <param name="filter">if non-null, a bitset used to eliminate some documents
    /// </param>
    /// <param name="results">to receive hits
    ///
    /// </param>
    /// <todo> parallelize this one too </todo>
    public override void Search(Weight weight, Filter filter, HitCollector results)
    {
        for (int i = 0; i < searchables.Length; i++)
        {
            int start = starts[i];
            searchables[i].Search(weight, filter, new AnonymousClassHitCollector1(results, start, this));
        }
    }

    /*
    * TODO: this one could be parallelized too
    * @see Lucene.Net.Search.Searchable#rewrite(Lucene.Net.Search.Query)
    */
    public override Query Rewrite(Query original)
    {
        return base.Rewrite(original);
    }
}
/// <summary> A thread subclass for searching a single searchable </summary>
class MultiSearcherThread : SupportClass.ThreadClass
{
    private Lucene.Net.Search.Searchable _searchable;
    private Weight _weight;
    private Filter _filter;
    private int _nDocs;
    private TopDocs _docs;
    private int _index;
    private PriorityQueue _hq;
    private int[] _starts;
    private System.IO.IOException _ioe;
    private Sort _sort;

    /// <summary>Creates a worker for an unsorted (relevance-ranked) search.</summary>
    public MultiSearcherThread(Lucene.Net.Search.Searchable searchable, Weight weight, Filter filter, int nDocs, HitQueue hq, int i, int[] starts, System.String name):base(name)
    {
        _searchable = searchable;
        _weight = weight;
        _filter = filter;
        _nDocs = nDocs;
        _hq = hq;
        _index = i;
        _starts = starts;
    }

    /// <summary>Creates a worker for a field-sorted search.</summary>
    public MultiSearcherThread(Lucene.Net.Search.Searchable searchable, Weight weight, Filter filter, int nDocs, FieldDocSortedHitQueue hq, Sort sort, int i, int[] starts, System.String name):base(name)
    {
        _searchable = searchable;
        _weight = weight;
        _filter = filter;
        _nDocs = nDocs;
        _hq = hq;
        _index = i;
        _starts = starts;
        _sort = sort;
    }

    /// <summary>Runs the delegated search, remembers any IOException for the
    /// coordinating thread, and merges the resulting docs into the shared queue.</summary>
    public override void Run()
    {
        try
        {
            _docs = (_sort == null)
                ? _searchable.Search(_weight, _filter, _nDocs)
                : _searchable.Search(_weight, _filter, _nDocs, _sort);
        }
        catch (System.IO.IOException ex)
        {
            // Store the IOException for later use by the caller of this thread.
            _ioe = ex;
            return;
        }

        if (_sort != null)
        {
            // If we are sorting by fields, tell the field sorted hit queue the
            // actual type of the fields, in case the original list contained AUTO.
            ((FieldDocSortedHitQueue) _hq).SetFields(((TopFieldDocs) _docs).fields);
        }
        foreach (ScoreDoc scoreDoc in _docs.scoreDocs)
        {
            scoreDoc.doc += _starts[_index]; // convert to the merged doc id space
            // It would be so nice if we had a thread-safe insert.
            lock (_hq)
            {
                if (!_hq.Insert(scoreDoc))
                    break; // no more scores > minScore
            }
        }
    }

    public virtual int Hits()
    {
        return _docs.totalHits;
    }

    public virtual float GetMaxScore()
    {
        return _docs.GetMaxScore();
    }

    public virtual System.IO.IOException GetIOException()
    {
        return _ioe;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using Newtonsoft.Json.Linq;
using NuGet.Client.Installation;
using NuGet.Client.Resolution;
using NuGet.Versioning;
using NuGet.VisualStudio;
using NuGetConsole;
using Resx = NuGet.Client.VisualStudio.UI.Resources;
namespace NuGet.Client.VisualStudio.UI
{
/// <summary>
/// Interaction logic for PackageManagerControl.xaml
/// </summary>
public partial class PackageManagerControl : UserControl
{
private const int PageSize = 10;
// Copied from file Constants.cs in NuGet.Core:
// This is temporary until we fix the gallery to have proper first class support for this.
// The magic unpublished date is 1900-01-01T00:00:00
public static readonly DateTimeOffset Unpublished = new DateTimeOffset(1900, 1, 1, 0, 0, 0, TimeSpan.FromHours(-8));
private bool _initialized;
// used to prevent starting new search when we update the package sources
// list in response to PackageSourcesChanged event.
private bool _dontStartNewSearch;
private int _busyCount;
public PackageManagerModel Model { get; private set; }
public SourceRepositoryManager Sources
{
get
{
return Model.Sources;
}
}
public InstallationTarget Target
{
get
{
return Model.Target;
}
}
private IConsole _outputConsole;
internal IUserInterfaceService UI { get; private set; }
private PackageRestoreBar _restoreBar;
private IPackageRestoreManager _packageRestoreManager;
/// <summary>
/// Builds the package manager UI for the given model: populates the filter combo,
/// installs the restore bar, picks the project- or solution-level detail pane,
/// fills the source list, and subscribes to package-source changes.
/// </summary>
public PackageManagerControl(PackageManagerModel model, IUserInterfaceService ui)
{
    UI = ui;
    Model = model;
    InitializeComponent();
    _searchControl.Text = model.SearchText;
    _filter.Items.Add(Resx.Resources.Filter_All);
    _filter.Items.Add(Resx.Resources.Filter_Installed);
    _filter.Items.Add(Resx.Resources.Filter_UpdateAvailable);
    // TODO: Relocate to v3 API.
    _packageRestoreManager = ServiceLocator.GetInstance<IPackageRestoreManager>();
    AddRestoreBar();
    // Both detail panes start hidden; exactly one is made visible below
    // depending on whether the target is a solution or a single project.
    _packageDetail.Visibility = System.Windows.Visibility.Collapsed;
    _packageDetail.Control = this;
    _packageSolutionDetail.Visibility = System.Windows.Visibility.Collapsed;
    _packageSolutionDetail.Control = this;
    _busyCount = 0;
    if (Target.IsSolution)
    {
        _packageSolutionDetail.Visibility = System.Windows.Visibility.Visible;
    }
    else
    {
        _packageDetail.Visibility = System.Windows.Visibility.Visible;
    }
    var outputConsoleProvider = ServiceLocator.GetInstance<IOutputConsoleProvider>();
    _outputConsole = outputConsoleProvider.CreateOutputConsole(requirePowerShellHost: false);
    InitSourceRepoList();
    this.Unloaded += PackageManagerControl_Unloaded;
    _initialized = true;
    Model.Sources.PackageSourcesChanged += Sources_PackageSourcesChanged;
}
/// <summary>
/// Sets the PackageStatus property of the given package: UpdateAvailable when any
/// target has an older version installed than the latest stable one, Installed when
/// it is up to date, NotInstalled otherwise.
/// </summary>
private static void SetPackageStatus(
    UiSearchResultPackage package,
    InstallationTarget target)
{
    var latestStableVersion = package.AllVersions
        .Where(p => !p.Version.IsPrerelease)
        .Max(p => p.Version);

    // Get the minimum version installed in any target project/solution
    var minimumInstalledPackage = target.GetAllTargetsRecursively()
        .Select(t => t.InstalledPackages.GetInstalledPackage(package.Id))
        .Where(p => p != null)
        .OrderBy(r => r.Identity.Version)
        .FirstOrDefault();

    PackageStatus status;
    if (minimumInstalledPackage != null)
    {
        if (minimumInstalledPackage.Identity.Version < latestStableVersion)
        {
            status = PackageStatus.UpdateAvailable;
        }
        else
        {
            status = PackageStatus.Installed;
        }
    }
    else
    {
        status = PackageStatus.NotInstalled;
    }
    // BUG FIX: the computed status was never written back to the package
    // (the assignment had been misplaced into Sources_PackageSourcesChanged).
    package.Status = status;
}
/// <summary>
/// Rebuilds the package source drop-down when the configured sources change,
/// preserving the active selection when possible and restarting the search when
/// the active source had to change.
/// </summary>
private void Sources_PackageSourcesChanged(object sender, EventArgs e)
{
    // BUG FIX: removed stray statements that referenced identifiers not in scope
    // here ("package.Status = status;" and a selectedPackage detail-pane block) —
    // they belong to SetPackageStatus and UpdateDetailPane respectively and
    // appear to have leaked in during a merge.

    // Set _dontStartNewSearch to true to prevent a new search started in
    // _sourceRepoList_SelectionChanged(). This method will start the new
    // search when needed by itself.
    _dontStartNewSearch = true;
    try
    {
        var oldActiveSource = _sourceRepoList.SelectedItem as PackageSource;
        var newSources = new List<PackageSource>(Sources.AvailableSources);

        // Update the source repo list with the new value.
        _sourceRepoList.Items.Clear();
        foreach (var source in newSources)
        {
            _sourceRepoList.Items.Add(source);
        }

        if (oldActiveSource != null && newSources.Contains(oldActiveSource))
        {
            // active source is not changed. Set _dontStartNewSearch to true
            // to prevent a new search when _sourceRepoList.SelectedItem is set.
            _sourceRepoList.SelectedItem = oldActiveSource;
        }
        else
        {
            // active source changed.
            _sourceRepoList.SelectedItem =
                newSources.Count > 0 ?
                newSources[0] :
                null;

            // start search explicitly.
            SearchPackageInActivePackageSource();
        }
    }
    finally
    {
        _dontStartNewSearch = false;
    }
}
// Tear down the restore bar when the control leaves the visual tree so its
// subscription on the restore manager is released.
private void PackageManagerControl_Unloaded(object sender, RoutedEventArgs e)
{
    RemoveRestoreBar();
}
// Creates the package-restore bar, adds it to the root layout, and subscribes to
// missing-package notifications.
private void AddRestoreBar()
{
    _restoreBar = new PackageRestoreBar(_packageRestoreManager);
    _root.Children.Add(_restoreBar);
    _packageRestoreManager.PackagesMissingStatusChanged += packageRestoreManager_PackagesMissingStatusChanged;
}
// Counterpart of AddRestoreBar: disposes the bar and unsubscribes from the
// restore manager's event to avoid a handler leak.
private void RemoveRestoreBar()
{
    _restoreBar.CleanUp();
    _packageRestoreManager.PackagesMissingStatusChanged -= packageRestoreManager_PackagesMissingStatusChanged;
}
/// <summary>
/// Reacts to package-restore status changes; once all packages are restored the UI
/// should be refreshed (currently still a TODO for both target kinds).
/// </summary>
private void packageRestoreManager_PackagesMissingStatusChanged(object sender, PackagesMissingStatusEventArgs e)
{
    // PackageRestoreManager fires this event even when solution is closed.
    // Don't do anything if solution is closed.
    if (!Target.IsAvailable)
    {
        return;
    }

    // Nothing to do while packages are still missing.
    if (e.PackagesMissing)
    {
        return;
    }

    // Packages have been restored; refresh the appropriate view.
    if (Target.IsSolution)
    {
        // TODO: update UI here
    }
    else
    {
        // TODO: update UI here
    }
}
/// <summary>
/// Fills in the header label and (re)populates the package source drop-down,
/// selecting the currently active repository's source if there is one.
/// </summary>
private void InitSourceRepoList()
{
    _label.Text = string.Format(
        CultureInfo.CurrentCulture,
        Resx.Resources.Label_PackageManager,
        Target.Name);

    // Rebuild the source list from the currently available sources.
    _sourceRepoList.Items.Clear();
    foreach (var packageSource in Sources.AvailableSources)
    {
        _sourceRepoList.Items.Add(packageSource);
    }

    var activeRepository = Sources.ActiveRepository;
    if (activeRepository != null)
    {
        _sourceRepoList.SelectedItem = activeRepository.Source;
    }
}
/// <summary>
/// Reference-counted busy indicator: each SetBusy(true) should be paired with a
/// SetBusy(false). The busy overlay is shown (and the control disabled) while the
/// count is positive.
/// </summary>
private void SetBusy(bool busy)
{
    if (busy)
    {
        _busyCount++;
        if (_busyCount > 0)
        {
            _busyControl.Visibility = System.Windows.Visibility.Visible;
            this.IsEnabled = false;
        }
    }
    else
    {
        // ROBUSTNESS FIX: clamp at zero so an unbalanced SetBusy(false) cannot
        // drive the count negative and swallow a later SetBusy(true).
        _busyCount = Math.Max(0, _busyCount - 1);
        if (_busyCount == 0)
        {
            _busyControl.Visibility = System.Windows.Visibility.Collapsed;
            this.IsEnabled = true;
        }
    }
}
// Options passed from the UI to the PackageLoader: whether prerelease versions are
// included and whether only packages with available updates are shown.
private class PackageLoaderOption
{
    public bool IncludePrerelease { get; set; }
    public bool ShowUpdatesAvailable { get; set; }
}
/// <summary>
/// ILoader implementation that fetches pages of package JSON through a delegate,
/// converts each JObject into a UiSearchResultPackage view model, and applies the
/// prerelease / updates-only filtering described by PackageLoaderOption.
/// </summary>
private class PackageLoader : ILoader
{
    // where to get the package list
    private Func<int, CancellationToken, Task<IEnumerable<JObject>>> _loader;
    private InstallationTarget _target;
    private PackageLoaderOption _option;

    public PackageLoader(
        Func<int, CancellationToken, Task<IEnumerable<JObject>>> loader,
        InstallationTarget target,
        PackageLoaderOption option,
        string searchText)
    {
        _loader = loader;
        _target = target;
        _option = option;
        // Generic "loading" text unless an actual search term is in play.
        LoadingMessage = string.IsNullOrWhiteSpace(searchText) ?
            Resx.Resources.Text_Loading :
            string.Format(
                CultureInfo.CurrentCulture,
                Resx.Resources.Text_Searching,
                searchText);
    }

    public string LoadingMessage
    {
        get;
        private set;
    }

    // Runs one page fetch on the thread pool and materializes the results.
    private Task<List<JObject>> InternalLoadItems(
        int startIndex,
        CancellationToken ct,
        Func<int, CancellationToken, Task<IEnumerable<JObject>>> loader)
    {
        // BUG FIX: use the 'loader' parameter instead of always reading the
        // _loader field; the parameter was previously ignored.
        return Task.Factory.StartNew(() =>
        {
            var r1 = loader(startIndex, ct);
            return r1.Result.ToList();
        });
    }

    // Projects a search-result package into the detailed model used by AllVersions.
    private UiDetailedPackage ToDetailedPackage(UiSearchResultPackage package)
    {
        var detailedPackage = new UiDetailedPackage();
        detailedPackage.Id = package.Id;
        detailedPackage.Version = package.Version;
        detailedPackage.Summary = package.Summary;
        return detailedPackage;
    }

    /// <summary>
    /// Loads one page of packages starting at <paramref name="startIndex"/>, applying
    /// the loader options. HasMoreItems is inferred from receiving a full page.
    /// </summary>
    public async Task<LoadResult> LoadItems(int startIndex, CancellationToken ct)
    {
        var results = await InternalLoadItems(startIndex, ct, _loader);

        List<UiSearchResultPackage> packages = new List<UiSearchResultPackage>();
        foreach (var package in results)
        {
            ct.ThrowIfCancellationRequested();

            // As a debugging aide, I am intentionally NOT using an object initializer -anurse
            var searchResultPackage = new UiSearchResultPackage();
            searchResultPackage.Id = package.Value<string>(Properties.PackageId);
            searchResultPackage.Version = NuGetVersion.Parse(package.Value<string>(Properties.LatestVersion));
            if (searchResultPackage.Version.IsPrerelease && !_option.IncludePrerelease)
            {
                // don't include prerelease version if includePrerelease is false
                continue;
            }
            searchResultPackage.IconUrl = GetUri(package, Properties.IconUrl);

            var allVersions = LoadVersions(
                package.Value<JArray>(Properties.Packages),
                searchResultPackage.Version);
            if (!allVersions.Select(v => v.Version).Contains(searchResultPackage.Version))
            {
                // make sure allVersions contains searchResultPackage itself.
                allVersions.Add(ToDetailedPackage(searchResultPackage));
            }
            searchResultPackage.AllVersions = allVersions;
            SetPackageStatus(searchResultPackage, _target);
            if (_option.ShowUpdatesAvailable &&
                searchResultPackage.Status != PackageStatus.UpdateAvailable)
            {
                continue;
            }

            searchResultPackage.Summary = package.Value<string>(Properties.Summary);
            if (string.IsNullOrWhiteSpace(searchResultPackage.Summary))
            {
                // summary is empty. Use its description instead.
                var self = searchResultPackage.AllVersions.FirstOrDefault(p => p.Version == searchResultPackage.Version);
                if (self != null)
                {
                    searchResultPackage.Summary = self.Description;
                }
            }

            packages.Add(searchResultPackage);
        }

        ct.ThrowIfCancellationRequested();
        return new LoadResult()
        {
            Items = packages,
            HasMoreItems = packages.Count == PageSize
        };
    }

    // Get all versions of the package
    private List<UiDetailedPackage> LoadVersions(JArray versions, NuGetVersion searchResultVersion)
    {
        var retValue = new List<UiDetailedPackage>();

        // If repo is AggregateRepository, the package duplicates can be returned by
        // FindPackagesById(), so Distinct is needed here to remove the duplicates.
        foreach (var token in versions)
        {
            Debug.Assert(token.Type == JTokenType.Object);
            JObject version = (JObject)token;
            var detailedPackage = new UiDetailedPackage();
            detailedPackage.Id = version.Value<string>(Properties.PackageId);
            detailedPackage.Version = NuGetVersion.Parse(version.Value<string>(Properties.Version));
            if (detailedPackage.Version.IsPrerelease &&
                !_option.IncludePrerelease &&
                detailedPackage.Version != searchResultVersion)
            {
                // don't include prerelease version if includePrerelease is false
                continue;
            }
            string publishedStr = version.Value<string>(Properties.Published);
            if (!String.IsNullOrEmpty(publishedStr))
            {
                // NOTE(review): DateTime.Parse uses the current culture here —
                // confirm the feed always serves culture-invariant timestamps.
                detailedPackage.Published = DateTime.Parse(publishedStr);
                if (detailedPackage.Published <= Unpublished &&
                    detailedPackage.Version != searchResultVersion)
                {
                    // don't include unlisted package
                    continue;
                }
            }
            detailedPackage.Summary = version.Value<string>(Properties.Summary);
            detailedPackage.Description = version.Value<string>(Properties.Description);
            detailedPackage.Authors = version.Value<string>(Properties.Authors);
            detailedPackage.Owners = version.Value<string>(Properties.Owners);
            detailedPackage.IconUrl = GetUri(version, Properties.IconUrl);
            detailedPackage.LicenseUrl = GetUri(version, Properties.LicenseUrl);
            detailedPackage.ProjectUrl = GetUri(version, Properties.ProjectUrl);
            detailedPackage.Tags = String.Join(" ", (version.Value<JArray>(Properties.Tags) ?? Enumerable.Empty<JToken>()).Select(t => t.ToString()));
            detailedPackage.DownloadCount = version.Value<int>(Properties.DownloadCount);
            detailedPackage.DependencySets = (version.Value<JArray>(Properties.DependencyGroups) ?? Enumerable.Empty<JToken>()).Select(obj => LoadDependencySet((JObject)obj));
            detailedPackage.HasDependencies = detailedPackage.DependencySets.Any(
                set => set.Dependencies != null && set.Dependencies.Count > 0);
            retValue.Add(detailedPackage);
        }

        return retValue;
    }

    // Returns the named JSON property as a Uri, or null when absent or empty.
    private Uri GetUri(JObject json, string property)
    {
        if (json[property] == null)
        {
            return null;
        }
        string str = json[property].ToString();
        if (String.IsNullOrEmpty(str))
        {
            return null;
        }
        return new Uri(str);
    }

    // Converts a dependency-group JSON object into a UiPackageDependencySet.
    private UiPackageDependencySet LoadDependencySet(JObject set)
    {
        var fxName = set.Value<string>(Properties.TargetFramework);
        return new UiPackageDependencySet(
            String.IsNullOrEmpty(fxName) ? null : FrameworkNameHelper.ParsePossiblyShortenedFrameworkName(fxName),
            (set.Value<JArray>(Properties.Dependencies) ?? Enumerable.Empty<JToken>()).Select(obj => LoadDependency((JObject)obj)));
    }

    // Converts a single dependency JSON object into a UiPackageDependency.
    private UiPackageDependency LoadDependency(JObject dep)
    {
        var ver = dep.Value<string>(Properties.Range);
        return new UiPackageDependency(
            dep.Value<string>(Properties.PackageId),
            String.IsNullOrEmpty(ver) ? null : VersionRange.Parse(ver));
    }

    // Joins a JSON string array with ", "; returns null for null or empty input.
    private string StringCollectionToString(JArray v)
    {
        if (v == null)
        {
            return null;
        }
        string retValue = String.Join(", ", v.Select(t => t.ToString()));
        if (retValue == String.Empty)
        {
            return null;
        }
        return retValue;
    }
}
// True when the filter combo is set to "Installed".
private bool ShowInstalled
{
    get
    {
        return Resx.Resources.Filter_Installed.Equals(_filter.SelectedItem);
    }
}
// True when the filter combo is set to "Updates Available".
private bool ShowUpdatesAvailable
{
    get
    {
        return Resx.Resources.Filter_UpdateAvailable.Equals(_filter.SelectedItem);
    }
}
// Reflects the "include prerelease" checkbox state (false for null/indeterminate).
public bool IncludePrerelease
{
    get
    {
        return _checkboxPrerelease.IsChecked == true;
    }
}
/// <summary>
/// Creates a SourceRepository for the package source currently selected in the
/// drop-down, or returns null when nothing is selected.
/// </summary>
internal SourceRepository CreateActiveRepository()
{
    var selectedSource = _sourceRepoList.SelectedItem as PackageSource;
    return selectedSource == null ? null : Sources.CreateSourceRepository(selectedSource);
}
/// <summary>
/// Restarts the package list loader using the current search text, the selected
/// package source, the filter combo (All / Installed / Updates Available), and the
/// prerelease checkbox.
/// </summary>
private void SearchPackageInActivePackageSource()
{
    var searchText = _searchControl.Text;
    var supportedFrameworks = Target.GetSupportedFrameworks();

    // search online
    var activeSource = _sourceRepoList.SelectedItem as PackageSource;
    // NOTE(review): activeSource can be null here (the null case is handled in the
    // else-branch below), yet it is passed to CreateSourceRepository
    // unconditionally — confirm CreateSourceRepository tolerates null (compare
    // CreateActiveRepository, which checks for null first).
    var sourceRepository = Sources.CreateSourceRepository(activeSource);
    PackageLoaderOption option = new PackageLoaderOption()
    {
        IncludePrerelease = this.IncludePrerelease,
        ShowUpdatesAvailable = this.ShowUpdatesAvailable
    };
    if (ShowInstalled || ShowUpdatesAvailable)
    {
        // search installed packages
        var loader = new PackageLoader(
            (startIndex, ct) =>
                Target.SearchInstalled(
                    sourceRepository,
                    searchText,
                    startIndex,
                    PageSize,
                    ct),
            Target,
            option,
            searchText);
        _packageList.Loader = loader;
    }
    else
    {
        // search in active package source
        if (activeSource == null)
        {
            // No source selected: feed the list an empty result set.
            var loader = new PackageLoader(
                (startIndex, ct) =>
                {
                    return Task.Factory.StartNew(() =>
                    {
                        return Enumerable.Empty<JObject>();
                    });
                },
                Target,
                option,
                searchText);
            _packageList.Loader = loader;
        }
        else
        {
            var loader = new PackageLoader(
                (startIndex, ct) =>
                    sourceRepository.Search(
                        searchText,
                        new SearchFilter()
                        {
                            SupportedFrameworks = supportedFrameworks,
                            IncludePrerelease = option.IncludePrerelease
                        },
                        startIndex,
                        PageSize,
                        ct),
                Target,
                option,
                searchText);
            _packageList.Loader = loader;
        }
    }
}
/// <summary>
/// Opens the NuGet options dialog when the settings button is clicked.
/// </summary>
private void SettingsButtonClick(object sender, RoutedEventArgs e)
{
    UI.LaunchNuGetOptionsDialog();
}
/// <summary>
/// Refreshes the detail pane whenever the selection in the package list changes.
/// </summary>
private void PackageList_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    UpdateDetailPane();
}
/// <summary>
/// Updates the detail pane based on the selected package. Clears both
/// detail views when nothing (or a non-package item) is selected;
/// otherwise populates either the solution-level or the project-level
/// detail view depending on the target.
/// </summary>
private void UpdateDetailPane()
{
    var selected = _packageList.SelectedItem as UiSearchResultPackage;
    if (selected == null)
    {
        _packageDetail.DataContext = null;
        _packageSolutionDetail.DataContext = null;
        return;
    }

    if (Target.IsSolution)
    {
        _packageSolutionDetail.DataContext = new PackageSolutionDetailControlModel(selected, (VsSolution)Target);
    }
    else
    {
        var installed = Target.InstalledPackages.GetInstalledPackage(selected.Id);
        var installedVersion = installed == null ? null : installed.Identity.Version;
        _packageDetail.DataContext = new PackageDetailControlModel(selected, installedVersion);
    }
}
/// <summary>
/// Switches the active package source to the newly selected one and
/// re-runs the search, unless a programmatic update asked searches to
/// be suppressed via _dontStartNewSearch.
/// </summary>
private void _sourceRepoList_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    if (!_dontStartNewSearch)
    {
        var selectedSource = _sourceRepoList.SelectedItem as PackageSource;
        if (selectedSource != null)
        {
            Sources.ChangeActiveSource(selectedSource);
        }
        SearchPackageInActivePackageSource();
    }
}
/// <summary>
/// Re-runs the search when the filter selection changes, once the
/// control has finished initializing.
/// </summary>
private void _filter_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    if (!_initialized)
    {
        return;
    }

    SearchPackageInActivePackageSource();
}
/// <summary>
/// Refreshes package status after an install/uninstall. In the
/// installed/updates views the set of rows itself can change, so the
/// whole list is reloaded; otherwise only the status of the items
/// already displayed is recomputed.
/// </summary>
internal void UpdatePackageStatus()
{
    if (!ShowInstalled && !ShowUpdatesAvailable)
    {
        // Only update PackageStatus of existing items; non-package items
        // in the list are skipped by OfType.
        foreach (var package in _packageList.Items.OfType<UiSearchResultPackage>())
        {
            SetPackageStatus(package, Target);
        }
        return;
    }

    // refresh the whole package list
    _packageList.Reload();
}
/// <summary>
/// Shows the license-acceptance prompt for any install operations whose
/// packages require explicit license acceptance. Returns true when no
/// acceptance is needed or the user accepted; false when the user declined.
/// </summary>
public bool ShowLicenseAgreement(IEnumerable<PackageAction> operations)
{
    var licensePackages = operations.Where(op =>
        op.ActionType == PackageActionType.Install &&
        op.Package.Value<bool>("requireLicenseAcceptance"));

    if (!licensePackages.Any())
    {
        // Nothing requires acceptance.
        return true;
    }

    // Hacky distinct without writing a custom comparer: group by (id, version)
    // and take one representative per group.
    var licenseModels = licensePackages
        .GroupBy(action => Tuple.Create(action.Package["id"], action.Package["version"]))
        .Select(group =>
        {
            dynamic package = group.First().Package;
            string licenseUrl = (string)package.licenseUrl;
            string id = (string)package.id;
            string authors = (string)package.authors;
            return new PackageLicenseInfo(
                id,
                licenseUrl == null ? null : new Uri(licenseUrl),
                authors);
        })
        .Where(info => info.LicenseUrl != null); // Shouldn't get nulls, but just in case

    return this.UI.PromptForLicenseAcceptance(licenseModels);
}
/// <summary>
/// Opens a modal preview window listing the resolved package actions.
/// </summary>
private void PreviewActions(IEnumerable<PackageAction> actions)
{
    var previewWindow = new PreviewWindow
    {
        DataContext = new PreviewWindowModel(actions, Target)
    };
    previewWindow.ShowModal();
}
/// <summary>
/// Previews the user-selected action: resolves the package actions and
/// shows them in the preview window. Errors are reported in a modal
/// dialog; the busy indicator is always cleared.
/// (async void is acceptable here: this is a UI entry point.)
/// </summary>
internal async void Preview(IDetailControl detailControl)
{
    SetBusy(true);
    try
    {
        _outputConsole.Clear();
        PreviewActions(await detailControl.ResolveActionsAsync());
    }
    catch (Exception ex)
    {
        new ErrorReportingDialog(ex.Message, ex.ToString()).ShowModal();
    }
    finally
    {
        SetBusy(false);
    }
}
/// <summary>
/// Performs the user-selected action: resolves the package actions,
/// prompts for license acceptance, then executes the actions while a
/// progress dialog logs output. The busy flag and the progress dialog
/// are always cleaned up in the finally block, including on the
/// early return when the user declines a license.
/// </summary>
internal async void PerformAction(IDetailControl detailControl)
{
    SetBusy(true);
    _outputConsole.Clear();
    var progressDialog = new ProgressDialog(_outputConsole);
    progressDialog.Owner = Window.GetWindow(this);
    progressDialog.WindowStartupLocation = WindowStartupLocation.CenterOwner;
    try
    {
        var actions = await detailControl.ResolveActionsAsync();
        // show license agreement
        bool acceptLicense = ShowLicenseAgreement(actions);
        if (!acceptLicense)
        {
            // User declined; finally still closes the dialog and clears busy.
            return;
        }
        // Create the executor and execute the actions
        progressDialog.FileConflictAction = detailControl.FileConflictAction;
        progressDialog.Show();
        var executor = new ActionExecutor();
        await executor.ExecuteActionsAsync(actions, logger: progressDialog, cancelToken: CancellationToken.None);
        UpdatePackageStatus();
        detailControl.Refresh();
    }
    catch (Exception ex)
    {
        var errorDialog = new ErrorReportingDialog(
            ex.Message,
            ex.ToString());
        errorDialog.ShowModal();
    }
    finally
    {
        progressDialog.RequestToClose();
        SetBusy(false);
    }
}
/// <summary>
/// Starts a search when the search control requests one, once the
/// control has finished initializing.
/// </summary>
private void _searchControl_SearchStart(object sender, EventArgs e)
{
    if (_initialized)
    {
        SearchPackageInActivePackageSource();
    }
}
/// <summary>
/// Re-runs the search when the prerelease checkbox is toggled, once the
/// control has finished initializing.
/// </summary>
private void _checkboxPrerelease_CheckChanged(object sender, RoutedEventArgs e)
{
    if (_initialized)
    {
        SearchPackageInActivePackageSource();
    }
}
}
}
| |
namespace Appleseed.Framework.UrlRewriting
{
using System.Configuration;
using System.Globalization;
using System.Web;
using System.Linq;
using UrlRewritingNet.Configuration;
using UrlRewritingNet.Web;
using System.Text.RegularExpressions;
using Appleseed.Framework.Site.Configuration;
using System;
using Appleseed.Framework.Settings;
/// <summary>
/// The appleseed url rewriting rule.
/// </summary>
/// <summary>
/// The Appleseed URL rewriting rule: recognizes handler-style
/// (/site/...) and friendly URLs and rewrites them to the friendly page
/// with a pageId query string.
/// </summary>
public class AppleseedUrlRewritingRule : RewriteRule
{
    #region Constants and Fields
    /// <summary>
    /// The splitter token separating query-string key/value pairs embedded in URL segments.
    /// </summary>
    private string defaultSplitter = "__";
    /// <summary>
    /// The physical page every rewritten URL is routed to.
    /// </summary>
    private string friendlyPageName = "Default.aspx";
    /// <summary>
    /// The URL segment that marks a handler-style URL.
    /// </summary>
    private string handlerFlag = "site";
    /// <summary>
    /// The friendly url extension.
    /// </summary>
    private string friendlyUrlExtension = ".aspx";
    /// <summary>
    /// Whether extensionless friendly URLs are enabled (see Initialize).
    /// </summary>
    private bool friendlyUrlNoExtension = false;
    #endregion
    #region Public Methods
    /// <summary>
    /// Initializes the specified rewrite settings. Rule attributes take
    /// precedence over appSettings values where both are configurable.
    /// </summary>
    /// <param name="rewriteSettings">The rewrite settings.</param>
    public override void Initialize(RewriteSettings rewriteSettings)
    {
        base.Initialize(rewriteSettings);
        if (!string.IsNullOrEmpty(rewriteSettings.Attributes["handlerflag"]))
        {
            this.handlerFlag = rewriteSettings.Attributes["handlerflag"].ToLower(CultureInfo.InvariantCulture);
        }
        if (!string.IsNullOrEmpty(rewriteSettings.Attributes["handlersplitter"]))
        {
            this.defaultSplitter = rewriteSettings.Attributes["handlersplitter"];
        }
        else
        {
            // Fall back to the app-wide splitter when the rule doesn't define one.
            if (ConfigurationManager.AppSettings["HandlerDefaultSplitter"] != null)
            {
                this.defaultSplitter = ConfigurationManager.AppSettings["HandlerDefaultSplitter"];
            }
        }
        if (!string.IsNullOrEmpty(rewriteSettings.Attributes["pageidnosplitter"]))
        {
            // NOTE(review): the parsed value is discarded — this only validates
            // the attribute format and has no effect. Presumably it was meant
            // to be assigned to a field; confirm intent before changing.
            bool.Parse(rewriteSettings.Attributes["pageidnosplitter"]);
        }
        if (!string.IsNullOrEmpty(rewriteSettings.Attributes["friendlyPageName"]))
        {
            this.friendlyPageName = rewriteSettings.Attributes["friendlyPageName"];
        }
        // Ashish.patel@haptix.biz - 2014/12/16 - Set friendlyURl from Web.config
        //if (!string.IsNullOrEmpty(rewriteSettings.Attributes["friendlyUrlExtension"]))
        if (!string.IsNullOrEmpty(System.Configuration.ConfigurationManager.AppSettings["FriendlyUrlExtension"]))
        {
            this.friendlyUrlExtension = System.Configuration.ConfigurationManager.AppSettings["FriendlyUrlExtension"];
        }
        //if (!string.IsNullOrEmpty(System.Configuration.ConfigurationManager.AppSettings["friendlyUrlNoExtension"]) && System.Configuration.ConfigurationManager.AppSettings["friendlyUrlNoExtension"] == "1")
        if (PortalSettings.FriendlyUrlNoExtensionEnabled())
        {
            friendlyUrlNoExtension = true;
        }
    }
    /// <summary>
    /// Determines whether the specified request URL should be rewritten.
    /// </summary>
    /// <param name="requestUrl">The request URL.</param>
    /// <returns>
    /// <c>true</c> if the specified request URL should be rewritten; otherwise, <c>false</c>.
    /// </returns>
    public override bool IsRewrite(string requestUrl)
    {
        // Handler segment anywhere in the URL (e.g. /site/...).
        if (requestUrl.Contains(string.Format("/{0}/", this.handlerFlag)))
        {
            return true;
        }
        var path = HttpContext.Current.Request.ApplicationPath;
        if (!path.EndsWith("/"))
        {
            path = string.Concat(path, "/");
        }
        // URL that is exactly the handler at the application root.
        if (requestUrl.Equals(string.Format("{0}{1}", path, this.handlerFlag)))
        {
            return true;
        }
        //Check the page extension
        if (requestUrl.Contains(this.friendlyUrlExtension))
        {
            return true;
        }
        try
        {
            // Extensionless friendly URLs: rewrite only when the path does not
            // map to a physical file and is not a known static/infrastructure
            // resource or an MVC-routed path.
            if (this.friendlyUrlNoExtension
                && !System.IO.File.Exists(HttpContext.Current.Server.MapPath(requestUrl.Split('?').GetValue(0).ToString()))
                && !requestUrl.ToLower().Contains("/design/")
                && !requestUrl.ToLower().Contains("aspnet_client/")
                && !requestUrl.ToLower().Contains("browserlink")
                && !requestUrl.ToLower().Contains(".js")
                && !requestUrl.ToLower().Contains("sitemap.axd")
                && !requestUrl.ToLower().Contains("webresource.axd")
                && !requestUrl.ToLower().Contains("scriptresource.axd")
                && !HasPathInRoutes(requestUrl.ToLower())
                )
            {
                return true;
            }
        }
        // NOTE(review): all exceptions (e.g. MapPath failures) are silently
        // swallowed and the URL is treated as not-rewritable — deliberate
        // best-effort behavior, but consider logging.
        catch { }
        return false;
    }
    /// <summary>
    /// Checks whether the first path segment matches a registered MVC/route-table route.
    /// </summary>
    private static bool HasPathInRoutes(string path)
    {
        string routeController = path.Split('/')[0];
        if (string.IsNullOrEmpty(routeController))
        {
            // Path starts with '/': the first split element is empty, use the next one.
            routeController = path.Split('/')[1];
        }
        var rtut = System.Web.Routing.RouteTable.Routes.Where(item=> item.GetType().FullName.ToLower() == "system.web.routing.route").FirstOrDefault(rt => ((System.Web.Routing.Route)rt).Url.ToLower().Contains(routeController));
        return rtut != null;
    }
    /// <summary>
    /// Rewrites the URL to the friendly page, translating URL segments
    /// into a pageId plus additional query-string parameters.
    /// </summary>
    /// <param name="url">The URL to rewrite.</param>
    /// <returns>The rewritten URL (path plus query string).</returns>
    public override string RewriteUrl(string url)
    {
        var handler = string.Format("/{0}", this.handlerFlag);
        var rewrittenUrl = "";
        var settings = PortalSettings.GetPortalSettingsbyPageID(Portal.PageID, Config.DefaultPortal);
        // Ashish.patel@haptix.biz - 2014/12/16 - Only when Url contains handler and EnablePageFriendlyUrl = false
        if (url.Contains(handler) && !settings.EnablePageFriendlyUrl)
        {
            // Keep everything before the handler segment as the URL prefix.
            rewrittenUrl = url.Substring(0, url.IndexOf(handler));
        }
        string[] parts;
        if (url.IndexOf(handler) > -1)
        {
            parts = url.Substring(url.IndexOf(handler) + handler.Length).Split(new char[] { '/' }, System.StringSplitOptions.RemoveEmptyEntries);
        }
        else
        {
            parts = url.Split(new char[] { '/' }, System.StringSplitOptions.RemoveEmptyEntries);
        }
        rewrittenUrl += string.Format("/{0}", this.friendlyPageName);
        var pageId = "0"; //this is made in order to allow urls formed only with the handler (/site/ been the default). Those urls will be redirected to the portal home.
        Regex regex = new Regex("^\\d+$", RegexOptions.IgnoreCase | RegexOptions.Compiled);
        // Need to search for the pageId in the url
        int indexNumber = -1;
        // Ashish.patel@haptix.biz - 2014/12/16 -
        //Set the pageid If Enable friendly Url is false
        // if true then set the pageid from URLRewriteFriendlyUrl class
        if (!settings.EnablePageFriendlyUrl)
        {
            // First all-digit segment is taken as the page id.
            for (int i = 0; i < parts.Length && indexNumber == -1; i++)
            {
                if (regex.IsMatch(parts[i]))
                {
                    indexNumber = i;
                }
            }
            if (url.Contains("alias" + this.defaultSplitter))
            {
                pageId = 0.ToString();
            }
            else if (indexNumber != -1)
            {
                pageId = parts[indexNumber];
            }
        }
        else
        {
            for (int i = 0; i < parts.Length && indexNumber == -1; i++)
            {
                if (regex.IsMatch(parts[i]))
                {
                    indexNumber = i;
                }
            }
            if (indexNumber != -1)
            {
                pageId = parts[indexNumber];
            }
            else
            {
                // Ashish.patel@haptix.biz - 2014/12/16 - Set when EnableFriendlyUrl is true
                pageId = UrlRewritingFriendlyUrl.GetPageIDFromPageName(url);
            }
        }
        var queryString = string.Format("?pageId={0}", pageId);
        if (parts.Length > 2)
        {
            // Segments before the page id may encode key__value pairs; turn
            // each one into an extra query-string parameter.
            for (var i = 0; i < indexNumber; i++)
            {
                var queryStringParam = parts[i];
                if (queryStringParam.IndexOf(this.defaultSplitter) < 0)
                {
                    continue;
                }
                queryString += string.Format(
                    "&{0}={1}",
                    queryStringParam.Substring(0, queryStringParam.IndexOf(this.defaultSplitter)),
                    queryStringParam.Substring(queryStringParam.IndexOf(this.defaultSplitter) + this.defaultSplitter.Length));
            }
        }
        // Preserve Facebook-style signed requests posted with the form.
        if (HttpContext.Current.Request.Form["signed_request"] != null)
        {
            queryString += string.Format(
                "&signed_request={0}",
                HttpContext.Current.Request.Params["signed_request"]);
        }
        HttpContext.Current.RewritePath(rewrittenUrl, string.Empty, queryString);
        return rewrittenUrl + queryString;
    }
    #endregion
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvr = Google.Ads.GoogleAds.V8.Resources;
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Ads.GoogleAds.V8.Services
{
/// <summary>Settings for <see cref="MobileDeviceConstantServiceClient"/> instances.</summary>
// NOTE: generated code (see the file header) — do not hand-edit logic here.
public sealed partial class MobileDeviceConstantServiceSettings : gaxgrpc::ServiceSettingsBase
{
    /// <summary>Get a new instance of the default <see cref="MobileDeviceConstantServiceSettings"/>.</summary>
    /// <returns>A new instance of the default <see cref="MobileDeviceConstantServiceSettings"/>.</returns>
    public static MobileDeviceConstantServiceSettings GetDefault() => new MobileDeviceConstantServiceSettings();
    /// <summary>
    /// Constructs a new <see cref="MobileDeviceConstantServiceSettings"/> object with default settings.
    /// </summary>
    public MobileDeviceConstantServiceSettings()
    {
    }
    // Copy constructor used by Clone(); partial OnCopy lets user code extend copying.
    private MobileDeviceConstantServiceSettings(MobileDeviceConstantServiceSettings existing) : base(existing)
    {
        gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
        GetMobileDeviceConstantSettings = existing.GetMobileDeviceConstantSettings;
        OnCopy(existing);
    }
    partial void OnCopy(MobileDeviceConstantServiceSettings existing);
    /// <summary>
    /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
    /// <c>MobileDeviceConstantServiceClient.GetMobileDeviceConstant</c> and
    /// <c>MobileDeviceConstantServiceClient.GetMobileDeviceConstantAsync</c>.
    /// </summary>
    /// <remarks>
    /// <list type="bullet">
    /// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
    /// <item><description>Retry delay multiplier: 1.3</description></item>
    /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
    /// <item><description>Maximum attempts: Unlimited</description></item>
    /// <item>
    /// <description>
    /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
    /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
    /// </description>
    /// </item>
    /// <item><description>Timeout: 3600 seconds.</description></item>
    /// </list>
    /// </remarks>
    public gaxgrpc::CallSettings GetMobileDeviceConstantSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
    /// <summary>Creates a deep clone of this object, with all the same property values.</summary>
    /// <returns>A deep clone of this <see cref="MobileDeviceConstantServiceSettings"/> object.</returns>
    public MobileDeviceConstantServiceSettings Clone() => new MobileDeviceConstantServiceSettings(this);
}
/// <summary>
/// Builder class for <see cref="MobileDeviceConstantServiceClient"/> to provide simple configuration of
/// credentials, endpoint etc.
/// </summary>
// NOTE: generated code (see the file header) — do not hand-edit logic here.
internal sealed partial class MobileDeviceConstantServiceClientBuilder : gaxgrpc::ClientBuilderBase<MobileDeviceConstantServiceClient>
{
    /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
    public MobileDeviceConstantServiceSettings Settings { get; set; }
    /// <summary>Creates a new builder with default settings.</summary>
    public MobileDeviceConstantServiceClientBuilder()
    {
        UseJwtAccessWithScopes = MobileDeviceConstantServiceClient.UseJwtAccessWithScopes;
    }
    // Partial hooks let user code intercept/replace the built client.
    partial void InterceptBuild(ref MobileDeviceConstantServiceClient client);
    partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<MobileDeviceConstantServiceClient> task);
    /// <summary>Builds the resulting client.</summary>
    public override MobileDeviceConstantServiceClient Build()
    {
        MobileDeviceConstantServiceClient client = null;
        InterceptBuild(ref client);
        return client ?? BuildImpl();
    }
    /// <summary>Builds the resulting client asynchronously.</summary>
    public override stt::Task<MobileDeviceConstantServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
    {
        stt::Task<MobileDeviceConstantServiceClient> task = null;
        InterceptBuildAsync(cancellationToken, ref task);
        return task ?? BuildAsyncImpl(cancellationToken);
    }
    private MobileDeviceConstantServiceClient BuildImpl()
    {
        Validate();
        grpccore::CallInvoker callInvoker = CreateCallInvoker();
        return MobileDeviceConstantServiceClient.Create(callInvoker, Settings);
    }
    private async stt::Task<MobileDeviceConstantServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
    {
        Validate();
        grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
        return MobileDeviceConstantServiceClient.Create(callInvoker, Settings);
    }
    /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
    protected override string GetDefaultEndpoint() => MobileDeviceConstantServiceClient.DefaultEndpoint;
    /// <summary>
    /// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
    /// </summary>
    protected override scg::IReadOnlyList<string> GetDefaultScopes() => MobileDeviceConstantServiceClient.DefaultScopes;
    /// <summary>Returns the channel pool to use when no other options are specified.</summary>
    protected override gaxgrpc::ChannelPool GetChannelPool() => MobileDeviceConstantServiceClient.ChannelPool;
    /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary>
    protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
/// <summary>MobileDeviceConstantService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service to fetch mobile device constants.
/// </remarks>
// NOTE: generated code (see the file header) — do not hand-edit logic here.
public abstract partial class MobileDeviceConstantServiceClient
{
    /// <summary>
    /// The default endpoint for the MobileDeviceConstantService service, which is a host of
    /// "googleads.googleapis.com" and a port of 443.
    /// </summary>
    public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443";
    /// <summary>The default MobileDeviceConstantService scopes.</summary>
    /// <remarks>
    /// The default MobileDeviceConstantService scopes are:
    /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list>
    /// </remarks>
    public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
    {
        "https://www.googleapis.com/auth/adwords",
    });
    internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
    internal static bool UseJwtAccessWithScopes
    {
        get
        {
            // Defaults to true; the partial hook below may override it.
            bool useJwtAccessWithScopes = true;
            MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
            return useJwtAccessWithScopes;
        }
    }
    static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
    /// <summary>
    /// Asynchronously creates a <see cref="MobileDeviceConstantServiceClient"/> using the default credentials,
    /// endpoint and settings. To specify custom credentials or other settings, use
    /// <see cref="MobileDeviceConstantServiceClientBuilder"/>.
    /// </summary>
    /// <param name="cancellationToken">
    /// The <see cref="st::CancellationToken"/> to use while creating the client.
    /// </param>
    /// <returns>The task representing the created <see cref="MobileDeviceConstantServiceClient"/>.</returns>
    public static stt::Task<MobileDeviceConstantServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
        new MobileDeviceConstantServiceClientBuilder().BuildAsync(cancellationToken);
    /// <summary>
    /// Synchronously creates a <see cref="MobileDeviceConstantServiceClient"/> using the default credentials,
    /// endpoint and settings. To specify custom credentials or other settings, use
    /// <see cref="MobileDeviceConstantServiceClientBuilder"/>.
    /// </summary>
    /// <returns>The created <see cref="MobileDeviceConstantServiceClient"/>.</returns>
    public static MobileDeviceConstantServiceClient Create() => new MobileDeviceConstantServiceClientBuilder().Build();
    /// <summary>
    /// Creates a <see cref="MobileDeviceConstantServiceClient"/> which uses the specified call invoker for remote
    /// operations.
    /// </summary>
    /// <param name="callInvoker">
    /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
    /// </param>
    /// <param name="settings">Optional <see cref="MobileDeviceConstantServiceSettings"/>.</param>
    /// <returns>The created <see cref="MobileDeviceConstantServiceClient"/>.</returns>
    internal static MobileDeviceConstantServiceClient Create(grpccore::CallInvoker callInvoker, MobileDeviceConstantServiceSettings settings = null)
    {
        gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
        grpcinter::Interceptor interceptor = settings?.Interceptor;
        if (interceptor != null)
        {
            callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
        }
        MobileDeviceConstantService.MobileDeviceConstantServiceClient grpcClient = new MobileDeviceConstantService.MobileDeviceConstantServiceClient(callInvoker);
        return new MobileDeviceConstantServiceClientImpl(grpcClient, settings);
    }
    /// <summary>
    /// Shuts down any channels automatically created by <see cref="Create()"/> and
    /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
    /// affected.
    /// </summary>
    /// <remarks>
    /// After calling this method, further calls to <see cref="Create()"/> and
    /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
    /// by another call to this method.
    /// </remarks>
    /// <returns>A task representing the asynchronous shutdown operation.</returns>
    public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
    /// <summary>The underlying gRPC MobileDeviceConstantService client</summary>
    public virtual MobileDeviceConstantService.MobileDeviceConstantServiceClient GrpcClient => throw new sys::NotImplementedException();
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::MobileDeviceConstant GetMobileDeviceConstant(GetMobileDeviceConstantRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::MobileDeviceConstant> GetMobileDeviceConstantAsync(GetMobileDeviceConstantRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::MobileDeviceConstant> GetMobileDeviceConstantAsync(GetMobileDeviceConstantRequest request, st::CancellationToken cancellationToken) =>
        GetMobileDeviceConstantAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the mobile device to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::MobileDeviceConstant GetMobileDeviceConstant(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetMobileDeviceConstant(new GetMobileDeviceConstantRequest
        {
            ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
        }, callSettings);
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the mobile device to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::MobileDeviceConstant> GetMobileDeviceConstantAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetMobileDeviceConstantAsync(new GetMobileDeviceConstantRequest
        {
            ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
        }, callSettings);
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the mobile device to fetch.
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::MobileDeviceConstant> GetMobileDeviceConstantAsync(string resourceName, st::CancellationToken cancellationToken) =>
        GetMobileDeviceConstantAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the mobile device to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::MobileDeviceConstant GetMobileDeviceConstant(gagvr::MobileDeviceConstantName resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetMobileDeviceConstant(new GetMobileDeviceConstantRequest
        {
            ResourceNameAsMobileDeviceConstantName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
        }, callSettings);
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the mobile device to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::MobileDeviceConstant> GetMobileDeviceConstantAsync(gagvr::MobileDeviceConstantName resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetMobileDeviceConstantAsync(new GetMobileDeviceConstantRequest
        {
            ResourceNameAsMobileDeviceConstantName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
        }, callSettings);
    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. Resource name of the mobile device to fetch.
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::MobileDeviceConstant> GetMobileDeviceConstantAsync(gagvr::MobileDeviceConstantName resourceName, st::CancellationToken cancellationToken) =>
        GetMobileDeviceConstantAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}
/// <summary>MobileDeviceConstantService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service to fetch mobile device constants.
/// </remarks>
public sealed partial class MobileDeviceConstantServiceClientImpl : MobileDeviceConstantServiceClient
{
    // Typed wrapper (sync + async) around the raw gRPC GetMobileDeviceConstant method.
    private readonly gaxgrpc::ApiCall<GetMobileDeviceConstantRequest, gagvr::MobileDeviceConstant> _callGetMobileDeviceConstant;

    /// <summary>
    /// Constructs a client wrapper for the MobileDeviceConstantService service, with the specified gRPC client and
    /// settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">
    /// The base <see cref="MobileDeviceConstantServiceSettings"/> used within this client.
    /// </param>
    public MobileDeviceConstantServiceClientImpl(MobileDeviceConstantService.MobileDeviceConstantServiceClient grpcClient, MobileDeviceConstantServiceSettings settings)
    {
        GrpcClient = grpcClient;
        // Fall back to the default settings when the caller supplied none.
        MobileDeviceConstantServiceSettings resolvedSettings = settings ?? MobileDeviceConstantServiceSettings.GetDefault();
        gaxgrpc::ClientHelper helper = new gaxgrpc::ClientHelper(resolvedSettings);
        // Route the "resource_name" field as a Google request parameter header.
        _callGetMobileDeviceConstant = helper.BuildApiCall<GetMobileDeviceConstantRequest, gagvr::MobileDeviceConstant>(grpcClient.GetMobileDeviceConstantAsync, grpcClient.GetMobileDeviceConstant, resolvedSettings.GetMobileDeviceConstantSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName);
        // Give partial-method hooks a chance to customize the call before use.
        Modify_ApiCall(ref _callGetMobileDeviceConstant);
        Modify_GetMobileDeviceConstantApiCall(ref _callGetMobileDeviceConstant);
        OnConstruction(grpcClient, resolvedSettings, helper);
    }

    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;

    partial void Modify_GetMobileDeviceConstantApiCall(ref gaxgrpc::ApiCall<GetMobileDeviceConstantRequest, gagvr::MobileDeviceConstant> call);

    partial void OnConstruction(MobileDeviceConstantService.MobileDeviceConstantServiceClient grpcClient, MobileDeviceConstantServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);

    /// <summary>The underlying gRPC MobileDeviceConstantService client</summary>
    public override MobileDeviceConstantService.MobileDeviceConstantServiceClient GrpcClient { get; }

    partial void Modify_GetMobileDeviceConstantRequest(ref GetMobileDeviceConstantRequest request, ref gaxgrpc::CallSettings settings);

    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override gagvr::MobileDeviceConstant GetMobileDeviceConstant(GetMobileDeviceConstantRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetMobileDeviceConstantRequest(ref request, ref callSettings);
        return _callGetMobileDeviceConstant.Sync(request, callSettings);
    }

    /// <summary>
    /// Returns the requested mobile device constant in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<gagvr::MobileDeviceConstant> GetMobileDeviceConstantAsync(GetMobileDeviceConstantRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetMobileDeviceConstantRequest(ref request, ref callSettings);
        return _callGetMobileDeviceConstant.Async(request, callSettings);
    }
}
}
| |
using GuiLabs.Canvas.Controls;
using GuiLabs.Editor.Blocks;
namespace GuiLabs.Editor.UI
{
public class TreeViewNode : ContainerBlock
{
    #region ctors

    /// <summary>
    /// Creates an empty node: a horizontal caption row (HMembers)
    /// above a vertical list of child nodes (VMembers).
    /// </summary>
    public TreeViewNode()
        : base()
    {
        HMembers = new HContainerBlock();
        VMembers = new VContainerBlock();
        this.Add(HMembers);
        this.Add(VMembers);
        InitControl();
    }

    /// <summary>
    /// Creates a node whose caption row shows the given text.
    /// </summary>
    public TreeViewNode(string Caption)
        : this()
    {
        HMembers.Add(new TreeViewLabelBlock(Caption));
    }

    #endregion

    #region AddNode

    /// <summary>
    /// Appends a child node with the given caption and returns it,
    /// so calls can be chained to build a subtree.
    /// </summary>
    public TreeViewNode AddNode(string Caption)
    {
        TreeViewNode childToAdd = new TreeViewNode(Caption);
        this.VMembers.Add(childToAdd);
        return childToAdd;
    }

    #endregion

    #region Control

    /// <summary>
    /// Builds the visual control for this node from the two member list controls.
    /// </summary>
    protected virtual void InitControl()
    {
        MyNodeControl = new TreeViewNodeControl(
            HMembers.MyListControl,
            VMembers.MyListControl);
    }

    private TreeViewNodeControl mMyNodeControl;

    /// <summary>
    /// The control backing this node. Setting it re-wires event
    /// subscriptions from the old control to the new one.
    /// </summary>
    public TreeViewNodeControl MyNodeControl
    {
        get
        {
            return mMyNodeControl;
        }
        protected set
        {
            if (mMyNodeControl != null)
            {
                UnSubscribeControl();
            }
            mMyNodeControl = value;
            if (mMyNodeControl != null)
            {
                SubscribeControl();
            }
        }
    }

    public override Control MyControl
    {
        get { return MyNodeControl; }
    }

    #endregion

    #region OnKeyDown

    /// <summary>
    /// Keyboard handling for the node itself: arrows move focus between the
    /// caption row and the child list; +, -, Space, * and / collapse/expand;
    /// Delete removes the node.
    /// </summary>
    protected override void OnKeyDown(object sender, System.Windows.Forms.KeyEventArgs e)
    {
        Block nextFocusable = null;
        switch (e.KeyCode)
        {
            case System.Windows.Forms.Keys.Left:
                // Collapse an expanded node that actually has children.
                if (!this.MyControl.Collapsed && this.VMembers.Children.Count > 0)
                {
                    e.Handled = true;
                    this.MyNodeControl.ToggleCollapse(true);
                }
                break;
            case System.Windows.Forms.Keys.Right:
                // NOTE(review): ToggleCollapse(true) is used for both collapse and
                // expand paths here — presumably it toggles the current state; confirm.
                if (this.MyControl.Collapsed)
                {
                    e.Handled = true;
                    this.MyNodeControl.ToggleCollapse(true);
                }
                else
                {
                    nextFocusable = VMembers.FindFirstFocusableBlock();
                }
                break;
            case System.Windows.Forms.Keys.Down:
                // Move into the child list only when it is shown and expanded.
                if (VMembers.MyControl.Visible && !VMembers.MyControl.Collapsed)
                {
                    nextFocusable = VMembers.FindFirstFocusableBlock();
                }
                break;
            case System.Windows.Forms.Keys.End:
                nextFocusable = HMembers.FindLastFocusableBlock();
                break;
            case System.Windows.Forms.Keys.Add:
                if (this.MyControl.Collapsed)
                {
                    e.Handled = true;
                    this.MyNodeControl.ToggleCollapse(true);
                }
                break;
            case System.Windows.Forms.Keys.Subtract:
                if (!this.MyControl.Collapsed && this.VMembers.Children.Count > 0)
                {
                    e.Handled = true;
                    this.MyNodeControl.ToggleCollapse(true);
                }
                break;
            case System.Windows.Forms.Keys.Space:
                e.Handled = true;
                this.MyNodeControl.ToggleCollapse(true);
                break;
            case System.Windows.Forms.Keys.Multiply:
                // Expand this node and everything beneath it.
                e.Handled = true;
                this.MyNodeControl.CollapseAll(false, true);
                break;
            case System.Windows.Forms.Keys.Divide:
                // Collapse this node and everything beneath it.
                e.Handled = true;
                this.MyNodeControl.CollapseAll(true, true);
                break;
            case System.Windows.Forms.Keys.Delete:
                this.Delete();
                e.Handled = true;
                break;
            default:
                break;
        }
        if (nextFocusable != null && nextFocusable.CanGetFocus)
        {
            nextFocusable.SetFocus();
            e.Handled = true;
        }
        RaiseKeyDown(e);
    }

    #endregion

    #region HMembers

    private HContainerBlock mHMembers;

    /// <summary>
    /// The caption row. Setting it moves the KeyDown subscription
    /// from the old container to the new one.
    /// </summary>
    public HContainerBlock HMembers
    {
        get { return mHMembers; }
        set
        {
            if (mHMembers != null)
            {
                mHMembers.KeyDown -= HMembers_KeyDown;
            }
            mHMembers = value;
            if (mHMembers != null)
            {
                mHMembers.KeyDown += HMembers_KeyDown;
            }
        }
    }

    // Navigation from within the caption row: Return/Down dives into the
    // child list; Left/Home returns focus to the node itself.
    void HMembers_KeyDown(Block Block, System.Windows.Forms.KeyEventArgs e)
    {
        Block nextFocusable = null;
        switch (e.KeyCode)
        {
            case System.Windows.Forms.Keys.Return:
            case System.Windows.Forms.Keys.Down:
                if (VMembers != null && this.VMembers.MyControl.Visible)
                {
                    nextFocusable = this.VMembers.FindFirstFocusableBlock();
                }
                break;
            case System.Windows.Forms.Keys.Left:
            case System.Windows.Forms.Keys.Home:
                nextFocusable = this;
                break;
            default:
                break;
        }
        // Fix: guard with CanGetFocus, consistent with OnKeyDown and
        // VMembers_KeyDown, so focus is never forced onto an unfocusable block.
        if (nextFocusable != null && nextFocusable.CanGetFocus)
        {
            nextFocusable.SetFocus();
            e.Handled = true;
        }
        RaiseKeyDown(e);
    }

    #endregion

    #region VMembers

    private VContainerBlock mVMembers;

    /// <summary>
    /// The child-node list. Setting it moves the KeyDown subscription
    /// from the old container to the new one.
    /// </summary>
    public VContainerBlock VMembers
    {
        get { return mVMembers; }
        set
        {
            if (mVMembers != null)
            {
                mVMembers.KeyDown -= VMembers_KeyDown;
            }
            mVMembers = value;
            if (mVMembers != null)
            {
                mVMembers.KeyDown += VMembers_KeyDown;
            }
        }
    }

    void VMembers_KeyDown(Block Block, System.Windows.Forms.KeyEventArgs e)
    {
        Block nextFocusable = null;
        switch (e.KeyCode)
        {
            // Let's select ourselves each time we're going up
            case System.Windows.Forms.Keys.Up:
            case System.Windows.Forms.Keys.Left:
            case System.Windows.Forms.Keys.Home:
                nextFocusable = this;
                break;
            default:
                break;
        }
        if (nextFocusable != null && nextFocusable.CanGetFocus)
        {
            nextFocusable.SetFocus();
            e.Handled = true;
        }
        RaiseKeyDown(e);
    }

    #endregion

    #region Style

    /// <summary>Style key used to look up this block's visual style.</summary>
    protected override string StyleName()
    {
        return "TreeViewNode";
    }

    #endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using System.Numerics;
using System.Linq;
using NUnit.Framework;
using Newtonsoft.Json;
using GlmSharp;
// ReSharper disable InconsistentNaming
namespace GlmSharpTest.Generated.Vec4
{
[TestFixture]
public class DoubleVec4Test
{
[Test]
public void Constructors()
{
    // Verifies every component produced by each dvec4 constructor overload.
    Action<dvec4, double, double, double, double> check = (vec, x, y, z, w) =>
    {
        Assert.AreEqual(x, vec.x);
        Assert.AreEqual(y, vec.y);
        Assert.AreEqual(z, vec.z);
        Assert.AreEqual(w, vec.w);
    };
    check(new dvec4(-7.5d), -7.5d, -7.5d, -7.5d, -7.5d);                       // scalar broadcast
    check(new dvec4(1.5d, -8d, -3.5d, 0.0), 1.5d, -8d, -3.5d, 0.0);            // per-component
    check(new dvec4(new dvec2(-3.5d, 1.5d)), -3.5d, 1.5d, 0.0, 0.0);           // from dvec2, zero-filled
    check(new dvec4(new dvec3(-2d, 8.5d, 0.5d)), -2d, 8.5d, 0.5d, 0.0);        // from dvec3, zero-filled
    check(new dvec4(new dvec4(-4.5d, -2d, 4.5d, -1d)), -4.5d, -2d, 4.5d, -1d); // copy constructor
}
[Test]
public void Indexer()
{
    var vec = new dvec4(8.5d, 4.5d, 3d, 1.0);

    // Reading a valid index returns the matching component.
    Assert.AreEqual(8.5d, vec[0]);
    Assert.AreEqual(4.5d, vec[1]);
    Assert.AreEqual(3d, vec[2]);
    Assert.AreEqual(1.0, vec[3]);

    // Any index outside [0, 3] must throw, for both get and set.
    foreach (var bad in new[] { -2147483648, -1, 4, 2147483647, 5 })
    {
        Assert.Throws<ArgumentOutOfRangeException>(() => { var s = vec[bad]; });
        Assert.Throws<ArgumentOutOfRangeException>(() => { vec[bad] = 0.0; });
    }

    // Writing through the indexer stores the value; read it back each time.
    int[] indices = { 1, 3, 1, 2, 3, 2, 2, 2, 2, 1, 0, 1, 0, 3, 2, 3, 0, 0, 1, 0, 2, 1, 1, 0, 1, 1, 3, 0, 2, 0, 0, 1, 2, 1, 3, 1, 3, 0, 3 };
    double[] values = { 0.0, 1.0, 2d, 3d, 4d, 5d, 6d, 7d, 8d, 9d, -1d, -2d, -3d, -4d, -5d, -6d, -7d, -8d, -9d, -9.5d, -8.5d, -7.5d, -6.5d, -5.5d, -4.5d, -3.5d, -2.5d, -1.5d, -0.5d, 0.5d, 1.5d, 2.5d, 3.5d, 4.5d, 5.5d, 6.5d, 7.5d, 8.5d, 9.5d };
    for (var i = 0; i < indices.Length; i++)
    {
        vec[indices[i]] = values[i];
        Assert.AreEqual(values[i], vec[indices[i]]);
    }
}
[Test]
public void PropertyValues()
{
    // Values exposes the components as an array in x, y, z, w order.
    var vec = new dvec4(6.5d, 8d, 1.0, 6d);
    var components = vec.Values;
    var expected = new[] { 6.5d, 8d, 1.0, 6d };
    for (var i = 0; i < expected.Length; i++)
    {
        Assert.AreEqual(expected[i], components[i]);
    }
    // ToArray must agree with Values.
    Assert.That(components.SequenceEqual(vec.ToArray()));
}
[Test]
public void StaticProperties()
{
    // Checks each predefined constant vector component-by-component.
    Action<dvec4, double, double, double, double> check = (vec, x, y, z, w) =>
    {
        Assert.AreEqual(x, vec.x);
        Assert.AreEqual(y, vec.y);
        Assert.AreEqual(z, vec.z);
        Assert.AreEqual(w, vec.w);
    };
    check(dvec4.Zero, 0.0, 0.0, 0.0, 0.0);
    check(dvec4.Ones, 1.0, 1.0, 1.0, 1.0);
    check(dvec4.UnitX, 1.0, 0.0, 0.0, 0.0);
    check(dvec4.UnitY, 0.0, 1.0, 0.0, 0.0);
    check(dvec4.UnitZ, 0.0, 0.0, 1.0, 0.0);
    check(dvec4.UnitW, 0.0, 0.0, 0.0, 1.0);
    check(dvec4.MaxValue, double.MaxValue, double.MaxValue, double.MaxValue, double.MaxValue);
    check(dvec4.MinValue, double.MinValue, double.MinValue, double.MinValue, double.MinValue);
    check(dvec4.Epsilon, double.Epsilon, double.Epsilon, double.Epsilon, double.Epsilon);
    // NUnit's AreEqual treats two NaNs as equal, so this works for the NaN vector.
    check(dvec4.NaN, double.NaN, double.NaN, double.NaN, double.NaN);
    check(dvec4.NegativeInfinity, double.NegativeInfinity, double.NegativeInfinity, double.NegativeInfinity, double.NegativeInfinity);
    check(dvec4.PositiveInfinity, double.PositiveInfinity, double.PositiveInfinity, double.PositiveInfinity, double.PositiveInfinity);
}
[Test]
public void Operators()
{
    // Equality is value-based: equal contents compare equal, different contents do not.
    var a = new dvec4(4d, 3.5d, 5.5d, -6d);
    var b = new dvec4(4d, 3.5d, 5.5d, -6d);
    var c = new dvec4(-6d, 5.5d, 3.5d, 4d);
    // A copy-constructed vector equals its source.
    Assert.That(a == new dvec4(a));
    Assert.That(b == new dvec4(b));
    Assert.That(c == new dvec4(c));
    Assert.That(a == b);
    Assert.That(a != c);
    Assert.That(b != c);
}
[Test]
public void StringInterop()
{
    // Round-trips a vector through ToString / Parse / TryParse in several formats.
    var original = new dvec4(0.0, 3d, -6.5d, 1.5d);

    var plain = original.ToString();
    var formatted = original.ToString("#");
    Assert.AreEqual(original, dvec4.Parse(plain));
    Assert.AreEqual(original, dvec4.Parse(formatted, "#"));

    dvec4 parsed;
    Assert.That(dvec4.TryParse(plain, out parsed));
    Assert.AreEqual(original, parsed);
    Assert.That(dvec4.TryParse(formatted, "#", out parsed));
    Assert.AreEqual(original, parsed);

    // Invalid input: TryParse reports failure, Parse throws.
    Assert.False(dvec4.TryParse(null, out parsed));
    Assert.False(dvec4.TryParse("", out parsed));
    Assert.False(dvec4.TryParse(plain + ", 0", out parsed));
    Assert.Throws<NullReferenceException>(() => { dvec4.Parse(null); });
    Assert.Throws<FormatException>(() => { dvec4.Parse(""); });
    Assert.Throws<FormatException>(() => { dvec4.Parse(plain + ", 0"); });

    // Culture- and style-aware overloads.
    var semicolon = original.ToString(";", CultureInfo.InvariantCulture);
    Assert.That(semicolon.Length > 0);
    var general = original.ToString("; ", "G");
    var invariant = original.ToString("; ", "G", CultureInfo.InvariantCulture);
    Assert.AreEqual(original, dvec4.Parse(general, "; ", NumberStyles.Number));
    Assert.AreEqual(original, dvec4.Parse(invariant, "; ", NumberStyles.Number, CultureInfo.InvariantCulture));
    Assert.That(dvec4.TryParse(invariant, "; ", NumberStyles.Number, CultureInfo.InvariantCulture, out parsed));
    Assert.AreEqual(original, parsed);
}
[Test]
public void SerializationJson()
{
    // A JSON round-trip must preserve the value and serialize back identically.
    var source = new dvec4(6.5d, 2d, -6.5d, 2d);
    var json = JsonConvert.SerializeObject(source);
    var roundTripped = JsonConvert.DeserializeObject<dvec4>(json);
    Assert.AreEqual(source, roundTripped);
    Assert.AreEqual(json, JsonConvert.SerializeObject(roundTripped));
}
[Test]
public void InvariantId()
{
    // Unary plus must be the identity for any vector.
    var samples = new[]
    {
        new dvec4(-8.5d, 9d, 1.5d, 2d),
        new dvec4(6.5d, -1d, 5.5d, -1.5d),
        new dvec4(-4d, 7d, -4.5d, 9d),
        new dvec4(3.5d, 4.5d, 7.5d, 1.5d),
        new dvec4(-3d, 6d, 8d, 3d),
        new dvec4(-2.5d, -8d, -3d, -5d),
        new dvec4(5d, 8.5d, -3d, 8.5d),
        new dvec4(-6.5d, 4d, 9d, -2.5d),
        new dvec4(7.5d, -9.5d, -7.5d, 4.5d),
        new dvec4(5.5d, -5.5d, 6d, 1.0)
    };
    foreach (var v in samples)
    {
        Assert.AreEqual(v, +v);
    }
}
[Test]
public void InvariantDouble()
{
    // v + v must equal 2 * v exactly (halved literals make this exact in doubles).
    var samples = new[]
    {
        new dvec4(-5.5d, 6.5d, -2.5d, 0.0),
        new dvec4(9d, 1.5d, -5d, 0.0),
        new dvec4(-9d, -3.5d, 6d, -1d),
        new dvec4(-5d, 1.5d, 3.5d, 2.5d),
        new dvec4(9d, -6.5d, 5d, 7.5d),
        new dvec4(-4.5d, -2.5d, -5.5d, 1.0),
        new dvec4(3d, -7.5d, -3.5d, 5d),
        new dvec4(-8d, 6.5d, -1d, -1.5d),
        new dvec4(-6.5d, -0.5d, -3d, 0.0),
        new dvec4(8.5d, 0.0, -7d, -7d)
    };
    foreach (var v in samples)
    {
        Assert.AreEqual(v + v, 2 * v);
    }
}
[Test]
public void InvariantTriple()
{
    // v + v + v must equal 3 * v for these generated sample vectors.
    var samples = new[]
    {
        new dvec4(-2d, -4d, 1.5d, 9.5d),
        new dvec4(-2.5d, 1.0, -7.5d, -4d),
        new dvec4(2d, 3d, 3.5d, 2d),
        new dvec4(-9.5d, -7.5d, -9d, -3.5d),
        new dvec4(-4d, -7.5d, -3d, 2d),
        new dvec4(0.5d, 8.5d, 3.5d, -7.5d),
        new dvec4(-7.5d, -0.5d, 7d, 5.5d),
        new dvec4(-3d, 4.5d, -6d, 7.5d),
        new dvec4(-8d, 9d, -7.5d, -7.5d),
        new dvec4(-4d, 9.5d, 8.5d, 2d)
    };
    foreach (var v in samples)
    {
        Assert.AreEqual(v + v + v, 3 * v);
    }
}
[Test]
public void InvariantCommutative()
{
    // Component-wise multiplication is commutative: a * b == b * a.
    var pairs = new[]
    {
        new[] { new dvec4(-8d, 7d, 4.5d, -9.5d), new dvec4(-8.5d, -9d, 1.0, 1.0) },
        new[] { new dvec4(-6d, -6d, 4d, -1d), new dvec4(8.5d, -8.5d, -4d, -7d) },
        new[] { new dvec4(-2.5d, -1d, 4.5d, 6d), new dvec4(3d, -2.5d, 9d, -5.5d) },
        new[] { new dvec4(6.5d, 6d, 0.5d, -4.5d), new dvec4(-8d, 1.5d, -4.5d, 3d) },
        new[] { new dvec4(-3d, -9.5d, 8d, 4d), new dvec4(3d, -6.5d, 2.5d, 9.5d) },
        new[] { new dvec4(8d, 0.5d, -8d, -2.5d), new dvec4(-6d, 5.5d, 1.0, 7.5d) },
        new[] { new dvec4(3d, 1.0, -6d, 3d), new dvec4(-4d, -1d, -2.5d, 0.5d) },
        new[] { new dvec4(8.5d, 9d, -5.5d, -5d), new dvec4(-1.5d, -6d, -6.5d, -5.5d) },
        new[] { new dvec4(0.5d, 1.0, 8.5d, -8d), new dvec4(-3d, 8d, -3d, 4d) },
        new[] { new dvec4(-8d, 5.5d, 7.5d, -3d), new dvec4(9d, -6.5d, 8d, 1.0) }
    };
    foreach (var pair in pairs)
    {
        Assert.AreEqual(pair[0] * pair[1], pair[1] * pair[0]);
    }
}
[Test]
public void InvariantAssociative()
{
    // Multiplication distributes over addition: a * (b + c) == a * b + a * c
    // (exact for these half-integer sample values).
    var triples = new[]
    {
        new[] { new dvec4(-6d, -6.5d, 2.5d, 3.5d), new dvec4(7.5d, -6d, 7.5d, 0.5d), new dvec4(4d, 4d, 0.5d, -7.5d) },
        new[] { new dvec4(-2d, 0.5d, -2.5d, -4d), new dvec4(2d, -5.5d, 7d, -5d), new dvec4(-3.5d, 1.0, -4d, -1d) },
        new[] { new dvec4(1.5d, 3.5d, -0.5d, -9d), new dvec4(-0.5d, 8d, -2.5d, 6d), new dvec4(-1.5d, -1d, -8.5d, -3.5d) },
        new[] { new dvec4(-3.5d, 2d, -7.5d, 0.0), new dvec4(-1.5d, 5d, -4.5d, 0.0), new dvec4(6d, 7.5d, 4d, 9d) },
        new[] { new dvec4(5d, 3.5d, 2d, -7.5d), new dvec4(2.5d, 3d, 4.5d, -5d), new dvec4(8d, -8.5d, 1.0, 4d) },
        new[] { new dvec4(-3.5d, -9d, 0.0, 5.5d), new dvec4(-8d, -6.5d, 4.5d, 1.0), new dvec4(8d, 0.0, -2.5d, 0.0) },
        new[] { new dvec4(2d, 6d, -2.5d, -8.5d), new dvec4(-6d, -4d, 3d, 3.5d), new dvec4(-1.5d, -9.5d, -3d, 5.5d) },
        new[] { new dvec4(6d, 4d, -5d, -5.5d), new dvec4(-6d, 6d, -7d, 5d), new dvec4(0.0, -8d, -4d, 8d) },
        new[] { new dvec4(4d, -0.5d, -6.5d, -5.5d), new dvec4(3d, 3d, 0.0, 4d), new dvec4(5d, 1.0, -9.5d, -4.5d) },
        new[] { new dvec4(-7d, -3d, 8.5d, 5.5d), new dvec4(-8d, 8d, -6d, 6d), new dvec4(5d, 5d, -0.5d, -3d) }
    };
    foreach (var t in triples)
    {
        Assert.AreEqual(t[0] * (t[1] + t[2]), t[0] * t[1] + t[0] * t[2]);
    }
}
[Test]
public void InvariantIdNeg()
{
    // Double negation must be the identity: -(-v) == v.
    var samples = new[]
    {
        new dvec4(-4d, 6.5d, 6.5d, 6.5d),
        new dvec4(-3.5d, -3.5d, -8.5d, 0.0),
        new dvec4(6.5d, -5d, 6.5d, 1.0),
        new dvec4(-6.5d, 2d, -3d, -7.5d),
        new dvec4(7d, -3.5d, -7.5d, 4.5d),
        new dvec4(-6d, -1.5d, 4d, 9.5d),
        new dvec4(2d, 2.5d, -8.5d, 5d),
        new dvec4(-3d, -6.5d, -9d, -8.5d),
        new dvec4(-9d, 0.5d, 2.5d, -7.5d),
        new dvec4(1.0, 5d, -7d, -8d)
    };
    foreach (var v in samples)
    {
        Assert.AreEqual(v, -(-v));
    }
}
[Test]
public void InvariantCommutativeNeg()
{
    // Subtraction anticommutes: a - b == -(b - a).
    var pairs = new[]
    {
        new[] { new dvec4(8d, -2d, -8.5d, 0.5d), new dvec4(8d, -6.5d, -2.5d, 0.5d) },
        new[] { new dvec4(8d, 0.5d, 8d, 3d), new dvec4(5d, 7.5d, 3.5d, 6d) },
        new[] { new dvec4(7.5d, -1d, 5d, -4.5d), new dvec4(-6d, 9d, -2d, -5.5d) },
        new[] { new dvec4(-4.5d, -2.5d, -9.5d, 6d), new dvec4(1.5d, -7.5d, 4.5d, -4.5d) },
        new[] { new dvec4(-5.5d, -2d, -4.5d, 0.0), new dvec4(1.5d, -3.5d, -8d, 8d) },
        new[] { new dvec4(1.0, 6d, -3d, 4d), new dvec4(-5.5d, -3.5d, 2d, 2.5d) },
        new[] { new dvec4(-1.5d, 2.5d, 5d, -2d), new dvec4(-9d, -5.5d, 7d, 9.5d) },
        new[] { new dvec4(0.0, 6.5d, 5d, -7.5d), new dvec4(2d, -9.5d, 9.5d, -3.5d) },
        new[] { new dvec4(6.5d, -5.5d, -9.5d, 3.5d), new dvec4(-2d, 2.5d, -5d, -2d) },
        new[] { new dvec4(3.5d, 7.5d, -5.5d, 8d), new dvec4(6.5d, 6d, 0.0, 9.5d) }
    };
    foreach (var pair in pairs)
    {
        Assert.AreEqual(pair[0] - pair[1], -(pair[1] - pair[0]));
    }
}
[Test]
public void InvariantAssociativeNeg()
{
    // Multiplication distributes over subtraction: a * (b - c) == a * b - a * c.
    var triples = new[]
    {
        new[] { new dvec4(3d, 4d, 2d, -2d), new dvec4(-3d, 4d, 0.5d, 6.5d), new dvec4(3d, -7.5d, -1d, -1d) },
        new[] { new dvec4(-5d, -9d, 7.5d, -2.5d), new dvec4(-9.5d, 4d, -8d, -1d), new dvec4(7d, 9.5d, 4d, 9d) },
        new[] { new dvec4(-3d, -2.5d, -4d, -6d), new dvec4(1.0, 1.0, 5.5d, -9d), new dvec4(7.5d, -4d, -9d, 8d) },
        new[] { new dvec4(-2.5d, -7.5d, -1d, -7.5d), new dvec4(5d, -5d, -6.5d, -4.5d), new dvec4(-4d, -5.5d, 6d, 8.5d) },
        new[] { new dvec4(-5.5d, 6.5d, 4d, -9d), new dvec4(7d, 1.5d, -2.5d, 4d), new dvec4(9.5d, 3.5d, 8.5d, -4.5d) },
        new[] { new dvec4(0.5d, -4d, 5.5d, 2d), new dvec4(-3.5d, 1.5d, -3d, 0.0), new dvec4(0.0, -1.5d, 0.0, 8.5d) },
        new[] { new dvec4(4.5d, 5.5d, 4d, 3.5d), new dvec4(-7d, -9.5d, 5.5d, -1.5d), new dvec4(-8.5d, -6d, 2.5d, 4d) },
        new[] { new dvec4(6.5d, -7d, -2d, 6d), new dvec4(-3.5d, -5d, 9d, 3.5d), new dvec4(-6.5d, -4.5d, 2.5d, 2.5d) },
        new[] { new dvec4(-9d, -9.5d, 8.5d, -6.5d), new dvec4(-2d, 5d, 8.5d, 5.5d), new dvec4(6.5d, 5d, -5.5d, -2d) },
        new[] { new dvec4(-3.5d, 4.5d, -2.5d, -9.5d), new dvec4(8.5d, -1d, 4d, -5d), new dvec4(-8.5d, 0.5d, 5d, 9d) }
    };
    foreach (var t in triples)
    {
        Assert.AreEqual(t[0] * (t[1] - t[2]), t[0] * t[1] - t[0] * t[2]);
    }
}
[Test]
public void TriangleInequality()
{
    // Max-norm triangle inequality: |a|max + |b|max >= |a + b|max.
    var pairs = new[]
    {
        new[] { new dvec4(-2d, -8.5d, 1.0, -2.5d), new dvec4(-4d, -2.5d, -5d, 7.5d) },
        new[] { new dvec4(-3.5d, 3d, 7d, -9.5d), new dvec4(0.0, 6.5d, 2.5d, -7.5d) },
        new[] { new dvec4(-1.5d, -7.5d, 1.0, -7d), new dvec4(-5d, -6d, 0.5d, 0.5d) },
        new[] { new dvec4(8.5d, 4.5d, -3.5d, -8d), new dvec4(-2.5d, -4d, 1.0, 7d) },
        new[] { new dvec4(-0.5d, -4d, -0.5d, -9d), new dvec4(5d, -2.5d, 2d, -0.5d) },
        new[] { new dvec4(-2.5d, -8.5d, -2d, 9.5d), new dvec4(-7d, 1.0, -6d, 9d) },
        new[] { new dvec4(0.5d, -3.5d, -4d, 2d), new dvec4(4.5d, 7.5d, -3d, 5.5d) },
        new[] { new dvec4(1.5d, -2d, -1.5d, -9d), new dvec4(0.0, 6.5d, 9d, -3d) },
        new[] { new dvec4(2d, 9.5d, 0.5d, -2.5d), new dvec4(7d, -4d, -7d, 1.0) },
        new[] { new dvec4(-9d, -2d, -0.5d, 4.5d), new dvec4(4d, 0.5d, -3d, 6.5d) }
    };
    foreach (var pair in pairs)
    {
        Assert.GreaterOrEqual(pair[0].NormMax + pair[1].NormMax, (pair[0] + pair[1]).NormMax);
    }
}
[Test]
public void InvariantNorm()
{
    // The max norm never exceeds the Euclidean norm.
    var samples = new[]
    {
        new dvec4(8d, 2d, -6d, 0.0),
        new dvec4(6d, -9.5d, 8.5d, -8d),
        new dvec4(9d, 0.0, -9d, -4d),
        new dvec4(-1d, 8.5d, -2d, 7.5d),
        new dvec4(9d, -1d, 7.5d, -4.5d),
        new dvec4(-9d, 0.5d, 5d, 5.5d),
        new dvec4(6d, -4.5d, -9d, -9d),
        new dvec4(7d, -4d, -6.5d, -7d),
        new dvec4(-7d, -8d, -1.5d, 3.5d),
        new dvec4(-5d, 2.5d, -4d, 9d)
    };
    foreach (var v in samples)
    {
        Assert.LessOrEqual(v.NormMax, v.Norm);
    }
}
[Test]
public void RandomUniform0()
{
    // Samples dvec4.Random(random, -3, 1) and checks the empirical mean and
    // variance against U(-3, 1): mean = -1, variance = (b - a)^2 / 12 = 4/3.
    var random = new Random(946833268);
    var total = new dvec4(0.0);
    var totalSqr = new dvec4(0.0);
    const int count = 50000;
    for (var i = 0; i < count; ++i)
    {
        var sample = dvec4.Random(random, -3, 1);
        total += sample;
        totalSqr += glm.Pow2(sample);
    }
    var mean = total / (double)count;
    var variance = totalSqr / (double)count - mean * mean;
    Assert.AreEqual(mean.x, -1, 1.0);
    Assert.AreEqual(mean.y, -1, 1.0);
    Assert.AreEqual(mean.z, -1, 1.0);
    Assert.AreEqual(mean.w, -1, 1.0);
    Assert.AreEqual(variance.x, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.y, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.z, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.w, 1.33333333333333, 3.0);
}
[Test]
public void RandomUniform1()
{
    // Samples dvec4.RandomUniform(random, 3, 7) and checks mean/variance
    // against U(3, 7): mean = 5, variance = (b - a)^2 / 12 = 4/3.
    var random = new Random(1665401270);
    var total = new dvec4(0.0);
    var totalSqr = new dvec4(0.0);
    const int count = 50000;
    for (var i = 0; i < count; ++i)
    {
        var sample = dvec4.RandomUniform(random, 3, 7);
        total += sample;
        totalSqr += glm.Pow2(sample);
    }
    var mean = total / (double)count;
    var variance = totalSqr / (double)count - mean * mean;
    Assert.AreEqual(mean.x, 5, 1.0);
    Assert.AreEqual(mean.y, 5, 1.0);
    Assert.AreEqual(mean.z, 5, 1.0);
    Assert.AreEqual(mean.w, 5, 1.0);
    Assert.AreEqual(variance.x, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.y, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.z, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.w, 1.33333333333333, 3.0);
}
[Test]
public void RandomUniform2()
{
    // Samples dvec4.Random(random, -2, 2) and checks mean/variance
    // against U(-2, 2): mean = 0, variance = (b - a)^2 / 12 = 4/3.
    var random = new Random(236485625);
    var total = new dvec4(0.0);
    var totalSqr = new dvec4(0.0);
    const int count = 50000;
    for (var i = 0; i < count; ++i)
    {
        var sample = dvec4.Random(random, -2, 2);
        total += sample;
        totalSqr += glm.Pow2(sample);
    }
    var mean = total / (double)count;
    var variance = totalSqr / (double)count - mean * mean;
    Assert.AreEqual(mean.x, 0, 1.0);
    Assert.AreEqual(mean.y, 0, 1.0);
    Assert.AreEqual(mean.z, 0, 1.0);
    Assert.AreEqual(mean.w, 0, 1.0);
    Assert.AreEqual(variance.x, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.y, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.z, 1.33333333333333, 3.0);
    Assert.AreEqual(variance.w, 1.33333333333333, 3.0);
}
[Test]
public void RandomUniform3()
{
    // Samples dvec4.RandomUniform(random, 3, 4) and checks mean/variance
    // against U(3, 4): mean = 3.5, variance = 1 / 12.
    var random = new Random(955053627);
    var total = new dvec4(0.0);
    var totalSqr = new dvec4(0.0);
    const int count = 50000;
    for (var i = 0; i < count; ++i)
    {
        var sample = dvec4.RandomUniform(random, 3, 4);
        total += sample;
        totalSqr += glm.Pow2(sample);
    }
    var mean = total / (double)count;
    var variance = totalSqr / (double)count - mean * mean;
    Assert.AreEqual(mean.x, 3.5, 1.0);
    Assert.AreEqual(mean.y, 3.5, 1.0);
    Assert.AreEqual(mean.z, 3.5, 1.0);
    Assert.AreEqual(mean.w, 3.5, 1.0);
    Assert.AreEqual(variance.x, 0.0833333333333333, 3.0);
    Assert.AreEqual(variance.y, 0.0833333333333333, 3.0);
    Assert.AreEqual(variance.z, 0.0833333333333333, 3.0);
    Assert.AreEqual(variance.w, 0.0833333333333333, 3.0);
}
[Test]
public void RandomUniform4()
{
    // Samples dvec4.Random(random, -2, -1) and checks mean/variance
    // against U(-2, -1): mean = -1.5, variance = 1 / 12.
    var random = new Random(220044907);
    var total = new dvec4(0.0);
    var totalSqr = new dvec4(0.0);
    const int count = 50000;
    for (var i = 0; i < count; ++i)
    {
        var sample = dvec4.Random(random, -2, -1);
        total += sample;
        totalSqr += glm.Pow2(sample);
    }
    var mean = total / (double)count;
    var variance = totalSqr / (double)count - mean * mean;
    Assert.AreEqual(mean.x, -1.5, 1.0);
    Assert.AreEqual(mean.y, -1.5, 1.0);
    Assert.AreEqual(mean.z, -1.5, 1.0);
    Assert.AreEqual(mean.w, -1.5, 1.0);
    Assert.AreEqual(variance.x, 0.0833333333333333, 3.0);
    Assert.AreEqual(variance.y, 0.0833333333333333, 3.0);
    Assert.AreEqual(variance.z, 0.0833333333333333, 3.0);
    Assert.AreEqual(variance.w, 0.0833333333333333, 3.0);
}
[Test]
public void RandomGaussian0()
{
    // Samples dvec4.RandomNormal(mean, variance) and checks the empirical
    // moments against the requested Gaussian parameters.
    var random = new Random(803825297);
    var total = new dvec4(0.0);
    var totalSqr = new dvec4(0.0);
    const int count = 50000;
    for (var i = 0; i < count; ++i)
    {
        var sample = dvec4.RandomNormal(random, 1.0502472887981d, 2.7189641691367d);
        total += sample;
        totalSqr += glm.Pow2(sample);
    }
    var mean = total / (double)count;
    var variance = totalSqr / (double)count - mean * mean;
    Assert.AreEqual(mean.x, 1.0502472887981, 1.0);
    Assert.AreEqual(mean.y, 1.0502472887981, 1.0);
    Assert.AreEqual(mean.z, 1.0502472887981, 1.0);
    Assert.AreEqual(mean.w, 1.0502472887981, 1.0);
    Assert.AreEqual(variance.x, 2.7189641691367, 3.0);
    Assert.AreEqual(variance.y, 2.7189641691367, 3.0);
    Assert.AreEqual(variance.z, 2.7189641691367, 3.0);
    Assert.AreEqual(variance.w, 2.7189641691367, 3.0);
}
[Test]
public void RandomGaussian1()
{
// Same check as RandomGaussian0 but through the RandomGaussian entry point.
var rng = new Random(968149683);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomGaussian(rng, 1.65950470774411d, 8.5450055210595d);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, 1.65950470774411, 1.0);
Assert.AreEqual(mean.y, 1.65950470774411, 1.0);
Assert.AreEqual(mean.z, 1.65950470774411, 1.0);
Assert.AreEqual(mean.w, 1.65950470774411, 1.0);
Assert.AreEqual(spread.x, 8.5450055210595, 3.0);
Assert.AreEqual(spread.y, 8.5450055210595, 3.0);
Assert.AreEqual(spread.z, 8.5450055210595, 3.0);
Assert.AreEqual(spread.w, 8.5450055210595, 3.0);
}
[Test]
public void RandomGaussian2()
{
// 50k normal draws with mean ~1.6667 and variance ~0.157.
var rng = new Random(378860534);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomNormal(rng, 1.66665352492903d, 0.15697779606887d);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, 1.66665352492903, 1.0);
Assert.AreEqual(mean.y, 1.66665352492903, 1.0);
Assert.AreEqual(mean.z, 1.66665352492903, 1.0);
Assert.AreEqual(mean.w, 1.66665352492903, 1.0);
Assert.AreEqual(spread.x, 0.15697779606887, 3.0);
Assert.AreEqual(spread.y, 0.15697779606887, 3.0);
Assert.AreEqual(spread.z, 0.15697779606887, 3.0);
Assert.AreEqual(spread.w, 0.15697779606887, 3.0);
}
[Test]
public void RandomGaussian3()
{
// 50k Gaussian draws with negative mean; variance expectation ~1.341.
var rng = new Random(428522525);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomGaussian(rng, -0.702141867346196d, 1.34130113354945d);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, -0.702141867346196, 1.0);
Assert.AreEqual(mean.y, -0.702141867346196, 1.0);
Assert.AreEqual(mean.z, -0.702141867346196, 1.0);
Assert.AreEqual(mean.w, -0.702141867346196, 1.0);
Assert.AreEqual(spread.x, 1.34130113354945, 3.0);
Assert.AreEqual(spread.y, 1.34130113354945, 3.0);
Assert.AreEqual(spread.z, 1.34130113354945, 3.0);
Assert.AreEqual(spread.w, 1.34130113354945, 3.0);
}
[Test]
public void RandomGaussian4()
{
// 50k normal draws with negative mean; variance expectation ~2.407.
var rng = new Random(1561752260);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomNormal(rng, -0.76640407311097d, 2.40718037933446d);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, -0.76640407311097, 1.0);
Assert.AreEqual(mean.y, -0.76640407311097, 1.0);
Assert.AreEqual(mean.z, -0.76640407311097, 1.0);
Assert.AreEqual(mean.w, -0.76640407311097, 1.0);
Assert.AreEqual(spread.x, 2.40718037933446, 3.0);
Assert.AreEqual(spread.y, 2.40718037933446, 3.0);
Assert.AreEqual(spread.z, 2.40718037933446, 3.0);
Assert.AreEqual(spread.w, 2.40718037933446, 3.0);
}
[Test]
public void RandomNormal0()
{
// Standard-normal draws: per-component mean ~0 and variance ~1.
var rng = new Random(276079068);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomNormal(rng);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, 0, 1.0);
Assert.AreEqual(mean.y, 0, 1.0);
Assert.AreEqual(mean.z, 0, 1.0);
Assert.AreEqual(mean.w, 0, 1.0);
Assert.AreEqual(spread.x, 1, 3.0);
Assert.AreEqual(spread.y, 1, 3.0);
Assert.AreEqual(spread.z, 1, 3.0);
Assert.AreEqual(spread.w, 1, 3.0);
}
[Test]
public void RandomNormal1()
{
// Standard-normal draws (different seed): mean ~0, variance ~1.
var rng = new Random(1397978887);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomNormal(rng);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, 0, 1.0);
Assert.AreEqual(mean.y, 0, 1.0);
Assert.AreEqual(mean.z, 0, 1.0);
Assert.AreEqual(mean.w, 0, 1.0);
Assert.AreEqual(spread.x, 1, 3.0);
Assert.AreEqual(spread.y, 1, 3.0);
Assert.AreEqual(spread.z, 1, 3.0);
Assert.AreEqual(spread.w, 1, 3.0);
}
[Test]
public void RandomNormal2()
{
// Standard-normal draws (different seed): mean ~0, variance ~1.
var rng = new Random(372395059);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomNormal(rng);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, 0, 1.0);
Assert.AreEqual(mean.y, 0, 1.0);
Assert.AreEqual(mean.z, 0, 1.0);
Assert.AreEqual(mean.w, 0, 1.0);
Assert.AreEqual(spread.x, 1, 3.0);
Assert.AreEqual(spread.y, 1, 3.0);
Assert.AreEqual(spread.z, 1, 3.0);
Assert.AreEqual(spread.w, 1, 3.0);
}
[Test]
public void RandomNormal3()
{
// Standard-normal draws (different seed): mean ~0, variance ~1.
var rng = new Random(1494294878);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomNormal(rng);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, 0, 1.0);
Assert.AreEqual(mean.y, 0, 1.0);
Assert.AreEqual(mean.z, 0, 1.0);
Assert.AreEqual(mean.w, 0, 1.0);
Assert.AreEqual(spread.x, 1, 3.0);
Assert.AreEqual(spread.y, 1, 3.0);
Assert.AreEqual(spread.z, 1, 3.0);
Assert.AreEqual(spread.w, 1, 3.0);
}
[Test]
public void RandomNormal4()
{
// Standard-normal draws (different seed): mean ~0, variance ~1.
var rng = new Random(83447086);
const int sampleCount = 50000;
var total = new dvec4(0.0);
var totalOfSquares = new dvec4(0.0);
for (var i = 0; i < sampleCount; ++i)
{
var sample = dvec4.RandomNormal(rng);
total += (dvec4)sample;
totalOfSquares += glm.Pow2((dvec4)sample);
}
// Biased variance estimator: Var = E[x^2] - E[x]^2.
var mean = total / (double)sampleCount;
var spread = totalOfSquares / (double)sampleCount - mean * mean;
Assert.AreEqual(mean.x, 0, 1.0);
Assert.AreEqual(mean.y, 0, 1.0);
Assert.AreEqual(mean.z, 0, 1.0);
Assert.AreEqual(mean.w, 0, 1.0);
Assert.AreEqual(spread.x, 1, 3.0);
Assert.AreEqual(spread.y, 1, 3.0);
Assert.AreEqual(spread.z, 1, 3.0);
Assert.AreEqual(spread.w, 1, 3.0);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using Microsoft.Build.Framework;
using Microsoft.Build.Shared;
using Microsoft.Build.Utilities;
using Shouldly;
using Xunit;
#pragma warning disable 0219
namespace Microsoft.Build.UnitTests
{
/// <summary>
/// Tests for TaskItem: construction from ITaskItem, metadata get/set/copy
/// semantics, the built-in item-spec modifiers, and marshaling items across
/// appdomains (when FEATURE_APPDOMAIN is defined).
/// </summary>
public class TaskItemTests
{
// Make sure a TaskItem can be constructed using an ITaskItem
[Fact]
public void ConstructWithITaskItem()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.SetMetadata("Dog", "Bingo");
from.SetMetadata("Cat", "Morris");
TaskItem to = new TaskItem((ITaskItem)from);
to.ItemSpec.ShouldBe("Monkey.txt");
// The explicit string conversion yields the item-spec.
((string)to).ShouldBe("Monkey.txt");
to.GetMetadata("Dog").ShouldBe("Bingo");
to.GetMetadata("Cat").ShouldBe("Morris");
// Test that item metadata are case-insensitive.
to.SetMetadata("CaT", "");
to.GetMetadata("Cat").ShouldBe("");
// manipulate the item-spec a bit: the derived modifiers reflect "Monkey.txt"
to.GetMetadata(FileUtilities.ItemSpecModifiers.Filename).ShouldBe("Monkey");
to.GetMetadata(FileUtilities.ItemSpecModifiers.Extension).ShouldBe(".txt");
to.GetMetadata(FileUtilities.ItemSpecModifiers.RelativeDir).ShouldBe(string.Empty);
}
// Make sure metadata can be cloned from an existing ITaskItem
[Fact]
public void CopyMetadataFromITaskItem()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.SetMetadata("Dog", "Bingo");
from.SetMetadata("Cat", "Morris");
from.SetMetadata("Bird", "Big");
TaskItem to = new TaskItem();
to.ItemSpec = "Bonobo.txt";
to.SetMetadata("Sponge", "Bob");
to.SetMetadata("Dog", "Harriet");
to.SetMetadata("Cat", "Mike");
from.CopyMetadataTo(to);
to.ItemSpec.ShouldBe("Bonobo.txt"); // ItemSpec is never overwritten
to.GetMetadata("Sponge").ShouldBe("Bob"); // Metadata not in source are preserved.
to.GetMetadata("Dog").ShouldBe("Harriet"); // Metadata present on destination are not overwritten.
to.GetMetadata("Cat").ShouldBe("Mike");
to.GetMetadata("Bird").ShouldBe("Big");
}
/// <summary>
/// Constructing a TaskItem from a null ITaskItem must throw ArgumentNullException,
/// not NullReferenceException.
/// </summary>
[Fact]
public void NullITaskItem()
{
Should.Throw<ArgumentNullException>(() =>
{
ITaskItem item = null;
TaskItem taskItem = new TaskItem(item);
// no NullReferenceException
}
);
}
/// <summary>
/// Even without any custom metadata metadatanames should
/// return the built in metadata
/// </summary>
[Fact]
public void MetadataNamesNoCustomMetadata()
{
TaskItem taskItem = new TaskItem("x");
taskItem.MetadataNames.Count.ShouldBe(FileUtilities.ItemSpecModifiers.All.Length);
taskItem.MetadataCount.ShouldBe(FileUtilities.ItemSpecModifiers.All.Length);
// Now add one custom metadatum; both counts grow by exactly one.
taskItem.SetMetadata("m", "m1");
taskItem.MetadataNames.Count.ShouldBe(FileUtilities.ItemSpecModifiers.All.Length + 1);
taskItem.MetadataCount.ShouldBe(FileUtilities.ItemSpecModifiers.All.Length + 1);
}
/// <summary>
/// Casting a null TaskItem to string must throw ArgumentNullException,
/// not NullReferenceException.
/// </summary>
[Fact]
public void NullITaskItemCast()
{
Should.Throw<ArgumentNullException>(() =>
{
TaskItem item = null;
string result = (string)item;
// no NullReferenceException
}
);
}
/// <summary>
/// A dictionary passed to the constructor supplies custom metadata but must
/// not override the derived item-spec modifiers.
/// </summary>
[Fact]
public void ConstructFromDictionary()
{
Hashtable h = new Hashtable();
h[FileUtilities.ItemSpecModifiers.Filename] = "foo";
h[FileUtilities.ItemSpecModifiers.Extension] = "bar";
h["custom"] = "hello";
TaskItem t = new TaskItem("bamboo.baz", h);
// item-spec modifiers were not overridden by dictionary passed to constructor
t.GetMetadata(FileUtilities.ItemSpecModifiers.Filename).ShouldBe("bamboo");
t.GetMetadata(FileUtilities.ItemSpecModifiers.Extension).ShouldBe(".baz");
// custom metadata lookup is case-insensitive
t.GetMetadata("CUSTOM").ShouldBe("hello");
}
/// <summary>
/// Setting a reserved item-spec modifier must throw ArgumentException.
/// </summary>
[Fact]
public void CannotChangeModifiers()
{
Should.Throw<ArgumentException>(() =>
{
TaskItem t = new TaskItem("foo");
try
{
t.SetMetadata(FileUtilities.ItemSpecModifiers.FullPath, "bazbaz");
}
catch (Exception e)
{
// surface the exception message in the test runner's output, then rethrow
Console.WriteLine(e.Message);
throw;
}
}
);
}
/// <summary>
/// Removing a reserved item-spec modifier must throw ArgumentException.
/// </summary>
[Fact]
public void CannotRemoveModifiers()
{
Should.Throw<ArgumentException>(() =>
{
TaskItem t = new TaskItem("foor");
try
{
t.RemoveMetadata(FileUtilities.ItemSpecModifiers.RootDir);
}
catch (Exception e)
{
// surface the exception message in the test runner's output, then rethrow
Console.WriteLine(e.Message);
throw;
}
}
);
}
/// <summary>
/// MetadataCount starts at the number of built-in modifiers and grows with
/// each custom metadatum.
/// </summary>
[Fact]
public void CheckMetadataCount()
{
TaskItem t = new TaskItem("foo");
t.MetadataCount.ShouldBe(FileUtilities.ItemSpecModifiers.All.Length);
t.SetMetadata("grog", "RUM");
t.MetadataCount.ShouldBe(FileUtilities.ItemSpecModifiers.All.Length + 1);
}
// FullPath resolves a relative item-spec against the current directory,
// even when the file does not exist on disk.
[Fact]
public void NonexistentRequestFullPath()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.FullPath).ShouldBe(
Path.Combine
(
Directory.GetCurrentDirectory(),
"Monkey.txt"
)
);
}
// RootDir is the path root of the resolved FullPath.
[Fact]
public void NonexistentRequestRootDir()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.RootDir).ShouldBe(Path.GetPathRoot(from.GetMetadata(FileUtilities.ItemSpecModifiers.FullPath)));
}
// Filename is the item-spec without directory or extension.
[Fact]
public void NonexistentRequestFilename()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.Filename).ShouldBe("Monkey");
}
// Extension includes the leading dot.
[Fact]
public void NonexistentRequestExtension()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.Extension).ShouldBe(".txt");
}
// RelativeDir is empty for a bare filename.
[Fact]
public void NonexistentRequestRelativeDir()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.RelativeDir).Length.ShouldBe(0);
}
// Directory strips the root but keeps the trailing separator.
[Fact]
public void NonexistentRequestDirectory()
{
TaskItem from = new TaskItem();
from.ItemSpec = NativeMethodsShared.IsWindows ? @"c:\subdir\Monkey.txt" : "/subdir/Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.Directory).ShouldBe(NativeMethodsShared.IsWindows ? @"subdir\" : "subdir/");
}
// Directory on a UNC path strips the \\server\share prefix.
[Fact]
public void NonexistentRequestDirectoryUNC()
{
if (!NativeMethodsShared.IsWindows)
{
return; // "UNC is not implemented except under Windows"
}
TaskItem from = new TaskItem();
from.ItemSpec = @"\\local\share\subdir\Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.Directory).ShouldBe(@"subdir\");
}
// RecursiveDir is empty unless the item came from a ** glob expansion.
[Fact]
public void NonexistentRequestRecursiveDir()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.RecursiveDir).Length.ShouldBe(0);
}
// Identity echoes the item-spec as given.
[Fact]
public void NonexistentRequestIdentity()
{
TaskItem from = new TaskItem();
from.ItemSpec = "Monkey.txt";
from.GetMetadata(FileUtilities.ItemSpecModifiers.Identity).ShouldBe("Monkey.txt");
}
// Timestamp modifiers are non-empty while the file exists and empty after
// it is deleted.
[Fact]
public void RequestTimeStamps()
{
TaskItem from = new TaskItem();
from.ItemSpec = FileUtilities.GetTemporaryFile();
from.GetMetadata(FileUtilities.ItemSpecModifiers.ModifiedTime).Length.ShouldBeGreaterThan(0);
from.GetMetadata(FileUtilities.ItemSpecModifiers.CreatedTime).Length.ShouldBeGreaterThan(0);
from.GetMetadata(FileUtilities.ItemSpecModifiers.AccessedTime).Length.ShouldBeGreaterThan(0);
File.Delete(from.ItemSpec);
from.GetMetadata(FileUtilities.ItemSpecModifiers.ModifiedTime).Length.ShouldBe(0);
from.GetMetadata(FileUtilities.ItemSpecModifiers.CreatedTime).Length.ShouldBe(0);
from.GetMetadata(FileUtilities.ItemSpecModifiers.AccessedTime).Length.ShouldBe(0);
}
/// <summary>
/// Verify metadata cannot be created with null name
/// </summary>
[Fact]
public void CreateNullNamedMetadata()
{
Should.Throw<ArgumentNullException>(() =>
{
TaskItem item = new TaskItem("foo");
item.SetMetadata(null, "x");
}
);
}
/// <summary>
/// Verify metadata cannot be created with empty name
/// </summary>
[Fact]
public void CreateEmptyNamedMetadata()
{
Should.Throw<ArgumentException>(() =>
{
TaskItem item = new TaskItem("foo");
item.SetMetadata("", "x");
}
);
}
/// <summary>
/// Create a TaskItem with a null metadata value -- this is allowed, but
/// internally converted to the empty string.
/// </summary>
[Fact]
public void CreateTaskItemWithNullMetadata()
{
IDictionary<string, string> metadata = new Dictionary<string, string>();
metadata.Add("m", null);
TaskItem item = new TaskItem("bar", (IDictionary)metadata);
item.GetMetadata("m").ShouldBe(string.Empty);
}
/// <summary>
/// Set metadata value to null value -- this is allowed, but
/// internally converted to the empty string.
/// </summary>
[Fact]
public void SetNullMetadataValue()
{
TaskItem item = new TaskItem("bar");
item.SetMetadata("m", null);
item.GetMetadata("m").ShouldBe(string.Empty);
}
#if FEATURE_APPDOMAIN
/// <summary>
/// Test that task items can be successfully constructed based on a task item from another appdomain.
/// </summary>
[Fact]
[Trait("Category", "mono-osx-failing")]
[Trait("Category", "mono-windows-failing")]
public void RemoteTaskItem()
{
AppDomain appDomain = null;
try
{
appDomain = AppDomain.CreateDomain
(
"generateResourceAppDomain",
null,
AppDomain.CurrentDomain.SetupInformation
);
// Instantiate the creator inside the remote appdomain and unwrap the proxy.
object obj = appDomain.CreateInstanceFromAndUnwrap
(
typeof(TaskItemCreator).Module.FullyQualifiedName,
typeof(TaskItemCreator).FullName
);
TaskItemCreator creator = (TaskItemCreator)obj;
IDictionary<string, string> metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
metadata.Add("c", "C");
metadata.Add("d", "D");
creator.Run(new[] { "a", "b" }, metadata);
ITaskItem[] itemsInThisAppDomain = new ITaskItem[creator.CreatedTaskItems.Length];
for (int i = 0; i < creator.CreatedTaskItems.Length; i++)
{
itemsInThisAppDomain[i] = new TaskItem(creator.CreatedTaskItems[i]);
itemsInThisAppDomain[i].ItemSpec.ShouldBe(creator.CreatedTaskItems[i].ItemSpec);
// The copy gains one extra metadatum (OriginalItemSpec, excluded below).
itemsInThisAppDomain[i].MetadataCount.ShouldBe(creator.CreatedTaskItems[i].MetadataCount + 1);
Dictionary<string, string> creatorMetadata = new Dictionary<string, string>(creator.CreatedTaskItems[i].MetadataCount);
foreach (string metadatum in creator.CreatedTaskItems[i].MetadataNames)
{
creatorMetadata[metadatum] = creator.CreatedTaskItems[i].GetMetadata(metadatum);
}
Dictionary<string, string> metadataInThisAppDomain = new Dictionary<string, string>(itemsInThisAppDomain[i].MetadataCount);
foreach (string metadatum in itemsInThisAppDomain[i].MetadataNames)
{
if (!string.Equals("OriginalItemSpec", metadatum))
{
metadataInThisAppDomain[metadatum] = itemsInThisAppDomain[i].GetMetadata(metadatum);
}
}
metadataInThisAppDomain.ShouldBe(creatorMetadata, ignoreOrder: true);
}
}
finally
{
// Always tear down the appdomain, even when an assertion failed.
if (appDomain != null)
{
AppDomain.Unload(appDomain);
}
}
}
/// <summary>
/// Miniature class to be remoted to another appdomain that just creates some TaskItems and makes them available for returning.
/// </summary>
private sealed class TaskItemCreator
#if FEATURE_APPDOMAIN
: MarshalByRefObject
#endif
{
/// <summary>
/// Task items that will be consumed by the other appdomain
/// </summary>
public ITaskItem[] CreatedTaskItems
{
get;
private set;
}
/// <summary>
/// Creates task items
/// </summary>
public void Run(string[] includes, IDictionary<string, string> metadataToAdd)
{
ErrorUtilities.VerifyThrowArgumentNull(includes, nameof(includes));
CreatedTaskItems = new TaskItem[includes.Length];
for (int i = 0; i < includes.Length; i++)
{
CreatedTaskItems[i] = new TaskItem(includes[i], (IDictionary)metadataToAdd);
}
}
}
#endif
}
}
| |
namespace CDMservers.Models
{
using System;
using System.Data.Entity;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
public partial class Model1525 : DbContext
{
// EF6 context constructor; "name=Model1525" resolves the connection string
// named "Model1525" from the application's configuration file.
public Model1525()
: base("name=Model1525")
{
}
// Per-county business-record tables (one table per county; the fluent
// configuration below applies the same property mapping to each).
public virtual DbSet<BUSINESS_CHANGDAO> BUSINESS_CHANGDAO { get; set; }
public virtual DbSet<BUSINESS_FUSHAN> BUSINESS_FUSHAN { get; set; }
public virtual DbSet<BUSINESS_HAIYANG> BUSINESS_HAIYANG { get; set; }
public virtual DbSet<BUSINESS_LAISHAN> BUSINESS_LAISHAN { get; set; }
public virtual DbSet<BUSINESS_LAIYANG> BUSINESS_LAIYANG { get; set; }
public virtual DbSet<BUSINESS_LAIZHOU> BUSINESS_LAIZHOU { get; set; }
public virtual DbSet<BUSINESS_LONGKOU> BUSINESS_LONGKOU { get; set; }
public virtual DbSet<BUSINESS_MUPING> BUSINESS_MUPING { get; set; }
public virtual DbSet<BUSINESS_PENGLAI> BUSINESS_PENGLAI { get; set; }
public virtual DbSet<BUSINESS_QIXIA> BUSINESS_QIXIA { get; set; }
public virtual DbSet<BUSINESS_ZHAOYUAN> BUSINESS_ZHAOYUAN { get; set; }
public virtual DbSet<BUSINESS_ZHIFU> BUSINESS_ZHIFU { get; set; }
// Shared lookup, configuration, and system tables.
public virtual DbSet<BUSINESSCATEGORY> BUSINESSCATEGORY { get; set; }
public virtual DbSet<BUSINESSORDINAL> BUSINESSORDINAL { get; set; }
public virtual DbSet<CARINFOR> CARINFOR { get; set; }
public virtual DbSet<CATEGORIES> CATEGORIES { get; set; }
public virtual DbSet<CONFIG> CONFIG { get; set; }
public virtual DbSet<CORPORATEINFO> CORPORATEINFO { get; set; }
public virtual DbSet<COUNTY> COUNTY { get; set; }
public virtual DbSet<POPULATION> POPULATION { get; set; }
public virtual DbSet<USERS> USERS { get; set; }
public virtual DbSet<ZHIFUBUSINESS> ZHIFUBUSINESS { get; set; }
public virtual DbSet<VITALLOG> VITALLOG { get; set; }
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_CHANGDAO>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_FUSHAN>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_HAIYANG>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAISHAN>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIYANG>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LAIZHOU>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_LONGKOU>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_MUPING>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_PENGLAI>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_QIXIA>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHAOYUAN>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESS_ZHIFU>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<BUSINESSCATEGORY>()
.Property(e => e.BUSINESSCODE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESSCATEGORY>()
.Property(e => e.BUSINESSNAME)
.IsUnicode(false);
modelBuilder.Entity<BUSINESSCATEGORY>()
.Property(e => e.CATEGORY)
.IsUnicode(false);
modelBuilder.Entity<BUSINESSCATEGORY>()
.Property(e => e.SERVICEAPI)
.IsUnicode(false);
modelBuilder.Entity<BUSINESSORDINAL>()
.Property(e => e.BUSINESSDATE)
.IsUnicode(false);
modelBuilder.Entity<BUSINESSORDINAL>()
.Property(e => e.CATEGORY)
.IsUnicode(false);
modelBuilder.Entity<BUSINESSORDINAL>()
.Property(e => e.ORDINAL)
.HasPrecision(38, 0);
modelBuilder.Entity<BUSINESSORDINAL>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.BRAND)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.MODEL_TYPE)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.VIN)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.PLATE_TYPE)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.OWNER)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.OWNER_ID)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.CAR_LENGTH)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.CAR_WIDTH)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.CAR_HEIGHT)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.STANDARD_LENGTH)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.STANDARD_WIDTH)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.STANDARD_HEIGHT)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.QUEUE_NUM)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.FINISH)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.TASK_TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.INSPECTOR)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.RECHECKER)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.UNLOAD_TASK_NUM)
.IsUnicode(false);
modelBuilder.Entity<CARINFOR>()
.Property(e => e.INVALID_TASK)
.HasPrecision(38, 0);
modelBuilder.Entity<CATEGORIES>()
.Property(e => e.CATEGORY)
.IsUnicode(false);
modelBuilder.Entity<CATEGORIES>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<CONFIG>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<CONFIG>()
.Property(e => e.BUSINESSTABLENAME)
.IsUnicode(false);
modelBuilder.Entity<CORPORATEINFO>()
.Property(e => e.CODE)
.IsUnicode(false);
modelBuilder.Entity<CORPORATEINFO>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<CORPORATEINFO>()
.Property(e => e.ADDRESS)
.IsUnicode(false);
modelBuilder.Entity<CORPORATEINFO>()
.Property(e => e.PHONENUMBER)
.IsUnicode(false);
modelBuilder.Entity<CORPORATEINFO>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<COUNTY>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<COUNTY>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.SEX)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.NATION)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.BORN)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.ADDRESS)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.POSTCODE)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.POSTADDRESS)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.MOBILE)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.TELEPHONE)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.EMAIL)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.IDNUM)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.FIRSTFINGER)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.SECONDFINGER)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.LEFTEYE)
.IsUnicode(false);
modelBuilder.Entity<POPULATION>()
.Property(e => e.RIGHTEYE)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<USERS>()
.Property(e => e.USERNAME)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.PASSWORD)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.LIMIT)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.DEPARTMENT)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.POST)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.POLICENUM)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.REALNAME)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.PDA_TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<USERS>()
.Property(e => e.FIRSTFINGER)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.SECONDFINGER)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<USERS>()
.Property(e => e.AUTHORITYLEVEL)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.ID)
.HasPrecision(38, 0);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.TYPE)
.HasPrecision(38, 0);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.SERIAL_NUM)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.REJECT_REASON)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.NAME)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.PHONE_NUM)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.PROCESS_USER)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.FILE_RECV_USER)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.TRANSFER_STATUS)
.HasPrecision(38, 0);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.UPLOADER)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.COMPLETE_PAY_USER)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.ATTENTION)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.COUNTYCODE)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.POSTPHONE)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.POSTADDR)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.CHECK_FILE)
.HasPrecision(38, 0);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.CAR_NUM)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.TAX_TYPE)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.TAX_NUM)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.ORIGIN_TYPE)
.IsUnicode(false);
modelBuilder.Entity<ZHIFUBUSINESS>()
.Property(e => e.ORIGIN_NUM)
.IsUnicode(false);
modelBuilder.Entity<VITALLOG>()
.Property(e => e.USERNAME)
.IsUnicode(false);
modelBuilder.Entity<VITALLOG>()
.Property(e => e.KEYWORD)
.IsUnicode(false);
modelBuilder.Entity<VITALLOG>()
.Property(e => e.IP)
.IsUnicode(false);
modelBuilder.Entity<VITALLOG>()
.Property(e => e.OPERATION)
.IsUnicode(false);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using dexih.functions;
using dexih.functions.Parameter;
using dexih.transforms.Mapping;
using Dexih.Utils.DataType;
using Xunit;
namespace dexih.transforms.tests
{
/// <summary>
/// Tests for node (nested-table) handling in transforms: reading nested child
/// readers, un-grouping (flattening) a node, and applying mappings inside a node.
/// </summary>
public class TransformNodeTests
{
    [Fact]
    public async Task NodeParentChildTest()
    {
        // Reader producing a parent table with a nested "children" node,
        // which in turn nests a "grandChildren" node.
        var reader = Helpers.CreateParentChildReader();

        // Top-level column layout.
        Assert.Equal("parent_id", reader.CacheTable.Columns[1].Name);
        Assert.Equal("name", reader.CacheTable.Columns[2].Name);

        // Child-node column layout.
        var childColumns = reader.CacheTable.Columns["children"].ChildColumns;
        Assert.Equal("parent_id", childColumns[1].Name);
        Assert.Equal("child_id", childColumns[2].Name);
        Assert.Equal("name", childColumns[3].Name);

        // Grandchild-node column layout.
        var grandChildColumns = childColumns[0].ChildColumns;
        Assert.Equal("child_id", grandChildColumns[0].Name);
        Assert.Equal("grandChild_id", grandChildColumns[1].Name);
        Assert.Equal("name", grandChildColumns[2].Name);

        Assert.True(await reader.Open());

        // Parent 0: first child (00) has two grandchildren.
        Assert.True(await reader.ReadAsync());
        Assert.Equal(0, reader["parent_id"]);
        Assert.Equal("parent 0", reader["name"]);

        var childReader = (Transform) reader["children"];
        Assert.True(await childReader.ReadAsync());
        Assert.Equal(0, childReader["parent_id"]);
        Assert.Equal(0, childReader["child_id"]);
        Assert.Equal("child 00", childReader["name"]);

        var grandChildReader = (Transform) childReader["grandChildren"];
        Assert.True(await grandChildReader.ReadAsync());
        Assert.Equal(0, grandChildReader["child_id"]);
        Assert.Equal(0, grandChildReader["grandChild_id"]);
        Assert.Equal("grandChild 000", grandChildReader["name"]);

        Assert.True(await grandChildReader.ReadAsync());
        Assert.Equal(0, grandChildReader["child_id"]);
        Assert.Equal(1, grandChildReader["grandChild_id"]);
        Assert.Equal("grandChild 001", grandChildReader["name"]);

        Assert.False(await grandChildReader.ReadAsync());

        // Parent 0: second child (01) has no grandchildren.
        Assert.True(await childReader.ReadAsync());
        Assert.Equal(0, childReader["parent_id"]);
        Assert.Equal(1, childReader["child_id"]);
        Assert.Equal("child 01", childReader["name"]);

        grandChildReader = (Transform) childReader["grandChildren"];
        Assert.False(await grandChildReader.ReadAsync());
        Assert.False(await childReader.ReadAsync());

        // Parent 1: no children at all.
        Assert.True(await reader.ReadAsync());
        Assert.Equal(1, reader["parent_id"]);
        Assert.Equal("parent 1", reader["name"]);
        childReader = (Transform) reader["children"];
        Assert.False(await childReader.ReadAsync());

        // Parent 2: single child (20).
        Assert.True(await reader.ReadAsync());
        Assert.Equal(2, reader["parent_id"]);
        Assert.Equal("parent 2", reader["name"]);
        childReader = (Transform) reader["children"];
        Assert.True(await childReader.ReadAsync());
        Assert.Equal(2, childReader["parent_id"]);
        Assert.Equal(20, childReader["child_id"]);
        Assert.Equal("child 20", childReader["name"]);
    }

    [Fact]
    public async Task UnGroupNodeTest()
    {
        var reader = Helpers.CreateParentChildReader();

        // Flatten the "children" node into the parent rows.
        var unGroup = new MapUnGroup(reader.CacheTable["children"]);
        var mappingSet = new Mappings(true) {unGroup};
        var flattened = new TransformRows(reader, mappingSet);
        await flattened.Open(0, null, CancellationToken.None);

        // Un-grouped child columns are prefixed with the node name.
        Assert.Equal("children.parent_id", flattened.CacheTable.Columns[1].Name);
        Assert.Equal("children.child_id", flattened.CacheTable.Columns[2].Name);
        Assert.Equal("children.name", flattened.CacheTable.Columns[3].Name);
        Assert.Equal("parent_id", flattened.CacheTable.Columns[5].Name);
        Assert.Equal("name", flattened.CacheTable.Columns[6].Name);

        // Row 1: parent 0 / child 00.
        await flattened.ReadAsync();
        Assert.Equal(0, flattened["parent_id"]);
        Assert.Equal("parent 0", flattened["name"]);
        Assert.Equal(0, flattened["children.parent_id"]);
        Assert.Equal(0, flattened["children.child_id"]);
        Assert.Equal("child 00", flattened["children.name"]);

        // Row 2: parent 0 / child 01.
        await flattened.ReadAsync();
        Assert.Equal(0, flattened["parent_id"]);
        Assert.Equal("parent 0", flattened["name"]);
        Assert.Equal(0, flattened["children.parent_id"]);
        Assert.Equal(1, flattened["children.child_id"]);
        Assert.Equal("child 01", flattened["children.name"]);

        // Row 3: parent 1 has no children, so child columns are null.
        await flattened.ReadAsync();
        Assert.Equal(1, flattened["parent_id"]);
        Assert.Equal("parent 1", flattened["name"]);
        Assert.Null(flattened["children.parent_id"]);
        Assert.Null(flattened["children.child_id"]);
        Assert.Null(flattened["children.name"]);

        // Row 4: parent 2 / child 20.
        await flattened.ReadAsync();
        Assert.Equal(2, flattened["parent_id"]);
        Assert.Equal("parent 2", flattened["name"]);
        Assert.Equal(2, flattened["children.parent_id"]);
        Assert.Equal(20, flattened["children.child_id"]);
        Assert.Equal("child 20", flattened["children.name"]);

        // Row 5: parent 3 / child 30, then end of rows.
        await flattened.ReadAsync();
        Assert.Equal(3, flattened["parent_id"]);
        Assert.Equal("parent 3", flattened["name"]);
        Assert.Equal(3, flattened["children.parent_id"]);
        Assert.Equal(30, flattened["children.child_id"]);
        Assert.Equal("child 30", flattened["children.name"]);

        Assert.False(await flattened.ReadAsync());
    }

    [Fact]
    public async Task NodeMappingTest()
    {
        var reader = Helpers.CreateParentChildReader();

        // Function applied inside the "children" node: concatenates the
        // parent's name and the child's name into a "parent_child" column.
        var concatFunction = new TransformFunction(new Func<string, string, string>((parent, child) => parent + "-" + child), typeof(string), null, null);
        var parameters = new Parameters
        {
            Inputs = new List<Parameter>
            {
                new ParameterColumn("parent.name", ETypeCode.String),
                new ParameterColumn("name", ETypeCode.String),
            },
            ReturnParameters = new List<Parameter> { new ParameterOutputColumn("parent_child", ETypeCode.String)}
        };

        var childMappings = new Mappings();
        childMappings.Add(new MapFunction(concatFunction, parameters, EFunctionCaching.NoCache));

        var mapping = new TransformMapping();
        var mappedParent = mapping.CreateNodeMapping(reader, null, childMappings, new[] {new TableColumn("children")});

        await mappedParent.Open();

        // Parent 0: two mapped children.
        Assert.True(await mappedParent.ReadAsync());
        Assert.Equal(0, mappedParent["parent_id"]);
        Assert.Equal("parent 0", mappedParent["name"]);

        var mappedChild = (Transform) mappedParent["children"];
        Assert.True(await mappedChild.ReadAsync());
        Assert.Equal("parent 0-child 00", mappedChild["parent_child"]);
        Assert.True(await mappedChild.ReadAsync());
        Assert.Equal("parent 0-child 01", mappedChild["parent_child"]);
        Assert.False(await mappedChild.ReadAsync());

        // Parent 1: no children.
        Assert.True(await mappedParent.ReadAsync());
        Assert.Equal(1, mappedParent["parent_id"]);
        Assert.Equal("parent 1", mappedParent["name"]);
        mappedChild = (Transform) mappedParent["children"];
        Assert.False(await mappedChild.ReadAsync());

        // Parent 2: one mapped child.
        Assert.True(await mappedParent.ReadAsync());
        Assert.Equal(2, mappedParent["parent_id"]);
        Assert.Equal("parent 2", mappedParent["name"]);
        mappedChild = (Transform) mappedParent["children"];
        await mappedChild.Open();
        Assert.True(await mappedChild.ReadAsync());
        Assert.Equal("parent 2-child 20", mappedChild["parent_child"]);
        mappedChild = (Transform) mappedParent["children"];
        Assert.False(await mappedChild.ReadAsync());

        // Parent 3: one mapped child, then end of rows.
        Assert.True(await mappedParent.ReadAsync());
        Assert.Equal(3, mappedParent["parent_id"]);
        Assert.Equal("parent 3", mappedParent["name"]);
        mappedChild = (Transform) mappedParent["children"];
        await mappedChild.Open();
        Assert.True(await mappedChild.ReadAsync());
        Assert.Equal("parent 3-child 30", mappedChild["parent_child"]);
        mappedChild = (Transform) mappedParent["children"];
        Assert.False(await mappedChild.ReadAsync());

        Assert.False(await mappedParent.ReadAsync());
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.ResourceManager
{
using System.Threading.Tasks;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Extension methods for TagsOperations.
/// </summary>
public static partial class TagsOperationsExtensions
{
/// <summary>
/// Deletes a tag value.
/// </summary>
/// <remarks>
/// Synchronous wrapper over <see cref='DeleteValueAsync' />; blocks the
/// calling thread until the operation completes.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='tagName'>
/// The name of the tag.
/// </param>
/// <param name='tagValue'>
/// The value of the tag to delete.
/// </param>
public static void DeleteValue(this ITagsOperations operations, string tagName, string tagValue)
{
// Generated sync-over-async pattern: start the async call on the thread pool
// (avoids capturing the caller's context), unwrap the inner task, then block.
// GetResult() rethrows the original exception rather than an AggregateException.
System.Threading.Tasks.Task.Factory.StartNew(s => ((ITagsOperations)s).DeleteValueAsync(tagName, tagValue), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a tag value.
/// </summary>
/// <remarks>
/// Delegates to <see cref='ITagsOperations.DeleteValueWithHttpMessagesAsync' />
/// and discards the HTTP response wrapper.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='tagName'>
/// The name of the tag.
/// </param>
/// <param name='tagValue'>
/// The value of the tag to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task DeleteValueAsync(this ITagsOperations operations, string tagName, string tagValue, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
// ConfigureAwait(false): library code, no need to resume on the caller's context.
await operations.DeleteValueWithHttpMessagesAsync(tagName, tagValue, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Creates a tag value. The name of the tag must already exist.
/// </summary>
/// <remarks>
/// Synchronous wrapper over <see cref='CreateOrUpdateValueAsync' />; blocks the
/// calling thread until the operation completes.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='tagName'>
/// The name of the tag.
/// </param>
/// <param name='tagValue'>
/// The value of the tag to create.
/// </param>
public static TagValue CreateOrUpdateValue(this ITagsOperations operations, string tagName, string tagValue)
{
// Generated sync-over-async pattern: run the async call on the thread pool,
// unwrap the inner task, then block for its result.
return System.Threading.Tasks.Task.Factory.StartNew(s => ((ITagsOperations)s).CreateOrUpdateValueAsync(tagName, tagValue), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates a tag value. The name of the tag must already exist.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='tagName'>
/// The name of the tag.
/// </param>
/// <param name='tagValue'>
/// The value of the tag to create.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>
/// The created or updated tag value.
/// </returns>
public static async System.Threading.Tasks.Task<TagValue> CreateOrUpdateValueAsync(this ITagsOperations operations, string tagName, string tagValue, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
// The 'using' disposes the HTTP response wrapper after extracting the body.
using (var _result = await operations.CreateOrUpdateValueWithHttpMessagesAsync(tagName, tagValue, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates a tag in the subscription.
/// </summary>
/// <remarks>
/// The tag name can have a maximum of 512 characters and is case insensitive.
/// Tag names created by Azure have prefixes of microsoft, azure, or windows.
/// You cannot create tags with one of these prefixes.
/// Synchronous wrapper over <see cref='CreateOrUpdateAsync' />; blocks the
/// calling thread until the operation completes.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='tagName'>
/// The name of the tag to create.
/// </param>
public static TagDetails CreateOrUpdate(this ITagsOperations operations, string tagName)
{
// Generated sync-over-async pattern: run the async call on the thread pool,
// unwrap the inner task, then block for its result.
return System.Threading.Tasks.Task.Factory.StartNew(s => ((ITagsOperations)s).CreateOrUpdateAsync(tagName), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates a tag in the subscription.
/// </summary>
/// <remarks>
/// The tag name can have a maximum of 512 characters and is case insensitive.
/// Tag names created by Azure have prefixes of microsoft, azure, or windows.
/// You cannot create tags with one of these prefixes.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='tagName'>
/// The name of the tag to create.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>
/// The created or updated tag details.
/// </returns>
public static async System.Threading.Tasks.Task<TagDetails> CreateOrUpdateAsync(this ITagsOperations operations, string tagName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
// The 'using' disposes the HTTP response wrapper after extracting the body.
using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(tagName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes a tag from the subscription.
/// </summary>
/// <remarks>
/// You must remove all values from a resource tag before you can delete it.
/// Synchronous wrapper over <see cref='DeleteAsync' />; blocks the calling
/// thread until the operation completes.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='tagName'>
/// The name of the tag.
/// </param>
public static void Delete(this ITagsOperations operations, string tagName)
{
// Generated sync-over-async pattern: start the async call on the thread pool,
// unwrap the inner task, then block until it completes.
System.Threading.Tasks.Task.Factory.StartNew(s => ((ITagsOperations)s).DeleteAsync(tagName), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a tag from the subscription.
/// </summary>
/// <remarks>
/// You must remove all values from a resource tag before you can delete it.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='tagName'>
/// The name of the tag.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task DeleteAsync(this ITagsOperations operations, string tagName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    // The operation returns no body, but the response wrapper still holds the
    // underlying HTTP request/response messages; dispose it like the sibling
    // methods do with their `using` blocks instead of leaking it.
    (await operations.DeleteWithHttpMessagesAsync(tagName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Gets the names and values of all resource tags that are defined in a
/// subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static Microsoft.Rest.Azure.IPage<TagDetails> List(this ITagsOperations operations)
{
    // Synchronously block on the async implementation, mirroring the other
    // synchronous wrappers in this class.
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((ITagsOperations)state).ListAsync(),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets the names and values of all resource tags that are defined in a
/// subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task<Microsoft.Rest.Azure.IPage<TagDetails>> ListAsync(this ITagsOperations operations, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    // NOTE: fully qualified System.Threading.Tasks.Task for consistency with every
    // other signature in this generated class (no `using System.Threading.Tasks`
    // is relied upon elsewhere in the file).
    using (var _result = await operations.ListWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
    {
        return _result.Body;
    }
}
/// <summary>
/// Gets the names and values of all resource tags that are defined in a
/// subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static Microsoft.Rest.Azure.IPage<TagDetails> ListNext(this ITagsOperations operations, string nextPageLink)
{
    // Synchronously block on the async implementation, mirroring the other
    // synchronous wrappers in this class.
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((ITagsOperations)state).ListNextAsync(nextPageLink),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets the names and values of all resource tags that are defined in a
/// subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task<Microsoft.Rest.Azure.IPage<TagDetails>> ListNextAsync(this ITagsOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    // NOTE: fully qualified System.Threading.Tasks.Task for consistency with every
    // other signature in this generated class.
    using (var _result = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
    {
        return _result.Body;
    }
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Xunit;
namespace System.Collections.Immutable.Tests
{
public class ImmutableSortedDictionaryTest : ImmutableDictionaryTestBase
{
// Mutations that RandomOperationsTest picks between at random.
private enum Operation
{
    Add,
    Set,
    Remove,
    Last, // sentinel: exclusive upper bound for Random.Next, never executed
}
[Fact]
public void RandomOperationsTest()
{
    // Mirror a random sequence of mutations on the BCL SortedDictionary (oracle)
    // and on ImmutableSortedDictionary, checking the contents agree after each step.
    int operationCount = this.RandomOperationsCount;
    var expected = new SortedDictionary<int, bool>();
    var actual = ImmutableSortedDictionary<int, bool>.Empty;
    int seed = (int)DateTime.Now.Ticks;
    Debug.WriteLine("Using random seed {0}", seed); // logged so a failure can be reproduced
    var random = new Random(seed);
    for (int iOp = 0; iOp < operationCount; iOp++)
    {
        switch ((Operation)random.Next((int)Operation.Last))
        {
            case Operation.Add:
                // Pick a key that is not already present so Add cannot throw.
                int key;
                do
                {
                    key = random.Next();
                }
                while (expected.ContainsKey(key));
                bool value = random.Next() % 2 == 0;
                Debug.WriteLine("Adding \"{0}\"={1} to the set.", key, value);
                expected.Add(key, value);
                actual = actual.Add(key, value);
                break;
            case Operation.Set:
                // Half the time overwrite an existing key, otherwise set a fresh one.
                bool overwrite = expected.Count > 0 && random.Next() % 2 == 0;
                if (overwrite)
                {
                    int position = random.Next(expected.Count);
                    key = expected.Skip(position).First().Key;
                }
                else
                {
                    do
                    {
                        key = random.Next();
                    }
                    while (expected.ContainsKey(key));
                }
                value = random.Next() % 2 == 0;
                Debug.WriteLine("Setting \"{0}\"={1} to the set (overwrite={2}).", key, value, overwrite);
                expected[key] = value;
                actual = actual.SetItem(key, value);
                break;
            case Operation.Remove:
                if (expected.Count > 0)
                {
                    int position = random.Next(expected.Count);
                    key = expected.Skip(position).First().Key;
                    Debug.WriteLine("Removing element \"{0}\" from the set.", key);
                    Assert.True(expected.Remove(key));
                    actual = actual.Remove(key);
                }
                break;
        }
        // Both dictionaries enumerate in key order, so element-wise comparison is valid.
        Assert.Equal<KeyValuePair<int, bool>>(expected.ToList(), actual.ToList());
    }
}
[Fact]
public void AddExistingKeySameValueTest()
{
    // Re-adding a key with an equal value must be a no-op rather than throw; the
    // second case uses a case-insensitive value comparer so "MICROSOFT" counts as equal.
    var ordinal = StringComparer.Ordinal;
    AddExistingKeySameValueTestHelper(Empty(ordinal, ordinal), "Company", "Microsoft", "Microsoft");
    AddExistingKeySameValueTestHelper(Empty(ordinal, StringComparer.OrdinalIgnoreCase), "Company", "Microsoft", "MICROSOFT");
}
[Fact]
public void AddExistingKeyDifferentValueTest()
{
    // With an ordinal value comparer, adding an existing key with a different value must throw.
    var ordinal = StringComparer.Ordinal;
    AddExistingKeyDifferentValueTestHelper(Empty(ordinal, ordinal), "Company", "Microsoft", "MICROSOFT");
}
[Fact]
public void ToUnorderedTest()
{
    // Converting a sorted map to an unordered ImmutableDictionary must preserve
    // both the count and the full key/value contents.
    var sortedMap = Empty<int, GenericParameterHelper>().AddRange(Enumerable.Range(1, 100).Select(n => new KeyValuePair<int, GenericParameterHelper>(n, new GenericParameterHelper(n))));
    var unsortedMap = sortedMap.ToImmutableDictionary();
    // Idiomatic generic overload instead of Assert.IsAssignableFrom(typeof(...), ...).
    Assert.IsAssignableFrom<ImmutableDictionary<int, GenericParameterHelper>>(unsortedMap);
    Assert.Equal(sortedMap.Count, unsortedMap.Count);
    Assert.Equal<KeyValuePair<int, GenericParameterHelper>>(sortedMap.ToList(), unsortedMap.ToList());
}
[Fact]
public void SortChangeTest()
{
    // Keys differing only by case coexist under an ordinal key comparer...
    var ordinalMap = Empty<string, string>(StringComparer.Ordinal)
        .Add("Johnny", "Appleseed")
        .Add("JOHNNY", "Appleseed");
    Assert.Equal(2, ordinalMap.Count);
    Assert.True(ordinalMap.ContainsKey("Johnny"));
    Assert.False(ordinalMap.ContainsKey("johnny"));
    // ...but collapse to a single entry when re-sorted case-insensitively
    // (allowed here because both values are equal).
    var caseInsensitiveMap = ordinalMap.ToImmutableSortedDictionary(StringComparer.OrdinalIgnoreCase);
    Assert.Equal(1, caseInsensitiveMap.Count);
    Assert.True(caseInsensitiveMap.ContainsKey("Johnny"));
    Assert.True(caseInsensitiveMap.ContainsKey("johnny")); // because it's case insensitive
}
[Fact]
public void InitialBulkAddUniqueTest()
{
    // AddRange on an empty dictionary takes the bulk-load path; two distinct keys must both land.
    var entries = new List<KeyValuePair<string, string>>
    {
        new KeyValuePair<string, string>("a", "b"),
        new KeyValuePair<string, string>("c", "d"),
    };
    var emptyMap = Empty<string, string>(StringComparer.Ordinal, StringComparer.Ordinal);
    var populated = emptyMap.AddRange(entries);
    Assert.Equal(2, populated.Count);
}
[Fact]
public void InitialBulkAddWithExactDuplicatesTest()
{
    // The list deliberately contains the same key/value pair twice: AddRange on an
    // empty dictionary must collapse exact duplicates instead of throwing.
    // (Renamed from the misleading "uniqueEntries" — these entries are duplicates.)
    var duplicateEntries = new List<KeyValuePair<string, string>>
    {
        new KeyValuePair<string,string>("a", "b"),
        new KeyValuePair<string,string>("a", "b"),
    };
    var map = Empty<string, string>(StringComparer.Ordinal, StringComparer.Ordinal);
    var actual = map.AddRange(duplicateEntries);
    Assert.Equal(1, actual.Count);
}
[Fact]
public void ContainsValueTest()
{
    // A value must be reported absent before it is added and present afterwards.
    var emptyMap = ImmutableSortedDictionary<int, GenericParameterHelper>.Empty;
    this.ContainsValueTestHelper(emptyMap, 1, new GenericParameterHelper());
}
[Fact]
public void InitialBulkAddWithKeyCollisionTest()
{
    // Same key mapped to two different values: the bulk-load path must reject this.
    // (Renamed from the misleading "uniqueEntries" — these entries collide.)
    var collidingEntries = new List<KeyValuePair<string, string>>
    {
        new KeyValuePair<string,string>("a", "b"),
        new KeyValuePair<string,string>("a", "d"),
    };
    var map = Empty<string, string>(StringComparer.Ordinal, StringComparer.Ordinal);
    Assert.Throws<ArgumentException>(() => map.AddRange(collidingEntries));
}
[Fact]
public void Create()
{
    // Exercises every Create/CreateRange overload and verifies which comparers
    // the resulting dictionary picks up (defaults vs. the supplied ones).
    IEnumerable<KeyValuePair<string, string>> pairs = new Dictionary<string, string> { { "a", "b" } };
    var keyComparer = StringComparer.OrdinalIgnoreCase;
    var valueComparer = StringComparer.CurrentCulture;
    // No comparers: both default.
    var dictionary = ImmutableSortedDictionary.Create<string, string>();
    Assert.Equal(0, dictionary.Count);
    Assert.Same(Comparer<string>.Default, dictionary.KeyComparer);
    Assert.Same(EqualityComparer<string>.Default, dictionary.ValueComparer);
    // Key comparer only.
    dictionary = ImmutableSortedDictionary.Create<string, string>(keyComparer);
    Assert.Equal(0, dictionary.Count);
    Assert.Same(keyComparer, dictionary.KeyComparer);
    Assert.Same(EqualityComparer<string>.Default, dictionary.ValueComparer);
    // Key and value comparers.
    dictionary = ImmutableSortedDictionary.Create(keyComparer, valueComparer);
    Assert.Equal(0, dictionary.Count);
    Assert.Same(keyComparer, dictionary.KeyComparer);
    Assert.Same(valueComparer, dictionary.ValueComparer);
    // CreateRange with contents, default comparers.
    dictionary = ImmutableSortedDictionary.CreateRange(pairs);
    Assert.Equal(1, dictionary.Count);
    Assert.Same(Comparer<string>.Default, dictionary.KeyComparer);
    Assert.Same(EqualityComparer<string>.Default, dictionary.ValueComparer);
    // CreateRange with key comparer.
    dictionary = ImmutableSortedDictionary.CreateRange(keyComparer, pairs);
    Assert.Equal(1, dictionary.Count);
    Assert.Same(keyComparer, dictionary.KeyComparer);
    Assert.Same(EqualityComparer<string>.Default, dictionary.ValueComparer);
    // CreateRange with both comparers.
    dictionary = ImmutableSortedDictionary.CreateRange(keyComparer, valueComparer, pairs);
    Assert.Equal(1, dictionary.Count);
    Assert.Same(keyComparer, dictionary.KeyComparer);
    Assert.Same(valueComparer, dictionary.ValueComparer);
}
[Fact]
public void ToImmutableSortedDictionary()
{
    // Exercises every ToImmutableSortedDictionary overload, including the
    // key/value-selector forms, and verifies the resulting comparers.
    IEnumerable<KeyValuePair<string, string>> pairs = new Dictionary<string, string> { { "a", "B" } };
    var keyComparer = StringComparer.OrdinalIgnoreCase;
    var valueComparer = StringComparer.CurrentCulture;
    // No comparers: both default.
    ImmutableSortedDictionary<string, string> dictionary = pairs.ToImmutableSortedDictionary();
    Assert.Equal(1, dictionary.Count);
    Assert.Same(Comparer<string>.Default, dictionary.KeyComparer);
    Assert.Same(EqualityComparer<string>.Default, dictionary.ValueComparer);
    // Key comparer only.
    dictionary = pairs.ToImmutableSortedDictionary(keyComparer);
    Assert.Equal(1, dictionary.Count);
    Assert.Same(keyComparer, dictionary.KeyComparer);
    Assert.Same(EqualityComparer<string>.Default, dictionary.ValueComparer);
    // Key and value comparers.
    dictionary = pairs.ToImmutableSortedDictionary(keyComparer, valueComparer);
    Assert.Equal(1, dictionary.Count);
    Assert.Same(keyComparer, dictionary.KeyComparer);
    Assert.Same(valueComparer, dictionary.ValueComparer);
    // Selector overloads: keys are upper-cased, values lower-cased by the projections.
    dictionary = pairs.ToImmutableSortedDictionary(p => p.Key.ToUpperInvariant(), p => p.Value.ToLowerInvariant());
    Assert.Equal(1, dictionary.Count);
    Assert.Equal("A", dictionary.Keys.Single());
    Assert.Equal("b", dictionary.Values.Single());
    Assert.Same(Comparer<string>.Default, dictionary.KeyComparer);
    Assert.Same(EqualityComparer<string>.Default, dictionary.ValueComparer);
    dictionary = pairs.ToImmutableSortedDictionary(p => p.Key.ToUpperInvariant(), p => p.Value.ToLowerInvariant(), keyComparer);
    Assert.Equal(1, dictionary.Count);
    Assert.Equal("A", dictionary.Keys.Single());
    Assert.Equal("b", dictionary.Values.Single());
    Assert.Same(keyComparer, dictionary.KeyComparer);
    Assert.Same(EqualityComparer<string>.Default, dictionary.ValueComparer);
    dictionary = pairs.ToImmutableSortedDictionary(p => p.Key.ToUpperInvariant(), p => p.Value.ToLowerInvariant(), keyComparer, valueComparer);
    Assert.Equal(1, dictionary.Count);
    Assert.Equal("A", dictionary.Keys.Single());
    Assert.Equal("b", dictionary.Values.Single());
    Assert.Same(keyComparer, dictionary.KeyComparer);
    Assert.Same(valueComparer, dictionary.ValueComparer);
}
[Fact]
public void WithComparers()
{
    // "a" and "B" do not collide under any comparer used here.
    var map = ImmutableSortedDictionary.Create<string, string>().Add("a", "1").Add("B", "1");
    Assert.Same(Comparer<string>.Default, map.KeyComparer);
    Assert.True(map.ContainsKey("a"));
    Assert.False(map.ContainsKey("A"));
    // Switch to a case-insensitive key comparer: same entries, looser lookups.
    map = map.WithComparers(StringComparer.OrdinalIgnoreCase);
    Assert.Same(StringComparer.OrdinalIgnoreCase, map.KeyComparer);
    Assert.Equal(2, map.Count);
    Assert.True(map.ContainsKey("a"));
    Assert.True(map.ContainsKey("A"));
    Assert.True(map.ContainsKey("b"));
    // Also supply a value comparer; key behavior is unchanged.
    var cultureComparer = StringComparer.CurrentCulture;
    map = map.WithComparers(StringComparer.OrdinalIgnoreCase, cultureComparer);
    Assert.Same(StringComparer.OrdinalIgnoreCase, map.KeyComparer);
    Assert.Same(cultureComparer, map.ValueComparer);
    Assert.Equal(2, map.Count);
    Assert.True(map.ContainsKey("a"));
    Assert.True(map.ContainsKey("A"));
    Assert.True(map.ContainsKey("b"));
}
[Fact]
public void WithComparersCollisions()
{
    // Changing the key comparer can merge formerly-distinct keys; merging is only
    // legal when the colliding values compare equal under the value comparer.
    // First check where collisions have matching values.
    var map = ImmutableSortedDictionary.Create<string, string>()
        .Add("a", "1").Add("A", "1");
    map = map.WithComparers(StringComparer.OrdinalIgnoreCase);
    Assert.Same(StringComparer.OrdinalIgnoreCase, map.KeyComparer);
    Assert.Equal(1, map.Count);
    Assert.True(map.ContainsKey("a"));
    Assert.Equal("1", map["a"]);
    // Now check where collisions have conflicting values.
    map = ImmutableSortedDictionary.Create<string, string>()
        .Add("a", "1").Add("A", "2").Add("b", "3");
    Assert.Throws<ArgumentException>(() => map.WithComparers(StringComparer.OrdinalIgnoreCase));
    // Force all values to be considered equal.
    map = map.WithComparers(StringComparer.OrdinalIgnoreCase, EverythingEqual<string>.Default);
    Assert.Same(StringComparer.OrdinalIgnoreCase, map.KeyComparer);
    Assert.Same(EverythingEqual<string>.Default, map.ValueComparer);
    Assert.Equal(2, map.Count);
    Assert.True(map.ContainsKey("a"));
    Assert.True(map.ContainsKey("b"));
}
[Fact]
public void CollisionExceptionMessageContainsKey()
{
    // The exception raised for a conflicting re-add should name the offending key.
    var map = ImmutableSortedDictionary.Create<string, string>()
        .Add("firstKey", "1")
        .Add("secondKey", "2");
    var ex = Assert.Throws<ArgumentException>(() => map.Add("firstKey", "3"));
    Assert.Contains("firstKey", ex.Message);
}
[Fact]
public void WithComparersEmptyCollection()
{
    // Swapping comparers on an empty dictionary simply yields an empty
    // dictionary carrying the new key comparer.
    var map = ImmutableSortedDictionary.Create<string, string>();
    Assert.Same(Comparer<string>.Default, map.KeyComparer);
    var ignoreCaseMap = map.WithComparers(StringComparer.OrdinalIgnoreCase);
    Assert.Same(StringComparer.OrdinalIgnoreCase, ignoreCaseMap.KeyComparer);
}
[Fact]
public void EnumeratorRecyclingMisuse()
{
    // The struct enumerator recycles internal state on Dispose; afterwards both
    // the original and any copies of it must refuse further use.
    var collection = ImmutableSortedDictionary.Create<int, int>().Add(3, 5);
    var enumerator = collection.GetEnumerator();
    var enumeratorCopy = enumerator;
    Assert.True(enumerator.MoveNext());
    Assert.False(enumerator.MoveNext());
    enumerator.Dispose();
    Assert.Throws<ObjectDisposedException>(() => enumerator.MoveNext());
    Assert.Throws<ObjectDisposedException>(() => enumerator.Reset());
    Assert.Throws<ObjectDisposedException>(() => enumerator.Current);
    Assert.Throws<ObjectDisposedException>(() => enumeratorCopy.MoveNext());
    Assert.Throws<ObjectDisposedException>(() => enumeratorCopy.Reset());
    Assert.Throws<ObjectDisposedException>(() => enumeratorCopy.Current);
    enumerator.Dispose(); // double-disposal should not throw
    enumeratorCopy.Dispose();
    // We expect that acquiring a new enumerator will use the same underlying Stack<T> object,
    // but that it will not throw exceptions for the new enumerator.
    enumerator = collection.GetEnumerator();
    Assert.True(enumerator.MoveNext());
    Assert.False(enumerator.MoveNext());
    Assert.Throws<InvalidOperationException>(() => enumerator.Current);
    enumerator.Dispose();
}
[Fact]
public void DebuggerAttributesValid()
{
    // Validates DebuggerDisplay/DebuggerTypeProxy metadata on the dictionary and
    // on its internal root node (fetched via the private "_root" field).
    DebuggerAttributes.ValidateDebuggerDisplayReferences(ImmutableSortedDictionary.Create<string, int>());
    DebuggerAttributes.ValidateDebuggerTypeProxyProperties(ImmutableSortedDictionary.Create<int, int>());
    object rootNode = DebuggerAttributes.GetFieldValue(ImmutableSortedDictionary.Create<string, string>(), "_root");
    DebuggerAttributes.ValidateDebuggerDisplayReferences(rootNode);
}
////[Fact] // not really a functional test -- but very useful to enable when collecting perf traces.
public void EnumerationPerformance()
{
    // Times three rounds of 10k full enumerations of a 1000-entry dictionary.
    var dictionary = Enumerable.Range(1, 1000).ToImmutableSortedDictionary(k => k, k => k);
    var timing = new TimeSpan[3];
    var sw = new Stopwatch();
    for (int j = 0; j < timing.Length; j++)
    {
        sw.Start();
        for (int i = 0; i < 10000; i++)
        {
            foreach (var entry in dictionary)
            {
            }
        }
        timing[j] = sw.Elapsed;
        sw.Reset();
    }
    string timingText = string.Join(Environment.NewLine, timing);
    Debug.WriteLine("Timing:{0}{1}", Environment.NewLine, timingText);
}
////[Fact] // not really a functional test -- but very useful to enable when collecting perf traces.
public void EnumerationPerformance_Empty()
{
    // Same measurement against the empty dictionary, to isolate enumerator overhead.
    var dictionary = ImmutableSortedDictionary<int, int>.Empty;
    var timing = new TimeSpan[3];
    var sw = new Stopwatch();
    for (int j = 0; j < timing.Length; j++)
    {
        sw.Start();
        for (int i = 0; i < 10000; i++)
        {
            foreach (var entry in dictionary)
            {
            }
        }
        timing[j] = sw.Elapsed;
        sw.Reset();
    }
    string timingText = string.Join(Environment.NewLine, timing);
    Debug.WriteLine("Timing_Empty:{0}{1}", Environment.NewLine, timingText);
}
// Factory overrides and helpers consumed by the shared ImmutableDictionaryTestBase suite.
protected override IImmutableDictionary<TKey, TValue> Empty<TKey, TValue>()
{
    return ImmutableSortedDictionaryTest.Empty<TKey, TValue>();
}
protected override IImmutableDictionary<string, TValue> Empty<TValue>(StringComparer comparer)
{
    // StringComparer implements IComparer<string>, so it can seed a sorted dictionary.
    return ImmutableSortedDictionary.Create<string, TValue>(comparer);
}
protected override IEqualityComparer<TValue> GetValueComparer<TKey, TValue>(IImmutableDictionary<TKey, TValue> dictionary)
{
    return ((ImmutableSortedDictionary<TKey, TValue>)dictionary).ValueComparer;
}
internal override IBinaryTree GetRootNode<TKey, TValue>(IImmutableDictionary<TKey, TValue> dictionary)
{
    return ((ImmutableSortedDictionary<TKey, TValue>)dictionary).Root;
}
protected void ContainsValueTestHelper<TKey, TValue>(ImmutableSortedDictionary<TKey, TValue> map, TKey key, TValue value)
{
    // The value must be absent before the add and present afterwards.
    Assert.False(map.ContainsValue(value));
    Assert.True(map.Add(key, value).ContainsValue(value));
}
private static IImmutableDictionary<TKey, TValue> Empty<TKey, TValue>(IComparer<TKey> keyComparer = null, IEqualityComparer<TValue> valueComparer = null)
{
    return ImmutableSortedDictionary<TKey, TValue>.Empty.WithComparers(keyComparer, valueComparer);
}
}
}
| |
//
// Copyright (c) 2004-2021 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
#if !NETSTANDARD1_3
namespace NLog.Targets
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using NLog.Common;
using NLog.Internal;
using NLog.Layouts;
/// <summary>
/// Writes log messages to the console.
/// </summary>
/// <seealso href="https://github.com/nlog/nlog/wiki/Console-target">Documentation on NLog Wiki</seealso>
/// <example>
/// <p>
/// To set up the target in the <a href="config.html">configuration file</a>,
/// use the following syntax:
/// </p>
/// <code lang="XML" source="examples/targets/Configuration File/Console/NLog.config" />
/// <p>
/// This assumes just one target and a single rule. More configuration
/// options are described <a href="config.html">here</a>.
/// </p>
/// <p>
/// To set up the log target programmatically use code like this:
/// </p>
/// <code lang="C#" source="examples/targets/Configuration API/Console/Simple/Example.cs" />
/// </example>
[Target("Console")]
public sealed class ConsoleTarget : TargetWithLayoutHeaderAndFooter
{
/// <summary>
/// Should logging be paused/stopped because of the race condition bug in Console.WriteLine?
/// </summary>
/// <remarks>
/// Console.Out.Writeline / Console.Error.Writeline could throw 'IndexOutOfRangeException', which is a bug.
/// See https://stackoverflow.com/questions/33915790/console-out-and-console-error-race-condition-error-in-a-windows-service-written
/// and https://connect.microsoft.com/VisualStudio/feedback/details/2057284/console-out-probable-i-o-race-condition-issue-in-multi-threaded-windows-service
///
/// Full error:
/// Error during session close: System.IndexOutOfRangeException: Probable I/ O race condition detected while copying memory.
/// The I/ O package is not thread safe by default. In multi threaded applications,
/// a stream must be accessed in a thread-safe way, such as a thread - safe wrapper returned by TextReader's or
/// TextWriter's Synchronized methods.This also applies to classes like StreamWriter and StreamReader.
///
/// </remarks>
private bool _pauseLogging;
// Reusable 16 KB char buffer backing the batched (WriteBuffer) output path.
private readonly ReusableBufferCreator _reusableEncodingBuffer = new ReusableBufferCreator(16 * 1024);
/// <summary>
/// Gets or sets a value indicating whether to send the log messages to the standard error instead of the standard output.
/// </summary>
/// <docgen category='Console Options' order='10' />
[Obsolete("Replaced by StdErr to align with ColoredConsoleTarget. Marked obsolete on NLog 5.0")]
public bool Error { get => StdErr; set => StdErr = value; }
/// <summary>
/// Gets or sets a value indicating whether to send the log messages to the standard error instead of the standard output.
/// </summary>
/// <docgen category='Console Options' order='10' />
public bool StdErr { get; set; }
/// <summary>
/// The encoding for writing messages to the <see cref="Console"/>.
/// </summary>
/// <remarks>Has side effect: the setter reconfigures the live console output encoding.</remarks>
/// <docgen category='Console Options' order='10' />
public Encoding Encoding
{
    get => ConsoleTargetHelper.GetConsoleOutputEncoding(_encoding, IsInitialized, _pauseLogging);
    set
    {
        if (ConsoleTargetHelper.SetConsoleOutputEncoding(value, IsInitialized, _pauseLogging))
            _encoding = value;
    }
}
private Encoding _encoding;
/// <summary>
/// Gets or sets a value indicating whether to auto-check if the console is available
/// - Disables console writing if Environment.UserInteractive = False (Windows Service)
/// - Disables console writing if Console Standard Input is not available (Non-Console-App)
/// </summary>
/// <docgen category='Console Options' order='10' />
public bool DetectConsoleAvailable { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to auto-flush after <see cref="Console.WriteLine()"/>
/// </summary>
/// <remarks>
/// Normally not required as standard Console.Out will have <see cref="StreamWriter.AutoFlush"/> = true, but not when pipe to file
/// </remarks>
/// <docgen category='Console Options' order='10' />
public bool AutoFlush { get; set; }
/// <summary>
/// Gets or sets whether to activate internal buffering to allow batch writing, instead of using <see cref="Console.WriteLine()"/>
/// </summary>
/// <docgen category='Console Options' order='10' />
public bool WriteBuffer { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="ConsoleTarget" /> class.
/// </summary>
/// <remarks>
/// The default value of the layout is: <code>${longdate}|${level:uppercase=true}|${logger}|${message:withexception=true}</code>
/// </remarks>
public ConsoleTarget() : base()
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ConsoleTarget" /> class.
/// </summary>
/// <remarks>
/// The default value of the layout is: <code>${longdate}|${level:uppercase=true}|${logger}|${message:withexception=true}</code>
/// </remarks>
/// <param name="name">Name of the target.</param>
public ConsoleTarget(string name) : this()
{
    Name = name;
}
/// <inheritdoc/>
protected override void InitializeTarget()
{
    _pauseLogging = false;
    // Optionally probe whether a console is actually attached; if not, logging
    // through this target is disabled rather than throwing on every write.
    if (DetectConsoleAvailable)
    {
        string reason;
        _pauseLogging = !ConsoleTargetHelper.IsConsoleAvailable(out reason);
        if (_pauseLogging)
        {
            InternalLogger.Info("{0}: Console has been detected as turned off. Disable DetectConsoleAvailable to skip detection. Reason: {1}", this, reason);
        }
    }
    // Apply an encoding that was configured before initialization.
    if (_encoding != null)
        ConsoleTargetHelper.SetConsoleOutputEncoding(_encoding, true, _pauseLogging);
    base.InitializeTarget();
    if (Header != null)
    {
        RenderToOutput(Header, LogEventInfo.CreateNullEvent());
    }
}
/// <inheritdoc/>
protected override void CloseTarget()
{
    // Write the footer (if any) before the final flush.
    if (Footer != null)
    {
        RenderToOutput(Footer, LogEventInfo.CreateNullEvent());
    }
    ExplicitConsoleFlush();
    base.CloseTarget();
}
/// <inheritdoc/>
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
    try
    {
        ExplicitConsoleFlush();
        base.FlushAsync(asyncContinuation);
    }
    catch (Exception ex)
    {
        // Report the failure through the continuation instead of letting it propagate.
        asyncContinuation(ex);
    }
}
/// <summary>
/// Flushes the active console stream, unless logging is paused or every write
/// already flushes (AutoFlush).
/// </summary>
private void ExplicitConsoleFlush()
{
    if (_pauseLogging || AutoFlush)
        return;

    GetOutput().Flush();
}
/// <inheritdoc/>
protected override void Write(LogEventInfo logEvent)
{
    // Cheap early exit while logging is paused; otherwise render and emit.
    if (!_pauseLogging)
    {
        RenderToOutput(Layout, logEvent);
    }
}
/// <inheritdoc/>
protected override void Write(IList<AsyncLogEventInfo> logEvents)
{
    if (_pauseLogging)
        return;

    if (!WriteBuffer)
    {
        // No internal buffering: fall back to per-event Console.WriteLine.
        base.Write(logEvents);
        return;
    }

    WriteBufferToOutput(logEvents);
}
/// <summary>
/// Renders a single layout/event pair to the active console stream, using the
/// buffered path when <see cref="WriteBuffer"/> is enabled.
/// </summary>
private void RenderToOutput(Layout layout, LogEventInfo logEvent)
{
    if (_pauseLogging)
        return;

    var writer = GetOutput();
    if (WriteBuffer)
        WriteBufferToOutput(writer, layout, logEvent);
    else
        WriteLineToOutput(writer, RenderLogEvent(layout, logEvent));
}
/// <summary>
/// Renders a single event through the reusable char-buffer and writes it out.
/// </summary>
private void WriteBufferToOutput(TextWriter output, Layout layout, LogEventInfo logEvent)
{
    int targetBufferPosition = 0;
    using (var targetBuffer = _reusableEncodingBuffer.Allocate())
    using (var targetBuilder = ReusableLayoutBuilder.Allocate())
    {
        RenderLogEventToWriteBuffer(output, layout, logEvent, targetBuilder.Result, targetBuffer.Result, ref targetBufferPosition);
        if (targetBufferPosition > 0)
        {
            WriteBufferToOutput(output, targetBuffer.Result, targetBufferPosition);
        }
    }
}
/// <summary>
/// Renders a batch of events through the reusable char-buffer (flushing as it
/// fills) and signals each event's continuation.
/// </summary>
private void WriteBufferToOutput(IList<AsyncLogEventInfo> logEvents)
{
    var output = GetOutput();
    using (var targetBuffer = _reusableEncodingBuffer.Allocate())
    using (var targetBuilder = ReusableLayoutBuilder.Allocate())
    {
        int targetBufferPosition = 0;
        try
        {
            for (int i = 0; i < logEvents.Count; ++i)
            {
                targetBuilder.Result.ClearBuilder();
                RenderLogEventToWriteBuffer(output, Layout, logEvents[i].LogEvent, targetBuilder.Result, targetBuffer.Result, ref targetBufferPosition);
                logEvents[i].Continuation(null);
            }
        }
        finally
        {
            // Always drain whatever was buffered, even if rendering threw mid-batch.
            if (targetBufferPosition > 0)
            {
                WriteBufferToOutput(output, targetBuffer.Result, targetBufferPosition);
            }
        }
    }
}
/// <summary>
/// Renders one event into <paramref name="targetBuilder"/> and appends it plus a
/// newline to <paramref name="targetBuffer"/>. Flushes the buffer first when the
/// rendered text would not fit; messages larger than the whole buffer are
/// written out directly, bypassing the buffer.
/// </summary>
private void RenderLogEventToWriteBuffer(TextWriter output, Layout layout, LogEventInfo logEvent, StringBuilder targetBuilder, char[] targetBuffer, ref int targetBufferPosition)
{
    int environmentNewLineLength = System.Environment.NewLine.Length;
    layout.Render(logEvent, targetBuilder);
    if (targetBuilder.Length > targetBuffer.Length - targetBufferPosition - environmentNewLineLength)
    {
        // Not enough room left: flush what has been buffered so far.
        if (targetBufferPosition > 0)
        {
            WriteBufferToOutput(output, targetBuffer, targetBufferPosition);
            targetBufferPosition = 0;
        }
        // Message larger than the entire buffer: write it directly.
        if (targetBuilder.Length > targetBuffer.Length - environmentNewLineLength)
        {
            WriteLineToOutput(output, targetBuilder.ToString());
            return;
        }
    }
    targetBuilder.Append(System.Environment.NewLine);
    targetBuilder.CopyToBuffer(targetBuffer, targetBufferPosition);
    targetBufferPosition += targetBuilder.Length;
}
/// <summary>
/// Writes a single line to the console in a thread-safe way; on the known
/// Console race-condition exceptions, pauses all further logging from this target.
/// </summary>
private void WriteLineToOutput(TextWriter output, string message)
{
    try
    {
        ConsoleTargetHelper.WriteLineThreadSafe(output, message, AutoFlush);
    }
    catch (Exception ex) when (ex is OverflowException || ex is IndexOutOfRangeException || ex is ArgumentOutOfRangeException)
    {
        // This is a Console bug, therefore logging is stopped. For docs, see the remarks on _pauseLogging.
        _pauseLogging = true;
        // Fixed: a space was missing at the concatenation seam, producing "condition.Logging".
        InternalLogger.Warn(ex, "{0}: {1} has been thrown and this is probably due to a race condition. " +
            "Logging to the console will be paused. Enable by reloading the config or re-initialize the targets", this, ex.GetType());
    }
}
/// <summary>
/// Writes a rendered char-buffer segment to the console in a thread-safe way; on
/// the known Console race-condition exceptions, pauses all further logging.
/// </summary>
private void WriteBufferToOutput(TextWriter output, char[] buffer, int length)
{
    try
    {
        ConsoleTargetHelper.WriteBufferThreadSafe(output, buffer, length, AutoFlush);
    }
    catch (Exception ex) when (ex is OverflowException || ex is IndexOutOfRangeException || ex is ArgumentOutOfRangeException)
    {
        // This is a Console bug, therefore logging is stopped. For docs, see the remarks on _pauseLogging.
        _pauseLogging = true;
        // Fixed: a space was missing at the concatenation seam, producing "condition.Logging".
        InternalLogger.Warn(ex, "{0}: {1} has been thrown and this is probably due to a race condition. " +
            "Logging to the console will be paused. Enable by reloading the config or re-initialize the targets", this, ex.GetType());
    }
}
/// <summary>
/// Resolves the console stream to write to, honoring <see cref="StdErr"/>.
/// </summary>
private TextWriter GetOutput()
{
    if (StdErr)
        return Console.Error;

    return Console.Out;
}
}
}
#endif
| |
/*
* PROPRIETARY INFORMATION. This software is proprietary to
* Side Effects Software Inc., and is not to be reproduced,
* transmitted, or disclosed in any way without written permission.
*
* Produced by:
* Side Effects Software Inc
* 123 Front Street West, Suite 1401
* Toronto, Ontario
* Canada M5J 2M2
* 416-504-9876
*
* COMMENTS:
*
*/
// Master control for enabling runtime.
#if ( UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX || ( UNITY_METRO && UNITY_EDITOR ) )
#define HAPI_ENABLE_RUNTIME
#endif
using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
#endif // UNITY_EDITOR
using System.Runtime.InteropServices;
using System.Collections;
using System.Collections.Generic;
[ ExecuteInEditMode ]
public class HoudiniAssetOTL : HoudiniAsset
{
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Public Properties
// Please keep these in the same order and grouping as their initializations in HAPI_Asset.reset().
// Path of the OTL file backing this asset (persisted across asset reloads).
public string prAssetPath { get { return myAssetPath; }
                            set { myAssetPath = value; } }
// Attribute manager currently active for editing — presumably the paint target; confirm against editor tools.
public HoudiniGeoAttributeManager prActiveAttributeManager {get { return myActiveAttributeManager; }
                                                            set { myActiveAttributeManager = value; } }
// Geo controls available for paint editing.
public List< HoudiniGeoControl > prEditPaintGeos { get { return myEditPaintGeos; }
                                                   set { myEditPaintGeos = value; } }
// Handle descriptors and their parameter bindings fetched from the Houdini asset.
public HAPI_HandleInfo[] prHandleInfos { get; set; }
public List< HAPI_HandleBindingInfo[] > prHandleBindingInfos { get; set; }
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Public Methods
// Constructor: establishes defaults via reset(); logs creation when plugin logging is enabled.
public HoudiniAssetOTL()
{
    if ( prEnableLogging )
        Debug.Log( "HoudiniAssetOTL created - Instance Id: " + GetInstanceID() );
    // These variables need to keep between asset reloads.
    prAssetPath = "";
    reset();
}
// Intentionally empty finalizer.
~HoudiniAssetOTL()
{}
public override void reset()
{
// Save the asset type so we can restore it after the reset.
HoudiniAsset.AssetType asset_type = prAssetType;
base.reset();
// Please keep these in the same order and grouping as their declarations at the top.
prAssetPath = "";
prActiveAttributeManager = null;
prEditPaintGeos = new List< HoudiniGeoControl >();
prHandleInfos = new HAPI_HandleInfo[ 0 ];
prHandleBindingInfos = null;
// Need to restore the asset type here.
prAssetType = asset_type;
}
#if ( HAPI_ENABLE_RUNTIME )
public override void Update()
{
base.Update();
if (
#if UNITY_EDITOR
EditorApplication.isPlaying &&
#endif // UNITY_EDITOR
prPlaymodePerFrameCooking )
{
HoudiniHost.setTime( Time.time );
buildClientSide();
}
}
#endif // ( HAPI_ENABLE_RUNTIME )
public override bool buildAll()
{
bool unload_asset_first = ( prAssetType == HoudiniAsset.AssetType.TYPE_OTL );
return base.build( true, // reload_asset
unload_asset_first,
false, // serializatin_recovery_only
true, // force_reconnect
false, // is_duplication
prCookingTriggersDownCooks,
false // use_delay_for_progress_bar
);
}
public override bool build( bool reload_asset, bool unload_asset_first,
bool serialization_recovery_only,
bool force_reconnect,
bool is_duplication,
bool cook_downstream_assets,
bool use_delay_for_progress_bar )
{
unload_asset_first = unload_asset_first
&& prAssetType == HoudiniAsset.AssetType.TYPE_OTL
&& !serialization_recovery_only;
bool base_built = base.build(
reload_asset,
unload_asset_first,
serialization_recovery_only,
force_reconnect,
is_duplication,
cook_downstream_assets,
use_delay_for_progress_bar );
if ( !base_built )
return false;
return true;
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Protected Methods
protected override int buildCreateAsset( HoudiniProgressBar progress_bar )
{
// Try to find the otl if it has moved.
#if UNITY_EDITOR
#if UNITY_4_3 || UNITY_4_4
// Finding moved assets is not supported in Unity 4.4 and earlier versions.
#else
if ( !System.IO.File.Exists( prAssetPath ) )
{
string file_name = System.IO.Path.GetFileNameWithoutExtension( prAssetPath );
string[] guids = AssetDatabase.FindAssets( file_name );
if ( guids.Length == 0 )
{
throw new HoudiniError(
"Houdini asset file has moved from last location: " + prAssetPath );
}
string new_path = AssetDatabase.GUIDToAssetPath( guids[ 0 ] );
if ( new_path != null && new_path != "" )
{
Debug.Log( "Changing asset path for " + name + " to: " + new_path );
prAssetPath = new_path;
}
}
#endif // UNITY_4_3 || UNITY_4_4
#endif // UNITY_EDITOR
return HoudiniHost.loadOTL(
prAssetPath, prSplitGeosByGroup, prSplitPointsByVertexAttribute,
prImportTemplatedGeos, progress_bar, false );
}
protected override void buildFullBuildCustomWork( ref HoudiniProgressBar progress_bar, bool is_duplication )
{
// Initialize (or leave alone!) the pain structs.
if ( prEditPaintGeos == null )
{
prEditPaintGeos = new List< HoudiniGeoControl >();
prActiveAttributeManager = null;
}
else if ( is_duplication )
{
prActiveAttributeManager = null;
}
progress_bar.prMessage = "Loading handles...";
// Get exposed handle information.
prHandleInfos = new HAPI_HandleInfo[ prHandleCount ];
HoudiniAssetUtility.getArray1Id( prAssetId, HoudiniHost.getHandleInfo, prHandleInfos, prHandleCount );
// Get handles.
prHandleBindingInfos = new List< HAPI_HandleBindingInfo[] >( prHandleCount );
for ( int handle_index = 0; handle_index < prHandleCount; ++handle_index )
{
progress_bar.incrementProgressBar();
HAPI_HandleInfo handle_info = prHandleInfos[ handle_index ];
HAPI_HandleBindingInfo[] binding_infos = new HAPI_HandleBindingInfo[ handle_info.bindingsCount ];
HoudiniAssetUtility.getArray2Id(
prAssetId, handle_index, HoudiniHost.getHandleBindingInfo,
binding_infos, handle_info.bindingsCount );
prHandleBindingInfos.Add( binding_infos );
}
}
protected override bool buildCreateObjects( bool reload_asset, ref HoudiniProgressBar progress_bar )
{
bool needs_recook = false;
for ( int object_index = 0; object_index < prObjectCount; ++object_index )
{
progress_bar.incrementProgressBar();
try
{
if ( !prObjects[ object_index ].isInstancer &&
( reload_asset || prObjects[ object_index ].hasTransformChanged
|| prObjects[ object_index ].haveGeosChanged ) )
{
needs_recook |= createObject( object_index, reload_asset );
}
}
catch ( HoudiniError error )
{
// Per-object errors are not re-thrown so that the rest of the asset has a chance to load.
Debug.LogWarning( error.ToString() );
}
}
// Processing instancers.
for ( int object_index = 0; object_index < prObjectCount; ++object_index )
{
HAPI_ObjectInfo object_info = prObjects[ object_index ];
if ( object_info.isInstancer )
{
try
{
if ( object_info.objectToInstanceId >= 0 &&
prGameObjects[ object_info.objectToInstanceId ] == null )
needs_recook |= createObject( object_info.objectToInstanceId, reload_asset );
if( reload_asset || object_info.haveGeosChanged )
instanceObjects( object_index, progress_bar );
}
catch ( HoudiniError error )
{
// Per-object errors are not re-thrown so that the rest of the asset has a chance to load.
Debug.LogWarning( error.ToString() );
}
}
}
// Enumerate edit and paint geos.
HoudiniGeoControl[] geo_controls = gameObject.GetComponentsInChildren< HoudiniGeoControl >();
prEditPaintGeos.Clear();
foreach ( HoudiniGeoControl geo_control in geo_controls )
{
if ( geo_control.prGeoType == HAPI_GeoType.HAPI_GEOTYPE_INTERMEDIATE
&& geo_control.GetType() == typeof( HoudiniGeoControl ) )
{
prEditPaintGeos.Add( geo_control );
}
}
if ( prEditPaintGeos.Count > 0 && prActiveAttributeManager == null )
prActiveAttributeManager = prEditPaintGeos[ 0 ].prGeoAttributeManager;
return needs_recook;
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Private Methods
private void instanceObjects( int object_id, HoudiniProgressBar progress_bar )
{
HAPI_ObjectInfo object_info = prObjects[ object_id ];
HoudiniInstancer instancer = null;
Transform old_instancer_transform = transform.Find( object_info.name );
if ( old_instancer_transform && old_instancer_transform.gameObject.GetComponent< HoudiniInstancer >() )
{
instancer = old_instancer_transform.gameObject.GetComponent< HoudiniInstancer >();
}
else
{
if( gameObject.GetComponent< HoudiniInstancerManager >() == null )
gameObject.AddComponent< HoudiniInstancerManager >();
GameObject main_object = new GameObject( object_info.name );
main_object.transform.parent = transform;
main_object.AddComponent< HoudiniInstancer >();
prGameObjects[ object_id ] = main_object;
instancer = main_object.GetComponent< HoudiniInstancer >();
HoudiniInstancerManager instancer_manager = gameObject.GetComponent< HoudiniInstancerManager >();
instancer_manager.updateInstancerData( instancer );
}
instancer.prAsset = this;
instancer.prObjectId = object_id;
instancer.instanceObjects( progress_bar );
}
private bool createObject( int object_id, bool reload_asset )
{
bool needs_recook = false;
HoudiniObjectControl object_control = null;
HAPI_ObjectInfo object_info = prObjects[ object_id ];
// Create main underling.
if ( prGameObjects[ object_id ] == null )
{
prGameObjects[ object_id ] = new GameObject( object_info.name );
prGameObjects[ object_id ].transform.parent = transform;
prGameObjects[ object_id ].isStatic = gameObject.isStatic;
object_control = prGameObjects[ object_id ].AddComponent< HoudiniObjectControl >();
object_control.init(
prAssetId, object_info.nodeId, prAsset, object_id, object_info.name, object_info.isVisible );
}
else
{
object_control = prGameObjects[ object_id ].GetComponent< HoudiniObjectControl >();
}
GameObject main_child = prGameObjects[ object_id ];
try
{
needs_recook |= object_control.refresh( reload_asset, object_info );
if ( reload_asset || object_info.hasTransformChanged )
{
// Get transforms.
HAPI_Transform trans = prObjectTransforms[ object_id ];
HoudiniAssetUtility.applyTransform( trans, main_child.transform );
}
}
catch ( HoudiniError error )
{
DestroyImmediate( main_child );
prGameObjects[ object_id ] = null;
error.addMessagePrefix( "Obj(id: " + object_info.id + ", name: " + object_info.name + ")" );
error.addMessageDetail( "Object Path: " + object_info.objectInstancePath );
throw;
}
return needs_recook;
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Serialized Data
[SerializeField] private string myAssetPath;
[SerializeField] private HoudiniGeoAttributeManager myActiveAttributeManager;
[SerializeField] private List< HoudiniGeoControl > myEditPaintGeos;
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.ServerManagement
{
    using System;
    using System.Collections.Generic;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;
    /// <summary>
    /// GatewayOperations operations.
    /// </summary>
    public partial interface IGatewayOperations
    {
        /// <summary>
        /// Creates or updates a ManagementService gateway.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='location'>
        /// location of the resource
        /// </param>
        /// <param name='tags'>
        /// resource tags
        /// </param>
        /// <param name='autoUpgrade'>
        /// The autoUpgrade property gives the flexibility to gateway to auto
        /// upgrade itself. If properties value not specified, then we assume
        /// autoUpgrade = Off. Possible values include: 'On', 'Off'
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<GatewayResource>> CreateWithHttpMessagesAsync(string resourceGroupName, string gatewayName, string location = default(string), object tags = default(object), AutoUpgrade? autoUpgrade = default(AutoUpgrade?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Creates or updates a ManagementService gateway.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='location'>
        /// location of the resource
        /// </param>
        /// <param name='tags'>
        /// resource tags
        /// </param>
        /// <param name='autoUpgrade'>
        /// The autoUpgrade property gives the flexibility to gateway to auto
        /// upgrade itself. If properties value not specified, then we assume
        /// autoUpgrade = Off. Possible values include: 'On', 'Off'
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<GatewayResource>> BeginCreateWithHttpMessagesAsync(string resourceGroupName, string gatewayName, string location = default(string), object tags = default(object), AutoUpgrade? autoUpgrade = default(AutoUpgrade?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Updates a gateway belonging to a resource group.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='location'>
        /// location of the resource
        /// </param>
        /// <param name='tags'>
        /// resource tags
        /// </param>
        /// <param name='autoUpgrade'>
        /// The autoUpgrade property gives the flexibility to gateway to auto
        /// upgrade itself. If properties value not specified, then we assume
        /// autoUpgrade = Off. Possible values include: 'On', 'Off'
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<GatewayResource>> UpdateWithHttpMessagesAsync(string resourceGroupName, string gatewayName, string location = default(string), object tags = default(object), AutoUpgrade? autoUpgrade = default(AutoUpgrade?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Updates a gateway belonging to a resource group.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='location'>
        /// location of the resource
        /// </param>
        /// <param name='tags'>
        /// resource tags
        /// </param>
        /// <param name='autoUpgrade'>
        /// The autoUpgrade property gives the flexibility to gateway to auto
        /// upgrade itself. If properties value not specified, then we assume
        /// autoUpgrade = Off. Possible values include: 'On', 'Off'
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<GatewayResource>> BeginUpdateWithHttpMessagesAsync(string resourceGroupName, string gatewayName, string location = default(string), object tags = default(object), AutoUpgrade? autoUpgrade = default(AutoUpgrade?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Deletes a gateway from a resource group.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string gatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Returns a gateway
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum)
        /// </param>
        /// <param name='expand'>
        /// Optional expansion of the returned gateway resource.
        /// Possible values include: 'status'
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<GatewayResource>> GetWithHttpMessagesAsync(string resourceGroupName, string gatewayName, GatewayExpandOption? expand = default(GatewayExpandOption?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Returns gateways in a subscription
        /// </summary>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<IPage<GatewayResource>>> ListWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Returns gateways in a resource group
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<IPage<GatewayResource>>> ListForResourceGroupWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Upgrade a gateway
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse> UpgradeWithHttpMessagesAsync(string resourceGroupName, string gatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Upgrade a gateway
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse> BeginUpgradeWithHttpMessagesAsync(string resourceGroupName, string gatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Regenerate a gateway's profile
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse> RegenerateProfileWithHttpMessagesAsync(string resourceGroupName, string gatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Regenerate a gateway's profile
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse> BeginRegenerateProfileWithHttpMessagesAsync(string resourceGroupName, string gatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Gets a gateway profile
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<GatewayProfile>> GetProfileWithHttpMessagesAsync(string resourceGroupName, string gatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Gets a gateway profile
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name uniquely identifies the resource group
        /// within the user subscriptionId.
        /// </param>
        /// <param name='gatewayName'>
        /// The gateway name (256 characters maximum).
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<GatewayProfile>> BeginGetProfileWithHttpMessagesAsync(string resourceGroupName, string gatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Returns gateways in a subscription
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<IPage<GatewayResource>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Returns gateways in a resource group
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        Task<AzureOperationResponse<IPage<GatewayResource>>> ListForResourceGroupNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    }
}
| |
//
// AudioCdService.cs
//
// Author:
// Aaron Bockover <abockover@novell.com>
//
// Copyright (C) 2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using Mono.Unix;
using Hyena;
using Banshee.ServiceStack;
using Banshee.Configuration;
using Banshee.Preferences;
using Banshee.Hardware;
using Banshee.Gui;
namespace Banshee.AudioCd
{
public class AudioCdService : IExtensionService, IDisposable
{
private Dictionary<string, AudioCdSource> sources;
private List<DeviceCommand> unhandled_device_commands;
private SourcePage pref_page;
private Section pref_section;
private uint global_interface_id;
public AudioCdService ()
{
}
public void Initialize ()
{
lock (this) {
InstallPreferences ();
sources = new Dictionary<string, AudioCdSource> ();
foreach (ICdromDevice device in ServiceManager.HardwareManager.GetAllCdromDevices ()) {
MapCdromDevice (device);
}
ServiceManager.HardwareManager.DeviceAdded += OnHardwareDeviceAdded;
ServiceManager.HardwareManager.DeviceRemoved += OnHardwareDeviceRemoved;
ServiceManager.HardwareManager.DeviceCommand += OnDeviceCommand;
SetupActions ();
}
}
public void Dispose ()
{
lock (this) {
UninstallPreferences ();
ServiceManager.HardwareManager.DeviceAdded -= OnHardwareDeviceAdded;
ServiceManager.HardwareManager.DeviceRemoved -= OnHardwareDeviceRemoved;
ServiceManager.HardwareManager.DeviceCommand -= OnDeviceCommand;
foreach (AudioCdSource source in sources.Values) {
source.Dispose ();
ServiceManager.SourceManager.RemoveSource (source);
}
sources.Clear ();
sources = null;
DisposeActions ();
}
}
private void MapCdromDevice (ICdromDevice device)
{
lock (this) {
foreach (IVolume volume in device) {
if (volume is IDiscVolume) {
MapDiscVolume ((IDiscVolume)volume);
}
}
}
}
private void MapDiscVolume (IDiscVolume volume)
{
lock (this) {
if (!sources.ContainsKey (volume.Uuid) && volume.HasAudio) {
AudioCdSource source = new AudioCdSource (this, new AudioCdDiscModel (volume));
sources.Add (volume.Uuid, source);
ServiceManager.SourceManager.AddSource (source);
// If there are any queued device commands, see if they are to be
// handled by this new volume (e.g. --device-activate-play=cdda://sr0/)
try {
if (unhandled_device_commands != null) {
foreach (DeviceCommand command in unhandled_device_commands) {
if (DeviceCommandMatchesSource (source, command)) {
HandleDeviceCommand (source, command.Action);
unhandled_device_commands.Remove (command);
if (unhandled_device_commands.Count == 0) {
unhandled_device_commands = null;
}
break;
}
}
}
} catch (Exception e) {
Log.Exception (e);
}
Log.DebugFormat ("Mapping audio CD ({0})", volume.Uuid);
}
}
}
internal void UnmapDiscVolume (string uuid)
{
lock (this) {
if (sources.ContainsKey (uuid)) {
AudioCdSource source = sources[uuid];
source.StopPlayingDisc ();
ServiceManager.SourceManager.RemoveSource (source);
sources.Remove (uuid);
Log.DebugFormat ("Unmapping audio CD ({0})", uuid);
}
}
}
private void OnHardwareDeviceAdded (object o, DeviceAddedArgs args)
{
lock (this) {
if (args.Device is ICdromDevice) {
MapCdromDevice ((ICdromDevice)args.Device);
} else if (args.Device is IDiscVolume) {
MapDiscVolume ((IDiscVolume)args.Device);
}
}
}
private void OnHardwareDeviceRemoved (object o, DeviceRemovedArgs args)
{
lock (this) {
UnmapDiscVolume (args.DeviceUuid);
}
}
#region DeviceCommand Handling
private bool DeviceCommandMatchesSource (AudioCdSource source, DeviceCommand command)
{
if (command.DeviceId.StartsWith ("cdda:")) {
try {
Uri uri = new Uri (command.DeviceId);
string match_device_node = String.Format ("{0}{1}", uri.Host,
uri.AbsolutePath).TrimEnd ('/', '\\');
string device_node = source.DiscModel.Volume.DeviceNode;
return device_node.EndsWith (match_device_node);
} catch {
}
}
return false;
}
private void HandleDeviceCommand (AudioCdSource source, DeviceCommandAction action)
{
if ((action & DeviceCommandAction.Activate) != 0) {
ServiceManager.SourceManager.SetActiveSource (source);
}
if ((action & DeviceCommandAction.Play) != 0) {
ServiceManager.PlaybackController.NextSource = source;
if (!ServiceManager.PlayerEngine.IsPlaying ()) {
ServiceManager.PlaybackController.Next ();
}
}
}
private void OnDeviceCommand (object o, DeviceCommand command)
{
lock (this) {
// Check to see if we have an already mapped disc volume that should
// handle this incoming command; if not, queue it for later discs
foreach (AudioCdSource source in sources.Values) {
if (DeviceCommandMatchesSource (source, command)) {
HandleDeviceCommand (source, command.Action);
return;
}
}
if (unhandled_device_commands == null) {
unhandled_device_commands = new List<DeviceCommand> ();
}
unhandled_device_commands.Add (command);
}
}
#endregion
#region Preferences
private void InstallPreferences ()
{
PreferenceService service = ServiceManager.Get<PreferenceService> ();
if (service == null) {
return;
}
service.InstallWidgetAdapters += OnPreferencesServiceInstallWidgetAdapters;
pref_page = new Banshee.Preferences.SourcePage ("audio-cd", Catalog.GetString ("Audio CDs"), "media-cdrom", 400);
pref_section = pref_page.Add (new Section ("audio-cd", Catalog.GetString ("Audio CD Importing"), 20));
pref_section.ShowLabel = false;
pref_section.Add (new VoidPreference ("import-profile", Catalog.GetString ("_Import format")));
pref_section.Add (new VoidPreference ("import-profile-desc"));
pref_section.Add (new SchemaPreference<bool> (AutoRip,
Catalog.GetString ("_Automatically import audio CDs when inserted"),
Catalog.GetString ("When an audio CD is inserted, automatically begin importing it " +
"if metadata can be found and it is not already in the library.")));
pref_section.Add (new SchemaPreference<bool> (EjectAfterRipped,
Catalog.GetString ("_Eject when done importing"),
Catalog.GetString ("When an audio CD has been imported, automatically eject it.")));
pref_section.Add (new SchemaPreference<bool> (ErrorCorrection,
Catalog.GetString ("Use error correction when importing"),
Catalog.GetString ("Error correction tries to work around problem areas on a disc, such " +
"as surface scratches, but will slow down importing substantially.")));
}
private void UninstallPreferences ()
{
PreferenceService service = ServiceManager.Get<PreferenceService> ();
if (service == null || pref_page == null) {
return;
}
service.InstallWidgetAdapters -= OnPreferencesServiceInstallWidgetAdapters;
pref_page.Dispose ();
pref_page = null;
pref_section = null;
}
private void OnPreferencesServiceInstallWidgetAdapters (object o, EventArgs args)
{
if (pref_section == null) {
return;
}
Gtk.HBox description_box = new Gtk.HBox ();
Banshee.MediaProfiles.Gui.ProfileComboBoxConfigurable chooser
= new Banshee.MediaProfiles.Gui.ProfileComboBoxConfigurable (ServiceManager.MediaProfileManager,
"cd-importing", description_box);
pref_section["import-profile"].DisplayWidget = chooser;
pref_section["import-profile"].MnemonicWidget = chooser.Combo;
pref_section["import-profile-desc"].DisplayWidget = description_box;
}
public static readonly SchemaEntry<bool> ErrorCorrection = new SchemaEntry<bool> (
"import", "audio_cd_error_correction",
false,
"Enable error correction",
"When importing an audio CD, enable error correction (paranoia mode)"
);
public static readonly SchemaEntry<bool> AutoRip = new SchemaEntry<bool> (
"import", "auto_rip_cds",
false,
"Enable audio CD auto ripping",
"When an audio CD is inserted, automatically begin ripping it."
);
public static readonly SchemaEntry<bool> EjectAfterRipped = new SchemaEntry<bool> (
"import", "eject_after_ripped",
false,
"Eject audio CD after ripped",
"After an audio CD has been ripped, automatically eject it."
);
#endregion
#region UI Actions
private void SetupActions ()
{
InterfaceActionService uia_service = ServiceManager.Get<InterfaceActionService> ();
if (uia_service == null) {
return;
}
uia_service.GlobalActions.AddImportant (new Gtk.ActionEntry [] {
new Gtk.ActionEntry ("RipDiscAction", null,
Catalog.GetString ("Import CD"), null,
Catalog.GetString ("Import this audio CD to the library"),
OnImportDisc)
});
uia_service.GlobalActions.AddImportant (
new Gtk.ActionEntry ("DuplicateDiscAction", null,
Catalog.GetString ("Duplicate CD"), null,
Catalog.GetString ("Duplicate this audio CD"),
OnDuplicateDisc)
);
global_interface_id = uia_service.UIManager.AddUiFromResource ("GlobalUI.xml");
}
private void DisposeActions ()
{
InterfaceActionService uia_service = ServiceManager.Get<InterfaceActionService> ();
if (uia_service == null) {
return;
}
uia_service.GlobalActions.Remove ("RipDiscAction");
uia_service.GlobalActions.Remove ("DuplicateDiscAction");
uia_service.UIManager.RemoveUi (global_interface_id);
}
// Action handler: import the currently selected audio CD into the library.
private void OnImportDisc (object o, EventArgs args)
{
    ImportOrDuplicateDisc (true);
}

// Action handler: duplicate (copy) the currently selected audio CD.
private void OnDuplicateDisc (object o, EventArgs args)
{
    ImportOrDuplicateDisc (false);
}
// Dispatches to ImportDisc/DuplicateDisc on the currently active source,
// but only when that source is actually an audio CD source.
private void ImportOrDuplicateDisc (bool import)
{
    var action_service = ServiceManager.Get<InterfaceActionService> ();
    if (action_service == null) {
        return;
    }

    var cd_source = action_service.SourceActions.ActionSource as AudioCdSource;
    if (cd_source == null) {
        return;
    }

    if (import) {
        cd_source.ImportDisc ();
    } else {
        cd_source.DuplicateDisc ();
    }
}
#endregion
// IService implementation: stable name used for service lookup/registration.
string IService.ServiceName {
    get { return "AudioCdService"; }
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Data;
using OpenSim.Framework;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using OpenSim.Services.InventoryService;
using System;
using System.Collections.Generic;
using System.Reflection;
namespace OpenSim.Services.HypergridService
{
/// <summary>
/// Hypergrid inventory service. It serves the IInventoryService interface,
/// but implements it in ways that are appropriate for inter-grid
/// inventory exchanges. Specifically, it does not performs deletions
/// and it responds to GetRootFolder requests with the ID of the
/// Suitcase folder, not the actual "My Inventory" folder.
/// </summary>
public class HGSuitcaseInventoryService : XInventoryService, IInventoryService
{
// Logger scoped to this type.
private static readonly ILog m_log =
    LogManager.GetLogger(
        MethodBase.GetCurrentMethod().DeclaringType);

// private string m_HomeURL;
// Resolves user accounts; loaded from the UserAccountsService config entry.
private IUserAccountService m_UserAccountService;
// Supplies avatar appearance, used by GetItem to whitelist worn/attached items.
private IAvatarService m_AvatarService;
// private UserAccountCache m_Cache;

// Per-user cache of the suitcase folder subtree; entries are added with a
// 5-minute lifetime (ctor arg 30 is presumably the cache's sweep/expiration
// interval — confirm against ThreadedClasses.ExpiringCache).
private ThreadedClasses.ExpiringCache<UUID, List<XInventoryFolder>> m_SuitcaseTrees = new ThreadedClasses.ExpiringCache<UUID, List<XInventoryFolder>>(30);
// Per-user cache of avatar appearance (same 5-minute entry lifetime).
private ThreadedClasses.ExpiringCache<UUID, AvatarAppearance> m_Appearances = new ThreadedClasses.ExpiringCache<UUID, AvatarAppearance>(30);
// Loads the collaborating services (user accounts, avatar) from the named
// config section. Both services are mandatory; missing configuration or a
// failed plugin load throws and aborts startup.
public HGSuitcaseInventoryService(IConfigSource config, string configName)
    : base(config, configName)
{
    m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: Starting with config name {0}", configName);
    if (configName != string.Empty)
        m_ConfigName = configName;

    if (m_Database == null)
        m_log.ErrorFormat("[HG SUITCASE INVENTORY SERVICE]: m_Database is null!");

    //
    // Try reading the [InventoryService] section, if it exists
    //
    IConfig invConfig = config.Configs[m_ConfigName];
    if (invConfig != null)
    {
        string userAccountsDll = invConfig.GetString("UserAccountsService", string.Empty);
        if (userAccountsDll == string.Empty)
            throw new Exception("Please specify UserAccountsService in HGInventoryService configuration");

        Object[] args = new Object[] { config };
        m_UserAccountService = ServerUtils.LoadPlugin<IUserAccountService>(userAccountsDll, args);
        if (m_UserAccountService == null)
            throw new Exception(String.Format("Unable to create UserAccountService from {0}", userAccountsDll));

        string avatarDll = invConfig.GetString("AvatarService", string.Empty);
        if (avatarDll == string.Empty)
            throw new Exception("Please specify AvatarService in HGInventoryService configuration");
        m_AvatarService = ServerUtils.LoadPlugin<IAvatarService>(avatarDll, args);
        if (m_AvatarService == null)
            throw new Exception(String.Format("Unable to create m_AvatarService from {0}", avatarDll));

        // m_HomeURL = Util.GetConfigVarFromSections<string>(config, "HomeURI",
        //     new string[] { "Startup", "Hypergrid", m_ConfigName }, String.Empty);
        // m_Cache = UserAccountCache.CreateUserAccountCache(m_UserAccountService);
    }
    m_log.Debug("[HG SUITCASE INVENTORY SERVICE]: Starting...");
}
// Deliberately disabled: hypergrid visitors already have an inventory on
// their home grid; this service never creates a new one.
public override bool CreateUserInventory(UUID principalID)
{
    // NOGO
    return false;
}
// The skeleton handed to a foreign grid is the suitcase subtree only, with
// the suitcase itself re-typed to look like a normal folder.
public override List<InventoryFolderBase> GetInventorySkeleton(UUID principalID)
{
    XInventoryFolder suitcase = GetSuitcaseXFolder(principalID);
    if (suitcase == null)
    {
        m_log.WarnFormat("[HG SUITCASE INVENTORY SERVICE]: Found no suitcase folder for user {0} when looking for inventory skeleton", principalID);
        return null;
    }

    List<XInventoryFolder> tree = GetFolderTree(principalID, suitcase.folderID);
    if (tree == null || tree.Count == 0)
        return null;

    List<InventoryFolderBase> skeleton = new List<InventoryFolderBase>();
    foreach (XInventoryFolder node in tree)
        skeleton.Add(ConvertToOpenSim(node));

    SetAsNormalFolder(suitcase);
    skeleton.Add(ConvertToOpenSim(suitcase));

    return skeleton;
}
// For hypergrid visitors the "root" handed out is the Suitcase folder, not
// the real "My Inventory" root. The suitcase (plus its system folders) is
// created on first use.
public override InventoryFolderBase GetRootFolder(UUID principalID)
{
    m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: GetRootFolder for {0}", principalID);

    // Let's find out the local root folder
    XInventoryFolder root = GetRootXFolder(principalID);
    if (root == null)
    {
        m_log.WarnFormat("[HG SUITCASE INVENTORY SERVICE]: Unable to retrieve local root folder for user {0}", principalID);
        return null;
    }

    // Warp! Root folder for travelers is the suitcase folder
    XInventoryFolder suitcase = GetSuitcaseXFolder(principalID);
    if (suitcase == null)
    {
        m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: User {0} does not have a Suitcase folder. Creating it...", principalID);
        // Create the My Suitcase folder under the user's root folder.
        // In the DB we tag it as type 100, but we use type 8 (Folder) outside, as this affects the sort order.
        suitcase = CreateFolder(principalID, root.folderID, InventoryItemBase.SUITCASE_FOLDER_TYPE, InventoryItemBase.SUITCASE_FOLDER_NAME);
        if (suitcase == null)
        {
            m_log.ErrorFormat("[HG SUITCASE INVENTORY SERVICE]: Unable to create suitcase folder");
            return null;
        }

        m_Database.StoreFolder(suitcase);
        // Populate the fresh suitcase with the standard system folders.
        CreateSystemFolders(principalID, suitcase.folderID);
    }

    // Present the suitcase as an ordinary folder to the caller.
    SetAsNormalFolder(suitcase);
    return ConvertToOpenSim(suitcase);
}
// Creates the standard set of system folders directly under the suitcase,
// skipping any type for which a system folder already exists.
protected void CreateSystemFolders(UUID principalID, UUID rootID)
{
    m_log.Debug("[HG SUITCASE INVENTORY SERVICE]: Creating System folders under Suitcase...");
    XInventoryFolder[] sysFolders = GetSystemFolders(principalID, rootID);

    // (type, display name) pairs, in the original creation order.
    AssetType[] types =
    {
        AssetType.Animation, AssetType.Bodypart, AssetType.CallingCard,
        AssetType.Clothing, AssetType.Gesture, AssetType.Landmark,
        AssetType.LostAndFoundFolder, AssetType.Notecard, AssetType.Object,
        AssetType.SnapshotFolder, AssetType.LSLText, AssetType.Sound,
        AssetType.Texture, AssetType.TrashFolder, AssetType.FavoriteFolder,
        AssetType.CurrentOutfitFolder
    };
    string[] names =
    {
        "Animations", "Body Parts", "Calling Cards",
        "Clothing", "Gestures", "Landmarks",
        "Lost And Found", "Notecards", "Objects",
        "Photo Album", "Scripts", "Sounds",
        "Textures", "Trash", "Favorites",
        "Current Outfit"
    };

    for (int i = 0; i < types.Length; i++)
    {
        int t = (int)types[i];
        if (!Array.Exists(sysFolders, f => f.type == t))
            CreateFolder(principalID, rootID, t, names[i]);
    }
}
// Maps a system folder type to the matching folder directly under the
// user's suitcase (never under the real inventory root).
public override InventoryFolderBase GetFolderForType(UUID principalID, AssetType type)
{
    XInventoryFolder suitcase = GetSuitcaseXFolder(principalID);
    if (suitcase == null)
    {
        m_log.WarnFormat("[HG SUITCASE INVENTORY SERVICE]: Found no suitcase folder for user {0} when looking for child type folder {1}", principalID, type);
        return null;
    }

    XInventoryFolder[] candidates = m_Database.GetFolders(
        new string[] { "agentID", "type", "parentFolderID" },
        new string[] { principalID.ToString(), ((int)type).ToString(), suitcase.folderID.ToString() });

    if (candidates.Length == 0)
    {
        m_log.WarnFormat("[HG SUITCASE INVENTORY SERVICE]: Found no folder for type {0} for user {1}", type, principalID);
        return null;
    }

    XInventoryFolder match = candidates[0];
    m_log.DebugFormat(
        "[HG SUITCASE INVENTORY SERVICE]: Found folder {0} {1} for type {2} for user {3}",
        match.folderName, match.folderID, type, principalID);

    return ConvertToOpenSim(match);
}
// Lists a folder's contents, but only for folders inside the suitcase
// subtree; anything else gets an empty collection.
public override InventoryCollection GetFolderContent(UUID principalID, UUID folderID)
{
    if (!IsWithinSuitcaseTree(principalID, folderID))
    {
        m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: GetFolderContent: folder {0} is not within Suitcase tree", folderID);
        return new InventoryCollection();
    }

    InventoryCollection contents = base.GetFolderContent(principalID, folderID);
    if (contents != null)
        return contents;

    m_log.WarnFormat("[HG SUITCASE INVENTORY SERVICE]: Something wrong with user {0}'s suitcase folder", principalID);
    return new InventoryCollection();
}
// Lists a folder's items; folders outside the suitcase subtree yield an
// empty list (stricter than the base service).
public override List<InventoryItemBase> GetFolderItems(UUID principalID, UUID folderID)
{
    if (IsWithinSuitcaseTree(principalID, folderID))
        return base.GetFolderItems(principalID, folderID);

    m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: GetFolderItems: folder {0} is not within Suitcase tree", folderID);
    return new List<InventoryItemBase>();
}
// Creates a folder, but only when its parent already lives inside the
// suitcase subtree. On success the cached subtree is updated in place so
// the new folder passes IsWithinSuitcaseTree before the cache expires.
public override bool AddFolder(InventoryFolderBase folder)
{
    //m_log.WarnFormat("[HG SUITCASE INVENTORY SERVICE]: AddFolder {0} {1}", folder.Name, folder.ParentID);
    // Let's do a bit of sanity checking, more than the base service does
    // make sure the given folder's parent folder exists under the suitcase tree of this user
    if (!IsWithinSuitcaseTree(folder.Owner, folder.ParentID))
    {
        m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: AddFolder: folder {0} is not within Suitcase tree", folder.ParentID);
        return false;
    }

    // OK, it's legit
    if (base.AddFolder(folder))
    {
        List<XInventoryFolder> tree;
        // Keep the cached tree consistent with the DB.
        if (m_SuitcaseTrees.TryGetValue(folder.Owner, out tree))
            tree.Add(ConvertFromOpenSim(folder));

        return true;
    }

    return false;
}
// Updates a folder only when it lives inside the suitcase subtree.
public override bool UpdateFolder(InventoryFolderBase folder)
{
    //m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: Update folder {0}, version {1}", folder.ID, folder.Version);
    if (IsWithinSuitcaseTree(folder.Owner, folder.ID))
        return base.UpdateFolder(folder);

    m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: UpdateFolder: folder {0}/{1} is not within Suitcase tree", folder.Name, folder.ID);
    return false;
}
// Moves a folder; both the folder itself and its destination parent must
// lie inside the suitcase subtree (checked in that order).
public override bool MoveFolder(InventoryFolderBase folder)
{
    foreach (UUID checkID in new UUID[] { folder.ID, folder.ParentID })
    {
        if (!IsWithinSuitcaseTree(folder.Owner, checkID))
        {
            m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: MoveFolder: folder {0} is not within Suitcase tree", checkID);
            return false;
        }
    }

    return base.MoveFolder(folder);
}
// Folder deletion is never honoured for hypergrid visitors.
public override bool DeleteFolders(UUID principalID, List<UUID> folderIDs)
{
    // NOGO
    return false;
}

// Folder purging is never honoured for hypergrid visitors.
public override bool PurgeFolder(InventoryFolderBase folder)
{
    // NOGO
    return false;
}
// Adds an item only when its destination folder lies inside the suitcase
// subtree (stricter sanity check than the base service).
public override bool AddItem(InventoryItemBase item)
{
    if (IsWithinSuitcaseTree(item.Owner, item.Folder))
        return base.AddItem(item);

    m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: AddItem: folder {0} is not within Suitcase tree", item.Folder);
    return false;
}
// Updates an item only when its containing folder lies inside the
// suitcase subtree.
public override bool UpdateItem(InventoryItemBase item)
{
    if (IsWithinSuitcaseTree(item.Owner, item.Folder))
        return base.UpdateItem(item);

    m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: UpdateItem: folder {0} is not within Suitcase tree", item.Folder);
    return false;
}
// Moves items between folders. Both the destination folder of every item
// and the item's current folder must lie inside the suitcase subtree;
// the whole batch is rejected on the first violation.
public override bool MoveItems(UUID principalID, List<InventoryItemBase> items)
{
    // Principal is b0rked. *sigh*

    // Check the items' destination folders
    foreach (InventoryItemBase item in items)
    {
        if (!IsWithinSuitcaseTree(item.Owner, item.Folder))
        {
            m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: MoveItems: folder {0} is not within Suitcase tree", item.Folder);
            return false;
        }
    }

    // Check the items' current folders
    foreach (InventoryItemBase item in items)
    {
        InventoryItemBase originalItem = base.GetItem(item);
        // BUGFIX: base.GetItem may return null (item missing or never
        // existed); previously that caused a NullReferenceException below.
        if (originalItem == null)
        {
            m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: MoveItems: unable to retrieve item {0}", item.ID);
            return false;
        }
        if (!IsWithinSuitcaseTree(originalItem.Owner, originalItem.Folder))
        {
            m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: MoveItems: folder {0} is not within Suitcase tree", originalItem.Folder);
            return false;
        }
    }

    return base.MoveItems(principalID, items);
}
// Item deletion is never honoured for hypergrid visitors.
public override bool DeleteItems(UUID principalID, List<UUID> itemIDs)
{
    return false;
}
// Item lookup restricted to the suitcase subtree, with one exception:
// items that are part of the avatar's current appearance (worn/attached)
// are served regardless of where they live in the visitor's inventory.
public new InventoryItemBase GetItem(InventoryItemBase item)
{
    InventoryItemBase it = base.GetItem(item);
    if (it == null)
    {
        m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: Unable to retrieve item {0} ({1}) in folder {2}",
            item.Name, item.ID, item.Folder);
        return null;
    }

    if (!IsWithinSuitcaseTree(it.Owner, it.Folder) && !IsPartOfAppearance(it.Owner, it.ID))
    {
        m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: Item {0}/{1} (folder {2}) is not within Suitcase tree or Appearance",
            it.Name, it.ID, it.Folder);
        return null;
    }

    //    UserAccount user = m_Cache.GetUser(it.CreatorId);
    //    // Adjust the creator data
    //    if (user != null && it != null && (it.CreatorData == null || it.CreatorData == string.Empty))
    //        it.CreatorData = m_HomeURL + ";" + user.FirstName + " " + user.LastName;
    //}
    return it;
}
// Folder lookup restricted to the suitcase subtree; folders outside it are
// reported as not found.
public new InventoryFolderBase GetFolder(InventoryFolderBase folder)
{
    InventoryFolderBase result = base.GetFolder(folder);
    if (result == null)
        return null;

    if (!IsWithinSuitcaseTree(result.Owner, result.ID))
    {
        m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: Folder {0}/{1} is not within Suitcase tree",
            result.Name, result.ID);
        return null;
    }

    return result;
}
//public List<InventoryItemBase> GetActiveGestures(UUID principalID)
//{
//}
#region Auxiliary functions
// Direct lookup of a single folder row by owner + folder id; null if absent.
private XInventoryFolder GetXFolder(UUID userID, UUID folderID)
{
    XInventoryFolder[] rows = m_Database.GetFolders(
        new string[] { "agentID", "folderID" },
        new string[] { userID.ToString(), folderID.ToString() });

    return rows.Length > 0 ? rows[0] : null;
}
// Finds the user's real "My Inventory" root: first by RootFolder type,
// then (for legacy data) by name with parent UUID.Zero. Null if neither
// query matches.
private XInventoryFolder GetRootXFolder(UUID principalID)
{
    XInventoryFolder[] folders = m_Database.GetFolders(
        new string[] { "agentID", "folderName", "type" },
        new string[] { principalID.ToString(), "My Inventory", ((int)AssetType.RootFolder).ToString() });

    if (folders != null && folders.Length > 0)
        return folders[0];

    // OK, so the RootFolder type didn't work. Let's look for any type with parent UUID.Zero.
    folders = m_Database.GetFolders(
        new string[] { "agentID", "folderName", "parentFolderID" },
        new string[] { principalID.ToString(), "My Inventory", UUID.Zero.ToString() });

    if (folders != null && folders.Length > 0)
        return folders[0];

    return null;
}
// The Current Outfit folder lives directly under the user's real root
// folder (not under the suitcase). Null when the root or the folder is
// missing.
private XInventoryFolder GetCurrentOutfitXFolder(UUID userID)
{
    XInventoryFolder root = GetRootXFolder(userID);
    if (root == null)
        return null;

    XInventoryFolder[] rows = m_Database.GetFolders(
        new string[] { "agentID", "type", "parentFolderID" },
        new string[] { userID.ToString(), ((int)AssetType.CurrentOutfitFolder).ToString(), root.folderID.ToString() });

    return rows.Length > 0 ? rows[0] : null;
}
// Finds the user's Suitcase folder. Looks it up by its dedicated folder
// type first; failing that, migrates a legacy suitcase (identified by name
// with parent UUID.Zero) under the real root and re-types it. Null when
// no suitcase exists.
private XInventoryFolder GetSuitcaseXFolder(UUID principalID)
{
    // Warp! Root folder for travelers
    XInventoryFolder[] folders = m_Database.GetFolders(
        new string[] { "agentID", "type" },
        new string[] { principalID.ToString(), InventoryItemBase.SUITCASE_FOLDER_TYPE.ToString() }); // This is a special folder type...

    if (folders != null && folders.Length > 0)
        return folders[0];

    // check to see if we have the old Suitcase folder
    folders = m_Database.GetFolders(
        new string[] { "agentID", "folderName", "parentFolderID" },
        new string[] { principalID.ToString(), InventoryItemBase.SUITCASE_FOLDER_NAME, UUID.Zero.ToString() });
    if (folders != null && folders.Length > 0)
    {
        // Move it to under the root folder
        XInventoryFolder root = GetRootXFolder(principalID);
        // BUGFIX: a missing root folder previously caused a
        // NullReferenceException on root.folderID below.
        if (root == null)
        {
            m_log.WarnFormat("[HG SUITCASE INVENTORY SERVICE]: Unable to retrieve root folder for user {0} while migrating old Suitcase folder", principalID);
            return null;
        }
        folders[0].parentFolderID = root.folderID;
        folders[0].type = InventoryItemBase.SUITCASE_FOLDER_TYPE;
        m_Database.StoreFolder(folders[0]);
        return folders[0];
    }

    return null;
}
// Re-types the suitcase in-memory so that, outside this service, it looks
// like an ordinary folder (the DB record keeps the special type).
private void SetAsNormalFolder(XInventoryFolder suitcase)
{
    suitcase.type = InventoryItemBase.SUITCASE_FOLDER_FAKE_TYPE;
}
// Returns (and caches per user for five minutes) the subtree of folders
// below 'folder'. The cached list is also extended by AddFolder so new
// folders are visible before the cache entry expires.
private List<XInventoryFolder> GetFolderTree(UUID principalID, UUID folder)
{
    List<XInventoryFolder> t = null;
    if (m_SuitcaseTrees.TryGetValue(principalID, out t))
        return t;

    // Get the tree of the suitcase folder
    t = GetFolderTreeRecursive(folder);
    m_SuitcaseTrees.AddOrUpdate(principalID, t, 5*60); // 5minutes
    return t;
}
// Depth-first collection of every descendant folder of 'root'; the root
// itself is not included. Empty list when 'root' has no children.
private List<XInventoryFolder> GetFolderTreeRecursive(UUID root)
{
    List<XInventoryFolder> tree = new List<XInventoryFolder>();

    XInventoryFolder[] children = m_Database.GetFolders(
        new string[] { "parentFolderID" },
        new string[] { root.ToString() });

    if (children == null || children.Length == 0)
        return tree; // empty tree

    foreach (XInventoryFolder child in children)
    {
        tree.Add(child);
        tree.AddRange(GetFolderTreeRecursive(child.folderID));
    }

    return tree;
}
/// <summary>
/// Return true if the folderID is the Suitcase folder itself, one of its
/// descendants, or the user's Current Outfit folder (which lives outside
/// the suitcase but must remain reachable for appearance handling).
/// </summary>
/// <param name="principalID">Owner of the suitcase</param>
/// <param name="folderID">Folder to test</param>
/// <returns>true when the folder is reachable by this visitor</returns>
private bool IsWithinSuitcaseTree(UUID principalID, UUID folderID)
{
    XInventoryFolder suitcase = GetSuitcaseXFolder(principalID);
    if (suitcase == null)
    {
        m_log.WarnFormat("[HG SUITCASE INVENTORY SERVICE]: User {0} does not have a Suitcase folder", principalID);
        return false;
    }

    List<XInventoryFolder> tree = new List<XInventoryFolder>();
    tree.Add(suitcase); // Warp! the tree is the real root folder plus the children of the suitcase folder
    tree.AddRange(GetFolderTree(principalID, suitcase.folderID));

    // Also add the Current Outfit folder to the list of available folders.
    // BUGFIX: that folder may not exist; adding null previously caused a
    // NullReferenceException in the Find predicate below.
    XInventoryFolder currentOutfit = GetCurrentOutfitXFolder(principalID);
    if (currentOutfit != null)
        tree.Add(currentOutfit);

    return tree.Find(fl => fl.folderID == folderID) != null;
}
#endregion
#region Avatar Appearance
// Serves the avatar's appearance from the cache when possible; otherwise
// fetches it from the avatar service and caches it for five minutes.
private AvatarAppearance GetAppearance(UUID principalID)
{
    AvatarAppearance cached;
    if (m_Appearances.TryGetValue(principalID, out cached))
        return cached;

    AvatarAppearance fresh = m_AvatarService.GetAppearance(principalID);
    m_Appearances.AddOrUpdate(principalID, fresh, 5 * 60); // 5minutes
    return fresh;
}
// True when the item is part of the avatar's current appearance: either a
// worn wearable (body part / clothing) or an attachment.
private bool IsPartOfAppearance(UUID principalID, UUID itemID)
{
    AvatarAppearance a = GetAppearance(principalID);

    if (a == null)
        return false;

    // Check wearables (body parts and clothes)
    for (int i = 0; i < a.Wearables.Length; i++)
    {
        for (int j = 0; j < a.Wearables[i].Count; j++)
        {
            if (a.Wearables[i][j].ItemID == itemID)
            {
                //m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: item {0} is a wearable", itemID);
                return true;
            }
        }
    }

    // Check attachments
    if (a.GetAttachmentForItem(itemID) != null)
    {
        //m_log.DebugFormat("[HG SUITCASE INVENTORY SERVICE]: item {0} is an attachment", itemID);
        return true;
    }

    return false;
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Reflection;
using System.Diagnostics;
using System.Globalization;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Reflection.Runtime.General;
using System.Reflection.Runtime.TypeInfos;
using System.Reflection.Runtime.CustomAttributes;
using System.Reflection.Runtime.BindingFlagSupport;
using Internal.Reflection.Core;
using Internal.Reflection.Core.Execution;
using Internal.Reflection.Tracing;
namespace System.Reflection.Runtime.FieldInfos
{
//
// The Runtime's implementation of fields.
//
[DebuggerDisplay("{_debugName}")]
internal abstract partial class RuntimeFieldInfo : FieldInfo, ITraceableTypeMember
{
//
// contextType - the type that supplies the type context (i.e. substitutions for generic parameters.) Though you
// get your raw information from "definingType", you report "contextType" as your DeclaringType property.
//
// For example:
//
// typeof(Foo<>).GetTypeInfo().DeclaredMembers
//
// The definingType and contextType are both Foo<>
//
// typeof(Foo<int,String>).GetTypeInfo().DeclaredMembers
//
// The definingType is "Foo<,>"
// The contextType is "Foo<int,String>"
//
// We don't report any DeclaredMembers for arrays or generic parameters so those don't apply.
//
// contextTypeInfo: supplies generic-parameter substitutions and is what
//   DeclaringType reports (see the comment block above).
// reflectedType: the type through which this FieldInfo was obtained.
protected RuntimeFieldInfo(RuntimeTypeInfo contextTypeInfo, RuntimeTypeInfo reflectedType)
{
    _contextTypeInfo = contextTypeInfo;
    _reflectedType = reflectedType;
}
// All custom attributes on this field: the real metadata attributes plus a
// synthesized FieldOffsetAttribute when the declaring type uses explicit
// layout (a "pseudo custom attribute" that is not literally in metadata).
public sealed override IEnumerable<CustomAttributeData> CustomAttributes
{
    get
    {
#if ENABLE_REFLECTION_TRACE
        if (ReflectionTrace.Enabled)
            ReflectionTrace.FieldInfo_CustomAttributes(this);
#endif
        foreach (CustomAttributeData cad in TrueCustomAttributes)
            yield return cad;

        if (DeclaringType.IsExplicitLayout)
        {
            // Surface the explicit field offset as [FieldOffset(n)].
            int offset = ExplicitLayoutFieldOffsetData;
            CustomAttributeTypedArgument offsetArgument = new CustomAttributeTypedArgument(typeof(int), offset);
            yield return new RuntimePseudoCustomAttributeData(typeof(FieldOffsetAttribute), new CustomAttributeTypedArgument[] { offsetArgument }, null);
        }
    }
}
// Reports the context type (generic instantiation), not the raw defining
// type — see the constructor comment for the distinction.
public sealed override Type DeclaringType
{
    get
    {
#if ENABLE_REFLECTION_TRACE
        if (ReflectionTrace.Enabled)
            ReflectionTrace.FieldInfo_DeclaringType(this);
#endif
        return _contextTypeInfo;
    }
}
// Lazily computes and caches the field's type from metadata. The race on
// _lazyFieldType is presumably benign (both threads would compute the
// same value) — FieldRuntimeType is assumed deterministic; confirm.
public sealed override Type FieldType
{
    get
    {
        Type fieldType = _lazyFieldType;
        if (fieldType == null)
        {
            _lazyFieldType = fieldType = this.FieldRuntimeType;
        }
        return fieldType;
    }
}
// Custom-modifier queries are metadata-format specific; supplied by subclasses.
public abstract override Type[] GetOptionalCustomModifiers();
public abstract override Type[] GetRequiredCustomModifiers();
// Reads the field's current value from 'obj' via the lazily-created
// FieldAccessor (which throws for non-invokable fields).
public sealed override Object GetValue(Object obj)
{
#if ENABLE_REFLECTION_TRACE
    if (ReflectionTrace.Enabled)
        ReflectionTrace.FieldInfo_GetValue(this, obj);
#endif
    FieldAccessor fieldAccessor = this.FieldAccessor;
    return fieldAccessor.GetField(obj);
}
// Reads the field from the location referenced by a typed reference.
// A null typed reference is rejected up front.
public sealed override object GetValueDirect(TypedReference obj)
{
    if (obj.IsNull)
        throw new ArgumentException(SR.Arg_TypedReference_Null);

    return this.FieldAccessor.GetFieldDirect(obj);
}
// True when 'other' refers to the same underlying metadata definition.
public abstract override bool HasSameMetadataDefinitionAs(MemberInfo other);
// The module that defines this field — taken from the defining type,
// not the (possibly instantiated) context type.
public sealed override Module Module
{
    get { return DefiningType.Module; }
}

// The type through which this FieldInfo was obtained.
public sealed override Type ReflectedType
{
    get { return _reflectedType; }
}
// Writes 'value' into the field on 'obj', routing the binder/culture pair
// through a BinderBundle before handing off to the FieldAccessor.
public sealed override void SetValue(object obj, object value, BindingFlags invokeAttr, Binder binder, CultureInfo culture)
{
#if ENABLE_REFLECTION_TRACE
    if (ReflectionTrace.Enabled)
        ReflectionTrace.FieldInfo_SetValue(this, obj, value);
#endif
    FieldAccessor fieldAccessor = this.FieldAccessor;
    BinderBundle binderBundle = binder.ToBinderBundle(invokeAttr, culture);
    fieldAccessor.SetField(obj, value, binderBundle);
}
// Writes the field at the location referenced by a typed reference,
// bypassing any binder/culture conversion. Null references are rejected.
public sealed override void SetValueDirect(TypedReference obj, object value)
{
    if (obj.IsNull)
        throw new ArgumentException(SR.Arg_TypedReference_Null);

    this.FieldAccessor.SetFieldDirect(obj, value);
}
// ITraceableTypeMember: the context (instantiated) type containing this field.
Type ITraceableTypeMember.ContainingType
{
    get { return _contextTypeInfo; }
}
/// <summary>
/// Override to provide the metadata based name of a field. (Different from the Name
/// property in that it does not go into the reflection trace logic.)
/// </summary>
protected abstract string MetadataName { get; }

// Public Name funnels through MetadataName, adding only reflection tracing.
public sealed override String Name
{
    get
    {
#if ENABLE_REFLECTION_TRACE
        if (ReflectionTrace.Enabled)
            ReflectionTrace.FieldInfo_Name(this);
#endif
        return MetadataName;
    }
}
// ITraceableTypeMember: raw metadata name, with no reflection-trace side effects.
String ITraceableTypeMember.MemberName
{
    get { return MetadataName; }
}
// Returns the raw (unconverted) metadata default value of a literal field.
// Throws InvalidOperationException for non-literal fields and
// BadImageFormatException when a literal field carries no default.
public sealed override object GetRawConstantValue()
{
    if (!IsLiteral)
        throw new InvalidOperationException();

    object defaultValue;
    if (GetDefaultValueIfAvailable(raw: true, defaultValue: out defaultValue))
        return defaultValue;

    // Field marked literal but has no default value.
    throw new BadImageFormatException();
}
// Types that derive from RuntimeFieldInfo must implement the following public surface area members
public abstract override FieldAttributes Attributes { get; }
public abstract override int MetadataToken { get; }
public abstract override String ToString();
public abstract override bool Equals(Object obj);
public abstract override int GetHashCode();
public abstract override RuntimeFieldHandle FieldHandle { get; }

/// <summary>
/// Get the default value if exists for a field by parsing metadata. Return false if there is no default value.
/// </summary>
protected abstract bool GetDefaultValueIfAvailable(bool raw, out object defaultValue);

/// <summary>
/// Return a FieldAccessor object for accessing the value of a non-literal field. May rely on metadata to create correct accessor.
/// A null result is treated as "field not invokable" by the FieldAccessor property.
/// </summary>
protected abstract FieldAccessor TryGetFieldAccessor();
// Lazily creates (and caches in _lazyFieldAccessor) the accessor used by
// Get/SetValue. Literal fields get a constant accessor wrapping the
// metadata default; other fields ask the subclass via TryGetFieldAccessor,
// and a null result is surfaced as a non-invokability exception.
private FieldAccessor FieldAccessor
{
    get
    {
        FieldAccessor fieldAccessor = _lazyFieldAccessor;
        if (fieldAccessor == null)
        {
            if (this.IsLiteral)
            {
                // Legacy: ECMA335 does not require that the metadata literal match the type of the field that declares it.
                // For desktop compat, we return the metadata literal as is and do not attempt to convert or validate against the Field type.

                Object defaultValue;
                if (!GetDefaultValueIfAvailable(raw: false, defaultValue: out defaultValue))
                {
                    throw new BadImageFormatException(); // Field marked literal but has no default value.
                }

                _lazyFieldAccessor = fieldAccessor = ReflectionCoreExecution.ExecutionEnvironment.CreateLiteralFieldAccessor(defaultValue, FieldType.TypeHandle);
            }
            else
            {
                _lazyFieldAccessor = fieldAccessor = TryGetFieldAccessor();
                if (fieldAccessor == null)
                    throw ReflectionCoreExecution.ExecutionDomain.CreateNonInvokabilityException(this);
            }
        }
        return fieldAccessor;
    }
}
/// <summary>
/// Return the type of the field by parsing metadata.
/// </summary>
protected abstract RuntimeTypeInfo FieldRuntimeType { get; }

// Populates _debugName (shown via [DebuggerDisplay]) when developer
// experience mode or a DEBUG build is active; returns 'this' so it can be
// chained at construction time.
protected RuntimeFieldInfo WithDebugName()
{
    bool populateDebugNames = DeveloperExperienceState.DeveloperExperienceModeEnabled;
#if DEBUG
    populateDebugNames = true;
#endif
    if (!populateDebugNames)
        return this;

    if (_debugName == null)
    {
        _debugName = "Constructing..."; // Protect against any inadvertent reentrancy.
        _debugName = ((ITraceableTypeMember)this).MemberName;
    }
    return this;
}
/// <summary>
/// Return the DefiningTypeInfo as a RuntimeTypeInfo (instead of as a format specific type info)
/// </summary>
protected abstract RuntimeTypeInfo DefiningType { get; }

// Custom attributes literally present in metadata; pseudo-attributes such
// as FieldOffset are synthesized separately in CustomAttributes.
protected abstract IEnumerable<CustomAttributeData> TrueCustomAttributes { get; }

// Field offset used to synthesize FieldOffsetAttribute for explicit-layout types.
protected abstract int ExplicitLayoutFieldOffsetData { get; }

/// <summary>
/// Returns the field offset (asserts and throws if not an instance field). Does not include the size of the object header.
/// </summary>
internal int Offset => FieldAccessor.Offset;

// Generic-context type reported as DeclaringType.
protected readonly RuntimeTypeInfo _contextTypeInfo;
// Type through which this FieldInfo was obtained (ReflectedType).
protected readonly RuntimeTypeInfo _reflectedType;

// Lazily-created state; volatile for safe cross-thread publication.
private volatile FieldAccessor _lazyFieldAccessor = null;
private volatile Type _lazyFieldType = null;

// Debug-only display name; see WithDebugName.
private String _debugName;
}
}
| |
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.IO;
using FluentAssertions;
using Microsoft.DotNet.TestFramework;
using Microsoft.DotNet.Tools.Test.Utilities;
using Xunit;
using LocalizableStrings = Microsoft.DotNet.Tools.Run.LocalizableStrings;
namespace Microsoft.DotNet.Cli.Run.Tests
{
public class GivenDotnetRunBuildsCsproj : TestBase
{
[Fact]
public void ItCanRunAMSBuildProject()
{
    // Arrange: copy the sample app, then restore and build it explicitly.
    var testAppName = "MSBuildTestApp";
    var testInstance = TestAssets.Get(testAppName)
        .CreateInstance()
        .WithSourceFiles();

    var testProjectDirectory = testInstance.Root.FullName;

    new RestoreCommand()
        .WithWorkingDirectory(testProjectDirectory)
        .Execute("/p:SkipInvalidConfigurations=true")
        .Should().Pass();

    new BuildCommand()
        .WithWorkingDirectory(testProjectDirectory)
        .Execute()
        .Should().Pass();

    // Act + Assert: `dotnet run` succeeds and prints the app's output.
    new RunCommand()
        .WithWorkingDirectory(testProjectDirectory)
        .ExecuteWithCapturedOutput()
        .Should().Pass()
        .And.HaveStdOutContaining("Hello World!");
}
[Fact]
public void ItImplicitlyRestoresAProjectWhenRunning()
{
    // No explicit restore/build: `dotnet run` must restore implicitly.
    var testAppName = "MSBuildTestApp";
    var testInstance = TestAssets.Get(testAppName)
        .CreateInstance()
        .WithSourceFiles();

    var testProjectDirectory = testInstance.Root.FullName;

    new RunCommand()
        .WithWorkingDirectory(testProjectDirectory)
        .ExecuteWithCapturedOutput()
        .Should().Pass()
        .And.HaveStdOutContaining("Hello World!");
}
[Fact]
public void ItCanRunAMultiTFMProjectWithImplicitRestore()
{
var testInstance = TestAssets.Get(
TestAssetKinds.DesktopTestProjects,
"NETFrameworkReferenceNETStandard20")
.CreateInstance()
.WithSourceFiles();
string projectDirectory = Path.Combine(testInstance.Root.FullName, "MultiTFMTestApp");
new RunCommand()
.WithWorkingDirectory(projectDirectory)
.ExecuteWithCapturedOutput("--framework netcoreapp2.0")
.Should().Pass()
.And.HaveStdOutContaining("This string came from the test library!");
}
[Fact]
public void ItDoesNotImplicitlyRestoreAProjectWhenRunningWithTheNoRestoreOption()
{
var testAppName = "MSBuildTestApp";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("--no-restore")
.Should().Fail()
.And.HaveStdOutContaining("project.assets.json");
}
[Fact]
public void ItBuildsTheProjectBeforeRunning()
{
var testAppName = "MSBuildTestApp";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput()
.Should().Pass()
.And.HaveStdOutContaining("Hello World!");
}
[Fact]
public void ItCanRunAMSBuildProjectWhenSpecifyingAFramework()
{
var testAppName = "MSBuildTestApp";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("--framework netcoreapp2.0")
.Should().Pass()
.And.HaveStdOutContaining("Hello World!");
}
[Fact]
public void ItRunsPortableAppsFromADifferentPathAfterBuilding()
{
var testInstance = TestAssets.Get("MSBuildTestApp")
.CreateInstance()
.WithSourceFiles()
.WithRestoreFiles();
new BuildCommand()
.WithWorkingDirectory(testInstance.Root)
.Execute()
.Should().Pass();
new RunCommand()
.WithWorkingDirectory(testInstance.Root)
.ExecuteWithCapturedOutput($"--no-build")
.Should().Pass()
.And.HaveStdOutContaining("Hello World!");
}
[Fact]
public void ItRunsPortableAppsFromADifferentPathWithoutBuilding()
{
var testAppName = "MSBuildTestApp";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles()
.WithRestoreFiles();
var projectFile = testInstance.Root.GetFile(testAppName + ".csproj");
new RunCommand()
.WithWorkingDirectory(testInstance.Root.Parent)
.ExecuteWithCapturedOutput($"--project {projectFile.FullName}")
.Should().Pass()
.And.HaveStdOutContaining("Hello World!");
}
[Fact]
public void ItRunsPortableAppsFromADifferentPathSpecifyingOnlyTheDirectoryWithoutBuilding()
{
var testAppName = "MSBuildTestApp";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles()
.WithRestoreFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RunCommand()
.WithWorkingDirectory(testInstance.Root.Parent)
.ExecuteWithCapturedOutput($"--project {testProjectDirectory}")
.Should().Pass()
.And.HaveStdOutContaining("Hello World!");
}
[Fact]
public void ItRunsAppWhenRestoringToSpecificPackageDirectory()
{
var rootPath = TestAssets.CreateTestDirectory().FullName;
string dir = "pkgs";
string args = $"--packages {dir}";
string newArgs = $"console -o \"{rootPath}\" --no-restore";
new NewCommandShim()
.WithWorkingDirectory(rootPath)
.Execute(newArgs)
.Should()
.Pass();
new RestoreCommand()
.WithWorkingDirectory(rootPath)
.Execute(args)
.Should()
.Pass();
new RunCommand()
.WithWorkingDirectory(rootPath)
.ExecuteWithCapturedOutput("--no-restore")
.Should().Pass()
.And.HaveStdOutContaining("Hello World");
}
[Fact]
public void ItReportsAGoodErrorWhenProjectHasMultipleFrameworks()
{
var testAppName = "MSBuildAppWithMultipleFrameworks";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles()
.WithRestoreFiles();
// use --no-build so this test can run on all platforms.
// the test app targets net451, which can't be built on non-Windows
new RunCommand()
.WithWorkingDirectory(testInstance.Root)
.ExecuteWithCapturedOutput("--no-build")
.Should().Fail()
.And.HaveStdErrContaining("--framework");
}
[Fact]
public void ItCanPassArgumentsToSubjectAppByDoubleDash()
{
const string testAppName = "MSBuildTestApp";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles()
.WithRestoreFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("-- foo bar baz")
.Should()
.Pass()
.And.HaveStdOutContaining("echo args:foo;bar;baz");
}
[Fact]
public void ItGivesAnErrorWhenAttemptingToUseALaunchProfileThatDoesNotExistWhenThereIsNoLaunchSettingsFile()
{
var testAppName = "MSBuildTestApp";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("--launch-profile test")
.Should().Pass()
.And.HaveStdOutContaining("Hello World!")
.And.HaveStdErrContaining(LocalizableStrings.RunCommandExceptionCouldNotLocateALaunchSettingsFile);
}
[Fact]
public void ItUsesLaunchProfileOfTheSpecifiedName()
{
var testAppName = "AppWithLaunchSettings";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
var cmd = new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("--launch-profile Second");
cmd.Should().Pass()
.And.HaveStdOutContaining("Second");
cmd.StdErr.Should().BeEmpty();
}
[Fact]
public void ItDefaultsToTheFirstUsableLaunchProfile()
{
var testAppName = "AppWithLaunchSettings";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
var cmd = new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput();
cmd.Should().Pass()
.And.HaveStdOutContaining("First");
cmd.StdErr.Should().BeEmpty();
}
[Fact]
public void ItGivesAnErrorWhenTheLaunchProfileNotFound()
{
var testAppName = "AppWithLaunchSettings";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("--launch-profile Third")
.Should().Pass()
.And.HaveStdOutContaining("(NO MESSAGE)")
.And.HaveStdErrContaining(string.Format(LocalizableStrings.RunCommandExceptionCouldNotApplyLaunchSettings, "Third", "").Trim());
}
[Fact]
public void ItGivesAnErrorWhenTheLaunchProfileCanNotBeHandled()
{
var testAppName = "AppWithLaunchSettings";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("--launch-profile \"IIS Express\"")
.Should().Pass()
.And.HaveStdOutContaining("(NO MESSAGE)")
.And.HaveStdErrContaining(string.Format(LocalizableStrings.RunCommandExceptionCouldNotApplyLaunchSettings, "IIS Express", "").Trim());
}
[Fact]
public void ItSkipsLaunchProfilesWhenTheSwitchIsSupplied()
{
var testAppName = "AppWithLaunchSettings";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
var cmd = new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("--no-launch-profile");
cmd.Should().Pass()
.And.HaveStdOutContaining("(NO MESSAGE)");
cmd.StdErr.Should().BeEmpty();
}
[Fact]
public void ItSkipsLaunchProfilesWhenTheSwitchIsSuppliedWithoutErrorWhenThereAreNoLaunchSettings()
{
var testAppName = "MSBuildTestApp";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
var cmd = new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput("--no-launch-profile");
cmd.Should().Pass()
.And.HaveStdOutContaining("Hello World!");
cmd.StdErr.Should().BeEmpty();
}
[Fact]
public void ItSkipsLaunchProfilesWhenThereIsNoUsableDefault()
{
var testAppName = "AppWithLaunchSettingsNoDefault";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
var cmd = new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput();
cmd.Should().Pass()
.And.HaveStdOutContaining("(NO MESSAGE)")
.And.HaveStdErrContaining(string.Format(LocalizableStrings.RunCommandExceptionCouldNotApplyLaunchSettings, LocalizableStrings.DefaultLaunchProfileDisplayName, "").Trim());
}
[Fact]
public void ItPrintsAnErrorWhenLaunchSettingsAreCorrupted()
{
var testAppName = "AppWithCorruptedLaunchSettings";
var testInstance = TestAssets.Get(testAppName)
.CreateInstance()
.WithSourceFiles();
var testProjectDirectory = testInstance.Root.FullName;
new RestoreCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute("/p:SkipInvalidConfigurations=true")
.Should().Pass();
new BuildCommand()
.WithWorkingDirectory(testProjectDirectory)
.Execute()
.Should().Pass();
var cmd = new RunCommand()
.WithWorkingDirectory(testProjectDirectory)
.ExecuteWithCapturedOutput();
cmd.Should().Pass()
.And.HaveStdOutContaining("(NO MESSAGE)")
.And.HaveStdErrContaining(string.Format(LocalizableStrings.RunCommandExceptionCouldNotApplyLaunchSettings, LocalizableStrings.DefaultLaunchProfileDisplayName, "").Trim());
}
}
}
| |
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
namespace Infoplus.Model
{
/// <summary>
///
/// </summary>
[DataContract]
public partial class BillingCode : IEquatable<BillingCode>
{
    /// <summary>
    /// Initializes a new instance of the <see cref="BillingCode" /> class.
    /// </summary>
    /// <param name="Quantity">Quantity (required).</param>
    /// <param name="UserId">UserId (required).</param>
    /// <param name="LobId">LobId (required).</param>
    /// <param name="BillingCodeTypeId">BillingCodeTypeId (required).</param>
    /// <param name="Note">Note.</param>
    /// <param name="CustomFields">CustomFields.</param>
    /// <exception cref="InvalidDataException">Thrown when any required argument is null.</exception>
    public BillingCode(int? Quantity = null, int? UserId = null, int? LobId = null, int? BillingCodeTypeId = null, string Note = null, Dictionary<string, Object> CustomFields = null)
    {
        // Guard clauses: these four properties are required by the API contract,
        // so null values are rejected up front rather than producing an invalid object.
        if (Quantity == null)
        {
            throw new InvalidDataException("Quantity is a required property for BillingCode and cannot be null");
        }
        if (UserId == null)
        {
            throw new InvalidDataException("UserId is a required property for BillingCode and cannot be null");
        }
        if (LobId == null)
        {
            throw new InvalidDataException("LobId is a required property for BillingCode and cannot be null");
        }
        if (BillingCodeTypeId == null)
        {
            throw new InvalidDataException("BillingCodeTypeId is a required property for BillingCode and cannot be null");
        }

        this.Quantity = Quantity;
        this.UserId = UserId;
        this.LobId = LobId;
        this.BillingCodeTypeId = BillingCodeTypeId;
        this.Note = Note;
        this.CustomFields = CustomFields;
    }

    /// <summary>
    /// Gets or Sets Id (server-assigned; read-only for clients).
    /// </summary>
    [DataMember(Name="id", EmitDefaultValue=false)]
    public int? Id { get; private set; }

    /// <summary>
    /// Gets or Sets CreateDate (server-assigned; read-only for clients).
    /// </summary>
    [DataMember(Name="createDate", EmitDefaultValue=false)]
    public DateTime? CreateDate { get; private set; }

    /// <summary>
    /// Gets or Sets ModifyDate (server-assigned; read-only for clients).
    /// </summary>
    [DataMember(Name="modifyDate", EmitDefaultValue=false)]
    public DateTime? ModifyDate { get; private set; }

    /// <summary>
    /// Gets or Sets Quantity
    /// </summary>
    [DataMember(Name="quantity", EmitDefaultValue=false)]
    public int? Quantity { get; set; }

    /// <summary>
    /// Gets or Sets Date (server-assigned; read-only for clients).
    /// </summary>
    [DataMember(Name="date", EmitDefaultValue=false)]
    public DateTime? Date { get; private set; }

    /// <summary>
    /// Gets or Sets UserId
    /// </summary>
    [DataMember(Name="userId", EmitDefaultValue=false)]
    public int? UserId { get; set; }

    /// <summary>
    /// Gets or Sets LobId
    /// </summary>
    [DataMember(Name="lobId", EmitDefaultValue=false)]
    public int? LobId { get; set; }

    /// <summary>
    /// Gets or Sets BillingCodeTypeId
    /// </summary>
    [DataMember(Name="billingCodeTypeId", EmitDefaultValue=false)]
    public int? BillingCodeTypeId { get; set; }

    /// <summary>
    /// Gets or Sets Note
    /// </summary>
    [DataMember(Name="note", EmitDefaultValue=false)]
    public string Note { get; set; }

    /// <summary>
    /// Gets or Sets CustomFields
    /// </summary>
    [DataMember(Name="customFields", EmitDefaultValue=false)]
    public Dictionary<string, Object> CustomFields { get; set; }

    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var sb = new StringBuilder();
        sb.Append("class BillingCode {\n");
        sb.Append("  Id: ").Append(Id).Append("\n");
        sb.Append("  CreateDate: ").Append(CreateDate).Append("\n");
        sb.Append("  ModifyDate: ").Append(ModifyDate).Append("\n");
        sb.Append("  Quantity: ").Append(Quantity).Append("\n");
        sb.Append("  Date: ").Append(Date).Append("\n");
        sb.Append("  UserId: ").Append(UserId).Append("\n");
        sb.Append("  LobId: ").Append(LobId).Append("\n");
        sb.Append("  BillingCodeTypeId: ").Append(BillingCodeTypeId).Append("\n");
        sb.Append("  Note: ").Append(Note).Append("\n");
        sb.Append("  CustomFields: ").Append(CustomFields).Append("\n");
        sb.Append("}\n");
        return sb.ToString();
    }

    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }

    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        // credit: http://stackoverflow.com/a/10454552/677735
        return this.Equals(obj as BillingCode);
    }

    /// <summary>
    /// Returns true if BillingCode instances are equal
    /// </summary>
    /// <param name="other">Instance of BillingCode to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(BillingCode other)
    {
        // credit: http://stackoverflow.com/a/10454552/677735
        if (other == null)
            return false;

        // object.Equals handles the "both null" and "one null" cases for the
        // nullable scalar properties, matching the original per-property checks.
        return
            object.Equals(this.Id, other.Id) &&
            object.Equals(this.CreateDate, other.CreateDate) &&
            object.Equals(this.ModifyDate, other.ModifyDate) &&
            object.Equals(this.Quantity, other.Quantity) &&
            object.Equals(this.Date, other.Date) &&
            object.Equals(this.UserId, other.UserId) &&
            object.Equals(this.LobId, other.LobId) &&
            object.Equals(this.BillingCodeTypeId, other.BillingCodeTypeId) &&
            object.Equals(this.Note, other.Note) &&
            (
                this.CustomFields == other.CustomFields ||
                // BUGFIX: the original called SequenceEqual without checking
                // other.CustomFields for null, which threw ArgumentNullException
                // when only one side had custom fields. Note SequenceEqual on a
                // Dictionary is order-sensitive (generated-code semantics kept).
                (this.CustomFields != null && other.CustomFields != null &&
                 this.CustomFields.SequenceEqual(other.CustomFields))
            );
    }

    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        // credit: http://stackoverflow.com/a/263416/677735
        unchecked // Overflow is fine, just wrap
        {
            int hash = 41;
            // Suitable nullity checks etc, of course :)
            if (this.Id != null)
                hash = hash * 59 + this.Id.GetHashCode();
            if (this.CreateDate != null)
                hash = hash * 59 + this.CreateDate.GetHashCode();
            if (this.ModifyDate != null)
                hash = hash * 59 + this.ModifyDate.GetHashCode();
            if (this.Quantity != null)
                hash = hash * 59 + this.Quantity.GetHashCode();
            if (this.Date != null)
                hash = hash * 59 + this.Date.GetHashCode();
            if (this.UserId != null)
                hash = hash * 59 + this.UserId.GetHashCode();
            if (this.LobId != null)
                hash = hash * 59 + this.LobId.GetHashCode();
            if (this.BillingCodeTypeId != null)
                hash = hash * 59 + this.BillingCodeTypeId.GetHashCode();
            if (this.Note != null)
                hash = hash * 59 + this.Note.GetHashCode();
            if (this.CustomFields != null)
                hash = hash * 59 + this.CustomFields.GetHashCode();
            return hash;
        }
    }
}
}
| |
using UnityEngine;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
[CanEditMultipleObjects]
[CustomEditor(typeof(SgtTerrain))]
// Custom inspector for SgtTerrain. Immediate-mode GUI: the DrawDefault/BeginError/EndError
// call order below IS the inspector layout, so statement order matters.
public class SgtTerrain_Editor : SgtEditor<SgtTerrain>
{
// Draws all terrain fields, flags invalid values via BeginError/EndError, and at the end
// pushes the relevant Update* calls to every dirtied target.
protected override void OnInspector()
{
// Each flag is set (by ref) when the corresponding field changes, and drives
// exactly one of the Update* calls at the bottom of this method.
var updateMeshes = false;
var updateColliders = false;
var updateMaterials = false;
var updateSplits = false;
DrawDefault("Material", ref updateMaterials);
DrawDefault("Atmosphere", ref updateMaterials);
// Resolution must be > 1 and odd (even values are flagged as errors here).
BeginError(Any(t => t.Resolution <= 1 || (t.Resolution % 2) == 0));
DrawDefault("Resolution", ref updateMeshes);
EndError();
BeginError(Any(t => t.Radius <= 0.0f));
DrawDefault("Radius", ref updateMeshes);
EndError();
BeginError(Any(t => t.Height <= 0.0f));
DrawDefault("Height", ref updateMeshes);
EndError();
Separator();
DrawDefault("DefaultDisplacement", ref updateMeshes);
DrawDefault("DefaultColor", ref updateMeshes);
Separator();
// Budget/DelayMin/DelayMax only affect patch-update scheduling, so they dirty nothing.
BeginError(Any(t => t.Budget <= 0.0f || t.Budget >= 1.0f));
DrawDefault("Budget");
EndError();
BeginError(Any(t => t.DelayMin < 0.0f || t.DelayMin >= 10.0f || t.DelayMin > t.DelayMax));
DrawDefault("DelayMin");
EndError();
BeginError(Any(t => t.DelayMax <= 0.0f || t.DelayMax >= 10.0f || t.DelayMin > t.DelayMax));
DrawDefault("DelayMax");
EndError();
BeginError(Any(t => t.MaxSplitsInEditMode < 0 || t.MaxSplitsInEditMode > t.SplitDistances.Count));
DrawDefault("MaxSplitsInEditMode", ref updateSplits);
EndError();
DrawDefault("MaxColliderDepth", ref updateColliders);
DrawDefault("SplitDistances", ref updateSplits);
if (All(DistancesInOrder) == false)
{
EditorGUILayout.HelpBox("Split distances should start large and get smaller", MessageType.Warning);
}
if (Button("Add Split Distance") == true)
{
Each(AddDistance); updateSplits = true;
}
RequireObserver();
// Apply the queued updates to every selected terrain that was dirtied.
if (updateMeshes == true) DirtyEach(t => t.UpdateMeshes ());
if (updateMaterials == true) DirtyEach(t => t.UpdateMaterials());
if (updateColliders == true) DirtyEach(t => t.UpdateColliders());
if (updateSplits == true) DirtyEach(t => t.UpdateSplits ());
}
// Returns true when the terrain's split distances are strictly decreasing
// (each split threshold must be closer than the previous one).
private bool DistancesInOrder(SgtTerrain terrain)
{
var distances = terrain.SplitDistances;
var bestDistance = float.PositiveInfinity;
if (distances != null)
{
for (var i = 0; i < distances.Count; i++)
{
var distance = distances[i];
if (distance >= bestDistance)
{
return false;
}
bestDistance = distance;
}
}
return true;
}
// Appends a new split distance: half of the current smallest, or 5 for an empty/missing list.
private void AddDistance(SgtTerrain terrain)
{
var distances = terrain.SplitDistances;
var distance = 5.0f;
if (distances != null)
{
var count = distances.Count;
if (count > 0)
{
distance = distances[count - 1] * 0.5f;
}
}
else
{
distances = terrain.SplitDistances = new List<float>();
}
distances.Add(distance);
}
}
#endif
[ExecuteInEditMode]
[DisallowMultipleComponent]
[AddComponentMenu(SgtHelper.ComponentMenuPrefix + "Terrain")]
public partial class SgtTerrain : MonoBehaviour
{
// All active and enabled terrains in the scene
public static List<SgtTerrain> AllTerrains = new List<SgtTerrain>();
[Tooltip("The base material applied to patches")]
public Material Material;
[Tooltip("The atmosphere applied to the patches")]
public SgtAtmosphere Atmosphere;
[Tooltip("The amount of rows & columns on each patch edge")]
[Range(1, 16)]
public int Resolution = 5;
[Tooltip("The maximum time this terrain can spend in Update in seconds")]
public float Budget = 0.01f;
[Tooltip("The minimum delay between patch updating in seconds")]
public float DelayMin = 0.5f;
[Tooltip("The maximum delay between patch updating in seconds (unless the budget is exceeded)")]
public float DelayMax = 1.0f;
[Tooltip("The amount of times the main patches can be split in edit mode (0 = no splits)")]
#pragma warning disable 414
public int MaxSplitsInEditMode;
[Tooltip("The maximum depth of the patches that get colliders (0 = no colliders)")]
public int MaxColliderDepth;
[Tooltip("The inner radius of the terrain (may go under this value based on displacement settings)")]
public float Radius = 1.0f;
[Tooltip("The maximum height of the terrain (may go above this value based on displacement settings)")]
public float Height = 0.1f;
[Tooltip("The default displacement that gets passed to height modifiers (0 = Radius, 1 = Radius + Height)")]
public float DefaultDisplacement = 0.5f;
[Tooltip("The default vertex color that gets passed to height modifiers")]
public Color DefaultColor = Color.white;
[Tooltip("The local distance between the patch and observer (camera) for the patch to split")]
public List<float> SplitDistances;
[SerializeField]
private SgtPatch negativeX;
[SerializeField]
private SgtPatch negativeY;
[SerializeField]
private SgtPatch negativeZ;
[SerializeField]
private SgtPatch positiveX;
[SerializeField]
private SgtPatch positiveY;
[SerializeField]
private SgtPatch positiveZ;
[SerializeField]
[HideInInspector]
private bool startCalled;
[System.NonSerialized]
private bool updateMaterialsCalled;
[System.NonSerialized]
private bool updateMeshesCalled;
[System.NonSerialized]
private bool updateMaterialsDirty;
[System.NonSerialized]
private bool updateMeshesDirty;
[System.NonSerialized]
private int patchIndex;
[System.NonSerialized]
private float updateAge;
[System.NonSerialized]
private List<SgtPatch> patches = new List<SgtPatch>();
[System.NonSerialized]
private int patchSequence;
[System.NonSerialized]
private static List<Vector3> localObservers = new List<Vector3>();
private static int globalPatchSequence;
private static readonly List<float> defaultSplitDistances = new List<float>(new float[] { 10.0f, 5.0f, 2.5f, 1.25f, 0.75f });
// Flags the patch materials as stale; the rebuild itself runs in the next Update call.
public void UpdateMaterialsDirty()
{
updateMaterialsDirty = true;
}
[ContextMenu("Update Materials")]
// Rebuilds the materials of the atmosphere (if any) and of all six root patches,
// clearing the dirty flag set by UpdateMaterialsDirty.
public void UpdateMaterials()
{
updateMaterialsCalled = true;
updateMaterialsDirty = false;
if (Atmosphere != null)
{
Atmosphere.UpdateMaterials();
}
// Ensure the six cube-face patches exist before touching them.
ValidateMainPatches();
negativeX.UpdateMaterials();
negativeY.UpdateMaterials();
negativeZ.UpdateMaterials();
positiveX.UpdateMaterials();
positiveY.UpdateMaterials();
positiveZ.UpdateMaterials();
}
// Flags the patch meshes as stale; the rebuild itself runs in the next Update call.
public void UpdateMeshesDirty()
{
updateMeshesDirty = true;
}
[ContextMenu("Update Meshes")]
// Rebuilds the meshes of all six root patches, clearing the dirty flag
// set by UpdateMeshesDirty.
public void UpdateMeshes()
{
updateMeshesCalled = true;
updateMeshesDirty = false;
// Ensure the six cube-face patches exist before touching them.
ValidateMainPatches();
negativeX.UpdateMeshes();
negativeY.UpdateMeshes();
negativeZ.UpdateMeshes();
positiveX.UpdateMeshes();
positiveY.UpdateMeshes();
positiveZ.UpdateMeshes();
}
[ContextMenu("Update Colliders")]
// Rebuilds the colliders of all six root patches.
public void UpdateColliders()
{
ValidateMainPatches();
negativeX.UpdateColliders();
negativeY.UpdateColliders();
negativeZ.UpdateColliders();
positiveX.UpdateColliders();
positiveY.UpdateColliders();
positiveZ.UpdateColliders();
}
[ContextMenu("Update Splits")]
// Recomputes the observer positions in local space, then re-evaluates the
// split/merge state of all six root patches against them.
public void UpdateSplits()
{
UpdateLocalObservers();
ValidateMainPatches();
negativeX.UpdateSplits(localObservers);
negativeY.UpdateSplits(localObservers);
negativeZ.UpdateSplits(localObservers);
positiveX.UpdateSplits(localObservers);
positiveY.UpdateSplits(localObservers);
positiveZ.UpdateSplits(localObservers);
}
// This will return the local height of the terrain under the localPosition point.
// Height modifiers subscribed to OnCalculateDisplacement can adjust the displacement,
// which maps 0 -> Radius and 1 -> Radius + Height (per the DefaultDisplacement tooltip).
public float GetSurfaceHeightLocal(Vector3 localPosition)
{
var displacement = DefaultDisplacement;
if (OnCalculateDisplacement != null) OnCalculateDisplacement(localPosition, ref displacement);
return Radius + Height * displacement;
}
// Returns the world-space distance from the terrain's origin to the surface point
// that lies under the given world position.
public float GetSurfaceHeightWorld(Vector3 worldPosition)
{
var surfaceLocal = GetSurfacePositionLocal(transform.InverseTransformPoint(worldPosition));
var surfaceWorld = transform.TransformPoint(surfaceLocal);
return Vector3.Distance(transform.position, surfaceWorld);
}
// This will return the local surface position under the given local position:
// the input direction, scaled out to the terrain's surface height.
public Vector3 GetSurfacePositionLocal(Vector3 localPosition)
{
var direction = localPosition.normalized;
return direction * GetSurfaceHeightLocal(localPosition);
}
// This will return the world surface position under the given world position,
// optionally pushed outward along the surface direction by 'offset'.
public Vector3 GetSurfacePositionWorld(Vector3 worldPosition, float offset = 0.0f)
{
var localSurface = GetSurfacePositionLocal(transform.InverseTransformPoint(worldPosition));
var worldSurface = transform.TransformPoint(localSurface);
var worldOutward = transform.TransformDirection(localSurface).normalized;
return worldSurface + worldOutward * offset;
}
// This will return the local surface normal under the given local position,
// estimated from four surface samples offset by the right and forward vectors.
public Vector3 GetSurfaceNormalLocal(Vector3 localPosition, Vector3 localRight, Vector3 localForward)
{
var sampleR = GetSurfacePositionLocal(localPosition + localRight);
var sampleL = GetSurfacePositionLocal(localPosition - localRight);
var sampleF = GetSurfacePositionLocal(localPosition + localForward);
var sampleB = GetSurfacePositionLocal(localPosition - localForward);
var acrossRight = (sampleR - sampleL).normalized;
var acrossForward = (sampleF - sampleB).normalized;
return Vector3.Cross(acrossForward, acrossRight).normalized;
}
// This will return the world surface normal under the given world position, using 4 samples,
// whose distances are based on the right & forward vectors.
public Vector3 GetSurfaceNormalWorld(Vector3 worldPosition, Vector3 worldRight, Vector3 worldForward)
{
var localNormal = GetSurfaceNormalLocal(
transform.InverseTransformPoint(worldPosition),
transform.InverseTransformDirection(worldRight),
transform.InverseTransformDirection(worldForward));
return transform.TransformDirection(localNormal);
}
// Cheap spherical approximation of the surface normal: the direction from the
// terrain's origin to the given world position (ignores displacement).
public Vector3 GetSurfaceNormalWorld(Vector3 worldPosition)
{
return (worldPosition - transform.position).normalized;
}
// Creates and fully initializes a patch under 'parent' (or under this terrain when
// parent is null), with the four corner points/coords of its quad and its subdivision
// depth, then builds its mesh, collider, and materials before returning it.
public SgtPatch CreatePatch(string name, SgtPatch parent, Vector3 pointBL, Vector3 pointBR, Vector3 pointTL, Vector3 pointTR, Vector3 coordBL, Vector3 coordBR, Vector3 coordTL, Vector3 coordTR, int depth)
{
var parentTransform = parent != null ? parent.transform : transform;
var patch = SgtPatch.Create(name, gameObject.layer, parentTransform);
patch.Terrain = this;
patch.Parent = parent;
patch.Depth = depth;
patch.PointBL = pointBL;
patch.PointBR = pointBR;
patch.PointTL = pointTL;
patch.PointTR = pointTR;
patch.CoordBL = coordBL;
patch.CoordBR = coordBR;
patch.CoordTL = coordTL;
patch.CoordTR = coordTR;
// Build order: mesh first, then collider, then materials.
patch.UpdateMesh();
patch.UpdateCollider();
patch.UpdateMaterials();
return patch;
}
// Convenience overload: creates a terrain at the identity local transform.
public static SgtTerrain CreateTerrain(int layer = 0, Transform parent = null)
{
return CreateTerrain(layer, parent, Vector3.zero, Quaternion.identity, Vector3.one);
}
// Creates a new "Terrain" GameObject with the given layer/parent/local transform
// and attaches an SgtTerrain component to it.
public static SgtTerrain CreateTerrain(int layer, Transform parent, Vector3 localPosition, Quaternion localRotation, Vector3 localScale)
{
var gameObject = SgtHelper.CreateGameObject("Terrain", layer, parent, localPosition, localRotation, localScale);
var terrain = gameObject.AddComponent<SgtTerrain>();
return terrain;
}
#if UNITY_EDITOR
[MenuItem(SgtHelper.GameObjectMenuPrefix + "Terrain", false, 10)]
// Editor menu entry: creates a terrain under the current selection (inheriting its
// layer) and selects/pings the new component in the hierarchy.
public static void CreateTerrainMenuItem()
{
var parent = SgtHelper.GetSelectedParent();
var terrain = CreateTerrain(parent != null ? parent.gameObject.layer : 0, parent);
SgtHelper.SelectAndPing(terrain);
}
#endif
// Registers this terrain in the global list and re-activates any existing root patches.
// CheckUpdateCalls is only run once Start has already initialized the component.
protected virtual void OnEnable()
{
AllTerrains.Add(this);
if (negativeX != null) negativeX.gameObject.SetActive(true);
if (negativeY != null) negativeY.gameObject.SetActive(true);
if (negativeZ != null) negativeZ.gameObject.SetActive(true);
if (positiveX != null) positiveX.gameObject.SetActive(true);
if (positiveY != null) positiveY.gameObject.SetActive(true);
if (positiveZ != null) positiveZ.gameObject.SetActive(true);
if (startCalled == true)
{
CheckUpdateCalls();
}
}
// One-time initialization (guarded by the serialized startCalled flag so it
// survives domain reloads): fills in default split distances and runs the
// initial update pass.
protected virtual void Start()
{
if (startCalled == false)
{
startCalled = true;
if (SplitDistances == null)
{
// BUGFIX: copy the defaults instead of aliasing the shared static list.
// The editor's "Add Split Distance" button mutates SplitDistances in place,
// which previously mutated defaultSplitDistances for every terrain that
// had also fallen back to it.
SplitDistances = new List<float>(defaultSplitDistances);
}
CheckUpdateCalls();
}
}
// Applies any pending dirty flags (materials/meshes) queued by the *Dirty methods,
// and makes sure the six root patches exist every frame.
protected virtual void Update()
{
if (updateMaterialsDirty == true)
{
UpdateMaterials();
}
if (updateMeshesDirty == true)
{
UpdateMeshes();
}
ValidateMainPatches();
}
// Advances the patch-update timer (play mode only) and runs the budgeted
// patch split/merge pass after all Updates have finished.
protected virtual void LateUpdate()
{
if (Application.isPlaying == true)
{
updateAge += Time.deltaTime;
}
UpdatePatches();
}
// Unregisters this terrain from the global list and deactivates the root patches
// (they are kept, not destroyed, so OnEnable can reactivate them).
protected virtual void OnDisable()
{
AllTerrains.Remove(this);
if (negativeX != null) negativeX.gameObject.SetActive(false);
if (negativeY != null) negativeY.gameObject.SetActive(false);
if (negativeZ != null) negativeZ.gameObject.SetActive(false);
if (positiveX != null) positiveX.gameObject.SetActive(false);
if (positiveY != null) positiveY.gameObject.SetActive(false);
if (positiveZ != null) positiveZ.gameObject.SetActive(false);
}
// Marks every root patch for destruction so the patch pool/cleanup can reclaim them.
protected virtual void OnDestroy()
{
SgtPatch.MarkForDestruction(negativeX);
SgtPatch.MarkForDestruction(negativeY);
SgtPatch.MarkForDestruction(negativeZ);
SgtPatch.MarkForDestruction(positiveX);
SgtPatch.MarkForDestruction(positiveY);
SgtPatch.MarkForDestruction(positiveZ);
}
private System.Diagnostics.Stopwatch budgetWatch = new System.Diagnostics.Stopwatch();
// Time-budgeted patch maintenance (play mode only). Alternates between two phases:
// while 'patches' still holds unprocessed leaves, it spends up to 'Budget' seconds
// per frame caching pooled patches and re-evaluating splits; once the list is
// exhausted and at least half of DelayMin has elapsed, it gathers a fresh snapshot
// of leaf patches under a new sequence number.
private void UpdatePatches()
{
#if UNITY_EDITOR
// Patch scheduling relies on Time.deltaTime; skip it entirely in edit mode.
if (Application.isPlaying == false)
{
return;
}
#endif
if (patchIndex < patches.Count)
{
UpdateLocalObservers();
budgetWatch.Reset();
budgetWatch.Start();
// First spend budget keeping a small reserve (< 4) of pooled patches warm.
while (budgetWatch.Elapsed.TotalSeconds < Budget && SgtComponentPool<SgtPatch>.Count < 4)
{
SgtComponentPool<SgtPatch>.Cache();
}
// Then process as many queued patches as the remaining budget allows.
while (budgetWatch.Elapsed.TotalSeconds < Budget && patchIndex < patches.Count)
{
var patch = patches[patchIndex++];
// Make sure this patch is still in sequence (if it gets pooled and spawned it won't be)
if (patch != null && patch.Sequence == patchSequence)
{
patch.UpdateSplit(localObservers);
patch.Cooldown = Random.Range(DelayMin, DelayMax);
}
}
budgetWatch.Stop();
}
else
{
// Grab new leaves
patches.Clear();
// Been at least half the min delay since the last update?
if (updateAge > DelayMin * 0.5f)
{
var elapsed = updateAge;
updateAge = 0.0f;
patchIndex = 0;
patchSequence = globalPatchSequence = globalPatchSequence % int.MaxValue + 1; // Increment this, but prevent it from going below 1
ValidateMainPatches();
negativeX.GetPatches(patches, patchSequence, elapsed);
negativeY.GetPatches(patches, patchSequence, elapsed);
negativeZ.GetPatches(patches, patchSequence, elapsed);
positiveX.GetPatches(patches, patchSequence, elapsed);
positiveY.GetPatches(patches, patchSequence, elapsed);
positiveZ.GetPatches(patches, patchSequence, elapsed);
}
}
}
// Creates a root (depth 0) cube-face patch oriented by 'rotation'. The corner
// points are the corners of the +Z face of a unit cube, rotated into place.
// NOTE(review): the UV corners are mirrored horizontally relative to the
// points (coordBL.x = 1 pairs with pointBL.x = -1) — presumably intentional
// for consistent texture orientation across faces; confirm before changing.
private SgtPatch CreatePatch(string name, Quaternion rotation)
{
var pointBL = rotation * new Vector3(-1.0f, -1.0f, 1.0f);
var pointBR = rotation * new Vector3( 1.0f, -1.0f, 1.0f);
var pointTL = rotation * new Vector3(-1.0f, 1.0f, 1.0f);
var pointTR = rotation * new Vector3( 1.0f, 1.0f, 1.0f);
var coordBL = new Vector2(1.0f, 0.0f);
var coordBR = new Vector2(0.0f, 0.0f);
var coordTL = new Vector2(1.0f, 1.0f);
var coordTR = new Vector2(0.0f, 1.0f);
// Depth 0 and no parent: this is one of the six top-level patches.
return CreatePatch(name, null, pointBL, pointBR, pointTL, pointTR, coordBL, coordBR, coordTL, coordTR, 0);
}
private void ValidateMainPatches()
{
    // (Re)create any missing root patch; each covers one face of the cube.
    if (negativeX == null)
    {
        negativeX = CreatePatch("Negative X", Quaternion.Euler(0.0f, 90.0f, 0.0f));
    }

    if (negativeY == null)
    {
        negativeY = CreatePatch("Negative Y", Quaternion.Euler(90.0f, 0.0f, 0.0f));
    }

    if (negativeZ == null)
    {
        negativeZ = CreatePatch("Negative Z", Quaternion.Euler(0.0f, 180.0f, 0.0f));
    }

    if (positiveX == null)
    {
        positiveX = CreatePatch("Positive X", Quaternion.Euler(0.0f, 270.0f, 0.0f));
    }

    if (positiveY == null)
    {
        positiveY = CreatePatch("Positive Y", Quaternion.Euler(270.0f, 0.0f, 0.0f));
    }

    if (positiveZ == null)
    {
        positiveZ = CreatePatch("Positive Z", Quaternion.Euler(0.0f, 0.0f, 0.0f));
    }
}
private void UpdateLocalObservers()
{
    // Rebuild the cached observer positions, transformed into this terrain's
    // local space. Iterates backward like the rest of the codebase does for
    // the global observer list.
    localObservers.Clear();

    for (var index = SgtObserver.AllObservers.Count - 1; index >= 0; index--)
    {
        var worldPosition = SgtObserver.AllObservers[index].transform.position;

        localObservers.Add(transform.InverseTransformPoint(worldPosition));
    }
}
// Runs the initial mesh/material/collider builds if they have not happened
// yet. Called from Start, and again from OnEnable once startCalled is set.
private void CheckUpdateCalls()
{
if (updateMeshesCalled == false)
{
UpdateMeshes();
}
// NOTE(review): the guard below is commented out, so materials are rebuilt
// unconditionally here — presumably deliberate; confirm before restoring it.
//if (updateMaterialsCalled == false)
//{
UpdateMaterials();
//}
UpdateColliders();
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ILCompiler.Compiler.CppCodeGen;
using ILCompiler.DependencyAnalysisFramework;
using Internal.TypeSystem;
using Internal.TypeSystem.Ecma;
using Internal.Runtime;
using Internal.IL;
using ILCompiler.DependencyAnalysis;
namespace ILCompiler.CppCodeGen
{
internal class CppWriter
{
private Compilation _compilation;
// Registers the fixed C++ signature name for one well-known primitive type.
private void SetWellKnownTypeSignatureName(WellKnownType wellKnownType, string mangledSignatureName)
{
var type = _compilation.TypeSystemContext.GetWellKnownType(wellKnownType);
_cppSignatureNames.Add(type, mangledSignatureName);
}
// Opens the output stream for the generated .cpp file, seeds the signature
// name cache with the primitive type mappings, and records the canonical
// extern "C" signatures. The stream is disposed at the end of OutputCode.
public CppWriter(Compilation compilation)
{
_compilation = compilation;
_out = new StreamWriter(new FileStream(compilation.Options.OutputFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, 4096, false));
// Primitive CLR types map directly onto fixed-width C/C++ types.
SetWellKnownTypeSignatureName(WellKnownType.Void, "void");
SetWellKnownTypeSignatureName(WellKnownType.Boolean, "uint8_t");
SetWellKnownTypeSignatureName(WellKnownType.Char, "uint16_t");
SetWellKnownTypeSignatureName(WellKnownType.SByte, "int8_t");
SetWellKnownTypeSignatureName(WellKnownType.Byte, "uint8_t");
SetWellKnownTypeSignatureName(WellKnownType.Int16, "int16_t");
SetWellKnownTypeSignatureName(WellKnownType.UInt16, "uint16_t");
SetWellKnownTypeSignatureName(WellKnownType.Int32, "int32_t");
SetWellKnownTypeSignatureName(WellKnownType.UInt32, "uint32_t");
SetWellKnownTypeSignatureName(WellKnownType.Int64, "int64_t");
SetWellKnownTypeSignatureName(WellKnownType.UInt64, "uint64_t");
SetWellKnownTypeSignatureName(WellKnownType.IntPtr, "intptr_t");
SetWellKnownTypeSignatureName(WellKnownType.UIntPtr, "uintptr_t");
SetWellKnownTypeSignatureName(WellKnownType.Single, "float");
SetWellKnownTypeSignatureName(WellKnownType.Double, "double");
BuildExternCSignatureMap();
}
// Cache of TypeDesc -> C++ type name as used in signatures; seeded with the
// primitive mappings in the constructor.
private Dictionary<TypeDesc, string> _cppSignatureNames = new Dictionary<TypeDesc, string>();
// Returns the C++ type name used in signatures for 'type'. Enums use their
// underlying integer type; non-value (reference) types get a trailing "*".
// Results are memoized in _cppSignatureNames.
public string GetCppSignatureTypeName(TypeDesc type)
{
string mangledName;
if (_cppSignatureNames.TryGetValue(type, out mangledName))
return mangledName;
// TODO: Use friendly names for enums
if (type.IsEnum)
mangledName = GetCppSignatureTypeName(type.UnderlyingType);
else
mangledName = GetCppTypeName(type);
// Reference types are passed by pointer; byrefs/pointers already carry "*"
// from GetCppTypeName.
if (!type.IsValueType && !type.IsByRef && !type.IsPointer)
mangledName += "*";
_cppSignatureNames.Add(type, mangledName);
return mangledName;
}
// extern "C" methods are sometimes referenced via different signatures.
// _externCSignatureMap contains the canonical signature of the extern "C" import. References
// via other signatures are required to use casts.
private Dictionary<string, MethodSignature> _externCSignatureMap = new Dictionary<string, MethodSignature>();
// Seeds the extern "C" signature map from the node factory's node aliases
// (alias name -> signature of the aliased method).
private void BuildExternCSignatureMap()
{
foreach (var nodeAlias in _compilation.NodeFactory.NodeAliases)
{
var methodNode = (CppMethodCodeNode)nodeAlias.Key;
_externCSignatureMap.Add(nodeAlias.Value, methodNode.Method.Signature);
}
}
// Returns the parameter names for 'method' from PDB debug information, or
// null when the method is not an EcmaMethod or no PDB reader is available.
private IEnumerable<string> GetParameterNamesForMethod(MethodDesc method)
{
// TODO: The uses of this method need revision. The right way to get to this info is from
// a MethodIL. For declarations, we don't need names.
method = method.GetTypicalMethodDefinition();
var ecmaMethod = method as EcmaMethod;
if (ecmaMethod != null && ecmaMethod.Module.PdbReader != null)
{
return (new EcmaMethodDebugInformation(ecmaMethod)).GetParameterNames();
}
return null;
}
// Builds a C++ declaration or definition header for 'method'.
// - implementation == false: a forward declaration ("static ret name(args);").
// - implementation == true: a definition header with qualified name and
//   parameter names ("ret Type::name(type arg, ...)").
// - externalMethodName != null: an extern "C" declaration under that name;
//   'method' may then be null and 'methodSignature' supplies the signature.
// Instance methods get an explicit leading 'this' parameter (byref for
// valuetypes). Parameter names come from debug info when available, else _aN.
public string GetCppMethodDeclaration(MethodDesc method, bool implementation, string externalMethodName = null, MethodSignature methodSignature = null)
{
var sb = new CppGenerationBuffer();
if (methodSignature == null)
methodSignature = method.Signature;
if (externalMethodName != null)
{
sb.Append("extern \"C\" ");
}
else
{
if (!implementation)
{
sb.Append("static ");
}
}
sb.Append(GetCppSignatureTypeName(methodSignature.ReturnType));
sb.Append(" ");
if (externalMethodName != null)
{
sb.Append(externalMethodName);
}
else
{
if (implementation)
{
sb.Append(GetCppMethodDeclarationName(method.OwningType, GetCppMethodName(method)));
}
else
{
sb.Append(GetCppMethodName(method));
}
}
sb.Append("(");
bool hasThis = !methodSignature.IsStatic;
int argCount = methodSignature.Length;
if (hasThis)
argCount++;
// Debug-info parameter names; when present they include the 'this' slot
// (asserted below), otherwise fall back to generated _aN names.
List<string> parameterNames = null;
if (method != null)
{
IEnumerable<string> parameters = GetParameterNamesForMethod(method);
if (parameters != null)
{
parameterNames = new List<string>(parameters);
if (parameterNames.Count != 0)
{
System.Diagnostics.Debug.Assert(parameterNames.Count == argCount);
}
else
{
parameterNames = null;
}
}
}
for (int i = 0; i < argCount; i++)
{
if (hasThis)
{
if (i == 0)
{
// Slot 0 is the 'this' parameter; valuetypes take it by reference.
var thisType = method.OwningType;
if (thisType.IsValueType)
thisType = thisType.MakeByRefType();
sb.Append(GetCppSignatureTypeName(thisType));
}
else
{
sb.Append(GetCppSignatureTypeName(methodSignature[i - 1]));
}
}
else
{
sb.Append(GetCppSignatureTypeName(methodSignature[i]));
}
// Parameter names are only emitted for definitions.
if (implementation)
{
sb.Append(" ");
if (parameterNames != null)
{
sb.Append(SanitizeCppVarName(parameterNames[i]));
}
else
{
sb.Append("_a");
sb.Append(i.ToStringInvariant());
}
}
if (i != argCount - 1)
sb.Append(", ");
}
sb.Append(")");
if (!implementation)
sb.Append(";");
return sb.ToString();
}
// Builds the comma-separated argument list used when forwarding a call to
// 'method' — the same names GetCppMethodDeclaration emits for its definition
// header (debug-info names when available, otherwise _aN).
public string GetCppMethodCallParamList(MethodDesc method)
{
var sb = new CppGenerationBuffer();
var methodSignature = method.Signature;
bool hasThis = !methodSignature.IsStatic;
int argCount = methodSignature.Length;
if (hasThis)
argCount++;
List<string> parameterNames = null;
IEnumerable<string> parameters = GetParameterNamesForMethod(method);
if (parameters != null)
{
parameterNames = new List<string>(parameters);
if (parameterNames.Count != 0)
{
System.Diagnostics.Debug.Assert(parameterNames.Count == argCount);
}
else
{
parameterNames = null;
}
}
for (int i = 0; i < argCount; i++)
{
if (parameterNames != null)
{
sb.Append(SanitizeCppVarName(parameterNames[i]));
}
else
{
sb.Append("_a");
sb.Append(i.ToStringInvariant());
}
if (i != argCount - 1)
sb.Append(", ");
}
return sb.ToString();
}
/// <summary>
/// Returns the C++ name for <paramref name="type"/>: byrefs and pointers
/// become a pointer to their target's signature type, everything else uses
/// the compilation's mangled type name.
/// </summary>
public string GetCppTypeName(TypeDesc type)
{
    var category = type.Category;

    if (category == TypeFlags.ByRef || category == TypeFlags.Pointer)
    {
        return GetCppSignatureTypeName(((ParameterizedType)type).ParameterType) + "*";
    }

    return _compilation.NameMangler.GetMangledTypeName(type);
}
/// <summary>
/// Compute a proper declaration for <paramref name="methodName"/> defined in <paramref name="owningType"/>.
/// Usually the C++ name for a type is prefixed by "::" but this is not a valid way to declare a method,
/// so we need to strip it if present.
/// </summary>
/// <param name="owningType">Type where <paramref name="methodName"/> belongs.</param>
/// <param name="methodName">Name of method from <paramref name="owningType"/>.</param>
/// <returns>C++ declaration name for <paramref name="methodName"/>.</returns>
public string GetCppMethodDeclarationName(TypeDesc owningType, string methodName)
{
    var typeName = GetCppTypeName(owningType);

    // Strip the global-scope prefix: "::Ns::Type" is not valid in a method
    // declaration, only "Ns::Type::method" is. Ordinal comparison — mangled
    // names are machine identifiers, not linguistic text.
    if (typeName.StartsWith("::", StringComparison.Ordinal))
    {
        typeName = typeName.Substring(2);
    }

    return string.Concat(typeName, "::", methodName);
}
/// <summary>Returns the mangled C++ name for <paramref name="method"/>.</summary>
public string GetCppMethodName(MethodDesc method)
{
    return _compilation.NameMangler.GetMangledMethodName(method);
}
/// <summary>Returns the mangled C++ name for an instance field.</summary>
public string GetCppFieldName(FieldDesc field)
{
    return _compilation.NameMangler.GetMangledFieldName(field);
}
/// <summary>
/// Returns the name of a static field's slot in the generated __statics /
/// __gcStatics structs: the owning type's name (with "::" flattened to "__")
/// joined to the mangled field name.
/// </summary>
public string GetCppStaticFieldName(FieldDesc field)
{
    var flattenedTypeName = GetCppTypeName(field.OwningType).Replace("::", "__");

    return flattenedTypeName + "__" + _compilation.NameMangler.GetMangledFieldName(field);
}
/// <summary>Renames variables whose names collide with CRT identifiers.</summary>
public string SanitizeCppVarName(string varName)
{
    // TODO: name mangling robustness
    // "errno" collides with the CRT headers, so it gets a trailing underscore.
    return varName == "errno" ? varName + "_" : varName;
}
// Emits the body of a managed wrapper that forwards to an extern "C" import.
// If the import was previously recorded with a different signature, the call
// goes through a function-pointer cast ("__slot__" typedef) to the wrapper's
// own signature; otherwise the import is called directly.
private void CompileExternMethod(CppMethodCodeNode methodCodeNodeNeedingCode, string importName)
{
MethodDesc method = methodCodeNodeNeedingCode.Method;
MethodSignature methodSignature = method.Signature;
bool slotCastRequired = false;
MethodSignature externCSignature;
if (_externCSignatureMap.TryGetValue(importName, out externCSignature))
{
// Import already seen: a cast is needed when this signature differs
// from the canonical one.
slotCastRequired = !externCSignature.Equals(methodSignature);
}
else
{
// First reference becomes the canonical signature.
_externCSignatureMap.Add(importName, methodSignature);
externCSignature = methodSignature;
}
var builder = new CppGenerationBuffer();
builder.AppendLine();
builder.Append(GetCppMethodDeclaration(method, true));
builder.AppendLine();
builder.Append("{");
builder.Indent();
if (slotCastRequired)
{
// Emit the "__slot__<name>" function-pointer typedef used for the cast.
AppendSlotTypeDef(builder, method);
}
builder.AppendLine();
if (!method.Signature.ReturnType.IsVoid)
{
builder.Append("return ");
}
if (slotCastRequired)
builder.Append("((__slot__" + GetCppMethodName(method) + ")");
builder.Append("::");
builder.Append(importName);
if (slotCastRequired)
builder.Append(")");
builder.Append("(");
builder.Append(GetCppMethodCallParamList(method));
builder.Append(");");
builder.Exdent();
builder.AppendLine();
builder.Append("}");
methodCodeNodeNeedingCode.SetCode(builder.ToString(), Array.Empty<Object>());
}
// Compiles one method to C++ source and stores it on the code node.
// RuntimeImport and raw p/invoke methods become extern "C" forwarders; other
// methods go through the IL importer. If IL compilation throws, a stub body
// that throws 0xC000C000 is emitted instead so codegen can continue.
public void CompileMethod(CppMethodCodeNode methodCodeNodeNeedingCode)
{
MethodDesc method = methodCodeNodeNeedingCode.Method;
_compilation.Log.WriteLine("Compiling " + method.ToString());
if (method.HasCustomAttribute("System.Runtime", "RuntimeImportAttribute"))
{
CompileExternMethod(methodCodeNodeNeedingCode, ((EcmaMethod)method).GetRuntimeImportName());
return;
}
if (method.IsRawPInvoke())
{
CompileExternMethod(methodCodeNodeNeedingCode, method.GetPInvokeMethodMetadata().Name ?? method.Name);
return;
}
var methodIL = _compilation.GetMethodIL(method);
if (methodIL == null)
return;
try
{
var ilImporter = new ILImporter(_compilation, this, method, methodIL);
CompilerTypeSystemContext typeSystemContext = _compilation.TypeSystemContext;
MethodDebugInformation debugInfo = _compilation.GetDebugInfo(methodIL);
if (!_compilation.Options.NoLineNumbers)
{
IEnumerable<ILSequencePoint> sequencePoints = debugInfo.GetSequencePoints();
if (sequencePoints != null)
ilImporter.SetSequencePoints(sequencePoints);
}
IEnumerable<ILLocalVariable> localVariables = debugInfo.GetLocalVariables();
if (localVariables != null)
ilImporter.SetLocalVariables(localVariables);
IEnumerable<string> parameters = GetParameterNamesForMethod(method);
if (parameters != null)
ilImporter.SetParameterNames(parameters);
ilImporter.Compile(methodCodeNodeNeedingCode);
}
catch (Exception e)
{
// Compilation failure: log it and emit a throwing stub in its place.
_compilation.Log.WriteLine(e.Message + " (" + method + ")");
var builder = new CppGenerationBuffer();
builder.AppendLine();
builder.Append(GetCppMethodDeclaration(method, true));
builder.AppendLine();
builder.Append("{");
builder.Indent();
builder.AppendLine();
builder.Append("throw 0xC000C000;");
builder.Exdent();
builder.AppendLine();
builder.Append("}");
methodCodeNodeNeedingCode.SetCode(builder.ToString(), Array.Empty<Object>());
}
}
// Output stream for the generated C++ source file (opened in the ctor).
private TextWriter Out
{
get
{
return _out;
}
}
private StreamWriter _out;
// Methods to emit, grouped by owning type (built in BuildMethodLists).
private Dictionary<TypeDesc, List<MethodDesc>> _methodLists;
// Accumulators for static field declarations, filled while emitting types.
// NOTE(review): only _statics and _gcStatics are written out in OutputTypes;
// the thread-static buffers appear unused in this chunk — confirm.
private CppGenerationBuffer _statics;
private CppGenerationBuffer _gcStatics;
private CppGenerationBuffer _threadStatics;
private CppGenerationBuffer _gcThreadStatics;
// Base classes and valuetypes have to be emitted before they are used.
private HashSet<TypeDesc> _emittedTypes;
// Returns the field's type, or Boolean as a stand-in when resolving the
// type throws (e.g. a missing assembly reference).
private TypeDesc GetFieldTypeOrPlaceholder(FieldDesc field)
{
try
{
return field.FieldType;
}
catch
{
// TODO: For now, catch errors due to missing dependencies
return _compilation.TypeSystemContext.GetWellKnownType(WellKnownType.Boolean);
}
}
// Transitively adds every type reachable from the current signature-name
// cache (base types, field types, delegate Invoke signatures, array element
// types) to the cache, so all needed C++ type names exist before emission.
private void ExpandTypes()
{
_emittedTypes = new HashSet<TypeDesc>();
// ToArray: ExpandType adds to _cppSignatureNames while we iterate its keys.
foreach (var t in _cppSignatureNames.Keys.ToArray())
{
ExpandType(t);
}
_emittedTypes = null;
}
// Recursive worker for ExpandTypes; _emittedTypes guards against cycles.
private void ExpandType(TypeDesc type)
{
if (_emittedTypes.Contains(type))
return;
_emittedTypes.Add(type);
// Side effect: caches the C++ signature name for this type.
GetCppSignatureTypeName(type);
var baseType = type.BaseType;
if (baseType != null)
{
ExpandType(baseType);
}
foreach (var field in type.GetFields())
{
ExpandType(GetFieldTypeOrPlaceholder(field));
}
if (type.IsDelegate)
{
// Delegates also need the types appearing in their Invoke signature.
MethodDesc method = type.GetKnownMethod("Invoke", null);
var sig = method.Signature;
ExpandType(sig.ReturnType);
for (int i = 0; i < sig.Length; i++)
ExpandType(sig[i]);
}
if (type.IsArray)
{
ExpandType(((ArrayType)type).ElementType);
}
}
// Writes all cached types to the output stream. Called twice from OutputCode:
// first with full == false (forward declarations only), then with
// full == true (complete class bodies plus the __statics/__gcStatics structs
// accumulated while emitting fields).
private void OutputTypes(bool full)
{
var sb = new CppGenerationBuffer();
if (full)
{
_statics = new CppGenerationBuffer();
_statics.Indent();
_gcStatics = new CppGenerationBuffer();
_gcStatics.Indent();
_threadStatics = new CppGenerationBuffer();
_threadStatics.Indent();
_gcThreadStatics = new CppGenerationBuffer();
_gcThreadStatics.Indent();
}
_emittedTypes = new HashSet<TypeDesc>();
foreach (var t in _cppSignatureNames.Keys)
{
// Byref/pointer types have no class definition of their own.
if (t.IsByRef || t.IsPointer)
continue;
// Base class types and valuetype instantance field types may be emitted out-of-order to make them
// appear before they are used.
if (_emittedTypes.Contains(t))
continue;
OutputType(sb, t, full);
}
_emittedTypes = null;
if (full)
{
sb.AppendLine();
sb.Append("struct {");
// No need to indent or add a new line as _statics is already properly indented
sb.Append(_statics.ToString());
sb.AppendLine();
sb.Append("} __statics;");
// TODO: Register GC statics with GC
sb.AppendLine();
sb.Append("struct {");
// No need to indent or add a new line as _gcStatics is already properly indented
sb.Append(_gcStatics.ToString());
sb.AppendLine();
sb.Append("} __gcStatics;");
sb.AppendLine();
_statics = null;
_gcStatics = null;
_threadStatics = null;
_gcThreadStatics = null;
}
Out.Write(sb.ToString());
sb.Clear();
}
// Emits one type. With full == false this is just a forward declaration;
// with full == true it is the whole class body: __getMethodTable, virtual
// slot accessors, delegate helpers, fields, cctor flag, and method
// declarations. Recurses first into base types and valuetype field types so
// C++ sees their definitions before use.
private void OutputType(CppGenerationBuffer sb, TypeDesc t, bool full)
{
_emittedTypes.Add(t);
if (full)
{
if (!t.IsValueType)
{
var baseType = t.BaseType;
if (baseType != null)
{
if (!_emittedTypes.Contains(baseType))
{
OutputType(sb, baseType, full);
}
}
}
// Valuetype instance fields are embedded by value, so their types must
// already be fully defined.
foreach (var field in t.GetFields())
{
var fieldType = GetFieldTypeOrPlaceholder(field);
if (fieldType.IsValueType && !fieldType.IsPrimitive && !field.IsStatic)
{
if (!_emittedTypes.Contains(fieldType))
{
OutputType(sb, fieldType, full);
}
}
}
}
string mangledName = GetCppTypeName(t);
int nesting = 0;
int current = 0;
// Create Namespaces. If a mangledName starts with just :: we will simply ignore it.
sb.AppendLine();
for (;;)
{
int sep = mangledName.IndexOf("::", current);
if (sep < 0)
break;
if (sep != 0)
{
// Case of a name not starting with ::
sb.Append("namespace " + mangledName.Substring(current, sep - current) + " { ");
nesting++;
}
current = sep + 2;
}
if (full)
{
sb.Append("class " + mangledName.Substring(current));
if (!t.IsValueType)
{
if (t.BaseType != null)
{
sb.Append(" : public " + GetCppTypeName(t.BaseType));
}
}
sb.Append(" {");
sb.AppendLine();
sb.Append("public:");
sb.Indent();
// TODO: Enable once the dependencies are tracked for arrays
// if (((DependencyNode)_compilation.NodeFactory.ConstructedTypeSymbol(t)).Marked)
if (!t.IsPointer && !t.IsByRef)
{
sb.AppendLine();
sb.Append("static MethodTable * __getMethodTable();");
}
// Emit a __getslot__ accessor per virtual slot; slot indices continue
// from the end of the base classes' slots.
IReadOnlyList<MethodDesc> virtualSlots = _compilation.NodeFactory.VTable(t).Slots;
int baseSlots = 0;
var baseType = t.BaseType;
while (baseType != null)
{
IReadOnlyList<MethodDesc> baseVirtualSlots = _compilation.NodeFactory.VTable(baseType).Slots;
if (baseVirtualSlots != null)
baseSlots += baseVirtualSlots.Count;
baseType = baseType.BaseType;
}
for (int slot = 0; slot < virtualSlots.Count; slot++)
{
MethodDesc virtualMethod = virtualSlots[slot];
sb.AppendLine();
sb.Append(GetCodeForVirtualMethod(virtualMethod, baseSlots + slot));
}
if (t.IsDelegate)
{
sb.AppendLine();
sb.Append(GetCodeForDelegate(t));
}
OutputTypeFields(sb, t);
if (t.HasStaticConstructor)
{
// Flag in __statics tracking whether the cctor has run.
_statics.AppendLine();
_statics.Append("bool __cctor_" + GetCppTypeName(t).Replace("::", "__") + ";");
}
List<MethodDesc> methodList;
if (_methodLists.TryGetValue(t, out methodList))
{
foreach (var m in methodList)
{
OutputMethod(sb, m);
}
}
sb.Exdent();
sb.AppendLine();
sb.Append("};");
}
else
{
// Forward declaration only.
sb.Append("class " + mangledName.Substring(current) + ";");
}
// Close the namespaces opened above.
while (nesting > 0)
{
sb.Append("};");
nesting--;
}
// Make some rooms between two type definitions
if (full)
sb.AppendEmptyLine();
}
// Emits the fields of 't'. Instance fields go into the class body (wrapped
// in a union of offset-padded structs when the type uses explicit layout);
// static fields are routed to the _statics / _gcStatics accumulators instead.
private void OutputTypeFields(CppGenerationBuffer sb, TypeDesc t)
{
bool explicitLayout = false;
ClassLayoutMetadata classLayoutMetadata = default(ClassLayoutMetadata);
if (t.IsValueType)
{
MetadataType metadataType = (MetadataType)t;
if (metadataType.IsExplicitLayout)
{
explicitLayout = true;
classLayoutMetadata = metadataType.GetClassLayout();
}
}
int instanceFieldIndex = 0;
if (explicitLayout)
{
// Explicit layout is modeled as a union of one struct per field, each
// padded to the field's declared offset.
sb.AppendLine();
sb.Append("union {");
sb.Indent();
}
foreach (var field in t.GetFields())
{
if (field.IsStatic)
{
if (field.IsLiteral)
continue;
TypeDesc fieldType = GetFieldTypeOrPlaceholder(field);
CppGenerationBuffer builder;
if (!fieldType.IsValueType)
{
builder = _gcStatics;
}
else
{
// TODO: Valuetype statics with GC references
builder = _statics;
}
builder.AppendLine();
builder.Append(GetCppSignatureTypeName(fieldType));
builder.Append(" ");
builder.Append(GetCppStaticFieldName(field) + ";");
}
else
{
if (explicitLayout)
{
sb.AppendLine();
sb.Append("struct {");
sb.Indent();
int offset = classLayoutMetadata.Offsets[instanceFieldIndex].Offset;
if (offset > 0)
{
sb.AppendLine();
sb.Append("char __pad" + instanceFieldIndex + "[" + offset + "];");
}
}
sb.AppendLine();
sb.Append(GetCppSignatureTypeName(GetFieldTypeOrPlaceholder(field)) + " " + GetCppFieldName(field) + ";");
if (explicitLayout)
{
sb.Exdent();
sb.AppendLine();
sb.Append("};");
}
instanceFieldIndex++;
}
}
if (explicitLayout)
{
sb.Exdent();
sb.AppendLine();
sb.Append("};");
}
}
// Emits a forward declaration for one method inside its class body.
private void OutputMethod(CppGenerationBuffer sb, MethodDesc m)
{
sb.AppendLine();
sb.Append(GetCppMethodDeclaration(m, false));
}
// Emits the "__slot__<name>" function-pointer typedef matching 'method',
// including an explicit 'this' parameter for instance methods.
private void AppendSlotTypeDef(CppGenerationBuffer sb, MethodDesc method)
{
MethodSignature methodSignature = method.Signature;
TypeDesc thisArgument = null;
if (!methodSignature.IsStatic)
thisArgument = method.OwningType;
AppendSignatureTypeDef(sb, "__slot__" + GetCppMethodName(method), methodSignature, thisArgument);
}
// Emits "typedef ret (*name)(args);" for the given signature; when
// 'thisArgument' is non-null it becomes the first parameter type.
internal void AppendSignatureTypeDef(CppGenerationBuffer sb, string name, MethodSignature methodSignature, TypeDesc thisArgument)
{
sb.AppendLine();
sb.Append("typedef ");
sb.Append(GetCppSignatureTypeName(methodSignature.ReturnType));
sb.Append("(*");
sb.Append(name);
sb.Append(")(");
int argCount = methodSignature.Length;
if (thisArgument != null)
argCount++;
for (int i = 0; i < argCount; i++)
{
if (thisArgument != null)
{
if (i == 0)
{
// Slot 0 carries the 'this' parameter.
sb.Append(GetCppSignatureTypeName(thisArgument));
}
else
{
sb.Append(GetCppSignatureTypeName(methodSignature[i - 1]));
}
}
else
{
sb.Append(GetCppSignatureTypeName(methodSignature[i]));
}
if (i != argCount - 1)
sb.Append(", ");
}
sb.Append(");");
}
// Emits the helper members a delegate class needs: the "__slot__" typedef
// for its Invoke signature and a static "__invoke__<name>" accessor that
// extracts m_functionPointer from the delegate instance and casts it to
// that typedef.
private String GetCodeForDelegate(TypeDesc delegateType)
{
var sb = new CppGenerationBuffer();
MethodDesc method = delegateType.GetKnownMethod("Invoke", null);
AppendSlotTypeDef(sb, method);
sb.AppendLine();
sb.Append("static __slot__");
sb.Append(GetCppMethodName(method));
sb.Append(" __invoke__");
sb.Append(GetCppMethodName(method));
sb.Append("(void * pThis)");
sb.AppendLine();
sb.Append("{");
sb.Indent();
sb.AppendLine();
sb.Append("return (__slot__");
sb.Append(GetCppMethodName(method));
sb.Append(")(((");
sb.Append(GetCppSignatureTypeName(_compilation.TypeSystemContext.GetWellKnownType(WellKnownType.MulticastDelegate)));
sb.Append(")pThis)->m_functionPointer);");
sb.Exdent();
sb.AppendLine();
sb.Append("};");
return sb.ToString();
}
// Emits a static "__getslot__<name>" accessor that fetches entry 'slot'
// from the object's vtable (laid out immediately after the RawEEType
// pointer, hence the "+ 1") and casts it to the matching "__slot__" typedef.
private String GetCodeForVirtualMethod(MethodDesc method, int slot)
{
var sb = new CppGenerationBuffer();
AppendSlotTypeDef(sb, method);
sb.AppendLine();
sb.Append("static __slot__");
sb.Append(GetCppMethodName(method));
sb.Append(" __getslot__");
sb.Append(GetCppMethodName(method));
sb.Append("(void * pThis)");
sb.AppendLine();
sb.Append("{");
sb.Indent();
sb.AppendLine();
sb.Append(" return (__slot__");
sb.Append(GetCppMethodName(method));
sb.Append(")*((void **)(*((RawEEType **)pThis) + 1) + ");
sb.Append(slot.ToStringInvariant());
sb.Append(");");
sb.Exdent();
sb.AppendLine();
sb.Append("};");
return sb.ToString();
}
// Emits the vtable initializer entries for 'implType', walking from the
// root base class down (base slots come first) and resolving each declared
// virtual to its implementation on 'implType'. Abstract slots emit NULL.
private void AppendVirtualSlots(CppGenerationBuffer sb, TypeDesc implType, TypeDesc declType)
{
var baseType = declType.BaseType;
if (baseType != null)
AppendVirtualSlots(sb, implType, baseType);
IReadOnlyList<MethodDesc> virtualSlots = _compilation.NodeFactory.VTable(declType).Slots;
for (int i = 0; i < virtualSlots.Count; i++)
{
MethodDesc declMethod = virtualSlots[i];
MethodDesc implMethod = implType.GetClosestMetadataType().FindVirtualFunctionTargetMethodOnObjectType(declMethod);
sb.AppendLine();
if (implMethod.IsAbstract)
{
sb.Append("NULL,");
}
else
{
sb.Append("(void*)&");
sb.Append(GetCppMethodDeclarationName(implMethod.OwningType, GetCppMethodName(implMethod)));
sb.Append(",");
}
}
}
// Emits the body of __getMethodTable() for 'type': a function-local static
// struct holding a RawEEType followed by the vtable slot array, initialized
// with component size, flags, base size, the base type's method table, and
// (for constructed types) the virtual slot pointers. String, SzArray,
// MDArray, and ordinary types each get their own size/layout initializers.
private String GetCodeForType(TypeDesc type)
{
var sb = new CppGenerationBuffer();
// Total vtable slot count across the whole inheritance chain.
int totalSlots = 0;
TypeDesc t = type;
while (t != null)
{
IReadOnlyList<MethodDesc> virtualSlots = _compilation.NodeFactory.VTable(t).Slots;
totalSlots += virtualSlots.Count;
t = t.BaseType;
}
UInt16 flags = 0;
try
{
flags = EETypeBuilderHelpers.ComputeFlags(type);
}
catch
{
// TODO: Handling of missing dependencies
flags = 0;
}
sb.Append("MethodTable * ");
sb.Append(GetCppMethodDeclarationName(type, "__getMethodTable"));
sb.Append("()");
sb.AppendLine();
sb.Append("{");
sb.Indent();
sb.AppendLine();
sb.Append("static struct {");
sb.Indent();
// sb.Append(GCDesc);
sb.AppendLine();
sb.Append("RawEEType EEType;");
if (totalSlots != 0)
{
sb.AppendLine();
sb.Append("void * slots[");
sb.Append(totalSlots);
sb.Append("];");
}
sb.Exdent();
sb.AppendLine();
sb.Append("} mt = {");
sb.Indent();
// gcdesc
if (type.IsString)
{
// String has non-standard layout
sb.AppendLine();
sb.Append("{");
sb.Indent();
sb.AppendLine();
sb.Append("sizeof(uint16_t),");
sb.AppendLine();
sb.Append("0x"); // EEType::_usComponentSize
sb.Append(flags.ToStringInvariant("x4")); // EEType::_usFlags
sb.Append(",");
sb.AppendLine();
sb.Append("2 * sizeof(void*) + sizeof(int32_t) + 2,"); // EEType::_uBaseSize
}
else
if (type.IsSzArray)
{
sb.AppendLine();
sb.Append("{");
sb.Indent();
sb.AppendLine();
sb.Append("sizeof(");
sb.Append(GetCppSignatureTypeName(((ArrayType)type).ElementType)); // EEType::_usComponentSize
sb.Append("),");
sb.AppendLine();
sb.Append("0x");
sb.Append(flags.ToStringInvariant("x4")); // EEType::_usFlags
sb.Append(",");
sb.AppendLine();
sb.Append("3 * sizeof(void*),"); // EEType::_uBaseSize
}
else
if (type.IsArray)
{
// Multi-dimensional array: base size includes bounds/lo-bounds per rank.
sb.AppendLine();
sb.Append("{");
sb.Indent();
sb.AppendLine();
sb.Append("sizeof(");
sb.Append(GetCppSignatureTypeName(((ArrayType)type).ElementType)); // EEType::_usComponentSize
sb.Append("),");
sb.AppendLine();
sb.Append("0x");
sb.Append(flags.ToStringInvariant("x4")); // EEType::_usFlags
sb.Append(",");
sb.AppendLine();
sb.Append("3 * sizeof(void*) + "); // EEType::_uBaseSize
sb.Append(((ArrayType)type).Rank.ToStringInvariant());
sb.Append("* sizeof(int32_t) * 2,");
}
else
{
// sizeof(void*) == size of object header
sb.AppendLine();
sb.Append("{");
sb.Indent();
sb.AppendLine();
sb.Append("0,");
sb.AppendLine();
sb.Append("0x"); // EEType::_usComponentSize
// NOTE(review): this branch formats flags with "x" while the other three
// use "x4" (zero-padded); both parse as the same hex value — confirm the
// inconsistency is harmless before normalizing.
sb.Append(flags.ToStringInvariant("x")); // EEType::_usFlags
sb.Append(",");
sb.AppendLine();
sb.Append("AlignBaseSize(sizeof(void*)+sizeof("); // EEType::_uBaseSize
sb.Append(GetCppTypeName(type));
sb.Append(")),");
}
sb.AppendLine();
// base type
if (type.IsArray)
{
sb.Append(GetCppMethodDeclarationName(((ArrayType)type).ElementType, "__getMethodTable"));
sb.Append("()");
}
else
{
var baseType = type.BaseType;
if (baseType != null)
{
sb.Append(GetCppMethodDeclarationName(type.BaseType, "__getMethodTable"));
sb.Append("()");
}
else
{
sb.Append("NULL");
}
}
sb.Exdent();
sb.AppendLine();
sb.Append("},");
// virtual slots
if (((DependencyNode)_compilation.NodeFactory.ConstructedTypeSymbol(type)).Marked)
AppendVirtualSlots(sb, type, type);
sb.Exdent();
sb.AppendLine();
sb.Append("};");
sb.AppendLine();
sb.Append("return (MethodTable *)&mt.EEType;");
sb.Exdent();
sb.AppendLine();
sb.Append("}");
return sb.ToString();
}
// Builds _methodLists (owning type -> compiled methods) from the dependency
// graph, and pulls every referenced EEType into the signature-name cache.
private void BuildMethodLists(IEnumerable<DependencyNode> nodes)
{
_methodLists = new Dictionary<TypeDesc, List<MethodDesc>>();
foreach (var node in nodes)
{
if (node is CppMethodCodeNode)
{
CppMethodCodeNode methodCodeNode = (CppMethodCodeNode)node;
var method = methodCodeNode.Method;
var type = method.OwningType;
List<MethodDesc> methodList;
if (!_methodLists.TryGetValue(type, out methodList))
{
// Side effect: caches the C++ signature name for the owning type.
GetCppSignatureTypeName(type);
methodList = new List<MethodDesc>();
_methodLists.Add(type, methodList);
}
methodList.Add(method);
}
else
if (node is IEETypeNode)
{
IEETypeNode eeTypeNode = (IEETypeNode)node;
if (eeTypeNode.Type.IsGenericDefinition)
{
// TODO: CppWriter can't handle generic type definition EETypes
}
else
GetCppSignatureTypeName(eeTypeNode.Type);
}
}
}
// Top-level driver: writes the whole C++ output file. Emission order is
// headers, forward type declarations, full type definitions, extern "C"
// declarations, per-type __getMethodTable bodies and method bodies (plus
// alternate-name forwarder stubs), and finally the wmain/main entry stub
// when an entrypoint exists. Disposes the output stream when done.
public void OutputCode(IEnumerable<DependencyNode> nodes, MethodDesc entrypoint)
{
BuildMethodLists(nodes);
ExpandTypes();
Out.WriteLine("#include \"common.h\"");
Out.WriteLine("#include \"CppCodeGen.h\"");
Out.WriteLine();
Out.Write("/* Forward type definitions */");
OutputTypes(false);
Out.WriteLine();
Out.WriteLine();
Out.Write("/* Type definitions */");
OutputTypes(true);
var sb = new CppGenerationBuffer();
// Declare every extern "C" import under its canonical signature.
foreach (var externC in _externCSignatureMap)
{
string importName = externC.Key;
// TODO: hacky special-case
if (importName != "memmove" && importName != "malloc") // some methods are already declared by the CRT headers
{
sb.AppendLine();
sb.Append(GetCppMethodDeclaration(null, false, importName, externC.Value));
}
}
Out.Write(sb.ToString());
sb.Clear();
foreach (var t in _cppSignatureNames.Keys)
{
// TODO: Enable once the dependencies are tracked for arrays
// if (((DependencyNode)_compilation.NodeFactory.ConstructedTypeSymbol(t)).Marked)
if (!t.IsPointer && !t.IsByRef)
{
sb.AppendLine();
sb.Append(GetCodeForType(t));
}
List<MethodDesc> methodList;
if (_methodLists.TryGetValue(t, out methodList))
{
foreach (var m in methodList)
{
var methodCodeNode = (CppMethodCodeNode)_compilation.NodeFactory.MethodEntrypoint(m);
sb.AppendLine();
sb.Append(methodCodeNode.CppCode);
// If the method has a symbol alias, emit an extern "C" forwarder
// under that name that calls the real implementation.
var alternateName = _compilation.NodeFactory.GetSymbolAlternateName(methodCodeNode);
if (alternateName != null)
{
sb.AppendLine();
sb.Append(GetCppMethodDeclaration(m, true, alternateName));
sb.AppendLine();
sb.Append("{");
sb.Indent();
sb.AppendLine();
if (!m.Signature.ReturnType.IsVoid)
{
sb.Append("return ");
}
sb.Append(GetCppMethodDeclarationName(m.OwningType, GetCppMethodName(m)));
sb.Append("(");
sb.Append(GetCppMethodCallParamList(m));
sb.Append(");");
sb.Exdent();
sb.AppendLine();
sb.Append("}");
}
}
}
}
Out.Write(sb.ToString());
sb.Clear();
if (entrypoint != null)
{
// Stub for main method
sb.AppendLine();
if (_compilation.TypeSystemContext.Target.OperatingSystem == TargetOS.Windows)
{
sb.Append("int wmain(int argc, wchar_t * argv[]) { ");
}
else
{
sb.Append("int main(int argc, char * argv[]) {");
}
sb.Indent();
sb.AppendLine();
sb.Append("if (__initialize_runtime() != 0)");
sb.Indent();
sb.AppendLine();
sb.Append("return -1;");
sb.Exdent();
sb.AppendEmptyLine();
sb.AppendLine();
sb.Append("ReversePInvokeFrame frame;");
sb.AppendLine();
sb.Append("__reverse_pinvoke(&frame);");
sb.AppendEmptyLine();
sb.AppendLine();
sb.Append("int ret = ");
sb.Append(GetCppMethodDeclarationName(entrypoint.OwningType, GetCppMethodName(entrypoint)));
sb.Append("(argc, (intptr_t)argv);");
sb.AppendEmptyLine();
sb.AppendLine();
sb.Append("__reverse_pinvoke_return(&frame);");
sb.AppendLine();
sb.Append("__shutdown_runtime();");
sb.AppendLine();
sb.Append("return ret;");
sb.Exdent();
sb.AppendLine();
sb.Append("}");
}
Out.Write(sb.ToString());
sb.Clear();
Out.Dispose();
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.