context stringlengths 2.52k 185k | gt stringclasses 1 value |
|---|---|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace Microsoft.Xml.Serialization
{
using Microsoft.Xml.Schema;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public class SoapReflectionImporter
{
private TypeScope _typeScope;                               // owns all TypeDescs/mappings created by this importer
private SoapAttributeOverrides _attributeOverrides;         // caller-supplied attribute overrides (never null after ctor)
private NameTable _types = new NameTable();                 // xmltypename + xmlns -> Mapping
private NameTable _nullables = new NameTable();             // xmltypename + xmlns -> NullableMapping
private StructMapping _root;                                // lazily created mapping for xsd:anyType / System.Object
private string _defaultNs;                                  // default XML namespace (never null after ctor)
private ModelScope _modelScope;                             // reflection model factory bound to _typeScope
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.SoapReflectionImporter"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Creates an importer with no attribute overrides and an empty default namespace.
public SoapReflectionImporter() : this(null, null)
{
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.SoapReflectionImporter1"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Creates an importer with no attribute overrides and the given default namespace.
public SoapReflectionImporter(string defaultNamespace) : this(null, defaultNamespace)
{
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.SoapReflectionImporter2"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Creates an importer with the given attribute overrides and an empty default namespace.
public SoapReflectionImporter(SoapAttributeOverrides attributeOverrides) : this(attributeOverrides, null)
{
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.SoapReflectionImporter3"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Primary constructor: all other overloads chain here.
// Null arguments fall back to an empty override set / empty namespace, so the
// fields are never null afterwards.
public SoapReflectionImporter(SoapAttributeOverrides attributeOverrides, string defaultNamespace)
{
    _attributeOverrides = attributeOverrides ?? new SoapAttributeOverrides();
    _defaultNs = defaultNamespace ?? String.Empty;
    _typeScope = new TypeScope();
    _modelScope = new ModelScope(_typeScope);
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.IncludeTypes"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Imports mappings for every type named by a [SoapInclude] attribute on 'provider'.
public void IncludeTypes(Type provider)
{
    IncludeTypes(provider, new RecursionLimiter());
}
// Walks the [SoapInclude] attributes declared directly on 'provider'
// (inherit: false) and imports each referenced type under the shared limiter.
private void IncludeTypes(Type provider, RecursionLimiter limiter)
{
    foreach (var attrib in provider.GetTypeInfo().GetCustomAttributes<SoapIncludeAttribute>(false))
    {
        IncludeType(attrib.Type, limiter);
    }
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.IncludeType"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Imports the mapping for a single additional type.
public void IncludeType(Type type)
{
    IncludeType(type, new RecursionLimiter());
}
// Imports the mapping for 'type' (result is cached in _types by the importers).
private void IncludeType(Type type, RecursionLimiter limiter)
{
    ImportTypeMapping(_modelScope.GetTypeModel(type), limiter);
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="XmlReflectionImporter.ImportTypeMapping"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Imports a SOAP type mapping for 'type' with no default namespace.
public XmlTypeMapping ImportTypeMapping(Type type)
{
    return ImportTypeMapping(type, null);
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="XmlReflectionImporter.ImportTypeMapping1"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
/// <summary>
/// Imports a SOAP-encoded mapping for <paramref name="type"/> and wraps it in an
/// <see cref="XmlTypeMapping"/> whose root element is named after the mapping's
/// default element name. <paramref name="defaultNamespace"/> is used only when
/// the imported mapping has no namespace of its own.
/// </summary>
public XmlTypeMapping ImportTypeMapping(Type type, string defaultNamespace)
{
    TypeMapping typeMapping = ImportTypeMapping(_modelScope.GetTypeModel(type), new RecursionLimiter());
    ElementAccessor accessor = new ElementAccessor();
    accessor.IsSoap = true;
    accessor.Mapping = typeMapping;
    accessor.Name = typeMapping.DefaultElementName;
    // Fall back to the caller's namespace only when the mapping has none.
    accessor.Namespace = typeMapping.Namespace ?? defaultNamespace;
    accessor.Form = XmlSchemaForm.Qualified;

    XmlTypeMapping result = new XmlTypeMapping(_typeScope, accessor);
    result.SetKeyInternal(XmlMapping.GenerateKey(type, null, defaultNamespace));
    result.IsSoap = true;
    result.GenerateSerializer = true;
    return result;
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.ImportMembersMapping"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Convenience overload: wrapper element present, accessors written, no validation.
public XmlMembersMapping ImportMembersMapping(string elementName, string ns, XmlReflectionMember[] members)
{
    return ImportMembersMapping(elementName, ns, members, true, true, false);
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.ImportMembersMapping1"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Convenience overload: wrapper-element validation disabled.
public XmlMembersMapping ImportMembersMapping(string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors)
{
    return ImportMembersMapping(elementName, ns, members, hasWrapperElement, writeAccessors, false);
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.ImportMembersMapping2"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
// Convenience overload: mapping usable for both reading and writing.
public XmlMembersMapping ImportMembersMapping(string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors, bool validate)
{
    return ImportMembersMapping(elementName, ns, members, hasWrapperElement, writeAccessors, validate, XmlMappingAccess.Read | XmlMappingAccess.Write);
}
/// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.ImportMembersMapping3"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
/// <summary>
/// Imports a SOAP members mapping (typically a web-method parameter list) and
/// wraps it in an element accessor named <paramref name="elementName"/>.
/// A null/empty element name is kept as-is; otherwise it is XML-encoded.
/// </summary>
public XmlMembersMapping ImportMembersMapping(string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors, bool validate, XmlMappingAccess access)
{
    ElementAccessor accessor = new ElementAccessor();
    accessor.IsSoap = true;
    accessor.Name = string.IsNullOrEmpty(elementName) ? elementName : XmlConvert.EncodeLocalName(elementName);

    MembersMapping membersMapping = ImportMembersMapping(members, ns, hasWrapperElement, writeAccessors, validate, new RecursionLimiter());
    membersMapping.TypeName = elementName;
    accessor.Mapping = membersMapping;
    // The caller's namespace applies only when the mapping carries none.
    accessor.Namespace = membersMapping.Namespace ?? ns;
    accessor.Form = XmlSchemaForm.Qualified;

    XmlMembersMapping result = new XmlMembersMapping(_typeScope, accessor, access);
    result.IsSoap = true;
    result.GenerateSerializer = true;
    return result;
}
// Wraps a reflection failure in an InvalidOperationException that names the
// member being processed, preserving the original exception as InnerException.
private Exception ReflectionException(string context, Exception e)
{
    string message = string.Format(ResXml.XmlReflectionError, context);
    return new InvalidOperationException(message, e);
}
// Returns the SOAP attributes for a type: an explicit override wins; otherwise
// the attributes are read from the type's own custom attributes.
private SoapAttributes GetAttributes(Type type)
{
    return _attributeOverrides[type]
        ?? new SoapAttributes(type.GetTypeInfo().GetCustomAttributes(false) as Attribute[]);
}
// Returns the SOAP attributes for a member: an explicit override (keyed by the
// declaring type and member name) wins; otherwise they come from reflection.
private SoapAttributes GetAttributes(MemberInfo memberInfo)
{
    return _attributeOverrides[memberInfo.DeclaringType, memberInfo.Name]
        ?? new SoapAttributes(memberInfo.GetCustomAttributes(false) as Attribute[]);
}
// Imports a mapping with no explicit XSD data-type override.
private TypeMapping ImportTypeMapping(TypeModel model, RecursionLimiter limiter)
{
    return ImportTypeMapping(model, String.Empty, limiter);
}
// Core dispatcher: routes the reflection model to the specialized importer
// based on its TypeDesc.Kind. 'dataType', when non-empty, is an explicit XSD
// type name from Soap{Element,Attribute}Attribute.DataType; it is only legal
// on primitive models and must agree with the model's CLR type.
private TypeMapping ImportTypeMapping(TypeModel model, string dataType, RecursionLimiter limiter)
{
    if (dataType.Length > 0)
    {
        // DataType is only meaningful for XSD primitives.
        if (!model.TypeDesc.IsPrimitive)
        {
            throw new InvalidOperationException(string.Format(ResXml.XmlInvalidDataTypeUsage, dataType, "SoapElementAttribute.DataType"));
        }
        TypeDesc td = _typeScope.GetTypeDesc(dataType, XmlSchema.Namespace);
        if (td == null)
        {
            throw new InvalidOperationException(string.Format(ResXml.XmlInvalidXsdDataType, dataType, "SoapElementAttribute.DataType", new XmlQualifiedName(dataType, XmlSchema.Namespace).ToString()));
        }
        // The XSD type and the member's CLR type must map to the same TypeDesc.
        if (model.TypeDesc.FullName != td.FullName)
        {
            throw new InvalidOperationException(string.Format(ResXml.XmlDataTypeMismatch, dataType, "SoapElementAttribute.DataType", model.TypeDesc.FullName));
        }
    }
    // On the type itself only SoapTypeAttribute is allowed.
    SoapAttributes a = GetAttributes(model.Type);
    if ((a.SoapFlags & ~SoapAttributeFlags.Type) != 0)
        throw new InvalidOperationException(string.Format(ResXml.XmlInvalidTypeAttributes, model.Type.FullName));
    switch (model.TypeDesc.Kind)
    {
        case TypeKind.Enum:
            return ImportEnumMapping((EnumModel)model);
        case TypeKind.Primitive:
            return ImportPrimitiveMapping((PrimitiveModel)model, dataType);
        case TypeKind.Array:
        case TypeKind.Collection:
        case TypeKind.Enumerable:
            return ImportArrayLikeMapping((ArrayModel)model, limiter);
        case TypeKind.Root:
        case TypeKind.Class:
        case TypeKind.Struct:
            if (model.TypeDesc.IsOptionalValue)
            {
                // Nullable<T>: import (or reuse) the mapping for T, then wrap it.
                TypeDesc baseTypeDesc = model.TypeDesc.BaseTypeDesc;
                SoapAttributes baseAttributes = GetAttributes(baseTypeDesc.Type);
                string typeNs = _defaultNs;
                if (baseAttributes.SoapType != null && baseAttributes.SoapType.Namespace != null)
                    typeNs = baseAttributes.SoapType.Namespace;
                TypeDesc valueTypeDesc = string.IsNullOrEmpty(dataType) ? model.TypeDesc.BaseTypeDesc : _typeScope.GetTypeDesc(dataType, XmlSchema.Namespace);
                string xsdTypeName = string.IsNullOrEmpty(dataType) ? model.TypeDesc.BaseTypeDesc.Name : dataType;
                TypeMapping baseMapping = GetTypeMapping(xsdTypeName, typeNs, valueTypeDesc);
                if (baseMapping == null)
                    baseMapping = ImportTypeMapping(_modelScope.GetTypeModel(baseTypeDesc.Type), dataType, limiter);
                return CreateNullableMapping(baseMapping, model.TypeDesc.Type);
            }
            else
            {
                return ImportStructLikeMapping((StructModel)model, limiter);
            }
        default:
            throw new NotSupportedException(string.Format(ResXml.XmlUnsupportedSoapTypeKind, model.TypeDesc.FullName));
    }
}
// Builds the mapping for the schema root type (xsd:anyType / System.Object).
// It is excluded from schema generation and named with the SOAP ur-type name.
private StructMapping CreateRootMapping()
{
    return new StructMapping
    {
        IsSoap = true,
        TypeDesc = _typeScope.GetTypeDesc(typeof(object)),
        Members = new MemberMapping[0],
        IncludeInSchema = false,
        TypeName = Soap.UrType,
        Namespace = XmlSchema.Namespace
    };
}
// Lazily creates and caches the root (anyType) mapping, registering it with
// the type scope exactly once.
private StructMapping GetRootMapping()
{
    if (_root != null)
        return _root;
    _root = CreateRootMapping();
    _typeScope.AddTypeMapping(_root);
    return _root;
}
// Looks up a previously imported mapping by XSD name/namespace. Returns null
// when no mapping exists; throws when the cached mapping belongs to a
// different CLR type (two types claiming the same XSD name).
private TypeMapping GetTypeMapping(string typeName, string ns, TypeDesc typeDesc)
{
    TypeMapping cached = (TypeMapping)_types[typeName, ns];
    if (cached == null)
    {
        return null;
    }
    if (cached.TypeDesc != typeDesc)
    {
        throw new InvalidOperationException(string.Format(ResXml.XmlTypesDuplicate, typeDesc.FullName, cached.TypeDesc.FullName, typeName, ns));
    }
    return cached;
}
// Wraps 'baseMapping' in a NullableMapping for Nullable<T>, reusing a cached
// wrapper when one already exists for the same XSD name/namespace. Conflicting
// reuse (same XSD name bound to a different CLR type) is an error.
private NullableMapping CreateNullableMapping(TypeMapping baseMapping, Type type)
{
    TypeDesc typeDesc = baseMapping.TypeDesc.GetNullableTypeDesc(type);
    TypeMapping existingMapping = (TypeMapping)_nullables[baseMapping.TypeName, baseMapping.Namespace];
    NullableMapping mapping;
    if (existingMapping != null)
    {
        if (existingMapping is NullableMapping)
        {
            mapping = (NullableMapping)existingMapping;
            // Two primitive mappings with the same XSD name are interchangeable.
            if (mapping.BaseMapping is PrimitiveMapping && baseMapping is PrimitiveMapping)
                return mapping;
            else if (mapping.BaseMapping == baseMapping)
            {
                return mapping;
            }
            else
            {
                // Same XSD name already wraps a different base mapping: ambiguous.
                throw new InvalidOperationException(string.Format(ResXml.XmlTypesDuplicate, typeDesc.FullName, existingMapping.TypeDesc.FullName, typeDesc.Name, existingMapping.Namespace));
            }
        }
        else if (!(baseMapping is PrimitiveMapping))
        {
            // A non-nullable mapping already owns this XSD name.
            throw new InvalidOperationException(string.Format(ResXml.XmlTypesDuplicate, typeDesc.FullName, existingMapping.TypeDesc.FullName, typeDesc.Name, existingMapping.Namespace));
        }
    }
    mapping = new NullableMapping();
    mapping.BaseMapping = baseMapping;
    mapping.TypeDesc = typeDesc;
    mapping.TypeName = baseMapping.TypeName;
    mapping.Namespace = baseMapping.Namespace;
    mapping.IncludeInSchema = false; //baseMapping.IncludeInSchema;
    _nullables.Add(baseMapping.TypeName, mapping.Namespace, mapping);
    _typeScope.AddTypeMapping(mapping);
    return mapping;
}
// Imports a class/struct mapping. The mapping is registered in _types BEFORE
// its members are populated so that recursive type graphs terminate. When the
// recursion limiter's depth is exceeded, member initialization is deferred to
// a work-item stack and drained iteratively here.
private StructMapping ImportStructLikeMapping(StructModel model, RecursionLimiter limiter)
{
    if (model.TypeDesc.Kind == TypeKind.Root) return GetRootMapping();
    SoapAttributes a = GetAttributes(model.Type);
    string typeNs = _defaultNs;
    if (a.SoapType != null && a.SoapType.Namespace != null)
        typeNs = a.SoapType.Namespace;
    string typeName = XsdTypeName(model.Type, a, model.TypeDesc.Name);
    typeName = XmlConvert.EncodeLocalName(typeName);
    StructMapping mapping = (StructMapping)GetTypeMapping(typeName, typeNs, model.TypeDesc);
    if (mapping == null)
    {
        mapping = new StructMapping();
        mapping.IsSoap = true;
        mapping.TypeDesc = model.TypeDesc;
        mapping.Namespace = typeNs;
        mapping.TypeName = typeName;
        if (a.SoapType != null) mapping.IncludeInSchema = a.SoapType.IncludeInSchema;
        // Register before walking members so recursive references find us.
        _typeScope.AddTypeMapping(mapping);
        _types.Add(typeName, typeNs, mapping);
        if (limiter.IsExceededLimit)
        {
            // Too deep: defer member initialization and return the shell mapping.
            limiter.DeferredWorkItems.Add(new ImportStructWorkItem(model, mapping));
            return mapping;
        }
        limiter.Depth++;
        InitializeStructMembers(mapping, model, limiter);
        // Drain deferred work items LIFO; only this outermost frame drains.
        while (limiter.DeferredWorkItems.Count > 0)
        {
            int index = limiter.DeferredWorkItems.Count - 1;
            ImportStructWorkItem item = limiter.DeferredWorkItems[index];
            if (InitializeStructMembers(item.Mapping, item.Model, limiter))
            {
                //
                // if InitializeStructMembers returns true, then there were *no* changes to the DeferredWorkItems
                //
#if DEBUG
                // use exception in the place of Debug.Assert to avoid throwing asserts from a server process such as aspnet_ewp.exe
                if (index != limiter.DeferredWorkItems.Count - 1)
                    throw new InvalidOperationException(string.Format(ResXml.XmlInternalErrorDetails, "DeferredWorkItems.Count have changed"));
                if (item != limiter.DeferredWorkItems[index])
                    throw new InvalidOperationException(string.Format(ResXml.XmlInternalErrorDetails, "DeferredWorkItems.Top have changed"));
#endif
                // Remove the last work item
                limiter.DeferredWorkItems.RemoveAt(index);
            }
        }
        limiter.Depth--;
    }
    return mapping;
}
// Populates mapping.BaseMapping and mapping.Members from the model's fields
// and properties. Returns true when the mapping is fully initialized; returns
// false when the base mapping's import is still pending on the limiter's
// deferred-work stack, in which case this mapping is (re)queued behind it.
private bool InitializeStructMembers(StructMapping mapping, StructModel model, RecursionLimiter limiter)
{
    if (mapping.IsFullyInitialized)
        return true;
    if (model.TypeDesc.BaseTypeDesc != null)
    {
        StructMapping baseMapping = ImportStructLikeMapping((StructModel)_modelScope.GetTypeModel(model.Type.GetTypeInfo().BaseType, false), limiter);
        // check to see if the import of the baseMapping was deferred
        // NOTE(review): the lookup uses mapping.BaseMapping (as in the original
        // reference source) rather than the just-imported baseMapping — confirm
        // against WorkItems.IndexOf semantics before changing.
        int baseIndex = limiter.DeferredWorkItems.IndexOf(mapping.BaseMapping);
        if (baseIndex < 0)
        {
            mapping.BaseMapping = baseMapping;
        }
        else
        {
            // the import of the baseMapping was deferred, make sure that the derived mapping is deferred as well
            if (!limiter.DeferredWorkItems.Contains(mapping))
            {
                limiter.DeferredWorkItems.Add(new ImportStructWorkItem(model, mapping));
            }
            // make sure that baseMapping gets processed before the derived one:
            // swap it to the top of the deferred-work stack
            int top = limiter.DeferredWorkItems.Count - 1;
            if (baseIndex < top)
            {
                ImportStructWorkItem baseMappingWorkItem = limiter.DeferredWorkItems[baseIndex];
                limiter.DeferredWorkItems[baseIndex] = limiter.DeferredWorkItems[top];
                limiter.DeferredWorkItems[top] = baseMappingWorkItem;
            }
            return false;
        }
    }
    ArrayList members = new ArrayList();
    foreach (MemberInfo memberInfo in model.GetMemberInfos())
    {
        // BUG FIX: the test was inverted — it skipped every field and property
        // (so no members were ever imported). Only members that are NOT fields
        // or properties (methods, events, ...) must be skipped.
        if (!(memberInfo is FieldInfo || memberInfo is PropertyInfo))
            continue;
        SoapAttributes memberAttrs = GetAttributes(memberInfo);
        if (memberAttrs.SoapIgnore) continue;
        FieldModel fieldModel = model.GetFieldModel(memberInfo);
        if (fieldModel == null) continue;
        MemberMapping member = ImportFieldMapping(fieldModel, memberAttrs, mapping.Namespace, limiter);
        if (member == null) continue;
        if (!member.TypeDesc.IsPrimitive && !member.TypeDesc.IsEnum && !member.TypeDesc.IsOptionalValue)
        {
            // Encoded SOAP cannot represent reference members inside value types.
            if (model.TypeDesc.IsValueType)
                throw new NotSupportedException(string.Format(ResXml.XmlRpcRefsInValueType, model.TypeDesc.FullName));
            if (member.TypeDesc.IsValueType)
                throw new NotSupportedException(string.Format(ResXml.XmlRpcNestedValueType, member.TypeDesc.FullName));
        }
        // Skip members already declared by the base mapping.
        if (mapping.BaseMapping != null)
        {
            if (mapping.BaseMapping.Declares(member, mapping.TypeName)) continue;
        }
        members.Add(member);
    }
    mapping.Members = (MemberMapping[])members.ToArray(typeof(MemberMapping));
    if (mapping.BaseMapping == null) mapping.BaseMapping = GetRootMapping();
    IncludeTypes(model.Type, limiter);
    return true;
}
// Imports a mapping for arrays, collections, and enumerables. Multiple CLR
// types can share one XSD array type, so same-named mappings are chained via
// ArrayMapping.Next with the newest mapping at the head of the list.
private ArrayMapping ImportArrayLikeMapping(ArrayModel model, RecursionLimiter limiter)
{
    ArrayMapping mapping = new ArrayMapping();
    mapping.IsSoap = true;
    TypeMapping itemTypeMapping = ImportTypeMapping(model.Element, limiter);
    // Encoded SOAP arrays of non-primitive value types are not supported.
    if (itemTypeMapping.TypeDesc.IsValueType && !itemTypeMapping.TypeDesc.IsPrimitive && !itemTypeMapping.TypeDesc.IsEnum)
        throw new NotSupportedException(string.Format(ResXml.XmlRpcArrayOfValueTypes, model.TypeDesc.FullName));
    mapping.TypeDesc = model.TypeDesc;
    mapping.Elements = new ElementAccessor[] {
        CreateElementAccessor(itemTypeMapping, mapping.Namespace) };
    SetArrayMappingType(mapping);
    // in the case of an ArrayMapping we can have more than one mapping correspond to a type
    // examples of that are ArrayList and object[]; both will map to ArrayOfur-type
    // so we create a linked list for all mappings of the same XSD type
    ArrayMapping existingMapping = (ArrayMapping)_types[mapping.TypeName, mapping.Namespace];
    if (existingMapping != null)
    {
        ArrayMapping first = existingMapping;
        while (existingMapping != null)
        {
            if (existingMapping.TypeDesc == model.TypeDesc)
                return existingMapping;
            existingMapping = existingMapping.Next;
        }
        mapping.Next = first;
        _types[mapping.TypeName, mapping.Namespace] = mapping;
        return mapping;
    }
    _typeScope.AddTypeMapping(mapping);
    _types.Add(mapping.TypeName, mapping.Namespace, mapping);
    // NOTE(review): called without the limiter, unlike the struct path — confirm intentional.
    IncludeTypes(model.Type);
    return mapping;
}
// UNDONE Nullable
// Derives the XSD name/namespace ("ArrayOf<Item>") for an array mapping from
// its single item mapping. When the name is already taken by a different,
// non-matching array mapping, a numeric suffix is appended until the name is
// free or an equivalent array mapping is found.
private void SetArrayMappingType(ArrayMapping mapping)
{
    bool useDefaultNs = false;
    string itemTypeName;
    string itemTypeNamespace;
    TypeMapping itemTypeMapping;
    if (mapping.Elements.Length == 1)
        itemTypeMapping = mapping.Elements[0].Mapping;
    else
        itemTypeMapping = null;
    // Pick the item's XSD name/namespace based on the kind of item mapping.
    if (itemTypeMapping is EnumMapping)
    {
        itemTypeNamespace = itemTypeMapping.Namespace;
        itemTypeName = itemTypeMapping.TypeName;
    }
    else if (itemTypeMapping is PrimitiveMapping)
    {
        itemTypeNamespace = itemTypeMapping.TypeDesc.IsXsdType ? XmlSchema.Namespace : UrtTypes.Namespace;
        itemTypeName = itemTypeMapping.TypeDesc.DataType.Name;
        useDefaultNs = true;
    }
    else if (itemTypeMapping is StructMapping)
    {
        if (itemTypeMapping.TypeDesc.IsRoot)
        {
            // object[] and friends become ArrayOfAnyType in the default namespace.
            itemTypeNamespace = XmlSchema.Namespace;
            itemTypeName = Soap.UrType;
            useDefaultNs = true;
        }
        else
        {
            itemTypeNamespace = itemTypeMapping.Namespace;
            itemTypeName = itemTypeMapping.TypeName;
        }
    }
    else if (itemTypeMapping is ArrayMapping)
    {
        itemTypeNamespace = itemTypeMapping.Namespace;
        itemTypeName = itemTypeMapping.TypeName;
    }
    else
    {
        throw new InvalidOperationException(string.Format(ResXml.XmlInvalidSoapArray, mapping.TypeDesc.FullName));
    }
    itemTypeName = CodeIdentifier.MakePascal(itemTypeName);
    string uniqueName = "ArrayOf" + itemTypeName;
    string ns = useDefaultNs ? _defaultNs : itemTypeNamespace;
    int i = 1;
    TypeMapping existingMapping = (TypeMapping)_types[uniqueName, ns];
    while (existingMapping != null)
    {
        if (existingMapping is ArrayMapping)
        {
            ArrayMapping arrayMapping = (ArrayMapping)existingMapping;
            // An equivalent array mapping may share the name; stop renaming.
            if (AccessorMapping.ElementsMatch(arrayMapping.Elements, mapping.Elements))
            {
                break;
            }
        }
        // need to re-name the mapping
        uniqueName = itemTypeName + i.ToString();
        existingMapping = (TypeMapping)_types[uniqueName, ns];
        i++;
    }
    mapping.Namespace = ns;
    mapping.TypeName = uniqueName;
}
// Builds a mapping for an XSD primitive. An explicit dataType override is
// resolved first against the XSD namespace, then against the non-XSD (CLR
// primitive) namespace; an unresolvable name is an error. Without an override
// the model's own TypeDesc is used.
private PrimitiveMapping ImportPrimitiveMapping(PrimitiveModel model, string dataType)
{
    PrimitiveMapping mapping = new PrimitiveMapping();
    mapping.IsSoap = true;
    if (dataType.Length == 0)
    {
        mapping.TypeDesc = model.TypeDesc;
    }
    else
    {
        TypeDesc resolved = _typeScope.GetTypeDesc(dataType, XmlSchema.Namespace);
        if (resolved == null)
        {
            // try it as a non-Xsd type
            resolved = _typeScope.GetTypeDesc(dataType, UrtTypes.Namespace);
        }
        if (resolved == null)
        {
            throw new InvalidOperationException(string.Format(ResXml.XmlUdeclaredXsdType, dataType));
        }
        mapping.TypeDesc = resolved;
    }
    mapping.TypeName = mapping.TypeDesc.DataType.Name;
    mapping.Namespace = mapping.TypeDesc.IsXsdType ? XmlSchema.Namespace : UrtTypes.Namespace;
    return mapping;
}
// Imports an enum mapping, caching it in _types by its XSD name/namespace.
// The mapping is registered before its constants are imported; an enum with
// no serializable constants is an error.
private EnumMapping ImportEnumMapping(EnumModel model)
{
    SoapAttributes a = GetAttributes(model.Type);
    string typeNs = _defaultNs;
    if (a.SoapType != null && a.SoapType.Namespace != null)
        typeNs = a.SoapType.Namespace;
    string typeName = XsdTypeName(model.Type, a, model.TypeDesc.Name);
    typeName = XmlConvert.EncodeLocalName(typeName);
    EnumMapping mapping = (EnumMapping)GetTypeMapping(typeName, typeNs, model.TypeDesc);
    if (mapping == null)
    {
        mapping = new EnumMapping();
        mapping.IsSoap = true;
        mapping.TypeDesc = model.TypeDesc;
        mapping.TypeName = typeName;
        mapping.Namespace = typeNs;
        // [Flags] enums serialize as space-separated value lists.
        mapping.IsFlags = model.Type.GetTypeInfo().IsDefined(typeof(FlagsAttribute), false);
        _typeScope.AddTypeMapping(mapping);
        _types.Add(typeName, typeNs, mapping);
        ArrayList constants = new ArrayList();
        for (int i = 0; i < model.Constants.Length; i++)
        {
            ConstantMapping constant = ImportConstantMapping(model.Constants[i]);
            if (constant != null) constants.Add(constant);
        }
        if (constants.Count == 0)
        {
            throw new InvalidOperationException(string.Format(ResXml.XmlNoSerializableMembers, model.TypeDesc.FullName));
        }
        mapping.Constants = (ConstantMapping[])constants.ToArray(typeof(ConstantMapping));
    }
    return mapping;
}
// Imports one enum constant. Returns null for [SoapIgnore] members; only
// SoapEnumAttribute is allowed on enum fields. The XML name comes from
// [SoapEnum(Name=...)] when present, otherwise from the constant's own name.
private ConstantMapping ImportConstantMapping(ConstantModel model)
{
    SoapAttributes attrs = GetAttributes(model.FieldInfo);
    if (attrs.SoapIgnore)
        return null;
    if ((attrs.SoapFlags & ~SoapAttributeFlags.Enum) != 0)
        throw new InvalidOperationException(ResXml.XmlInvalidEnumAttribute);
    if (attrs.SoapEnum == null)
    {
        attrs.SoapEnum = new SoapEnumAttribute();
    }
    ConstantMapping constant = new ConstantMapping
    {
        XmlName = attrs.SoapEnum.Name.Length == 0 ? model.Name : attrs.SoapEnum.Name,
        Name = model.Name,
        Value = model.Value
    };
    return constant;
}
// Imports the member list of a SOAP message (e.g. web-method parameters) into
// a MembersMapping. Member import failures are rewrapped so the error names
// the offending member; OutOfMemoryException is rethrown untouched.
private MembersMapping ImportMembersMapping(XmlReflectionMember[] xmlReflectionMembers, string ns, bool hasWrapperElement, bool writeAccessors, bool validateWrapperElement, RecursionLimiter limiter)
{
    MembersMapping members = new MembersMapping();
    members.TypeDesc = _typeScope.GetTypeDesc(typeof(object[]));
    MemberMapping[] mappings = new MemberMapping[xmlReflectionMembers.Length];
    for (int i = 0; i < mappings.Length; i++)
    {
        try
        {
            XmlReflectionMember member = xmlReflectionMembers[i];
            // Wrapped members are unqualified; bare members are qualified.
            MemberMapping mapping = ImportMemberMapping(member, ns, xmlReflectionMembers, hasWrapperElement ? XmlSchemaForm.Unqualified : XmlSchemaForm.Qualified, limiter);
            if (member.IsReturnValue && writeAccessors)
            { // no special treatment for return values with doc/enc
                // A return value is only legal as the first member.
                if (i > 0) throw new InvalidOperationException(ResXml.XmlInvalidReturnPosition);
                mapping.IsReturnValue = true;
            }
            mappings[i] = mapping;
        }
        catch (Exception e)
        {
            if (e is OutOfMemoryException)
            {
                throw;
            }
            throw ReflectionException(xmlReflectionMembers[i].MemberName, e);
        }
    }
    members.Members = mappings;
    members.HasWrapperElement = hasWrapperElement;
    if (hasWrapperElement)
    {
        members.ValidateRpcWrapperElement = validateWrapperElement;
    }
    members.WriteAccessors = writeAccessors;
    members.IsSoap = true;
    if (hasWrapperElement && !writeAccessors)
        members.Namespace = ns;
    return members;
}
// Imports a single message member. Returns null for [SoapIgnore] members.
// A companion "<Member>Specified" member, when present in the member list,
// enables the CheckSpecified machinery.
private MemberMapping ImportMemberMapping(XmlReflectionMember xmlReflectionMember, string ns, XmlReflectionMember[] xmlReflectionMembers, XmlSchemaForm form, RecursionLimiter limiter)
{
    SoapAttributes a = xmlReflectionMember.SoapAttributes;
    if (a.SoapIgnore) return null;
    MemberMapping member = new MemberMapping();
    member.IsSoap = true;
    member.Name = xmlReflectionMember.MemberName;
    bool checkSpecified = XmlReflectionImporter.FindSpecifiedMember(xmlReflectionMember.MemberName, xmlReflectionMembers) != null;
    FieldModel model = new FieldModel(xmlReflectionMember.MemberName, xmlReflectionMember.MemberType, _typeScope.GetTypeDesc(xmlReflectionMember.MemberType), checkSpecified, false);
    member.CheckShouldPersist = model.CheckShouldPersist;
    member.CheckSpecified = model.CheckSpecified;
    member.ReadOnly = model.ReadOnly; // || !model.FieldTypeDesc.HasDefaultConstructor;
    ImportAccessorMapping(member, model, a, ns, form, limiter);
    if (xmlReflectionMember.OverrideIsNullable)
        member.Elements[0].IsNullable = false;
    return member;
}
// Imports a struct/class field or property into a MemberMapping. Returns null
// for [SoapIgnore] members. Struct members are always unqualified.
private MemberMapping ImportFieldMapping(FieldModel model, SoapAttributes a, string ns, RecursionLimiter limiter)
{
    if (a.SoapIgnore) return null;
    MemberMapping member = new MemberMapping();
    member.IsSoap = true;
    member.Name = model.Name;
    member.CheckShouldPersist = model.CheckShouldPersist;
    member.CheckSpecified = model.CheckSpecified;
    member.MemberInfo = model.MemberInfo;
    member.CheckSpecifiedMemberInfo = model.CheckSpecifiedMemberInfo;
    member.CheckShouldPersistMethodInfo = model.CheckShouldPersistMethodInfo;
    member.ReadOnly = model.ReadOnly; // || !model.FieldTypeDesc.HasDefaultConstructor;
    ImportAccessorMapping(member, model, a, ns, XmlSchemaForm.Unqualified, limiter);
    return member;
}
// Fills in the attribute or element accessor for a member, based on its SOAP
// attribute flags. [SoapAttribute] members must be primitive/enum and may not
// combine with element flags; everything else is serialized as an element.
private void ImportAccessorMapping(MemberMapping accessor, FieldModel model, SoapAttributes a, string ns, XmlSchemaForm form, RecursionLimiter limiter)
{
    Type accessorType = model.FieldType;
    string accessorName = model.Name;
    accessor.TypeDesc = _typeScope.GetTypeDesc(accessorType);
    if (accessor.TypeDesc.IsVoid)
    {
        throw new InvalidOperationException(ResXml.XmlInvalidVoid);
    }
    SoapAttributeFlags flags = a.SoapFlags;
    if ((flags & SoapAttributeFlags.Attribute) == SoapAttributeFlags.Attribute)
    {
        // Only simple (primitive/enum) content can be an XML attribute.
        if (!accessor.TypeDesc.IsPrimitive && !accessor.TypeDesc.IsEnum)
            throw new InvalidOperationException(string.Format(ResXml.XmlIllegalSoapAttribute, accessorName, accessor.TypeDesc.FullName));
        // Attribute and Element flags are mutually exclusive.
        if ((flags & SoapAttributeFlags.Attribute) != flags)
            throw new InvalidOperationException(ResXml.XmlInvalidElementAttribute);
        AttributeAccessor attribute = new AttributeAccessor();
        attribute.Name = Accessor.EscapeQName(a.SoapAttribute == null || a.SoapAttribute.AttributeName.Length == 0 ? accessorName : a.SoapAttribute.AttributeName);
        attribute.Namespace = a.SoapAttribute == null || a.SoapAttribute.Namespace == null ? ns : a.SoapAttribute.Namespace;
        attribute.Form = XmlSchemaForm.Qualified; // attributes are always qualified since they're only used for encoded soap headers
        attribute.Mapping = ImportTypeMapping(_modelScope.GetTypeModel(accessorType), (a.SoapAttribute == null ? String.Empty : a.SoapAttribute.DataType), limiter);
        attribute.Default = GetDefaultValue(model.FieldTypeDesc, a);
        accessor.Attribute = attribute;
        accessor.Elements = new ElementAccessor[0];
    }
    else
    {
        // Anything other than the Element flag is invalid here.
        if ((flags & SoapAttributeFlags.Element) != flags)
            throw new InvalidOperationException(ResXml.XmlInvalidElementAttribute);
        ElementAccessor element = new ElementAccessor();
        element.IsSoap = true;
        element.Name = XmlConvert.EncodeLocalName(a.SoapElement == null || a.SoapElement.ElementName.Length == 0 ? accessorName : a.SoapElement.ElementName);
        element.Namespace = ns;
        element.Form = form;
        element.Mapping = ImportTypeMapping(_modelScope.GetTypeModel(accessorType), (a.SoapElement == null ? String.Empty : a.SoapElement.DataType), limiter);
        if (a.SoapElement != null)
            element.IsNullable = a.SoapElement.IsNullable;
        accessor.Elements = new ElementAccessor[] { element };
    }
}
// Builds a SOAP element accessor for an array item, named after the item
// mapping's XSD type name.
private static ElementAccessor CreateElementAccessor(TypeMapping mapping, string ns)
{
    return new ElementAccessor
    {
        IsSoap = true,
        Name = mapping.TypeName, //XmlConvert.EncodeLocalName(name == null || name.Length == 0 ? mapping.TypeName : name);
        Namespace = ns,
        Mapping = mapping
    };
}
// Resolves [SoapDefaultValue]. Returns null when unset (or DBNull). Defaults
// are only honored on primitives and enums; enum defaults are validated and
// returned as their "G"-formatted names (commas replaced for flags values).
private object GetDefaultValue(TypeDesc fieldTypeDesc, SoapAttributes a)
{
    if (a.SoapDefaultValue == null || a.SoapDefaultValue == DBNull.Value) return null;
    if (!(fieldTypeDesc.Kind == TypeKind.Primitive || fieldTypeDesc.Kind == TypeKind.Enum))
    {
        // Non-simple types cannot carry a default; clear and report none.
        a.SoapDefaultValue = null;
        return a.SoapDefaultValue;
    }
    // for enums validate and return a string representation
    if (fieldTypeDesc.Kind == TypeKind.Enum)
    {
        // The default's runtime type must be the member's enum type.
        if (fieldTypeDesc != _typeScope.GetTypeDesc(a.SoapDefaultValue.GetType()))
            throw new InvalidOperationException(string.Format(ResXml.XmlInvalidDefaultEnumValue, a.SoapDefaultValue.GetType().FullName, fieldTypeDesc.FullName));
        string strValue = Enum.Format(a.SoapDefaultValue.GetType(), a.SoapDefaultValue, "G").Replace(",", " ");
        string numValue = Enum.Format(a.SoapDefaultValue.GetType(), a.SoapDefaultValue, "D");
        if (strValue == numValue) // means enum value wasn't recognized
            throw new InvalidOperationException(string.Format(ResXml.XmlInvalidDefaultValue, strValue, a.SoapDefaultValue.GetType().FullName));
        return strValue;
    }
    return a.SoapDefaultValue;
}
// Returns the XSD type name for a CLR type: the ur-type for System.Object,
// the schema data-type name for XSD primitives, otherwise the attribute-aware
// name computed by the three-argument overload.
internal string XsdTypeName(Type type)
{
    if (type == typeof(object))
        return Soap.UrType;
    TypeDesc typeDesc = _typeScope.GetTypeDesc(type);
    bool hasXsdPrimitiveName =
        typeDesc.IsPrimitive &&
        typeDesc.DataType != null &&
        !string.IsNullOrEmpty(typeDesc.DataType.Name);
    return hasXsdPrimitiveName
        ? typeDesc.DataType.Name
        : XsdTypeName(type, GetAttributes(type), typeDesc.Name);
}
// Computes the XSD type name, honoring [SoapType(TypeName=...)] overrides.
// For generic types whose (overridden) name contains "{Arg}" placeholders,
// each placeholder is replaced with the XSD name of the corresponding generic
// argument, stopping as soon as no placeholders remain.
internal string XsdTypeName(Type type, SoapAttributes a, string name)
{
    string typeName = name;
    if (a.SoapType != null && a.SoapType.TypeName.Length > 0)
        typeName = a.SoapType.TypeName;
    if (type.GetTypeInfo().IsGenericType && typeName.IndexOf('{') >= 0)
    {
        Type genType = type.GetGenericTypeDefinition();
        Type[] names = genType.GetGenericArguments();
        Type[] types = type.GetGenericArguments();
        for (int i = 0; i < names.Length; i++)
        {
            string argument = "{" + names[i] + "}";
            if (typeName.Contains(argument))
            {
                typeName = typeName.Replace(argument, XsdTypeName(types[i]));
                if (typeName.IndexOf('{') < 0)
                {
                    break;
                }
            }
        }
    }
    // CONSIDER: throw if not all parameters were filled
    return typeName;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
//
// System.ComponentModel.Container test cases
//
// Authors:
// Gonzalo Paniagua Javier (gonzalo@ximian.com)
// Ivan N. Zlatev (contact i-nZ.net)
// Copyright (c) 2006 Novell, Inc. (http://www.novell.com)
// Copyright (c) 2006 Ivan N. Zlatev
//
using System.ComponentModel.Design;
using System.Linq;
using Xunit;
namespace System.ComponentModel.Tests
{
// Marker service registered in TestContainer's ServiceContainer; used to
// verify that sited components can resolve container-provided services.
internal class TestService
{
}
// Container test double: exposes protected Container members for testing,
// serves a TestService through a nested ServiceContainer, and can optionally
// disable duplicate-name validation.
internal class TestContainer : Container
{
    private readonly ServiceContainer _services = new ServiceContainer();

    public TestContainer()
    {
        _services.AddService(typeof(TestService), new TestService());
    }

    // Auto-property replaces the hand-written backing field.
    public bool AllowDuplicateNames { get; set; }

    protected override object GetService(Type serviceType)
    {
        return _services.GetService(serviceType);
    }

    // Exposes the protected base member to tests.
    public new void RemoveWithoutUnsiting(IComponent component)
    {
        base.RemoveWithoutUnsiting(component);
    }

    // Exposes the protected ValidateName to tests.
    public void InvokeValidateName(IComponent component, string name)
    {
        ValidateName(component, name);
    }

    protected override void ValidateName(IComponent component, string name)
    {
        if (AllowDuplicateNames)
            return;
        base.ValidateName(component, name);
    }

    public bool Contains(IComponent component)
    {
        // LINQ replaces the manual found-flag loop (System.Linq is already imported).
        return Components.Cast<IComponent>().Any(c => component.Equals(c));
    }

    // Exposes the protected Dispose(bool) overload to tests.
    public new void Dispose(bool disposing)
    {
        base.Dispose(disposing);
    }
}
// Component test double: records disposal, can be configured to throw from
// Dispose, and asserts (on siting) that the site resolves both ISite and the
// container's TestService.
internal class TestComponent : Component
{
    private bool _disposed;
    private bool _throwOnDispose;

    public override ISite Site
    {
        get { return base.Site; }
        set
        {
            base.Site = value;
            if (value == null)
                return;
            // A freshly assigned site must expose both ISite and TestService.
            Assert.NotNull(value.GetService(typeof(ISite)));
            Assert.NotNull(value.GetService(typeof(TestService)));
        }
    }

    public bool IsDisposed
    {
        get { return _disposed; }
    }

    public bool ThrowOnDispose
    {
        get { return _throwOnDispose; }
        set { _throwOnDispose = value; }
    }

    protected override void Dispose(bool disposing)
    {
        if (ThrowOnDispose)
            throw new InvalidOperationException();
        base.Dispose(disposing);
        _disposed = true;
    }
}
public class ContainerTest
{
// Fresh container per test (xUnit constructs the class for each test case).
private TestContainer _container;
public ContainerTest()
{
    _container = new TestContainer();
}
[Fact] // Add (IComponent)
public void Add1()
{
    TestContainer containerA = new TestContainer();
    TestContainer containerB = new TestContainer();
    ISite siteA;
    ISite siteB;
    // Components start out unsited.
    TestComponent compA = new TestComponent();
    Assert.Null(compA.Site);
    TestComponent compB = new TestComponent();
    Assert.Null(compB.Site);
    Assert.Equal(0, containerA.Components.Count);
    Assert.Equal(0, containerB.Components.Count);
    // Adding assigns a fresh, nameless site bound to this container.
    containerA.Add(compA);
    siteA = compA.Site;
    Assert.NotNull(siteA);
    Assert.Same(compA, siteA.Component);
    Assert.Same(containerA, siteA.Container);
    Assert.False(siteA.DesignMode);
    Assert.Null(siteA.Name);
    containerA.Add(compB);
    siteB = compB.Site;
    Assert.NotNull(siteB);
    Assert.Same(compB, siteB.Component);
    Assert.Same(containerA, siteB.Container);
    Assert.False(siteB.DesignMode);
    Assert.Null(siteB.Name);
    // Each component gets its own site instance.
    Assert.False(object.ReferenceEquals(siteA, siteB));
    Assert.Equal(2, containerA.Components.Count);
    Assert.Equal(0, containerB.Components.Count);
    Assert.Same(compA, containerA.Components[0]);
    Assert.Same(compB, containerA.Components[1]);
    // check effect of adding component that is already member of
    // another container: it moves, with a new site from the new container.
    containerB.Add(compA);
    Assert.False(object.ReferenceEquals(siteA, compA.Site));
    siteA = compA.Site;
    Assert.NotNull(siteA);
    Assert.Same(compA, siteA.Component);
    Assert.Same(containerB, siteA.Container);
    Assert.False(siteA.DesignMode);
    Assert.Null(siteA.Name);
    Assert.Equal(1, containerA.Components.Count);
    Assert.Equal(1, containerB.Components.Count);
    Assert.Same(compB, containerA.Components[0]);
    Assert.Same(compA, containerB.Components[0]);
    // check effect of add component twice to same container: a no-op,
    // the existing site is kept.
    containerB.Add(compA);
    Assert.Same(siteA, compA.Site);
    Assert.Equal(1, containerA.Components.Count);
    Assert.Equal(1, containerB.Components.Count);
    Assert.Same(compB, containerA.Components[0]);
    Assert.Same(compA, containerB.Components[0]);
}
[Fact]
public void Add1_Component_Null()
{
    // Adding a null component is a silent no-op.
    IComponent nullComponent = null;
    _container.Add(nullComponent);

    Assert.Equal(0, _container.Components.Count);
}
[Fact] // Add (IComponent, String)
public void Add2()
{
    TestContainer containerA = new TestContainer();
    TestContainer containerB = new TestContainer();
    ISite siteA;
    ISite siteB;
    // Components start out unsited.
    TestComponent compA = new TestComponent();
    Assert.Null(compA.Site);
    TestComponent compB = new TestComponent();
    Assert.Null(compB.Site);
    Assert.Equal(0, containerA.Components.Count);
    Assert.Equal(0, containerB.Components.Count);
    // Adding with a name assigns a site that carries that name.
    containerA.Add(compA, "A");
    siteA = compA.Site;
    Assert.NotNull(siteA);
    Assert.Same(compA, siteA.Component);
    Assert.Same(containerA, siteA.Container);
    Assert.False(siteA.DesignMode);
    Assert.Equal("A", siteA.Name);
    containerA.Add(compB, "B");
    siteB = compB.Site;
    Assert.NotNull(siteB);
    Assert.Same(compB, siteB.Component);
    Assert.Same(containerA, siteB.Container);
    Assert.False(siteB.DesignMode);
    Assert.Equal("B", siteB.Name);
    Assert.False(object.ReferenceEquals(siteA, siteB));
    Assert.Equal(2, containerA.Components.Count);
    Assert.Equal(0, containerB.Components.Count);
    Assert.Same(compA, containerA.Components[0]);
    Assert.Same(compB, containerA.Components[1]);
    // check effect of adding component that is already member of
    // another container: it moves and receives a new site with the new name.
    containerB.Add(compA, "A2");
    Assert.False(object.ReferenceEquals(siteA, compA.Site));
    siteA = compA.Site;
    Assert.NotNull(siteA);
    Assert.Same(compA, siteA.Component);
    Assert.Same(containerB, siteA.Container);
    Assert.False(siteA.DesignMode);
    Assert.Equal("A2", siteA.Name);
    Assert.Equal(1, containerA.Components.Count);
    Assert.Equal(1, containerB.Components.Count);
    Assert.Same(compB, containerA.Components[0]);
    Assert.Same(compA, containerB.Components[0]);
    // check effect of add component twice to same container: no-op.
    containerB.Add(compA, "A2");
    Assert.Same(siteA, compA.Site);
    Assert.Equal("A2", siteA.Name);
    Assert.Equal(1, containerA.Components.Count);
    Assert.Equal(1, containerB.Components.Count);
    Assert.Same(compB, containerA.Components[0]);
    Assert.Same(compA, containerB.Components[0]);
    // add again with different name: also a no-op, the original name wins.
    containerB.Add(compA, "A3");
    Assert.Same(siteA, compA.Site);
    Assert.Equal("A2", siteA.Name);
    Assert.Equal(1, containerA.Components.Count);
    Assert.Equal(1, containerB.Components.Count);
    Assert.Same(compB, containerA.Components[0]);
    Assert.Same(compA, containerB.Components[0]);
    // check effect of add component twice to same container
    containerB.Add(compA, "A2");
    Assert.Same(siteA, compA.Site);
    Assert.Equal("A2", siteA.Name);
}
[Fact]
public void Add_ExceedsSizeOfBuffer_Success()
{
    // Add enough components to exceed the container's initial internal
    // buffer; growth must preserve insertion order.
    var container = new Container();
    var components = Enumerable.Range(0, 5).Select(_ => new Component()).ToArray();

    for (int index = 0; index < components.Length; index++)
    {
        container.Add(components[index]);
        Assert.Same(components[index], container.Components[index]);
    }
}
[Fact]
public void Add2_Component_Null()
{
    IComponent missing = null;

    // A null component with a name is ignored.
    _container.Add(missing, "A");
    Assert.Equal(0, _container.Components.Count);

    // A real component under the same name succeeds...
    _container.Add(new TestComponent(), "A");
    Assert.Equal(1, _container.Components.Count);

    // ...and a subsequent null add is still ignored.
    _container.Add(missing, "A");
    Assert.Equal(1, _container.Components.Count);
}
[Fact]
public void Add2_Name_Duplicate()
{
    TestContainer container = new TestContainer();
    TestComponent c1 = new TestComponent();
    container.Add(c1, "dup");
    // new component, same case: rejected with an ArgumentException that has
    // no ParamName.
    TestComponent c2 = new TestComponent();
    ArgumentException ex;
    ex = AssertExtensions.Throws<ArgumentException>(null, () => container.Add(c2, "dup"));
    Assert.Equal(typeof(ArgumentException), ex.GetType());
    Assert.Null(ex.InnerException);
    if (!PlatformDetection.IsNetNative) // .Net Native toolchain optimizes away exception messages and paramnames.
    {
        Assert.NotNull(ex.Message);
        Assert.True(ex.Message.IndexOf("'dup'") != -1);
        Assert.Null(ex.ParamName);
    }
    Assert.Equal(1, container.Components.Count);
    // new component, different case: name comparison is case-insensitive.
    TestComponent c3 = new TestComponent();
    ex = AssertExtensions.Throws<ArgumentException>(null, () => container.Add(c3, "duP"));
    // Duplicate component name 'duP'. Component names must be
    // unique and case-insensitive
    Assert.Equal(typeof(ArgumentException), ex.GetType());
    Assert.Null(ex.InnerException);
    if (!PlatformDetection.IsNetNative) // .Net Native toolchain optimizes away exception messages and paramnames.
    {
        Assert.NotNull(ex.Message);
        Assert.True(ex.Message.IndexOf("'duP'") != -1);
        Assert.Null(ex.ParamName);
    }
    Assert.Equal(1, container.Components.Count);
    // existing component, same case: re-adding an already-sited member is a
    // no-op even under a duplicate name; the original name is kept.
    TestComponent c4 = new TestComponent();
    container.Add(c4, "C4");
    Assert.Equal(2, container.Components.Count);
    container.Add(c4, "dup");
    Assert.Equal(2, container.Components.Count);
    Assert.Equal("C4", c4.Site.Name);
    // component of other container, same case: the duplicate name is
    // rejected before the component is moved.
    TestContainer container2 = new TestContainer();
    TestComponent c5 = new TestComponent();
    container2.Add(c5, "C5");
    ex = AssertExtensions.Throws<ArgumentException>(null, () => container.Add(c5, "dup"));
    // Duplicate component name 'dup'. Component names must be
    // unique and case-insensitive
    Assert.Equal(typeof(ArgumentException), ex.GetType());
    Assert.Null(ex.InnerException);
    if (!PlatformDetection.IsNetNative) // .Net Native toolchain optimizes away exception messages and paramnames.
    {
        Assert.NotNull(ex.Message);
        Assert.True(ex.Message.IndexOf("'dup'") != -1);
        Assert.Null(ex.ParamName);
    }
    Assert.Equal(2, container.Components.Count);
    Assert.Equal(1, container2.Components.Count);
    Assert.Same(c5, container2.Components[0]);
    // With validation disabled, duplicate names are accepted and each
    // component keeps its own distinct site.
    container.AllowDuplicateNames = true;
    TestComponent c6 = new TestComponent();
    container.Add(c6, "dup");
    Assert.Equal(3, container.Components.Count);
    Assert.NotNull(c1.Site);
    Assert.Equal("dup", c1.Site.Name);
    Assert.NotNull(c6.Site);
    Assert.Equal("dup", c6.Site.Name);
    Assert.False(object.ReferenceEquals(c1.Site, c6.Site));
}
[Fact]
public void Add_SetSiteName_ReturnsExpected()
{
    var container = new Container();
    var component = new Component();
    container.Add(component, "Name1");
    Assert.Equal("Name1", component.Site.Name);

    // Renaming through the site sticks.
    component.Site.Name = "OtherName";
    Assert.Equal("OtherName", component.Site.Name);

    // Assigning the current name again leaves it unchanged.
    component.Site.Name = "OtherName";
    Assert.Equal("OtherName", component.Site.Name);
}
[Fact]
public void Add_SetSiteNameDuplicate_ThrowsArgumentException()
{
    var container = new Container();
    var first = new Component();
    var second = new Component();
    container.Add(first, "Name1");
    container.Add(second, "Name2");

    // Renaming a sited component to a name already used in the same
    // container is rejected; the exception carries no ParamName.
    Assert.Throws<ArgumentException>(null, () => first.Site.Name = "Name2");
}
[Fact]
public void Add_DuplicateNameWithInheritedReadOnly_AddsSuccessfully()
{
    var readOnlyComponent = new Component();
    var otherComponent = new Component();

    // Mark the first component InheritedReadOnly; adding a second component
    // with the same name must then succeed.
    TypeDescriptor.AddAttributes(readOnlyComponent, new InheritanceAttribute(InheritanceLevel.InheritedReadOnly));

    var container = new Container();
    container.Add(readOnlyComponent, "Name");
    container.Add(otherComponent, "Name");

    Assert.Equal(new IComponent[] { readOnlyComponent, otherComponent }, container.Components.Cast<IComponent>());
}
[Fact]
public void AddRemove()
{
    var component = new TestComponent();

    // Add sites the component and makes it a member.
    _container.Add(component);
    Assert.NotNull(component.Site);
    Assert.True(_container.Contains(component));

    // Remove unsites it and drops membership.
    _container.Remove(component);
    Assert.Null(component.Site);
    Assert.False(_container.Contains(component));
}
[Fact] // Dispose ()
public void Dispose1()
{
    TestComponent compA;
    TestComponent compB;
    // Normal case: disposing the container disposes and unsites every member.
    compA = new TestComponent();
    _container.Add(compA);
    compB = new TestComponent();
    _container.Add(compB);
    _container.Dispose();
    Assert.Equal(0, _container.Components.Count);
    Assert.True(compA.IsDisposed);
    Assert.Null(compA.Site);
    Assert.True(compB.IsDisposed);
    Assert.Null(compB.Site);
    // First member (compA) throws from Dispose. Members are disposed in
    // reverse order, so compB is disposed before the exception surfaces.
    _container = new TestContainer();
    compA = new TestComponent();
    compA.ThrowOnDispose = true;
    _container.Add(compA);
    compB = new TestComponent();
    _container.Add(compB);
    Assert.Throws<InvalidOperationException>(() => _container.Dispose());
    // assert that component is not removed from components until after
    // Dispose of component has succeeded
    Assert.Equal(0, _container.Components.Count);
    Assert.False(compA.IsDisposed);
    Assert.Null(compA.Site);
    Assert.True(compB.IsDisposed);
    Assert.Null(compB.Site);
    compA.ThrowOnDispose = false;
    // Last member (compB) throws from Dispose: compA is never reached and
    // stays sited inside the container.
    _container = new TestContainer();
    compA = new TestComponent();
    _container.Add(compA);
    compB = new TestComponent();
    compB.ThrowOnDispose = true;
    _container.Add(compB);
    Assert.Throws<InvalidOperationException>(() => _container.Dispose());
    Assert.Equal(1, _container.Components.Count);
    Assert.Same(compA, _container.Components[0]);
    Assert.False(compA.IsDisposed);
    Assert.NotNull(compA.Site);
    Assert.False(compB.IsDisposed);
    Assert.Null(compB.Site);
    compB.ThrowOnDispose = false;
}
[Fact] // Dispose (Boolean)
public void Dispose2()
{
    TestComponent compA;
    TestComponent compB;
    compA = new TestComponent();
    _container.Add(compA);
    compB = new TestComponent();
    _container.Add(compB);
    // Dispose(false) leaves members sited and undisposed.
    _container.Dispose(false);
    Assert.Equal(2, _container.Components.Count);
    Assert.False(compA.IsDisposed);
    Assert.NotNull(compA.Site);
    Assert.False(compB.IsDisposed);
    Assert.NotNull(compB.Site);
    // Dispose(true) disposes and unsites every member.
    _container.Dispose(true);
    Assert.Equal(0, _container.Components.Count);
    Assert.True(compA.IsDisposed);
    Assert.Null(compA.Site);
    Assert.True(compB.IsDisposed);
    Assert.Null(compB.Site);
    // The container remains usable after disposal: add and dispose again.
    compA = new TestComponent();
    _container.Add(compA);
    compB = new TestComponent();
    _container.Add(compB);
    Assert.Equal(2, _container.Components.Count);
    Assert.False(compA.IsDisposed);
    Assert.NotNull(compA.Site);
    Assert.False(compB.IsDisposed);
    Assert.NotNull(compB.Site);
    _container.Dispose(true);
    Assert.Equal(0, _container.Components.Count);
    Assert.True(compA.IsDisposed);
    Assert.Null(compA.Site);
    Assert.True(compB.IsDisposed);
    Assert.Null(compB.Site);
}
[Fact]
public void Dispose_Recursive()
{
    // MyComponent owns a container that also contains the component itself;
    // disposing the component must still leave that container empty.
    var component = new MyComponent();
    Container owned = component.CreateContainer();

    component.Dispose();

    Assert.Equal(0, owned.Components.Count);
}
[Fact]
public void GetService()
{
    var container = new GetServiceContainer();
    container.Add(new MyComponent());

    // Component types are not exposed as services.
    Assert.Null(container.GetService(typeof(MyComponent)));
    Assert.Null(container.GetService(typeof(Component)));

    // The container itself is available as IContainer.
    Assert.Same(container, container.GetService(typeof(IContainer)));

    // A null service type yields null rather than throwing.
    Assert.Null(container.GetService((Type)null));
}
[Fact]
public void Remove()
{
    TestComponent compA;
    TestComponent compB;
    ISite siteA;
    ISite siteB;
    compA = new TestComponent();
    _container.Add(compA);
    siteA = compA.Site;
    compB = new TestComponent();
    _container.Add(compB);
    siteB = compB.Site;
    // Removing a member unsites it; the other member is untouched.
    _container.Remove(compB);
    Assert.Same(siteA, compA.Site);
    Assert.Null(compB.Site);
    Assert.Equal(1, _container.Components.Count);
    Assert.Same(compA, _container.Components[0]);
    // remove component with no site: silent no-op.
    compB = new TestComponent();
    _container.Remove(compB);
    Assert.Same(siteA, compA.Site);
    Assert.Null(compB.Site);
    Assert.Equal(1, _container.Components.Count);
    Assert.Same(compA, _container.Components[0]);
    // remove component associated with other container: no-op, the other
    // container keeps the component and its site.
    TestContainer container2 = new TestContainer();
    compB = new TestComponent();
    container2.Add(compB);
    siteB = compB.Site;
    _container.Remove(compB);
    Assert.Same(siteA, compA.Site);
    Assert.Same(siteB, compB.Site);
    Assert.Equal(1, _container.Components.Count);
    Assert.Same(compA, _container.Components[0]);
    Assert.Equal(1, container2.Components.Count);
    Assert.Same(compB, container2.Components[0]);
}
[Fact]
public void Remove_Component_Null()
{
    _container.Add(new TestComponent());

    // Removing null is a silent no-op.
    _container.Remove((IComponent)null);

    Assert.Equal(1, _container.Components.Count);
}
[Fact]
public void RemoveWithoutUnsiting()
{
    TestComponent compA;
    TestComponent compB;
    ISite siteA;
    ISite siteB;
    compA = new TestComponent();
    _container.Add(compA);
    siteA = compA.Site;
    compB = new TestComponent();
    _container.Add(compB);
    siteB = compB.Site;
    // Unlike Remove, the removed component keeps its (now stale) site.
    _container.RemoveWithoutUnsiting(compB);
    Assert.Same(siteA, compA.Site);
    Assert.Same(siteB, compB.Site);
    Assert.Equal(1, _container.Components.Count);
    Assert.Same(compA, _container.Components[0]);
    // remove component with no site: silent no-op.
    compB = new TestComponent();
    _container.RemoveWithoutUnsiting(compB);
    Assert.Same(siteA, compA.Site);
    Assert.Null(compB.Site);
    Assert.Equal(1, _container.Components.Count);
    Assert.Same(compA, _container.Components[0]);
    // remove component associated with other container: no-op, the other
    // container keeps the component and its site.
    TestContainer container2 = new TestContainer();
    compB = new TestComponent();
    container2.Add(compB);
    siteB = compB.Site;
    _container.RemoveWithoutUnsiting(compB);
    Assert.Same(siteA, compA.Site);
    Assert.Same(siteB, compB.Site);
    Assert.Equal(1, _container.Components.Count);
    Assert.Same(compA, _container.Components[0]);
    Assert.Equal(1, container2.Components.Count);
    Assert.Same(compB, container2.Components[0]);
}
[Fact]
public void RemoveWithoutUnsiting_Component_Null()
{
    var component = new TestComponent();
    _container.Add(component);
    ISite originalSite = component.Site;

    // Null is ignored; the existing member and its site are untouched.
    _container.RemoveWithoutUnsiting((IComponent)null);

    Assert.Same(originalSite, component.Site);
    Assert.Equal(1, _container.Components.Count);
    Assert.Same(component, _container.Components[0]);
}
[Fact]
public void Remove_NoSuchComponentWithoutUnsiting_Nop()
{
    var first = new Component();
    var second = new Component();
    var container = new SitingContainer();
    container.Add(first);
    container.Add(second);

    container.DoRemoveWithoutUnsitting(first);
    Assert.Equal(1, container.Components.Count);

    // Removing a component that is no longer present changes nothing.
    container.DoRemoveWithoutUnsitting(first);
    Assert.Equal(1, container.Components.Count);

    container.DoRemoveWithoutUnsitting(second);
    Assert.Equal(0, container.Components.Count);
}
// Exposes Container's protected RemoveWithoutUnsiting for testing.
private class SitingContainer : Container
{
    public void DoRemoveWithoutUnsitting(IComponent component) => RemoveWithoutUnsiting(component);
}
[Fact]
public void ValidateName_Component_Null()
{
    ArgumentNullException ex =
        Assert.Throws<ArgumentNullException>(() => _container.InvokeValidateName((IComponent)null, "A"));

    Assert.Equal(typeof(ArgumentNullException), ex.GetType());
    Assert.Null(ex.InnerException);
    if (!PlatformDetection.IsNetNative) // .NET Native strips exception messages and param names.
    {
        Assert.NotNull(ex.Message);
        Assert.Equal("component", ex.ParamName);
    }
}
[Fact]
public void ValidateName_Name_Null()
{
    var sited = new TestComponent();
    _container.Add(sited, (string)null);

    // A null name is always valid, even for a component that is not yet a
    // member of the container.
    var unsited = new TestComponent();
    _container.InvokeValidateName(unsited, (string)null);
}
[Fact]
public void ValidateName_Name_Duplicate()
{
    TestComponent compA = new TestComponent();
    _container.Add(compA, "dup");
    // same component, same case: a component may keep its own name.
    _container.InvokeValidateName(compA, "dup");
    // existing component, same case: another member may not take the name.
    TestComponent compB = new TestComponent();
    _container.Add(compB, "B");
    ArgumentException ex;
    ex = AssertExtensions.Throws<ArgumentException>(null, () => _container.InvokeValidateName(compB, "dup"));
    // Duplicate component name 'duP'. Component names must be
    // unique and case-insensitive
    Assert.Equal(typeof(ArgumentException), ex.GetType());
    Assert.Null(ex.InnerException);
    if (!PlatformDetection.IsNetNative) // .Net Native toolchain optimizes away exception messages and paramnames.
    {
        Assert.NotNull(ex.Message);
        Assert.True(ex.Message.IndexOf("'dup'") != -1);
        Assert.Null(ex.ParamName);
    }
    Assert.Equal(2, _container.Components.Count);
    // Validation alone never mutates the container.
    _container.InvokeValidateName(compB, "whatever");
    // new component, different case: comparison is case-insensitive.
    TestComponent compC = new TestComponent();
    ex = AssertExtensions.Throws<ArgumentException>(null, () => _container.InvokeValidateName(compC, "dup"));
    // Duplicate component name 'duP'. Component names must be
    // unique and case-insensitive
    Assert.Equal(typeof(ArgumentException), ex.GetType());
    Assert.Null(ex.InnerException);
    if (!PlatformDetection.IsNetNative) // .Net Native toolchain optimizes away exception messages and paramnames.
    {
        Assert.NotNull(ex.Message);
        Assert.True(ex.Message.IndexOf("'dup'") != -1);
        Assert.Null(ex.ParamName);
    }
    Assert.Equal(2, _container.Components.Count);
    _container.InvokeValidateName(compC, "whatever");
    // component of other container, different case: still rejected; the
    // other container is unaffected.
    TestContainer container2 = new TestContainer();
    TestComponent compD = new TestComponent();
    container2.Add(compD, "B");
    ex = AssertExtensions.Throws<ArgumentException>(null, () => _container.InvokeValidateName(compD, "dup"));
    // Duplicate component name 'duP'. Component names must be
    // unique and case-insensitive
    Assert.Equal(typeof(ArgumentException), ex.GetType());
    Assert.Null(ex.InnerException);
    if (!PlatformDetection.IsNetNative) // .Net Native toolchain optimizes away exception messages and paramnames.
    {
        Assert.NotNull(ex.Message);
        Assert.True(ex.Message.IndexOf("'dup'") != -1);
        Assert.Null(ex.ParamName);
    }
    Assert.Equal(2, _container.Components.Count);
    _container.InvokeValidateName(compD, "whatever");
    Assert.Equal(1, container2.Components.Count);
    Assert.Same(compD, container2.Components[0]);
}
[Fact]
public void Components_GetWithDefaultFilterService_ReturnsAllComponents()
{
    // The base ContainerFilterService implementation filters nothing.
    var container = new FilterContainer { FilterService = new DefaultFilterService() };
    var first = new SubComponent();
    var plain = new Component();
    var second = new SubComponent();
    container.Add(first);
    container.Add(plain);
    container.Add(second);

    Assert.Equal(new IComponent[] { first, plain, second }, container.Components.Cast<IComponent>());
}
[Fact]
public void Components_GetWithCustomFilterService_ReturnsFilteredComponents()
{
    // CustomContainerFilterService keeps only SubComponent instances.
    var container = new FilterContainer { FilterService = new CustomContainerFilterService() };
    var first = new SubComponent();
    var plain = new Component();
    var second = new SubComponent();
    container.Add(first);
    container.Add(plain);
    container.Add(second);

    Assert.Equal(new IComponent[] { first, second }, container.Components.Cast<IComponent>());
}
[Fact]
public void Components_GetWithCustomFilterServiceAfterChangingComponents_ReturnsUpdatedComponents()
{
    // CustomContainerFilterService keeps only SubComponent instances.
    var container = new FilterContainer { FilterService = new CustomContainerFilterService() };
    var first = new SubComponent();
    var plain = new Component();
    var second = new SubComponent();
    container.Add(first);
    container.Add(plain);
    container.Add(second);
    Assert.Equal(new IComponent[] { first, second }, container.Components.Cast<IComponent>());

    // A removal is reflected on the next read of Components.
    container.Remove(first);
    Assert.Equal(new IComponent[] { second }, container.Components.Cast<IComponent>());
}
[Fact]
public void Components_GetWithNullFilterService_ReturnsUnfiltered()
{
    // A filter service whose FilterComponents returns null leaves the
    // collection unfiltered.
    var container = new FilterContainer { FilterService = new NullContainerFilterService() };
    var first = new SubComponent();
    var plain = new Component();
    var second = new SubComponent();
    container.Add(first);
    container.Add(plain);
    container.Add(second);

    Assert.Equal(new IComponent[] { first, plain, second }, container.Components.Cast<IComponent>());
}
// Container that serves a configurable ContainerFilterService, which the
// base class consults when building the Components collection.
private class FilterContainer : Container
{
    public ContainerFilterService FilterService { get; set; }

    protected override object GetService(Type service)
    {
        return service == typeof(ContainerFilterService)
            ? FilterService
            : base.GetService(service);
    }
}
// Marker subclass the filter tests use to distinguish filtered components.
private class SubComponent : Component { }
// Inherits the base (no-op) filtering behavior of ContainerFilterService.
private class DefaultFilterService : ContainerFilterService { }
// Filter that keeps only the SubComponent instances of a collection.
private class CustomContainerFilterService : ContainerFilterService
{
    public override ComponentCollection FilterComponents(ComponentCollection components)
        => new ComponentCollection(components.OfType<SubComponent>().ToArray());
}
// Filter that returns null; the tests expect the container to treat this
// as "no filtering".
private class NullContainerFilterService : ContainerFilterService
{
    public override ComponentCollection FilterComponents(ComponentCollection components) => null;
}
// Component that owns a Container which in turn contains this component —
// used to verify that disposing such a cycle terminates and empties the
// owned container.
private class MyComponent : Component
{
    private Container container;

    protected override void Dispose(bool disposing)
    {
        // Dispose the owned container first; it disposes its members,
        // including this component, before base.Dispose runs.
        if (container != null)
            container.Dispose();
        base.Dispose(disposing);
    }

    public Container CreateContainer()
    {
        // Only one owned container per component.
        if (container != null)
            throw new InvalidOperationException();
        container = new Container();
        container.Add(new MyComponent());
        container.Add(this);
        return container;
    }
}
// Minimal IContainer stub: always empty and ignores every mutation.
private class MyContainer : IContainer
{
    private readonly ComponentCollection _components =
        new ComponentCollection(new Component[0]);

    public ComponentCollection Components => _components;

    public void Add(IComponent component) { }

    public void Add(IComponent component, string name) { }

    public void Remove(IComponent component) { }

    public void Dispose() { }
}
// Publicly re-exposes Container's protected GetService for the tests.
public class GetServiceContainer : Container
{
    public new object GetService(Type service) => base.GetService(service);
}
}
}
| |
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
using System.Text;
using TVicPort_Cs;
namespace MemTest
{
/// <summary>
/// Summary description for Form1.
/// </summary>
public class MemForm : System.Windows.Forms.Form
{
// NOTE(review): presumably set once the TVicPort driver has been opened so
// the button handlers can guard against using the driver too early —
// confirm against the Open_Driver/Close_Driver handlers (not visible here).
bool DriverOpened = false;
// Designer-managed controls.
public System.Windows.Forms.Button B_Exit;
public System.Windows.Forms.Button Close_Driver;
public System.Windows.Forms.Button Open_Driver;
public System.Windows.Forms.ListBox ListAddr;
public System.Windows.Forms.ListBox ListHex;
public System.Windows.Forms.ListBox ListAscii;
public System.Windows.Forms.TextBox E_Base;
public System.Windows.Forms.Button B_ReadMemory;
public System.Windows.Forms.Label Label12;
public System.Windows.Forms.Label Label13;
public System.Windows.Forms.Label Label14;
private System.Windows.Forms.Label label6;
internal System.Windows.Forms.Label label3;
private System.Windows.Forms.LinkLabel linkLabel2;
private System.Windows.Forms.LinkLabel linkLabel1;
private System.Windows.Forms.Label L_Days;
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.Container components = null;
/// <summary>
/// Initializes the form and all designer-created controls.
/// </summary>
public MemForm()
{
    // Required for Windows Forms Designer support.
    InitializeComponent();
    // Any additional constructor code belongs after the
    // InitializeComponent call.
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">True when called from Dispose(); managed
/// resources are released only in that case.</param>
protected override void Dispose( bool disposing )
{
    if (disposing && components != null)
    {
        components.Dispose();
    }
    base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(MemForm));
this.B_Exit = new System.Windows.Forms.Button();
this.Close_Driver = new System.Windows.Forms.Button();
this.Open_Driver = new System.Windows.Forms.Button();
this.ListAddr = new System.Windows.Forms.ListBox();
this.ListHex = new System.Windows.Forms.ListBox();
this.ListAscii = new System.Windows.Forms.ListBox();
this.E_Base = new System.Windows.Forms.TextBox();
this.B_ReadMemory = new System.Windows.Forms.Button();
this.Label12 = new System.Windows.Forms.Label();
this.Label13 = new System.Windows.Forms.Label();
this.Label14 = new System.Windows.Forms.Label();
this.label6 = new System.Windows.Forms.Label();
this.label3 = new System.Windows.Forms.Label();
this.linkLabel2 = new System.Windows.Forms.LinkLabel();
this.linkLabel1 = new System.Windows.Forms.LinkLabel();
this.L_Days = new System.Windows.Forms.Label();
this.SuspendLayout();
//
// B_Exit
//
this.B_Exit.BackColor = System.Drawing.SystemColors.Control;
this.B_Exit.Cursor = System.Windows.Forms.Cursors.Default;
this.B_Exit.Font = new System.Drawing.Font("Arial", 8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.B_Exit.ForeColor = System.Drawing.SystemColors.ControlText;
this.B_Exit.Location = new System.Drawing.Point(472, 270);
this.B_Exit.Name = "B_Exit";
this.B_Exit.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.B_Exit.Size = new System.Drawing.Size(161, 33);
this.B_Exit.TabIndex = 27;
this.B_Exit.Text = "Exit";
this.B_Exit.Click += new System.EventHandler(this.B_Exit_Click);
//
// Close_Driver
//
this.Close_Driver.BackColor = System.Drawing.SystemColors.Control;
this.Close_Driver.Cursor = System.Windows.Forms.Cursors.Default;
this.Close_Driver.Font = new System.Drawing.Font("Arial", 8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.Close_Driver.ForeColor = System.Drawing.SystemColors.ControlText;
this.Close_Driver.Location = new System.Drawing.Point(472, 200);
this.Close_Driver.Name = "Close_Driver";
this.Close_Driver.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.Close_Driver.Size = new System.Drawing.Size(161, 33);
this.Close_Driver.TabIndex = 26;
this.Close_Driver.Text = "Close Driver";
this.Close_Driver.Click += new System.EventHandler(this.Close_Driver_Click);
//
// Open_Driver
//
this.Open_Driver.BackColor = System.Drawing.SystemColors.Control;
this.Open_Driver.Cursor = System.Windows.Forms.Cursors.Default;
this.Open_Driver.Font = new System.Drawing.Font("Arial", 8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.Open_Driver.ForeColor = System.Drawing.SystemColors.ControlText;
this.Open_Driver.Location = new System.Drawing.Point(472, 152);
this.Open_Driver.Name = "Open_Driver";
this.Open_Driver.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.Open_Driver.Size = new System.Drawing.Size(161, 33);
this.Open_Driver.TabIndex = 25;
this.Open_Driver.Text = "Open Driver";
this.Open_Driver.Click += new System.EventHandler(this.Open_Driver_Click);
//
// ListAddr
//
this.ListAddr.BackColor = System.Drawing.SystemColors.Window;
this.ListAddr.Cursor = System.Windows.Forms.Cursors.Default;
this.ListAddr.Font = new System.Drawing.Font("Courier New", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(204)));
this.ListAddr.ForeColor = System.Drawing.SystemColors.WindowText;
this.ListAddr.ItemHeight = 14;
this.ListAddr.Location = new System.Drawing.Point(8, 32);
this.ListAddr.Name = "ListAddr";
this.ListAddr.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.ListAddr.Size = new System.Drawing.Size(65, 228);
this.ListAddr.TabIndex = 21;
//
// ListHex
//
this.ListHex.BackColor = System.Drawing.SystemColors.Window;
this.ListHex.Cursor = System.Windows.Forms.Cursors.Default;
this.ListHex.Font = new System.Drawing.Font("Courier New", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(204)));
this.ListHex.ForeColor = System.Drawing.SystemColors.WindowText;
this.ListHex.ItemHeight = 14;
this.ListHex.Location = new System.Drawing.Point(76, 32);
this.ListHex.Name = "ListHex";
this.ListHex.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.ListHex.Size = new System.Drawing.Size(233, 228);
this.ListHex.TabIndex = 20;
//
// ListAscii
//
this.ListAscii.BackColor = System.Drawing.SystemColors.Window;
this.ListAscii.Cursor = System.Windows.Forms.Cursors.Default;
this.ListAscii.Font = new System.Drawing.Font("Courier New", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(204)));
this.ListAscii.ForeColor = System.Drawing.SystemColors.WindowText;
this.ListAscii.ItemHeight = 14;
this.ListAscii.Location = new System.Drawing.Point(312, 32);
this.ListAscii.Name = "ListAscii";
this.ListAscii.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.ListAscii.Size = new System.Drawing.Size(129, 228);
this.ListAscii.TabIndex = 19;
//
// E_Base
//
this.E_Base.AcceptsReturn = true;
this.E_Base.AutoSize = false;
this.E_Base.BackColor = System.Drawing.SystemColors.Window;
this.E_Base.Cursor = System.Windows.Forms.Cursors.IBeam;
this.E_Base.Font = new System.Drawing.Font("Courier New", 9.75F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(204)));
this.E_Base.ForeColor = System.Drawing.SystemColors.WindowText;
this.E_Base.Location = new System.Drawing.Point(368, 272);
this.E_Base.MaxLength = 0;
this.E_Base.Name = "E_Base";
this.E_Base.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.E_Base.Size = new System.Drawing.Size(73, 28);
this.E_Base.TabIndex = 18;
this.E_Base.Text = "000c0044";
//
// B_ReadMemory
//
this.B_ReadMemory.BackColor = System.Drawing.SystemColors.Control;
this.B_ReadMemory.Cursor = System.Windows.Forms.Cursors.Default;
this.B_ReadMemory.Font = new System.Drawing.Font("Arial", 8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.B_ReadMemory.ForeColor = System.Drawing.SystemColors.ControlText;
this.B_ReadMemory.Location = new System.Drawing.Point(8, 272);
this.B_ReadMemory.Name = "B_ReadMemory";
this.B_ReadMemory.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.B_ReadMemory.Size = new System.Drawing.Size(345, 29);
this.B_ReadMemory.TabIndex = 17;
this.B_ReadMemory.Text = "Read 256 bytes from this physical memory adrress ==>";
this.B_ReadMemory.Click += new System.EventHandler(this.B_ReadMemory_Click);
//
// Label12
//
this.Label12.BackColor = System.Drawing.SystemColors.Control;
this.Label12.Cursor = System.Windows.Forms.Cursors.Default;
this.Label12.Font = new System.Drawing.Font("Arial", 9.75F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(204)));
this.Label12.ForeColor = System.Drawing.SystemColors.ControlText;
this.Label12.Location = new System.Drawing.Point(16, 8);
this.Label12.Name = "Label12";
this.Label12.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.Label12.Size = new System.Drawing.Size(73, 17);
this.Label12.TabIndex = 24;
this.Label12.Text = "Addr(hex)";
//
// Label13
//
this.Label13.BackColor = System.Drawing.SystemColors.Control;
this.Label13.Cursor = System.Windows.Forms.Cursors.Default;
this.Label13.Font = new System.Drawing.Font("Arial", 8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.Label13.ForeColor = System.Drawing.SystemColors.ControlText;
this.Label13.Location = new System.Drawing.Point(128, 8);
this.Label13.Name = "Label13";
this.Label13.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.Label13.Size = new System.Drawing.Size(129, 17);
this.Label13.TabIndex = 23;
this.Label13.Text = "H E X A D E C I M A L";
//
// Label14
//
this.Label14.BackColor = System.Drawing.SystemColors.Control;
this.Label14.Cursor = System.Windows.Forms.Cursors.Default;
this.Label14.Font = new System.Drawing.Font("Arial", 8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.Label14.ForeColor = System.Drawing.SystemColors.ControlText;
this.Label14.Location = new System.Drawing.Point(344, 8);
this.Label14.Name = "Label14";
this.Label14.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.Label14.Size = new System.Drawing.Size(89, 17);
this.Label14.TabIndex = 22;
this.Label14.Text = "A S C I I";
//
// label6
//
this.label6.AutoSize = true;
this.label6.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.label6.Location = new System.Drawing.Point(487, 32);
this.label6.Name = "label6";
this.label6.Size = new System.Drawing.Size(130, 16);
this.label6.TabIndex = 46;
this.label6.Text = "(c) 2005, EnTech Taiwan";
//
// label3
//
this.label3.AutoSize = true;
this.label3.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.label3.Location = new System.Drawing.Point(528, 8);
this.label3.Name = "label3";
this.label3.Size = new System.Drawing.Size(48, 16);
this.label3.TabIndex = 45;
this.label3.Text = "TVicPort";
//
// linkLabel2
//
this.linkLabel2.AutoSize = true;
this.linkLabel2.Location = new System.Drawing.Point(487, 80);
this.linkLabel2.Name = "linkLabel2";
this.linkLabel2.Size = new System.Drawing.Size(131, 16);
this.linkLabel2.TabIndex = 51;
this.linkLabel2.TabStop = true;
this.linkLabel2.Text = "tools@entechtaiwan.com";
this.linkLabel2.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.linkLabel2_LinkClicked);
//
// linkLabel1
//
this.linkLabel1.AutoSize = true;
this.linkLabel1.Location = new System.Drawing.Point(476, 56);
this.linkLabel1.Name = "linkLabel1";
this.linkLabel1.Size = new System.Drawing.Size(152, 16);
this.linkLabel1.TabIndex = 50;
this.linkLabel1.TabStop = true;
this.linkLabel1.Text = "http://www.entechtaiwan.com";
this.linkLabel1.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.linkLabel1_LinkClicked);
//
// L_Days
//
this.L_Days.Location = new System.Drawing.Point(472, 112);
this.L_Days.Name = "L_Days";
this.L_Days.Size = new System.Drawing.Size(160, 16);
this.L_Days.TabIndex = 52;
this.L_Days.Text = "Evaluation Days Left: ??";
this.L_Days.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
//
// MemForm
//
this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
this.ClientSize = new System.Drawing.Size(656, 309);
this.Controls.Add(this.L_Days);
this.Controls.Add(this.linkLabel2);
this.Controls.Add(this.linkLabel1);
this.Controls.Add(this.label6);
this.Controls.Add(this.label3);
this.Controls.Add(this.B_Exit);
this.Controls.Add(this.Close_Driver);
this.Controls.Add(this.Open_Driver);
this.Controls.Add(this.ListAddr);
this.Controls.Add(this.ListHex);
this.Controls.Add(this.ListAscii);
this.Controls.Add(this.E_Base);
this.Controls.Add(this.B_ReadMemory);
this.Controls.Add(this.Label12);
this.Controls.Add(this.Label13);
this.Controls.Add(this.Label14);
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
this.Name = "MemForm";
this.Text = "Physical Memory Sample Application";
this.Load += new System.EventHandler(this.Form1_Load);
this.Closed += new System.EventHandler(this.Form1_Closed);
this.ResumeLayout(false);
}
#endregion
/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main()
{
    // Start the WinForms message loop with the memory-dump form
    // as the application's main window.
    var mainForm = new MemForm();
    Application.Run(mainForm);
}
// Synchronizes button availability with the driver state: "Open" is
// usable only while the driver is closed; closing the driver and
// reading memory are possible only while it is open.
private void LockControls()
{
    bool open = DriverOpened;
    Open_Driver.Enabled = !open;
    Close_Driver.Enabled = open;
    B_ReadMemory.Enabled = open;
}
// Opens the TVicPort kernel driver, reports the remaining evaluation
// period when the open succeeded, and refreshes the UI state.
private void Open_Driver_Click(object sender, System.EventArgs e)
{
    TVicPort.OpenTVicPort();
    // IsDriverOpened() returns a nonzero value on success.
    DriverOpened = TVicPort.IsDriverOpened() != 0;
    if (DriverOpened)
    {
        L_Days.Text = "Evaluation Days Left: " + TVicPort.EvaluationDaysLeft().ToString("d");
    }
    LockControls();
}
// Closes the TVicPort kernel driver, records the new state, and
// disables the memory-access controls accordingly.
private void Close_Driver_Click(object sender, System.EventArgs e)
{
TVicPort.CloseTVicPort();
DriverOpened = false;
LockControls();
}
// Exit button: closing the form also triggers Form1_Closed, which
// releases the driver.
private void B_Exit_Click(object sender, System.EventArgs e)
{
Close();
}
// Form teardown: make sure the kernel driver is released even if the
// user never pressed the "Close driver" button.
private void Form1_Closed(object sender, System.EventArgs e)
{
TVicPort.CloseTVicPort();
}
// Form startup: begin with the driver marked closed and the UI locked
// down to match.
private void Form1_Load(object sender, System.EventArgs e)
{
DriverOpened = false;
LockControls();
}
// Reads 256 bytes of physical memory starting at the hex address typed
// into E_Base, and fills the three list boxes with a classic hex dump:
// 16 rows of address / hex bytes / ASCII.
unsafe private void B_ReadMemory_Click(object sender, System.EventArgs e)
{
    // Base physical address comes from the edit box as hex text.
    UInt32 physAddr = UInt32.Parse(E_Base.Text, System.Globalization.NumberStyles.HexNumber);

    // Map the physical range into this process's linear address space.
    byte* mapped = (byte*)TVicPort.MapPhysToLinear(physAddr, 256);

    ListAddr.Items.Clear();
    ListHex.Items.Clear();
    ListAscii.Items.Clear();

    ASCIIEncoding ascii = new ASCIIEncoding();
    byte[] lineBytes = new byte[16];
    for (uint row = 0; row < 16; row++)         // 16 lines in box
    {
        StringBuilder hexLine = new StringBuilder();
        for (uint col = 0; col < 16; col++)     // 16 bytes in line
        {
            byte value = *(mapped + 16 * row + col);
            hexLine.Append(value.ToString("X2"));
            // Replace non-printable control bytes with '.' for the ASCII column.
            lineBytes[col] = value < 32 ? (byte)46 : value;
        }
        ListAddr.Items.Add((physAddr + 16 * row).ToString("X8")); // address
        ListHex.Items.Add(hexLine.ToString());                    // hex
        ListAscii.Items.Add(ascii.GetString(lineBytes));          // ascii
    }
}
// Opens the vendor web site (handled inside the TVicPort wrapper).
private void linkLabel1_LinkClicked(object sender, System.Windows.Forms.LinkLabelLinkClickedEventArgs e)
{
TVicPort.LaunchWeb();
}
// Opens a mail client addressed to the vendor (handled inside the
// TVicPort wrapper).
private void linkLabel2_LinkClicked(object sender, System.Windows.Forms.LinkLabelLinkClickedEventArgs e)
{
TVicPort.LaunchMail();
}
}
}
| |
/*
* Vericred API
*
* Vericred's API allows you to search for Health Plans that a specific doctor
accepts.
## Getting Started
Visit our [Developer Portal](https://developers.vericred.com) to
create an account.
Once you have created an account, you can create one Application for
Production and another for our Sandbox (select the appropriate Plan when
you create the Application).
## SDKs
Our API follows standard REST conventions, so you can use any HTTP client
to integrate with us. You will likely find it easier to use one of our
[autogenerated SDKs](https://github.com/vericred/?query=vericred-),
which we make available for several common programming languages.
## Authentication
To authenticate, pass the API Key you created in the Developer Portal as
a `Vericred-Api-Key` header.
`curl -H 'Vericred-Api-Key: YOUR_KEY' "https://api.vericred.com/providers?search_term=Foo&zip_code=11215"`
## Versioning
Vericred's API defaults to the latest version. However, if you need a specific
version, you can request it with an `Accept-Version` header.
The current version is `v3`. Previous versions are `v1` and `v2`.
`curl -H 'Vericred-Api-Key: YOUR_KEY' -H 'Accept-Version: v2' "https://api.vericred.com/providers?search_term=Foo&zip_code=11215"`
## Pagination
Endpoints that accept `page` and `per_page` parameters are paginated. They expose
four additional fields that contain data about your position in the response,
namely `Total`, `Per-Page`, `Link`, and `Page` as described in [RFC-5988](https://tools.ietf.org/html/rfc5988).
For example, to display 5 results per page and view the second page of a
`GET` to `/networks`, your final request would be `GET /networks?....page=2&per_page=5`.
## Sideloading
When we return multiple levels of an object graph (e.g. `Provider`s and their `State`s
we sideload the associated data. In this example, we would provide an Array of
`State`s and a `state_id` for each provider. This is done primarily to reduce the
payload size since many of the `Provider`s will share a `State`
```
{
providers: [{ id: 1, state_id: 1}, { id: 2, state_id: 1 }],
states: [{ id: 1, code: 'NY' }]
}
```
If you need the second level of the object graph, you can just match the
corresponding id.
## Selecting specific data
All endpoints allow you to specify which fields you would like to return.
This allows you to limit the response to contain only the data you need.
For example, let's take a request that returns the following JSON by default
```
{
provider: {
id: 1,
name: 'John',
phone: '1234567890',
field_we_dont_care_about: 'value_we_dont_care_about'
},
states: [{
id: 1,
name: 'New York',
code: 'NY',
field_we_dont_care_about: 'value_we_dont_care_about'
}]
}
```
To limit our results to only return the fields we care about, we specify the
`select` query string parameter for the corresponding fields in the JSON
document.
In this case, we want to select `name` and `phone` from the `provider` key,
so we would add the parameters `select=provider.name,provider.phone`.
We also want the `name` and `code` from the `states` key, so we would
add the parameters `select=states.name,states.code`. The id field of
each document is always returned whether or not it is requested.
Our final request would be `GET /providers/12345?select=provider.name,provider.phone,states.name,states.code`
The response would be
```
{
provider: {
id: 1,
name: 'John',
phone: '1234567890'
},
states: [{
id: 1,
name: 'New York',
code: 'NY'
}]
}
```
## Benefits summary format
Benefit cost-share strings are formatted to capture:
* Network tiers
* Compound or conditional cost-share
* Limits on the cost-share
* Benefit-specific maximum out-of-pocket costs
**Example #1**
As an example, we would represent [this Summary of Benefits & Coverage](https://s3.amazonaws.com/vericred-data/SBC/2017/33602TX0780032.pdf) as:
* **Hospital stay facility fees**:
- Network Provider: `$400 copay/admit plus 20% coinsurance`
- Out-of-Network Provider: `$1,500 copay/admit plus 50% coinsurance`
- Vericred's format for this benefit: `In-Network: $400 before deductible then 20% after deductible / Out-of-Network: $1,500 before deductible then 50% after deductible`
* **Rehabilitation services:**
- Network Provider: `20% coinsurance`
- Out-of-Network Provider: `50% coinsurance`
- Limitations & Exceptions: `35 visit maximum per benefit period combined with Chiropractic care.`
- Vericred's format for this benefit: `In-Network: 20% after deductible / Out-of-Network: 50% after deductible | limit: 35 visit(s) per Benefit Period`
**Example #2**
In [this other Summary of Benefits & Coverage](https://s3.amazonaws.com/vericred-data/SBC/2017/40733CA0110568.pdf), the **specialty_drugs** cost-share has a maximum out-of-pocket for in-network pharmacies.
* **Specialty drugs:**
- Network Provider: `40% coinsurance up to a $500 maximum for up to a 30 day supply`
- Out-of-Network Provider `Not covered`
- Vericred's format for this benefit: `In-Network: 40% after deductible, up to $500 per script / Out-of-Network: 100%`
**BNF**
Here's a description of the benefits summary string, represented as a context-free grammar:
```
<cost-share> ::= <tier> <opt-num-prefix> <value> <opt-per-unit> <deductible> <tier-limit> "/" <tier> <opt-num-prefix> <value> <opt-per-unit> <deductible> "|" <benefit-limit>
<tier> ::= "In-Network:" | "In-Network-Tier-2:" | "Out-of-Network:"
<opt-num-prefix> ::= "first" <num> <unit> | ""
<unit> ::= "day(s)" | "visit(s)" | "exam(s)" | "item(s)"
<value> ::= <ddct_moop> | <copay> | <coinsurance> | <compound> | "unknown" | "Not Applicable"
<compound> ::= <copay> <deductible> "then" <coinsurance> <deductible> | <copay> <deductible> "then" <copay> <deductible> | <coinsurance> <deductible> "then" <coinsurance> <deductible>
<copay> ::= "$" <num>
<coinsurance> ::= <num> "%"
<ddct_moop> ::= <copay> | "Included in Medical" | "Unlimited"
<opt-per-unit> ::= "per day" | "per visit" | "per stay" | ""
<deductible> ::= "before deductible" | "after deductible" | ""
<tier-limit> ::= ", " <limit> | ""
<benefit-limit> ::= <limit> | ""
```
*
* OpenAPI spec version: 1.0.0
*
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
namespace IO.Vericred.Model
{
/// <summary>
/// ZipCounty — join model linking a Zip Code to a County in Vericred's API.
/// </summary>
[DataContract]
public partial class ZipCounty : IEquatable<ZipCounty>
{
    /// <summary>
    /// Initializes a new instance of the <see cref="ZipCounty" /> class.
    /// </summary>
    /// <param name="CountyId">ID of the parent County in Vericred's API.</param>
    /// <param name="Id">Primary key.</param>
    /// <param name="ZipCodeId">ID of the parent Zip Code in Vericred's API.</param>
    public ZipCounty(int? CountyId = null, int? Id = null, int? ZipCodeId = null)
    {
        this.CountyId = CountyId;
        this.Id = Id;
        this.ZipCodeId = ZipCodeId;
    }

    /// <summary>
    /// ID of the parent County in Vericred's API
    /// </summary>
    /// <value>ID of the parent County in Vericred's API</value>
    [DataMember(Name="county_id", EmitDefaultValue=false)]
    public int? CountyId { get; set; }

    /// <summary>
    /// Primary key
    /// </summary>
    /// <value>Primary key</value>
    [DataMember(Name="id", EmitDefaultValue=false)]
    public int? Id { get; set; }

    /// <summary>
    /// ID of the parent Zip Code in Vericred's API
    /// </summary>
    /// <value>ID of the parent Zip Code in Vericred's API</value>
    [DataMember(Name="zip_code_id", EmitDefaultValue=false)]
    public int? ZipCodeId { get; set; }

    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        // Mirrors the generator's multi-line "class dump" layout exactly.
        return "class ZipCounty {\n"
            + " CountyId: " + CountyId + "\n"
            + " Id: " + Id + "\n"
            + " ZipCodeId: " + ZipCodeId + "\n"
            + "}\n";
    }

    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }

    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        // A non-ZipCounty (or null) becomes null here and compares unequal.
        return Equals(obj as ZipCounty);
    }

    /// <summary>
    /// Returns true if ZipCounty instances are equal
    /// </summary>
    /// <param name="other">Instance of ZipCounty to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(ZipCounty other)
    {
        if (other == null)
            return false;

        // Nullable<int>'s lifted == already treats two nulls as equal and a
        // null/value pair as unequal, so a field-by-field comparison suffices.
        return CountyId == other.CountyId
            && Id == other.Id
            && ZipCodeId == other.ZipCodeId;
    }

    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        // Same seed/multiplier scheme as the generated code so hashes are
        // unchanged across this rewrite.
        unchecked // overflow wraps, which is fine for hashing
        {
            int hash = 41;
            if (CountyId != null)
                hash = hash * 59 + CountyId.GetHashCode();
            if (Id != null)
                hash = hash * 59 + Id.GetHashCode();
            if (ZipCodeId != null)
                hash = hash * 59 + ZipCodeId.GetHashCode();
            return hash;
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using CocosSharp;
namespace tests
{
public class LabelFNTMultiLineAlignment : AtlasDemoNew
{
    // Sample strings exercised by the sentence menu.
    public const string LongSentencesExample =
        "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.";
    public const string LineBreaksExample = "Lorem ipsum dolor\nsit amet\nconsectetur adipisicing elit\nblah\nblah";
    public const string MixedExample = "ABC\nLorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt\nDEF";

    // Horizontal travel range of the drag arrows, as fractions of the screen width.
    private const float ArrowsMax = 0.95f;
    private const float ArrowsMin = 0.7f;

    // Tags for the alignment menu items.
    private const int LeftAlign = 0;
    private const int CenterAlign = 1;
    private const int RightAlign = 2;

    // Tags for the sample-string menu items.
    private const int LongSentences = 0;
    private const int LineBreaks = 1;
    private const int Mixed = 2;

    private static float alignmentItemPadding = 50f;
    private static float menuItemPaddingCenter = 50f;

    private CCSprite arrowsBar;
    private CCSprite arrows;
    private CCLabel label;
    private bool drag;

    // Currently highlighted item of each menu, remembered so the previous
    // selection's color can be reset when a new item is clicked.
    private CCMenuItemFont lastAlignmentItem;
    private CCMenuItemFont lastSentenceItem;

    public LabelFNTMultiLineAlignment()
    {
        // Register Touch Event
        var touchListener = new CCEventListenerTouchAllAtOnce();
        touchListener.OnTouchesBegan = onTouchesBegan;
        touchListener.OnTouchesMoved = onTouchesMoved;
        touchListener.OnTouchesEnded = onTouchesEnded;
        AddEventListener(touchListener);
    }

    protected override void AddedToScene()
    {
        base.AddedToScene();

        // ask the director for the window size
        var size = VisibleBoundsWorldspace.Size;

        // create and initialize a Label constrained to a third of the screen width
        label = new CCLabel(LongSentencesExample, "fonts/markerFelt.fnt", new CCSize(size.Width / 3f, 0),
            CCTextAlignment.Center);
        //label.LineBreakWithoutSpace = true;

        arrowsBar = new CCSprite("Images/arrowsBar");
        arrows = new CCSprite("Images/arrows");

        CCMenuItemFont.FontSize = 20;
        CCMenuItemFont.FontName = "arial";
        var longSentences = new CCMenuItemFont("Long Flowing Sentences", stringChanged);
        var lineBreaks = new CCMenuItemFont("Short Sentences With Intentional Line Breaks", stringChanged);
        var mixed = new CCMenuItemFont("Long Sentences Mixed With Intentional Line Breaks", stringChanged);
        var stringMenu = new CCMenu(longSentences, lineBreaks, mixed);
        stringMenu.AlignItemsVertically();

        longSentences.Color = CCColor3B.Red;
        lastSentenceItem = longSentences;
        longSentences.Tag = LongSentences;
        lineBreaks.Tag = LineBreaks;
        mixed.Tag = Mixed;

        CCMenuItemFont.FontSize = 30;
        var left = new CCMenuItemFont("Left", alignmentChanged);
        var center = new CCMenuItemFont("Center", alignmentChanged);
        var right = new CCMenuItemFont("Right", alignmentChanged);
        var alignmentMenu = new CCMenu(left, center, right);
        alignmentMenu.AlignItemsHorizontally(alignmentItemPadding);

        center.Color = CCColor3B.Red;
        lastAlignmentItem = center;
        left.Tag = LeftAlign;
        center.Tag = CenterAlign;
        right.Tag = RightAlign;

        // position the label on the center of the screen
        label.Position = size.Center;

        arrowsBar.Visible = false;
        float arrowsWidth = (ArrowsMax - ArrowsMin) * size.Width;
        arrowsBar.ScaleX = (arrowsWidth / arrowsBar.ContentSize.Width);
        arrowsBar.Position = new CCPoint(((ArrowsMax + ArrowsMin) / 2) * size.Width, label.Position.Y);
        snapArrowsToEdge();

        stringMenu.Position = new CCPoint(size.Width / 2, size.Height - menuItemPaddingCenter);
        alignmentMenu.Position = new CCPoint(size.Width / 2, menuItemPaddingCenter + 15);

        AddChild(label);
        AddChild(arrowsBar);
        AddChild(arrows);
        AddChild(stringMenu);
        AddChild(alignmentMenu);
    }

    // Sentence-menu handler: swaps the label text and moves the red
    // highlight to the clicked item.
    private void stringChanged(object sender)
    {
        var item = (CCMenuItemFont) sender;
        item.Color = CCColor3B.Red;
        // BUGFIX: the selection to clear belongs to the *sentence* menu.
        // The original code reset lastAlignmentItem here, which stripped the
        // highlight from the alignment menu and left two sentence items red
        // (lastSentenceItem was assigned in AddedToScene but never used).
        lastSentenceItem.Color = CCColor3B.White;
        lastSentenceItem = item;

        switch (item.Tag)
        {
            case LongSentences:
                label.Text = LongSentencesExample;
                break;
            case LineBreaks:
                label.Text = LineBreaksExample;
                break;
            case Mixed:
                label.Text = MixedExample;
                break;
            default:
                break;
        }

        snapArrowsToEdge();
    }

    // Alignment-menu handler: changes the label's horizontal alignment and
    // moves the red highlight to the clicked item.
    private void alignmentChanged(object sender)
    {
        var item = (CCMenuItemFont) sender;
        item.Color = CCColor3B.Red;
        lastAlignmentItem.Color = CCColor3B.White;
        lastAlignmentItem = item;

        switch (item.Tag)
        {
            case LeftAlign:
                label.HorizontalAlignment = CCTextAlignment.Left;
                break;
            case CenterAlign:
                label.HorizontalAlignment = CCTextAlignment.Center;
                break;
            case RightAlign:
                label.HorizontalAlignment = CCTextAlignment.Right;
                break;
            default:
                break;
        }

        snapArrowsToEdge();
    }

    // Starts a drag when a touch lands on the arrows sprite.
    void onTouchesBegan(List<CCTouch> pTouches, CCEvent touchEvent)
    {
        CCTouch touch = pTouches[0];
        CCPoint location = touch.Location;

        if (arrows.BoundingBox.ContainsPoint(location))
        {
            drag = true;
            arrowsBar.Visible = true;
        }
    }

    void onTouchesEnded(List<CCTouch> pTouches, CCEvent touchEvent)
    {
        drag = false;
        snapArrowsToEdge();
        arrowsBar.Visible = false;
    }

    // While dragging, clamps the arrows to the allowed band and resizes the
    // label to the distance between the arrows and the label's center.
    void onTouchesMoved(List<CCTouch> pTouches, CCEvent touchEvent)
    {
        if (!drag)
        {
            return;
        }

        CCTouch touch = pTouches[0];
        CCPoint location = touch.Location;
        CCSize winSize = VisibleBoundsWorldspace.Size;

        arrows.Position = new CCPoint(Math.Max(Math.Min(location.X, ArrowsMax * winSize.Width), ArrowsMin * winSize.Width),
            arrows.Position.Y);

        float labelWidth = Math.Abs(arrows.Position.X - label.Position.X);
        label.Dimensions = new CCSize(labelWidth, 0);
    }

    // Re-attaches the arrows sprite to the label's right edge.
    private void snapArrowsToEdge()
    {
        arrows.PositionX = label.Position.X + label.ContentSize.Width;
        arrows.PositionY = label.Position.Y;
    }

    public override string Title
    {
        get { return string.Empty; }
    }
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: Org.Apache.Http.Util.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 1717
namespace Org.Apache.Http.Util
{
/// <summary>
/// <para>A set of utility methods to help produce consistent equals and hashCode methods.</para><para><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/util/LangUtils
/// </java-name>
[Dot42.DexImport("org/apache/http/util/LangUtils", AccessFlags = 49)]
public sealed partial class LangUtils
/* scope: __dot42__ */
{
// NOTE: Dot42 binding stub. The method bodies below only return defaults;
// at runtime calls are dispatched to the imported Dex class
// org.apache.http.util.LangUtils, as indicated by the DexImport attributes.
/// <java-name>
/// HASH_SEED
/// </java-name>
[Dot42.DexImport("HASH_SEED", "I", AccessFlags = 25)]
public const int HASH_SEED = 17;
/// <java-name>
/// HASH_OFFSET
/// </java-name>
[Dot42.DexImport("HASH_OFFSET", "I", AccessFlags = 25)]
public const int HASH_OFFSET = 37;
/// <summary>
/// <para>Disabled default constructor. </para>
/// </summary>
[Dot42.DexImport("<init>", "()V", AccessFlags = 0)]
internal LangUtils() /* MethodBuilder.Create */
{
}
/// <java-name>
/// hashCode
/// </java-name>
[Dot42.DexImport("hashCode", "(II)I", AccessFlags = 9)]
public static int GetHashCode(int int32, int int321) /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// hashCode
/// </java-name>
[Dot42.DexImport("hashCode", "(IZ)I", AccessFlags = 9)]
public static int GetHashCode(int int32, bool boolean) /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// hashCode
/// </java-name>
[Dot42.DexImport("hashCode", "(ILjava/lang/Object;)I", AccessFlags = 9)]
public static int GetHashCode(int int32, object @object) /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// equals
/// </java-name>
[Dot42.DexImport("equals", "(Ljava/lang/Object;Ljava/lang/Object;)Z", AccessFlags = 9)]
public static bool Equals(object @object, object object1) /* MethodBuilder.Create */
{
return default(bool);
}
/// <java-name>
/// equals
/// </java-name>
[Dot42.DexImport("equals", "([Ljava/lang/Object;[Ljava/lang/Object;)Z", AccessFlags = 9)]
public static bool Equals(object[] @object, object[] object1) /* MethodBuilder.Create */
{
return default(bool);
}
}
/// <summary>
/// <para>Provides access to version information for HTTP components. Instances of this class provide version information for a single module or informal unit, as explained . Static methods are used to extract version information from property files that are automatically packaged with HTTP component release JARs. <br></br> All available version information is provided in strings, where the string format is informal and subject to change without notice. Version information is provided for debugging output and interpretation by humans, not for automated processing in applications.</para><para><para> </para><simplesectsep></simplesectsep><para>and others </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/util/VersionInfo
/// </java-name>
[Dot42.DexImport("org/apache/http/util/VersionInfo", AccessFlags = 33)]
public partial class VersionInfo
/* scope: __dot42__ */
{
// NOTE: Dot42 binding stub. The method bodies below only return defaults;
// at runtime calls are dispatched to the imported Dex class
// org.apache.http.util.VersionInfo, as indicated by the DexImport attributes.
/// <summary>
/// <para>A string constant for unavailable information. </para>
/// </summary>
/// <java-name>
/// UNAVAILABLE
/// </java-name>
[Dot42.DexImport("UNAVAILABLE", "Ljava/lang/String;", AccessFlags = 25)]
public const string UNAVAILABLE = "UNAVAILABLE";
/// <summary>
/// <para>The filename of the version information files. </para>
/// </summary>
/// <java-name>
/// VERSION_PROPERTY_FILE
/// </java-name>
[Dot42.DexImport("VERSION_PROPERTY_FILE", "Ljava/lang/String;", AccessFlags = 25)]
public const string VERSION_PROPERTY_FILE = "version.properties";
/// <java-name>
/// PROPERTY_MODULE
/// </java-name>
[Dot42.DexImport("PROPERTY_MODULE", "Ljava/lang/String;", AccessFlags = 25)]
public const string PROPERTY_MODULE = "info.module";
/// <java-name>
/// PROPERTY_RELEASE
/// </java-name>
[Dot42.DexImport("PROPERTY_RELEASE", "Ljava/lang/String;", AccessFlags = 25)]
public const string PROPERTY_RELEASE = "info.release";
/// <java-name>
/// PROPERTY_TIMESTAMP
/// </java-name>
[Dot42.DexImport("PROPERTY_TIMESTAMP", "Ljava/lang/String;", AccessFlags = 25)]
public const string PROPERTY_TIMESTAMP = "info.timestamp";
/// <summary>
/// <para>Instantiates version information.</para><para></para>
/// </summary>
[Dot42.DexImport("<init>", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/la" +
"ng/String;)V", AccessFlags = 4)]
protected internal VersionInfo(string pckg, string module, string release, string time, string clsldr) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Obtains the package name. The package name identifies the module or informal unit.</para><para></para>
/// </summary>
/// <returns>
/// <para>the package name, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getPackage
/// </java-name>
[Dot42.DexImport("getPackage", "()Ljava/lang/String;", AccessFlags = 17)]
public string GetPackage() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Obtains the name of the versioned module or informal unit. This data is read from the version information for the package.</para><para></para>
/// </summary>
/// <returns>
/// <para>the module name, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getModule
/// </java-name>
[Dot42.DexImport("getModule", "()Ljava/lang/String;", AccessFlags = 17)]
public string GetModule() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Obtains the release of the versioned module or informal unit. This data is read from the version information for the package.</para><para></para>
/// </summary>
/// <returns>
/// <para>the release version, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getRelease
/// </java-name>
[Dot42.DexImport("getRelease", "()Ljava/lang/String;", AccessFlags = 17)]
public string GetRelease() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Obtains the timestamp of the versioned module or informal unit. This data is read from the version information for the package.</para><para></para>
/// </summary>
/// <returns>
/// <para>the timestamp, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getTimestamp
/// </java-name>
[Dot42.DexImport("getTimestamp", "()Ljava/lang/String;", AccessFlags = 17)]
public string GetTimestamp() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Obtains the classloader used to read the version information. This is just the <code>toString</code> output of the classloader, since the version information should not keep a reference to the classloader itself. That could prevent garbage collection.</para><para></para>
/// </summary>
/// <returns>
/// <para>the classloader description, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getClassloader
/// </java-name>
[Dot42.DexImport("getClassloader", "()Ljava/lang/String;", AccessFlags = 17)]
public string GetClassloader() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Provides the version information in human-readable format.</para><para></para>
/// </summary>
/// <returns>
/// <para>a string holding this version information </para>
/// </returns>
/// <java-name>
/// toString
/// </java-name>
[Dot42.DexImport("toString", "()Ljava/lang/String;", AccessFlags = 1)]
public override string ToString() /* MethodBuilder.Create */
{
return default(string);
}
/// <java-name>
/// loadVersionInfo
/// </java-name>
[Dot42.DexImport("loadVersionInfo", "([Ljava/lang/String;Ljava/lang/ClassLoader;)[Lorg/apache/http/util/VersionInfo;", AccessFlags = 25)]
public static global::Org.Apache.Http.Util.VersionInfo[] LoadVersionInfo(string[] @string, global::Java.Lang.ClassLoader classLoader) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Util.VersionInfo[]);
}
/// <java-name>
/// loadVersionInfo
/// </java-name>
[Dot42.DexImport("loadVersionInfo", "(Ljava/lang/String;Ljava/lang/ClassLoader;)Lorg/apache/http/util/VersionInfo;", AccessFlags = 25)]
public static global::Org.Apache.Http.Util.VersionInfo LoadVersionInfo(string @string, global::Java.Lang.ClassLoader classLoader) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Util.VersionInfo);
}
/// <summary>
/// <para>Instantiates version information from properties.</para><para></para>
/// </summary>
/// <returns>
/// <para>the version information </para>
/// </returns>
/// <java-name>
/// fromMap
/// </java-name>
[Dot42.DexImport("fromMap", "(Ljava/lang/String;Ljava/util/Map;Ljava/lang/ClassLoader;)Lorg/apache/http/util/V" +
"ersionInfo;", AccessFlags = 28)]
protected internal static global::Org.Apache.Http.Util.VersionInfo FromMap(string pckg, global::Java.Util.IMap<object, object> info, global::Java.Lang.ClassLoader clsldr) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Util.VersionInfo);
}
// Hidden parameterless constructor generated by the binding tool; not
// intended for application use.
[global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
internal VersionInfo() /* TypeBuilder.AddDefaultConstructor */
{
}
// The properties below are C#-idiomatic wrappers over the GetXxx()
// binding methods above.
/// <summary>
/// <para>Obtains the package name. The package name identifies the module or informal unit.</para><para></para>
/// </summary>
/// <returns>
/// <para>the package name, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getPackage
/// </java-name>
public string Package
{
[Dot42.DexImport("getPackage", "()Ljava/lang/String;", AccessFlags = 17)]
get{ return GetPackage(); }
}
/// <summary>
/// <para>Obtains the name of the versioned module or informal unit. This data is read from the version information for the package.</para><para></para>
/// </summary>
/// <returns>
/// <para>the module name, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getModule
/// </java-name>
public string Module
{
[Dot42.DexImport("getModule", "()Ljava/lang/String;", AccessFlags = 17)]
get{ return GetModule(); }
}
/// <summary>
/// <para>Obtains the release of the versioned module or informal unit. This data is read from the version information for the package.</para><para></para>
/// </summary>
/// <returns>
/// <para>the release version, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getRelease
/// </java-name>
public string Release
{
[Dot42.DexImport("getRelease", "()Ljava/lang/String;", AccessFlags = 17)]
get{ return GetRelease(); }
}
/// <summary>
/// <para>Obtains the timestamp of the versioned module or informal unit. This data is read from the version information for the package.</para><para></para>
/// </summary>
/// <returns>
/// <para>the timestamp, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getTimestamp
/// </java-name>
public string Timestamp
{
[Dot42.DexImport("getTimestamp", "()Ljava/lang/String;", AccessFlags = 17)]
get{ return GetTimestamp(); }
}
/// <summary>
/// <para>Obtains the classloader used to read the version information. This is just the <code>toString</code> output of the classloader, since the version information should not keep a reference to the classloader itself. That could prevent garbage collection.</para><para></para>
/// </summary>
/// <returns>
/// <para>the classloader description, never <code>null</code> </para>
/// </returns>
/// <java-name>
/// getClassloader
/// </java-name>
public string Classloader
{
[Dot42.DexImport("getClassloader", "()Ljava/lang/String;", AccessFlags = 17)]
get{ return GetClassloader(); }
}
}
/// <summary>
/// <para>The home for utility methods that handle various encoding tasks.</para><para><para>Michael Becke </para><simplesectsep></simplesectsep><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/util/EncodingUtils
/// </java-name>
// NOTE: Dot42-generated binding stub for org.apache.http.util.EncodingUtils.
// Bodies intentionally return default values; the real implementation is bound
// at runtime via the DexImport attributes. Each array method appears twice
// (sbyte[] and byte[]) because Java's byte is signed; the byte[] overload is
// marked IgnoreFromJava so only one maps back to the Java signature.
[Dot42.DexImport("org/apache/http/util/EncodingUtils", AccessFlags = 49)]
public sealed partial class EncodingUtils
/* scope: __dot42__ */
{
/// <summary>
/// <para>This class should not be instantiated. </para>
/// </summary>
[Dot42.DexImport("<init>", "()V", AccessFlags = 0)]
internal EncodingUtils() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Converts the byte array of HTTP content characters to a string. If the specified charset is not supported, default system encoding is used.</para><para></para>
/// </summary>
/// <returns>
/// <para>The result of the conversion. </para>
/// </returns>
/// <java-name>
/// getString
/// </java-name>
[Dot42.DexImport("getString", "([BIILjava/lang/String;)Ljava/lang/String;", AccessFlags = 9)]
public static string GetString(sbyte[] data, int offset, int length, string charset) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Converts the byte array of HTTP content characters to a string. If the specified charset is not supported, default system encoding is used.</para><para></para>
/// </summary>
/// <returns>
/// <para>The result of the conversion. </para>
/// </returns>
/// <java-name>
/// getString
/// </java-name>
[Dot42.DexImport("getString", "([BIILjava/lang/String;)Ljava/lang/String;", AccessFlags = 9, IgnoreFromJava = true)]
public static string GetString(byte[] data, int offset, int length, string charset) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Converts the byte array of HTTP content characters to a string. If the specified charset is not supported, default system encoding is used.</para><para></para>
/// </summary>
/// <returns>
/// <para>The result of the conversion. </para>
/// </returns>
/// <java-name>
/// getString
/// </java-name>
[Dot42.DexImport("getString", "([BLjava/lang/String;)Ljava/lang/String;", AccessFlags = 9)]
public static string GetString(sbyte[] data, string charset) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Converts the byte array of HTTP content characters to a string. If the specified charset is not supported, default system encoding is used.</para><para></para>
/// </summary>
/// <returns>
/// <para>The result of the conversion. </para>
/// </returns>
/// <java-name>
/// getString
/// </java-name>
[Dot42.DexImport("getString", "([BLjava/lang/String;)Ljava/lang/String;", AccessFlags = 9, IgnoreFromJava = true)]
public static string GetString(byte[] data, string charset) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Converts the specified string to a byte array. If the charset is not supported the default system charset is used.</para><para></para>
/// </summary>
/// <returns>
/// <para>The resulting byte array. </para>
/// </returns>
/// <java-name>
/// getBytes
/// </java-name>
// "Java" prefix keeps the signed-byte overload distinct from the byte[] one below.
[Dot42.DexImport("getBytes", "(Ljava/lang/String;Ljava/lang/String;)[B", AccessFlags = 9)]
public static sbyte[] JavaGetBytes(string data, string charset) /* MethodBuilder.Create */
{
return default(sbyte[]);
}
/// <summary>
/// <para>Converts the specified string to a byte array. If the charset is not supported the default system charset is used.</para><para></para>
/// </summary>
/// <returns>
/// <para>The resulting byte array. </para>
/// </returns>
/// <java-name>
/// getBytes
/// </java-name>
[Dot42.DexImport("getBytes", "(Ljava/lang/String;Ljava/lang/String;)[B", AccessFlags = 9, IgnoreFromJava = true)]
public static byte[] GetBytes(string data, string charset) /* MethodBuilder.Create */
{
return default(byte[]);
}
/// <summary>
/// <para>Converts the specified string to byte array of ASCII characters.</para><para></para>
/// </summary>
/// <returns>
/// <para>The string as a byte array. </para>
/// </returns>
/// <java-name>
/// getAsciiBytes
/// </java-name>
[Dot42.DexImport("getAsciiBytes", "(Ljava/lang/String;)[B", AccessFlags = 9)]
public static sbyte[] JavaGetAsciiBytes(string data) /* MethodBuilder.Create */
{
return default(sbyte[]);
}
/// <summary>
/// <para>Converts the specified string to byte array of ASCII characters.</para><para></para>
/// </summary>
/// <returns>
/// <para>The string as a byte array. </para>
/// </returns>
/// <java-name>
/// getAsciiBytes
/// </java-name>
[Dot42.DexImport("getAsciiBytes", "(Ljava/lang/String;)[B", AccessFlags = 9, IgnoreFromJava = true)]
public static byte[] GetAsciiBytes(string data) /* MethodBuilder.Create */
{
return default(byte[]);
}
/// <summary>
/// <para>Converts the byte array of ASCII characters to a string. This method is to be used when decoding content of HTTP elements (such as response headers)</para><para></para>
/// </summary>
/// <returns>
/// <para>The string representation of the byte array </para>
/// </returns>
/// <java-name>
/// getAsciiString
/// </java-name>
[Dot42.DexImport("getAsciiString", "([BII)Ljava/lang/String;", AccessFlags = 9)]
public static string GetAsciiString(sbyte[] data, int offset, int length) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Converts the byte array of ASCII characters to a string. This method is to be used when decoding content of HTTP elements (such as response headers)</para><para></para>
/// </summary>
/// <returns>
/// <para>The string representation of the byte array </para>
/// </returns>
/// <java-name>
/// getAsciiString
/// </java-name>
[Dot42.DexImport("getAsciiString", "([BII)Ljava/lang/String;", AccessFlags = 9, IgnoreFromJava = true)]
public static string GetAsciiString(byte[] data, int offset, int length) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Converts the byte array of ASCII characters to a string. This method is to be used when decoding content of HTTP elements (such as response headers)</para><para></para>
/// </summary>
/// <returns>
/// <para>The string representation of the byte array </para>
/// </returns>
/// <java-name>
/// getAsciiString
/// </java-name>
[Dot42.DexImport("getAsciiString", "([B)Ljava/lang/String;", AccessFlags = 9)]
public static string GetAsciiString(sbyte[] data) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Converts the byte array of ASCII characters to a string. This method is to be used when decoding content of HTTP elements (such as response headers)</para><para></para>
/// </summary>
/// <returns>
/// <para>The string representation of the byte array </para>
/// </returns>
/// <java-name>
/// getAsciiString
/// </java-name>
[Dot42.DexImport("getAsciiString", "([B)Ljava/lang/String;", AccessFlags = 9, IgnoreFromJava = true)]
public static string GetAsciiString(byte[] data) /* MethodBuilder.Create */
{
return default(string);
}
}
/// <summary>
/// <para>Static helpers for dealing with entities.</para><para><para></para><para></para><title>Revision:</title><para>569637 </para></para><para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/util/EntityUtils
/// </java-name>
// NOTE: Dot42-generated binding stub for org.apache.http.util.EntityUtils.
// Bodies intentionally return default values; the real behavior is bound at
// runtime via the DexImport attributes.
[Dot42.DexImport("org/apache/http/util/EntityUtils", AccessFlags = 49)]
public sealed partial class EntityUtils
/* scope: __dot42__ */
{
/// <summary>
/// <para>Disabled default constructor. </para>
/// </summary>
[Dot42.DexImport("<init>", "()V", AccessFlags = 0)]
internal EntityUtils() /* MethodBuilder.Create */
{
}
/// <java-name>
/// toByteArray
/// </java-name>
// Signed-byte overload mirroring Java's byte[]; see ToByteArray below for the
// CLR-friendly byte[] variant (IgnoreFromJava avoids a duplicate Java mapping).
[Dot42.DexImport("toByteArray", "(Lorg/apache/http/HttpEntity;)[B", AccessFlags = 9)]
public static sbyte[] JavaToByteArray(global::Org.Apache.Http.IHttpEntity entity) /* MethodBuilder.Create */
{
return default(sbyte[]);
}
/// <java-name>
/// toByteArray
/// </java-name>
[Dot42.DexImport("toByteArray", "(Lorg/apache/http/HttpEntity;)[B", AccessFlags = 9, IgnoreFromJava = true)]
public static byte[] ToByteArray(global::Org.Apache.Http.IHttpEntity entity) /* MethodBuilder.Create */
{
return default(byte[]);
}
/// <java-name>
/// getContentCharSet
/// </java-name>
[Dot42.DexImport("getContentCharSet", "(Lorg/apache/http/HttpEntity;)Ljava/lang/String;", AccessFlags = 9)]
public static string GetContentCharSet(global::Org.Apache.Http.IHttpEntity entity) /* MethodBuilder.Create */
{
return default(string);
}
/// <java-name>
/// toString
/// </java-name>
[Dot42.DexImport("toString", "(Lorg/apache/http/HttpEntity;Ljava/lang/String;)Ljava/lang/String;", AccessFlags = 9)]
public static string ToString(global::Org.Apache.Http.IHttpEntity entity, string defaultCharset) /* MethodBuilder.Create */
{
return default(string);
}
/// <java-name>
/// toString
/// </java-name>
[Dot42.DexImport("toString", "(Lorg/apache/http/HttpEntity;)Ljava/lang/String;", AccessFlags = 9)]
public static string ToString(global::Org.Apache.Http.IHttpEntity entity) /* MethodBuilder.Create */
{
return default(string);
}
}
/// <summary>
/// <para>The home for utility methods that handle various exception-related tasks.</para><para><para> </para><simplesectsep></simplesectsep><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/util/ExceptionUtils
/// </java-name>
// NOTE: Dot42-generated binding stub for org.apache.http.util.ExceptionUtils;
// the empty body is replaced by the imported Dex implementation at runtime.
[Dot42.DexImport("org/apache/http/util/ExceptionUtils", AccessFlags = 49)]
public sealed partial class ExceptionUtils
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "()V", AccessFlags = 0)]
internal ExceptionUtils() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>If we're running on JDK 1.4 or later, initialize the cause for the given throwable.</para><para></para>
/// </summary>
/// <java-name>
/// initCause
/// </java-name>
[Dot42.DexImport("initCause", "(Ljava/lang/Throwable;Ljava/lang/Throwable;)V", AccessFlags = 9)]
public static void InitCause(global::System.Exception throwable, global::System.Exception cause) /* MethodBuilder.Create */
{
}
}
/// <summary>
/// <para>A resizable char array.</para><para><para></para><para></para><title>Revision:</title><para>496070 </para></para><para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/util/CharArrayBuffer
/// </java-name>
// NOTE: Dot42-generated binding stub for org.apache.http.util.CharArrayBuffer.
// Bodies intentionally return default values; behavior is bound at runtime via
// the DexImport attributes. byte[]/sbyte[] overload pairs exist because Java's
// byte is signed (IgnoreFromJava marks the CLR-only variant).
[Dot42.DexImport("org/apache/http/util/CharArrayBuffer", AccessFlags = 49)]
public sealed partial class CharArrayBuffer
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "(I)V", AccessFlags = 1)]
public CharArrayBuffer(int capacity) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "([CII)V", AccessFlags = 1)]
public void Append(char[] b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "(Ljava/lang/String;)V", AccessFlags = 1)]
public void Append(string b) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "(Lorg/apache/http/util/CharArrayBuffer;II)V", AccessFlags = 1)]
public void Append(global::Org.Apache.Http.Util.CharArrayBuffer b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "(Lorg/apache/http/util/CharArrayBuffer;)V", AccessFlags = 1)]
public void Append(global::Org.Apache.Http.Util.CharArrayBuffer b) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "(C)V", AccessFlags = 1)]
public void Append(char b) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "([BII)V", AccessFlags = 1)]
public void Append(sbyte[] b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "([BII)V", AccessFlags = 1, IgnoreFromJava = true)]
public void Append(byte[] b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "(Lorg/apache/http/util/ByteArrayBuffer;II)V", AccessFlags = 1)]
public void Append(global::Org.Apache.Http.Util.ByteArrayBuffer b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "(Ljava/lang/Object;)V", AccessFlags = 1)]
public void Append(object b) /* MethodBuilder.Create */
{
}
/// <java-name>
/// clear
/// </java-name>
[Dot42.DexImport("clear", "()V", AccessFlags = 1)]
public void Clear() /* MethodBuilder.Create */
{
}
/// <java-name>
/// toCharArray
/// </java-name>
[Dot42.DexImport("toCharArray", "()[C", AccessFlags = 1)]
public char[] ToCharArray() /* MethodBuilder.Create */
{
return default(char[]);
}
/// <java-name>
/// charAt
/// </java-name>
[Dot42.DexImport("charAt", "(I)C", AccessFlags = 1)]
public char CharAt(int i) /* MethodBuilder.Create */
{
return default(char);
}
/// <java-name>
/// buffer
/// </java-name>
[Dot42.DexImport("buffer", "()[C", AccessFlags = 1)]
public char[] Buffer() /* MethodBuilder.Create */
{
return default(char[]);
}
/// <java-name>
/// capacity
/// </java-name>
[Dot42.DexImport("capacity", "()I", AccessFlags = 1)]
public int Capacity() /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// length
/// </java-name>
[Dot42.DexImport("length", "()I", AccessFlags = 1)]
public int Length() /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// ensureCapacity
/// </java-name>
[Dot42.DexImport("ensureCapacity", "(I)V", AccessFlags = 1)]
public void EnsureCapacity(int required) /* MethodBuilder.Create */
{
}
/// <java-name>
/// setLength
/// </java-name>
[Dot42.DexImport("setLength", "(I)V", AccessFlags = 1)]
public void SetLength(int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// isEmpty
/// </java-name>
[Dot42.DexImport("isEmpty", "()Z", AccessFlags = 1)]
public bool IsEmpty() /* MethodBuilder.Create */
{
return default(bool);
}
/// <java-name>
/// isFull
/// </java-name>
[Dot42.DexImport("isFull", "()Z", AccessFlags = 1)]
public bool IsFull() /* MethodBuilder.Create */
{
return default(bool);
}
/// <java-name>
/// indexOf
/// </java-name>
[Dot42.DexImport("indexOf", "(III)I", AccessFlags = 1)]
public int IndexOf(int ch, int beginIndex, int endIndex) /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// indexOf
/// </java-name>
[Dot42.DexImport("indexOf", "(I)I", AccessFlags = 1)]
public int IndexOf(int ch) /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// substring
/// </java-name>
[Dot42.DexImport("substring", "(II)Ljava/lang/String;", AccessFlags = 1)]
public string Substring(int beginIndex, int endIndex) /* MethodBuilder.Create */
{
return default(string);
}
/// <java-name>
/// substringTrimmed
/// </java-name>
[Dot42.DexImport("substringTrimmed", "(II)Ljava/lang/String;", AccessFlags = 1)]
public string SubstringTrimmed(int beginIndex, int endIndex) /* MethodBuilder.Create */
{
return default(string);
}
/// <java-name>
/// toString
/// </java-name>
[Dot42.DexImport("toString", "()Ljava/lang/String;", AccessFlags = 1)]
public override string ToString() /* MethodBuilder.Create */
{
return default(string);
}
// Hidden parameterless constructor emitted by the Dot42 type builder;
// not intended for direct use.
[global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
internal CharArrayBuffer() /* TypeBuilder.AddDefaultConstructor */
{
}
}
/// <summary>
/// <para>A resizable byte array.</para><para><para></para><para></para><title>Revision:</title><para>496070 </para></para><para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/util/ByteArrayBuffer
/// </java-name>
// NOTE: Dot42-generated binding stub for org.apache.http.util.ByteArrayBuffer.
// Bodies intentionally return default values; behavior is bound at runtime via
// the DexImport attributes. byte[]/sbyte[] overload pairs exist because Java's
// byte is signed (IgnoreFromJava marks the CLR-only variant).
[Dot42.DexImport("org/apache/http/util/ByteArrayBuffer", AccessFlags = 49)]
public sealed partial class ByteArrayBuffer
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "(I)V", AccessFlags = 1)]
public ByteArrayBuffer(int capacity) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "([BII)V", AccessFlags = 1)]
public void Append(sbyte[] b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "([BII)V", AccessFlags = 1, IgnoreFromJava = true)]
public void Append(byte[] b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "(I)V", AccessFlags = 1)]
public void Append(int b) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "([CII)V", AccessFlags = 1)]
public void Append(char[] b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// append
/// </java-name>
[Dot42.DexImport("append", "(Lorg/apache/http/util/CharArrayBuffer;II)V", AccessFlags = 1)]
public void Append(global::Org.Apache.Http.Util.CharArrayBuffer b, int off, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// clear
/// </java-name>
[Dot42.DexImport("clear", "()V", AccessFlags = 1)]
public void Clear() /* MethodBuilder.Create */
{
}
/// <java-name>
/// toByteArray
/// </java-name>
[Dot42.DexImport("toByteArray", "()[B", AccessFlags = 1)]
public sbyte[] JavaToByteArray() /* MethodBuilder.Create */
{
return default(sbyte[]);
}
/// <java-name>
/// toByteArray
/// </java-name>
[Dot42.DexImport("toByteArray", "()[B", AccessFlags = 1, IgnoreFromJava = true)]
public byte[] ToByteArray() /* MethodBuilder.Create */
{
return default(byte[]);
}
/// <java-name>
/// byteAt
/// </java-name>
[Dot42.DexImport("byteAt", "(I)I", AccessFlags = 1)]
public int ByteAt(int i) /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// capacity
/// </java-name>
[Dot42.DexImport("capacity", "()I", AccessFlags = 1)]
public int Capacity() /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// length
/// </java-name>
[Dot42.DexImport("length", "()I", AccessFlags = 1)]
public int Length() /* MethodBuilder.Create */
{
return default(int);
}
/// <java-name>
/// buffer
/// </java-name>
[Dot42.DexImport("buffer", "()[B", AccessFlags = 1)]
public sbyte[] JavaBuffer() /* MethodBuilder.Create */
{
return default(sbyte[]);
}
/// <java-name>
/// buffer
/// </java-name>
[Dot42.DexImport("buffer", "()[B", AccessFlags = 1, IgnoreFromJava = true)]
public byte[] Buffer() /* MethodBuilder.Create */
{
return default(byte[]);
}
/// <java-name>
/// setLength
/// </java-name>
[Dot42.DexImport("setLength", "(I)V", AccessFlags = 1)]
public void SetLength(int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// isEmpty
/// </java-name>
[Dot42.DexImport("isEmpty", "()Z", AccessFlags = 1)]
public bool IsEmpty() /* MethodBuilder.Create */
{
return default(bool);
}
/// <java-name>
/// isFull
/// </java-name>
[Dot42.DexImport("isFull", "()Z", AccessFlags = 1)]
public bool IsFull() /* MethodBuilder.Create */
{
return default(bool);
}
// Hidden parameterless constructor emitted by the Dot42 type builder;
// not intended for direct use.
[global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
internal ByteArrayBuffer() /* TypeBuilder.AddDefaultConstructor */
{
}
}
}
| |
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web;
using Codex;
using Codex.ObjectModel;
namespace WebUI.Rendering
{
public class SourceFileRenderer
{
BoundSourceFile _sourceFile;
private string projectId;
/// <summary>
/// Creates a renderer for a bound (classified) source file within the named project.
/// </summary>
/// <param name="sourceFile">The bound source file to render; must not be null.</param>
/// <param name="projectId">Identifier of the project the file belongs to.</param>
public SourceFileRenderer(BoundSourceFile sourceFile, string projectId)
{
// Code-contract precondition kept as-is: its enforcement depends on build settings.
Contract.Requires(sourceFile != null);
this.projectId = projectId;
_sourceFile = sourceFile;
}
/// <summary>
/// Gets the contents of the source file with <span> tags added around
/// all the spans specified for this BoundSourceFile that have a
/// class of the Classification for the Symbol
/// </summary>
/// <returns>An <c>EditorModel</c> whose <c>Text</c> is the HTML-annotated source
/// and whose <c>LineNumberText</c> is the line-number gutter markup.</returns>
// NOTE(review): declared async but contains no await (compiler warning CS1998);
// it completes synchronously — confirm whether callers rely on the Task shape only.
public async Task<EditorModel> RenderAsync()
{
var filePath = _sourceFile.SourceFile?.Info?.ProjectRelativePath;
var model = new EditorModel()
{
ProjectId = projectId,
FilePath = filePath,
WebLink = _sourceFile.SourceFile?.Info?.WebAddress,
RepoRelativePath = _sourceFile.SourceFile?.Info?.RepoRelativePath
};
string sourceText = _sourceFile.SourceFile.Content;
int lineCount = GetLineCount(sourceText);
// Base URL for the line-number gutter links; each line appends "&line=N".
var url = $"/?rightProject={HttpUtility.UrlEncode(projectId)}&file={HttpUtility.UrlEncode(filePath)}";
model.LineNumberText = GenerateLineNumberText(lineCount, url);
var ret = new StringBuilder();
int textIndex = 0;
Span prevSpan = null;
// The StringWriter wraps 'ret', so writes via 'sw' (inside GenerateSpan) and
// direct 'ret.Append' calls both land in the same buffer, in order.
using (StringWriter sw = new StringWriter(ret))
{
int referenceIndex = -1;
ReferenceSpan referenceSpan = null;
// Walk classification spans in document order, emitting plain encoded text
// between spans and decorated markup for each span.
foreach (ClassificationSpan span in _sourceFile.Classifications.OrderBy(s => s.Start))
{
if (span.Start > sourceText.Length)
{ //Not sure how this happened but a span is off the end of our text
Debug.WriteLine(
$"Span had Start of {span.Start}, which is greater than text length for file '{_sourceFile.SourceFile.Info.ProjectRelativePath}'", "BoundSourceFileMarkup");
break;
}
if (prevSpan != null && span.Start == prevSpan.Start)
{
// Overlapping spans?
continue;
}
if (span.Start > textIndex)
{ //Span is ahead of our current index, just write the normal text between the two to the buffer
ret.Append(HttpUtility.HtmlEncode(sourceText.Substring(textIndex, span.Start - textIndex)));
textIndex = span.Start;
}
// NOTE(review): no guard that span.Start + span.Length <= sourceText.Length;
// a span overrunning the end would throw here — confirm upstream invariant.
string spanText = sourceText.Substring(span.Start, span.Length);
GenerateSpan(sw, span, spanText, ref referenceIndex, ref referenceSpan, _sourceFile.References);
textIndex += span.Length;
prevSpan = span;
}
// Append any leftover text
ret.Append(HttpUtility.HtmlEncode(sourceText.Substring(textIndex)));
model.Text = ret.ToString();
return model;
}
}
/// <summary>
/// Builds the HTML for the line-number gutter: one anchor per line, each
/// linking to the document URL with a line parameter and wired to GoToLine.
/// </summary>
/// <param name="lineNumbers">Total number of lines to emit; 0 yields an empty string.</param>
/// <param name="documentUrl">Base document URL; "&amp;line=N" is appended for each line.</param>
/// <returns>Concatenated anchor markup, one "<a>…</a><br/>" per line.</returns>
private static string GenerateLineNumberText(int lineNumbers, string documentUrl)
{
if (lineNumbers == 0)
{
return string.Empty;
}
string linkPrefix = documentUrl + "&line=";
var builder = new StringBuilder();
for (int line = 1; line <= lineNumbers; line++)
{
builder.Append($"<a id=\"l{line}\" href=\"{linkPrefix}{line}\" onclick=\"GoToLine({line});return false;\">{line}</a><br/>");
}
return builder.ToString();
}
/// <summary>
/// Counts the lines in <paramref name="text"/>, treating "\r\n", "\n" and a
/// lone "\r" each as a single line break. A trailing line break counts as
/// starting one more (empty) line, matching the previous behavior for LF/CRLF.
/// </summary>
/// <param name="text">Text to scan; null or empty yields 0.</param>
/// <returns>Number of lines (non-empty text has at least one).</returns>
public static int GetLineCount(string text)
{
if (string.IsNullOrEmpty(text))
{
return 0;
}
int lineCount = 1;
for (int i = 0; i < text.Length; i++)
{
char c = text[i];
if (c == '\n')
{
lineCount++;
}
else if (c == '\r')
{
// Bug fix: the old loop only counted a '\r' when it was followed by
// another '\r', so lone CR breaks ("a\rb", trailing "\r") were missed.
// Count every CR, and fold an immediately following '\n' into the same
// break so CRLF still counts once.
lineCount++;
if (i + 1 < text.Length && text[i + 1] == '\n')
{
i++;
}
}
}
return lineCount;
}
// Writes the text of a classification span, turning each reference span that
// falls entirely inside it into a hyperlink and HTML-encoding the plain text
// between/after references. Advances the ref-parameter cursor
// (referenceIndex/currentReference) past every reference it consumes.
private void WriteReferenceText(TextWriter tw, ClassificationSpan span, string spanText, ref int referenceIndex, ref ReferenceSpan currentReference, IReadOnlyList<ReferenceSpan> referenceSpans)
{
int startPosition = span.Start;
int currentPosition = span.Start;
int end = span.End;
// Loop while the next reference starts within [currentPosition, end) and
// also ends within the span (partial overlaps are left as plain text).
while (currentReference != null && currentReference.Start >= currentPosition && currentReference.Start < end && currentReference.End <= end)
{
// NOTE(review): this null check is redundant — the while condition above
// already guarantees currentReference != null here.
if (currentReference != null)
{
GetBestReference(ref referenceIndex, ref currentReference, referenceSpans);
}
if (currentReference.Start > currentPosition)
{
// Plain text before the reference: encode straight to the writer.
HttpUtility.HtmlEncode(spanText.Substring(currentPosition - startPosition, currentReference.Start - currentPosition), tw);
currentPosition = currentReference.Start;
}
if (currentReference.Length > 0)
{
// Emit the reference itself as an <a> (or similar) element.
var htmlElementInfo = GenerateHyperlinkForReference(currentReference.Reference);
WriteHtmlElement(tw, htmlElementInfo, spanText.Substring(currentReference.Start - startPosition, currentReference.Length));
currentPosition = currentReference.End;
}
// Advance the shared cursor to the next reference span, if any.
referenceIndex++;
if (referenceIndex < referenceSpans.Count)
{
currentReference = referenceSpans[referenceIndex];
}
else
{
break;
}
}
// Encode whatever remains of the span after the last consumed reference.
if (currentPosition < end)
{
HttpUtility.HtmlEncode(spanText.Substring(currentPosition - startPosition, end - currentPosition), tw);
}
}
/// <summary>
/// Among consecutive reference spans that start at the same position as
/// <paramref name="currentReference"/>, advances the cursor to a preferred one:
/// a candidate is taken while the current pick is implicitly declared, or when
/// the candidate is an explicit Definition.
/// </summary>
private void GetBestReference(ref int referenceIndex, ref ReferenceSpan currentReference, IReadOnlyList<ReferenceSpan> referenceSpans)
{
// All updates below pick a candidate with the same Start, so hoisting the
// start position does not change which candidates are considered.
int start = currentReference.Start;
for (int candidateIndex = referenceIndex; candidateIndex < referenceSpans.Count; candidateIndex++)
{
ReferenceSpan candidate = referenceSpans[candidateIndex];
if (candidate.Start != start)
{
break;
}
if (currentReference.Reference.IsImplicitlyDeclared || candidate.Reference.ReferenceKind == nameof(ReferenceKind.Definition))
{
currentReference = candidate;
referenceIndex = candidateIndex;
}
}
}
// Emits the markup for one classification span: advances the reference cursor
// to this span, then writes either plain encoded text, a styled <span>, a
// hyperlink element, or a span with embedded reference links, depending on how
// the current reference relates to the classification span.
private void GenerateSpan(TextWriter tw, ClassificationSpan span, string spanText, ref int referenceIndex, ref ReferenceSpan currentReference, IReadOnlyList<ReferenceSpan> referenceSpans)
{
// Skip references that end before this span starts.
while ((currentReference == null || currentReference.Start < span.Start) && referenceIndex < referenceSpans.Count)
{
referenceIndex++;
if (referenceIndex < referenceSpans.Count)
{
currentReference = referenceSpans[referenceIndex];
}
else
{
currentReference = null;
}
}
if (currentReference != null)
{
GetBestReference(ref referenceIndex, ref currentReference, referenceSpans);
}
string cssClass = MapClassificationToCssClass(span.Classification);
string referenceClass = string.Empty;
// LocalGroupId > 0 marks locals that should highlight together on click.
if (span.LocalGroupId > 0)
{
referenceClass = $"r{span.LocalGroupId} r";
cssClass = string.IsNullOrEmpty(cssClass) ? referenceClass : $"{referenceClass} {cssClass}";
}
HtmlElementInfo htmlElementInfo = null;
// Reference coincides exactly with the classification span: link the whole span.
if (currentReference?.SpanEquals(span) == true)
{
htmlElementInfo = GenerateHyperlinkForReference(currentReference.Reference);
}
// No link, no contained reference, no styling: emit plain encoded text.
if (htmlElementInfo == null && !span.Contains(currentReference))
{
if (cssClass == null)
{
tw.Write(HttpUtility.HtmlEncode(spanText));
return;
}
}
string elementName = "span";
bool classAttributeSpecified = false;
if (htmlElementInfo != null)
{
elementName = htmlElementInfo.Name;
// Some elements (e.g. msbuild links) need an outer <span> to carry the css class.
if (htmlElementInfo.RequiresWrappingSpan)
{
tw.Write("<span");
AddAttribute(tw, "class", cssClass);
tw.Write(">");
classAttributeSpecified = true;
}
}
tw.Write("<" + elementName);
if (htmlElementInfo != null)
{
foreach (var attribute in htmlElementInfo.Attributes)
{
if (AddAttribute(tw, attribute.Key, attribute.Value))
{
if (attribute.Key == "class")
{
classAttributeSpecified = true;
}
}
}
}
// Fall back to the classification css class if the element didn't set one.
if (!classAttributeSpecified)
{
AddAttribute(tw, "class", cssClass);
}
// Wire up local-variable highlight toggling unless the element already handles clicks.
if (span.LocalGroupId > 0 && (htmlElementInfo?.Attributes?.ContainsKey("onclick") != true))
{
AddAttribute(tw, "onclick", "t(this);");
}
tw.Write(">");
// Whole-span link or no contained reference: inner text is plain; otherwise
// recurse into the span to link the references embedded inside it.
if (htmlElementInfo != null || !span.Contains(currentReference))
{
tw.Write(HttpUtility.HtmlEncode(spanText));
}
else
{
WriteReferenceText(tw, span, spanText, ref referenceIndex, ref currentReference, referenceSpans);
}
tw.Write("</" + elementName + ">");
if (htmlElementInfo?.RequiresWrappingSpan == true)
{
tw.Write("</span>");
}
}
/// <summary>
/// Writes a complete HTML element to the writer: open tag with all attributes,
/// HTML-encoded inner text, then the close tag.
/// </summary>
void WriteHtmlElement(TextWriter tw, HtmlElementInfo htmlElementInfo, string innerText)
{
string elementName = htmlElementInfo.Name;
tw.Write("<" + elementName);
foreach (var attribute in htmlElementInfo.Attributes)
{
AddAttribute(tw, attribute.Key, attribute.Value);
}
tw.Write(">");
// Encode directly into the writer to avoid an intermediate string.
HttpUtility.HtmlEncode(innerText, tw);
tw.Write("</" + elementName + ">");
}
/// <summary>
/// Emits ' name="value"' to the writer when <paramref name="value"/> is non-null.
/// </summary>
/// <returns>True if the attribute was written; false when value was null.</returns>
bool AddAttribute(TextWriter tw, string name, string value)
{
if (value == null)
{
return false;
}
// NOTE(review): value is written verbatim (no encoding); callers are trusted
// not to pass text containing '"' — confirm values are attribute-safe upstream.
tw.Write(" " + name + "=\"" + value + "\"");
return true;
}
// Builds the <a> element descriptor for a symbol reference: chooses the
// client-side handler (R = resolve references, D = go to definition), the
// fallback href, and — for definitions only — an id anchor so the page can
// scroll to the symbol.
HtmlElementInfo GenerateHyperlinkForReference(ReferenceSymbol symbol)
{
string idHash = symbol.Id.Value;
bool isMsBuild = _sourceFile.SourceFile.Info.Language == "msbuild";
// MSBuild symbols can be (re)defined across many files, so they are treated
// as definitions for linking purposes ("distributed" definitions).
bool isDistributedDefinition =
symbol.Kind == nameof(SymbolKinds.MSBuildItem) ||
symbol.Kind == nameof(SymbolKinds.MSBuildItemMetadata) ||
symbol.Kind == nameof(SymbolKinds.MSBuildProperty) ||
symbol.Kind == nameof(SymbolKinds.MSBuildTarget) ||
symbol.Kind == nameof(SymbolKinds.MSBuildTask) ||
symbol.Kind == nameof(SymbolKinds.MSBuildTaskParameter);
var isDefinition = symbol.ReferenceKind == nameof(ReferenceKind.Definition) || isDistributedDefinition;
bool isProjectScopedReference = symbol.ReferenceKind == nameof(ReferenceKind.ProjectLevelReference);
// Only definitions get an id anchor.
if (!isDefinition)
{
idHash = null;
}
string jsmethod = isDefinition || isProjectScopedReference ? "R" : "D";
string additionalParams = isProjectScopedReference ? $@", '{projectId}'" : string.Empty;
string onclick = $@"{jsmethod}('{symbol.ProjectId}', '{symbol.Id}'{additionalParams});return false;";
string url = "";
if (isDefinition)
{
url = $"/?leftProject={symbol.ProjectId}&leftSymbol={symbol.Id}&file={HttpUtility.UrlEncode(this._sourceFile.SourceFile.Info.ProjectRelativePath)}";
}
else if (isProjectScopedReference)
{
url = $"/?leftProject={symbol.ProjectId}&leftSymbol={symbol.Id}&projectScope={projectId}";
}
else
{
url = $"/?rightProject={symbol.ProjectId}&rightSymbol={symbol.Id}";
}
var result = new HtmlElementInfo()
{
Name = "a",
Attributes =
{
// Null values are skipped by AddAttribute at render time.
{ "id", idHash },
{ "onclick", onclick },
{ "href", url },
{ "class", isDistributedDefinition ? "msbuildlink" : null }
},
DeclaredSymbolId = symbol.Id.Value,
RequiresWrappingSpan = isMsBuild,
};
return result;
}
// Classification names that render as default text and therefore get no CSS class.
private static HashSet<string> ignoreClassifications = new HashSet<string>(new[]
{
"operator",
"number",
"punctuation",
"preprocessor text",
"xml literal - text",
"xml - text"
});
// Maps verbose editor/Roslyn classification names to the short CSS class names
// emitted in the rendered HTML (see the Constants class for the short names).
private static Dictionary<string, string> replaceClassifications = new Dictionary<string, string>
{
{ "xml - delimiter", Constants.ClassificationXmlDelimiter },
{ "xml - name", Constants.ClassificationXmlName },
{ "xml - attribute name", Constants.ClassificationXmlAttributeName },
{ "xml - attribute quotes", Constants.ClassificationXmlAttributeQuotes },
{ "xml - attribute value", Constants.ClassificationXmlAttributeValue },
{ "xml - entity reference", Constants.ClassificationXmlEntityReference },
{ "xml - cdata section", Constants.ClassificationXmlCDataSection },
{ "xml - processing instruction", Constants.ClassificationXmlProcessingInstruction },
{ "xml - comment", Constants.ClassificationComment },
{ "keyword", Constants.ClassificationKeyword },
{ "identifier", Constants.ClassificationIdentifier },
{ "class name", Constants.ClassificationTypeName },
{ "struct name", Constants.ClassificationTypeName },
{ "interface name", Constants.ClassificationTypeName },
{ "enum name", Constants.ClassificationTypeName },
{ "delegate name", Constants.ClassificationTypeName },
{ "module name", Constants.ClassificationTypeName },
{ "type parameter name", Constants.ClassificationTypeName },
{ "preprocessor keyword", Constants.ClassificationKeyword },
{ "xml doc comment - delimiter", Constants.ClassificationComment },
{ "xml doc comment - name", Constants.ClassificationComment },
{ "xml doc comment - text", Constants.ClassificationComment },
{ "xml doc comment - comment", Constants.ClassificationComment },
{ "xml doc comment - entity reference", Constants.ClassificationComment },
{ "xml doc comment - attribute name", Constants.ClassificationComment },
{ "xml doc comment - attribute quotes", Constants.ClassificationComment },
{ "xml doc comment - attribute value", Constants.ClassificationComment },
{ "xml doc comment - cdata section", Constants.ClassificationComment },
{ "xml literal - delimiter", Constants.ClassificationXmlLiteralDelimiter },
{ "xml literal - name", Constants.ClassificationXmlLiteralName },
{ "xml literal - attribute name", Constants.ClassificationXmlLiteralAttributeName },
{ "xml literal - attribute quotes", Constants.ClassificationXmlLiteralAttributeQuotes },
{ "xml literal - attribute value", Constants.ClassificationXmlLiteralAttributeValue },
{ "xml literal - entity reference", Constants.ClassificationXmlLiteralEntityReference },
{ "xml literal - cdata section", Constants.ClassificationXmlLiteralCDataSection },
{ "xml literal - processing instruction", Constants.ClassificationXmlLiteralProcessingInstruction },
{ "xml literal - embedded expression", Constants.ClassificationXmlLiteralEmbeddedExpression },
{ "xml literal - comment", Constants.ClassificationComment },
{ "comment", Constants.ClassificationComment },
{ "string", Constants.ClassificationLiteral },
{ "string - verbatim", Constants.ClassificationLiteral },
{ "excluded code", Constants.ClassificationExcludedCode },
};
/// <summary>
/// Maps a Roslyn classification type name to the short CSS class name used in the
/// generated HTML, or null when the span should be emitted without any class.
/// </summary>
/// <param name="classificationType">The Roslyn classification type name; may be null.</param>
/// <returns>The abbreviated CSS class name, or null when no styling is needed.</returns>
public static string MapClassificationToCssClass(string classificationType)
{
    // Unknown or explicitly ignored classifications get no CSS class at all.
    if (classificationType == null || ignoreClassifications.Contains(classificationType))
    {
        return null;
    }

    // Keywords are already in their final short form; pass straight through.
    if (classificationType == Constants.ClassificationKeyword)
    {
        return classificationType;
    }

    // Translate Roslyn names to our abbreviated names where a mapping exists.
    // Note: a mapping may deliberately map to null (e.g. attribute quotes).
    string replacement;
    if (replaceClassifications.TryGetValue(classificationType, out replacement))
    {
        classificationType = replacement;
    }

    if (string.IsNullOrEmpty(classificationType) ||
        classificationType == Constants.ClassificationIdentifier ||
        classificationType == Constants.ClassificationPunctuation)
    {
        // identifiers are conveniently black by default so let's save some space
        return null;
    }

    return classificationType;
}
/// <summary>
/// Short CSS class names emitted into the generated HTML for each classification
/// kind. The one-to-four-letter names keep the generated output small.
/// </summary>
public class Constants
{
    //public static readonly string IDResolvingFileName = "A";
    //public static readonly string PartialResolvingFileName = "P";
    //public static readonly string ReferencesFileName = "R";
    //public static readonly string DeclaredSymbolsFileName = "D";
    //public static readonly string MasterIndexFileName = "DeclaredSymbols.txt";
    //public static readonly string ReferencedAssemblyList = "References";
    //public static readonly string UsedReferencedAssemblyList = "UsedReferences";
    //public static readonly string ReferencingAssemblyList = "ReferencingAssemblies";
    //public static readonly string ProjectInfoFileName = "i";
    //public static readonly string MasterProjectMap = "Projects";
    //public static readonly string MasterAssemblyMap = "Assemblies";
    //public static readonly string Namespaces = "namespaces.html";

    // General C#/VB classifications.
    public static readonly string ClassificationIdentifier = "i";
    public static readonly string ClassificationKeyword = "k";
    public static readonly string ClassificationTypeName = "t";
    public static readonly string ClassificationComment = "c";
    public static readonly string ClassificationLiteral = "s";

    // XML (doc comment / markup) classifications.
    public static readonly string ClassificationXmlDelimiter = "xd";
    public static readonly string ClassificationXmlName = "xn";
    public static readonly string ClassificationXmlAttributeName = "xan";
    public static readonly string ClassificationXmlAttributeValue = "xav";
    // null: attribute quotes are deliberately given no CSS class.
    public static readonly string ClassificationXmlAttributeQuotes = null;
    public static readonly string ClassificationXmlEntityReference = "xer";
    public static readonly string ClassificationXmlCDataSection = "xcs";
    public static readonly string ClassificationXmlProcessingInstruction = "xpi";

    // VB XML-literal classifications.
    public static readonly string ClassificationXmlLiteralDelimiter = "xld";
    public static readonly string ClassificationXmlLiteralName = "xln";
    public static readonly string ClassificationXmlLiteralAttributeName = "xlan";
    public static readonly string ClassificationXmlLiteralAttributeValue = "xlav";
    public static readonly string ClassificationXmlLiteralAttributeQuotes = "xlaq";
    public static readonly string ClassificationXmlLiteralEntityReference = "xler";
    public static readonly string ClassificationXmlLiteralCDataSection = "xlcs";
    public static readonly string ClassificationXmlLiteralEmbeddedExpression = "xlee";
    public static readonly string ClassificationXmlLiteralProcessingInstruction = "xlpi";

    public static readonly string ClassificationExcludedCode = "e";
    //public static readonly string RoslynClassificationKeyword = "keyword";
    //public static readonly string DeclarationMap = "DeclarationMap";
    public static readonly string ClassificationPunctuation = "punctuation";
    //public static readonly string ProjectExplorer = "ProjectExplorer";
    //public static readonly string SolutionExplorer = "SolutionExplorer";
    //public static readonly string HuffmanFileName = "Huffman.txt";
    //public static readonly string TopReferencedAssemblies = "TopReferencedAssemblies";
    //public static readonly string BaseMembersFileName = "BaseMembers";
    //public static readonly string ImplementedInterfaceMembersFileName = "ImplementedInterfaceMembers";
    //public static readonly string GuidAssembly = "GuidAssembly";
    //public static readonly string MSBuildPropertiesAssembly = "MSBuildProperties";
    //public static readonly string MSBuildItemsAssembly = "MSBuildItems";
    //public static readonly string MSBuildTargetsAssembly = "MSBuildTargets";
    //public static readonly string MSBuildTasksAssembly = "MSBuildTasks";
    //public static readonly string MSBuildFiles = "MSBuildFiles";
    //public static readonly string TypeScriptFiles = "TypeScriptFiles";
    //public static readonly string AssemblyPaths = @"AssemblyPaths.txt";
}
}
}
| |
/*
* ******************************************************************************
* Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
using Ds3.Models;
using System;
using System.Net;
namespace Ds3.Calls
{
/// <summary>
/// Request for the Spectra S3 "get tapes" operation (GET /_rest_/tape).
/// Each With* method is a fluent setter that records the value in a backing field
/// and mirrors it into <c>QueryParams</c>; passing null removes the parameter.
/// </summary>
public class GetTapesSpectraS3Request : Ds3Request
{
    private bool? _assignedToStorageDomain;
    public bool? AssignedToStorageDomain
    {
        get { return _assignedToStorageDomain; }
        set { WithAssignedToStorageDomain(value); }
    }

    private long? _availableRawCapacity;
    public long? AvailableRawCapacity
    {
        get { return _availableRawCapacity; }
        set { WithAvailableRawCapacity(value); }
    }

    private string _barCode;
    public string BarCode
    {
        get { return _barCode; }
        set { WithBarCode(value); }
    }

    private string _bucketId;
    public string BucketId
    {
        get { return _bucketId; }
        set { WithBucketId(value); }
    }

    private string _ejectLabel;
    public string EjectLabel
    {
        get { return _ejectLabel; }
        set { WithEjectLabel(value); }
    }

    private string _ejectLocation;
    public string EjectLocation
    {
        get { return _ejectLocation; }
        set { WithEjectLocation(value); }
    }

    private bool? _fullOfData;
    public bool? FullOfData
    {
        get { return _fullOfData; }
        set { WithFullOfData(value); }
    }

    private bool? _lastPage;
    public bool? LastPage
    {
        get { return _lastPage; }
        set { WithLastPage(value); }
    }

    private DateTime? _lastVerified;
    public DateTime? LastVerified
    {
        get { return _lastVerified; }
        set { WithLastVerified(value); }
    }

    private int? _pageLength;
    public int? PageLength
    {
        get { return _pageLength; }
        set { WithPageLength(value); }
    }

    private int? _pageOffset;
    public int? PageOffset
    {
        get { return _pageOffset; }
        set { WithPageOffset(value); }
    }

    private string _pageStartMarker;
    public string PageStartMarker
    {
        get { return _pageStartMarker; }
        set { WithPageStartMarker(value); }
    }

    private DateTime? _partiallyVerifiedEndOfTape;
    public DateTime? PartiallyVerifiedEndOfTape
    {
        get { return _partiallyVerifiedEndOfTape; }
        set { WithPartiallyVerifiedEndOfTape(value); }
    }

    private string _partitionId;
    public string PartitionId
    {
        get { return _partitionId; }
        set { WithPartitionId(value); }
    }

    private TapeState? _previousState;
    public TapeState? PreviousState
    {
        get { return _previousState; }
        set { WithPreviousState(value); }
    }

    private string _serialNumber;
    public string SerialNumber
    {
        get { return _serialNumber; }
        set { WithSerialNumber(value); }
    }

    private string _sortBy;
    public string SortBy
    {
        get { return _sortBy; }
        set { WithSortBy(value); }
    }

    private TapeState? _state;
    public TapeState? State
    {
        get { return _state; }
        set { WithState(value); }
    }

    private string _storageDomainMemberId;
    public string StorageDomainMemberId
    {
        get { return _storageDomainMemberId; }
        set { WithStorageDomainMemberId(value); }
    }

    private string _type;
    public string Type
    {
        get { return _type; }
        set { WithType(value); }
    }

    private Priority? _verifyPending;
    public Priority? VerifyPending
    {
        get { return _verifyPending; }
        set { WithVerifyPending(value); }
    }

    private bool? _writeProtected;
    public bool? WriteProtected
    {
        get { return _writeProtected; }
        set { WithWriteProtected(value); }
    }

    /// <summary>
    /// Adds <paramref name="paramValue"/>.ToString() to the query string under
    /// <paramref name="paramName"/>, or removes the parameter when the value is null.
    /// Boxing a null Nullable&lt;T&gt; yields a null object, so nullable value types
    /// and reference types are handled uniformly.
    /// </summary>
    private void SetQueryParam(string paramName, object paramValue)
    {
        if (paramValue != null)
        {
            this.QueryParams.Add(paramName, paramValue.ToString());
        }
        else
        {
            this.QueryParams.Remove(paramName);
        }
    }

    public GetTapesSpectraS3Request WithAssignedToStorageDomain(bool? assignedToStorageDomain)
    {
        this._assignedToStorageDomain = assignedToStorageDomain;
        SetQueryParam("assigned_to_storage_domain", assignedToStorageDomain);
        return this;
    }

    public GetTapesSpectraS3Request WithAvailableRawCapacity(long? availableRawCapacity)
    {
        this._availableRawCapacity = availableRawCapacity;
        SetQueryParam("available_raw_capacity", availableRawCapacity);
        return this;
    }

    public GetTapesSpectraS3Request WithBarCode(string barCode)
    {
        this._barCode = barCode;
        SetQueryParam("bar_code", barCode);
        return this;
    }

    public GetTapesSpectraS3Request WithBucketId(Guid? bucketId)
    {
        // BUGFIX: previously the field was assigned bucketId.ToString() unconditionally;
        // Nullable<Guid>.ToString() returns "" when HasValue is false, leaving the field
        // as "" while the query parameter was removed. Store null instead so the field
        // stays consistent with the string overload and with the removed parameter.
        this._bucketId = bucketId == null ? null : bucketId.ToString();
        SetQueryParam("bucket_id", bucketId);
        return this;
    }

    public GetTapesSpectraS3Request WithBucketId(string bucketId)
    {
        this._bucketId = bucketId;
        SetQueryParam("bucket_id", bucketId);
        return this;
    }

    public GetTapesSpectraS3Request WithEjectLabel(string ejectLabel)
    {
        this._ejectLabel = ejectLabel;
        SetQueryParam("eject_label", ejectLabel);
        return this;
    }

    public GetTapesSpectraS3Request WithEjectLocation(string ejectLocation)
    {
        this._ejectLocation = ejectLocation;
        SetQueryParam("eject_location", ejectLocation);
        return this;
    }

    public GetTapesSpectraS3Request WithFullOfData(bool? fullOfData)
    {
        this._fullOfData = fullOfData;
        SetQueryParam("full_of_data", fullOfData);
        return this;
    }

    public GetTapesSpectraS3Request WithLastPage(bool? lastPage)
    {
        this._lastPage = lastPage;
        SetQueryParam("last_page", lastPage);
        return this;
    }

    public GetTapesSpectraS3Request WithLastVerified(DateTime? lastVerified)
    {
        this._lastVerified = lastVerified;
        SetQueryParam("last_verified", lastVerified);
        return this;
    }

    public GetTapesSpectraS3Request WithPageLength(int? pageLength)
    {
        this._pageLength = pageLength;
        SetQueryParam("page_length", pageLength);
        return this;
    }

    public GetTapesSpectraS3Request WithPageOffset(int? pageOffset)
    {
        this._pageOffset = pageOffset;
        SetQueryParam("page_offset", pageOffset);
        return this;
    }

    public GetTapesSpectraS3Request WithPageStartMarker(Guid? pageStartMarker)
    {
        // BUGFIX: see WithBucketId(Guid?) — store null, not "", for a null Guid?.
        this._pageStartMarker = pageStartMarker == null ? null : pageStartMarker.ToString();
        SetQueryParam("page_start_marker", pageStartMarker);
        return this;
    }

    public GetTapesSpectraS3Request WithPageStartMarker(string pageStartMarker)
    {
        this._pageStartMarker = pageStartMarker;
        SetQueryParam("page_start_marker", pageStartMarker);
        return this;
    }

    public GetTapesSpectraS3Request WithPartiallyVerifiedEndOfTape(DateTime? partiallyVerifiedEndOfTape)
    {
        this._partiallyVerifiedEndOfTape = partiallyVerifiedEndOfTape;
        SetQueryParam("partially_verified_end_of_tape", partiallyVerifiedEndOfTape);
        return this;
    }

    public GetTapesSpectraS3Request WithPartitionId(Guid? partitionId)
    {
        // BUGFIX: see WithBucketId(Guid?) — store null, not "", for a null Guid?.
        this._partitionId = partitionId == null ? null : partitionId.ToString();
        SetQueryParam("partition_id", partitionId);
        return this;
    }

    public GetTapesSpectraS3Request WithPartitionId(string partitionId)
    {
        this._partitionId = partitionId;
        SetQueryParam("partition_id", partitionId);
        return this;
    }

    public GetTapesSpectraS3Request WithPreviousState(TapeState? previousState)
    {
        this._previousState = previousState;
        SetQueryParam("previous_state", previousState);
        return this;
    }

    public GetTapesSpectraS3Request WithSerialNumber(string serialNumber)
    {
        this._serialNumber = serialNumber;
        SetQueryParam("serial_number", serialNumber);
        return this;
    }

    public GetTapesSpectraS3Request WithSortBy(string sortBy)
    {
        this._sortBy = sortBy;
        SetQueryParam("sort_by", sortBy);
        return this;
    }

    public GetTapesSpectraS3Request WithState(TapeState? state)
    {
        this._state = state;
        SetQueryParam("state", state);
        return this;
    }

    public GetTapesSpectraS3Request WithStorageDomainMemberId(Guid? storageDomainMemberId)
    {
        // BUGFIX: see WithBucketId(Guid?) — store null, not "", for a null Guid?.
        this._storageDomainMemberId = storageDomainMemberId == null ? null : storageDomainMemberId.ToString();
        SetQueryParam("storage_domain_member_id", storageDomainMemberId);
        return this;
    }

    public GetTapesSpectraS3Request WithStorageDomainMemberId(string storageDomainMemberId)
    {
        this._storageDomainMemberId = storageDomainMemberId;
        SetQueryParam("storage_domain_member_id", storageDomainMemberId);
        return this;
    }

    public GetTapesSpectraS3Request WithType(string type)
    {
        this._type = type;
        SetQueryParam("type", type);
        return this;
    }

    public GetTapesSpectraS3Request WithVerifyPending(Priority? verifyPending)
    {
        this._verifyPending = verifyPending;
        SetQueryParam("verify_pending", verifyPending);
        return this;
    }

    public GetTapesSpectraS3Request WithWriteProtected(bool? writeProtected)
    {
        this._writeProtected = writeProtected;
        SetQueryParam("write_protected", writeProtected);
        return this;
    }

    public GetTapesSpectraS3Request()
    {
    }

    internal override HttpVerb Verb
    {
        get
        {
            return HttpVerb.GET;
        }
    }

    internal override string Path
    {
        get
        {
            return "/_rest_/tape";
        }
    }
}
}
| |
/*
* Farseer Physics Engine:
* Copyright (c) 2012 Ian Qvist
*
* Original source Box2D:
* Copyright (c) 2006-2011 Erin Catto http://www.box2d.org
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
#pragma warning disable 0162
using System.Diagnostics;
namespace TrueSync.Physics2D
{
// Linear constraint (point-to-line)
// d = p2 - p1 = x2 + r2 - x1 - r1
// C = dot(perp, d)
// Cdot = dot(d, cross(w1, perp)) + dot(perp, v2 + cross(w2, r2) - v1 - cross(w1, r1))
// = -dot(perp, v1) - dot(cross(d + r1, perp), w1) + dot(perp, v2) + dot(cross(r2, perp), v2)
// J = [-perp, -cross(d + r1, perp), perp, cross(r2,perp)]
//
// Angular constraint
// C = a2 - a1 + a_initial
// Cdot = w2 - w1
// J = [0 0 -1 0 0 1]
//
// K = J * invM * JT
//
// J = [-a -s1 a s2]
// [0 -1 0 1]
// a = perp
// s1 = cross(d + r1, a) = cross(p2 - x1, a)
// s2 = cross(r2, a) = cross(p2 - x2, a)
// Motor/Limit linear constraint
// C = dot(ax1, d)
// Cdot = = -dot(ax1, v1) - dot(cross(d + r1, ax1), w1) + dot(ax1, v2) + dot(cross(r2, ax1), v2)
// J = [-ax1 -cross(d+r1,ax1) ax1 cross(r2,ax1)]
// Block Solver
// We develop a block solver that includes the joint limit. This makes the limit stiff (inelastic) even
// when the mass has poor distribution (leading to large torques about the joint anchor points).
//
// The Jacobian has 3 rows:
// J = [-uT -s1 uT s2] // linear
// [0 -1 0 1] // angular
// [-vT -a1 vT a2] // limit
//
// u = perp
// v = axis
// s1 = cross(d + r1, u), s2 = cross(r2, u)
// a1 = cross(d + r1, v), a2 = cross(r2, v)
// M * (v2 - v1) = JT * df
// J * v2 = bias
//
// v2 = v1 + invM * JT * df
// J * (v1 + invM * JT * df) = bias
// K * df = bias - J * v1 = -Cdot
// K = J * invM * JT
// Cdot = J * v1 - bias
//
// Now solve for f2.
// df = f2 - f1
// K * (f2 - f1) = -Cdot
// f2 = invK * (-Cdot) + f1
//
// Clamp accumulated limit impulse.
// lower: f2(3) = max(f2(3), 0)
// upper: f2(3) = min(f2(3), 0)
//
// Solve for correct f2(1:2)
// K(1:2, 1:2) * f2(1:2) = -Cdot(1:2) - K(1:2,3) * f2(3) + K(1:2,1:3) * f1
// = -Cdot(1:2) - K(1:2,3) * f2(3) + K(1:2,1:2) * f1(1:2) + K(1:2,3) * f1(3)
// K(1:2, 1:2) * f2(1:2) = -Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3)) + K(1:2,1:2) * f1(1:2)
// f2(1:2) = invK(1:2,1:2) * (-Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3))) + f1(1:2)
//
// Now compute impulse to be applied:
// df = f2 - f1
/// <summary>
/// A prismatic joint. This joint provides one degree of freedom: translation
/// along an axis fixed in bodyA. Relative rotation is prevented. You can
/// use a joint limit to restrict the range of motion and a joint motor to
/// drive the motion or to model joint friction.
/// </summary>
public class PrismaticJoint : Joint2D
{
// Axis perpendicular to LocalXAxis, expressed in BodyA's local frame.
private TSVector2 _localYAxisA;
// Accumulated constraint impulses: x = perpendicular, y = angular, z = limit.
private TSVector _impulse;
private FP _lowerTranslation;
private FP _upperTranslation;
private FP _maxMotorForce;
private FP _motorSpeed;
private bool _enableLimit;
private bool _enableMotor;
private LimitState _limitState;

// Solver temp
private int _indexA;
private int _indexB;
private TSVector2 _localCenterA;
private TSVector2 _localCenterB;
private FP _invMassA;
private FP _invMassB;
private FP _invIA;
private FP _invIB;
// World-space joint axis and its perpendicular, recomputed each solver step.
private TSVector2 _axis, _perp;
private FP _s1, _s2;
private FP _a1, _a2;
private Mat33 _K;
private FP _motorMass;
// Original axis value as supplied by the caller (kept for the Axis property).
private TSVector2 _axis1;
// Internal parameterless constructor; only sets the joint type.
internal PrismaticJoint()
{
    JointType = JointType.Prismatic;
}
/// <summary>
/// This requires defining a line of
/// motion using an axis and an anchor point. The definition uses local
/// anchor points and a local axis so that the initial configuration
/// can violate the constraint slightly. The joint translation is zero
/// when the local anchor points coincide in world space. Using local
/// anchors and a local axis helps when saving and loading a game.
/// </summary>
/// <param name="bodyA">The first body.</param>
/// <param name="bodyB">The second body.</param>
/// <param name="anchorA">The first body anchor.</param>
/// <param name="anchorB">The second body anchor.</param>
/// <param name="axis">The axis.</param>
/// <param name="useWorldCoordinates">Set to true if you are using world coordinates as anchors.</param>
public PrismaticJoint(Body bodyA, Body bodyB, TSVector2 anchorA, TSVector2 anchorB, TSVector2 axis, bool useWorldCoordinates = false)
    : base(bodyA, bodyB)
{
    Initialize(anchorA, anchorB, axis, useWorldCoordinates);
}

/// <summary>
/// Convenience overload that uses the same anchor point for both bodies.
/// </summary>
public PrismaticJoint(Body bodyA, Body bodyB, TSVector2 anchor, TSVector2 axis, bool useWorldCoordinates = false)
    : base(bodyA, bodyB)
{
    Initialize(anchor, anchor, axis, useWorldCoordinates);
}
// Shared constructor logic: stores the anchors (converting from world space if
// requested), the axis, and the reference angle between the two bodies.
private void Initialize(TSVector2 localAnchorA, TSVector2 localAnchorB, TSVector2 axis, bool useWorldCoordinates)
{
    JointType = JointType.Prismatic;

    if (useWorldCoordinates)
    {
        LocalAnchorA = BodyA.GetLocalPoint(localAnchorA);
        LocalAnchorB = BodyB.GetLocalPoint(localAnchorB);
    }
    else
    {
        LocalAnchorA = localAnchorA;
        LocalAnchorB = localAnchorB;
    }

    Axis = axis; //FPE only: store the orignal value for use in Serialization
    // Angular offset at setup time; the joint keeps the relative rotation fixed at this value.
    ReferenceAngle = BodyB.Rotation - BodyA.Rotation;
    _limitState = LimitState.Inactive;
}
/// <summary>
/// The local anchor point on BodyA
/// </summary>
public TSVector2 LocalAnchorA { get; set; }

/// <summary>
/// The local anchor point on BodyB
/// </summary>
public TSVector2 LocalAnchorB { get; set; }

/// <summary>
/// The anchor on BodyA in world coordinates; setting converts into BodyA's local frame.
/// </summary>
public override TSVector2 WorldAnchorA
{
    get { return BodyA.GetWorldPoint(LocalAnchorA); }
    set { LocalAnchorA = BodyA.GetLocalPoint(value); }
}

/// <summary>
/// The anchor on BodyB in world coordinates; setting converts into BodyB's local frame.
/// </summary>
public override TSVector2 WorldAnchorB
{
    get { return BodyB.GetWorldPoint(LocalAnchorB); }
    set { LocalAnchorB = BodyB.GetLocalPoint(value); }
}

/// <summary>
/// Get the current joint translation, usually in meters.
/// </summary>
/// <value></value>
public FP JointTranslation
{
    get
    {
        // Project the separation of the two world anchors onto the world-space joint axis.
        TSVector2 d = BodyB.GetWorldPoint(LocalAnchorB) - BodyA.GetWorldPoint(LocalAnchorA);
        TSVector2 axis = BodyA.GetWorldVector(LocalXAxis);

        return TSVector2.Dot(d, axis);
    }
}

/// <summary>
/// Get the current joint translation speed, usually in meters per second.
/// </summary>
/// <value></value>
public FP JointSpeed
{
    get
    {
        Transform xf1, xf2;
        BodyA.GetTransform(out xf1);
        BodyB.GetTransform(out xf2);

        // Anchor offsets from each body's center of mass, rotated into world space.
        TSVector2 r1 = MathUtils.Mul(ref xf1.q, LocalAnchorA - BodyA.LocalCenter);
        TSVector2 r2 = MathUtils.Mul(ref xf2.q, LocalAnchorB - BodyB.LocalCenter);
        TSVector2 p1 = BodyA._sweep.C + r1;
        TSVector2 p2 = BodyB._sweep.C + r2;
        TSVector2 d = p2 - p1;
        TSVector2 axis = BodyA.GetWorldVector(LocalXAxis);

        TSVector2 v1 = BodyA._linearVelocity;
        TSVector2 v2 = BodyB._linearVelocity;
        FP w1 = BodyA._angularVelocity;
        FP w2 = BodyB._angularVelocity;

        // Relative anchor velocity projected onto the axis, including the
        // contribution of the axis itself rotating with BodyA.
        FP speed = TSVector2.Dot(d, MathUtils.Cross(w1, axis)) + TSVector2.Dot(axis, v2 + MathUtils.Cross(w2, r2) - v1 - MathUtils.Cross(w1, r1));
        return speed;
    }
}
/// <summary>
/// Is the joint limit enabled?
/// </summary>
/// <value><c>true</c> if [limit enabled]; otherwise, <c>false</c>.</value>
public bool LimitEnabled
{
    get { return _enableLimit; }
    set
    {
        Debug.Assert(BodyA.FixedRotation == false || BodyB.FixedRotation == false, "Warning: limits does currently not work with fixed rotation");

        if (value != _enableLimit)
        {
            WakeBodies();
            _enableLimit = value;
            // Reset the accumulated limit impulse. Written as 0.0f for consistency
            // with LowerLimit/UpperLimit/SetLimits, which clear the same component.
            _impulse.z = 0.0f;
        }
    }
}
/// <summary>
/// Get the lower joint limit, usually in meters.
/// </summary>
/// <value></value>
public FP LowerLimit
{
    get { return _lowerTranslation; }
    set
    {
        if (value != _lowerTranslation)
        {
            WakeBodies();
            _lowerTranslation = value;
            // Discard the accumulated limit impulse; it was computed for the old limit.
            _impulse.z = 0.0f;
        }
    }
}

/// <summary>
/// Get the upper joint limit, usually in meters.
/// </summary>
/// <value></value>
public FP UpperLimit
{
    get { return _upperTranslation; }
    set
    {
        if (value != _upperTranslation)
        {
            WakeBodies();
            _upperTranslation = value;
            // Discard the accumulated limit impulse; it was computed for the old limit.
            _impulse.z = 0.0f;
        }
    }
}

/// <summary>
/// Set the joint limits, usually in meters.
/// </summary>
/// <param name="lower">The lower limit</param>
/// <param name="upper">The upper limit</param>
public void SetLimits(FP lower, FP upper)
{
    if (upper != _upperTranslation || lower != _lowerTranslation)
    {
        WakeBodies();
        _upperTranslation = upper;
        _lowerTranslation = lower;
        // Discard the accumulated limit impulse; it was computed for the old limits.
        _impulse.z = 0.0f;
    }
}
/// <summary>
/// Is the joint motor enabled?
/// </summary>
/// <value><c>true</c> if [motor enabled]; otherwise, <c>false</c>.</value>
public bool MotorEnabled
{
    get { return _enableMotor; }
    set
    {
        // Wake both bodies so the motor change takes effect immediately.
        WakeBodies();
        _enableMotor = value;
    }
}

/// <summary>
/// Set the motor speed, usually in meters per second.
/// </summary>
/// <value>The speed.</value>
public FP MotorSpeed
{
    set
    {
        WakeBodies();
        _motorSpeed = value;
    }
    get { return _motorSpeed; }
}

/// <summary>
/// Set the maximum motor force, usually in N.
/// </summary>
/// <value>The force.</value>
public FP MaxMotorForce
{
    get { return _maxMotorForce; }
    set
    {
        WakeBodies();
        _maxMotorForce = value;
    }
}

/// <summary>
/// Get the current motor impulse, usually in N.
/// </summary>
/// <value></value>
public FP MotorImpulse { get; set; }

/// <summary>
/// Gets the motor force.
/// </summary>
/// <param name="invDt">The inverse delta time</param>
public FP GetMotorForce(FP invDt)
{
    // Force = impulse / dt.
    return invDt * MotorImpulse;
}
/// <summary>
/// The axis at which the joint moves.
/// </summary>
public TSVector2 Axis
{
    get { return _axis1; }
    set
    {
        _axis1 = value;
        // Convert to BodyA's local frame, normalize, and derive the perpendicular
        // axis used by the prismatic constraint.
        LocalXAxis = BodyA.GetLocalVector(_axis1);
        LocalXAxis.Normalize();
        _localYAxisA = MathUtils.Cross(1.0f, LocalXAxis);
    }
}

/// <summary>
/// The axis in local coordinates relative to BodyA
/// </summary>
public TSVector2 LocalXAxis { get; private set; }

/// <summary>
/// The reference angle.
/// </summary>
public FP ReferenceAngle { get; set; }

// Reaction force: perpendicular impulse plus motor/limit impulse along the axis, scaled by 1/dt.
public override TSVector2 GetReactionForce(FP invDt)
{
    return invDt * (_impulse.x * _perp + (MotorImpulse + _impulse.z) * _axis);
}

// Reaction torque: the accumulated angular impulse scaled by 1/dt.
public override FP GetReactionTorque(FP invDt)
{
    return invDt * _impulse.y;
}
// Prepares per-step solver data: caches body state, builds the constraint basis
// (axis/perpendicular Jacobians), the effective-mass matrix K, updates the limit
// state, and warm-starts by applying last step's scaled impulses.
internal override void InitVelocityConstraints(ref SolverData data)
{
    _indexA = BodyA.IslandIndex;
    _indexB = BodyB.IslandIndex;
    _localCenterA = BodyA._sweep.LocalCenter;
    _localCenterB = BodyB._sweep.LocalCenter;
    _invMassA = BodyA._invMass;
    _invMassB = BodyB._invMass;
    _invIA = BodyA._invI;
    _invIB = BodyB._invI;

    TSVector2 cA = data.positions[_indexA].c;
    FP aA = data.positions[_indexA].a;
    TSVector2 vA = data.velocities[_indexA].v;
    FP wA = data.velocities[_indexA].w;

    TSVector2 cB = data.positions[_indexB].c;
    FP aB = data.positions[_indexB].a;
    TSVector2 vB = data.velocities[_indexB].v;
    FP wB = data.velocities[_indexB].w;

    Rot qA = new Rot(aA), qB = new Rot(aB);

    // Compute the effective masses.
    TSVector2 rA = MathUtils.Mul(qA, LocalAnchorA - _localCenterA);
    TSVector2 rB = MathUtils.Mul(qB, LocalAnchorB - _localCenterB);
    // d: vector from anchor on A to anchor on B, in world space.
    TSVector2 d = (cB - cA) + rB - rA;

    FP mA = _invMassA, mB = _invMassB;
    FP iA = _invIA, iB = _invIB;

    // Compute motor Jacobian and effective mass.
    {
        _axis = MathUtils.Mul(qA, LocalXAxis);
        _a1 = MathUtils.Cross(d + rA, _axis);
        _a2 = MathUtils.Cross(rB, _axis);

        _motorMass = mA + mB + iA * _a1 * _a1 + iB * _a2 * _a2;
        if (_motorMass > 0.0f)
        {
            // Invert once so the motor solve is a multiply.
            _motorMass = 1.0f / _motorMass;
        }
    }

    // Prismatic constraint.
    {
        _perp = MathUtils.Mul(qA, _localYAxisA);

        _s1 = MathUtils.Cross(d + rA, _perp);
        _s2 = MathUtils.Cross(rB, _perp);

        // Build the symmetric 3x3 effective-mass matrix K = J * invM * JT
        // (rows: perpendicular, angular, axis/limit).
        FP k11 = mA + mB + iA * _s1 * _s1 + iB * _s2 * _s2;
        FP k12 = iA * _s1 + iB * _s2;
        FP k13 = iA * _s1 * _a1 + iB * _s2 * _a2;
        FP k22 = iA + iB;
        if (k22 == 0.0f)
        {
            // For bodies with fixed rotation.
            k22 = 1.0f;
        }
        FP k23 = iA * _a1 + iB * _a2;
        FP k33 = mA + mB + iA * _a1 * _a1 + iB * _a2 * _a2;

        _K.ex = new TSVector(k11, k12, k13);
        _K.ey = new TSVector(k12, k22, k23);
        _K.ez = new TSVector(k13, k23, k33);
    }

    // Compute motor and limit terms.
    if (_enableLimit)
    {
        FP jointTranslation = TSVector2.Dot(_axis, d);
        if (FP.Abs(_upperTranslation - _lowerTranslation) < 2.0f * Settings.LinearSlop)
        {
            // Limits so close together that they act as a rigid equality constraint.
            _limitState = LimitState.Equal;
        }
        else if (jointTranslation <= _lowerTranslation)
        {
            if (_limitState != LimitState.AtLower)
            {
                // Just reached the lower limit; reset the accumulated limit impulse.
                _limitState = LimitState.AtLower;
                _impulse.z = 0.0f;
            }
        }
        else if (jointTranslation >= _upperTranslation)
        {
            if (_limitState != LimitState.AtUpper)
            {
                // Just reached the upper limit; reset the accumulated limit impulse.
                _limitState = LimitState.AtUpper;
                _impulse.z = 0.0f;
            }
        }
        else
        {
            _limitState = LimitState.Inactive;
            _impulse.z = 0.0f;
        }
    }
    else
    {
        _limitState = LimitState.Inactive;
        _impulse.z = 0.0f;
    }

    if (_enableMotor == false)
    {
        MotorImpulse = 0.0f;
    }

    if (Settings.EnableWarmstarting)
    {
        // Account for variable time step.
        _impulse *= data.step.dtRatio;
        MotorImpulse *= data.step.dtRatio;

        // Apply last step's impulses up front to speed up convergence.
        TSVector2 P = _impulse.x * _perp + (MotorImpulse + _impulse.z) * _axis;
        FP LA = _impulse.x * _s1 + _impulse.y + (MotorImpulse + _impulse.z) * _a1;
        FP LB = _impulse.x * _s2 + _impulse.y + (MotorImpulse + _impulse.z) * _a2;

        vA -= mA * P;
        wA -= iA * LA;

        vB += mB * P;
        wB += iB * LB;
    }
    else
    {
        _impulse = TSVector.zero;
        MotorImpulse = 0.0f;
    }

    data.velocities[_indexA].v = vA;
    data.velocities[_indexA].w = wA;
    data.velocities[_indexB].v = vB;
    data.velocities[_indexB].w = wB;
}
// One velocity-solver iteration: solves the motor constraint, then either the
// combined prismatic+limit constraint (3x3 block solve) or just the prismatic
// constraint (2x2 solve) when the limit is inactive.
internal override void SolveVelocityConstraints(ref SolverData data)
{
    TSVector2 vA = data.velocities[_indexA].v;
    FP wA = data.velocities[_indexA].w;
    TSVector2 vB = data.velocities[_indexB].v;
    FP wB = data.velocities[_indexB].w;

    FP mA = _invMassA, mB = _invMassB;
    FP iA = _invIA, iB = _invIB;

    // Solve linear motor constraint.
    if (_enableMotor && _limitState != LimitState.Equal)
    {
        FP Cdot = TSVector2.Dot(_axis, vB - vA) + _a2 * wB - _a1 * wA;
        FP impulse = _motorMass * (_motorSpeed - Cdot);
        FP oldImpulse = MotorImpulse;
        // Clamp the accumulated (not incremental) impulse to the motor's force budget.
        FP maxImpulse = data.step.dt * _maxMotorForce;
        MotorImpulse = MathUtils.Clamp(MotorImpulse + impulse, -maxImpulse, maxImpulse);
        impulse = MotorImpulse - oldImpulse;

        TSVector2 P = impulse * _axis;
        FP LA = impulse * _a1;
        FP LB = impulse * _a2;

        vA -= mA * P;
        wA -= iA * LA;

        vB += mB * P;
        wB += iB * LB;
    }

    // Constraint velocities: x = along perpendicular, y = angular.
    TSVector2 Cdot1 = new TSVector2();
    Cdot1.x = TSVector2.Dot(_perp, vB - vA) + _s2 * wB - _s1 * wA;
    Cdot1.y = wB - wA;

    if (_enableLimit && _limitState != LimitState.Inactive)
    {
        // Solve prismatic and limit constraint in block form.
        FP Cdot2;
        Cdot2 = TSVector2.Dot(_axis, vB - vA) + _a2 * wB - _a1 * wA;
        TSVector Cdot = new TSVector(Cdot1.x, Cdot1.y, Cdot2);

        TSVector f1 = _impulse;
        TSVector df = _K.Solve33(Cdot * -1);
        _impulse += df;

        // Clamp the accumulated limit impulse to push in one direction only.
        if (_limitState == LimitState.AtLower)
        {
            _impulse.z = TrueSync.TSMath.Max(_impulse.z, 0.0f);
        }
        else if (_limitState == LimitState.AtUpper)
        {
            _impulse.z = TrueSync.TSMath.Min(_impulse.z, 0.0f);
        }

        // f2(1:2) = invK(1:2,1:2) * (-Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3))) + f1(1:2)
        TSVector2 b = -Cdot1 - (_impulse.z - f1.z) * new TSVector2(_K.ez.x, _K.ez.y);
        TSVector2 f2r = _K.Solve22(b) + new TSVector2(f1.x, f1.y);
        _impulse.x = f2r.x;
        _impulse.y = f2r.y;

        df = _impulse - f1;

        TSVector2 P = df.x * _perp + df.z * _axis;
        FP LA = df.x * _s1 + df.y + df.z * _a1;
        FP LB = df.x * _s2 + df.y + df.z * _a2;

        vA -= mA * P;
        wA -= iA * LA;

        vB += mB * P;
        wB += iB * LB;
    }
    else
    {
        // Limit is inactive, just solve the prismatic constraint in block form.
        TSVector2 df = _K.Solve22(-Cdot1);
        _impulse.x += df.x;
        _impulse.y += df.y;

        TSVector2 P = df.x * _perp;
        FP LA = df.x * _s1 + df.y;
        FP LB = df.x * _s2 + df.y;

        vA -= mA * P;
        wA -= iA * LA;

        vB += mB * P;
        wB += iB * LB;
    }

    data.velocities[_indexA].v = vA;
    data.velocities[_indexA].w = wA;
    data.velocities[_indexB].v = vB;
    data.velocities[_indexB].w = wB;
}
/// <summary>
/// Position solver for the prismatic joint (Box2D-style split impulse solver).
/// Recomputes the Jacobians from the current body positions, solves for a
/// pseudo-impulse that removes perpendicular/angular drift (and limit
/// violation when the limit is active), and applies the correction directly
/// to positions/angles rather than velocities.
/// </summary>
/// <param name="data">Solver data holding per-body position state, indexed by _indexA/_indexB.</param>
/// <returns>True when both the linear and angular errors are within the engine's slop tolerances.</returns>
internal override bool SolvePositionConstraints(ref SolverData data)
{
    TSVector2 cA = data.positions[_indexA].c;
    FP aA = data.positions[_indexA].a;
    TSVector2 cB = data.positions[_indexB].c;
    FP aB = data.positions[_indexB].a;
    Rot qA = new Rot(aA), qB = new Rot(aB);
    FP mA = _invMassA, mB = _invMassB;
    FP iA = _invIA, iB = _invIB;
    // Compute fresh Jacobians
    TSVector2 rA = MathUtils.Mul(qA, LocalAnchorA - _localCenterA);
    TSVector2 rB = MathUtils.Mul(qB, LocalAnchorB - _localCenterB);
    TSVector2 d = cB + rB - cA - rA;
    TSVector2 axis = MathUtils.Mul(qA, LocalXAxis);
    FP a1 = MathUtils.Cross(d + rA, axis);
    FP a2 = MathUtils.Cross(rB, axis);
    TSVector2 perp = MathUtils.Mul(qA, _localYAxisA);
    FP s1 = MathUtils.Cross(d + rA, perp);
    FP s2 = MathUtils.Cross(rB, perp);
    TSVector impulse;
    // C1 holds the positional constraint errors: drift perpendicular to the
    // slide axis (x) and relative angle error (y).
    TSVector2 C1 = new TSVector2();
    C1.x = TSVector2.Dot(perp, d);
    C1.y = aB - aA - ReferenceAngle;
    FP linearError = FP.Abs(C1.x);
    FP angularError = FP.Abs(C1.y);
    bool active = false;
    FP C2 = 0.0f;
    if (_enableLimit)
    {
        FP translation = TSVector2.Dot(axis, d);
        if (FP.Abs(_upperTranslation - _lowerTranslation) < 2.0f * Settings.LinearSlop)
        {
            // Limits are (nearly) equal: treat as an equality constraint.
            // Prevent large angular corrections
            C2 = MathUtils.Clamp(translation, -Settings.MaxLinearCorrection, Settings.MaxLinearCorrection);
            linearError = TrueSync.TSMath.Max(linearError, FP.Abs(translation));
            active = true;
        }
        else if (translation <= _lowerTranslation)
        {
            // Prevent large linear corrections and allow some slop.
            C2 = MathUtils.Clamp(translation - _lowerTranslation + Settings.LinearSlop, -Settings.MaxLinearCorrection, 0.0f);
            linearError = TrueSync.TSMath.Max(linearError, _lowerTranslation - translation);
            active = true;
        }
        else if (translation >= _upperTranslation)
        {
            // Prevent large linear corrections and allow some slop.
            C2 = MathUtils.Clamp(translation - _upperTranslation - Settings.LinearSlop, 0.0f, Settings.MaxLinearCorrection);
            linearError = TrueSync.TSMath.Max(linearError, translation - _upperTranslation);
            active = true;
        }
    }
    if (active)
    {
        // Limit active: solve the full 3x3 system (perp, angle, axis).
        FP k11 = mA + mB + iA * s1 * s1 + iB * s2 * s2;
        FP k12 = iA * s1 + iB * s2;
        FP k13 = iA * s1 * a1 + iB * s2 * a2;
        FP k22 = iA + iB;
        if (k22 == 0.0f)
        {
            // For fixed rotation
            k22 = 1.0f;
        }
        FP k23 = iA * a1 + iB * a2;
        FP k33 = mA + mB + iA * a1 * a1 + iB * a2 * a2;
        Mat33 K = new Mat33();
        K.ex = new TSVector(k11, k12, k13);
        K.ey = new TSVector(k12, k22, k23);
        K.ez = new TSVector(k13, k23, k33);
        TSVector C = new TSVector();
        C.x = C1.x;
        C.y = C1.y;
        C.z = C2;
        impulse = K.Solve33(C * -1);
    }
    else
    {
        // Limit inactive: solve only the 2x2 prismatic sub-system; axis impulse is zero.
        FP k11 = mA + mB + iA * s1 * s1 + iB * s2 * s2;
        FP k12 = iA * s1 + iB * s2;
        FP k22 = iA + iB;
        if (k22 == 0.0f)
        {
            k22 = 1.0f;
        }
        Mat22 K = new Mat22();
        K.ex = new TSVector2(k11, k12);
        K.ey = new TSVector2(k12, k22);
        TSVector2 impulse1 = K.Solve(-C1);
        impulse = new TSVector();
        impulse.x = impulse1.x;
        impulse.y = impulse1.y;
        impulse.z = 0.0f;
    }
    // Apply the pseudo-impulse directly to the positions/angles.
    TSVector2 P = impulse.x * perp + impulse.z * axis;
    FP LA = impulse.x * s1 + impulse.y + impulse.z * a1;
    FP LB = impulse.x * s2 + impulse.y + impulse.z * a2;
    cA -= mA * P;
    aA -= iA * LA;
    cB += mB * P;
    aB += iB * LB;
    data.positions[_indexA].c = cA;
    data.positions[_indexA].a = aA;
    data.positions[_indexB].c = cB;
    data.positions[_indexB].a = aB;
    return linearError <= Settings.LinearSlop && angularError <= Settings.AngularSlop;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Xml;
using System.Xml.XPath;
using System.Xml.Schema;
using System.Diagnostics;
using System.Collections;
using System.Collections.Generic;
namespace MS.Internal.Xml.Cache
{
/// <summary>
/// Although the XPath data model does not differentiate between text and whitespace, Managed Xml 1.0
/// does. Therefore, when building from an XmlReader, we must preserve these designations in order
/// to remain backwards-compatible.
/// All values other than None intentionally mirror the corresponding XPathNodeType values so the
/// two enums can be cast directly into one another (see CachedTextNode).
/// </summary>
internal enum TextBlockType
{
    // No text has been cached yet
    None = 0,
    Text = XPathNodeType.Text,
    SignificantWhitespace = XPathNodeType.SignificantWhitespace,
    Whitespace = XPathNodeType.Whitespace,
};
/// <summary>
/// Implementation of XmlRawWriter that builds nodes in an XPathDocument.
/// </summary>
internal sealed class XPathDocumentBuilder : XmlRawWriter
{
private NodePageFactory _nodePageFact; // Creates non-namespace node pages
private NodePageFactory _nmspPageFact; // Creates namespace node pages
private TextBlockBuilder _textBldr; // Concatenates adjacent text blocks
private readonly Stack<XPathNodeRef> _stkNmsp; // In-scope namespaces
private XPathNodeInfoTable _infoTable; // Atomization table for shared node information
private XPathDocument _doc; // Currently building document
private IXmlLineInfo _lineInfo; // Line information provider
private XmlNameTable _nameTable; // Atomization table for all names in the document
private bool _atomizeNames; // True if all names should be atomized (false if they are pre-atomized)
private XPathNode[] _pageNmsp; // Page of last in-scope namespace node
private int _idxNmsp; // Page index of last in-scope namespace node
private XPathNode[] _pageParent; // Page of last parent-type node (Element or Root)
private int _idxParent; // Page index of last parent-type node (Element or Root)
private XPathNode[] _pageSibling; // Page of previous sibling node (may be null if no previous sibling)
private int _idxSibling; // Page index of previous sibling node
private int _lineNumBase; // Line number from which offsets are computed
private int _linePosBase; // Line position from which offsets are computed
private XmlQualifiedName _idAttrName; // Cached name of an ID attribute
private Hashtable _elemIdMap; // Map from element name to ID attribute name
private XPathNodeRef[] _elemNameIndex; // Elements with the same name are linked together so that they can be searched quickly

// Bucket count for _elemNameIndex; must remain a power of two because buckets
// are selected with a bit mask (hash & (ElementIndexSize - 1)) in WriteStartElement.
private const int ElementIndexSize = 64;

/// <summary>
/// Create a new XPathDocumentBuilder which creates nodes in "doc".
/// </summary>
/// <param name="doc">Document that receives the constructed nodes.</param>
/// <param name="lineInfo">Optional source line-number provider; may be null.</param>
/// <param name="baseUri">Base URI recorded on the document root node.</param>
/// <param name="flags">Load options (fragment building, name atomization).</param>
public XPathDocumentBuilder(XPathDocument doc, IXmlLineInfo lineInfo, string baseUri, XPathDocument.LoadFlags flags)
{
    // Allocate the initial node (for non-namespaces) page, and the initial namespace page
    _nodePageFact.Init(256);
    _nmspPageFact.Init(16);
    _stkNmsp = new Stack<XPathNodeRef>();
    Initialize(doc, lineInfo, baseUri, flags);
}

/// <summary>
/// Start construction of a new document. This must be called before any other methods are called.
/// It may also be called after Close(), in order to build further documents.
/// </summary>
public void Initialize(XPathDocument doc, IXmlLineInfo lineInfo, string baseUri, XPathDocument.LoadFlags flags)
{
    XPathNode[] page;
    int idx;
    _doc = doc;
    _nameTable = doc.NameTable;
    _atomizeNames = (flags & XPathDocument.LoadFlags.AtomizeNames) != 0;
    _idxParent = _idxSibling = 0;
    _elemNameIndex = new XPathNodeRef[ElementIndexSize];
    // Prepare line number information
    _textBldr.Initialize(lineInfo);
    _lineInfo = lineInfo;
    _lineNumBase = 0;
    _linePosBase = 0;
    // Allocate the atomization table
    _infoTable = new XPathNodeInfoTable();
    // Allocate singleton collapsed text node
    idx = NewNode(out page, XPathNodeType.Text, string.Empty, string.Empty, string.Empty, string.Empty);
    _doc.SetCollapsedTextNode(page, idx);
    // Allocate xmlns:xml namespace node ("xml" prefix is atomized so later
    // reference comparisons against it succeed)
    _idxNmsp = NewNamespaceNode(out _pageNmsp, _nameTable.Add("xml"), _nameTable.Add(XmlReservedNs.NsXml), null, 0);
    _doc.SetXmlNamespaceNode(_pageNmsp, _idxNmsp);
    if ((flags & XPathDocument.LoadFlags.Fragment) == 0)
    {
        // This tree has a document root node
        _idxParent = NewNode(out _pageParent, XPathNodeType.Root, string.Empty, string.Empty, string.Empty, baseUri);
        _doc.SetRootNode(_pageParent, _idxParent);
    }
    else
    {
        // This tree is an XQuery fragment (no document root node), so root will be next node in the current page
        _doc.SetRootNode(_nodePageFact.NextNodePage, _nodePageFact.NextNodeIndex);
    }
}
//-----------------------------------------------
// XmlWriter interface
//-----------------------------------------------

/// <summary>
/// XPathDocument ignores the DocType information.
/// </summary>
public override void WriteDocType(string name, string pubid, string sysid, string subset)
{
    // Intentional no-op: the cache does not model DTD declarations.
}

/// <summary>
/// Shortcut for calling WriteStartElement with baseUri == string.Empty.
/// </summary>
public override void WriteStartElement(string prefix, string localName, string ns)
{
    this.WriteStartElement(prefix, localName, ns, string.Empty);
}

/// <summary>
/// Build an element node and attach it to its parent, if one exists. Make the element the new parent node.
/// </summary>
public void WriteStartElement(string prefix, string localName, string ns, string baseUri)
{
    int hash;
    Debug.Assert(prefix != null && localName != null && ns != null && localName.Length != 0 && baseUri != null);
    if (_atomizeNames)
    {
        prefix = _nameTable.Add(prefix);
        localName = _nameTable.Add(localName);
        ns = _nameTable.Add(ns);
    }
    AddSibling(XPathNodeType.Element, localName, ns, prefix, baseUri);
    // The new element becomes the current parent; it has no children yet
    _pageParent = _pageSibling;
    _idxParent = _idxSibling;
    _idxSibling = 0;
    // Link elements with the same name together
    hash = (_pageParent[_idxParent].LocalNameHashCode & (ElementIndexSize - 1));
    _elemNameIndex[hash] = LinkSimilarElements(_elemNameIndex[hash].Page, _elemNameIndex[hash].Index, _pageParent, _idxParent);
    // If elements within this document might have IDs, then cache the name of the ID attribute, if one exists.
    // The map is keyed by (localName, prefix) because DTD-declared IDs have no namespace notion.
    if (_elemIdMap != null)
        _idAttrName = (XmlQualifiedName)_elemIdMap[new XmlQualifiedName(localName, prefix)];
}

/// <summary>
/// Must be called when an element node's children have been fully enumerated (short end-tag allowed).
/// </summary>
public override void WriteEndElement()
{
    WriteEndElement(true);
}

/// <summary>
/// Must be called when an element node's children have been fully enumerated (forces a full end-tag).
/// </summary>
public override void WriteFullEndElement()
{
    WriteEndElement(false);
}

/// <summary>
/// Must be called when an element node's children have been fully enumerated; name arguments are ignored.
/// </summary>
internal override void WriteEndElement(string prefix, string localName, string namespaceName)
{
    WriteEndElement(true);
}

/// <summary>
/// Must be called when an element node's children have been fully enumerated; name arguments are ignored.
/// </summary>
internal override void WriteFullEndElement(string prefix, string localName, string namespaceName)
{
    WriteEndElement(false);
}
/// <summary>
/// Must be called when an element node's children have been fully enumerated.
/// Finalizes the current element: collapses a single text child into the element
/// where possible, flushes any cached text, restores the namespace chain, and
/// pops back to the element's parent.
/// </summary>
/// <param name="allowShortcutTag">True if an empty element may be serialized with a short tag (&lt;e/&gt;).</param>
public void WriteEndElement(bool allowShortcutTag)
{
    XPathNodeRef nodeRef;
    Debug.Assert(_pageParent[_idxParent].NodeType == XPathNodeType.Element);
    // If element has no content-typed children except for the one about to be added, then
    // its value is the same as its only text child's.
    if (!_pageParent[_idxParent].HasContentChild)
    {
        switch (_textBldr.TextType)
        {
            case TextBlockType.Text:
                // Collapsed text node can be created if text line number information can be encoded efficiently in parent node
                if (_lineInfo != null)
                {
                    // If collapsed text node is not on same line as parent, don't collapse text
                    if (_textBldr.LineNumber != _pageParent[_idxParent].LineNumber)
                        goto case TextBlockType.Whitespace;
                    // If position is not within 256 of parent, don't collapse text
                    int posDiff = _textBldr.LinePosition - _pageParent[_idxParent].LinePosition;
                    if (posDiff < 0 || posDiff > XPathNode.MaxCollapsedPositionOffset)
                        goto case TextBlockType.Whitespace;
                    // Set collapsed node line position offset
                    _pageParent[_idxParent].SetCollapsedLineInfoOffset(posDiff);
                }
                // Set collapsed node text
                _pageParent[_idxParent].SetCollapsedValue(_textBldr.ReadText());
                break;
            case TextBlockType.SignificantWhitespace:
            case TextBlockType.Whitespace:
                // Create separate whitespace node (also reached via goto when text cannot be collapsed)
                CachedTextNode();
                _pageParent[_idxParent].SetValue(_pageSibling[_idxSibling].Value);
                break;
            default:
                // Empty value, so don't create collapsed text node
                _pageParent[_idxParent].SetEmptyValue(allowShortcutTag);
                break;
        }
    }
    else
    {
        if (_textBldr.HasText)
        {
            // Element's last child (one of several) is a text or whitespace node
            CachedTextNode();
        }
    }
    // If namespaces were added to this element,
    if (_pageParent[_idxParent].HasNamespaceDecls)
    {
        // Add it to the document's element --> namespace mapping
        _doc.AddNamespace(_pageParent, _idxParent, _pageNmsp, _idxNmsp);
        // Restore the previous namespace chain
        nodeRef = _stkNmsp.Pop();
        _pageNmsp = nodeRef.Page;
        _idxNmsp = nodeRef.Index;
    }
    // Make parent of this element the current element
    _pageSibling = _pageParent;
    _idxSibling = _idxParent;
    _idxParent = _pageParent[_idxParent].GetParent(out _pageParent);
}
/// <summary>
/// Build an attribute node and attach it as a sibling of any previous attribute.
/// Namespace declarations must not be routed here (see WriteNamespaceDeclaration).
/// </summary>
public override void WriteStartAttribute(string prefix, string localName, string namespaceName)
{
    Debug.Assert(!prefix.Equals("xmlns"));
    Debug.Assert(_idxParent == 0 || _pageParent[_idxParent].NodeType == XPathNodeType.Element);
    Debug.Assert(_idxSibling == 0 || _pageSibling[_idxSibling].NodeType == XPathNodeType.Attribute);
    if (_atomizeNames)
    {
        prefix = _nameTable.Add(prefix);
        localName = _nameTable.Add(localName);
        namespaceName = _nameTable.Add(namespaceName);
    }
    AddSibling(XPathNodeType.Attribute, localName, namespaceName, prefix, string.Empty);
}

/// <summary>
/// Attach the attribute's text or typed value to the previously constructed attribute node.
/// </summary>
public override void WriteEndAttribute()
{
    Debug.Assert(_pageSibling[_idxSibling].NodeType == XPathNodeType.Attribute);
    _pageSibling[_idxSibling].SetValue(_textBldr.ReadText());
    if (_idAttrName != null)
    {
        // If this is an ID attribute. Note: _idAttrName stores the attribute's
        // prefix in its Namespace slot (DTD-declared IDs carry no namespace),
        // hence the Prefix-to-Namespace comparison below.
        if (_pageSibling[_idxSibling].LocalName == _idAttrName.Name &&
            _pageSibling[_idxSibling].Prefix == _idAttrName.Namespace)
        {
            // Then add its value to the idValueMap map
            Debug.Assert(_idxParent != 0, "ID attribute must have an element parent");
            _doc.AddIdElement(_pageSibling[_idxSibling].Value, _pageParent, _idxParent);
        }
    }
}
/// <summary>
/// Map CData text into regular text.
/// </summary>
public override void WriteCData(string text)
{
    WriteString(text, TextBlockType.Text);
}

/// <summary>
/// Construct comment node.
/// </summary>
public override void WriteComment(string text)
{
    AddSibling(XPathNodeType.Comment, string.Empty, string.Empty, string.Empty, string.Empty);
    _pageSibling[_idxSibling].SetValue(text);
}

/// <summary>
/// Shortcut for calling WriteProcessingInstruction with baseUri = string.Empty.
/// </summary>
public override void WriteProcessingInstruction(string name, string text)
{
    this.WriteProcessingInstruction(name, text, string.Empty);
}

/// <summary>
/// Construct pi node.
/// </summary>
public void WriteProcessingInstruction(string name, string text, string baseUri)
{
    if (_atomizeNames)
        name = _nameTable.Add(name);
    AddSibling(XPathNodeType.ProcessingInstruction, name, string.Empty, string.Empty, baseUri);
    _pageSibling[_idxSibling].SetValue(text);
}

/// <summary>
/// Write a whitespace text block.
/// </summary>
public override void WriteWhitespace(string ws)
{
    WriteString(ws, TextBlockType.Whitespace);
}

/// <summary>
/// Write an attribute or element text block.
/// </summary>
public override void WriteString(string text)
{
    WriteString(text, TextBlockType.Text);
}

/// <summary>
/// Write a character-buffer slice as a text block.
/// </summary>
public override void WriteChars(char[] buffer, int index, int count)
{
    WriteString(new string(buffer, index, count), TextBlockType.Text);
}

/// <summary>
/// Map RawText to Text. This will lose entitization and won't roundtrip.
/// </summary>
public override void WriteRaw(string data)
{
    WriteString(data, TextBlockType.Text);
}

/// <summary>
/// Map a raw character-buffer slice to Text. This will lose entitization and won't roundtrip.
/// </summary>
public override void WriteRaw(char[] buffer, int index, int count)
{
    WriteString(new string(buffer, index, count), TextBlockType.Text);
}

/// <summary>
/// Write an element text block with the specified text type (whitespace, significant whitespace, or text).
/// Text is only cached here; a node is materialized later (see CachedTextNode / WriteEndElement).
/// </summary>
public void WriteString(string text, TextBlockType textType)
{
    _textBldr.WriteTextBlock(text, textType);
}

/// <summary>
/// Cache does not handle entity references.
/// </summary>
public override void WriteEntityRef(string name)
{
    throw new NotImplementedException();
}

/// <summary>
/// Don't entitize, since the cache cannot represent character entities.
/// </summary>
public override void WriteCharEntity(char ch)
{
    WriteString(char.ToString(ch), TextBlockType.Text);
}

/// <summary>
/// Don't entitize, since the cache cannot represent character entities.
/// The surrogate pair is recombined in (high, low) order.
/// </summary>
public override void WriteSurrogateCharEntity(char lowChar, char highChar)
{
    ReadOnlySpan<char> chars = stackalloc char[] { highChar, lowChar };
    WriteString(new string(chars), TextBlockType.Text);
}

/// <summary>
/// Signals the end of tree construction.
/// </summary>
public override void Close()
{
    XPathNode[] page;
    int idx;
    // If cached text exists, then create a text node at the top-level
    if (_textBldr.HasText)
        CachedTextNode();
    // If document does not yet contain nodes, then an empty text node must have been created
    idx = _doc.GetRootNode(out page);
    if (idx == _nodePageFact.NextNodeIndex && page == _nodePageFact.NextNodePage)
    {
        AddSibling(XPathNodeType.Text, string.Empty, string.Empty, string.Empty, string.Empty);
        _pageSibling[_idxSibling].SetValue(string.Empty);
    }
}

/// <summary>
/// Since output is not forwarded to another object, this does nothing.
/// </summary>
public override void Flush()
{
}

//-----------------------------------------------
// XmlRawWriter interface
//-----------------------------------------------

/// <summary>
/// Write the xml declaration. This must be the first call after Open.
/// </summary>
internal override void WriteXmlDeclaration(XmlStandalone standalone)
{
    // Ignore the xml declaration when building the cache
}

/// <summary>
/// Write the xml declaration supplied as raw text.
/// </summary>
internal override void WriteXmlDeclaration(string xmldecl)
{
    // Ignore the xml declaration when building the cache
}

/// <summary>
/// Called as element node's children are about to be enumerated.
/// </summary>
internal override void StartElementContent()
{
    Debug.Assert(_pageParent[_idxParent].NodeType == XPathNodeType.Element);
}
/// <summary>
/// Build a namespace declaration node. Attach it to an element parent, if one was previously constructed.
/// All namespace declarations are linked together in an in-scope namespace tree.
/// If the new declaration overrides an in-scope prefix, the chain up to the overridden
/// node is cloned so the overridden node is bypassed without mutating ancestors' chains.
/// </summary>
internal override void WriteNamespaceDeclaration(string prefix, string namespaceName)
{
    XPathNode[] pageTemp, pageOverride, pageNew, pageOrig, pageCopy;
    int idxTemp, idxOverride, idxNew, idxOrig, idxCopy;
    Debug.Assert(_idxSibling == 0 || _pageSibling[_idxSibling].NodeType == XPathNodeType.Attribute);
    Debug.Assert(!prefix.Equals("xmlns") && !namespaceName.Equals(XmlReservedNs.NsXmlNs));
    Debug.Assert(_idxParent == 0 || _idxNmsp != 0);
    Debug.Assert(_idxParent == 0 || _pageParent[_idxParent].NodeType == XPathNodeType.Element);
    if (_atomizeNames)
        prefix = _nameTable.Add(prefix);
    // NOTE: the namespace URI is atomized unconditionally (unlike prefix)
    namespaceName = _nameTable.Add(namespaceName);
    // Does the new namespace override a previous namespace node?
    // Prefixes are compared by reference, which relies on atomization.
    pageOverride = _pageNmsp;
    idxOverride = _idxNmsp;
    while (idxOverride != 0)
    {
        if ((object)pageOverride[idxOverride].LocalName == (object)prefix)
        {
            // Need to clone all namespaces up until the overridden node in order to bypass it
            break;
        }
        idxOverride = pageOverride[idxOverride].GetSibling(out pageOverride);
    }
    // Create new namespace node and add it to front of namespace list
    idxNew = NewNamespaceNode(out pageNew, prefix, namespaceName, _pageParent, _idxParent);
    if (idxOverride != 0)
    {
        // Bypass overridden node by cloning nodes in list leading to it
        pageOrig = _pageNmsp;
        idxOrig = _idxNmsp;
        pageCopy = pageNew;
        idxCopy = idxNew;
        while (idxOrig != idxOverride || pageOrig != pageOverride)
        {
            // Make a copy of the original namespace node
            idxTemp = pageOrig[idxOrig].GetParent(out pageTemp);
            idxTemp = NewNamespaceNode(out pageTemp, pageOrig[idxOrig].LocalName, pageOrig[idxOrig].Value, pageTemp, idxTemp);
            // Attach copy to chain of copied nodes
            pageCopy[idxCopy].SetSibling(_infoTable, pageTemp, idxTemp);
            // Position on the new copy
            pageCopy = pageTemp;
            idxCopy = idxTemp;
            // Get next original sibling
            idxOrig = pageOrig[idxOrig].GetSibling(out pageOrig);
        }
        // Link farther up in the original chain, just past the last overridden node
        idxOverride = pageOverride[idxOverride].GetSibling(out pageOverride);
        if (idxOverride != 0)
            pageCopy[idxCopy].SetSibling(_infoTable, pageOverride, idxOverride);
        else
            Debug.Assert(prefix.Equals("xml"), "xmlns:xml namespace declaration should always be present in the list.");
    }
    else if (_idxParent != 0)
    {
        // Link new node directly to last in-scope namespace. No overrides necessary.
        pageNew[idxNew].SetSibling(_infoTable, _pageNmsp, _idxNmsp);
    }
    else
    {
        // Floating namespace, so make this the root of the tree
        _doc.SetRootNode(pageNew, idxNew);
    }
    if (_idxParent != 0)
    {
        // If this is the first namespace on the current element,
        if (!_pageParent[_idxParent].HasNamespaceDecls)
        {
            // Then save the last in-scope namespace on a stack so that EndElementNode can restore it.
            _stkNmsp.Push(new XPathNodeRef(_pageNmsp, _idxNmsp));
            // Mark element parent as having namespace nodes declared on it
            _pageParent[_idxParent].HasNamespaceDecls = true;
        }
        // New namespace is now last in-scope namespace
        _pageNmsp = pageNew;
        _idxNmsp = idxNew;
    }
}
//-----------------------------------------------
// Custom Build Helper Methods
//-----------------------------------------------

/// <summary>
/// Build ID lookup tables from the XSD schema or DTD.
/// </summary>
public void CreateIdTables(IDtdInfo dtdInfo)
{
    // Extract the elements which has attribute defined as ID from the element declarations
    foreach (IDtdAttributeListInfo attrList in dtdInfo.GetAttributeLists())
    {
        IDtdAttributeInfo idAttribute = attrList.LookupIdAttribute();
        if (idAttribute != null)
        {
            // _elemIdMap is allocated lazily: most documents have no ID attributes
            if (_elemIdMap == null)
                _elemIdMap = new Hashtable();
            // Id was defined in DTD and DTD doesn't have notion of namespace so we should
            // use prefix instead of namespace here. Schema already does this for us.
            _elemIdMap.Add(new XmlQualifiedName(attrList.LocalName, attrList.Prefix),
                new XmlQualifiedName(idAttribute.LocalName, idAttribute.Prefix));
        }
    }
}

/// <summary>
/// Link "prev" element with "next" element, which has a "similar" name. This increases the performance of searches by element name.
/// </summary>
/// <returns>A reference to the "next" element, which becomes the new tail of the similar-name chain.</returns>
private XPathNodeRef LinkSimilarElements(XPathNode[] pagePrev, int idxPrev, XPathNode[] pageNext, int idxNext)
{
    // Set link on previous element (pagePrev is null for the first element in a bucket)
    if (pagePrev != null)
        pagePrev[idxPrev].SetSimilarElement(_infoTable, pageNext, idxNext);
    // Add next element to index
    return new XPathNodeRef(pageNext, idxNext);
}

/// <summary>
/// Helper method that constructs a new Namespace XPathNode.
/// </summary>
/// <param name="page">Receives the page on which the new node was allocated.</param>
/// <returns>Index of the new node within "page".</returns>
private int NewNamespaceNode(out XPathNode[] page, string prefix, string namespaceUri, XPathNode[] pageElem, int idxElem)
{
    XPathNode[] pageNode;
    int idxNode, lineNumOffset, linePosOffset;
    XPathNodeInfoAtom info;
    Debug.Assert(pageElem == null || pageElem[idxElem].NodeType == XPathNodeType.Element);
    // Allocate a page slot for the new XPathNode
    _nmspPageFact.AllocateSlot(out pageNode, out idxNode);
    // Compute node's line number information
    ComputeLineInfo(false, out lineNumOffset, out linePosOffset);
    // Obtain a XPathNodeInfoAtom object for this node (the prefix is stored as the local name)
    info = _infoTable.Create(prefix, string.Empty, string.Empty, string.Empty,
        pageElem, pageNode, null,
        _doc, _lineNumBase, _linePosBase);
    // Initialize the new node
    pageNode[idxNode].Create(info, XPathNodeType.Namespace, idxElem);
    pageNode[idxNode].SetValue(namespaceUri);
    pageNode[idxNode].SetLineInfoOffsets(lineNumOffset, linePosOffset);
    page = pageNode;
    return idxNode;
}

/// <summary>
/// Helper method that constructs a new XPathNode.
/// </summary>
/// <param name="page">Receives the page on which the new node was allocated.</param>
/// <returns>Index of the new node within "page".</returns>
private int NewNode(out XPathNode[] page, XPathNodeType xptyp, string localName, string namespaceUri, string prefix, string baseUri)
{
    XPathNode[] pageNode;
    int idxNode, lineNumOffset, linePosOffset;
    XPathNodeInfoAtom info;
    Debug.Assert(xptyp != XPathNodeType.Namespace);
    // Allocate a page slot for the new XPathNode
    _nodePageFact.AllocateSlot(out pageNode, out idxNode);
    // Compute node's line number information
    ComputeLineInfo(XPathNavigator.IsText(xptyp), out lineNumOffset, out linePosOffset);
    // Obtain a XPathNodeInfoAtom object for this node
    info = _infoTable.Create(localName, namespaceUri, prefix, baseUri,
        _pageParent, pageNode, pageNode,
        _doc, _lineNumBase, _linePosBase);
    // Initialize the new node
    pageNode[idxNode].Create(info, xptyp, _idxParent);
    pageNode[idxNode].SetLineInfoOffsets(lineNumOffset, linePosOffset);
    page = pageNode;
    return idxNode;
}

/// <summary>
/// Compute current node's line number information. Offsets are relative to a
/// sliding base (_lineNumBase/_linePosBase); when an offset falls outside the
/// encodable range, the base is rebased to the current position and the offset reset to 0.
/// </summary>
private void ComputeLineInfo(bool isTextNode, out int lineNumOffset, out int linePosOffset)
{
    int lineNum, linePos;
    if (_lineInfo == null)
    {
        lineNumOffset = 0;
        linePosOffset = 0;
        return;
    }
    // Get line number info from TextBlockBuilder if current node is a text node
    if (isTextNode)
    {
        lineNum = _textBldr.LineNumber;
        linePos = _textBldr.LinePosition;
    }
    else
    {
        Debug.Assert(_lineInfo.HasLineInfo(), "HasLineInfo should have been checked before this.");
        lineNum = _lineInfo.LineNumber;
        linePos = _lineInfo.LinePosition;
    }
    lineNumOffset = lineNum - _lineNumBase;
    if (lineNumOffset < 0 || lineNumOffset > XPathNode.MaxLineNumberOffset)
    {
        _lineNumBase = lineNum;
        lineNumOffset = 0;
    }
    linePosOffset = linePos - _linePosBase;
    if (linePosOffset < 0 || linePosOffset > XPathNode.MaxLinePositionOffset)
    {
        _linePosBase = linePos;
        linePosOffset = 0;
    }
}

/// <summary>
/// Add a sibling node. If no previous sibling exists, add the node as the first child of the parent.
/// If no parent exists, make this node the root of the document.
/// Any cached text is materialized first so text nodes stay in document order.
/// </summary>
private void AddSibling(XPathNodeType xptyp, string localName, string namespaceUri, string prefix, string baseUri)
{
    XPathNode[] pageNew;
    int idxNew;
    Debug.Assert(xptyp != XPathNodeType.Root && xptyp != XPathNodeType.Namespace);
    if (_textBldr.HasText)
        CachedTextNode();
    idxNew = NewNode(out pageNew, xptyp, localName, namespaceUri, prefix, baseUri);
    // this.idxParent is only 0 for the top-most node
    if (_idxParent != 0)
    {
        // Set properties on parent
        _pageParent[_idxParent].SetParentProperties(xptyp);
        if (_idxSibling == 0)
        {
            // This is the first child of the parent (so should be allocated immediately after parent)
            Debug.Assert(_idxParent + 1 == idxNew || idxNew == 1);
        }
        else
        {
            // There is already a previous sibling
            _pageSibling[_idxSibling].SetSibling(_infoTable, pageNew, idxNew);
        }
    }
    _pageSibling = pageNew;
    _idxSibling = idxNew;
}

/// <summary>
/// Creates a text node from cached text parts.
/// </summary>
private void CachedTextNode()
{
    TextBlockType textType;
    string text;
    Debug.Assert(_textBldr.HasText || (_idxSibling == 0 && _idxParent == 0), "Cannot create empty text node unless it's a top-level text node.");
    Debug.Assert(_idxSibling == 0 || !_pageSibling[_idxSibling].IsText, "Cannot create adjacent text nodes.");
    // Create a text node; TextBlockType values (other than None) are valid XPathNodeType values
    textType = _textBldr.TextType;
    text = _textBldr.ReadText();
    AddSibling((XPathNodeType)textType, string.Empty, string.Empty, string.Empty, string.Empty);
    _pageSibling[_idxSibling].SetValue(text);
}
/// <summary>
/// Allocates pages of nodes for the XPathDocumentBuilder. The initial pages and arrays are
/// fairly small. As each page fills, a new page that is twice as big is allocated.
/// The max size of a page is 65536 nodes, since XPathNode indexes are 16-bits.
/// </summary>
private struct NodePageFactory
{
    private XPathNode[] _page; // Current page being filled
    private XPathNodePageInfo _pageInfo; // Bookkeeping stored in slot 0 of the current page
    private int _pageSize; // Size used for the next page allocation

    /// <summary>
    /// Allocates and returns the initial node page.
    /// </summary>
    public void Init(int initialPageSize)
    {
        // 0th slot: Index 0 is reserved to mean "null node". Only use 0th slot to store PageInfo.
        _pageSize = initialPageSize;
        _page = new XPathNode[_pageSize];
        _pageInfo = new XPathNodePageInfo(null, 1);
        _page[0].Create(_pageInfo);
    }

    /// <summary>
    /// Return the page on which the next node will be allocated.
    /// </summary>
    public XPathNode[] NextNodePage
    {
        get { return _page; }
    }

    /// <summary>
    /// Return the page index that the next node will be given.
    /// </summary>
    public int NextNodeIndex
    {
        get { return _pageInfo.NodeCount; }
    }

    /// <summary>
    /// Allocate the next slot in the current node page. Return a reference to the page and the index
    /// of the allocated slot.
    /// </summary>
    public void AllocateSlot(out XPathNode[] page, out int idx)
    {
        page = _page;
        idx = _pageInfo.NodeCount;
        // Allocate new page if necessary (the pre-increment also records the slot just handed out)
        if (++_pageInfo.NodeCount >= _page.Length)
        {
            if (_pageSize < (1 << 16))
            {
                // New page shouldn't contain more slots than 16 bits can address
                _pageSize *= 2;
            }
            _page = new XPathNode[_pageSize];
            _pageInfo.NextPage = _page;
            _pageInfo = new XPathNodePageInfo(page, _pageInfo.PageNumber + 1);
            _page[0].Create(_pageInfo);
        }
    }
}

/// <summary>
/// This class concatenates adjacent text blocks and tracks TextBlockType and line number information.
/// </summary>
private struct TextBlockBuilder
{
    private IXmlLineInfo _lineInfo; // Optional line-information provider
    private TextBlockType _textType; // Type of the cached text (None when nothing cached)
    private string _text; // Accumulated text
    private int _lineNum, _linePos; // Position of the first cached block

    /// <summary>
    /// Constructor.
    /// </summary>
    public void Initialize(IXmlLineInfo lineInfo)
    {
        _lineInfo = lineInfo;
        _textType = TextBlockType.None;
    }

    /// <summary>
    /// Return the type of the cached text block.
    /// </summary>
    public TextBlockType TextType
    {
        get { return _textType; }
    }

    /// <summary>
    /// Returns true if text has been cached.
    /// </summary>
    public bool HasText
    {
        get { return _textType != TextBlockType.None; }
    }

    /// <summary>
    /// Returns the line number of the last text block to be cached.
    /// </summary>
    public int LineNumber
    {
        get { return _lineNum; }
    }

    /// <summary>
    /// Returns the line position of the last text block to be cached.
    /// </summary>
    public int LinePosition
    {
        get { return _linePos; }
    }

    /// <summary>
    /// Append a text block with the specified type. Mixed types collapse toward
    /// Text, which has the numerically smallest XPathNodeType value (asserted below).
    /// </summary>
    public void WriteTextBlock(string text, TextBlockType textType)
    {
        Debug.Assert((int)XPathNodeType.Text < (int)XPathNodeType.SignificantWhitespace);
        Debug.Assert((int)XPathNodeType.SignificantWhitespace < (int)XPathNodeType.Whitespace);
        if (text.Length != 0)
        {
            if (_textType == TextBlockType.None)
            {
                _text = text;
                _textType = textType;
                if (_lineInfo != null)
                {
                    _lineNum = _lineInfo.LineNumber;
                    _linePos = _lineInfo.LinePosition;
                }
            }
            else
            {
                _text = string.Concat(_text, text);
                // Determine whether text is Text, Whitespace, or SignificantWhitespace
                if ((int)textType < (int)_textType)
                    _textType = textType;
            }
        }
    }

    /// <summary>
    /// Read all cached text, or string.Empty if no text has been cached, and clear the text block type.
    /// </summary>
    public string ReadText()
    {
        if (_textType == TextBlockType.None)
            return string.Empty;
        _textType = TextBlockType.None;
        return _text;
    }
}
}
}
| |
// Copyright (c) 2021 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
using System;
using System.Data;
using System.Data.SqlClient;
using System.Collections;
using System.Globalization;
using Alachisoft.NCache.Common;
using Alachisoft.NCache.Common.Logger;
using Alachisoft.NCache.Common.DataStructures.Clustered;
namespace Alachisoft.NCache.Caching.Util
{
internal class DbConnectionPool : ResourcePool
{
/// <summary>
/// Pool entry that wraps a database connection together with the sync
/// information collected for it.
/// </summary>
internal class DbResourceInfo : IDisposable
{
    private IDbConnection _conn;
    private IDictionary _syncData;

    /// <summary>Wraps the given connection (may be null).</summary>
    public DbResourceInfo(IDbConnection conn)
    {
        _conn = conn;
    }

    #region / --- IDisposable --- /
    /// <summary>
    /// Performs application-defined tasks associated with freeing, releasing, or
    /// resetting unmanaged resources.
    /// </summary>
    void IDisposable.Dispose()
    {
        // Guard-clause form: nothing to release when no connection is wrapped.
        if (_conn == null)
            return;
        _conn.Close();
        _conn.Dispose();
        _conn = null;
    }
    #endregion

    /// <summary>The wrapped database connection.</summary>
    public IDbConnection Connection
    {
        get { return _conn; }
        set { _conn = value; }
    }

    /// <summary>Sync information associated with this connection.</summary>
    public IDictionary DbSyncInfo
    {
        get { return _syncData; }
        set { _syncData = value; }
    }
}
// Logger supplied by the owning cache; used for diagnostics.
private ILogger _ncacheLog;

// Convenience accessor for the logger.
ILogger NCacheLog
{
    get { return _ncacheLog; }
}

/// <summary>
/// Creates a connection pool that logs through the given logger.
/// </summary>
/// <param name="NCacheLog">Logger instance; stored as-is.</param>
public DbConnectionPool(ILogger NCacheLog)
{
    this._ncacheLog = NCacheLog;
}
/// <summary>
/// Adds a connection to the pool if not already present.
/// Otherwise, increments the reference count for it.
/// </summary>
/// <param name="connString">Connection string; used (lower-cased) as the pool key.</param>
/// <param name="connection">An initialized connection object, used only when no
/// connection is pooled yet for this key.</param>
/// <returns>The pooled connection associated with <paramref name="connString"/>.</returns>
public IDbConnection PoolConnection(string connString, IDbConnection connection)
{
    lock (this)
    {
        string connKey = connString.ToLower();
        DbResourceInfo connInfo = (DbResourceInfo)GetResource(connKey);
        if (connInfo == null)
        {
            // BUGFIX: previously this assigned connection.ConnectionString to itself
            // (a no-op, CA2245); the intended behavior is to configure the supplied
            // connection with the requested connection string before opening it.
            connection.ConnectionString = connString;
            connection.Open();
            connInfo = new DbResourceInfo(connection);
            AddResource(connKey, connInfo);
        }
        else
        {
            // BUGFIX: reopen the pooled connection that is actually returned to the
            // caller; the old code opened the caller-supplied connection and then
            // discarded it, leaving the returned pooled connection broken/closed.
            if (connInfo.Connection.State == ConnectionState.Broken || connInfo.Connection.State == ConnectionState.Closed)
            {
                // A broken connection must be closed before it can be reopened.
                if (connInfo.Connection.State == ConnectionState.Broken)
                    connInfo.Connection.Close();
                connInfo.Connection.Open();
            }
            AddResource(connKey, null); // null payload just increments the reference count
        }
        return connInfo.Connection;
    }
}
/// <summary>
/// When connection is no more required, it is closed and removed from the
/// _connectionTable.
/// </summary>
/// <param name="connString"></param>
public void RemoveConnection(string connString)
{
lock (this)
{
RemoveResource(connString.ToLower());
}
}
/// <summary>
/// Removes the Severed Connection from the Resource pool.
/// </summary>
/// <param name="connString">The connection string of the Severed Connection.</param>
public void RemoveSeveredConnection(string connString)
{
lock (this)
{
IDbConnection conn = GetConnection(connString);
if (conn != null)
conn.Close();
RemoveSeveredResource(connString.ToLower());
}
}
/// <summary>
/// Wrapper for ResourcePool.GetResource(string key).
/// </summary>
/// <param name="connString"></param>
/// <returns></returns>
public IDbConnection GetConnection(string connString)
{
lock (this)
{
DbResourceInfo connInfo = (DbResourceInfo) GetResource(connString.ToLower());
if(connInfo != null)
return connInfo.Connection;
return null;
}
}
/// <summary>
/// Wrapper for ResourcePool.GetResource(string key).
/// </summary>
/// <param name="connString"></param>
/// <returns></returns>
public IDictionary GetResourceSyncInfo(string connString)
{
lock (this)
{
DbResourceInfo connInfo = (DbResourceInfo) GetResource(connString.ToLower());
if(connInfo != null)
return connInfo.DbSyncInfo;
return null;
}
}
/// <summary>
/// Acquire the modified records in ncache_db_sync table
/// </summary>
/// <param name="syncTable"></param>
/// <param name="cacheName"></param>
public void AcquireSyncData(string syncTable, string cacheName)
{
lock (this)
{
IEnumerator em = Keys.GetEnumerator();
while (em.MoveNext())
{
DbResourceInfo connInfo = (DbResourceInfo) GetResource((string)em.Current);
IDictionary dbSyncInfo = LoadTableData(syncTable, cacheName, connInfo.Connection);
connInfo.DbSyncInfo = dbSyncInfo;
}
}
}
/// <summary>
/// Remove all the records where work_in_progress flag is set.
/// </summary>
/// <returns></returns>
public void RemoveSyncData(string syncTable, string cacheName)
{
lock (this)
{
IEnumerator em = Keys.GetEnumerator();
while (em.MoveNext())
{
DbResourceInfo connInfo = (DbResourceInfo)GetResource((string)em.Current);
RemoveTableData(syncTable, cacheName, connInfo.Connection);
connInfo.DbSyncInfo = null;
}
}
}
private bool RemoveTableData(string syncTable, string cacheName, IDbConnection connection)
{
object[] tableInfo = new object[] { syncTable, cacheName };
SqlCommand command = null;
try
{
if (connection.State != ConnectionState.Open)
connection.Open();
if (connection is SqlConnection)
{
command = ((SqlConnection)connection).CreateCommand();
command.CommandText = string.Format(CultureInfo.InvariantCulture, "DELETE FROM {0} WHERE CACHE_ID = '{1}' AND WORK_IN_PROGRESS = 1", tableInfo);
command.CommandType = CommandType.Text;
command.ExecuteNonQuery();
return true;
}
}
catch (Exception ex)
{
NCacheLog.Error(cacheName, ex.ToString());
}
return false;
}
/// <summary>
/// Remove all the stored sync information
/// </summary>
public void FlushSyncData()
{
lock (this)
{
IEnumerator em = Keys.GetEnumerator();
while (em.MoveNext())
{
DbResourceInfo connInfo = (DbResourceInfo) GetResource((string)em.Current);
connInfo.DbSyncInfo = null;
}
}
}
/// <summary>
/// Load the modified records for the given cache and set these flags to false
/// </summary>
/// <returns></returns>
private Hashtable LoadTableData(string syncTable, string cacheName, IDbConnection connection)
{
object[] tableInfo = new object[] { syncTable, cacheName };
Hashtable tableData = new Hashtable();
IDataReader reader = null;
IDbCommand command = null;
string cacheKey = "";
bool modified = false;
IDbTransaction transaction = connection.BeginTransaction(System.Data.IsolationLevel.RepeatableRead);
try
{
if (connection.State != ConnectionState.Open)
connection.Open();
command = connection.CreateCommand();
command.CommandText = string.Format(CultureInfo.InvariantCulture, "UPDATE {0} SET WORK_IN_PROGRESS = 1 WHERE CACHE_ID = '{1}' AND MODIFIED = 1", tableInfo);
command.CommandType = CommandType.Text;
command.Transaction = transaction;
reader = command.ExecuteReader();
}
catch (Exception ex)
{
NCacheLog.Error(cacheName, ex.ToString());
transaction.Rollback();
return null;
}
finally
{
if (reader != null)
{
reader.Close();
reader.Dispose();
reader = null;
}
}
try
{
if (connection.State != ConnectionState.Open)
connection.Open();
command = connection.CreateCommand();
command.CommandText = string.Format(CultureInfo.InvariantCulture, "SELECT CACHE_KEY, MODIFIED FROM {0} WHERE CACHE_ID = '{1}' AND WORK_IN_PROGRESS = 1", tableInfo);
command.CommandType = CommandType.Text;
command.Transaction = transaction;
reader = command.ExecuteReader();
//Infact we are doing nothing with this flag. reader.Read() will take care of it
while (reader.Read())
{
cacheKey = Convert.ToString(reader.GetValue(0));
modified = Convert.ToBoolean(reader.GetValue(1));
tableData.Add(cacheKey, modified);
}
}
catch (Exception ex)
{
NCacheLog.Error(cacheName, ex.ToString());
transaction.Rollback();
return null;
}
finally
{
if (reader != null)
{
reader.Close();
reader.Dispose();
reader = null;
}
}
transaction.Commit();
return tableData;
}
/// <summary>
/// Gets the keys which have been modified in the database.
/// call this method after acquiring the latest database state.
/// </summary>
/// <returns> array list of all the modified keys. </returns>
internal ClusteredArrayList GetExpiredKeys()
{
ClusteredArrayList keys = new ClusteredArrayList();
lock (this)
{
IEnumerator em = Keys.GetEnumerator();
while (em.MoveNext())
{
DbResourceInfo connInfo = (DbResourceInfo)GetResource((string)em.Current);
if (connInfo != null && connInfo.DbSyncInfo != null)
{
keys.AddRange(connInfo.DbSyncInfo.Keys);
connInfo.DbSyncInfo = null;
}
}
}
return keys;
}
}
}
| |
#region License
/*
* Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion
#region Imports
using System;
using NUnit.Framework;
using Spring.Core;
#endregion
namespace Spring.Objects.Support
{
    /// <summary>
    /// Unit tests for the MethodInvoker class: construction defaults, named vs.
    /// positional argument handling (including case-insensitive matching), and
    /// the error messages raised for bad configuration.
    /// </summary>
    /// <author>Rick Evans</author>
    [TestFixture]
    public sealed class MethodInvokerTests
    {
        // A fresh MethodInvoker must expose a non-null Arguments array.
        [Test]
        public void Instantiation()
        {
            MethodInvoker vkr = new MethodInvoker();
            Assert.IsNotNull(vkr.Arguments);
        }

        // Assigning null to NamedArguments resets it to an empty collection
        // rather than leaving a null reference.
        [Test]
        public void SettingNamedArgumentsToNullJustClearsOutAnyNamedArguments()
        {
            MethodInvoker vkr = new MethodInvoker();
            vkr.AddNamedArgument("age", 10);
            vkr.NamedArguments = null;
            Assert.IsNotNull(vkr.NamedArguments);
            Assert.AreEqual(0, vkr.NamedArguments.Count);
        }

        // Prepare() must reject a configuration with a target method but no
        // target type or object (NUnit 2.x-style expected-exception attribute;
        // the message text is part of the asserted contract).
        [Test]
        [ExpectedException(typeof (ArgumentException), ExpectedMessage="One of either the 'TargetType' or 'TargetObject' properties is required.")]
        public void PrepareWithOnlyTargetMethodSet()
        {
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetMethod = "Foo";
            vkr.Prepare();
        }

        // Arguments coerces null to an empty array and accepts replacement arrays.
        [Test]
        public void ArgumentsProperty()
        {
            MethodInvoker vkr = new MethodInvoker();
            vkr.Arguments = null;
            Assert.IsNotNull(vkr.Arguments); // should always be the empty object array, never null
            Assert.AreEqual(0, vkr.Arguments.Length);
            vkr.Arguments = new string[] {"Chank Pop"};
            Assert.AreEqual(1, vkr.Arguments.Length);
        }

        // Static method invocation via TargetType (Int32.Parse).
        [Test]
        public void InvokeWithStaticMethod()
        {
            MethodInvoker mi = new MethodInvoker();
            mi.TargetType = typeof(Int32);
            mi.TargetMethod = "Parse";
            mi.Arguments = new object[] { "27" };
            mi.Prepare();
            object actual = mi.Invoke();
            Assert.IsNotNull(actual);
            Assert.AreEqual(typeof(int), actual.GetType());
            Assert.AreEqual(27, (int)actual);
        }

#if NET_2_0
        // Generic static method invocation using the "Name<TypeArg>" syntax
        // (only compiled for .NET 2.0+ builds).
        [Test]
        public void InvokeWithGenericStaticMethod()
        {
            MethodInvoker mi = new MethodInvoker();
            mi.TargetType = typeof(Activator);
            mi.TargetMethod = "CreateInstance<Spring.Objects.TestObject>";
            mi.Prepare();
            object actual = mi.Invoke();
            Assert.IsNotNull(actual);
            Assert.AreEqual(typeof(TestObject), actual.GetType());
        }
#endif

        // Instance method invocation with a plain positional argument.
        [Test]
        public void InvokeWithOKArguments()
        {
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "GrowOlder";
            vkr.Arguments = new object[] {10};
            vkr.Prepare();
            object actual = vkr.Invoke();
            Assert.AreEqual(98, actual);
        }

        // Method name lookup is case-insensitive ("growolder" -> GrowOlder).
        [Test]
        public void InvokeWithOKArgumentsAndMixedCaseMethodName()
        {
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "growolder";
            vkr.Arguments = new object[] {10};
            vkr.Prepare();
            object actual = vkr.Invoke();
            Assert.AreEqual(98, actual);
        }

        // A named argument binds to the matching parameter name ("years").
        [Test]
        public void InvokeWithNamedArgument()
        {
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "growolder";
            vkr.AddNamedArgument("years", 10);
            vkr.Prepare();
            object actual = vkr.Invoke();
            Assert.AreEqual(98, actual);
        }

        // Named and positional arguments can be mixed on one invocation.
        [Test]
        public void InvokeWithMixOfNamedAndPlainVanillaArguments()
        {
            int maximumAge = 95;
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "GrowOlderUntilMaximumAgeReached";
            vkr.AddNamedArgument("years", 10);
            vkr.Arguments = new object[] {maximumAge};
            vkr.Prepare();
            object actual = vkr.Invoke();
            Assert.AreEqual(maximumAge, actual);
        }

        // Mixed named/positional arguments of differing types bind correctly,
        // including side effects on the target (Status property).
        [Test]
        public void InvokeWithMixOfNamedAndPlainVanillaArgumentsOfDifferingTypes()
        {
            int maximumAge = 95;
            string expectedStatus = "Old Age Pensioner";
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "GrowOlderUntilMaximumAgeReachedAndSetStatusName";
            vkr.AddNamedArgument("years", 10);
            vkr.AddNamedArgument("status", expectedStatus);
            vkr.Arguments = new object[] {maximumAge};
            vkr.Prepare();
            object actual = vkr.Invoke();
            Assert.AreEqual(maximumAge, actual);
            Assert.AreEqual(expectedStatus, foo.Status);
        }

        // A named argument that matches no parameter must raise a descriptive
        // ArgumentException (exact message asserted).
        [Test]
        [ExpectedException(
            typeof (ArgumentException),
            ExpectedMessage = "The named argument 'southpaw' could not be found on the 'GrowOlder' method of class [Spring.Objects.Support.MethodInvokerTests+Foo].")]
        public void InvokeWithNamedArgumentThatDoesNotExist()
        {
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "growolder";
            vkr.AddNamedArgument("southpaw", 10);
            vkr.Prepare();
            object actual = vkr.Invoke();
            Assert.AreEqual(98, actual);
        }

        /// <summary>
        /// Tests CLS case insensitivity compliance...
        /// (both the method name and the named-argument name are oddly cased).
        /// </summary>
        [Test]
        public void InvokeWithWeIRdLyCasedNamedArgument()
        {
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "gROwOldeR";
            vkr.AddNamedArgument("YEarS", 10);
            vkr.Prepare();
            object actual = vkr.Invoke();
            Assert.AreEqual(98, actual);
        }

        // A type-incompatible argument surfaces as MethodInvocationException
        // with the documented message.
        [Test]
        [ExpectedException(typeof (MethodInvocationException), ExpectedMessage="At least one of the arguments passed to this MethodInvoker was incompatible with the signature of the invoked method.")]
        public void InvokeWithArgumentOfWrongType()
        {
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "growolder";
            vkr.AddNamedArgument("years", "Bingo");
            vkr.Prepare();
            vkr.Invoke();
        }

        // Re-adding a named argument with the same name replaces the old value.
        [Test]
        public void NamedArgumentsOverwriteEachOther()
        {
            Foo foo = new Foo();
            foo.Age = 88;
            MethodInvoker vkr = new MethodInvoker();
            vkr.TargetObject = foo;
            vkr.TargetMethod = "growolder";
            vkr.AddNamedArgument("years", 10);
            // this second setting must overwrite the first...
            vkr.AddNamedArgument("years", 200);
            vkr.Prepare();
            object actual = vkr.Invoke();
            Assert.IsFalse(98.Equals(actual), "The first named argument setter is sticking; must allow itslf to be overwritten.");
            Assert.AreEqual(288, actual, "The second named argument was not applied (it must be).");
        }

        // The protected PreparedArguments member (exposed via MyMethodInvoker)
        // must coerce null to an empty array, before and after Prepare().
        [Test]
        public void PreparedArgumentsIsNeverNull()
        {
            MyMethodInvoker vkr = new MyMethodInvoker();
            Assert.IsNotNull(vkr.GetPreparedArguments(),
                "PreparedArguments is null even before Prepare() is called; must NEVER be null.");
            vkr.NullOutPreparedArguments();
            Assert.IsNotNull(vkr.GetPreparedArguments(),
                "PreparedArguments should revert to the empty object[] when set to null; must NEVER be null.");
        }

        #region Inner Class : Foo

        // Test target with overloaded methods and simple mutable state; the
        // overloads exercise positional/named argument resolution above.
        private sealed class Foo
        {
            public Foo()
            {
                Age = 0;
                Status = "Baby";
            }

            public int GrowOlder()
            {
                return GrowOlder(1);
            }

            public int GrowOlder(int years)
            {
                _age += years;
                return _age;
            }

            public int GrowOlderUntilMaximumAgeReached(int years, int maximumAge)
            {
                _age += years;
                if (_age > maximumAge)
                {
                    _age = maximumAge;
                }
                return _age;
            }

            public int GrowOlderUntilMaximumAgeReachedAndSetStatusName(
                int years, int maximumAge, string status)
            {
                Status = status;
                return GrowOlderUntilMaximumAgeReached(years, maximumAge);
            }

            private int _age;
            private string _status;

            public int Age
            {
                get { return _age; }
                set { _age = value; }
            }

            public string Status
            {
                get { return _status; }
                set { _status = value; }
            }
        }

        #endregion

        #region Inner Class : MyMethodInvoker

        // Subclass that exposes the protected PreparedArguments member for testing.
        private sealed class MyMethodInvoker : MethodInvoker
        {
            public MyMethodInvoker()
            {
            }

            public void NullOutPreparedArguments()
            {
                PreparedArguments = null;
            }

            public object[] GetPreparedArguments()
            {
                return PreparedArguments;
            }
        }

        #endregion
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/bigtable/admin/v2/instance.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Cloud.Bigtable.Admin.V2 {
  /// <summary>Holder for reflection information generated from google/bigtable/admin/v2/instance.proto</summary>
  // NOTE(review): protoc-generated code ("DO NOT EDIT" per the file header) —
  // regenerate from instance.proto rather than hand-editing; comments only here.
  public static partial class InstanceReflection {

    #region Descriptor
    /// <summary>File descriptor for google/bigtable/admin/v2/instance.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static InstanceReflection() {
      // The serialized .proto file descriptor, embedded as base64 and decoded
      // once in this static constructor.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "Cidnb29nbGUvYmlndGFibGUvYWRtaW4vdjIvaW5zdGFuY2UucHJvdG8SGGdv",
            "b2dsZS5iaWd0YWJsZS5hZG1pbi52MhocZ29vZ2xlL2FwaS9hbm5vdGF0aW9u",
            "cy5wcm90bxolZ29vZ2xlL2JpZ3RhYmxlL2FkbWluL3YyL2NvbW1vbi5wcm90",
            "byKDAwoISW5zdGFuY2USDAoEbmFtZRgBIAEoCRIUCgxkaXNwbGF5X25hbWUY",
            "AiABKAkSNwoFc3RhdGUYAyABKA4yKC5nb29nbGUuYmlndGFibGUuYWRtaW4u",
            "djIuSW5zdGFuY2UuU3RhdGUSNQoEdHlwZRgEIAEoDjInLmdvb2dsZS5iaWd0",
            "YWJsZS5hZG1pbi52Mi5JbnN0YW5jZS5UeXBlEj4KBmxhYmVscxgFIAMoCzIu",
            "Lmdvb2dsZS5iaWd0YWJsZS5hZG1pbi52Mi5JbnN0YW5jZS5MYWJlbHNFbnRy",
            "eRotCgtMYWJlbHNFbnRyeRILCgNrZXkYASABKAkSDQoFdmFsdWUYAiABKAk6",
            "AjgBIjUKBVN0YXRlEhMKD1NUQVRFX05PVF9LTk9XThAAEgkKBVJFQURZEAES",
            "DAoIQ1JFQVRJTkcQAiI9CgRUeXBlEhQKEFRZUEVfVU5TUEVDSUZJRUQQABIO",
            "CgpQUk9EVUNUSU9OEAESDwoLREVWRUxPUE1FTlQQAiKOAgoHQ2x1c3RlchIM",
            "CgRuYW1lGAEgASgJEhAKCGxvY2F0aW9uGAIgASgJEjYKBXN0YXRlGAMgASgO",
            "MicuZ29vZ2xlLmJpZ3RhYmxlLmFkbWluLnYyLkNsdXN0ZXIuU3RhdGUSEwoL",
            "c2VydmVfbm9kZXMYBCABKAUSQwoUZGVmYXVsdF9zdG9yYWdlX3R5cGUYBSAB",
            "KA4yJS5nb29nbGUuYmlndGFibGUuYWRtaW4udjIuU3RvcmFnZVR5cGUiUQoF",
            "U3RhdGUSEwoPU1RBVEVfTk9UX0tOT1dOEAASCQoFUkVBRFkQARIMCghDUkVB",
            "VElORxACEgwKCFJFU0laSU5HEAMSDAoIRElTQUJMRUQQBCKCAwoKQXBwUHJv",
            "ZmlsZRIMCgRuYW1lGAEgASgJEgwKBGV0YWcYAiABKAkSEwoLZGVzY3JpcHRp",
            "b24YAyABKAkSZwodbXVsdGlfY2x1c3Rlcl9yb3V0aW5nX3VzZV9hbnkYBSAB",
            "KAsyPi5nb29nbGUuYmlndGFibGUuYWRtaW4udjIuQXBwUHJvZmlsZS5NdWx0",
            "aUNsdXN0ZXJSb3V0aW5nVXNlQW55SAASWwoWc2luZ2xlX2NsdXN0ZXJfcm91",
            "dGluZxgGIAEoCzI5Lmdvb2dsZS5iaWd0YWJsZS5hZG1pbi52Mi5BcHBQcm9m",
            "aWxlLlNpbmdsZUNsdXN0ZXJSb3V0aW5nSAAaGwoZTXVsdGlDbHVzdGVyUm91",
            "dGluZ1VzZUFueRpOChRTaW5nbGVDbHVzdGVyUm91dGluZxISCgpjbHVzdGVy",
            "X2lkGAEgASgJEiIKGmFsbG93X3RyYW5zYWN0aW9uYWxfd3JpdGVzGAIgASgI",
            "QhAKDnJvdXRpbmdfcG9saWN5QrABChxjb20uZ29vZ2xlLmJpZ3RhYmxlLmFk",
            "bWluLnYyQg1JbnN0YW5jZVByb3RvUAFaPWdvb2dsZS5nb2xhbmcub3JnL2dl",
            "bnByb3RvL2dvb2dsZWFwaXMvYmlndGFibGUvYWRtaW4vdjI7YWRtaW6qAh5H",
            "b29nbGUuQ2xvdWQuQmlndGFibGUuQWRtaW4uVjLKAh5Hb29nbGVcQ2xvdWRc",
            "QmlndGFibGVcQWRtaW5cVjJiBnByb3RvMw=="));
      // Wires the descriptor to the generated CLR types (Instance, Cluster,
      // AppProfile and its nested routing-policy messages).
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { global::Google.Api.AnnotationsReflection.Descriptor, global::Google.Cloud.Bigtable.Admin.V2.CommonReflection.Descriptor, },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Bigtable.Admin.V2.Instance), global::Google.Cloud.Bigtable.Admin.V2.Instance.Parser, new[]{ "Name", "DisplayName", "State", "Type", "Labels" }, null, new[]{ typeof(global::Google.Cloud.Bigtable.Admin.V2.Instance.Types.State), typeof(global::Google.Cloud.Bigtable.Admin.V2.Instance.Types.Type) }, new pbr::GeneratedClrTypeInfo[] { null, }),
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Bigtable.Admin.V2.Cluster), global::Google.Cloud.Bigtable.Admin.V2.Cluster.Parser, new[]{ "Name", "Location", "State", "ServeNodes", "DefaultStorageType" }, null, new[]{ typeof(global::Google.Cloud.Bigtable.Admin.V2.Cluster.Types.State) }, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Bigtable.Admin.V2.AppProfile), global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Parser, new[]{ "Name", "Etag", "Description", "MultiClusterRoutingUseAny", "SingleClusterRouting" }, new[]{ "RoutingPolicy" }, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.MultiClusterRoutingUseAny), global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.MultiClusterRoutingUseAny.Parser, null, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.SingleClusterRouting), global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.SingleClusterRouting.Parser, new[]{ "ClusterId", "AllowTransactionalWrites" }, null, null, null)})
          }));
    }
    #endregion

  }
#region Messages
  /// <summary>
  /// A collection of Bigtable [Tables][google.bigtable.admin.v2.Table] and
  /// the resources that serve them.
  /// All tables in an instance are served from a single
  /// [Cluster][google.bigtable.admin.v2.Cluster].
  /// </summary>
  // NOTE(review): protoc-generated message class ("DO NOT EDIT" per the file
  // header) — regenerate from instance.proto rather than hand-editing.
  public sealed partial class Instance : pb::IMessage<Instance> {
    private static readonly pb::MessageParser<Instance> _parser = new pb::MessageParser<Instance>(() => new Instance());
    /// <summary>Parser used by the protobuf runtime to create and parse instances.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<Instance> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Bigtable.Admin.V2.InstanceReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Instance() {
      OnConstruction();
    }

    // Partial hook for user code to run at construction time.
    partial void OnConstruction();

    /// <summary>Copy constructor: field-by-field copy; the labels map is deep-cloned.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Instance(Instance other) : this() {
      name_ = other.name_;
      displayName_ = other.displayName_;
      state_ = other.state_;
      type_ = other.type_;
      labels_ = other.labels_.Clone();
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public Instance Clone() {
      return new Instance(this);
    }

    /// <summary>Field number for the "name" field.</summary>
    public const int NameFieldNumber = 1;
    private string name_ = "";
    /// <summary>
    /// (`OutputOnly`)
    /// The unique name of the instance. Values are of the form
    /// `projects/&lt;project&gt;/instances/[a-z][a-z0-9\\-]+[a-z0-9]`.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Name {
      get { return name_; }
      set {
        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "display_name" field.</summary>
    public const int DisplayNameFieldNumber = 2;
    private string displayName_ = "";
    /// <summary>
    /// The descriptive name for this instance as it appears in UIs.
    /// Can be changed at any time, but should be kept globally unique
    /// to avoid confusion.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string DisplayName {
      get { return displayName_; }
      set {
        displayName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "state" field.</summary>
    public const int StateFieldNumber = 3;
    private global::Google.Cloud.Bigtable.Admin.V2.Instance.Types.State state_ = 0;
    /// <summary>
    /// (`OutputOnly`)
    /// The current state of the instance.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Bigtable.Admin.V2.Instance.Types.State State {
      get { return state_; }
      set {
        state_ = value;
      }
    }

    /// <summary>Field number for the "type" field.</summary>
    public const int TypeFieldNumber = 4;
    private global::Google.Cloud.Bigtable.Admin.V2.Instance.Types.Type type_ = 0;
    /// <summary>
    /// The type of the instance. Defaults to `PRODUCTION`.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Bigtable.Admin.V2.Instance.Types.Type Type {
      get { return type_; }
      set {
        type_ = value;
      }
    }

    /// <summary>Field number for the "labels" field.</summary>
    public const int LabelsFieldNumber = 5;
    // Codec describing the wire format of the labels map entries.
    private static readonly pbc::MapField<string, string>.Codec _map_labels_codec
        = new pbc::MapField<string, string>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 42);
    private readonly pbc::MapField<string, string> labels_ = new pbc::MapField<string, string>();
    /// <summary>
    /// Labels are a flexible and lightweight mechanism for organizing cloud
    /// resources into groups that reflect a customer's organizational needs and
    /// deployment strategies. They can be used to filter resources and aggregate
    /// metrics.
    ///
    /// * Label keys must be between 1 and 63 characters long and must conform to
    ///   the regular expression: `[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}`.
    /// * Label values must be between 0 and 63 characters long and must conform to
    ///   the regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`.
    /// * No more than 64 labels can be associated with a given resource.
    /// * Keys and values must both be under 128 bytes.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::MapField<string, string> Labels {
      get { return labels_; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as Instance);
    }

    // Value equality over all five fields, including the labels map.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(Instance other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Name != other.Name) return false;
      if (DisplayName != other.DisplayName) return false;
      if (State != other.State) return false;
      if (Type != other.Type) return false;
      if (!Labels.Equals(other.Labels)) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Name.Length != 0) hash ^= Name.GetHashCode();
      if (DisplayName.Length != 0) hash ^= DisplayName.GetHashCode();
      if (State != 0) hash ^= State.GetHashCode();
      if (Type != 0) hash ^= Type.GetHashCode();
      hash ^= Labels.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    // Serializes set (non-default) fields in field-number order.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (Name.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Name);
      }
      if (DisplayName.Length != 0) {
        output.WriteRawTag(18);
        output.WriteString(DisplayName);
      }
      if (State != 0) {
        output.WriteRawTag(24);
        output.WriteEnum((int) State);
      }
      if (Type != 0) {
        output.WriteRawTag(32);
        output.WriteEnum((int) Type);
      }
      labels_.WriteTo(output, _map_labels_codec);
    }

    // Computes the serialized size in bytes; must agree with WriteTo.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Name.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
      }
      if (DisplayName.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(DisplayName);
      }
      if (State != 0) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) State);
      }
      if (Type != 0) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Type);
      }
      size += labels_.CalculateSize(_map_labels_codec);
      return size;
    }

    // Merge semantics: set fields in "other" overwrite; labels map entries are added.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(Instance other) {
      if (other == null) {
        return;
      }
      if (other.Name.Length != 0) {
        Name = other.Name;
      }
      if (other.DisplayName.Length != 0) {
        DisplayName = other.DisplayName;
      }
      if (other.State != 0) {
        State = other.State;
      }
      if (other.Type != 0) {
        Type = other.Type;
      }
      labels_.Add(other.labels_);
    }

    // Wire-format parsing; unknown fields are skipped (SkipLastField — this
    // matches an older Google.Protobuf runtime that discards unknown fields).
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            Name = input.ReadString();
            break;
          }
          case 18: {
            DisplayName = input.ReadString();
            break;
          }
          case 24: {
            state_ = (global::Google.Cloud.Bigtable.Admin.V2.Instance.Types.State) input.ReadEnum();
            break;
          }
          case 32: {
            type_ = (global::Google.Cloud.Bigtable.Admin.V2.Instance.Types.Type) input.ReadEnum();
            break;
          }
          case 42: {
            labels_.AddEntriesFrom(input, _map_labels_codec);
            break;
          }
        }
      }
    }

    #region Nested types
    /// <summary>Container for nested types declared in the Instance message type.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static partial class Types {
      /// <summary>
      /// Possible states of an instance.
      /// </summary>
      public enum State {
        /// <summary>
        /// The state of the instance could not be determined.
        /// </summary>
        [pbr::OriginalName("STATE_NOT_KNOWN")] NotKnown = 0,
        /// <summary>
        /// The instance has been successfully created and can serve requests
        /// to its tables.
        /// </summary>
        [pbr::OriginalName("READY")] Ready = 1,
        /// <summary>
        /// The instance is currently being created, and may be destroyed
        /// if the creation process encounters an error.
        /// </summary>
        [pbr::OriginalName("CREATING")] Creating = 2,
      }

      /// <summary>
      /// The type of the instance.
      /// </summary>
      public enum Type {
        /// <summary>
        /// The type of the instance is unspecified. If set when creating an
        /// instance, a `PRODUCTION` instance will be created. If set when updating
        /// an instance, the type will be left unchanged.
        /// </summary>
        [pbr::OriginalName("TYPE_UNSPECIFIED")] Unspecified = 0,
        /// <summary>
        /// An instance meant for production use. `serve_nodes` must be set
        /// on the cluster.
        /// </summary>
        [pbr::OriginalName("PRODUCTION")] Production = 1,
        /// <summary>
        /// The instance is meant for development and testing purposes only; it has
        /// no performance or uptime guarantees and is not covered by SLA.
        /// After a development instance is created, it can be upgraded by
        /// updating the instance to type `PRODUCTION`. An instance created
        /// as a production instance cannot be changed to a development instance.
        /// When creating a development instance, `serve_nodes` on the cluster must
        /// not be set.
        /// </summary>
        [pbr::OriginalName("DEVELOPMENT")] Development = 2,
      }
    }
    #endregion

  }
/// <summary>
/// A resizable group of nodes in a particular cloud location, capable
/// of serving all [Tables][google.bigtable.admin.v2.Table] in the parent
/// [Instance][google.bigtable.admin.v2.Instance].
/// </summary>
public sealed partial class Cluster : pb::IMessage<Cluster> {
private static readonly pb::MessageParser<Cluster> _parser = new pb::MessageParser<Cluster>(() => new Cluster());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<Cluster> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Cloud.Bigtable.Admin.V2.InstanceReflection.Descriptor.MessageTypes[1]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Cluster() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Cluster(Cluster other) : this() {
name_ = other.name_;
location_ = other.location_;
state_ = other.state_;
serveNodes_ = other.serveNodes_;
defaultStorageType_ = other.defaultStorageType_;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Cluster Clone() {
return new Cluster(this);
}
/// <summary>Field number for the "name" field.</summary>
public const int NameFieldNumber = 1;
private string name_ = "";
/// <summary>
/// (`OutputOnly`)
/// The unique name of the cluster. Values are of the form
/// `projects/<project>/instances/<instance>/clusters/[a-z][-a-z0-9]*`.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Name {
get { return name_; }
set {
name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "location" field.</summary>
public const int LocationFieldNumber = 2;
// Backing field; proto3 string fields default to empty, never null.
private string location_ = "";
/// <summary>
/// (`CreationOnly`)
/// The location where this cluster's nodes and storage reside. For best
/// performance, clients should be located as close as possible to this
/// cluster. Currently only zones are supported, so values should be of the
/// form `projects/<project>/locations/<zone>`.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Location {
  get { return location_; }
  set {
    // Rejects null assignment; empty string is the "unset" value.
    location_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
  }
}
/// <summary>Field number for the "state" field.</summary>
public const int StateFieldNumber = 3;
// 0 is the proto3 enum default (STATE_NOT_KNOWN).
private global::Google.Cloud.Bigtable.Admin.V2.Cluster.Types.State state_ = 0;
/// <summary>
/// (`OutputOnly`)
/// The current state of the cluster.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Cloud.Bigtable.Admin.V2.Cluster.Types.State State {
  get { return state_; }
  set {
    state_ = value;
  }
}
/// <summary>Field number for the "serve_nodes" field.</summary>
public const int ServeNodesFieldNumber = 4;
private int serveNodes_;
/// <summary>
/// The number of nodes allocated to this cluster. More nodes enable higher
/// throughput and more consistent performance.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int ServeNodes {
  get { return serveNodes_; }
  set {
    serveNodes_ = value;
  }
}
/// <summary>Field number for the "default_storage_type" field.</summary>
public const int DefaultStorageTypeFieldNumber = 5;
// 0 is the proto3 enum default for StorageType.
private global::Google.Cloud.Bigtable.Admin.V2.StorageType defaultStorageType_ = 0;
/// <summary>
/// (`CreationOnly`)
/// The type of storage used by this cluster to serve its
/// parent instance's tables, unless explicitly overridden.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Cloud.Bigtable.Admin.V2.StorageType DefaultStorageType {
  get { return defaultStorageType_; }
  set {
    defaultStorageType_ = value;
  }
}
/// <summary>Value equality via the strongly-typed overload.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) => Equals(other as Cluster);

/// <summary>
/// Value equality: two Cluster messages are equal when every field matches.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(Cluster other) {
  if (ReferenceEquals(other, this)) {
    return true;
  }
  return !ReferenceEquals(other, null)
      && Name == other.Name
      && Location == other.Location
      && State == other.State
      && ServeNodes == other.ServeNodes
      && DefaultStorageType == other.DefaultStorageType;
}
/// <summary>
/// Hash code folding every non-default field in with XOR
/// (XOR with 0 is the identity, so default-valued fields contribute nothing).
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
  int hash = 1;
  hash ^= Name.Length != 0 ? Name.GetHashCode() : 0;
  hash ^= Location.Length != 0 ? Location.GetHashCode() : 0;
  hash ^= State != 0 ? State.GetHashCode() : 0;
  hash ^= ServeNodes != 0 ? ServeNodes.GetHashCode() : 0;
  hash ^= DefaultStorageType != 0 ? DefaultStorageType.GetHashCode() : 0;
  return hash;
}

/// <summary>Diagnostic JSON rendering of this message.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() => pb::JsonFormatter.ToDiagnosticString(this);
/// <summary>
/// Serializes this message to the output stream, emitting only
/// non-default fields (proto3 semantics). Each raw tag byte encodes
/// (field number &lt;&lt; 3) | wire type.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
  if (Name.Length != 0) {
    // Tag 10 = field 1, wire type 2 (length-delimited).
    output.WriteRawTag(10);
    output.WriteString(Name);
  }
  if (Location.Length != 0) {
    // Tag 18 = field 2, wire type 2.
    output.WriteRawTag(18);
    output.WriteString(Location);
  }
  if (State != 0) {
    // Tag 24 = field 3, wire type 0 (varint).
    output.WriteRawTag(24);
    output.WriteEnum((int) State);
  }
  if (ServeNodes != 0) {
    // Tag 32 = field 4, wire type 0.
    output.WriteRawTag(32);
    output.WriteInt32(ServeNodes);
  }
  if (DefaultStorageType != 0) {
    // Tag 40 = field 5, wire type 0.
    output.WriteRawTag(40);
    output.WriteEnum((int) DefaultStorageType);
  }
}
/// <summary>
/// Computes the serialized size in bytes, mirroring WriteTo: only
/// non-default fields are counted; the leading "1 +" is the single-byte tag.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
  int size = 0;
  if (Name.Length != 0) {
    size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
  }
  if (Location.Length != 0) {
    size += 1 + pb::CodedOutputStream.ComputeStringSize(Location);
  }
  if (State != 0) {
    size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) State);
  }
  if (ServeNodes != 0) {
    size += 1 + pb::CodedOutputStream.ComputeInt32Size(ServeNodes);
  }
  if (DefaultStorageType != 0) {
    size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) DefaultStorageType);
  }
  return size;
}
/// <summary>
/// Merges another Cluster into this one: fields set (non-default) on
/// <paramref name="other"/> overwrite this instance's values; default
/// fields on <paramref name="other"/> are left untouched here.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(Cluster other) {
  // Merging null is a no-op by convention.
  if (other == null) {
    return;
  }
  if (other.Name.Length != 0) {
    Name = other.Name;
  }
  if (other.Location.Length != 0) {
    Location = other.Location;
  }
  if (other.State != 0) {
    State = other.State;
  }
  if (other.ServeNodes != 0) {
    ServeNodes = other.ServeNodes;
  }
  if (other.DefaultStorageType != 0) {
    DefaultStorageType = other.DefaultStorageType;
  }
}
/// <summary>
/// Parses fields from the wire, dispatching on the raw tag value.
/// Unknown tags are skipped, which keeps parsing forward-compatible.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
  uint tag;
  // ReadTag() returns 0 at end of stream / message.
  while ((tag = input.ReadTag()) != 0) {
    switch(tag) {
      default:
        input.SkipLastField();
        break;
      case 10: {
        Name = input.ReadString();
        break;
      }
      case 18: {
        Location = input.ReadString();
        break;
      }
      case 24: {
        // Writes the backing field directly (enum needs no null check).
        state_ = (global::Google.Cloud.Bigtable.Admin.V2.Cluster.Types.State) input.ReadEnum();
        break;
      }
      case 32: {
        ServeNodes = input.ReadInt32();
        break;
      }
      case 40: {
        // Writes the backing field directly, as above.
        defaultStorageType_ = (global::Google.Cloud.Bigtable.Admin.V2.StorageType) input.ReadEnum();
        break;
      }
    }
  }
}
#region Nested types
/// <summary>Container for nested types declared in the Cluster message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
  /// <summary>
  /// Possible states of a cluster.
  /// </summary>
  // Numeric values match the proto enum; OriginalName maps back to the
  // proto identifier for JSON formatting/parsing.
  public enum State {
    /// <summary>
    /// The state of the cluster could not be determined.
    /// </summary>
    [pbr::OriginalName("STATE_NOT_KNOWN")] NotKnown = 0,
    /// <summary>
    /// The cluster has been successfully created and is ready to serve requests.
    /// </summary>
    [pbr::OriginalName("READY")] Ready = 1,
    /// <summary>
    /// The cluster is currently being created, and may be destroyed
    /// if the creation process encounters an error.
    /// A cluster may not be able to serve requests while being created.
    /// </summary>
    [pbr::OriginalName("CREATING")] Creating = 2,
    /// <summary>
    /// The cluster is currently being resized, and may revert to its previous
    /// node count if the process encounters an error.
    /// A cluster is still capable of serving requests while being resized,
    /// but may exhibit performance as if its number of allocated nodes is
    /// between the starting and requested states.
    /// </summary>
    [pbr::OriginalName("RESIZING")] Resizing = 3,
    /// <summary>
    /// The cluster has no backing nodes. The data (tables) still
    /// exist, but no operations can be performed on the cluster.
    /// </summary>
    [pbr::OriginalName("DISABLED")] Disabled = 4,
  }
}
#endregion
}
/// <summary>
/// This is a private alpha release of Cloud Bigtable replication. This feature
/// is not currently available to most Cloud Bigtable customers. This feature
/// might be changed in backward-incompatible ways and is not recommended for
/// production use. It is not subject to any SLA or deprecation policy.
///
/// A configuration object describing how Cloud Bigtable should treat traffic
/// from a particular end user application.
/// </summary>
public sealed partial class AppProfile : pb::IMessage<AppProfile> {
// Shared parser instance; each Parse call creates a fresh AppProfile.
private static readonly pb::MessageParser<AppProfile> _parser = new pb::MessageParser<AppProfile>(() => new AppProfile());
/// <summary>Message parser for AppProfile.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<AppProfile> Parser { get { return _parser; } }
/// <summary>Reflection descriptor (index 2 in the file's message list).</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
  get { return global::Google.Cloud.Bigtable.Admin.V2.InstanceReflection.Descriptor.MessageTypes[2]; }
}
// Explicit interface implementation forwards to the static descriptor.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
  get { return Descriptor; }
}
/// <summary>Creates an AppProfile with all fields at their proto3 defaults.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AppProfile() {
  OnConstruction();
}
// Hook for user code in another part of this partial class.
partial void OnConstruction();
/// <summary>Copy constructor: deep-copies the active routing policy via Clone().</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AppProfile(AppProfile other) : this() {
  name_ = other.name_;
  etag_ = other.etag_;
  description_ = other.description_;
  switch (other.RoutingPolicyCase) {
    case RoutingPolicyOneofCase.MultiClusterRoutingUseAny:
      MultiClusterRoutingUseAny = other.MultiClusterRoutingUseAny.Clone();
      break;
    case RoutingPolicyOneofCase.SingleClusterRouting:
      SingleClusterRouting = other.SingleClusterRouting.Clone();
      break;
  }
}
/// <summary>Returns a deep copy of this AppProfile.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AppProfile Clone() {
  return new AppProfile(this);
}
/// <summary>Field number for the "name" field.</summary>
public const int NameFieldNumber = 1;
// Backing field; proto3 strings default to empty, never null.
private string name_ = "";
/// <summary>
/// (`OutputOnly`)
/// The unique name of the app profile. Values are of the form
/// `projects/<project>/instances/<instance>/appProfiles/[_a-zA-Z0-9][-_.a-zA-Z0-9]*`.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Name {
  get { return name_; }
  set {
    // Rejects null assignment; empty string is the "unset" value.
    name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
  }
}
/// <summary>Field number for the "etag" field.</summary>
public const int EtagFieldNumber = 2;
private string etag_ = "";
/// <summary>
/// Strongly validated etag for optimistic concurrency control. Preserve the
/// value returned from `GetAppProfile` when calling `UpdateAppProfile` to
/// fail the request if there has been a modification in the mean time. The
/// `update_mask` of the request need not include `etag` for this protection
/// to apply.
/// See [Wikipedia](https://en.wikipedia.org/wiki/HTTP_ETag) and
/// [RFC 7232](https://tools.ietf.org/html/rfc7232#section-2.3) for more
/// details.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Etag {
  get { return etag_; }
  set {
    etag_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
  }
}
/// <summary>Field number for the "description" field.</summary>
public const int DescriptionFieldNumber = 3;
private string description_ = "";
/// <summary>
/// Optional long form description of the use case for this AppProfile.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Description {
  get { return description_; }
  set {
    description_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
  }
}
/// <summary>Field number for the "multi_cluster_routing_use_any" field.</summary>
public const int MultiClusterRoutingUseAnyFieldNumber = 5;
/// <summary>
/// Use a multi-cluster routing policy that may pick any cluster.
/// </summary>
// Oneof member: getter returns null unless this is the active case;
// setting a non-null value makes it the active case.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.MultiClusterRoutingUseAny MultiClusterRoutingUseAny {
  get { return routingPolicyCase_ == RoutingPolicyOneofCase.MultiClusterRoutingUseAny ? (global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.MultiClusterRoutingUseAny) routingPolicy_ : null; }
  set {
    routingPolicy_ = value;
    // Setting null clears the oneof back to None.
    routingPolicyCase_ = value == null ? RoutingPolicyOneofCase.None : RoutingPolicyOneofCase.MultiClusterRoutingUseAny;
  }
}
/// <summary>Field number for the "single_cluster_routing" field.</summary>
public const int SingleClusterRoutingFieldNumber = 6;
/// <summary>
/// Use a single-cluster routing policy.
/// </summary>
// Oneof member: see MultiClusterRoutingUseAny for the case semantics.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.SingleClusterRouting SingleClusterRouting {
  get { return routingPolicyCase_ == RoutingPolicyOneofCase.SingleClusterRouting ? (global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.SingleClusterRouting) routingPolicy_ : null; }
  set {
    routingPolicy_ = value;
    routingPolicyCase_ = value == null ? RoutingPolicyOneofCase.None : RoutingPolicyOneofCase.SingleClusterRouting;
  }
}
// Single storage slot shared by all oneof members; the case enum below
// records which (if any) member is currently stored here.
private object routingPolicy_;
/// <summary>Enum of possible cases for the "routing_policy" oneof.</summary>
public enum RoutingPolicyOneofCase {
  None = 0,
  MultiClusterRoutingUseAny = 5,
  SingleClusterRouting = 6,
}
private RoutingPolicyOneofCase routingPolicyCase_ = RoutingPolicyOneofCase.None;
/// <summary>Which member of the "routing_policy" oneof is set, if any.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public RoutingPolicyOneofCase RoutingPolicyCase {
  get { return routingPolicyCase_; }
}
/// <summary>Clears the "routing_policy" oneof back to the unset state.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void ClearRoutingPolicy() {
  routingPolicyCase_ = RoutingPolicyOneofCase.None;
  routingPolicy_ = null;
}
/// <summary>Value equality via the strongly-typed overload.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) => Equals(other as AppProfile);

/// <summary>
/// Value equality: compares the scalar fields, both oneof members
/// (inactive ones compare as null == null), and the active oneof case.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(AppProfile other) {
  if (ReferenceEquals(other, this)) {
    return true;
  }
  return !ReferenceEquals(other, null)
      && Name == other.Name
      && Etag == other.Etag
      && Description == other.Description
      && object.Equals(MultiClusterRoutingUseAny, other.MultiClusterRoutingUseAny)
      && object.Equals(SingleClusterRouting, other.SingleClusterRouting)
      && RoutingPolicyCase == other.RoutingPolicyCase;
}
/// <summary>
/// Hash code folding each non-default field in with XOR
/// (XOR with 0 is the identity), plus the numeric oneof case.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
  int hash = 1;
  hash ^= Name.Length != 0 ? Name.GetHashCode() : 0;
  hash ^= Etag.Length != 0 ? Etag.GetHashCode() : 0;
  hash ^= Description.Length != 0 ? Description.GetHashCode() : 0;
  hash ^= routingPolicyCase_ == RoutingPolicyOneofCase.MultiClusterRoutingUseAny ? MultiClusterRoutingUseAny.GetHashCode() : 0;
  hash ^= routingPolicyCase_ == RoutingPolicyOneofCase.SingleClusterRouting ? SingleClusterRouting.GetHashCode() : 0;
  hash ^= (int) routingPolicyCase_;
  return hash;
}

/// <summary>Diagnostic JSON rendering of this message.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() => pb::JsonFormatter.ToDiagnosticString(this);
/// <summary>
/// Serializes this message, emitting only non-default fields and the
/// active oneof member. Raw tags encode (field number &lt;&lt; 3) | wire type.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
  if (Name.Length != 0) {
    // Tag 10 = field 1, wire type 2 (length-delimited).
    output.WriteRawTag(10);
    output.WriteString(Name);
  }
  if (Etag.Length != 0) {
    // Tag 18 = field 2, wire type 2.
    output.WriteRawTag(18);
    output.WriteString(Etag);
  }
  if (Description.Length != 0) {
    // Tag 26 = field 3, wire type 2.
    output.WriteRawTag(26);
    output.WriteString(Description);
  }
  if (routingPolicyCase_ == RoutingPolicyOneofCase.MultiClusterRoutingUseAny) {
    // Tag 42 = field 5, wire type 2 (embedded message).
    output.WriteRawTag(42);
    output.WriteMessage(MultiClusterRoutingUseAny);
  }
  if (routingPolicyCase_ == RoutingPolicyOneofCase.SingleClusterRouting) {
    // Tag 50 = field 6, wire type 2.
    output.WriteRawTag(50);
    output.WriteMessage(SingleClusterRouting);
  }
}
/// <summary>
/// Computes the serialized size in bytes, mirroring WriteTo; the
/// leading "1 +" in each branch is the single-byte tag.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
  int size = 0;
  if (Name.Length != 0) {
    size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
  }
  if (Etag.Length != 0) {
    size += 1 + pb::CodedOutputStream.ComputeStringSize(Etag);
  }
  if (Description.Length != 0) {
    size += 1 + pb::CodedOutputStream.ComputeStringSize(Description);
  }
  if (routingPolicyCase_ == RoutingPolicyOneofCase.MultiClusterRoutingUseAny) {
    size += 1 + pb::CodedOutputStream.ComputeMessageSize(MultiClusterRoutingUseAny);
  }
  if (routingPolicyCase_ == RoutingPolicyOneofCase.SingleClusterRouting) {
    size += 1 + pb::CodedOutputStream.ComputeMessageSize(SingleClusterRouting);
  }
  return size;
}
/// <summary>
/// Merges another AppProfile into this one. Non-default scalar fields
/// overwrite this instance's values. NOTE: unlike the copy constructor,
/// the active oneof member is assigned by reference (no Clone()), so
/// both messages share the same routing-policy instance afterwards.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(AppProfile other) {
  // Merging null is a no-op by convention.
  if (other == null) {
    return;
  }
  if (other.Name.Length != 0) {
    Name = other.Name;
  }
  if (other.Etag.Length != 0) {
    Etag = other.Etag;
  }
  if (other.Description.Length != 0) {
    Description = other.Description;
  }
  switch (other.RoutingPolicyCase) {
    case RoutingPolicyOneofCase.MultiClusterRoutingUseAny:
      MultiClusterRoutingUseAny = other.MultiClusterRoutingUseAny;
      break;
    case RoutingPolicyOneofCase.SingleClusterRouting:
      SingleClusterRouting = other.SingleClusterRouting;
      break;
  }
}
/// <summary>
/// Parses fields from the wire. Unknown tags are skipped. For oneof
/// message fields, if the same case is already set the incoming bytes
/// are merged into the existing value rather than replacing it.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
  uint tag;
  // ReadTag() returns 0 at end of stream / message.
  while ((tag = input.ReadTag()) != 0) {
    switch(tag) {
      default:
        input.SkipLastField();
        break;
      case 10: {
        Name = input.ReadString();
        break;
      }
      case 18: {
        Etag = input.ReadString();
        break;
      }
      case 26: {
        Description = input.ReadString();
        break;
      }
      case 42: {
        global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.MultiClusterRoutingUseAny subBuilder = new global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.MultiClusterRoutingUseAny();
        // Preserve an already-set value of the same case by merging into it.
        if (routingPolicyCase_ == RoutingPolicyOneofCase.MultiClusterRoutingUseAny) {
          subBuilder.MergeFrom(MultiClusterRoutingUseAny);
        }
        input.ReadMessage(subBuilder);
        MultiClusterRoutingUseAny = subBuilder;
        break;
      }
      case 50: {
        global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.SingleClusterRouting subBuilder = new global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Types.SingleClusterRouting();
        if (routingPolicyCase_ == RoutingPolicyOneofCase.SingleClusterRouting) {
          subBuilder.MergeFrom(SingleClusterRouting);
        }
        input.ReadMessage(subBuilder);
        SingleClusterRouting = subBuilder;
        break;
      }
    }
  }
}
#region Nested types
/// <summary>Container for nested types declared in the AppProfile message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
/// <summary>
/// Read/write requests may be routed to any cluster in the instance, and will
/// fail over to another cluster in the event of transient errors or delays.
/// Choosing this option sacrifices read-your-writes consistency to improve
/// availability.
/// </summary>
public sealed partial class MultiClusterRoutingUseAny : pb::IMessage<MultiClusterRoutingUseAny> {
  // Shared parser; each Parse call creates a fresh instance.
  private static readonly pb::MessageParser<MultiClusterRoutingUseAny> _parser = new pb::MessageParser<MultiClusterRoutingUseAny>(() => new MultiClusterRoutingUseAny());

  /// <summary>Message parser for this (field-less) message type.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<MultiClusterRoutingUseAny> Parser => _parser;

  /// <summary>Reflection descriptor (first nested type of AppProfile).</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor =>
      global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Descriptor.NestedTypes[0];

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor => Descriptor;

  /// <summary>Creates an empty instance.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public MultiClusterRoutingUseAny() {
    OnConstruction();
  }

  // Hook for user code in another part of this partial class.
  partial void OnConstruction();

  /// <summary>Copy constructor; there are no fields to copy.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public MultiClusterRoutingUseAny(MultiClusterRoutingUseAny other) : this() {
  }

  /// <summary>Returns a copy of this message.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public MultiClusterRoutingUseAny Clone() => new MultiClusterRoutingUseAny(this);

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) => Equals(other as MultiClusterRoutingUseAny);

  /// <summary>With no fields, any two non-null instances are equal.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(MultiClusterRoutingUseAny other) {
    return !ReferenceEquals(other, null);
  }

  /// <summary>Constant hash: all instances are equal.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() => 1;

  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() => pb::JsonFormatter.ToDiagnosticString(this);

  /// <summary>No fields, so nothing is ever written.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
  }

  /// <summary>Serialized size is always zero.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() => 0;

  /// <summary>No fields, so merging is a no-op.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(MultiClusterRoutingUseAny other) {
  }

  /// <summary>Consumes and skips every tag; there are no known fields.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    while ((tag = input.ReadTag()) != 0) {
      input.SkipLastField();
    }
  }
}
/// <summary>
/// Unconditionally routes all read/write requests to a specific cluster.
/// This option preserves read-your-writes consistency, but does not improve
/// availability.
/// </summary>
public sealed partial class SingleClusterRouting : pb::IMessage<SingleClusterRouting> {
  // Shared parser; each Parse call creates a fresh instance.
  private static readonly pb::MessageParser<SingleClusterRouting> _parser = new pb::MessageParser<SingleClusterRouting>(() => new SingleClusterRouting());
  /// <summary>Message parser for SingleClusterRouting.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<SingleClusterRouting> Parser { get { return _parser; } }
  /// <summary>Reflection descriptor (second nested type of AppProfile).</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::Google.Cloud.Bigtable.Admin.V2.AppProfile.Descriptor.NestedTypes[1]; }
  }
  // Explicit interface implementation forwards to the static descriptor.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }
  /// <summary>Creates an instance with all fields at their proto3 defaults.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SingleClusterRouting() {
    OnConstruction();
  }
  // Hook for user code in another part of this partial class.
  partial void OnConstruction();
  /// <summary>Copy constructor.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SingleClusterRouting(SingleClusterRouting other) : this() {
    clusterId_ = other.clusterId_;
    allowTransactionalWrites_ = other.allowTransactionalWrites_;
  }
  /// <summary>Returns a deep copy of this message.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SingleClusterRouting Clone() {
    return new SingleClusterRouting(this);
  }
  /// <summary>Field number for the "cluster_id" field.</summary>
  public const int ClusterIdFieldNumber = 1;
  // Backing field; proto3 strings default to empty, never null.
  private string clusterId_ = "";
  /// <summary>
  /// The cluster to which read/write requests should be routed.
  /// </summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string ClusterId {
    get { return clusterId_; }
    set {
      // Rejects null assignment; empty string is the "unset" value.
      clusterId_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  /// <summary>Field number for the "allow_transactional_writes" field.</summary>
  public const int AllowTransactionalWritesFieldNumber = 2;
  private bool allowTransactionalWrites_;
  /// <summary>
  /// Whether or not `CheckAndMutateRow` and `ReadModifyWriteRow` requests are
  /// allowed by this app profile. It is unsafe to send these requests to
  /// the same table/row/column in multiple clusters.
  /// </summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool AllowTransactionalWrites {
    get { return allowTransactionalWrites_; }
    set {
      allowTransactionalWrites_ = value;
    }
  }
  /// <summary>Value equality via the strongly-typed overload.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as SingleClusterRouting);
  }
  /// <summary>Value equality: compares both fields.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(SingleClusterRouting other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (ClusterId != other.ClusterId) return false;
    if (AllowTransactionalWrites != other.AllowTransactionalWrites) return false;
    return true;
  }
  /// <summary>Hash code folding non-default fields in with XOR.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hash = 1;
    if (ClusterId.Length != 0) hash ^= ClusterId.GetHashCode();
    if (AllowTransactionalWrites != false) hash ^= AllowTransactionalWrites.GetHashCode();
    return hash;
  }
  /// <summary>Diagnostic JSON rendering of this message.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }
  /// <summary>Serializes only non-default fields (proto3 semantics).</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    if (ClusterId.Length != 0) {
      // Tag 10 = field 1, wire type 2 (length-delimited).
      output.WriteRawTag(10);
      output.WriteString(ClusterId);
    }
    if (AllowTransactionalWrites != false) {
      // Tag 16 = field 2, wire type 0 (varint).
      output.WriteRawTag(16);
      output.WriteBool(AllowTransactionalWrites);
    }
  }
  /// <summary>Computes the serialized size in bytes, mirroring WriteTo.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int size = 0;
    if (ClusterId.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(ClusterId);
    }
    if (AllowTransactionalWrites != false) {
      // One tag byte plus one byte for the bool value.
      size += 1 + 1;
    }
    return size;
  }
  /// <summary>Merges non-default fields of <paramref name="other"/> into this message.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(SingleClusterRouting other) {
    // Merging null is a no-op by convention.
    if (other == null) {
      return;
    }
    if (other.ClusterId.Length != 0) {
      ClusterId = other.ClusterId;
    }
    if (other.AllowTransactionalWrites != false) {
      AllowTransactionalWrites = other.AllowTransactionalWrites;
    }
  }
  /// <summary>Parses fields from the wire; unknown tags are skipped.</summary>
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    while ((tag = input.ReadTag()) != 0) {
      switch(tag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          ClusterId = input.ReadString();
          break;
        }
        case 16: {
          AllowTransactionalWrites = input.ReadBool();
          break;
        }
      }
    }
  }
}
}
#endregion
}
#endregion
}
#endregion Designer generated code
| |
/*
Copyright 2016 - 2017 Adrian Popescu.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
namespace Redmine.Net.Api
{
/// <summary>
/// </summary>
public static class RedmineKeys
{
/// <summary>
/// The activity
/// </summary>
public const string ACTIVITY = "activity";
/// <summary>
/// The "activity_id" key.
/// </summary>
public const string ACTIVITY_ID = "activity_id";
/// <summary>
/// The wildcard key ("*").
/// </summary>
public const string ALL = "*";
/// <summary>
/// The "api_key" key.
/// </summary>
public const string API_KEY = "api_key";
/// <summary>
/// The "assigned_to" key.
/// </summary>
public const string ASSIGNED_TO = "assigned_to";
/// <summary>
/// The "assigned_to_id" key.
/// </summary>
public const string ASSIGNED_TO_ID = "assigned_to_id";
/// <summary>
/// The "attachment" key.
/// </summary>
public const string ATTACHMENT = "attachment";
/// <summary>
/// The "attachments" key.
/// </summary>
public const string ATTACHMENTS = "attachments";
/// <summary>
/// The "author" key.
/// </summary>
public const string AUTHOR = "author";
/// <summary>
/// The "auth_source_id" key.
/// </summary>
public const string AUTH_SOURCE_ID = "auth_source_id";
/// <summary>
/// The "category" key.
/// </summary>
public const string CATEGORY = "category";
/// <summary>
/// The "category_id" key.
/// </summary>
public const string CATEGORY_ID = "category_id";
/// <summary>
///
/// </summary>
public const string CHANGESET = "changeset";
/// <summary>
///
/// </summary>
public const string CHANGESETS = "changesets";
/// <summary>
///
/// </summary>
public const string CHILDREN = "children";
/// <summary>
///
/// </summary>
public const string CLOSED_ON = "closed_on";
/// <summary>
///
/// </summary>
public const string COMMENTS = "comments";
/// <summary>
///
/// </summary>
public const string COMMITTED_ON = "committed_on";
/// <summary>
///
/// </summary>
public const string CONTENT_TYPE = "content_type";
/// <summary>
///
/// </summary>
public const string CONTENT_URL = "content_url";
/// <summary>
///
/// </summary>
public const string CREATED_ON = "created_on";
/// <summary>
///
/// </summary>
public const string CUSTOMIZED_TYPE = "customized_type";
/// <summary>
///
/// </summary>
public const string CUSTOM_FIELD = "custom_field";
/// <summary>
///
/// </summary>
public const string CUSTOM_FIELDS = "custom_fields";
/// <summary>
///
/// </summary>
public const string DEFAULT_VALUE = "default_value";
/// <summary>
///
/// </summary>
public const string DELAY = "delay";
/// <summary>
///
/// </summary>
public const string DESCRIPTION = "description";
/// <summary>
///
/// </summary>
public const string DETAIL = "detail";
/// <summary>
///
/// </summary>
public const string DETAILS = "details";
/// <summary>
///
/// </summary>
public const string DIGEST = "digest";
/// <summary>
///
/// </summary>
public const string DONE_RATIO = "done_ratio";
/// <summary>
///
/// </summary>
public const string DOWNLOADS = "downloads";
/// <summary>
///
/// </summary>
public const string DUE_DATE = "due_date";
/// <summary>
///
/// </summary>
public const string ENABLED_MODULE = "enabled_module";
/// <summary>
///
/// </summary>
public const string ENABLED_MODULES = "enabled_modules";
/// <summary>
///
/// </summary>
public const string ENABLED_MODULE_NAMES = "enabled_module_names";
/// <summary>
///
/// </summary>
public const string ERROR = "error";
/// <summary>
///
/// </summary>
public const string ERRORS = "errors";
/// <summary>
///
/// </summary>
public const string ESTIMATED_HOURS = "estimated_hours";
/// <summary>
///
/// </summary>
public const string FIELD_FORMAT = "field_format";
/// <summary>
///
/// </summary>
public const string FILE = "file";
/// <summary>
///
/// </summary>
public const string FILENAME = "filename";
/// <summary>
///
/// </summary>
public const string FILESIZE = "filesize";
/// <summary>
///
/// </summary>
public const string FIRSTNAME = "firstname";
/// <summary>
///
/// </summary>
public const string FIXED_VERSION = "fixed_version";
/// <summary>
///
/// </summary>
public const string FIXED_VERSION_ID = "fixed_version_id";
/// <summary>
///
/// </summary>
public const string GROUP = "group";
/// <summary>
///
/// </summary>
public const string GROUPS = "groups";
/// <summary>
///
/// </summary>
public const string GROUP_ID = "group_id";
/// <summary>
///
/// </summary>
public const string HOMEPAGE = "homepage";
/// <summary>
///
/// </summary>
public const string HOURS = "hours";
/// <summary>
///
/// </summary>
public const string ID = "id";
/// <summary>
///
/// </summary>
public const string IDENTIFIER = "identifier";
/// <summary>
///
/// </summary>
public const string INCLUDE = "include";
/// <summary>
///
/// </summary>
public const string INHERITED = "inherited";
/// <summary>
///
/// </summary>
public const string INHERIT_MEMBERS = "inherit_members";
/// <summary>
///
/// </summary>
public const string ISSUE = "issue";
/// <summary>
///
/// </summary>
public const string ISSUES = "issues";
/// <summary>
///
/// </summary>
public const string ISSUE_CATEGORIES = "issue_categories";
/// <summary>
///
/// </summary>
public const string ISSUE_CATEGORY = "issue_category";
/// <summary>
///
/// </summary>
public const string ISSUE_ID = "issue_id";
/// <summary>
///
/// </summary>
public const string ISSUE_PRIORITIES = "issue_priorities";
/// <summary>
///
/// </summary>
public const string ISSUE_PRIORITY = "issue_priority";
/// <summary>
///
/// </summary>
public const string ISSUE_STATUS = "issue_status";
/// <summary>
///
/// </summary>
public const string ISSUE_TO_ID = "issue_to_id";
/// <summary>
///
/// </summary>
public const string IS_CLOSED = "is_closed";
/// <summary>
///
/// </summary>
public const string IS_DEFAULT = "is_default";
/// <summary>
///
/// </summary>
public const string IS_FILTER = "is_filter";
/// <summary>
///
/// </summary>
public const string IS_PRIVATE = "is_private";
/// <summary>
///
/// </summary>
public const string IS_PUBLIC = "is_public";
/// <summary>
///
/// </summary>
public const string IS_REQUIRED = "is_required";
/// <summary>
///
/// </summary>
public const string JOURNAL = "journal";
/// <summary>
///
/// </summary>
public const string JOURNALS = "journals";
/// <summary>
///
/// </summary>
public const string KEY = "key";
/// <summary>
///
/// </summary>
public const string LASTNAME = "lastname";
/// <summary>
///
/// </summary>
public const string LAST_LOGIN_ON = "last_login_on";
/// <summary>
///
/// </summary>
public const string LIMIT = "limit";
/// <summary>
///
/// </summary>
public const string LOGIN = "login";
/// <summary>
///
/// </summary>
public const string MAIL = "mail";
/// <summary>
///
/// </summary>
public const string MAIL_NOTIFICATION = "mail_notification";
/// <summary>
///
/// </summary>
public const string MAX_LENGTH = "max_length";
/// <summary>
///
/// </summary>
public const string MEMBERSHIP = "membership";
/// <summary>
///
/// </summary>
public const string MEMBERSHIPS = "memberships";
/// <summary>
///
/// </summary>
public const string MIN_LENGTH = "min_length";
/// <summary>
///
/// </summary>
public const string MULTIPLE = "multiple";
/// <summary>
///
/// </summary>
public const string MUST_CHANGE_PASSWD = "must_change_passwd";
/// <summary>
/// The "name" field key.
/// </summary>
public const string NAME = "name";
/// <summary>
/// The "news" field key.
/// </summary>
public const string NEWS = "news";
/// <summary>
/// The "new_value" field key.
/// </summary>
public const string NEW_VALUE = "new_value";
/// <summary>
/// The "notes" field key.
/// </summary>
public const string NOTES = "notes";
/// <summary>
/// The "offset" field key.
/// </summary>
public const string OFFSET = "offset";
/// <summary>
/// The "old_value" field key.
/// </summary>
public const string OLD_VALUE = "old_value";
/// <summary>
/// The "parent" field key.
/// </summary>
public const string PARENT = "parent";
/// <summary>
/// The "parent_id" field key.
/// </summary>
public const string PARENT_ID = "parent_id";
/// <summary>
/// The "parent_issue_id" field key.
/// </summary>
public const string PARENT_ISSUE_ID = "parent_issue_id";
/// <summary>
/// The "password" field key.
/// </summary>
public const string PASSWORD = "password";
/// <summary>
/// The "permission" field key.
/// </summary>
public const string PERMISSION = "permission";
/// <summary>
/// The "permissions" field key.
/// </summary>
public const string PERMISSIONS = "permissions";
/// <summary>
/// The "possible_value" field key.
/// </summary>
public const string POSSIBLE_VALUE = "possible_value";
/// <summary>
/// The "possible_values" field key.
/// </summary>
public const string POSSIBLE_VALUES = "possible_values";
/// <summary>
/// The "priority" field key.
/// </summary>
public const string PRIORITY = "priority";
/// <summary>
/// The "priority_id" field key.
/// </summary>
public const string PRIORITY_ID = "priority_id";
/// <summary>
/// The "private_notes" field key.
/// </summary>
public const string PRIVATE_NOTES = "private_notes";
/// <summary>
/// The "project" field key.
/// </summary>
public const string PROJECT = "project";
/// <summary>
/// The "projects" field key.
/// </summary>
public const string PROJECTS = "projects";
/// <summary>
/// The "project_id" field key.
/// </summary>
public const string PROJECT_ID = "project_id";
/// <summary>
/// The "property" field key.
/// </summary>
public const string PROPERTY = "property";
/// <summary>
/// The "query" field key.
/// </summary>
public const string QUERY = "query";
/// <summary>
/// The "regexp" field key.
/// </summary>
public const string REGEXP = "regexp";
/// <summary>
/// The "relation" field key.
/// </summary>
public const string RELATION = "relation";
/// <summary>
/// The "relations" field key.
/// </summary>
public const string RELATIONS = "relations";
/// <summary>
/// The "relation_type" field key.
/// </summary>
public const string RELATION_TYPE = "relation_type";
/// <summary>
/// The "revision" field key.
/// </summary>
public const string REVISION = "revision";
/// <summary>
/// The "role" field key.
/// </summary>
public const string ROLE = "role";
/// <summary>
/// The "roles" field key.
/// </summary>
public const string ROLES = "roles";
/// <summary>
/// The "role_id" field key.
/// </summary>
public const string ROLE_ID = "role_id";
/// <summary>
/// The "role_ids" field key.
/// </summary>
public const string ROLE_IDS = "role_ids";
/// <summary>
/// The "searchable" field key.
/// </summary>
public const string SEARCHABLE = "searchable";
/// <summary>
/// The "sharing" field key.
/// </summary>
public const string SHARING = "sharing";
/// <summary>
/// The "sort" field key.
/// </summary>
public const string SORT = "sort";
/// <summary>
/// The "spent_hours" field key.
/// </summary>
public const string SPENT_HOURS = "spent_hours";
/// <summary>
/// The "spent_on" field key.
/// </summary>
public const string SPENT_ON = "spent_on";
/// <summary>
/// The "start_date" field key.
/// </summary>
public const string START_DATE = "start_date";
/// <summary>
/// The "status" field key.
/// </summary>
public const string STATUS = "status";
/// <summary>
/// The "status_id" field key.
/// </summary>
public const string STATUS_ID = "status_id";
/// <summary>
/// The "subject" field key.
/// </summary>
public const string SUBJECT = "subject";
/// <summary>
/// The "subproject_id" field key.
/// </summary>
public const string SUBPROJECT_ID = "subproject_id";
/// <summary>
/// The "summary" field key.
/// </summary>
public const string SUMMARY = "summary";
/// <summary>
/// The "text" field key.
/// </summary>
public const string TEXT = "text";
/// <summary>
/// The "time_entry" field key.
/// </summary>
public const string TIME_ENTRY = "time_entry";
/// <summary>
/// The "time_entry_activities" field key.
/// </summary>
public const string TIME_ENTRY_ACTIVITIES = "time_entry_activities";
/// <summary>
/// The "time_entry_activity" field key.
/// </summary>
public const string TIME_ENTRY_ACTIVITY = "time_entry_activity";
/// <summary>
/// The "title" field key.
/// </summary>
public const string TITLE = "title";
/// <summary>
/// The "token" field key.
/// </summary>
public const string TOKEN = "token";
/// <summary>
/// The "total_count" field key.
/// </summary>
public const string TOTAL_COUNT = "total_count";
/// <summary>
/// The "total_estimated_hours" field key.
/// </summary>
public const string TOTAL_ESTIMATED_HOURS = "total_estimated_hours";
/// <summary>
/// The "total_spent_hours" field key.
/// </summary>
public const string TOTAL_SPENT_HOURS = "total_spent_hours";
/// <summary>
/// The "tracker" field key.
/// </summary>
public const string TRACKER = "tracker";
/// <summary>
/// The "trackers" field key.
/// </summary>
public const string TRACKERS = "trackers";
/// <summary>
/// The "tracker_id" field key.
/// </summary>
public const string TRACKER_ID = "tracker_id";
/// <summary>
/// The "tracker_ids" field key.
/// </summary>
public const string TRACKER_IDS = "tracker_ids";
/// <summary>
/// The "updated_on" field key.
/// </summary>
public const string UPDATED_ON = "updated_on";
/// <summary>
/// The "upload" field key.
/// </summary>
public const string UPLOAD = "upload";
/// <summary>
/// The "uploads" field key.
/// </summary>
public const string UPLOADS = "uploads";
/// <summary>
/// The "user" field key.
/// </summary>
public const string USER = "user";
/// <summary>
/// The "users" field key.
/// </summary>
public const string USERS = "users";
/// <summary>
/// The "user_id" field key.
/// </summary>
public const string USER_ID = "user_id";
/// <summary>
/// The "user_ids" field key.
/// </summary>
public const string USER_IDS = "user_ids";
/// <summary>
/// The "value" field key.
/// </summary>
public const string VALUE = "value";
/// <summary>
/// The "version" field key.
/// </summary>
public const string VERSION = "version";
/// <summary>
/// The "version_id" field key.
/// </summary>
public const string VERSION_ID = "version_id";
/// <summary>
/// The "visible" field key.
/// </summary>
public const string VISIBLE = "visible";
/// <summary>
/// The "watcher" field key.
/// </summary>
public const string WATCHER = "watcher";
/// <summary>
/// The "watchers" field key.
/// </summary>
public const string WATCHERS = "watchers";
/// <summary>
/// The "watcher_user_ids" field key.
/// </summary>
public const string WATCHER_USER_IDS = "watcher_user_ids";
/// <summary>
/// The "wiki_page" field key.
/// </summary>
public const string WIKI_PAGE = "wiki_page";
/// <summary>
/// The "wiki_pages" field key.
/// </summary>
public const string WIKI_PAGES = "wiki_pages";
/// <summary>
/// The "reassign_to_id" field key.
/// </summary>
public const string REASSIGN_TO_ID = "reassign_to_id";
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ArgumentParser.cs" company="CatenaLogic">
// Copyright (c) 2014 - 2014 CatenaLogic. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace GitLink
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Catel.Collections;
using Catel.Logging;
using GitLink.Providers;
using GitTools;
using GitTools.Git;
using LibGit2Sharp;
/// <summary>
/// Parses the GitLink command line into a <see cref="Context"/>.
/// </summary>
public static class ArgumentParser
{
    private static readonly ILog Log = LogManager.GetCurrentClassLogger();

    /// <summary>
    /// Parses a command line given as a single string. The string is split on
    /// spaces; quoted values containing spaces are re-assembled by <see cref="GetValue"/>.
    /// </summary>
    /// <param name="commandLineArguments">The full command line.</param>
    /// <returns>The parsed <see cref="Context"/>.</returns>
    public static Context ParseArguments(string commandLineArguments)
    {
        return ParseArguments(commandLineArguments.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries).ToList(),
            new ProviderManager());
    }

    /// <summary>
    /// Parses the given argument array using the default <see cref="ProviderManager"/>.
    /// </summary>
    /// <param name="commandLineArguments">The individual arguments.</param>
    /// <returns>The parsed <see cref="Context"/>.</returns>
    public static Context ParseArguments(params string[] commandLineArguments)
    {
        return ParseArguments(commandLineArguments.ToList(), new ProviderManager());
    }

    /// <summary>
    /// Parses command line arguments into a <see cref="Context"/>. The first
    /// argument is the solution directory; the rest are switches, optionally
    /// followed by a value. When no target url is supplied, the method tries to
    /// derive one from the git repository found near the solution directory.
    /// </summary>
    /// <param name="commandLineArguments">The raw arguments.</param>
    /// <param name="providerManager">Used to resolve the provider for the target url.</param>
    /// <returns>The populated <see cref="Context"/>.</returns>
    /// <exception cref="GitLinkException">
    /// Thrown for an invalid argument count or an unrecognized parameter.
    /// </exception>
    public static Context ParseArguments(List<string> commandLineArguments, IProviderManager providerManager)
    {
        var context = new Context(providerManager);

        if (commandLineArguments.Count == 0)
        {
            context.IsHelp = true;
            return context;
        }

        var firstArgument = commandLineArguments.First();
        if (IsHelp(firstArgument))
        {
            context.IsHelp = true;
            return context;
        }

        // Valid shapes: just the solution directory (1 argument), or the
        // directory plus at least one switch with a value (3 or more).
        if (commandLineArguments.Count < 3 && commandLineArguments.Count != 1)
        {
            throw Log.ErrorAndCreateException<GitLinkException>("Invalid number of arguments");
        }

        context.SolutionDirectory = firstArgument;

        var namedArguments = commandLineArguments.Skip(1).ToList();
        for (var index = 0; index < namedArguments.Count; index++)
        {
            var name = namedArguments[index];

            // First check everything without values
            if (IsSwitch("debug", name))
            {
                context.IsDebug = true;
                continue;
            }

            if (IsSwitch("errorsaswarnings", name))
            {
                context.ErrorsAsWarnings = true;
                continue;
            }

            if (IsSwitch("skipverify", name))
            {
                context.SkipVerify = true;
                continue;
            }

            // After this point, all arguments should have a value
            index++;

            var valueInfo = GetValue(namedArguments, index);
            var value = valueInfo.Key;

            // GetValue may have consumed extra arguments to re-join a quoted value.
            index = index + (valueInfo.Value - 1);

            if (IsSwitch("l", name))
            {
                context.LogFile = value;
                continue;
            }

            if (IsSwitch("c", name))
            {
                context.ConfigurationName = value;
                continue;
            }

            if (IsSwitch("p", name))
            {
                context.PlatformName = value;
                continue;
            }

            if (IsSwitch("u", name))
            {
                context.TargetUrl = value;
                continue;
            }

            if (IsSwitch("b", name))
            {
                context.TargetBranch = value;
                continue;
            }

            if (IsSwitch("s", name))
            {
                context.ShaHash = value;
                continue;
            }

            if (IsSwitch("f", name))
            {
                context.SolutionFile = value;
                continue;
            }

            if (IsSwitch("d", name))
            {
                context.PdbFilesDirectory = value;
                continue;
            }

            if (IsSwitch("ignore", name))
            {
                context.IgnoredProjects.AddRange(value.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(x => x.Trim()));
                continue;
            }

            throw Log.ErrorAndCreateException<GitLinkException>("Could not parse command line parameter '{0}'.", name);
        }

        if (string.IsNullOrEmpty(context.TargetUrl))
        {
            Log.Info("No target url was specified, trying to determine the target url automatically");

            var gitDir = GitDirFinder.TreeWalkForGitDir(context.SolutionDirectory);
            if (gitDir != null)
            {
                using (var repo = RepositoryLoader.GetRepo(gitDir))
                {
                    var currentBranch = repo.Head;

                    if (string.IsNullOrEmpty(context.ShaHash))
                    {
                        context.ShaHash = currentBranch.Tip.Sha;
                    }

                    // A detached head or a local-only branch has no remote; fall back
                    // to any remote branch that contains the commit.
                    if (currentBranch.Remote == null || currentBranch.IsDetachedHead())
                    {
                        currentBranch = repo.GetBranchesContainingCommit(context.ShaHash).FirstOrDefault(b => b.Remote != null);
                    }

                    if (currentBranch != null && currentBranch.Remote != null)
                    {
                        var url = currentBranch.Remote.Url;
                        if (url.StartsWith("https://") || url.StartsWith("http://"))
                        {
                            context.TargetUrl = url.OptimizeUrl();

                            Log.Info("Automatically determine target url '{0}'", context.TargetUrl);
                        }
                        else if (url.StartsWith("ssh://") || (url.Contains("@") && url.Contains(":")))
                        {
                            // Convert an ssh / scp-style url (user@host:path) into an http url.
                            url = url.Replace("ssh://", "");

                            var parts = url.Split('@', ':');

                            // ditch the username
                            if (parts.Length == 3)
                            {
                                parts = parts.Skip(1).ToArray();
                            }

                            url = string.Format("http://{0}/{1}", parts[0], parts[1]).OptimizeUrl();

                            context.TargetUrl = url.OptimizeUrl();

                            Log.Info("Automatically determine target url '{0}' from ssh url...", context.TargetUrl);
                            Log.Info("--> This may not be correct or may use a different protocol. If you wish you can manually specify the url as a command line parameter.");
                        }
                    }
                }
            }
        }

        if (!string.IsNullOrEmpty(context.TargetUrl))
        {
            context.Provider = providerManager.GetProvider(context.TargetUrl);
        }

        return context;
    }

    /// <summary>
    /// Reads the value starting at <paramref name="index"/>. A value that begins
    /// with a double quote is re-joined (space-separated) across subsequent
    /// arguments until the closing quote is found.
    /// </summary>
    /// <param name="arguments">The argument list.</param>
    /// <param name="index">The index of the first token of the value.</param>
    /// <returns>
    /// A pair of the (unquoted) value and the number of arguments consumed.
    /// </returns>
    private static KeyValuePair<string, int> GetValue(List<string> arguments, int index)
    {
        var totalCounter = 1;

        var value = arguments[index];
        while (value.StartsWith("\""))
        {
            if (value.EndsWith("\""))
            {
                break;
            }

            // Fix: an unterminated quoted value at the end of the argument list
            // previously indexed past the list and threw ArgumentOutOfRangeException;
            // return the partial value instead.
            if (index + 1 >= arguments.Count)
            {
                break;
            }

            index++;
            value += " " + arguments[index];
            totalCounter++;
        }

        value = value.Trim('\"');

        return new KeyValuePair<string, int>(value, totalCounter);
    }

    /// <summary>
    /// Returns true when <paramref name="value"/> equals <paramref name="switchName"/>
    /// after stripping a single leading '-' and/or '/'.
    /// </summary>
    private static bool IsSwitch(string switchName, string value)
    {
        if (value.StartsWith("-"))
        {
            value = value.Remove(0, 1);
        }

        if (value.StartsWith("/"))
        {
            value = value.Remove(0, 1);
        }

        return (string.Equals(switchName, value));
    }

    /// <summary>
    /// Returns true when the argument requests help ("?", -h, -help, /?, ...).
    /// </summary>
    private static bool IsHelp(string singleArgument)
    {
        return (singleArgument == "?") ||
               IsSwitch("h", singleArgument) ||
               IsSwitch("help", singleArgument) ||
               IsSwitch("?", singleArgument);
    }
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MmoItem.cs" company="Exit Games GmbH">
// Copyright (c) Exit Games GmbH. All rights reserved.
// </copyright>
// <summary>
// This <see cref="Item" /> subclass overrides <see cref="OnDestroy">OnDestroy</see> in order to publish event <see cref="ItemDestroyed" />.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Photon.MmoDemo.Server
{
using System.Collections;
using Photon.MmoDemo.Common;
using Photon.MmoDemo.Server.Events;
using Photon.MmoDemo.Server.Messages;
using Photon.SocketServer;
using Photon.SocketServer.Mmo;
using Photon.SocketServer.Mmo.Messages;
/// <summary>
/// This <see cref = "Item" /> subclass overrides <see cref = "OnDestroy">OnDestroy</see> in order to publish event <see cref = "ItemDestroyed" />.
/// </summary>
/// <summary>
/// An <see cref="Item"/> that belongs to an <see cref="MmoActor"/> and publishes an
/// <see cref="ItemDestroyed"/> event when destroyed.
/// </summary>
public class MmoItem : Item, IMmoItem
{
    /// <summary>
    /// The actor that owns this item.
    /// </summary>
    private readonly MmoActor owner;

    /// <summary>
    /// Initializes a new instance of the <see cref="MmoItem"/> class.
    /// </summary>
    /// <param name="world">The world the item lives in.</param>
    /// <param name="coordinate">The initial coordinate.</param>
    /// <param name="rotation">The initial rotation.</param>
    /// <param name="properties">The item properties.</param>
    /// <param name="owner">The owning actor; its peer fiber executes this item's operations.</param>
    /// <param name="itemId">The item id.</param>
    /// <param name="itemType">The item type.</param>
    public MmoItem(IWorld world, float[] coordinate, float[] rotation, Hashtable properties, MmoActor owner, string itemId, byte itemType)
        : base(coordinate.ToVector(), properties, itemId, itemType, world, owner.Peer.RequestFiber)
    {
        this.owner = owner;
        this.Coordinate = coordinate;
        this.Rotation = rotation;
    }

    /// <summary>
    /// Gets the item's coordinate.
    /// </summary>
    public float[] Coordinate { get; private set; }

    /// <summary>
    /// Gets the owning <see cref="MmoActor"/>.
    /// </summary>
    public MmoActor Owner
    {
        get { return this.owner; }
    }

    /// <summary>
    /// Gets or sets the item's rotation.
    /// </summary>
    public float[] Rotation { get; set; }

    /// <summary>
    /// Moves the item to the given coordinate and refreshes interest management.
    /// </summary>
    /// <param name="coordinate">The new coordinate.</param>
    public void Move(float[] coordinate)
    {
        this.Relocate(coordinate);
    }

    /// <summary>
    /// Spawns the item at the given coordinate and refreshes interest management.
    /// </summary>
    /// <param name="coordinate">The spawn coordinate.</param>
    public void Spawn(float[] coordinate)
    {
        this.Relocate(coordinate);
    }

    /// <summary>
    /// Checks whether <paramref name="actor"/> may change the item.
    /// </summary>
    /// <param name="actor">The accessing actor.</param>
    /// <returns>True only when <paramref name="actor"/> is the <see cref="Owner"/>.</returns>
    public bool GrantWriteAccess(MmoActor actor)
    {
        return this.owner == actor;
    }

    /// <summary>
    /// Forwards the event to the owner's peer.
    /// </summary>
    /// <param name="eventData">The event data.</param>
    /// <param name="sendParameters">The send parameters.</param>
    /// <returns>Always true.</returns>
    public bool ReceiveEvent(EventData eventData, SendParameters sendParameters)
    {
        this.owner.Peer.SendEvent(eventData, sendParameters);
        return true;
    }

    /// <summary>
    /// Builds the snapshot sent on item subscribe, including <see cref="Rotation"/>
    /// and <see cref="Coordinate"/>.
    /// </summary>
    /// <returns>An <see cref="MmoItemSnapshot"/>.</returns>
    protected override ItemSnapshot GetItemSnapshot()
    {
        return new MmoItemSnapshot(this, this.Position, this.CurrentWorldRegion, this.PropertiesRevision, this.Rotation, this.Coordinate);
    }

    /// <summary>
    /// Builds the position update message, including <see cref="Coordinate"/>.
    /// </summary>
    /// <param name="position">The position.</param>
    /// <param name="region">The region.</param>
    /// <returns>An <see cref="MmoItemPositionUpdate"/>.</returns>
    protected override ItemPositionMessage GetPositionUpdateMessage(Vector position, Region region)
    {
        return new MmoItemPositionUpdate(this, position, region, this.Coordinate);
    }

    /// <summary>
    /// Publishes an <see cref="ItemDestroyed"/> event in the <see cref="Item.EventChannel"/>.
    /// </summary>
    protected override void OnDestroy()
    {
        var destroyedEvent = new ItemDestroyed { ItemId = this.Id, ItemType = this.Type };
        this.EventChannel.Publish(
            new ItemEventMessage(
                this,
                new EventData((byte)EventCode.ItemDestroyed, destroyedEvent),
                new SendParameters { ChannelId = Settings.ItemEventChannel }));
    }

    /// <summary>
    /// Shared implementation of <see cref="Move"/> and <see cref="Spawn"/>:
    /// stores the coordinate, updates the base position and refreshes interest management.
    /// </summary>
    /// <param name="coordinate">The new coordinate.</param>
    private void Relocate(float[] coordinate)
    {
        this.Coordinate = coordinate;
        this.Position = coordinate.ToVector();
        this.UpdateInterestManagement();
    }
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Cloud.DataCatalog.V1
{
/// <summary>Settings for <see cref="PolicyTagManagerSerializationClient"/> instances.</summary>
public sealed partial class PolicyTagManagerSerializationSettings : gaxgrpc::ServiceSettingsBase
{
/// <summary>Get a new instance of the default <see cref="PolicyTagManagerSerializationSettings"/>.</summary>
/// <returns>A new instance of the default <see cref="PolicyTagManagerSerializationSettings"/>.</returns>
public static PolicyTagManagerSerializationSettings GetDefault() => new PolicyTagManagerSerializationSettings();
/// <summary>
/// Constructs a new <see cref="PolicyTagManagerSerializationSettings"/> object with default settings.
/// </summary>
public PolicyTagManagerSerializationSettings()
{
}
// Copy constructor used by Clone(): copies every per-RPC CallSettings, then
// invokes the OnCopy partial hook so hand-written partial code can adjust the copy.
private PolicyTagManagerSerializationSettings(PolicyTagManagerSerializationSettings existing) : base(existing)
{
gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
ReplaceTaxonomySettings = existing.ReplaceTaxonomySettings;
ImportTaxonomiesSettings = existing.ImportTaxonomiesSettings;
ExportTaxonomiesSettings = existing.ExportTaxonomiesSettings;
OnCopy(existing);
}
// Partial hook invoked at the end of the copy constructor; optionally
// implemented in a hand-written part of this partial class (generated code).
partial void OnCopy(PolicyTagManagerSerializationSettings existing);
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>PolicyTagManagerSerializationClient.ReplaceTaxonomy</c> and
/// <c>PolicyTagManagerSerializationClient.ReplaceTaxonomyAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>This call will not be retried.</description></item>
/// <item><description>Timeout: 60 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings ReplaceTaxonomySettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(60000)));
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>PolicyTagManagerSerializationClient.ImportTaxonomies</c> and
/// <c>PolicyTagManagerSerializationClient.ImportTaxonomiesAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>This call will not be retried.</description></item>
/// <item><description>Timeout: 60 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings ImportTaxonomiesSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(60000)));
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>PolicyTagManagerSerializationClient.ExportTaxonomies</c> and
/// <c>PolicyTagManagerSerializationClient.ExportTaxonomiesAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>This call will not be retried.</description></item>
/// <item><description>Timeout: 60 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings ExportTaxonomiesSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(60000)));
/// <summary>Creates a deep clone of this object, with all the same property values.</summary>
/// <returns>A deep clone of this <see cref="PolicyTagManagerSerializationSettings"/> object.</returns>
public PolicyTagManagerSerializationSettings Clone() => new PolicyTagManagerSerializationSettings(this);
}
/// <summary>
/// Builder class for <see cref="PolicyTagManagerSerializationClient"/> to provide simple configuration of
/// credentials, endpoint etc.
/// </summary>
public sealed partial class PolicyTagManagerSerializationClientBuilder : gaxgrpc::ClientBuilderBase<PolicyTagManagerSerializationClient>
{
/// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
public PolicyTagManagerSerializationSettings Settings { get; set; }
/// <summary>Creates a new builder with default settings.</summary>
public PolicyTagManagerSerializationClientBuilder()
{
UseJwtAccessWithScopes = PolicyTagManagerSerializationClient.UseJwtAccessWithScopes;
}
// Partial hooks (generated code): a hand-written part of this partial class may
// supply or replace the built client before the default build paths run.
partial void InterceptBuild(ref PolicyTagManagerSerializationClient client);
partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<PolicyTagManagerSerializationClient> task);
/// <summary>Builds the resulting client.</summary>
public override PolicyTagManagerSerializationClient Build()
{
PolicyTagManagerSerializationClient client = null;
// If the InterceptBuild hook left client null, fall back to the default build.
InterceptBuild(ref client);
return client ?? BuildImpl();
}
/// <summary>Builds the resulting client asynchronously.</summary>
public override stt::Task<PolicyTagManagerSerializationClient> BuildAsync(st::CancellationToken cancellationToken = default)
{
stt::Task<PolicyTagManagerSerializationClient> task = null;
// If the InterceptBuildAsync hook left task null, fall back to the default build.
InterceptBuildAsync(cancellationToken, ref task);
return task ?? BuildAsyncImpl(cancellationToken);
}
// Default synchronous build: validate the builder, create the call invoker, create the client.
private PolicyTagManagerSerializationClient BuildImpl()
{
Validate();
grpccore::CallInvoker callInvoker = CreateCallInvoker();
return PolicyTagManagerSerializationClient.Create(callInvoker, Settings);
}
// Default asynchronous build: validate the builder, create the call invoker, create the client.
private async stt::Task<PolicyTagManagerSerializationClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
{
Validate();
grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
return PolicyTagManagerSerializationClient.Create(callInvoker, Settings);
}
/// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
protected override string GetDefaultEndpoint() => PolicyTagManagerSerializationClient.DefaultEndpoint;
/// <summary>
/// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
/// </summary>
protected override scg::IReadOnlyList<string> GetDefaultScopes() =>
PolicyTagManagerSerializationClient.DefaultScopes;
/// <summary>Returns the channel pool to use when no other options are specified.</summary>
protected override gaxgrpc::ChannelPool GetChannelPool() => PolicyTagManagerSerializationClient.ChannelPool;
/// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary>
protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
/// <summary>PolicyTagManagerSerialization client wrapper, for convenient use.</summary>
/// <remarks>
/// Policy Tag Manager Serialization API service allows you to manipulate
/// your policy tags and taxonomies in a serialized format.
///
/// Taxonomy is a hierarchical group of policy tags.
/// </remarks>
public abstract partial class PolicyTagManagerSerializationClient
{
/// <summary>
/// The default endpoint for the PolicyTagManagerSerialization service, which is a host of
/// "datacatalog.googleapis.com" and a port of 443.
/// </summary>
public static string DefaultEndpoint { get; } = "datacatalog.googleapis.com:443";
/// <summary>The default PolicyTagManagerSerialization scopes.</summary>
/// <remarks>
/// The default PolicyTagManagerSerialization scopes are:
/// <list type="bullet">
/// <item><description>https://www.googleapis.com/auth/cloud-platform</description></item>
/// </list>
/// </remarks>
public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
{
"https://www.googleapis.com/auth/cloud-platform",
});
internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
internal static bool UseJwtAccessWithScopes
{
get
{
bool useJwtAccessWithScopes = true;
MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
return useJwtAccessWithScopes;
}
}
static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
/// <summary>
/// Asynchronously creates a <see cref="PolicyTagManagerSerializationClient"/> using the default credentials,
/// endpoint and settings. To specify custom credentials or other settings, use
/// <see cref="PolicyTagManagerSerializationClientBuilder"/>.
/// </summary>
/// <param name="cancellationToken">
/// The <see cref="st::CancellationToken"/> to use while creating the client.
/// </param>
/// <returns>The task representing the created <see cref="PolicyTagManagerSerializationClient"/>.</returns>
public static stt::Task<PolicyTagManagerSerializationClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
new PolicyTagManagerSerializationClientBuilder().BuildAsync(cancellationToken);
/// <summary>
/// Synchronously creates a <see cref="PolicyTagManagerSerializationClient"/> using the default credentials,
/// endpoint and settings. To specify custom credentials or other settings, use
/// <see cref="PolicyTagManagerSerializationClientBuilder"/>.
/// </summary>
/// <returns>The created <see cref="PolicyTagManagerSerializationClient"/>.</returns>
public static PolicyTagManagerSerializationClient Create() =>
new PolicyTagManagerSerializationClientBuilder().Build();
/// <summary>
/// Creates a <see cref="PolicyTagManagerSerializationClient"/> which uses the specified call invoker for remote
/// operations.
/// </summary>
/// <param name="callInvoker">
/// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
/// </param>
/// <param name="settings">Optional <see cref="PolicyTagManagerSerializationSettings"/>.</param>
/// <returns>The created <see cref="PolicyTagManagerSerializationClient"/>.</returns>
internal static PolicyTagManagerSerializationClient Create(grpccore::CallInvoker callInvoker, PolicyTagManagerSerializationSettings settings = null)
{
gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
grpcinter::Interceptor interceptor = settings?.Interceptor;
if (interceptor != null)
{
callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
}
PolicyTagManagerSerialization.PolicyTagManagerSerializationClient grpcClient = new PolicyTagManagerSerialization.PolicyTagManagerSerializationClient(callInvoker);
return new PolicyTagManagerSerializationClientImpl(grpcClient, settings);
}
/// <summary>
/// Shuts down any channels automatically created by <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
/// affected.
/// </summary>
/// <remarks>
/// After calling this method, further calls to <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
/// by another call to this method.
/// </remarks>
/// <returns>A task representing the asynchronous shutdown operation.</returns>
public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
/// <summary>The underlying gRPC PolicyTagManagerSerialization client</summary>
public virtual PolicyTagManagerSerialization.PolicyTagManagerSerializationClient GrpcClient => throw new sys::NotImplementedException();
/// <summary>
/// Replaces (updates) a taxonomy and all its policy tags.
///
/// The taxonomy and its entire hierarchy of policy tags must be
/// represented literally by `SerializedTaxonomy` and the nested
/// `SerializedPolicyTag` messages.
///
/// This operation automatically does the following:
///
/// - Deletes the existing policy tags that are missing from the
/// `SerializedPolicyTag`.
/// - Creates policy tags that don't have resource names. They are considered
/// new.
/// - Updates policy tags with valid resources names accordingly.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual Taxonomy ReplaceTaxonomy(ReplaceTaxonomyRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Replaces (updates) a taxonomy and all its policy tags.
///
/// The taxonomy and its entire hierarchy of policy tags must be
/// represented literally by `SerializedTaxonomy` and the nested
/// `SerializedPolicyTag` messages.
///
/// This operation automatically does the following:
///
/// - Deletes the existing policy tags that are missing from the
/// `SerializedPolicyTag`.
/// - Creates policy tags that don't have resource names. They are considered
/// new.
/// - Updates policy tags with valid resources names accordingly.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<Taxonomy> ReplaceTaxonomyAsync(ReplaceTaxonomyRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Replaces (updates) a taxonomy and all its policy tags.
///
/// The taxonomy and its entire hierarchy of policy tags must be
/// represented literally by `SerializedTaxonomy` and the nested
/// `SerializedPolicyTag` messages.
///
/// This operation automatically does the following:
///
/// - Deletes the existing policy tags that are missing from the
/// `SerializedPolicyTag`.
/// - Creates policy tags that don't have resource names. They are considered
/// new.
/// - Updates policy tags with valid resources names accordingly.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<Taxonomy> ReplaceTaxonomyAsync(ReplaceTaxonomyRequest request, st::CancellationToken cancellationToken) =>
ReplaceTaxonomyAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Creates new taxonomies (including their policy tags) in a given project
/// by importing from inlined or cross-regional sources.
///
/// For a cross-regional source, new taxonomies are created by copying
/// from a source in another region.
///
/// For an inlined source, taxonomies and policy tags are created in bulk using
/// nested protocol buffer structures.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual ImportTaxonomiesResponse ImportTaxonomies(ImportTaxonomiesRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Creates new taxonomies (including their policy tags) in a given project
/// by importing from inlined or cross-regional sources.
///
/// For a cross-regional source, new taxonomies are created by copying
/// from a source in another region.
///
/// For an inlined source, taxonomies and policy tags are created in bulk using
/// nested protocol buffer structures.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ImportTaxonomiesResponse> ImportTaxonomiesAsync(ImportTaxonomiesRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Creates new taxonomies (including their policy tags) in a given project
/// by importing from inlined or cross-regional sources.
///
/// For a cross-regional source, new taxonomies are created by copying
/// from a source in another region.
///
/// For an inlined source, taxonomies and policy tags are created in bulk using
/// nested protocol buffer structures.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ImportTaxonomiesResponse> ImportTaxonomiesAsync(ImportTaxonomiesRequest request, st::CancellationToken cancellationToken) =>
ImportTaxonomiesAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Exports taxonomies in the requested type and returns them, including their
/// policy tags. All requested taxonomies must belong to the same project. The
/// generated `SerializedTaxonomy` messages can be fed back into `ImportTaxonomies`.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual ExportTaxonomiesResponse ExportTaxonomies(ExportTaxonomiesRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Base implementation; concrete client implementations override this with the real RPC.
    throw new sys::NotImplementedException();
}
/// <summary>
/// Exports taxonomies in the requested type and returns them, including their
/// policy tags. All requested taxonomies must belong to the same project. The
/// generated `SerializedTaxonomy` messages can be fed back into `ImportTaxonomies`.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ExportTaxonomiesResponse> ExportTaxonomiesAsync(ExportTaxonomiesRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Base implementation; concrete client implementations override this with the real RPC.
    throw new sys::NotImplementedException();
}
/// <summary>
/// Exports taxonomies in the requested type and returns them, including their
/// policy tags. All requested taxonomies must belong to the same project. The
/// generated `SerializedTaxonomy` messages can be fed back into `ImportTaxonomies`.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ExportTaxonomiesResponse> ExportTaxonomiesAsync(ExportTaxonomiesRequest request, st::CancellationToken cancellationToken)
{
    // Convenience overload: wrap the token in CallSettings and delegate.
    return ExportTaxonomiesAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}
}
/// <summary>PolicyTagManagerSerialization client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Policy Tag Manager Serialization API service allows you to manipulate
/// your policy tags and taxonomies in a serialized format.
///
/// Taxonomy is a hierarchical group of policy tags.
/// </remarks>
public sealed partial class PolicyTagManagerSerializationClientImpl : PolicyTagManagerSerializationClient
{
    private readonly gaxgrpc::ApiCall<ReplaceTaxonomyRequest, Taxonomy> _callReplaceTaxonomy;

    private readonly gaxgrpc::ApiCall<ImportTaxonomiesRequest, ImportTaxonomiesResponse> _callImportTaxonomies;

    private readonly gaxgrpc::ApiCall<ExportTaxonomiesRequest, ExportTaxonomiesResponse> _callExportTaxonomies;

    /// <summary>
    /// Constructs a client wrapper for the PolicyTagManagerSerialization service, with the specified gRPC client
    /// and settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">
    /// The base <see cref="PolicyTagManagerSerializationSettings"/> used within this client.
    /// </param>
    public PolicyTagManagerSerializationClientImpl(PolicyTagManagerSerialization.PolicyTagManagerSerializationClient grpcClient, PolicyTagManagerSerializationSettings settings)
    {
        GrpcClient = grpcClient;
        // Fall back to the default settings when the caller supplied none.
        PolicyTagManagerSerializationSettings effective = settings ?? PolicyTagManagerSerializationSettings.GetDefault();
        gaxgrpc::ClientHelper helper = new gaxgrpc::ClientHelper(effective);
        // Each ApiCall is built from its async/sync gRPC pair, then passed through the
        // generic and per-method partial hooks so handwritten partial-class code can
        // customize it before first use.
        _callReplaceTaxonomy = helper.BuildApiCall<ReplaceTaxonomyRequest, Taxonomy>(grpcClient.ReplaceTaxonomyAsync, grpcClient.ReplaceTaxonomy, effective.ReplaceTaxonomySettings).WithGoogleRequestParam("name", request => request.Name);
        Modify_ApiCall(ref _callReplaceTaxonomy);
        Modify_ReplaceTaxonomyApiCall(ref _callReplaceTaxonomy);
        _callImportTaxonomies = helper.BuildApiCall<ImportTaxonomiesRequest, ImportTaxonomiesResponse>(grpcClient.ImportTaxonomiesAsync, grpcClient.ImportTaxonomies, effective.ImportTaxonomiesSettings).WithGoogleRequestParam("parent", request => request.Parent);
        Modify_ApiCall(ref _callImportTaxonomies);
        Modify_ImportTaxonomiesApiCall(ref _callImportTaxonomies);
        _callExportTaxonomies = helper.BuildApiCall<ExportTaxonomiesRequest, ExportTaxonomiesResponse>(grpcClient.ExportTaxonomiesAsync, grpcClient.ExportTaxonomies, effective.ExportTaxonomiesSettings).WithGoogleRequestParam("parent", request => request.Parent);
        Modify_ApiCall(ref _callExportTaxonomies);
        Modify_ExportTaxonomiesApiCall(ref _callExportTaxonomies);
        OnConstruction(grpcClient, effective, helper);
    }

    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;

    partial void Modify_ReplaceTaxonomyApiCall(ref gaxgrpc::ApiCall<ReplaceTaxonomyRequest, Taxonomy> call);

    partial void Modify_ImportTaxonomiesApiCall(ref gaxgrpc::ApiCall<ImportTaxonomiesRequest, ImportTaxonomiesResponse> call);

    partial void Modify_ExportTaxonomiesApiCall(ref gaxgrpc::ApiCall<ExportTaxonomiesRequest, ExportTaxonomiesResponse> call);

    partial void OnConstruction(PolicyTagManagerSerialization.PolicyTagManagerSerializationClient grpcClient, PolicyTagManagerSerializationSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);

    /// <summary>The underlying gRPC PolicyTagManagerSerialization client</summary>
    public override PolicyTagManagerSerialization.PolicyTagManagerSerializationClient GrpcClient { get; }

    partial void Modify_ReplaceTaxonomyRequest(ref ReplaceTaxonomyRequest request, ref gaxgrpc::CallSettings settings);

    partial void Modify_ImportTaxonomiesRequest(ref ImportTaxonomiesRequest request, ref gaxgrpc::CallSettings settings);

    partial void Modify_ExportTaxonomiesRequest(ref ExportTaxonomiesRequest request, ref gaxgrpc::CallSettings settings);

    /// <summary>
    /// Replaces (updates) a taxonomy and all its policy tags from the literal
    /// `SerializedTaxonomy`/`SerializedPolicyTag` representation: policy tags
    /// missing from the input are deleted, tags without resource names are
    /// created, and tags with valid resource names are updated.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override Taxonomy ReplaceTaxonomy(ReplaceTaxonomyRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_ReplaceTaxonomyRequest(ref request, ref callSettings);
        return _callReplaceTaxonomy.Sync(request, callSettings);
    }

    /// <summary>
    /// Replaces (updates) a taxonomy and all its policy tags from the literal
    /// `SerializedTaxonomy`/`SerializedPolicyTag` representation: policy tags
    /// missing from the input are deleted, tags without resource names are
    /// created, and tags with valid resource names are updated.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<Taxonomy> ReplaceTaxonomyAsync(ReplaceTaxonomyRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_ReplaceTaxonomyRequest(ref request, ref callSettings);
        return _callReplaceTaxonomy.Async(request, callSettings);
    }

    /// <summary>
    /// Creates new taxonomies (including their policy tags) in a given project by
    /// importing from inlined or cross-regional sources. A cross-regional source
    /// is copied from another region; an inlined source creates taxonomies and
    /// policy tags in bulk from nested protocol buffer structures.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override ImportTaxonomiesResponse ImportTaxonomies(ImportTaxonomiesRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_ImportTaxonomiesRequest(ref request, ref callSettings);
        return _callImportTaxonomies.Sync(request, callSettings);
    }

    /// <summary>
    /// Creates new taxonomies (including their policy tags) in a given project by
    /// importing from inlined or cross-regional sources. A cross-regional source
    /// is copied from another region; an inlined source creates taxonomies and
    /// policy tags in bulk from nested protocol buffer structures.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<ImportTaxonomiesResponse> ImportTaxonomiesAsync(ImportTaxonomiesRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_ImportTaxonomiesRequest(ref request, ref callSettings);
        return _callImportTaxonomies.Async(request, callSettings);
    }

    /// <summary>
    /// Exports taxonomies in the requested type and returns them, including their
    /// policy tags. All requested taxonomies must belong to the same project. The
    /// generated `SerializedTaxonomy` messages can be fed back into `ImportTaxonomies`.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override ExportTaxonomiesResponse ExportTaxonomies(ExportTaxonomiesRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_ExportTaxonomiesRequest(ref request, ref callSettings);
        return _callExportTaxonomies.Sync(request, callSettings);
    }

    /// <summary>
    /// Exports taxonomies in the requested type and returns them, including their
    /// policy tags. All requested taxonomies must belong to the same project. The
    /// generated `SerializedTaxonomy` messages can be fed back into `ImportTaxonomies`.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<ExportTaxonomiesResponse> ExportTaxonomiesAsync(ExportTaxonomiesRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_ExportTaxonomiesRequest(ref request, ref callSettings);
        return _callExportTaxonomies.Async(request, callSettings);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Moq;
using RestSharp;
using Xunit;
namespace Recurly.Tests
{
/// <summary>
/// Unit tests for the Recurly base client: initialization, resource
/// fetch/create, query/header building, URL encoding, and error mapping.
/// </summary>
public class BaseClientTest
{
    public BaseClientTest() { }

    [Fact]
    public void CantInitializeWithoutApiKey()
    {
        Assert.Throws<ArgumentException>(() => new MockClient(null));
        Assert.Throws<ArgumentException>(() => new MockClient(""));
    }

    /*
     * Assert that Timeout can be set and retrieved. Take on faith for now
     * that RestSharp timeouts are well-behaved.
     */
    [Fact]
    public void CanInitializeWithATimeout()
    {
        var client = new Recurly.Client("myapikey") { Timeout = 124 };
        Assert.Equal(124, client.Timeout);
    }

    [Fact]
    public void RespondsWithGivenApiVersion()
    {
        var client = new MockClient("myapikey");
        Assert.Equal("v2018-08-09", client.ApiVersion);
    }

    [Fact]
    public void CanProperlyFetchAResource()
    {
        var client = MockClient.Build(SuccessResponse(System.Net.HttpStatusCode.OK));
        MyResource resource = client.GetResource("benjamin", "param1", new DateTime(2020, 01, 01));
        Assert.Equal("benjamin", resource.MyString);
    }

    // FIX: async test methods must return Task, not void. With `async void`,
    // xUnit cannot await the test, so failures/exceptions are unobservable and
    // can crash the test host instead of failing the test.
    [Fact]
    public async Task CanProperlyFetchAResourceAsync()
    {
        var client = MockClient.Build(SuccessResponse(System.Net.HttpStatusCode.OK));
        MyResource resource = await client.GetResourceAsync("benjamin", "param1", new DateTime(2020, 01, 01));
        Assert.Equal("benjamin", resource.MyString);
    }

    [Fact]
    public void WillPopulateResponseOnResource()
    {
        var client = MockClient.Build(SuccessResponse(System.Net.HttpStatusCode.OK));
        MyResource resource = client.GetResource("benjamin", "param1", new DateTime(2020, 01, 01));
        Assert.Equal(System.Net.HttpStatusCode.OK, resource.GetResponse().StatusCode);
        Assert.Empty(resource.GetResponse().Headers);
        Assert.Equal("{\"my_string\": \"benjamin\"}", resource.GetResponse().RawResponse);
    }

    [Fact]
    public void CanProperlyCreateAResource()
    {
        var client = MockClient.Build(SuccessResponse(System.Net.HttpStatusCode.Created));
        var request = new MyResourceCreate()
        {
            MyString = "benjamin"
        };
        MyResource resource = client.CreateResource(request);
        Assert.Equal("benjamin", resource.MyString);
    }

    [Fact]
    public void WillAddQueryStringParameters()
    {
        var options = new RequestOptions();
        options.AddHeader("Accept-Language", "en-US");
        var date = new DateTime(2020, 01, 01);
        // The matcher verifies that both path params end up in the query string,
        // with the DateTime serialized as ISO-8601.
        var paramsMatcher = MockClient.QueryParameterMatcher(new Dictionary<string, object> {
            { "param_1", "param1" },
            { "param_2", Recurly.Utils.ISO8601(date) },
        });
        var client = MockClient.Build(paramsMatcher, SuccessResponse(System.Net.HttpStatusCode.OK));
        MyResource resource = client.GetResource("benjamin", "param1", date, options);
        Assert.Equal("benjamin", resource.MyString);
    }

    [Fact]
    public void WillValidatePathParams()
    {
        var client = MockClient.Build(SuccessResponse(System.Net.HttpStatusCode.OK));
        // FIX: removed an unused successful GetResource call and its dead local;
        // the test only asserts that an empty path segment is rejected.
        Assert.Throws<Recurly.RecurlyError>(() => client.GetResource("", "param1", new DateTime(2020, 01, 01)));
    }

    [Fact]
    public void WillIncludeCustomHeaders()
    {
        var options = new RequestOptions();
        options.AddHeader("Accept-Language", "en-US");
        var matcher = MockClient.HeaderMatcher(new Dictionary<string, object> {
            { "Accept-Language", "en-US" },
        });
        var client = MockClient.Build(matcher, NotFoundResponse());
        Assert.Throws<Recurly.Errors.NotFound>(() => client.GetResource("douglas/", "param1", new DateTime(2020, 01, 01), options));
    }

    [Fact]
    public void WillEncodeForwardSlashesInURL()
    {
        Func<IRestRequest, bool> matcher = delegate (IRestRequest request)
        {
            Assert.Equal("/my_resources/douglas%2F", request.Resource);
            return true;
        };
        var client = MockClient.Build(matcher, NotFoundResponse());
        Assert.Throws<Recurly.Errors.NotFound>(() => client.GetResource("douglas/", "param1", new DateTime(2020, 01, 01)));
    }

    [Fact]
    public void WillThrowNotFoundExceptionForNon200()
    {
        var client = MockClient.Build(NotFoundResponse());
        Assert.Throws<Recurly.Errors.NotFound>(() => client.GetResource("benjamin", "param1", new DateTime(2020, 01, 01)));
    }

    [Fact]
    public void WillThrowARecurlyErrorForUnknownErrors()
    {
        // A status code outside any known range maps to the generic RecurlyError.
        var client = MockClient.Build(ErrorResponse((System.Net.HttpStatusCode)999));
        Assert.Throws<Recurly.RecurlyError>(() => client.GetResource("benjamin", "param1", new DateTime(2020, 01, 01)));
    }

    [Fact]
    public void WillThrowAnApiErrorForUnknownErrorType()
    {
        var client = MockClient.Build(UnknownTypeResponse());
        // Instead of disabling strict mode, test with ArgumentException as proxy
        var exception = Assert.Throws<System.ArgumentException>(() => client.GetResource("benjamin", "param1", new DateTime(2020, 01, 01)));
        Assert.Matches("no valid exception class", exception.Message);
    }

    [Fact]
    public void WillThrowABadRequestError()
    {
        var client = MockClient.Build(ErrorResponse(System.Net.HttpStatusCode.BadRequest));
        Assert.Throws<Recurly.Errors.BadRequest>(() => client.GetResource("benjamin", "param1", new DateTime(2020, 01, 01)));
    }

    [Fact]
    public void WillTriggerHookIfAvailable()
    {
        var client = MockClient.Build(SuccessResponse(System.Net.HttpStatusCode.OK));
        var mockHandler = new Mock<IEventHandler>();
        mockHandler
            .Setup(x => x.OnRequest(It.IsAny<Recurly.Http.Request>()));
        mockHandler
            .Setup(x => x.OnResponse(It.IsAny<Recurly.Http.Response>()));
        client.AddEventHandler(mockHandler.Object);
        MyResource resource = client.GetResource("benjamin", "param1", new DateTime());
        Assert.Equal("benjamin", resource.MyString);
    }

    // Builds a mocked 2xx response whose body deserializes to a MyResource.
    private Mock<IRestResponse<MyResource>> SuccessResponse(System.Net.HttpStatusCode status)
    {
        var data = new MyResource()
        {
            MyString = "benjamin"
        };
        var response = new Mock<IRestResponse<MyResource>>();
        response.Setup(_ => _.StatusCode).Returns(status);
        response.Setup(_ => _.Content).Returns("{\"my_string\": \"benjamin\"}");
        response.Setup(_ => _.Headers).Returns(new List<Parameter> { });
        response.Setup(_ => _.Data).Returns(data);
        return response;
    }

    // Builds a mocked non-JSON error response (e.g. an HTML gateway error page).
    private Mock<IRestResponse<MyResource>> ErrorResponse(System.Net.HttpStatusCode statusCode)
    {
        var response = new Mock<IRestResponse<MyResource>>();
        response.Setup(_ => _.StatusCode).Returns(statusCode);
        response.Setup(_ => _.Content).Returns("<html>parsing error</html>");
        response.Setup(_ => _.Headers).Returns(new List<Parameter> { });
        response.Setup(_ => _.ContentType).Returns("text/html");
        response.Setup(_ => _.ErrorException).Returns(new Exception("parsing error"));
        response.Setup(_ => _.ErrorMessage).Returns("parsing error");
        return response;
    }

    // Currently unused; kept for tests exercising invalid content-type handling.
    private Mock<IRestResponse<MyResource>> InvalidContentTypeResponse()
    {
        var response = new Mock<IRestResponse<MyResource>>();
        response.Setup(_ => _.StatusCode).Returns(System.Net.HttpStatusCode.BadRequest);
        response.Setup(_ => _.Content).Returns("{\"error\":{ \"type\": \"invalid_content_type\", \"message\": \"MyResource not found\"}}");
        response.Setup(_ => _.Headers).Returns(new List<Parameter> { });
        return response;
    }

    // Builds a mocked 404 with a well-formed Recurly "not_found" error body.
    private Mock<IRestResponse<MyResource>> NotFoundResponse()
    {
        var response = new Mock<IRestResponse<MyResource>>();
        response.Setup(_ => _.StatusCode).Returns(System.Net.HttpStatusCode.NotFound);
        response.Setup(_ => _.Content).Returns("{\"error\":{ \"type\": \"not_found\", \"message\": \"MyResource not found\"}}");
        response.Setup(_ => _.Headers).Returns(new List<Parameter> { });
        return response;
    }

    // Builds a mocked 400 whose error "type" is not in the client's spec.
    private Mock<IRestResponse<MyResource>> UnknownTypeResponse()
    {
        var response = new Mock<IRestResponse<MyResource>>();
        response.Setup(_ => _.StatusCode).Returns(System.Net.HttpStatusCode.BadRequest);
        response.Setup(_ => _.Content).Returns("{\"error\":{ \"type\": \"not_in_spec\", \"message\": \"MyResource not found\"}}");
        response.Setup(_ => _.Headers).Returns(new List<Parameter> { });
        return response;
    }
}
}
| |
//------------------------------------------------------------------------------
// <license file="NativeString.cs">
//
// The use and distribution terms for this software are contained in the file
// named 'LICENSE', which can be found in the resources directory of this
// distribution.
//
// By using this software in any fashion, you are agreeing to be bound by the
// terms of this license.
//
// </license>
//------------------------------------------------------------------------------
using System;
namespace EcmaScript.NET.Types
{
/// <summary>
/// This class implements the String native object.
///
/// See ECMA 15.5.
///
/// String methods for dealing with regular expressions are
/// ported directly from C. Latest port is from version 1.40.12.19
/// in the JSFUN13_BRANCH.
///
/// </summary>
internal sealed class BuiltinString : IdScriptableObject
{
/// <summary>The ECMAScript [[Class]] name, always "String".</summary>
public override string ClassName
{
    get
    {
        return "String";
    }
}
/// <summary>
/// Highest instance-property id used by this type; "length" (id 1) is the only one.
/// </summary>
override protected internal int MaxInstanceId
{
    get
    {
        return MAX_INSTANCE_ID;
    }
}
/// <summary>Length of the wrapped string value, backing the JS "length" property.</summary>
internal int Length
{
    get
    {
        return m_Value.Length;
    }
}
private static readonly object STRING_TAG = new object ();
/// <summary>
/// Installs the String constructor and prototype on the given scope
/// (prototype properties are DONTENUM | READONLY | PERMANENT).
/// </summary>
internal static void Init (IScriptable scope, bool zealed)
{
    // The prototype object wraps the empty string.
    BuiltinString prototype = new BuiltinString ("");
    prototype.ExportAsJSClass (MAX_PROTOTYPE_ID, scope, zealed,
        ScriptableObject.DONTENUM | ScriptableObject.READONLY | ScriptableObject.PERMANENT);
}
// Wraps the given CLR string as a JS String object.
private BuiltinString (string s)
{
    m_Value = s;
}
private const int Id_length = 1;
private const int MAX_INSTANCE_ID = 1;
/// <summary>
/// Maps the instance property name "length" to its id/attribute info;
/// all other names defer to the base class.
/// </summary>
protected internal override int FindInstanceIdInfo (string s)
{
    return s.Equals ("length")
        ? InstanceIdInfo (DONTENUM | READONLY | PERMANENT, Id_length)
        : base.FindInstanceIdInfo (s);
}
/// <summary>Reverse mapping of instance id to property name ("length" for Id_length).</summary>
protected internal override string GetInstanceIdName (int id)
{
    return id == Id_length ? "length" : base.GetInstanceIdName (id);
}
/// <summary>Returns the value of an instance property by id; "length" is computed here.</summary>
protected internal override object GetInstanceIdValue (int id)
{
    if (id != Id_length)
        return base.GetInstanceIdValue (id);
    return m_Value.Length;
}
/// <summary>
/// Adds constructor-level functions; String.fromCharCode (arity 1) is the only extra.
/// </summary>
protected internal override void FillConstructorProperties (IdFunctionObject ctor)
{
    AddIdFunctionProperty (ctor, STRING_TAG, ConstructorId_fromCharCode, "fromCharCode", 1);
    base.FillConstructorProperties (ctor);
}
/// <summary>
/// Registers a prototype method by id: maps the id to its JS name and arity and
/// installs it via InitPrototypeMethod. Throws ArgumentException for unknown ids.
/// </summary>
protected internal override void InitPrototypeId (int id)
{
    string s;
    int arity;
    switch (id) {
        case Id_constructor:
            arity = 1;
            s = "constructor";
            break;

        case Id_toString:
            arity = 0;
            s = "toString";
            break;

        case Id_toSource:
            arity = 0;
            s = "toSource";
            break;

        case Id_valueOf:
            arity = 0;
            s = "valueOf";
            break;

        case Id_charAt:
            arity = 1;
            s = "charAt";
            break;

        case Id_charCodeAt:
            arity = 1;
            s = "charCodeAt";
            break;

        case Id_indexOf:
            arity = 1;
            s = "indexOf";
            break;

        case Id_lastIndexOf:
            arity = 1;
            s = "lastIndexOf";
            break;

        case Id_split:
            arity = 2;
            s = "split";
            break;

        case Id_substring:
            arity = 2;
            s = "substring";
            break;

        case Id_toLowerCase:
            arity = 0;
            s = "toLowerCase";
            break;

        case Id_toUpperCase:
            arity = 0;
            s = "toUpperCase";
            break;

        case Id_substr:
            arity = 2;
            s = "substr";
            break;

        case Id_concat:
            arity = 1;
            s = "concat";
            break;

        case Id_slice:
            arity = 2;
            s = "slice";
            break;

        // HTML wrapper helpers (legacy, non-ECMA but widely implemented).
        case Id_bold:
            arity = 0;
            s = "bold";
            break;

        case Id_italics:
            arity = 0;
            s = "italics";
            break;

        case Id_fixed:
            arity = 0;
            s = "fixed";
            break;

        case Id_strike:
            arity = 0;
            s = "strike";
            break;

        case Id_small:
            arity = 0;
            s = "small";
            break;

        case Id_big:
            arity = 0;
            s = "big";
            break;

        case Id_blink:
            arity = 0;
            s = "blink";
            break;

        case Id_sup:
            arity = 0;
            s = "sup";
            break;

        case Id_sub:
            arity = 0;
            s = "sub";
            break;

        case Id_fontsize:
            arity = 0;
            s = "fontsize";
            break;

        case Id_fontcolor:
            arity = 0;
            s = "fontcolor";
            break;

        case Id_link:
            arity = 0;
            s = "link";
            break;

        case Id_anchor:
            arity = 0;
            s = "anchor";
            break;

        case Id_equals:
            arity = 1;
            s = "equals";
            break;

        case Id_equalsIgnoreCase:
            arity = 1;
            s = "equalsIgnoreCase";
            break;

        case Id_match:
            arity = 1;
            s = "match";
            break;

        case Id_search:
            arity = 1;
            s = "search";
            break;

        case Id_replace:
            arity = 1;
            s = "replace";
            break;

        default:
            throw new ArgumentException (Convert.ToString (id));
    }
    InitPrototypeMethod (STRING_TAG, id, s, arity);
}
/// <summary>
/// Dispatches calls to String constructor/prototype methods by method id.
/// Methods not tagged with STRING_TAG are forwarded to the base class.
/// Throws ArgumentException for ids this switch does not cover.
/// </summary>
public override object ExecIdCall (IdFunctionObject f, Context cx, IScriptable scope, IScriptable thisObj, object [] args)
{
    if (!f.HasTag (STRING_TAG)) {
        return base.ExecIdCall (f, cx, scope, thisObj, args);
    }
    int id = f.MethodId;
    switch (id) {

        case ConstructorId_fromCharCode: {
                int N = args.Length;
                if (N < 1)
                    return "";
                System.Text.StringBuilder sb = new System.Text.StringBuilder (N);
                for (int i = 0; i != N; ++i) {
                    // NOTE(review): assumes ScriptConvert.ToUint16 yields a char-compatible
                    // value; if it returns a numeric type, Append would write digits
                    // instead of the character — verify against ScriptConvert.
                    sb.Append (ScriptConvert.ToUint16 (args [i]));
                }
                return sb.ToString ();
            }

        case Id_constructor: {
                string s = (args.Length >= 1) ? ScriptConvert.ToString (args [0]) : "";
                if (thisObj == null) {
                    // new String(val) creates a new String object.
                    return new BuiltinString (s);
                }
                // String(val) converts val to a string value.
                return s;
            }

        case Id_toString:
        case Id_valueOf:
            // ECMA 15.5.4.2: 'the toString function is not generic.
            return RealThis (thisObj, f).m_Value;

        case Id_toSource: {
                string s = RealThis (thisObj, f).m_Value;
                return "(new String(\"" + ScriptRuntime.escapeString (s) + "\"))";
            }

        case Id_charAt:
        case Id_charCodeAt: {
                // See ECMA 15.5.4.[4,5]
                string target = ScriptConvert.ToString (thisObj);
                double pos = ScriptConvert.ToInteger (args, 0);
                // Out-of-range position: charAt yields "", charCodeAt yields NaN.
                if (pos < 0 || pos >= target.Length) {
                    if (id == Id_charAt)
                        return "";
                    else
                        return double.NaN;
                }
                char c = target [(int)pos];
                if (id == Id_charAt)
                    return Convert.ToString (c);
                else
                    return (int)c;
            }

        case Id_indexOf:
            return js_indexOf (ScriptConvert.ToString (thisObj), args);

        case Id_lastIndexOf:
            return js_lastIndexOf (ScriptConvert.ToString (thisObj), args);

        case Id_split:
            return ImplSplit (cx, scope, ScriptConvert.ToString (thisObj), args);

        case Id_substring:
            return js_substring (cx, ScriptConvert.ToString (thisObj), args);

        case Id_toLowerCase:
            // See ECMA 15.5.4.11
            return ScriptConvert.ToString (thisObj).ToLower ();

        case Id_toUpperCase:
            // See ECMA 15.5.4.12
            return ScriptConvert.ToString (thisObj).ToUpper ();

        case Id_substr:
            return js_substr (ScriptConvert.ToString (thisObj), args);

        case Id_concat:
            return js_concat (ScriptConvert.ToString (thisObj), args);

        case Id_slice:
            return js_slice (ScriptConvert.ToString (thisObj), args);

        // Legacy HTML wrapper methods delegate to Tagify.
        case Id_bold:
            return Tagify (thisObj, "b", null, null);

        case Id_italics:
            return Tagify (thisObj, "i", null, null);

        case Id_fixed:
            return Tagify (thisObj, "tt", null, null);

        case Id_strike:
            return Tagify (thisObj, "strike", null, null);

        case Id_small:
            return Tagify (thisObj, "small", null, null);

        case Id_big:
            return Tagify (thisObj, "big", null, null);

        case Id_blink:
            return Tagify (thisObj, "blink", null, null);

        case Id_sup:
            return Tagify (thisObj, "sup", null, null);

        case Id_sub:
            return Tagify (thisObj, "sub", null, null);

        case Id_fontsize:
            return Tagify (thisObj, "font", "size", args);

        case Id_fontcolor:
            return Tagify (thisObj, "font", "color", args);

        case Id_link:
            return Tagify (thisObj, "a", "href", args);

        case Id_anchor:
            return Tagify (thisObj, "a", "name", args);

        case Id_equals:
        case Id_equalsIgnoreCase: {
                string s1 = ScriptConvert.ToString (thisObj);
                string s2 = ScriptConvert.ToString (args, 0);
                return (id == Id_equals) ? s1.Equals (s2) : s1.ToUpper ().Equals (s2.ToUpper ());
            }

        case Id_match:
        case Id_search:
        case Id_replace: {
                // Regular-expression methods are delegated to the regexp proxy.
                RegExpActions actionType;
                if (id == Id_match) {
                    actionType = EcmaScript.NET.RegExpActions.Match;
                }
                else if (id == Id_search) {
                    actionType = EcmaScript.NET.RegExpActions.Search;
                }
                else {
                    actionType = EcmaScript.NET.RegExpActions.Replace;
                }
                return cx.regExpProxy.Perform (cx, scope, thisObj, args, actionType);
            }
    }
    throw new ArgumentException (Convert.ToString (id));
}
/// <summary>
/// Narrows |this| to a BuiltinString for non-generic methods (e.g. toString),
/// throwing the standard incompatible-call error otherwise.
/// </summary>
private static BuiltinString RealThis (IScriptable thisObj, IdFunctionObject f)
{
    BuiltinString str = thisObj as BuiltinString;
    if (str == null)
        throw IncompatibleCallError (f);
    return str;
}
/// <summary>
/// HTML composition aid: wraps the string value of thisObj in &lt;tag&gt;...&lt;/tag&gt;,
/// optionally with a single attribute whose value comes from args[0].
/// </summary>
private static string Tagify (object thisObj, string tag, string attribute, object [] args)
{
    // Convert |this| first so any conversion side effects happen before args.
    string body = ScriptConvert.ToString (thisObj);
    string attrPart = "";
    if (attribute != null) {
        attrPart = " " + attribute + "=\"" + ScriptConvert.ToString (args, 0) + "\"";
    }
    return "<" + tag + attrPart + ">" + body + "</" + tag + ">";
}
// Returns the wrapped CLR string value.
public override string ToString ()
{
    return m_Value;
}
/// <summary>
/// Make array-style property lookup work for strings.
///
/// TODO: is this ECMA? A version check is probably needed. In js too.
/// </summary>
/// <param name="index">Zero-based character index being looked up.</param>
/// <param name="start">The object the lookup started on.</param>
/// <returns>A one-character string for an in-range index; otherwise the base lookup result.</returns>
public override object Get (int index, IScriptable start)
{
    if (0 <= index && index < m_Value.Length) {
        // FIX: simplified from Substring(index, (index + 1) - (index)) —
        // the length expression was a convoluted constant 1.
        return m_Value.Substring (index, 1);
    }
    return base.Get (index, start);
}
/// <summary>
/// Indexed positions of a string are read-only: in-range writes are silently
/// ignored (Undefined), anything else defers to the base class.
/// </summary>
public override object Put (int index, IScriptable start, object value)
{
    if (index < 0 || index >= m_Value.Length)
        return base.Put (index, start, value);
    return Undefined.Value;
}
/*
 * See ECMA 15.5.4.6. Uses String.IndexOf.
 * OPT to add - BMH searching from jsstr.c.
 */
private static int js_indexOf (string target, object [] args)
{
    string pattern = ScriptConvert.ToString (args, 0);
    double start = ScriptConvert.ToInteger (args, 1);
    // A start position past the end can never match.
    if (start > target.Length)
        return -1;
    if (start < 0)
        start = 0;
    return target.IndexOf (pattern, (int)start);
}
/*
 * See ECMA 15.5.4.7
 */
private static int js_lastIndexOf (string target, object [] args)
{
    string search = ScriptConvert.ToString (args, 0);
    double rawEnd = ScriptConvert.ToNumber (args, 1);
    // NaN or anything past the end searches the whole string; negatives clamp to 0.
    int fromIndex;
    if (double.IsNaN (rawEnd) || rawEnd > target.Length)
        fromIndex = target.Length;
    else
        fromIndex = rawEnd < 0 ? 0 : (int)rawEnd;
    return lastIndexOf (target.ToCharArray (), 0, target.Length,
        search.ToCharArray (), 0, search.Length, fromIndex);
}
/// <summary>
/// Backward substring search, ported from Java's String.lastIndexOf:
/// returns the index (relative to sourceOffset) of the last occurrence of the
/// target region within the source region starting at or before fromIndex,
/// or -1 when there is none. An empty target matches at the (clamped) fromIndex.
/// FIX (idiom): restructured to remove the goto-label control flow of the
/// original port; behavior is unchanged.
/// </summary>
static int lastIndexOf (char [] source, int sourceOffset, int sourceCount,
    char [] target, int targetOffset, int targetCount,
    int fromIndex)
{
    /*
     * Check arguments; return immediately where possible. For
     * consistency, don't check for null str.
     */
    int rightIndex = sourceCount - targetCount;
    if (fromIndex < 0) {
        return -1;
    }
    if (fromIndex > rightIndex) {
        fromIndex = rightIndex;
    }
    /* Empty string always matches. */
    if (targetCount == 0) {
        return fromIndex;
    }

    int strLastIndex = targetOffset + targetCount - 1;
    char strLastChar = target [strLastIndex];
    // Smallest absolute index at which a full match can end.
    int min = sourceOffset + targetCount - 1;

    for (int i = min + fromIndex; i >= min; i--) {
        // A candidate must end with the target's last character.
        if (source [i] != strLastChar)
            continue;
        // Compare the remaining targetCount-1 characters right to left.
        bool match = true;
        for (int back = 1; back < targetCount; back++) {
            if (source [i - back] != target [strLastIndex - back]) {
                match = false;
                break;
            }
        }
        if (match) {
            return i - targetCount + 1 - sourceOffset;
        }
    }
    return -1;
}
/*
 * Used by js_split to find the next split point in target,
 * starting at offset ip and looking either for the given
 * separator substring, or for the next re match. ip and
 * matchlen must be reference variables (assumed to be arrays of
 * length 1) so they can be updated in the leading whitespace or
 * re case.
 *
 * Return -1 on end of string, >= 0 for a valid index of the next
 * separator occurrence if found, or the string length if no
 * separator is found.
 */
private static int find_split (Context cx, IScriptable scope, string target, string separator, Context.Versions version, RegExpProxy reProxy, IScriptable re, int [] ip, int [] matchlen, bool [] matched, string [] [] parensp)
{
    int i = ip [0];
    int length = target.Length;
    /*
     * Perl4 special case for str.split(' '), only if the user has selected
     * JavaScript1.2 explicitly. Split on whitespace, and skip leading w/s.
     * Strange but true, apparently modeled after awk.
     */
    if (version == Context.Versions.JS1_2 && re == null && separator.Length == 1 && separator [0] == ' ') {
        /* Skip leading whitespace if at front of str. */
        if (i == 0) {
            while (i < length && char.IsWhiteSpace (target [i]))
                i++;
            ip [0] = i;
        }
        /* Don't delimit whitespace at end of string. */
        if (i == length)
            return -1;
        /* Skip over the non-whitespace chars. */
        while (i < length && !char.IsWhiteSpace (target [i]))
            i++;
        /* Now skip the next run of whitespace. */
        int j = i;
        while (j < length && char.IsWhiteSpace (target [j]))
            j++;
        /* Update matchlen to count delimiter chars. */
        matchlen [0] = j - i;
        return i;
    }
    /*
     * Stop if past end of string. If at end of string, we will
     * return target length, so that
     *
     * "ab,".split(',') => new Array("ab", "")
     *
     * and the resulting array converts back to the string "ab,"
     * for symmetry. NB: This differs from perl, which drops the
     * trailing empty substring if the LIMIT argument is omitted.
     */
    if (i > length)
        return -1;
    /*
     * Match a regular expression against the separator at or
     * above index i. Return -1 at end of string instead of
     * trying for a match, so we don't get stuck in a loop.
     */
    if (re != null) {
        return reProxy.FindSplit (cx, scope, target, separator, re, ip, matchlen, matched, parensp);
    }
    /*
     * Deviate from ECMA by never splitting an empty string by any separator
     * string into a non-empty array (an array of length 1 that contains the
     * empty string).
     */
    if (version != Context.Versions.Default && version < Context.Versions.JS1_3 && length == 0)
        return -1;
    /*
     * Special case: if sep is the empty string, split str into
     * one character substrings. Let our caller worry about
     * whether to split once at end of string into an empty
     * substring.
     *
     * For 1.2 compatibility, at the end of the string, we return the length as
     * the result, and set the separator length to 1 -- this allows the caller
     * to include an additional null string at the end of the substring list.
     */
    if (separator.Length == 0) {
        if (version == Context.Versions.JS1_2) {
            if (i == length) {
                matchlen [0] = 1;
                return i;
            }
            return i + 1;
        }
        return (i == length) ? -1 : i + 1;
    }
    /* Punt to String.IndexOf; return target length if separator is
     * not found.
     */
    if (ip [0] >= length)
        return length;
    i = target.IndexOf (separator, ip [0]);
    return (i != -1) ? i : length;
}
        /*
         * See ECMA 15.5.4.8. Modified to match JS 1.2 - optionally takes
         * a limit argument and accepts a regular expression as the split
         * argument.
         */
        private static object ImplSplit (Context cx, IScriptable scope, string target, object [] args)
        {
            // create an empty Array to return;
            IScriptable top = GetTopLevelScope (scope);
            IScriptable result = ScriptRuntime.NewObject (cx, top, "Array", null);
            // return an array consisting of the target if no separator given
            // don't check against undefined, because we want
            // 'fooundefinedbar'.split(void 0) to split to ['foo', 'bar']
            if (args.Length < 1) {
                result.Put (0, result, target);
                return result;
            }
            // Use the second argument as the split limit, if given.
            bool limited = (args.Length > 1) && (args [1] != Undefined.Value);
            long limit = 0; // Initialize to avoid warning.
            if (limited) {
                /* Clamp limit between 0 and 1 + string length. */
                limit = ScriptConvert.ToUint32 (args [1]);
                if (limit > target.Length)
                    limit = 1 + target.Length;
            }
            string separator = null;
            int [] matchlen = new int [1];
            IScriptable re = null;
            RegExpProxy reProxy = null;
            // If the first argument is a regexp object (and a proxy is
            // installed on the context), split on regexp matches instead
            // of a literal separator string.
            if (args [0] is IScriptable) {
                reProxy = cx.RegExpProxy;
                if (reProxy != null) {
                    IScriptable test = (IScriptable)args [0];
                    if (reProxy.IsRegExp (test)) {
                        re = test;
                    }
                }
            }
            if (re == null) {
                separator = ScriptConvert.ToString (args [0]);
                matchlen [0] = separator.Length;
            }
            // split target with separator or re
            // ip[0] is the scan position; find_split advances it via match+matchlen.
            int [] ip = new int [] { 0 };
            int match;
            int len = 0;
            bool [] matched = new bool [] { false };
            string [] [] parens = new string [] [] { null };
            Context.Versions version = cx.Version;
            while ((match = find_split (cx, scope, target, separator, version, reProxy, re, ip, matchlen, matched, parens)) >= 0) {
                if ((limited && len >= limit) || (match > target.Length))
                    break;
                string substr;
                if (target.Length == 0)
                    substr = target;
                else
                    substr = target.Substring (ip [0], (match) - (ip [0]));
                result.Put (len, result, substr);
                len++;
                /*
                 * Imitate perl's feature of including parenthesized substrings
                 * that matched part of the delimiter in the new array, after the
                 * split substring that was delimited.
                 */
                // CB, 02.01.2007: Don't do this, causes bug #287630
                // https://bugzilla.mozilla.org/show_bug.cgi?query_format=specific&order=relevance+desc&bug_status=__open__&id=287630
                /*
                if (re != null && matched [0] == true) {
                    int size = parens [0].Length;
                    for (int num = 0; num < size; num++) {
                        if (limited && len >= limit)
                            break;
                        result.Put (len, result, parens [0] [num]);
                        len++;
                    }
                    matched [0] = false;
                }
                */
                ip [0] = match + matchlen [0];
                if (version < Context.Versions.JS1_3 && version != Context.Versions.Default) {
                    /*
                     * Deviate from ECMA to imitate Perl, which omits a final
                     * split unless a limit argument is given and big enough.
                     */
                    if (!limited && ip [0] == target.Length)
                        break;
                }
            }
            return result;
        }
/*
* See ECMA 15.5.4.15
*/
private static string js_substring (Context cx, string target, object [] args)
{
int length = target.Length;
double start = ScriptConvert.ToInteger (args, 0);
double end;
if (start < 0)
start = 0;
else if (start > length)
start = length;
if (args.Length <= 1 || args [1] == Undefined.Value) {
end = length;
}
else {
end = ScriptConvert.ToInteger (args [1]);
if (end < 0)
end = 0;
else if (end > length)
end = length;
// swap if end < start
if (end < start) {
if (cx.Version != Context.Versions.JS1_2) {
double temp = start;
start = end;
end = temp;
}
else {
// Emulate old JDK1.0 java.lang.String.substring()
end = start;
}
}
}
return target.Substring ((int)start, ((int)end) - ((int)start));
}
/*
* Non-ECMA methods.
*/
private static string js_substr (string target, object [] args)
{
if (args.Length < 1)
return target;
double begin = ScriptConvert.ToInteger (args [0]);
double end;
int length = target.Length;
if (begin < 0) {
begin += length;
if (begin < 0)
begin = 0;
}
else if (begin > length) {
begin = length;
}
if (args.Length == 1) {
end = length;
}
else {
end = ScriptConvert.ToInteger (args [1]);
if (end < 0)
end = 0;
end += begin;
if (end > length)
end = length;
}
return target.Substring ((int)begin, ((int)end) - ((int)begin));
}
/*
* Python-esque sequence operations.
*/
private static string js_concat (string target, object [] args)
{
int N = args.Length;
if (N == 0) {
return target;
}
else if (N == 1) {
string arg = ScriptConvert.ToString (args [0]);
return string.Concat (target, arg);
}
// Find total capacity for the final string to avoid unnecessary
// re-allocations in StringBuffer
int size = target.Length;
string [] argsAsStrings = new string [N];
for (int i = 0; i != N; ++i) {
string s = ScriptConvert.ToString (args [i]);
argsAsStrings [i] = s;
size += s.Length;
}
System.Text.StringBuilder result = new System.Text.StringBuilder (size);
result.Append (target);
for (int i = 0; i != N; ++i) {
result.Append (argsAsStrings [i]);
}
return result.ToString ();
}
private static string js_slice (string target, object [] args)
{
if (args.Length != 0) {
double begin = ScriptConvert.ToInteger (args [0]);
double end;
int length = target.Length;
if (begin < 0) {
begin += length;
if (begin < 0)
begin = 0;
}
else if (begin > length) {
begin = length;
}
if (args.Length == 1) {
end = length;
}
else {
end = ScriptConvert.ToInteger (args [1]);
if (end < 0) {
end += length;
if (end < 0)
end = 0;
}
else if (end > length) {
end = length;
}
if (end < begin)
end = begin;
}
return target.Substring ((int)begin, ((int)end) - ((int)begin));
}
return target;
}
        // Maps a property name to its prototype id using a generated
        // length/character dispatch table.  The switch below is produced by
        // the Rhino id-generation tool — do not hand-edit its structure;
        // regenerate it instead when ids change.
        protected internal override int FindPrototypeId (string s)
        {
            int id;
            #region Generated PrototypeId Switch
        L0: {
                id = 0;
                string X = null;
                int c;
            L:
                // First dispatch on the name's length, then on one or two
                // distinguishing characters; X holds the candidate name that
                // is fully verified at the end.
                switch (s.Length) {
                case 3:
                    c = s [2];
                    if (c == 'b') { if (s [0] == 's' && s [1] == 'u') { id = Id_sub; goto EL0; } }
                    else if (c == 'g') { if (s [0] == 'b' && s [1] == 'i') { id = Id_big; goto EL0; } }
                    else if (c == 'p') { if (s [0] == 's' && s [1] == 'u') { id = Id_sup; goto EL0; } }
                    break;
                case 4:
                    c = s [0];
                    if (c == 'b') { X = "bold"; id = Id_bold; }
                    else if (c == 'l') { X = "link"; id = Id_link; }
                    break;
                case 5:
                    switch (s [4]) {
                    case 'd':
                        X = "fixed";
                        id = Id_fixed;
                        break;
                    case 'e':
                        X = "slice";
                        id = Id_slice;
                        break;
                    case 'h':
                        X = "match";
                        id = Id_match;
                        break;
                    case 'k':
                        X = "blink";
                        id = Id_blink;
                        break;
                    case 'l':
                        X = "small";
                        id = Id_small;
                        break;
                    case 't':
                        X = "split";
                        id = Id_split;
                        break;
                    }
                    break;
                case 6:
                    switch (s [1]) {
                    case 'e':
                        X = "search";
                        id = Id_search;
                        break;
                    case 'h':
                        X = "charAt";
                        id = Id_charAt;
                        break;
                    case 'n':
                        X = "anchor";
                        id = Id_anchor;
                        break;
                    case 'o':
                        X = "concat";
                        id = Id_concat;
                        break;
                    case 'q':
                        X = "equals";
                        id = Id_equals;
                        break;
                    case 't':
                        X = "strike";
                        id = Id_strike;
                        break;
                    case 'u':
                        X = "substr";
                        id = Id_substr;
                        break;
                    }
                    break;
                case 7:
                    switch (s [1]) {
                    case 'a':
                        X = "valueOf";
                        id = Id_valueOf;
                        break;
                    case 'e':
                        X = "replace";
                        id = Id_replace;
                        break;
                    case 'n':
                        X = "indexOf";
                        id = Id_indexOf;
                        break;
                    case 't':
                        X = "italics";
                        id = Id_italics;
                        break;
                    }
                    break;
                case 8:
                    c = s [4];
                    if (c == 'r') { X = "toString"; id = Id_toString; }
                    else if (c == 's') { X = "fontsize"; id = Id_fontsize; }
                    else if (c == 'u') { X = "toSource"; id = Id_toSource; }
                    break;
                case 9:
                    c = s [0];
                    if (c == 'f') { X = "fontcolor"; id = Id_fontcolor; }
                    else if (c == 's') { X = "substring"; id = Id_substring; }
                    break;
                case 10:
                    X = "charCodeAt";
                    id = Id_charCodeAt;
                    break;
                case 11:
                    switch (s [2]) {
                    case 'L':
                        X = "toLowerCase";
                        id = Id_toLowerCase;
                        break;
                    case 'U':
                        X = "toUpperCase";
                        id = Id_toUpperCase;
                        break;
                    case 'n':
                        X = "constructor";
                        id = Id_constructor;
                        break;
                    case 's':
                        X = "lastIndexOf";
                        id = Id_lastIndexOf;
                        break;
                    }
                    break;
                case 16:
                    X = "equalsIgnoreCase";
                    id = Id_equalsIgnoreCase;
                    break;
                }
                // Verify the candidate matched the full input; otherwise fall
                // back to "not found" (0).
                // NOTE(review): in C#, string '!=' already compares values, so
                // the trailing Equals call is redundant — a harmless artifact
                // of the mechanical Java-to-C# port.
                if (X != null && X != s && !X.Equals (s))
                    id = 0;
            }
        EL0:
            #endregion
            return id;
        }
        #region PrototypeIds
        // Ids for the String prototype's methods; negative ids are
        // constructor-level functions.  These values must stay in sync with
        // the generated switch in FindPrototypeId.
        private const int ConstructorId_fromCharCode = -1;
        private const int Id_constructor = 1;
        private const int Id_toString = 2;
        private const int Id_toSource = 3;
        private const int Id_valueOf = 4;
        private const int Id_charAt = 5;
        private const int Id_charCodeAt = 6;
        private const int Id_indexOf = 7;
        private const int Id_lastIndexOf = 8;
        private const int Id_split = 9;
        private const int Id_substring = 10;
        private const int Id_toLowerCase = 11;
        private const int Id_toUpperCase = 12;
        private const int Id_substr = 13;
        private const int Id_concat = 14;
        private const int Id_slice = 15;
        private const int Id_bold = 16;
        private const int Id_italics = 17;
        private const int Id_fixed = 18;
        private const int Id_strike = 19;
        private const int Id_small = 20;
        private const int Id_big = 21;
        private const int Id_blink = 22;
        private const int Id_sup = 23;
        private const int Id_sub = 24;
        private const int Id_fontsize = 25;
        private const int Id_fontcolor = 26;
        private const int Id_link = 27;
        private const int Id_anchor = 28;
        private const int Id_equals = 29;
        private const int Id_equalsIgnoreCase = 30;
        private const int Id_match = 31;
        private const int Id_search = 32;
        private const int Id_replace = 33;
        // Highest id in use; must equal the largest Id_* constant above.
        private const int MAX_PROTOTYPE_ID = 33;
        #endregion
        // The wrapped primitive string value of this String instance.
        private string m_Value;
}
}
| |
//
// Copyright (c) 2004-2021 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using NLog.Config;
namespace NLog.UnitTests.Layouts
{
using System;
using System.Collections.Generic;
using System.Linq;
using NLog.Layouts;
using NLog.Targets;
using Xunit;
public class JsonLayoutTests : NLogTestBase
{
        // Expected JSON for CreateLogEventWithExcluded() rendered with
        // IncludeEventProperties=true and Excluded1/Excluded2 filtered out.
        private const string ExpectedIncludeAllPropertiesWithExcludes = "{ \"StringProp\": \"ValueA\", \"IntProp\": 123, \"DoubleProp\": 123.123, \"DecimalProp\": 123.123, \"BoolProp\": true, \"NullProp\": null, \"DateTimeProp\": \"2345-01-23T12:34:56Z\" }";
        // Same event with ExcludeEmptyProperties=true: null/empty values are
        // dropped, non-empty ones (NoEmptyProp4) are kept.
        private const string ExpectedExcludeEmptyPropertiesWithExcludes = "{ \"StringProp\": \"ValueA\", \"IntProp\": 123, \"DoubleProp\": 123.123, \"DecimalProp\": 123.123, \"BoolProp\": true, \"DateTimeProp\": \"2345-01-23T12:34:56Z\", \"NoEmptyProp4\": \"hello\" }";
[Fact]
public void JsonLayoutRendering()
{
var jsonLayout = new JsonLayout()
{
Attributes =
{
new JsonAttribute("date", "${longdate}"),
new JsonAttribute("level", "${level}"),
new JsonAttribute("message", "${message}"),
}
};
var logEventInfo = new LogEventInfo
{
TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
Level = LogLevel.Info,
Message = "hello, world"
};
Assert.Equal("{ \"date\": \"2010-01-01 12:34:56.0000\", \"level\": \"Info\", \"message\": \"hello, world\" }", jsonLayout.Render(logEventInfo));
}
[Fact]
public void JsonLayoutRenderingNoSpaces()
{
var jsonLayout = new JsonLayout()
{
Attributes =
{
new JsonAttribute("date", "${longdate}"),
new JsonAttribute("level", "${level}"),
new JsonAttribute("message", "${message}"),
},
SuppressSpaces = true
};
var logEventInfo = new LogEventInfo
{
TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
Level = LogLevel.Info,
Message = "hello, world"
};
Assert.Equal("{\"date\":\"2010-01-01 12:34:56.0000\",\"level\":\"Info\",\"message\":\"hello, world\"}", jsonLayout.Render(logEventInfo));
}
[Fact]
public void JsonLayoutRenderingEscapeUnicode()
{
var jsonLayout = new JsonLayout()
{
Attributes =
{
new JsonAttribute("logger", "${logger}") { EscapeUnicode = true },
new JsonAttribute("level", "${level}"),
new JsonAttribute("message", "${event-properties:msg}") { EscapeUnicode = false },
},
SuppressSpaces = true,
IncludeEventProperties = true,
};
var logEventInfo = LogEventInfo.Create(LogLevel.Info, "\u00a9", null, "{$a}", new object[] { "\\" });
logEventInfo.Properties["msg"] = "\u00a9";
Assert.Equal("{\"logger\":\"\\u00a9\",\"level\":\"Info\",\"message\":\"\u00a9\",\"a\":\"\\\\\",\"msg\":\"\u00a9\"}", jsonLayout.Render(logEventInfo));
}
[Fact]
public void JsonLayoutRenderingAndEncodingSpecialCharacters()
{
var jsonLayout = new JsonLayout()
{
Attributes =
{
new JsonAttribute("date", "${longdate}"),
new JsonAttribute("level", "${level}"),
new JsonAttribute("message", "${message}"),
}
};
var logEventInfo = new LogEventInfo
{
TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
Level = LogLevel.Info,
Message = "\"hello, world\""
};
Assert.Equal("{ \"date\": \"2010-01-01 12:34:56.0000\", \"level\": \"Info\", \"message\": \"\\\"hello, world\\\"\" }", jsonLayout.Render(logEventInfo));
}
[Fact]
public void JsonLayoutRenderingAndEncodingLineBreaks()
{
var jsonLayout = new JsonLayout()
{
Attributes =
{
new JsonAttribute("date", "${longdate}"),
new JsonAttribute("level", "${level}"),
new JsonAttribute("message", "${message}"),
}
};
var logEventInfo = new LogEventInfo
{
TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
Level = LogLevel.Info,
Message = "hello,\n\r world"
};
Assert.Equal("{ \"date\": \"2010-01-01 12:34:56.0000\", \"level\": \"Info\", \"message\": \"hello,\\n\\r world\" }", jsonLayout.Render(logEventInfo));
}
[Fact]
public void JsonLayoutRenderingAndNotEncodingMessageAttribute()
{
var jsonLayout = new JsonLayout()
{
Attributes =
{
new JsonAttribute("date", "${longdate}"),
new JsonAttribute("level", "${level}"),
new JsonAttribute("message", "${message}", false),
}
};
var logEventInfo = new LogEventInfo
{
TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
Level = LogLevel.Info,
Message = "{ \"hello\" : \"world\" }"
};
Assert.Equal("{ \"date\": \"2010-01-01 12:34:56.0000\", \"level\": \"Info\", \"message\": { \"hello\" : \"world\" } }", jsonLayout.Render(logEventInfo));
}
[Fact]
public void JsonLayoutRenderingAndEncodingMessageAttribute()
{
var jsonLayout = new JsonLayout()
{
Attributes =
{
new JsonAttribute("date", "${longdate}"),
new JsonAttribute("level", "${level}"),
new JsonAttribute("message", "${message}"),
}
};
var logEventInfo = new LogEventInfo
{
TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
Level = LogLevel.Info,
Message = "{ \"hello\" : \"world\" }"
};
Assert.Equal("{ \"date\": \"2010-01-01 12:34:56.0000\", \"level\": \"Info\", \"message\": \"{ \\\"hello\\\" : \\\"world\\\" }\" }", jsonLayout.Render(logEventInfo));
}
[Fact]
public void JsonLayoutValueTypeAttribute()
{
var jsonLayout = new JsonLayout()
{
Attributes =
{
new JsonAttribute("date", "${longdate}") { ValueType = typeof(DateTime) },
new JsonAttribute("level", "${level}"),
new JsonAttribute("message", "${message}"),
}
};
var logEventInfo = new LogEventInfo
{
TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
Level = LogLevel.Info,
Message = "{ \"hello\" : \"world\" }"
};
Assert.Equal("{ \"date\": \"2010-01-01T12:34:56Z\", \"level\": \"Info\", \"message\": \"{ \\\"hello\\\" : \\\"world\\\" }\" }", jsonLayout.Render(logEventInfo));
}
        [Fact]
        public void JsonAttributeThreadAgnosticTest()
        {
            // Uses an async target wrapper (targets async='true'), so the
            // layout must capture ${threadid} on the logging thread rather
            // than the background writer thread.
            LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
            <nlog throwExceptions='true'>
                <targets async='true'>
                    <target name='debug' type='Debug'>
                        <layout type='JsonLayout'>
                            <attribute name='type' layout='${exception:format=Type}'/>
                            <attribute name='message' layout='${exception:format=Message}'/>
                            <attribute name='threadid' layout='${threadid}'/>
                        </layout>
                    </target>
                </targets>
                <rules>
                    <logger name='*' minlevel='Debug' writeTo='debug' />
                </rules>
            </nlog>");
            var logger = LogManager.GetLogger("B");
            var logEventInfo = CreateLogEventWithExcluded();
            logger.Debug(logEventInfo);
            var target = LogManager.Configuration.AllTargets.OfType<DebugTarget>().First();
            LogManager.Configuration = null; // Flush
            var message = target.LastMessage;
            // The rendered threadid must be the test (caller) thread's id.
            Assert.Contains(System.Threading.Thread.CurrentThread.ManagedThreadId.ToString(), message);
        }
        [Fact]
        public void JsonAttributeStackTraceUsageTest()
        {
            // A ${callsite} attribute inside the JsonLayout must propagate its
            // stack-trace requirement up to the logging call.
            LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
            <nlog throwExceptions='true'>
                <targets>
                    <target name='debug' type='Debug' >
                        <layout type='JsonLayout'>
                            <attribute name='type' layout='${exception:format=Type}'/>
                            <attribute name='message' layout='${exception:format=Message}'/>
                            <attribute name='className' layout='${callsite:className=true}'/>
                        </layout>
                    </target>
                </targets>
                <rules>
                    <logger name='*' minlevel='Debug' writeTo='debug' />
                </rules>
            </nlog>");
            var logger = LogManager.GetLogger("C");
            var logEventInfo = CreateLogEventWithExcluded();
            logger.Debug(logEventInfo);
            var message = GetDebugLastMessage("debug");
            // The callsite must resolve to this test class.
            Assert.Contains(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.FullName, message);
        }
[Fact]
public void NestedJsonAttrTest()
{
var jsonLayout = new JsonLayout
{
Attributes =
{
new JsonAttribute("type", "${exception:format=Type}"),
new JsonAttribute("message", "${exception:format=Message}"),
new JsonAttribute("innerException", new JsonLayout
{
Attributes =
{
new JsonAttribute("type", "${exception:format=:innerFormat=Type:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"),
new JsonAttribute("message", "${exception:format=:innerFormat=Message:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"),
}
},
//don't escape layout
false)
}
};
var logEventInfo = new LogEventInfo
{
Exception = new NLogRuntimeException("test", new NullReferenceException("null is bad!"))
};
var json = jsonLayout.Render(logEventInfo);
Assert.Equal("{ \"type\": \"NLog.NLogRuntimeException\", \"message\": \"test\", \"innerException\": { \"type\": \"System.NullReferenceException\", \"message\": \"null is bad!\" } }", json);
}
[Fact]
public void NestedJsonAttrDoesNotRenderEmptyLiteralIfRenderEmptyObjectIsFalseTest()
{
var jsonLayout = new JsonLayout
{
Attributes =
{
new JsonAttribute("type", "${exception:format=Type}"),
new JsonAttribute("message", "${exception:format=Message}"),
new JsonAttribute("innerException", new JsonLayout
{
Attributes =
{
new JsonAttribute("type", "${exception:format=:innerFormat=Type:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"),
new JsonAttribute("message", "${exception:format=:innerFormat=Message:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"),
},
RenderEmptyObject = false
},
//don't escape layout
false)
}
};
var logEventInfo = new LogEventInfo
{
Exception = new NLogRuntimeException("test", (Exception)null)
};
var json = jsonLayout.Render(logEventInfo);
Assert.Equal("{ \"type\": \"NLog.NLogRuntimeException\", \"message\": \"test\" }", json);
}
[Fact]
public void NestedJsonAttrRendersEmptyLiteralIfRenderEmptyObjectIsTrueTest()
{
var jsonLayout = new JsonLayout
{
Attributes =
{
new JsonAttribute("type", "${exception:format=Type}"),
new JsonAttribute("message", "${exception:format=Message}"),
new JsonAttribute("innerException", new JsonLayout
{
Attributes =
{
new JsonAttribute("type", "${exception:format=:innerFormat=Type:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"),
new JsonAttribute("message", "${exception:format=:innerFormat=Message:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"),
},
RenderEmptyObject = true
},
//don't escape layout
false)
}
};
var logEventInfo = new LogEventInfo
{
Exception = new NLogRuntimeException("test", (Exception)null)
};
var json = jsonLayout.Render(logEventInfo);
Assert.Equal("{ \"type\": \"NLog.NLogRuntimeException\", \"message\": \"test\", \"innerException\": { } }", json);
}
        [Fact]
        public void NestedJsonAttrTestFromXML()
        {
            // A nested <layout type='JsonLayout'> inside an <attribute> element
            // must be parsed into a JsonLayout-typed attribute and render as a
            // nested object (encode='false').
            var configXml = @"
            <nlog>
              <targets>
                <target name='jsonFile' type='File' fileName='log.json'>
                  <layout type='JsonLayout'>
                    <attribute name='time' layout='${longdate}' />
                    <attribute name='level' layout='${level:upperCase=true}'/>
                    <attribute name='nested' encode='false'  >
                      <layout type='JsonLayout'>
                        <attribute name='message' layout='${message}' />
                        <attribute name='exception' layout='${exception:message}' />
                      </layout>
                    </attribute>
                  </layout>
                </target>
              </targets>
              <rules>
              </rules>
            </nlog>
            ";
            var config = XmlLoggingConfiguration.CreateFromXmlString(configXml);
            Assert.NotNull(config);
            var target = config.FindTargetByName<FileTarget>("jsonFile");
            Assert.NotNull(target);
            var jsonLayout = target.Layout as JsonLayout;
            Assert.NotNull(jsonLayout);
            // First verify the parsed structure: two simple attributes plus one
            // nested JsonLayout attribute.
            var attrs = jsonLayout.Attributes;
            Assert.NotNull(attrs);
            Assert.Equal(3, attrs.Count);
            Assert.Equal(typeof(SimpleLayout), attrs[0].Layout.GetType());
            Assert.Equal(typeof(SimpleLayout), attrs[1].Layout.GetType());
            Assert.Equal(typeof(JsonLayout), attrs[2].Layout.GetType());
            var nestedJsonLayout = (JsonLayout)attrs[2].Layout;
            Assert.Equal(2, nestedJsonLayout.Attributes.Count);
            Assert.Equal("${message}", nestedJsonLayout.Attributes[0].Layout.ToString());
            Assert.Equal("${exception:message}", nestedJsonLayout.Attributes[1].Layout.ToString());
            // Then verify the rendered output.
            var logEventInfo = new LogEventInfo
            {
                TimeStamp = new DateTime(2016, 10, 30, 13, 30, 55),
                Message = "this is message",
                Level = LogLevel.Info,
                Exception = new NLogRuntimeException("test", new NullReferenceException("null is bad!"))
            };
            var json = jsonLayout.Render(logEventInfo);
            Assert.Equal("{ \"time\": \"2016-10-30 13:30:55.0000\", \"level\": \"INFO\", \"nested\": { \"message\": \"this is message\", \"exception\": \"test\" } }", json);
        }
[Fact]
public void IncludeAllJsonProperties()
{
var jsonLayout = new JsonLayout()
{
IncludeEventProperties = true
};
jsonLayout.ExcludeProperties.Add("Excluded1");
jsonLayout.ExcludeProperties.Add("Excluded2");
var logEventInfo = CreateLogEventWithExcluded();
Assert.Equal(ExpectedIncludeAllPropertiesWithExcludes, jsonLayout.Render(logEventInfo));
}
[Fact]
public void PropertyKeyWithQuote()
{
var jsonLayout = new JsonLayout()
{
IncludeEventProperties = true,
};
var logEventInfo = new LogEventInfo();
logEventInfo.Properties.Add(@"fo""o", "bar");
Assert.Equal(@"{ ""fo\""o"": ""bar"" }", jsonLayout.Render(logEventInfo));
}
[Fact]
public void AttributerKeyWithQuote()
{
var jsonLayout = new JsonLayout();
jsonLayout.Attributes.Add(new JsonAttribute(@"fo""o", "bar"));
Assert.Equal(@"{ ""fo\""o"": ""bar"" }", jsonLayout.Render(LogEventInfo.CreateNullEvent()));
}
[Fact]
public void ExcludeEmptyJsonProperties()
{
var jsonLayout = new JsonLayout()
{
IncludeEventProperties = true,
ExcludeEmptyProperties = true
};
jsonLayout.ExcludeProperties.Add("Excluded1");
jsonLayout.ExcludeProperties.Add("Excluded2");
var logEventInfo = CreateLogEventWithExcluded();
logEventInfo.Properties.Add("EmptyProp", "");
logEventInfo.Properties.Add("EmptyProp1", null);
logEventInfo.Properties.Add("EmptyProp2", new DummyContextLogger() { Value = null });
logEventInfo.Properties.Add("EmptyProp3", new DummyContextLogger() { Value = "" });
logEventInfo.Properties.Add("NoEmptyProp4", new DummyContextLogger() { Value = "hello" });
Assert.Equal(ExpectedExcludeEmptyPropertiesWithExcludes, jsonLayout.Render(logEventInfo));
}
[Fact]
public void IncludeAllJsonPropertiesMaxRecursionLimit()
{
var jsonLayout = new JsonLayout()
{
IncludeEventProperties = true,
MaxRecursionLimit = 1,
};
LogEventInfo logEventInfo = new LogEventInfo()
{
TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
Level = LogLevel.Info,
};
logEventInfo.Properties["Message"] = new
{
data = new Dictionary<int, string>() { { 42, "Hello" } }
};
Assert.Equal(@"{ ""Message"": {""data"":{}} }", jsonLayout.Render(logEventInfo));
}
        [Fact]
        [Obsolete("Replaced by ScopeContext.PushProperty or Logger.PushScopeProperty using ${scopeproperty}. Marked obsolete on NLog 5.0")]
        public void IncludeMdcJsonProperties()
        {
            // IncludeMdc='true' pulls values from MappedDiagnosticsContext
            // into the JSON output; ExcludeProperties still applies to them.
            LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
            <nlog throwExceptions='true'>
                <targets>
                    <target name='asyncDebug' type='AsyncWrapper' timeToSleepBetweenBatches='0'>
                        <target name='debug' type='Debug' >
                            <layout type=""JsonLayout"" IncludeMdc='true' ExcludeProperties='Excluded1,Excluded2'>
                            </layout>
                        </target>
                    </target>
                </targets>
                <rules>
                    <logger name='*' minlevel='Debug' writeTo='asyncDebug' />
                </rules>
            </nlog>");
            var logger = LogManager.GetLogger("A");
            var logEventInfo = CreateLogEventWithExcluded();
            // Move the event's non-excluded properties into the MDC, then clear
            // them from the event so only the MDC can supply them.
            MappedDiagnosticsContext.Clear();
            foreach (var prop in logEventInfo.Properties)
                if (prop.Key.ToString() != "Excluded1" && prop.Key.ToString() != "Excluded2")
                    MappedDiagnosticsContext.Set(prop.Key.ToString(), prop.Value);
            logEventInfo.Properties.Clear();
            logger.Debug(logEventInfo);
            LogManager.Flush();
            AssertDebugLastMessage("debug", ExpectedIncludeAllPropertiesWithExcludes);
        }
        [Fact]
        [Obsolete("Replaced by ScopeContext.PushProperty or Logger.PushScopeProperty using ${scopeproperty}. Marked obsolete on NLog 5.0")]
        public void IncludeMdcNoEmptyJsonProperties()
        {
            // Same as IncludeMdcJsonProperties, but with
            // ExcludeEmptyProperties='true': empty/null MDC values are dropped.
            LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
            <nlog throwExceptions='true'>
                <targets>
                    <target name='asyncDebug' type='AsyncWrapper' timeToSleepBetweenBatches='0'>
                        <target name='debug' type='Debug' >
                            <layout type=""JsonLayout"" IncludeMdc='true' ExcludeProperties='Excluded1,Excluded2' ExcludeEmptyProperties='true'>
                            </layout>
                        </target>
                    </target>
                </targets>
                <rules>
                    <logger name='*' minlevel='Debug' writeTo='asyncDebug' />
                </rules>
            </nlog>");
            ILogger logger = LogManager.GetLogger("A");
            var logEventInfo = CreateLogEventWithExcluded();
            logEventInfo.Properties.Add("EmptyProp", "");
            logEventInfo.Properties.Add("EmptyProp1", null);
            logEventInfo.Properties.Add("EmptyProp2", new DummyContextLogger() { Value = null });
            logEventInfo.Properties.Add("EmptyProp3", new DummyContextLogger() { Value = "" });
            logEventInfo.Properties.Add("NoEmptyProp4", new DummyContextLogger() { Value = "hello" });
            // Move the event's non-excluded properties into the MDC, then clear
            // them from the event so only the MDC can supply them.
            MappedDiagnosticsContext.Clear();
            foreach (var prop in logEventInfo.Properties)
                if (prop.Key.ToString() != "Excluded1" && prop.Key.ToString() != "Excluded2")
                    MappedDiagnosticsContext.Set(prop.Key.ToString(), prop.Value);
            logEventInfo.Properties.Clear();
            logger.Debug(logEventInfo);
            LogManager.Flush();
            AssertDebugLastMessage("debug", ExpectedExcludeEmptyPropertiesWithExcludes);
        }
        [Fact]
        public void IncludeGdcJsonProperties()
        {
            // IncludeGdc='true' pulls values from GlobalDiagnosticsContext
            // into the JSON output; ExcludeProperties still applies to them.
            LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
            <nlog throwExceptions='true'>
                <targets>
                    <target name='asyncDebug' type='AsyncWrapper' timeToSleepBetweenBatches='0'>
                        <target name='debug' type='Debug' >
                            <layout type=""JsonLayout"" IncludeGdc='true' ExcludeProperties='Excluded1,Excluded2'>
                            </layout>
                        </target>
                    </target>
                </targets>
                <rules>
                    <logger name='*' minlevel='Debug' writeTo='asyncDebug' />
                </rules>
            </nlog>");
            var logger = LogManager.GetLogger("A");
            var logEventInfo = CreateLogEventWithExcluded();
            // Move the event's non-excluded properties into the GDC, then clear
            // them from the event so only the GDC can supply them.
            GlobalDiagnosticsContext.Clear();
            foreach (var prop in logEventInfo.Properties)
                if (prop.Key.ToString() != "Excluded1" && prop.Key.ToString() != "Excluded2")
                    GlobalDiagnosticsContext.Set(prop.Key.ToString(), prop.Value);
            logEventInfo.Properties.Clear();
            logger.Debug(logEventInfo);
            LogManager.Flush();
            AssertDebugLastMessage("debug", ExpectedIncludeAllPropertiesWithExcludes);
        }
        [Fact]
        public void IncludeGdcNoEmptyJsonProperties()
        {
            // Same as IncludeGdcJsonProperties, but with
            // ExcludeEmptyProperties='true': empty/null GDC values are dropped.
            LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
            <nlog throwExceptions='true'>
                <targets>
                    <target name='asyncDebug' type='AsyncWrapper' timeToSleepBetweenBatches='0'>
                        <target name='debug' type='Debug' >
                            <layout type=""JsonLayout"" IncludeGdc='true' ExcludeProperties='Excluded1,Excluded2' ExcludeEmptyProperties='true'>
                            </layout>
                        </target>
                    </target>
                </targets>
                <rules>
                    <logger name='*' minlevel='Debug' writeTo='asyncDebug' />
                </rules>
            </nlog>");
            ILogger logger = LogManager.GetLogger("A");
            var logEventInfo = CreateLogEventWithExcluded();
            logEventInfo.Properties.Add("EmptyProp", "");
            logEventInfo.Properties.Add("EmptyProp1", null);
            logEventInfo.Properties.Add("EmptyProp2", new DummyContextLogger() { Value = null });
            logEventInfo.Properties.Add("EmptyProp3", new DummyContextLogger() { Value = "" });
            logEventInfo.Properties.Add("NoEmptyProp4", new DummyContextLogger() { Value = "hello" });
            // Move the event's non-excluded properties into the GDC, then clear
            // them from the event so only the GDC can supply them.
            GlobalDiagnosticsContext.Clear();
            foreach (var prop in logEventInfo.Properties)
                if (prop.Key.ToString() != "Excluded1" && prop.Key.ToString() != "Excluded2")
                    GlobalDiagnosticsContext.Set(prop.Key.ToString(), prop.Value);
            logEventInfo.Properties.Clear();
            logger.Debug(logEventInfo);
            LogManager.Flush();
            AssertDebugLastMessage("debug", ExpectedExcludeEmptyPropertiesWithExcludes);
        }
        [Fact]
        [Obsolete("Replaced by ScopeContext.PushProperty or Logger.PushScopeProperty using ${scopeproperty}. Marked obsolete on NLog 5.0")]
        public void IncludeMdlcJsonProperties()
        {
            // IncludeMdlc='true' pulls values from the logical-call-context
            // MDLC into the JSON output; ExcludeProperties still applies.
            LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
            <nlog throwExceptions='true'>
                <targets>
                    <target name='asyncDebug' type='AsyncWrapper' timeToSleepBetweenBatches='0'>
                        <target name='debug' type='Debug' >
                            <layout type=""JsonLayout"" IncludeMdlc='true' ExcludeProperties='Excluded1,Excluded2'>
                            </layout>
                        </target>
                    </target>
                </targets>
                <rules>
                    <logger name='*' minlevel='Debug' writeTo='asyncDebug' />
                </rules>
            </nlog>");
            var logger = LogManager.GetLogger("A");
            var logEventInfo = CreateLogEventWithExcluded();
            // Move the event's non-excluded properties into the MDLC, then
            // clear them from the event so only the MDLC can supply them.
            MappedDiagnosticsLogicalContext.Clear();
            foreach (var prop in logEventInfo.Properties)
                if (prop.Key.ToString() != "Excluded1" && prop.Key.ToString() != "Excluded2")
                    MappedDiagnosticsLogicalContext.Set(prop.Key.ToString(), prop.Value);
            logEventInfo.Properties.Clear();
            logger.Debug(logEventInfo);
            LogManager.Flush();
            AssertDebugLastMessage("debug", ExpectedIncludeAllPropertiesWithExcludes);
        }
        [Fact]
        [Obsolete("Replaced by ScopeContext.PushProperty or Logger.PushScopeProperty using ${scopeproperty}. Marked obsolete on NLog 5.0")]
        public void IncludeMdlcNoEmptyJsonProperties()
        {
            // Same as IncludeMdlcJsonProperties, but with
            // ExcludeEmptyProperties='true': empty/null MDLC values are dropped.
            LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
            <nlog throwExceptions='true'>
                <targets>
                    <target name='asyncDebug' type='AsyncWrapper' timeToSleepBetweenBatches='0'>
                        <target name='debug' type='Debug' >
                            <layout type=""JsonLayout"" IncludeMdlc='true' ExcludeProperties='Excluded1,Excluded2' ExcludeEmptyProperties='true'>
                            </layout>
                        </target>
                    </target>
                </targets>
                <rules>
                    <logger name='*' minlevel='Debug' writeTo='asyncDebug' />
                </rules>
            </nlog>");
            ILogger logger = LogManager.GetLogger("A");
            var logEventInfo = CreateLogEventWithExcluded();
            logEventInfo.Properties.Add("EmptyProp", "");
            logEventInfo.Properties.Add("EmptyProp1", null);
            logEventInfo.Properties.Add("EmptyProp2", new DummyContextLogger() { Value = null });
            logEventInfo.Properties.Add("EmptyProp3", new DummyContextLogger() { Value = "" });
            logEventInfo.Properties.Add("NoEmptyProp4", new DummyContextLogger() { Value = "hello" });
            // Move the event's non-excluded properties into the MDLC, then
            // clear them from the event so only the MDLC can supply them.
            MappedDiagnosticsLogicalContext.Clear();
            foreach (var prop in logEventInfo.Properties)
                if (prop.Key.ToString() != "Excluded1" && prop.Key.ToString() != "Excluded2")
                    MappedDiagnosticsLogicalContext.Set(prop.Key.ToString(), prop.Value);
            logEventInfo.Properties.Clear();
            logger.Debug(logEventInfo);
            LogManager.Flush();
            AssertDebugLastMessage("debug", ExpectedExcludeEmptyPropertiesWithExcludes);
        }
[Fact]
[Obsolete("Replaced by ScopeContext.PushProperty or Logger.PushScopeProperty using ${scopeproperty}. Marked obsolete on NLog 5.0")]
public void IncludeMdlcJsonNestedProperties()
{
    // Arrange: outer JsonLayout with a nested JsonLayout attribute ('scope')
    // that renders the MDLC contents without re-encoding.
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='asyncDebug' type='AsyncWrapper' timeToSleepBetweenBatches='0'>
<target name='debug' type='Debug'>
<layout type='JsonLayout'>
<attribute name='scope' encode='false' >
<layout type='JsonLayout' includeMdlc='true' />
</attribute>
</layout>
</target>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='asyncDebug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    // Act: push the non-excluded event properties into the MDLC and log an
    // event that carries no properties of its own.
    LogEventInfo logEvent = CreateLogEventWithExcluded();
    MappedDiagnosticsLogicalContext.Clear();
    foreach (var entry in logEvent.Properties)
    {
        string key = entry.Key.ToString();
        if (key == "Excluded1" || key == "Excluded2")
            continue;
        MappedDiagnosticsLogicalContext.Set(key, entry.Value);
    }
    logEvent.Properties.Clear();
    logger.Debug(logEvent);
    LogManager.Flush();

    // Assert: the nested layout's output appears somewhere inside the message.
    AssertDebugLastMessageContains("debug", ExpectedIncludeAllPropertiesWithExcludes);
}
/// <summary>
/// Loads the layout from XML configuration, which exercises parsing of the
/// comma-separated ExcludeProperties list (note the 'EXCLUDED2' casing —
/// exclusion appears to match case-insensitively; confirmed by the expectation).
/// </summary>
[Fact]
public void IncludeAllJsonPropertiesXml()
{
    // Arrange
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='debug' type='Debug' >
<layout type=""JsonLayout"" IncludeAllProperties='true' ExcludeProperties='Excluded1,EXCLUDED2'>
</layout>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    // Act
    LogEventInfo logEvent = CreateLogEventWithExcluded();
    logger.Debug(logEvent);

    // Assert
    AssertDebugLastMessage("debug", ExpectedIncludeAllPropertiesWithExcludes);
}
[Fact]
public void IncludeAllJsonPropertiesMutableXml()
{
    // Arrange: buffering target so rendering can be deferred until Flush;
    // the assertion proves the mutable property was captured at log time,
    // not at flush time (the builder is cleared in between).
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='asyncDebug' type='BufferingWrapper'>
<target name='debug' type='Debug'>
<layout type='JsonLayout' IncludeAllProperties='true' ExcludeProperties='Excluded1,Excluded2' maxRecursionLimit='0' />
</target>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='asyncDebug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    // Act: replace StringProp with a mutable StringBuilder, log, then mutate.
    LogEventInfo logEvent = CreateLogEventWithExcluded();
    var mutableValue = new System.Text.StringBuilder(logEvent.Properties["StringProp"].ToString());
    logEvent.Properties["StringProp"] = mutableValue;
    logger.Debug(logEvent);
    mutableValue.Clear();
    LogManager.Flush();

    // Assert: output still shows the original value of StringProp.
    AssertDebugLastMessage("debug", ExpectedIncludeAllPropertiesWithExcludes);
}
[Fact]
public void IncludeAllJsonPropertiesMutableNestedXml()
{
    // Arrange: same mutable-capture scenario as IncludeAllJsonPropertiesMutableXml,
    // but the property-rendering JsonLayout is nested inside an outer JsonLayout.
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='asyncDebug' type='BufferingWrapper'>
<target name='debug' type='Debug'>
<layout type='JsonLayout' maxRecursionLimit='0'>
<attribute name='properties' encode='false' >
<layout type='JsonLayout' IncludeAllProperties='true' ExcludeProperties='Excluded1,Excluded2' maxRecursionLimit='0'/>
</attribute>
</layout>
</target>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='asyncDebug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    // Act: swap StringProp for a StringBuilder, log, then clear the builder
    // before the buffered event is flushed.
    LogEventInfo logEvent = CreateLogEventWithExcluded();
    var mutableValue = new System.Text.StringBuilder(logEvent.Properties["StringProp"].ToString());
    logEvent.Properties["StringProp"] = mutableValue;
    logger.Debug(logEvent);
    mutableValue.Clear();
    LogManager.Flush();

    // Assert: the nested layout rendered the value captured at log time.
    AssertDebugLastMessageContains("debug", ExpectedIncludeAllPropertiesWithExcludes);
}
/// <summary>
/// Verifies object serialization one level deep (maxRecursionLimit='1'):
/// object members are expanded once, while anything nested deeper falls
/// back to ToString.
/// </summary>
[Fact]
public void SerializeObjectRecursionSingle()
{
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='debug' type='Debug' >
<layout type=""JsonLayout"" IncludeAllProperties='true' maxRecursionLimit='1' >
</layout>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    // A list of anonymous objects: each element is expanded one level.
    var listEvent = new LogEventInfo();
    listEvent.Properties.Add("nestedObject", new List<object> { new { val = 1, val2 = "value2" }, new { val3 = 3, val4 = "value4" } });
    logger.Debug(listEvent);
    AssertDebugLastMessage("debug", "{ \"nestedObject\": [{\"val\":1, \"val2\":\"value2\"},{\"val3\":3, \"val4\":\"value4\"}] }");

    // A single anonymous object: expanded one level.
    var objectEvent = new LogEventInfo();
    objectEvent.Properties.Add("nestedObject", new { val = 1, val2 = "value2" });
    logger.Debug(objectEvent);
    AssertDebugLastMessage("debug", "{ \"nestedObject\": {\"val\":1, \"val2\":\"value2\"} }");

    // A list inside a list: the inner element exceeds the limit, so only ToString.
    var nestedListEvent = new LogEventInfo();
    nestedListEvent.Properties.Add("nestedObject", new List<object> { new List<object> { new { val = 1, val2 = "value2" } } });
    logger.Debug(nestedListEvent);
    AssertDebugLastMessage("debug", "{ \"nestedObject\": [[\"{ val = 1, val2 = value2 }\"]] }"); // Allows nested collection, but then only ToString
}
/// <summary>
/// Verifies serialization with object recursion disabled (maxRecursionLimit='0'):
/// a single collection level is still expanded, but objects are always ToString'ed
/// and collections nested inside collections come out empty.
/// </summary>
[Fact]
public void SerializeObjectRecursionZero()
{
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='debug' type='Debug' >
<layout type=""JsonLayout"" IncludeAllProperties='true' maxRecursionLimit='0' >
</layout>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    var listEvent = new LogEventInfo();
    listEvent.Properties.Add("nestedObject", new List<object> { new { val = 1, val2 = "value2" }, new { val3 = 3, val4 = "value5" } });
    logger.Debug(listEvent);
    AssertDebugLastMessage("debug", "{ \"nestedObject\": [\"{ val = 1, val2 = value2 }\",\"{ val3 = 3, val4 = value5 }\"] }"); // Allows single collection recursion

    var objectEvent = new LogEventInfo();
    objectEvent.Properties.Add("nestedObject", new { val = 1, val2 = "value2" });
    logger.Debug(objectEvent);
    AssertDebugLastMessage("debug", "{ \"nestedObject\": \"{ val = 1, val2 = value2 }\" }"); // Never object recursion, only ToString

    var nestedListEvent = new LogEventInfo();
    nestedListEvent.Properties.Add("nestedObject", new List<object> { new List<object> { new { val = 1, val2 = "value2" } } });
    logger.Debug(nestedListEvent);
    AssertDebugLastMessage("debug", "{ \"nestedObject\": [[]] }"); // No support for nested collections
}
[Fact]
public void EncodesInvalidCharacters()
{
    // Arrange: forward-slash escaping enabled, so '/' becomes '\/' alongside
    // the standard JSON escapes for '"' and '\'.
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='debug' type='Debug' >
<layout type=""JsonLayout"" IncludeAllProperties='true' escapeForwardSlash='true'>
</layout>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    // Act: log a char array containing every character the layout must handle.
    var logEvent = new LogEventInfo();
    logEvent.Properties.Add("InvalidCharacters", "|#{}%&\"~+\\/:*?<>".ToCharArray());
    logger.Debug(logEvent);

    // Assert: each char rendered as its own escaped JSON string element.
    AssertDebugLastMessage("debug", "{ \"InvalidCharacters\": [\"|\",\"#\",\"{\",\"}\",\"%\",\"&\",\"\\\"\",\"~\",\"+\",\"\\\\\",\"\\/\",\":\",\"*\",\"?\",\"<\",\">\"] }");
}
[Fact]
public void EncodesInvalidDoubles()
{
    // Arrange: NaN and infinities are not valid JSON numbers, so the layout
    // must render them as quoted strings.
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='debug' type='Debug' >
<layout type=""JsonLayout"" IncludeAllProperties='true' >
</layout>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    // Act: one property for each special double/float value.
    var logEvent = new LogEventInfo();
    logEvent.Properties.Add("DoubleNaN", double.NaN);
    logEvent.Properties.Add("DoubleInfPositive", double.PositiveInfinity);
    logEvent.Properties.Add("DoubleInfNegative", double.NegativeInfinity);
    logEvent.Properties.Add("FloatNaN", float.NaN);
    logEvent.Properties.Add("FloatInfPositive", float.PositiveInfinity);
    logEvent.Properties.Add("FloatInfNegative", float.NegativeInfinity);
    logger.Debug(logEvent);

    // Assert
    AssertDebugLastMessage("debug", "{ \"DoubleNaN\": \"NaN\", \"DoubleInfPositive\": \"Infinity\", \"DoubleInfNegative\": \"-Infinity\", \"FloatNaN\": \"NaN\", \"FloatInfPositive\": \"Infinity\", \"FloatInfNegative\": \"-Infinity\" }");
}
[Fact]
public void EscapeForwardSlashDefaultTest()
{
    // Arrange: layout-level escapeForwardSlash='false' is inherited by myurl1
    // and by the auto-included event property, while myurl2 overrides it per
    // attribute with escapeForwardSlash='true'.
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='debug' type='Debug' >
<layout type='JsonLayout' escapeForwardSlash='false' includeAllProperties='true'>
<attribute name='myurl1' layout='${event-properties:myurl}' />
<attribute name='myurl2' layout='${event-properties:myurl}' escapeForwardSlash='true' />
</layout>
</target>
</targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    ILogger logger = LogManager.GetLogger("A");

    // Act
    var logEvent = new LogEventInfo();
    logEvent.Properties.Add("myurl", "http://hello.world.com/");
    logger.Debug(logEvent);

    // Assert: only myurl2 has its slashes escaped.
    AssertDebugLastMessage("debug", "{ \"myurl1\": \"http://hello.world.com/\", \"myurl2\": \"http:\\/\\/hello.world.com\\/\", \"myurl\": \"http://hello.world.com/\" }");
}
[Fact]
public void SkipInvalidJsonPropertyValues()
{
    // Arrange: one property whose formatting throws, one well-behaved Guid.
    var layout = new JsonLayout { IncludeEventProperties = true };
    var logEvent = new LogEventInfo
    {
        TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
        Level = LogLevel.Info,
        Message = string.Empty,
    };
    Guid requestId = Guid.NewGuid();
    logEvent.Properties["BadObject"] = new BadObject();
    logEvent.Properties["RequestId"] = requestId;

    // Assert: the throwing property is silently skipped; only RequestId renders.
    string expected = $"{{ \"RequestId\": \"{requestId}\" }}";
    Assert.Equal(expected, layout.Render(logEvent));
}
// Test helper that always fails to render itself, used to prove that
// JsonLayout skips properties whose formatting throws.
class BadObject : IFormattable
{
    public string ToString(string format, IFormatProvider formatProvider)
    {
        throw new ApplicationException("BadObject");
    }

    public override string ToString() => ToString(null, null);
}
/// <summary>
/// Builds the canonical test event: fixed timestamp/level/message plus one
/// property of each primitive kind and the two keys the tests exclude.
/// Insertion order matters — it is reflected in the rendered JSON.
/// </summary>
private static LogEventInfo CreateLogEventWithExcluded()
{
    var logEvent = new LogEventInfo
    {
        TimeStamp = new DateTime(2010, 01, 01, 12, 34, 56),
        Level = LogLevel.Info,
        Message = "hello, world"
    };
    logEvent.Properties["StringProp"] = "ValueA";
    logEvent.Properties["IntProp"] = 123;
    logEvent.Properties["DoubleProp"] = 123.123;
    logEvent.Properties["DecimalProp"] = 123.123m;
    logEvent.Properties["BoolProp"] = true;
    logEvent.Properties["NullProp"] = null;
    logEvent.Properties["DateTimeProp"] = new DateTime(2345, 1, 23, 12, 34, 56, DateTimeKind.Utc);
    logEvent.Properties["Excluded1"] = "ExcludedValue";
    logEvent.Properties["Excluded2"] = "Also excluded";
    return logEvent;
}
// Test helper whose string form is whatever Value holds (possibly null),
// letting tests feed objects with empty/non-empty ToString results.
public class DummyContextLogger
{
    internal string Value { get; set; }

    public override string ToString() => Value;
}
}
}
| |
namespace Oort.AzureStorage.Mockable
{
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Auth;
using Microsoft.WindowsAzure.Storage.Queue;
using Microsoft.WindowsAzure.Storage.Queue.Protocol;
using Microsoft.WindowsAzure.Storage.RetryPolicies;
using Microsoft.WindowsAzure.Storage.Shared.Protocol;
/// <summary>
/// Mockability adapter: exposes a concrete <see cref="CloudQueueClient"/> through
/// the <see cref="ICloudQueueClient"/> interface. Every member is a one-to-one
/// pass-through to the wrapped client; no behavior is added or changed.
/// </summary>
public class OortQueueClient : ICloudQueueClient
{
    private readonly CloudQueueClient _cloudQueueClient;

    /// <summary>Wraps the supplied queue client.</summary>
    public OortQueueClient(CloudQueueClient cloudQueueClient)
    {
        _cloudQueueClient = cloudQueueClient;
    }

    /// <summary>Lets a raw client be used wherever the wrapper type is expected.</summary>
    public static implicit operator OortQueueClient(CloudQueueClient wrap)
    {
        return new OortQueueClient(wrap);
    }

    // ---- Queue listing ----

    public IEnumerable<CloudQueue> ListQueues(string prefix = null, QueueListingDetails queueListingDetails = QueueListingDetails.None,
        QueueRequestOptions options = null, OperationContext operationContext = null)
        => _cloudQueueClient.ListQueues(prefix, queueListingDetails, options, operationContext);

    public QueueResultSegment ListQueuesSegmented(QueueContinuationToken currentToken)
        => _cloudQueueClient.ListQueuesSegmented(currentToken);

    public QueueResultSegment ListQueuesSegmented(string prefix, QueueContinuationToken currentToken)
        => _cloudQueueClient.ListQueuesSegmented(prefix, currentToken);

    public QueueResultSegment ListQueuesSegmented(string prefix, QueueListingDetails queueListingDetails, int? maxResults,
        QueueContinuationToken currentToken, QueueRequestOptions options = null, OperationContext operationContext = null)
        => _cloudQueueClient.ListQueuesSegmented(prefix, queueListingDetails, maxResults, currentToken, options, operationContext);

    public ICancellableAsyncResult BeginListQueuesSegmented(QueueContinuationToken currentToken, AsyncCallback callback,
        object state)
        => _cloudQueueClient.BeginListQueuesSegmented(currentToken, callback, state);

    public ICancellableAsyncResult BeginListQueuesSegmented(string prefix, QueueContinuationToken currentToken,
        AsyncCallback callback, object state)
        => _cloudQueueClient.BeginListQueuesSegmented(prefix, currentToken, callback, state);

    public ICancellableAsyncResult BeginListQueuesSegmented(string prefix, QueueListingDetails queueListingDetails, int? maxResults,
        QueueContinuationToken currentToken, QueueRequestOptions options, OperationContext operationContext,
        AsyncCallback callback, object state)
        => _cloudQueueClient.BeginListQueuesSegmented(prefix, queueListingDetails, maxResults, currentToken, options, operationContext, callback, state);

    public QueueResultSegment EndListQueuesSegmented(IAsyncResult asyncResult)
        => _cloudQueueClient.EndListQueuesSegmented(asyncResult);

    public Task<QueueResultSegment> ListQueuesSegmentedAsync(QueueContinuationToken currentToken)
        => _cloudQueueClient.ListQueuesSegmentedAsync(currentToken);

    public Task<QueueResultSegment> ListQueuesSegmentedAsync(QueueContinuationToken currentToken, CancellationToken cancellationToken)
        => _cloudQueueClient.ListQueuesSegmentedAsync(currentToken, cancellationToken);

    public Task<QueueResultSegment> ListQueuesSegmentedAsync(string prefix, QueueContinuationToken currentToken)
        => _cloudQueueClient.ListQueuesSegmentedAsync(prefix, currentToken);

    public Task<QueueResultSegment> ListQueuesSegmentedAsync(string prefix, QueueContinuationToken currentToken, CancellationToken cancellationToken)
        => _cloudQueueClient.ListQueuesSegmentedAsync(prefix, currentToken, cancellationToken);

    public Task<QueueResultSegment> ListQueuesSegmentedAsync(string prefix, QueueListingDetails queueListingDetails, int? maxResults,
        QueueContinuationToken currentToken, QueueRequestOptions options, OperationContext operationContext)
        => _cloudQueueClient.ListQueuesSegmentedAsync(prefix, queueListingDetails, maxResults, currentToken, options, operationContext);

    public Task<QueueResultSegment> ListQueuesSegmentedAsync(string prefix, QueueListingDetails queueListingDetails, int? maxResults,
        QueueContinuationToken currentToken, QueueRequestOptions options, OperationContext operationContext,
        CancellationToken cancellationToken)
        => _cloudQueueClient.ListQueuesSegmentedAsync(prefix, queueListingDetails, maxResults, currentToken, options, operationContext, cancellationToken);

    // ---- Service properties ----

    public ICancellableAsyncResult BeginGetServiceProperties(AsyncCallback callback, object state)
        => _cloudQueueClient.BeginGetServiceProperties(callback, state);

    public ICancellableAsyncResult BeginGetServiceProperties(QueueRequestOptions requestOptions, OperationContext operationContext,
        AsyncCallback callback, object state)
        => _cloudQueueClient.BeginGetServiceProperties(requestOptions, operationContext, callback, state);

    public ServiceProperties EndGetServiceProperties(IAsyncResult asyncResult)
        => _cloudQueueClient.EndGetServiceProperties(asyncResult);

    public Task<ServiceProperties> GetServicePropertiesAsync()
        => _cloudQueueClient.GetServicePropertiesAsync();

    public Task<ServiceProperties> GetServicePropertiesAsync(CancellationToken cancellationToken)
        => _cloudQueueClient.GetServicePropertiesAsync(cancellationToken);

    public Task<ServiceProperties> GetServicePropertiesAsync(QueueRequestOptions requestOptions, OperationContext operationContext)
        => _cloudQueueClient.GetServicePropertiesAsync(requestOptions, operationContext);

    public Task<ServiceProperties> GetServicePropertiesAsync(QueueRequestOptions requestOptions, OperationContext operationContext,
        CancellationToken cancellationToken)
        => _cloudQueueClient.GetServicePropertiesAsync(requestOptions, operationContext, cancellationToken);

    public ServiceProperties GetServiceProperties(QueueRequestOptions requestOptions = null,
        OperationContext operationContext = null)
        => _cloudQueueClient.GetServiceProperties(requestOptions, operationContext);

    public ICancellableAsyncResult BeginSetServiceProperties(ServiceProperties properties, AsyncCallback callback, object state)
        => _cloudQueueClient.BeginSetServiceProperties(properties, callback, state);

    public ICancellableAsyncResult BeginSetServiceProperties(ServiceProperties properties, QueueRequestOptions requestOptions,
        OperationContext operationContext, AsyncCallback callback, object state)
        => _cloudQueueClient.BeginSetServiceProperties(properties, requestOptions, operationContext, callback, state);

    public void EndSetServiceProperties(IAsyncResult asyncResult)
        => _cloudQueueClient.EndSetServiceProperties(asyncResult);

    public Task SetServicePropertiesAsync(ServiceProperties properties)
        => _cloudQueueClient.SetServicePropertiesAsync(properties);

    public Task SetServicePropertiesAsync(ServiceProperties properties, CancellationToken cancellationToken)
        => _cloudQueueClient.SetServicePropertiesAsync(properties, cancellationToken);

    public Task SetServicePropertiesAsync(ServiceProperties properties, QueueRequestOptions options,
        OperationContext operationContext)
        => _cloudQueueClient.SetServicePropertiesAsync(properties, options, operationContext);

    public Task SetServicePropertiesAsync(ServiceProperties properties, QueueRequestOptions options,
        OperationContext operationContext, CancellationToken cancellationToken)
        => _cloudQueueClient.SetServicePropertiesAsync(properties, options, operationContext, cancellationToken);

    public void SetServiceProperties(ServiceProperties properties, QueueRequestOptions requestOptions = null,
        OperationContext operationContext = null)
        => _cloudQueueClient.SetServiceProperties(properties, requestOptions, operationContext);

    // ---- Service stats ----

    public ICancellableAsyncResult BeginGetServiceStats(AsyncCallback callback, object state)
        => _cloudQueueClient.BeginGetServiceStats(callback, state);

    public ICancellableAsyncResult BeginGetServiceStats(QueueRequestOptions requestOptions, OperationContext operationContext,
        AsyncCallback callback, object state)
        => _cloudQueueClient.BeginGetServiceStats(requestOptions, operationContext, callback, state);

    public ServiceStats EndGetServiceStats(IAsyncResult asyncResult)
        => _cloudQueueClient.EndGetServiceStats(asyncResult);

    public Task<ServiceStats> GetServiceStatsAsync()
        => _cloudQueueClient.GetServiceStatsAsync();

    public Task<ServiceStats> GetServiceStatsAsync(CancellationToken cancellationToken)
        => _cloudQueueClient.GetServiceStatsAsync(cancellationToken);

    public Task<ServiceStats> GetServiceStatsAsync(QueueRequestOptions requestOptions, OperationContext operationContext)
        => _cloudQueueClient.GetServiceStatsAsync(requestOptions, operationContext);

    public Task<ServiceStats> GetServiceStatsAsync(QueueRequestOptions requestOptions, OperationContext operationContext,
        CancellationToken cancellationToken)
        => _cloudQueueClient.GetServiceStatsAsync(requestOptions, operationContext, cancellationToken);

    public ServiceStats GetServiceStats(QueueRequestOptions requestOptions = null, OperationContext operationContext = null)
        => _cloudQueueClient.GetServiceStats(requestOptions, operationContext);

    // ---- Queue references and pass-through configuration ----

    public CloudQueue GetQueueReference(string queueName)
        => _cloudQueueClient.GetQueueReference(queueName);

    public AuthenticationScheme AuthenticationScheme
    {
        get { return _cloudQueueClient.AuthenticationScheme; }
        set { _cloudQueueClient.AuthenticationScheme = value; }
    }

    public IBufferManager BufferManager
    {
        get { return _cloudQueueClient.BufferManager; }
        set { _cloudQueueClient.BufferManager = value; }
    }

    public StorageCredentials Credentials => _cloudQueueClient.Credentials;

    public Uri BaseUri => _cloudQueueClient.BaseUri;

    public StorageUri StorageUri => _cloudQueueClient.StorageUri;

    public IRetryPolicy RetryPolicy
    {
        get { return _cloudQueueClient.RetryPolicy; }
        set { _cloudQueueClient.RetryPolicy = value; }
    }

    public LocationMode LocationMode
    {
        get { return _cloudQueueClient.LocationMode; }
        set { _cloudQueueClient.LocationMode = value; }
    }

    public TimeSpan? ServerTimeout
    {
        get { return _cloudQueueClient.ServerTimeout; }
        set { _cloudQueueClient.ServerTimeout = value; }
    }

    public TimeSpan? MaximumExecutionTime
    {
        get { return _cloudQueueClient.MaximumExecutionTime; }
        set { _cloudQueueClient.MaximumExecutionTime = value; }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using gView.Framework.UI;
using gView.Framework.Carto;
using gView.Framework.Geometry;
using gView.Framework.system;
using gView.Framework.Symbology;
using gView.Framework.UI.Events;
using System.Threading;
namespace gView.Plugins.MapTools.Dialogs
{
/// <summary>
/// Floating overview window for a map document: shows the whole map (zoomed to
/// its limit) with a graphic rectangle marking the focus map's current extent,
/// and lets the user move/resize that extent via a rubber-band tool.
/// </summary>
public partial class FormOverviewMap : Form
{
    private IMapDocument _doc;
    private IMap _ovmap;                    // private copy of the focus map rendered in this window
    private EnvelopeGraphics _envGraphics;  // graphic element that draws the focus map's extent
    private OVTool _tool;

    /// <summary>
    /// Creates the overview window for <paramref name="doc"/> and wires up the
    /// rubber-band tool that moves the focus map's extent.
    /// </summary>
    public FormOverviewMap(IMapDocument doc)
    {
        InitializeComponent();
        _doc = doc;
        if (_doc.Application is IMapApplication)
        {
            // Behave as a floating tool window owned by the main document window.
            this.ShowInTaskbar = false;
            this.TopLevel = true;
            this.Owner = ((IMapApplication)_doc.Application).DocumentWindow as Form;
        }
        _envGraphics = new EnvelopeGraphics();
        _tool = new OVTool();
        _tool.OnCreate(_doc);
        _tool.RefreshOverviewMap += new OVTool.RefreshOverviewMapHandler(Tool_RefreshOverviewMap);
    }

    /// <summary>
    /// Called by the tool after it changed the focus map's extent: re-inserts
    /// the extent rectangle and redraws the overview.
    /// </summary>
    private void Tool_RefreshOverviewMap()
    {
        // BUGFIX: the original guard was inverted (`== null ||`), so it
        // dereferenced _doc.FocusMap exactly when _doc or FocusMap was null
        // (NullReferenceException) and skipped the extent update otherwise.
        if (_doc != null && _doc.FocusMap != null)
        {
            InsertMapExtent(_doc.FocusMap.Display);
        }
        BeginRefreshThread(DrawPhase.All);
    }

    private void FormOverviewMap_Load(object sender, EventArgs e)
    {
        RefreshOverviewMap();
    }

    /// <summary>
    /// Rebuilds the overview map as a copy of the current focus map, zooms it
    /// to the map limit, inserts the extent rectangle and starts a redraw.
    /// </summary>
    public void RefreshOverviewMap()
    {
        if (_doc == null || _doc.FocusMap == null) return;
        _ovmap = new Map(_doc.FocusMap as Map, false);
        mapView1.Map = _ovmap;
        mapView1.resizeMode = gView.Framework.UI.Controls.MapView.ResizeMode.SameExtent;
        _ovmap.Display.ZoomTo(_doc.FocusMap.Display.Limit);
        _ovmap.NewBitmap += new NewBitmapEvent(mapView1.NewBitmapCreated);
        _ovmap.Display.SpatialReference = _doc.FocusMap.Display.SpatialReference;
        InsertMapExtent(_doc.FocusMap.Display);
        _tool.OvMap = _ovmap;
        mapView1.Tool = _tool;
        BeginRefreshThread(DrawPhase.All);
    }

    /// <summary>Starts an asynchronous redraw of the given draw phase.</summary>
    private void BeginRefreshThread(DrawPhase phase)
    {
        Thread thread = new Thread(new ParameterizedThreadStart(this.StartRefreshTread));
        thread.Start(phase);
    }

    private void StartRefreshTread(object phase)
    {
        // NOTE(review): RefreshMap is invoked from a worker thread; this assumes
        // MapView.RefreshMap is safe to call off the UI thread — confirm.
        //this.Cursor = Cursors.WaitCursor;
        mapView1.RefreshMap((DrawPhase)phase);
        //this.Cursor = Cursors.Cross;
    }

    /// <summary>
    /// Replaces the extent rectangle in the overview's graphics container with
    /// the envelope of <paramref name="display"/>, projecting it into the
    /// overview's spatial reference when the two differ.
    /// </summary>
    private void InsertMapExtent(IDisplay display)
    {
        if (_ovmap == null || display == null ||
            _ovmap.Display == null) return;
        IGeometry mapEnv = display.Envelope;
        _ovmap.Display.GraphicsContainer.Elements.Clear();
        if (display.SpatialReference != null &&
            !display.SpatialReference.Equals(_ovmap.Display.SpatialReference))
        {
            mapEnv = GeometricTransformer.Transform2D(
                mapEnv, display.SpatialReference,
                _ovmap.Display.SpatialReference);
        }
        _envGraphics.LimitEnvelope = _ovmap.Display.Envelope;
        _envGraphics.Geometry = mapEnv;
        _ovmap.Display.GraphicsContainer.Elements.Add(_envGraphics);
    }

    /// <summary>Public entry point: redraw the extent rectangle only (graphics phase).</summary>
    public void DrawMapExtent(IDisplay display)
    {
        InsertMapExtent(display);
        BeginRefreshThread(DrawPhase.Graphics);
    }

    #region HelperClasses
    /// <summary>
    /// Graphic element that draws the focus-map extent as a translucent filled
    /// rectangle, its outline, and a crosshair through its center spanning the
    /// overview's limit envelope.
    /// </summary>
    private class EnvelopeGraphics : IGraphicElement
    {
        private ISymbol _symbol, _symbol2;   // fill symbol / blue outline+cross symbol
        private IGeometry _geometry = null;
        private IPolyline _cross = null;
        private IEnvelope _limit = null;

        public EnvelopeGraphics()
        {
            _symbol = PlugInManager.Create(KnownObjects.Symbology_SimpleFillSymbol) as ISymbol;
            if (_symbol is IBrushColor)
                ((IBrushColor)_symbol).FillColor = Color.FromArgb(155, Color.White);
            _symbol2 = PlugInManager.Create(KnownObjects.Symbology_SimpleLineSymbol) as ISymbol;
            if (_symbol2 is IPenColor)
                ((IPenColor)_symbol2).PenColor = Color.Blue;
        }

        // Accepts only polygons/envelopes; also rebuilds the crosshair polyline
        // through the geometry's center when a limit envelope is known.
        public IGeometry Geometry
        {
            get { return _geometry; }
            set
            {
                if (value is IPolygon ||
                    value is IEnvelope)
                {
                    _geometry = value;
                    if (_limit != null)
                    {
                        IPoint center = _geometry.Envelope.Center;
                        _cross = new Polyline();
                        Path p1 = new Path();
                        p1.AddPoint(new gView.Framework.Geometry.Point(_limit.minx, center.Y));
                        p1.AddPoint(new gView.Framework.Geometry.Point(_limit.maxx, center.Y));
                        Path p2 = new Path();
                        p2.AddPoint(new gView.Framework.Geometry.Point(center.X, _limit.miny));
                        p2.AddPoint(new gView.Framework.Geometry.Point(center.X, _limit.maxy));
                        _cross.AddPath(p1);
                        _cross.AddPath(p2);
                    }
                }
            }
        }

        public IEnvelope LimitEnvelope
        {
            get { return _limit; }
            set { _limit = value; }
        }

        #region IGraphicElement Member
        public void Draw(IDisplay display)
        {
            if (_geometry != null && _symbol != null)
                display.Draw(_symbol, _geometry);
            if (_cross != null && _symbol2 != null)
                display.Draw(_symbol2, _cross);
            if (_geometry != null && _symbol2 != null)
                display.Draw(_symbol2, _geometry);
        }
        #endregion
    }

    /// <summary>
    /// Rubber-band tool used inside the overview: dragging a box (or clicking)
    /// moves the focus map's extent; the context menu offers zoom shortcuts.
    /// </summary>
    private class OVTool : ITool, IToolContextMenu
    {
        private IMapDocument _doc = null;
        private ContextMenuStrip _contextMenu;
        private IMap _ovmap = null;

        public delegate void RefreshOverviewMapHandler();
        public event RefreshOverviewMapHandler RefreshOverviewMap = null;

        public OVTool()
        {
            _contextMenu = new ContextMenuStrip();
            ToolStripMenuItem item = new ToolStripMenuItem(
                "Zoom to actual extent",
                global::gView.Plugins.Tools.Properties.Resources.zoom
                );
            item.Click += new EventHandler(zoom2actual_Click);
            _contextMenu.Items.Add(item);
            item = new ToolStripMenuItem(
                "Zoom to maximum extent",
                global::gView.Plugins.Tools.Properties.Resources.map16);
            item.Click += new EventHandler(zoom2max_Click);
            _contextMenu.Items.Add(item);
            // NOTE(review): no subscriber can exist during construction, so this
            // invocation never fires; kept for behavioral parity.
            if (RefreshOverviewMap != null)
                RefreshOverviewMap();
        }

        // Context menu: zoom the overview to the focus map's current extent.
        void zoom2actual_Click(object sender, EventArgs e)
        {
            if (_doc == null || _doc.FocusMap == null || _doc.FocusMap.Display == null ||
                _ovmap == null || _ovmap.Display == null) return;
            IEnvelope extent = _doc.FocusMap.Display.Envelope;
            if (_doc.FocusMap.Display.SpatialReference != null &&
                !_doc.FocusMap.Display.SpatialReference.Equals(_ovmap.Display.SpatialReference))
            {
                extent = GeometricTransformer.Transform2D(
                    extent,
                    _doc.FocusMap.Display.SpatialReference,
                    _ovmap.Display.SpatialReference).Envelope;
            }
            _ovmap.Display.ZoomTo(extent);
            if (RefreshOverviewMap != null)
                RefreshOverviewMap();
        }

        // Context menu: zoom the overview to the focus map's full limit.
        void zoom2max_Click(object sender, EventArgs e)
        {
            if (_doc == null || _doc.FocusMap == null || _doc.FocusMap.Display == null ||
                _ovmap == null || _ovmap.Display == null) return;
            IEnvelope extent = _doc.FocusMap.Display.Limit;
            if (_doc.FocusMap.Display.SpatialReference != null &&
                !_doc.FocusMap.Display.SpatialReference.Equals(_ovmap.Display.SpatialReference))
            {
                extent = GeometricTransformer.Transform2D(
                    extent,
                    _doc.FocusMap.Display.SpatialReference,
                    _ovmap.Display.SpatialReference).Envelope;
            }
            _ovmap.Display.ZoomTo(extent);
            if (RefreshOverviewMap != null)
                RefreshOverviewMap();
        }

        public IMap OvMap
        {
            get { return _ovmap; }
            set { _ovmap = value; }
        }

        #region ITool Member
        public string Name
        {
            get { return "OVTool"; }
        }

        public bool Enabled
        {
            get { return true; }
        }

        public string ToolTip
        {
            get { return ""; }
        }

        public ToolType toolType
        {
            get { return ToolType.rubberband; }
        }

        public object Image
        {
            get { return null; }
        }

        public void OnCreate(object hook)
        {
            if (hook is IMapDocument)
                _doc = (IMapDocument)hook;
        }

        /// <summary>
        /// Handles a rubber-band gesture: projects the dragged box into the
        /// focus map's spatial reference, then either pans (degenerate box,
        /// i.e. a click) or zooms the focus map to it, and refreshes the app.
        /// </summary>
        public void OnEvent(object MapEvent)
        {
            if (_doc == null || _doc.FocusMap == null || _doc.FocusMap.Display == null) return;
            if (!(MapEvent is MapEventRubberband)) return;
            MapEventRubberband ev = (MapEventRubberband)MapEvent;
            if (ev.Map == null) return;
            if (!(ev.Map.Display is Display)) return;
            Display nav = (Display)ev.Map.Display;
            IEnvelope extent = new Envelope(ev.minX, ev.minY, ev.maxX, ev.maxY);
            if (ev.Map.Display.SpatialReference != null &&
                !ev.Map.Display.SpatialReference.Equals(_doc.FocusMap.Display.SpatialReference))
            {
                extent = GeometricTransformer.Transform2D(
                    extent,
                    ev.Map.Display.SpatialReference,
                    _doc.FocusMap.Display.SpatialReference).Envelope;
            }
            // A (near-)zero-sized box is treated as a click: keep the current
            // zoom level and just re-center the focus map.
            if (Math.Abs(ev.maxX - ev.minX) < 1e-5 ||
                Math.Abs(ev.maxY - ev.minY) < 1e-5)
            {
                IEnvelope dispEnv = new Envelope(_doc.FocusMap.Display.Envelope);
                dispEnv.Center = extent.Center;
                _doc.FocusMap.Display.ZoomTo(dispEnv);
            }
            else
            {
                _doc.FocusMap.Display.ZoomTo(extent);
            }
            if (_doc.Application is IMapApplication)
            {
                ((IMapApplication)_doc.Application).RefreshActiveMap(DrawPhase.All);
            }
            //ev.refreshMap = true;
            //ev.drawPhase = DrawPhase.Graphics;
        }
        #endregion

        #region IToolContextMenu Member
        public ContextMenuStrip ContextMenu
        {
            get
            {
                return _contextMenu;
            }
        }
        #endregion
    }
    #endregion

    private void mapView1_MouseDown(object sender, MouseEventArgs e)
    {
        //this.MouseLeave -= new EventHandler(mapView1_MouseLeave);
        //this.MouseEnter -= new EventHandler(mapView1_MouseEnter);
    }

    private void mapView1_MouseUp(object sender, MouseEventArgs e)
    {
        //this.MouseLeave += new EventHandler(mapView1_MouseLeave);
        //this.MouseEnter += new EventHandler(mapView1_MouseEnter);
    }

    // Keep the overview window focused while the mouse hovers over it, and
    // give focus back to the owner when the mouse leaves.
    private void FormOverviewMap_MouseEnter(object sender, EventArgs e)
    {
        this.Activate();
    }

    private void FormOverviewMap_MouseLeave(object sender, EventArgs e)
    {
        if (this.Owner != null)
            this.Owner.Activate();
    }
}
}
| |
using System;
using System.Linq;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using SIL.Extensions;
using SIL.Scripture.Properties;
namespace SIL.Scripture
{
/// <summary>
/// Manages internal information for a versification. You should use the <see cref="ScrVers"/> class to
/// access the versification information.
/// </summary>
public sealed class Versification
{
#region Constants/Member variables
// Chapter/verse count used for books outside the canon (998, not 999, so the
// BBBCCCVVV packed value stays computable).
public const int NonCanonicalLastChapterOrVerse = 998;
// Matches the name header line of a .vrs file, e.g.: # Versification "English"
private static readonly Regex versificationNameMatcher =
new Regex("^#\\s*Versification\\s+\"(?<name>[^\"]+)\"\\s*$", RegexOptions.Compiled);
// Lazily built cache for the ScriptureBooks property.
private BookSet scriptureBookSet;
#endregion
#region Member variables
private readonly string name;
// One int[] per book (index = bookNum - 1); each element is the last verse of that chapter.
private readonly List<int[]> bookList;
/// <summary>Mapping to and from standard versification</summary>
private readonly VerseMappings mappings;
/// <summary>Excluded verses are represented as BBBCCCVVV integers so lookup with segments will be handled correctly</summary>
private readonly HashSet<int> excludedVerses;
/// <summary>Verses with segments are represented as BBBCCCVVV integers so lookup with segments will be handled correctly</summary>
private readonly Dictionary<int, string[]> verseSegments;
// Lazily resolved in ToString(); may be overwritten by the file's name header.
private string description;
#endregion
#region Construct/Initialize
/// <summary>
/// Creates a new, empty Versification with the specified name.
/// </summary>
/// <param name="versName">versification name; a built-in name yields the matching ScrVersType</param>
/// <param name="fullPath">path of the backing .vrs file, or null for built-in (embedded) versifications</param>
private Versification(string versName, string fullPath)
{
name = versName;
Type = Table.GetVersificationType(versName);
FullPath = fullPath;
bookList = new List<int[]>();
mappings = new VerseMappings();
excludedVerses = new HashSet<int>();
verseSegments = new Dictionary<int, string[]>();
}
/// <summary>
/// Creates a customized copy of another Versification (copies book list, mappings,
/// excluded verses and verse segments; Type becomes Unknown and BaseVersification is set).
/// </summary>
/// <param name="baseVersification">versification to copy; must not be null</param>
/// <param name="newName">name of the new, customized versification</param>
/// <param name="fullPath">path of the overriding .vrs file</param>
private Versification(Versification baseVersification, string newName, string fullPath)
{
if (baseVersification == null)
throw new ArgumentNullException("baseVersification");
name = newName;
FullPath = fullPath;
BaseVersification = baseVersification;
Type = ScrVersType.Unknown;
description = baseVersification.description;
bookList = new List<int[]>(baseVersification.bookList);
mappings = new VerseMappings(baseVersification.mappings);
excludedVerses = new HashSet<int>(baseVersification.excludedVerses);
verseSegments = new Dictionary<int, string[]>(baseVersification.verseSegments);
}
// Removes all loaded data so the versification can be re-populated (see
// Table.ReloadVersifications). Name/Type/FullPath are left intact.
private void Clear()
{
bookList.Clear();
mappings.Clear();
excludedVerses.Clear();
verseSegments.Clear();
}
#endregion
#region Internal properties
/// <summary>
/// Gets the name of this versification
/// </summary>
internal string Name
{
get { return name; }
}
/// <summary>
/// Gets the base versification of this customized versification or null if this versification is
/// not customized.
/// </summary>
internal Versification BaseVersification { get; private set; }
/// <summary>
/// Gets the full path for this versification file (e.g. \My Paratext Projects\eng.vrs).
/// <para>Note that this will be null for built-in versifications since they are stored as embedded resources.</para>
/// </summary>
internal string FullPath { get; private set; }
/// <summary>
/// Is versification file for this versification present
/// </summary>
internal bool IsPresent
{
get { return Table.Implementation.VersificationFileExists(Name); }
}
/// <summary>
/// Gets whether or not this versification is created from a custom VRS file that overrides
/// a default base versification
/// </summary>
internal bool IsCustomized
{
get { return BaseVersification != null; }
}
/// <summary>
/// Gets the type of versification.
/// </summary>
internal ScrVersType Type { get; private set; }
/// <summary>
/// Gets whether the current versification has verse segment information.
/// </summary>
internal bool HasVerseSegments
{
get { return verseSegments != null && verseSegments.Count > 0; }
}
/// <summary>
/// All books which are valid in this scripture text.
/// Valid means a) is a canonical book, b) not obsolete, c) present in the versification for this text.
/// The set is computed once and cached; books whose extent is a single chapter with a single
/// verse (the placeholder shape for books absent from the .vrs file) are filtered out.
/// </summary>
internal BookSet ScriptureBooks
{
get
{
if (scriptureBookSet == null)
{
scriptureBookSet = new BookSet();
foreach (int bookNum in Canon.ScriptureBooks.SelectedBookNumbers
.Where(bookNum => LastChapter(bookNum) != 1 || LastVerse(bookNum, 1) != 1))
{
scriptureBookSet.Add(bookNum);
}
}
return scriptureBookSet;
}
}
#endregion
#region Internal methods
/// <summary>
/// Gets last book in this project
/// </summary>
/// <summary>
/// Gets the number of the last book defined in this versification (0 if none).
/// </summary>
internal int LastBook()
{
    if (bookList == null)
        return 0;
    return bookList.Count;
}
/// <summary>
/// Gets last chapter number in this book.
/// </summary>
/// <param name="bookNum">1-based book number</param>
internal int LastChapter(int bookNum)
{
// Non-scripture books have 998 chapters
if (!Canon.IsCanonical(bookNum))
return NonCanonicalLastChapterOrVerse; // Use 998 so the VerseRef.BBBCCCVVV value is computed properly
// Anything else not in .vrs file has 1 chapter
if (bookNum > bookList.Count)
return 1;
int[] chapters = bookList[bookNum - 1];
return chapters.Length;
}
/// <summary>
/// Gets last verse number in this book/chapter. Out-of-range chapters return 1.
/// </summary>
/// <param name="bookNum">1-based book number</param>
/// <param name="chapterNum">1-based chapter number</param>
internal int LastVerse(int bookNum, int chapterNum)
{
// Non-scripture books have 998 verses in each chapter
if (!Canon.IsCanonical(bookNum))
return NonCanonicalLastChapterOrVerse; // Use 998 so the VerseRef.BBBCCCVVV value is computed properly
// Anything else not in .vrs file has 1 chapter
if (bookNum > bookList.Count)
return 1;
int[] chapters = bookList[bookNum - 1];
if (chapterNum > chapters.Length || chapterNum < 1)
return 1;
return chapters[chapterNum - 1];
}
/// <summary>
/// Determines whether the specified verse is excluded in the versification.
/// </summary>
/// <param name="bbbcccvvv">verse reference packed as a BBBCCCVVV integer</param>
internal bool IsExcluded(int bbbcccvvv)
{
return excludedVerses.Contains(bbbcccvvv);
}
/// <summary>
/// Gets a list of verse segments for the specified reference or null if the specified
/// reference does not have segments defined in the versification.
/// </summary>
internal string[] VerseSegments(int bbbcccvvv)
{
    string[] segments;
    return verseSegments.TryGetValue(bbbcccvvv, out segments) ? segments : null;
}
/// <summary>
/// Change the versification of an entry with Verse like 1-3 or 1,3a.
/// Can't really work in the most general case because the verse parts could become separate chapters.
/// </summary>
/// <returns>true if successful (i.e. all verses were in the same chapter in the new versification),
/// false if the changing resulted in the reference spanning chapters (which makes the results undefined)</returns>
internal bool ChangeVersificationWithRanges(VerseRef vref, out VerseRef newRef)
{
VerseRef vref2 = vref;
// The capture group keeps the separators ('-' or ',') in the result, so
// parts alternates: verse, sep, verse, sep, verse ...
string[] parts = Regex.Split(vref.Verse, @"([,\-])");
vref.Verse = parts[0];
ChangeVersification(ref vref);
bool allSameChapter = true;
// Convert every remaining verse part (even indices) individually, then stitch
// the string back together with the original separators (odd indices).
for (int i = 2; i < parts.Length; i += 2)
{
VerseRef vref3 = vref2;
vref3.Verse = parts[i];
ChangeVersification(ref vref3);
allSameChapter &= vref.ChapterNum == vref3.ChapterNum;
vref.Verse = vref.Verse + parts[i - 1] + vref3.Verse;
}
newRef = vref;
return allSameChapter;
}
/// <summary>
/// Change the passed VerseRef to be this versification. The verse is first mapped
/// from its current versification to the standard versification, then from the
/// standard versification into this one.
/// </summary>
internal void ChangeVersification(ref VerseRef vref)
{
// Nothing to convert if the reference is empty or already in this versification.
if (vref.IsDefault || vref.Versification == null || vref.Versification.VersInfo == this)
{
vref.Versification = new ScrVers(this);
return;
}
Debug.Assert(!vref.HasMultiple, "Use ChangeVersificationWithRanges");
Versification origVersification = vref.Versification.VersInfo;
// Map from existing to standard versification
VerseRef origVerse = vref;
origVerse.Versification = null;
VerseRef standardVerse;
if (origVersification.mappings != null)
standardVerse = origVersification.mappings.GetStandard(origVerse) ?? origVerse;
else
standardVerse = origVerse;
// If both versifications contain this verse and
// map this verse to the same location then no versification change is needed.
// This test is present in order to prevent a verse being changed when you have a many to one mapping from
// a versification to a standard versification (e.g. FB-17661)
VerseRef standardVerseThisVersification;
if (mappings != null)
standardVerseThisVersification = mappings.GetStandard(origVerse) ?? origVerse;
else
standardVerseThisVersification = origVerse;
// ESG is a special case since we have added mappings from verses to LXX segments in several versifications and
// want this mapping to work both ways.
if (vref.Book != "ESG" && standardVerse.Equals(standardVerseThisVersification) && BookChapterVerseExists(vref))
{
vref.Versification = new ScrVers(this);
return;
}
// Map from standard versification to this versification
VerseRef newVerse;
if (mappings != null)
newVerse = mappings.GetVers(standardVerse) ?? standardVerse;
else
newVerse = standardVerse;
// If verse has changed, parse new value
if (!origVerse.Equals(newVerse))
vref.CopyFrom(newVerse);
vref.Versification = new ScrVers(this);
}
// True when the reference falls inside this versification's extent
// (book defined, chapter within the book, verse within the chapter).
private bool BookChapterVerseExists(VerseRef vref)
{
    if (vref.BookNum > LastBook())
        return false;
    if (vref.ChapterNum > LastChapter(vref.BookNum))
        return false;
    return vref.VerseNum <= LastVerse(vref.BookNum, vref.ChapterNum);
}
/// <summary>
/// Write out versification information to the specified stream, in the same
/// line syntax that the Table parsing methods read ("#!" prefix for the
/// excluded-verse and verse-segment extension lines).
/// </summary>
internal void WriteToStream(StringWriter stream)
{
// Write out the list of books, chapters, verses
stream.WriteLine("# List of books, chapters, verses");
stream.WriteLine("# One line per book.");
stream.WriteLine("# One entry for each chapter.");
stream.WriteLine("# Verse number is the maximum verse number for that chapter.");
for (int book = 0; book < bookList.Count; book++)
{
int[] versesInChapter = bookList[book];
stream.Write(Canon.BookNumberToId(book + 1));
for (int chap = 0; chap < versesInChapter.Length; chap++)
stream.Write(" " + (chap + 1) + Table.chapVerseSep + versesInChapter[chap]);
stream.WriteLine();
}
// Write out the mappings, if any
stream.WriteLine("#");
stream.WriteLine("# Mappings from this versification to standard versification");
Dictionary<VerseRef, VerseRef> mappingRanges = mappings.GetMappingRanges();
foreach (KeyValuePair<VerseRef, VerseRef> mappingRange in mappingRanges)
stream.WriteLine(mappingRange.Key + " = " + mappingRange.Value);
// Write out excluded verses, if any
stream.WriteLine("#");
stream.WriteLine("# Excluded verses");
foreach (int bbbcccvvv in excludedVerses)
stream.WriteLine("#! -" + new VerseRef(bbbcccvvv));
// Write out verse segment information, if any
stream.WriteLine("#");
stream.WriteLine("# Verse segment information");
foreach (KeyValuePair<int, string[]> verseSegPair in verseSegments)
{
stream.Write("#! *" + new VerseRef(verseSegPair.Key));
// An empty segment name is serialized as "-" (the unspecified-segment symbol).
foreach (string seg in verseSegPair.Value)
stream.Write("," + (seg.Length == 0 ? "-" : seg));
stream.WriteLine();
}
}
/// <summary>
/// Get the string description of the versification. Lazily resolved and cached:
/// read from the versification-name header of the .vrs file when present,
/// otherwise the full path, and finally the versification name.
/// </summary>
public override string ToString()
{
    if (description != null)
        return description;
    description = FullPath; // set default;
    if (!string.IsNullOrEmpty(FullPath) && File.Exists(FullPath))
    {
        using (TextReader reader = new StreamReader(FullPath))
        {
            string text;
            while ((text = reader.ReadLine()) != null)
            {
                Match match = versificationNameMatcher.Match(text);
                if (match.Success)
                {
                    // Use the named group, consistent with ProcessVersLine.
                    // (The original indexed Groups[1], which only works while
                    // the pattern has no other capture groups.)
                    description = match.Groups["name"].Value;
                    break;
                }
            }
        }
    }
    if (string.IsNullOrEmpty(description))
        description = Name;
    return description;
}
// Value equality: two versifications are equal when their name, description and
// all loaded data (book lists, excluded verses, segments, mappings) match.
public override bool Equals(object obj)
{
var other = obj as Versification;
if (other == null)
return false;
if (ReferenceEquals(this, obj))
return true;
return name == other.name && description == other.description &&
bookList.SequenceEqual(other.bookList, new IntArrayComparer()) &&
excludedVerses.KeyedSetsEqual(other.excludedVerses) &&
verseSegments.KeyedSetsEqual(other.verseSegments) &&
mappings.Equals(other.mappings);
}
// Hash only by name: equal versifications always share a name, so this is
// consistent with Equals (if coarse).
public override int GetHashCode()
{
return name.GetHashCode();
}
// Element-wise equality comparer for the per-book chapter/verse arrays.
private sealed class IntArrayComparer : IEqualityComparer<int[]>
{
    public bool Equals(int[] x, int[] y)
    {
        if (ReferenceEquals(x, y))
            return true;
        if (x == null || y == null)
            return false;
        // Length check is a cheap fast path before the element-wise compare.
        return x.Length == y.Length && x.SequenceEqual(y);
    }

    // Content-based hash so that equal arrays hash equally. (The original
    // returned obj.GetHashCode(), i.e. the reference identity hash, which
    // violates the IEqualityComparer contract that equal values must have
    // equal hash codes.)
    public int GetHashCode(int[] obj)
    {
        if (obj == null)
            return 0;
        int hash = 17;
        foreach (int value in obj)
            hash = hash * 31 + value;
        return hash;
    }
}
#endregion
#region Table class
/// <summary>
/// Provides public access to the list of versifications
/// </summary>
public class Table
{
#region Constants
// Symbols used in parsing lines from a versification file
private const char commentSymbol = '#';
private const char excludedSymbol = '-';
private const char segmentSymbol = '*';
private const char unspecifiedSegSymbol = '-';
private const char segmentSep = ',';
private const char mappingSymbol = '=';
private const char versExtensionSymbol = '!';
internal const char chapVerseSep = ':';
#endregion
#region Member variables
// Maps built-in versification names (e.g. "English") to their ScrVersType; filled by the static ctor.
private static readonly Dictionary<string, ScrVersType> stringToTypeMap = new Dictionary<string, ScrVersType>();
// Singleton-style access point; tests may substitute their own Table.
public static Table Implementation = new Table();
// All loaded versifications, guarded by lock(versifications) throughout this class.
private readonly Dictionary<VersificationKey, Versification> versifications =
new Dictionary<VersificationKey, Versification>();
#endregion
#region Static constructor
// Build the name -> type lookup once for every ScrVersType enum value
// (e.g. "English" -> ScrVersType.English).
static Table()
{
foreach (ScrVersType type in Enum.GetValues(typeof(ScrVersType)))
stringToTypeMap[type.ToString()] = type;
}
#endregion
#region Public methods
/// <summary>
/// True iff named versification exists (either a built-in type or a custom
/// versification that has already been loaded).
/// </summary>
public bool Exists(string versName)
{
    // Built-in versifications always exist.
    if (GetVersificationType(versName) != ScrVersType.Unknown)
        return true;
    lock (versifications)
        return versifications.ContainsKey(new VersificationKey(ScrVersType.Unknown, versName));
}
// True when the named versification exists AND has a readable backing file
// (built-in versifications count as present even without a file on disk).
public virtual bool VersificationFileExists(string versName)
{
if (!Exists(versName))
return false;
Versification versification = Get(versName);
if (versification.FullPath != null)
return File.Exists(versification.FullPath);
// If not a known type and it doesn't have a path, then assume it's an invalid versification.
return typeof(ScrVersType).IsEnumDefined(versName);
}
/// <summary>
/// Removes all versifications that have an unknown type (i.e. all versifications that are not built-in).
/// Mostly used for testing purposes.
/// </summary>
public void RemoveAllUnknownVersifications()
{
lock (versifications)
{
// VersificationTables() snapshots the dictionary values, so removing
// entries here does not invalidate the enumeration.
foreach (ScrVers ver in VersificationTables())
{
if (ver.Type == ScrVersType.Unknown)
versifications.Remove(new VersificationKey(ScrVersType.Unknown, ver.Name));
}
}
}
/// <summary>
/// Gets all versification schemes: the six built-ins first, then any loaded
/// custom (Unknown-type) versifications.
/// </summary>
public IEnumerable<ScrVers> VersificationTables()
{
yield return ScrVers.English;
yield return ScrVers.Original;
yield return ScrVers.Septuagint;
yield return ScrVers.Vulgate;
yield return ScrVers.RussianOrthodox;
yield return ScrVers.RussianProtestant;
// Copy the values under the lock so callers can enumerate safely.
List<Versification> versificationList;
lock (versifications)
versificationList = versifications.Values.ToList();
foreach (var versification in versificationList.Where(v => v.Type == ScrVersType.Unknown))
yield return new ScrVers(versification);
}
/// <summary>
/// Reload all non-standard versifications used so far.
/// This is necessary after a versification file has changed.
/// </summary>
public void ReloadVersifications()
{
    // Snapshot the values under the lock, consistent with every other member
    // of this class that touches the versifications dictionary. (The original
    // enumerated versifications.Values without the lock.)
    List<Versification> versificationList;
    lock (versifications)
        versificationList = versificationList = versifications.Values.ToList();
    foreach (Versification versificationReadonly in versificationList)
    {
        // Local copy because Load takes the versification by ref.
        Versification versification = versificationReadonly;
        // REVIEW: This version doesn't seem to support customized versifications.
        if (string.IsNullOrEmpty(versification.FullPath) || !File.Exists(versification.FullPath))
            continue; // Don't reload versifications that don't have backing files
        versification.Clear();
        Load(versification.FullPath, versification.Type, ref versification);
    }
}
/// <summary>
/// Loads the specified versification file and returns the results. The versification is loaded into
/// the versification map so any calls to get a versification of that name will return the same versification.
/// </summary>
/// <param name="fullPath">full path of the .vrs file to load</param>
/// <param name="fallbackName">name to use when the file has no name header line</param>
public ScrVers Load(string fullPath, string fallbackName = null)
{
Versification versification = null;
Load(fullPath, ScrVersType.Unknown, ref versification, fallbackName);
return new ScrVers(versification);
}
/// <summary>
/// Loads a versification from the specified stream while overriding a base versification.
/// The versification is loaded into the versification map so any calls to get a versification of that name
/// will return the same versification.
/// </summary>
/// <exception cref="ArgumentNullException">name is null/empty or baseVers is null</exception>
/// <exception cref="InvalidOperationException">baseVers is itself a customized versification</exception>
public ScrVers Load(TextReader stream, string fullPath, ScrVers baseVers, string name)
{
if (string.IsNullOrEmpty(name))
throw new ArgumentNullException("name");
if (baseVers == null)
throw new ArgumentNullException("baseVers");
// Customizations cannot be chained; the base must itself be uncustomized.
if (baseVers.IsCustomized)
throw new InvalidOperationException("Can not create a custom versification from customized versification " + baseVers.Name);
Versification versification;
lock (versifications)
{
// Start from a copy of the base, then apply the stream's overrides on top.
versification = new Versification(baseVers.VersInfo, name, fullPath);
Load(stream, fullPath, ScrVersType.Unknown, ref versification, name);
versifications.Add(new VersificationKey(ScrVersType.Unknown, name), versification);
}
return new ScrVers(versification);
}
// Classifies one line of a versification file and strips its comment part.
// "#!"-prefixed lines are Paratext 7.3+ extension lines: their content is
// parsed as data even though they start with the comment symbol.
public static ParsedVersificationLine ParseLine(string line)
{
line = line.Trim();
bool isCommentLine = (line.Length > 0 && line[0] == commentSymbol);
// Split off the trailing comment (at most one split, on the first '#').
string[] parts = line.Split(new[] { commentSymbol }, 2);
line = parts[0].Trim();
string comment = parts.Length == 2 ? parts[1].Trim() : string.Empty;
LineType lineType;
if (line == string.Empty && comment.Length > 2 && comment[0] == versExtensionSymbol)
{
line = comment.Substring(1).Trim(); // found Paratext 7.3(+) versification line beginning with #!
comment = "";
isCommentLine = false;
}
if (line.Length == 0 || isCommentLine)
lineType = LineType.comment;
else if (line.Contains(mappingSymbol))
{
// mapping one verse to multiple
lineType = line[0] == '&' ? LineType.oneToManyMapping : LineType.standardMapping;
}
else if (line[0] == excludedSymbol)
lineType = LineType.excludedVerse;
else if (line[0] == segmentSymbol)
lineType = LineType.verseSegments;
else
lineType = LineType.chapterVerse;
return new ParsedVersificationLine(lineType, line, comment);
}
#endregion
/// <summary>
/// Override this to handle a versification line error besides just throwing it.
/// The base implementation handles nothing, so every error is thrown.
/// </summary>
/// <returns>True if the exception was handled, false otherwise (meaning it will be thrown)</returns>
protected virtual bool HandleVersificationLineError(InvalidVersificationLineException ex)
{
return false;
}
#region Private/internal methods
/// <summary>
/// Get the versification table for this versification. Built-in versifications
/// are loaded on first use from embedded resource text and cached; subsequent
/// calls return the cached instance.
/// </summary>
/// <exception cref="InvalidOperationException">type is Unknown (no embedded resource exists)</exception>
internal Versification Get(ScrVersType type)
{
lock (versifications)
{
VersificationKey key = CreateKey(type, "");
Versification versification;
if (versifications.TryGetValue(key, out versification))
return versification;
// Not cached yet: pick the embedded .vrs resource for this type.
string resourceFileText;
switch (type)
{
case ScrVersType.Original: resourceFileText = Resources.org_vrs; break;
case ScrVersType.English: resourceFileText = Resources.eng_vrs; break;
case ScrVersType.Septuagint: resourceFileText = Resources.lxx_vrs; break;
case ScrVersType.Vulgate: resourceFileText = Resources.vul_vrs; break;
case ScrVersType.RussianOrthodox: resourceFileText = Resources.rso_vrs; break;
case ScrVersType.RussianProtestant: resourceFileText = Resources.rsc_vrs; break;
default: throw new InvalidOperationException("Can not create a versification for an unknown type");
}
versification = new Versification(type.ToString(), null);
using (TextReader fallbackVersificationStream = new StringReader(resourceFileText))
ReadVersificationFile(fallbackVersificationStream, null, type, ref versification);
versifications[key] = versification;
return versification;
}
}
/// <summary>
/// Gets the versification with the specified name. This can be a built-in versification or a custom one.
/// An unknown name that has not been loaded yet is created from the English versification
/// resource as a fallback (but keeps the requested name).
/// </summary>
/// <exception cref="ArgumentNullException">versName is null or empty</exception>
protected internal virtual Versification Get(string versName)
{
if (string.IsNullOrEmpty(versName))
throw new ArgumentNullException("versName");
ScrVersType type = GetVersificationType(versName);
if (type != ScrVersType.Unknown)
return Get(type);
lock (versifications)
{
VersificationKey key = CreateKey(ScrVersType.Unknown, versName);
Versification versification;
if (versifications.TryGetValue(key, out versification))
return versification;
// Unknown, never-loaded name: fall back to the English versification data.
versification = new Versification(versName, null);
using (TextReader fallbackVersificationStream = new StringReader(Resources.eng_vrs))
ReadVersificationFile(fallbackVersificationStream, null, ScrVersType.Unknown, ref versification);
versifications[key] = versification;
return versification;
}
}
/// <summary>
/// Get the versification type given its name. If it is not a standard
/// versification type, return Unknown.
/// </summary>
internal static ScrVersType GetVersificationType(string versName)
{
    ScrVersType type;
    if (stringToTypeMap.TryGetValue(versName, out type))
        return type;
    return ScrVersType.Unknown;
}
// Opens the file and delegates to the stream-based overload.
private void Load(string filePath, ScrVersType type, ref Versification versification, string fallbackName = null)
{
using (TextReader stream = new StreamReader(filePath))
Load(stream, filePath, type, ref versification, fallbackName);
}
// Thin wrapper kept for symmetry with the public Load overloads.
private void Load(TextReader stream, string filePath, ScrVersType type, ref Versification versification, string fallbackName)
{
ReadVersificationFile(stream, filePath, type, ref versification, fallbackName);
}
/// <summary>
/// Read versification file and "add" its entries.
/// At the moment we only do this once. Eventually we will call this twice.
/// Once for the standard versification, once for custom entries in versification.vrs
/// file for this project.
/// </summary>
/// <param name="stream">text of the versification file</param>
/// <param name="filePath">originating file path, or null for embedded resources</param>
/// <param name="type">versification type being loaded</param>
/// <param name="versification">existing versification to fill, or null to create one from the file's name header</param>
/// <param name="fallbackName">name to use if the file has no name header</param>
private void ReadVersificationFile(TextReader stream, string filePath, ScrVersType type,
ref Versification versification, string fallbackName = null)
{
// Parse the lines in the versification file
foreach (string line in GetLines(stream))
{
try
{
ProcessVersLine(line, filePath, type, fallbackName, ref versification);
}
catch (InvalidVersificationLineException ex)
{
// Subclasses may choose to swallow/report line errors instead of aborting.
if (!HandleVersificationLineError(ex))
throw;
}
}
}
/// <summary>
/// Eagerly reads every line from the reader into a list of strings.
/// </summary>
private static IEnumerable<string> GetLines(TextReader reader)
{
    List<string> lines = new List<string>();
    string line;
    while ((line = reader.ReadLine()) != null)
        lines.Add(line);
    return lines;
}
/// <summary>
/// Process a line from a versification file.
/// </summary>
/// <param name="line">line of text in the file</param>
/// <param name="filePath">full path to the versification file (if loaded from a file)</param>
/// <param name="type">versification type being loaded</param>
/// <param name="fallbackName">Optional name to use if no name is found inside the file.</param>
/// <param name="versification">Existing versification (being reloaded) or null (loading a new one)</param>
private void ProcessVersLine(string line, string filePath, ScrVersType type, string fallbackName, ref Versification versification)
{
// While no versification exists yet, watch for the name header line
// (# Versification "...") to create and register it.
if (versification == null)
{
string name = null;
var match = versificationNameMatcher.Match(line);
if (match.Success)
name = match.Groups["name"].Value;
if (!string.IsNullOrEmpty(name))
{
versification = new Versification(name, filePath);
versifications[CreateKey(type, name)] = versification;
}
}
ParsedVersificationLine parsedLine = ParseLine(line);
if (parsedLine.LineType == LineType.comment)
return;
// First data line reached without a name header: use the fallback name or fail.
if (versification == null)
{
if (!string.IsNullOrEmpty(fallbackName))
{
versification = new Versification(fallbackName, filePath);
versifications[CreateKey(type, fallbackName)] = versification;
}
else
throw new InvalidVersificationLineException(VersificationLoadErrorType.MissingName, parsedLine.Line, filePath);
}
// Dispatch the data line to its specific parser.
switch (parsedLine.LineType)
{
case LineType.comment:
break;
case LineType.chapterVerse:
ParseChapterVerseLine(filePath, versification, parsedLine.Line);
break;
case LineType.standardMapping:
ParseMappingLine(filePath, versification, parsedLine.Line);
break;
case LineType.oneToManyMapping:
ParseRangeToOneMappingLine(filePath, versification, parsedLine.Line);
break;
case LineType.excludedVerse:
ParseExcludedVerseLine(filePath, versification, parsedLine.Line);
break;
case LineType.verseSegments:
// Segment lines may not contain a '#' (segment names would be ambiguous with comments).
if (parsedLine.Line.IndexOf(commentSymbol) != -1)
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, parsedLine.Line, filePath);
ParseVerseSegmentsLine(filePath, versification, parsedLine.Line);
break;
}
}
/// <summary>
/// Parse a book's chapter/verse line, listing the last verse of each chapter. For example:
/// GEN 1:31 2:25 3:24
/// "END" truncates any remaining chapters inherited from a base versification.
/// (The previous comment here wrongly described mapping lines; those are
/// handled by ParseMappingLine.)
/// </summary>
private static void ParseChapterVerseLine(string fileName, Versification versification, string line)
{
string[] parts = line.Split(' ');
int bookNum = Canon.BookIdToNumber(parts[0]);
if (bookNum == 0)
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
// Pad the book list with 1-chapter/1-verse placeholders up to this book.
while (versification.bookList.Count < bookNum)
versification.bookList.Add(new[] { 1 });
// Initialize to previous list of verses, if any.
List<int> versesInChapter = new List<int>(versification.bookList[bookNum - 1]);
int chapter = 0;
for (int i = 1; i < parts.Length; ++i)
{
// END is used if the number of chapters in custom is less than base
if (parts[i] == "END")
{
if (versesInChapter.Count > chapter)
versesInChapter.RemoveRange(chapter, versesInChapter.Count - chapter);
break;
}
string[] pieces = parts[i].Split(chapVerseSep);
if (!int.TryParse(pieces[0], out chapter) || chapter <= 0)
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
int verseCount;
if (pieces.Length != 2 || !int.TryParse(pieces[1], out verseCount) || verseCount < 0)
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
if (versesInChapter.Count < chapter)
{
for (int iChapter = versesInChapter.Count; iChapter < chapter; iChapter++)
versesInChapter.Add(1); // by default, chapters have one verse
}
versesInChapter[chapter - 1] = verseCount;
}
versification.bookList[bookNum - 1] = versesInChapter.ToArray();
}
/// <summary>
/// Parse lines indicating excluded verse numbers, like:
/// -GEN 1:5
/// </summary>
private static void ParseExcludedVerseLine(string fileName, Versification scrVers, string line)
{
    line = line.Trim();
    // Shortest valid form ("-GEN 1:5") is 8 characters.
    if (line.Length < 8 || line[0] != excludedSymbol || !line.Contains(chapVerseSep) || !line.Contains(' '))
        throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
    string[] parts = line.Split(' ');
    try
    {
        // Get Scripture reference, throwing an exception if it is not valid.
        string bookName;
        int chapter, verse;
        GetVerseReference(parts, out bookName, out chapter, out verse);
        VerseRef verseRef = new VerseRef(bookName, chapter.ToString(), verse.ToString(), new ScrVers(scrVers));
        // HashSet.Add reports whether the value was actually inserted, so one
        // call both adds the verse and detects duplicates (the original did a
        // separate Contains lookup first).
        if (!scrVers.excludedVerses.Add(verseRef.BBBCCCVVV))
            throw new InvalidVersificationLineException(VersificationLoadErrorType.DuplicateExcludedVerse, line, fileName);
    }
    catch (InvalidVersificationLineException)
    {
        throw;
    }
    catch
    {
        // Any parsing failure (bad book id, non-numeric chapter/verse) is reported as invalid syntax.
        throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
    }
}
/// <summary>
/// Parse lines specifying segments for a specific verse, like:
/// *GEN 1:5,-,a,b,c,d,e,f
/// A "-" entry means the first segment carries no marker; it may only appear
/// before any named segment.
/// </summary>
private static void ParseVerseSegmentsLine(string fileName, Versification scrVers, string line)
{
line = line.Trim();
if (line.Length < 8 || line[0] != segmentSymbol || !line.Contains(chapVerseSep) ||
!line.Contains(' ') || !line.Contains(segmentSep))
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
// Normalize by stripping any spaces after the chapter:verse separator so the
// segment list can be split reliably.
int indexOfColon = line.IndexOf(':');
line = RemoveSpaces(line, indexOfColon);
string[] parts = line.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
try
{
// Get segmenting information
int segmentStart = parts[1].IndexOf(segmentSep);
if (segmentStart == -1)
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
string segments = parts[1].Substring(segmentStart + 1);
// Get Scripture reference, throwing an exception if it is not valid.
string bookName;
int chapter, verse;
parts[1] = parts[1].Substring(0, segmentStart);
// Remove segment info from chapter:verse reference
GetVerseReference(parts, out bookName, out chapter, out verse);
List<string> segmentList = new List<string>();
bool nonEmptySegmentFound = false;
foreach (string seg in segments.Split(segmentSep))
{
if (string.IsNullOrEmpty(seg))
continue;
// "-" after a named segment is ambiguous; reject it.
if (nonEmptySegmentFound && seg == unspecifiedSegSymbol.ToString())
throw new InvalidVersificationLineException(VersificationLoadErrorType.UnspecifiedSegmentLocation, line, fileName);
if (seg == unspecifiedSegSymbol.ToString())
{
// '-' indicates no marking for segment
segmentList.Add(string.Empty);
}
else
{
segmentList.Add(seg);
nonEmptySegmentFound = true;
}
}
if (segmentList.Count == 1 && string.IsNullOrEmpty(segmentList[0]))
throw new InvalidVersificationLineException(VersificationLoadErrorType.NoSegmentsDefined, line, fileName);
int bbbcccvvv = VerseRef.GetBBBCCCVVV(Canon.BookIdToNumber(bookName), chapter, verse);
// Don't allow overwrites for built-in versifications
if (fileName == null && scrVers.verseSegments.ContainsKey(bbbcccvvv))
throw new InvalidVersificationLineException(VersificationLoadErrorType.DuplicateSegment, line);
scrVers.verseSegments[bbbcccvvv] = segmentList.ToArray();
}
catch (InvalidVersificationLineException)
{
throw;
}
catch
{
// Any other parsing failure is reported as invalid syntax.
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
}
}
/// <summary>
/// Returns the line with every space removed from the specified index onward;
/// characters before the index are untouched.
/// </summary>
private static string RemoveSpaces(string line, int index)
{
    if (index < 1)
        throw new ArgumentException("Invalid index " + index);
    if (string.IsNullOrEmpty(line) || line.Length < 2)
        throw new ArgumentException("Invalid line");
    string prefix = line.Substring(0, index);
    string suffix = line.Substring(index).Replace(" ", string.Empty);
    return prefix + suffix;
}
/// <summary>
/// Parse lines giving a mapping from this versification to standard versification:
/// NUM 17:1-13 = NUM 17:16-28
/// A range is expanded verse-by-verse, incrementing both sides in lock step.
/// </summary>
private static void ParseMappingLine(string fileName, Versification versification, string line)
{
try
{
string[] parts = line.Split(mappingSymbol);
string[] leftPieces = parts[0].Trim().Split('-');
string[] rightPieces = parts[1].Trim().Split('-');
VerseRef newVerseRef = new VerseRef(leftPieces[0]);
// Upper verse bound of the left-hand range; 0 when no range was given.
int leftLimit = leftPieces.Length == 1 ? 0 : int.Parse(leftPieces[1]);
VerseRef standardVerseRef = new VerseRef(rightPieces[0]);
while (true)
{
versification.mappings.AddMapping(newVerseRef.Clone(), standardVerseRef.Clone());
if (newVerseRef.VerseNum >= leftLimit)
break;
newVerseRef.VerseNum++;
standardVerseRef.VerseNum++;
}
}
catch
{
// Any parse/format failure is reported as invalid syntax.
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
}
}
/// <summary>
/// Parse lines giving a mapping from this versification to standard versification where
/// a range of verses is mapped to a single verse. For example:
/// NUM 17:1 = NUM 17:1-3
/// NUM 17:1-3 = NUM 17:1
/// At least one of the two sides must be a single verse.
/// </summary>
private static void ParseRangeToOneMappingLine(string fileName, Versification versification, string line)
{
line = line.Substring(1); // remove initial '&'
VerseRef[] versRefs;
VerseRef[] standardRefs;
try
{
string[] parts = line.Split(mappingSymbol);
string[] leftPieces = parts[0].Trim().Split('-');
string[] rightPieces = parts[1].Trim().Split('-');
versRefs = GetReferences(leftPieces);
standardRefs = GetReferences(rightPieces);
}
catch (Exception)
{
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidSyntax, line, fileName);
}
if (versRefs.Length != 1 && standardRefs.Length != 1) // either versification or standard must have just one verse
throw new InvalidVersificationLineException(VersificationLoadErrorType.InvalidManyToOneMap, line, fileName);
versification.mappings.AddMapping(versRefs, standardRefs);
}
/// <summary>
/// Expands a split verse spec into individual references: a single piece yields
/// one reference; two pieces (start, end-verse-number) yield every verse of the range.
/// </summary>
private static VerseRef[] GetReferences(string[] versePieces)
{
    if (versePieces.Length == 1)
        return new[] { new VerseRef(versePieces[0]) };
    VerseRef current = new VerseRef(versePieces[0]);
    int lastVerse = int.Parse(versePieces[1]);
    List<VerseRef> refs = new List<VerseRef>();
    while (true)
    {
        refs.Add(current.Clone());
        if (current.VerseNum >= lastVerse)
            break;
        current.VerseNum = current.VerseNum + 1;
    }
    return refs.ToArray();
}
/// <summary>
/// Get a verse reference from a string array. If the reference is not valid, an exception will be thrown.
/// </summary>
/// <param name="parts">a string array. The first string should contain a leading symbol followed by a
/// three-letter book name (the first character is stripped). The second string should contain chapter:verse</param>
/// <param name="bookName">[out] three-letter abbreviation for book</param>
/// <param name="chapter">[out] chapter number</param>
/// <param name="verse">[out] verse number</param>
private static void GetVerseReference(string[] parts, out string bookName, out int chapter, out int verse)
{
bookName = parts[0].Substring(1);
if (Canon.BookIdToNumber(bookName) == 0)
throw new Exception();
// Confirm that chapter and verse are valid numbers.
string[] pieces = parts[1].Split(chapVerseSep);
chapter = int.Parse(pieces[0]);
verse = int.Parse(pieces[1]);
}
private VersificationKey CreateKey(ScrVersType type, string scrVersName)
{
    // Built-in versifications are identified purely by their type; only unknown
    // (custom or file-based) versifications are additionally distinguished by name.
    if (type == ScrVersType.Unknown)
        return new VersificationKey(type, scrVersName);
    return new VersificationKey(type, "");
}
#endregion
#region VersificationKey class
/// <summary>
/// Immutable dictionary key for a versification: built-in versifications are keyed by
/// type alone (empty name); unknown/custom versifications by type plus name.
/// </summary>
private sealed class VersificationKey
{
    private readonly ScrVersType type;
    private readonly string name;

    public VersificationKey(ScrVersType type, string name)
    {
        this.type = type;
        this.name = name;
    }

    public override bool Equals(object obj)
    {
        VersificationKey that = obj as VersificationKey;
        if (that == null)
            return false;
        return that.type == type && that.name == name;
    }

    public override int GetHashCode()
    {
        return type.GetHashCode() ^ name.GetHashCode();
    }
}
#endregion
}
#endregion
#region VerseMapping class
/// <summary>
/// Provides a bidirectional mapping from both the standard versification and a
/// specific versification.
/// Although this class isn't needed from a design standpoint, it does help the
/// readability of the code that uses it.
/// </summary>
private sealed class VerseMappings
{
    // The two directions are kept in sync by AddMapping.
    private readonly Dictionary<VerseRef, VerseRef> versToStandard;
    private readonly Dictionary<VerseRef, VerseRef> standardToVers;

    /// <summary>
    /// Default constructor.
    /// </summary>
    public VerseMappings()
    {
        versToStandard = new Dictionary<VerseRef, VerseRef>(100);
        standardToVers = new Dictionary<VerseRef, VerseRef>(100);
    }

    /// <summary>
    /// Creates a copy of an original mapping.
    /// </summary>
    public VerseMappings(VerseMappings origMapping)
    {
        versToStandard = new Dictionary<VerseRef, VerseRef>(origMapping.versToStandard);
        standardToVers = new Dictionary<VerseRef, VerseRef>(origMapping.standardToVers);
    }

    /// <summary>
    /// Adds a new verse mapping. Calling this for an existing mapping will replace it.
    /// </summary>
    /// <param name="vers">The verse mapping for the specific versification</param>
    /// <param name="standard">The verse mapping for the standard versification</param>
    public void AddMapping(VerseRef vers, VerseRef standard)
    {
        if (vers.AllVerses().Count() != 1 || standard.AllVerses().Count() != 1)
            throw new ArgumentException("Mappings must resolve into a single reference on both sides.");

        // Want to compare references while ignoring versification
        standard.Versification = null;
        vers.Versification = null;

        versToStandard[vers] = standard;
        standardToVers[standard] = vers;
    }

    /// <summary>
    /// Adds a new verse mapping. Calling this for an existing mapping will replace it.
    /// </summary>
    /// <param name="vers">The verse mapping for the specific versification</param>
    /// <param name="standard">The verse mapping for the standard versification</param>
    public void AddMapping(VerseRef[] vers, VerseRef[] standard)
    {
        // One side is always a single verse (see callers), so this cross-product
        // effectively maps each verse of the range to/from that single verse.
        for (int iVers = vers.Length - 1; iVers >= 0; iVers--)
        {
            for (int iStandard = standard.Length - 1; iStandard >= 0; iStandard--)
                AddMapping(vers[iVers], standard[iStandard]);
        }
    }

    /// <summary>
    /// Gets the specific verse mapping for the specified standard verse mapping
    /// </summary>
    /// <param name="standard">The verse mapping for the standard versification</param>
    /// <returns>The found verse mapping for the specific versification (null if not found)</returns>
    public VerseRef? GetVers(VerseRef standard)
    {
        VerseRef vers;
        return standardToVers.TryGetValue(standard, out vers) ? (VerseRef?)vers : null;
    }

    /// <summary>
    /// Gets the standard verse mapping for the specified specific verse mapping
    /// </summary>
    /// <param name="vers">The verse mapping for the specific versification</param>
    /// <returns>The found verse mapping for the standard versification (null if not found)</returns>
    public VerseRef? GetStandard(VerseRef vers)
    {
        VerseRef standard;
        return versToStandard.TryGetValue(vers, out standard) ? (VerseRef?)standard : null;
    }

    /// <summary>
    /// Get the verse mappings as verse ranges rather than individual verses.
    /// </summary>
    /// <returns>a dictionary of mappings where the key is the verse or verse range in this versification;
    /// value is a verse or verse range in the standard versification that it maps to</returns>
    public Dictionary<VerseRef, VerseRef> GetMappingRanges()
    {
        // Create a sorted list of all individual verse mappings
        SortedDictionary<VerseRef, VerseRef> mappings = new SortedDictionary<VerseRef, VerseRef>(versToStandard);
        // For some strange reason, some versifications have multiple verses mapping to a single verse, so
        // we need to add any unaccounted for mappings from the other direction.
        foreach (KeyValuePair<VerseRef, VerseRef> data in standardToVers)
            mappings[data.Value] = data.Key;

        // Create a dictionary of mappings where we merge any contiguous verse mappings into a single mapping range
        Dictionary<VerseRef, VerseRef> mergedMappings = new Dictionary<VerseRef, VerseRef>();
        while (mappings.Count > 0)
        {
            KeyValuePair<VerseRef, VerseRef> versePair = mappings.First();
            VerseRef nextLeftVerse = versePair.Key.Clone();
            VerseRef nextRightVerse = versePair.Value.Clone();
            int lastLeftVerse, lastRightVerse;
            VerseRef mappedVerse;
            // Look for any contiguous verse mappings, keeping track of the last one.
            do
            {
                mappings.Remove(nextLeftVerse); // dealt with this mapping
                lastLeftVerse = nextLeftVerse.VerseNum;
                lastRightVerse = nextRightVerse.VerseNum;
                nextLeftVerse.VerseNum++;
                nextRightVerse.VerseNum++;
            }
            while (mappings.TryGetValue(nextLeftVerse, out mappedVerse) && mappedVerse.Equals(nextRightVerse));

            VerseRef leftMergedVerse = versePair.Key;
            VerseRef rightMergedVerse = versePair.Value;
            if (leftMergedVerse.VerseNum != lastLeftVerse)
            {
                // We found contiguous verse mappings, so create a mapping range.
                Debug.Assert(rightMergedVerse.VerseNum != lastRightVerse);
                leftMergedVerse.Verse = leftMergedVerse.VerseNum + "-" + lastLeftVerse;
                rightMergedVerse.Verse = rightMergedVerse.VerseNum + "-" + lastRightVerse;
            }
            mergedMappings.Add(leftMergedVerse, rightMergedVerse);
        }
        return mergedMappings;
    }

    /// <summary>
    /// Called from tests with reflection to clear the mappings
    /// </summary>
    public void Clear()
    {
        versToStandard.Clear();
        standardToVers.Clear();
    }

    public override bool Equals(object obj)
    {
        var other = obj as VerseMappings;
        if (other == null)
            return false;

        return versToStandard.KeyedSetsEqual(other.versToStandard) &&
            standardToVers.KeyedSetsEqual(other.standardToVers);
    }

    public override int GetHashCode()
    {
        // FIX: previously combined Dictionary.GetHashCode() values, which are
        // reference-based, so two instances that compared Equals (content-based
        // via KeyedSetsEqual) could hash differently - violating the
        // Equals/GetHashCode contract. Hash only content-derived data (the
        // mapping counts), which is guaranteed equal for Equals instances.
        return versToStandard.Count ^ (standardToVers.Count << 16);
    }
}
#endregion
}
#region VersificationLoadErrorType enum
/// <summary>
/// The kind of problem detected while loading a versification file;
/// carried in the Type field of InvalidVersificationLineException.
/// </summary>
public enum VersificationLoadErrorType
{
    MissingName,
    // Line could not be split/parsed into its expected pieces.
    InvalidSyntax,
    DuplicateExcludedVerse,
    UnspecifiedSegmentLocation,
    NoSegmentsDefined,
    DuplicateSegment,
    // A range-to-one mapping line had a verse range on BOTH sides;
    // at least one side must be a single verse.
    InvalidManyToOneMap
}
#endregion
#region InvalidVersificationLineException class
/// <summary>
/// Thrown when a line of a versification file cannot be processed. Carries the error
/// category plus the offending line text and file name (either may be null).
/// </summary>
[Serializable]
public class InvalidVersificationLineException : Exception
{
    public readonly VersificationLoadErrorType Type;
    public readonly string LineText;
    public readonly string FileName;

    public InvalidVersificationLineException(VersificationLoadErrorType type, string lineText = null, string fileName = null)
        // Previously no message was passed to base, so Message was the unhelpful
        // default ("Exception of type ... was thrown."). Compose one from the
        // context we already carry; the public fields are unchanged.
        : base("Invalid versification line (" + type + "): \"" + (lineText ?? "") + "\" in " + (fileName ?? "<unknown file>"))
    {
        Type = type;
        LineText = lineText;
        FileName = fileName;
    }
}
#endregion
#region ScrVersType enum
/// <summary>
/// List of versification types. Used mostly for backwards compatibility where just a
/// versification integer code was stored.
/// <para>WARNING: The order of these items are very important as they correspond to the old, legacy codes.
/// The numeric values are now explicit so an accidental reorder or insertion cannot silently
/// change the persisted codes.</para>
/// </summary>
public enum ScrVersType
{
    /// <summary>
    /// This means the versification was loaded from a file or it is a custom versification based on a
    /// built-in versification
    /// </summary>
    Unknown = 0,
    Original = 1,
    Septuagint = 2,
    Vulgate = 3,
    English = 4,
    RussianProtestant = 5,
    RussianOrthodox = 6
}
#endregion
#region LineType enumeration
/// <summary>
/// The kind of content a single versification file line was parsed into
/// (see ParsedVersificationLine, which pairs this with the line text and comment).
/// </summary>
public enum LineType { comment, chapterVerse, standardMapping, oneToManyMapping, excludedVerse, verseSegments };
#endregion
#region ParsedVersificationLine class
/// <summary>
/// Holds the pieces of one parsed versification line: its type, the (trimmed) line
/// content, and the (trimmed) trailing comment. ToString re-renders the line in
/// versification-file syntax.
/// </summary>
public sealed class ParsedVersificationLine
{
    public ParsedVersificationLine(LineType lineType, string line, string comment)
    {
        LineType = lineType;
        Line = line.Trim();
        Comment = comment.Trim();
    }

    public LineType LineType { get; private set; }
    public string Line { get; private set; }
    public string Comment { get; private set; }

    public override string ToString()
    {
        if (LineType == LineType.chapterVerse)
            return Line;

        if (LineType == LineType.oneToManyMapping)
            return "#! " + Line;

        if (LineType == LineType.comment)
            return Comment != "" ? "# " + Comment : "";

        // All other line types: the line itself plus any trailing comment.
        return Line + (Comment != "" ? " # " + Comment : "");
    }
}
#endregion
}
| |
using System;
using System.Collections;
using System.Collections.Specialized;
using System.Data;
using System.Globalization;
using System.Linq;
using System.Runtime.CompilerServices;
using Umbraco.Core;
using Umbraco.Core.Events;
using umbraco.DataLayer;
using umbraco.cms.businesslogic;
using System.Collections.Generic;
using DeleteEventArgs = umbraco.cms.businesslogic.DeleteEventArgs;
namespace umbraco.BusinessLogic
{
//TODO: Wrap this in the new services/repo layer!
/// <summary>
/// Represents one row of the umbracoUser2NodePermission table (a user/node/permission
/// triple) and provides static helpers to create, query and delete such rows.
/// </summary>
public class Permission
{
    public int NodeId { get; private set; }
    public int UserId { get; private set; }
    public char PermissionId { get; private set; }

    /// <summary>
    /// Private constructor, this class cannot be directly instantiated
    /// </summary>
    private Permission() { }

    private static ISqlHelper SqlHelper
    {
        get { return Application.SqlHelper; }
    }

    public static void MakeNew(User User, CMSNode Node, char PermissionKey)
    {
        MakeNew(User, Node, PermissionKey, true);
    }

    /// <summary>
    /// Creates a permission entry for each of the given nodes, skipping any node that
    /// already has an identical entry. Raises the New event once for the whole batch
    /// when <paramref name="raiseEvents"/> is true.
    /// </summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    internal static void MakeNew(User user, IEnumerable<CMSNode> nodes, char permissionKey, bool raiseEvents)
    {
        var asArray = nodes.ToArray();
        foreach (var node in asArray)
        {
            var parameters = new[] { SqlHelper.CreateParameter("@userId", user.Id),
                SqlHelper.CreateParameter("@nodeId", node.Id),
                SqlHelper.CreateParameter("@permission", permissionKey.ToString()) };

            // Method is synchronized so exists remains consistent (avoiding race condition)
            var exists = SqlHelper.ExecuteScalar<int>(
                "SELECT COUNT(userId) FROM umbracoUser2nodePermission WHERE userId = @userId AND nodeId = @nodeId AND permission = @permission",
                parameters) > 0;

            // BUGFIX: was 'return', which silently skipped all REMAINING nodes (and the
            // New event) as soon as one node already had the permission.
            if (exists) continue;

            SqlHelper.ExecuteNonQuery(
                "INSERT INTO umbracoUser2nodePermission (userId, nodeId, permission) VALUES (@userId, @nodeId, @permission)",
                parameters);
        }

        if (raiseEvents)
        {
            OnNew(new UserPermission(user, asArray, new[] { permissionKey }), new NewEventArgs());
        }
    }

    private static void MakeNew(User User, CMSNode Node, char PermissionKey, bool raiseEvents)
    {
        MakeNew(User, new[] { Node }, PermissionKey, raiseEvents);
    }

    /// <summary>
    /// Maps the current row of the reader onto a Permission instance.
    /// Shared by GetUserPermissions and GetNodePermissions.
    /// </summary>
    private static Permission FromReader(IRecordsReader dr)
    {
        return new Permission
        {
            NodeId = dr.GetInt("nodeId"),
            PermissionId = Convert.ToChar(dr.GetString("permission")),
            UserId = dr.GetInt("userId")
        };
    }

    /// <summary>
    /// Returns the permissions for a user
    /// </summary>
    /// <param name="user"></param>
    /// <returns></returns>
    public static IEnumerable<Permission> GetUserPermissions(User user)
    {
        var items = new List<Permission>();
        using (IRecordsReader dr = SqlHelper.ExecuteReader("select * from umbracoUser2NodePermission where userId = @userId order by nodeId", SqlHelper.CreateParameter("@userId", user.Id)))
        {
            while (dr.Read())
            {
                items.Add(FromReader(dr));
            }
        }
        return items;
    }

    /// <summary>
    /// Returns the permissions for a node
    /// </summary>
    /// <param name="node"></param>
    /// <returns></returns>
    public static IEnumerable<Permission> GetNodePermissions(CMSNode node)
    {
        var items = new List<Permission>();
        using (IRecordsReader dr = SqlHelper.ExecuteReader("select * from umbracoUser2NodePermission where nodeId = @nodeId order by nodeId", SqlHelper.CreateParameter("@nodeId", node.Id)))
        {
            while (dr.Read())
            {
                items.Add(FromReader(dr));
            }
        }
        return items;
    }

    /// <summary>
    /// Deletes all permissions for the node/user combination
    /// </summary>
    /// <param name="user"></param>
    /// <param name="node"></param>
    public static void DeletePermissions(User user, CMSNode node)
    {
        DeletePermissions(user, node, true);
    }

    internal static void DeletePermissions(User user, CMSNode node, bool raiseEvents)
    {
        // delete all settings on the node for this user
        SqlHelper.ExecuteNonQuery("delete from umbracoUser2NodePermission where userId = @userId and nodeId = @nodeId",
            SqlHelper.CreateParameter("@userId", user.Id), SqlHelper.CreateParameter("@nodeId", node.Id));

        if (raiseEvents)
        {
            OnDeleted(new UserPermission(user, node, null), new DeleteEventArgs());
        }
    }

    /// <summary>
    /// deletes all permissions for the user
    /// </summary>
    /// <param name="user"></param>
    public static void DeletePermissions(User user)
    {
        // delete all settings on the node for this user
        SqlHelper.ExecuteNonQuery("delete from umbracoUser2NodePermission where userId = @userId",
            SqlHelper.CreateParameter("@userId", user.Id));

        OnDeleted(new UserPermission(user, Enumerable.Empty<CMSNode>(), null), new DeleteEventArgs());
    }

    /// <summary>
    /// Deletes the user's permissions on all of the given nodes. The node ids are ints
    /// rendered with the invariant culture, so inlining them into the IN clause cannot
    /// inject SQL; the user id remains parameterized.
    /// </summary>
    public static void DeletePermissions(int iUserID, int[] iNodeIDs)
    {
        var sql = "DELETE FROM umbracoUser2NodePermission WHERE nodeID IN ({0}) AND userID=@userID";
        var nodeIDs = string.Join(",", Array.ConvertAll(iNodeIDs, Converter));
        sql = string.Format(sql, nodeIDs);
        SqlHelper.ExecuteNonQuery(sql, new[] { SqlHelper.CreateParameter("@userID", iUserID) });

        OnDeleted(new UserPermission(iUserID, iNodeIDs), new DeleteEventArgs());
    }

    public static void DeletePermissions(int iUserID, int iNodeID)
    {
        DeletePermissions(iUserID, new[] { iNodeID });
    }

    private static string Converter(int from)
    {
        return from.ToString(CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// delete all permissions for this node
    /// </summary>
    /// <param name="node"></param>
    public static void DeletePermissions(CMSNode node)
    {
        SqlHelper.ExecuteNonQuery(
            "delete from umbracoUser2NodePermission where nodeId = @nodeId",
            SqlHelper.CreateParameter("@nodeId", node.Id));

        OnDeleted(new UserPermission(null, node, null), new DeleteEventArgs());
    }

    [MethodImpl(MethodImplOptions.Synchronized)]
    public static void UpdateCruds(User user, CMSNode node, string permissions)
    {
        ApplicationContext.Current.Services.UserService.ReplaceUserPermissions(
            user.Id,
            permissions.ToCharArray(),
            node.Id);

        OnUpdated(new UserPermission(user, node, permissions.ToCharArray()), new SaveEventArgs());
    }

    internal static event TypedEventHandler<UserPermission, DeleteEventArgs> Deleted;
    private static void OnDeleted(UserPermission permission, DeleteEventArgs args)
    {
        // Copy to a local so a concurrent unsubscribe cannot null the field
        // between the null check and the invocation.
        var handler = Deleted;
        if (handler != null)
        {
            handler(permission, args);
        }
    }

    internal static event TypedEventHandler<UserPermission, SaveEventArgs> Updated;
    private static void OnUpdated(UserPermission permission, SaveEventArgs args)
    {
        var handler = Updated;
        if (handler != null)
        {
            handler(permission, args);
        }
    }

    internal static event TypedEventHandler<UserPermission, NewEventArgs> New;
    private static void OnNew(UserPermission permission, NewEventArgs args)
    {
        var handler = New;
        if (handler != null)
        {
            handler(permission, args);
        }
    }
}
/// <summary>
/// Event payload describing a user together with the nodes (and optional permission keys)
/// an operation applied to. Either raw ids or full entities may be supplied; the id
/// properties fall back from explicit ids to the entities.
/// </summary>
internal class UserPermission
{
    // Both backing fields are set only in constructors, so both are readonly
    // (previously only _nodeIds was).
    private readonly int? _userId;
    private readonly int[] _nodeIds;

    internal UserPermission(int userId)
    {
        _userId = userId;
    }

    internal UserPermission(int userId, IEnumerable<int> nodeIds)
    {
        _userId = userId;
        _nodeIds = nodeIds.ToArray();
    }

    internal UserPermission(User user, CMSNode node, char[] permissionKeys)
    {
        User = user;
        Nodes = new[] { node };
        PermissionKeys = permissionKeys;
    }

    internal UserPermission(User user, IEnumerable<CMSNode> nodes, char[] permissionKeys)
    {
        User = user;
        Nodes = nodes;
        PermissionKeys = permissionKeys;
    }

    /// <summary>
    /// The explicit user id when supplied, otherwise the id of the User entity,
    /// otherwise -1.
    /// </summary>
    internal int UserId
    {
        get
        {
            if (_userId.HasValue)
            {
                return _userId.Value;
            }
            if (User != null)
            {
                return User.Id;
            }
            return -1;
        }
    }

    /// <summary>
    /// The explicit node ids when supplied, otherwise the ids of the Node entities,
    /// otherwise an empty sequence.
    /// </summary>
    internal IEnumerable<int> NodeIds
    {
        get
        {
            if (_nodeIds != null)
            {
                return _nodeIds;
            }
            if (Nodes != null)
            {
                return Nodes.Select(x => x.Id);
            }
            return Enumerable.Empty<int>();
        }
    }

    internal User User { get; private set; }
    internal IEnumerable<CMSNode> Nodes { get; private set; }
    internal char[] PermissionKeys { get; private set; }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace System.Net.Security
{
//
// This is a wrapping stream that does data encryption/decryption based on a successfully authenticated SSPI context.
//
internal class SslStreamInternal
{
// Cached delegate instances reused across all operations (avoids per-call allocations).
private static readonly AsyncCallback s_writeCallback = new AsyncCallback(WriteCallback);
private static readonly AsyncProtocolCallback s_resumeAsyncWriteCallback = new AsyncProtocolCallback(ResumeAsyncWriteCallback);
private static readonly AsyncProtocolCallback s_resumeAsyncReadCallback = new AsyncProtocolCallback(ResumeAsyncReadCallback);
private static readonly AsyncProtocolCallback s_readHeaderCallback = new AsyncProtocolCallback(ReadHeaderCallback);
private static readonly AsyncProtocolCallback s_readFrameCallback = new AsyncProtocolCallback(ReadFrameCallback);

// Shared caches of pinned buffers, sized for the common read/write chunk sizes.
private const int PinnableReadBufferSize = 4096 * 4 + 32; // We read in 16K chunks + headers.
private static PinnableBufferCache s_PinnableReadBufferCache = new PinnableBufferCache("System.Net.SslStream", PinnableReadBufferSize);
private const int PinnableWriteBufferSize = 4096 + 1024; // We write in 4K chunks + encryption overhead.
private static PinnableBufferCache s_PinnableWriteBufferCache = new PinnableBufferCache("System.Net.SslStream", PinnableWriteBufferSize);

private SslState _sslState;

// 0/1 reentrancy guards toggled via Interlocked.Exchange; only a single read and a
// single write may be in flight at a time.
private int _nestedWrite;
private int _nestedRead;

private AsyncProtocolRequest _readProtocolRequest; // cached, reusable AsyncProtocolRequest used for read operations
private AsyncProtocolRequest _writeProtocolRequest; // cached, reusable AsyncProtocolRequest used for write operations

// Never updated directly, special properties are used. This is the read buffer.
private byte[] _internalBuffer;
// True when _internalBuffer came from s_PinnableReadBufferCache and must be returned to it.
private bool _internalBufferFromPinnableCache;

private byte[] _pinnableOutputBuffer; // Used for writes when we can do it.
private byte[] _pinnableOutputBufferInUse; // Remembers what UNENCRYPTED buffer is using _PinnableOutputBuffer.

// Window of unconsumed decoded data within _internalBuffer.
private int _internalOffset;
private int _internalBufferCount;

private FixedSizeReader _reader;
/// <summary>
/// Wraps an authenticated SslState; all framed reads go through a FixedSizeReader
/// over the inner stream.
/// </summary>
internal SslStreamInternal(SslState sslState)
{
    if (PinnableBufferCacheEventSource.Log.IsEnabled())
    {
        PinnableBufferCacheEventSource.Log.DebugMessage1("CTOR: In System.Net._SslStream.SslStream", this.GetHashCode());
    }

    _sslState = sslState;
    _reader = new FixedSizeReader(sslState.InnerStream);
}
// Drops the read buffer, returning it to the pinnable cache first when it came from there.
private void FreeReadBuffer()
{
    if (!_internalBufferFromPinnableCache)
    {
        _internalBuffer = null;
        return;
    }

    s_PinnableReadBufferCache.FreeBuffer(_internalBuffer);
    _internalBufferFromPinnableCache = false;
    _internalBuffer = null;
}
// Finalizer: returns any buffers still held from the pinnable caches so pinned
// memory is recovered even when the stream is dropped without being fully drained.
~SslStreamInternal()
{
    if (_internalBufferFromPinnableCache)
    {
        if (PinnableBufferCacheEventSource.Log.IsEnabled())
        {
            PinnableBufferCacheEventSource.Log.DebugMessage2("DTOR: In System.Net._SslStream.~SslStream Freeing Read Buffer", this.GetHashCode(), PinnableBufferCacheEventSource.AddressOfByteArray(_internalBuffer));
        }

        FreeReadBuffer();
    }

    if (_pinnableOutputBuffer != null)
    {
        if (PinnableBufferCacheEventSource.Log.IsEnabled())
        {
            PinnableBufferCacheEventSource.Log.DebugMessage2("DTOR: In System.Net._SslStream.~SslStream Freeing Write Buffer", this.GetHashCode(), PinnableBufferCacheEventSource.AddressOfByteArray(_pinnableOutputBuffer));
        }

        s_PinnableWriteBufferCache.FreeBuffer(_pinnableOutputBuffer);
    }
}
/// <summary>
/// Reads one decoded byte, or returns -1 at EOF. Serves from the internal buffer when
/// data is available; otherwise falls back to Read, the same way Stream.ReadByte does.
/// Uses the same _nestedRead guard as Read so concurrent reads are rejected.
/// </summary>
internal int ReadByte()
{
    if (Interlocked.Exchange(ref _nestedRead, 1) == 1)
    {
        throw new NotSupportedException(SR.Format(SR.net_io_invalidnestedcall, "ReadByte", "read"));
    }

    // If there's any data in the buffer, take one byte, and we're done.
    try
    {
        if (InternalBufferCount > 0)
        {
            int b = InternalBuffer[InternalOffset];
            SkipBytes(1);
            return b;
        }
    }
    finally
    {
        // Regardless of whether we were able to read a byte from the buffer,
        // reset the read tracking. If we weren't able to read a byte, the
        // subsequent call to Read will set the flag again.
        _nestedRead = 0;
    }

    // Otherwise, fall back to reading a byte via Read, the same way Stream.ReadByte does.
    // This allocation is unfortunate but should be relatively rare, as it'll only occur once
    // per buffer fill internally by Read.
    byte[] oneByte = new byte[1];
    int bytesRead = Read(oneByte, 0, 1);
    Debug.Assert(bytesRead == 0 || bytesRead == 1);
    return bytesRead == 1 ? oneByte[0] : -1;
}
/// <summary>Synchronous read: passing a null async result makes ProcessRead run inline.</summary>
internal int Read(byte[] buffer, int offset, int count)
{
    return ProcessRead(buffer, offset, count, asyncResult: null);
}
/// <summary>Synchronous write: passing a null async result makes ProcessWrite run inline.</summary>
internal void Write(byte[] buffer, int offset, int count)
{
    ProcessWrite(buffer, offset, count, asyncResult: null);
}
/// <summary>Starts an APM-style read; the returned BufferAsyncResult is completed by ProcessRead.</summary>
internal IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback asyncCallback, object asyncState)
{
    BufferAsyncResult result = new BufferAsyncResult(this, buffer, offset, count, asyncState, asyncCallback);
    ProcessRead(buffer, offset, count, result);
    return result;
}
/// <summary>Completes an APM-style read started by BeginRead, rethrowing any stored failure.</summary>
internal int EndRead(IAsyncResult asyncResult)
{
    if (asyncResult == null)
    {
        throw new ArgumentNullException(nameof(asyncResult));
    }

    var bufferResult = asyncResult as BufferAsyncResult;
    if (bufferResult == null)
    {
        throw new ArgumentException(SR.Format(SR.net_io_async_result, asyncResult.GetType().FullName), nameof(asyncResult));
    }

    if (Interlocked.Exchange(ref _nestedRead, 0) == 0)
    {
        throw new InvalidOperationException(SR.Format(SR.net_io_invalidendcall, "EndRead"));
    }

    // No "artificial" timeouts implemented so far, InnerStream controls timeout.
    bufferResult.InternalWaitForCompletion();

    Exception e = bufferResult.Result as Exception;
    if (e != null)
    {
        // IOExceptions surface as-is; anything else is wrapped.
        if (e is IOException)
        {
            throw e;
        }
        throw new IOException(SR.net_io_read, e);
    }

    return bufferResult.Int32Result;
}
/// <summary>Starts an APM-style write; the returned LazyAsyncResult is completed by ProcessWrite.</summary>
internal IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback asyncCallback, object asyncState)
{
    LazyAsyncResult result = new LazyAsyncResult(this, asyncState, asyncCallback);
    ProcessWrite(buffer, offset, count, result);
    return result;
}
/// <summary>Completes an APM-style write started by BeginWrite, rethrowing any stored failure.</summary>
internal void EndWrite(IAsyncResult asyncResult)
{
    if (asyncResult == null)
    {
        throw new ArgumentNullException(nameof(asyncResult));
    }

    var lazyResult = asyncResult as LazyAsyncResult;
    if (lazyResult == null)
    {
        throw new ArgumentException(SR.Format(SR.net_io_async_result, asyncResult.GetType().FullName), nameof(asyncResult));
    }

    if (Interlocked.Exchange(ref _nestedWrite, 0) == 0)
    {
        throw new InvalidOperationException(SR.Format(SR.net_io_invalidendcall, "EndWrite"));
    }

    // No "artificial" timeouts implemented so far, InnerStream controls timeout.
    lazyResult.InternalWaitForCompletion();

    Exception e = lazyResult.Result as Exception;
    if (e != null)
    {
        // IOExceptions surface as-is; anything else is wrapped.
        if (e is IOException)
        {
            throw e;
        }
        throw new IOException(SR.net_io_write, e);
    }
}
// True when unconsumed buffered data is available without touching the inner stream.
internal bool DataAvailable => InternalBufferCount != 0;

// The read buffer; the backing field is managed by EnsureInternalBufferSize/FreeReadBuffer.
private byte[] InternalBuffer => _internalBuffer;

// Offset of the first unconsumed byte within InternalBuffer.
private int InternalOffset => _internalOffset;

// Number of unconsumed bytes remaining in InternalBuffer.
private int InternalBufferCount => _internalBufferCount;

// Marks 'decrCount' bytes at the front of the buffered data as consumed.
private void SkipBytes(int decrCount)
{
    _internalOffset += decrCount;
    _internalBufferCount -= decrCount;
}
//
// This will set the internal offset to "curOffset" and ensure internal buffer.
// If not enough, reallocate and copy up to "curOffset".
// Buffers up to PinnableReadBufferSize come from the pinnable cache; larger ones
// are plain heap arrays. Any replaced cache buffer is returned to the cache.
//
private void EnsureInternalBufferSize(int curOffset, int addSize)
{
    if (_internalBuffer == null || _internalBuffer.Length < addSize + curOffset)
    {
        // Remember the old buffer so already-consumed prefix data can be copied
        // over and, if it was pinned, so it can be returned to the cache.
        bool wasPinnable = _internalBufferFromPinnableCache;
        byte[] saved = _internalBuffer;

        int newSize = addSize + curOffset;
        if (newSize <= PinnableReadBufferSize)
        {
            if (PinnableBufferCacheEventSource.Log.IsEnabled())
            {
                PinnableBufferCacheEventSource.Log.DebugMessage2("In System.Net._SslStream.EnsureInternalBufferSize IS pinnable", this.GetHashCode(), newSize);
            }

            _internalBufferFromPinnableCache = true;
            _internalBuffer = s_PinnableReadBufferCache.AllocateBuffer();
        }
        else
        {
            if (PinnableBufferCacheEventSource.Log.IsEnabled())
            {
                PinnableBufferCacheEventSource.Log.DebugMessage2("In System.Net._SslStream.EnsureInternalBufferSize NOT pinnable", this.GetHashCode(), newSize);
            }

            _internalBufferFromPinnableCache = false;
            _internalBuffer = new byte[newSize];
        }

        // Preserve the first curOffset bytes (the frame header) across reallocation.
        if (saved != null && curOffset != 0)
        {
            Buffer.BlockCopy(saved, 0, _internalBuffer, 0, curOffset);
        }

        if (wasPinnable)
        {
            s_PinnableReadBufferCache.FreeBuffer(saved);
        }
    }

    _internalOffset = curOffset;
    _internalBufferCount = curOffset + addSize;
}
//
// Argument validation shared by every Read/Write entry point.
//
private void ValidateParameters(byte[] buffer, int offset, int count)
{
    if (buffer == null)
        throw new ArgumentNullException(nameof(buffer));

    if (offset < 0)
        throw new ArgumentOutOfRangeException(nameof(offset));

    if (count < 0)
        throw new ArgumentOutOfRangeException(nameof(count));

    // Written as a subtraction so 'offset + count' cannot overflow int.
    if (count > buffer.Length - offset)
        throw new ArgumentOutOfRangeException(nameof(count), SR.net_offset_plus_count);
}
// Returns the cached AsyncProtocolRequest for an async call (allocating it on first
// use), or null for a synchronous call (asyncResult == null). Only a single read and
// a single write operation may be in flight at a time, which is what makes reusing
// one request object per direction safe.
private AsyncProtocolRequest GetOrCreateProtocolRequest(ref AsyncProtocolRequest aprField, LazyAsyncResult asyncResult)
{
    if (asyncResult == null)
    {
        return null;
    }

    AsyncProtocolRequest request = aprField;
    if (request == null)
    {
        aprField = request = new AsyncProtocolRequest(asyncResult);
    }
    else
    {
        request.Reset(asyncResult);
    }

    return request;
}
//
// Combined sync/async write entry point. For a sync request asyncResult == null.
// Guards against nested writes, then delegates the chunked encrypt-and-send loop
// to StartWriting.
//
private void ProcessWrite(byte[] buffer, int offset, int count, LazyAsyncResult asyncResult)
{
    _sslState.CheckThrow(authSuccessCheck:true, shutdownCheck:true);
    ValidateParameters(buffer, offset, count);

    // Reject overlapping writes; the flag is cleared below (sync/failed) or by the
    // async completion path.
    if (Interlocked.Exchange(ref _nestedWrite, 1) == 1)
    {
        throw new NotSupportedException(SR.Format(SR.net_io_invalidnestedcall, "Write", "write"));
    }

    // If this is an async operation, get the AsyncProtocolRequest to use.
    // We do this only after we verify we're the sole write operation in flight.
    AsyncProtocolRequest asyncRequest = GetOrCreateProtocolRequest(ref _writeProtocolRequest, asyncResult);

    bool failed = false;
    try
    {
        StartWriting(buffer, offset, count, asyncRequest);
    }
    catch (Exception e)
    {
        _sslState.FinishWrite();

        failed = true;
        if (e is IOException)
        {
            throw;
        }

        throw new IOException(SR.net_io_write, e);
    }
    finally
    {
        // For a pending async operation the nested-write flag stays set until the
        // async callbacks finish; otherwise clear it now.
        if (asyncRequest == null || failed)
        {
            _nestedWrite = 0;
        }
    }
}

// Encrypts and sends 'buffer' in MaxDataSize chunks. Async completions re-enter this
// method via s_resumeAsyncWriteCallback with updated offset/count; count < 0 signals
// that the last chunk completed and only user-completion remains.
private void StartWriting(byte[] buffer, int offset, int count, AsyncProtocolRequest asyncRequest)
{
    if (asyncRequest != null)
    {
        asyncRequest.SetNextRequest(buffer, offset, count, s_resumeAsyncWriteCallback);
    }

    // We loop to this method from the callback.
    // If the last chunk was just completed from async callback (count < 0), we complete user request.
    if (count >= 0 )
    {
        byte[] outBuffer = null;
        if (_pinnableOutputBufferInUse == null)
        {
            // Claim the pinned write buffer for this user buffer (allocated lazily once).
            if (_pinnableOutputBuffer == null)
            {
                _pinnableOutputBuffer = s_PinnableWriteBufferCache.AllocateBuffer();
            }

            _pinnableOutputBufferInUse = buffer;
            outBuffer = _pinnableOutputBuffer;
            if (PinnableBufferCacheEventSource.Log.IsEnabled())
            {
                PinnableBufferCacheEventSource.Log.DebugMessage3("In System.Net._SslStream.StartWriting Trying Pinnable", this.GetHashCode(), count, PinnableBufferCacheEventSource.AddressOfByteArray(outBuffer));
            }
        }
        else
        {
            // Another write still owns the pinned buffer; EncryptData will
            // allocate into outBuffer (null) instead.
            if (PinnableBufferCacheEventSource.Log.IsEnabled())
            {
                PinnableBufferCacheEventSource.Log.DebugMessage2("In System.Net._SslStream.StartWriting BufferInUse", this.GetHashCode(), count);
            }
        }

        do
        {
            if (count == 0 && !SslStreamPal.CanEncryptEmptyMessage)
            {
                // If it's an empty message and the PAL doesn't support that,
                // we're done.
                break;
            }

            // Request a write IO slot.
            if (_sslState.CheckEnqueueWrite(asyncRequest))
            {
                // Operation is async and has been queued, return.
                return;
            }

            int chunkBytes = Math.Min(count, _sslState.MaxDataSize);
            int encryptedBytes;
            SecurityStatusPal status = _sslState.EncryptData(buffer, offset, chunkBytes, ref outBuffer, out encryptedBytes);
            if (status.ErrorCode != SecurityStatusPalErrorCode.OK)
            {
                // Re-handshake status is not supported.
                ProtocolToken message = new ProtocolToken(null, status);
                throw new IOException(SR.net_io_encrypt, message.GetException());
            }

            if (PinnableBufferCacheEventSource.Log.IsEnabled())
            {
                PinnableBufferCacheEventSource.Log.DebugMessage3("In System.Net._SslStream.StartWriting Got Encrypted Buffer",
                    this.GetHashCode(), encryptedBytes, PinnableBufferCacheEventSource.AddressOfByteArray(outBuffer));
            }

            if (asyncRequest != null)
            {
                // Prepare for the next request.
                asyncRequest.SetNextRequest(buffer, offset + chunkBytes, count - chunkBytes, s_resumeAsyncWriteCallback);
                Task t = _sslState.InnerStream.WriteAsync(outBuffer, 0, encryptedBytes);
                if (t.IsCompleted)
                {
                    // Already done: observe any exception and continue the loop inline.
                    t.GetAwaiter().GetResult();
                }
                else
                {
                    IAsyncResult ar = TaskToApm.Begin(t, s_writeCallback, asyncRequest);
                    if (!ar.CompletedSynchronously)
                    {
                        // The callback will resume this loop via s_resumeAsyncWriteCallback.
                        return;
                    }

                    TaskToApm.End(ar);
                }
            }
            else
            {
                _sslState.InnerStream.Write(outBuffer, 0, encryptedBytes);
            }

            offset += chunkBytes;
            count -= chunkBytes;

            // Release write IO slot.
            _sslState.FinishWrite();

        } while (count != 0);
    }

    if (asyncRequest != null)
    {
        asyncRequest.CompleteUser();
    }

    // Release the pinned write buffer claim if this write owned it.
    if (buffer == _pinnableOutputBufferInUse)
    {
        _pinnableOutputBufferInUse = null;
        if (PinnableBufferCacheEventSource.Log.IsEnabled())
        {
            PinnableBufferCacheEventSource.Log.DebugMessage1("In System.Net._SslStream.StartWriting Freeing buffer.", this.GetHashCode());
        }
    }
}
//
// Combined sync/async read method. For sync request asyncRequest==null.
// Serves leftover buffered data first; otherwise starts reading/decoding a new frame.
//
private int ProcessRead(byte[] buffer, int offset, int count, BufferAsyncResult asyncResult)
{
    ValidateParameters(buffer, offset, count);

    // Reject overlapping reads; the flag is cleared below (sync/failed) or by the
    // async completion path.
    if (Interlocked.Exchange(ref _nestedRead, 1) == 1)
    {
        throw new NotSupportedException(SR.Format(SR.net_io_invalidnestedcall, (asyncResult!=null? "BeginRead":"Read"), "read"));
    }

    // If this is an async operation, get the AsyncProtocolRequest to use.
    // We do this only after we verify we're the sole write operation in flight.
    AsyncProtocolRequest asyncRequest = GetOrCreateProtocolRequest(ref _readProtocolRequest, asyncResult);

    bool failed = false;
    try
    {
        int copyBytes;
        if (InternalBufferCount != 0)
        {
            // Previously decoded data is still buffered; hand back as much as fits.
            copyBytes = InternalBufferCount > count ? count : InternalBufferCount;
            if (copyBytes != 0)
            {
                Buffer.BlockCopy(InternalBuffer, InternalOffset, buffer, offset, copyBytes);
                SkipBytes(copyBytes);
            }

            asyncRequest?.CompleteUser(copyBytes);
            return copyBytes;
        }

        return StartReading(buffer, offset, count, asyncRequest);
    }
    catch (Exception e)
    {
        _sslState.FinishRead(null);
        failed = true;

        if (e is IOException)
        {
            throw;
        }

        throw new IOException(SR.net_io_read, e);
    }
    finally
    {
        // For a pending async operation the nested-read flag stays set until the
        // async callbacks finish; otherwise clear it now.
        if (asyncRequest == null || failed)
        {
            _nestedRead = 0;
        }
    }
}

//
// To avoid recursion when decrypted 0 bytes this method will loop until a decrypted result at least 1 byte.
//
private int StartReading(byte[] buffer, int offset, int count, AsyncProtocolRequest asyncRequest)
{
    int result = 0;

    if (InternalBufferCount != 0)
    {
        // ProcessRead should have drained the buffer before starting a new frame.
        NetEventSource.Fail(this, $"Previous frame was not consumed. InternalBufferCount: {InternalBufferCount}");
    }

    do
    {
        if (asyncRequest != null)
        {
            asyncRequest.SetNextRequest(buffer, offset, count, s_resumeAsyncReadCallback);
        }

        // CheckEnqueueRead returns: 0 = queued asynchronously, -1 = proceed to read
        // a frame, otherwise = bytes already produced for the caller.
        int copyBytes = _sslState.CheckEnqueueRead(buffer, offset, count, asyncRequest);
        if (copyBytes == 0)
        {
            // Queued but not completed!
            return 0;
        }

        if (copyBytes != -1)
        {
            asyncRequest?.CompleteUser(copyBytes);
            return copyBytes;
        }
    }
    // When we read -1 bytes means we have decrypted 0 bytes or rehandshaking, need looping.
    while ((result = StartFrameHeader(buffer, offset, count, asyncRequest)) == -1);

    return result;
}
// Reads the fixed-size SSL record header into InternalBuffer, then continues with
// StartFrameBody. Returns 0 when the transport read was queued asynchronously;
// otherwise the result of StartFrameBody.
private int StartFrameHeader(byte[] buffer, int offset, int count, AsyncProtocolRequest asyncRequest)
{
    //
    // Always decrypt "in place" out of InternalBuffer: a user buffer can be shared
    // by many threads, in which case decryption/integrity checks could fail due to
    // data corruption.
    //
    // Reset the internal buffer so it can hold a fresh frame header.
    EnsureInternalBufferSize(0, SecureChannel.ReadHeaderSize);

    int headerBytes;
    if (asyncRequest == null)
    {
        // Synchronous path: block until the header has been read.
        headerBytes = _reader.ReadPacket(InternalBuffer, 0, SecureChannel.ReadHeaderSize);
    }
    else
    {
        // Async path: completion continues via s_readHeaderCallback unless the
        // transport finished synchronously.
        asyncRequest.SetNextRequest(InternalBuffer, 0, SecureChannel.ReadHeaderSize, s_readHeaderCallback);
        _reader.AsyncReadPacket(asyncRequest);
        if (!asyncRequest.MustCompleteSynchronously)
        {
            return 0;
        }

        headerBytes = asyncRequest.Result;
    }

    return StartFrameBody(headerBytes, buffer, offset, count, asyncRequest);
}
// Continues a read after the frame header has arrived. 'readBytes' is the number of
// header bytes read from the transport (0 on EOF). Computes the remaining payload
// size, reads it into InternalBuffer, and hands off to ProcessFrameBody.
// Returns 0 on EOF or when queued asynchronously; otherwise the ProcessFrameBody result.
// Throws IOException when the header describes an invalid frame size.
private int StartFrameBody(int readBytes, byte[] buffer, int offset, int count, AsyncProtocolRequest asyncRequest)
{
    if (readBytes == 0)
    {
        //EOF : Reset the buffer as we did not read anything into it.
        SkipBytes(InternalBufferCount);
        asyncRequest?.CompleteUser(0);
        return 0;
    }

    // Now readBytes is a payload size.
    readBytes = _sslState.GetRemainingFrameSize(InternalBuffer, readBytes);

    if (readBytes < 0)
    {
        throw new IOException(SR.net_frame_read_size);
    }

    // Grow the internal buffer so the payload fits after the already-read header.
    EnsureInternalBufferSize(SecureChannel.ReadHeaderSize, readBytes);

    if (asyncRequest != null)
    {
        // Async path: completion continues via s_readFrameCallback unless the
        // transport finished synchronously.
        asyncRequest.SetNextRequest(InternalBuffer, SecureChannel.ReadHeaderSize, readBytes, s_readFrameCallback);

        _reader.AsyncReadPacket(asyncRequest);

        if (!asyncRequest.MustCompleteSynchronously)
        {
            return 0;
        }

        readBytes = asyncRequest.Result;
    }
    else
    {
        readBytes = _reader.ReadPacket(InternalBuffer, SecureChannel.ReadHeaderSize, readBytes);
    }

    return ProcessFrameBody(readBytes, buffer, offset, count, asyncRequest);
}
//
// readBytes == SSL Data Payload size on input or 0 on EOF.
// Decrypts the completed frame held in InternalBuffer and copies up to 'count'
// decrypted bytes into the caller's buffer. Returns the delivered byte count,
// 0 via ProcessReadErrorCode on graceful close, or -1 when the frame decrypted
// to 0 bytes (the caller's loop must read another frame).
//
private int ProcessFrameBody(int readBytes, byte[] buffer, int offset, int count, AsyncProtocolRequest asyncRequest)
{
    if (readBytes == 0)
    {
        // EOF here is an error: the header was already read, so the peer closed
        // mid-frame.
        throw new IOException(SR.net_io_eof);
    }

    // Set readBytes to total number of received bytes.
    readBytes += SecureChannel.ReadHeaderSize;

    // Decrypt into internal buffer, change "readBytes" to count now _Decrypted Bytes_.
    int data_offset = 0;

    SecurityStatusPal status = _sslState.DecryptData(InternalBuffer, ref data_offset, ref readBytes);

    if (status.ErrorCode != SecurityStatusPalErrorCode.OK)
    {
        // Decryption did not succeed. Preserve any remaining bytes for
        // ProcessReadErrorCode (e.g. renegotiation data) before resetting
        // the internal buffer.
        byte[] extraBuffer = null;
        if (readBytes != 0)
        {
            extraBuffer = new byte[readBytes];
            Buffer.BlockCopy(InternalBuffer, data_offset, extraBuffer, 0, readBytes);
        }

        // Reset internal buffer count.
        SkipBytes(InternalBufferCount);
        return ProcessReadErrorCode(status, buffer, offset, count, asyncRequest, extraBuffer);
    }

    if (readBytes == 0 && count != 0)
    {
        // Read again since remote side has sent encrypted 0 bytes.
        SkipBytes(InternalBufferCount);
        return -1;
    }

    // Decrypted data start from "data_offset" offset, the total count can be shrunk after decryption.
    EnsureInternalBufferSize(0, data_offset + readBytes);
    SkipBytes(data_offset);

    // Deliver at most 'count' bytes; any surplus stays in InternalBuffer for the
    // next read call.
    if (readBytes > count)
    {
        readBytes = count;
    }

    Buffer.BlockCopy(InternalBuffer, InternalOffset, buffer, offset, readBytes);

    // This will adjust both the remaining internal buffer count and the offset.
    SkipBytes(readBytes);

    _sslState.FinishRead(null);
    asyncRequest?.CompleteUser(readBytes);

    return readBytes;
}
// Maps a non-OK decrypt status to the read loop's control values:
// -1 to loop (renegotiation was replied to), 0 on graceful close notification,
// otherwise throws IOException wrapping the underlying security error.
private int ProcessReadErrorCode(SecurityStatusPal status, byte[] buffer, int offset, int count, AsyncProtocolRequest asyncRequest, byte[] extraBuffer)
{
    var message = new ProtocolToken(null, status);
    if (NetEventSource.IsEnabled) NetEventSource.Info(null, $"***Processing an error Status = {message.Status}");

    if (message.Renegotiate)
    {
        // The peer asked to renegotiate; answer and tell the caller to loop on read.
        _sslState.ReplyOnReAuthentication(extraBuffer);
        return -1;
    }

    if (!message.CloseConnection)
    {
        // Genuine decryption failure.
        throw new IOException(SR.net_io_decrypt, message.GetException());
    }

    // Graceful close notification from the peer.
    _sslState.FinishRead(null);
    asyncRequest?.CompleteUser(0);
    return 0;
}
// APM completion callback for transport writes; continues (or finishes) the
// chunked write loop driven by StartWriting.
private static void WriteCallback(IAsyncResult transportResult)
{
    if (transportResult.CompletedSynchronously)
    {
        // Synchronous completions are handled inline by the initiating call.
        return;
    }

    if (!(transportResult.AsyncState is AsyncProtocolRequest))
    {
        NetEventSource.Fail(transportResult, "State type is wrong, expected AsyncProtocolRequest.");
    }

    var request = (AsyncProtocolRequest)transportResult.AsyncState;
    var stream = (SslStreamInternal)request.AsyncObject;
    try
    {
        TaskToApm.End(transportResult);
        stream._sslState.FinishWrite();

        if (request.Count == 0)
        {
            // That was the final chunk; -1 signals StartWriting to finish up.
            request.Count = -1;
        }
        stream.StartWriting(request.Buffer, request.Offset, request.Count, request);
    }
    catch (Exception e)
    {
        if (request.IsUserCompleted)
        {
            // This will throw on a worker thread.
            throw;
        }

        stream._sslState.FinishWrite();
        request.CompleteUserWithError(e);
    }
}
//
// Entry point used in the rare case where an async Read is resumed after a
// completed handshake.
//
private static void ResumeAsyncReadCallback(AsyncProtocolRequest request)
{
    try
    {
        var stream = (SslStreamInternal)request.AsyncObject;
        stream.StartReading(request.Buffer, request.Offset, request.Count, request);
    }
    catch (Exception e)
    {
        if (request.IsUserCompleted)
        {
            // This will throw on a worker thread.
            throw;
        }

        ((SslStreamInternal)request.AsyncObject)._sslState.FinishRead(null);
        request.CompleteUserWithError(e);
    }
}
//
// Entry point used in the rare case where an async Write is resumed after a
// completed handshake.
//
private static void ResumeAsyncWriteCallback(AsyncProtocolRequest asyncRequest)
{
    try
    {
        var stream = (SslStreamInternal)asyncRequest.AsyncObject;
        stream.StartWriting(asyncRequest.Buffer, asyncRequest.Offset, asyncRequest.Count, asyncRequest);
    }
    catch (Exception e)
    {
        if (asyncRequest.IsUserCompleted)
        {
            // This will throw on a worker thread.
            throw;
        }

        ((SslStreamInternal)asyncRequest.AsyncObject)._sslState.FinishWrite();
        asyncRequest.CompleteUserWithError(e);
    }
}
// Async continuation invoked when the frame header read completes; drives the
// frame body read and loops the outer read when 0 bytes were decrypted.
private static void ReadHeaderCallback(AsyncProtocolRequest asyncRequest)
{
    try
    {
        var stream = (SslStreamInternal)asyncRequest.AsyncObject;
        var userResult = (BufferAsyncResult)asyncRequest.UserAsyncResult;

        int frameStatus = stream.StartFrameBody(asyncRequest.Result, userResult.Buffer, userResult.Offset, userResult.Count, asyncRequest);
        if (frameStatus == -1)
        {
            // The frame decrypted to 0 bytes; kick off another read.
            stream.StartReading(userResult.Buffer, userResult.Offset, userResult.Count, asyncRequest);
        }
    }
    catch (Exception e)
    {
        if (asyncRequest.IsUserCompleted)
        {
            // This will throw on a worker thread.
            throw;
        }

        asyncRequest.CompleteUserWithError(e);
    }
}
// Async continuation invoked when the frame body read completes; processes
// (decrypts) the frame and loops the outer read when 0 bytes were decrypted.
private static void ReadFrameCallback(AsyncProtocolRequest asyncRequest)
{
    try
    {
        var stream = (SslStreamInternal)asyncRequest.AsyncObject;
        var userResult = (BufferAsyncResult)asyncRequest.UserAsyncResult;

        int frameStatus = stream.ProcessFrameBody(asyncRequest.Result, userResult.Buffer, userResult.Offset, userResult.Count, asyncRequest);
        if (frameStatus == -1)
        {
            // The frame decrypted to 0 bytes; kick off another read.
            stream.StartReading(userResult.Buffer, userResult.Offset, userResult.Count, asyncRequest);
        }
    }
    catch (Exception e)
    {
        if (asyncRequest.IsUserCompleted)
        {
            // This will throw on a worker thread.
            throw;
        }

        asyncRequest.CompleteUserWithError(e);
    }
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the MIT license. See License.txt in the project root for license information.
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Testing;
using Test.Utilities;
using Xunit;
using VerifyCS = Test.Utilities.CSharpCodeFixVerifier<
Microsoft.NetCore.Analyzers.Runtime.CallGCSuppressFinalizeCorrectlyAnalyzer,
Microsoft.NetCore.CSharp.Analyzers.Runtime.CSharpCallGCSuppressFinalizeCorrectlyFixer>;
using VerifyVB = Test.Utilities.VisualBasicCodeFixVerifier<
Microsoft.NetCore.Analyzers.Runtime.CallGCSuppressFinalizeCorrectlyAnalyzer,
Microsoft.NetCore.VisualBasic.Analyzers.Runtime.BasicCallGCSuppressFinalizeCorrectlyFixer>;
namespace Microsoft.NetCore.Analyzers.Runtime.UnitTests
{
public class CallGCSuppressFinalizeCorrectlyTests
{
// Display signatures of GC.SuppressFinalize as they appear in CA1816 diagnostic
// messages for each test language.
private const string GCSuppressFinalizeMethodSignature_CSharp = "GC.SuppressFinalize(object)";
private const string GCSuppressFinalizeMethodSignature_Basic = "GC.SuppressFinalize(Object)";
// Builds the expected CA1816 diagnostic for a C# test case, located at (line, column)
// with the containing method and GC.SuppressFinalize signature as message arguments.
private static DiagnosticResult GetCA1816CSharpResultAt(int line, int column, DiagnosticDescriptor rule, string containingMethodName, string gcSuppressFinalizeMethodName) =>
#pragma warning disable RS0030 // Do not use banned APIs
    VerifyCS.Diagnostic(rule)
        .WithLocation(line, column)
#pragma warning restore RS0030 // Do not use banned APIs
        .WithArguments(containingMethodName, gcSuppressFinalizeMethodName);
// Builds the expected CA1816 diagnostic for a Visual Basic test case, located at
// (line, column) with the containing method and GC.SuppressFinalize signature as
// message arguments.
private static DiagnosticResult GetCA1816BasicResultAt(int line, int column, DiagnosticDescriptor rule, string containingMethodName, string gcSuppressFinalizeMethodName) =>
#pragma warning disable RS0030 // Do not use banned APIs
    VerifyVB.Diagnostic(rule)
        .WithLocation(line, column)
#pragma warning restore RS0030 // Do not use banned APIs
        .WithArguments(containingMethodName, gcSuppressFinalizeMethodName);
#region NoDiagnosticCases
// No CA1816: a public disposable class without a finalizer that still calls
// GC.SuppressFinalize(this) in Dispose() is allowed.
[Fact]
public async Task DisposableWithoutFinalizer_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class DisposableWithoutFinalizer : IDisposable
{
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): Dispose() calls GC.SuppressFinalize(Me) and the class has no finalizer.
[Fact]
public async Task DisposableWithoutFinalizer_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class DisposableWithoutFinalizer
Implements IDisposable
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
GC.SuppressFinalize(Me)
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: a disposable class with a finalizer that correctly calls
// GC.SuppressFinalize(this) in Dispose() follows the dispose pattern.
[Fact]
public async Task DisposableWithFinalizer_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class DisposableWithFinalizer : IDisposable
{
~DisposableWithFinalizer()
{
Dispose(false);
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): finalizer present and Dispose() calls GC.SuppressFinalize(Me).
[Fact]
public async Task DisposableWithFinalizer_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class DisposableWithFinalizer
Implements IDisposable
Protected Overrides Sub Finalize()
Try
Dispose(False)
Finally
MyBase.Finalize()
End Try
End Sub
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
GC.SuppressFinalize(Me)
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: an IAsyncDisposable with a finalizer that calls
// GC.SuppressFinalize(this) in DisposeAsync(). Uses the async-interfaces
// reference assemblies because IAsyncDisposable is not in the default set.
[Fact]
public async Task AsyncDisposableWithFinalizer_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
class MyAsyncDisposable : IAsyncDisposable
{
[DllImport(""example.dll"")]
private static extern int GetHandle();
[DllImport(""example.dll"")]
private static extern void FreeHandle(int handle);
private readonly int handle;
public MyAsyncDisposable()
{
this.handle = GetHandle();
}
~MyAsyncDisposable()
{
FreeHandle(this.handle);
}
public async ValueTask DisposeAsync()
{
await Task.Run(() => FreeHandle(this.handle)).ConfigureAwait(false);
GC.SuppressFinalize(this);
}
}";

    await new VerifyCS.Test
    {
        ReferenceAssemblies = AdditionalMetadataReferences.DefaultWithAsyncInterfaces,
        TestCode = code
    }.RunAsync();
}
// No CA1816: a sealed disposable without a finalizer may still call
// GC.SuppressFinalize(this) in Dispose().
[Fact]
public async Task SealedDisposableWithoutFinalizer_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public sealed class SealedDisposableWithoutFinalizer : IDisposable
{
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
private void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): NotInheritable disposable without a finalizer calling
// GC.SuppressFinalize(Me) in Dispose().
[Fact]
public async Task SealedDisposableWithoutFinalizer_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public NotInheritable Class SealedDisposableWithoutFinalizer
Implements IDisposable
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
GC.SuppressFinalize(Me)
End Sub
Private Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: sealed disposable with a finalizer that correctly calls
// GC.SuppressFinalize(this) in Dispose().
[Fact]
public async Task SealedDisposableWithFinalizer_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public sealed class SealedDisposableWithFinalizer : IDisposable
{
~SealedDisposableWithFinalizer()
{
Dispose(false);
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
private void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): NotInheritable disposable with a finalizer that calls
// GC.SuppressFinalize(Me) in Dispose().
[Fact]
public async Task SealedDisposableWithFinalizer_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public NotInheritable Class SealedDisposableWithFinalizer
Implements IDisposable
Protected Overrides Sub Finalize()
Try
Dispose(False)
Finally
MyBase.Finalize()
End Try
End Sub
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
GC.SuppressFinalize(Me)
End Sub
Private Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: an internal (non-externally-visible) disposable without a finalizer
// that omits GC.SuppressFinalize is not flagged.
[Fact]
public async Task InternalDisposableWithoutFinalizer_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
internal class InternalDisposableWithoutFinalizer : IDisposable
{
public void Dispose()
{
Dispose(true);
// GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): a Friend disposable without a finalizer that omits
// GC.SuppressFinalize is not flagged.
[Fact]
public async Task InternalDisposableWithoutFinalizer_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Friend Class InternalDisposableWithoutFinalizer
Implements IDisposable
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
' GC.SuppressFinalize(this);
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: a private nested disposable without a finalizer that omits
// GC.SuppressFinalize is not flagged.
[Fact]
public async Task PrivateDisposableWithoutFinalizer_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public static class NestedClassHolder
{
private class PrivateDisposableWithoutFinalizer : IDisposable
{
public void Dispose()
{
Dispose(true);
// GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): a Private nested disposable without a finalizer that omits
// GC.SuppressFinalize is not flagged.
[Fact]
public async Task PrivateDisposableWithoutFinalizer_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public NotInheritable Class NestedClassHolder
Private Sub New()
End Sub
Private Class PrivateDisposableWithoutFinalizer
Implements IDisposable
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
' GC.SuppressFinalize(this);
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class
End Class";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: a sealed disposable without a finalizer does not need to call
// GC.SuppressFinalize at all.
[Fact]
public async Task SealedDisposableWithoutFinalizerAndWithoutCallingSuppressFinalize_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public sealed class SealedDisposableWithoutFinalizerAndWithoutCallingSuppressFinalize : IDisposable
{
public void Dispose()
{
Dispose(true);
}
private void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): a NotInheritable disposable without a finalizer does not need
// to call GC.SuppressFinalize at all.
[Fact]
public async Task SealedDisposableWithoutFinalizerAndWithoutCallingSuppressFinalize_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public NotInheritable Class SealedDisposableWithoutFinalizerAndWithoutCallingSuppressFinalize
Implements IDisposable
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
End Sub
Private Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: structs cannot have finalizers, so a disposable struct never needs
// GC.SuppressFinalize.
[Fact]
public async Task DisposableStruct_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public struct DisposableStruct : IDisposable
{
public void Dispose()
{
Dispose(true);
}
private void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): a disposable Structure never needs GC.SuppressFinalize.
[Fact]
public async Task DisposableStruct_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Structure DisposableStruct
Implements IDisposable
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
End Sub
Private Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Structure";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: a sealed type may call GC.SuppressFinalize(this) in its constructor
// to suppress an inherited finalizer (here from Component).
[Fact]
public async Task SealedDisposableCallingGCSuppressFinalizeInConstructor_CSharp_NoDiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public sealed class SealedDisposableCallingGCSuppressFinalizeInConstructor : Component
{
public SealedDisposableCallingGCSuppressFinalizeInConstructor()
{
// We don't ever want our finalizer (that we inherit from Component) to run
// (We are sealed and we don't own any unmanaged resources).
GC.SuppressFinalize(this);
}
}";

    await VerifyCS.VerifyAnalyzerAsync(code);
}
// No CA1816 (VB): a NotInheritable type may call GC.SuppressFinalize(Me) in its
// constructor to suppress an inherited finalizer (here from Component).
[Fact]
public async Task SealedDisposableCallingGCSuppressFinalizeInConstructor_Basic_NoDiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public NotInheritable Class SealedDisposableCallingGCSuppressFinalizeInConstructor
Inherits Component
Public Sub New()
' We don't ever want our finalizer (that we inherit from Component) to run
' (We are sealed and we don't own any unmanaged resources).
GC.SuppressFinalize(Me)
End Sub
End Class";

    await VerifyVB.VerifyAnalyzerAsync(code);
}
// No CA1816: explicit interface implementations of IDisposable.Dispose (C#) and
// a differently named implementing Sub (VB) that call GC.SuppressFinalize are fine.
[Fact]
public async Task Disposable_ImplementedExplicitly_NoDiagnosticAsync()
{
    var csharpCode = @"
using System;
public class ImplementsDisposableExplicitly : IDisposable
{
void IDisposable.Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
}
}";
    await VerifyCS.VerifyAnalyzerAsync(csharpCode);

    var vbCode = @"
Imports System
Public Class C
Implements IDisposable
Protected Sub NamedDifferent() Implements IDisposable.Dispose
Dispose(True)
GC.SuppressFinalize(Me)
End Sub
Public Sub Dispose(disposing As Boolean)
End Sub
End Class";
    await VerifyVB.VerifyAnalyzerAsync(vbCode);
}
#endregion
#region DiagnosticCases
// CA1816 (NotCalledWithFinalizerRule): a public disposable with a finalizer whose
// Dispose() does not call GC.SuppressFinalize is flagged at the Dispose declaration.
[Fact]
public async Task SealedDisposableWithFinalizer_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class SealedDisposableWithFinalizer : IDisposable
{
public static void Main(string[] args)
{
}
~SealedDisposableWithFinalizer()
{
Dispose(false);
}
public void Dispose()
{
Dispose(true);
// GC.SuppressFinalize(this);
}
private void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";
    var diagnosticResult = GetCA1816CSharpResultAt(
        line: 17,
        column: 21,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule,
        containingMethodName: "SealedDisposableWithFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);

    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledWithFinalizerRule, VB): finalizer present but Dispose() never
// calls GC.SuppressFinalize.
[Fact]
public async Task SealedDisposableWithFinalizer_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class SealedDisposableWithFinalizer
Implements IDisposable
Public Shared Sub Main(args As String())
End Sub
Protected Overrides Sub Finalize()
Try
Dispose(False)
Finally
MyBase.Finalize()
End Try
End Sub
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
' GC.SuppressFinalize(this);
End Sub
Private Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";
    var diagnosticResult = GetCA1816BasicResultAt(
        line: 19,
        column: 13,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule,
        containingMethodName: "SealedDisposableWithFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);

    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledWithFinalizerRule): unsealed disposable with a finalizer whose
// Dispose() omits GC.SuppressFinalize.
[Fact]
public async Task DisposableWithFinalizer_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class DisposableWithFinalizer : IDisposable
{
~DisposableWithFinalizer()
{
Dispose(false);
}
public void Dispose()
{
Dispose(true);
// GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";
    var diagnosticResult = GetCA1816CSharpResultAt(
        line: 12,
        column: 17,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule,
        containingMethodName: "DisposableWithFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);

    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledWithFinalizerRule, VB): unsealed disposable with a finalizer
// whose Dispose() omits GC.SuppressFinalize.
[Fact]
public async Task DisposableWithFinalizer_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class DisposableWithFinalizer
Implements IDisposable
Protected Overrides Sub Finalize()
Try
Dispose(False)
Finally
MyBase.Finalize()
End Try
End Sub
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
' GC.SuppressFinalize(this);
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";
    var diagnosticResult = GetCA1816BasicResultAt(
        line: 15,
        column: 13,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule,
        containingMethodName: "DisposableWithFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);

    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledWithFinalizerRule): even an internal type is flagged when it
// declares a finalizer but its Dispose() omits GC.SuppressFinalize.
[Fact]
public async Task InternalDisposableWithFinalizer_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
internal class InternalDisposableWithFinalizer : IDisposable
{
~InternalDisposableWithFinalizer()
{
Dispose(false);
}
public void Dispose()
{
Dispose(true);
// GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";
    var diagnosticResult = GetCA1816CSharpResultAt(
        line: 12,
        column: 17,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule,
        containingMethodName: "InternalDisposableWithFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);

    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledWithFinalizerRule, VB): a Friend type with a finalizer is flagged
// when Dispose() omits GC.SuppressFinalize.
[Fact]
public async Task InternalDisposableWithFinalizer_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Friend Class InternalDisposableWithFinalizer
Implements IDisposable
Protected Overrides Sub Finalize()
Try
Dispose(False)
Finally
MyBase.Finalize()
End Try
End Sub
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
' GC.SuppressFinalize(this);
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";
    var diagnosticResult = GetCA1816BasicResultAt(
        line: 15,
        column: 13,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule,
        containingMethodName: "InternalDisposableWithFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);

    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledWithFinalizerRule): a private nested type with a finalizer is
// flagged when its Dispose() omits GC.SuppressFinalize; the diagnostic message
// uses the fully nested method name.
[Fact]
public async Task PrivateDisposableWithFinalizer_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public static class NestedClassHolder
{
private class PrivateDisposableWithFinalizer : IDisposable
{
~PrivateDisposableWithFinalizer()
{
Dispose(false);
}
public void Dispose()
{
Dispose(true);
// GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}
}";
    var diagnosticResult = GetCA1816CSharpResultAt(
        line: 14,
        column: 21,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule,
        containingMethodName: "NestedClassHolder.PrivateDisposableWithFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);

    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledWithFinalizerRule, VB): a Private nested type with a finalizer is
// flagged when its Dispose() omits GC.SuppressFinalize.
[Fact]
public async Task PrivateDisposableWithFinalizer_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public NotInheritable Class NestedClassHolder
Private Sub New()
End Sub
Private Class PrivateDisposableWithFinalizer
Implements IDisposable
Protected Overrides Sub Finalize()
Try
Dispose(False)
Finally
MyBase.Finalize()
End Try
End Sub
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
' GC.SuppressFinalize(this);
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class
End Class";
    var diagnosticResult = GetCA1816BasicResultAt(
        line: 18,
        column: 14,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule,
        containingMethodName: "NestedClassHolder.PrivateDisposableWithFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);

    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledRule): a public unsealed disposable without a finalizer should
// still call GC.SuppressFinalize (derived types may add a finalizer).
[Fact]
public async Task DisposableWithoutFinalizer_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class DisposableWithoutFinalizer : IDisposable
{
public void Dispose()
{
Dispose(true);
// GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
Console.WriteLine(this);
Console.WriteLine(disposing);
}
}";
    var diagnosticResult = GetCA1816CSharpResultAt(
        line: 7,
        column: 17,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule,
        containingMethodName: "DisposableWithoutFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);

    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledRule, VB): a public inheritable disposable without a finalizer
// should still call GC.SuppressFinalize.
[Fact]
public async Task DisposableWithoutFinalizer_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class DisposableWithoutFinalizer
Implements IDisposable
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
' GC.SuppressFinalize(this);
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
Console.WriteLine(Me)
Console.WriteLine(disposing)
End Sub
End Class";
    var diagnosticResult = GetCA1816BasicResultAt(
        line: 7,
        column: 13,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule,
        containingMethodName: "DisposableWithoutFinalizer.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);

    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledRule): a type deriving from Component (which has a finalizer
// in its hierarchy) must call GC.SuppressFinalize in Dispose().
[Fact]
public async Task DisposableComponent_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class DisposableComponent : Component, IDisposable
{
public void Dispose()
{
Dispose(true);
// GC.SuppressFinalize(this);
}
}";
    var diagnosticResult = GetCA1816CSharpResultAt(
        line: 7,
        column: 17,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule,
        containingMethodName: "DisposableComponent.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);

    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (NotCalledRule, VB): a type inheriting Component must call
// GC.SuppressFinalize in Dispose().
[Fact]
public async Task DisposableComponent_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class DisposableComponent
Inherits Component
Implements IDisposable
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
' GC.SuppressFinalize(this);
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
End Sub
End Class";
    var diagnosticResult = GetCA1816BasicResultAt(
        line: 8,
        column: 13,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule,
        containingMethodName: "DisposableComponent.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);

    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (OutsideDisposeRule): calling GC.SuppressFinalize from a type that does
// not implement IDisposable is flagged at the call site.
[Fact]
public async Task NotADisposableClass_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class NotADisposableClass
{
public NotADisposableClass()
{
GC.SuppressFinalize(this);
}
}";
    var diagnosticResult = GetCA1816CSharpResultAt(
        line: 9,
        column: 9,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule,
        containingMethodName: "NotADisposableClass.NotADisposableClass()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);

    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (OutsideDisposeRule, VB): calling GC.SuppressFinalize from a
// non-disposable type's constructor is flagged.
[Fact]
public async Task NotADisposableClass_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class NotADisposableClass
Public Sub New()
GC.SuppressFinalize(Me)
End Sub
End Class";
    var diagnosticResult = GetCA1816BasicResultAt(
        line: 7,
        column: 3,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule,
        containingMethodName: "NotADisposableClass.New()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);

    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816: four diagnostics for one type — OutsideDisposeRule for the constructor,
// NotCalledRule for Dispose() (it only calls SuppressFinalize indirectly),
// OutsideDisposeRule for the private helper, and OutsideDisposeRule for
// Dispose(bool).
[Fact]
public async Task DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces : IDisposable
{
public DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces()
{
GC.SuppressFinalize(this);
}
public void Dispose()
{
Dispose(true);
CallGCSuppressFinalize();
}
private void CallGCSuppressFinalize()
{
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
Console.WriteLine(this);
GC.SuppressFinalize(this);
}
}
}";
    var diagnosticResult1 = GetCA1816CSharpResultAt(
        line: 9,
        column: 9,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);
    var diagnosticResult2 = GetCA1816CSharpResultAt(
        line: 12,
        column: 17,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);
    var diagnosticResult3 = GetCA1816CSharpResultAt(
        line: 20,
        column: 9,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.CallGCSuppressFinalize()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);
    var diagnosticResult4 = GetCA1816CSharpResultAt(
        line: 28,
        column: 13,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.Dispose(bool)",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);

    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult1, diagnosticResult2, diagnosticResult3, diagnosticResult4);
}
// CA1816 (VB): four diagnostics for one type — OutsideDisposeRule for the
// constructor, NotCalledRule for Dispose(), OutsideDisposeRule for the private
// helper, and OutsideDisposeRule for Dispose(Boolean).
[Fact]
public async Task DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces
Implements IDisposable
Public Sub New()
GC.SuppressFinalize(Me)
End Sub
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
CallGCSuppressFinalize()
End Sub
Private Sub CallGCSuppressFinalize()
GC.SuppressFinalize(Me)
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
If disposing Then
Console.WriteLine(Me)
GC.SuppressFinalize(Me)
End If
End Sub
End Class";
    var diagnosticResult1 = GetCA1816BasicResultAt(
        line: 8,
        column: 3,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.New()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);
    var diagnosticResult2 = GetCA1816BasicResultAt(
        line: 11,
        column: 13,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);
    var diagnosticResult3 = GetCA1816BasicResultAt(
        line: 17,
        column: 3,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.CallGCSuppressFinalize()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);
    var diagnosticResult4 = GetCA1816BasicResultAt(
        line: 23,
        column: 4,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.Dispose(Boolean)",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);

    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult1, diagnosticResult2, diagnosticResult3, diagnosticResult4);
}
// CA1816 (C#): Dispose() calls GC.SuppressFinalize but passes a literal
// (true) instead of 'this', so the NotPassedThisRule diagnostic is expected
// at the call site.
[Fact]
public async Task DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments_CSharp_DiagnosticAsync()
{
    var code = @"
using System;
using System.ComponentModel;
public class DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments : IDisposable
{
public DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments()
{
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
Console.WriteLine(this);
}
}
}";
    // NOTE(review): line/column assume the snippet's original formatting
    // (blank lines may have been stripped in this copy) — verify before editing.
    var diagnosticResult = GetCA1816CSharpResultAt(
        line: 14,
        column: 9,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotPassedThisRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp);
    await VerifyCS.VerifyAnalyzerAsync(code, diagnosticResult);
}
// CA1816 (VB): mirror of the C# wrong-arguments test — Dispose() passes a
// literal (True) instead of 'Me' to GC.SuppressFinalize, triggering
// NotPassedThisRule.
[Fact]
public async Task DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments_Basic_DiagnosticAsync()
{
    var code = @"
Imports System
Imports System.ComponentModel
Public Class DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments
Implements IDisposable
Public Sub New()
End Sub
Public Sub Dispose() Implements IDisposable.Dispose
Dispose(True)
GC.SuppressFinalize(True)
End Sub
Protected Overridable Sub Dispose(disposing As Boolean)
If disposing Then
Console.WriteLine(Me)
End If
End Sub
End Class"
    // NOTE(review): line/column assume the snippet's original formatting
    // (blank lines may have been stripped in this copy) — verify before editing.
    var diagnosticResult = GetCA1816BasicResultAt(
        line: 12,
        column: 3,
        rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotPassedThisRule,
        containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments.Dispose()",
        gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic);
    await VerifyVB.VerifyAnalyzerAsync(code, diagnosticResult);
}
#endregion
}
}
| |
// ***********************************************************************
// Copyright (c) 2014 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Threading;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal.Execution;
#if !SILVERLIGHT && !NETCF && !PORTABLE
using System.Runtime.Remoting.Messaging;
using System.Security.Principal;
using NUnit.Framework.Compatibility;
#endif
namespace NUnit.Framework.Internal
{
/// <summary>
/// Helper class used to save and restore certain static or
/// singleton settings in the environment that affect tests
/// or which might be changed by the user tests.
///
/// An internal class is used to hold settings and a stack
/// of these objects is pushed and popped as Save and Restore
/// are called.
/// </summary>
public class TestExecutionContext
#if !SILVERLIGHT && !NETCF && !PORTABLE
    : LongLivedMarshalByRefObject, ILogicalThreadAffinative
#endif
{
    // NOTE: Be very careful when modifying this class. It uses
    // conditional compilation extensively and you must give
    // thought to whether any new features will be supported
    // on each platform. In particular, instance fields,
    // properties, initialization and restoration must all
    // use the same conditions for each feature.

    #region Instance Fields

    /// <summary>
    /// Link to a prior saved context
    /// </summary>
    private TestExecutionContext _priorContext;

    /// <summary>
    /// Indicates that a stop has been requested
    /// </summary>
    private TestExecutionStatus _executionStatus;

    /// <summary>
    /// The event listener currently receiving notifications
    /// </summary>
    private ITestListener _listener = TestListener.NULL;

    /// <summary>
    /// The number of assertions for the current test
    /// </summary>
    private int _assertCount;

    // Both created lazily by their corresponding properties below.
    private Randomizer _randomGenerator;
    private IWorkItemDispatcher _dispatcher;

    /// <summary>
    /// The current culture
    /// </summary>
    private CultureInfo _currentCulture;

    /// <summary>
    /// The current UI culture
    /// </summary>
    private CultureInfo _currentUICulture;

    /// <summary>
    /// The current test result
    /// </summary>
    private TestResult _currentResult;

#if !NETCF && !SILVERLIGHT && !PORTABLE
    /// <summary>
    /// The current Principal.
    /// </summary>
    private IPrincipal _currentPrincipal;
#endif

    #endregion

    #region Constructors

    /// <summary>
    /// Initializes a new instance of the <see cref="TestExecutionContext"/> class.
    /// </summary>
    public TestExecutionContext()
    {
        _priorContext = null;
        this.TestCaseTimeout = 0;
        this.UpstreamActions = new List<ITestAction>();

        // Snapshot the culture settings (and principal, where supported)
        // of the thread on which the context is created.
        _currentCulture = CultureInfo.CurrentCulture;
        _currentUICulture = CultureInfo.CurrentUICulture;
#if !NETCF && !SILVERLIGHT && !PORTABLE
        _currentPrincipal = Thread.CurrentPrincipal;
#endif
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="TestExecutionContext"/> class.
    /// </summary>
    /// <param name="other">An existing instance of TestExecutionContext.</param>
    public TestExecutionContext(TestExecutionContext other)
    {
        _priorContext = other;

        this.CurrentTest = other.CurrentTest;
        this.CurrentResult = other.CurrentResult;
        this.TestObject = other.TestObject;
        this.WorkDirectory = other.WorkDirectory;
        _listener = other._listener;
        this.StopOnError = other.StopOnError;
        this.TestCaseTimeout = other.TestCaseTimeout;

        // Copy the action list so additions made in this context do not
        // leak back into the prior context.
        this.UpstreamActions = new List<ITestAction>(other.UpstreamActions);

        // Cultures come from the current thread; the principal (where
        // supported) is inherited from the other context.
        _currentCulture = CultureInfo.CurrentCulture;
        _currentUICulture = CultureInfo.CurrentUICulture;
#if !NETCF && !SILVERLIGHT && !PORTABLE
        _currentPrincipal = other.CurrentPrincipal;
#endif

        this.Dispatcher = other.Dispatcher;
        this.ParallelScope = other.ParallelScope;
    }

    #endregion

    #region Static Singleton Instance

    /// <summary>
    /// The current context, head of the list of saved contexts.
    /// </summary>
#if SILVERLIGHT || PORTABLE
    [ThreadStatic]
    private static TestExecutionContext current;
#elif NETCF
    private static LocalDataStoreSlot slotContext = Thread.AllocateDataSlot();
#else
    // Key under which the context is stored in the logical CallContext.
    private static readonly string CONTEXT_KEY = "NUnit.Framework.TestContext";
#endif

    /// <summary>
    /// Gets the current context.
    /// </summary>
    /// <value>The current context.</value>
    public static TestExecutionContext CurrentContext
    {
        get
        {
            // If a user creates a thread then the current context
            // will be null. This also happens when the compiler
            // automatically creates threads for async methods.
            // We create a new context, which is automatically
            // populated with _values taken from the current thread.
#if SILVERLIGHT || PORTABLE
            if (current == null)
                current = new TestExecutionContext();
            return current;
#elif NETCF
            var current = (TestExecutionContext)Thread.GetData(slotContext);
            if (current == null)
            {
                current = new TestExecutionContext();
                Thread.SetData(slotContext, current);
            }
            return current;
#else
            var context = GetTestExecutionContext();
            if (context == null) // This can happen on Mono
            {
                context = new TestExecutionContext();
                CallContext.SetData(CONTEXT_KEY, context);
            }
            return context;
#endif
        }
        private set
        {
#if SILVERLIGHT || PORTABLE
            current = value;
#elif NETCF
            Thread.SetData(slotContext, value);
#else
            // A null value frees the named slot entirely instead of
            // storing null in it (see ClearCurrentContext).
            if (value == null)
                CallContext.FreeNamedDataSlot(CONTEXT_KEY);
            else
                CallContext.SetData(CONTEXT_KEY, value);
#endif
        }
    }

#if !SILVERLIGHT && !NETCF && !PORTABLE
    /// <summary>
    /// Get the current context or return null if none is found.
    /// </summary>
    public static TestExecutionContext GetTestExecutionContext()
    {
        return CallContext.GetData(CONTEXT_KEY) as TestExecutionContext;
    }
#endif

    /// <summary>
    /// Clear the current context. This is provided to
    /// prevent "leakage" of the CallContext containing
    /// the current context back to any runners.
    /// </summary>
    public static void ClearCurrentContext()
    {
        CurrentContext = null;
    }

    #endregion

    #region Properties

    /// <summary>
    /// Gets or sets the current test
    /// </summary>
    public Test CurrentTest { get; set; }

    /// <summary>
    /// The time the current test started execution
    /// </summary>
    public DateTime StartTime { get; set; }

    /// <summary>
    /// The time the current test started in Ticks
    /// </summary>
    public long StartTicks { get; set; }

    /// <summary>
    /// Gets or sets the current test result
    /// </summary>
    public TestResult CurrentResult
    {
        get { return _currentResult; }
        set
        {
            _currentResult = value;
            // Keep OutWriter pointed at the new result's writer so test
            // output flows into the correct result.
            if (value != null)
                OutWriter = value.OutWriter;
        }
    }

    /// <summary>
    /// Gets a TextWriter that will send output to the current test result.
    /// </summary>
    public TextWriter OutWriter { get; private set; }

    /// <summary>
    /// The current test object - that is the user fixture
    /// object on which tests are being executed.
    /// </summary>
    public object TestObject { get; set; }

    /// <summary>
    /// Get or set the working directory
    /// </summary>
    public string WorkDirectory { get; set; }

    /// <summary>
    /// Get or set indicator that run should stop on the first error
    /// </summary>
    public bool StopOnError { get; set; }

    /// <summary>
    /// Gets an enum indicating whether a stop has been requested.
    /// </summary>
    public TestExecutionStatus ExecutionStatus
    {
        get
        {
            // ExecutionStatus may have been set to StopRequested or AbortRequested
            // in a prior context. If so, reflect the same setting in this context.
            if (_executionStatus == TestExecutionStatus.Running && _priorContext != null)
                _executionStatus = _priorContext.ExecutionStatus;

            return _executionStatus;
        }
        set
        {
            _executionStatus = value;

            // Push the same setting up to all prior contexts
            if (_priorContext != null)
                _priorContext.ExecutionStatus = value;
        }
    }

    /// <summary>
    /// The current test event listener
    /// </summary>
    internal ITestListener Listener
    {
        get { return _listener; }
        set { _listener = value; }
    }

    /// <summary>
    /// The current WorkItemDispatcher
    /// </summary>
    internal IWorkItemDispatcher Dispatcher
    {
        get
        {
            // Fall back to a simple dispatcher when none has been assigned.
            if (_dispatcher == null)
                _dispatcher = new SimpleWorkItemDispatcher();

            return _dispatcher;
        }
        set { _dispatcher = value; }
    }

    /// <summary>
    /// The ParallelScope to be used by tests running in this context.
    /// For builds with out the parallel feature, it has no effect.
    /// </summary>
    public ParallelScope ParallelScope { get; set; }

    /// <summary>
    /// Gets the RandomGenerator specific to this Test
    /// </summary>
    public Randomizer RandomGenerator
    {
        get
        {
            // Created on first use, seeded from the current test's Seed
            // so random values are reproducible per test.
            if (_randomGenerator == null)
                _randomGenerator = new Randomizer(CurrentTest.Seed);
            return _randomGenerator;
        }
    }

    /// <summary>
    /// Gets the assert count.
    /// </summary>
    /// <value>The assert count.</value>
    internal int AssertCount
    {
        get { return _assertCount; }
    }

    /// <summary>
    /// Gets or sets the test case timeout value
    /// </summary>
    public int TestCaseTimeout { get; set; }

    /// <summary>
    /// Gets a list of ITestActions set by upstream tests
    /// </summary>
    public List<ITestAction> UpstreamActions { get; private set; }

    // TODO: Put in checks on all of these settings
    // with side effects so we only change them
    // if the value is different

    /// <summary>
    /// Saves or restores the CurrentCulture
    /// </summary>
    public CultureInfo CurrentCulture
    {
        get { return _currentCulture; }
        set
        {
            _currentCulture = value;
#if !NETCF
            // Apply immediately to the executing thread as well.
            Thread.CurrentThread.CurrentCulture = _currentCulture;
#endif
        }
    }

    /// <summary>
    /// Saves or restores the CurrentUICulture
    /// </summary>
    public CultureInfo CurrentUICulture
    {
        get { return _currentUICulture; }
        set
        {
            _currentUICulture = value;
#if !NETCF
            // Apply immediately to the executing thread as well.
            Thread.CurrentThread.CurrentUICulture = _currentUICulture;
#endif
        }
    }

#if !NETCF && !SILVERLIGHT && !PORTABLE
    /// <summary>
    /// Gets or sets the current <see cref="IPrincipal"/> for the Thread.
    /// </summary>
    public IPrincipal CurrentPrincipal
    {
        get { return _currentPrincipal; }
        set
        {
            _currentPrincipal = value;
            // Apply immediately to the executing thread as well.
            Thread.CurrentPrincipal = _currentPrincipal;
        }
    }
#endif

    #endregion

    #region Instance Methods

    /// <summary>
    /// Record any changes in the environment made by
    /// the test code in the execution context so it
    /// will be passed on to lower level tests.
    /// </summary>
    public void UpdateContextFromEnvironment()
    {
        _currentCulture = CultureInfo.CurrentCulture;
        _currentUICulture = CultureInfo.CurrentUICulture;
#if !NETCF && !SILVERLIGHT && !PORTABLE
        _currentPrincipal = Thread.CurrentPrincipal;
#endif
    }

    /// <summary>
    /// Set up the execution environment to match a context.
    /// Note that we may be running on the same thread where the
    /// context was initially created or on a different thread.
    /// </summary>
    public void EstablishExecutionEnvironment()
    {
#if !NETCF
        Thread.CurrentThread.CurrentCulture = _currentCulture;
        Thread.CurrentThread.CurrentUICulture = _currentUICulture;
#endif

#if !NETCF && !SILVERLIGHT && !PORTABLE
        Thread.CurrentPrincipal = _currentPrincipal;
#endif

        CurrentContext = this;
    }

    /// <summary>
    /// Increments the assert count by one.
    /// </summary>
    public void IncrementAssertCount()
    {
        // Interlocked because tests may run and assert in parallel.
        Interlocked.Increment(ref _assertCount);
    }

    /// <summary>
    /// Increments the assert count by a specified amount.
    /// </summary>
    public void IncrementAssertCount(int count)
    {
        // TODO: Temporary implementation
        while (count-- > 0)
            Interlocked.Increment(ref _assertCount);
    }

    #endregion

    #region InitializeLifetimeService

#if !SILVERLIGHT && !NETCF && !PORTABLE
    /// <summary>
    /// Obtain lifetime service object
    /// </summary>
    /// <returns></returns>
    public override object InitializeLifetimeService()
    {
        // Returning null requests an infinite remoting lease, keeping the
        // cross-domain proxy alive for the life of the test run.
        return null;
    }
#endif

    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Serilog;
using Serilog.Events;
using Totem.Runtime.Hosting;
using Totem.Timeline.EventStore.Hosting;
using Totem.Timeline.Hosting;
namespace Totem.App.Service
{
/// <summary>
/// Configures the hosting of a Totem service application
/// </summary>
/// <summary>
/// Configures the hosting of a Totem service application.
/// Each phase of host construction is represented by a configurable "step"
/// whose defaults can be extended (Before/After) or replaced entirely.
/// </summary>
public class ConfigureServiceApp
{
    // Step whose configure delegates also receive the HostBuilderContext.
    class ServiceStep<TArg> : ConfigureStep<HostBuilderContext, TArg> {}

    // One step per phase of host construction, applied by the Apply* methods below.
    readonly ConfigureStep<IHostBuilder> _host = new ConfigureStep<IHostBuilder>();
    readonly ConfigureStep<IConfigurationBuilder> _hostConfiguration = new ConfigureStep<IConfigurationBuilder>();
    readonly ServiceStep<IConfigurationBuilder> _appConfiguration = new ServiceStep<IConfigurationBuilder>();
    readonly ServiceStep<IServiceCollection> _services = new ServiceStep<IServiceCollection>();
    readonly ServiceStep<ITimelineBuilder> _timeline = new ServiceStep<ITimelineBuilder>();
    readonly ServiceStep<LoggerConfiguration> _serilog = new ServiceStep<LoggerConfiguration>();

    CancellationToken _cancellationToken;
    bool _disableSerilog;

    /// <summary>
    /// Sets a token an external host can use to stop the application
    /// (wired up via ServiceAppCancellation in ApplyServices).
    /// </summary>
    public ConfigureServiceApp CancellationToken(CancellationToken token)
    {
        _cancellationToken = token;
        return this;
    }

    /// <summary>
    /// Skips Serilog configuration entirely (see ApplySerilog).
    /// </summary>
    public ConfigureServiceApp DisableSerilog()
    {
        _disableSerilog = true;
        return this;
    }

    //
    // Before/After/Replace
    //
    // Before: run the delegate ahead of the step's default configuration.
    // After: run it following the default configuration.
    // Replace: discard the default and run only the delegate.
    //

    public ConfigureServiceApp BeforeHost(Action<IHostBuilder> configure) =>
        _host.Before(this, configure);

    public ConfigureServiceApp BeforeHostConfiguration(Action<IConfigurationBuilder> configure) =>
        _hostConfiguration.Before(this, configure);

    public ConfigureServiceApp BeforeAppConfiguration(Action<HostBuilderContext, IConfigurationBuilder> configure) =>
        _appConfiguration.Before(this, configure);

    public ConfigureServiceApp BeforeServices(Action<HostBuilderContext, IServiceCollection> configure) =>
        _services.Before(this, configure);

    public ConfigureServiceApp BeforeTimeline(Action<HostBuilderContext, ITimelineBuilder> configure) =>
        _timeline.Before(this, configure);

    public ConfigureServiceApp BeforeSerilog(Action<HostBuilderContext, LoggerConfiguration> configure) =>
        _serilog.Before(this, configure);

    public ConfigureServiceApp AfterHost(Action<IHostBuilder> configure) =>
        _host.After(this, configure);

    public ConfigureServiceApp AfterHostConfiguration(Action<IConfigurationBuilder> configure) =>
        _hostConfiguration.After(this, configure);

    public ConfigureServiceApp AfterAppConfiguration(Action<HostBuilderContext, IConfigurationBuilder> configure) =>
        _appConfiguration.After(this, configure);

    public ConfigureServiceApp AfterServices(Action<HostBuilderContext, IServiceCollection> configure) =>
        _services.After(this, configure);

    public ConfigureServiceApp AfterTimeline(Action<HostBuilderContext, ITimelineBuilder> configure) =>
        _timeline.After(this, configure);

    public ConfigureServiceApp AfterSerilog(Action<HostBuilderContext, LoggerConfiguration> configure) =>
        _serilog.After(this, configure);

    public ConfigureServiceApp ReplaceHost(Action<IHostBuilder> configure) =>
        _host.Replace(this, configure);

    public ConfigureServiceApp ReplaceHostConfiguration(Action<IConfigurationBuilder> configure) =>
        _hostConfiguration.Replace(this, configure);

    public ConfigureServiceApp ReplaceAppConfiguration(Action<HostBuilderContext, IConfigurationBuilder> configure) =>
        _appConfiguration.Replace(this, configure);

    public ConfigureServiceApp ReplaceServices(Action<HostBuilderContext, IServiceCollection> configure) =>
        _services.Replace(this, configure);

    public ConfigureServiceApp ReplaceTimeline(Action<HostBuilderContext, ITimelineBuilder> configure) =>
        _timeline.Replace(this, configure);

    public ConfigureServiceApp ReplaceSerilog(Action<HostBuilderContext, LoggerConfiguration> configure) =>
        _serilog.Replace(this, configure);

    //
    // Before/After/Replace (without context)
    //
    // Convenience overloads for callers that do not need the HostBuilderContext.
    //

    public ConfigureServiceApp BeforeAppConfiguration(Action<IConfigurationBuilder> configure) =>
        _appConfiguration.Before(this, configure);

    public ConfigureServiceApp BeforeServices(Action<IServiceCollection> configure) =>
        _services.Before(this, configure);

    public ConfigureServiceApp BeforeTimeline(Action<ITimelineBuilder> configure) =>
        _timeline.Before(this, configure);

    public ConfigureServiceApp BeforeSerilog(Action<LoggerConfiguration> configure) =>
        _serilog.Before(this, configure);

    public ConfigureServiceApp AfterAppConfiguration(Action<IConfigurationBuilder> configure) =>
        _appConfiguration.After(this, configure);

    public ConfigureServiceApp AfterServices(Action<IServiceCollection> configure) =>
        _services.After(this, configure);

    public ConfigureServiceApp AfterTimeline(Action<ITimelineBuilder> configure) =>
        _timeline.After(this, configure);

    public ConfigureServiceApp AfterSerilog(Action<LoggerConfiguration> configure) =>
        _serilog.After(this, configure);

    public ConfigureServiceApp ReplaceAppConfiguration(Action<IConfigurationBuilder> configure) =>
        _appConfiguration.Replace(this, configure);

    public ConfigureServiceApp ReplaceServices(Action<IServiceCollection> configure) =>
        _services.Replace(this, configure);

    public ConfigureServiceApp ReplaceTimeline(Action<ITimelineBuilder> configure) =>
        _timeline.Replace(this, configure);

    public ConfigureServiceApp ReplaceSerilog(Action<LoggerConfiguration> configure) =>
        _serilog.Replace(this, configure);

    //
    // Apply
    //
    // Each Apply* method runs its step against the host builder, passing the
    // default configuration as the step's fallback delegate.
    //

    public void ApplyHost(IHostBuilder host) =>
        _host.Apply(host);

    // Default: take the hosting environment from NETCORE_ENVIRONMENT,
    // falling back to Development when the variable is unset.
    public void ApplyHostConfiguration(IHostBuilder host) =>
        host.ConfigureHostConfiguration(hostConfiguration =>
            _hostConfiguration.Apply(hostConfiguration, () =>
            {
                var pairs = new Dictionary<string, string>
                {
                    [HostDefaults.EnvironmentKey] = Environment.GetEnvironmentVariable("NETCORE_ENVIRONMENT") ?? EnvironmentName.Development
                };

                hostConfiguration.AddInMemoryCollection(pairs);
            }));

    // Default: environment variables, command line, appsettings.json and the
    // environment-specific appsettings file; user secrets in Development only.
    public void ApplyAppConfiguration(IHostBuilder host) =>
        host.ConfigureAppConfiguration((context, appConfiguration) =>
            _appConfiguration.Apply(context, appConfiguration, () =>
            {
                appConfiguration
                    .AddEnvironmentVariables()
                    .AddCommandLine(Environment.GetCommandLineArgs())
                    .AddJsonFile("appsettings.json", optional: true)
                    .AddJsonFile($"appsettings.{context.HostingEnvironment.EnvironmentName}.json", optional: true);

                if(context.HostingEnvironment.IsDevelopment())
                {
                    appConfiguration.AddUserSecrets(Assembly.GetEntryAssembly(), optional: true);
                }
            }));

    // Default: register the Totem runtime and the timeline for TArea (backed
    // by EventStore), plus a hosted service that honors the cancellation token.
    public void ApplyServices<TArea>(IHostBuilder host) where TArea : TimelineArea, new() =>
        host.ConfigureServices((context, services) =>
            _services.Apply(context, services, () =>
            {
                services.AddTotemRuntime();

                services.AddTimeline<TArea>(timeline =>
                    _timeline.Apply(context, timeline, () =>
                        timeline.AddEventStore().BindOptionsToConfiguration()));

                // Allow an external host (such as a Windows Service) to stop the application
                services.AddSingleton<IHostedService>(p =>
                    new ServiceAppCancellation(p.GetService<IApplicationLifetime>(), _cancellationToken));
            }));

    // Default: console sink when running interactively; Information level in
    // Development (with System/Microsoft quieted to Warning), Warning otherwise;
    // then let configuration override everything.
    public void ApplySerilog(IHostBuilder host)
    {
        if(_disableSerilog)
        {
            return;
        }

        host.UseSerilog((context, serilog) =>
            _serilog.Apply(context, serilog, () =>
            {
                if(Environment.UserInteractive)
                {
                    serilog.WriteTo.Console();
                }

                if(context.HostingEnvironment.IsDevelopment())
                {
                    serilog
                        .MinimumLevel.Information()
                        .MinimumLevel.Override("System", LogEventLevel.Warning)
                        .MinimumLevel.Override("Microsoft", LogEventLevel.Warning);
                }
                else
                {
                    serilog.MinimumLevel.Warning();
                }

                serilog.ReadFrom.Configuration(context.Configuration);
            }));
    }
}
}
| |
// Copyright 2005-2010 Gallio Project - http://www.gallio.org/
// Portions Copyright 2000-2004 Jonathan de Halleux
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using Gallio.Common.Collections;
using Gallio.Common.Markup;
using Gallio.Common.Markup.Tags;
using MbUnit.Framework;
using MbUnit.Framework.ContractVerifiers;
namespace Gallio.Tests.Common.Markup
{
[TestsOn(typeof(StructuredText))]
public class StructuredTextTest
{
    // Shared fixture: text content is "a" + "bc" (in a section) + "def" (in a
    // marker) + "ghij" = 10 characters (see GetTextLength below; the embed
    // contributes no text), plus one plain-text attachment.
    private static readonly StructuredText Example = new StructuredText(new BodyTag()
    {
        Contents =
        {
            new TextTag("a"),
            new SectionTag("blah") { Contents = { new TextTag("bc") }},
            new MarkerTag(Marker.AssertionFailure) { Contents = { new TextTag("def")}},
            new EmbedTag("attachment"),
            new TextTag("ghij")
        }
    }, new Attachment[] { new TextAttachment("attachment", MimeTypes.PlainText, "text") });

    // Equality distinguishes instances by body tag and by attachment list
    // (second and third classes differ only in attachments).
    [VerifyContract]
    public readonly IContract EqualityTests = new EqualityContract<StructuredText>
    {
        EquivalenceClasses =
        {
            { new StructuredText("lalalala") },
            { new StructuredText(new BodyTag { Contents = { new TextTag("blah") }}) },
            { new StructuredText(new BodyTag { Contents = { new TextTag("blah") }}, new[] { new TextAttachment("abc", MimeTypes.PlainText, "blah") }) }
        }
    };

    // --- Constructor argument validation ---

    [Test, ExpectedArgumentNullException]
    public void ConstructorWithTextThrowsIfStringIsNull()
    {
        new StructuredText((string)null);
    }

    [Test, ExpectedArgumentNullException]
    public void ConstructorWithBodyTagThrowsIfBodyTagIsNull()
    {
        new StructuredText((BodyTag)null);
    }

    [Test, ExpectedArgumentNullException]
    public void ConstructorWithBodyTagAndAttachmentThrowsIfBodyTagIsNull()
    {
        new StructuredText(null, EmptyArray<Attachment>.Instance);
    }

    [Test, ExpectedArgumentNullException]
    public void ConstructorWithBodyTagAndAttachmentThrowsIfAttachmentListIsNull()
    {
        new StructuredText(new BodyTag(), null);
    }

    // --- Constructor property initialization ---

    [Test]
    public void ConstructorWithTextInitializesProperties()
    {
        StructuredText text = new StructuredText("blah");
        // A plain string becomes a single TextTag with no attachments.
        Assert.Count(0, text.Attachments);
        Assert.AreEqual(new BodyTag() { Contents = { new TextTag("blah") } }, text.BodyTag);
    }

    [Test]
    public void ConstructorWithBodyTagInitializesProperties()
    {
        StructuredText text = new StructuredText(Example.BodyTag);
        Assert.Count(0, text.Attachments);
        Assert.AreEqual(Example.BodyTag, text.BodyTag);
    }

    [Test]
    public void ConstructorWithBodyTagAndAttachmentsInitializesProperties()
    {
        StructuredText text = new StructuredText(Example.BodyTag, Example.Attachments);
        Assert.AreEqual(Example.BodyTag, text.BodyTag);
        Assert.AreEqual(Example.Attachments, text.Attachments);
    }

    [Test]
    public void GetTextLength()
    {
        // "a" + "bc" + "def" + "ghij" = 10; the EmbedTag adds nothing.
        Assert.AreEqual(10, Example.GetTextLength());
    }

    // --- WriteTo / TruncatedWriteTo ---

    [Test, ExpectedArgumentNullException]
    public void WriteToThrowsIfWriterIsNull()
    {
        Example.WriteTo(null);
    }

    [Test, ExpectedArgumentNullException]
    public void TruncatedWriteToThrowsIfWriterIsNull()
    {
        Example.TruncatedWriteTo(null, 0);
    }

    [Test, ExpectedArgumentOutOfRangeException]
    public void TruncatedWriteToThrowsIfMaxLengthIsNegative()
    {
        Example.TruncatedWriteTo(new StructuredTextWriter(), -1);
    }

    [Test]
    public void WriteToRecreatesTheStructuredText()
    {
        // Round trip: writing to a StructuredTextWriter reproduces the value.
        StructuredTextWriter writer = new StructuredTextWriter();
        Example.WriteTo(writer);
        Assert.AreEqual(Example, writer.ToStructuredText());
    }

    [Test]
    public void TruncatedWriteToRecreatesTheStructuredTextWhenMaxLengthEqualsTextLength()
    {
        // TruncatedWriteTo returns false when no truncation occurred.
        StructuredTextWriter writer = new StructuredTextWriter();
        Assert.IsFalse(Example.TruncatedWriteTo(writer, Example.GetTextLength()));
        Assert.AreEqual(Example, writer.ToStructuredText());
    }

    [Test]
    public void TruncatedWriteToRecreatesTheStructuredTextWhenMaxLengthExceedsTextLength()
    {
        StructuredTextWriter writer = new StructuredTextWriter();
        Assert.IsFalse(Example.TruncatedWriteTo(writer, Example.GetTextLength() + 1));
        Assert.AreEqual(Example, writer.ToStructuredText());
    }

    [Test]
    public void TruncatedWriteToTruncatesWhenLengthIsInsufficient0()
    {
        // maxLength 0: everything is cut, but the attachment list survives.
        StructuredTextWriter writer = new StructuredTextWriter();
        Assert.IsTrue(Example.TruncatedWriteTo(writer, 0));
        Assert.AreEqual(new StructuredText(new BodyTag(),
            new Attachment[] { new TextAttachment("attachment", MimeTypes.PlainText, "text") }),
            writer.ToStructuredText());
    }

    [Test]
    public void TruncatedWriteToTruncatesWhenLengthIsInsufficient1()
    {
        // maxLength 1: "a" fits; the following section remains but is emptied.
        StructuredTextWriter writer = new StructuredTextWriter();
        Assert.IsTrue(Example.TruncatedWriteTo(writer, 1));
        Assert.AreEqual(new StructuredText(new BodyTag()
        {
            Contents =
            {
                new TextTag("a"),
                new SectionTag("blah")
            }
        }, new Attachment[] { new TextAttachment("attachment", MimeTypes.PlainText, "text") }),
            writer.ToStructuredText());
    }

    [Test]
    public void TruncatedWriteToTruncatesWhenLengthIsInsufficient5()
    {
        // maxLength 5: cut falls inside the marker's "def", leaving "de".
        StructuredTextWriter writer = new StructuredTextWriter();
        Assert.IsTrue(Example.TruncatedWriteTo(writer, 5));
        Assert.AreEqual(new StructuredText(new BodyTag()
        {
            Contents =
            {
                new TextTag("a"),
                new SectionTag("blah") { Contents = { new TextTag("bc") }},
                new MarkerTag(Marker.AssertionFailure) { Contents = { new TextTag("de")}}
            }
        }, new Attachment[] { new TextAttachment("attachment", MimeTypes.PlainText, "text") }),
            writer.ToStructuredText());
    }

    [Test]
    public void TruncatedWriteToTruncatesWhenLengthIsInsufficient8()
    {
        // maxLength 8: cut falls inside the trailing "ghij", leaving "gh";
        // the embed is preserved since it contributes no text length.
        StructuredTextWriter writer = new StructuredTextWriter();
        Assert.IsTrue(Example.TruncatedWriteTo(writer, 8));
        Assert.AreEqual(new StructuredText(new BodyTag()
        {
            Contents =
            {
                new TextTag("a"),
                new SectionTag("blah") { Contents = { new TextTag("bc") }},
                new MarkerTag(Marker.AssertionFailure) { Contents = { new TextTag("def")}},
                new EmbedTag("attachment"),
                new TextTag("gh")
            }
        }, new Attachment[] { new TextAttachment("attachment", MimeTypes.PlainText, "text") }),
            writer.ToStructuredText());
    }

    [Test]
    public void ToStringEqualsBodyTagToString()
    {
        Assert.AreEqual(Example.BodyTag.ToString(), Example.ToString());
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Toggl.Core.UI.Collections;
using Toggl.Core.UI.Collections.Diffing;
namespace Toggl.Core.Tests.UI.Collections.Extensions
{
public static class ChangesetExtensions
{
/// <summary>
/// Mutable wrapper around a single collection item, recording which diff
/// operations (delete/update/move) have been observed for it.
/// </summary>
public class ItemModelTypeWrapper<TItem>
{
    public TItem Item { get; set; }

    // Flags start cleared; a move is recorded as the destination path.
    public bool Deleted { get; set; }
    public bool Updated { get; set; }
    public ItemPath Moved { get; set; }

    public ItemModelTypeWrapper(TItem item)
        => Item = item;
}
/// <summary>
/// Mutable wrapper around a section and its items, used while replaying a
/// changeset: tracks pending update/delete/move state and supports converting
/// between wrapped and unwrapped section lists.
/// </summary>
public class SectionModelTypeWrapper<TSection, THeader, TElement>
    where TSection : ISectionModel<THeader, TElement>, new()
{
    // All state starts cleared; a move is recorded as the target index.
    public bool Updated { get; set; }
    public bool Deleted { get; set; }
    public int? Moved { get; set; }

    public List<ItemModelTypeWrapper<TElement>> Items { get; set; }
    public TSection Section { get; set; }

    public SectionModelTypeWrapper(TSection section)
    {
        Section = section;

        // Wrap each element of the section, preserving order.
        var wrappedItems = new List<ItemModelTypeWrapper<TElement>>();
        foreach (var element in section.Items)
            wrappedItems.Add(new ItemModelTypeWrapper<TElement>(element));

        Items = wrappedItems;
    }

    /// <summary>Wraps every section in the list, preserving order.</summary>
    public static List<SectionModelTypeWrapper<TSection, THeader, TElement>> Wrap(List<TSection> sections)
        => sections
            .Select(s => new SectionModelTypeWrapper<TSection, THeader, TElement>(s))
            .ToList();

    /// <summary>
    /// Rebuilds plain sections from the wrappers, re-initializing each new
    /// section with the original header and the (possibly modified) items.
    /// </summary>
    public static List<TSection> Unwrap(List<SectionModelTypeWrapper<TSection, THeader, TElement>> sections)
    {
        var unwrapped = new List<TSection>();

        foreach (var wrapped in sections)
        {
            var rebuilt = new TSection();
            rebuilt.Initialize(wrapped.Section.Header, wrapped.Items.Select(w => w.Item));
            unwrapped.Add(rebuilt);
        }

        return unwrapped;
    }
}
/// <summary>
/// Returns true when the changeset contains exactly the given number of each
/// kind of section/item operation; every count defaults to zero, so callers
/// name only the operations they expect.
/// </summary>
public static bool OnlyContains<TSection, THeader, TItem, TKey>(
    this Diffing<TSection, THeader, TItem, TKey>.Changeset changeset,
    int insertedSections = 0,
    int deletedSections = 0,
    int movedSections = 0,
    int updatedSections = 0,
    int insertedItems = 0,
    int deletedItems = 0,
    int movedItems = 0,
    int updatedItems = 0
)
    where TKey : IEquatable<TKey>
    where TSection : IAnimatableSectionModel<THeader, TItem, TKey>, new()
    where THeader : IDiffable<TKey>
    where TItem : IDiffable<TKey>, IEquatable<TItem>
{
    // Compare every operation count against its expectation in one pass;
    // short-circuits on the first mismatch, like the original if-chain.
    return changeset.InsertedSections.Count == insertedSections
        && changeset.DeletedSections.Count == deletedSections
        && changeset.MovedSections.Count == movedSections
        && changeset.UpdatedSections.Count == updatedSections
        && changeset.InsertedItems.Count == insertedItems
        && changeset.DeletedItems.Count == deletedItems
        && changeset.MovedItems.Count == movedItems
        && changeset.UpdatedItems.Count == updatedItems;
}
/// <summary>
/// Replays the changeset on top of <paramref name="original"/> in three phases:
/// deletes/updates first, then section moves/inserts, then item moves/inserts.
/// </summary>
public static List<TSection> Apply<TSection, THeader, TElement, TKey>(
    this Diffing<TSection, THeader, TElement, TKey>.Changeset changeset,
    List<TSection> original
)
    where TKey : IEquatable<TKey>
    where TSection : IAnimatableSectionModel<THeader, TElement, TKey>, new()
    where THeader : IDiffable<TKey>
    where TElement : IDiffable<TKey>, IEquatable<TElement>
{
    var sections = changeset.applyDeletesAndUpdates(original);
    sections = changeset.applySectionMovesAndInserts(sections);
    return changeset.applyItemInsertsAndMoves(sections);
}
/// <summary>
/// Applies the delete and update commands of the changeset to
/// <paramref name="original"/> and returns the resulting section list.
/// Updated items are replaced by their counterpart from
/// <c>changeset.FinalSections</c> at the same post-delete position.
/// </summary>
private static List<TSection> applyDeletesAndUpdates<TSection, THeader, TElement, TKey>(
    this Diffing<TSection, THeader, TElement, TKey>.Changeset changeset,
    List<TSection> original
)
    where TKey : IEquatable<TKey>
    where TSection : IAnimatableSectionModel<THeader, TElement, TKey>, new()
    where THeader : IDiffable<TKey>
    where TElement : IDiffable<TKey>, IEquatable<TElement>
{
    var resultAfterDeletesAndUpdates =
        SectionModelTypeWrapper<TSection, THeader, TElement>.Wrap(original);

    // Mark updates and deletes first; the command indexes refer to the ORIGINAL
    // layout, so nothing may be removed until every mark has been placed.
    foreach (var index in changeset.UpdatedItems)
    {
        resultAfterDeletesAndUpdates[index.sectionIndex].Items[index.itemIndex].Updated = true;
    }
    foreach (var index in changeset.DeletedItems)
    {
        resultAfterDeletesAndUpdates[index.sectionIndex].Items[index.itemIndex].Deleted = true;
    }
    foreach (var section in changeset.DeletedSections)
    {
        resultAfterDeletesAndUpdates[section].Deleted = true;
    }

    // Drop deleted sections, then deleted items, then materialize the updates.
    resultAfterDeletesAndUpdates = resultAfterDeletesAndUpdates.Where(section => !section.Deleted).ToList();
    for (int sectionIndex = 0; sectionIndex < resultAfterDeletesAndUpdates.Count; sectionIndex++)
    {
        var section = resultAfterDeletesAndUpdates[sectionIndex];
        section.Items = section.Items.Where(item => !item.Deleted).ToList();
        for (int itemIndex = 0; itemIndex < section.Items.Count; itemIndex++)
        {
            var item = section.Items[itemIndex];
            if (item.Updated)
            {
                // NOTE(review): assumes the item's post-delete position matches its
                // position in FinalSections — presumably guaranteed for changesets
                // produced by the differ; confirm against Diffing.
                section.Items[itemIndex] = new ItemModelTypeWrapper<TElement>(changeset.FinalSections[sectionIndex].Items[itemIndex]);
            }
        }
    }

    return SectionModelTypeWrapper<TSection, THeader, TElement>.Unwrap(resultAfterDeletesAndUpdates);
}
/// <summary>
/// Applies the section-level move and insert commands of
/// <paramref name="changeset"/> to <paramref name="original"/> and returns the
/// reordered list. Sections that are neither moved nor inserted keep their
/// relative source order.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the commands are inconsistent (a destination slot has no
/// remaining source section to fill it).
/// </exception>
private static List<TSection> applySectionMovesAndInserts<TSection, THeader, TElement, TKey>(
    this Diffing<TSection, THeader, TElement, TKey>.Changeset changeset,
    List<TSection> original
)
    where TKey : IEquatable<TKey>
    where TSection : IAnimatableSectionModel<THeader, TElement, TKey>, new()
    where THeader : IDiffable<TKey>
    where TElement : IDiffable<TKey>, IEquatable<TElement>
{
    // Source indexes consumed by a move — these must not be emitted as "untouched".
    var sourceSectionIndexes = new HashSet<int>(changeset.MovedSections.Select(movement => movement.Item1));

    // destination index -> source index for every moved section.
    var destinationToSourceMapping = new Dictionary<int, int>();
    foreach (var movement in changeset.MovedSections)
    {
        destinationToSourceMapping[movement.Item2] = movement.Item1;
    }

    var insertedSectionsIndexes = new HashSet<int>(changeset.InsertedSections);

    // Walks forward through `original`, skipping sections consumed by moves.
    var nextUntouchedSourceSectionIndex = -1;
    bool findNextUntouchedSourceSection()
    {
        nextUntouchedSourceSectionIndex += 1;
        while (nextUntouchedSourceSectionIndex < original.Count && sourceSectionIndexes.Contains(nextUntouchedSourceSectionIndex))
        {
            nextUntouchedSourceSectionIndex += 1;
        }
        return nextUntouchedSourceSectionIndex < original.Count;
    }

    var totalCount = original.Count + changeset.InsertedSections.Count;
    var results = new List<TSection>(totalCount);
    for (int index = 0; index < totalCount; index++)
    {
        if (insertedSectionsIndexes.Contains(index))
        {
            // Inserted sections are taken verbatim from the final snapshot.
            results.Add(changeset.FinalSections[index]);
        }
        else if (destinationToSourceMapping.TryGetValue(index, out var sourceIndex))
        {
            // Moved section: pull it from its source position.
            results.Add(original[sourceIndex]);
        }
        else
        {
            if (!findNextUntouchedSourceSection())
            {
                // Was `throw new Exception("Oooops, wrong commands.")`; use a
                // specific exception type with an actionable message instead.
                throw new InvalidOperationException(
                    $"Inconsistent changeset: no source section left for destination index {index}.");
            }
            results.Add(original[nextUntouchedSourceSectionIndex]);
        }
    }
    return results;
}
/// <summary>
/// Applies the item-level move and insert commands of
/// <paramref name="changeset"/> to <paramref name="original"/>, rebuilding each
/// section's item list. Section order is expected to already match the final layout.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the commands are inconsistent (a destination slot has no
/// remaining source item to fill it).
/// </exception>
private static List<TSection> applyItemInsertsAndMoves<TSection, THeader, TElement, TKey>(
    this Diffing<TSection, THeader, TElement, TKey>.Changeset changeset,
    List<TSection> original
)
    where TKey : IEquatable<TKey>
    where TSection : IAnimatableSectionModel<THeader, TElement, TKey>, new()
    where THeader : IDiffable<TKey>
    where TElement : IDiffable<TKey>, IEquatable<TElement>
{
    // NOTE(review): aliases `original`, so cross-section move lookups below may
    // observe already-rebuilt sections — behavior preserved from the original code.
    var resultAfterInsertsAndMoves = original;

    // Source paths consumed by a move — these must not be emitted as "untouched".
    var sourceIndexesThatShouldBeMoved = new HashSet<ItemPath>(changeset.MovedItems.Select(item => item.Item1));

    // destination path -> source path for every moved item.
    var destinationToSourceMapping = new Dictionary<ItemPath, ItemPath>();
    foreach (var movedItem in changeset.MovedItems)
    {
        destinationToSourceMapping[movedItem.Item2] = movedItem.Item1;
    }

    var insertedItemPaths = new HashSet<ItemPath>(changeset.InsertedItems);

    // Per-section counters used to compute each section's final item count.
    var insertedPerSection = Enumerable.Repeat(0, original.Count).ToList();
    var movedInSection = Enumerable.Repeat(0, original.Count).ToList();
    var movedOutSection = Enumerable.Repeat(0, original.Count).ToList();

    foreach (var insertedItemPath in changeset.InsertedItems)
    {
        insertedPerSection[insertedItemPath.sectionIndex] += 1;
    }
    foreach (var moveItem in changeset.MovedItems)
    {
        movedInSection[moveItem.Item2.sectionIndex] += 1;
        movedOutSection[moveItem.Item1.sectionIndex] += 1;
    }

    for (int sectionIndex = 0; sectionIndex < resultAfterInsertsAndMoves.Count; sectionIndex++)
    {
        var section = resultAfterInsertsAndMoves[sectionIndex];
        var originalItems = section.Items;

        // Walks forward through this section's items, skipping ones consumed by moves.
        var nextUntouchedSourceItemIndex = -1;
        bool findNextUntouchedSourceItem()
        {
            nextUntouchedSourceItemIndex += 1;
            while (nextUntouchedSourceItemIndex < section.Items.Count
                   && sourceIndexesThatShouldBeMoved.Contains(new ItemPath(sectionIndex: sectionIndex, itemIndex: nextUntouchedSourceItemIndex)))
            {
                nextUntouchedSourceItemIndex += 1;
            }
            return nextUntouchedSourceItemIndex < section.Items.Count;
        }

        var totalCount = section.Items.Count
            + insertedPerSection[sectionIndex]
            + movedInSection[sectionIndex]
            - movedOutSection[sectionIndex];

        var resultItems = new List<TElement>();
        for (int index = 0; index < totalCount; index++)
        {
            var itemPath = new ItemPath(sectionIndex, index);

            if (insertedItemPaths.Contains(itemPath))
            {
                // Inserted items are taken verbatim from the final snapshot.
                resultItems.Add(changeset.FinalSections[itemPath.sectionIndex].Items[itemPath.itemIndex]);
            }
            else if (destinationToSourceMapping.TryGetValue(itemPath, out var sourceIndex))
            {
                // Moved item: pull it from its source position.
                resultItems.Add(original[sourceIndex.sectionIndex].Items[sourceIndex.itemIndex]);
            }
            else
            {
                if (!findNextUntouchedSourceItem())
                {
                    // Was `throw new Exception("Oooops, wrong commands.")`; use a
                    // specific exception type with an actionable message instead.
                    throw new InvalidOperationException(
                        $"Inconsistent changeset: no source item left for destination index {index} in section {sectionIndex}.");
                }
                resultItems.Add(originalItems[nextUntouchedSourceItemIndex]);
            }
        }

        var newSection = new TSection();
        newSection.Initialize(section.Header, resultItems);
        resultAfterInsertsAndMoves[sectionIndex] = newSection;
    }

    return resultAfterInsertsAndMoves;
}
}
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the ec2-2015-10-01.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.EC2.Model
{
/// <summary>
/// Describes the snapshot created from the imported disk.
/// </summary>
/// <remarks>
/// Generated partial model class: values are kept in nullable backing fields so
/// the companion IsSetXxx() methods can report whether the service returned a value.
/// </remarks>
public partial class SnapshotDetail
{
    private string _description;
    private string _deviceName;
    private double? _diskImageSize;
    private string _format;
    private string _progress;
    private string _snapshotId;
    private string _status;
    private string _statusMessage;
    private string _url;
    private UserBucketDetails _userBucket;

    /// <summary>
    /// Gets and sets the property Description.
    /// <para>
    /// A description for the snapshot.
    /// </para>
    /// </summary>
    public string Description
    {
        get { return this._description; }
        set { this._description = value; }
    }

    // Check to see if Description property is set
    internal bool IsSetDescription()
    {
        return this._description != null;
    }

    /// <summary>
    /// Gets and sets the property DeviceName.
    /// <para>
    /// The block device mapping for the snapshot.
    /// </para>
    /// </summary>
    public string DeviceName
    {
        get { return this._deviceName; }
        set { this._deviceName = value; }
    }

    // Check to see if DeviceName property is set
    internal bool IsSetDeviceName()
    {
        return this._deviceName != null;
    }

    /// <summary>
    /// Gets and sets the property DiskImageSize.
    /// <para>
    /// The size of the disk in the snapshot, in GiB.
    /// </para>
    /// </summary>
    /// <remarks>
    /// The getter returns 0 when the service sent no value; use
    /// IsSetDiskImageSize() to distinguish "unset" from an actual 0.
    /// </remarks>
    public double DiskImageSize
    {
        get { return this._diskImageSize.GetValueOrDefault(); }
        set { this._diskImageSize = value; }
    }

    // Check to see if DiskImageSize property is set
    internal bool IsSetDiskImageSize()
    {
        return this._diskImageSize.HasValue;
    }

    /// <summary>
    /// Gets and sets the property Format.
    /// <para>
    /// The format of the disk image from which the snapshot is created.
    /// </para>
    /// </summary>
    public string Format
    {
        get { return this._format; }
        set { this._format = value; }
    }

    // Check to see if Format property is set
    internal bool IsSetFormat()
    {
        return this._format != null;
    }

    /// <summary>
    /// Gets and sets the property Progress.
    /// <para>
    /// The percentage of progress for the task.
    /// </para>
    /// </summary>
    public string Progress
    {
        get { return this._progress; }
        set { this._progress = value; }
    }

    // Check to see if Progress property is set
    internal bool IsSetProgress()
    {
        return this._progress != null;
    }

    /// <summary>
    /// Gets and sets the property SnapshotId.
    /// <para>
    /// The snapshot ID of the disk being imported.
    /// </para>
    /// </summary>
    public string SnapshotId
    {
        get { return this._snapshotId; }
        set { this._snapshotId = value; }
    }

    // Check to see if SnapshotId property is set
    internal bool IsSetSnapshotId()
    {
        return this._snapshotId != null;
    }

    /// <summary>
    /// Gets and sets the property Status.
    /// <para>
    /// A brief status of the snapshot creation.
    /// </para>
    /// </summary>
    public string Status
    {
        get { return this._status; }
        set { this._status = value; }
    }

    // Check to see if Status property is set
    internal bool IsSetStatus()
    {
        return this._status != null;
    }

    /// <summary>
    /// Gets and sets the property StatusMessage.
    /// <para>
    /// A detailed status message for the snapshot creation.
    /// </para>
    /// </summary>
    public string StatusMessage
    {
        get { return this._statusMessage; }
        set { this._statusMessage = value; }
    }

    // Check to see if StatusMessage property is set
    internal bool IsSetStatusMessage()
    {
        return this._statusMessage != null;
    }

    /// <summary>
    /// Gets and sets the property Url.
    /// <para>
    /// The URL used to access the disk image.
    /// </para>
    /// </summary>
    public string Url
    {
        get { return this._url; }
        set { this._url = value; }
    }

    // Check to see if Url property is set
    internal bool IsSetUrl()
    {
        return this._url != null;
    }

    /// <summary>
    /// Gets and sets the property UserBucket.
    /// </summary>
    public UserBucketDetails UserBucket
    {
        get { return this._userBucket; }
        set { this._userBucket = value; }
    }

    // Check to see if UserBucket property is set
    internal bool IsSetUserBucket()
    {
        return this._userBucket != null;
    }
}
}
| |
using System;
using System.IO;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
using L4p.Common.Extensions;
using L4p.Common.Helpers;
using L4p.Common.Loggers;
namespace L4p.Common.ConfigurationFiles
{
// Parses the textual content of a configuration file into a T instance.
interface IJson2Config<T>
    where T : class, new()
{
    // file: name used only for trace messages; json: raw file content.
    T ParseJson(string file, string json, ILogFile log = null);
}
class Json2Config<T> : IJson2Config<T>
where T : class, new()
{
#region configuration wrapper
// One configuration together with the path keywords that select it.
public class SingleConfiguration
{
    // Keywords matched against the config file's path or the machine name
    // (see has_key_in_path); null/empty acts as a catch-all.
    public string[] PathKeys { get; set; }
    public T Configuration { get; set; }
}
// Wrapper shape of a file that holds several alternative configurations.
class MultipleConfiguration
{
    public SingleConfiguration[] Configurations { get; set; }
}
#endregion
#region members
private readonly string _path;
private readonly string _hostname;
#endregion
#region construction
// Factory; path is the configuration file's location (also used for key matching).
public static IJson2Config<T> New(string path)
{
    return
        new Json2Config<T>(path);
}
private Json2Config(string path)
{
    _path = path;
    // The machine name participates in path-key matching (see has_key_in_path).
    _hostname = Environment.MachineName;
}
#endregion
#region private
// Drops any leading javascript prologue (e.g. "var config =") so the text
// starts at the first '{' of the JSON object; fails when no '{' exists.
private static string remove_js_header(string json)
{
    int objectStart = json.IndexOf('{');
    Validate.That(objectStart >= 0);

    return objectStart == 0
        ? json
        : json.Substring(objectStart);
}
// Strips javascript-style comments from json while leaving quoted string
// literals (including verbatim strings) untouched.
// Regex approach from http://stackoverflow.com/a/3524689/675116
private static string remove_js_comments(string json)
{
    const string blockComments = @"/\*(.*?)\*/";
    const string lineComments = @"//(.*?)\r?\n";
    const string strings = @"""((\\[^\n]|[^""\n])*)""";
    const string verbatimStrings = @"@(""[^""]*"")+";

    var pattern = string.Join("|", blockComments, lineComments, strings, verbatimStrings);

    return Regex.Replace(json, pattern,
        match =>
        {
            // A line comment becomes a newline so line numbering survives.
            if (match.Value.StartsWith("//"))
                return Environment.NewLine;

            // A block comment disappears entirely.
            if (match.Value.StartsWith("/*"))
                return "";

            // Anything else is a string literal — keep it verbatim.
            return match.Value;
        },
        RegexOptions.Singleline);
}
// Deserializes json into TStruct; TimeSpan values go through the project's
// custom converter.
private static TStruct parse_json<TStruct>(string json)
{
    return JsonConvert.DeserializeObject<TStruct>(
        json, new TimeSpanFromJsonConverter());
}
// Throws a ConfigFileException (message prefixed with the file path) when expr is true.
private void fail_if(bool expr, string msg, params object[] args)
{
    if (!expr)
        return;

    var prefix = "Bad configuration file '{0}': ".Fmt(_path);
    throw new ConfigFileException(prefix + msg.Fmt(args));
}
// Ensures the multi-config wrapper actually holds at least one configuration.
private void validate_mconfig(MultipleConfiguration mconfig)
{
    fail_if(mconfig.Configurations == null, "No configurations are found");
    fail_if(mconfig.Configurations.Length == 0, "Configurations are empty");
}
// Ensures a wrapped entry actually carries a configuration payload.
private void validate_sconfig(SingleConfiguration sconfig)
{
    fail_if(sconfig.Configuration == null, "Single configuration is empty");
}
// Tries to read json as a multi-configuration wrapper; returns null when the
// document is not in that shape.
private MultipleConfiguration parse_as_mconfig(string json)
{
    var mconfig = parse_json<MultipleConfiguration>(json);

    if (mconfig == null || mconfig.Configurations == null)
        return null;

    validate_mconfig(mconfig);
    return mconfig;
}
// True when one of sconfig's PathKeys occurs in `path` (case-insensitive) or
// equals the current machine name; the first matching key is returned via matchedKey.
private bool has_key_in_path(SingleConfiguration sconfig, string path, out string matchedKey)
{
    matchedKey = null;
    foreach (var key in sconfig.PathKeys)
    {
        bool containsKey =
            path.IndexOf(key, StringComparison.InvariantCultureIgnoreCase) != -1;

        // A key equal to the host name matches regardless of the path.
        containsKey |=
            0 == String.Compare(key, _hostname, StringComparison.InvariantCultureIgnoreCase);

        if (containsKey == false)
            continue;

        matchedKey = key;
        return true;
    }
    return false;
}
// Picks the first configuration whose PathKeys match this file's path (entries
// with no PathKeys act as a catch-all); throws when nothing matches.
// matchedKey is null when the winner was a catch-all entry.
private T choose_sconfig(MultipleConfiguration mconfig, out string matchedKey)
{
    matchedKey = null;
    foreach (var sconfig in mconfig.Configurations)
    {
        validate_sconfig(sconfig);

        // No keys at all means "use me unconditionally".
        if (sconfig.PathKeys == null)
            return sconfig.Configuration;

        if (sconfig.PathKeys.Length == 0)
            return sconfig.Configuration;

        bool hasKeyInPath = has_key_in_path(sconfig, _path, out matchedKey);

        if (hasKeyInPath == false)
            continue;

        return
            sconfig.Configuration;
    }

    throw
        new ConfigFileException("Configuration file '{0}': has no config for its path", _path);
}
// Reads json as a single (non-wrapped) configuration.
private static T parse_as_sconfig(string json)
{
    return parse_json<T>(json);
}
#endregion
#region IJson2Config
// Normalizes the raw text (strip js prologue and comments), then parses it
// either as a multi-configuration wrapper or as a single configuration.
T IJson2Config<T>.ParseJson(string file, string json, ILogFile log)
{
    json = remove_js_comments(remove_js_header(json));
    file = Path.GetFileName(file);

    var mconfig = parse_as_mconfig(json);

    if (mconfig == null)
    {
        // Not a wrapper: the whole document is the configuration.
        var single = parse_as_sconfig(json);

        if (log != null)
            log.Trace("Single configuration file ({0})", file);

        return single;
    }

    string matchedKey;
    var chosen = choose_sconfig(mconfig, out matchedKey);

    if (log != null)
        log.Trace("PathKey '{0}' is matched ({1})", matchedKey, file);

    return chosen;
}
#endregion
}
}
| |
using System;
using System.Collections;
using System.IO;
using NBitcoin.BouncyCastle.Utilities;
namespace NBitcoin.BouncyCastle.Asn1
{
internal abstract class Asn1Sequence
: Asn1Object, IEnumerable
{
private readonly IList seq;
/**
 * return an Asn1Sequence from the given object.
 *
 * null is passed through; sequences, sequence parsers, encoded byte arrays
 * and encodables whose primitive form is a sequence are all accepted.
 *
 * @param obj the object we want converted.
 * @exception ArgumentException if the object cannot be converted.
 */
public static Asn1Sequence GetInstance(
    object obj)
{
    if(obj == null)
        return null;

    Asn1Sequence sequence = obj as Asn1Sequence;
    if(sequence != null)
        return sequence;

    Asn1SequenceParser parser = obj as Asn1SequenceParser;
    if(parser != null)
        return Asn1Sequence.GetInstance(parser.ToAsn1Object());

    byte[] encoding = obj as byte[];
    if(encoding != null)
    {
        try
        {
            return Asn1Sequence.GetInstance(FromByteArray(encoding));
        }
        catch(IOException e)
        {
            throw new ArgumentException("failed to construct sequence from byte[]: " + e.Message);
        }
    }

    Asn1Encodable encodable = obj as Asn1Encodable;
    if(encodable != null)
    {
        Asn1Object primitive = encodable.ToAsn1Object();

        Asn1Sequence converted = primitive as Asn1Sequence;
        if(converted != null)
            return converted;
    }

    throw new ArgumentException("Unknown object in GetInstance: " + Platform.GetTypeName(obj), "obj");
}
// capacity pre-sizes the backing list; elements are added later via AddObject.
protected internal Asn1Sequence(
    int capacity)
{
    seq = Platform.CreateArrayList(capacity);
}
// Enumerates the encodable elements of this sequence in order.
public virtual IEnumerator GetEnumerator()
{
    return seq.GetEnumerator();
}
// Legacy BouncyCastle name kept for source compatibility.
[Obsolete("Use GetEnumerator() instead")]
public IEnumerator GetObjects()
{
    return GetEnumerator();
}
// Streaming view over an already-materialized sequence: ReadObject hands out
// the elements one by one and returns null once the end is reached.
private class Asn1SequenceParserImpl
    : Asn1SequenceParser
{
    private readonly Asn1Sequence outer;
    private readonly int max;   // element count captured at construction
    private int index;          // next element to hand out

    public Asn1SequenceParserImpl(
        Asn1Sequence outer)
    {
        this.outer = outer;
        this.max = outer.Count;
    }

    public IAsn1Convertible ReadObject()
    {
        if(index == max)
            return null;

        Asn1Encodable obj = outer[index++];

        // Nested sequences are exposed through their own parser.
        if(obj is Asn1Sequence)
            return ((Asn1Sequence)obj).Parser;

        // NB: Asn1OctetString implements Asn1OctetStringParser directly
        //			if (obj is Asn1OctetString)
        //				return ((Asn1OctetString)obj).Parser;

        return obj;
    }

    public Asn1Object ToAsn1Object()
    {
        return outer;
    }
}
// A fresh parser positioned at the first element of this sequence.
public virtual Asn1SequenceParser Parser
{
    get
    {
        return new Asn1SequenceParserImpl(this);
    }
}
/**
 * return the object at the sequence position indicated by index.
 *
 * @param index the sequence number (starting at zero) of the object
 * @return the object at the sequence position indicated by index.
 * @exception index out of range errors propagate from the backing list.
 */
public virtual Asn1Encodable this[int index]
{
    get
    {
        return (Asn1Encodable)seq[index];
    }
}
// Legacy accessor kept for source compatibility; use the indexer instead.
[Obsolete("Use 'object[index]' syntax instead")]
public Asn1Encodable GetObjectAt(
    int index)
{
    return this[index];
}
// Legacy alias of Count kept for source compatibility.
[Obsolete("Use 'Count' property instead")]
public int Size
{
    get
    {
        return Count;
    }
}
// Number of elements in the sequence.
public virtual int Count
{
    get
    {
        return seq.Count;
    }
}
// Order-dependent hash over the elements; null entries hash as DER NULL,
// mirroring Asn1Equals/GetCurrent.
protected override int Asn1GetHashCode()
{
    int hash = Count;

    foreach(object element in this)
    {
        hash *= 17;

        int elementHash = element == null
            ? DerNull.Instance.GetHashCode()
            : element.GetHashCode();

        hash ^= elementHash;
    }

    return hash;
}
// Two sequences are equal when they have the same length and element-wise
// equal ASN.1 primitive forms.
protected override bool Asn1Equals(
    Asn1Object asn1Object)
{
    Asn1Sequence that = asn1Object as Asn1Sequence;

    if(that == null || this.Count != that.Count)
        return false;

    IEnumerator thisEnum = GetEnumerator();
    IEnumerator thatEnum = that.GetEnumerator();

    while(thisEnum.MoveNext() && thatEnum.MoveNext())
    {
        Asn1Object thisObj = GetCurrent(thisEnum).ToAsn1Object();
        Asn1Object thatObj = GetCurrent(thatEnum).ToAsn1Object();

        if(!thisObj.Equals(thatObj))
            return false;
    }

    return true;
}
// Historically null entries stood in for DER NULL; normalize them here.
private Asn1Encodable GetCurrent(IEnumerator e)
{
    Asn1Encodable current = (Asn1Encodable)e.Current;

    return current != null ? current : DerNull.Instance;
}
// Appends obj to the sequence; only usable by subclasses while building.
protected internal void AddObject(
    Asn1Encodable obj)
{
    seq.Add(obj);
}
}
}
| |
namespace Signum.Entities;
public class OperationSymbol : Symbol
{
// Parameterless ctor — presumably required for serialization/reflection; TODO confirm.
private OperationSymbol() { }

// Symbols are only created through the factory methods of this class.
private OperationSymbol(Type declaringType, string fieldName)
    : base(declaringType, fieldName)
{
}
/// <summary>
/// Factory methods creating the construction-operation symbols for
/// <typeparamref name="T"/>: from nothing (Simple), from one entity (From),
/// or from a collection of entities (FromMany).
/// </summary>
public static class Construct<T>
    where T : class, IEntity
{
    public static ConstructSymbol<T>.Simple Simple(Type declaringType, string fieldName)
    {
        return new SimpleImp(new OperationSymbol(declaringType, fieldName));
    }

    public static ConstructSymbol<T>.From<F> From<F>(Type declaringType, string fieldName)
        where F : class, IEntity
    {
        return new FromImp<F>(new OperationSymbol(declaringType, fieldName));
    }

    public static ConstructSymbol<T>.FromMany<F> FromMany<F>(Type declaringType, string fieldName)
        where F : class, IEntity
    {
        return new FromManyImp<F>(new OperationSymbol(declaringType, fieldName));
    }

    // NOTE(review): the three Imp classes declare their Symbol setter with
    // different accessibility (internal / private / public) — looks unintentional;
    // confirm before unifying.
    class SimpleImp : ConstructSymbol<T>.Simple
    {
        public SimpleImp(OperationSymbol symbol)
        {
            this.Symbol = symbol;
        }

        public OperationSymbol Symbol { get; internal set; }

        public override string ToString()
        {
            return "{0}({1})".FormatWith(this.GetType().TypeName(), Symbol);
        }
    }

    class FromImp<F> : ConstructSymbol<T>.From<F>
        where F : class, IEntity
    {
        public FromImp(OperationSymbol symbol)
        {
            Symbol = symbol;
        }

        public OperationSymbol Symbol { get; private set; }

        public Type BaseType
        {
            get { return typeof(F); }
        }

        public override string ToString()
        {
            return "{0}({1})".FormatWith(this.GetType().TypeName(), Symbol);
        }
    }

    class FromManyImp<F> : ConstructSymbol<T>.FromMany<F>
        where F : class, IEntity
    {
        public FromManyImp(OperationSymbol symbol)
        {
            Symbol = symbol;
        }

        public OperationSymbol Symbol { get; set; }

        public Type BaseType
        {
            get { return typeof(F); }
        }

        public override string ToString()
        {
            return "{0}({1})".FormatWith(this.GetType().TypeName(), Symbol);
        }
    }
}
/// <summary>Creates the symbol for an Execute operation on <typeparamref name="T"/>.</summary>
public static ExecuteSymbol<T> Execute<T>(Type declaringType, string fieldName)
    where T : class, IEntity
    => new ExecuteSymbolImp<T>(new OperationSymbol(declaringType, fieldName));
/// <summary>Creates the symbol for a Delete operation on <typeparamref name="T"/>.</summary>
public static DeleteSymbol<T> Delete<T>(Type declaringType, string fieldName)
    where T : class, IEntity
    => new DeleteSymbolImp<T>(new OperationSymbol(declaringType, fieldName));
// Private implementation of ExecuteSymbol<T>; instances come from Execute<T>().
class ExecuteSymbolImp<T> : ExecuteSymbol<T>
    where T : class, IEntity
{
    public ExecuteSymbolImp(OperationSymbol symbol)
    {
        Symbol = symbol;
    }

    public OperationSymbol Symbol { get; private set; }

    public Type BaseType
    {
        get { return typeof(T); }
    }

    public override string ToString()
    {
        return "{0}({1})".FormatWith(this.GetType().TypeName(), Symbol);
    }
}
// Private implementation of DeleteSymbol<T>; instances come from Delete<T>().
class DeleteSymbolImp<T> : DeleteSymbol<T>
    where T : class, IEntity
{
    public DeleteSymbolImp(OperationSymbol symbol)
    {
        Symbol = symbol;
    }

    public OperationSymbol Symbol { get; private set; }

    public Type BaseType
    {
        get { return typeof(T); }
    }

    public override string ToString()
    {
        return "{0}({1})".FormatWith(this.GetType().TypeName(), Symbol);
    }
}
}
/// <summary>Anything that carries an <see cref="OperationSymbol"/>.</summary>
public interface IOperationSymbolContainer
{
    OperationSymbol Symbol { get; }
}
/// <summary>Non-generic marker for symbol containers whose operation targets an entity.</summary>
public interface IEntityOperationSymbolContainer : IOperationSymbolContainer
{
}
/// <summary>Entity-operation symbol container typed by the entity it applies to.</summary>
public interface IEntityOperationSymbolContainer<in T> : IEntityOperationSymbolContainer
    where T : class, IEntity
{
    // The entity type the operation was declared for.
    Type BaseType { get; }
}
/// <summary>
/// Symbol-container interfaces for the three construction-operation kinds on
/// <typeparamref name="T"/>: from nothing, from one entity, from many entities.
/// </summary>
public static class ConstructSymbol<T>
    where T : class, IEntity
{
    // Construct a T from scratch.
    public interface Simple : IOperationSymbolContainer
    {
    }

    // Construct a T from a single F entity.
    public interface From<in F> : IEntityOperationSymbolContainer<F>
        where F : class, IEntity
    {
    }

    // Construct a T from a collection of F entities.
    public interface FromMany<in F> : IOperationSymbolContainer
        where F : class, IEntity
    {
        Type BaseType { get; }
    }
}
/// <summary>Symbol container for an Execute operation on <typeparamref name="T"/>.</summary>
public interface ExecuteSymbol<in T> : IEntityOperationSymbolContainer<T>
    where T : class, IEntity
{
}
/// <summary>Symbol container for a Delete operation on <typeparamref name="T"/>.</summary>
public interface DeleteSymbol<in T> : IEntityOperationSymbolContainer<T>
    where T : class, IEntity
{
}
/// <summary>
/// Metadata describing a registered operation: its symbol, its kind, and the
/// optional capability flags reported for it.
/// </summary>
public class OperationInfo
{
    public OperationInfo(OperationSymbol symbol, OperationType type)
    {
        OperationSymbol = symbol;
        OperationType = type;
    }

    public OperationSymbol OperationSymbol { get; internal set; }
    public OperationType OperationType { get; internal set; }
    public bool? CanBeModified { get; internal set; }
    public bool? CanBeNew { get; internal set; }
    public bool? HasStates { get; internal set; }
    public bool? HasCanExecute { get; internal set; }
    public bool Returns { get; internal set; }
    public Type? ReturnType { get; internal set; }
    public Type? BaseType { get; internal set; }

    public override string ToString() => "{0} ({1})".FormatWith(OperationSymbol, OperationType);

    /// <summary>Execute, ConstructorFrom and Delete all act on an existing entity.</summary>
    public bool IsEntityOperation =>
        OperationType is OperationType.Execute
            or OperationType.ConstructorFrom
            or OperationType.Delete;
}
// The kind of an operation; mirrored to TypeScript on the client.
[InTypeScript(true)]
public enum OperationType
{
    Execute,
    Delete,
    Constructor,
    ConstructorFrom,
    ConstructorFromMany
}
// Kinds of modification that can be applied to a property; mirrored to TypeScript.
[InTypeScript(true), DescriptionOptions(DescriptionOptions.Members | DescriptionOptions.Description)]
public enum PropertyOperation
{
    Set,
    AddElement,
    AddNewElement,
    ChangeElements,
    RemoveElement,
    RemoveElementsWhere,
    ModifyEntity,
    CreateNewEntity,
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO.PortsTests;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Legacy.Support;
using Xunit;
using Xunit.NetCore.Extensions;
namespace System.IO.Ports.Tests
{
public class SerialStream_Write_byte_int_int_Generic : PortsTest
{
// Set bounds for random timeout values.
// If the min is too low, Write will not time out accurately and the testcase will fail.
private const int minRandomTimeout = 250;

// If the max is too large then the testcase will take forever to run.
private const int maxRandomTimeout = 2000;

// If the percentage difference between the expected timeout and the actual timeout
// measured with a Stopwatch is greater than this fraction (15%), the timeout value
// was not correctly passed to the Write method and the testcase fails.
private const double maxPercentageDifference = .15;

// The byte count used when verifying exceptions that Write will throw.
private const int BYTE_SIZE_EXCEPTION = 4;

// The byte count used when verifying timeout.
private static readonly int s_BYTE_SIZE_TIMEOUT = TCSupport.MinimumBlockingByteCount;

// The byte count used when verifying BytesToWrite.
private static readonly int s_BYTE_SIZE_BYTES_TO_WRITE = TCSupport.MinimumBlockingByteCount;

// The byte count used when verifying Handshake.
private static readonly int s_BYTE_SIZE_HANDSHAKE = TCSupport.MinimumBlockingByteCount;

private const int NUM_TRYS = 5;
#region Test Cases
[ConditionalFact(nameof(HasOneSerialPort))]
public void WriteAfterClose()
{
    // Writing to the base stream after SerialPort.Close() must throw ObjectDisposedException.
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        // Fixed typo in the diagnostic message: "Cloes()" -> "Close()".
        Debug.WriteLine("Verifying write method throws exception after a call to Close()");
        com.Open();
        Stream serialStream = com.BaseStream;
        com.Close();

        VerifyWriteException(serialStream, typeof(ObjectDisposedException));
    }
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void WriteAfterBaseStreamClose()
{
    // Writing after BaseStream.Close() must throw ObjectDisposedException.
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        Debug.WriteLine("Verifying write method throws exception after a call to BaseStream.Close()");
        com.Open();
        Stream serialStream = com.BaseStream;
        com.BaseStream.Close();

        VerifyWriteException(serialStream, typeof(ObjectDisposedException));
    }
}
[ConditionalFact(nameof(HasNullModem), nameof(HasHardwareFlowControl))]
public void Timeout()
{
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
    {
        var rndGen = new Random(-55);

        // XOFF (0x13) sent from the peer makes com1's XOn/XOff-flow-controlled
        // writes block, so they should time out.
        var XOffBuffer = new byte[1];
        XOffBuffer[0] = 19;

        com1.WriteTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
        com1.Handshake = Handshake.XOnXOff;

        Debug.WriteLine("Verifying WriteTimeout={0}", com1.WriteTimeout);

        com1.Open();
        com2.Open();

        com2.BaseStream.Write(XOffBuffer, 0, 1);
        // Give the XOFF byte time to arrive before closing the second port.
        Thread.Sleep(250);
        com2.Close();

        VerifyTimeout(com1);
    }
}
[Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Timing-sensitive
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void SuccessiveWriteTimeout()
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        var rndGen = new Random(-55);

        com.WriteTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
        // Flow control is enabled so the write below blocks and times out.
        com.Handshake = Handshake.RequestToSendXOnXOff;
        // com.Encoding = new System.Text.UTF7Encoding();
        com.Encoding = Encoding.Unicode;

        Debug.WriteLine("Verifying WriteTimeout={0} with successive call to write method", com.WriteTimeout);
        com.Open();

        // First write is expected to time out; VerifyTimeout then checks that a
        // subsequent write honors the same timeout.
        try
        {
            com.BaseStream.Write(new byte[s_BYTE_SIZE_TIMEOUT], 0, s_BYTE_SIZE_TIMEOUT);
        }
        catch (TimeoutException)
        {
        }

        VerifyTimeout(com);
    }
}
[ConditionalFact(nameof(HasNullModem), nameof(HasHardwareFlowControl))]
public void SuccessiveWriteTimeoutWithWriteSucceeding()
{
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        var rndGen = new Random(-55);
        var asyncEnableRts = new AsyncEnableRts();
        var t = new Task(asyncEnableRts.EnableRTS);

        com1.WriteTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
        com1.Handshake = Handshake.RequestToSend;
        com1.Encoding = new UTF8Encoding();

        Debug.WriteLine(
            "Verifying WriteTimeout={0} with successive call to write method with the write succeeding sometime before it's timeout",
            com1.WriteTimeout);
        com1.Open();

        // Call EnableRTS asynchronously this will enable RTS in the middle of the following write call allowing it to succeed
        // before the timeout is reached
        t.Start();
        TCSupport.WaitForTaskToStart(t);

        try
        {
            com1.BaseStream.Write(new byte[s_BYTE_SIZE_TIMEOUT], 0, s_BYTE_SIZE_TIMEOUT);
        }
        catch (TimeoutException)
        {
            // The write may still time out if RTS came up too late; either way
            // VerifyTimeout below checks the timeout behavior of the next write.
        }

        asyncEnableRts.Stop();
        TCSupport.WaitForTaskCompletion(t);

        VerifyTimeout(com1);
    }
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void BytesToWrite()
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        var asyncWriteRndByteArray = new AsyncWriteRndByteArray(com, s_BYTE_SIZE_BYTES_TO_WRITE);
        var t = new Task(asyncWriteRndByteArray.WriteRndByteArray);

        Debug.WriteLine("Verifying BytesToWrite with one call to Write");

        // RequestToSend handshake (with no peer granting it) keeps the write blocked.
        com.Handshake = Handshake.RequestToSend;
        com.Open();
        com.WriteTimeout = 500;

        // Write a random byte[] asynchronously so we can verify some things while the write call is blocking
        t.Start();
        TCSupport.WaitForTaskToStart(t);

        // The written bytes must show up in the output buffer while blocked.
        TCSupport.WaitForWriteBufferToLoad(com, s_BYTE_SIZE_BYTES_TO_WRITE);

        TCSupport.WaitForTaskCompletion(t);
    }
}
[Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Timing-sensitive
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void BytesToWriteSuccessive()
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        var asyncWriteRndByteArray = new AsyncWriteRndByteArray(com, s_BYTE_SIZE_BYTES_TO_WRITE);
        var t1 = new Task(asyncWriteRndByteArray.WriteRndByteArray);
        var t2 = new Task(asyncWriteRndByteArray.WriteRndByteArray);

        Debug.WriteLine("Verifying BytesToWrite with successive calls to Write");

        com.Handshake = Handshake.RequestToSend;
        com.Open();
        com.WriteTimeout = 4000;

        // Write a random byte[] asynchronously so we can verify some things while the write call is blocking
        t1.Start();
        TCSupport.WaitForTaskToStart(t1);
        TCSupport.WaitForExactWriteBufferLoad(com, s_BYTE_SIZE_BYTES_TO_WRITE);

        // Write a random byte[] asynchronously so we can verify some things while the write call is blocking
        t2.Start();
        TCSupport.WaitForTaskToStart(t2);
        // Both payloads must now sit in the output buffer.
        TCSupport.WaitForExactWriteBufferLoad(com, s_BYTE_SIZE_BYTES_TO_WRITE * 2);

        // Wait for both write methods to timeout
        var aggregatedException = Assert.Throws<AggregateException>(() => TCSupport.WaitForTaskCompletion(t2));
        Assert.IsType<IOException>(aggregatedException.InnerException);
    }
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void Handshake_None()
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        // With no handshake the write is never flow-control blocked, so the
        // asynchronous write should complete and drain the buffer fully.
        var writer = new AsyncWriteRndByteArray(com, s_BYTE_SIZE_HANDSHAKE);
        var writeTask = new Task(writer.WriteRndByteArray);

        Debug.WriteLine("Verifying Handshake=None");
        com.Open();

        writeTask.Start();
        TCSupport.WaitForTaskCompletion(writeTask);

        Assert.Equal(0, com.BytesToWrite);
    }
}
[ConditionalFact(nameof(HasNullModem), nameof(HasHardwareFlowControl))]
public void Handshake_RequestToSend()
{
    // RTS/CTS hardware flow control.
    Verify_Handshake(Handshake.RequestToSend);
}

[ConditionalFact(nameof(HasNullModem))]
public void Handshake_XOnXOff()
{
    // Software (XOn/XOff) flow control; no hardware flow-control lines required.
    Verify_Handshake(Handshake.XOnXOff);
}

[ConditionalFact(nameof(HasNullModem), nameof(HasHardwareFlowControl))]
public void Handshake_RequestToSendXOnXOff()
{
    // Combined hardware and software flow control.
    Verify_Handshake(Handshake.RequestToSendXOnXOff);
}
/// <summary>
/// Helper that asserts RTS on the second serial port after a random delay and
/// holds it asserted until <see cref="Stop"/> is called.
/// </summary>
private class AsyncEnableRts
{
    // Dedicated lock object: locking 'this' is an anti-pattern because any
    // external code holding a reference could lock the same instance and
    // deadlock with the Monitor.Wait/Pulse handshake used here.
    private readonly object _gate = new object();
    private bool _stop;

    public void EnableRTS()
    {
        lock (_gate)
        {
            using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
            {
                var rndGen = new Random(-55);
                int sleepPeriod = rndGen.Next(minRandomTimeout, maxRandomTimeout / 2);

                // Sleep a random period of at most half the largest possible
                // timeout value for a write method on COM1.
                Thread.Sleep(sleepPeriod);

                com2.Open();
                com2.RtsEnable = true;

                // Hold RTS asserted until Stop() sets the flag and pulses the monitor.
                while (!_stop)
                    Monitor.Wait(_gate);

                com2.RtsEnable = false;
            }
        }
    }

    /// <summary>Signals EnableRTS to release RTS and return.</summary>
    public void Stop()
    {
        lock (_gate)
        {
            _stop = true;
            Monitor.Pulse(_gate);
        }
    }
}
/// <summary>
/// Helper that writes a fixed-seed pseudo-random payload to a serial port's base stream.
/// </summary>
private class AsyncWriteRndByteArray
{
    private readonly SerialPort _com;
    private readonly int _byteLength;

    public AsyncWriteRndByteArray(SerialPort com, int byteLength)
    {
        _com = com;
        _byteLength = byteLength;
    }

    // Writes the payload synchronously; a TimeoutException is expected (and
    // swallowed) whenever flow control keeps the write blocked past WriteTimeout.
    public void WriteRndByteArray()
    {
        var random = new Random(-55);
        var payload = new byte[_byteLength];

        for (var index = 0; index < payload.Length; index++)
        {
            payload[index] = (byte)random.Next(0, 256);
        }

        try
        {
            _com.BaseStream.Write(payload, 0, payload.Length);
        }
        catch (TimeoutException)
        {
            // Expected when the port is blocked by handshaking.
        }
    }
}
#endregion
#region Verification for Test Cases
// Asserts that writing BYTE_SIZE_EXCEPTION bytes to the stream throws the expected exception type.
private static void VerifyWriteException(Stream serialStream, Type expectedException)
{
    var buffer = new byte[BYTE_SIZE_EXCEPTION];
    Assert.Throws(expectedException, () => serialStream.Write(buffer, 0, buffer.Length));
}
// Measures the average observed WriteTimeout over NUM_TRYS blocked writes and
// fails if it deviates from com.WriteTimeout by more than maxPercentageDifference.
private void VerifyTimeout(SerialPort com)
{
    var timer = new Stopwatch();
    int expectedTime = com.WriteTimeout;
    var actualTime = 0;

    try
    {
        com.BaseStream.Write(new byte[s_BYTE_SIZE_TIMEOUT], 0, s_BYTE_SIZE_TIMEOUT); // Warm up write method
    }
    catch (TimeoutException)
    {
    }

    // Raise priority so scheduling jitter doesn't inflate the measured timeouts.
    Thread.CurrentThread.Priority = ThreadPriority.Highest;

    for (var i = 0; i < NUM_TRYS; i++)
    {
        timer.Start();
        try
        {
            com.BaseStream.Write(new byte[s_BYTE_SIZE_TIMEOUT], 0, s_BYTE_SIZE_TIMEOUT);
        }
        catch (TimeoutException)
        {
            // Expected: the port is flow-control blocked, so each write times out.
        }
        timer.Stop();
        actualTime += (int)timer.ElapsedMilliseconds;
        timer.Reset();
    }

    Thread.CurrentThread.Priority = ThreadPriority.Normal;
    actualTime /= NUM_TRYS;
    double percentageDifference = Math.Abs((expectedTime - actualTime) / (double)expectedTime);

    // Verify that the percentage difference between the expected and actual timeout is less then maxPercentageDifference
    if (maxPercentageDifference < percentageDifference)
    {
        Fail("ERROR!!!: The write method timedout in {0} expected {1} percentage difference: {2}", actualTime,
            expectedTime, percentageDifference);
    }
}
// Core handshake scenario: block a write via the given flow-control mode,
// verify CtsHolding/BytesToWrite while blocked, then release flow control and
// verify the write completes.
private void Verify_Handshake(Handshake handshake)
{
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
    {
        var asyncWriteRndByteArray = new AsyncWriteRndByteArray(com1, s_BYTE_SIZE_HANDSHAKE);
        var t = new Task(asyncWriteRndByteArray.WriteRndByteArray);

        // XOFF (0x13) pauses transmission, XON (0x11) resumes it.
        var XOffBuffer = new byte[1];
        var XOnBuffer = new byte[1];
        XOffBuffer[0] = 19;
        XOnBuffer[0] = 17;

        Debug.WriteLine("Verifying Handshake={0}", handshake);
        com1.Handshake = handshake;
        com1.Open();
        com2.Open();

        // Setup to ensure write will block with the type of handshake method being used
        if (Handshake.RequestToSend == handshake || Handshake.RequestToSendXOnXOff == handshake)
        {
            com2.RtsEnable = false;
        }

        if (Handshake.XOnXOff == handshake || Handshake.RequestToSendXOnXOff == handshake)
        {
            com2.BaseStream.Write(XOffBuffer, 0, 1);
            Thread.Sleep(250);
        }

        // Write a random byte asynchronously so we can verify some things while the write call is blocking
        t.Start();
        TCSupport.WaitForTaskToStart(t);
        TCSupport.WaitForWriteBufferToLoad(com1, s_BYTE_SIZE_HANDSHAKE);

        // Verify that CtsHolding is false if the RequestToSend or RequestToSendXOnXOff handshake method is used
        if ((Handshake.RequestToSend == handshake || Handshake.RequestToSendXOnXOff == handshake) &&
            com1.CtsHolding)
        {
            Fail("ERROR!!! Expected CtsHolding={0} actual {1}", false, com1.CtsHolding);
        }

        // Setup to ensure write will succeed
        if (Handshake.RequestToSend == handshake || Handshake.RequestToSendXOnXOff == handshake)
        {
            com2.RtsEnable = true;
        }

        if (Handshake.XOnXOff == handshake || Handshake.RequestToSendXOnXOff == handshake)
        {
            com2.BaseStream.Write(XOnBuffer, 0, 1);
        }

        TCSupport.WaitForTaskCompletion(t);

        // Verify that the correct number of bytes are in the buffer
        Assert.Equal(0, com1.BytesToWrite);

        // Verify that CtsHolding is true if the RequestToSend or RequestToSendXOnXOff handshake method is used
        if ((Handshake.RequestToSend == handshake || Handshake.RequestToSendXOnXOff == handshake) &&
            !com1.CtsHolding)
        {
            Fail("ERROR!!! Expected CtsHolding={0} actual {1}", true, com1.CtsHolding);
        }
    }
    #endregion
}
}
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
namespace ASC.Mail.Net.ABNF
{
#region usings
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
#endregion
/// <summary>
/// This class represent ABNF "dec-val". Defined in RFC 5234 4.
/// </summary>
/// <summary>
/// This class represent ABNF "dec-val". Defined in RFC 5234 4.
/// </summary>
public class ABNF_DecVal : ABNF_Element
{
    #region Nested type: ValueType

    private enum ValueType
    {
        Single = 0,
        Concated = 1,
        Range = 2,
    }

    #endregion

    // FIX: the original constructors validated their arguments and then
    // discarded them (a "// TODO:" was left in both). The parsed value is
    // now actually stored.
    private readonly int m_Start;
    private readonly int m_End;
    private readonly int[] m_Values;

    #region Constructor

    /// <summary>
    /// Default 'range' value constructor.
    /// </summary>
    /// <param name="start">Range start value.</param>
    /// <param name="end">Range end value.</param>
    /// <exception cref="ArgumentException">Is raised when any of the arguments has invalid value.</exception>
    public ABNF_DecVal(int start, int end)
    {
        if (start < 0)
        {
            throw new ArgumentException("Argument 'start' value must be >= 0.");
        }
        if (end < 0)
        {
            throw new ArgumentException("Argument 'end' value must be >= 0.");
        }

        m_Start = start;
        m_End = end;
        m_Values = new[] {start, end};
    }

    /// <summary>
    /// Default 'concated' value constructor.
    /// </summary>
    /// <param name="values">Concated values.</param>
    /// <exception cref="ArgumentNullException">Is raised when <b>values</b> is null reference value.</exception>
    /// <exception cref="ArgumentException">Is raised when any of the arguments has invalid value.</exception>
    public ABNF_DecVal(int[] values)
    {
        if (values == null)
        {
            throw new ArgumentNullException("values");
        }
        if (values.Length < 1)
        {
            throw new ArgumentException("Argument 'values' must contain at least 1 value.");
        }

        // Defensive copy so later mutation of the caller's array cannot change this element.
        m_Values = (int[]) values.Clone();
        m_Start = values[0];
        m_End = values[values.Length - 1];
    }

    #endregion

    #region Properties

    /// <summary>
    /// Gets range start value (for a 'concated' value, the first value).
    /// </summary>
    public int Start
    {
        get { return m_Start; }
    }

    /// <summary>
    /// Gets range end value (for a 'concated' value, the last value).
    /// </summary>
    public int End
    {
        get { return m_End; }
    }

    #endregion

    #region Methods

    /// <summary>
    /// Parses an ABNF "dec-val" element from the reader.
    /// </summary>
    /// <param name="reader">Reader positioned at the leading 'd' of the dec-val.</param>
    /// <returns>Returns the parsed dec-val element.</returns>
    /// <exception cref="ArgumentNullException">Is raised when <b>reader</b> is null reference value.</exception>
    public static ABNF_DecVal Parse(StringReader reader)
    {
        if (reader == null)
        {
            throw new ArgumentNullException("reader");
        }

        // dec-val = "d" 1*DIGIT [ 1*("." 1*DIGIT) / ("-" 1*DIGIT) ]

        if (reader.Peek() != 'd')
        {
            throw new ParseException("Invalid ABNF 'dec-val' value '" + reader.ReadToEnd() + "'.");
        }
        // Eat 'd'.
        reader.Read();

        if (!char.IsNumber((char) reader.Peek()))
        {
            throw new ParseException("Invalid ABNF 'dec-val' value '" + reader.ReadToEnd() + "'.");
        }

        ValueType valueType = ValueType.Single;
        List<int> values = new List<int>();
        StringBuilder b = new StringBuilder();
        while (true)
        {
            // We reached end of string.
            if (reader.Peek() == -1)
            {
                // - or . without required 1 DIGIT.
                if (b.Length == 0)
                {
                    throw new ParseException("Invalid ABNF 'dec-val' value '" + reader.ReadToEnd() + "'.");
                }
                break;
            }
            else if (char.IsNumber((char) reader.Peek()))
            {
                b.Append((char) reader.Read());
            }
            // Concated value.
            else if (reader.Peek() == '.')
            {
                // Range and concatenated values are not allowed to mix.
                if (valueType == ValueType.Range)
                {
                    throw new ParseException("Invalid ABNF 'dec-val' value '" + reader.ReadToEnd() + "'.");
                }
                if (b.Length == 0)
                {
                    throw new ParseException("Invalid ABNF 'dec-val' value '" + reader.ReadToEnd() + "'.");
                }
                values.Add(Convert.ToInt32(b.ToString()));
                b = new StringBuilder();
                valueType = ValueType.Concated;

                // Eat '.'.
                reader.Read();
            }
            // Value range.
            else if (reader.Peek() == '-')
            {
                // Range and concatenated values are not allowed to mix. Also multiple ranges not allowed.
                if (valueType != ValueType.Single)
                {
                    throw new ParseException("Invalid ABNF 'dec-val' value '" + reader.ReadToEnd() + "'.");
                }
                values.Add(Convert.ToInt32(b.ToString()));
                b = new StringBuilder();
                valueType = ValueType.Range;

                // Eat '-'.
                reader.Read();
            }
            // Not dec-val char, value reading completed.
            else
            {
                // - or . without required 1 DIGIT.
                if (b.Length == 0)
                {
                    throw new ParseException("Invalid ABNF 'dec-val' value '" + reader.ReadToEnd() + "'.");
                }
                break;
            }
        }
        // Flush the digits accumulated since the last separator.
        values.Add(Convert.ToInt32(b.ToString()));

        if (valueType == ValueType.Single)
        {
            return new ABNF_DecVal(values[0], values[0]);
        }
        else if (valueType == ValueType.Concated)
        {
            return new ABNF_DecVal(values.ToArray());
        }
        else
        {
            return new ABNF_DecVal(values[0], values[1]);
        }
    }

    #endregion
}
}
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Collections;
using System.Globalization;
using log4net.Core;
using log4net.Layout;
namespace log4net.Util
{
/// <summary>
/// Most of the work of the <see cref="PatternLayout"/> class
/// is delegated to the PatternParser class.
/// </summary>
/// <remarks>
/// <para>
/// The <c>PatternParser</c> processes a pattern string and
/// returns a chain of <see cref="PatternConverter"/> objects.
/// </para>
/// </remarks>
/// <author>Nicko Cadell</author>
/// <author>Gert Driesen</author>
public sealed class PatternParser
{
    #region Public Instance Constructors

    /// <summary>
    /// Constructor
    /// </summary>
    /// <param name="pattern">The pattern to parse.</param>
    /// <remarks>
    /// <para>
    /// Initializes a new instance of the <see cref="PatternParser" /> class
    /// with the specified pattern string.
    /// </para>
    /// </remarks>
    public PatternParser(string pattern)
    {
        m_pattern = pattern;
    }

    #endregion Public Instance Constructors

    #region Public Instance Methods

    /// <summary>
    /// Parses the pattern into a chain of pattern converters.
    /// </summary>
    /// <returns>The head of a chain of pattern converters.</returns>
    /// <remarks>
    /// <para>
    /// Parses the pattern into a chain of pattern converters.
    /// </para>
    /// </remarks>
    public PatternConverter Parse()
    {
        string[] converterNamesCache = BuildCache();
        ParseInternal(m_pattern, converterNamesCache);
        return m_head;
    }

    #endregion Public Instance Methods

    #region Public Instance Properties

    /// <summary>
    /// Get the converter registry used by this parser
    /// </summary>
    /// <value>
    /// The converter registry used by this parser
    /// </value>
    /// <remarks>
    /// <para>
    /// Get the converter registry used by this parser
    /// </para>
    /// </remarks>
    public Hashtable PatternConverters
    {
        get { return m_patternConverters; }
    }

    #endregion Public Instance Properties

    #region Private Instance Methods

    /// <summary>
    /// Build the unified cache of converters from the static and instance maps
    /// </summary>
    /// <returns>the list of all the converter names</returns>
    /// <remarks>
    /// <para>
    /// Build the unified cache of converters from the static and instance maps
    /// </para>
    /// </remarks>
    private string[] BuildCache()
    {
        string[] converterNamesCache = new string[m_patternConverters.Keys.Count];
        m_patternConverters.Keys.CopyTo(converterNamesCache, 0);

        // sort array so that longer strings come first
        Array.Sort(converterNamesCache, 0, converterNamesCache.Length, StringLengthComparer.Instance);

        return converterNamesCache;
    }

    #region StringLengthComparer

    /// <summary>
    /// Sort strings by length
    /// </summary>
    /// <remarks>
    /// <para>
    /// <see cref="IComparer" /> that orders strings by string length.
    /// The longest strings are placed first
    /// </para>
    /// </remarks>
    private sealed class StringLengthComparer : IComparer
    {
        public static readonly StringLengthComparer Instance = new StringLengthComparer();

        private StringLengthComparer()
        {
        }

        #region Implementation of IComparer

        public int Compare(object x, object y)
        {
            string s1 = x as string;
            string s2 = y as string;

            // Nulls (and non-strings) sort last.
            if (s1 == null && s2 == null)
            {
                return 0;
            }
            if (s1 == null)
            {
                return 1;
            }
            if (s2 == null)
            {
                return -1;
            }

            // Descending by length: longer strings first.
            return s2.Length.CompareTo(s1.Length);
        }

        #endregion
    }

    #endregion // StringLengthComparer

    /// <summary>
    /// Internal method to parse the specified pattern to find specified matches
    /// </summary>
    /// <param name="pattern">the pattern to parse</param>
    /// <param name="matches">the converter names to match in the pattern</param>
    /// <remarks>
    /// <para>
    /// The matches param must be sorted such that longer strings come before shorter ones.
    /// </para>
    /// </remarks>
    private void ParseInternal(string pattern, string[] matches)
    {
        int offset = 0;
        while(offset < pattern.Length)
        {
            int i = pattern.IndexOf('%', offset);
            if (i < 0 || i == pattern.Length - 1)
            {
                // No more conversion specifiers: the rest is literal text.
                ProcessLiteral(pattern.Substring(offset));
                offset = pattern.Length;
            }
            else
            {
                if (pattern[i+1] == '%')
                {
                    // Escaped "%%" -> emit a single literal '%'.
                    ProcessLiteral(pattern.Substring(offset, i - offset + 1));
                    offset = i + 2;
                }
                else
                {
                    ProcessLiteral(pattern.Substring(offset, i - offset));
                    offset = i + 1;

                    FormattingInfo formattingInfo = new FormattingInfo();

                    // Process formatting options

                    // Look for the align flag
                    if (offset < pattern.Length)
                    {
                        if (pattern[offset] == '-')
                        {
                            // Seen align flag
                            formattingInfo.LeftAlign = true;
                            offset++;
                        }
                    }
                    // Look for the minimum length
                    while (offset < pattern.Length && char.IsDigit(pattern[offset]))
                    {
                        // Seen digit
                        if (formattingInfo.Min < 0)
                        {
                            formattingInfo.Min = 0;
                        }
                        formattingInfo.Min = (formattingInfo.Min * 10) + int.Parse(pattern[offset].ToString(), NumberFormatInfo.InvariantInfo);
                        offset++;
                    }
                    // Look for the separator between min and max
                    if (offset < pattern.Length)
                    {
                        if (pattern[offset] == '.')
                        {
                            // Seen separator
                            offset++;
                        }
                    }
                    // Look for the maximum length
                    while (offset < pattern.Length && char.IsDigit(pattern[offset]))
                    {
                        // Seen digit
                        if (formattingInfo.Max == int.MaxValue)
                        {
                            formattingInfo.Max = 0;
                        }
                        formattingInfo.Max = (formattingInfo.Max * 10) + int.Parse(pattern[offset].ToString(), NumberFormatInfo.InvariantInfo);
                        offset++;
                    }

                    int remainingStringLength = pattern.Length - offset;

                    // Look for pattern. 'matches' is sorted longest-first, so the
                    // first hit is the longest converter name that applies.
                    for(int m=0; m<matches.Length; m++)
                    {
                        string key = matches[m];

                        if (key.Length <= remainingStringLength)
                        {
                            if (string.Compare(pattern, offset, key, 0, key.Length) == 0)
                            {
                                // Found match
                                offset = offset + matches[m].Length;

                                string option = null;

                                // Look for option
                                if (offset < pattern.Length)
                                {
                                    if (pattern[offset] == '{')
                                    {
                                        // Seen option start
                                        offset++;

                                        int optEnd = pattern.IndexOf('}', offset);
                                        if (optEnd < 0)
                                        {
                                            // Unterminated option: silently ignored,
                                            // the '{' and the rest are treated as consumed.
                                        }
                                        else
                                        {
                                            option = pattern.Substring(offset, optEnd - offset);
                                            offset = optEnd + 1;
                                        }
                                    }
                                }

                                ProcessConverter(matches[m], option, formattingInfo);
                                break;
                            }
                        }
                    }
                }
            }
        }
    }

    /// <summary>
    /// Process a parsed literal
    /// </summary>
    /// <param name="text">the literal text</param>
    private void ProcessLiteral(string text)
    {
        if (text.Length > 0)
        {
            // Convert into a pattern
            ProcessConverter("literal", text, new FormattingInfo());
        }
    }

    /// <summary>
    /// Process a parsed converter pattern
    /// </summary>
    /// <param name="converterName">the name of the converter</param>
    /// <param name="option">the optional option for the converter</param>
    /// <param name="formattingInfo">the formatting info for the converter</param>
    private void ProcessConverter(string converterName, string option, FormattingInfo formattingInfo)
    {
        LogLog.Debug(declaringType, "Converter ["+converterName+"] Option ["+option+"] Format [min="+formattingInfo.Min+",max="+formattingInfo.Max+",leftAlign="+formattingInfo.LeftAlign+"]");

        // Lookup the converter type
        ConverterInfo converterInfo = (ConverterInfo)m_patternConverters[converterName];

        if (converterInfo == null)
        {
            LogLog.Error(declaringType, "Unknown converter name ["+converterName+"] in conversion pattern.");
        }
        else
        {
            // Create the pattern converter
            PatternConverter pc = null;
            try
            {
                pc = (PatternConverter)Activator.CreateInstance(converterInfo.Type);
            }
            catch(Exception createInstanceEx)
            {
                LogLog.Error(declaringType, "Failed to create instance of Type [" + converterInfo.Type.FullName + "] using default constructor. Exception: " + createInstanceEx.ToString());
            }

            // FIX: if instantiation failed, pc is null. Previously the code fell
            // through and dereferenced pc, throwing NullReferenceException; the
            // error has already been logged, so just skip this converter.
            if (pc == null)
            {
                return;
            }

            // formattingInfo variable is an instance variable, occasionally reset
            // and used over and over again
            pc.FormattingInfo = formattingInfo;
            pc.Option = option;
            pc.Properties = converterInfo.Properties;

            IOptionHandler optionHandler = pc as IOptionHandler;
            if (optionHandler != null)
            {
                optionHandler.ActivateOptions();
            }

            AddConverter(pc);
        }
    }

    /// <summary>
    /// Resets the internal state of the parser and adds the specified pattern converter
    /// to the chain.
    /// </summary>
    /// <param name="pc">The pattern converter to add.</param>
    private void AddConverter(PatternConverter pc)
    {
        // Add the pattern converter to the list.
        if (m_head == null)
        {
            m_head = m_tail = pc;
        }
        else
        {
            // Set the next converter on the tail
            // Update the tail reference
            // note that a converter may combine the 'next' into itself
            // and therefore the tail would not change!
            m_tail = m_tail.SetNext(pc);
        }
    }

    #endregion Private Instance Methods

    #region Private Constants

    private const char ESCAPE_CHAR = '%';

    #endregion Private Constants

    #region Private Instance Fields

    /// <summary>
    /// The first pattern converter in the chain
    /// </summary>
    private PatternConverter m_head;

    /// <summary>
    /// the last pattern converter in the chain
    /// </summary>
    private PatternConverter m_tail;

    /// <summary>
    /// The pattern
    /// </summary>
    private string m_pattern;

    /// <summary>
    /// Internal map of converter identifiers to converter types
    /// </summary>
    /// <remarks>
    /// <para>
    /// This map overrides the static s_globalRulesRegistry map.
    /// </para>
    /// </remarks>
    private Hashtable m_patternConverters = new Hashtable();

    #endregion Private Instance Fields

    #region Private Static Fields

    /// <summary>
    /// The fully qualified type of the PatternParser class.
    /// </summary>
    /// <remarks>
    /// Used by the internal logger to record the Type of the
    /// log message.
    /// </remarks>
    private readonly static Type declaringType = typeof(PatternParser);

    #endregion Private Static Fields
}
}
| |
using System.Collections.Generic;
using System;
using System.Linq;
namespace Zios.Interface{
using Containers;
using UnityEngine;
using UnityEditor;
[Serializable]
public class ThemePalette{
	public static List<ThemePalette> all = new List<ThemePalette>();
	public string name;
	public string path;
	public bool usesSystem;
	// Maps marker colors baked into skins/textures to their replacement palette colors.
	public Dictionary<Color,RelativeColor> swap = new Dictionary<Color,RelativeColor>();
	public Hierarchy<string,string,RelativeColor> colors = new Hierarchy<string,string,RelativeColor>(){{"*",new Dictionary<string,RelativeColor>(){{"Window","#C0C0C0"}}}};
	//=================================
	// Files
	//=================================
	// Finds every matching .unitypalette file and deserializes each into a new palette.
	public static List<ThemePalette> Import(string path=null){
		path = path ?? "*.unitypalette";
		var imported = new List<ThemePalette>();
		foreach(var file in FileManager.FindAll(path,Theme.debug)){
			var active = imported.AddNew();
			active.name = file.name;
			active.path = file.path;
			active.Deserialize(file.GetText());
		}
		return imported;
	}
	// Serializes this palette to disk; prompts for a save location when no path is given.
	public void Export(string path=null){
		var theme = Theme.active;
		var savePath = path ?? Theme.storagePath+"Palettes";
		var saveName = theme.palette.name+"-Variant";
		path = path.IsEmpty() ? EditorUtility.SaveFilePanel("Save Theme [Palette]",savePath.GetAssetPath(),saveName,"unitypalette") : path;
		if(path.Length > 0){
			var file = FileManager.Create(path);
			file.WriteText(this.Serialize());
			AssetDatabase.ImportAsset(path.GetAssetPath());
			Utility.SetPref<string>("EditorPalette"+Theme.suffix,path.GetFileName());
			Theme.Reset(true);
		}
	}
	//=================================
	// Data
	//=================================
	// Parses palette text: "(Group)" lines switch groups, "[No...]"/"[Skip...]" lines
	// toggle the skip-texture flag, all other lines define a color.
	public void Deserialize(string data){
		if(data.IsEmpty()){return;}
		bool skipTexture = false;
		var group = "Default";
		var sourceMap = new Dictionary<string,string>();
		this.colors.Clear();
		foreach(var line in data.GetLines()){
			if(line.Trim().IsEmpty()){continue;}
			if(line.Contains("(")){
				group = line.Parse("(",")");
				continue;
			}
			if(line.Contains("[")){
				group = "Default";
				skipTexture = line.Contains("[No",true) || line.Contains("[Skip",true);
				continue;
			}
			var color = new RelativeColor().Deserialize(line);
			color.skipTexture = skipTexture;
			this.colors.AddNew(group)[color.name] = color;
			// Every color is also registered under the "*" catch-all group.
			this.colors.AddNew("*")[color.name] = color;
			sourceMap[color.name] = color.sourceName;
		}
		RelativeColor.UpdateSystem();
		// Resolve source links after all colors exist, since a color may reference
		// another color defined later in the file.
		foreach(var item in sourceMap){
			if(item.Value.IsEmpty()){continue;}
			this.colors["*"][item.Key].Assign(this,item.Value);
			if(this.colors["*"][item.Key].source == RelativeColor.system){
				this.usesSystem = true;
			}
		}
		foreach(var color in this.colors["*"]){
			color.Value.ApplyOffset();
		}
	}
	// Writes the palette as text, textured colors first, then non-textured,
	// with columns padded to the longest name/source for alignment.
	public string Serialize(){
		var contents = "";
		contents = contents.AddLine("[Textured]");
		var nameLength = this.colors["*"].Select(x=>x.Value).OrderByDescending(x=>x.name.Length).First().name.Length;
		var sourceLength = this.colors["*"].Select(x=>x.Value).OrderByDescending(x=>x.sourceName.Length).First().sourceName.Length;
		foreach(var item in this.colors.Where(x=>x.Key!="*")){
			var values = item.Value.Where(x=>!x.Value.skipTexture);
			if(values.Count() > 0){
				if(item.Key != "Default"){contents = contents.AddLine("("+item.Key+")");}
				foreach(var textured in values){
					contents = contents.AddLine("\t"+textured.Value.Serialize(nameLength,sourceLength));
				}
			}
		}
		contents = contents.AddLine("");
		contents = contents.AddLine("[NonTextured]");
		foreach(var item in this.colors.Where(x=>x.Key!="*")){
			var values = item.Value.Where(x=>x.Value.skipTexture);
			if(values.Count() > 0){
				if(item.Key != "Default"){contents = contents.AddLine("("+item.Key+")");}
				foreach(var untextured in values){
					contents = contents.AddLine("\t"+untextured.Value.Serialize(nameLength,sourceLength));
				}
			}
		}
		return contents;
	}
	//=================================
	// Utility
	//=================================
	public bool Has(string name){return this.colors["*"].ContainsKey(name);}
	// Returns the named color, or magenta as a visible "missing color" marker.
	public Color Get(string name){
		if(this.Has(name)){return this.colors["*"][name].value;}
		return Color.magenta;
	}
	// Deep-copies another palette's identity and colors into this one.
	public ThemePalette Use(ThemePalette other){
		this.name = other.name;
		this.path = other.path;
		this.colors.Clear();
		foreach(var group in other.colors){
			foreach(var color in group.Value){
				this.colors.AddNew(group.Key)[color.Key] = other.colors[group.Key][color.Key].Copy();
			}
		}
		return this;
	}
	// Structural equality over the "*" group: value, blend, offset and source must all match.
	public bool Matches(ThemePalette other){
		foreach(var item in this.colors["*"]){
			var name = item.Key;
			if(!other.colors["*"].ContainsKey(name)){return false;}
			var colorA = this.colors["*"][name];
			var colorB = other.colors["*"][name];
			var isBlended = colorA.blendMode != ColorBlend.Normal;
			var isSystem = colorA.source == RelativeColor.system;
			bool mismatchedValue = !isSystem && !isBlended && (colorA.value != colorB.value);
			bool mismatchedBlend = isBlended && (colorA.blendMode.ToInt() != colorB.blendMode.ToInt());
			bool mismatchedOffset = colorA.offset != colorB.offset;
			bool mismatchedSource = colorA.sourceName != colorB.sourceName;
			if(mismatchedBlend || mismatchedValue || mismatchedSource || mismatchedOffset){
				return false;
			}
		}
		return true;
	}
	// Builds the marker-color -> palette-color swap table used by Apply/ApplyTexture.
	public void Build(){
		// FIX: ElementAt(2) and ElementAt(3) below require at least 4 color
		// groups; the original guard of "< 3" still allowed an out-of-range
		// access when exactly 3 groups existed.
		if(this.colors.Values.Count < 4){
			Debug.LogWarning("[ThemePalette] Colors attempted build before initialized.");
			return;
		}
		var active = new Color32(0,255,255,0);
		foreach(var color in this.colors["*"]){
			active.r += 1;
			this.swap[active] = color.Value.value;
		}
		active = new Color32(255,0,255,0);
		foreach(var color in this.colors.Values.ElementAt(2)){
			active.g += 1;
			this.swap[active] = color.Value.value;
		}
		active = new Color32(255,255,0,0);
		foreach(var color in this.colors.Values.ElementAt(3)){
			active.b += 1;
			this.swap[active] = color.Value.value;
		}
	}
	//=================================
	// Dynamics
	//=================================
	// Replaces marker colors in the skin's text/selection/cursor colors with palette colors.
	public void Apply(GUISkin skin){
		if(this.swap.Count < 1){this.Build();}
		var styles = skin.GetStyles();
		foreach(var style in styles){
			foreach(var state in style.GetStates()){
				foreach(var swap in this.swap){
					var color = swap.Value.value;
					if(state.textColor.Matches(swap.Key,false)){
						// Alpha 0 means "take the palette alpha"; otherwise keep the state's alpha.
						state.textColor = state.textColor.a == 0 ? color : new Color(color.r,color.g,color.b,state.textColor.a);
					}
				}
			}
		}
		foreach(var swap in this.swap){
			var color = swap.Value.value;
			var settings = skin.settings;
			if(settings.selectionColor.Matches(swap.Key,false)){
				settings.selectionColor = settings.selectionColor.a == 0 ? color : new Color(color.r,color.g,color.b,settings.selectionColor.a);
			}
			if(settings.cursorColor.Matches(swap.Key,false)){
				settings.cursorColor = settings.cursorColor.a == 0 ? color : new Color(color.r,color.g,color.b,settings.cursorColor.a);
			}
		}
	}
	// Decodes a color term from a texture filename: a 1-based swap index or
	// C/W/B literals, with S/O/I modifiers and an optional A alpha scale.
	public Color ParseColor(string term){
		var index = term.Remove("S","O","I").Split("A").First();
		var offset = index.IsNumber() ? index.ToInt() : -1;
		var swap = this.swap.ElementAtOrDefault(offset-1);
		var current = swap.IsNull() || offset == -1 ? Color.clear : swap.Value.value;
		if(term.StartsWith("C")){current = Color.clear;}
		if(term.StartsWith("W")){current = Color.white;}
		if(term.StartsWith("B")){current = Color.black;}
		if(current != Color.clear){
			if(term.StartsWith("S")){current = current.GetIntensity() < 0.33f ? Color.black : Color.white;}
			if(term.StartsWith("O")){current = current.GetIntensity() < 0.33f ? Color.white : Color.black;}
			if(term.StartsWith("I")){current = current.Invert();}
			if(term.Contains("A")){
				if(term.Split("A")[1].IsEmpty()){current.a = 1;}
				else{current.a *= term.Split("A")[1].ToFloat() / 10.0f;}
			}
		}
		return current;
	}
	// Recolors a theme texture: "!"-prefixed names are RGB splat maps blended from
	// three parsed colors; all other textures get per-pixel marker-color swaps.
	public void ApplyTexture(string path,Texture2D texture,bool writeToDisk=false){
		if(texture.IsNull()){return;}
		var name = path.GetPathTerm().TrimLeft("#");
		var ignoreAlpha = name.StartsWith("A-");
		var isSplat = name.StartsWith("!");
		var parts = name.TrimLeft("!","A-").Split("-");
		if(isSplat && parts.Length < 2){
			Debug.LogWarning("[ThemePalette] : Improperly formed splat texture -- " + path.GetPathTerm());
			return;
		}
		var colorA = isSplat ? this.ParseColor(parts[0]) : Color.clear;
		var colorB = isSplat ? this.ParseColor(parts[1]) : Color.clear;
		var colorC = isSplat ? this.ParseColor(parts[2]) : Color.clear;
		if(isSplat){
			parts = parts.Skip(3).ToArray();
		}
		name = parts.Join("-");
		int index = 0;
		bool changes = false;
		var originalPath = path.GetDirectory().GetDirectory()+"/"+name;
		var originalImage = FileManager.GetAsset<Texture2D>(originalPath,false) ?? FileManager.GetAsset<Texture2D>(name,false);
		var pixels = texture.GetPixels();
		if(originalImage.IsNull() || pixels.Length != originalImage.GetPixels().Length){
			Debug.Log("[TexturePalette] : Generating source for index/splat -- " + originalPath.GetPathTerm());
			texture.SaveAs(originalPath);
			AssetDatabase.ImportAsset(originalPath.GetAssetPath());
			originalImage = FileManager.GetAsset<Texture2D>(originalPath,false);
		}
		originalPath = originalImage.GetAssetPath();
		if(Theme.debug && originalImage.format != TextureFormat.RGBA32){
			Debug.Log("[ThemePalette] Original image is not an RGBA32 texture -- " + originalPath);
		}
		var originalPixels = pixels.Copy();
		foreach(var pixel in pixels){
			if(isSplat){
				var emptyRed = pixel.r == 0 || colorA.a == 0;
				var emptyGreen = pixel.g == 0 || colorB.a == 0;
				var emptyBlue = pixel.b == 0 || colorC.a == 0;
				var colorAStart = emptyGreen && emptyBlue ? colorA.SetAlpha(0) : Color.clear;
				var colorBStart = emptyRed && emptyBlue ? colorB.SetAlpha(0) : Color.clear;
				var colorCStart = emptyRed && emptyGreen ? colorC.SetAlpha(0) : Color.clear;
				var splatA = colorAStart.Lerp(colorA,pixel.r);
				var splatB = colorBStart.Lerp(colorB,pixel.g);
				var splatC = colorCStart.Lerp(colorC,pixel.b);
				var pixelColor = splatA + splatB + splatC;
				pixelColor.a *= pixel.a;
				if(originalPixels[index] != pixelColor){
					originalPixels[index] = pixelColor;
					changes = true;
				}
				index += 1;
				continue;
			}
			foreach(var swap in this.swap){
				if(pixel.Matches(swap.Key,false)){
					var color = swap.Value.value;
					color.a = ignoreAlpha ? pixel.a : color.a * pixel.a;
					if(originalPixels[index] != color){
						originalPixels[index] = color;
						changes = true;
					}
				}
			}
			index += 1;
		}
		if(changes){
			originalImage.SetPixels(originalPixels);
			originalImage.Apply();
			if(writeToDisk){
				// Delay the disk write so repeated applies in one frame coalesce.
				Utility.DelayCall(originalImage,()=>originalImage.SaveAs(originalPath),0.5f);
			}
		}
	}
}
// Asset pipeline hooks that keep theme textures import-compatible with palette swapping.
public class ColorImportSettings : AssetPostprocessor{
	// Re-applies the active theme whenever any asset changes on disk.
	public static void OnPostprocessAllAssets(string[] imported,string[] deleted,string[] movedTo,string[] movedFrom){
		Theme.Reset(true);
	}
	// Applies the import settings only to textures that live under a Themes folder.
	public void OnPreprocessTexture(){
		TextureImporter importer = (TextureImporter)this.assetImporter;
		if(importer.assetPath.ContainsAny("Themes","@Themes")){
			ColorImportSettings.Apply(importer);
		}
	}
	// Forces a lossless, CPU-readable format so exact per-pixel color matching works.
	public static void Apply(TextureImporter importer){
		importer.SetTextureType("Advanced");
		importer.SetTextureFormat(TextureImporterFormat.RGBA32);
		importer.npotScale = TextureImporterNPOTScale.None;
		importer.isReadable = true; // required for GetPixels/SetPixels
		importer.mipmapEnabled = false;
		#if UNITY_5_5_OR_NEWER
		importer.sRGBTexture = false;
		#else
		importer.linearTexture = false;
		#endif
	}
}
}
| |
using System.Text.RegularExpressions;
namespace Signum.Test.LinqProvider;
/// <summary>
/// Summary description for LinqProvider
/// </summary>
public class WhereTest
{
// Test setup: boots the sample "Music" schema and routes SQL to the debug log.
public WhereTest()
{
    MusicStarter.StartAndLoad();
    Connector.CurrentLogger = new DebugTextWriter();
}

[Fact]
public void Where()
{
    var list = Database.Query<AlbumEntity>().Where(a => a.Year < 1995).ToList();
}

[Fact]
public void WhereWithExpressionOr()
{
    var list = (from a in Database.Query<ArtistEntity>()
                where a.Sex.IsDefined()
                select a).ToList();
}

[Fact]
public void WhereIndex()
{
    // Indexed Where overload: keep elements at even positions.
    var list = Database.Query<AlbumEntity>().Where((a, i) => i % 2 == 0).ToList();
}

[Fact]
public void WhereExplicitConvert()
{
    var list = Database.Query<AlbumEntity>().Where(a => a.Id.ToString() == "1").ToList();
}

[Fact]
public void WhereImplicitConvert()
{
    // Id is implicitly converted to string by the "C" + a.Id concatenation.
    Database.Query<AlbumEntity>().Where(a => ("C" + a.Id) == "C1").Any();
}

[Fact]
public void WhereCombineConvert()
{
    var query = Database.Query<AlbumEntity>().Where(a => ("C" + a.Id) + "B" == "C1B").Select(a => a.Id);
    // The chained concatenations should be collapsed into a single CONCAT in the generated SQL.
    Assert.Single(new Regex("CONCAT").Matches(query.QueryText()));
    query.ToList();
}

[Fact]
public void WhereSelect()
{
    var list = Database.Query<AlbumEntity>().Where(a => a.Year < 1995).Select(a => new { a.Year, Author = a.Author.ToLite(), a.Name }).ToList();
}

[Fact]
public void WhereBool()
{
    var list = Database.Query<ArtistEntity>().Where(a => a.Dead).ToList();
}

[Fact]
public void WhereNotNull()
{
    var list = Database.Query<ArtistEntity>().Where(a => a.LastAward != null).ToList();
}

[Fact]
public void SingleFirstLast()
{
    var artists = Database.Query<ArtistEntity>();

    // SingleEx: throws on zero matches and on more than one match.
    Assert.Throws<InvalidOperationException>(() => artists.SingleEx(a => a.Dead && !a.Dead));
    Assert.NotNull(artists.SingleEx(a => a.Dead)); //michael
    Assert.Throws<InvalidOperationException>(() => artists.SingleEx(a => a.Sex == Sex.Male));

    // SingleOrDefaultEx: null on zero matches, throws only on more than one.
    Assert.Null(artists.SingleOrDefaultEx(a => a.Dead && !a.Dead));
    Assert.NotNull(artists.SingleOrDefaultEx(a => a.Dead)); //michael
    Assert.Throws<InvalidOperationException>(() => artists.SingleOrDefaultEx(a => a.Sex == Sex.Male));

    // FirstEx: throws on zero matches, allows many.
    Assert.Throws<InvalidOperationException>(() => artists.FirstEx(a => a.Dead && !a.Dead));
    Assert.NotNull(artists.FirstEx(a => a.Dead)); //michael
    Assert.NotNull(artists.FirstEx(a => a.Sex == Sex.Male));

    // FirstOrDefault: null on zero matches, allows many.
    Assert.Null(artists.FirstOrDefault(a => a.Dead && !a.Dead));
    Assert.NotNull(artists.FirstOrDefault(a => a.Dead)); //michael
    Assert.NotNull(artists.FirstOrDefault(a => a.Sex == Sex.Male));
}
[Fact]
public void SingleFirstLastError()
{
var artists = Database.Query<ArtistEntity>();
AssertThrows<InvalidOperationException>("Y", () => artists.Where(a => a.Dead && !a.Dead).SingleEx(() => "Y"));
AssertThrows<InvalidOperationException>("Y", () => artists.Where(a => a.Sex == Sex.Male).SingleEx(() => "X", () => "Y"));
AssertThrows<InvalidOperationException>("Y", () => artists.Where(a => a.Sex == Sex.Male).SingleOrDefaultEx(() => "Y"));
AssertThrows<InvalidOperationException>("X", () => artists.Where(a => a.Dead && !a.Dead).FirstEx(() => "X"));
AssertThrows<InvalidOperationException>(typeof(ArtistEntity).Name, () => artists.SingleEx(a => a.Dead && !a.Dead));
AssertThrows<InvalidOperationException>(typeof(ArtistEntity).Name, () => artists.SingleEx(a => a.Sex == Sex.Male));
AssertThrows<InvalidOperationException>(typeof(ArtistEntity).Name, () => artists.SingleOrDefaultEx(a => a.Sex == Sex.Male));
AssertThrows<InvalidOperationException>(typeof(ArtistEntity).Name, () => artists.FirstEx(a => a.Dead && !a.Dead));
AssertThrows<InvalidOperationException>("X", () => artists.Where(a => a.Dead && !a.Dead).SingleOrManyEx(() => "X"));
}
static void AssertThrows<T>(string message, Action action) where T : Exception
{
var e = Assert.Throws<T>(action);
Assert.Contains(message, e.Message);
}
[Fact]
public void WhereEntityEquals()
{
ArtistEntity wretzky = Database.Query<ArtistEntity>().SingleEx(a => a.Sex == Sex.Female);
BandEntity smashing = (from b in Database.Query<BandEntity>()
from a in b.Members
where a.Is(wretzky)
select b).SingleEx();
}
[Fact]
public void WhereLiteEquals()
{
ArtistEntity wretzky = Database.Query<ArtistEntity>().SingleEx(a => a.Sex == Sex.Female);
BandEntity smashing = (from b in Database.Query<BandEntity>()
from a in b.Members
where a.ToLite().Is(wretzky.ToLite())
select b).SingleEx();
}
[Fact]
public void WhereEntityEqualsIB()
{
ArtistEntity michael = Database.Query<ArtistEntity>().SingleEx(a => a.Dead);
var albums = (from a in Database.Query<AlbumEntity>()
where a.Author == michael
select a.ToLite()).ToList();
}
[Fact]
public void WhereEntityEqualsIBA()
{
ArtistEntity michael = Database.Query<ArtistEntity>().SingleEx(a => a.Dead);
var albums = (from n in Database.Query<NoteWithDateEntity>()
where n.Target == michael
select n.ToLite()).ToList();
}
[Fact]
public void WhereLiteEqualsIB()
{
ArtistEntity michael = Database.Query<ArtistEntity>().SingleEx(a => a.Dead);
var albums = (from a in Database.Query<AlbumEntity>()
where a.Author.ToLite().Is(michael.ToLite())
select a.ToLite()).ToList();
}
[Fact]
public void WhereLiteEqualsIBA()
{
ArtistEntity michael = Database.Query<ArtistEntity>().SingleEx(a => a.Dead);
var albums = (from n in Database.Query<NoteWithDateEntity>()
where n.Target.ToLite().Is(michael.ToLite())
select n.ToLite()).ToList();
}
[Fact]
public void WhereIs()
{
var albums = (from a in Database.Query<AlbumEntity>()
where a.Author is ArtistEntity
select a.ToLite()).ToList();
}
[Fact]
public void WhereRefersTo1()
{
var lite = (Lite<BandEntity>?)null;
var first = Database.Query<BandEntity>().Where(b => lite.Is(b)).FirstOrDefault();
Assert.Null(first);
}
[Fact]
public void WhereRefersTo2()
{
var entity = (BandEntity?)null;
var first = Database.Query<BandEntity>().Where(b => b.ToLite().Is(entity)).FirstOrDefault();
Assert.Null(first);
}
[Fact]
public void WhereCase()
{
var list = (from a in Database.Query<ArtistEntity>()
where a.Dead ? a.Name.Contains("Michael") : a.Name.Contains("Billy")
select a).ToArray();
}
[Fact]
public void WherePolyExpressionMethodUnion()
{
var list = Database.Query<AlbumEntity>().Where(a => a.Author.CombineUnion().Lonely()).ToArray();
}
[Fact]
public void WherePolyExpressionMethodSwitch()
{
var list = Database.Query<AlbumEntity>().Where(a => a.Author.CombineCase().Lonely()).ToArray();
}
[Fact]
public void WhereOptimize()
{
var list = Database.Query<ArtistEntity>().Where(a => a.Dead && true).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => a.Dead && false).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => a.Dead || true).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => a.Dead || false).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => a.Dead == true).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => a.Dead == false).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => a.Dead != true).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => a.Dead != false).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => true ? a.Dead : false).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => false ? false : a.Dead).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => true).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => !false).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => true ? true : false).Select(a => a.Name).ToList();
list = Database.Query<ArtistEntity>().Where(a => false ? false : true).Select(a => a.Name).ToList();
}
[Fact]
public void WhereInnerQueryable()
{
var females = Database.Query<ArtistEntity>().Where(a => a.Sex == Sex.Female);
string f = females.ToString()!;
var female = Database.Query<ArtistEntity>().SingleEx(a => females.Contains(a));
}
[Fact]
public void WhereEnumToString()
{
var females = Database.Query<ArtistEntity>().Count(a => a.Sex.ToString() == Sex.Female.ToString());
var females2 = Database.Query<ArtistEntity>().Count(a => a.Sex == Sex.Female);
Assert.Equal(females, females2);
var bla = Database.Query<ArtistEntity>().Count(a => a.Sex.ToString() == a.Name);
Assert.Equal(0, bla);
}
[Fact]
public void WhereNullableEnumToString()
{
var females = Database.Query<ArtistEntity>().Count(a => a.Status.ToString() == Status.Married.ToString());
var females2 = Database.Query<ArtistEntity>().Count(a => a.Status == Status.Married);
Assert.Equal(females, females2);
}
[Fact]
public void WhereEmbeddedNull()
{
var albumsWithBonusTrack = Database.Query<AlbumEntity>().Where(a => a.BonusTrack == null).ToList();
}
[Fact]
public void WhereEmbeddedNotNull()
{
var albumsWithBonusTrack = Database.Query<AlbumEntity>().Where(a => a.BonusTrack != null).ToList();
}
[Fact]
public void WhereMixinNullThrows()
{
Assert.Throws<InvalidOperationException>(() =>
Database.Query<NoteWithDateEntity>().Where(n => n.Mixin<CorruptMixin>() == null).ToList());
}
[Fact]
public void WhereMixinField()
{
var list = Database.Query<NoteWithDateEntity>().Where(n => n.Mixin<CorruptMixin>().Corrupt == false).ToList();
}
[Fact]
public void WhereMixinMainEntityField()
{
var list = Database.Query<NoteWithDateEntity>().Where(n => n.Mixin<CorruptMixin>().MainEntity == n).ToList();
}
[Fact]
public void WhereBindTuple()
{
var albums = Database.Query<AlbumEntity>().Select(a => Tuple.Create(a.Name, a.Label)).Where(t => t.Item2 == null).ToList();
}
[Fact]
public void WhereBindBigTuple()
{
var albums = Database.Query<AlbumEntity>().Select(a => Tuple.Create(a.Name, a.Name, a.Name, a.Name, a.Name, a.Name, a.Name, a.Label)).Where(t => t.Rest.Item1 == null).ToList();
}
[Fact]
public void WhereOutsideIs()
{
var albums = Database.Query<BandEntity>().Where(a => a.LastAward is PersonalAwardEntity).ToList();
}
[Fact]
public void WhereOutsideCast()
{
var albums = Database.Query<BandEntity>().Where(a => ((PersonalAwardEntity?)a.LastAward) != null).ToList();
}
[Fact]
public void WhereOutsideEquals()
{
var pa = Database.Query<PersonalAwardEntity>().FirstEx();
var albums = Database.Query<BandEntity>().Where(a => a.LastAward.Is(pa)).ToList();
}
[Fact]
public void WhereMListContains()
{
var female = Database.Query<ArtistEntity>().Single(a => a.Sex == Sex.Female);
var albums = Database.Query<BandEntity>().Where(a => a.Members.Contains(female)).Select(a => a.ToLite()).ToList();
}
[Fact]
public void WhereMListLiteContains()
{
var female = Database.Query<ArtistEntity>().Select(a => a.ToLite()).Single(a => a.Entity.Sex == Sex.Female);
var albums = Database.Query<ArtistEntity>().Where(a => a.Friends.Contains(female)).Select(a => a.ToLite()).ToList();
}
[Fact]
public void WhereMListContainsSingle()
{
var albums = Database.Query<BandEntity>().Where(a => a.Members.Contains(
Database.Query<ArtistEntity>().Single(a2 => a2.Sex == Sex.Female)
)).Select(a => a.ToLite()).ToList();
}
[Fact]
public void WhereMListLiteContainsSingle()
{
var albums = Database.Query<ArtistEntity>().Where(a =>
a.Friends.Contains(Database.Query<ArtistEntity>().Single(a2 => a2.Sex == Sex.Female).ToLite())
).Select(a => a.ToLite()).ToList();
}
[Fact]
public void NullableBoolFix()
{
var artist = Database.Query<ArtistEntity>().Where(a => ((bool?)(a.Dead ? a.Friends.Any() : false)) == true).ToList();
}
[Fact]
public void ExceptionTest()
{
Assert.Throws<FieldReaderException>(() =>
Database.Query<ArtistEntity>().Select(a => Throw(a.Id)).ToList());
}
public static bool Throw(PrimaryKey a)
{
throw new ArgumentException("a");
}
[Fact]
public void DistinctWithNulls()
{
var id = Database.Query<AlbumEntity>().Select(a => a.Id).FirstEx();
var nullRight = Database.Query<AlbumEntity>().Where(alb => LinqHints.DistinctNull(alb.Id, (PrimaryKey?)null)).Count();
var notNullRight = Database.Query<AlbumEntity>().Where(alb => LinqHints.DistinctNull(alb.Id, (PrimaryKey?)id)).Count();
var nullLeft = Database.Query<AlbumEntity>().Where(alb => LinqHints.DistinctNull((PrimaryKey?)null, alb.Id)).Count();
var notNullLeft = Database.Query<AlbumEntity>().Where(alb => LinqHints.DistinctNull((PrimaryKey?)id, alb.Id)).Count();
}
[Fact]
public void WhereEqualsNew()
{
GrammyAwardEntity award = new GrammyAwardEntity();
var count = Database.Query<BandEntity>().Count(a => a.LastAward.Is(award));
Assert.Equal(0, count);
}
[Fact]
public void WhereNotEqualsNew()
{
GrammyAwardEntity award = new GrammyAwardEntity();
var count = Database.Query<BandEntity>().Count(a => !a.LastAward.Is(award));
Assert.True(count > 0);
}
[Fact]
public void WhereEqualsNewIBA()
{
GrammyAwardEntity award = new GrammyAwardEntity();
var count = Database.Query<ArtistEntity>().Count(a => a.LastAward.Is(award));
Assert.Equal(0, count);
}
[Fact]
public void WhereCount()
{
var album = Database.Query<ArtistEntity>()
.Where(a => Database.Query<AlbumEntity>().Where(al => al.Author.Is(a)).Count() > 0)
.Select(a => a.Name)
.ToList();
}
[Fact]
public void WhereFormat()
{
var album = Database.Query<ArtistEntity>()
.Where(a => $"Hi {(a.IsMale ? "Mr." : "Ms.")} {a}".Contains("Mr. Michael"))
.Select(a => a.ToLite())
.ToList();
}
[Fact]
public void WhereFormat4()
{
var album = Database.Query<ArtistEntity>()
.Where(a => $"Hi {a.Name} {a.Name} {a.Name} {a.Name}".Contains("Mr. Michael"))
.Select(a => a.ToLite())
.ToList();
}
[Fact]
public void WhereNoFormat()
{
var album = Database.Query<ArtistEntity>()
.Where(a => ("Hi " + (a.IsMale ? "Mr." : "Ms.") + " " + a.Name + " ToStr " + a).Contains("Mr. Michael"))
.Select(a => a.ToLite())
.ToList();
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using gagr = Google.Api.Gax.ResourceNames;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.Video.Transcoder.V1Beta1.Tests
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedTranscoderServiceClientTest
{
// Generated test: CreateJob with a full request object must forward the request
// to the gRPC client verbatim and return the RPC response unchanged.
[xunit::FactAttribute]
public void CreateJobRequestObject()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    CreateJobRequest request = new CreateJobRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        Job = new Job(),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.CreateJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    Job response = client.CreateJob(request);
    // Same reference, not just equal: the wrapper must not copy the response.
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: async CreateJob with a full request object, exercised through
// both the CallSettings and the CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task CreateJobRequestObjectAsync()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    CreateJobRequest request = new CreateJobRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        Job = new Job(),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.CreateJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Both public async overloads must surface the same underlying response.
    Job responseCallSettings = await client.CreateJobAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    Job responseCancellationToken = await client.CreateJobAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: the flattened CreateJob(parent, job) overload must build a
// request equivalent to 'request' and return the RPC response unchanged.
[xunit::FactAttribute]
public void CreateJob()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    CreateJobRequest request = new CreateJobRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        Job = new Job(),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.CreateJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Call the string-parent flattened overload (protobuf equality matches the Setup).
    Job response = client.CreateJob(request.Parent, request.Job);
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: async flattened CreateJobAsync(parent, job), exercised through
// both the CallSettings and the CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task CreateJobAsync()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    CreateJobRequest request = new CreateJobRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        Job = new Job(),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.CreateJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Both public async overloads must surface the same underlying response.
    Job responseCallSettings = await client.CreateJobAsync(request.Parent, request.Job, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    Job responseCancellationToken = await client.CreateJobAsync(request.Parent, request.Job, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: the resource-name flattened overload CreateJob(LocationName, job)
// must build a request equivalent to 'request' and return the response unchanged.
[xunit::FactAttribute]
public void CreateJobResourceNames()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    CreateJobRequest request = new CreateJobRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        Job = new Job(),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.CreateJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Call the typed LocationName overload (protobuf equality matches the Setup).
    Job response = client.CreateJob(request.ParentAsLocationName, request.Job);
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: async resource-name flattened CreateJobAsync(LocationName, job),
// exercised through both the CallSettings and the CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task CreateJobResourceNamesAsync()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    CreateJobRequest request = new CreateJobRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        Job = new Job(),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.CreateJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Both public async overloads must surface the same underlying response.
    Job responseCallSettings = await client.CreateJobAsync(request.ParentAsLocationName, request.Job, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    Job responseCancellationToken = await client.CreateJobAsync(request.ParentAsLocationName, request.Job, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: GetJob with a full request object must forward the request
// to the gRPC client verbatim and return the RPC response unchanged.
[xunit::FactAttribute]
public void GetJobRequestObject()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    GetJobRequest request = new GetJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.GetJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    Job response = client.GetJob(request);
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: async GetJob with a full request object, exercised through
// both the CallSettings and the CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task GetJobRequestObjectAsync()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    GetJobRequest request = new GetJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.GetJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Both public async overloads must surface the same underlying response.
    Job responseCallSettings = await client.GetJobAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    Job responseCancellationToken = await client.GetJobAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: the flattened GetJob(name) overload must build a request
// equivalent to 'request' and return the RPC response unchanged.
[xunit::FactAttribute]
public void GetJob()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    GetJobRequest request = new GetJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.GetJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Call the string-name flattened overload (protobuf equality matches the Setup).
    Job response = client.GetJob(request.Name);
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: async flattened GetJobAsync(name), exercised through both the
// CallSettings and the CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task GetJobAsync()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    GetJobRequest request = new GetJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.GetJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Both public async overloads must surface the same underlying response.
    Job responseCallSettings = await client.GetJobAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    Job responseCancellationToken = await client.GetJobAsync(request.Name, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: the resource-name flattened overload GetJob(JobName) must build
// a request equivalent to 'request' and return the RPC response unchanged.
[xunit::FactAttribute]
public void GetJobResourceNames()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    GetJobRequest request = new GetJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.GetJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Call the typed JobName overload (protobuf equality matches the Setup).
    Job response = client.GetJob(request.JobName);
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: async resource-name flattened GetJobAsync(JobName), exercised
// through both the CallSettings and the CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task GetJobResourceNamesAsync()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    GetJobRequest request = new GetJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    // Arbitrary populated response; only reference identity is asserted below.
    Job expectedResponse = new Job
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
        InputUri = "input_uriec9462a7",
        OutputUri = "output_urice759a4d",
        TemplateId = "template_id6435f574",
        Config = new JobConfig(),
        Priority = 1546225849,
        OriginUri = new Job.Types.OriginUri(),
        State = Job.Types.ProcessingState.Succeeded,
        Progress = new Progress(),
        FailureReason = "failure_reasonb933af24",
        FailureDetails =
        {
            new FailureDetail(),
        },
        CreateTime = new wkt::Timestamp(),
        StartTime = new wkt::Timestamp(),
        EndTime = new wkt::Timestamp(),
        TtlAfterCompletionDays = 1495978457,
    };
    mockGrpcClient.Setup(x => x.GetJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    // Both public async overloads must surface the same underlying response.
    Job responseCallSettings = await client.GetJobAsync(request.JobName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    Job responseCancellationToken = await client.GetJobAsync(request.JobName, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: DeleteJob with a full request object must forward the request
// verbatim; the RPC returns Empty, so only the mock interaction is verified.
[xunit::FactAttribute]
public void DeleteJobRequestObject()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    DeleteJobRequest request = new DeleteJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    wkt::Empty expectedResponse = new wkt::Empty { };
    mockGrpcClient.Setup(x => x.DeleteJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    client.DeleteJob(request);
    mockGrpcClient.VerifyAll();
}
// Generated test: async DeleteJob with a full request object, exercised through
// both the CallSettings and the CancellationToken overloads; returns Empty, so
// only the mock interactions are verified.
[xunit::FactAttribute]
public async stt::Task DeleteJobRequestObjectAsync()
{
    // Strict mock: any gRPC call without an explicit Setup fails the test.
    moq::Mock<TranscoderService.TranscoderServiceClient> mockGrpcClient = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    DeleteJobRequest request = new DeleteJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    wkt::Empty expectedResponse = new wkt::Empty { };
    mockGrpcClient.Setup(x => x.DeleteJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mockGrpcClient.Object, null);
    await client.DeleteJobAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await client.DeleteJobAsync(request, st::CancellationToken.None);
    mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void DeleteJob()
{
    // Verifies the string-name overload builds an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    mock.Setup(x => x.DeleteJob(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new wkt::Empty());
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    client.DeleteJob(req.Name);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task DeleteJobAsync()
{
    // Verifies the string-name async overloads build an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    var call = new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(new wkt::Empty()), null, null, null, null);
    mock.Setup(x => x.DeleteJobAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    await client.DeleteJobAsync(req.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await client.DeleteJobAsync(req.Name, st::CancellationToken.None);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void DeleteJobResourceNames()
{
    // Verifies the JobName resource-name overload builds an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    mock.Setup(x => x.DeleteJob(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new wkt::Empty());
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    client.DeleteJob(req.JobName);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task DeleteJobResourceNamesAsync()
{
    // Verifies the JobName resource-name async overloads build an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobRequest
    {
        JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"),
    };
    var call = new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(new wkt::Empty()), null, null, null, null);
    mock.Setup(x => x.DeleteJobAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    await client.DeleteJobAsync(req.JobName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await client.DeleteJobAsync(req.JobName, st::CancellationToken.None);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void CreateJobTemplateRequestObject()
{
    // Strict mock: any RPC without a matching Setup fails the test.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new CreateJobTemplateRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        JobTemplate = new JobTemplate(),
        JobTemplateId = "job_template_id7acfca7e",
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    mock.Setup(x => x.CreateJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var response = client.CreateJobTemplate(req);
    // The wrapper must surface the exact response instance from the stub.
    xunit::Assert.Same(expected, response);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task CreateJobTemplateRequestObjectAsync()
{
    // Strict mock: any RPC without a matching Setup fails the test.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new CreateJobTemplateRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        JobTemplate = new JobTemplate(),
        JobTemplateId = "job_template_id7acfca7e",
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    var call = new grpccore::AsyncUnaryCall<JobTemplate>(stt::Task.FromResult(expected), null, null, null, null);
    mock.Setup(x => x.CreateJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    // Both overloads must surface the exact response instance.
    var viaCallSettings = await client.CreateJobTemplateAsync(req, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaToken = await client.CreateJobTemplateAsync(req, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void CreateJobTemplate()
{
    // Verifies the flattened-argument overload builds an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new CreateJobTemplateRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        JobTemplate = new JobTemplate(),
        JobTemplateId = "job_template_id7acfca7e",
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    mock.Setup(x => x.CreateJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var response = client.CreateJobTemplate(req.Parent, req.JobTemplate, req.JobTemplateId);
    xunit::Assert.Same(expected, response);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task CreateJobTemplateAsync()
{
    // Verifies the flattened-argument async overloads build an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new CreateJobTemplateRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        JobTemplate = new JobTemplate(),
        JobTemplateId = "job_template_id7acfca7e",
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    var call = new grpccore::AsyncUnaryCall<JobTemplate>(stt::Task.FromResult(expected), null, null, null, null);
    mock.Setup(x => x.CreateJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var viaCallSettings = await client.CreateJobTemplateAsync(req.Parent, req.JobTemplate, req.JobTemplateId, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaToken = await client.CreateJobTemplateAsync(req.Parent, req.JobTemplate, req.JobTemplateId, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void CreateJobTemplateResourceNames()
{
    // Verifies the resource-name overload builds an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new CreateJobTemplateRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        JobTemplate = new JobTemplate(),
        JobTemplateId = "job_template_id7acfca7e",
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    mock.Setup(x => x.CreateJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var response = client.CreateJobTemplate(req.ParentAsLocationName, req.JobTemplate, req.JobTemplateId);
    xunit::Assert.Same(expected, response);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task CreateJobTemplateResourceNamesAsync()
{
    // Verifies the resource-name async overloads build an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new CreateJobTemplateRequest
    {
        ParentAsLocationName = gagr::LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
        JobTemplate = new JobTemplate(),
        JobTemplateId = "job_template_id7acfca7e",
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    var call = new grpccore::AsyncUnaryCall<JobTemplate>(stt::Task.FromResult(expected), null, null, null, null);
    mock.Setup(x => x.CreateJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var viaCallSettings = await client.CreateJobTemplateAsync(req.ParentAsLocationName, req.JobTemplate, req.JobTemplateId, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaToken = await client.CreateJobTemplateAsync(req.ParentAsLocationName, req.JobTemplate, req.JobTemplateId, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void GetJobTemplateRequestObject()
{
    // Strict mock: any RPC without a matching Setup fails the test.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new GetJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    mock.Setup(x => x.GetJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var response = client.GetJobTemplate(req);
    xunit::Assert.Same(expected, response);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetJobTemplateRequestObjectAsync()
{
    // Strict mock: any RPC without a matching Setup fails the test.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new GetJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    var call = new grpccore::AsyncUnaryCall<JobTemplate>(stt::Task.FromResult(expected), null, null, null, null);
    mock.Setup(x => x.GetJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    // Both overloads must surface the exact response instance.
    var viaCallSettings = await client.GetJobTemplateAsync(req, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaToken = await client.GetJobTemplateAsync(req, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void GetJobTemplate()
{
    // Verifies the string-name overload builds an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new GetJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    mock.Setup(x => x.GetJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var response = client.GetJobTemplate(req.Name);
    xunit::Assert.Same(expected, response);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetJobTemplateAsync()
{
    // Verifies the string-name async overloads build an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new GetJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    var call = new grpccore::AsyncUnaryCall<JobTemplate>(stt::Task.FromResult(expected), null, null, null, null);
    mock.Setup(x => x.GetJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var viaCallSettings = await client.GetJobTemplateAsync(req.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaToken = await client.GetJobTemplateAsync(req.Name, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void GetJobTemplateResourceNames()
{
    // Verifies the resource-name overload builds an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new GetJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    mock.Setup(x => x.GetJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var response = client.GetJobTemplate(req.JobTemplateName);
    xunit::Assert.Same(expected, response);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetJobTemplateResourceNamesAsync()
{
    // Verifies the resource-name async overloads build an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new GetJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var expected = new JobTemplate
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
        Config = new JobConfig(),
    };
    var call = new grpccore::AsyncUnaryCall<JobTemplate>(stt::Task.FromResult(expected), null, null, null, null);
    mock.Setup(x => x.GetJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    var viaCallSettings = await client.GetJobTemplateAsync(req.JobTemplateName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaToken = await client.GetJobTemplateAsync(req.JobTemplateName, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void DeleteJobTemplateRequestObject()
{
    // Strict mock: any RPC without a matching Setup fails the test.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    mock.Setup(x => x.DeleteJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new wkt::Empty());
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    client.DeleteJobTemplate(req);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task DeleteJobTemplateRequestObjectAsync()
{
    // Strict mock: any RPC without a matching Setup fails the test.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var call = new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(new wkt::Empty()), null, null, null, null);
    mock.Setup(x => x.DeleteJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    await client.DeleteJobTemplateAsync(req, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await client.DeleteJobTemplateAsync(req, st::CancellationToken.None);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void DeleteJobTemplate()
{
    // Verifies the string-name overload builds an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    mock.Setup(x => x.DeleteJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new wkt::Empty());
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    client.DeleteJobTemplate(req.Name);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task DeleteJobTemplateAsync()
{
    // Verifies the string-name async overloads build an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var call = new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(new wkt::Empty()), null, null, null, null);
    mock.Setup(x => x.DeleteJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    await client.DeleteJobTemplateAsync(req.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await client.DeleteJobTemplateAsync(req.Name, st::CancellationToken.None);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void DeleteJobTemplateResourceNames()
{
    // Verifies the resource-name overload builds an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    mock.Setup(x => x.DeleteJobTemplate(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new wkt::Empty());
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    client.DeleteJobTemplate(req.JobTemplateName);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task DeleteJobTemplateResourceNamesAsync()
{
    // Verifies the resource-name async overloads build an equivalent request.
    var mock = new moq::Mock<TranscoderService.TranscoderServiceClient>(moq::MockBehavior.Strict);
    var req = new DeleteJobTemplateRequest
    {
        JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"),
    };
    var call = new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(new wkt::Empty()), null, null, null, null);
    mock.Setup(x => x.DeleteJobTemplateAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(call);
    TranscoderServiceClient client = new TranscoderServiceClientImpl(mock.Object, null);
    await client.DeleteJobTemplateAsync(req.JobTemplateName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await client.DeleteJobTemplateAsync(req.JobTemplateName, st::CancellationToken.None);
    mock.VerifyAll();
}
}
}
| |
using System;
using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections.Generic;
using System.Linq;
using log4net;
namespace CommandLine
{
/// <summary>
/// Enumeration of interrupt events that might be received by the process
/// via a ConsoleCtrlHandler callback.
/// The numeric values mirror the Win32 CTRL_*_EVENT constants delivered to
/// a console HandlerRoutine (values 3 and 4 are reserved by the OS, hence
/// the gap between Close and Logoff).
/// </summary>
public enum EInterruptTypes
{
    Ctrl_C = 0,
    Ctrl_Break = 1,
    Close = 2,
    Logoff = 5,
    Shutdown = 6
}
/// <summary>
/// Class to hold definition of external SetConsoleCtrlHandler routine.
/// </summary>
public class Win32
{
    /// <summary>
    /// Signature for a console control event callback; return true to
    /// indicate the event has been handled.
    /// </summary>
    public delegate bool Handler(EInterruptTypes ctrlType);

    // P/Invoke into kernel32: installs (Add == true) or removes
    // (Add == false) the given control-event handler for this process.
    [DllImport("Kernel32")]
    public static extern bool SetConsoleCtrlHandler(Handler handler, bool Add);
}
/// <summary>
/// Main class for interacting via the command-line. Handles the definition
/// and parsing of command-line arguments, and the display of usage and help
/// messages.
/// </summary>
public class UI
{
// Reference to class logger
protected static readonly ILog _log = LogManager.GetLogger(
    System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

/// <summary>
/// Static public flag indicating that the application is to terminate
/// immediately, e.g. in response to a Ctrl-C or Logoff event. Any long-
/// running command should check this flag periodically and attempt to
/// abort gracefully.
/// </summary>
public static bool Interrupted = false;

/// Flag indicating whether console output is redirected; set lazily when
/// a System.Console call throws because there is no real console.
public static bool IsRedirected = false;

// Backing field for EscPressed; latched true once Escape has been read.
private static bool _escPressed = false;
/// <summary>
/// Returns true if escape has been pressed.
/// The flag latches: once Escape is detected it remains true until the
/// setter is used to reset it.
/// </summary>
public static bool EscPressed
{
    get {
        try {
            // Only poll the keyboard while Escape has not yet been seen and
            // input is not known to be redirected.
            if(!_escPressed && !UI.IsRedirected && System.Console.KeyAvailable) {
                var keyInfo = System.Console.ReadKey();
                if(keyInfo.Key == System.ConsoleKey.Escape) {
                    _escPressed = true;
                }
            }
        }
        catch(InvalidOperationException) {
            // Console.KeyAvailable throws when standard input is redirected;
            // remember that so future calls skip the poll entirely.
            UI.IsRedirected = true;
        }
        return _escPressed;
    }
    set {
        _escPressed = value;
    }
}
/// <summary>
/// This method registers this class as a handler for Ctrl-C etc events
/// in the console. It returns a handle to the handler, which should be
/// referenced via the following at the end of the program Main method:
/// GC.KeepAlive(hr);
/// </summary>
/// <returns>The delegate passed to SetConsoleCtrlHandler; the caller must
/// keep it alive, or the GC may collect it while native code still holds
/// the callback pointer.</returns>
public static Win32.Handler RegisterCtrlHandler()
{
    // Hook up CtrlHandler to handle breaks, logoffs, etc
    Win32.Handler hr = new Win32.Handler(UI.CtrlHandler);
    Win32.SetConsoleCtrlHandler(hr, true);
    return hr;
}
/// <summary>
/// Handler to receive control events, such as Ctrl-C and logoff and
/// shutdown events. As a minimum, this logs the event, so that a record
/// of why the process exited is maintained.
/// </summary>
/// <param name="ctrlType">The type of event that occurred.</param>
/// <returns>True, indicating we have handled the event.</returns>
static bool CtrlHandler(EInterruptTypes ctrlType)
{
    _log.Warn("An interrupt [" + ctrlType + "] has been received");
    // Signal long-running work to wind down; callers poll this flag.
    Interrupted = true;
    return true;
}
/// Length of content on last incomplete line written via Write();
/// reset to zero by the WriteLine overloads.
private int _lineLength;

/// The set of possible arguments to be recognised.
public Definition Definition;
/// Returns the console width, or -1 if the console is redirected
public int ConsoleWidth {
    get {
        if(UI.IsRedirected) { return -1; }
        try {
            return System.Console.BufferWidth;
        }
        catch(IOException) {
            // BufferWidth throws when output has no attached console;
            // latch the redirection flag so we don't probe again.
            UI.IsRedirected = true;
            return -1;
        }
    }
}
/// Get/set the console foreground color.
/// When output is redirected the getter returns White and the setter is
/// a no-op (color has no meaning without a console).
public ConsoleColor ForegroundColor {
    get {
        if(UI.IsRedirected) {
            return ConsoleColor.White;
        }
        try {
            return System.Console.ForegroundColor;
        }
        catch(IOException) {
            // No attached console; remember that and fall back to White.
            UI.IsRedirected = true;
            return ConsoleColor.White;
        }
    }
    set {
        if(!UI.IsRedirected) {
            System.Console.ForegroundColor = value;
        }
    }
}
/// Get/set the console background color.
/// When output is redirected the getter returns Black and the setter is
/// a no-op, mirroring the ForegroundColor behaviour.
public ConsoleColor BackgroundColor {
    get {
        if(UI.IsRedirected) {
            return ConsoleColor.Black;
        }
        try {
            return System.Console.BackgroundColor;
        }
        catch(IOException) {
            // No attached console; remember that and fall back to Black.
            UI.IsRedirected = true;
            return ConsoleColor.Black;
        }
    }
    set {
        if(!UI.IsRedirected) {
            System.Console.BackgroundColor = value;
        }
    }
}
/// <summary>
/// Constructor; requires a purpose for the program whose args we are
/// parsing. The text becomes the HelpInstructions shown by DisplayUsage.
/// </summary>
public UI(string help)
{
    var definition = new Definition();
    definition.HelpInstructions = help;
    Definition = definition;
}
/// <summary>
/// Displays the application title, version etc. on the supplied writer,
/// followed by a copyright line and a blank line.
/// </summary>
public void DisplayTitle(TextWriter console)
{
    string version = ApplicationInfo.Version.ToString();
    console.WriteLine("{0} {1} [{2}]", ApplicationInfo.Title, version, ApplicationInfo.Product);
    console.WriteLine("Copyright (c) {0}", ApplicationInfo.Copyright);
    console.WriteLine();
}
/// <summary>
/// Displays a usage message, based on the allowed arguments and purpose
/// represented by this class.
/// </summary>
/// <param name="console">Writer the usage text is sent to.</param>
/// <param name="args">Arguments already parsed; command-style positional
/// arguments present in this map are echoed with their values instead of
/// being listed as placeholders.</param>
public void DisplayUsage(TextWriter console, Dictionary<string, object> args)
{
    DisplayTitle(console);
    console.WriteLine(ApplicationInfo.Description);
    if(Definition.HelpInstructions != null) {
        console.WriteLine();
        console.WriteLine(Definition.HelpInstructions.Trim(), ApplicationInfo.ExeName);
    }
    console.WriteLine();
    console.WriteLine();
    console.WriteLine("Usage:");
    console.WriteLine();
    console.Write(" {0}", ApplicationInfo.ExeName);
    var posArgs = Definition.PositionalArguments.ToList();
    foreach(var arg in Definition.PositionalArguments) {
        if(arg.IsCommand && (args.ContainsKey(arg.Key) ||
            (arg.Alias != null && args.ContainsKey(arg.Alias)))) {
            console.Write(arg.IsRequired ? " {0}" : " [{0}]",
                args.ContainsKey(arg.Key) ? args[arg.Key] : args[arg.Alias]);
            // BUG FIX: the previous code removed posArgs[i] where i indexed
            // the ORIGINAL list; after one removal the copy is shorter, so a
            // second removal struck the wrong element. Remove by reference
            // instead, which is always correct since posArgs is a shallow
            // copy holding the same Argument instances.
            posArgs.Remove(arg);
        }
        else {
            console.Write(arg.IsRequired ? " <{0}>" : " [<{0}>]", arg.Key);
        }
    }
    if(Definition.KeywordArguments.Count > 0) {
        console.Write(" [<Key>:<Value> ...]");
    }
    if(Definition.FlagArguments.Count > 0) {
        console.Write(" [--<Flag> ...]");
    }
    console.WriteLine();
    if(posArgs.Count > 0) {
        console.WriteLine();
        console.WriteLine(" Positional Arguments:");
        posArgs.ForEach(x => OutputArg(x, console));
    }
    if(Definition.KeywordArguments.Count > 0) {
        console.WriteLine();
        console.WriteLine(" Keyword Arguments:");
        Definition.KeywordArguments.ForEach(x => OutputArg(x, console));
    }
    if(Definition.FlagArguments.Count > 0) {
        console.WriteLine();
        console.WriteLine(" Flag Arguments:");
        Definition.FlagArguments.ForEach(x => OutputArg(x, console));
    }
    console.WriteLine();
}
/// Outputs a single argument definition: flags get a "--" prefix, and
/// value arguments are annotated with their default or required status.
protected void OutputArg(Argument arg, TextWriter console) {
    var format = (arg is FlagArgument) ? " --{0,-14} {1}" : " {0,-16} {1}";
    console.Write(format, arg.Key, arg.Description);
    var valueArg = arg as ValueArgument;
    if(valueArg != null) {
        if(valueArg.DefaultValue != null) {
            console.Write(" (Default: {0})", valueArg.DefaultValue);
        }
        else if(valueArg.IsRequired) {
            console.Write(" (Required)");
        }
    }
    console.WriteLine();
}
/// <summary>
/// Clear existing arguments from the definition.
/// </summary>
public void ClearArguments()
{
    // Delegate straight to the Definition; no local state to reset.
    Definition.Clear();
}
/// <summary>
/// Convenience method for defining a new positional argument with no
/// parse callback.
/// </summary>
public PositionalArgument AddPositionalArgument(string key, string desc)
{
    // Forward to the full overload with a null OnParse handler.
    return AddPositionalArgument(key, desc, null);
}
/// <summary>
/// Convenience method for defining a new positional argument, wiring up
/// an optional callback invoked when the argument is parsed.
/// </summary>
public PositionalArgument AddPositionalArgument(string key, string desc,
    Argument.OnParseHandler onParse)
{
    var argument = new PositionalArgument();
    argument.Key = key;
    argument.Description = desc;
    argument.OnParse += onParse;
    return (PositionalArgument)Definition.AddArgument(argument);
}
/// <summary>
/// Convenience method for defining a new keyword argument with no
/// parse callback.
/// </summary>
public KeywordArgument AddKeywordArgument(string key, string desc)
{
    // Forward to the full overload with a null OnParse handler.
    return AddKeywordArgument(key, desc, null);
}
/// <summary>
/// Convenience method for defining a new keyword argument, wiring up an
/// optional callback invoked when the argument is parsed.
/// </summary>
public KeywordArgument AddKeywordArgument(string key, string desc,
    Argument.OnParseHandler onParse)
{
    var argument = new KeywordArgument();
    argument.Key = key;
    argument.Description = desc;
    argument.OnParse += onParse;
    return (KeywordArgument)Definition.AddArgument(argument);
}
/// <summary>
/// Convenience method for defining a new flag argument with no parse
/// callback.
/// </summary>
public FlagArgument AddFlagArgument(string key, string desc)
{
    // Forward to the full overload with a null OnParse handler.
    return AddFlagArgument(key, desc, null);
}
/// <summary>
/// Convenience method for defining a new flag argument, wiring up an
/// optional callback invoked when the argument is parsed.
/// </summary>
public FlagArgument AddFlagArgument(string key, string desc,
    Argument.OnParseHandler onParse)
{
    var argument = new FlagArgument();
    argument.Key = key;
    argument.Description = desc;
    argument.OnParse += onParse;
    return (FlagArgument)Definition.AddArgument(argument);
}
/// <summary>
/// Classifies the supplied list of arguments, returning a count of the
/// number of positional arguments found.
/// </summary>
/// <param name="args">The raw argument strings to classify.</param>
public int ClassifyArguments(IEnumerable<string> args)
{
    // A throw-away Parser does the classification against the current
    // Definition; the enumerable is materialised into a List first.
    return new Parser(Definition).ClassifyArguments(new List<string>(args));
}
/// <summary>
/// Parses the supplied set of arg strings using the list of Argument
/// definitions maintained by this command-line UI instance.
/// </summary>
/// <param name="args">The raw argument strings to parse.</param>
/// <returns>The parsed values keyed by argument, or null when usage help
/// was displayed instead.</returns>
public Dictionary<string, object> Parse(IEnumerable<string> args)
{
    var parser = new Parser(Definition);
    var parsed = parser.Parse(new List<string>(args));
    if(parser.ShowUsage) {
        // Usage help was requested; show it and discard whatever was parsed.
        DisplayUsage(System.Console.Error, parsed);
        return null;
    }
    if(parser.ParseException != null) {
        throw parser.ParseException;
    }
    return parsed;
}
/// <summary>
/// Writes a line of text to the console, ensuring lines the same width
/// as the console don't output an unnecessary new-line.
/// </summary>
/// <param name="text">The text to output; may contain embedded new-lines.</param>
public void WriteLine(string text)
{
    foreach(var segment in text.Split('\n')) {
        // A segment exactly as wide as the console wraps on its own, so
        // also writing a new-line would leave a spurious blank line.
        if(segment.Length == ConsoleWidth) {
            System.Console.Out.Write(segment);
        }
        else {
            System.Console.Out.WriteLine(segment);
        }
    }
    _lineLength = 0;
}
/// <summary>
/// Writes a blank line to the console.
/// </summary>
public void WriteLine()
{
    System.Console.Out.WriteLine();
    // Nothing is pending on the current line any more.
    _lineLength = 0;
}
/// <summary>
/// Writes a partial line of text to the console, without moving to the
/// next line.
/// </summary>
/// <param name="line">The text to output.</param>
public void Write(string line)
{
    System.Console.Out.Write(line);
    // Remember how much was written so ClearLine can erase it later.
    _lineLength = line.Length;
}
/// <summary>
/// Clears the last text written to the console using Write.
/// Note that this may be more than a single line of text, if the
/// window width is less than the length of the string written.
/// </summary>
public void ClearLine()
{
    // Cursor manipulation only works on a real console.
    if(!UI.IsRedirected) {
        try {
            // Overwrite each (possibly wrapped) console line with spaces.
            // Writing a full-width char buffer advances the cursor one row,
            // so the cursor is moved back up after each pass.
            while(ConsoleWidth > -1 && _lineLength > 0) {
                var buf = new char[ConsoleWidth];
                System.Console.CursorLeft = 0;
                System.Console.Write(buf);
                System.Console.CursorLeft = 0;
                // NOTE(review): -2 here plus the +1 after the loop nets one
                // row up per pass; confirm this is correct for multi-row wraps.
                System.Console.CursorTop = System.Console.CursorTop - 2;
                _lineLength = _lineLength - ConsoleWidth;
            }
            System.Console.CursorTop = System.Console.CursorTop + 1;
        }
        catch(IOException) {
            // Cursor operations throw when output is redirected; remember
            // that so we don't attempt them again.
            UI.IsRedirected = true;
        }
    }
    _lineLength = 0;
}
/// <summary>
/// For completeness - read a line of input from the console.
/// </summary>
/// <param name="prompt">Text displayed before reading input.</param>
/// <returns>The line entered by the user.</returns>
public string ReadLine(string prompt)
{
    System.Console.Write(prompt);
    return System.Console.ReadLine();
}
/// <summary>
/// Reads a password from the command-line, echoing * for each keypress.
/// </summary>
/// <param name="prompt">Text displayed before reading input.</param>
/// <returns>The password entered (never echoed to the console).</returns>
public string ReadPassword(string prompt)
{
    var buffer = new StringBuilder();
    System.Console.Write(prompt);
    // Read keys without echoing: Enter finishes, Backspace edits, any
    // other key is recorded and masked with '*'.
    while(true) {
        var key = System.Console.ReadKey(true);
        if(key.Key == ConsoleKey.Enter) {
            break;
        }
        if(key.Key == ConsoleKey.Backspace) {
            if(buffer.Length > 0) {
                buffer.Remove(buffer.Length - 1, 1);
                // Erase the last '*' from the display.
                System.Console.Write("\b \b");
            }
        }
        else {
            buffer.Append(key.KeyChar);
            System.Console.Write('*');
        }
    }
    System.Console.WriteLine();
    return buffer.ToString();
}
/// <summary>
/// Performs the specified operation +op+ with the console color
/// temporarily changed to +color+. On completion of the operation,
/// the color is reset to the original color.
/// </summary>
/// <param name="color">Foreground color to use while running the operation.</param>
/// <param name="op">The operation to perform.</param>
public void WithColor(ConsoleColor color, Action op)
{
    var previous = ForegroundColor;
    var needsChange = color != previous;
    if(needsChange) { ForegroundColor = color; }
    try {
        op();
    }
    finally {
        // Restore the original color even if the operation throws.
        if(needsChange) { ForegroundColor = previous; }
    }
}
}
/// <summary>
/// Returns information about the current application from the assembly
/// properties.
/// </summary>
static public class ApplicationInfo
{
    // The assembly whose metadata attributes are reported.
    private static Assembly AppAssembly { get { return Assembly.GetExecutingAssembly(); } }

    /// Returns the first assembly-level attribute of type T, or null if
    /// the assembly does not carry one.
    private static T GetAssemblyAttribute<T>() where T : Attribute
    {
        object[] attributes = AppAssembly.GetCustomAttributes(typeof(T), false);
        return attributes.Length == 0 ? null : (T)attributes[0];
    }

    /// Returns the assembly version number
    public static Version Version { get { return AppAssembly.GetName().Version; } }

    /// Returns the name of the currently executing application
    public static string ExeName
    {
        get { return System.IO.Path.GetFileNameWithoutExtension(AppAssembly.CodeBase); }
    }

    /// Returns the application title, falling back to the executable name
    /// when no (non-empty) AssemblyTitle attribute is present.
    public static string Title
    {
        get {
            var title = GetAssemblyAttribute<AssemblyTitleAttribute>();
            if(title != null && title.Title.Length > 0) {
                return title.Title;
            }
            return ExeName;
        }
    }

    /// Returns the assembly description, or "" when absent.
    public static string Description
    {
        get {
            var attr = GetAssemblyAttribute<AssemblyDescriptionAttribute>();
            return attr == null ? "" : attr.Description;
        }
    }

    /// Returns the assembly copyright, or "" when absent.
    public static string Copyright
    {
        get {
            var attr = GetAssemblyAttribute<AssemblyCopyrightAttribute>();
            return attr == null ? "" : attr.Copyright;
        }
    }

    /// Returns the assembly product name, or "" when absent.
    public static string Product
    {
        get {
            var attr = GetAssemblyAttribute<AssemblyProductAttribute>();
            return attr == null ? "" : attr.Product;
        }
    }

    /// Returns the assembly company name, or "" when absent.
    public static string Company
    {
        get {
            var attr = GetAssemblyAttribute<AssemblyCompanyAttribute>();
            return attr == null ? "" : attr.Company;
        }
    }
}
}
| |
// <copyright file="StatisticsTests.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2015 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
using System;
using System.Collections.Generic;
using System.Linq;
using MathNet.Numerics.Distributions;
using MathNet.Numerics.Random;
using MathNet.Numerics.TestData;
using NUnit.Framework;
// ReSharper disable InvokeAsExtensionMethod
namespace MathNet.Numerics.UnitTests.StatisticsTests
{
using Statistics;
/// <summary>
/// Statistics Tests
/// </summary>
[TestFixture, Category("Statistics")]
public class StatisticsTests
{
// NIST reference data sets (each file carries certified statistics such as
// the mean and standard deviation), keyed by the short names used in the
// [TestCase] attributes below.
readonly IDictionary<string, StatTestData> _data = new Dictionary<string, StatTestData>
{
    { "lottery", new StatTestData("NIST.Lottery.dat") },
    { "lew", new StatTestData("NIST.Lew.dat") },
    { "mavro", new StatTestData("NIST.Mavro.dat") },
    { "michelso", new StatTestData("NIST.Michelso.dat") },
    { "numacc1", new StatTestData("NIST.NumAcc1.dat") },
    { "numacc2", new StatTestData("NIST.NumAcc2.dat") },
    { "numacc3", new StatTestData("NIST.NumAcc3.dat") },
    { "numacc4", new StatTestData("NIST.NumAcc4.dat") },
    { "meixner", new StatTestData("NIST.Meixner.dat") }
};
[Test]
public void ThrowsOnNullData()
{
    // Every statistics entry point must reject a null sample array: the
    // Statistics/RunningStatistics facades may throw any exception type,
    // while the low-level array/streaming helpers are expected to surface
    // a NullReferenceException.
    // Fix: the SortedArrayStatistics.Minimum assertion was duplicated
    // verbatim; the redundant copy has been removed.
    double[] data = null;
    // ReSharper disable ExpressionIsAlwaysNull
    Assert.That(() => Statistics.Minimum(data), Throws.Exception);
    Assert.That(() => Statistics.Maximum(data), Throws.Exception);
    Assert.That(() => Statistics.Mean(data), Throws.Exception);
    Assert.That(() => Statistics.HarmonicMean(data), Throws.Exception);
    Assert.That(() => Statistics.GeometricMean(data), Throws.Exception);
    Assert.That(() => Statistics.Median(data), Throws.Exception);
    Assert.That(() => Statistics.Quantile(data, 0.3), Throws.Exception);
    Assert.That(() => Statistics.Variance(data), Throws.Exception);
    Assert.That(() => Statistics.StandardDeviation(data), Throws.Exception);
    Assert.That(() => Statistics.PopulationVariance(data), Throws.Exception);
    Assert.That(() => Statistics.PopulationStandardDeviation(data), Throws.Exception);
    Assert.That(() => Statistics.Covariance(data, data), Throws.Exception);
    Assert.That(() => Statistics.PopulationCovariance(data, data), Throws.Exception);
    Assert.That(() => Statistics.RootMeanSquare(data), Throws.Exception);
    Assert.That(() => SortedArrayStatistics.Minimum(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.Maximum(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.OrderStatistic(data, 1), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.Median(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.LowerQuartile(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.UpperQuartile(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.Percentile(data, 30), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.Quantile(data, 0.3), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.QuantileCustom(data, 0.3, 0, 0, 1, 0), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.QuantileCustom(data, 0.3, QuantileDefinition.Nearest), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.InterquartileRange(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => SortedArrayStatistics.FiveNumberSummary(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.Minimum(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.Maximum(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.OrderStatisticInplace(data, 1), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.Mean(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.HarmonicMean(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.GeometricMean(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.Variance(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.StandardDeviation(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.PopulationVariance(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.PopulationStandardDeviation(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.Covariance(data, data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.PopulationCovariance(data, data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.RootMeanSquare(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.MedianInplace(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => ArrayStatistics.QuantileInplace(data, 0.3), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.Minimum(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.Maximum(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.Mean(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.HarmonicMean(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.GeometricMean(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.Variance(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.StandardDeviation(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.PopulationVariance(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.PopulationStandardDeviation(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.Covariance(data, data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.PopulationCovariance(data, data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.RootMeanSquare(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => StreamingStatistics.Entropy(data), Throws.Exception.TypeOf<NullReferenceException>());
    Assert.That(() => new RunningStatistics(data), Throws.Exception);
    Assert.That(() => new RunningStatistics().PushRange(data), Throws.Exception);
    // ReSharper restore ExpressionIsAlwaysNull
}
[Test]
public void DoesNotThrowOnEmptyData()
{
    // Empty input is a valid (if degenerate) sample set: no statistics
    // routine may throw on it (most are expected to yield NaN instead).
    double[] data = new double[0];
    Assert.DoesNotThrow(() => Statistics.Minimum(data));
    Assert.DoesNotThrow(() => Statistics.Maximum(data));
    Assert.DoesNotThrow(() => Statistics.Mean(data));
    Assert.DoesNotThrow(() => Statistics.HarmonicMean(data));
    Assert.DoesNotThrow(() => Statistics.GeometricMean(data));
    Assert.DoesNotThrow(() => Statistics.Median(data));
    Assert.DoesNotThrow(() => Statistics.Quantile(data, 0.3));
    Assert.DoesNotThrow(() => Statistics.Variance(data));
    Assert.DoesNotThrow(() => Statistics.StandardDeviation(data));
    Assert.DoesNotThrow(() => Statistics.PopulationVariance(data));
    Assert.DoesNotThrow(() => Statistics.PopulationStandardDeviation(data));
    Assert.DoesNotThrow(() => Statistics.Covariance(data, data));
    Assert.DoesNotThrow(() => Statistics.PopulationCovariance(data, data));
    Assert.DoesNotThrow(() => Statistics.RootMeanSquare(data));
    Assert.DoesNotThrow(() => SortedArrayStatistics.Minimum(data));
    Assert.DoesNotThrow(() => SortedArrayStatistics.Maximum(data));
    Assert.DoesNotThrow(() => SortedArrayStatistics.OrderStatistic(data, 1));
    Assert.DoesNotThrow(() => SortedArrayStatistics.Median(data));
    Assert.DoesNotThrow(() => SortedArrayStatistics.LowerQuartile(data));
    Assert.DoesNotThrow(() => SortedArrayStatistics.UpperQuartile(data));
    Assert.DoesNotThrow(() => SortedArrayStatistics.Percentile(data, 30));
    Assert.DoesNotThrow(() => SortedArrayStatistics.Quantile(data, 0.3));
    Assert.DoesNotThrow(() => SortedArrayStatistics.QuantileCustom(data, 0.3, 0, 0, 1, 0));
    Assert.DoesNotThrow(() => SortedArrayStatistics.QuantileCustom(data, 0.3, QuantileDefinition.Nearest));
    Assert.DoesNotThrow(() => SortedArrayStatistics.InterquartileRange(data));
    Assert.DoesNotThrow(() => SortedArrayStatistics.FiveNumberSummary(data));
    Assert.DoesNotThrow(() => ArrayStatistics.Minimum(data));
    Assert.DoesNotThrow(() => ArrayStatistics.Maximum(data));
    Assert.DoesNotThrow(() => ArrayStatistics.OrderStatisticInplace(data, 1));
    // NOTE(review): unlike ThrowsOnNullData, ArrayStatistics.HarmonicMean /
    // GeometricMean and StreamingStatistics.HarmonicMean / GeometricMean /
    // Entropy-adjacent means are not exercised here — confirm whether that
    // gap is intentional.
    Assert.DoesNotThrow(() => ArrayStatistics.Mean(data));
    Assert.DoesNotThrow(() => ArrayStatistics.Variance(data));
    Assert.DoesNotThrow(() => ArrayStatistics.StandardDeviation(data));
    Assert.DoesNotThrow(() => ArrayStatistics.PopulationVariance(data));
    Assert.DoesNotThrow(() => ArrayStatistics.PopulationStandardDeviation(data));
    Assert.DoesNotThrow(() => ArrayStatistics.Covariance(data, data));
    Assert.DoesNotThrow(() => ArrayStatistics.PopulationCovariance(data, data));
    Assert.DoesNotThrow(() => ArrayStatistics.RootMeanSquare(data));
    Assert.DoesNotThrow(() => ArrayStatistics.MedianInplace(data));
    Assert.DoesNotThrow(() => ArrayStatistics.QuantileInplace(data, 0.3));
    Assert.DoesNotThrow(() => StreamingStatistics.Minimum(data));
    Assert.DoesNotThrow(() => StreamingStatistics.Maximum(data));
    Assert.DoesNotThrow(() => StreamingStatistics.Mean(data));
    Assert.DoesNotThrow(() => StreamingStatistics.Variance(data));
    Assert.DoesNotThrow(() => StreamingStatistics.StandardDeviation(data));
    Assert.DoesNotThrow(() => StreamingStatistics.PopulationVariance(data));
    Assert.DoesNotThrow(() => StreamingStatistics.PopulationStandardDeviation(data));
    Assert.DoesNotThrow(() => StreamingStatistics.Covariance(data, data));
    Assert.DoesNotThrow(() => StreamingStatistics.PopulationCovariance(data, data));
    Assert.DoesNotThrow(() => StreamingStatistics.RootMeanSquare(data));
    Assert.DoesNotThrow(() => StreamingStatistics.Entropy(data));
    // RunningStatistics must accept empty input and expose all derived
    // properties without throwing.
    Assert.That(() => new RunningStatistics(data), Throws.Nothing);
    Assert.That(() => new RunningStatistics().PushRange(data), Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).Minimum, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).Maximum, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).Mean, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).Variance, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).StandardDeviation, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).Skewness, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).Kurtosis, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).PopulationVariance, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).PopulationStandardDeviation, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).PopulationSkewness, Throws.Nothing);
    Assert.That(() => new RunningStatistics(data).PopulationKurtosis, Throws.Nothing);
}
[TestCase("lottery")]
[TestCase("lew")]
[TestCase("mavro")]
[TestCase("michelso")]
[TestCase("numacc1")]
[TestCase("numacc2")]
[TestCase("numacc3")]
[TestCase("numacc4")]
public void MeanConsistentWithNistData(string dataSet)
{
    // Every mean implementation must agree with the NIST certified value
    // to 14 significant digits.
    var reference = _data[dataSet];
    var samples = reference.Data;
    AssertHelpers.AlmostEqualRelative(reference.Mean, Statistics.Mean(samples), 14);
    AssertHelpers.AlmostEqualRelative(reference.Mean, ArrayStatistics.Mean(samples), 14);
    AssertHelpers.AlmostEqualRelative(reference.Mean, StreamingStatistics.Mean(samples), 14);
    AssertHelpers.AlmostEqualRelative(reference.Mean, Statistics.MeanVariance(samples).Item1, 14);
    AssertHelpers.AlmostEqualRelative(reference.Mean, ArrayStatistics.MeanVariance(samples).Item1, 14);
    AssertHelpers.AlmostEqualRelative(reference.Mean, StreamingStatistics.MeanVariance(samples).Item1, 14);
    AssertHelpers.AlmostEqualRelative(reference.Mean, new RunningStatistics(samples).Mean, 14);
}
[TestCase("lottery")]
[TestCase("lew")]
[TestCase("mavro")]
[TestCase("michelso")]
[TestCase("numacc1")]
[TestCase("numacc2")]
[TestCase("numacc3")]
[TestCase("numacc4")]
public void NullableMeanConsistentWithNistData(string dataSet)
{
    // The nullable overload (data containing null entries) must still match
    // the NIST certified mean to 14 digits.
    var data = _data[dataSet];
    AssertHelpers.AlmostEqualRelative(data.Mean, Statistics.Mean(data.DataWithNulls), 14);
}
[TestCase("lottery", 14)]
[TestCase("lew", 14)]
[TestCase("mavro", 11)]
[TestCase("michelso", 11)]
[TestCase("numacc1", 15)]
[TestCase("numacc2", 13)]
[TestCase("numacc3", 9)]
[TestCase("numacc4", 7)]
public void StandardDeviationConsistentWithNistData(string dataSet, int digits)
{
    // Every standard-deviation implementation must agree with the NIST
    // certified value; the per-dataset digit count reflects how numerically
    // hard each dataset is.
    var reference = _data[dataSet];
    var samples = reference.Data;
    AssertHelpers.AlmostEqualRelative(reference.StandardDeviation, Statistics.StandardDeviation(samples), digits);
    AssertHelpers.AlmostEqualRelative(reference.StandardDeviation, ArrayStatistics.StandardDeviation(samples), digits);
    AssertHelpers.AlmostEqualRelative(reference.StandardDeviation, StreamingStatistics.StandardDeviation(samples), digits);
    AssertHelpers.AlmostEqualRelative(reference.StandardDeviation, Math.Sqrt(Statistics.MeanVariance(samples).Item2), digits);
    AssertHelpers.AlmostEqualRelative(reference.StandardDeviation, Math.Sqrt(ArrayStatistics.MeanVariance(samples).Item2), digits);
    AssertHelpers.AlmostEqualRelative(reference.StandardDeviation, Math.Sqrt(StreamingStatistics.MeanVariance(samples).Item2), digits);
    AssertHelpers.AlmostEqualRelative(reference.StandardDeviation, new RunningStatistics(samples).StandardDeviation, digits);
}
[TestCase("lottery", 14)]
[TestCase("lew", 14)]
[TestCase("mavro", 11)]
[TestCase("michelso", 11)]
[TestCase("numacc1", 15)]
[TestCase("numacc2", 13)]
[TestCase("numacc3", 9)]
[TestCase("numacc4", 7)]
public void NullableStandardDeviationConsistentWithNistData(string dataSet, int digits)
{
    // The nullable overload (data containing null entries) must still match
    // the NIST certified standard deviation to the dataset-specific accuracy.
    var data = _data[dataSet];
    AssertHelpers.AlmostEqualRelative(data.StandardDeviation, Statistics.StandardDeviation(data.DataWithNulls), digits);
}
[Test]
public void MinimumMaximumOnShortSequence()
{
    // All min/max implementations must agree on a small unsorted sample
    // (true minimum -3, true maximum 10).
    var values = new[] { -1.0, 5, 0, -3, 10, -0.5, 4 };
    Assert.That(Statistics.Minimum(values), Is.EqualTo(-3), "Min");
    Assert.That(Statistics.Maximum(values), Is.EqualTo(10), "Max");
    Assert.That(ArrayStatistics.Minimum(values), Is.EqualTo(-3), "Min");
    Assert.That(ArrayStatistics.Maximum(values), Is.EqualTo(10), "Max");
    Assert.That(StreamingStatistics.Minimum(values), Is.EqualTo(-3), "Min");
    Assert.That(StreamingStatistics.Maximum(values), Is.EqualTo(10), "Max");
    Assert.That(new RunningStatistics(values).Minimum, Is.EqualTo(-3), "Min");
    Assert.That(new RunningStatistics(values).Maximum, Is.EqualTo(10), "Max");
    // SortedArrayStatistics requires ascending input.
    Array.Sort(values);
    Assert.That(SortedArrayStatistics.Minimum(values), Is.EqualTo(-3), "Min");
    Assert.That(SortedArrayStatistics.Maximum(values), Is.EqualTo(10), "Max");
}
[Test]
public void MinimumMaximumOnShortSequence32()
{
    // Single-precision variant of the min/max agreement check.
    var values = new[] { -1.0f, 5f, 0f, -3f, 10f, -0.5f, 4f };
    Assert.That(Statistics.Minimum(values), Is.EqualTo(-3), "Min");
    Assert.That(Statistics.Maximum(values), Is.EqualTo(10), "Max");
    Assert.That(ArrayStatistics.Minimum(values), Is.EqualTo(-3), "Min");
    Assert.That(ArrayStatistics.Maximum(values), Is.EqualTo(10), "Max");
    Assert.That(StreamingStatistics.Minimum(values), Is.EqualTo(-3), "Min");
    Assert.That(StreamingStatistics.Maximum(values), Is.EqualTo(10), "Max");
    // SortedArrayStatistics requires ascending input.
    Array.Sort(values);
    Assert.That(SortedArrayStatistics.Minimum(values), Is.EqualTo(-3), "Min");
    Assert.That(SortedArrayStatistics.Maximum(values), Is.EqualTo(10), "Max");
}
[Test]
public void OrderStatisticsOnShortSequence()
{
    // Order statistics are 1-based: order 1 is the minimum, order 9 the
    // maximum of this 9-element sample; out-of-range orders (0, 10) yield NaN.
    // -3 -1 -0.5 0 1 4 5 6 10
    var samples = new[] { -1, 5, 0, -3, 10, -0.5, 4, 1, 6 };
    var f = Statistics.OrderStatisticFunc(samples);
    Assert.That(f(0), Is.NaN, "Order-0 (bad)");
    Assert.That(f(1), Is.EqualTo(-3), "Order-1");
    Assert.That(f(2), Is.EqualTo(-1), "Order-2");
    Assert.That(f(3), Is.EqualTo(-0.5), "Order-3");
    Assert.That(f(7), Is.EqualTo(5), "Order-7");
    Assert.That(f(8), Is.EqualTo(6), "Order-8");
    Assert.That(f(9), Is.EqualTo(10), "Order-9");
    Assert.That(f(10), Is.NaN, "Order-10 (bad)");
    Assert.That(Statistics.OrderStatistic(samples, 0), Is.NaN, "Order-0 (bad)");
    Assert.That(Statistics.OrderStatistic(samples, 1), Is.EqualTo(-3), "Order-1");
    Assert.That(Statistics.OrderStatistic(samples, 2), Is.EqualTo(-1), "Order-2");
    Assert.That(Statistics.OrderStatistic(samples, 3), Is.EqualTo(-0.5), "Order-3");
    Assert.That(Statistics.OrderStatistic(samples, 7), Is.EqualTo(5), "Order-7");
    Assert.That(Statistics.OrderStatistic(samples, 8), Is.EqualTo(6), "Order-8");
    Assert.That(Statistics.OrderStatistic(samples, 9), Is.EqualTo(10), "Order-9");
    Assert.That(Statistics.OrderStatistic(samples, 10), Is.NaN, "Order-10 (bad)");
    // The in-place variant may reorder the array, but the order statistics
    // it reports must be the same.
    Assert.That(ArrayStatistics.OrderStatisticInplace(samples, 0), Is.NaN, "Order-0 (bad)");
    Assert.That(ArrayStatistics.OrderStatisticInplace(samples, 1), Is.EqualTo(-3), "Order-1");
    Assert.That(ArrayStatistics.OrderStatisticInplace(samples, 2), Is.EqualTo(-1), "Order-2");
    Assert.That(ArrayStatistics.OrderStatisticInplace(samples, 3), Is.EqualTo(-0.5), "Order-3");
    Assert.That(ArrayStatistics.OrderStatisticInplace(samples, 7), Is.EqualTo(5), "Order-7");
    Assert.That(ArrayStatistics.OrderStatisticInplace(samples, 8), Is.EqualTo(6), "Order-8");
    Assert.That(ArrayStatistics.OrderStatisticInplace(samples, 9), Is.EqualTo(10), "Order-9");
    Assert.That(ArrayStatistics.OrderStatisticInplace(samples, 10), Is.NaN, "Order-10 (bad)");
    // SortedArrayStatistics requires ascending input.
    Array.Sort(samples);
    Assert.That(SortedArrayStatistics.OrderStatistic(samples, 0), Is.NaN, "Order-0 (bad)");
    Assert.That(SortedArrayStatistics.OrderStatistic(samples, 1), Is.EqualTo(-3), "Order-1");
    Assert.That(SortedArrayStatistics.OrderStatistic(samples, 2), Is.EqualTo(-1), "Order-2");
    Assert.That(SortedArrayStatistics.OrderStatistic(samples, 3), Is.EqualTo(-0.5), "Order-3");
    Assert.That(SortedArrayStatistics.OrderStatistic(samples, 7), Is.EqualTo(5), "Order-7");
    Assert.That(SortedArrayStatistics.OrderStatistic(samples, 8), Is.EqualTo(6), "Order-8");
    Assert.That(SortedArrayStatistics.OrderStatistic(samples, 9), Is.EqualTo(10), "Order-9");
    Assert.That(SortedArrayStatistics.OrderStatistic(samples, 10), Is.NaN, "Order-10 (bad)");
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 1/5d)]
[TestCase(0.2d, -1d)]
[TestCase(0.7d, 4d)]
[TestCase(0.01d, -3d)]
[TestCase(0.99d, 10d)]
[TestCase(0.52d, 1d)]
[TestCase(0.325d, 0d)]
public void QuantileR1EmpiricalInvCDFOnShortSequence(double tau, double expected)
{
    // Expected values verified against R (type=1) and Mathematica:
    // R: quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=1)
    // Mathematica: Quantile[{-1,5,0,-3,10,-1/2,4,1/5,1,6},{0,1,1/2,1/5,7/10,1/100,99/100,13/25,13/40},{{0,0},{1,0}}]
    var values = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
    Assert.AreEqual(expected, Statistics.EmpiricalInvCDF(values, tau), 1e-14);
    Assert.AreEqual(expected, Statistics.EmpiricalInvCDFFunc(values)(tau), 1e-14);
    Assert.AreEqual(expected, Statistics.QuantileCustom(values, tau, QuantileDefinition.EmpiricalInvCDF), 1e-14);
    Assert.AreEqual(expected, Statistics.QuantileCustomFunc(values, QuantileDefinition.EmpiricalInvCDF)(tau), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, QuantileDefinition.EmpiricalInvCDF), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, 0d, 0d, 1d, 0d), 1e-14);
    // SortedArrayStatistics requires ascending input.
    Array.Sort(values);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, QuantileDefinition.EmpiricalInvCDF), 1e-14);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, 0d, 0d, 1d, 0d), 1e-14);
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 3/5d)]
[TestCase(0.2d, -3/4d)]
[TestCase(0.7d, 9/2d)]
[TestCase(0.01d, -3d)]
[TestCase(0.99d, 10d)]
[TestCase(0.52d, 1d)]
[TestCase(0.325d, 0d)]
public void QuantileR2EmpiricalInvCDFAverageOnShortSequence(double tau, double expected)
{
    // Expected values verified against R (type=2); Mathematica does not
    // support this definition:
    // R: quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=2)
    var values = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
    Assert.AreEqual(expected, Statistics.QuantileCustom(values, tau, QuantileDefinition.R2), 1e-14);
    Assert.AreEqual(expected, Statistics.QuantileCustomFunc(values, QuantileDefinition.R2)(tau), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, QuantileDefinition.EmpiricalInvCDFAverage), 1e-14);
    // SortedArrayStatistics requires ascending input.
    Array.Sort(values);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, QuantileDefinition.EmpiricalInvCDFAverage), 1e-14);
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 1/5d)]
[TestCase(0.2d, -1d)]
[TestCase(0.7d, 4d)]
[TestCase(0.01d, -3d)]
[TestCase(0.99d, 10d)]
[TestCase(0.52d, 1/5d)]
[TestCase(0.325d, -1/2d)]
public void QuantileR3NearestOnShortSequence(double tau, double expected)
{
    // Expected values verified against R (type=3) and Mathematica:
    // R: quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=3)
    // Mathematica: Quantile[{-1,5,0,-3,10,-1/2,4,1/5,1,6},{0,1,1/2,1/5,7/10,1/100,99/100,13/25,13/40},{{1/2,0},{0,0}}]
    var values = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
    Assert.AreEqual(expected, Statistics.QuantileCustom(values, tau, QuantileDefinition.R3), 1e-14);
    Assert.AreEqual(expected, Statistics.QuantileCustomFunc(values, QuantileDefinition.R3)(tau), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, QuantileDefinition.Nearest), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, 0.5d, 0d, 0d, 0d), 1e-14);
    // SortedArrayStatistics requires ascending input.
    Array.Sort(values);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, QuantileDefinition.Nearest), 1e-14);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, 0.5d, 0d, 0d, 0d), 1e-14);
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 1/5d)]
[TestCase(0.2d, -1d)]
[TestCase(0.7d, 4d)]
[TestCase(0.01d, -3d)]
[TestCase(0.99d, 48/5d)]
[TestCase(0.52d, 9/25d)]
[TestCase(0.325d, -3/8d)]
public void QuantileR4CaliforniaOnShortSequence(double tau, double expected)
{
    // Expected values verified against R (type=4) and Mathematica:
    // R: quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=4)
    // Mathematica: Quantile[{-1,5,0,-3,10,-1/2,4,1/5,1,6},{0,1,1/2,1/5,7/10,1/100,99/100,13/25,13/40},{{0,0},{0,1}}]
    var values = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
    Assert.AreEqual(expected, Statistics.QuantileCustom(values, tau, QuantileDefinition.R4), 1e-14);
    Assert.AreEqual(expected, Statistics.QuantileCustomFunc(values, QuantileDefinition.R4)(tau), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, QuantileDefinition.California), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, 0d, 0d, 0d, 1d), 1e-14);
    // SortedArrayStatistics requires ascending input.
    Array.Sort(values);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, QuantileDefinition.California), 1e-14);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, 0d, 0d, 0d, 1d), 1e-14);
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 3/5d)]
[TestCase(0.2d, -3/4d)]
[TestCase(0.7d, 9/2d)]
[TestCase(0.01d, -3d)]
[TestCase(0.99d, 10d)]
[TestCase(0.52d, 19/25d)]
[TestCase(0.325d, -1/8d)]
public void QuantileR5HydrologyOnShortSequence(double tau, double expected)
{
    // Expected values verified against R (type=5) and Mathematica:
    // R: quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=5)
    // Mathematica: Quantile[{-1,5,0,-3,10,-1/2,4,1/5,1,6},{0,1,1/2,1/5,7/10,1/100,99/100,13/25,13/40},{{1/2,0},{0,1}}]
    var values = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
    Assert.AreEqual(expected, Statistics.QuantileCustom(values, tau, QuantileDefinition.R5), 1e-14);
    Assert.AreEqual(expected, Statistics.QuantileCustomFunc(values, QuantileDefinition.R5)(tau), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, QuantileDefinition.Hydrology), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, 0.5d, 0d, 0d, 1d), 1e-14);
    // SortedArrayStatistics requires ascending input.
    Array.Sort(values);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, QuantileDefinition.Hydrology), 1e-14);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, 0.5d, 0d, 0d, 1d), 1e-14);
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 3/5d)]
[TestCase(0.2d, -9/10d)]
[TestCase(0.7d, 47/10d)]
[TestCase(0.01d, -3d)]
[TestCase(0.99d, 10d)]
[TestCase(0.52d, 97/125d)]
[TestCase(0.325d, -17/80d)]
public void QuantileR6WeibullOnShortSequence(double tau, double expected)
{
    // Expected values verified against R (type=6) and Mathematica:
    // R: quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=6)
    // Mathematica: Quantile[{-1,5,0,-3,10,-1/2,4,1/5,1,6},{0,1,1/2,1/5,7/10,1/100,99/100,13/25,13/40},{{0,1},{0,1}}]
    var values = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
    Assert.AreEqual(expected, Statistics.QuantileCustom(values, tau, QuantileDefinition.R6), 1e-14);
    Assert.AreEqual(expected, Statistics.QuantileCustomFunc(values, QuantileDefinition.R6)(tau), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, QuantileDefinition.Weibull), 1e-14);
    Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(values, tau, 0d, 1d, 0d, 1d), 1e-14);
    // SortedArrayStatistics requires ascending input.
    Array.Sort(values);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, QuantileDefinition.Weibull), 1e-14);
    Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(values, tau, 0d, 1d, 0d, 1d), 1e-14);
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 3/5d)]
[TestCase(0.2d, -3/5d)]
[TestCase(0.7d, 43/10d)]
[TestCase(0.01d, -141/50d)]
[TestCase(0.99d, 241/25d)]
[TestCase(0.52d, 93/125d)]
[TestCase(0.325d, -3/80d)]
public void QuantileR7ExcelOnShortSequence(double tau, double expected)
{
// Reference (R): quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=7)
// Reference (Mathematica): Quantile[{-1,5,0,-3,10,-1/2,4,1/5,1,6},{0,1,1/2,1/5,7/10,1/100,99/100,13/25,13/40},{{1,-1},{0,1}}]
var data = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
// R7 and Excel name the same definition; also verify the raw (a,b,c,d) parametrization.
Assert.AreEqual(expected, Statistics.QuantileCustom(data, tau, QuantileDefinition.R7), 1e-14);
Assert.AreEqual(expected, Statistics.QuantileCustomFunc(data, QuantileDefinition.R7)(tau), 1e-14);
Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(data, tau, QuantileDefinition.Excel), 1e-14);
Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(data, tau, 1d, -1d, 0d, 1d), 1e-14);
// The sorted-array variants require ascending order.
Array.Sort(data);
Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(data, tau, QuantileDefinition.Excel), 1e-14);
Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(data, tau, 1d, -1d, 0d, 1d), 1e-14);
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 3/5d)]
[TestCase(0.2d, -4/5d)]
[TestCase(0.7d, 137/30d)]
[TestCase(0.01d, -3d)]
[TestCase(0.99d, 10d)]
[TestCase(0.52d, 287/375d)]
[TestCase(0.325d, -37/240d)]
public void QuantileR8MedianOnShortSequence(double tau, double expected)
{
// Reference (R): quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=8)
// Reference (Mathematica): Quantile[{-1,5,0,-3,10,-1/2,4,1/5,1,6},{0,1,1/2,1/5,7/10,1/100,99/100,13/25,13/40},{{1/3,1/3},{0,1}}]
var data = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
// R8/Median is the library default, so the plain Quantile entry points are covered here too.
Assert.AreEqual(expected, Statistics.Quantile(data, tau), 1e-14);
Assert.AreEqual(expected, Statistics.QuantileCustom(data, tau, QuantileDefinition.R8), 1e-14);
Assert.AreEqual(expected, Statistics.QuantileCustomFunc(data, QuantileDefinition.R8)(tau), 1e-14);
Assert.AreEqual(expected, ArrayStatistics.QuantileInplace(data, tau), 1e-14);
Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(data, tau, QuantileDefinition.Median), 1e-14);
Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(data, tau, 1/3d, 1/3d, 0d, 1d), 1e-14);
// The sorted-array variants require ascending order.
Array.Sort(data);
Assert.AreEqual(expected, SortedArrayStatistics.Quantile(data, tau), 1e-14);
Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(data, tau, QuantileDefinition.Median), 1e-14);
Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(data, tau, 1/3d, 1/3d, 0d, 1d), 1e-14);
}
[TestCase(0d, -3d)]
[TestCase(1d, 10d)]
[TestCase(0.5d, 3/5d)]
[TestCase(0.2d, -63/80d)]
[TestCase(0.7d, 91/20d)]
[TestCase(0.01d, -3d)]
[TestCase(0.99d, 10d)]
[TestCase(0.52d, 191/250d)]
[TestCase(0.325d, -47/320d)]
public void QuantileR9NormalOnShortSequence(double tau, double expected)
{
// Reference (R): quantile(c(-1,5,0,-3,10,-0.5,4,0.2,1,6),probs=c(0,1,0.5,0.2,0.7,0.01,0.99,0.52,0.325),type=9)
// Reference (Mathematica): Quantile[{-1,5,0,-3,10,-1/2,4,1/5,1,6},{0,1,1/2,1/5,7/10,1/100,99/100,13/25,13/40},{{3/8,1/4},{0,1}}]
var data = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
// R9 and Normal name the same definition; also verify the raw (a,b,c,d) parametrization.
Assert.AreEqual(expected, Statistics.QuantileCustom(data, tau, QuantileDefinition.R9), 1e-14);
Assert.AreEqual(expected, Statistics.QuantileCustomFunc(data, QuantileDefinition.R9)(tau), 1e-14);
Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(data, tau, QuantileDefinition.Normal), 1e-14);
Assert.AreEqual(expected, ArrayStatistics.QuantileCustomInplace(data, tau, 3/8d, 1/4d, 0d, 1d), 1e-14);
// The sorted-array variants require ascending order.
Array.Sort(data);
Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(data, tau, QuantileDefinition.Normal), 1e-14);
Assert.AreEqual(expected, SortedArrayStatistics.QuantileCustom(data, tau, 3/8d, 1/4d, 0d, 1d), 1e-14);
}
[Test]
public void RanksSortedArray()
{
// Inputs are already in ascending order, as SortedArrayStatistics requires.
var unique = new double[] { 1, 2, 4, 7, 8, 9, 10, 12 };
var tied = new double[] { 1, 2, 2, 7, 9, 9, 10, 12 };
// R: rank(sort(data), ties.method="average")
Assert.That(SortedArrayStatistics.Ranks(unique, RankDefinition.Average), Is.EqualTo(new[] { 1.0, 2, 3, 4, 5, 6, 7, 8 }).AsCollection.Within(1e-8));
Assert.That(SortedArrayStatistics.Ranks(tied, RankDefinition.Average), Is.EqualTo(new[] { 1, 2.5, 2.5, 4, 5.5, 5.5, 7, 8 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="min")
Assert.That(SortedArrayStatistics.Ranks(unique, RankDefinition.Min), Is.EqualTo(new[] { 1.0, 2, 3, 4, 5, 6, 7, 8 }).AsCollection.Within(1e-8));
Assert.That(SortedArrayStatistics.Ranks(tied, RankDefinition.Min), Is.EqualTo(new[] { 1.0, 2, 2, 4, 5, 5, 7, 8 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="max")
Assert.That(SortedArrayStatistics.Ranks(unique, RankDefinition.Max), Is.EqualTo(new[] { 1.0, 2, 3, 4, 5, 6, 7, 8 }).AsCollection.Within(1e-8));
Assert.That(SortedArrayStatistics.Ranks(tied, RankDefinition.Max), Is.EqualTo(new[] { 1.0, 3, 3, 4, 6, 6, 7, 8 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="first")
Assert.That(SortedArrayStatistics.Ranks(unique, RankDefinition.First), Is.EqualTo(new[] { 1.0, 2, 3, 4, 5, 6, 7, 8 }).AsCollection.Within(1e-8));
Assert.That(SortedArrayStatistics.Ranks(tied, RankDefinition.First), Is.EqualTo(new[] { 1.0, 2, 3, 4, 5, 6, 7, 8 }).AsCollection.Within(1e-8));
}
[Test]
public void RanksArray()
{
var unique = new double[] { 1, 8, 12, 7, 2, 9, 10, 4 };
var tied = new double[] { 1, 9, 12, 7, 2, 9, 10, 2 };
// RanksInplace reorders its input, so hand each call a private copy.
Func<double[], double[]> copy = a => (double[])a.Clone();
// R: rank(data, ties.method="average")
Assert.That(ArrayStatistics.RanksInplace(copy(unique), RankDefinition.Average), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
Assert.That(ArrayStatistics.RanksInplace(copy(tied), RankDefinition.Average), Is.EqualTo(new[] { 1, 5.5, 8, 4, 2.5, 5.5, 7, 2.5 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="min")
Assert.That(ArrayStatistics.RanksInplace(copy(unique), RankDefinition.Min), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
Assert.That(ArrayStatistics.RanksInplace(copy(tied), RankDefinition.Min), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 5, 7, 2 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="max")
Assert.That(ArrayStatistics.RanksInplace(copy(unique), RankDefinition.Max), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
Assert.That(ArrayStatistics.RanksInplace(copy(tied), RankDefinition.Max), Is.EqualTo(new[] { 1.0, 6, 8, 4, 3, 6, 7, 3 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="first")
Assert.That(ArrayStatistics.RanksInplace(copy(unique), RankDefinition.First), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
Assert.That(ArrayStatistics.RanksInplace(copy(tied), RankDefinition.First), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
}
[Test]
public void Ranks()
{
var unique = new double[] { 1, 8, 12, 7, 2, 9, 10, 4 };
var tied = new double[] { 1, 9, 12, 7, 2, 9, 10, 2 };
// R: rank(data, ties.method="average")
Assert.That(Statistics.Ranks(unique, RankDefinition.Average), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
Assert.That(Statistics.Ranks(tied, RankDefinition.Average), Is.EqualTo(new[] { 1, 5.5, 8, 4, 2.5, 5.5, 7, 2.5 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="min")
Assert.That(Statistics.Ranks(unique, RankDefinition.Min), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
Assert.That(Statistics.Ranks(tied, RankDefinition.Min), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 5, 7, 2 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="max")
Assert.That(Statistics.Ranks(unique, RankDefinition.Max), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
Assert.That(Statistics.Ranks(tied, RankDefinition.Max), Is.EqualTo(new[] { 1.0, 6, 8, 4, 3, 6, 7, 3 }).AsCollection.Within(1e-8));
// R: rank(data, ties.method="first")
Assert.That(Statistics.Ranks(unique, RankDefinition.First), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
Assert.That(Statistics.Ranks(tied, RankDefinition.First), Is.EqualTo(new[] { 1.0, 5, 8, 4, 2, 6, 7, 3 }).AsCollection.Within(1e-8));
}
[Test]
public void EmpiricalCDF()
{
// Reference: R ecdf(data)(x)
var data = new double[] { 1, 9, 12, 7, 2, 9, 10, 2 };
// Pairs of { x, expected CDF at x } covering below-min, ties, gaps and above-max.
var cases = new[,]
{
{ -1.0, 0.0 }, { 0.0, 0.0 }, { 1.0, 0.125 }, { 2.0, 0.375 },
{ 3.0, 0.375 }, { 4.0, 0.375 }, { 5.0, 0.375 }, { 6.0, 0.375 },
{ 7.0, 0.5 }, { 8.0, 0.5 }, { 9.0, 0.75 }, { 10.0, 0.875 },
{ 11.0, 0.875 }, { 12.0, 1.0 }, { 13.0, 1.0 }
};
for (var i = 0; i < cases.GetLength(0); i++)
{
Assert.That(Statistics.EmpiricalCDF(data, cases[i, 0]), Is.EqualTo(cases[i, 1]).Within(1e-8));
}
}
[Test]
public void EmpiricalCDFSortedArray()
{
// Reference: R ecdf(data)(x)
var data = new double[] { 1, 9, 12, 7, 2, 9, 10, 2 };
// The sorted-array variants require ascending order.
Array.Sort(data);
// Pairs of { x, expected CDF at x } covering below-min, ties, gaps and above-max.
var cases = new[,]
{
{ -1.0, 0.0 }, { 0.0, 0.0 }, { 1.0, 0.125 }, { 2.0, 0.375 },
{ 3.0, 0.375 }, { 4.0, 0.375 }, { 5.0, 0.375 }, { 6.0, 0.375 },
{ 7.0, 0.5 }, { 8.0, 0.5 }, { 9.0, 0.75 }, { 10.0, 0.875 },
{ 11.0, 0.875 }, { 12.0, 1.0 }, { 13.0, 1.0 }
};
for (var i = 0; i < cases.GetLength(0); i++)
{
Assert.That(SortedArrayStatistics.EmpiricalCDF(data, cases[i, 0]), Is.EqualTo(cases[i, 1]).Within(1e-8));
}
// QuantileRank with the EmpiricalCDF definition must agree with EmpiricalCDF.
for (var i = 0; i < cases.GetLength(0); i++)
{
Assert.That(SortedArrayStatistics.QuantileRank(data, cases[i, 0], RankDefinition.EmpiricalCDF), Is.EqualTo(cases[i, 1]).Within(1e-8));
}
}
[Test]
public void MedianOnShortSequence()
{
// Even-length case; reference results:
// R: median(c(-1,5,0,-3,10,-0.5,4,0.2,1,6))
// Mathematica: Median[{-1,5,0,-3,10,-1/2,4,1/5,1,6}]
var even = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1, 6 };
Assert.AreEqual(0.6d, Statistics.Median(even), 1e-14);
Assert.AreEqual(0.6d, ArrayStatistics.MedianInplace(even), 1e-14);
Array.Sort(even);
Assert.AreEqual(0.6d, SortedArrayStatistics.Median(even), 1e-14);
// Odd-length case; reference results:
// R: median(c(-1,5,0,-3,10,-0.5,4,0.2,1))
// Mathematica: Median[{-1,5,0,-3,10,-1/2,4,1/5,1}]
var odd = new[] { -1, 5, 0, -3, 10, -0.5, 4, 0.2, 1 };
Assert.AreEqual(0.2d, Statistics.Median(odd), 1e-14);
Assert.AreEqual(0.2d, ArrayStatistics.MedianInplace(odd), 1e-14);
// BUG FIX: the original called Array.Sort(even) here (a no-op, 'even' was
// already sorted), leaving 'odd' only partially ordered by MedianInplace
// before being passed to SortedArrayStatistics.Median, which requires
// fully sorted input.
Array.Sort(odd);
Assert.AreEqual(0.2d, SortedArrayStatistics.Median(odd), 1e-14);
}
[Test]
public void MedianOnLongConstantSequence()
{
// A constant sequence has that constant as its median, for either parity of length.
var evenLength = Generate.Repeat(100000, 2.0);
Assert.AreEqual(2.0, SortedArrayStatistics.Median(evenLength), 1e-14);
var oddLength = Generate.Repeat(100001, 2.0);
Assert.AreEqual(2.0, SortedArrayStatistics.Median(oddLength), 1e-14);
}
/// <summary>
/// Validates Mean/Variance/StdDev/RMS on a longer fixed-seed random sequence
/// with a large mean but only a very small variance, verifying numerical
/// stability. Naive summation algorithms generally fail this test.
/// </summary>
[Test]
public void StabilityMeanVariance()
{
// Samples centered around 10^9 with sigma = 2: prone to catastrophic cancellation.
// Note: each Take(10000) below enumerates the stream afresh, drawing a new batch
// from the same deterministic generator — do not materialize it once.
var dist = new Normal(1e+9, 2, new MersenneTwister(100));
AssertHelpers.AlmostEqualRelative(1e+9, Statistics.Mean(dist.Samples().Take(10000)), 10);
AssertHelpers.AlmostEqualRelative(4d, Statistics.Variance(dist.Samples().Take(10000)), 0);
AssertHelpers.AlmostEqualRelative(2d, Statistics.StandardDeviation(dist.Samples().Take(10000)), 1);
AssertHelpers.AlmostEqualRelative(1e+9, Statistics.RootMeanSquare(dist.Samples().Take(10000)), 10);
AssertHelpers.AlmostEqualRelative(1e+9, ArrayStatistics.Mean(dist.Samples().Take(10000).ToArray()), 10);
AssertHelpers.AlmostEqualRelative(4d, ArrayStatistics.Variance(dist.Samples().Take(10000).ToArray()), 0);
AssertHelpers.AlmostEqualRelative(2d, ArrayStatistics.StandardDeviation(dist.Samples().Take(10000).ToArray()), 1);
AssertHelpers.AlmostEqualRelative(1e+9, ArrayStatistics.RootMeanSquare(dist.Samples().Take(10000).ToArray()), 10);
AssertHelpers.AlmostEqualRelative(1e+9, StreamingStatistics.Mean(dist.Samples().Take(10000)), 10);
AssertHelpers.AlmostEqualRelative(4d, StreamingStatistics.Variance(dist.Samples().Take(10000)), 0);
AssertHelpers.AlmostEqualRelative(2d, StreamingStatistics.StandardDeviation(dist.Samples().Take(10000)), 1);
AssertHelpers.AlmostEqualRelative(1e+9, StreamingStatistics.RootMeanSquare(dist.Samples().Take(10000)), 10);
AssertHelpers.AlmostEqualRelative(1e+9, new RunningStatistics(dist.Samples().Take(10000)).Mean, 10);
AssertHelpers.AlmostEqualRelative(4d, new RunningStatistics(dist.Samples().Take(10000)).Variance, 0);
AssertHelpers.AlmostEqualRelative(2d, new RunningStatistics(dist.Samples().Take(10000)).StandardDeviation, 1);
}
[TestCase("lottery")]
[TestCase("lew")]
[TestCase("mavro")]
[TestCase("michelso")]
[TestCase("numacc1")]
public void CovarianceConsistentWithVariance(string dataSet)
{
// Cov(X, X) must equal Var(X) for every implementation.
var samples = _data[dataSet].Data;
AssertHelpers.AlmostEqualRelative(Statistics.Variance(samples), Statistics.Covariance(samples, samples), 10);
AssertHelpers.AlmostEqualRelative(ArrayStatistics.Variance(samples), ArrayStatistics.Covariance(samples, samples), 10);
AssertHelpers.AlmostEqualRelative(StreamingStatistics.Variance(samples), StreamingStatistics.Covariance(samples, samples), 10);
}
[TestCase("lottery")]
[TestCase("lew")]
[TestCase("mavro")]
[TestCase("michelso")]
[TestCase("numacc1")]
public void PopulationCovarianceConsistentWithPopulationVariance(string dataSet)
{
// Population Cov(X, X) must equal population Var(X) for every implementation.
var samples = _data[dataSet].Data;
AssertHelpers.AlmostEqualRelative(Statistics.PopulationVariance(samples), Statistics.PopulationCovariance(samples, samples), 10);
AssertHelpers.AlmostEqualRelative(ArrayStatistics.PopulationVariance(samples), ArrayStatistics.PopulationCovariance(samples, samples), 10);
AssertHelpers.AlmostEqualRelative(StreamingStatistics.PopulationVariance(samples), StreamingStatistics.PopulationCovariance(samples, samples), 10);
}
[Test]
public void CovarianceIsSymmetric()
{
// Cov(A, B) == Cov(B, A); restricted to 200 points to keep the test fast.
var a = _data["lottery"].Data.Take(200).ToArray();
var b = _data["lew"].Data.Take(200).ToArray();
AssertHelpers.AlmostEqualRelative(Statistics.Covariance(a, b), Statistics.Covariance(b, a), 12);
AssertHelpers.AlmostEqualRelative(StreamingStatistics.Covariance(a, b), StreamingStatistics.Covariance(b, a), 12);
// ArrayStatistics calls get defensive copies — presumably to guard against
// in-place mutation of the shared arrays; confirm against its implementation.
AssertHelpers.AlmostEqualRelative(ArrayStatistics.Covariance(a.ToArray(), b.ToArray()), ArrayStatistics.Covariance(b.ToArray(), a.ToArray()), 12);
AssertHelpers.AlmostEqualRelative(Statistics.PopulationCovariance(a, b), Statistics.PopulationCovariance(b, a), 12);
AssertHelpers.AlmostEqualRelative(StreamingStatistics.PopulationCovariance(a, b), StreamingStatistics.PopulationCovariance(b, a), 12);
AssertHelpers.AlmostEqualRelative(ArrayStatistics.PopulationCovariance(a.ToArray(), b.ToArray()), ArrayStatistics.PopulationCovariance(b.ToArray(), a.ToArray()), 12);
}
[TestCase("lottery")]
[TestCase("lew")]
[TestCase("mavro")]
[TestCase("michelso")]
[TestCase("numacc1")]
[TestCase("numacc2")]
[TestCase("meixner")]
public void ArrayStatisticsConsistentWithStreamimgStatistics(string dataSet)
{
// NOTE: "Streamimg" typo is kept — the test name is part of the public surface.
var samples = _data[dataSet].Data;
Assert.That(ArrayStatistics.Mean(samples), Is.EqualTo(StreamingStatistics.Mean(samples)).Within(1e-15), "Mean");
Assert.That(ArrayStatistics.Variance(samples), Is.EqualTo(StreamingStatistics.Variance(samples)).Within(1e-15), "Variance");
Assert.That(ArrayStatistics.StandardDeviation(samples), Is.EqualTo(StreamingStatistics.StandardDeviation(samples)).Within(1e-15), "StandardDeviation");
Assert.That(ArrayStatistics.PopulationVariance(samples), Is.EqualTo(StreamingStatistics.PopulationVariance(samples)).Within(1e-15), "PopulationVariance");
Assert.That(ArrayStatistics.PopulationStandardDeviation(samples), Is.EqualTo(StreamingStatistics.PopulationStandardDeviation(samples)).Within(1e-15), "PopulationStandardDeviation");
Assert.That(ArrayStatistics.Covariance(samples, samples), Is.EqualTo(StreamingStatistics.Covariance(samples, samples)).Within(1e-10), "Covariance");
Assert.That(ArrayStatistics.PopulationCovariance(samples, samples), Is.EqualTo(StreamingStatistics.PopulationCovariance(samples, samples)).Within(1e-10), "PopulationCovariance");
Assert.That(ArrayStatistics.RootMeanSquare(samples), Is.EqualTo(StreamingStatistics.RootMeanSquare(samples)).Within(1e-15), "RootMeanSquare");
}
[TestCase("lottery")]
[TestCase("lew")]
[TestCase("mavro")]
[TestCase("michelso")]
[TestCase("numacc1")]
[TestCase("numacc2")]
[TestCase("meixner")]
public void RunningStatisticsConsistentWithDescriptiveStatistics(string dataSet)
{
// Both facades should agree on the same sample set.
var samples = _data[dataSet].Data;
var rs = new RunningStatistics(samples);
var ds = new DescriptiveStatistics(samples);
Assert.That(rs.Minimum, Is.EqualTo(ds.Minimum), "Minimum");
Assert.That(rs.Maximum, Is.EqualTo(ds.Maximum), "Maximum");
Assert.That(rs.Mean, Is.EqualTo(ds.Mean).Within(1e-15), "Mean");
Assert.That(rs.Variance, Is.EqualTo(ds.Variance).Within(1e-15), "Variance");
Assert.That(rs.StandardDeviation, Is.EqualTo(ds.StandardDeviation).Within(1e-15), "StandardDeviation");
Assert.That(rs.Skewness, Is.EqualTo(ds.Skewness).Within(1e-15), "Skewness");
Assert.That(rs.Kurtosis, Is.EqualTo(ds.Kurtosis).Within(1e-14), "Kurtosis");
}
[Test]
public void MinimumOfEmptyMustBeNaN()
{
// Minimum of no data is NaN; any non-empty data yields a real number.
var empty = new double[0];
var single = new[] { 2d };
Assert.That(Statistics.Minimum(empty), Is.NaN);
Assert.That(Statistics.Minimum(single), Is.Not.NaN);
Assert.That(ArrayStatistics.Minimum(empty), Is.NaN);
Assert.That(ArrayStatistics.Minimum(single), Is.Not.NaN);
Assert.That(SortedArrayStatistics.Minimum(empty), Is.NaN);
Assert.That(SortedArrayStatistics.Minimum(single), Is.Not.NaN);
Assert.That(StreamingStatistics.Minimum(empty), Is.NaN);
Assert.That(StreamingStatistics.Minimum(single), Is.Not.NaN);
Assert.That(new RunningStatistics(empty).Minimum, Is.NaN);
Assert.That(new RunningStatistics(single).Minimum, Is.Not.NaN);
}
[Test]
public void MaximumOfEmptyMustBeNaN()
{
// Maximum of no data is NaN; any non-empty data yields a real number.
var empty = new double[0];
var single = new[] { 2d };
Assert.That(Statistics.Maximum(empty), Is.NaN);
Assert.That(Statistics.Maximum(single), Is.Not.NaN);
Assert.That(ArrayStatistics.Maximum(empty), Is.NaN);
Assert.That(ArrayStatistics.Maximum(single), Is.Not.NaN);
Assert.That(SortedArrayStatistics.Maximum(empty), Is.NaN);
Assert.That(SortedArrayStatistics.Maximum(single), Is.Not.NaN);
Assert.That(StreamingStatistics.Maximum(empty), Is.NaN);
Assert.That(StreamingStatistics.Maximum(single), Is.Not.NaN);
Assert.That(new RunningStatistics(empty).Maximum, Is.NaN);
Assert.That(new RunningStatistics(single).Maximum, Is.Not.NaN);
}
[Test]
public void MeanOfEmptyMustBeNaN()
{
// Mean of no data is NaN; any non-empty data yields a real number.
var empty = new double[0];
var single = new[] { 2d };
Assert.That(Statistics.Mean(empty), Is.NaN);
Assert.That(Statistics.Mean(single), Is.Not.NaN);
Assert.That(ArrayStatistics.Mean(empty), Is.NaN);
Assert.That(ArrayStatistics.Mean(single), Is.Not.NaN);
Assert.That(StreamingStatistics.Mean(empty), Is.NaN);
Assert.That(StreamingStatistics.Mean(single), Is.Not.NaN);
Assert.That(new RunningStatistics(empty).Mean, Is.NaN);
Assert.That(new RunningStatistics(single).Mean, Is.Not.NaN);
}
[Test]
public void RootMeanSquareOfEmptyMustBeNaN()
{
// RMS of no data is NaN; any non-empty data yields a real number.
var empty = new double[0];
var single = new[] { 2d };
Assert.That(Statistics.RootMeanSquare(empty), Is.NaN);
Assert.That(Statistics.RootMeanSquare(single), Is.Not.NaN);
Assert.That(ArrayStatistics.RootMeanSquare(empty), Is.NaN);
Assert.That(ArrayStatistics.RootMeanSquare(single), Is.Not.NaN);
Assert.That(StreamingStatistics.RootMeanSquare(empty), Is.NaN);
Assert.That(StreamingStatistics.RootMeanSquare(single), Is.Not.NaN);
}
[Test]
public void SampleVarianceOfEmptyAndSingleMustBeNaN()
{
// Sample variance divides by n-1, so it is undefined (NaN) for n < 2.
var empty = new double[0];
var single = new[] { 2d };
var pair = new[] { 2d, 3d };
Assert.That(Statistics.Variance(empty), Is.NaN);
Assert.That(Statistics.Variance(single), Is.NaN);
Assert.That(Statistics.Variance(pair), Is.Not.NaN);
Assert.That(ArrayStatistics.Variance(empty), Is.NaN);
Assert.That(ArrayStatistics.Variance(single), Is.NaN);
Assert.That(ArrayStatistics.Variance(pair), Is.Not.NaN);
Assert.That(StreamingStatistics.Variance(empty), Is.NaN);
Assert.That(StreamingStatistics.Variance(single), Is.NaN);
Assert.That(StreamingStatistics.Variance(pair), Is.Not.NaN);
Assert.That(new RunningStatistics(single).Variance, Is.NaN);
Assert.That(new RunningStatistics(pair).Variance, Is.Not.NaN);
}
[Test]
public void PopulationVarianceOfEmptyMustBeNaN()
{
// Population variance divides by n, so only n == 0 is undefined (NaN).
var empty = new double[0];
var single = new[] { 2d };
var pair = new[] { 2d, 3d };
Assert.That(Statistics.PopulationVariance(empty), Is.NaN);
Assert.That(Statistics.PopulationVariance(single), Is.Not.NaN);
Assert.That(Statistics.PopulationVariance(pair), Is.Not.NaN);
Assert.That(ArrayStatistics.PopulationVariance(empty), Is.NaN);
Assert.That(ArrayStatistics.PopulationVariance(single), Is.Not.NaN);
Assert.That(ArrayStatistics.PopulationVariance(pair), Is.Not.NaN);
Assert.That(StreamingStatistics.PopulationVariance(empty), Is.NaN);
Assert.That(StreamingStatistics.PopulationVariance(single), Is.Not.NaN);
Assert.That(StreamingStatistics.PopulationVariance(pair), Is.Not.NaN);
// NOTE(review): RunningStatistics deviates from the other implementations
// for a single sample — asserted as-is; confirm whether this is intended.
Assert.That(new RunningStatistics(single).PopulationVariance, Is.NaN);
Assert.That(new RunningStatistics(pair).PopulationVariance, Is.Not.NaN);
}
/// <summary>
/// Regression test for http://mathnetnumerics.codeplex.com/workitem/5667
/// </summary>
[Test]
public void Median_CodeplexIssue5667()
{
// Materialize once: the original kept a lazy Select and enumerated it twice
// (Statistics.Median, then ToArray), re-reading and re-parsing the CSV file.
var array = Data.ReadAllLines("Codeplex-5667.csv").Select(double.Parse).ToArray();
Assert.AreEqual(1.0, Statistics.Median(array));
// MedianInplace may reorder the array; that is fine, we sort it next anyway.
Assert.AreEqual(1.0, ArrayStatistics.MedianInplace(array));
Array.Sort(array);
Assert.AreEqual(1.0, SortedArrayStatistics.Median(array));
}
[Test]
public void VarianceDenominatorMustNotOverflow_GitHubIssue137()
{
// 46342^2 exceeds Int32.MaxValue, so a 32-bit n*n (or n*(n-1)) denominator would overflow.
var samples = new double[46342];
samples[samples.Length - 1] = 1000d;
Assert.AreEqual(21.578697, samples.Variance(), 1e-5);
Assert.AreEqual(21.578231, samples.PopulationVariance(), 1e-5);
var running = new RunningStatistics(samples);
Assert.AreEqual(21.578697, running.Variance, 1e-5);
Assert.AreEqual(21.578231, running.PopulationVariance, 1e-5);
}
[Test]
public void MedianIsRobustOnCloseInfinities()
{
// Opposite infinities must not poison the median of the finite values between them.
var inf = double.PositiveInfinity;
var ninf = double.NegativeInfinity;
Assert.That(Statistics.Median(new[] { 2.0, ninf, inf }), Is.EqualTo(2.0));
Assert.That(Statistics.Median(new[] { 2.0, ninf, 3.0, inf }), Is.EqualTo(2.5));
// Exercise every placement of the infinities for the in-place variant.
Assert.That(ArrayStatistics.MedianInplace(new[] { 2.0, ninf, inf }), Is.EqualTo(2.0));
Assert.That(ArrayStatistics.MedianInplace(new[] { ninf, 2.0, inf }), Is.EqualTo(2.0));
Assert.That(ArrayStatistics.MedianInplace(new[] { ninf, inf, 2.0 }), Is.EqualTo(2.0));
Assert.That(ArrayStatistics.MedianInplace(new[] { ninf, 2.0, 3.0, inf }), Is.EqualTo(2.5));
Assert.That(ArrayStatistics.MedianInplace(new[] { ninf, 2.0, inf, 3.0 }), Is.EqualTo(2.5));
Assert.That(SortedArrayStatistics.Median(new[] { ninf, 2.0, inf }), Is.EqualTo(2.0));
Assert.That(SortedArrayStatistics.Median(new[] { ninf, 2.0, 3.0, inf }), Is.EqualTo(2.5));
}
[Test]
public void RobustOnLargeSampleSets()
{
// Periodic signal 0, 0.25, 0.5, 0.75, 0, 0.25, 0.5, 0.75, ...
var shorter = Generate.Periodic(4*4096, 4, 1);
var longer = Generate.Periodic(4*32768, 4, 1);
// Mean of one period (0+0.25+0.5+0.75)/4 = 0.375.
// FIX: one expected value was written as the inconsistent literal 00.375.
Assert.That(Statistics.Mean(shorter), Is.EqualTo(0.375).Within(1e-14), "Statistics.Mean: shorter");
Assert.That(Statistics.Mean(longer), Is.EqualTo(0.375).Within(1e-14), "Statistics.Mean: longer");
Assert.That(new DescriptiveStatistics(shorter).Mean, Is.EqualTo(0.375).Within(1e-14), "DescriptiveStatistics.Mean: shorter");
Assert.That(new DescriptiveStatistics(longer).Mean, Is.EqualTo(0.375).Within(1e-14), "DescriptiveStatistics.Mean: longer");
// Mean square of one period is (0 + 0.0625 + 0.25 + 0.5625)/4 = 0.21875.
Assert.That(Statistics.RootMeanSquare(shorter), Is.EqualTo(Math.Sqrt(0.21875)).Within(1e-14), "Statistics.RootMeanSquare: shorter");
Assert.That(Statistics.RootMeanSquare(longer), Is.EqualTo(Math.Sqrt(0.21875)).Within(1e-14), "Statistics.RootMeanSquare: longer");
// The signal is symmetric around its mean, so skewness is zero.
Assert.That(Statistics.Skewness(shorter), Is.EqualTo(0.0).Within(1e-12), "Statistics.Skewness: shorter");
Assert.That(Statistics.Skewness(longer), Is.EqualTo(0.0).Within(1e-12), "Statistics.Skewness: longer");
Assert.That(new DescriptiveStatistics(shorter).Skewness, Is.EqualTo(0.0).Within(1e-12), "DescriptiveStatistics.Skewness: shorter");
Assert.That(new DescriptiveStatistics(longer).Skewness, Is.EqualTo(0.0).Within(1e-12), "DescriptiveStatistics.Skewness: longer");
Assert.That(Statistics.Kurtosis(shorter), Is.EqualTo(-1.36).Within(1e-4), "Statistics.Kurtosis: shorter");
Assert.That(Statistics.Kurtosis(longer), Is.EqualTo(-1.36).Within(1e-4), "Statistics.Kurtosis: longer");
Assert.That(new DescriptiveStatistics(shorter).Kurtosis, Is.EqualTo(-1.36).Within(1e-4), "DescriptiveStatistics.Kurtosis: shorter");
Assert.That(new DescriptiveStatistics(longer).Kurtosis, Is.EqualTo(-1.36).Within(1e-4), "DescriptiveStatistics.Kurtosis: longer");
}
[Test]
public void RootMeanSquareOfSinusoidal()
{
// RMS of a pure sine with amplitude A is A/sqrt(2).
var signal = Generate.Sinusoidal(128, 64, 16, 2.0);
var expected = 2.0/Constants.Sqrt2;
Assert.That(Statistics.RootMeanSquare(signal), Is.EqualTo(expected).Within(1e-12));
}
[Test]
public void EntropyIsMinimum()
{
// A constant sample is a single-outcome distribution, so entropy is exactly 0 bits.
// FIX: replaced Assert.That(expr == 0) boolean asserts — which report only
// "Expected: True" on failure — with constraint asserts that show the actual value.
var constantOnes = new double[] { 1, 1, 1, 1, 1 };
Assert.That(StreamingStatistics.Entropy(constantOnes), Is.EqualTo(0.0));
var constantZeros = new double[] { 0, 0 };
Assert.That(StreamingStatistics.Entropy(constantZeros), Is.EqualTo(0.0));
}
[Test]
public void EntropyIsMaximum()
{
// n equiprobable distinct values give the maximum entropy log2(n) bits,
// which is exactly representable here (log2(2)=1, log2(4)=2).
// FIX: replaced Assert.That(expr == x) boolean asserts — which report only
// "Expected: True" on failure — with constraint asserts that show the actual value.
var twoDistinct = new double[] { 1, 2 };
Assert.That(StreamingStatistics.Entropy(twoDistinct), Is.EqualTo(1.0));
var fourDistinct = new double[] { 1, 2, 3, 4 };
Assert.That(StreamingStatistics.Entropy(fourDistinct), Is.EqualTo(2.0));
}
[Test]
public void EntropyOfNaNIsNaN()
{
// Any NaN in the sample must propagate into the entropy result.
var withNaN = new double[] { 1, 2, double.NaN };
Assert.That(StreamingStatistics.Entropy(withNaN), Is.NaN);
}
}
}
// ReSharper restore InvokeAsExtensionMethod
| |
//
// Created by Ian Copland on 2015-11-10
//
// The MIT License (MIT)
//
// Copyright (c) 2015 Tag Games Limited
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
using UnityEngine;
using System.Collections;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text.RegularExpressions;
namespace SdkCore
{
/// <summary>
/// <para>Provides a means to make both GET and POST requests to the given
/// web-server. Requests can be both HTTP and HTTPS.</para>
///
/// <para>This is thread-safe.</para>
/// </summary>
public sealed class HttpSystem
{
private TaskScheduler m_taskScheduler;
/// <summary>
/// Initializes a new instance of the HTTP system with the given task
/// scheduler.
/// </summary>
///
/// <param name="taskScheduler">The task scheduler; must not be null.
/// NOTE(review): on a null argument ReleaseAssert presumably aborts or
/// throws even in release builds — confirm its behavior.</param>
public HttpSystem(TaskScheduler taskScheduler)
{
// Validate eagerly so a null scheduler fails at construction rather than on first request.
ReleaseAssert.IsTrue(taskScheduler != null, "The task scheduler in a HTTP request system must not be null.");
m_taskScheduler = taskScheduler;
}
/// <summary>
/// Makes a HTTP GET request with the given request object. This is
/// performed asynchronously, with the callback block run on a background
/// thread.
/// </summary>
///
/// <param name="request">The GET HTTP request.</param>
/// <param name="callback">The callback which will provide the response from the server.
/// The callback will be made on a background thread.</param>
public void SendRequest(HttpGetRequest request, Action<HttpGetRequest, HttpResponse> callback)
{
ReleaseAssert.IsTrue(request != null, "The HTTP GET request must not be null when sending a request.");
ReleaseAssert.IsTrue(callback != null, "The callback must not be null when sending a request.");
SendRequest(request.Url, request.Headers, null, (HttpResponse response) =>
{
callback(request, response);
});
}
/// <summary>
/// Makes a HTTP POST request with the given request object. This is
/// performed asynchronously, with the callback block run on a background
/// thread.
/// </summary>
///
/// <param name="request">The POST HTTP request.</param>
/// <param name="callback">The callback which will provide the response from the server.
/// The callback will be made on a background thread.</param>
public void SendRequest(HttpPostRequest request, Action<HttpPostRequest, HttpResponse> callback)
{
ReleaseAssert.IsTrue(request != null, "The HTTP POST request must not be null when sending a request.");
ReleaseAssert.IsTrue(callback != null, "The callback must not be null when sending a request.");
var headers = new Dictionary<string, string>(request.Headers);
if (request.ContentType != null)
{
headers.Add("Content-Type", request.ContentType);
}
SendRequest(request.Url, headers, request.Body, (HttpResponse response) =>
{
callback(request, response);
});
}
/// <summary>
/// Provides the means to send both GET and POST requests depending on the
/// input data.
/// </summary>
///
/// <param name="url">The URL that the request is targetting.</param>
/// <param name="headers">The headers for the HTTP request.</param>
/// <param name="body">The body of the request. If null, a GET request will be sent.</param>
/// <param name="callback">The callback providing the response from the server.</param>
private void SendRequest(String url, IDictionary<string, string> headers, byte[] body, Action<HttpResponse> callback)
{
ReleaseAssert.IsTrue(url != null, "The URL must not be null when sending a request.");
ReleaseAssert.IsTrue(headers != null, "The headers must not be null when sending a request.");
ReleaseAssert.IsTrue(callback != null, "The callback must not be null when sending a request.");
// Unity's WWW class works with the Dictionary concrete class rather than the abstract
// IDictionary. Rather than cast a copy is made so we can be sure other dictionary types
// will work.
Dictionary<string, string> headersConcreteDict = new Dictionary<string, string>(headers);
m_taskScheduler.ScheduleMainThreadTask(() =>
{
var www = new WWW(url, body, headersConcreteDict);
m_taskScheduler.StartCoroutine(ProcessRequest(www, callback));
});
}
/// <summary>
/// <para>The coroutine for processing the HTTP request. This will yield until the
/// request has completed then parse the information required by a HTTP response
/// from the WWW object.</para>
/// </summary>
///
/// <returns>The coroutine enumerator.</returns>
///
/// <param name="www">The WWW object.</param>
/// <param name="callback">The callback providing the response from the server.</param>
private IEnumerator ProcessRequest(WWW www, Action<HttpResponse> callback)
{
ReleaseAssert.IsTrue(www != null, "The WWW must not be null when sending a request.");
ReleaseAssert.IsTrue(callback != null, "The callback must not be null when sending a request.");
yield return www;
HttpResponseDesc desc = null;
if (string.IsNullOrEmpty(www.error))
{
ReleaseAssert.IsTrue(www.responseHeaders != null, "A successful HTTP response must have a headers object.");
desc = new HttpResponseDesc(HttpResult.Success);
desc.Headers = new Dictionary<string, string>(www.responseHeaders);
if (www.bytes != null)
{
desc.Body = www.bytes;
}
var httpStatus = www.responseHeaders ["STATUS"];
ReleaseAssert.IsTrue(!string.IsNullOrEmpty(httpStatus), "A successful HTTP response must have a HTTP status value in the header.");
desc.HttpResponseCode = ParseHttpStatus(httpStatus);
}
else
{
int httpResponseCode = ParseHttpError(www.error);
if (httpResponseCode != 0)
{
desc = new HttpResponseDesc(HttpResult.Success);
desc.Headers = new Dictionary<string, string>(www.responseHeaders);
if (www.bytes != null)
{
desc.Body = www.bytes;
}
desc.HttpResponseCode = httpResponseCode;
}
else
{
desc = new HttpResponseDesc(HttpResult.CouldNotConnect);
}
}
HttpResponse response = new HttpResponse(desc);
m_taskScheduler.ScheduleBackgroundTask(() =>
{
callback(response);
});
}
/// <summary>
/// Parses the HTTP response code from the given HTTP STATUS string. The string should
/// be in the format 'HTTP/X YYY...' or 'HTTP/X.X YYY...' where YYY is the response
/// code.
/// </summary>
///
/// <returns>The response code.</returns>
///
/// <param name="httpStatus">The HTTP status string in the format 'HTTP/X YYY...'
/// or 'HTTP/X.X YYY...'.</param>
private int ParseHttpStatus(string httpStatus)
{
ReleaseAssert.IsTrue(httpStatus != null, "The HTTP status string must not be null when parsing a response code.");
var regex = new Regex("[a-zA-Z]*\\/\\d+(\\.\\d)?\\s(?<httpResponseCode>\\d+)\\s");
var match = regex.Match(httpStatus);
ReleaseAssert.IsTrue(match.Groups.Count == 3, "There must be exactly 3 match groups when using a regex on a HTTP status.");
var responseCodeString = match.Groups ["httpResponseCode"].Value;
ReleaseAssert.IsTrue(responseCodeString != null, "The response code string cannot be null when using a regex on a HTTP status.");
return Int32.Parse(responseCodeString);
}
/// <summary>
/// Parses the HTTP response code from the given HTTP error string. The string should
/// be in the format 'XXX ...' where XXX is the HTTP response code.
/// </summary>
///
/// <returns>The response code parsed from the error, or 0 if there wasn't one.</returns>
///
/// <param name="httpError">The HTTP error string.</param>
private int ParseHttpError(string httpError)
{
ReleaseAssert.IsTrue(httpError != null, "The HTTP error string must not be null when parsing a response code.");
var regex = new Regex("(?<httpResponseCode>[0-9][0-9][0-9])\\s");
if (regex.IsMatch(httpError))
{
var match = regex.Match(httpError);
ReleaseAssert.IsTrue(match.Groups.Count == 2, "There must be exactly 2 match groups when using a regex on a HTTP error.");
var responseCodeString = match.Groups ["httpResponseCode"].Value;
ReleaseAssert.IsTrue(responseCodeString != null, "The response code string cannot be null when using a regex on a HTTP error.");
return Int32.Parse(responseCodeString);
}
return 0;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Orleans.CodeGenerator.Compatibility;
using Orleans.CodeGenerator.Model;
using Orleans.CodeGenerator.Utilities;
using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory;
namespace Orleans.CodeGenerator.Generators
{
/// <summary>
/// Generates GrainReference implementations for grains.
/// </summary>
/// <summary>
/// Generates GrainReference implementations for grains.
/// </summary>
internal static class GrainReferenceGenerator
{
    /// <summary>
    /// Returns the name of the generated class for the provided type.
    /// </summary>
    internal static string GetGeneratedClassName(INamedTypeSymbol type)
    {
        return CodeGenerator.ToolName + type.GetSuitableClassName() + "Reference";
    }

    /// <summary>
    /// Generates the class for the provided grain types.
    /// </summary>
    internal static TypeDeclarationSyntax GenerateClass(WellKnownTypes wellKnownTypes, GrainInterfaceDescription description)
    {
        var generatedTypeName = description.ReferenceTypeName;
        var grainType = description.Type;
        var genericTypes = grainType.GetHierarchyTypeParameters()
            .Select(_ => TypeParameter(_.ToString()))
            .ToArray();

        // Create the special marker attribute.
        var grainTypeArgument = TypeOfExpression(grainType.WithoutTypeParameters().ToTypeSyntax());
        var attributes = AttributeList()
            .AddAttributes(
                GeneratedCodeAttributeGenerator.GetGeneratedCodeAttributeSyntax(wellKnownTypes),
                Attribute(wellKnownTypes.SerializableAttribute.ToNameSyntax()),
                Attribute(wellKnownTypes.ExcludeFromCodeCoverageAttribute.ToNameSyntax()),
                Attribute(wellKnownTypes.GrainReferenceAttribute.ToNameSyntax())
                    .AddArgumentListArguments(AttributeArgument(grainTypeArgument)));

        // The generated reference derives from GrainReference and implements the grain interface.
        var classDeclaration =
            ClassDeclaration(generatedTypeName)
                .AddModifiers(Token(SyntaxKind.InternalKeyword))
                .AddBaseListTypes(
                    SimpleBaseType(wellKnownTypes.GrainReference.ToTypeSyntax()),
                    SimpleBaseType(grainType.ToTypeSyntax()))
                .AddConstraintClauses(grainType.GetTypeConstraintSyntax())
                .AddMembers(GenerateConstructors(wellKnownTypes, generatedTypeName))
                .AddMembers(
                    GrainInterfaceCommon.GenerateInterfaceIdProperty(wellKnownTypes, description).AddModifiers(Token(SyntaxKind.OverrideKeyword)),
                    GrainInterfaceCommon.GenerateInterfaceVersionProperty(wellKnownTypes, description).AddModifiers(Token(SyntaxKind.OverrideKeyword)),
                    GenerateInterfaceNameProperty(wellKnownTypes, description),
                    GenerateIsCompatibleMethod(wellKnownTypes, description),
                    GenerateGetMethodNameMethod(wellKnownTypes, description))
                .AddMembers(GenerateInvokeMethods(wellKnownTypes, description))
                .AddAttributeLists(attributes);
        if (genericTypes.Length > 0)
        {
            classDeclaration = classDeclaration.AddTypeParameterListParameters(genericTypes);
        }

        return classDeclaration;
    }

    /// <summary>
    /// Generates constructors.
    /// </summary>
    private static MemberDeclarationSyntax[] GenerateConstructors(WellKnownTypes wellKnownTypes, string className)
    {
        // Mirror every accessible base constructor, forwarding all arguments to base.
        var baseConstructors =
            wellKnownTypes.GrainReference.Constructors.Where(c => c.DeclaredAccessibility != Accessibility.Private);
        var constructors = new List<MemberDeclarationSyntax>();
        foreach (var baseConstructor in baseConstructors)
        {
            var args = baseConstructor.Parameters
                .Select(arg => Argument(arg.Name.ToIdentifierName()))
                .ToArray();
            var declaration =
                baseConstructor.GetConstructorDeclarationSyntax(className)
                    .WithInitializer(
                        ConstructorInitializer(SyntaxKind.BaseConstructorInitializer)
                            .AddArgumentListArguments(args))
                    .AddBodyStatements();
            constructors.Add(declaration);
        }

        return constructors.ToArray();
    }

    /// <summary>
    /// Generates invoker methods.
    /// </summary>
    private static MemberDeclarationSyntax[] GenerateInvokeMethods(WellKnownTypes wellKnownTypes, GrainInterfaceDescription description)
    {
        var baseReference = BaseExpression();
        var methods = description.Methods;
        var members = new List<MemberDeclarationSyntax>();
        foreach (var methodDescription in methods)
        {
            var method = methodDescription.Method;
            var methodIdArgument = Argument(methodDescription.MethodId.ToHexLiteral());

            // Construct a new object array from all method arguments.
            var parameters = method.Parameters;
            var body = new List<StatementSyntax>();
            foreach (var parameter in parameters)
            {
                // Observer parameters must be validated before the invocation is dispatched.
                if (parameter.Type.HasInterface(wellKnownTypes.IGrainObserver))
                {
                    body.Add(
                        ExpressionStatement(
                            InvocationExpression(wellKnownTypes.GrainFactoryBase.ToDisplayString().ToIdentifierName().Member("CheckGrainObserverParamInternal"))
                                .AddArgumentListArguments(Argument(parameter.Name.ToIdentifierName()))));
                }
            }

            // Get the parameters argument value.
            var objectArrayType = wellKnownTypes.Object.ToTypeSyntax().GetArrayTypeSyntax();
            ExpressionSyntax args;
            if (method.IsGenericMethod)
            {
                // Create an arguments array which includes the method's type parameters followed by the method's parameter list.
                var allParameters = new List<ExpressionSyntax>();
                foreach (var typeParameter in method.TypeParameters)
                {
                    allParameters.Add(TypeOfExpression(typeParameter.ToTypeSyntax()));
                }

                allParameters.AddRange(parameters.Select(GetParameterForInvocation));

                args =
                    ArrayCreationExpression(objectArrayType)
                        .WithInitializer(
                            InitializerExpression(SyntaxKind.ArrayInitializerExpression)
                                .AddExpressions(allParameters.ToArray()));
            }
            else if (parameters.Length == 0)
            {
                // Parameterless methods pass null rather than an empty array.
                args = LiteralExpression(SyntaxKind.NullLiteralExpression);
            }
            else
            {
                args =
                    ArrayCreationExpression(objectArrayType)
                        .WithInitializer(
                            InitializerExpression(SyntaxKind.ArrayInitializerExpression)
                                .AddExpressions(parameters.Select(GetParameterForInvocation).ToArray()));
            }

            var options = GetInvokeOptions(wellKnownTypes, method);

            // Construct the invocation call.
            bool asyncMethod;
            var isOneWayTask = method.HasAttribute(wellKnownTypes.OneWayAttribute);
            if (method.ReturnsVoid || isOneWayTask)
            {
                // One-way methods are never marked async.
                asyncMethod = false;

                var invocation = InvocationExpression(baseReference.Member("InvokeOneWayMethod"))
                    .AddArgumentListArguments(methodIdArgument)
                    .AddArgumentListArguments(Argument(args));
                if (options != null)
                {
                    invocation = invocation.AddArgumentListArguments(options);
                }

                body.Add(ExpressionStatement(invocation));

                if (isOneWayTask)
                {
                    // [OneWay] is only valid on methods returning exactly Task.
                    if (!wellKnownTypes.Task.Equals(method.ReturnType))
                    {
                        throw new CodeGenerationException(
                            $"Method {method} is marked with [{wellKnownTypes.OneWayAttribute.Name}], " +
                            $"but has a return type which is not assignable from {typeof(Task)}");
                    }

                    var done = wellKnownTypes.Task.ToNameSyntax().Member((object _) => Task.CompletedTask);
                    body.Add(ReturnStatement(done));
                }
            }
            else if (method.ReturnType is INamedTypeSymbol methodReturnType)
            {
                // If the method doesn't return a Task type (eg, it returns ValueTask<T>), then we must make an async method and await the invocation result.
                var isTaskMethod = wellKnownTypes.Task.Equals(methodReturnType)
                    || methodReturnType.IsGenericType && wellKnownTypes.Task_1.Equals(methodReturnType.ConstructedFrom);
                asyncMethod = !isTaskMethod;
                var returnType = methodReturnType.IsGenericType
                    ? methodReturnType.TypeArguments[0]
                    : wellKnownTypes.Object;
                var invokeMethodAsync = "InvokeMethodAsync".ToGenericName().AddTypeArgumentListArguments(returnType.ToTypeSyntax());
                var invocation =
                    InvocationExpression(MemberAccessExpression(SyntaxKind.SimpleMemberAccessExpression,
                        baseReference,
                        invokeMethodAsync))
                        .AddArgumentListArguments(methodIdArgument)
                        .AddArgumentListArguments(Argument(args));
                if (options != null)
                {
                    invocation = invocation.AddArgumentListArguments(options);
                }

                var methodResult = asyncMethod ? AwaitExpression(invocation) : (ExpressionSyntax)invocation;
                body.Add(ReturnStatement(methodResult));
            }
            else throw new NotSupportedException($"Method {method} has unsupported return type, {method.ReturnType}.");

            var methodDeclaration = method.GetDeclarationSyntax()
                .WithModifiers(TokenList())
                .WithExplicitInterfaceSpecifier(ExplicitInterfaceSpecifier(method.ContainingType.ToNameSyntax()))
                .AddBodyStatements(body.ToArray())
                // Since explicit implementation is used, constraints must not be specified.
                .WithConstraintClauses(new SyntaxList<TypeParameterConstraintClauseSyntax>());
            if (asyncMethod) methodDeclaration = methodDeclaration.AddModifiers(Token(SyntaxKind.AsyncKeyword));
            members.Add(methodDeclaration);
        }

        return members.ToArray();

        // Produces the expression used to pass one parameter to the invoke call.
        ExpressionSyntax GetParameterForInvocation(IParameterSymbol arg, int argIndex)
        {
            var argIdentifier = GetParameterName(arg, argIndex).ToIdentifierName();

            // Addressable arguments must be converted to references before passing.
            if (arg.Type.HasInterface(wellKnownTypes.IAddressable)
                && arg.Type.TypeKind == TypeKind.Interface)
            {
                return
                    ConditionalExpression(
                        BinaryExpression(SyntaxKind.IsExpression, argIdentifier, wellKnownTypes.Grain.ToTypeSyntax()),
                        InvocationExpression(argIdentifier.Member("AsReference".ToGenericName().AddTypeArgumentListArguments(arg.Type.ToTypeSyntax()))),
                        argIdentifier);
            }

            return argIdentifier;

            // Falls back to a positional name (arg0, arg1, ...) when the symbol has no name.
            string GetParameterName(IParameterSymbol parameter, int index)
            {
                var argName = parameter.Name;
                if (string.IsNullOrWhiteSpace(argName))
                {
                    argName = string.Format(CultureInfo.InvariantCulture, "arg{0:G}", index);
                }

                return argName;
            }
        }
    }

    /// <summary>
    /// Returns syntax for the options argument to GrainReference.InvokeMethodAsync{T} and GrainReference.InvokeOneWayMethod.
    /// </summary>
    private static ArgumentSyntax GetInvokeOptions(WellKnownTypes wellKnownTypes, IMethodSymbol method)
    {
        var options = new List<ExpressionSyntax>();
        var imo = wellKnownTypes.InvokeMethodOptions.ToNameSyntax();
        if (method.HasAttribute(wellKnownTypes.ReadOnlyAttribute))
        {
            options.Add(imo.Member("ReadOnly"));
        }

        if (method.HasAttribute(wellKnownTypes.UnorderedAttribute))
        {
            options.Add(imo.Member("Unordered"));
        }

        if (method.HasAttribute(wellKnownTypes.AlwaysInterleaveAttribute))
        {
            options.Add(imo.Member("AlwaysInterleave"));
        }

        if (method.GetAttribute(wellKnownTypes.TransactionAttribute, out var attr))
        {
            // Map the attribute's integer argument back onto the TransactionOption
            // member name, then onto the corresponding InvokeMethodOptions flag.
            var enumType = wellKnownTypes.TransactionOption;
            var txRequirement = (int)attr.ConstructorArguments.First().Value;
            var values = enumType.GetMembers().OfType<IFieldSymbol>().ToList();
            var mapping = values.ToDictionary(m => (int) m.ConstantValue, m => m.Name);
            if (!mapping.TryGetValue(txRequirement, out var value))
            {
                throw new NotSupportedException(
                    $"Transaction requirement {txRequirement} on method {method} was not understood."
                    + $" Known values: {string.Join(", ", mapping.Select(kv => $"{kv.Key} ({kv.Value})"))}");
            }

            switch (value)
            {
                case "Suppress":
                    options.Add(imo.Member("TransactionSuppress"));
                    break;
                case "CreateOrJoin":
                    options.Add(imo.Member("TransactionCreateOrJoin"));
                    break;
                case "Create":
                    options.Add(imo.Member("TransactionCreate"));
                    break;
                case "Join":
                    options.Add(imo.Member("TransactionJoin"));
                    break;
                case "Supported":
                    options.Add(imo.Member("TransactionSupported"));
                    break;
                case "NotAllowed":
                    options.Add(imo.Member("TransactionNotAllowed"));
                    break;
                default:
                    throw new NotSupportedException($"Transaction requirement {value} on method {method} was not understood.");
            }
        }

        // Combine all flags with bitwise-or; null means "no options argument".
        ExpressionSyntax allOptions;
        if (options.Count <= 1)
        {
            allOptions = options.FirstOrDefault();
        }
        else
        {
            allOptions =
                options.Aggregate((a, b) => BinaryExpression(SyntaxKind.BitwiseOrExpression, a, b));
        }

        if (allOptions == null)
        {
            return null;
        }

        return Argument(NameColon("options"), Token(SyntaxKind.None), allOptions);
    }

    /// <summary>
    /// Generates the IsCompatible override, which checks the provided interface id against
    /// the id of the grain interface and all of its grain-interface ancestors.
    /// </summary>
    private static MemberDeclarationSyntax GenerateIsCompatibleMethod(WellKnownTypes wellKnownTypes, GrainInterfaceDescription description)
    {
        var method = wellKnownTypes.GrainReference.Method("IsCompatible");
        var interfaceIdParameter = method.Parameters[0].Name.ToIdentifierName();

        var interfaceIds =
            new HashSet<int>(
                new[] { description.InterfaceId }.Concat(
                    description.Type.AllInterfaces.Where(wellKnownTypes.IsGrainInterface).Select(wellKnownTypes.GetTypeId)));

        var returnValue = default(BinaryExpressionSyntax);
        foreach (var interfaceId in interfaceIds)
        {
            var check = BinaryExpression(
                SyntaxKind.EqualsExpression,
                interfaceIdParameter,
                interfaceId.ToHexLiteral());

            // If this is the first check, assign it, otherwise OR this check with the previous checks.
            returnValue = returnValue == null
                ? check
                : BinaryExpression(SyntaxKind.LogicalOrExpression, returnValue, check);
        }

        return
            method.GetDeclarationSyntax()
                .AddModifiers(Token(SyntaxKind.OverrideKeyword))
                .WithExpressionBody(ArrowExpressionClause(returnValue))
                .WithSemicolonToken(Token(SyntaxKind.SemicolonToken));
    }

    /// <summary>
    /// Generates the InterfaceName property, which returns the grain interface's simple name.
    /// </summary>
    private static MemberDeclarationSyntax GenerateInterfaceNameProperty(WellKnownTypes wellKnownTypes, GrainInterfaceDescription description)
    {
        var returnValue = description.Type.Name.ToLiteralExpression();
        return
            PropertyDeclaration(wellKnownTypes.String.ToTypeSyntax(), "InterfaceName")
                .WithExpressionBody(ArrowExpressionClause(returnValue))
                .AddModifiers(Token(SyntaxKind.PublicKeyword), Token(SyntaxKind.OverrideKeyword))
                .WithSemicolonToken(Token(SyntaxKind.SemicolonToken));
    }

    /// <summary>
    /// Generates the GetMethodName override, a nested switch over interface id and method id
    /// which returns the method's name or throws for unknown ids.
    /// </summary>
    private static MethodDeclarationSyntax GenerateGetMethodNameMethod(WellKnownTypes wellKnownTypes, GrainInterfaceDescription description)
    {
        var method = wellKnownTypes.GrainReference.Method("GetMethodName");
        var methodDeclaration = method.GetDeclarationSyntax().AddModifiers(Token(SyntaxKind.OverrideKeyword));
        var parameters = method.Parameters;

        var interfaceIdArgument = parameters[0].Name.ToIdentifierName();
        var methodIdArgument = parameters[1].Name.ToIdentifierName();

        var callThrowMethodNotImplemented = InvocationExpression(IdentifierName("ThrowMethodNotImplemented"))
            .WithArgumentList(ArgumentList(SeparatedList(new[]
            {
                Argument(interfaceIdArgument),
                Argument(methodIdArgument)
            })));

        // This method is used directly after its declaration to create blocks for each interface id, comprising
        // primarily of a nested switch statement for each of the methods in the given interface.
        BlockSyntax ComposeInterfaceBlock(INamedTypeSymbol interfaceType, SwitchStatementSyntax methodSwitch)
        {
            return Block(methodSwitch.AddSections(SwitchSection()
                .AddLabels(DefaultSwitchLabel())
                .AddStatements(
                    ExpressionStatement(callThrowMethodNotImplemented),
                    ReturnStatement(LiteralExpression(SyntaxKind.NullLiteralExpression)))));
        }

        var interfaceCases = GrainInterfaceCommon.GenerateGrainInterfaceAndMethodSwitch(
            wellKnownTypes,
            description.Type,
            methodIdArgument,
            methodType => new StatementSyntax[] { ReturnStatement(methodType.Name.ToLiteralExpression()) },
            ComposeInterfaceBlock);

        // Generate the default case, which will throw a NotImplementedException.
        var callThrowInterfaceNotImplemented = InvocationExpression(IdentifierName("ThrowInterfaceNotImplemented"))
            .WithArgumentList(ArgumentList(SingletonSeparatedList(Argument(interfaceIdArgument))));
        var defaultCase = SwitchSection()
            .AddLabels(DefaultSwitchLabel())
            .AddStatements(
                ExpressionStatement(callThrowInterfaceNotImplemented),
                ReturnStatement(LiteralExpression(SyntaxKind.NullLiteralExpression)));

        // Local throw-helper functions emitted into the generated method body.
        // Note: the locals were previously cross-assigned (the "interface" variable
        // held the method-not-implemented helper and vice versa); the names now
        // match their contents. The emitted statement order is unchanged.
        var throwMethodNotImplemented = GrainInterfaceCommon.GenerateMethodNotImplementedFunction(wellKnownTypes);
        var throwInterfaceNotImplemented = GrainInterfaceCommon.GenerateInterfaceNotImplementedFunction(wellKnownTypes);
        var interfaceIdSwitch =
            SwitchStatement(interfaceIdArgument).AddSections(interfaceCases.ToArray()).AddSections(defaultCase);
        return methodDeclaration.AddBodyStatements(interfaceIdSwitch, throwMethodNotImplemented, throwInterfaceNotImplemented);
    }
}
}
| |
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
namespace CodeCracker.CSharp.Usage
{
[DiagnosticAnalyzer(LanguageNames.CSharp)]
public class DisposableVariableNotDisposedAnalyzer : DiagnosticAnalyzer
{
internal const string Title = "Should dispose object";
internal const string MessageFormat = "{0} should be disposed.";
internal const string Category = SupportedCategories.Usage;
const string Description = "When a disposable object is created it should be disposed as soon as possible.\n" +
"This warning will appear if you create a disposable object and don't store, return or dispose it.";
public const string cantFix = "cantFix";
internal static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(
DiagnosticId.DisposableVariableNotDisposed.ToDiagnosticId(),
Title,
MessageFormat,
Category,
DiagnosticSeverity.Warning,
isEnabledByDefault: true,
description: Description,
helpLinkUri: HelpLink.ForDiagnostic(DiagnosticId.DisposableVariableNotDisposed));
public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(Rule);
public override void Initialize(AnalysisContext context) => context.RegisterSyntaxNodeAction(AnalyzeObjectCreation, SyntaxKind.ObjectCreationExpression);
private static void AnalyzeObjectCreation(SyntaxNodeAnalysisContext context)
{
if (context.IsGenerated()) return;
var objectCreation = context.Node as ObjectCreationExpressionSyntax;
if (objectCreation == null) return;
if (objectCreation.Parent == null) return;
var originalNode = objectCreation;
SyntaxNode topSyntaxNode = originalNode;
while (topSyntaxNode.Parent.IsAnyKind(SyntaxKind.ParenthesizedExpression, SyntaxKind.ConditionalExpression, SyntaxKind.CastExpression))
topSyntaxNode = topSyntaxNode.Parent;
if (topSyntaxNode.Parent.IsAnyKind(SyntaxKind.ReturnStatement, SyntaxKind.UsingStatement))
return;
if (topSyntaxNode.Ancestors().Any(i => i.IsAnyKind(
SyntaxKind.ThisConstructorInitializer,
SyntaxKind.BaseConstructorInitializer,
SyntaxKind.ObjectCreationExpression)))
return;
var semanticModel = context.SemanticModel;
var type = semanticModel.GetSymbolInfo(originalNode.Type).Symbol as INamedTypeSymbol;
if (type == null) return;
if (!type.AllInterfaces.Any(i => i.ToString() == "System.IDisposable")) return;
ISymbol identitySymbol = null;
StatementSyntax statement = null;
if (topSyntaxNode.Parent.IsKind(SyntaxKind.SimpleAssignmentExpression))
{
var assignmentExpression = (AssignmentExpressionSyntax)topSyntaxNode.Parent;
identitySymbol = semanticModel.GetSymbolInfo(assignmentExpression.Left).Symbol;
if (identitySymbol?.Kind != SymbolKind.Local) return;
if (assignmentExpression.FirstAncestorOrSelf<MethodDeclarationSyntax>() == null) return;
var usingStatement = assignmentExpression.Parent as UsingStatementSyntax;
if (usingStatement != null) return;
statement = assignmentExpression.Parent as ExpressionStatementSyntax;
}
else if (topSyntaxNode.Parent.IsKind(SyntaxKind.EqualsValueClause) && topSyntaxNode.Parent.Parent.IsKind(SyntaxKind.VariableDeclarator))
{
var variableDeclarator = (VariableDeclaratorSyntax)topSyntaxNode.Parent.Parent;
var variableDeclaration = variableDeclarator?.Parent as VariableDeclarationSyntax;
identitySymbol = semanticModel.GetDeclaredSymbol(variableDeclarator);
if (identitySymbol == null) return;
var usingStatement = variableDeclaration?.Parent as UsingStatementSyntax;
if (usingStatement != null) return;
statement = variableDeclaration.Parent as LocalDeclarationStatementSyntax;
if ((statement?.FirstAncestorOrSelf<MethodDeclarationSyntax>()) == null) return;
}
else if (topSyntaxNode.Parent.IsAnyKind(SyntaxKind.SimpleLambdaExpression, SyntaxKind.ParenthesizedLambdaExpression))
{
var anonymousFunction = topSyntaxNode.Parent as AnonymousFunctionExpressionSyntax;
var methodSymbol = semanticModel.GetSymbolInfo(anonymousFunction).Symbol as IMethodSymbol;
if (!methodSymbol.ReturnsVoid) return;
var props = new Dictionary<string, string> { { "typeName", type.Name }, { cantFix, "" } }.ToImmutableDictionary();
context.ReportDiagnostic(Diagnostic.Create(Rule, originalNode.GetLocation(), props, type.Name.ToString()));
}
else
{
var props = new Dictionary<string, string> { { "typeName", type.Name } }.ToImmutableDictionary();
context.ReportDiagnostic(Diagnostic.Create(Rule, originalNode.GetLocation(), props, type.Name.ToString()));
return;
}
if (statement != null && identitySymbol != null)
{
var isDisposeOrAssigned = IsDisposedOrAssigned(semanticModel, statement, (ILocalSymbol)identitySymbol);
if (isDisposeOrAssigned) return;
var props = new Dictionary<string, string> { { "typeName", type.Name } }.ToImmutableDictionary();
context.ReportDiagnostic(Diagnostic.Create(Rule, originalNode.GetLocation(), props, type.Name.ToString()));
}
}
private static bool IsDisposedOrAssigned(SemanticModel semanticModel, StatementSyntax statement, ILocalSymbol identitySymbol)
{
var method = statement.FirstAncestorOrSelf<MethodDeclarationSyntax>();
if (method == null) return false;
if (IsReturned(method, statement, semanticModel, identitySymbol)) return true;
foreach (var childStatement in method.Body.DescendantNodes().OfType<StatementSyntax>())
{
if (childStatement.SpanStart > statement.SpanStart
&& (IsCorrectDispose(childStatement as ExpressionStatementSyntax, semanticModel, identitySymbol)
|| IsPassedAsArgument(childStatement, semanticModel, identitySymbol)
|| IsAssignedToFieldOrProperty(childStatement as ExpressionStatementSyntax, semanticModel, identitySymbol)))
return true;
}
return false;
}
private static bool IsPassedAsArgument(StatementSyntax statement, SemanticModel semanticModel, ILocalSymbol identitySymbol)
{
if (statement == null) return false;
var args = statement.DescendantNodes().OfKind<ArgumentSyntax>(SyntaxKind.Argument);
foreach (var arg in args)
{
var argSymbol = semanticModel.GetSymbolInfo(arg.Expression).Symbol;
if (identitySymbol.Equals(argSymbol)) return true;
}
return false;
}
private static bool IsReturned(MethodDeclarationSyntax method, StatementSyntax statement, SemanticModel semanticModel, ILocalSymbol identitySymbol)
{
var anonymousFunction = statement.FirstAncestorOfKind(SyntaxKind.ParenthesizedLambdaExpression,
SyntaxKind.SimpleLambdaExpression, SyntaxKind.AnonymousMethodExpression) as AnonymousFunctionExpressionSyntax;
IMethodSymbol methodSymbol;
BlockSyntax body;
if (anonymousFunction != null)
{
methodSymbol = semanticModel.GetSymbolInfo(anonymousFunction).Symbol as IMethodSymbol;
body = anonymousFunction.Body as BlockSyntax;
}
else
{
methodSymbol = semanticModel.GetDeclaredSymbol(method);
body = method.Body;
}
if (body == null) return true;
var returnExpressions = body.DescendantNodes().OfType<ReturnStatementSyntax>().Select(r => r.Expression);
var returnTypeSymbol = methodSymbol?.ReturnType;
if (returnTypeSymbol == null) return false;
if (returnTypeSymbol.SpecialType == SpecialType.System_Void) return false;
var isReturning = returnExpressions.Any(returnExpression =>
{
var returnSymbol = semanticModel.GetSymbolInfo(returnExpression).Symbol;
if (returnSymbol == null) return false;
return returnSymbol.Equals(identitySymbol);
});
return isReturning;
}
private static bool IsAssignedToFieldOrProperty(ExpressionStatementSyntax expressionStatement, SemanticModel semanticModel, ILocalSymbol identitySymbol)
{
if (expressionStatement == null) return false;
if (!expressionStatement.Expression.IsKind(SyntaxKind.SimpleAssignmentExpression)) return false;
var assignment = (AssignmentExpressionSyntax)expressionStatement.Expression;
var assignmentTarget = semanticModel.GetSymbolInfo(assignment.Left).Symbol;
if (assignmentTarget?.Kind != SymbolKind.Field && assignmentTarget?.Kind != SymbolKind.Property) return false;
var assignmentSource = semanticModel.GetSymbolInfo(assignment.Right).Symbol;
return (identitySymbol.Equals(assignmentSource));
}
private static bool IsCorrectDispose(ExpressionStatementSyntax expressionStatement, SemanticModel semanticModel, ILocalSymbol identitySymbol)
{
if (expressionStatement == null) return false;
var invocation = expressionStatement.Expression as InvocationExpressionSyntax;
ExpressionSyntax expressionAccessed;
IdentifierNameSyntax memberAccessed;
if (invocation == null)
{
var conditionalAccessExpression = expressionStatement.Expression as ConditionalAccessExpressionSyntax;
if (conditionalAccessExpression == null) return false;
invocation = conditionalAccessExpression.WhenNotNull as InvocationExpressionSyntax;
var memberBinding = invocation?.Expression as MemberBindingExpressionSyntax;
if (memberBinding == null) return false;
expressionAccessed = conditionalAccessExpression.Expression;
memberAccessed = memberBinding.Name as IdentifierNameSyntax;
}
else
{
var memberAccess = invocation.Expression as MemberAccessExpressionSyntax;
if (memberAccess == null) return false;
expressionAccessed = memberAccess.Expression;
memberAccessed = memberAccess.Name as IdentifierNameSyntax;
}
if (memberAccessed == null) return false;
if (invocation.ArgumentList.Arguments.Any()) return false;
ISymbol memberSymbol;
if (expressionAccessed.IsKind(SyntaxKind.IdentifierName))
{
memberSymbol = semanticModel.GetSymbolInfo(expressionAccessed).Symbol;
}
else if (expressionAccessed is ParenthesizedExpressionSyntax)
{
var parenthesizedExpression = (ParenthesizedExpressionSyntax)expressionAccessed;
var cast = parenthesizedExpression.Expression as CastExpressionSyntax;
if (cast == null) return false;
var catTypeSymbol = semanticModel.GetTypeInfo(cast.Type).Type;
if (catTypeSymbol.SpecialType != SpecialType.System_IDisposable) return false;
memberSymbol = semanticModel.GetSymbolInfo(cast.Expression).Symbol;
}
else return false;
if (memberSymbol == null || !memberSymbol.Equals(identitySymbol)) return false;
if (memberAccessed.Identifier.Text != "Dispose" || memberAccessed.Arity != 0) return false;
var methodSymbol = semanticModel.GetSymbolInfo(memberAccessed).Symbol as IMethodSymbol;
if (methodSymbol == null) return false;
if (methodSymbol.ToString() == "System.IDisposable.Dispose()") return true;
var disposeMethod = (IMethodSymbol)semanticModel.Compilation.GetSpecialType(SpecialType.System_IDisposable).GetMembers("Dispose").Single();
var isDispose = methodSymbol.Equals(methodSymbol.ContainingType.FindImplementationForInterfaceMember(disposeMethod));
return isDispose;
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
namespace DalSic
{
/// <summary>
/// Strongly-typed collection for the SysAntecedente class.
/// </summary>
[Serializable]
public partial class SysAntecedenteCollection : ActiveList<SysAntecedente, SysAntecedenteCollection>
{
    public SysAntecedenteCollection() {}

    /// <summary>
    /// Filters an existing collection based on the set criteria. This is an in-memory filter.
    /// Only Comparison.Equals criteria are evaluated; all other comparisons are ignored.
    /// Thanks to developingchris for this!
    /// </summary>
    /// <returns>SysAntecedenteCollection</returns>
    public SysAntecedenteCollection Filter()
    {
        // Iterate backwards so removals do not disturb the indices still to visit.
        for (int i = this.Count - 1; i > -1; i--)
        {
            SysAntecedente o = this[i];
            foreach (SubSonic.Where w in this.wheres)
            {
                bool remove = false;
                System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
                // BUGFIX: guard against criteria naming a column with no matching
                // property — GetProperty returns null in that case.
                if (pi != null && pi.CanRead)
                {
                    object val = pi.GetValue(o, null);
                    switch (w.Comparison)
                    {
                        case SubSonic.Comparison.Equals:
                            // BUGFIX: object.Equals handles a null property value
                            // without throwing, unlike val.Equals(...).
                            if (!object.Equals(val, w.ParameterValue))
                            {
                                remove = true;
                            }
                            break;
                    }
                }
                if (remove)
                {
                    this.Remove(o);
                    break;
                }
            }
        }
        return this;
    }
}
/// <summary>
/// ActiveRecord class which wraps the Sys_Antecedente table.
/// NOTE(review): this looks like SubSonic tool-generated code — hand edits
/// belong in a separate partial-class file or they will be lost on regeneration.
/// </summary>
[Serializable]
public partial class SysAntecedente : ActiveRecord<SysAntecedente>, IActiveRecord
{
#region .ctors and Default Settings
// Creates a new, unsaved record with code-level defaults applied.
public SysAntecedente()
{
SetSQLProps();
InitSetDefaults();
MarkNew();
}
private void InitSetDefaults() { SetDefaults(); }
// When useDatabaseDefaults is true, the schema-defined column defaults are forced.
public SysAntecedente(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
ForceDefaults();
MarkNew();
}
// Loads an existing record by its primary key value.
public SysAntecedente(object keyID)
{
SetSQLProps();
InitSetDefaults();
LoadByKey(keyID);
}
// Loads an existing record by an arbitrary column/value pair.
public SysAntecedente(string columnName, object columnValue)
{
SetSQLProps();
InitSetDefaults();
LoadByParam(columnName,columnValue);
}
// Ensures the static table schema has been built before any instance work.
protected static void SetSQLProps() { GetTableSchema(); }
#endregion
#region Schema and Query Accessor
public static Query CreateQuery() { return new Query(Schema); }
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
SetSQLProps();
return BaseSchema;
}
}
// Builds the Sys_Antecedente schema once and registers it with the
// "sicProvider" data provider. The Add order of the columns defines the
// ordinals relied on by the Typed Columns region below.
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("Sys_Antecedente", TableType.Table, DataService.GetInstance("sicProvider"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
// idAntecedente: int identity, primary key (ordinal 0).
TableSchema.TableColumn colvarIdAntecedente = new TableSchema.TableColumn(schema);
colvarIdAntecedente.ColumnName = "idAntecedente";
colvarIdAntecedente.DataType = DbType.Int32;
colvarIdAntecedente.MaxLength = 0;
colvarIdAntecedente.AutoIncrement = true;
colvarIdAntecedente.IsNullable = false;
colvarIdAntecedente.IsPrimaryKey = true;
colvarIdAntecedente.IsForeignKey = false;
colvarIdAntecedente.IsReadOnly = false;
colvarIdAntecedente.DefaultSetting = @"";
colvarIdAntecedente.ForeignKeyTableName = "";
schema.Columns.Add(colvarIdAntecedente);
// nombre: string(50), not null, defaults to '' (ordinal 1).
TableSchema.TableColumn colvarNombre = new TableSchema.TableColumn(schema);
colvarNombre.ColumnName = "nombre";
colvarNombre.DataType = DbType.String;
colvarNombre.MaxLength = 50;
colvarNombre.AutoIncrement = false;
colvarNombre.IsNullable = false;
colvarNombre.IsPrimaryKey = false;
colvarNombre.IsForeignKey = false;
colvarNombre.IsReadOnly = false;
colvarNombre.DefaultSetting = @"('')";
colvarNombre.ForeignKeyTableName = "";
schema.Columns.Add(colvarNombre);
// idTipoAntecedente: nullable int, FK to Sys_TipoAntecedente (ordinal 2).
TableSchema.TableColumn colvarIdTipoAntecedente = new TableSchema.TableColumn(schema);
colvarIdTipoAntecedente.ColumnName = "idTipoAntecedente";
colvarIdTipoAntecedente.DataType = DbType.Int32;
colvarIdTipoAntecedente.MaxLength = 0;
colvarIdTipoAntecedente.AutoIncrement = false;
colvarIdTipoAntecedente.IsNullable = true;
colvarIdTipoAntecedente.IsPrimaryKey = false;
colvarIdTipoAntecedente.IsForeignKey = true;
colvarIdTipoAntecedente.IsReadOnly = false;
colvarIdTipoAntecedente.DefaultSetting = @"";
colvarIdTipoAntecedente.ForeignKeyTableName = "Sys_TipoAntecedente";
schema.Columns.Add(colvarIdTipoAntecedente);
// grupo: int, not null, defaults to 0 (ordinal 3).
TableSchema.TableColumn colvarGrupo = new TableSchema.TableColumn(schema);
colvarGrupo.ColumnName = "grupo";
colvarGrupo.DataType = DbType.Int32;
colvarGrupo.MaxLength = 0;
colvarGrupo.AutoIncrement = false;
colvarGrupo.IsNullable = false;
colvarGrupo.IsPrimaryKey = false;
colvarGrupo.IsForeignKey = false;
colvarGrupo.IsReadOnly = false;
colvarGrupo.DefaultSetting = @"((0))";
colvarGrupo.ForeignKeyTableName = "";
schema.Columns.Add(colvarGrupo);
// activo: bool, not null, defaults to 1/true (ordinal 4).
TableSchema.TableColumn colvarActivo = new TableSchema.TableColumn(schema);
colvarActivo.ColumnName = "activo";
colvarActivo.DataType = DbType.Boolean;
colvarActivo.MaxLength = 0;
colvarActivo.AutoIncrement = false;
colvarActivo.IsNullable = false;
colvarActivo.IsPrimaryKey = false;
colvarActivo.IsForeignKey = false;
colvarActivo.IsReadOnly = false;
colvarActivo.DefaultSetting = @"((1))";
colvarActivo.ForeignKeyTableName = "";
schema.Columns.Add(colvarActivo);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["sicProvider"].AddSchema("Sys_Antecedente",schema);
}
}
#endregion
#region Props
// Identity primary key.
[XmlAttribute("IdAntecedente")]
[Bindable(true)]
public int IdAntecedente
{
get { return GetColumnValue<int>(Columns.IdAntecedente); }
set { SetColumnValue(Columns.IdAntecedente, value); }
}
[XmlAttribute("Nombre")]
[Bindable(true)]
public string Nombre
{
get { return GetColumnValue<string>(Columns.Nombre); }
set { SetColumnValue(Columns.Nombre, value); }
}
// Nullable foreign key to Sys_TipoAntecedente.
[XmlAttribute("IdTipoAntecedente")]
[Bindable(true)]
public int? IdTipoAntecedente
{
get { return GetColumnValue<int?>(Columns.IdTipoAntecedente); }
set { SetColumnValue(Columns.IdTipoAntecedente, value); }
}
[XmlAttribute("Grupo")]
[Bindable(true)]
public int Grupo
{
get { return GetColumnValue<int>(Columns.Grupo); }
set { SetColumnValue(Columns.Grupo, value); }
}
[XmlAttribute("Activo")]
[Bindable(true)]
public bool Activo
{
get { return GetColumnValue<bool>(Columns.Activo); }
set { SetColumnValue(Columns.Activo, value); }
}
#endregion
#region PrimaryKey Methods
// Propagates a newly assigned primary key to any loaded child collections.
protected override void SetPrimaryKey(object oValue)
{
base.SetPrimaryKey(oValue);
SetPKValues();
}
// Lazily loaded Sys_RelAntecedentePaciente children keyed by idAntecedente.
private DalSic.SysRelAntecedentePacienteCollection colSysRelAntecedentePacienteRecords;
public DalSic.SysRelAntecedentePacienteCollection SysRelAntecedentePacienteRecords
{
get
{
if(colSysRelAntecedentePacienteRecords == null)
{
colSysRelAntecedentePacienteRecords = new DalSic.SysRelAntecedentePacienteCollection().Where(SysRelAntecedentePaciente.Columns.IdAntecedente, IdAntecedente).Load();
colSysRelAntecedentePacienteRecords.ListChanged += new ListChangedEventHandler(colSysRelAntecedentePacienteRecords_ListChanged);
}
return colSysRelAntecedentePacienteRecords;
}
set
{
colSysRelAntecedentePacienteRecords = value;
colSysRelAntecedentePacienteRecords.ListChanged += new ListChangedEventHandler(colSysRelAntecedentePacienteRecords_ListChanged);
}
}
// Keeps children added to the collection pointing at this record's primary key.
void colSysRelAntecedentePacienteRecords_ListChanged(object sender, ListChangedEventArgs e)
{
if (e.ListChangedType == ListChangedType.ItemAdded)
{
// Set foreign key value
colSysRelAntecedentePacienteRecords[e.NewIndex].IdAntecedente = IdAntecedente;
}
}
// Lazily loaded Sys_Parentesco children keyed by idAntecedente.
private DalSic.SysParentescoCollection colSysParentescoRecords;
public DalSic.SysParentescoCollection SysParentescoRecords
{
get
{
if(colSysParentescoRecords == null)
{
colSysParentescoRecords = new DalSic.SysParentescoCollection().Where(SysParentesco.Columns.IdAntecedente, IdAntecedente).Load();
colSysParentescoRecords.ListChanged += new ListChangedEventHandler(colSysParentescoRecords_ListChanged);
}
return colSysParentescoRecords;
}
set
{
colSysParentescoRecords = value;
colSysParentescoRecords.ListChanged += new ListChangedEventHandler(colSysParentescoRecords_ListChanged);
}
}
// Keeps children added to the collection pointing at this record's primary key.
void colSysParentescoRecords_ListChanged(object sender, ListChangedEventArgs e)
{
if (e.ListChangedType == ListChangedType.ItemAdded)
{
// Set foreign key value
colSysParentescoRecords[e.NewIndex].IdAntecedente = IdAntecedente;
}
}
#endregion
#region ForeignKey Properties
/// <summary>
/// Returns a SysTipoAntecedente ActiveRecord object related to this SysAntecedente.
/// NOTE(review): FetchByID receives the nullable IdTipoAntecedente, and the setter
/// dereferences 'value' — behavior with a null FK or a null value should be
/// confirmed against SubSonic's conventions.
/// </summary>
public DalSic.SysTipoAntecedente SysTipoAntecedente
{
get { return DalSic.SysTipoAntecedente.FetchByID(this.IdTipoAntecedente); }
set { SetColumnValue("idTipoAntecedente", value.IdTipoAntecedente); }
}
#endregion
//no ManyToMany tables defined (0)
#region ObjectDataSource support
/// <summary>
/// Inserts a record, can be used with the Object Data Source.
/// Saves under the current HTTP user's name when in a web context,
/// otherwise under the thread principal's identity name.
/// </summary>
public static void Insert(string varNombre,int? varIdTipoAntecedente,int varGrupo,bool varActivo)
{
SysAntecedente item = new SysAntecedente();
item.Nombre = varNombre;
item.IdTipoAntecedente = varIdTipoAntecedente;
item.Grupo = varGrupo;
item.Activo = varActivo;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
/// <summary>
/// Updates a record, can be used with the Object Data Source.
/// Builds a detached instance, marks it as existing (IsNew = false) and saves.
/// </summary>
public static void Update(int varIdAntecedente,string varNombre,int? varIdTipoAntecedente,int varGrupo,bool varActivo)
{
SysAntecedente item = new SysAntecedente();
item.IdAntecedente = varIdAntecedente;
item.Nombre = varNombre;
item.IdTipoAntecedente = varIdTipoAntecedente;
item.Grupo = varGrupo;
item.Activo = varActivo;
item.IsNew = false;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
#endregion
#region Typed Columns
// NOTE: ordinals below must match the Add order in GetTableSchema().
public static TableSchema.TableColumn IdAntecedenteColumn
{
get { return Schema.Columns[0]; }
}
public static TableSchema.TableColumn NombreColumn
{
get { return Schema.Columns[1]; }
}
public static TableSchema.TableColumn IdTipoAntecedenteColumn
{
get { return Schema.Columns[2]; }
}
public static TableSchema.TableColumn GrupoColumn
{
get { return Schema.Columns[3]; }
}
public static TableSchema.TableColumn ActivoColumn
{
get { return Schema.Columns[4]; }
}
#endregion
#region Columns Struct
// Raw database column names used with GetColumnValue/SetColumnValue.
public struct Columns
{
public static string IdAntecedente = @"idAntecedente";
public static string Nombre = @"nombre";
public static string IdTipoAntecedente = @"idTipoAntecedente";
public static string Grupo = @"grupo";
public static string Activo = @"activo";
}
#endregion
#region Update PK Collections
// Re-stamps the foreign key of already-loaded child records after the
// primary key changes (e.g. after the first save assigns the identity).
public void SetPKValues()
{
if (colSysRelAntecedentePacienteRecords != null)
{
foreach (DalSic.SysRelAntecedentePaciente item in colSysRelAntecedentePacienteRecords)
{
if (item.IdAntecedente != IdAntecedente)
{
item.IdAntecedente = IdAntecedente;
}
}
}
if (colSysParentescoRecords != null)
{
foreach (DalSic.SysParentesco item in colSysParentescoRecords)
{
if (item.IdAntecedente != IdAntecedente)
{
item.IdAntecedente = IdAntecedente;
}
}
}
}
#endregion
#region Deep Save
// Saves this record first, then any loaded child collections.
public void DeepSave()
{
Save();
if (colSysRelAntecedentePacienteRecords != null)
{
colSysRelAntecedentePacienteRecords.SaveAll();
}
if (colSysParentescoRecords != null)
{
colSysParentescoRecords.SaveAll();
}
}
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Globalization;
using System.Management.Automation.Internal;
using System.Text;
namespace Microsoft.PowerShell.Commands.Internal.Format
{
/// <summary>
/// Writer class to handle Complex Object formatting.
/// Text is accumulated into a buffer (AddToBuffer) and flushed as wrapped,
/// indented lines by WriteToScreen.
/// </summary>
internal sealed class ComplexWriter
{
/// <summary>
/// Initialization method to be called before any other operation.
/// </summary>
/// <param name="lineOutput">LineOutput interfaces to write to.</param>
/// <param name="numberOfTextColumns">Number of columns used to write out.</param>
internal void Initialize(LineOutput lineOutput, int numberOfTextColumns)
{
_lo = lineOutput;
_textColumns = numberOfTextColumns;
}
/// <summary>
/// Writes a string immediately, with no indentation frames active.
/// </summary>
/// <param name="s">String to write.</param>
internal void WriteString(string s)
{
_indentationManager.Clear();
AddToBuffer(s);
WriteToScreen();
}
/// <summary>
/// It interprets a list of format value tokens and outputs it.
/// </summary>
/// <param name="formatValueList">List of FormatValue tokens to interpret.</param>
internal void WriteObject(List<FormatValue> formatValueList)
{
// we always start with no indentation
_indentationManager.Clear();
foreach (FormatEntry fe in formatValueList)
{
// operate on each directive inside the list,
// carrying the indentation from invocation to invocation
GenerateFormatEntryDisplay(fe, 0);
}
// make sure that, if we have pending text in the buffer it gets flushed
WriteToScreen();
}
/// <summary>
/// Operate on a single entry: recurse into nested FormatEntry children
/// (bounded by maxRecursionDepth), flush buffered text on FormatNewLine,
/// and buffer the text of FormatTextField/FormatPropertyField tokens.
/// </summary>
/// <param name="fe">Entry to process.</param>
/// <param name="currentDepth">Current depth of recursion.</param>
private void GenerateFormatEntryDisplay(FormatEntry fe, int currentDepth)
{
foreach (object obj in fe.formatValueList)
{
FormatEntry feChild = obj as FormatEntry;
if (feChild != null)
{
// children nested deeper than maxRecursionDepth are silently skipped
if (currentDepth < maxRecursionDepth)
{
if (feChild.frameInfo != null)
{
// if we have frame information, we need to push it on the
// indentation stack
using (_indentationManager.StackFrame(feChild.frameInfo))
{
GenerateFormatEntryDisplay(feChild, currentDepth + 1);
}
}
else
{
// no need here of activating an indentation stack frame
GenerateFormatEntryDisplay(feChild, currentDepth + 1);
}
}
continue;
}
if (obj is FormatNewLine)
{
this.WriteToScreen();
continue;
}
FormatTextField ftf = obj as FormatTextField;
if (ftf != null)
{
this.AddToBuffer(ftf.text);
continue;
}
FormatPropertyField fpf = obj as FormatPropertyField;
if (fpf != null)
{
this.AddToBuffer(fpf.propertyValue);
}
}
}
/// <summary>
/// Add a string to the current buffer, waiting for a FlushBuffer()
/// </summary>
/// <param name="s">String to add to buffer.</param>
private void AddToBuffer(string s)
{
_stringBuffer.Append(s);
}
/// <summary>
/// Write to the output interface: wrap the buffered text to the available
/// width (honoring the current left/right/first-line indentation), pad each
/// generated line, write it out, then reset the buffer.
/// </summary>
private void WriteToScreen()
{
int leftIndentation = _indentationManager.LeftIndentation;
int rightIndentation = _indentationManager.RightIndentation;
int firstLineIndentation = _indentationManager.FirstLineIndentation;
// VALIDITY CHECKS:
// check the useful ("active") width
int usefulWidth = _textColumns - rightIndentation - leftIndentation;
if (usefulWidth <= 0)
{
// fatal error, there is nothing to write to the device
// just clear the buffer and return
// NOTE(review): there is no actual early 'return' here — execution falls
// through and GenerateLines runs on the now-empty buffer, emitting one
// blank line. Confirm whether a 'return;' was intended.
_stringBuffer = new StringBuilder();
}
// check indentation or hanging is not larger than the active width
int indentationAbsoluteValue = (firstLineIndentation > 0) ? firstLineIndentation : -firstLineIndentation;
if (indentationAbsoluteValue >= usefulWidth)
{
// value too big, we reset it to zero
firstLineIndentation = 0;
}
// compute the first line indentation or hanging
int firstLineWidth = _textColumns - rightIndentation - leftIndentation;
int followingLinesWidth = firstLineWidth;
if (firstLineIndentation >= 0)
{
// the first line has an indentation
firstLineWidth -= firstLineIndentation;
}
else
{
// the first line is hanging
followingLinesWidth += firstLineIndentation;
}
// error checking on invalid values
// generate the lines using the computed widths
StringCollection sc = StringManipulationHelper.GenerateLines(_lo.DisplayCells, _stringBuffer.ToString(),
firstLineWidth, followingLinesWidth);
// compute padding
int firstLinePadding = leftIndentation;
int followingLinesPadding = leftIndentation;
if (firstLineIndentation >= 0)
{
// the first line has an indentation
firstLinePadding += firstLineIndentation;
}
else
{
// the first line is hanging
followingLinesPadding -= firstLineIndentation;
}
// now write the lines on the screen
bool firstLine = true;
foreach (string s in sc)
{
if (firstLine)
{
firstLine = false;
_lo.WriteLine(StringManipulationHelper.PadLeft(s, firstLinePadding));
}
else
{
_lo.WriteLine(StringManipulationHelper.PadLeft(s, followingLinesPadding));
}
}
// start fresh for the next flush
_stringBuffer = new StringBuilder();
}
/// <summary>
/// Helper object to manage the frame-based indentation and margins.
/// </summary>
private readonly IndentationManager _indentationManager = new IndentationManager();
/// <summary>
/// Buffer to accumulate partially constructed text.
/// </summary>
private StringBuilder _stringBuffer = new StringBuilder();
/// <summary>
/// Interface to write to.
/// </summary>
private LineOutput _lo;
/// <summary>
/// Number of columns for the output device.
/// </summary>
private int _textColumns;
// Cap on nested FormatEntry recursion in GenerateFormatEntryDisplay.
private const int maxRecursionDepth = 50;
}
/// <summary>
/// Maintains a stack of indentation frames; the aggregate left/right margins
/// and the innermost first-line indent are exposed as properties.
/// </summary>
internal sealed class IndentationManager
{
    /// <summary>
    /// Disposable token handed out by StackFrame(); disposing it pops the frame it pushed.
    /// </summary>
    private sealed class IndentationStackFrame : IDisposable
    {
        private readonly IndentationManager _owner;

        internal IndentationStackFrame(IndentationManager mgr)
        {
            _owner = mgr;
        }

        public void Dispose()
        {
            if (_owner != null)
            {
                _owner.RemoveStackFrame();
            }
        }
    }

    private readonly Stack<FrameInfo> _frames = new Stack<FrameInfo>();

    /// <summary>
    /// Drops every pushed frame, returning to zero indentation.
    /// </summary>
    internal void Clear()
    {
        _frames.Clear();
    }

    /// <summary>
    /// Pushes <paramref name="frameInfo"/> and returns a token whose disposal pops it,
    /// enabling "using (mgr.StackFrame(fi)) { ... }" scoping.
    /// </summary>
    internal IDisposable StackFrame(FrameInfo frameInfo)
    {
        IndentationStackFrame token = new IndentationStackFrame(this);
        _frames.Push(frameInfo);
        return token;
    }

    private void RemoveStackFrame()
    {
        _frames.Pop();
    }

    /// <summary>Sum of the right indentations of all active frames.</summary>
    internal int RightIndentation
    {
        get
        {
            int total = 0;
            foreach (FrameInfo frame in _frames)
            {
                total += frame.rightIndentation;
            }
            return total;
        }
    }

    /// <summary>Sum of the left indentations of all active frames.</summary>
    internal int LeftIndentation
    {
        get
        {
            int total = 0;
            foreach (FrameInfo frame in _frames)
            {
                total += frame.leftIndentation;
            }
            return total;
        }
    }

    /// <summary>
    /// First-line indent (positive) or hanging indent (negative) of the innermost
    /// frame; zero when no frame is active.
    /// </summary>
    internal int FirstLineIndentation
    {
        get
        {
            return _frames.Count == 0 ? 0 : _frames.Peek().firstLine;
        }
    }
}
/// <summary>
/// Result of GetWords: one word plus the delimiter that terminated it.
/// </summary>
internal struct GetWordsResult
{
// The word text, without its trailing delimiter.
internal string Word;
// Delimiter that ended the word: the space/tab/soft-hyphen character itself,
// or string.Empty for non-breaking separators and for the final word.
internal string Delim;
}
/// <summary>
/// Collection of helper functions for string formatting: word splitting,
/// width-constrained line generation (with or without word wrap), newline
/// handling and left padding.
/// </summary>
internal sealed class StringManipulationHelper
{
// Special characters recognized by the word splitter: the soft hyphen allows a
// break (showing '-' only when the break is taken); the hard hyphen and the
// non-breaking space split words but never carry a breakable delimiter.
private static readonly char s_softHyphen = '\u00AD';
private static readonly char s_hardHyphen = '\u2011';
private static readonly char s_nonBreakingSpace = '\u00A0';
// Two-letter ISO language codes for which word wrapping is applied;
// all other cultures fall back to character-based splitting.
private static readonly Collection<string> s_cultureCollection = new Collection<string>();
static StringManipulationHelper()
{
s_cultureCollection.Add("en"); // English
s_cultureCollection.Add("fr"); // French
s_cultureCollection.Add("de"); // German
s_cultureCollection.Add("it"); // Italian
s_cultureCollection.Add("pt"); // Portuguese
s_cultureCollection.Add("es"); // Spanish
}
/// <summary>
/// Breaks a string into a collection of words
/// TODO: we might be able to improve this function in the future
/// so that we do not break paths etc.
/// </summary>
/// <param name="s">Input string.</param>
/// <returns>A collection of words.</returns>
private static IEnumerable<GetWordsResult> GetWords(string s)
{
StringBuilder sb = new StringBuilder();
GetWordsResult result = new GetWordsResult();
for (int i = 0; i < s.Length; i++)
{
// Soft hyphen = \u00AD - Should break, and add a hyphen if needed. If not needed for a break, hyphen should be absent
if (s[i] == ' ' || s[i] == '\t' || s[i] == s_softHyphen)
{
result.Word = sb.ToString();
sb.Clear();
result.Delim = new string(s[i], 1);
yield return result;
}
// Non-breaking space = \u00A0 - ideally shouldn't wrap
// Hard hyphen = \u2011 - Should not break
else if (s[i] == s_hardHyphen || s[i] == s_nonBreakingSpace)
{
result.Word = sb.ToString();
sb.Clear();
result.Delim = string.Empty;
yield return result;
}
else
{
sb.Append(s[i]);
}
}
// flush the trailing (possibly empty) word with no delimiter
result.Word = sb.ToString();
result.Delim = string.Empty;
yield return result;
}
/// <summary>
/// Splits val into display lines no wider than the given first/following line
/// widths, choosing word wrap for the supported cultures and raw character
/// splitting otherwise.
/// </summary>
internal static StringCollection GenerateLines(DisplayCells displayCells, string val, int firstLineLen, int followingLinesLen)
{
if (s_cultureCollection.Contains(CultureInfo.CurrentCulture.TwoLetterISOLanguageName))
{
return GenerateLinesWithWordWrap(displayCells, val, firstLineLen, followingLinesLen);
}
else
{
return GenerateLinesWithoutWordWrap(displayCells, val, firstLineLen, followingLinesLen);
}
}
// Character-based splitting: each newline-separated line is cut into chunks
// that fit the first-line width, then the following-lines width.
private static StringCollection GenerateLinesWithoutWordWrap(DisplayCells displayCells, string val, int firstLineLen, int followingLinesLen)
{
StringCollection retVal = new StringCollection();
if (string.IsNullOrEmpty(val))
{
// if null or empty, just add and we are done
retVal.Add(val);
return retVal;
}
// break string on newlines and process each line separately
string[] lines = SplitLines(val);
for (int k = 0; k < lines.Length; k++)
{
string currentLine = lines[k];
if (currentLine == null || displayCells.Length(currentLine) <= firstLineLen)
{
// we do not need to split further, just add
retVal.Add(currentLine);
continue;
}
// the string does not fit, so we have to wrap around on multiple lines
// for each of these lines in the string, the first line will have
// a (potentially) different length (indentation or hanging)
// for each line, start a new state
SplitLinesAccumulator accumulator = new SplitLinesAccumulator(retVal, firstLineLen, followingLinesLen);
int offset = 0; // offset into the line we are splitting
while (offset < currentLine.Length)
{
// acquire the current active display line length (it can vary from call to call)
int currentDisplayLen = accumulator.ActiveLen;
// determine if the current tail would fit or not
// for the remaining part of the string, determine its display cell count
int currentCellsToFit = displayCells.Length(currentLine, offset);
// determine if we fit into the line
int excessCells = currentCellsToFit - currentDisplayLen;
if (excessCells > 0)
{
// we are not at the end of the string, select a sub string
// that would fit in the remaining display length
int charactersToAdd = displayCells.GetHeadSplitLength(currentLine, offset, currentDisplayLen);
if (charactersToAdd <= 0)
{
// corner case: we have a two cell character and the current
// display length is one.
// add a single cell arbitrary character instead of the original
// one and keep going
charactersToAdd = 1;
accumulator.AddLine("?");
}
else
{
// of the given length, add it to the accumulator
accumulator.AddLine(currentLine.Substring(offset, charactersToAdd));
}
// increase the offset by the # of characters added
offset += charactersToAdd;
}
else
{
// we reached the last (partial) line, we add it all
accumulator.AddLine(currentLine.Substring(offset));
break;
}
}
}
return retVal;
}
// Accumulates split lines; the first AddLine consumes the (possibly different)
// first-line width, every later one the following-lines width.
private sealed class SplitLinesAccumulator
{
internal SplitLinesAccumulator(StringCollection retVal, int firstLineLen, int followingLinesLen)
{
_retVal = retVal;
_firstLineLen = firstLineLen;
_followingLinesLen = followingLinesLen;
}
internal void AddLine(string s)
{
if (!_addedFirstLine)
{
_addedFirstLine = true;
}
_retVal.Add(s);
}
// Width currently available: first-line width until the first AddLine,
// then the following-lines width.
internal int ActiveLen
{
get
{
if (_addedFirstLine)
return _followingLinesLen;
return _firstLineLen;
}
}
private readonly StringCollection _retVal;
private bool _addedFirstLine;
private readonly int _firstLineLen;
private readonly int _followingLinesLen;
}
// Word-based splitting: fills lines word by word; a word wider than a whole
// line degrades to character-based filling for that word.
private static StringCollection GenerateLinesWithWordWrap(DisplayCells displayCells, string val, int firstLineLen, int followingLinesLen)
{
StringCollection retVal = new StringCollection();
if (string.IsNullOrEmpty(val))
{
// if null or empty, just add and we are done
retVal.Add(val);
return retVal;
}
// break string on newlines and process each line separately
string[] lines = SplitLines(val);
for (int k = 0; k < lines.Length; k++)
{
if (lines[k] == null || displayCells.Length(lines[k]) <= firstLineLen)
{
// we do not need to split further, just add
retVal.Add(lines[k]);
continue;
}
int spacesLeft = firstLineLen;
int lineWidth = firstLineLen;
bool firstLine = true;
StringBuilder singleLine = new StringBuilder();
foreach (GetWordsResult word in GetWords(lines[k]))
{
string wordToAdd = word.Word;
// Handle soft hyphen
if (word.Delim == s_softHyphen.ToString())
{
int wordWidthWithHyphen = displayCells.Length(wordToAdd) + displayCells.Length(s_softHyphen.ToString());
// Add hyphen only if necessary (the word ends exactly at the line break)
if (wordWidthWithHyphen == spacesLeft)
{
wordToAdd += "-";
}
}
else
{
if (!string.IsNullOrEmpty(word.Delim))
{
wordToAdd += word.Delim;
}
}
int wordWidth = displayCells.Length(wordToAdd);
// Handle zero width
if (lineWidth == 0)
{
if (firstLine)
{
firstLine = false;
lineWidth = followingLinesLen;
}
if (lineWidth == 0)
{
break;
}
spacesLeft = lineWidth;
}
// Word is wider than a single line
if (wordWidth > lineWidth)
{
foreach (char c in wordToAdd)
{
char charToAdd = c;
int charWidth = displayCells.Length(c);
// corner case: we have a two cell character and the current
// display length is one.
// add a single cell arbitrary character instead of the original
// one and keep going
if (charWidth > lineWidth)
{
charToAdd = '?';
charWidth = 1;
}
if (charWidth > spacesLeft)
{
retVal.Add(singleLine.ToString());
singleLine.Clear();
singleLine.Append(charToAdd);
if (firstLine)
{
firstLine = false;
lineWidth = followingLinesLen;
}
spacesLeft = lineWidth - charWidth;
}
else
{
singleLine.Append(charToAdd);
spacesLeft -= charWidth;
}
}
}
else
{
if (wordWidth > spacesLeft)
{
// word does not fit on the current line: flush and start a new one
retVal.Add(singleLine.ToString());
singleLine.Clear();
singleLine.Append(wordToAdd);
if (firstLine)
{
firstLine = false;
lineWidth = followingLinesLen;
}
spacesLeft = lineWidth - wordWidth;
}
else
{
singleLine.Append(wordToAdd);
spacesLeft -= wordWidth;
}
}
}
// flush the last partially filled line
retVal.Add(singleLine.ToString());
}
return retVal;
}
/// <summary>
/// Split a multiline string into an array of strings
/// by honoring both \n and \r\n (all \r characters are stripped first).
/// </summary>
/// <param name="s">String to split.</param>
/// <returns>String array with the values.</returns>
internal static string[] SplitLines(string s)
{
if (string.IsNullOrEmpty(s))
return new string[1] { s };
StringBuilder sb = new StringBuilder();
foreach (char c in s)
{
if (c != '\r')
sb.Append(c);
}
return sb.ToString().Split(s_newLineChar);
}
#if false
internal static string StripNewLines (string s)
{
if (string.IsNullOrEmpty (s))
return s;
string[] lines = SplitLines (s);
if (lines.Length == 0)
return null;
if (lines.Length == 1)
return lines[0];
StringBuilder sb = new StringBuilder ();
for (int k = 0; k < lines.Length; k++)
{
if (k == 0)
sb.Append (lines[k]);
else
sb.Append (" " + lines[k]);
}
return sb.ToString ();
}
#endif
/// <summary>
/// Returns the text up to the first line break, with an ellipsis appended
/// when anything was cut off; empty/null input yields string.Empty.
/// </summary>
internal static string TruncateAtNewLine(string s)
{
if (string.IsNullOrEmpty(s))
{
return string.Empty;
}
int lineBreak = s.IndexOfAny(s_lineBreakChars);
if (lineBreak < 0)
{
return s;
}
return s.Substring(0, lineBreak) + PSObjectHelper.Ellipsis;
}
/// <summary>
/// Prefixes val with count spaces.
/// </summary>
internal static string PadLeft(string val, int count)
{
return StringUtil.Padding(count) + val;
}
private static readonly char[] s_newLineChar = new char[] { '\n' };
private static readonly char[] s_lineBreakChars = new char[] { '\n', '\r' };
}
}
| |
using System;
using System.Collections.Generic;
using System.Threading;
// Source: https://github.com/tejacques/LRUCache
// License: https://github.com/tejacques/LRUCache/blob/master/LICENSE
namespace Box.V2.Utility
{
/// <summary>
/// An LRU Cache implementation with an optional time-to-live on entries.
/// </summary>
/// <typeparam name="K">The key type.</typeparam>
/// <typeparam name="V">The value type.</typeparam>
public class LRUCache<K, V>
{
    private readonly Dictionary<K, CacheNode> _entries;
    private CacheNode _head;
    private CacheNode _tail;
    private TimeSpan _ttl;
    private readonly Timer _timer;
    private int _count;
    private readonly bool _refreshEntries;

    // Dedicated gate object: locking on 'this' would let external code that
    // locks the cache instance deadlock or contend with internal synchronization.
    private readonly object _syncRoot = new object();

    /// <summary>
    /// A least recently used cache with a time to live.
    /// </summary>
    /// <param name="capacity">
    /// The number of entries the cache will hold
    /// </param>
    /// <param name="hours">The number of hours in the TTL</param>
    /// <param name="minutes">The number of minutes in the TTL</param>
    /// <param name="seconds">The number of seconds in the TTL</param>
    /// <param name="refreshEntries">
    /// Whether the TTL should be refreshed upon retrieval
    /// </param>
    public LRUCache(
        int capacity,
        int hours = 0,
        int minutes = 0,
        int seconds = 0,
        bool refreshEntries = true)
    {
        Capacity = capacity;
        _entries = new Dictionary<K, CacheNode>(Capacity);
        _head = null;
        _tail = null;
        _count = 0;
        _ttl = new TimeSpan(hours, minutes, seconds);
        _refreshEntries = refreshEntries;
        if (_ttl > TimeSpan.Zero)
        {
            // First purge after one full TTL, then every 5 seconds.
            _timer = new Timer(
                Purge,
                null,
                (int)_ttl.TotalMilliseconds,
                5000); // 5 seconds
        }
    }

    // Doubly-linked list node; _head is most recently used, _tail least.
    private class CacheNode
    {
        public CacheNode Next { get; set; }
        public CacheNode Prev { get; set; }
        public K Key { get; set; }
        public V Value { get; set; }
        public DateTimeOffset LastAccessed { get; set; }
    }

    /// <summary>
    /// Gets the current number of entries in the cache.
    /// </summary>
    public int Count
    {
        get { return _entries.Count; }
    }

    /// <summary>
    /// Gets the maximum number of entries in the cache.
    /// </summary>
    public int Capacity { get; }

    /// <summary>
    /// Gets whether or not the cache is full.
    /// </summary>
    public bool IsFull
    {
        get { return _count == Capacity; }
    }

    /// <summary>
    /// Gets the item being stored.
    /// </summary>
    /// <param name="key">The cache key.</param>
    /// <param name="value">Receives the cached value, or default(V) on a miss.</param>
    /// <returns>True when the key was present in the cache.</returns>
    public bool TryGetValue(K key, out V value)
    {
        value = default(V);
        if (!_entries.TryGetValue(key, out CacheNode entry))
        {
            return false;
        }

        if (_refreshEntries)
        {
            // Touching the entry moves it to the MRU end of the list.
            MoveToHead(entry);
        }

        // If V is a nonprimitive value type (struct), reads are not atomic,
        // so guard against a concurrent writer in TryAdd.
        lock (entry)
        {
            value = entry.Value;
        }

        return true;
    }

    /// <summary>
    /// Sets the item being stored to the supplied value.
    /// </summary>
    /// <param name="key">The cache key.</param>
    /// <param name="value">The value to set in the cache.</param>
    public void Add(K key, V value)
    {
        TryAdd(key, value);
    }

    /// <summary>
    /// Sets the item being stored to the supplied value.
    /// </summary>
    /// <param name="key">The cache key.</param>
    /// <param name="value">The value to set in the cache.</param>
    /// <returns>True if the set was successful. False otherwise.</returns>
    public bool TryAdd(K key, V value)
    {
        if (!_entries.TryGetValue(key, out CacheNode entry))
        {
            // Add the entry under the lock, re-checking for a racing insert.
            lock (_syncRoot)
            {
                if (!_entries.TryGetValue(key, out entry))
                {
                    if (IsFull)
                    {
                        // Re-use the evicted tail CacheNode for the new entry.
                        entry = _tail;
                        _entries.Remove(_tail.Key);
                        // Reset with new values
                        entry.Key = key;
                        entry.Value = value;
                        entry.LastAccessed = DateTimeOffset.UtcNow;
                        // Next and Prev don't need to be reset.
                        // Move to front will do the right thing.
                    }
                    else
                    {
                        _count++;
                        entry = new CacheNode()
                        {
                            Key = key,
                            Value = value,
                            LastAccessed = DateTimeOffset.UtcNow
                        };
                    }

                    _entries.Add(key, entry);
                }
            }
        }
        else
        {
            // If V is a nonprimitive Value type (struct) then sets are
            // not atomic, therefore we need to lock on the entry.
            lock (entry)
            {
                entry.Value = value;
            }
        }

        MoveToHead(entry);

        // Lazily initialize the tail on the very first insert. The check and
        // set are both atomic, so no lock is needed here.
        if (null == _tail)
        {
            _tail = _head;
        }

        return true;
    }

    /// <summary>
    /// Removes the stored data.
    /// </summary>
    /// <returns>True if the removal was successful. False otherwise.</returns>
    public bool Clear()
    {
        lock (_syncRoot)
        {
            _entries.Clear();
            _head = null;
            _tail = null;
            // BUGFIX: reset the LRU count too. Previously it was left stale, so
            // after a Clear on a full cache IsFull stayed true and the next
            // TryAdd dereferenced the null _tail.
            _count = 0;
            return true;
        }
    }

    /// <summary>
    /// Moves the provided entry to the head of the list.
    /// </summary>
    /// <param name="entry">The CacheNode entry to move up.</param>
    private void MoveToHead(CacheNode entry)
    {
        if (entry == _head)
        {
            return;
        }

        // We need to lock here because we're modifying the linked list,
        // which is not thread safe by itself.
        lock (_syncRoot)
        {
            RemoveFromLL(entry);
            AddToHead(entry);
        }
    }

    // Timer callback: evicts expired entries, scanning from the LRU tail.
    private void Purge(object state)
    {
        if (_ttl <= TimeSpan.Zero || _count == 0)
        {
            return;
        }

        lock (_syncRoot)
        {
            var current = _tail;
            var now = DateTimeOffset.UtcNow;
            while (null != current
                && (now - current.LastAccessed) > _ttl)
            {
                Remove(current);
                // Going backwards
                current = current.Prev;
            }
        }
    }

    // Links entry in as the new head (most recently used position).
    private void AddToHead(CacheNode entry)
    {
        entry.Prev = null;
        entry.Next = _head;
        if (null != _head)
        {
            _head.Prev = entry;
        }

        _head = entry;
    }

    // Unlinks entry from the doubly-linked list, fixing head/tail as needed.
    private void RemoveFromLL(CacheNode entry)
    {
        var next = entry.Next;
        var prev = entry.Prev;
        if (null != next)
        {
            next.Prev = entry.Prev;
        }

        if (null != prev)
        {
            prev.Next = entry.Next;
        }

        if (_head == entry)
        {
            _head = next;
        }

        if (_tail == entry)
        {
            _tail = prev;
        }
    }

    // Only to be called while holding _syncRoot (see Purge).
    private void Remove(CacheNode entry)
    {
        RemoveFromLL(entry);
        _entries.Remove(entry.Key);
        _count--;
    }
}
}
| |
/*
Matali Physics Demo
Copyright (c) 2013 KOMIRES Sp. z o. o.
*/
using System;
using System.Collections.Generic;
using OpenTK;
using OpenTK.Graphics.OpenGL;
using Komires.MataliPhysics;
namespace MataliPhysicsDemo
{
/// <summary>
/// This is the main type for your game
/// </summary>
public class Plant1
{
Demo demo;
PhysicsScene scene;
string instanceIndexName;
Vector3 position1;
Quaternion orientation1;
Quaternion orientation2;
public Plant1(Demo demo, int instanceIndex)
{
this.demo = demo;
instanceIndexName = " " + instanceIndex.ToString();
}
public void Initialize(PhysicsScene scene)
{
this.scene = scene;
}
public static void CreateShapes(Demo demo, PhysicsScene scene)
{
}
public void Create(Vector3 objectPosition, Vector3 objectScale, Quaternion objectOrientation)
{
PhysicsObject objectRoot = null;
PhysicsObject objectBase = null;
objectRoot = scene.Factory.PhysicsObjectManager.Create("Plant" + instanceIndexName);
string leafInstanceIndexName1 = " 1";
string leafInstanceIndexName2 = " 2";
string leafInstanceIndexName3 = " 3";
string leafInstanceIndexName4 = " 4";
int trunkCount = 8;
Vector3 trunkScale = new Vector3(1.0f, 5.0f, 1.0f);
int leafCount = 4;
Vector3 leafScale = new Vector3(4.0f, 0.1f, 1.0f);
CreateTrunk(scene, instanceIndexName, trunkCount, trunkScale, Vector3.Zero, Vector3.One, Quaternion.Identity);
CreateLeaf(scene, instanceIndexName, leafInstanceIndexName1, trunkCount, trunkScale, leafCount, leafScale, new Vector3(0.0f, 0.0f, 0.0f), Vector3.One, Quaternion.Identity);
CreateLeaf(scene, instanceIndexName, leafInstanceIndexName2, trunkCount, trunkScale, leafCount, leafScale, new Vector3(0.0f, 0.0f, 0.0f), Vector3.One, Quaternion.FromAxisAngle(Vector3.UnitY, MathHelper.DegreesToRadians(90.0f)));
CreateLeaf(scene, instanceIndexName, leafInstanceIndexName3, trunkCount, trunkScale, leafCount, leafScale, new Vector3(0.0f, 0.0f, 0.0f), Vector3.One, Quaternion.FromAxisAngle(Vector3.UnitY, MathHelper.DegreesToRadians(180.0f)));
CreateLeaf(scene, instanceIndexName, leafInstanceIndexName4, trunkCount, trunkScale, leafCount, leafScale, new Vector3(0.0f, 0.0f, 0.0f), Vector3.One, Quaternion.FromAxisAngle(Vector3.UnitY, MathHelper.DegreesToRadians(270.0f)));
objectBase = scene.Factory.PhysicsObjectManager.Find("Plant Trunk" + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
objectBase = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName1 + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
objectBase = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName2 + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
objectBase = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName3 + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
objectBase = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName4 + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
Constraint constraint = null;
constraint = scene.Factory.ConstraintManager.Create("Leaf Constraint" + leafInstanceIndexName1 + leafCount.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName1 + (leafCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Trunk " + (trunkCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetAnchor2(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.MinLimitDegAngleZ = -10.0f;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 200.0f;
constraint.Update();
constraint = scene.Factory.ConstraintManager.Create("LeafC Constraint" + leafInstanceIndexName2 + leafCount.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName2 + (leafCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Trunk " + (trunkCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 + new Vector3(0.0f, 0.0f, leafScale.X));
constraint.SetAnchor2(position1 + new Vector3(0.0f, 0.0f, leafScale.X));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.MaxLimitDegAngleX = 10.0f;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 200.0f;
constraint.Update();
constraint = scene.Factory.ConstraintManager.Create("Leaf Constraint" + leafInstanceIndexName3 + leafCount.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName3 + (leafCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Trunk " + (trunkCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 - new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetAnchor2(position1 - new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.MinLimitDegAngleZ = -10.0f;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 200.0f;
constraint.Update();
constraint = scene.Factory.ConstraintManager.Create("LeafC Constraint" + leafInstanceIndexName4 + leafCount.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName4 + (leafCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Trunk " + (trunkCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 - new Vector3(0.0f, 0.0f, leafScale.X));
constraint.SetAnchor2(position1 - new Vector3(0.0f, 0.0f, leafScale.X));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.MaxLimitDegAngleX = 10.0f;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 200.0f;
constraint.Update();
constraint = scene.Factory.ConstraintManager.Create("Leaf Constraint" + leafInstanceIndexName1 + leafInstanceIndexName2 + leafCount.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName1 + (leafCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName2 + (leafCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetAnchor2(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 200.0f;
constraint.Update();
constraint = scene.Factory.ConstraintManager.Create("Leaf Constraint" + leafInstanceIndexName3 + leafInstanceIndexName4 + leafCount.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName3 + (leafCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName4 + (leafCount - 1).ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 + new Vector3(0.0f, 0.0f, leafScale.X));
constraint.SetAnchor2(position1 + new Vector3(0.0f, 0.0f, leafScale.X));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 200.0f;
constraint.Update();
PhysicsObject objectA = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName1 + (leafCount - 1).ToString() + instanceIndexName);
PhysicsObject objectB = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName2 + (leafCount - 1).ToString() + instanceIndexName);
PhysicsObject objectC = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName3 + (leafCount - 1).ToString() + instanceIndexName);
PhysicsObject objectD = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName4 + (leafCount - 1).ToString() + instanceIndexName);
if (objectA != null)
{
objectA.DisableCollision(objectB, true);
objectA.DisableCollision(objectC, true);
objectA.DisableCollision(objectD, true);
}
if (objectB != null)
{
objectB.DisableCollision(objectC, true);
objectB.DisableCollision(objectD, true);
}
if (objectC != null)
objectC.DisableCollision(objectD, true);
objectRoot.InitLocalTransform.SetOrientation(ref objectOrientation);
objectRoot.InitLocalTransform.SetScale(ref objectScale);
objectRoot.InitLocalTransform.SetPosition(ref objectPosition);
objectRoot.UpdateFromInitLocalTransform();
constraint = scene.Factory.ConstraintManager.Create("Trunk Constraint " + trunkCount.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Trunk 0" + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Quad 1");
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 - new Vector3(0.0f, 0.5f * trunkScale.Y, 0.0f));
constraint.SetAnchor2(position1 - new Vector3(0.0f, 0.5f * trunkScale.Y, 0.0f));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 30.0f;
constraint.Update();
scene.UpdateFromInitLocalTransform(objectRoot);
}
void CreateTrunk(PhysicsScene scene, string instanceIndexName, int trunkCount, Vector3 trunkScale, Vector3 objectPosition, Vector3 objectScale, Quaternion objectOrientation)
{
Shape cylinderY = scene.Factory.ShapeManager.Find("CylinderY");
PhysicsObject objectRoot = null;
PhysicsObject objectBase = null;
objectRoot = scene.Factory.PhysicsObjectManager.Create("Plant Trunk" + instanceIndexName);
for (int i = 0; i < trunkCount; i++)
{
objectBase = scene.Factory.PhysicsObjectManager.Create("Plant Trunk " + i.ToString() + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
//objectBase.Material.RigidGroup = true;
objectBase.Shape = cylinderY;
objectBase.UserDataStr = "CylinderY";
objectBase.CreateSound(true);
objectBase.InitLocalTransform.SetPosition(new Vector3(0.0f, 0.5f * trunkScale.Y + i * trunkScale.Y, 0.0f) + objectPosition);
objectBase.InitLocalTransform.SetScale(trunkScale.X * 0.1f + 0.1f * (trunkCount - i), 0.5f * trunkScale.Y, trunkScale.Z * 0.1f + 0.1f * (trunkCount - i));
objectBase.Integral.SetDensity(10.0f);
}
objectRoot.UpdateFromInitLocalTransform();
Constraint constraint = null;
for (int i = 0; i < trunkCount - 1; i++)
{
constraint = scene.Factory.ConstraintManager.Create("Trunk Constraint " + i.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Trunk " + i.ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Trunk " + (i + 1).ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 + new Vector3(0.0f, 0.5f * trunkScale.Y, 0.0f));
constraint.SetAnchor2(position1 + new Vector3(0.0f, 0.5f * trunkScale.Y, 0.0f));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 200.0f;
constraint.Update();
}
objectRoot.InitLocalTransform.SetOrientation(ref objectOrientation);
objectRoot.InitLocalTransform.SetScale(ref objectScale);
objectRoot.InitLocalTransform.SetPosition(ref objectPosition);
objectRoot.UpdateFromInitLocalTransform();
}
void CreateLeaf(PhysicsScene scene, string instanceIndexName, string leafInstanceIndexName, int trunkCount, Vector3 trunkScale, int leafCount, Vector3 leafScale, Vector3 objectPosition, Vector3 objectScale, Quaternion objectOrientation)
{
Shape box = scene.Factory.ShapeManager.Find("Box");
PhysicsObject objectRoot = null;
PhysicsObject objectBase = null;
objectRoot = scene.Factory.PhysicsObjectManager.Create("Plant Leaf" + leafInstanceIndexName + instanceIndexName);
for (int i = 0; i < leafCount; i++)
{
objectBase = scene.Factory.PhysicsObjectManager.Create("Plant Leaf" + leafInstanceIndexName + i.ToString() + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
objectBase.Shape = box;
objectBase.UserDataStr = "Box";
objectBase.Material.UserDataStr = "Leaf";
objectBase.InitLocalTransform.SetPosition(new Vector3(-leafScale.X * 2.0f * leafCount + leafScale.X + i * 2.0f * leafScale.X, trunkCount * trunkScale.Y, 0.0f) + objectPosition);
objectBase.InitLocalTransform.SetScale(leafScale.X, leafScale.Y, leafScale.Z + (float)Math.Tan(1.0 / (leafCount - i + 1.0) - 0.9));
objectBase.Integral.SetDensity(0.1f);
}
objectRoot.UpdateFromInitLocalTransform();
Constraint constraint = null;
for (int i = 0; i < leafCount - 1; i++)
{
constraint = scene.Factory.ConstraintManager.Create("Leaf Constraint" + leafInstanceIndexName + i.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName + i.ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName + (i + 1).ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetAnchor2(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.MinLimitDegAngleZ = -10.0f;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 300.0f;
constraint.LimitAngleForce = 0.5f;
constraint.Update();
}
for (int i = 0; i < leafCount - 1; i++)
{
objectBase = scene.Factory.PhysicsObjectManager.Create("Plant Leaf Sub A" + leafInstanceIndexName + i.ToString() + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
objectBase.Shape = box;
objectBase.UserDataStr = "Box";
objectBase.Material.UserDataStr = "Leaf";
objectBase.InitLocalTransform.SetPosition(new Vector3(-leafScale.X * 2.0f * leafCount + leafScale.X + i * 2.0f * leafScale.X, trunkCount * trunkScale.Y, -1.2f + (float)Math.Exp(10.0 / (leafCount - i + 5.8f))) + objectPosition);
objectBase.InitLocalTransform.SetScale(leafScale.Z + (float)Math.Tan(1.0f / (leafCount - i + 1.0) - 0.5), leafScale.Y, leafScale.X);
objectBase.InitLocalTransform.SetRotation(Matrix4.CreateFromAxisAngle(Vector3.UnitY, -MathHelper.DegreesToRadians(45.0f + (leafCount - i) * 6.0f)));
objectBase.Integral.SetDensity(0.001f);
}
for (int i = 0; i < leafCount - 1; i++)
{
objectBase = scene.Factory.PhysicsObjectManager.Create("Plant Leaf Sub B" + leafInstanceIndexName + i.ToString() + instanceIndexName);
objectRoot.AddChildPhysicsObject(objectBase);
objectBase.Shape = box;
objectBase.UserDataStr = "Box";
objectBase.Material.UserDataStr = "Leaf";
objectBase.InitLocalTransform.SetPosition(new Vector3(-leafScale.X * 2.0f * leafCount + leafScale.X + i * 2.0f * leafScale.X, trunkCount * trunkScale.Y, 1.2f - (float)Math.Exp(10.0 / (leafCount - i + 5.8f))) + objectPosition);
objectBase.InitLocalTransform.SetScale(leafScale.Z + (float)Math.Tan(1.0f / (leafCount - i + 1.0) - 0.5), leafScale.Y, leafScale.X);
objectBase.InitLocalTransform.SetRotation(Matrix4.CreateFromAxisAngle(Vector3.UnitY, MathHelper.DegreesToRadians(45.0f + (leafCount - i) * 6.0f)));
objectBase.Integral.SetDensity(0.001f);
}
objectRoot.UpdateFromInitLocalTransform();
for (int i = 0; i < leafCount - 1; i++)
{
constraint = scene.Factory.ConstraintManager.Create("Leaf Constraint Sub A" + leafInstanceIndexName + i.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf Sub A" + leafInstanceIndexName + i.ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName + i.ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetAnchor2(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.MinLimitDegAngleZ = -10.0f;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 400.0f;
constraint.LimitAngleForce = 0.5f;
constraint.Update();
}
for (int i = 0; i < leafCount - 1; i++)
{
constraint = scene.Factory.ConstraintManager.Create("Leaf Constraint Sub B" + leafInstanceIndexName + i.ToString() + instanceIndexName);
constraint.PhysicsObject1 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf Sub B" + leafInstanceIndexName + i.ToString() + instanceIndexName);
constraint.PhysicsObject2 = scene.Factory.PhysicsObjectManager.Find("Plant Leaf" + leafInstanceIndexName + i.ToString() + instanceIndexName);
constraint.PhysicsObject1.MainWorldTransform.GetPosition(ref position1);
constraint.PhysicsObject1.MainWorldTransform.GetOrientation(ref orientation1);
constraint.PhysicsObject2.MainWorldTransform.GetOrientation(ref orientation2);
constraint.SetAnchor1(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetAnchor2(position1 + new Vector3(leafScale.X, 0.0f, 0.0f));
constraint.SetInitWorldOrientation1(ref orientation1);
constraint.SetInitWorldOrientation2(ref orientation2);
constraint.EnableLimitAngleX = true;
constraint.EnableLimitAngleY = true;
constraint.EnableLimitAngleZ = true;
constraint.MinLimitDegAngleZ = -10.0f;
constraint.EnableBreak = true;
constraint.MinBreakVelocity = 400.0f;
constraint.LimitAngleForce = 0.5f;
constraint.Update();
}
PhysicsObject objectA = null;
PhysicsObject objectB = null;
for (int i = 0; i < leafCount - 1; i++)
{
objectA = scene.Factory.PhysicsObjectManager.Find("Plant Leaf Sub A" + leafInstanceIndexName + i.ToString() + instanceIndexName);
objectB = scene.Factory.PhysicsObjectManager.Find("Plant Leaf Sub B" + leafInstanceIndexName + i.ToString() + instanceIndexName);
objectA.DisableCollision(objectB, true);
}
objectRoot.InitLocalTransform.SetOrientation(ref objectOrientation);
objectRoot.InitLocalTransform.SetScale(ref objectScale);
objectRoot.InitLocalTransform.SetPosition(ref objectPosition);
objectRoot.UpdateFromInitLocalTransform();
}
}
}
| |
using Content.Client.Actions.Assignments;
using Content.Client.Actions.UI;
using Content.Client.Construction;
using Content.Client.DragDrop;
using Content.Client.Hands;
using Content.Client.Items.Managers;
using Content.Client.Outline;
using Content.Client.Popups;
using Content.Shared.Actions;
using Content.Shared.Actions.ActionTypes;
using Content.Shared.Input;
using JetBrains.Annotations;
using Robust.Client.GameObjects;
using Robust.Client.Graphics;
using Robust.Client.Player;
using Robust.Client.UserInterface;
using Robust.Client.Utility;
using Robust.Shared.Audio;
using Robust.Shared.ContentPack;
using Robust.Shared.GameStates;
using Robust.Shared.Input;
using Robust.Shared.Input.Binding;
using Robust.Shared.Player;
using Robust.Shared.Serialization.Manager;
using Robust.Shared.Serialization.Markdown;
using Robust.Shared.Serialization.Markdown.Mapping;
using Robust.Shared.Serialization.Markdown.Sequence;
using Robust.Shared.Utility;
using System.IO;
using System.Linq;
using YamlDotNet.RepresentationModel;
namespace Content.Client.Actions
{
[UsedImplicitly]
public sealed class ActionsSystem : SharedActionsSystem
{
[Dependency] private readonly IPlayerManager _playerManager = default!;
[Dependency] private readonly IUserInterfaceManager _uiManager = default!;
[Dependency] private readonly IItemSlotManager _itemSlotManager = default!;
[Dependency] private readonly ISerializationManager _serializationManager = default!;
[Dependency] private readonly IResourceManager _resourceManager = default!;
[Dependency] private readonly IOverlayManager _overlayMan = default!;
[Dependency] private readonly PopupSystem _popupSystem = default!;
[Dependency] private readonly InteractionOutlineSystem _interactionOutline = default!;
[Dependency] private readonly TargetOutlineSystem _targetOutline = default!;
// TODO Redo assignments, including allowing permanent user configurable slot assignments.
/// <summary>
/// Current assignments for all hotbars / slots for this entity.
/// </summary>
public ActionAssignments Assignments = new(Hotbars, Slots);
public const byte Hotbars = 9;
public const byte Slots = 10;
public bool UIDirty;
public ActionsUI? Ui;
private EntityUid? _highlightedEntity;
public override void Initialize()
{
base.Initialize();
// set up hotkeys for hotbar
CommandBinds.Builder
.Bind(ContentKeyFunctions.OpenActionsMenu,
InputCmdHandler.FromDelegate(_ => ToggleActionsMenu()))
.Bind(ContentKeyFunctions.Hotbar1,
HandleHotbarKeybind(0))
.Bind(ContentKeyFunctions.Hotbar2,
HandleHotbarKeybind(1))
.Bind(ContentKeyFunctions.Hotbar3,
HandleHotbarKeybind(2))
.Bind(ContentKeyFunctions.Hotbar4,
HandleHotbarKeybind(3))
.Bind(ContentKeyFunctions.Hotbar5,
HandleHotbarKeybind(4))
.Bind(ContentKeyFunctions.Hotbar6,
HandleHotbarKeybind(5))
.Bind(ContentKeyFunctions.Hotbar7,
HandleHotbarKeybind(6))
.Bind(ContentKeyFunctions.Hotbar8,
HandleHotbarKeybind(7))
.Bind(ContentKeyFunctions.Hotbar9,
HandleHotbarKeybind(8))
.Bind(ContentKeyFunctions.Hotbar0,
HandleHotbarKeybind(9))
.Bind(ContentKeyFunctions.Loadout1,
HandleChangeHotbarKeybind(0))
.Bind(ContentKeyFunctions.Loadout2,
HandleChangeHotbarKeybind(1))
.Bind(ContentKeyFunctions.Loadout3,
HandleChangeHotbarKeybind(2))
.Bind(ContentKeyFunctions.Loadout4,
HandleChangeHotbarKeybind(3))
.Bind(ContentKeyFunctions.Loadout5,
HandleChangeHotbarKeybind(4))
.Bind(ContentKeyFunctions.Loadout6,
HandleChangeHotbarKeybind(5))
.Bind(ContentKeyFunctions.Loadout7,
HandleChangeHotbarKeybind(6))
.Bind(ContentKeyFunctions.Loadout8,
HandleChangeHotbarKeybind(7))
.Bind(ContentKeyFunctions.Loadout9,
HandleChangeHotbarKeybind(8))
// when selecting a target, we intercept clicks in the game world, treating them as our target selection. We want to
// take priority before any other systems handle the click.
.BindBefore(EngineKeyFunctions.Use, new PointerInputCmdHandler(TargetingOnUse, outsidePrediction: true),
typeof(ConstructionSystem), typeof(DragDropSystem))
.BindBefore(EngineKeyFunctions.UIRightClick, new PointerInputCmdHandler(TargetingCancel, outsidePrediction: true))
.Register<ActionsSystem>();
SubscribeLocalEvent<ActionsComponent, PlayerAttachedEvent>(OnPlayerAttached);
SubscribeLocalEvent<ActionsComponent, PlayerDetachedEvent>(OnPlayerDetached);
SubscribeLocalEvent<ActionsComponent, ComponentHandleState>(HandleState);
}
protected override void Dirty(ActionType action)
{
// Should only ever receive component states for attached player's component.
// --> lets not bother unnecessarily dirtying and prediction-resetting actions for other players.
if (action.AttachedEntity != _playerManager.LocalPlayer?.ControlledEntity)
return;
base.Dirty(action);
UIDirty = true;
}
private void HandleState(EntityUid uid, ActionsComponent component, ref ComponentHandleState args)
{
// Client only needs to care about local player.
if (uid != _playerManager.LocalPlayer?.ControlledEntity)
return;
if (args.Current is not ActionsComponentState state)
return;
var serverActions = new SortedSet<ActionType>(state.Actions);
foreach (var act in component.Actions.ToList())
{
if (act.ClientExclusive)
continue;
if (!serverActions.TryGetValue(act, out var serverAct))
{
component.Actions.Remove(act);
if (act.AutoRemove && !(Ui?.Locked ?? false))
Assignments.Remove(act);
continue;
}
act.CopyFrom(serverAct);
serverActions.Remove(serverAct);
if (act is EntityTargetAction entAct)
{
entAct.Whitelist?.UpdateRegistrations();
}
}
// Anything that remains is a new action
foreach (var newAct in serverActions)
{
if (newAct is EntityTargetAction entAct)
entAct.Whitelist?.UpdateRegistrations();
// We create a new action, not just sorting a reference to the state's action.
component.Actions.Add((ActionType) newAct.Clone());
}
UIDirty = true;
}
/// <summary>
/// Highlights the item slot (inventory or hand) that contains this item
/// </summary>
/// <param name="item"></param>
public void HighlightItemSlot(EntityUid item)
{
StopHighlightingItemSlot();
_highlightedEntity = item;
_itemSlotManager.HighlightEntity(item);
}
/// <summary>
/// Stops highlighting any item slots we are currently highlighting.
/// </summary>H
public void StopHighlightingItemSlot()
{
if (_highlightedEntity == null)
return;
_itemSlotManager.UnHighlightEntity(_highlightedEntity.Value);
_highlightedEntity = null;
}
protected override void AddActionInternal(ActionsComponent comp, ActionType action)
{
// Sometimes the client receives actions from the server, before predicting that newly added components will add
// their own shared actions. Just in case those systems ever decided to directly access action properties (e.g.,
// action.Toggled), we will remove duplicates:
if (comp.Actions.TryGetValue(action, out var existing))
{
comp.Actions.Remove(existing);
Assignments.Replace(existing, action);
}
comp.Actions.Add(action);
}
public override void AddAction(EntityUid uid, ActionType action, EntityUid? provider, ActionsComponent? comp = null, bool dirty = true)
{
if (uid != _playerManager.LocalPlayer?.ControlledEntity)
return;
if (!Resolve(uid, ref comp, false))
return;
base.AddAction(uid, action, provider, comp, dirty);
UIDirty = true;
}
public override void RemoveActions(EntityUid uid, IEnumerable<ActionType> actions, ActionsComponent? comp = null, bool dirty = true)
{
if (uid != _playerManager.LocalPlayer?.ControlledEntity)
return;
if (!Resolve(uid, ref comp, false))
return;
base.RemoveActions(uid, actions, comp, dirty);
foreach (var act in actions)
{
if (act.AutoRemove && !(Ui?.Locked ?? false))
Assignments.Remove(act);
}
UIDirty = true;
}
public override void FrameUpdate(float frameTime)
{
// avoid updating GUI when doing predictions & resetting state.
if (UIDirty)
{
UIDirty = false;
UpdateUI();
}
}
/// <summary>
/// Updates the displayed hotbar (and menu) based on current state of actions.
/// </summary>
public void UpdateUI()
{
if (Ui == null)
return;
foreach (var action in Ui.Component.Actions)
{
if (action.AutoPopulate && !Assignments.Assignments.ContainsKey(action))
Assignments.AutoPopulate(action, Ui.SelectedHotbar, false);
}
// get rid of actions that are no longer available to the user
foreach (var (action, index) in Assignments.Assignments.ToList())
{
if (index.Count == 0)
{
Assignments.Assignments.Remove(action);
continue;
}
if (action.AutoRemove && !Ui.Locked && !Ui.Component.Actions.Contains(action))
Assignments.ClearSlot(index[0].Hotbar, index[0].Slot, false);
}
Assignments.PreventAutoPopulate.RemoveWhere(action => !Ui.Component.Actions.Contains(action));
Ui.UpdateUI();
}
public void HandleHotbarKeybind(byte slot, in PointerInputCmdHandler.PointerInputCmdArgs args)
{
Ui?.HandleHotbarKeybind(slot, args);
}
public void HandleChangeHotbarKeybind(byte hotbar, in PointerInputCmdHandler.PointerInputCmdArgs args)
{
Ui?.HandleChangeHotbarKeybind(hotbar, args);
}
/// <summary>
///     Tears down the hotbar UI when the local player detaches from their entity.
/// </summary>
private void OnPlayerDetached(EntityUid uid, ActionsComponent component, PlayerDetachedEvent args)
{
    if (Ui == null)
        return;

    _uiManager.StateRoot.RemoveChild(Ui);
    Ui = null;
}
/// <summary>
///     Builds a fresh hotbar UI for the newly controlled entity.
/// </summary>
private void OnPlayerAttached(EntityUid uid, ActionsComponent component, PlayerAttachedEvent args)
{
    // Fresh assignments: the previous entity's hotbar layout is discarded.
    // NOTE(review): an existing Ui control is overwritten without being removed from
    // StateRoot here — confirm a detach event always precedes attach.
    Assignments = new(Hotbars, Slots);
    Ui = new ActionsUI(this, component);
    _uiManager.StateRoot.AddChild(Ui);
    UIDirty = true;
}
/// <summary>
///     Unregisters this system's keybind handlers on shutdown.
/// </summary>
public override void Shutdown()
{
    base.Shutdown();
    CommandBinds.Unregister<ActionsSystem>();
}
/// <summary>
///     Builds an input handler that forwards a hotbar-slot keybind to the actions UI,
///     simulating a click on the corresponding slot.
/// </summary>
private PointerInputCmdHandler HandleHotbarKeybind(byte slot)
{
    // delegate to the ActionsUI, simulating a click on it
    return new((in PointerInputCmdHandler.PointerInputCmdArgs args) =>
    {
        var playerEntity = _playerManager.LocalPlayer?.ControlledEntity;

        // The component lookup is only a guard ("does the player even have actions?");
        // its value was never used, so discard it.
        if (playerEntity == null ||
            !EntityManager.TryGetComponent<ActionsComponent?>(playerEntity.Value, out _))
            return false;

        HandleHotbarKeybind(slot, args);
        return true;
    }, false);
}
/// <summary>
///     Builds an input handler that forwards a change-hotbar keybind to the actions UI.
/// </summary>
private PointerInputCmdHandler HandleChangeHotbarKeybind(byte hotbar)
{
    // delegate to the ActionsUI, simulating a click on it
    return new((in PointerInputCmdHandler.PointerInputCmdArgs args) =>
    {
        // Fix: the nullable entity was previously passed straight to TryGetComponent
        // without a null check. Guard first, matching the hotbar-slot handler above.
        // The component value itself is unused, so discard it.
        var playerEntity = _playerManager.LocalPlayer?.ControlledEntity;
        if (playerEntity == null ||
            !EntityManager.TryGetComponent<ActionsComponent?>(playerEntity.Value, out _))
            return false;

        HandleChangeHotbarKeybind(hotbar, args);
        return true;
    },
    false);
}
/// <summary>
///     Opens or closes the actions menu via the hotbar UI, if one exists.
/// </summary>
private void ToggleActionsMenu()
{
    Ui?.ToggleActionsMenu();
}
/// <summary>
/// A action slot was pressed. This either performs the action or toggles the targeting mode.
/// </summary>
/// <summary>
/// A action slot was pressed. This either performs the action or toggles the targeting mode.
/// </summary>
internal void OnSlotPressed(ActionSlot slot)
{
    if (Ui == null)
        return;

    if (slot.Action == null || _playerManager.LocalPlayer?.ControlledEntity is not EntityUid user)
        return;

    // If the entity that granted the action is gone, the action is dead — ignore the press.
    if (slot.Action.Provider != null && Deleted(slot.Action.Provider))
        return;

    if (slot.Action is not InstantAction instantAction)
    {
        // for target actions, we go into "select target" mode, we don't
        // message the server until we actually pick our target.

        // if we're clicking the same thing we're already targeting for, then we simply cancel
        // targeting
        Ui.ToggleTargeting(slot);
        return;
    }

    if (slot.Action.ClientExclusive)
    {
        // Client-only action: perform it locally without involving the server.
        if (instantAction.Event != null)
            instantAction.Event.Performer = user;

        PerformAction(Ui.Component, instantAction, instantAction.Event, GameTiming.CurTime);
    }
    else
    {
        // Server-authoritative action: request it predictively.
        var request = new RequestPerformActionEvent(instantAction);
        EntityManager.RaisePredictiveEvent(request);
    }
}
/// <summary>
///     Cancels an active targeting session without performing the action.
/// </summary>
/// <returns>True when a targeting session was active and got cancelled.</returns>
private bool TargetingCancel(in PointerInputCmdHandler.PointerInputCmdArgs args)
{
    // Only react on the first prediction of the tick to avoid repeated handling.
    if (!GameTiming.IsFirstTimePredicted)
        return false;

    // only do something for actual target-based actions
    if (Ui?.SelectingTargetFor?.Action == null)
        return false;

    Ui.StopTargeting();
    return true;
}
/// <summary>
/// If the user clicked somewhere, and they are currently targeting an action, try and perform it.
/// </summary>
/// <summary>
/// If the user clicked somewhere, and they are currently targeting an action, try and perform it.
/// </summary>
/// <returns>True when the click was consumed by the targeting action.</returns>
private bool TargetingOnUse(in PointerInputCmdHandler.PointerInputCmdArgs args)
{
    if (!GameTiming.IsFirstTimePredicted)
        return false;

    // only do something for actual target-based actions
    if (Ui?.SelectingTargetFor?.Action is not TargetedAction action)
        return false;

    if (_playerManager.LocalPlayer?.ControlledEntity is not EntityUid user)
        return false;

    if (!TryComp(user, out ActionsComponent? comp))
        return false;

    // Is the action currently valid?
    // (Precedence note: '&&' binds tighter than '||' — i.e. disabled, OR out of
    // charges, OR still on cooldown.)
    if (!action.Enabled
        || action.Charges != null && action.Charges == 0
        || action.Cooldown.HasValue && action.Cooldown.Value.End > GameTiming.CurTime)
    {
        // The user is targeting with this action, but it is not valid. Maybe mark this click as
        // handled and prevent further interactions.
        return !action.InteractOnMiss;
    }

    switch (action)
    {
        case WorldTargetAction mapTarget:
            return TryTargetWorld(args, mapTarget, user, comp) || !action.InteractOnMiss;

        case EntityTargetAction entTarget:
            return TargetEntity(args, entTarget, user, comp) || !action.InteractOnMiss;

        default:
            Logger.Error($"Unknown targeting action: {action.GetType()}");
            return false;
    }
}
/// <summary>
///     Attempts to perform a world-coordinate targeted action at the clicked position.
/// </summary>
/// <returns>True when the target was valid and the action was performed or requested.</returns>
private bool TryTargetWorld(in PointerInputCmdHandler.PointerInputCmdArgs args, WorldTargetAction action, EntityUid user, ActionsComponent actionComp)
{
    var coords = args.Coordinates.ToMap(EntityManager);

    if (!ValidateWorldTarget(user, coords, action))
    {
        // Invalid target.
        if (action.DeselectOnMiss)
            Ui?.StopTargeting();

        return false;
    }

    if (action.ClientExclusive)
    {
        // Client-only action: fill in the event and perform it locally.
        if (action.Event != null)
        {
            action.Event.Target = coords;
            action.Event.Performer = user;
        }

        PerformAction(actionComp, action, action.Event, GameTiming.CurTime);
    }
    else
        // Server-authoritative: request the action predictively with the chosen coordinates.
        EntityManager.RaisePredictiveEvent(new RequestPerformActionEvent(action, coords));

    // Repeatable actions keep targeting mode active for the next click.
    if (!action.Repeat)
        Ui?.StopTargeting();

    return true;
}
/// <summary>
///     Attempts to perform an entity-targeted action on the clicked entity.
///     Mirrors <see cref="TryTargetWorld"/> for entity targets.
/// </summary>
/// <returns>True when the target was valid and the action was performed or requested.</returns>
private bool TargetEntity(in PointerInputCmdHandler.PointerInputCmdArgs args, EntityTargetAction action, EntityUid user, ActionsComponent actionComp)
{
    if (!ValidateEntityTarget(user, args.EntityUid, action))
    {
        // Invalid target; optionally drop out of targeting mode.
        if (action.DeselectOnMiss)
            Ui?.StopTargeting();

        return false;
    }

    if (action.ClientExclusive)
    {
        // Client-only action: fill in the event and perform it locally.
        if (action.Event != null)
        {
            action.Event.Target = args.EntityUid;
            action.Event.Performer = user;
        }

        PerformAction(actionComp, action, action.Event, GameTiming.CurTime);
    }
    else
        // Server-authoritative: request the action predictively with the chosen entity.
        EntityManager.RaisePredictiveEvent(new RequestPerformActionEvent(action, args.EntityUid));

    // Repeatable actions keep targeting mode active for the next click.
    if (!action.Repeat)
        Ui?.StopTargeting();

    return true;
}
/// <summary>
/// Execute convenience functionality for actions (pop-ups, sound, speech)
/// </summary>
/// <summary>
/// Execute convenience functionality for actions (pop-ups, sound, speech)
/// </summary>
/// <returns>True if the action has any feedback configured (sound or popup text),
/// regardless of whether it was actually shown this tick.</returns>
protected override bool PerformBasicActions(EntityUid user, ActionType action)
{
    var performedAction = action.Sound != null
        || !string.IsNullOrWhiteSpace(action.UserPopup)
        || !string.IsNullOrWhiteSpace(action.Popup);

    // Only actually show/play feedback on the first prediction of the tick;
    // re-predictions would otherwise spam popups and sounds.
    if (!GameTiming.IsFirstTimePredicted)
        return performedAction;

    // UserPopup takes precedence over the generic Popup text (else-if below).
    if (!string.IsNullOrWhiteSpace(action.UserPopup))
    {
        // Append the toggle suffix to the localization key when the action is toggled.
        var msg = (!action.Toggled || string.IsNullOrWhiteSpace(action.PopupToggleSuffix))
            ? Loc.GetString(action.UserPopup)
            : Loc.GetString(action.UserPopup + action.PopupToggleSuffix);

        _popupSystem.PopupEntity(msg, user);
    }
    else if (!string.IsNullOrWhiteSpace(action.Popup))
    {
        var msg = (!action.Toggled || string.IsNullOrWhiteSpace(action.PopupToggleSuffix))
            ? Loc.GetString(action.Popup)
            : Loc.GetString(action.Popup + action.PopupToggleSuffix);

        _popupSystem.PopupEntity(msg, user);
    }

    if (action.Sound != null)
        SoundSystem.Play(Filter.Local(), action.Sound.GetSound(), user, action.AudioParams);

    return performedAction;
}
/// <summary>
///     Restores normal interaction visuals after a targeting session ends.
/// </summary>
internal void StopTargeting()
{
    // Drop the targeting outline and bring back the regular interaction outline.
    _targetOutline.Disable();
    _interactionOutline.SetEnabled(true);

    // Clear any held-item overlay override installed by StartTargeting.
    if (!_overlayMan.TryGetOverlay<ShowHandItemOverlay>(out var overlay) || overlay == null)
        return;

    overlay.IconOverride = null;
    overlay.EntityOverride = null;
}
/// <summary>
///     Enters targeting mode for the given action: overrides the held-item overlay
///     and, for entity targets, enables target highlighting.
/// </summary>
internal void StartTargeting(TargetedAction action)
{
    // override "held-item" overlay
    if (action.TargetingIndicator
        && _overlayMan.TryGetOverlay<ShowHandItemOverlay>(out var handOverlay)
        && handOverlay != null)
    {
        // Prefer showing the providing item itself for "big item" styled actions.
        if (action.ItemIconStyle == ItemActionIconStyle.BigItem && action.Provider != null)
        {
            handOverlay.EntityOverride = action.Provider;
        }
        else if (action.Toggled && action.IconOn != null)
            handOverlay.IconOverride = action.IconOn.Frame0();
        else if (action.Icon != null)
            handOverlay.IconOverride = action.Icon.Frame0();
    }

    // TODO: allow world-targets to check valid positions. E.g., maybe:
    // - Draw a red/green ghost entity
    // - Add a yes/no checkmark where the HandItemOverlay usually is

    // Highlight valid entity targets
    if (action is not EntityTargetAction entityAction)
        return;

    Func<EntityUid, bool>? predicate = null;

    // Exclude the performer from the highlight unless self-targeting is allowed.
    if (!entityAction.CanTargetSelf)
        predicate = e => e != entityAction.AttachedEntity;

    // -1 disables the range limit when access checking is off.
    var range = entityAction.CheckCanAccess ? action.Range : -1;

    _interactionOutline.SetEnabled(false);
    _targetOutline.Enable(range, entityAction.CheckCanAccess, predicate, entityAction.Whitelist, null);
}
/// <summary>
///     Asks other systems (via a broadcast event) to provide an action for an empty
///     hotbar slot and, if one is supplied, assigns it there.
/// </summary>
internal void TryFillSlot(byte hotbar, byte index)
{
    if (Ui == null)
        return;

    var fillEvent = new FillActionSlotEvent();
    RaiseLocalEvent(Ui.Component.Owner, fillEvent, broadcast: true);

    if (fillEvent.Action == null)
        return;

    // Actions created this way live purely client-side and must not be
    // persisted or auto-assigned elsewhere.
    fillEvent.Action.ClientExclusive = true;
    fillEvent.Action.Temporary = true;
    fillEvent.Action.AutoPopulate = false;

    Ui.Component.Actions.Add(fillEvent.Action);
    Assignments.AssignSlot(hotbar, index, fillEvent.Action);
    Ui.UpdateUI();
}
/*public void SaveActionAssignments(string path)
{
// Currently only tested with temporary innate actions (i.e., mapping actions). No guarantee it works with
// other actions. If its meant to be used for full game state saving/loading, the entity that provides
// actions needs to keep the same uid.
var sequence = new SequenceDataNode();
foreach (var (action, assigns) in Assignments.Assignments)
{
var slot = new MappingDataNode();
slot.Add("action", _serializationManager.WriteValue(action));
slot.Add("assignments", _serializationManager.WriteValue(assigns));
sequence.Add(slot);
}
using var writer = _resourceManager.UserData.OpenWriteText(new ResourcePath(path).ToRootedPath());
var stream = new YamlStream { new(sequence.ToSequenceNode()) };
stream.Save(new YamlMappingFix(new Emitter(writer)), false);
}*/
/// <summary>
/// Load actions and their toolbar assignments from a file.
/// </summary>
/// <summary>
/// Load actions and their toolbar assignments from a file.
/// </summary>
/// <param name="path">Resource path of the YAML file to read.</param>
/// <param name="userData">If true, reads from the user-data directory; otherwise from content files.</param>
public void LoadActionAssignments(string path, bool userData)
{
    if (Ui == null)
        return;

    var file = new ResourcePath(path).ToRootedPath();

    // Dispose the reader when done — previously it was leaked.
    using TextReader reader = userData
        ? _resourceManager.UserData.OpenText(file)
        : _resourceManager.ContentFileReadText(file);

    var yamlStream = new YamlStream();
    yamlStream.Load(reader);

    if (yamlStream.Documents[0].RootNode.ToDataNode() is not SequenceDataNode sequence)
        return;

    // Clear all existing assignments before loading. Snapshot first: ClearSlot
    // mutates the assignment collections, which would otherwise invalidate the
    // enumerators mid-loop (UpdateUI uses the same ToList pattern).
    foreach (var (_, assigns) in Assignments.Assignments.ToList())
    {
        foreach (var index in assigns.ToList())
        {
            Assignments.ClearSlot(index.Hotbar, index.Slot, true);
        }
    }

    foreach (var entry in sequence.Sequence)
    {
        if (entry is not MappingDataNode map)
            continue;

        if (!map.TryGet("action", out var actionNode))
            continue;

        var action = _serializationManager.ReadValueCast<ActionType>(typeof(ActionType), actionNode);
        if (action == null)
            continue;

        // Prefer the existing instance so references held elsewhere stay valid.
        if (Ui.Component.Actions.TryGetValue(action, out var existingAction))
        {
            existingAction.CopyFrom(action);
            action = existingAction;
        }
        else
            Ui.Component.Actions.Add(action);

        if (!map.TryGet("assignments", out var assignmentNode))
            continue;

        var assignments = _serializationManager.ReadValueCast<List<(byte Hotbar, byte Slot)>>(typeof(List<(byte Hotbar, byte Slot)>), assignmentNode);
        if (assignments == null)
            continue;

        foreach (var index in assignments)
        {
            Assignments.AssignSlot(index.Hotbar, index.Slot, action);
        }
    }

    UIDirty = true;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Collections;
using System.Collections.Specialized;
using FlatRedBall.Utilities;
namespace FlatRedBall.Math
{
#region XML Docs
/// <summary>
/// A list of IAttachables which is by default two-way.
/// </summary>
/// <typeparam name="T">Type of the list which is of IAttachable.</typeparam>
#endregion
public class AttachableList<T> : IAttachableRemovable, INotifyCollectionChanged, INameable, IList, IList<T> where T : IAttachable // Don't limit T to be a class because this creates bad IL. This is a bug in .NET 2.0
{
#region Fields
string mName;
// made internal for performance
// May 27, 2012
// A user asked me
// why the AttachableList
// class doesn't inherit from
// IList. The reason is because
// if we want the AttachableList to
// have two-way functionality then we
// need custom code in Add and Remove (and
// a few other methods). If AttachableList
// inherited from List, then these methods that
// need custom logic would have to be "new"-ed because
// they are not marked as "virtual" in List. This means
// that if the AttachableList were casted to an IList, it
// would lose its two-way functionality. We don't want that,
// so instead we inherit from IList so that the Add/Remove methods
// work properly regardless of the cast of AttachableList.
internal List<T> mInternalList;
internal IList mInternalListAsIList;
#endregion
#region Properties
#region XML Docs
/// <summary>
/// The number of elements contained in the list.
/// </summary>
#endregion
public int Count
{
get { return mInternalList.Count; }
}
#region XML Docs
/// <summary>
/// Gets and sets the name of this instance.
/// </summary>
#endregion
public string Name
{
get { return mName; }
set { mName = value; }
}
#endregion
#region Methods
#region Constructor
#region XML Docs
/// <summary>
/// Creates a new AttachableList.
/// </summary>
#endregion
public AttachableList()
{
mInternalList = new List<T>();
mInternalListAsIList = mInternalList;
}
#region XML Docs
/// <summary>
/// Creates a new AttachableList with the argument capacity.
/// </summary>
/// <param name="capacity">The initial capacity of the new AttachableList.</param>
#endregion
public AttachableList(int capacity)
{
mInternalList = new List<T>(capacity);
mInternalListAsIList = mInternalList;
}
#endregion
#region Public Static Methods
#region XML Docs
/// <summary>
/// Returns the top parents in the argument AttachableList
/// </summary>
/// <typeparam name="OutType">The type of object in the returned list.</typeparam>
/// <typeparam name="InType">Tye type of object in the argument list</typeparam>
/// <param name="poa">The list to search through.</param>
/// <returns>List of T's that are the top parents of the objects in the argument AttachableList.</returns>
#endregion
public static AttachableList<OutType> GetTopParents<OutType,InType>(AttachableList<InType> poa)
where OutType : PositionedObject
where InType : OutType, IAttachable
{
AttachableList<OutType> oldestParentsOneWay = new AttachableList<OutType>();
foreach (InType po in poa)
{
oldestParentsOneWay.AddUniqueOneWay(po.TopParent as OutType);
}
return oldestParentsOneWay;
}
#endregion
#region Public Methods
#region Add methods
#region XML Docs
/// <summary>
/// Adds the argument to the AttachableList and creates a two-way relationship.
/// </summary>
/// <param name="attachable">The IAttachable to add.</param>
#endregion
/// <summary>
/// Adds the argument to the AttachableList and creates a two-way relationship.
/// </summary>
/// <param name="attachable">The IAttachable to add.
/// NOTE(review): unlike AddOneWay, null is not guarded here and would throw —
/// confirm callers never pass null.</param>
public void Add(T attachable)
{
#if DEBUG
    if (mInternalList.Contains(attachable))
    {
        throw new InvalidOperationException("Can't add the following object twice: " + attachable.Name);
    }
#endif
    // January 4, 2012
    // Victor Chelaru
    // I think we can remove this for performance reasons...but I don't want
    // to until I have a big game I can test this on.
    // Update September 9, 2012
    // Removing now and will be testing on Baron etc
    //if (attachable.ListsBelongingTo.Contains(this) == false)
    attachable.ListsBelongingTo.Add(this);

    // Capture the insertion index before the add so the event can report it cheaply.
    int countBefore = mInternalList.Count;
    mInternalList.Add(attachable);

    if (this.CollectionChanged != null)
    {
        // We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
        this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, attachable, countBefore));
    }
}
/// <summary>
/// Adds every element of the argument sequence, creating a two-way
/// relationship for each (see Add).
/// </summary>
/// <param name="collection">The sequence of IAttachables to add.</param>
public void AddRange(IEnumerable<T> collection)
{
    foreach (T item in collection)
    {
        Add(item);
    }
}
#region XML Docs
/// <summary>
/// Adds all IAttachables contained in the argument AttachableList to this AttachableList and creates two
/// way relationships.
/// </summary>
/// <param name="listToAdd"></param>
#endregion
public void AddRange(AttachableList<T> listToAdd)
{
for (int i = 0; i < listToAdd.Count; i++)
{
Add(listToAdd[i]);
}
}
#region XML Docs
/// <summary>
/// Adds the argument attachable to this without creating a two-way relationship.
/// </summary>
/// <param name="attachable">The IAttachable to add to this.</param>
#endregion
public void AddOneWay(T attachable)
{
if (attachable == null) return;
mInternalList.Add(attachable);
if (this.CollectionChanged != null)
{
// We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, attachable, 0));
}
}
#region XML Docs
/// <summary>
/// Adds all IAttachables contained in the argument AttachableList to this
/// without creating two-way relationships.
/// </summary>
/// <param name="listToAdd">The list of IAttachables to add.</param>
#endregion
public void AddRangeOneWay(AttachableList<T> listToAdd)
{
for (int i = 0; i < listToAdd.Count; i++)
{
mInternalList.Add(listToAdd[i]);
if (this.CollectionChanged != null)
{
// We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, listToAdd[i], i));
}
}
}
#region XML Docs
/// <summary>
/// Adds a new IAttachable if it is not already in the list.
/// </summary>
/// <param name="attachable">The IAttachable to add.</param>
/// <returns>Index where the IAttachable was added. -1 is returned if the list already contains the argument attachable</returns>
#endregion
/// <summary>
/// Adds the argument IAttachable (two-way) only if it is not already contained.
/// </summary>
/// <param name="attachable">The IAttachable to add.</param>
public void AddUnique(T attachable)
{
    if (!this.Contains(attachable))
    {
        Add(attachable);
    }
}
#region XML Docs
/// <summary>
/// Adds the argument IAttachable to this and creates a two-way relationship if
/// this does not already contain the IAttachable.
/// </summary>
/// <param name="attachable">The IAttachable to add.</param>
#endregion
/// <summary>
/// Adds the argument IAttachable without a two-way relationship, only if it
/// is not already contained.
/// </summary>
/// <param name="attachable">The IAttachable to add.</param>
public void AddUniqueOneWay(T attachable)
{
    if (!this.Contains(attachable))
    {
        AddOneWay(attachable);
    }
}
#endregion
/*
I don't know if this method is needed or not - it may provide some speed improvements, but I'm
* going to put it off for some more sure-fire speed improvemens.
public void ChangeIndexOfObject(int oldIndex, int newIndex)
{
if (oldIndex > newIndex)
{
// Moving the object down in the list (so it comes earlier)
T oldObject = this[oldIndex];
for (int i = oldIndex; i > newIndex; i--)
{
this[i] = this[i - 1];
}
this[newIndex] = oldObject;
}
else if (newIndex > oldIndex)
{
// Moving the object up in the list (so it comes later)
T oldObject = this[oldIndex];
for (int i = oldIndex; i < newIndex; i++)
{
}
}
}
*/
#region XML Docs
/// <summary>
/// Removes all IAttachables contained in this and eliminates all
/// two-way relationships.
/// </summary>
#endregion
/// <summary>
/// Removes all IAttachables contained in this and eliminates all
/// two-way relationships.
/// </summary>
public void Clear()
{
    List<T> removed = null;

    // Snapshot the contents only when someone is listening, to avoid the copy otherwise.
    if (this.CollectionChanged != null)
    {
        removed = new List<T>();
        removed.AddRange(mInternalList);
    }

    // Break the two-way relationships before dropping the elements.
    for (int i = 0; i < mInternalList.Count; i++)
    {
        if (mInternalList[i].ListsBelongingTo.Contains(this))
            mInternalList[i].ListsBelongingTo.Remove(this);
    }

    mInternalList.Clear();

    if (this.CollectionChanged != null)
    {
        // We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
        // NOTE(review): if a handler subscribes between the snapshot above and this point,
        // 'removed' is null here — confirm subscribers tolerate a null change list.
        this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, removed, 0));
    }
}
#region XML Docs
/// <summary>
/// Returns whether this contains the argument IAttachable.
/// </summary>
/// <remarks>
/// If the argument is part of this instance and the two share a
/// two-way relationship then this method is able to use this two-way
/// relationship to speed up the method call.
/// </remarks>
/// <param name="attachable">The argument IAttachable to search for.</param>
/// <returns>Whether the argument attachable is contained in this list.</returns>
#endregion
/// <summary>
/// Returns whether this contains the argument IAttachable.
/// </summary>
/// <remarks>
/// A two-way relationship lets us check the attachable's own membership list
/// (fast); one-way entries fall back to a linear scan of the internal list.
/// </remarks>
/// <param name="attachable">The argument IAttachable to search for.</param>
/// <returns>Whether the argument attachable is contained in this list.</returns>
public bool Contains(T attachable)
{
    if (attachable == null)
    {
        return false;
    }

    return attachable.ListsBelongingTo.Contains(this) || mInternalList.Contains(attachable);
}
/// <summary>
/// Returns the IAttachable with name matching the argument, or null if not found.
/// </summary>
/// <remarks>This method performs a case-sensitive search.</remarks>
/// <param name="nameToSearchFor">The name to match when searching.</param>
/// <returns>The IAttachable with matching name or null if none are found.</returns>
/// <summary>
/// Returns the IAttachable with name matching the argument, or null if not found.
/// </summary>
/// <remarks>This method performs a case-sensitive search.</remarks>
/// <param name="nameToSearchFor">The name to match when searching.</param>
/// <returns>The IAttachable with matching name or null if none are found.</returns>
public T FindByName(string nameToSearchFor)
{
    foreach (T item in this)
    {
        if (item.Name == nameToSearchFor)
        {
            return item;
        }
    }

    return default(T);
}
#region XML Docs
/// <summary>
/// Returns the first IAttachable with a name containing the argument string.
/// </summary>
/// <remarks>This method returns any IAttachable that has a name that contains the argument.
/// For example, an object with the name "MySprite" would return if the argument was "Sprite".</remarks>
/// <param name="stringToSearchFor">The string to check IAttachables for.</param>
/// <returns>The IAttachable with a name containing the argument string or null if none are found.</returns>
#endregion
public T FindWithNameContaining(string stringToSearchFor)
{
for (int i = 0; i < this.Count; i++)
{
T t = this[i];
if (t.Name.Contains(stringToSearchFor))
return t;
}
return default(T);
}
#region XML Docs
/// <summary>
/// Returns the first IAttachable with a name containing the argument string, case insensitive.
/// </summary>
/// <remarks>This method returns any IAttachable that has a name that contains the argument.
/// For example, an object with the name "MySprite" would return if the argument was "Sprite".</remarks>
/// <param name="stringToSearchFor">The string to check IAttachables for.</param>
/// <returns>The IAttachable with a name containing the argument string or null if none are found.</returns>
#endregion
/// <summary>
/// Returns the first IAttachable with a name containing the argument string, case insensitive.
/// </summary>
/// <remarks>This method returns any IAttachable that has a name that contains the argument.
/// For example, an object with the name "MySprite" would return if the argument was "Sprite".</remarks>
/// <param name="stringToSearchFor">The string to check IAttachables for.</param>
/// <returns>The IAttachable with a name containing the argument string or null if none are found.</returns>
public T FindWithNameContainingCaseInsensitive(string stringToSearchFor)
{
    // Hoist the loop-invariant lowercasing of the search string; previously it was
    // recomputed on every iteration.
    string loweredSearch = stringToSearchFor.ToLower();

    for (int i = 0; i < this.Count; i++)
    {
        T t = this[i];

        if (t.Name.ToLower().Contains(loweredSearch))
            return t;
    }

    return default(T);
}
public AttachableList<T> FindAllWithNameContaining(string stringToSearchFor)
{
AttachableList<T> listToReturn = new AttachableList<T>();
for (int i = 0; i < this.Count; i++)
{
T t = this[i];
if (t.Name.Contains(stringToSearchFor))
{
listToReturn.Add(t);
}
}
return listToReturn;
}
#region XML Docs
/// <summary>
/// Inserts the argument IAttachable at the argument index and creates a
/// two-way relationship.
/// </summary>
/// <param name="index">The index to insert at.</param>
/// <param name="attachable">The IAttachable to insert.</param>
#endregion
/// <summary>
/// Inserts the argument IAttachable at the argument index and creates a
/// two-way relationship.
/// </summary>
/// <param name="index">The index to insert at.</param>
/// <param name="attachable">The IAttachable to insert. Null is silently ignored
/// (Add, by contrast, would throw on null).</param>
public void Insert(int index, T attachable)
{
    if (attachable == null)
        return;

    // Register the two-way relationship unless it already exists.
    if (attachable.ListsBelongingTo.Contains(this) == false)
        attachable.ListsBelongingTo.Add(this);

    mInternalList.Insert(index, attachable);

    if (this.CollectionChanged != null)
    {
        // We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
        this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, attachable, 0));
    }
}
#region XML Docs
/// <summary>
/// Inserts the argument IAttachable at the argument index but does not create
/// a two-way relationship.
/// </summary>
/// <param name="index">The index to insert at.</param>
/// <param name="attachable">The IAttachable to insert.</param>
#endregion
public void InsertOneWay(int index, T attachable)
{
mInternalList.Insert(index, attachable);
if (this.CollectionChanged != null)
{
// We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, attachable, 0));
}
}
#region XML Docs
/// <summary>
/// Breaks all two-way relationships between this and all contained
/// IAttachables.
/// </summary>
/// <remarks>
/// This will still contain the same number of IAttachables before and
/// after the call.
/// </remarks>
#endregion
/// <summary>
/// Breaks all two-way relationships between this and all contained
/// IAttachables.
/// </summary>
/// <remarks>
/// This will still contain the same number of IAttachables before and
/// after the call.
/// </remarks>
public void MakeOneWay()
{
    for (int index = 0; index < Count; index++)
    {
        IAttachable attachable = this[index];

        if (attachable.ListsBelongingTo.Contains(this))
        {
            attachable.ListsBelongingTo.Remove(this);
        }
    }
}
#region XML Docs
/// <summary>
/// Makes the relationship between all contained IAttachables and this a two way relationship.
/// </summary>
/// <remarks>
/// If an IAttachable is added (through the Add method), the relationship is already a
/// two-way relationship. IAttachables which already have two-way relationships will not be affected
/// by this call. IAttachables that have been added through the AddOneWay call or added
/// through a call that returns a one-way array will be modified so that they hold a reference to
/// this instance in their ListsBelongingTo field. One-way relationships are often created in
/// FRB methods which return AttachableLists.
/// </remarks>
#endregion
/// <summary>
/// Makes the relationship between all contained IAttachables and this a two way relationship.
/// </summary>
/// <remarks>
/// IAttachables which already have two-way relationships are unaffected; entries
/// added one-way gain a reference to this instance in their ListsBelongingTo.
/// </remarks>
public void MakeTwoWay()
{
    for (int index = 0; index < Count; index++)
    {
        IAttachable attachable = this[index];

        if (!attachable.ListsBelongingTo.Contains(this))
        {
            attachable.ListsBelongingTo.Add(this);
        }
    }
}
#region XML Docs
/// <summary>
/// Moves the position of a block of IAttachables beginning at the argument
/// sourceIndex of numberToMove count to the argument destinationIndex.
/// </summary>
/// <param name="sourceIndex">The index of the first IAttachable in the block.</param>
/// <param name="numberToMove">The number of elements in the block.</param>
/// <param name="destinationIndex">The index to insert the block at.</param>
#endregion
/// <summary>
/// Moves the position of a block of IAttachables beginning at the argument
/// sourceIndex of numberToMove count to the argument destinationIndex.
/// </summary>
/// <param name="sourceIndex">The index of the first IAttachable in the block.</param>
/// <param name="numberToMove">The number of elements in the block.</param>
/// <param name="destinationIndex">The index to insert the block at.</param>
public void MoveBlock(int sourceIndex, int numberToMove, int destinationIndex)
{
    // Raw list manipulation: no CollectionChanged events are raised and two-way
    // relationships are untouched (elements stay in the same list).
    if (destinationIndex < sourceIndex)
    {
        // Moving toward the front: each Insert shifts the original element one
        // slot right, so its duplicate now lives at sourceIndex + i + 1.
        for (int i = 0; i < numberToMove; i++)
        {
            mInternalList.Insert(destinationIndex + i, this[sourceIndex + i]);
            mInternalList.RemoveAt(sourceIndex + i + 1);
        }
    }
    else
    {
        // Moving toward the back: removing at sourceIndex slides the next block
        // element into sourceIndex, so both indices stay fixed each iteration.
        // NOTE(review): the final position is destinationIndex minus the removed
        // count — confirm callers expect these shifted semantics.
        for (int i = 0; i < numberToMove; i++)
        {
            mInternalList.Insert(destinationIndex, this[sourceIndex]);
            mInternalList.RemoveAt(sourceIndex);
        }
    }
}
#region XML Docs
/// <summary>
/// Removes the argument IAttachable from this and clears the two-way relationship.
/// </summary>
/// <param name="attachable">The IAttachable to remove from this.</param>
#endregion
/// <summary>
/// Removes the argument IAttachable from this and clears the two-way relationship.
/// </summary>
/// <param name="attachable">The IAttachable to remove from this. Must not be null —
/// it is dereferenced unconditionally.</param>
public void Remove(T attachable)
{
    if (attachable.ListsBelongingTo.Contains(this))
    {
        attachable.ListsBelongingTo.Remove(this);
    }

    // Vic says: This makes things safer, but can also hurt performance. Should we leave this in?
    // Update on March 7, 2011 - this kills performance for particles on the phone. We gotta take it
    // out
    //if (mInternalList.Contains(attachable))
    mInternalList.Remove(attachable);

    if (this.CollectionChanged != null)
    {
        // We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
        // NOTE(review): the Remove notification fires even when the element was not
        // actually present — confirm subscribers tolerate that.
        this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, attachable, 0));
    }
}
#region XML Docs
/// <summary>
/// Removes all IAttachables contained in the argument attachableList from this and clears the two-way relationships between
/// this and all IAttachables removed.
/// </summary>
/// <param name="attachableList">The list of IAttachables to remove.</param>
#endregion
public void Remove(AttachableList<T> attachableList)
{
for (int i = 0; i < attachableList.Count; i++)
{
Remove(attachableList[i]);
}
}
#region XML Docs
/// <summary>
/// Removes the IAttachable at the argument index and clears two-way relationships.
/// </summary>
/// <param name="index">The index of the object to remove.</param>
#endregion
public void RemoveAt(int index)
{
T removed = mInternalList[index];
if (mInternalList[index].ListsBelongingTo.Contains(this))
mInternalList[index].ListsBelongingTo.Remove(this);
mInternalList.RemoveAt(index);
if (this.CollectionChanged != null)
{
// We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, removed, 0));
}
}
/// <summary>
/// Removes the IAttachable at the argument index from the list, but the IAttachable will continue to reference
/// this List in its ListsBelongingTo.
/// </summary>
/// <param name="index"></param>
public void RemoveAtOneWay(int index)
{
T removed = mInternalList[index];
mInternalList.RemoveAt(index);
if (this.CollectionChanged != null)
{
// We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, removed, 0));
}
}
public void Sort(Comparison<T> comparison)
{
mInternalList.Sort(comparison);
}
/// <summary>
/// Sorts the list using a stable insertion sort with the argument comparer.
/// </summary>
/// <param name="comparer">The comparer deciding the relative order of elements.</param>
public void Sort(IComparer<T> comparer)
{
    // Calling Sort on the List is an unstable sort. This
    // results in flickering. We want to implement our own sorting
    // algorithm. We'll do the naive sort used elsewhere:
    // mInternalList.Sort(comparer);
    if (Count == 1 || Count == 0)
        return;

    int whereObjectBelongs;

    for (int i = 1; i < Count; i++)
    {
        var itemAti = this[i];

        // Only out-of-order items are moved; equal items keep their relative
        // order, which is what makes this sort stable.
        if (comparer.Compare(itemAti, this[i-1]) < 0)
        {
            // After Insert, the original occurrence shifts one slot right, so the
            // duplicate to delete sits at i + 1. RemoveAtOneWay is used because the
            // element stays in the list — its membership must not be cleared.
            // NOTE(review): Insert() registers a two-way relationship, so sorting
            // may silently convert one-way entries to two-way — confirm intended.
            if (i == 1)
            {
                Insert(0, itemAti);
                RemoveAtOneWay(i + 1);
                continue;
            }

            // Scan backwards for the first element not greater than itemAti.
            for (whereObjectBelongs = i - 2; whereObjectBelongs > -1; whereObjectBelongs--)
            {
                if (comparer.Compare(itemAti, this[whereObjectBelongs]) >= 0)
                {
                    Insert(whereObjectBelongs + 1, itemAti);
                    RemoveAtOneWay(i + 1);
                    break;
                }
                else if (whereObjectBelongs == 0 && comparer.Compare(itemAti, this[0]) < 0)
                {
                    // Smaller than every preceding element: it belongs at the front.
                    Insert(0, itemAti);
                    RemoveAtOneWay(i + 1);
                    break;
                }
            }
        }
    }
}
/// <summary>
/// Sorts the list by Name in ascending order using a stable insertion sort
/// (same scheme as Sort(IComparer&lt;T&gt;)).
/// </summary>
public void SortNameAscending()
{
    if (Count == 1 || Count == 0)
        return;

    int whereObjectBelongs;

    for (int i = 1; i < Count; i++)
    {
        if ((this[i]).Name.CompareTo(this[i - 1].Name) < 0)
        {
            // Insert shifts the original occurrence one slot right, so the
            // duplicate to delete sits at i + 1 (see Sort(IComparer<T>)).
            if (i == 1)
            {
                Insert(0, this[i]);
                RemoveAtOneWay(i + 1);
                continue;
            }

            // Scan backwards for the first element whose name is not greater.
            for (whereObjectBelongs = i - 2; whereObjectBelongs > -1; whereObjectBelongs--)
            {
                if ((this[i]).Name.CompareTo(this[whereObjectBelongs].Name) >= 0)
                {
                    Insert(whereObjectBelongs + 1, this[i]);
                    RemoveAtOneWay(i + 1);
                    break;
                }
                else if (whereObjectBelongs == 0 && (this[i]).Name.CompareTo(this[0].Name) < 0)
                {
                    // Smaller than every preceding name: it belongs at the front.
                    Insert(0, this[i]);
                    RemoveAtOneWay(i + 1);
                    break;
                }
            }
        }
    }
}
#region XML Docs
/// <summary>
/// Returns a string with the name and the number of elements that this contains.
/// </summary>
/// <returns>The string with this instance's name and element count.</returns>
#endregion
/// <summary>
/// Returns a string with the name and the number of elements that this contains.
/// </summary>
/// <returns>The string with this instance's name and element count.</returns>
public override string ToString()
{
    return string.Format("{0}: {1}", mName, this.Count);
}
#endregion
#region Protected Methods
#endregion
#endregion
#region IList<T> Members
public int IndexOf(T item)
{
return mInternalList.IndexOf(item);
}
public T this[int index]
{
get
{
return mInternalList[index];
}
set
{
mInternalList[index] = value;
}
}
#endregion
#region ICollection<T> Members
public void CopyTo(T[] array, int arrayIndex)
{
mInternalList.CopyTo(array, arrayIndex);
}
public bool IsReadOnly
{
get { return ((IList)mInternalList).IsReadOnly; }
}
bool ICollection<T>.Remove(T item)
{
// so inefficient
bool returnValue = this.Contains(item);
Remove((T)item);
return returnValue;
}
#endregion
#region IEnumerable<T> Members
public IEnumerator<T> GetEnumerator()
{
for (int i = 0; i < mInternalList.Count; i++)
{
yield return mInternalList[i];
}
//return mInternalList.GetEnumerator();
}
#endregion
#region IEnumerable Members
IEnumerator IEnumerable.GetEnumerator()
{
for (int i = 0; i < mInternalList.Count; i++)
{
yield return mInternalList[i];
}
// return mInternalList.GetEnumerator();
}
#endregion
#region IList Members
//public int Add(object value)
//{
//}
int IList.Add(object value)
{
int returnValue = this.Count;
this.Add((T)value);
return returnValue;
}
bool IList.Contains(object value)
{
return ((IList)mInternalList).Contains(value);
}
int IList.IndexOf(object value)
{
return ((IList)mInternalList).IndexOf(value);
}
void IList.Insert(int index, object value)
{
this.Insert(index, (T)value);
}
bool IList.IsFixedSize
{
get { return ((IList)mInternalList).IsFixedSize; }
}
void IList.Remove(object value)
{
Remove((T)value);
}
object IList.this[int index]
{
get
{
return mInternalList[index];
}
set
{
((IList)mInternalList)[index] = value;
}
}
#endregion
#region ICollection Members
void ICollection.CopyTo(Array array, int index)
{
((ICollection)mInternalList).CopyTo(array, index);
}
bool ICollection.IsSynchronized
{
get { return ((ICollection)mInternalList).IsSynchronized; }
}
object ICollection.SyncRoot
{
get { return ((ICollection)mInternalList).SyncRoot; }
}
#endregion
#region IAttachableRemovable Members
void IAttachableRemovable.RemoveGuaranteedContain(IAttachable attachable)
{
attachable.ListsBelongingTo.Remove(this);
mInternalListAsIList.Remove(attachable);
if (this.CollectionChanged != null)
{
// We put the index for Silverlight - but I don't want to do indexof for performance reasons so 0 it is
this.CollectionChanged(this, new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, attachable, 0));
}
}
#endregion
#region IList Members
void IList.Clear()
{
    // Routed through the strongly-typed Clear.
    Clear();
}
bool IList.IsReadOnly
{
    // Mirrors the strongly-typed IsReadOnly.
    get { return IsReadOnly; }
}
void IList.RemoveAt(int index)
{
    // Routed through the strongly-typed RemoveAt.
    RemoveAt(index);
}
#endregion
#region ICollection Members
int ICollection.Count
{
    // Element count read straight from the wrapped list.
    get { return mInternalList.Count; }
}
#endregion
public event NotifyCollectionChangedEventHandler CollectionChanged;
}
}
| |
//
// C#-like events for AVFoundation classes
//
// Author:
// Miguel de Icaza (miguel@novell.com)
// Copyright 2009, Novell, Inc.
// Copyright 2010, Novell, Inc.
// Copyright 2011, 2012 Xamarin Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//
using System;
using MonoMac.Foundation;
using MonoMac.ObjCRuntime;
namespace MonoMac.AVFoundation {
/// <summary>EventArgs carrying the NSError reported by an AV delegate callback.</summary>
public class AVErrorEventArgs : EventArgs {
    public AVErrorEventArgs (NSError error)
    {
        Error = error;
    }
    /// <summary>The error supplied by the native callback.</summary>
    public NSError Error { get; private set; }
}
/// <summary>EventArgs carrying a boolean status flag from an AV delegate callback.</summary>
public class AVStatusEventArgs : EventArgs {
    /// <summary>Wraps the flag passed by the native callback.</summary>
    public AVStatusEventArgs (bool status)
    {
        this.Status = status;
    }
    /// <summary>The flag reported by the callback (e.g. playback finished successfully).</summary>
    public bool Status { get; private set; }
}
#pragma warning disable 672
// Bridges AVAudioPlayer's Objective-C delegate callbacks to C# handlers.
// The cb* fields are wired up by AVAudioPlayer's event add/remove accessors below.
sealed class InternalAVAudioPlayerDelegate : AVAudioPlayerDelegate {
    internal EventHandler cbEndInterruption, cbBeginInterruption;
    internal EventHandler<AVStatusEventArgs> cbFinishedPlaying;
    internal EventHandler<AVErrorEventArgs> cbDecoderError;
    [Preserve (Conditional = true)]
    public override void FinishedPlaying (AVAudioPlayer player, bool flag)
    {
        if (cbFinishedPlaying != null)
            cbFinishedPlaying (player, new AVStatusEventArgs (flag));
        // Detect a user handler that disposed the player mid-callback: the
        // native side still holds the object, so this corrupts program state.
        if (player.Handle == IntPtr.Zero)
            throw new ObjectDisposedException ("player", "the player object was Dispose()d during the callback, this has corrupted the state of the program");
    }
    [Preserve (Conditional = true)]
    public override void DecoderError (AVAudioPlayer player, NSError error)
    {
        if (cbDecoderError != null)
            cbDecoderError (player, new AVErrorEventArgs (error));
    }
#if !MONOMAC
    // Interruption callbacks are compiled out for MonoMac builds; the
    // corresponding events on AVAudioPlayer then never fire there.
    [Preserve (Conditional = true)]
    public override void BeginInterruption (AVAudioPlayer player)
    {
        if (cbBeginInterruption != null)
            cbBeginInterruption (player, EventArgs.Empty);
    }
    [Preserve (Conditional = true)]
    public override void EndInterruption (AVAudioPlayer player)
    {
        if (cbEndInterruption != null)
            cbEndInterruption (player, EventArgs.Empty);
    }
#endif
}
#pragma warning restore 672
// Adds C#-style events on top of AVAudioPlayer by lazily installing an
// InternalAVAudioPlayerDelegate as the WeakDelegate.
public partial class AVAudioPlayer {
    // Returns the event-bridge delegate, creating and installing one if the
    // current WeakDelegate is not already ours.
    // NOTE(review): installing the bridge replaces any user-assigned delegate.
    InternalAVAudioPlayerDelegate EnsureEventDelegate ()
    {
        var del = WeakDelegate as InternalAVAudioPlayerDelegate;
        if (del == null){
            del = new InternalAVAudioPlayerDelegate ();
            WeakDelegate = del;
        }
        return del;
    }
    /// <summary>Raised when playback finishes; Status carries the success flag.</summary>
    public event EventHandler<AVStatusEventArgs> FinishedPlaying {
        add {
            EnsureEventDelegate ().cbFinishedPlaying += value;
        }
        remove {
            EnsureEventDelegate ().cbFinishedPlaying -= value;
        }
    }
    /// <summary>Raised when the native decoder reports an error.</summary>
    public event EventHandler<AVErrorEventArgs> DecoderError {
        add {
            EnsureEventDelegate ().cbDecoderError += value;
        }
        remove {
            EnsureEventDelegate ().cbDecoderError -= value;
        }
    }
    /// <summary>Raised when an audio interruption begins.</summary>
    public event EventHandler BeginInterruption {
        add {
            EnsureEventDelegate ().cbBeginInterruption += value;
        }
        remove {
            EnsureEventDelegate ().cbBeginInterruption -= value;
        }
    }
    /// <summary>Raised when an audio interruption ends.</summary>
    public event EventHandler EndInterruption {
        add {
            EnsureEventDelegate ().cbEndInterruption += value;
        }
        remove {
            EnsureEventDelegate ().cbEndInterruption -= value;
        }
    }
}
// Bridges AVAudioRecorder's Objective-C delegate callbacks to C# handlers.
// The cb* fields are wired up by AVAudioRecorder's event add/remove accessors below.
internal class InternalAVAudioRecorderDelegate : AVAudioRecorderDelegate {
    internal EventHandler cbEndInterruption, cbBeginInterruption;
    internal EventHandler<AVStatusEventArgs> cbFinishedRecording;
    internal EventHandler<AVErrorEventArgs> cbEncoderError;
    [Preserve (Conditional = true)]
    public override void FinishedRecording (AVAudioRecorder recorder, bool flag)
    {
        if (cbFinishedRecording != null)
            cbFinishedRecording (recorder, new AVStatusEventArgs (flag));
    }
    [Preserve (Conditional = true)]
    public override void EncoderError (AVAudioRecorder recorder, NSError error)
    {
        if (cbEncoderError != null)
            cbEncoderError (recorder, new AVErrorEventArgs (error));
    }
#if !MONOMAC
    // Interruption callbacks are compiled out for MonoMac builds.
    [Preserve (Conditional = true)]
    public override void BeginInterruption (AVAudioRecorder recorder)
    {
        if (cbBeginInterruption != null)
            cbBeginInterruption (recorder, EventArgs.Empty);
    }
    [Preserve (Conditional = true)]
    [Obsolete ("Deprecated in iOS 6.0")]
    public override void EndInterruption (AVAudioRecorder recorder)
    {
        if (cbEndInterruption != null)
            cbEndInterruption (recorder, EventArgs.Empty);
    }
#endif
}
// Adds C#-style events on top of AVAudioRecorder by lazily installing an
// InternalAVAudioRecorderDelegate as the WeakDelegate.
public partial class AVAudioRecorder {
    // Returns the event-bridge delegate, creating one on demand.
    // NOTE(review): installing the bridge replaces any user-assigned delegate.
    InternalAVAudioRecorderDelegate EnsureEventDelegate ()
    {
        var del = WeakDelegate as InternalAVAudioRecorderDelegate;
        if (del == null){
            del = new InternalAVAudioRecorderDelegate ();
            WeakDelegate = del;
        }
        return del;
    }
    /// <summary>Raised when recording stops; Status carries the success flag.</summary>
    public event EventHandler<AVStatusEventArgs> FinishedRecording {
        add {
            EnsureEventDelegate ().cbFinishedRecording += value;
        }
        remove {
            EnsureEventDelegate ().cbFinishedRecording -= value;
        }
    }
    /// <summary>Raised when the native encoder reports an error.</summary>
    public event EventHandler<AVErrorEventArgs> EncoderError {
        add {
            EnsureEventDelegate ().cbEncoderError += value;
        }
        remove {
            EnsureEventDelegate ().cbEncoderError -= value;
        }
    }
    /// <summary>Raised when an audio interruption begins.</summary>
    public event EventHandler BeginInterruption {
        add {
            EnsureEventDelegate ().cbBeginInterruption += value;
        }
        remove {
            EnsureEventDelegate ().cbBeginInterruption -= value;
        }
    }
    /// <summary>Raised when an audio interruption ends.</summary>
    public event EventHandler EndInterruption {
        add {
            EnsureEventDelegate ().cbEndInterruption += value;
        }
        remove {
            EnsureEventDelegate ().cbEndInterruption -= value;
        }
    }
}
/// <summary>EventArgs carrying the new sample rate reported by the audio session.</summary>
public class AVSampleRateEventArgs : EventArgs {
    /// <summary>Wraps the sample rate (in Hz) passed by the native callback.</summary>
    public AVSampleRateEventArgs (double sampleRate)
    {
        this.SampleRate = sampleRate;
    }
    /// <summary>The reported sample rate.</summary>
    public double SampleRate { get; private set; }
}
/// <summary>EventArgs carrying the new channel count reported by the audio session.</summary>
public class AVChannelsEventArgs : EventArgs {
    /// <summary>Wraps the channel count passed by the native callback.</summary>
    public AVChannelsEventArgs (int numberOfChannels)
    {
        this.NumberOfChannels = numberOfChannels;
    }
    /// <summary>The reported number of channels.</summary>
    public int NumberOfChannels { get; private set; }
}
/// <summary>EventArgs carrying the new audio-session category name.</summary>
public class AVCategoryEventArgs : EventArgs {
    /// <summary>Wraps the category string passed by the native callback.</summary>
    public AVCategoryEventArgs (string category)
    {
        this.Category = category;
    }
    /// <summary>The reported category.</summary>
    public string Category { get; private set; }
}
#if !MONOMAC
// Bridges AVAudioSession's Objective-C delegate callbacks to C# handlers.
// NOTE(review): only the interruption and input-availability callbacks are
// overridden here; the category/sample-rate/channel handler fields are wired
// by AVAudioSession's events but never invoked from this class — confirm
// whether additional overrides were intended.
internal class InternalAVAudioSessionDelegate : AVAudioSessionDelegate {
    internal EventHandler cbEndInterruption, cbBeginInterruption;
    internal EventHandler<AVCategoryEventArgs> cbCategoryChanged;
    internal EventHandler<AVStatusEventArgs> cbInputAvailabilityChanged;
    internal EventHandler<AVSampleRateEventArgs> cbSampleRateChanged;
    internal EventHandler<AVChannelsEventArgs> cbInputChanged;
    internal EventHandler<AVChannelsEventArgs> cbOutputChanged;
    // The owning session; used as the sender for all raised events.
    AVAudioSession session;
    [Preserve (Conditional = true)]
    public InternalAVAudioSessionDelegate (AVAudioSession session)
    {
        this.session = session;
    }
    [Preserve (Conditional = true)]
    public override void BeginInterruption ()
    {
        if (cbBeginInterruption != null)
            cbBeginInterruption (session, EventArgs.Empty);
    }
    [Preserve (Conditional = true)]
    public override void EndInterruption ()
    {
        if (cbEndInterruption != null)
            cbEndInterruption (session, EventArgs.Empty);
    }
    [Preserve (Conditional = true)]
    public override void InputIsAvailableChanged (bool isInputAvailable)
    {
        if (cbInputAvailabilityChanged != null)
            cbInputAvailabilityChanged (session, new AVStatusEventArgs (isInputAvailable));
    }
}
// Adds C#-style events on top of AVAudioSession by lazily installing an
// InternalAVAudioSessionDelegate as the WeakDelegate.
public partial class AVAudioSession {
    // Returns the event-bridge delegate, creating one on demand.
    InternalAVAudioSessionDelegate EnsureEventDelegate ()
    {
        var del = WeakDelegate as InternalAVAudioSessionDelegate;
        if (del == null){
            del = new InternalAVAudioSessionDelegate (this);
            WeakDelegate = del;
        }
        return del;
    }
    /// <summary>Raised when an audio interruption begins.</summary>
    public event EventHandler BeginInterruption {
        add {
            EnsureEventDelegate ().cbBeginInterruption += value;
        }
        remove {
            EnsureEventDelegate ().cbBeginInterruption -= value;
        }
    }
    /// <summary>Raised when an audio interruption ends.</summary>
    public event EventHandler EndInterruption {
        add {
            EnsureEventDelegate ().cbEndInterruption += value;
        }
        remove {
            // BUGFIX: previously unsubscribed from cbBeginInterruption, so
            // EndInterruption handlers could never be removed (and the wrong
            // event lost a subscriber instead).
            EnsureEventDelegate ().cbEndInterruption -= value;
        }
    }
    /// <summary>Raised when the session category changes.</summary>
    public event EventHandler<AVCategoryEventArgs> CategoryChanged {
        add {
            EnsureEventDelegate ().cbCategoryChanged += value;
        }
        remove {
            EnsureEventDelegate ().cbCategoryChanged -= value;
        }
    }
    /// <summary>Raised when audio input availability changes.</summary>
    public event EventHandler<AVStatusEventArgs> InputAvailabilityChanged {
        add {
            EnsureEventDelegate ().cbInputAvailabilityChanged += value;
        }
        remove {
            EnsureEventDelegate ().cbInputAvailabilityChanged -= value;
        }
    }
    /// <summary>Raised when the hardware sample rate changes.</summary>
    public event EventHandler<AVSampleRateEventArgs> SampleRateChanged {
        add {
            EnsureEventDelegate ().cbSampleRateChanged += value;
        }
        remove {
            EnsureEventDelegate ().cbSampleRateChanged -= value;
        }
    }
    /// <summary>Raised when the number of input channels changes.</summary>
    public event EventHandler<AVChannelsEventArgs> InputChannelsChanged {
        add {
            EnsureEventDelegate ().cbInputChanged += value;
        }
        remove {
            // BUGFIX: previously *added* the handler to cbOutputChanged;
            // now correctly unsubscribes from cbInputChanged.
            EnsureEventDelegate ().cbInputChanged -= value;
        }
    }
    /// <summary>Raised when the number of output channels changes.</summary>
    public event EventHandler<AVChannelsEventArgs> OutputChannelsChanged {
        add {
            EnsureEventDelegate ().cbOutputChanged += value;
        }
        remove {
            EnsureEventDelegate ().cbOutputChanged -= value;
        }
    }
}
#endif
}
| |
/**
* Couchbase Lite for .NET
*
* Original iOS version by Jens Alfke
* Android Port by Marty Schoch, Traun Leyden
* C# Port by Zack Gramana
*
* Copyright (c) 2012, 2013, 2014 Couchbase, Inc. All rights reserved.
* Portions (c) 2013, 2014 Xamarin, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using Apache.Http;
using Apache.Http.Util;
using Couchbase.Lite;
using Couchbase.Lite.Support;
using Couchbase.Lite.Util;
using Sharpen;
namespace Couchbase.Lite.Support
{
public class MultipartDocumentReader : MultipartReaderDelegate
{
/// <summary>The response which contains the input stream we need to read from</summary>
private HttpResponse response;
private MultipartReader multipartReader;
private BlobStoreWriter curAttachment;
private ByteArrayBuffer jsonBuffer;
private IDictionary<string, object> document;
private Database database;
private IDictionary<string, BlobStoreWriter> attachmentsByName;
private IDictionary<string, BlobStoreWriter> attachmentsByMd5Digest;
// Creates a reader that parses a (possibly multipart) document response,
// streaming attachment parts into the given database's blob store.
public MultipartDocumentReader(HttpResponse response, Database database)
{
    this.response = response;
    this.database = database;
}
// The parsed document body; null until the JSON part has been parsed.
public virtual IDictionary<string, object> GetDocumentProperties()
{
    return document;
}
// Deserializes the accumulated JSON bytes into the document dictionary.
// Wraps parse failures in InvalidOperationException for callers.
public virtual void ParseJsonBuffer()
{
    try
    {
        document = Manager.GetObjectMapper().ReadValue<IDictionary>(jsonBuffer.ToByteArray());
    }
    catch (IOException e)
    {
        throw new InvalidOperationException("Failed to parse json buffer", e);
    }
    // The buffer is single-shot: release it once parsed.
    jsonBuffer = null;
}
// Configures the reader for a multipart response body. For non-multipart
// (plain JSON) responses this method must not be called; AppendData then
// accumulates raw JSON instead.
public virtual void SetContentType(string contentType)
{
    // BUGFIX: fail with a descriptive argument exception instead of a
    // NullReferenceException from StartsWith when contentType is null.
    if (contentType == null)
    {
        throw new ArgumentNullException("contentType");
    }
    if (!contentType.StartsWith("multipart/"))
    {
        throw new ArgumentException("contentType must start with multipart/");
    }
    multipartReader = new MultipartReader(contentType, this);
    attachmentsByName = new Dictionary<string, BlobStoreWriter>();
    attachmentsByMd5Digest = new Dictionary<string, BlobStoreWriter>();
}
// Feeds raw response bytes either into the multipart parser or, for plain
// JSON responses, straight into the JSON buffer.
// NOTE(review): in the non-multipart path this assumes jsonBuffer was
// already created (see StartedPart); a call before that would throw — confirm callers.
public virtual void AppendData(byte[] data)
{
    if (multipartReader != null)
    {
        multipartReader.AppendData(data);
    }
    else
    {
        jsonBuffer.Append(data, 0, data.Length);
    }
}
// Completes parsing: for multipart responses, verifies the MIME stream
// terminated properly and registers attachments; otherwise parses the
// accumulated JSON body.
public virtual void Finish()
{
    if (multipartReader != null)
    {
        if (!multipartReader.Finished())
        {
            throw new InvalidOperationException("received incomplete MIME multipart response");
        }
        RegisterAttachments();
    }
    else
    {
        ParseJsonBuffer();
    }
}
// Validates the document's "_attachments" metadata against the MIME bodies
// actually received, fixing up digests where possible, then hands the blob
// writers to the database. Throws InvalidOperationException on any mismatch.
// BUGFIX: the original (Java-ported) code used "%s"/"%d" placeholders with
// string.Format, which does not substitute them — all error messages were
// emitted with literal placeholders. Converted to {0}-style composite format.
private void RegisterAttachments()
{
    int numAttachmentsInDoc = 0;
    IDictionary<string, object> attachments = (IDictionary<string, object>)document.Get("_attachments");
    if (attachments == null)
    {
        return;
    }
    foreach (string attachmentName in attachments.Keys)
    {
        IDictionary<string, object> attachment = (IDictionary<string, object>)attachments.Get(attachmentName);
        int length = 0;
        if (attachment.ContainsKey("length"))
        {
            length = ((int)attachment.Get("length"));
        }
        // "encoded_length" (post-encoding size) takes precedence when present.
        if (attachment.ContainsKey("encoded_length"))
        {
            length = ((int)attachment.Get("encoded_length"));
        }
        if (attachment.ContainsKey("follows") && ((bool)attachment.Get("follows")) == true)
        {
            // Check that each attachment in the JSON corresponds to an attachment MIME body.
            // Look up the attachment by either its MIME Content-Disposition header or MD5 digest:
            string digest = (string)attachment.Get("digest");
            BlobStoreWriter writer = attachmentsByName.Get(attachmentName);
            if (writer != null)
            {
                // Identified the MIME body by the filename in its Disposition header:
                string actualDigest = writer.MD5DigestString();
                if (digest != null && !digest.Equals(actualDigest) && !digest.Equals(writer.SHA1DigestString()))
                {
                    string errMsg = string.Format("Attachment '{0}' has incorrect MD5 digest ({1}; should be {2})",
                        attachmentName, digest, actualDigest);
                    throw new InvalidOperationException(errMsg);
                }
                attachment.Put("digest", actualDigest);
            }
            else
            {
                if (digest != null)
                {
                    writer = attachmentsByMd5Digest.Get(digest);
                    if (writer == null)
                    {
                        // BUGFIX: the original format string had three placeholders
                        // but only one argument; report the name and missing digest.
                        string errMsg = string.Format("Attachment '{0}' does not appear in MIME body (digest {1})",
                            attachmentName, digest);
                        throw new InvalidOperationException(errMsg);
                    }
                }
                else
                {
                    if (attachments.Count == 1 && attachmentsByMd5Digest.Count == 1)
                    {
                        // Else there's only one attachment, so just assume it matches & use it:
                        writer = attachmentsByMd5Digest.Values.GetEnumerator().Next();
                        attachment.Put("digest", writer.MD5DigestString());
                    }
                    else
                    {
                        // No digest metadata, no filename in MIME body; give up:
                        string errMsg = string.Format("Attachment '{0}' has no digest metadata; cannot identify MIME body",
                            attachmentName);
                        throw new InvalidOperationException(errMsg);
                    }
                }
            }
            // Check that the length matches:
            if (writer.GetLength() != length)
            {
                string errMsg = string.Format("Attachment '{0}' has incorrect length field {1} (should be {2})",
                    attachmentName, length, writer.GetLength());
                throw new InvalidOperationException(errMsg);
            }
            ++numAttachmentsInDoc;
        }
        else
        {
            if (attachment.ContainsKey("data") && length > 1000)
            {
                // BUGFIX: supply the length argument the original message omitted.
                string msg = string.Format("Attachment '{0}' sent inline (len={1}). Large attachments "
                    + "should be sent in MIME parts for reduced memory overhead.", attachmentName, length);
                Log.W(Database.Tag, msg);
            }
        }
    }
    if (numAttachmentsInDoc < attachmentsByMd5Digest.Count)
    {
        string msg = string.Format("More MIME bodies ({0}) than attachments ({1}) ",
            attachmentsByMd5Digest.Count, numAttachmentsInDoc);
        throw new InvalidOperationException(msg);
    }
    // hand over the (uninstalled) blobs to the database to remember:
    database.RememberAttachmentWritersForDigests(attachmentsByMd5Digest);
}
// MultipartReaderDelegate: called at the start of each MIME part. The first
// part is the document JSON; every subsequent part is an attachment body.
public virtual void StartedPart(IDictionary<string, string> headers)
{
    if (document == null)
    {
        jsonBuffer = new ByteArrayBuffer(1024);
    }
    else
    {
        curAttachment = database.GetAttachmentWriter();
        string contentDisposition = headers.Get("Content-Disposition");
        if (contentDisposition != null && contentDisposition.StartsWith("attachment; filename="))
        {
            // TODO: Parse this less simplistically. Right now it assumes it's in exactly the same
            // format generated by -[CBL_Pusher uploadMultipartRevision:]. CouchDB (as of 1.2) doesn't
            // output any headers at all on attachments so there's no compatibility issue yet.
            string contentDispositionUnquoted = Misc.UnquoteString(contentDisposition);
            // Offset 21 == length of the "attachment; filename=" prefix.
            string name = Sharpen.Runtime.Substring(contentDispositionUnquoted, 21);
            if (name != null)
            {
                attachmentsByName.Put(name, curAttachment);
            }
        }
    }
}
// MultipartReaderDelegate: appends part bytes either to the JSON buffer
// (first part) or to the current attachment's blob writer.
public virtual void AppendToPart(byte[] data)
{
    if (jsonBuffer != null)
    {
        jsonBuffer.Append(data, 0, data.Length);
    }
    else
    {
        curAttachment.AppendData(data);
    }
}
// MultipartReaderDelegate: closes out the current part — the document JSON
// is parsed immediately; a finished attachment is indexed by its MD5 digest
// for later matching in RegisterAttachments.
public virtual void FinishedPart()
{
    if (jsonBuffer != null)
    {
        ParseJsonBuffer();
    }
    else
    {
        curAttachment.Finish();
        string md5String = curAttachment.MD5DigestString();
        attachmentsByMd5Digest.Put(md5String, curAttachment);
        curAttachment = null;
    }
}
}
}
| |
//
// Copyright (c) 2004-2020 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Security;
namespace NLog.UnitTests
{
using System;
using NLog.Common;
using System.IO;
using System.Text;
using System.Globalization;
using NLog.Layouts;
using NLog.Config;
using NLog.Targets;
using Xunit;
using System.Xml.Linq;
using System.Xml;
using System.IO.Compression;
#if (NET3_5 || NET4_0 || NET4_5) && !NETSTANDARD
using Ionic.Zip;
#endif
public abstract class NLogTestBase
{
// Resets all global NLog state before every test so tests cannot leak
// configuration, internal-logger settings, or trace listeners into each other.
protected NLogTestBase()
{
    //reset before every test
    if (LogManager.Configuration != null)
    {
        //flush all events if needed.
        LogManager.Configuration.Close();
    }
    if (LogManager.LogFactory != null)
    {
        LogManager.LogFactory.ResetCandidateConfigFilePath();
    }
    LogManager.Configuration = null;
    InternalLogger.Reset();
    InternalLogger.LogLevel = LogLevel.Off;
    LogManager.ThrowExceptions = true; // Ensure exceptions are thrown by default during unit-testing
    LogManager.ThrowConfigExceptions = null;
    // Drop any trace/debug listeners registered by earlier tests.
    System.Diagnostics.Trace.Listeners.Clear();
#if !NETSTANDARD
    System.Diagnostics.Debug.Listeners.Clear();
#endif
}
// Asserts the named DebugTarget has logged exactly <paramref name="val"/> messages.
protected void AssertDebugCounter(string targetName, int val)
{
    Assert.Equal(val, GetDebugTarget(targetName).Counter);
}
// Asserts the named DebugTarget's most recent message equals <paramref name="msg"/>.
protected void AssertDebugLastMessage(string targetName, string msg)
{
    Assert.Equal(msg, GetDebugLastMessage(targetName));
}
// Asserts the named DebugTarget's most recent message contains <paramref name="msg"/>.
protected void AssertDebugLastMessageContains(string targetName, string msg)
{
    string debugLastMessage = GetDebugLastMessage(targetName);
    Assert.True(debugLastMessage.Contains(msg),
        $"Expected to find '{msg}' in last message value on '{targetName}', but found '{debugLastMessage}'");
}
// Last message logged to the named target, using the current global configuration.
protected string GetDebugLastMessage(string targetName)
{
    return GetDebugLastMessage(targetName, LogManager.Configuration);
}
protected string GetDebugLastMessage(string targetName, LoggingConfiguration configuration)
{
    return GetDebugTarget(targetName, configuration).LastMessage;
}
// Resolves the named DebugTarget from the current global configuration.
public DebugTarget GetDebugTarget(string targetName)
{
    return GetDebugTarget(targetName, LogManager.Configuration);
}
// Resolves the named DebugTarget from the given configuration,
// failing the test when it is absent (or not a DebugTarget — the cast throws).
protected DebugTarget GetDebugTarget(string targetName, LoggingConfiguration configuration)
{
    var debugTarget = (DebugTarget)configuration.FindTargetByName(targetName);
    Assert.NotNull(debugTarget);
    return debugTarget;
}
// Asserts the file begins with the given contents (compared byte-for-byte
// after encoding); the file may contain additional trailing data.
protected void AssertFileContentsStartsWith(string fileName, string contents, Encoding encoding)
{
    FileInfo fi = new FileInfo(fileName);
    if (!fi.Exists)
        Assert.True(false, "File '" + fileName + "' doesn't exist.");
    byte[] encodedBuf = encoding.GetBytes(contents);
    byte[] buf = File.ReadAllBytes(fileName);
    Assert.True(encodedBuf.Length <= buf.Length,
        $"File:{fileName} encodedBytes:{encodedBuf.Length} does not match file.content:{buf.Length}, file.length = {fi.Length}");
    // Compare only the expected prefix, reporting the first mismatching byte.
    for (int i = 0; i < encodedBuf.Length; ++i)
    {
        if (encodedBuf[i] != buf[i])
            Assert.True(encodedBuf[i] == buf[i],
                $"File:{fileName} content mismatch {(int)encodedBuf[i]} <> {(int)buf[i]} at index {i}");
    }
}
// Asserts the file's text (decoded with the given encoding) ends with <paramref name="contents"/>.
protected void AssertFileContentsEndsWith(string fileName, string contents, Encoding encoding)
{
    if (!File.Exists(fileName))
        Assert.True(false, "File '" + fileName + "' doesn't exist.");
    string fileText = File.ReadAllText(fileName, encoding);
    Assert.True(fileText.Length >= contents.Length);
    Assert.Equal(contents, fileText.Substring(fileText.Length - contents.Length));
}
// IFileCompressor used to test pluggable archive compression.
// On target frameworks without Ionic.Zip, CompressFile compiles to a no-op.
protected class CustomFileCompressor : IFileCompressor
{
    public void CompressFile(string fileName, string archiveFileName)
    {
#if (NET3_5 || NET4_0 || NET4_5) && !NETSTANDARD
        using (var zip = new Ionic.Zip.ZipFile())
        {
            zip.AddFile(fileName);
            zip.Save(archiveFileName);
        }
#endif
    }
}
#if NET3_5 || NET4_0
/// <summary>
/// Asserts that the archive's single entry decompresses to exactly
/// <paramref name="contents"/> encoded with <paramref name="encoding"/>.
/// </summary>
protected void AssertZipFileContents(string fileName, string contents, Encoding encoding)
{
    if (!File.Exists(fileName))
        Assert.True(false, "File '" + fileName + "' doesn't exist.");
    byte[] encodedBuf = encoding.GetBytes(contents);
    using (var zip = new Ionic.Zip.ZipFile(fileName))
    {
        Assert.Equal(1, zip.Count);
        Assert.Equal(encodedBuf.Length, zip[0].UncompressedSize);
        byte[] buf = new byte[zip[0].UncompressedSize];
        using (var fs = zip[0].OpenReader())
        {
            // BUGFIX: Stream.Read may return fewer bytes than requested
            // (common with decompression streams), so fill the buffer in a loop.
            int offset = 0;
            while (offset < buf.Length)
            {
                int n = fs.Read(buf, offset, buf.Length - offset);
                if (n == 0)
                    break;
                offset += n;
            }
        }
        for (int i = 0; i < buf.Length; ++i)
        {
            Assert.Equal(encodedBuf[i], buf[i]);
        }
    }
}
#elif NET4_5
/// <summary>
/// Asserts that the archive's single entry decompresses to exactly
/// <paramref name="contents"/> encoded with <paramref name="encoding"/>.
/// </summary>
protected void AssertZipFileContents(string fileName, string contents, Encoding encoding)
{
    FileInfo fi = new FileInfo(fileName);
    if (!fi.Exists)
        Assert.True(false, "File '" + fileName + "' doesn't exist.");
    byte[] encodedBuf = encoding.GetBytes(contents);
    using (var stream = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (var zip = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        Assert.Single(zip.Entries);
        Assert.Equal(encodedBuf.Length, zip.Entries[0].Length);
        byte[] buf = new byte[(int)zip.Entries[0].Length];
        using (var fs = zip.Entries[0].Open())
        {
            // BUGFIX: the entry stream is a DeflateStream, whose Read may
            // return fewer bytes than requested; fill the buffer in a loop.
            int offset = 0;
            while (offset < buf.Length)
            {
                int n = fs.Read(buf, offset, buf.Length - offset);
                if (n == 0)
                    break;
                offset += n;
            }
        }
        for (int i = 0; i < buf.Length; ++i)
        {
            Assert.Equal(encodedBuf[i], buf[i]);
        }
    }
}
#else
/// <summary>Zip verification is not supported on this target framework.</summary>
protected void AssertZipFileContents(string fileName, string contents, Encoding encoding)
{
    Assert.True(false);
}
#endif
// Convenience overload: no byte-order mark expected.
protected void AssertFileContents(string fileName, string contents, Encoding encoding)
{
    AssertFileContents(fileName, contents, encoding, false);
}
// Asserts the file's bytes are exactly <paramref name="contents"/> in the given
// encoding, optionally preceded by the encoding's preamble (BOM).
protected void AssertFileContents(string fileName, string contents, Encoding encoding, bool addBom)
{
    FileInfo fi = new FileInfo(fileName);
    if (!fi.Exists)
        Assert.True(false, "File '" + fileName + "' doesn't exist.");
    byte[] encodedBuf = encoding.GetBytes(contents);
    //add bom if needed
    if (addBom)
    {
        var preamble = encoding.GetPreamble();
        if (preamble.Length > 0)
        {
            //insert before
            encodedBuf = preamble.Concat(encodedBuf).ToArray();
        }
    }
    byte[] buf;
    // Share-friendly open: the file may still be held by a file target.
    using (var fs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete))
    {
        int index = 0;
        int count = (int)fs.Length;
        buf = new byte[count];
        // Read in a loop because Stream.Read may return fewer bytes than requested.
        while (count > 0)
        {
            int n = fs.Read(buf, index, count);
            if (n == 0)
                break;
            index += n;
            count -= n;
        }
    }
    Assert.True(encodedBuf.Length == buf.Length,
        $"File:{fileName} encodedBytes:{encodedBuf.Length} does not match file.content:{buf.Length}, file.length = {fi.Length}");
    for (int i = 0; i < buf.Length; ++i)
    {
        if (encodedBuf[i] != buf[i])
            Assert.True(encodedBuf[i] == buf[i],
                $"File:{fileName} content mismatch {(int)encodedBuf[i]} <> {(int)buf[i]} at index {i}");
    }
}
// Asserts that some line of the file contains <paramref name="contentToCheck"/>.
// The needle must be a single line because the scan is line-by-line.
protected void AssertFileContains(string fileName, string contentToCheck, Encoding encoding)
{
    if (contentToCheck.Contains(Environment.NewLine))
        Assert.True(false, "Please use only single line string to check.");
    FileInfo fi = new FileInfo(fileName);
    if (!fi.Exists)
        Assert.True(false, "File '" + fileName + "' doesn't exist.");
    using (TextReader fs = new StreamReader(fileName, encoding))
    {
        string line;
        while ((line = fs.ReadLine()) != null)
        {
            if (line.Contains(contentToCheck))
                return;
        }
    }
    Assert.True(false, "File doesn't contains '" + contentToCheck + "'");
}
// Asserts that no line of the file contains <paramref name="contentToCheck"/>.
protected void AssertFileNotContains(string fileName, string contentToCheck, Encoding encoding)
{
    if (contentToCheck.Contains(Environment.NewLine))
        Assert.True(false, "Please use only single line string to check.");
    FileInfo fi = new FileInfo(fileName);
    if (!fi.Exists)
        Assert.True(false, "File '" + fileName + "' doesn't exist.");
    using (TextReader fs = new StreamReader(fileName, encoding))
    {
        string line;
        while ((line = fs.ReadLine()) != null)
        {
            if (line.Contains(contentToCheck))
                Assert.False(true, "File contains '" + contentToCheck + "'");
        }
    }
}
// Returns <paramref name="s"/> concatenated <paramref name="times"/> times.
protected string StringRepeat(int times, string s)
{
    // Pre-size the builder to avoid intermediate growth.
    var buffer = new StringBuilder(s.Length * times);
    for (int repetition = 0; repetition < times; ++repetition)
    {
        buffer.Append(s);
    }
    return buffer.ToString();
}
/// <summary>
/// Render layout <paramref name="layout"/> with dummy <see cref="LogEventInfo" />and compare result with <paramref name="expected"/>.
/// </summary>
protected static void AssertLayoutRendererOutput(Layout layout, string expected)
{
    var logEventInfo = LogEventInfo.Create(LogLevel.Info, "loggername", "message");
    AssertLayoutRendererOutput(layout, logEventInfo, expected);
}
/// <summary>
/// Render layout <paramref name="layout"/> with <paramref name="logEventInfo"/> and compare result with <paramref name="expected"/>.
/// </summary>
protected static void AssertLayoutRendererOutput(Layout layout, LogEventInfo logEventInfo, string expected)
{
    // Initialize/Close bracket the render so the layout's full lifecycle is exercised.
    layout.Initialize(null);
    string actual = layout.Render(logEventInfo);
    layout.Close();
    Assert.Equal(expected, actual);
}
#if NET4_5
/// <summary>
/// Get line number of previous line.
/// </summary>
protected int GetPrevLineNumber([CallerLineNumber] int callingFileLineNumber = 0)
{
    // CallerLineNumber supplies the call site's line; subtract one for the line above it.
    return callingFileLineNumber - 1;
}
#else
/// <summary>
/// Get line number of previous line.
/// </summary>
protected int GetPrevLineNumber()
{
    //fixed value set with #line 100000
    return 100001;
}
#endif
// Runs <paramref name="action"/> with the internal logger redirected to a
// thread-safe string writer and returns everything it logged.
protected string RunAndCaptureInternalLog(SyncAction action, LogLevel internalLogLevel)
{
    var stringWriter = new Logger();
    InternalLogger.LogWriter = stringWriter;
    // BUGFIX: honor the requested level; the parameter was previously ignored
    // and the level was unconditionally forced to Trace.
    InternalLogger.LogLevel = internalLogLevel;
    InternalLogger.IncludeTimestamp = false;
    action();
    return stringWriter.ToString();
}
/// <summary>
/// Runs an unstable integration test body, retrying on failure.
/// </summary>
/// <param name="tries">Maximum number of attempts; the final failure is rethrown.</param>
/// <param name="action">The test body to execute.</param>
protected void RetryingIntegrationTest(int tries, Action action)
{
    for (int attempt = 1; attempt <= tries; attempt++)
    {
        try
        {
            action();
            return; // success
        }
        catch (Exception)
        {
            // Out of attempts: propagate the last failure to the test framework.
            if (attempt >= tries)
            {
                throw;
            }
        }
    }
}
/// <summary>
/// This class has to be used when outputting from the InternalLogger.LogWriter.
/// Just creating a string writer will cause issues, since string writer is not thread safe.
/// This can cause issues when calling the ToString() on the text writer, since the underlying stringbuilder
/// of the textwriter, has char arrays that can be corrupted by concurrent writes from multiple threads.
/// this is a simple wrapper that just locks access to the writer so only one thread can access
/// it at a time.
/// </summary>
private class Logger : TextWriter
{
    // All access is serialized by locking this writer instance.
    private readonly StringWriter writer = new StringWriter();
    public override Encoding Encoding => writer.Encoding;
#if NETSTANDARD1_5
    // On netstandard1.5 TextWriter funnels output through Write(char),
    // so that override must be locked as well.
    public override void Write(char value)
    {
        lock (this.writer)
        {
            this.writer.Write(value);
        }
    }
#endif
    public override void Write(string value)
    {
        lock (writer)
        {
            writer.Write(value);
        }
    }
    public override void WriteLine(string value)
    {
        lock (writer)
        {
            writer.WriteLine(value);
        }
    }
    // Locked snapshot of everything written so far.
    public override string ToString()
    {
        lock (writer)
        {
            return writer.ToString();
        }
    }
}
/// <summary>
/// Creates <see cref="CultureInfo"/> instance for test purposes
/// </summary>
/// <param name="cultureName">Culture name to create</param>
/// <remarks>
/// Creates <see cref="CultureInfo"/> instance with non-userOverride
/// flag to provide expected results when running tests in different
/// system cultures (with overridden culture options)
/// </remarks>
// Builds a CultureInfo that ignores the machine's user-customized regional
// settings, so culture-dependent assertions behave identically everywhere.
protected static CultureInfo GetCultureInfo(string cultureName)
{
    const bool useUserOverride = false;
    return new CultureInfo(cultureName, useUserOverride);
}
/// <summary>
/// True when the TRAVIS environment variable equals "true" (case-insensitive),
/// i.e. the tests are running on Travis CI.
/// </summary>
protected static bool IsTravis()
{
    string travis = Environment.GetEnvironmentVariable("TRAVIS");
    // string.Equals tolerates a null first argument, matching the original null guard.
    return string.Equals(travis, "true", StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// True when the APPVEYOR environment variable equals "true" (case-insensitive),
/// i.e. the tests are running on AppVeyor CI.
/// </summary>
protected static bool IsAppVeyor()
{
    string appVeyor = Environment.GetEnvironmentVariable("APPVEYOR");
    // string.Equals tolerates a null first argument, matching the original null guard.
    return string.Equals(appVeyor, "true", StringComparison.OrdinalIgnoreCase);
}
public delegate void SyncAction();
// Temporarily disables LogManager.ThrowExceptions, restoring the previous
// value on Dispose. Wrap code that is expected to log errors in a using-block.
public class NoThrowNLogExceptions : IDisposable
{
    private readonly bool throwExceptions;
    public NoThrowNLogExceptions()
    {
        throwExceptions = LogManager.ThrowExceptions;
        LogManager.ThrowExceptions = false;
    }
    public void Dispose()
    {
        LogManager.ThrowExceptions = throwExceptions;
    }
}
// Captures and restores global InternalLogger/LogManager state — and optionally
// redirects Console.Out/Console.Error — for the duration of a test.
public class InternalLoggerScope : IDisposable
{
    private readonly TextWriter oldConsoleOutputWriter;
    public StringWriter ConsoleOutputWriter { get; private set; }
    private readonly TextWriter oldConsoleErrorWriter;
    public StringWriter ConsoleErrorWriter { get; private set; }
    private readonly LogLevel globalThreshold;
    private readonly bool throwExceptions;
    private readonly bool? throwConfigExceptions;
    public InternalLoggerScope(bool redirectConsole = false)
    {
        InternalLogger.LogLevel = LogLevel.Info;
        if (redirectConsole)
        {
            // Fixed "\n" newline keeps expected console output platform-independent.
            ConsoleOutputWriter = new StringWriter() { NewLine = "\n" };
            ConsoleErrorWriter = new StringWriter() { NewLine = "\n" };
            oldConsoleOutputWriter = Console.Out;
            oldConsoleErrorWriter = Console.Error;
            Console.SetOut(ConsoleOutputWriter);
            Console.SetError(ConsoleErrorWriter);
        }
        globalThreshold = LogManager.GlobalThreshold;
        throwExceptions = LogManager.ThrowExceptions;
        throwConfigExceptions = LogManager.ThrowConfigExceptions;
    }
    public void SetConsoleError(StringWriter consoleErrorWriter)
    {
        // NOTE(review): ConsoleOutputWriter is checked as a proxy for
        // "constructed with redirectConsole=true"; verify that checking
        // ConsoleErrorWriter was not intended here.
        if (ConsoleOutputWriter == null || consoleErrorWriter == null)
            throw new InvalidOperationException("Initialize with redirectConsole=true");
        ConsoleErrorWriter = consoleErrorWriter;
        Console.SetError(consoleErrorWriter);
    }
    public void SetConsoleOutput(StringWriter consoleOutputWriter)
    {
        if (ConsoleOutputWriter == null || consoleOutputWriter == null)
            throw new InvalidOperationException("Initialize with redirectConsole=true");
        ConsoleOutputWriter = consoleOutputWriter;
        Console.SetOut(consoleOutputWriter);
    }
    public void Dispose()
    {
        // Remember the log file before Reset() clears the property, so both the
        // pre-reset and post-reset file paths can be cleaned up below.
        var logFile = InternalLogger.LogFile;
        InternalLogger.Reset();
        LogManager.GlobalThreshold = globalThreshold;
        LogManager.ThrowExceptions = throwExceptions;
        LogManager.ThrowConfigExceptions = throwConfigExceptions;
        if (ConsoleOutputWriter != null)
            Console.SetOut(oldConsoleOutputWriter);
        if (ConsoleErrorWriter != null)
            Console.SetError(oldConsoleErrorWriter);
        if (!string.IsNullOrEmpty(InternalLogger.LogFile))
        {
            if (File.Exists(InternalLogger.LogFile))
                File.Delete(InternalLogger.LogFile);
        }
        if (!string.IsNullOrEmpty(logFile) && logFile != InternalLogger.LogFile)
        {
            if (File.Exists(logFile))
                File.Delete(logFile);
        }
    }
}
// Asserts that the dictionary contains the given key AND that the key maps
// to exactly the expected value (two separate xUnit assertions so failures
// distinguish "missing key" from "wrong value").
protected static void AssertContainsInDictionary<TKey, TValue>(IDictionary<TKey, TValue> dictionary, TKey key, TValue value)
{
Assert.Contains(key, dictionary);
Assert.Equal(value, dictionary[key]);
}
}
}
| |
//BSD, 2014-present, WinterDev
//----------------------------------------------------------------------------
// Anti-Grain Geometry - Version 2.4
// Copyright (C) 2002-2005 Maxim Shemanarev (http://www.antigrain.com)
//
// Permission to copy, use, modify, sell and distribute this software
// is granted provided this copyright notice appears in all copies.
// This software is provided "as is" without express or implied
// warranty, and with no claim as to its suitability for any purpose.
//
//----------------------------------------------------------------------------
// Contact: mcseem@antigrain.com
// mcseemagg@yahoo.com
// http://www.antigrain.com
//----------------------------------------------------------------------------
//
// Perspective 2D transformations
//
//----------------------------------------------------------------------------
using System;
namespace PixelFarm.CpuBlit.VertexProcessing
{
//=======================================================trans_perspective
/// <summary>
/// Composes two coordinate transformers: points are run through the
/// left transformer first, then through the right one.
/// </summary>
public class CoordTransformationChain : ICoordTransformer
{
    readonly ICoordTransformer _first;  // applied first
    readonly ICoordTransformer _second; // applied second

    /// <summary>
    /// Builds a chain that applies <paramref name="left"/> before
    /// <paramref name="right"/>.
    /// </summary>
    public CoordTransformationChain(ICoordTransformer left, ICoordTransformer right)
    {
        _first = left;
        _second = right;
    }

    ICoordTransformer ICoordTransformer.MultiplyWith(ICoordTransformer another)
    {
        // Fold the extra transformer into the right-hand side of the chain.
        return new CoordTransformationChain(_first, _second.MultiplyWith(another));
    }

    void ICoordTransformer.Transform(ref double x, ref double y)
    {
        _first.Transform(ref x, ref y);
        _second.Transform(ref x, ref y);
    }

    ICoordTransformer ICoordTransformer.CreateInvert()
    {
        //TODO: impl — inverting a chain is not supported yet.
        throw new System.NotSupportedException();
    }

    /// <summary>Transformer applied first.</summary>
    public ICoordTransformer Left => _first;
    /// <summary>Transformer applied second.</summary>
    public ICoordTransformer Right => _second;
    public CoordTransformerKind Kind => CoordTransformerKind.TransformChain;
    //TODO: impl here again — a chain is conservatively reported as non-identity.
    public bool IsIdentity => false;
}
/// <summary>
/// Plain value-type snapshot of the 9 elements of a 3x3 perspective matrix,
/// laid out row-by-row (sx,shy,w0 / shx,sy,w1 / tx,ty,w2). Produced by
/// Perspective.GetInternalElements().
/// </summary>
public struct PerspectiveMat
{
public double
sx, shy, w0,
shx, sy, w1,
tx, ty, w2;
}
/// <summary>
/// 2D perspective (projective) transformation backed by a 3x3 matrix.
/// Port of AGG's trans_perspective. Transform() applies the matrix as:
///   w  =  x*w0  + y*w1  + w2
///   x' = (x*sx  + y*shx + tx) / w
///   y' = (x*shy + y*sy  + ty) / w
/// </summary>
public sealed class Perspective : ICoordTransformer
{
    const double EPSILON = 1e-14;
    //this is 3x3 matrix , (rows x cols)
    internal double
        sx, shy, w0,
        shx, sy, w1,
        tx, ty, w2;

    /// <summary>Creates an identity transformation.</summary>
    public Perspective()
    {
        sx = 1; shy = 0; w0 = 0;
        shx = 0; sy = 1; w1 = 0;
        tx = 0; ty = 0; w2 = 1;
    }

    /// <summary>Creates a transformation from explicit matrix elements.</summary>
    public Perspective(double v0_sx, double v1_shy, double v2_w0,
           double v3_shx, double v4_sy, double v5_w1,
           double v6_tx, double v7_ty, double v8_w2)
    {
        sx = v0_sx; shy = v1_shy; w0 = v2_w0;
        shx = v3_shx; sy = v4_sy; w1 = v5_w1;
        tx = v6_tx; ty = v7_ty; w2 = v8_w2;
    }

    /// <summary>Creates a perspective matrix from an affine one (w-row = 0,0,1).</summary>
    public Perspective(Affine a)
    {
        sx = a.sx; /**/shy = a.shy; /**/w0 = 0;
        shx = a.shx; /**/sy = a.sy; /**/w1 = 0;
        tx = a.tx; /**/ty = a.ty; /**/w2 = 1;
    }

    /// <summary>Copy constructor.</summary>
    public Perspective(Perspective a)
    {
        sx = a.sx; shy = a.shy; w0 = a.w0;
        shx = a.shx; sy = a.sy; w1 = a.w1;
        tx = a.tx; ty = a.ty; w2 = a.w2;
    }

    /// <summary>
    /// Rectangle-to-quadrilateral: maps rectangle (x1,y1)-(x2,y2) onto the
    /// quadrilateral given as double[8] = x1,y1, x2,y2, x3,y3, x4,y4.
    /// </summary>
    public Perspective(double x1, double y1, double x2, double y2, double[] quad)
    {
        unsafe
        {
            fixed (double* q_h = &quad[0])
            {
                // Build the rectangle's 4 corners in the same x,y-pair layout.
                double* r = stackalloc double[8];
                r[0] = r[6] = x1;
                r[2] = r[4] = x2;
                r[1] = r[3] = y1;
                r[5] = r[7] = y2;
                InternalGenerateQuadToQuad(r, q_h);
            }
        }
    }

    /// <summary>Quadrilateral-to-rectangle (inverse of the constructor above).</summary>
    public Perspective(double[] quad, double x1, double y1, double x2, double y2)
    {
        unsafe
        {
            fixed (double* q_h = &quad[0])
            {
                double* r = stackalloc double[8];
                r[0] = r[6] = x1;
                r[2] = r[4] = x2;
                r[1] = r[3] = y1;
                r[5] = r[7] = y2;
                InternalGenerateQuadToQuad(q_h, r);
            }
        }
    }

    /// <summary>Arbitrary quadrilateral-to-quadrilateral transformation.</summary>
    public Perspective(double[] src, double[] dst)
    {
        quad_to_quad(src, dst);
    }

    /// <summary>
    /// True when the matrix is exactly the identity. Computed on every read:
    /// the previous cached flag could report a stale answer after the matrix
    /// was mutated through a public member (e.g. quad_to_quad, from_affine).
    /// </summary>
    public bool IsIdentity =>
        sx == 1 && shy == 0 && w0 == 0 &&
        shx == 0 && sy == 1 && w1 == 0 &&
        tx == 0 && ty == 0 && w2 == 1;

    ICoordTransformer ICoordTransformer.MultiplyWith(ICoordTransformer another)
    {
        // operator* copies its left operand, so 'this' is never mutated here.
        if (another is Affine aff)
        {
            return this * aff;
        }
        else if (another is Perspective p2)
        {
            return this * p2;
        }
        else
        {
            return null;
        }
    }

    // Copies all 9 elements from another matrix into this one.
    void Set(Perspective Other)
    {
        sx = Other.sx;
        shy = Other.shy;
        w0 = Other.w0;
        shx = Other.shx;
        sy = Other.sy;
        w1 = Other.w1;
        tx = Other.tx;
        ty = Other.ty;
        w2 = Other.w2;
    }

    //-------------------------------------- Quadrilateral transformations
    /// <summary>
    /// Sets this matrix to map quadrilateral qs onto quadrilateral qd.
    /// Both arguments are double[8]: x1,y1, x2,y2, x3,y3, x4,y4.
    /// Returns false when either quadrilateral is degenerate.
    /// </summary>
    public bool quad_to_quad(double[] qs, double[] qd)
    {
        unsafe
        {
            fixed (double* qs_h = &qs[0])
            fixed (double* qd_h = &qd[0])
            {
                return InternalGenerateQuadToQuad(qs_h, qd_h);
            }
        }
    }

    // quad->quad = inverse(unit-square->qs) followed by (unit-square->qd).
    unsafe bool InternalGenerateQuadToQuad(double* qs_h, double* qdHead)
    {
        Perspective p = new Perspective();
        if (!square_to_quad(qs_h))
        {
            return false;
        }
        invert();
        if (!p.square_to_quad(qdHead))
        {
            return false;
        }
        multiply(p);
        return true;
    }

    ICoordTransformer ICoordTransformer.CreateInvert()
    {
        Perspective newOne = new Perspective(this);
        if (newOne.invert())
        {
            return newOne;
        }
        else
        {
            // Degenerate matrix: no inverse exists.
            return null;
        }
    }

    // Map the unit square (0,0)-(1,1) onto the quadrilateral q (double[8]).
    // Returns false for a degenerate (collinear) quadrilateral.
    unsafe bool square_to_quad(double* q)
    {
        double dx = q[0] - q[2] + q[4] - q[6];
        double dy = q[1] - q[3] + q[5] - q[7];
        if (dx == 0.0 && dy == 0.0)
        {
            // Affine case (parallelogram)
            //---------------
            sx = q[2] - q[0];
            shy = q[3] - q[1];
            w0 = 0.0;
            shx = q[4] - q[2];
            sy = q[5] - q[3];
            w1 = 0.0;
            tx = q[0];
            ty = q[1];
            w2 = 1.0;
        }
        else
        {
            double dx1 = q[2] - q[4];
            double dy1 = q[3] - q[5];
            double dx2 = q[6] - q[4];
            double dy2 = q[7] - q[5];
            double den = dx1 * dy2 - dx2 * dy1;
            if (den == 0.0)
            {
                // Singular case: zero everything and report failure.
                //---------------
                sx = shy = w0 = shx = sy = w1 = tx = ty = w2 = 0.0;
                return false;
            }
            // General (true perspective) case
            //---------------
            double u = (dx * dy2 - dy * dx2) / den;
            double v = (dy * dx1 - dx * dy1) / den;
            sx = q[2] - q[0] + u * q[2];
            shy = q[3] - q[1] + u * q[3];
            w0 = u;
            shx = q[6] - q[0] + v * q[6];
            sy = q[7] - q[1] + v * q[7];
            w1 = v;
            tx = q[0];
            ty = q[1];
            w2 = 1.0;
        }
        return true;
    }

    //--------------------------------------------------------- Operations
    /// <summary>Loads the elements of an affine matrix (w-row = 0,0,1).</summary>
    public Perspective from_affine(Affine a)
    {
        sx = a.sx; shy = a.shy; w0 = 0;
        shx = a.shx; sy = a.sy; w1 = 0;
        tx = a.tx; ty = a.ty; w2 = 1;
        return this;
    }

    // Reset - load an identity matrix
    Perspective reset()
    {
        sx = 1; shy = 0; w0 = 0;
        shx = 0; sy = 1; w1 = 0;
        tx = 0; ty = 0; w2 = 1;
        return this;
    }

    // Invert matrix in place. Returns false in the degenerate case
    // (determinant == 0), in which case all elements are zeroed.
    bool invert()
    {
        double d0 = sy * w2 - w1 * ty;
        double d1 = w0 * ty - shy * w2;
        double d2 = shy * w1 - w0 * sy;
        double d = sx * d0 + shx * d1 + tx * d2;
        if (d == 0.0)
        {
            sx = shy = w0 = shx = sy = w1 = tx = ty = w2 = 0.0;
            return false;
        }
        d = 1.0 / d;
        // Work from a copy since the fields are overwritten as we go.
        Perspective a = new Perspective(this);
        sx = d * d0;
        shy = d * d1;
        w0 = d * d2;
        shx = d * (a.w1 * a.tx - a.shx * a.w2);
        sy = d * (a.sx * a.w2 - a.w0 * a.tx);
        w1 = d * (a.w0 * a.shx - a.sx * a.w1);
        tx = d * (a.shx * a.ty - a.sy * a.tx);
        ty = d * (a.shy * a.tx - a.sx * a.ty);
        w2 = d * (a.sx * a.sy - a.shy * a.shx);
        return true;
    }

    // Direct transformation operations (mutate this and return it for chaining)
    Perspective translate(double x, double y)
    {
        tx += x;
        ty += y;
        return this;
    }

    Perspective rotate(double a)
    {
        multiply(Affine.NewRotation(a));
        return this;
    }

    Perspective scale(double s)
    {
        multiply(Affine.NewScaling(s));
        return this;
    }

    Perspective scale(double x, double y)
    {
        multiply(Affine.NewScaling(x, y));
        return this;
    }

    // this = a * this
    Perspective multiply(Perspective a)
    {
        Perspective b = new Perspective(this);
        sx = a.sx * b.sx + a.shx * b.shy + a.tx * b.w0;
        shx = a.sx * b.shx + a.shx * b.sy + a.tx * b.w1;
        tx = a.sx * b.tx + a.shx * b.ty + a.tx * b.w2;
        shy = a.shy * b.sx + a.sy * b.shy + a.ty * b.w0;
        sy = a.shy * b.shx + a.sy * b.sy + a.ty * b.w1;
        ty = a.shy * b.tx + a.sy * b.ty + a.ty * b.w2;
        w0 = a.w0 * b.sx + a.w1 * b.shy + a.w2 * b.w0;
        w1 = a.w0 * b.shx + a.w1 * b.sy + a.w2 * b.w1;
        w2 = a.w0 * b.tx + a.w1 * b.ty + a.w2 * b.w2;
        return this;
    }

    //------------------------------------------------------------------------
    // this = a * this, where a is affine (its implicit w-row is 0,0,1 so
    // this matrix's w-row is unchanged).
    Perspective multiply(Affine a)
    {
        Perspective b = new Perspective(this);
        sx = a.sx * b.sx + a.shx * b.shy + a.tx * b.w0;
        shx = a.sx * b.shx + a.shx * b.sy + a.tx * b.w1;
        tx = a.sx * b.tx + a.shx * b.ty + a.tx * b.w2;
        shy = a.shy * b.sx + a.sy * b.shy + a.ty * b.w0;
        sy = a.shy * b.shx + a.sy * b.sy + a.ty * b.w1;
        ty = a.shy * b.tx + a.sy * b.ty + a.ty * b.w2;
        return this;
    }

    //------------------------------------------------------------------------
    // this = this * b
    Perspective premultiply(Perspective b)
    {
        Perspective a = new Perspective(this);
        sx = a.sx * b.sx + a.shx * b.shy + a.tx * b.w0;
        shx = a.sx * b.shx + a.shx * b.sy + a.tx * b.w1;
        tx = a.sx * b.tx + a.shx * b.ty + a.tx * b.w2;
        shy = a.shy * b.sx + a.sy * b.shy + a.ty * b.w0;
        sy = a.shy * b.shx + a.sy * b.sy + a.ty * b.w1;
        ty = a.shy * b.tx + a.sy * b.ty + a.ty * b.w2;
        w0 = a.w0 * b.sx + a.w1 * b.shy + a.w2 * b.w0;
        w1 = a.w0 * b.shx + a.w1 * b.sy + a.w2 * b.w1;
        w2 = a.w0 * b.tx + a.w1 * b.ty + a.w2 * b.w2;
        return this;
    }

    //------------------------------------------------------------------------
    // Multiply by inverse(m); m itself is left untouched.
    Perspective multiply_inv(Perspective m)
    {
        // BUGFIX: Perspective is a reference type — the old code aliased m
        // ("Perspective t = m;") and then inverted the CALLER's matrix in
        // place. Invert a copy instead.
        Perspective t = new Perspective(m);
        t.invert();
        return multiply(t);
    }

    //------------------------------------------------------------------------
    Perspective trans_perspectivemultiply_inv(Affine m)
    {
        // NOTE(review): assumes Affine.CreateInvert() returns a new inverted
        // matrix without mutating m — confirm against Affine's implementation.
        Affine t = m;
        var invert = t.CreateInvert();
        return multiply(invert);
    }

    //------------------------------------------------------------------------
    // this = inverse(m) * this; m itself is left untouched.
    Perspective premultiply_inv(Perspective m)
    {
        // BUGFIX: copy m before inverting (same aliasing issue as multiply_inv).
        Perspective t = new Perspective(m);
        t.invert();
        Set(t.multiply(this));
        return this;
    }

    // Multiply inverse of "m" by "this" and assign the result to "this"
    Perspective premultiply_inv(Affine m)
    {
        Perspective t = new Perspective(m);
        t.invert();
        Set(t.multiply(this));
        return this;
    }

    //--------------------------------------------------------- Load/Store
    // Stores the 9 elements into m (double[9], row-major).
    void store_to(double[] m)
    {
        m[0] = sx; m[1] = shy; m[2] = w0;
        m[3] = shx; m[4] = sy; m[5] = w1;
        m[6] = tx; m[7] = ty; m[8] = w2;
    }

    //------------------------------------------------------------------------
    // Loads the 9 elements from m (double[9], row-major).
    Perspective load_from(double[] m)
    {
        sx = m[0]; shy = m[1]; w0 = m[2];
        shx = m[3]; sy = m[4]; w1 = m[5];
        tx = m[6]; ty = m[7]; w2 = m[8];
        return this;
    }

    //---------------------------------------------------------- Operators
    /// <summary>Returns a NEW matrix equal to a*b; neither operand is modified.</summary>
    public static Perspective operator *(Perspective a, Perspective b)
    {
        // BUGFIX: Perspective is a class, so the old "Perspective temp = a;"
        // aliased the left operand and the subsequent multiply silently
        // mutated it ("c = a * b" used to change a). Multiply a copy instead.
        Perspective temp = new Perspective(a);
        temp.multiply(b);
        return temp;
    }

    /// <summary>Returns a NEW matrix equal to a*b; a is not modified.</summary>
    public static Perspective operator *(Perspective a, Affine b)
    {
        // BUGFIX: same left-operand aliasing issue as the operator above.
        Perspective temp = new Perspective(a);
        temp.multiply(b);
        return temp;
    }

    //---------------------------------------------------- Transformations
    /// <summary>Transforms the point (px,py) in place, including the perspective divide.</summary>
    public void Transform(ref double px, ref double py)
    {
        double x = px;
        double y = py;
        double m = 1.0 / (x * w0 + y * w1 + w2);
        px = m * (x * sx + y * shx + tx);
        py = m * (x * shy + y * sy + ty);
    }

    // Direct transformation of x and y, affine part only (no divide).
    void transform_affine(ref double x, ref double y)
    {
        double tmp = x;
        x = tmp * sx + y * shx + tx;
        y = tmp * shy + y * sy + ty;
    }

    // Direct transformation of x and y, 2x2 matrix only, no translation.
    void transform_2x2(ref double x, ref double y)
    {
        double tmp = x;
        x = tmp * sx + y * shx;
        y = tmp * shy + y * sy;
    }

    // Inverse transformation of x and y. It works slow because
    // it explicitly inverts the matrix on every call. For massive
    // operations it's better to invert() the matrix and then use
    // direct transformations.
    void inverse_transform(ref double x, ref double y)
    {
        Perspective t = new Perspective(this);
        if (t.invert()) t.Transform(ref x, ref y);
    }

    //---------------------------------------------------------- Auxiliary
    double determinant()
    {
        return sx * (sy * w2 - ty * w1) +
               shx * (ty * w0 - shy * w2) +
               tx * (shy * w1 - sy * w0);
    }

    double determinant_reciprocal()
    {
        return 1.0 / determinant();
    }

    /// <summary>
    /// Cheap sanity check: the main diagonal must not be (near) zero.
    /// This is not a full non-degeneracy test (see determinant()).
    /// </summary>
    public bool IsValid
    {
        get
        {
            return Math.Abs(sx) > EPSILON &&
                   Math.Abs(sy) > EPSILON &&
                   Math.Abs(w2) > EPSILON;
        }
    }

    public CoordTransformerKind Kind => CoordTransformerKind.Perspective;

    // Epsilon-tolerant identity check (unlike the exact IsIdentity property).
    bool is_identity()
    {
        return is_equal_eps(sx, 1.0, EPSILON) &&
               is_equal_eps(shy, 0.0, EPSILON) &&
               is_equal_eps(w0, 0.0, EPSILON) &&
               is_equal_eps(shx, 0.0, EPSILON) &&
               is_equal_eps(sy, 1.0, EPSILON) &&
               is_equal_eps(w1, 0.0, EPSILON) &&
               is_equal_eps(tx, 0.0, EPSILON) &&
               is_equal_eps(ty, 0.0, EPSILON) &&
               is_equal_eps(w2, 1.0, EPSILON);
    }

    static bool is_equal_eps(double v1, double v2, double epsilon)
    {
        return Math.Abs(v1 - v2) <= (epsilon);
    }

    // Determine the major affine parameters. Use with caution
    // considering possible degenerate cases.
    double scale()
    {
        double x = 0.707106781 * sx + 0.707106781 * shx;
        double y = 0.707106781 * shy + 0.707106781 * sy;
        return Math.Sqrt(x * x + y * y);
    }

    // Rotation angle (radians) derived by transforming the unit x-vector.
    double rotation()
    {
        double x1 = 0.0;
        double y1 = 0.0;
        double x2 = 1.0;
        double y2 = 0.0;
        Transform(ref x1, ref y1);
        Transform(ref x2, ref y2);
        return Math.Atan2(y2 - y1, x2 - x1);
    }

    void translation(out double dx, out double dy)
    {
        dx = tx;
        dy = ty;
    }

    // Signed scale factors, measured after removing the rotation component.
    void scaling(out double x, out double y)
    {
        double x1 = 0.0;
        double y1 = 0.0;
        double x2 = 1.0;
        double y2 = 1.0;
        Perspective t = new Perspective(this);
        t *= Affine.NewRotation(-rotation());
        t.Transform(ref x1, ref y1);
        t.Transform(ref x2, ref y2);
        x = x2 - x1;
        y = y2 - y1;
    }

    // Absolute scale factors of the 2x2 part.
    void scaling_abs(out double x, out double y)
    {
        x = Math.Sqrt(sx * sx + shx * shx);
        y = Math.Sqrt(shy * shy + sy * sy);
    }

    /// <summary>Returns a value-type snapshot of the 9 matrix elements.</summary>
    public PerspectiveMat GetInternalElements()
    {
        PerspectiveMat m = new PerspectiveMat();
        m.sx = sx; /**/m.shy = shy; /**/m.w0 = w0;
        m.shx = shx; /**/m.sy = sy; /**/m.w1 = w1;
        m.tx = tx; /**/m.ty = ty; /**/m.w2 = w2;
        return m;
    }
}
}
| |
/**
* Copyright 2016 Dartmouth-Hitchcock
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
namespace Legion.Core.Services {
/// <summary>
/// A node in the Reply object
/// </summary>
/// <summary>
/// A node in the Reply object. The backing XmlElement is appended to the
/// document element immediately, or lazily on first use when constructed
/// with hideIfEmpty = true.
/// </summary>
public class ReplyNode {
    private string _nodename;
    private XmlDocument _dom;

    /// <summary>
    /// The base XmlElement (null until first use when hideIfEmpty was requested)
    /// </summary>
    protected XmlElement _node = null;

    /// <summary>
    /// Gets the raw XmlElement backing the ReplyNode (null while the node is
    /// still hidden). Setting copies the InnerXml of the supplied element,
    /// creating the backing node first if necessary.
    /// </summary>
    public XmlElement Raw {
        get { return _node; }
        set {
            // BUGFIX: previously dereferenced _node directly, throwing
            // NullReferenceException when the node was constructed with
            // hideIfEmpty = true and had not been materialized yet.
            CheckBaseNodeExists();
            _node.InnerXml = value.InnerXml;
        }
    }

    /// <summary>
    /// Constructor, defaults to visible empty node
    /// </summary>
    /// <param name="dom">The dom to create on</param>
    /// <param name="nodename">This node's name</param>
    public ReplyNode(XmlDocument dom, string nodename) : this(dom, nodename, false) { }

    /// <summary>
    /// Constructor
    /// </summary>
    /// <param name="dom">The dom to create on</param>
    /// <param name="nodename">This node's name</param>
    /// <param name="hideIfEmpty">hide this node if it has no contents</param>
    public ReplyNode(XmlDocument dom, string nodename, bool hideIfEmpty){
        _dom = dom;
        _nodename = nodename;
        // When hidden, creation is deferred to CheckBaseNodeExists().
        if(!hideIfEmpty)
            _node = (XmlElement)_dom.DocumentElement.AppendChild(_dom.CreateElement(_nodename));
    }

    /// <summary>
    /// Creates a new (detached) XmlElement on the Reply's document
    /// </summary>
    /// <param name="name">the name of the new element</param>
    /// <returns>A new XmlElement of the Reply</returns>
    public XmlElement CreateElement(string name) {
        CheckBaseNodeExists();
        return _dom.CreateElement(name);
    }

    /// <summary>
    /// Adds a new element of the provided name to this node
    /// </summary>
    /// <param name="name">the name of the new element</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElement(string name) {
        CheckBaseNodeExists();
        return AddElement(_node, name);
    }

    /// <summary>
    /// Adds a new element with a text value to this node
    /// </summary>
    /// <param name="name">the name of the new element</param>
    /// <param name="value">the text value of the new element</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElement(string name, string value) {
        CheckBaseNodeExists();
        return AddElement(name, value, false);
    }

    /// <summary>
    /// Adds a new element with a text value to this node
    /// </summary>
    /// <param name="name">the name of the new element</param>
    /// <param name="value">the text value of the new element</param>
    /// <param name="asCDATA">true if the value should be in a CDATA section</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElement(string name, string value, bool asCDATA) {
        CheckBaseNodeExists();
        return AddElement(_node, name, value, asCDATA);
    }

    /// <summary>
    /// Adds a new element of the provided name to the provided parent
    /// </summary>
    /// <param name="parent">the parent element to append to</param>
    /// <param name="name">the name of the new element</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElement(XmlElement parent, string name) {
        CheckBaseNodeExists();
        return (XmlElement)parent.AppendChild(CreateElement(name));
    }

    /// <summary>
    /// Adds a new element with a text value to the provided parent
    /// </summary>
    /// <param name="parent">the element to add to</param>
    /// <param name="name">the name of the new element</param>
    /// <param name="value">the text value of the new element</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElement(XmlElement parent, string name, string value) {
        CheckBaseNodeExists();
        return AddElement(parent, name, value, false);
    }

    /// <summary>
    /// Adds a new element with a text value to the provided parent
    /// </summary>
    /// <param name="parent">the element to add to</param>
    /// <param name="name">the name of the new element</param>
    /// <param name="value">the text value of the new element</param>
    /// <param name="asCDATA">true if the value should be in a CDATA section</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElement(XmlElement parent, string name, string value, bool asCDATA) {
        CheckBaseNodeExists();
        XmlElement element;
        element = _dom.CreateElement(name);
        // CDATA keeps markup characters in 'value' from being escaped.
        if (asCDATA)
            element.AppendChild(_dom.CreateCDataSection(value));
        else
            element.InnerText = value;
        return (XmlElement)parent.AppendChild(element);
    }

    /// <summary>
    /// Adds an existing element to this node
    /// </summary>
    /// <param name="element">the new element</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElement(XmlElement element) {
        CheckBaseNodeExists();
        return AddElement(_node, element);
    }

    /// <summary>
    /// Adds an existing element to the provided parent
    /// </summary>
    /// <param name="parent">the parent element to append to</param>
    /// <param name="element">the new element</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElement(XmlElement parent, XmlElement element) {
        CheckBaseNodeExists();
        return (XmlElement)parent.AppendChild(element);
    }

    /// <summary>
    /// Adds a new element whose content is parsed as XML to this node
    /// </summary>
    /// <param name="name">the name of the new element</param>
    /// <param name="value">the string XML value of the new element</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElementXml(string name, string value) {
        // BUGFIX: materialize the base node BEFORE reading _node; previously
        // a hidden (hideIfEmpty) node passed null as the parent, and the
        // parent overload's later CheckBaseNodeExists() call could not fix
        // the already-captured null reference -> NullReferenceException.
        CheckBaseNodeExists();
        return AddElementXml(_node, name, value);
    }

    /// <summary>
    /// Adds a new element whose content is parsed as XML to the provided parent
    /// </summary>
    /// <param name="parent">the element to add to</param>
    /// <param name="name">the name of the new element</param>
    /// <param name="value">the string XML value of the new element</param>
    /// <returns>The new XmlElement</returns>
    public XmlElement AddElementXml(XmlElement parent, string name, string value) {
        CheckBaseNodeExists();
        XmlElement element;
        element = _dom.CreateElement(name);
        element.InnerXml = value;
        return (XmlElement)parent.AppendChild(element);
    }

    /// <summary>
    /// Clears the ReplyNode
    /// </summary>
    public void Clear() {
        CheckBaseNodeExists();
        _node.RemoveAll();
    }

    /// <summary>
    /// Checks that the base node has been created and if not, creates it.
    /// Returns false when the node had to be created, true when it already existed.
    /// </summary>
    protected bool CheckBaseNodeExists() {
        if (_node == null) {
            _node = (XmlElement)_dom.DocumentElement.AppendChild(_dom.CreateElement(_nodename));
            return false;
        }
        else
            return true;
    }
}
}
| |
using System;
using Android.Content;
using Android.Graphics;
using Android.Graphics.Drawables;
using Android.Util;
using DrawableTextView = TextDrawable.TextDrawable;
namespace TextDrawableSamples.Util
{
// Identifies each sample drawable variant demonstrated by DrawableProvider.
// NOTE(review): "Drwable" is a typo for "Drawable", but the name is public
// API here and renaming it would break callers.
public enum DrwableProviderEnum
{
SampleRect = 1,
SampleRoundRect = 2,
SampleRound = 3,
SampleRectBorder = 4,
SampleRoundRectBorder = 5,
SampleRoundBorder = 6,
SampleMultipleLetters = 7,
SampleFont = 8,
SampleSize = 9,
SampleAnimation = 10,
SampleMisc = 11
}
/// <summary>
/// Class DrawableProvider.
/// </summary>
/// <summary>
/// Supplies sample TextDrawable instances (rects, rounds, bordered and
/// animated variants) with randomly generated colors.
/// </summary>
public class DrawableProvider
{
    // Android context, used for dp -> px conversion in ToPx().
    private readonly Context _mContext;

    // Shared RNG backing RandomColor.
    private readonly Random _rand = new Random();

    /// <summary>
    /// Initializes a new instance of the <see cref="DrawableProvider" /> class.
    /// </summary>
    /// <param name="context">The context.</param>
    public DrawableProvider(Context context)
    {
        _mContext = context;
    }

    /// <summary>A freshly generated random color (new value on every read).</summary>
    public Color RandomColor => GetRandomColor();

    // Fully saturated, full-brightness color with a random hue.
    // NOTE(review): Next(255) samples hues 0..254 of the 0..360 HSV range —
    // looks intentional for sample code, but confirm if the full spectrum is wanted.
    private Color GetRandomColor()
    {
        var hue = _rand.Next(255);
        return Color.HSVToColor(new[] { hue, 1.0f, 1.0f });
    }

    /// <summary>Rectangle drawable with random text/background colors.</summary>
    public DrawableTextView GetRect(string text) =>
        DrawableTextView.TextDrwableBuilder.BuildRect(text, RandomColor, RandomColor);

    /// <summary>Round drawable with random text/background colors.</summary>
    public DrawableTextView GetRound(string text) =>
        DrawableTextView.TextDrwableBuilder.BuildRound(text, RandomColor, RandomColor);

    /// <summary>Rounded rectangle (10dp corner radius) with random colors.</summary>
    public DrawableTextView GetRoundRect(string text) =>
        DrawableTextView.TextDrwableBuilder.BuildRoundRect(text, RandomColor, ToPx(10), RandomColor);

    /// <summary>Rectangle drawable with a 2dp border.</summary>
    public DrawableTextView GetRectWithBorder(string text) =>
        DrawableTextView.TextDrwableBuilder
            .BeginConfig()
            .WithBorder(ToPx(2))
            .EndConfig()
            .BuildRect(text, RandomColor, RandomColor);

    /// <summary>Round drawable with a 2dp border.</summary>
    public DrawableTextView GetRoundWithBorder(string text) =>
        DrawableTextView.TextDrwableBuilder
            .BeginConfig()
            .WithBorder(ToPx(2))
            .EndConfig()
            .BuildRound(text, RandomColor, RandomColor);

    /// <summary>Rounded rectangle (10dp radius) with a 2dp border.</summary>
    public DrawableTextView GetRoundRectWithBorder(string text) =>
        DrawableTextView.TextDrwableBuilder
            .BeginConfig()
            .WithBorder(ToPx(2))
            .EndConfig()
            .BuildRoundRect(text, RandomColor, ToPx(10), RandomColor);

    /// <summary>Rectangle showing the two-letter text "AK", upper-cased, 20dp font.</summary>
    public DrawableTextView GetRectWithMultiLetter()
    {
        const string text = "AK";
        return DrawableTextView.TextDrwableBuilder.BeginConfig()
            .FontSize(ToPx(20))
            .ToUpperCase()
            .EndConfig()
            .BuildRect(text, RandomColor, RandomColor);
    }

    /// <summary>Rectangle using the default typeface in bold, 15dp font.</summary>
    public DrawableTextView GetRoundWithCustomFont()
    {
        const string text = "Bold";
        return DrawableTextView.TextDrwableBuilder.BeginConfig()
            .UseFont(Typeface.Default)
            .FontSize(ToPx(15))
            .TextColor(RandomColor)
            .Bold()
            .EndConfig()
            .BuildRect(text, Color.DarkGray, RandomColor);
    }

    /// <summary>
    /// Two bordered letter tiles ("I" and "J") laid side by side via inset
    /// layers inside a single LayerDrawable.
    /// </summary>
    public Drawable GetRectWithCustomSize()
    {
        var tileBuilder =
            DrawableTextView.TextDrwableBuilder.BeginConfig().Width(ToPx(29)).WithBorder(ToPx(2)).EndConfig().Rect();
        var leftTile = tileBuilder.Build("I", RandomColor, Color.Transparent);
        var rightTile = tileBuilder.Build("J", RandomColor, Color.Transparent);
        return new LayerDrawable(new Drawable[]
        {
            // Inset each tile from the opposite edge so they sit side by side.
            new InsetDrawable(leftTile, 0, 0, ToPx(31), 0),
            new InsetDrawable(rightTile, ToPx(31), 0, 0, 0)
        });
    }

    /// <summary>
    /// Looping countdown animation (10 down to 1), 1200ms per frame,
    /// returned already started.
    /// </summary>
    public Drawable GetRectWithAnimation()
    {
        var frameBuilder = DrawableTextView.TextDrwableBuilder.Rect();
        var animation = new AnimationDrawable();
        for (var frameNo = 10; frameNo > 0; frameNo--)
        {
            animation.AddFrame(frameBuilder.Build(frameNo.ToString(), RandomColor, RandomColor), 1200);
        }
        animation.OneShot = false;
        animation.Start();
        return animation;
    }

    /// <summary>Converts device-independent pixels (dp) to raw pixels.</summary>
    public int ToPx(int dp) =>
        (int) TypedValue.ApplyDimension(ComplexUnitType.Dip, dp, _mContext.Resources.DisplayMetrics);
}
}
| |
/* ====================================================================
Copyright (C) 2004-2008 fyiReporting Software, LLC
Copyright (C) 2011 Peter Gill <peter@majorsilence.com>
This file is part of the fyiReporting RDL project.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, email info@fyireporting.com or visit
the website www.fyiReporting.com.
*/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.Runtime.InteropServices;
using System.Xml;
using System.IO;
using RdlMapFile.Resources;
namespace fyiReporting.RdlMapFile
{
public partial class MapFile : Form, IMessageFilter
{
private DesignXmlDraw map;
// Builds the map-designer window: manually wires the DesignXmlDraw surface
// into the splitter (the VS designer kept removing this code), hooks its
// change events, and opens the given file or starts a new map.
public MapFile(string file)
{
InitializeComponent();
//map = new DesignXmlDraw(); // designer keeps deleting this code??!!
// Needed so PreFilterMessage sees mouse-wheel messages app-wide.
Application.AddMessageFilter(this);
//
// manually add controls to the splitter. visual studio keeps deleting them
//
map = new DesignXmlDraw();
this.splitContainer1.Panel1.Controls.Add(this.map);
//
// map
//
this.map.Dock = System.Windows.Forms.DockStyle.Fill;
this.map.Location = new System.Drawing.Point(0, 0);
this.map.Name = "map";
this.map.Size = new System.Drawing.Size(620, 474);
this.map.TabIndex = 0;
this.map.Zoom = 1F;
// Keep UI (title, toolbar, property grid, zoom box) in sync with the surface.
map.ZoomChange += new DesignXmlDraw.DrawEventHandler(map_ZoomChange);
map.XmlChange += new DesignXmlDraw.DrawEventHandler(map_XmlChange);
map.SelectionChange += new DesignXmlDraw.DrawEventHandler(map_SelectionChange);
map.ToolChange += new DesignXmlDraw.DrawEventHandler(map_ToolChange);
this.Closing += new CancelEventHandler(MapFile_Closing);
if (file != null)
{
map.SetMapFile(file);
if (map.MapDoc == null) // failed to open?
map.SetNew(); // yes, just start a new one
}
else
map.SetNew();
SetTitle(false);
}
// On tool change: resets the toolbar tool button's image to the
// selection tool's menu-item icon.
void map_ToolChange(DesignXmlDraw dxd)
{
bToolStrip.Image = selectionToolStripMenuItem.Image;
}
// Cancels the window close when the user cancels the unsaved-changes prompt.
void MapFile_Closing(object sender, CancelEventArgs e)
{
e.Cancel = !OkToClose();
}
// Swaps the property grid's selected object to match the newly selected
// map node (Text / Polygon / Lines); anything else clears the grid.
void map_SelectionChange(DesignXmlDraw dxd)
{
    XmlNode selected = dxd.SelectedItem;
    if (selected == null)
    {
        pgXmlNode.SelectedObject = null;
        return;
    }
    if (selected.Name == "Text")
    {
        pgXmlNode.SelectedObject = new PropertyText(this.map);
    }
    else if (selected.Name == "Polygon")
    {
        pgXmlNode.SelectedObject = new PropertyPolygon(this.map);
        // Expand the Keys property; makes it easier for the user to
        // identify which polygon is selected.
        GridItem keys = findPropertyItem("Keys", findPropertyRoot());
        if (keys != null)
            keys.Expanded = true;
    }
    else if (selected.Name == "Lines")
    {
        pgXmlNode.SelectedObject = new PropertyLine(this.map);
    }
    else
    {
        pgXmlNode.SelectedObject = null;
    }
}
private GridItem findPropertyRoot()
{
// get the root item
GridItem root = pgXmlNode.SelectedGridItem;
if (root == null)
return null;
while (root.Parent != null)
{
root = root.Parent;
}
return root;
}
private GridItem findPropertyItem(string label, GridItem root)
{
if (root == null)
return null;
if (root.Label == label)
return root;
// search the property grid's item tree for the indicated item
foreach (GridItem gi in root.GridItems)
{
GridItem result = findPropertyItem(label, gi);
if (result != null)
return result;
}
return null;
}
void map_XmlChange(DesignXmlDraw dxd)
{
pgXmlNode.Refresh();
if (map.Modified) // only need to process on the first change
return;
SetTitle(true);
}
void map_ZoomChange(DesignXmlDraw dxd)
{
string z = string.Format("{0}%", Math.Round(dxd.Zoom * 100, 0));
cbZoom.Text = z;
}
/// <summary>
/// Handles mousewheel processing when window under mousewheel doesn't have focus
/// </summary>
/// <param name="m"></param>
/// <returns></returns>
public bool PreFilterMessage(ref Message m)
{
#if MONO
return false;
#else
if (m.Msg == 0x20a)
{
// WM_MOUSEWHEEL, find the control at screen position m.LParam
Point pos = new Point(m.LParam.ToInt32() & 0xffff, m.LParam.ToInt32() >> 16);
IntPtr hWnd = WindowFromPoint(pos);
if (hWnd != IntPtr.Zero && hWnd != m.HWnd && Control.FromHandle(hWnd) != null)
{
SendMessage(hWnd, m.Msg, m.WParam, m.LParam);
return true;
}
}
return false;
#endif
}
#if MONO
#else
// P/Invoke declarations
[DllImport("user32.dll")]
private static extern IntPtr WindowFromPoint(Point pt);
[DllImport("user32.dll")]
private static extern IntPtr SendMessage(IntPtr hWnd, int msg, IntPtr wp, IntPtr lp);
#endif
private void exitToolStripMenuItem_Click(object sender, EventArgs e)
{
if (!OkToClose())
return;
Application.Exit();
}
private bool OkToClose()
{
if (!map.Modified)
return true;
DialogResult mb = MessageBox.Show(string.Format(Strings.MapFile_ShowB_WantSave, map.File == null ? Strings.MapFile_ShowB_Untitled : Path.GetFileName(map.File)),
Strings.MapFile_ShowB_RdlMapFileDesigner, MessageBoxButtons.YesNoCancel) ;
if (mb == DialogResult.Cancel)
return false;
if (mb == DialogResult.No)
return true;
return Save();
}
private bool Save()
{
// need to save file first then exit
if (map.File == null)
{
if (!SaveAs())
return false;
}
string file = map.File;
StreamWriter writer = null;
bool bOK = true;
try
{
writer = new StreamWriter(file);
writer.Write(map.MapSource);
map.Modified = false;
map.ClearUndo();
SetTitle(false);
}
catch (Exception ae)
{
bOK = false;
MessageBox.Show(ae.Message + "\r\n" + ae.StackTrace);
}
finally
{
writer.Close();
}
if (bOK)
map.Modified = false;
return bOK;
}
/// <summary>
/// Asks user for file name and sets the map file name to user specified one
/// </summary>
/// <returns></returns>
private bool SaveAs()
{
SaveFileDialog sfd = new SaveFileDialog();
sfd.DefaultExt = "xml";
sfd.Filter = Strings.MapFile_SaveAs_RDL_MapFilesFilter;
sfd.FilterIndex = 1;
sfd.CheckFileExists = false;
bool rc = false;
try
{
if (sfd.ShowDialog(this) == DialogResult.OK)
{
map.File = sfd.FileName;
rc = true;
}
}
finally
{
sfd.Dispose();
}
return rc;
}
private void SetTitle(bool bModified)
{
var title = Strings.MapFile_SetTitle_fyiReporting_MapFile_Designer + " - " +
(map.File ?? Strings.MapFile_SetTitle_untitled) +
(bModified ? "*" : "");
Text = title;
}
private void openToolStripMenuItem_Click(object sender, EventArgs e)
{
if (!OkToClose())
return;
OpenFileDialog ofd = new OpenFileDialog();
ofd.DefaultExt = "xml";
ofd.Filter = Strings.MapFile_openToolStripMenuItem_Click_OpenMap;
ofd.FilterIndex = 1;
ofd.CheckFileExists = true;
ofd.Multiselect = false;
if (map.File != null)
{
try
{
ofd.InitialDirectory = Path.GetDirectoryName(map.File);
}
catch
{
}
}
try
{
if (ofd.ShowDialog(this) == DialogResult.OK)
{
map.SetMapFile(ofd.FileName);
SetTitle(false);
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, string.Format("{1} {0}", ofd.FileName, Strings.MapFile_openToolStripMenuItem_Click_ErrorOpening));
}
finally
{
ofd.Dispose();
}
}
private void setBackgroundImageToolStripMenuItem_Click(object sender, EventArgs e)
{
OpenFileDialog ofd = new OpenFileDialog();
ofd.Filter = Strings.MapFile_setBackgroundImageToolStripMenuItem_Click_OpenPicture;
ofd.FilterIndex = 6;
ofd.CheckFileExists = true;
try
{
if (ofd.ShowDialog(this) == DialogResult.OK)
{
map.SetBackgroundImage(ofd.FileName);
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, string.Format("{1} {0}", ofd.FileName, Strings.MapFile_openToolStripMenuItem_Click_ErrorOpening));
}
finally
{
ofd.Dispose();
}
}
private void cbZoom_SelectedIndexChanged(object sender, EventArgs e)
{
try
{
float z = int.Parse(cbZoom.Text.Replace("%", ""), System.Globalization.NumberStyles.Integer) / 100f;
if (z < .1f)
z = .1f;
else if (z > 10)
z = 8;
this.map.Zoom = z;
}
catch { } // happens when user types in a bad zoom value
}
private void deleteToolStripMenuItem_Click(object sender, EventArgs e)
{
this.map.DeleteSelected();
}
private void undoToolStripMenuItem_Click(object sender, EventArgs e)
{
this.map.Undo();
}
private void editToolStripMenuItem_DropDownOpening(object sender, EventArgs e)
{
undoToolStripMenuItem.Enabled = map.CanUndo;
undoToolStripMenuItem.Text = map.CanUndo ? Strings.MapFile_editToolStripMenuItem_DropDownOpening_Undo + " " + map.UndoText : Strings.MapFile_editToolStripMenuItem_DropDownOpening_Undo;
deleteToolStripMenuItem.Enabled = reducePolygonPointsToolStripMenuItem.Enabled = (map.SelectedItem != null);
selectAllToolStripMenuItem.Enabled = map.MapDoc != null;
}
private void saveToolStripMenuItem_Click(object sender, EventArgs e)
{
this.Save();
}
private void saveAsToolStripMenuItem_Click(object sender, EventArgs e)
{
if (SaveAs())
Save();
}
private void aboutToolStripMenuItem_Click(object sender, EventArgs e)
{
using (DialogAbout dlg = new DialogAbout())
{
dlg.ShowDialog();
}
}
private void insertPolygonToolStripMenuItem_Click(object sender, EventArgs e)
{
map.Tool = DesignXmlDraw.ToolMode.InsertPolygon;
bToolStrip.Image = ((ToolStripMenuItem)sender).Image;
}
private void insertTextToolStripMenuItem_Click(object sender, EventArgs e)
{
map.Tool = DesignXmlDraw.ToolMode.InsertText;
bToolStrip.Image = ((ToolStripMenuItem)sender).Image;
}
private void insertLineToolStripMenuItem_Click(object sender, EventArgs e)
{
map.Tool = DesignXmlDraw.ToolMode.InsertLine;
bToolStrip.Image = ((ToolStripMenuItem)sender).Image;
}
private void selectionToolStripMenuItem_Click(object sender, EventArgs e)
{
map.Tool = DesignXmlDraw.ToolMode.Selection;
bToolStrip.Image = ((ToolStripMenuItem)sender).Image;
}
private void newToolStripMenuItem_Click(object sender, EventArgs e)
{
if (!OkToClose())
return;
map.SetNew();
pgXmlNode.SelectedObject = null;
SetTitle(false);
}
private void selectAllToolStripMenuItem_Click(object sender, EventArgs e)
{
map.SelectAll();
}
private void copyToolStripMenuItem_Click(object sender, EventArgs e)
{
map.Copy();
}
private void pasteToolStripMenuItem_Click(object sender, EventArgs e)
{
map.Paste(new Point(0, 0));
}
private void cutToolStripMenuItem_Click(object sender, EventArgs e)
{
map.Copy();
this.map.DeleteSelected();
}
private void helpHelpToolStripMenuItem_Click(object sender, EventArgs e)
{
try
{
System.Diagnostics.Process.Start("http://www.fyireporting.com/helpv4/mapdesigner.php");
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, Strings.MapFile_helpHelpToolStripMenuItem_Click_Help_URL_Invalid);
}
}
private void supportToolStripMenuItem_Click(object sender, EventArgs e)
{
try
{
System.Diagnostics.Process.Start("http://www.fyireporting.com/forum/");
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, Strings.MapFile_supportToolStripMenuItem_Click_Support_URL_Invalid);
}
}
private void importMenuItem_Click(object sender, EventArgs e)
{
OpenFileDialog ofd = new OpenFileDialog();
ofd.Filter = Strings.MapFile_importMenuItem_Click_OpenShape;
ofd.FilterIndex = 0;
ofd.CheckFileExists = true;
try
{
if (ofd.ShowDialog(this) == DialogResult.OK)
{
map.ClearUndo();
ShapeFile sf = new ShapeFile();
sf.Read(ofd.FileName);
StringBuilder sb = new StringBuilder("<MapItems>", sf.Records.Count * 100);
float xOffset = (float)-sf.FileHeader.XMin;
float yOffset = (float)-sf.FileHeader.YMin;
//PointF offset = this.MercatorConversion(new PointF((float)sf.FileHeader.XMin, (float)sf.FileHeader.YMin));
//float xOffset = (float)-offset.X;
//float yOffset = (float)-offset.Y;
foreach (ShapeFileRecord sfr in sf.Records)
{
if (sfr.ShapeType == (int)ShapeType.Polygon)
{
HandlePolygon(sb, xOffset, yOffset, sfr);
}
}
sb.Append("</MapItems>");
map.Paste(new Point(0, 0), sb.ToString());
map.ClearUndo();
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, string.Format("{1} {0}", ofd.FileName, Strings.MapFile_openToolStripMenuItem_Click_ErrorOpening));
}
finally
{
ofd.Dispose();
}
}
private void HandlePolygon(StringBuilder sb, float xOffset, float yOffset, ShapeFileRecord sfr)
{
// we'll use this key for all the polygons
StringBuilder keys = new StringBuilder();
keys.Append("<Keys>");
int len = sfr.Attributes.ItemArray.GetLength(0);
for (int j = 0; j < len; j++)
{
string key = sfr.Attributes.ItemArray[j].ToString();
if (key.Length == 0)
continue;
try { float.Parse(key); continue; }
catch { } // not a number
keys.Append(key);
if (j + 1 < len)
keys.Append(", ");
}
if (keys.ToString().EndsWith(", "))
keys.Remove(keys.Length - 2, 2);
keys.Append("</Keys>");
string skeys = keys.ToString();
for (int i = 0; i < sfr.NumberOfParts; i++)
{
sb.Append("<Polygon>");
sb.Append("<Points>");
int oldx = int.MaxValue;
int oldy = int.MaxValue;
int cp = 0;
// Determine the starting index and the end index
// into the points array that defines the figure.
int start = sfr.Parts[i];
int end;
if (sfr.NumberOfParts > 1 && i != (sfr.NumberOfParts - 1))
end = sfr.Parts[i + 1];
else
end = sfr.NumberOfPoints;
// Add line segments to the figure.
for (int j = start; j < end; j++)
{
PointF ll = sfr.Points[j];
//PointF ll = MercatorConversion(sfr.Points[j]);
int x = (int)((ll.X + xOffset) * 4);
int y = (int)(((-ll.Y) + yOffset) * 4);
if (x == oldx && y == oldy) // we're truncating the data so some points are redundant
continue;
cp++;
oldx = x;
oldy = y;
sb.AppendFormat("{0},{1}", x, y);
if (j + 1 < sfr.Points.Count)
sb.Append(",");
}
if (cp == 1)
sb.AppendFormat(",{0},{1}", oldx, oldy);
sb.Append("</Points>");
sb.Append(skeys);
sb.Append("</Polygon>");
}
}
private PointF MercatorConversion(PointF ll)
{
double dLat = Degrees2Radians(ll.Y);
if (Math.Abs(Math.Abs(dLat) - HALF_PI) <= .0001f)
{ // Not perfect but should be close; latitude fails near poles (90 and -90 degree)
dLat = Degrees2Radians(ll.Y < 0 ? -87f: 87f);
}
// see http://en.wikipedia.org/wiki/Mercator_projection for formula
double y = Math.Log(Math.Tan(dLat)+ (1f / Math.Cos(dLat)));
return new PointF(ll.X, (float)Radians2Degrees(y));
}
static double HALF_PI = Math.PI / 2;
private double Degrees2Radians(double d)
{
return d * Math.PI / 180;
}
private double Radians2Degrees(double r)
{
return r * 180 / Math.PI;
}
private void reducePolygonPointsToolStripMenuItem_Click(object sender, EventArgs e)
{
int count = map.ReducePointCount();
MessageBox.Show(string.Format("{1} {0}.", count, Strings.MapFile_reducePolygonPointsToolStripMenuItem_ReducePolygon), Strings.MapFile_reducePolygonPointsToolStripMenuItem_Click_Reduce_Polygon_Count);
}
private void sizePolygonPoints_Click(object sender, EventArgs e)
{
ToolStripItem tsi = (ToolStripItem)sender;
float zoom = (100f + Convert.ToSingle((string)(tsi.Tag))) / 100f;
map.SizeSelected(zoom);
}
private void menuFindByKey_Click(object sender, EventArgs e)
{
DialogFindByKey fbk = new DialogFindByKey(map);
try
{
fbk.ShowDialog(this); // it does all the work
}
finally
{
fbk.Dispose();
}
}
}
}
| |
// Copyright 2007-2015 Chris Patterson, Dru Sellers, Travis Smith, et. al.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.Courier.Hosts
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Contracts;
using Events;
using Exceptions;
using MassTransit.Pipeline;
using Results;
/// <summary>
/// Execution-side context for a routing-slip activity.  Wraps the consume
/// context of the received routing slip, exposing the sanitized slip's first
/// itinerary activity and its arguments, while delegating all pipe/publish/
/// send concerns to the underlying consume context.
/// </summary>
public class HostExecuteContext<TArguments> :
    ExecuteContext<TArguments>
    where TArguments : class
{
    readonly Activity _activity;
    readonly TArguments _arguments;
    readonly Uri _compensationAddress;
    readonly ConsumeContext<RoutingSlip> _context;
    readonly Guid _executionId;
    readonly HostInfo _host;
    readonly IRoutingSlipEventPublisher _publisher;
    readonly SanitizedRoutingSlip _routingSlip;
    readonly Stopwatch _timer;
    readonly DateTime _timestamp;

    public HostExecuteContext(HostInfo host, Uri compensationAddress, ConsumeContext<RoutingSlip> context)
    {
        _host = host;
        _compensationAddress = compensationAddress;
        _context = context;

        // Start timing immediately; Elapsed reports activity execution time.
        _timer = Stopwatch.StartNew();

        // A single NewId provides both the execution id and its timestamp.
        NewId id = NewId.Next();
        _executionId = id.ToGuid();
        _timestamp = id.Timestamp;

        _routingSlip = new SanitizedRoutingSlip(context);
        if (_routingSlip.Itinerary.Count == 0)
            throw new ArgumentException("The routingSlip must contain at least one activity");

        // This context executes the first activity of the itinerary.
        _activity = _routingSlip.Itinerary[0];
        _arguments = _routingSlip.GetActivityArguments<TArguments>();

        _publisher = new RoutingSlipEventPublisher(this, _routingSlip);
    }

    // PipeContext: pure delegation to the consume context.
    CancellationToken PipeContext.CancellationToken
    {
        get { return _context.CancellationToken; }
    }

    bool PipeContext.HasPayloadType(Type contextType) => _context.HasPayloadType(contextType);

    bool PipeContext.TryGetPayload<TPayload>(out TPayload payload) => _context.TryGetPayload(out payload);

    TPayload PipeContext.GetOrAddPayload<TPayload>(PayloadFactory<TPayload> payloadFactory) => _context.GetOrAddPayload(payloadFactory);

    // IPublishEndpoint: every overload forwards unchanged to the consume context.
    Task IPublishEndpoint.Publish<T>(T message, CancellationToken cancellationToken) => _context.Publish(message, cancellationToken);

    Task IPublishEndpoint.Publish<T>(T message, IPipe<PublishContext<T>> publishPipe,
        CancellationToken cancellationToken) => _context.Publish(message, publishPipe, cancellationToken);

    Task IPublishEndpoint.Publish<T>(T message, IPipe<PublishContext> publishPipe, CancellationToken cancellationToken) => _context.Publish(message, publishPipe, cancellationToken);

    Task IPublishEndpoint.Publish(object message, CancellationToken cancellationToken) => _context.Publish(message, cancellationToken);

    Task IPublishEndpoint.Publish(object message, IPipe<PublishContext> publishPipe, CancellationToken cancellationToken) => _context.Publish(message, publishPipe, cancellationToken);

    Task IPublishEndpoint.Publish(object message, Type messageType, CancellationToken cancellationToken) => _context.Publish(message, messageType, cancellationToken);

    Task IPublishEndpoint.Publish(object message, Type messageType, IPipe<PublishContext> publishPipe,
        CancellationToken cancellationToken) => _context.Publish(message, messageType, publishPipe, cancellationToken);

    Task IPublishEndpoint.Publish<T>(object values, CancellationToken cancellationToken) => _context.Publish<T>(values, cancellationToken);

    Task IPublishEndpoint.Publish<T>(object values, IPipe<PublishContext<T>> publishPipe,
        CancellationToken cancellationToken) => _context.Publish(values, publishPipe, cancellationToken);

    Task IPublishEndpoint.Publish<T>(object values, IPipe<PublishContext> publishPipe,
        CancellationToken cancellationToken) => _context.Publish<T>(values, publishPipe, cancellationToken);

    // ExecuteContext surface.
    HostInfo ExecuteContext.Host
    {
        get { return _host; }
    }

    DateTime ExecuteContext.Timestamp
    {
        get { return _timestamp; }
    }

    TimeSpan ExecuteContext.Elapsed
    {
        get { return _timer.Elapsed; }
    }

    ConsumeContext ExecuteContext.ConsumeContext
    {
        get { return _context; }
    }

    TArguments ExecuteContext<TArguments>.Arguments
    {
        get { return _arguments; }
    }

    Guid ExecuteContext.TrackingNumber
    {
        get { return _routingSlip.TrackingNumber; }
    }

    Guid ExecuteContext.ExecutionId
    {
        get { return _executionId; }
    }

    string ExecuteContext.ActivityName
    {
        get { return _activity.Name; }
    }

    ExecutionResult ExecuteContext.Completed() => new NextActivityExecutionResult<TArguments>(this, _publisher, _activity, _routingSlip);

    ExecutionResult ExecuteContext.Completed<TLog>(TLog log)
    {
        if (log == null)
            throw new ArgumentNullException(nameof(log));
        // A compensation log is meaningless without somewhere to send it.
        if (_compensationAddress == null)
            throw new InvalidCompensationAddressException(_compensationAddress);

        return new NextActivityExecutionResult<TArguments, TLog>(this, _publisher, _activity, _routingSlip, _compensationAddress, log);
    }

    ExecutionResult ExecuteContext.CompletedWithVariables(IEnumerable<KeyValuePair<string, object>> variables)
    {
        if (variables == null)
            throw new ArgumentNullException(nameof(variables));

        return new NextActivityWithVariablesExecutionResult<TArguments>(this, _publisher, _activity, _routingSlip,
            variables.ToDictionary(x => x.Key, x => x.Value));
    }

    ExecutionResult ExecuteContext.CompletedWithVariables(object variables)
    {
        if (variables == null)
            throw new ArgumentNullException(nameof(variables));

        return new NextActivityWithVariablesExecutionResult<TArguments>(this, _publisher, _activity, _routingSlip,
            RoutingSlipBuilder.GetObjectAsDictionary(variables));
    }

    ExecutionResult ExecuteContext.CompletedWithVariables<TLog>(TLog log, object variables)
    {
        if (log == null)
            throw new ArgumentNullException(nameof(log));
        if (variables == null)
            throw new ArgumentNullException(nameof(variables));
        if (_compensationAddress == null)
            throw new InvalidCompensationAddressException(_compensationAddress);

        return new NextActivityWithVariablesExecutionResult<TArguments, TLog>(this, _publisher, _activity, _routingSlip, _compensationAddress, log,
            RoutingSlipBuilder.GetObjectAsDictionary(variables));
    }

    ExecutionResult ExecuteContext.CompletedWithVariables<TLog>(TLog log, IEnumerable<KeyValuePair<string, object>> variables)
    {
        if (log == null)
            throw new ArgumentNullException(nameof(log));
        if (variables == null)
            throw new ArgumentNullException(nameof(variables));
        if (_compensationAddress == null)
            throw new InvalidCompensationAddressException(_compensationAddress);

        return new NextActivityWithVariablesExecutionResult<TArguments, TLog>(this, _publisher, _activity, _routingSlip, _compensationAddress, log,
            variables.ToDictionary(x => x.Key, x => x.Value));
    }

    ExecutionResult ExecuteContext.ReviseItinerary(Action<ItineraryBuilder> buildItinerary)
    {
        if (buildItinerary == null)
            throw new ArgumentNullException(nameof(buildItinerary));

        return new ReviseItineraryExecutionResult<TArguments>(this, _publisher, _activity, _routingSlip, buildItinerary);
    }

    ExecutionResult ExecuteContext.ReviseItinerary<TLog>(TLog log, Action<ItineraryBuilder> buildItinerary)
    {
        if (log == null)
            throw new ArgumentNullException(nameof(log));
        if (buildItinerary == null)
            throw new ArgumentNullException(nameof(buildItinerary));
        if (_compensationAddress == null)
            throw new InvalidCompensationAddressException(_compensationAddress);

        return new ReviseItineraryExecutionResult<TArguments, TLog>(this, _publisher, _activity, _routingSlip, _compensationAddress, log,
            buildItinerary);
    }

    ExecutionResult ExecuteContext.ReviseItinerary<TLog>(TLog log, object variables, Action<ItineraryBuilder> buildItinerary)
    {
        if (log == null)
            throw new ArgumentNullException(nameof(log));
        if (variables == null)
            throw new ArgumentNullException(nameof(variables));
        if (buildItinerary == null)
            throw new ArgumentNullException(nameof(buildItinerary));
        if (_compensationAddress == null)
            throw new InvalidCompensationAddressException(_compensationAddress);

        return new ReviseItineraryWithVariablesExecutionResult<TArguments, TLog>(this, _publisher, _activity, _routingSlip, _compensationAddress,
            log, RoutingSlipBuilder.GetObjectAsDictionary(variables), buildItinerary);
    }

    ExecutionResult ExecuteContext.ReviseItinerary<TLog>(TLog log, IEnumerable<KeyValuePair<string, object>> variables,
        Action<ItineraryBuilder> buildItinerary)
    {
        if (log == null)
            throw new ArgumentNullException(nameof(log));
        if (variables == null)
            throw new ArgumentNullException(nameof(variables));
        if (buildItinerary == null)
            throw new ArgumentNullException(nameof(buildItinerary));
        if (_compensationAddress == null)
            throw new InvalidCompensationAddressException(_compensationAddress);

        return new ReviseItineraryWithVariablesExecutionResult<TArguments, TLog>(this, _publisher, _activity, _routingSlip, _compensationAddress,
            log, variables.ToDictionary(x => x.Key, x => x.Value), buildItinerary);
    }

    ExecutionResult ExecuteContext.Terminate() => new TerminateExecutionResult<TArguments>(this, _publisher, _activity, _routingSlip);

    ExecutionResult ExecuteContext.Terminate(object variables)
    {
        if (variables == null)
            throw new ArgumentNullException(nameof(variables));

        return new TerminateWithVariablesExecutionResult<TArguments>(this, _publisher, _activity, _routingSlip,
            RoutingSlipBuilder.GetObjectAsDictionary(variables));
    }

    ExecutionResult ExecuteContext.Terminate(IEnumerable<KeyValuePair<string, object>> variables)
    {
        if (variables == null)
            throw new ArgumentNullException(nameof(variables));

        return new TerminateWithVariablesExecutionResult<TArguments>(this, _publisher, _activity, _routingSlip,
            variables.ToDictionary(x => x.Key, x => x.Value));
    }

    // Parameterless fault wraps a generic "activity faulted" exception.
    ExecutionResult ExecuteContext.Faulted() => Faulted(new ActivityExecutionFaultedException());

    ExecutionResult ExecuteContext.Faulted(Exception exception)
    {
        if (exception == null)
            throw new ArgumentNullException(nameof(exception));

        return Faulted(exception);
    }

    Task<ISendEndpoint> ISendEndpointProvider.GetSendEndpoint(Uri address) => _context.GetSendEndpoint(address);

    ConnectHandle IPublishObserverConnector.ConnectPublishObserver(IPublishObserver observer) => _context.ConnectPublishObserver(observer);

    ExecutionResult Faulted(Exception exception) => new FaultedExecutionResult<TArguments>(this, _publisher, _activity, _routingSlip, new FaultExceptionInfo(exception));

    public ConnectHandle ConnectSendObserver(ISendObserver observer) => _context.ConnectSendObserver(observer);
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Batch.Protocol
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for CertificateOperations.
/// </summary>
public static partial class CertificateOperationsExtensions
{
/// <summary>
/// Adds a certificate to the specified account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='certificate'>
/// The certificate to be added.
/// </param>
/// <param name='certificateAddOptions'>
/// Additional parameters for the operation
/// </param>
public static CertificateAddHeaders Add(this ICertificateOperations operations, CertificateAddParameter certificate, CertificateAddOptions certificateAddOptions = default(CertificateAddOptions))
{
    // Synchronous facade: GetAwaiter().GetResult() unwraps the original
    // exception rather than wrapping it in an AggregateException.
    Task<CertificateAddHeaders> task = operations.AddAsync(certificate, certificateAddOptions);
    return task.GetAwaiter().GetResult();
}
/// <summary>
/// Adds a certificate to the specified account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='certificate'>
/// The certificate to be added.
/// </param>
/// <param name='certificateAddOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<CertificateAddHeaders> AddAsync(this ICertificateOperations operations, CertificateAddParameter certificate, CertificateAddOptions certificateAddOptions = default(CertificateAddOptions), CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.AddWithHttpMessagesAsync(certificate, certificateAddOptions, null, cancellationToken).ConfigureAwait(false);
    using (response)
    {
        // Only the response headers carry information for this operation.
        return response.Headers;
    }
}
/// <summary>
/// Lists all of the certificates that have been added to the specified
/// account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='certificateListOptions'>
/// Additional parameters for the operation
/// </param>
public static IPage<Certificate> List(this ICertificateOperations operations, CertificateListOptions certificateListOptions = default(CertificateListOptions))
{
    // Synchronous facade over ListAsync.
    Task<IPage<Certificate>> task = operations.ListAsync(certificateListOptions);
    return task.GetAwaiter().GetResult();
}
/// <summary>
/// Lists all of the certificates that have been added to the specified
/// account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='certificateListOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<Certificate>> ListAsync(this ICertificateOperations operations, CertificateListOptions certificateListOptions = default(CertificateListOptions), CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.ListWithHttpMessagesAsync(certificateListOptions, null, cancellationToken).ConfigureAwait(false);
    using (response)
    {
        // The first page of certificates is carried in the response body.
        return response.Body;
    }
}
/// <summary>
/// Cancels a failed deletion of a certificate from the specified account.
/// </summary>
/// <remarks>
/// If you try to delete a certificate that is being used by a pool or compute
/// node, the status of the certificate changes to deleteFailed. If you decide
/// that you want to continue using the certificate, you can use this operation
/// to set the status of the certificate back to active. If you intend to
/// delete the certificate, you do not need to run this operation after the
/// deletion failed. You must make sure that the certificate is not being used
/// by any resources, and then you can try again to delete the certificate.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='thumbprintAlgorithm'>
/// The algorithm used to derive the thumbprint parameter. This must be sha1.
/// </param>
/// <param name='thumbprint'>
/// The thumbprint of the certificate being deleted.
/// </param>
/// <param name='certificateCancelDeletionOptions'>
/// Additional parameters for the operation
/// </param>
public static CertificateCancelDeletionHeaders CancelDeletion(this ICertificateOperations operations, string thumbprintAlgorithm, string thumbprint, CertificateCancelDeletionOptions certificateCancelDeletionOptions = default(CertificateCancelDeletionOptions))
{
    // Synchronous facade over CancelDeletionAsync.
    Task<CertificateCancelDeletionHeaders> task = operations.CancelDeletionAsync(thumbprintAlgorithm, thumbprint, certificateCancelDeletionOptions);
    return task.GetAwaiter().GetResult();
}
/// <summary>
/// Cancels a failed deletion of a certificate from the specified account.
/// </summary>
/// <remarks>
/// If you try to delete a certificate that is being used by a pool or compute
/// node, the status of the certificate changes to deleteFailed. If you decide
/// that you want to continue using the certificate, you can use this operation
/// to set the status of the certificate back to active. If you intend to
/// delete the certificate, you do not need to run this operation after the
/// deletion failed. You must make sure that the certificate is not being used
/// by any resources, and then you can try again to delete the certificate.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='thumbprintAlgorithm'>
/// The algorithm used to derive the thumbprint parameter. This must be sha1.
/// </param>
/// <param name='thumbprint'>
/// The thumbprint of the certificate being deleted.
/// </param>
/// <param name='certificateCancelDeletionOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<CertificateCancelDeletionHeaders> CancelDeletionAsync(this ICertificateOperations operations, string thumbprintAlgorithm, string thumbprint, CertificateCancelDeletionOptions certificateCancelDeletionOptions = default(CertificateCancelDeletionOptions), CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.CancelDeletionWithHttpMessagesAsync(thumbprintAlgorithm, thumbprint, certificateCancelDeletionOptions, null, cancellationToken).ConfigureAwait(false);
    using (response)
    {
        // Only the response headers carry information for this operation.
        return response.Headers;
    }
}
/// <summary>
/// Deletes a certificate from the specified account.
/// </summary>
/// <remarks>
/// You cannot delete a certificate if a resource (pool or compute node) is
/// using it. Before you can delete a certificate, you must therefore make sure
/// that the certificate is not associated with any existing pools, the
/// certificate is not installed on any compute nodes (even if you remove a
/// certificate from a pool, it is not removed from existing compute nodes in
/// that pool until they restart), and no running tasks depend on the
/// certificate. If you try to delete a certificate that is in use, the
/// deletion fails. The certificate status changes to deleteFailed. You can use
/// Cancel Delete Certificate to set the status back to active if you decide
/// that you want to continue using the certificate.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='thumbprintAlgorithm'>
/// The algorithm used to derive the thumbprint parameter. This must be sha1.
/// </param>
/// <param name='thumbprint'>
/// The thumbprint of the certificate to be deleted.
/// </param>
/// <param name='certificateDeleteOptions'>
/// Additional parameters for the operation
/// </param>
public static CertificateDeleteHeaders Delete(this ICertificateOperations operations, string thumbprintAlgorithm, string thumbprint, CertificateDeleteOptions certificateDeleteOptions = default(CertificateDeleteOptions))
{
    // Synchronous facade over DeleteAsync.
    Task<CertificateDeleteHeaders> task = operations.DeleteAsync(thumbprintAlgorithm, thumbprint, certificateDeleteOptions);
    return task.GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a certificate from the specified account.
/// </summary>
/// <remarks>
/// You cannot delete a certificate if a resource (pool or compute node) is
/// using it. Before you can delete a certificate, you must therefore make sure
/// that the certificate is not associated with any existing pools, the
/// certificate is not installed on any compute nodes (even if you remove a
/// certificate from a pool, it is not removed from existing compute nodes in
/// that pool until they restart), and no running tasks depend on the
/// certificate. If you try to delete a certificate that is in use, the
/// deletion fails. The certificate status changes to deleteFailed. You can use
/// Cancel Delete Certificate to set the status back to active if you decide
/// that you want to continue using the certificate.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='thumbprintAlgorithm'>
/// The algorithm used to derive the thumbprint parameter. This must be sha1.
/// </param>
/// <param name='thumbprint'>
/// The thumbprint of the certificate to be deleted.
/// </param>
/// <param name='certificateDeleteOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<CertificateDeleteHeaders> DeleteAsync(this ICertificateOperations operations, string thumbprintAlgorithm, string thumbprint, CertificateDeleteOptions certificateDeleteOptions = default(CertificateDeleteOptions), CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.DeleteWithHttpMessagesAsync(thumbprintAlgorithm, thumbprint, certificateDeleteOptions, null, cancellationToken).ConfigureAwait(false);
    using (response)
    {
        // Only the typed operation headers are surfaced to the caller; the
        // HTTP response message is disposed when this scope exits.
        return response.Headers;
    }
}
/// <summary>
/// Gets information about the specified certificate.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='thumbprintAlgorithm'>
/// The algorithm used to derive the thumbprint parameter. This must be sha1.
/// </param>
/// <param name='thumbprint'>
/// The thumbprint of the certificate to get.
/// </param>
/// <param name='certificateGetOptions'>
/// Additional parameters for the operation
/// </param>
public static Certificate Get(this ICertificateOperations operations, string thumbprintAlgorithm, string thumbprint, CertificateGetOptions certificateGetOptions = default(CertificateGetOptions))
{
    // Synchronous convenience wrapper over GetAsync; GetAwaiter().GetResult()
    // unwraps AggregateException into the original failure.
    Task<Certificate> getTask = operations.GetAsync(thumbprintAlgorithm, thumbprint, certificateGetOptions);
    return getTask.GetAwaiter().GetResult();
}
/// <summary>
/// Gets information about the specified certificate.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='thumbprintAlgorithm'>
/// The algorithm used to derive the thumbprint parameter. This must be sha1.
/// </param>
/// <param name='thumbprint'>
/// The thumbprint of the certificate to get.
/// </param>
/// <param name='certificateGetOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<Certificate> GetAsync(this ICertificateOperations operations, string thumbprintAlgorithm, string thumbprint, CertificateGetOptions certificateGetOptions = default(CertificateGetOptions), CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.GetWithHttpMessagesAsync(thumbprintAlgorithm, thumbprint, certificateGetOptions, null, cancellationToken).ConfigureAwait(false);
    using (response)
    {
        // Return only the deserialized certificate body; the HTTP response
        // message is disposed when this scope exits.
        return response.Body;
    }
}
/// <summary>
/// Lists all of the certificates that have been added to the specified
/// account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='certificateListNextOptions'>
/// Additional parameters for the operation
/// </param>
public static IPage<Certificate> ListNext(this ICertificateOperations operations, string nextPageLink, CertificateListNextOptions certificateListNextOptions = default(CertificateListNextOptions))
{
    // Synchronous convenience wrapper: fetch the next page by blocking on the
    // asynchronous call; GetAwaiter().GetResult() rethrows the original exception.
    Task<IPage<Certificate>> listTask = operations.ListNextAsync(nextPageLink, certificateListNextOptions);
    return listTask.GetAwaiter().GetResult();
}
/// <summary>
/// Lists all of the certificates that have been added to the specified
/// account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='certificateListNextOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<Certificate>> ListNextAsync(this ICertificateOperations operations, string nextPageLink, CertificateListNextOptions certificateListNextOptions = default(CertificateListNextOptions), CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.ListNextWithHttpMessagesAsync(nextPageLink, certificateListNextOptions, null, cancellationToken).ConfigureAwait(false);
    using (response)
    {
        // Return only the page of certificates; the HTTP response message is
        // disposed when this scope exits.
        return response.Body;
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
// zlib.h -- interface of the 'zlib' general purpose compression library
// version 1.2.1, November 17th, 2003
//
// Copyright (C) 1995-2003 Jean-loup Gailly and Mark Adler
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
//
//
using System.Diagnostics;
namespace System.IO.Compression
{
// Managed DEFLATE (and Deflate64) decompressor implementing the format from RFC 1951.
// The decoder is written as a resumable state machine (_state) so it can stop whenever
// input runs out and continue from the same point on the next call to Inflate.
internal sealed class InflaterManaged
{
// const tables used in decoding:
// Extra bits for length code 257 - 285.
private static readonly byte[] s_extraLengthBits =
{ 0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,16 };
// The base length for length code 257 - 285.
// The formula to get the real length for a length code is lengthBase[code - 257] + (value stored in extraBits)
private static readonly int[] s_lengthBase =
{ 3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,3};
// The base distance for distance code 0 - 31
// The real distance for a distance code is distanceBasePosition[code] + (value stored in extraBits)
private static readonly int[] s_distanceBasePosition =
{ 1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,32769,49153 };
// code lengths for code length alphabet is stored in following order
private static readonly byte[] s_codeOrder = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
private static readonly byte[] s_staticDistanceTreeTable =
{
0x00,0x10,0x08,0x18,0x04,0x14,0x0c,0x1c,0x02,0x12,0x0a,0x1a,
0x06,0x16,0x0e,0x1e,0x01,0x11,0x09,0x19,0x05,0x15,0x0d,0x1d,
0x03,0x13,0x0b,0x1b,0x07,0x17,0x0f,0x1f
};
private readonly OutputWindow _output;
private readonly InputBuffer _input;
private HuffmanTree _literalLengthTree;
private HuffmanTree _distanceTree;
private InflaterState _state;
private bool _hasFormatReader;
private int _bfinal;
private BlockType _blockType;
// uncompressed block
private readonly byte[] _blockLengthBuffer = new byte[4];
private int _blockLength;
// compressed block
private int _length;
private int _distanceCode;
private int _extraBits;
private int _loopCounter;
private int _literalLengthCodeCount;
private int _distanceCodeCount;
private int _codeLengthCodeCount;
private int _codeArraySize;
private int _lengthCode;
private readonly byte[] _codeList; // temporary array to store the code length for literal/Length and distance
private readonly byte[] _codeLengthTreeCodeLength;
private readonly bool _deflate64;
private HuffmanTree _codeLengthTree;
private IFileFormatReader _formatReader; // class to decode header and footer (e.g. gzip)
// Creates an inflater for raw deflate data with no header/footer reader attached.
// deflate64: when true, length code 285 is decoded via its extra-bits table entry
// instead of the fixed length 258 (see DecodeBlock).
public InflaterManaged(bool deflate64)
{
_output = new OutputWindow();
_input = new InputBuffer();
_codeList = new byte[HuffmanTree.MaxLiteralTreeElements + HuffmanTree.MaxDistTreeElements];
_codeLengthTreeCodeLength = new byte[HuffmanTree.NumberOfCodeLengthTreeElements];
_deflate64 = deflate64;
Reset();
}
// Same as above, but optionally attaches a file-format header/footer reader (e.g. gzip).
internal InflaterManaged(IFileFormatReader reader, bool deflate64)
{
_output = new OutputWindow();
_input = new InputBuffer();
_codeList = new byte[HuffmanTree.MaxLiteralTreeElements + HuffmanTree.MaxDistTreeElements];
_codeLengthTreeCodeLength = new byte[HuffmanTree.NumberOfCodeLengthTreeElements];
_deflate64 = deflate64;
if (reader != null)
{
_formatReader = reader;
_hasFormatReader = true;
}
Reset();
}
// Attaches a header/footer reader after construction and restarts the state machine.
public void SetFileFormatReader(IFileFormatReader reader)
{
_formatReader = reader;
_hasFormatReader = true;
Reset();
}
// Picks the initial state: with a format reader attached we must consume the
// file-format header before the first deflate block.
private void Reset()
{
_state = _hasFormatReader ?
InflaterState.ReadingHeader : // start by reading Header info
InflaterState.ReadingBFinal; // start by reading BFinal bit
}
public void SetInput(byte[] inputBytes, int offset, int length) =>
_input.SetInput(inputBytes, offset, length); // append the bytes
public bool Finished() => _state == InflaterState.Done || _state == InflaterState.VerifyingFooter;
public int AvailableOutput => _output.AvailableBytes;
public bool NeedsInput() => _input.NeedsInput();
// Drains decoded bytes into 'bytes' and keeps decoding until the caller's buffer
// is full, input runs out, or the stream ends. Returns the number of bytes
// written into 'bytes' (0 means more input is needed).
public int Inflate(byte[] bytes, int offset, int length)
{
// copy bytes from output to outputbytes if we have available bytes
// if buffer is not filled up. keep decoding until no input are available
// if decodeBlock returns false. Throw an exception.
int count = 0;
do
{
int copied = _output.CopyTo(bytes, offset, length);
if (copied > 0)
{
if (_hasFormatReader)
{
_formatReader.UpdateWithBytesRead(bytes, offset, copied);
}
offset += copied;
count += copied;
length -= copied;
}
if (length == 0)
{ // filled in the bytes array
break;
}
// Decode will return false when more input is needed
} while (!Finished() && Decode());
if (_state == InflaterState.VerifyingFooter)
{ // finished reading CRC
// In this case finished is true and output window has all the data.
// But some data in output window might not be copied out.
if (_output.AvailableBytes == 0)
{
_formatReader.Validate();
}
}
return count;
}
//Each block of compressed data begins with 3 header bits
// containing the following data:
// first bit BFINAL
// next 2 bits BTYPE
// Note that the header bits do not necessarily begin on a byte
// boundary, since a block does not necessarily occupy an integral
// number of bytes.
// BFINAL is set if and only if this is the last block of the data
// set.
// BTYPE specifies how the data are compressed, as follows:
// 00 - no compression
// 01 - compressed with fixed Huffman codes
// 10 - compressed with dynamic Huffman codes
// 11 - reserved (error)
// The only difference between the two compressed cases is how the
// Huffman codes for the literal/length and distance alphabets are
// defined.
//
// This function returns true for success (end of block or output window is full,)
// false if we are short of input
//
private bool Decode()
{
bool eob = false;
bool result = false;
if (Finished())
{
return true;
}
if (_hasFormatReader)
{
if (_state == InflaterState.ReadingHeader)
{
if (!_formatReader.ReadHeader(_input))
{
return false;
}
_state = InflaterState.ReadingBFinal;
}
else if (_state == InflaterState.StartReadingFooter || _state == InflaterState.ReadingFooter)
{
if (!_formatReader.ReadFooter(_input))
return false;
_state = InflaterState.VerifyingFooter;
return true;
}
}
if (_state == InflaterState.ReadingBFinal)
{
// reading bfinal bit
// Need 1 bit
if (!_input.EnsureBitsAvailable(1))
return false;
_bfinal = _input.GetBits(1);
_state = InflaterState.ReadingBType;
}
if (_state == InflaterState.ReadingBType)
{
// Need 2 bits
if (!_input.EnsureBitsAvailable(2))
{
_state = InflaterState.ReadingBType;
return false;
}
_blockType = (BlockType)_input.GetBits(2);
if (_blockType == BlockType.Dynamic)
{
_state = InflaterState.ReadingNumLitCodes;
}
else if (_blockType == BlockType.Static)
{
// Static blocks use the fixed trees defined by the DEFLATE spec.
_literalLengthTree = HuffmanTree.StaticLiteralLengthTree;
_distanceTree = HuffmanTree.StaticDistanceTree;
_state = InflaterState.DecodeTop;
}
else if (_blockType == BlockType.Uncompressed)
{
_state = InflaterState.UncompressedAligning;
}
else
{
throw new InvalidDataException(SR.UnknownBlockType);
}
}
if (_blockType == BlockType.Dynamic)
{
if (_state < InflaterState.DecodeTop)
{
// we are reading the header
result = DecodeDynamicBlockHeader();
}
else
{
result = DecodeBlock(out eob); // this can returns true when output is full
}
}
else if (_blockType == BlockType.Static)
{
result = DecodeBlock(out eob);
}
else if (_blockType == BlockType.Uncompressed)
{
result = DecodeUncompressedBlock(out eob);
}
else
{
throw new InvalidDataException(SR.UnknownBlockType);
}
//
// If we reached the end of the block and the block we were decoding had
// bfinal=1 (final block)
//
if (eob && (_bfinal != 0))
{
if (_hasFormatReader)
_state = InflaterState.StartReadingFooter;
else
_state = InflaterState.Done;
}
return result;
}
// Format of Non-compressed blocks (BTYPE=00):
//
// Any bits of input up to the next byte boundary are ignored.
// The rest of the block consists of the following information:
//
// 0 1 2 3 4...
// +---+---+---+---+================================+
// | LEN | NLEN |... LEN bytes of literal data...|
// +---+---+---+---+================================+
//
// LEN is the number of data bytes in the block. NLEN is the
// one's complement of LEN.
private bool DecodeUncompressedBlock(out bool end_of_block)
{
end_of_block = false;
while (true)
{
switch (_state)
{
case InflaterState.UncompressedAligning: // initial state when calling this function
// we must skip to a byte boundary
_input.SkipToByteBoundary();
_state = InflaterState.UncompressedByte1;
goto case InflaterState.UncompressedByte1;
case InflaterState.UncompressedByte1: // decoding block length
case InflaterState.UncompressedByte2:
case InflaterState.UncompressedByte3:
case InflaterState.UncompressedByte4:
int bits = _input.GetBits(8);
if (bits < 0)
{
return false;
}
// The four UncompressedByteN states are consecutive, so the difference
// from UncompressedByte1 indexes the length buffer.
_blockLengthBuffer[_state - InflaterState.UncompressedByte1] = (byte)bits;
if (_state == InflaterState.UncompressedByte4)
{
_blockLength = _blockLengthBuffer[0] + ((int)_blockLengthBuffer[1]) * 256;
int blockLengthComplement = _blockLengthBuffer[2] + ((int)_blockLengthBuffer[3]) * 256;
// make sure complement matches
if ((ushort)_blockLength != (ushort)(~blockLengthComplement))
{
throw new InvalidDataException(SR.InvalidBlockLength);
}
}
_state += 1;
break;
case InflaterState.DecodingUncompressed: // copying block data
// Directly copy bytes from input to output.
int bytesCopied = _output.CopyFrom(_input, _blockLength);
_blockLength -= bytesCopied;
if (_blockLength == 0)
{
// Done with this block, need to re-init bit buffer for next block
_state = InflaterState.ReadingBFinal;
end_of_block = true;
return true;
}
// We can fail to copy all bytes for two reasons:
// Running out of Input
// running out of free space in output window
if (_output.FreeBytes == 0)
{
return true;
}
return false;
default:
Debug.Fail("check why we are here!");
throw new InvalidDataException(SR.UnknownState);
}
}
}
private bool DecodeBlock(out bool end_of_block_code_seen)
{
end_of_block_code_seen = false;
int freeBytes = _output.FreeBytes; // it is a little bit faster than frequently accessing the property
while (freeBytes > 65536)
{
// With Deflate64 we can have up to a 64kb length, so we ensure at least that much space is available
// in the OutputWindow to avoid overwriting previous unflushed output data.
int symbol;
switch (_state)
{
case InflaterState.DecodeTop:
// decode an element from the literal tree
// TODO: optimize this!!!
symbol = _literalLengthTree.GetNextSymbol(_input);
if (symbol < 0)
{
// running out of input
return false;
}
if (symbol < 256)
{
// literal
_output.Write((byte)symbol);
--freeBytes;
}
else if (symbol == 256)
{
// end of block
end_of_block_code_seen = true;
// Reset state
_state = InflaterState.ReadingBFinal;
return true;
}
else
{
// length/distance pair
symbol -= 257; // length code started at 257
if (symbol < 8)
{
symbol += 3; // match length = 3,4,5,6,7,8,9,10
_extraBits = 0;
}
else if (!_deflate64 && symbol == 28)
{
// extra bits for code 285 is 0
symbol = 258; // code 285 means length 258
_extraBits = 0;
}
else
{
if (symbol < 0 || symbol >= s_extraLengthBits.Length)
{
throw new InvalidDataException(SR.GenericInvalidData);
}
_extraBits = s_extraLengthBits[symbol];
Debug.Assert(_extraBits != 0, "We handle other cases separately!");
}
_length = symbol;
goto case InflaterState.HaveInitialLength;
}
break;
case InflaterState.HaveInitialLength:
if (_extraBits > 0)
{
_state = InflaterState.HaveInitialLength;
int bits = _input.GetBits(_extraBits);
if (bits < 0)
{
return false;
}
if (_length < 0 || _length >= s_lengthBase.Length)
{
throw new InvalidDataException(SR.GenericInvalidData);
}
_length = s_lengthBase[_length] + bits;
}
_state = InflaterState.HaveFullLength;
goto case InflaterState.HaveFullLength;
case InflaterState.HaveFullLength:
if (_blockType == BlockType.Dynamic)
{
_distanceCode = _distanceTree.GetNextSymbol(_input);
}
else
{
// get distance code directly for static block
_distanceCode = _input.GetBits(5);
if (_distanceCode >= 0)
{
_distanceCode = s_staticDistanceTreeTable[_distanceCode];
}
}
if (_distanceCode < 0)
{
// running out input
return false;
}
_state = InflaterState.HaveDistCode;
goto case InflaterState.HaveDistCode;
case InflaterState.HaveDistCode:
// To avoid a table lookup we note that for distanceCode > 3,
// extra_bits = (distanceCode-2) >> 1
int offset;
if (_distanceCode > 3)
{
_extraBits = (_distanceCode - 2) >> 1;
int bits = _input.GetBits(_extraBits);
if (bits < 0)
{
return false;
}
offset = s_distanceBasePosition[_distanceCode] + bits;
}
else
{
offset = _distanceCode + 1;
}
_output.WriteLengthDistance(_length, offset);
freeBytes -= _length;
_state = InflaterState.DecodeTop;
break;
default:
Debug.Fail("check why we are here!");
throw new InvalidDataException(SR.UnknownState);
}
}
return true;
}
// Format of the dynamic block header:
// 5 Bits: HLIT, # of Literal/Length codes - 257 (257 - 286)
// 5 Bits: HDIST, # of Distance codes - 1 (1 - 32)
// 4 Bits: HCLEN, # of Code Length codes - 4 (4 - 19)
//
// (HCLEN + 4) x 3 bits: code lengths for the code length
// alphabet given just above, in the order: 16, 17, 18,
// 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
//
// These code lengths are interpreted as 3-bit integers
// (0-7); as above, a code length of 0 means the
// corresponding symbol (literal/length or distance code
// length) is not used.
//
// HLIT + 257 code lengths for the literal/length alphabet,
// encoded using the code length Huffman code
//
// HDIST + 1 code lengths for the distance alphabet,
// encoded using the code length Huffman code
//
// The code length repeat codes can cross from HLIT + 257 to the
// HDIST + 1 code lengths. In other words, all code lengths form
// a single sequence of HLIT + HDIST + 258 values.
private bool DecodeDynamicBlockHeader()
{
switch (_state)
{
case InflaterState.ReadingNumLitCodes:
_literalLengthCodeCount = _input.GetBits(5);
if (_literalLengthCodeCount < 0)
{
return false;
}
_literalLengthCodeCount += 257;
_state = InflaterState.ReadingNumDistCodes;
goto case InflaterState.ReadingNumDistCodes;
case InflaterState.ReadingNumDistCodes:
_distanceCodeCount = _input.GetBits(5);
if (_distanceCodeCount < 0)
{
return false;
}
_distanceCodeCount += 1;
_state = InflaterState.ReadingNumCodeLengthCodes;
goto case InflaterState.ReadingNumCodeLengthCodes;
case InflaterState.ReadingNumCodeLengthCodes:
_codeLengthCodeCount = _input.GetBits(4);
if (_codeLengthCodeCount < 0)
{
return false;
}
_codeLengthCodeCount += 4;
_loopCounter = 0;
_state = InflaterState.ReadingCodeLengthCodes;
goto case InflaterState.ReadingCodeLengthCodes;
case InflaterState.ReadingCodeLengthCodes:
while (_loopCounter < _codeLengthCodeCount)
{
int bits = _input.GetBits(3);
if (bits < 0)
{
return false;
}
_codeLengthTreeCodeLength[s_codeOrder[_loopCounter]] = (byte)bits;
++_loopCounter;
}
// Code lengths not transmitted are zero (symbol unused).
for (int i = _codeLengthCodeCount; i < s_codeOrder.Length; i++)
{
_codeLengthTreeCodeLength[s_codeOrder[i]] = 0;
}
// create huffman tree for code length
_codeLengthTree = new HuffmanTree(_codeLengthTreeCodeLength);
_codeArraySize = _literalLengthCodeCount + _distanceCodeCount;
_loopCounter = 0; // reset loop count
_state = InflaterState.ReadingTreeCodesBefore;
goto case InflaterState.ReadingTreeCodesBefore;
case InflaterState.ReadingTreeCodesBefore:
case InflaterState.ReadingTreeCodesAfter:
while (_loopCounter < _codeArraySize)
{
if (_state == InflaterState.ReadingTreeCodesBefore)
{
if ((_lengthCode = _codeLengthTree.GetNextSymbol(_input)) < 0)
{
return false;
}
}
// The alphabet for code lengths is as follows:
// 0 - 15: Represent code lengths of 0 - 15
// 16: Copy the previous code length 3 - 6 times.
// The next 2 bits indicate repeat length
// (0 = 3, ... , 3 = 6)
// Example: Codes 8, 16 (+2 bits 11),
// 16 (+2 bits 10) will expand to
// 12 code lengths of 8 (1 + 6 + 5)
// 17: Repeat a code length of 0 for 3 - 10 times.
// (3 bits of length)
// 18: Repeat a code length of 0 for 11 - 138 times
// (7 bits of length)
if (_lengthCode <= 15)
{
_codeList[_loopCounter++] = (byte)_lengthCode;
}
else
{
int repeatCount;
if (_lengthCode == 16)
{
if (!_input.EnsureBitsAvailable(2))
{
_state = InflaterState.ReadingTreeCodesAfter;
return false;
}
if (_loopCounter == 0)
{
// can't have "prev code" on first code
throw new InvalidDataException();
}
byte previousCode = _codeList[_loopCounter - 1];
repeatCount = _input.GetBits(2) + 3;
if (_loopCounter + repeatCount > _codeArraySize)
{
throw new InvalidDataException();
}
for (int j = 0; j < repeatCount; j++)
{
_codeList[_loopCounter++] = previousCode;
}
}
else if (_lengthCode == 17)
{
if (!_input.EnsureBitsAvailable(3))
{
_state = InflaterState.ReadingTreeCodesAfter;
return false;
}
repeatCount = _input.GetBits(3) + 3;
if (_loopCounter + repeatCount > _codeArraySize)
{
throw new InvalidDataException();
}
for (int j = 0; j < repeatCount; j++)
{
_codeList[_loopCounter++] = 0;
}
}
else
{
// code == 18
if (!_input.EnsureBitsAvailable(7))
{
_state = InflaterState.ReadingTreeCodesAfter;
return false;
}
repeatCount = _input.GetBits(7) + 11;
if (_loopCounter + repeatCount > _codeArraySize)
{
throw new InvalidDataException();
}
for (int j = 0; j < repeatCount; j++)
{
_codeList[_loopCounter++] = 0;
}
}
}
_state = InflaterState.ReadingTreeCodesBefore; // we want to read the next code.
}
break;
default:
Debug.Fail("check why we are here!");
throw new InvalidDataException(SR.UnknownState);
}
byte[] literalTreeCodeLength = new byte[HuffmanTree.MaxLiteralTreeElements];
byte[] distanceTreeCodeLength = new byte[HuffmanTree.MaxDistTreeElements];
// Create literal and distance tables
Array.Copy(_codeList, 0, literalTreeCodeLength, 0, _literalLengthCodeCount);
Array.Copy(_codeList, _literalLengthCodeCount, distanceTreeCodeLength, 0, _distanceCodeCount);
// Make sure there is an end-of-block code, otherwise how could we ever end?
if (literalTreeCodeLength[HuffmanTree.EndOfBlockCode] == 0)
{
throw new InvalidDataException();
}
_literalLengthTree = new HuffmanTree(literalTreeCodeLength);
_distanceTree = new HuffmanTree(distanceTreeCodeLength);
_state = InflaterState.DecodeTop;
return true;
}
// Nothing to release: the inflater holds only managed buffers.
public void Dispose() { }
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
/*============================================================
**
** Class: SafeHandle
**
**
** A specially designed handle wrapper to ensure we never leak
** an OS handle. The runtime treats this class specially during
** P/Invoke marshaling and finalization. Users should write
** subclasses of SafeHandle for each distinct handle type.
**
**
===========================================================*/
namespace System.Runtime.InteropServices {
using System;
using System.Reflection;
using System.Threading;
using System.Security.Permissions;
using System.Runtime;
using System.Runtime.CompilerServices;
using System.IO;
using System.Runtime.ConstrainedExecution;
using System.Runtime.Versioning;
/*
Problems addressed by the SafeHandle class:
1) Critical finalization - ensure we never leak OS resources in SQL. Done
without running truly arbitrary & unbounded amounts of managed code.
2) Reduced graph promotion - during finalization, keep object graph small
3) GC.KeepAlive behavior - P/Invoke vs. finalizer thread ---- (HandleRef)
4) Elimination of security races w/ explicit calls to Close (HandleProtector)
5) Enforcement of the above via the type system - Don't use IntPtr anymore.
6) Allows the handle lifetime to be controlled externally via a boolean.
Subclasses of SafeHandle will implement the ReleaseHandle abstract method
used to execute any code required to free the handle. This method will be
prepared as a constrained execution region at instance construction time
(along with all the methods in its statically determinable call graph). This
implies that we won't get any inconvenient jit allocation errors or rude
thread abort interrupts while releasing the handle but the user must still
write careful code to avoid injecting fault paths of their own (see the CER
spec for more details). In particular, any sub-methods you call should be
decorated with a reliability contract of the appropriate level. In most cases
this should be:
ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)
Also, any P/Invoke methods should use the SuppressUnmanagedCodeSecurity
attribute to avoid a runtime security check that can also inject failures
(even if the check is guaranteed to pass).
The GC will run ReleaseHandle methods after any normal finalizers have been
run for objects that were collected at the same time. This ensures classes
like FileStream can run a normal finalizer to flush out existing buffered
data. This is key - it means adding this class to a class like FileStream does
not alter our current semantics w.r.t. finalization today.
Subclasses must also implement the IsInvalid property so that the
infrastructure can tell when critical finalization is actually required.
Again, this method is prepared ahead of time. It's envisioned that direct
subclasses of SafeHandle will provide an IsInvalid implementation that suits
the general type of handle they support (null is invalid, -1 is invalid etc.)
and then these classes will be further derived for specific safe handle types.
Most classes using SafeHandle should not provide a finalizer. If they do
need to do so (ie, for flushing out file buffers, needing to write some data
back into memory, etc), then they can provide a finalizer that will be
guaranteed to run before the SafeHandle's critical finalizer.
Note that SafeHandle's ReleaseHandle is called from a constrained execution
region, and is eagerly prepared before we create your class. This means you
should only call methods with an appropriate reliability contract from your
ReleaseHandle method.
Subclasses are expected to be written as follows (note that
SuppressUnmanagedCodeSecurity should always be used on any P/Invoke methods
invoked as part of ReleaseHandle, in order to switch the security check from
runtime to jit time and thus remove a possible failure path from the
invocation of the method):
internal sealed class MySafeHandleSubclass : SafeHandle {
// Called by P/Invoke when returning SafeHandles
private MySafeHandleSubclass() : base(IntPtr.Zero, true)
{
}
// If & only if you need to support user-supplied handles
internal MySafeHandleSubclass(IntPtr preexistingHandle, bool ownsHandle) : base(IntPtr.Zero, ownsHandle)
{
SetHandle(preexistingHandle);
}
// Do not provide a finalizer - SafeHandle's critical finalizer will
// call ReleaseHandle for you.
public override bool IsInvalid {
get { return handle == IntPtr.Zero; }
}
override protected bool ReleaseHandle()
{
return MyNativeMethods.CloseHandle(handle);
}
}
Then elsewhere to create one of these SafeHandles, define a method
with the following type of signature (CreateFile follows this model).
Note that when returning a SafeHandle like this, P/Invoke will call your
class's default constructor. Also, you probably want to define CloseHandle
somewhere, and remember to apply a reliability contract to it.
[SuppressUnmanagedCodeSecurity]
internal static class MyNativeMethods {
[DllImport("kernel32")]
private static extern MySafeHandleSubclass CreateHandle(int someState);
[DllImport("kernel32", SetLastError=true), ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
private static extern bool CloseHandle(IntPtr handle);
}
Drawbacks with this implementation:
1) Requires some magic to run the critical finalizer.
2) Requires more memory than just an IntPtr.
3) If you use DangerousAddRef and forget to call DangerousRelease, you can leak a SafeHandle. Use CER's & don't do that.
*/
// This class should not be serializable - it's a handle. We require unmanaged
// code permission to subclass SafeHandle to prevent people from writing a
// subclass and suddenly being able to run arbitrary native code with the
// same signature as CloseHandle. This is technically a little redundant, but
// we'll do this to ensure we've cut off all attack vectors. Similarly, all
// methods have a link demand to ensure untrusted code cannot directly edit
// or alter a handle.
[System.Security.SecurityCritical] // auto-generated_required
#if !FEATURE_CORECLR
[SecurityPermission(SecurityAction.InheritanceDemand, UnmanagedCode=true)]
#endif
public abstract class SafeHandle : CriticalFinalizerObject, IDisposable
{
// ! Do not add or rearrange fields as the EE depends on this layout.
//------------------------------------------------------------------
#if DEBUG
// FxCop thinks this field is marshaled and so it raises a CA2101 error unless
// we specify this. In practice this is never presented to Win32.
[MarshalAs(UnmanagedType.LPWStr)]
private String _stackTrace; // Where we allocated this SafeHandle.
#endif
#if !FEATURE_CORECLR
[System.Runtime.ForceTokenStabilization]
#endif //!FEATURE_CORECLR
protected IntPtr handle; // this must be protected so derived classes can use out params.
private int _state; // Combined ref count and closed/disposed flags (so we can atomically modify them).
private bool _ownsHandle; // Whether we can release this handle.
#pragma warning disable 414
private bool _fullyInitialized; // Whether constructor completed.
#pragma warning restore 414
// Creates a SafeHandle class. Users must then set the Handle property.
// To prevent the SafeHandle from being freed, write a subclass that
// doesn't define a finalizer.
// Initializes the wrapper with its "invalid" sentinel value; subclasses either let
// P/Invoke marshaling fill in 'handle' or call SetHandle with a pre-existing value.
// ownsHandle controls whether this instance is responsible for releasing the handle.
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
protected SafeHandle(IntPtr invalidHandleValue, bool ownsHandle)
{
handle = invalidHandleValue;
_state = 4; // Ref count 1 and not closed or disposed.
_ownsHandle = ownsHandle;
// If we don't own the handle there is nothing for the critical finalizer to
// release, so opt out of finalization entirely.
if (!ownsHandle)
GC.SuppressFinalize(this);
#if DEBUG
if (BCLDebug.SafeHandleStackTracesEnabled)
_stackTrace = Environment.GetStackTrace(null, false);
else
_stackTrace = "For a stack trace showing who allocated this SafeHandle, set SafeHandleStackTraces to 1 and rerun your app.";
#endif
// Set this last to prevent SafeHandle's finalizer from freeing an
// invalid handle. This means we don't have to worry about
// ThreadAbortExceptions interrupting this constructor or the managed
// constructors on subclasses that call this constructor.
_fullyInitialized = true;
}
#if FEATURE_CORECLR
// Migrating InheritanceDemands requires this default ctor, so we can mark it critical
// Always throws: the assert and NotImplementedException document that this
// constructor exists only for security-annotation purposes, never to be invoked.
protected SafeHandle()
{
BCLDebug.Assert(false, "SafeHandle's protected default ctor should never be used!");
throw new NotImplementedException();
}
#endif
[System.Security.SecuritySafeCritical] // auto-generated
// Critical finalizer path (SafeHandle derives from CriticalFinalizerObject);
// delegates to Dispose(false).
~SafeHandle()
{
Dispose(false);
}
[ResourceExposure(ResourceScope.None)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
// Implemented inside the runtime (InternalCall); presumably performs the native
// side of handle finalization — confirm against the CLR/VM sources.
extern void InternalFinalize();
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#if !FEATURE_CORECLR
[TargetedPatchingOptOut("Performance critical to inline across NGen image boundaries")]
#endif
// Stores a pre-existing OS handle into this wrapper; intended for subclass
// constructors that accept a user-supplied handle (see the usage example above).
protected void SetHandle(IntPtr handle) {
this.handle = handle;
}
// This method is necessary for getting an IntPtr out of a SafeHandle.
// Used to tell whether a call to create the handle succeeded by comparing
// the handle against a known invalid value, and for backwards
// compatibility to support the handle properties returning IntPtrs on
// many of our Framework classes.
// Note that this method is dangerous for two reasons:
// 1) If the handle has been marked invalid with SetHandleasInvalid,
// DangerousGetHandle will still return the original handle value.
// 2) The handle returned may be recycled at any point. At best this means
// the handle might stop working suddenly. At worst, if the handle or
// the resource the handle represents is exposed to untrusted code in
// any way, this can lead to a handle recycling security attack (i.e. an
// untrusted caller can query data on the handle you've just returned
// and get back information for an entirely unrelated resource).
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[ResourceExposure(ResourceScope.None)]
#if !FEATURE_CORECLR
[TargetedPatchingOptOut("Performance critical to inline across NGen image boundaries")]
#endif
public IntPtr DangerousGetHandle()
{
return handle;
}
// True once the handle has been closed (Close/Dispose has run).
public bool IsClosed {
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
#if !FEATURE_CORECLR
    [TargetedPatchingOptOut("Performance critical to inline across NGen image boundaries")]
#endif
    // Bit 0 of _state is the "closed" flag.
    get { return (_state & 1) == 1; }
}

// Subclasses define what "invalid" means for their particular handle type
// (e.g. 0 or -1 for many Win32 handles).
public abstract bool IsInvalid {
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
    get;
}

// Equivalent to Dispose(); kept for API familiarity with handle-owning types.
[System.Security.SecurityCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
public void Close() {
    Dispose(true);
}

[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
public void Dispose() {
    Dispose(true);
}
[System.Security.SecurityCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
protected virtual void Dispose(bool disposing)
{
    // disposing == true: called from Dispose()/Close().
    // disposing == false: called from the finalizer.
    if (disposing)
        InternalDispose();
    else
        InternalFinalize();
}

// Runtime-implemented release used on the explicit Dispose/Close path.
[ResourceExposure(ResourceScope.None)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void InternalDispose();

// This should only be called for cases when you know for a fact that
// your handle is invalid and you want to record that information.
// An example is calling a syscall and getting back ERROR_INVALID_HANDLE.
// This method will normally leak handles!
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public extern void SetHandleAsInvalid();

// Implement this abstract method in your derived class to specify how to
// free the handle. Be careful not write any code that's subject to faults
// in this method (the runtime will prepare the infrastructure for you so
// that no jit allocations etc. will occur, but don't allocate memory unless
// you can deal with the failure and still free the handle).
// The boolean returned should be true for success and false if the runtime
// should fire a SafeHandleCriticalFailure MDA (CustomerDebugProbe) if that
// MDA is enabled.
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
protected abstract bool ReleaseHandle();
// Add a reason why this handle should not be relinquished (i.e. have
// ReleaseHandle called on it). This method has dangerous in the name since
// it must always be used carefully (e.g. called within a CER) to avoid
// leakage of the handle. It returns a boolean indicating whether the
// increment was actually performed to make it easy for program logic to
// back out in failure cases (i.e. is a call to DangerousRelease needed).
// It is passed back via a ref parameter rather than as a direct return so
// that callers need not worry about the atomicity of calling the routine
// and assigning the return value to a variable (the variable should be
// explicitly set to false prior to the call). The only failure cases are
// when the method is interrupted prior to processing by a thread abort or
// when the handle has already been (or is in the process of being)
// released.
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public extern void DangerousAddRef(ref bool success);

// Partner to DangerousAddRef. This should always be successful when used in
// a correct manner (i.e. matching a successful DangerousAddRef and called
// from a region such as a CER where a thread abort cannot interrupt
// processing). In the same way that unbalanced DangerousAddRef calls can
// cause resource leakage, unbalanced DangerousRelease calls may cause
// invalid handle states to become visible to other threads. This
// constitutes a potential security hole (via handle recycling) as well as a
// correctness problem -- so don't ever expose Dangerous* calls out to
// untrusted code.
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public extern void DangerousRelease();
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using Microsoft.CSharp.RuntimeBinder.Syntax;
namespace Microsoft.CSharp.RuntimeBinder.Semantics
{
internal partial class CMemberLookupResults
{
public partial class CMethodIterator
{
// Collaborators used for symbol lookup and accessibility checks.
private SymbolLoader _pSymbolLoader;
private CSemanticChecker _pSemanticChecker;
// Inputs.
private AggregateType _pCurrentType;     // type currently being enumerated
private MethodOrPropertySymbol _pCurrentSym; // symbol the iterator is positioned on
private AggregateDeclaration _pContext;  // context used for accessibility checks
private TypeArray _pContainingTypes;     // types to search for instance members
private CType _pQualifyingType;
private Name _pName;                     // member name being looked up
private int _nArity;                     // required type-parameter count (0 = any)
private symbmask_t _mask;                // method vs. property lookup
private EXPRFLAG _flags;                 // e.g. EXF_CTOR/EXF_OPERATOR filters
// Internal state.
private int _nCurrentTypeCount;          // index into _pContainingTypes
private bool _bIsCheckingInstanceMethods;
private bool _bAtEnd;
private bool _bAllowBogusAndInaccessible; // if set, bogus/inaccessible syms are yielded but marked
// Flags for the current sym.
private bool _bCurrentSymIsBogus;
private bool _bCurrentSymIsInaccessible;
// if Extension can be part of the results that are returned by the iterator
// this may be false if an applicable instance method was found by bindgrptoArgs
private bool _bcanIncludeExtensionsInResults;
// we have found a applicable extension and only continue to the end of the current
// Namespace's extension methodlist
private bool _bEndIterationAtCurrentExtensionList;
/// <summary>
/// Creates an iterator positioned before the first candidate method/property
/// named <paramref name="name"/> in <paramref name="containingTypes"/>.
/// </summary>
public CMethodIterator(CSemanticChecker checker, SymbolLoader symLoader, Name name, TypeArray containingTypes, CType @object, CType qualifyingType, AggregateDeclaration context, bool allowBogusAndInaccessible, bool allowExtensionMethods, int arity, EXPRFLAG flags, symbmask_t mask)
{
    Debug.Assert(name != null);
    Debug.Assert(symLoader != null);
    Debug.Assert(checker != null);
    Debug.Assert(containingTypes != null);

    // Collaborators.
    _pSemanticChecker = checker;
    _pSymbolLoader = symLoader;

    // Lookup inputs.
    _pName = name;
    _pContainingTypes = containingTypes;
    _pQualifyingType = qualifyingType;
    _pContext = context;
    _nArity = arity;
    _flags = flags;
    _mask = mask;
    _bAllowBogusAndInaccessible = allowBogusAndInaccessible;
    _bcanIncludeExtensionsInResults = allowExtensionMethods;

    // Iteration state: positioned before the first type/symbol.
    _pCurrentType = null;
    _pCurrentSym = null;
    _nCurrentTypeCount = 0;
    _bIsCheckingInstanceMethods = true;
    _bAtEnd = false;
    _bCurrentSymIsBogus = false;
    _bCurrentSymIsInaccessible = false;
    _bEndIterationAtCurrentExtensionList = false;
}
/// <summary>The symbol the iterator is currently positioned on (null before the first MoveNext).</summary>
public MethodOrPropertySymbol GetCurrentSymbol() => _pCurrentSym;

/// <summary>The type whose members are currently being enumerated.</summary>
public AggregateType GetCurrentType() => _pCurrentType;

/// <summary>True when the current symbol failed the accessibility check but was kept because bogus/inaccessible symbols are allowed.</summary>
public bool IsCurrentSymbolInaccessible() => _bCurrentSymIsInaccessible;

/// <summary>True when the current symbol was flagged bogus but was kept because bogus/inaccessible symbols are allowed.</summary>
public bool IsCurrentSymbolBogus() => _bCurrentSymIsBogus;
/// <summary>
/// Advances to the next candidate symbol. Returns false once exhausted.
/// </summary>
/// <param name="canIncludeExtensionsInResults">Once false, extensions stay excluded for the rest of the iteration.</param>
/// <param name="endatCurrentExtensionList">Once true, iteration keeps stopping at the current extension list.</param>
public bool MoveNext(bool canIncludeExtensionsInResults, bool endatCurrentExtensionList)
{
    // Both flags are one-way latches: extensions can only be switched off,
    // and the "end at current extension list" request can only be switched on.
    _bcanIncludeExtensionsInResults &= canIncludeExtensionsInResults;
    _bEndIterationAtCurrentExtensionList |= endatCurrentExtensionList;

    if (_bAtEnd)
    {
        return false;
    }

    // First call: position on the first containing type.
    if (_pCurrentType == null)
    {
        if (_pContainingTypes.Count == 0)
        {
            // No instance methods, only extensions.
            _bIsCheckingInstanceMethods = false;
            _bAtEnd = true;
            return false;
        }

        if (!FindNextTypeForInstanceMethods())
        {
            // No instance or extensions.
            _bAtEnd = true;
            return false;
        }
    }

    if (!FindNextMethod())
    {
        _bAtEnd = true;
        return false;
    }

    return true;
}
/// <summary>True when there is no current symbol (iteration has run off the end).</summary>
public bool AtEnd() => _pCurrentSym == null;

// Accessors kept private; only the iterator itself needs them.
private CSemanticChecker GetSemanticChecker() => _pSemanticChecker;

private SymbolLoader GetSymbolLoader() => _pSymbolLoader;
/// <summary>
/// Applies the iterator's filters (ctor/operator/indexer flags, arity,
/// callability, accessibility, bogosity, extension-ness) to the current
/// symbol. Returns false when the caller should skip to the next symbol.
/// Side effect: sets the "inaccessible"/"bogus" flags for the current symbol.
/// </summary>
public bool CanUseCurrentSymbol()
{
    _bCurrentSymIsInaccessible = false;
    _bCurrentSymIsBogus = false;

    // Make sure that whether we're seeing a ctor is consistent with the flag.
    // The only properties we handle are indexers.
    // NOTE(review): "0 == (_flags & X) != !cond" parses as
    // "((0 == (_flags & X)) != !cond)", i.e. reject when the flag's presence
    // disagrees with whether the symbol actually is a ctor/operator.
    if (_mask == symbmask_t.MASK_MethodSymbol && (
            0 == (_flags & EXPRFLAG.EXF_CTOR) != !_pCurrentSym.AsMethodSymbol().IsConstructor() ||
            0 == (_flags & EXPRFLAG.EXF_OPERATOR) != !_pCurrentSym.AsMethodSymbol().isOperator) ||
        _mask == symbmask_t.MASK_PropertySymbol && !_pCurrentSym.AsPropertySymbol().isIndexer())
    {
        // Get the next symbol.
        return false;
    }

    // If our arity is non-0, we must match arity with this symbol.
    if (_nArity > 0)
    {
        if (_mask == symbmask_t.MASK_MethodSymbol && _pCurrentSym.AsMethodSymbol().typeVars.Count != _nArity)
        {
            return false;
        }
    }

    // If this guy's not callable, no good.
    if (!ExpressionBinder.IsMethPropCallable(_pCurrentSym, (_flags & EXPRFLAG.EXF_USERCALLABLE) != 0))
    {
        return false;
    }

    // Check access.
    if (!GetSemanticChecker().CheckAccess(_pCurrentSym, _pCurrentType, _pContext, _pQualifyingType))
    {
        // Sym is not accessible. However, if we're allowing inaccessible, then let it through and mark it.
        if (_bAllowBogusAndInaccessible)
        {
            _bCurrentSymIsInaccessible = true;
        }
        else
        {
            return false;
        }
    }

    // Check bogus.
    if (GetSemanticChecker().CheckBogus(_pCurrentSym))
    {
        // Sym is bogus, but if we're allow it, then let it through and mark it.
        if (_bAllowBogusAndInaccessible)
        {
            _bCurrentSymIsBogus = true;
        }
        else
        {
            return false;
        }
    }

    // if we are done checking all the instance types ensure that currentsym is an
    // extension method and not a simple static method
    if (!_bIsCheckingInstanceMethods)
    {
        if (!_pCurrentSym.AsMethodSymbol().IsExtension())
        {
            return false;
        }
    }

    return true;
}
/// <summary>
/// Advances _pCurrentSym to the next member with the right name/mask,
/// walking through the containing types (and their base classes) as each
/// type's member list is exhausted. Returns true when a symbol was found.
/// </summary>
private bool FindNextMethod()
{
    while (true)
    {
        if (_pCurrentSym == null)
        {
            // First member of the current type.
            _pCurrentSym = GetSymbolLoader().LookupAggMember(
                _pName, _pCurrentType.getAggregate(), _mask).AsMethodOrPropertySymbol();
        }
        else
        {
            // Next member with the same name in the current type.
            _pCurrentSym = GetSymbolLoader().LookupNextSym(
                _pCurrentSym, _pCurrentType.getAggregate(), _mask).AsMethodOrPropertySymbol();
        }

        // If we couldn't find a sym, we look up the type chain and get the next type.
        if (_pCurrentSym == null)
        {
            if (_bIsCheckingInstanceMethods)
            {
                if (!FindNextTypeForInstanceMethods() && _bcanIncludeExtensionsInResults)
                {
                    // We didn't find any more instance methods, set us into extension mode.
                    // NOTE(review): _pCurrentType is null here; the extension-method
                    // walk appears to be handled elsewhere in this partial class --
                    // confirm before relying on this path.
                    _bIsCheckingInstanceMethods = false;
                }
                else if (_pCurrentType == null && !_bcanIncludeExtensionsInResults)
                {
                    // No more types and extensions are excluded: done.
                    return false;
                }
                else
                {
                    // Found an instance method.
                    continue;
                }
            }
            continue;
        }

        // Note that we do not filter the current symbol for the user. They must do that themselves.
        // This is because for instance, BindGrpToArgs wants to filter on arguments before filtering
        // on bogosity.
        // If we're here, we're good to go.
        break;
    }
    return true;
}
/// <summary>
/// Advances _pCurrentType to the next type to search for instance members:
/// the next entry in the explicit containing-type list if one was supplied,
/// otherwise the base class of the current type. Returns false when there
/// are no more types.
/// </summary>
private bool FindNextTypeForInstanceMethods()
{
    if (_pContainingTypes.Count == 0)
    {
        // No explicit list; walk up the inheritance chain instead.
        _pCurrentType = _pCurrentType.GetBaseClass();
    }
    else if (_nCurrentTypeCount < _pContainingTypes.Count)
    {
        // Take the next type from the explicit list.
        _pCurrentType = _pContainingTypes[_nCurrentTypeCount++].AsAggregateType();
    }
    else
    {
        // Explicit list exhausted.
        _pCurrentType = null;
    }

    return _pCurrentType != null;
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Description;
using CoffeeOrders.Areas.HelpPage.Models;
namespace CoffeeOrders.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
// Key prefix under which generated HelpPageApiModel instances are cached
// in HttpConfiguration.Properties (see GetHelpPageApiModel).
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Sets the documentation provider for help page.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="documentationProvider">The documentation provider.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
    // Replace whatever IDocumentationProvider is currently registered.
    var services = config.Services;
    services.Replace(typeof(IDocumentationProvider), documentationProvider);
}

/// <summary>
/// Sets the objects that will be used by the formatters to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleObjects">The sample objects.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
    var sampleGenerator = config.GetHelpPageSampleGenerator();
    sampleGenerator.SampleObjects = sampleObjects;
}
/// <summary>
/// Sets the sample request directly for the specified media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    // "*" is the wildcard parameter list: the sample applies regardless of
    // the action's parameter names.
    SetSampleRequest(config, sample, mediaType, controllerName, actionName, new[] { "*" });
}

/// <summary>
/// Sets the sample request directly for the specified media type and action with parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    // "*" is the wildcard parameter list: the sample applies regardless of
    // the action's parameter names.
    SetSampleResponse(config, sample, mediaType, controllerName, actionName, new[] { "*" });
}

/// <summary>
/// Sets the sample response directly for the specified media type of the action with specific parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified type and media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
    var key = new HelpPageSampleKey(mediaType, type);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}

/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // "*" is the wildcard parameter list: applies regardless of parameter names.
    SetActualRequestType(config, type, controllerName, actionName, new[] { "*" });
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    var key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}

/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // "*" is the wildcard parameter list: applies regardless of parameter names.
    SetActualResponseType(config, type, controllerName, actionName, new[] { "*" });
}

/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    var key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Gets the help page sample generator, creating and caching one on first use.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
    object generator = config.Properties.GetOrAdd(
        typeof(HelpPageSampleGenerator),
        _ => new HelpPageSampleGenerator());
    return (HelpPageSampleGenerator)generator;
}

/// <summary>
/// Sets the help page sample generator, overwriting any cached instance.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
    config.Properties.AddOrUpdate(
        typeof(HelpPageSampleGenerator),
        addValueFactory: _ => sampleGenerator,
        updateValueFactory: (_, existing) => sampleGenerator);
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>, or null when no API matches <paramref name="apiDescriptionId"/>.
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
    string modelId = ApiModelPrefix + apiDescriptionId;

    object cached;
    if (config.Properties.TryGetValue(modelId, out cached))
    {
        return (HelpPageApiModel)cached;
    }

    ApiDescription apiDescription = config.Services.GetApiExplorer().ApiDescriptions.FirstOrDefault(
        api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
    if (apiDescription == null)
    {
        // Unknown id: nothing to generate; callers are expected to handle null.
        return null;
    }

    HelpPageApiModel generated = GenerateApiModel(apiDescription, config.GetHelpPageSampleGenerator());
    config.Properties.TryAdd(modelId, generated);
    return generated;
}
// Builds the help-page model for one API, attaching generated sample
// requests/responses. Sample-generation failures are recorded in
// ErrorMessages instead of propagating.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HelpPageSampleGenerator sampleGenerator)
{
    var apiModel = new HelpPageApiModel
    {
        ApiDescription = apiDescription
    };

    try
    {
        foreach (var sample in sampleGenerator.GetSampleRequests(apiDescription))
        {
            apiModel.SampleRequests.Add(sample.Key, sample.Value);
            LogInvalidSampleAsError(apiModel, sample.Value);
        }

        foreach (var sample in sampleGenerator.GetSampleResponses(apiDescription))
        {
            apiModel.SampleResponses.Add(sample.Key, sample.Value);
            LogInvalidSampleAsError(apiModel, sample.Value);
        }
    }
    catch (Exception e)
    {
        apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception Message: {0}", e.Message));
    }

    return apiModel;
}
// If the sample is an InvalidSample, surface its error message on the model;
// other sample kinds are ignored.
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
    var invalidSample = sample as InvalidSample;
    if (invalidSample == null)
    {
        return;
    }

    apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="TranslationManipulator.cs" company="Google LLC">
//
// Copyright 2018 Google LLC. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// </copyright>
//-----------------------------------------------------------------------
namespace GoogleARCore.Examples.ObjectManipulation
{
using GoogleARCore.Examples.ObjectManipulationInternal;
using UnityEngine;
/// <summary>
/// Manipulates the position of an object via a drag gesture.
/// If not selected, the object will be selected when the drag gesture starts.
/// </summary>
[RequireComponent(typeof(SelectionManipulator))]
public class TranslationManipulator : Manipulator
{
/// <summary>
/// The translation mode of this object.
/// </summary>
public TransformationUtility.TranslationMode ObjectTranslationMode;

/// <summary>
/// The maximum translation distance of this object.
/// </summary>
public float MaxTranslationDistance;

private const float k_PositionSpeed = 12.0f;   // lerp speed for position and rotation
private const float k_DiffThreshold = 0.0001f; // snap distance / rotation-change threshold

private bool m_IsActive = false;               // true while UpdatePosition should animate
private Vector3 m_DesiredAnchorPosition;       // world-space target used for the final anchor
private Vector3 m_DesiredLocalPosition;        // target position local to the current anchor
private Quaternion m_DesiredRotation;          // world-space target rotation
private float m_GroundingPlaneHeight;          // height of the plane translation is grounded on
private TrackableHit m_LastHit;                // last placement hit; supplies trackable and pose
/// <summary>
/// The Unity's Start method.
/// </summary>
protected void Start()
{
    // Begin with no local offset from the anchor.
    m_DesiredLocalPosition = Vector3.zero;
}

/// <summary>
/// The Unity's Update method.
/// </summary>
protected override void Update()
{
    base.Update();
    UpdatePosition();
}
/// <summary>
/// Returns true if the manipulation can be started for the given gesture.
/// </summary>
/// <param name="gesture">The current gesture.</param>
/// <returns>True if the manipulation can be started.</returns>
protected override bool CanStartManipulationForGesture(DragGesture gesture)
{
    // Only start when the drag explicitly targets this object; a missing
    // target also fails here.
    if (gesture.TargetObject == null || gesture.TargetObject != gameObject)
    {
        return false;
    }

    // Starting a drag also selects the object.
    Select();
    return true;
}
/// <summary>
/// Function called when the manipulation is started.
/// </summary>
/// <param name="gesture">The current gesture.</param>
protected override void OnStartManipulation(DragGesture gesture)
{
    // Ground the translation on the anchor's current height; updated as the
    // drag moves across planes (see OnContinueManipulation).
    m_GroundingPlaneHeight = transform.parent.position.y;
}
/// <summary>
/// Continues the translation: computes the best placement for the current
/// gesture position and records the desired local position, anchor
/// position, and rotation for UpdatePosition to animate toward.
/// </summary>
/// <param name="gesture">The current gesture.</param>
protected override void OnContinueManipulation(DragGesture gesture)
{
    m_IsActive = true;

    TransformationUtility.Placement desiredPlacement =
        TransformationUtility.GetBestPlacementPosition(
            transform.parent.position, gesture.Position, m_GroundingPlaneHeight, 0.03f,
            MaxTranslationDistance, ObjectTranslationMode);

    if (desiredPlacement.HoveringPosition.HasValue &&
        desiredPlacement.PlacementPosition.HasValue)
    {
        // If desired position is lower than current position, don't drop it until it's
        // finished.
        // Track the hovering position in the anchor's local space; the placement
        // position stays in world space for the final anchor (see OnEndManipulation).
        m_DesiredLocalPosition = transform.parent.InverseTransformPoint(
            desiredPlacement.HoveringPosition.Value);
        m_DesiredAnchorPosition = desiredPlacement.PlacementPosition.Value;

        m_GroundingPlaneHeight = desiredPlacement.UpdatedGroundingPlaneHeight;

        if (desiredPlacement.PlacementRotation.HasValue)
        {
            // Rotate if the plane direction has changed.
            if (((desiredPlacement.PlacementRotation.Value * Vector3.up) - transform.up)
                .magnitude > k_DiffThreshold)
            {
                m_DesiredRotation = desiredPlacement.PlacementRotation.Value;
            }
            else
            {
                m_DesiredRotation = transform.rotation;
            }
        }

        if (desiredPlacement.PlacementPlane.HasValue)
        {
            // Remember the plane hit so OnEndManipulation can anchor to it.
            m_LastHit = desiredPlacement.PlacementPlane.Value;
        }
    }
}
/// <summary>
/// Finishes the translation: creates a new anchor at the clamped desired
/// pose, re-parents the object to it, and destroys the old anchor.
/// </summary>
/// <param name="gesture">The current gesture.</param>
protected override void OnEndManipulation(DragGesture gesture)
{
    GameObject oldAnchor = transform.parent.gameObject;

    Pose desiredPose = new Pose(m_DesiredAnchorPosition, m_LastHit.Pose.rotation);

    // Clamp the final position to MaxTranslationDistance, measured in the old
    // anchor's local space.
    Vector3 desiredLocalPosition =
        transform.parent.InverseTransformPoint(desiredPose.position);

    if (desiredLocalPosition.magnitude > MaxTranslationDistance)
    {
        desiredLocalPosition = desiredLocalPosition.normalized * MaxTranslationDistance;
    }

    desiredPose.position = transform.parent.TransformPoint(desiredLocalPosition);

    // Re-anchor on the last plane hit, then discard the old anchor.
    Anchor newAnchor = m_LastHit.Trackable.CreateAnchor(desiredPose);
    transform.parent = newAnchor.transform;
    Destroy(oldAnchor);

    m_DesiredLocalPosition = Vector3.zero;

    // Rotate if the plane direction has changed.
    if (((desiredPose.rotation * Vector3.up) - transform.up).magnitude > k_DiffThreshold)
    {
        m_DesiredRotation = desiredPose.rotation;
    }
    else
    {
        m_DesiredRotation = transform.rotation;
    }

    // Make sure position is updated one last time.
    m_IsActive = true;
}
// Animates the object toward the desired local position and rotation each
// frame, snapping and deactivating once close enough, and keeps the
// selection visualization from floating above the object.
private void UpdatePosition()
{
    if (!m_IsActive)
    {
        return;
    }

    // Smoothly move toward the desired local position; snap when close enough.
    Vector3 targetLocalPosition = Vector3.Lerp(
        transform.localPosition, m_DesiredLocalPosition, Time.deltaTime * k_PositionSpeed);
    float remainingDistance = (m_DesiredLocalPosition - targetLocalPosition).magnitude;
    if (remainingDistance < k_DiffThreshold)
    {
        targetLocalPosition = m_DesiredLocalPosition;
        m_IsActive = false;
    }

    transform.localPosition = targetLocalPosition;

    // Smoothly rotate toward the desired rotation at the same speed.
    transform.rotation = Quaternion.Lerp(
        transform.rotation, m_DesiredRotation, Time.deltaTime * k_PositionSpeed);

    // Avoid placing the selection higher than the object if the anchor is
    // higher than the object.
    float newElevation =
        Mathf.Max(0, -transform.InverseTransformPoint(m_DesiredAnchorPosition).y);
    GetComponent<SelectionManipulator>().OnElevationChanged(newElevation);
}
}
}
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Text.RegularExpressions;
namespace Microsoft.PythonTools.Intellisense {
/// <summary>
/// The possible modes for a <see cref="FuzzyStringMatcher"/>.
/// </summary>
/// <remarks>
/// The numeric values index into FuzzyStringMatcher's _matcherMap and
/// _ignoreCaseMap tables, so they must stay contiguous from zero.
/// </remarks>
public enum FuzzyMatchMode {
    Prefix = 0,                 // PrefixMatch, case-sensitive
    PrefixIgnoreCase = 1,       // PrefixMatch, case-insensitive
    Substring = 2,              // SubstringMatch, case-sensitive
    SubstringIgnoreCase = 3,    // SubstringMatch, case-insensitive
    Fuzzy = 4,                  // FuzzyMatch, case-sensitive
    FuzzyIgnoreCase = 5,        // FuzzyMatch, case-insensitive
    FuzzyIgnoreLowerCase = 6,   // FuzzyMatchIgnoreLowerCase
    Regex = 7,                  // RegexMatch, case-sensitive
    RegexIgnoreCase = 8,        // RegexMatch, case-insensitive
    Default = FuzzyIgnoreLowerCase,
}
/// <summary>
/// Compares strings against patterns for sorting and filtering.
/// </summary>
public class FuzzyStringMatcher {
    // Common signature for all matching strategies: returns a sort key where
    // larger values indicate a better match and 0 indicates no match.
    delegate int Matcher(string text, string pattern, bool ignoreCase);

    readonly Matcher _matcher;
    readonly bool _ignoreCase;

    // Both tables are indexed by the integer value of FuzzyMatchMode, so
    // their order must stay in sync with that enum's declaration.
    readonly static bool[] _ignoreCaseMap = new[] { false, true, false, true, false, true, false, false, true };
    readonly static Matcher[] _matcherMap = new Matcher[] {
        PrefixMatch, PrefixMatch,
        SubstringMatch, SubstringMatch,
        FuzzyMatch, FuzzyMatch,
        FuzzyMatchIgnoreLowerCase,
        RegexMatch, RegexMatch
    };

    // Resolves the matching strategy once; GetSortKey is then a direct
    // delegate call.
    public FuzzyStringMatcher(FuzzyMatchMode mode) {
        _ignoreCase = _ignoreCaseMap[(int)mode];
        _matcher = _matcherMap[(int)mode];
    }
/// <summary>
/// Returns an integer indicating how well text matches pattern. Larger
/// values indicate a better match.
/// </summary>
public int GetSortKey(string text, string pattern) {
return _matcher(text, pattern, _ignoreCase);
}
/// <summary>
/// Returns false if text does not match pattern well enough to be
/// displayed.
/// </summary>
public bool IsCandidateMatch(string text, string pattern) {
return IsCandidateMatch(text, pattern, _matcher(text, pattern, _ignoreCase));
}
/// <summary>
/// Returns false if text does not match pattern well enough to be
/// displayed.
/// </summary>
/// <remarks>
/// Use this overload if the sort key is already available, as this will
/// avoid recalculating it.
/// </remarks>
public bool IsCandidateMatch(string text, string pattern, int sortKey) {
return sortKey >= pattern.Length;
}
static int PrefixMatch(string text, string pattern, bool ignoreCase) {
if (text.StartsWith(pattern, StringComparison.InvariantCulture) || text.StartsWith(pattern, StringComparison.CurrentCulture)) {
return pattern.Length * 2 + (text.Length == pattern.Length ? 1 : 0);
} else if (ignoreCase && (text.StartsWith(pattern, StringComparison.InvariantCultureIgnoreCase) || text.StartsWith(pattern, StringComparison.CurrentCultureIgnoreCase))) {
return pattern.Length + (text.Length == pattern.Length ? 1 : 0);
} else {
return 0;
}
}
static int SubstringMatch(string text, string pattern, bool ignoreCase) {
int position = text.IndexOf(pattern, StringComparison.InvariantCulture);
if (position >= 0) {
return pattern.Length * 2 + (position == 0 ? 1 : 0);
}
position = text.IndexOf(pattern, StringComparison.CurrentCulture);
if (position >= 0) {
return pattern.Length * 2 + (position == 0 ? 1 : 0);
}
if (ignoreCase) {
position = text.IndexOf(pattern, StringComparison.InvariantCultureIgnoreCase);
if (position >= 0) {
return pattern.Length + (position == 0 ? 1 : 0);
}
position = text.IndexOf(pattern, StringComparison.CurrentCultureIgnoreCase);
if (position >= 0) {
return pattern.Length + (position == 0 ? 1 : 0);
}
}
return 0;
}
static int RegexMatch(string text, string pattern, bool ignoreCase) {
try {
var match = Regex.Match(text, pattern, RegexOptions.CultureInvariant);
if (match != null && match.Success) {
return match.Value.Length * 2 + (match.Index == 0 ? 1 : 0);
}
match = Regex.Match(text, pattern);
if (match != null && match.Success) {
return match.Value.Length * 2 + (match.Index == 0 ? 1 : 0);
}
if (ignoreCase) {
match = Regex.Match(text, pattern, RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
if (match != null && match.Success) {
return match.Value.Length + (match.Index == 0 ? 1 : 0);
}
match = Regex.Match(text, pattern, RegexOptions.IgnoreCase);
if (match != null && match.Success) {
return match.Value.Length + (match.Index == 0 ? 1 : 0);
}
}
} catch (ArgumentException ex) {
Trace.TraceWarning("Exception in Regex.Match(\"{0}\", \"{1}\"): {2}", text, pattern, ex);
}
return 0;
}
/// <summary>
/// The reward for the first matching character.
/// </summary>
const int BASE_REWARD = 1;
/// <summary>
/// The amount to increase the reward for each consecutive character.
/// This bonus is cumulative for each character.
/// </summary>
const int CONSECUTIVE_BONUS = 1;
/// <summary>
/// The amount to increase the reward at the start of the word. This
/// bonus is applied once but remains for each consecutive character.
/// </summary>
const int START_OF_WORD_BONUS = 4;
/// <summary>
/// The amount to increase the reward after an underscore. This bonus
/// is applied once but remains for each consecutive character.
/// </summary>
const int AFTER_UNDERSCORE_BONUS = 3;
/// <summary>
/// The amount to increase the reward for case-sensitive matches where
/// the user typed an uppercase character. This bonus is only applied
/// for the matching character.
/// </summary>
const int MATCHED_UPPERCASE_BONUS = 1;
/// <summary>
/// The amount to increase the reward for case-insensitive matches when
/// the user typed a lowercase character. This bonus is only applied
/// for the matching character, and is intended to be negative.
/// </summary>
const int EXPECTED_LOWERCASE_BONUS = -1;
/// <summary>
/// The amount to increase the reward for case-insensitive matches when
/// the user typed an uppercase character. This bonus is only applied
/// for the matching character, and is intended to be negative.
/// </summary>
const int EXPECTED_UPPERCASE_BONUS = -2;
static int FuzzyMatchInternal(string text, string pattern, bool ignoreLowerCase, bool ignoreUpperCase) {
if (text == null || pattern == null) {
return 0;
}
int total = 0;
int increment = BASE_REWARD + START_OF_WORD_BONUS;
int y = 0;
try {
checked {
var cmp1 = CultureInfo.InvariantCulture.CompareInfo;
var cmp2 = CultureInfo.CurrentCulture.CompareInfo;
for (int x = 0; x < text.Length; ++x) {
if (y >= pattern.Length) {
// Prevent bonus for y == pattern.Length
y += 1;
break;
}
if (cmp1.Compare(text, x, 1, pattern, y, 1, CompareOptions.IgnoreCase | CompareOptions.IgnoreNonSpace | CompareOptions.IgnoreWidth) == 0 ||
cmp2.Compare(text, x, 1, pattern, y, 1, CompareOptions.IgnoreCase | CompareOptions.IgnoreNonSpace | CompareOptions.IgnoreWidth) == 0) {
if (char.IsUpper(pattern, y)) {
if (char.IsUpper(text, x)) {
// Apply a bonus for case-sensitive matches
// when the user has typed an uppercase
// character.
total += increment + MATCHED_UPPERCASE_BONUS;
increment += CONSECUTIVE_BONUS;
y += 1;
} else if (ignoreUpperCase) {
// The user typed uppercase and it matched
// lowercase, so reward with a slight
// penalty.
total += increment + EXPECTED_UPPERCASE_BONUS;
increment += CONSECUTIVE_BONUS;
y += 1;
} else {
// The user typed uppercase and it matched
// lowercase.
increment = BASE_REWARD;
}
} else {
if (char.IsLower(text, x)) {
// The user typed lowercase and it matched
// lowercase.
total += increment;
increment += CONSECUTIVE_BONUS;
y += 1;
} else if (ignoreLowerCase) {
// The user typed lowercase and it matched
// uppercase, so reward with a slight
// penalty.
total += increment + EXPECTED_LOWERCASE_BONUS;
increment += CONSECUTIVE_BONUS;
y += 1;
} else {
// The user typed lowercase and it matched
// uppercase, but we don't care.
increment = BASE_REWARD;
}
}
} else if (text[x] == '_') {
increment = BASE_REWARD + AFTER_UNDERSCORE_BONUS;
} else {
increment = BASE_REWARD;
}
}
if (y < pattern.Length) {
total = 0;
}
}
} catch (OverflowException) {
return int.MaxValue;
}
return total;
}
static int FuzzyMatch(string text, string pattern, bool ignoreCase) {
return FuzzyMatchInternal(text, pattern, ignoreCase, ignoreCase);
}
static int FuzzyMatchIgnoreLowerCase(string text, string pattern, bool ignoreCase) {
int total = FuzzyMatchInternal(text, pattern, true, false);
if (total == 0) {
total = FuzzyMatchInternal(text, pattern, true, true);
}
return total;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Umbraco.Core.Models;
using Umbraco.Core.Models.Membership;
using Umbraco.Core.Persistence.DatabaseModelDefinitions;
using Umbraco.Core.Persistence.Querying;
namespace Umbraco.Core.Services
{
/// <summary>
/// Defines the UserService, which is an easy access to operations involving <see cref="IProfile"/> and eventually Users.
/// </summary>
public interface IUserService : IMembershipUserService
{
    /// <summary>
    /// Creates a database entry for starting a new login session for a user
    /// </summary>
    /// <param name="userId">Id of the user the session belongs to</param>
    /// <param name="requestingIpAddress">IP address the session was requested from</param>
    /// <returns>The identifier of the newly created session</returns>
    Guid CreateLoginSession(int userId, string requestingIpAddress);

    /// <summary>
    /// Validates that a user login session is valid/current and hasn't been closed
    /// </summary>
    /// <param name="userId">Id of the user the session belongs to</param>
    /// <param name="sessionId">Identifier of the session to validate</param>
    /// <returns>True if the session is still valid</returns>
    bool ValidateLoginSession(int userId, Guid sessionId);

    /// <summary>
    /// Removes the session's validity
    /// </summary>
    /// <param name="sessionId">Identifier of the session to clear</param>
    void ClearLoginSession(Guid sessionId);

    /// <summary>
    /// Removes all valid sessions for the user
    /// </summary>
    /// <param name="userId">Id of the user whose sessions are cleared</param>
    /// <returns>The number of sessions removed</returns>
    int ClearLoginSessions(int userId);

    /// <summary>
    /// This is basically facets of UserStates key = state, value = count
    /// </summary>
    IDictionary<UserState, int> GetUserStates();

    /// <summary>
    /// Get paged users
    /// </summary>
    /// <param name="pageIndex">Zero-based page index</param>
    /// <param name="pageSize">Number of users per page</param>
    /// <param name="totalRecords">Returns the total number of matching users</param>
    /// <param name="orderBy">Field to order by</param>
    /// <param name="orderDirection">Direction to order in</param>
    /// <param name="userState">Optional filter on user states</param>
    /// <param name="includeUserGroups">
    /// A filter to only include users that belong to these user groups
    /// </param>
    /// <param name="excludeUserGroups">
    /// A filter to only include users that do not belong to these user groups
    /// </param>
    /// <param name="filter">Optional additional query filter</param>
    /// <returns>An enumerable list of <see cref="IUser"/> for the requested page</returns>
    IEnumerable<IUser> GetAll(long pageIndex, int pageSize, out long totalRecords,
        string orderBy, Direction orderDirection,
        UserState[] userState = null,
        string[] includeUserGroups = null,
        string[] excludeUserGroups = null,
        IQuery<IUser> filter = null);

    /// <summary>
    /// Get paged users
    /// </summary>
    /// <param name="pageIndex">Zero-based page index</param>
    /// <param name="pageSize">Number of users per page</param>
    /// <param name="totalRecords">Returns the total number of matching users</param>
    /// <param name="orderBy">Field to order by</param>
    /// <param name="orderDirection">Direction to order in</param>
    /// <param name="userState">Optional filter on user states</param>
    /// <param name="userGroups">
    /// A filter to only include users that belong to these user groups
    /// </param>
    /// <param name="filter">Optional text filter</param>
    /// <returns>An enumerable list of <see cref="IUser"/> for the requested page</returns>
    IEnumerable<IUser> GetAll(long pageIndex, int pageSize, out long totalRecords,
        string orderBy, Direction orderDirection,
        UserState[] userState = null,
        string[] userGroups = null,
        string filter = null);

    /// <summary>
    /// This is simply a helper method which essentially just wraps the MembershipProvider's ChangePassword method
    /// </summary>
    /// <remarks>
    /// This method exists so that Umbraco developers can use one entry point to create/update users if they choose to.
    /// </remarks>
    /// <param name="user">The user to save the password for</param>
    /// <param name="password">The password to save</param>
    void SavePassword(IUser user, string password);

    /// <summary>
    /// Deletes or disables a User
    /// </summary>
    /// <param name="user"><see cref="IUser"/> to delete</param>
    /// <param name="deletePermanently"><c>True</c> to permanently delete the user, <c>False</c> to disable the user</param>
    void Delete(IUser user, bool deletePermanently);

    /// <summary>
    /// Gets an IProfile by User Id.
    /// </summary>
    /// <param name="id">Id of the User to retrieve</param>
    /// <returns><see cref="IProfile"/></returns>
    IProfile GetProfileById(int id);

    /// <summary>
    /// Gets a profile by username
    /// </summary>
    /// <param name="username">Username</param>
    /// <returns><see cref="IProfile"/></returns>
    IProfile GetProfileByUserName(string username);

    /// <summary>
    /// Gets a user by Id
    /// </summary>
    /// <param name="id">Id of the user to retrieve</param>
    /// <returns><see cref="IUser"/></returns>
    IUser GetUserById(int id);

    /// <summary>
    /// Gets users by their Ids
    /// </summary>
    /// <param name="ids">Ids of the users to retrieve</param>
    /// <returns>An enumerable list of <see cref="IUser"/></returns>
    IEnumerable<IUser> GetUsersById(params int[] ids);

    /// <summary>
    /// Removes a specific section from all user groups
    /// </summary>
    /// <remarks>This is useful when an entire section is removed from config</remarks>
    /// <param name="sectionAlias">Alias of the section to remove</param>
    void DeleteSectionFromAllUserGroups(string sectionAlias);

    /// <summary>
    /// Get explicitly assigned permissions for a user and optional node ids
    /// </summary>
    /// <remarks>If no permissions are found for a particular entity then the user's default permissions will be applied</remarks>
    /// <param name="user">User to retrieve permissions for</param>
    /// <param name="nodeIds">Specifying nothing will return all user permissions for all nodes that have explicit permissions defined</param>
    /// <returns>An enumerable list of <see cref="EntityPermission"/></returns>
    /// <remarks>
    /// This will return the default permissions for the user's groups for node ids that don't have explicitly defined permissions
    /// </remarks>
    EntityPermissionCollection GetPermissions(IUser user, params int[] nodeIds);

    /// <summary>
    /// Get explicitly assigned permissions for groups and optional node Ids
    /// </summary>
    /// <param name="groups">Groups to retrieve permissions for</param>
    /// <param name="fallbackToDefaultPermissions">
    /// Flag indicating if we want to include the default group permissions for each result if there are not explicit permissions set
    /// </param>
    /// <param name="nodeIds">Specifying nothing will return all permissions for all nodes</param>
    /// <returns>An enumerable list of <see cref="EntityPermission"/></returns>
    EntityPermissionCollection GetPermissions(IUserGroup[] groups, bool fallbackToDefaultPermissions, params int[] nodeIds);

    /// <summary>
    /// Gets the implicit/inherited permissions for the user for the given path
    /// </summary>
    /// <param name="user">User to check permissions for</param>
    /// <param name="path">Path to check permissions for</param>
    EntityPermissionSet GetPermissionsForPath(IUser user, string path);

    /// <summary>
    /// Gets the permissions for the provided groups and path
    /// </summary>
    /// <param name="groups">Groups to check permissions for</param>
    /// <param name="path">Path to check permissions for</param>
    /// <param name="fallbackToDefaultPermissions">
    /// Flag indicating if we want to include the default group permissions for each result if there are not explicit permissions set
    /// </param>
    EntityPermissionSet GetPermissionsForPath(IUserGroup[] groups, string path, bool fallbackToDefaultPermissions = false);

    /// <summary>
    /// Replaces the same permission set for a single group to any number of entities
    /// </summary>
    /// <param name="groupId">Id of the group</param>
    /// <param name="permissions">
    /// Permissions as enumerable list of <see cref="char"/>,
    /// if no permissions are specified then all permissions for this node are removed for this group
    /// </param>
    /// <param name="entityIds">Specify the nodes to replace permissions for. If nothing is specified all permissions are removed.</param>
    /// <remarks>If no 'entityIds' are specified all permissions will be removed for the specified group.</remarks>
    void ReplaceUserGroupPermissions(int groupId, IEnumerable<char> permissions, params int[] entityIds);

    /// <summary>
    /// Assigns the same permission set for a single user group to any number of entities
    /// </summary>
    /// <param name="groupId">Id of the group</param>
    /// <param name="permission">The permission to assign</param>
    /// <param name="entityIds">Specify the nodes to replace permissions for</param>
    void AssignUserGroupPermission(int groupId, char permission, params int[] entityIds);

    /// <summary>
    /// Gets a list of <see cref="IUser"/> objects associated with a given group
    /// </summary>
    /// <param name="groupId">Id of group</param>
    /// <returns><see cref="IEnumerable{IUser}"/></returns>
    IEnumerable<IUser> GetAllInGroup(int groupId);

    /// <summary>
    /// Gets a list of <see cref="IUser"/> objects not associated with a given group
    /// </summary>
    /// <param name="groupId">Id of group</param>
    /// <returns><see cref="IEnumerable{IUser}"/></returns>
    IEnumerable<IUser> GetAllNotInGroup(int groupId);

    #region User groups

    /// <summary>
    /// Gets all UserGroups or those specified as parameters
    /// </summary>
    /// <param name="ids">Optional Ids of UserGroups to retrieve</param>
    /// <returns>An enumerable list of <see cref="IUserGroup"/></returns>
    IEnumerable<IUserGroup> GetAllUserGroups(params int[] ids);

    /// <summary>
    /// Gets UserGroups matching the given aliases
    /// </summary>
    /// <param name="alias">Aliases of the UserGroups to retrieve</param>
    /// <returns>An enumerable list of <see cref="IUserGroup"/></returns>
    IEnumerable<IUserGroup> GetUserGroupsByAlias(params string[] alias);

    /// <summary>
    /// Gets a UserGroup by its Alias
    /// </summary>
    /// <param name="name">Alias of the UserGroup to retrieve</param>
    /// <returns><see cref="IUserGroup"/></returns>
    IUserGroup GetUserGroupByAlias(string name);

    /// <summary>
    /// Gets a UserGroup by its Id
    /// </summary>
    /// <param name="id">Id of the UserGroup to retrieve</param>
    /// <returns><see cref="IUserGroup"/></returns>
    IUserGroup GetUserGroupById(int id);

    /// <summary>
    /// Saves a UserGroup
    /// </summary>
    /// <param name="userGroup">UserGroup to save</param>
    /// <param name="userIds">
    /// If null then no changes are made to the users who are assigned to this group, however if a value is passed in
    /// then all users will be removed from this group and only these users will be added
    /// </param>
    /// <param name="raiseEvents">Optional parameter to raise events.
    /// Default is <c>True</c> otherwise set to <c>False</c> to not raise events</param>
    void Save(IUserGroup userGroup, int[] userIds = null, bool raiseEvents = true);

    /// <summary>
    /// Deletes a UserGroup
    /// </summary>
    /// <param name="userGroup">UserGroup to delete</param>
    void DeleteUserGroup(IUserGroup userGroup);

    #endregion
}
}
| |
//
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using NLog.Config;
#if !SILVERLIGHT
namespace NLog.Internal.FileAppenders
{
using System;
using System.IO;
using System.Security;
using System.Security.AccessControl;
using System.Security.Cryptography;
using System.Security.Principal;
using System.Text;
using System.Threading;
using NLog.Common;
/// <summary>
/// Provides multiprocess-safe atomic file appends while
/// keeping the files open.
/// </summary>
/// <remarks>
/// On Unix all appends can be made atomic, even with multiple processes
/// writing to the same file, because positioning the file pointer at the
/// end and appending can be performed as a single operation. On Win32 a
/// global named mutex provides the equivalent cross-process
/// synchronization.
/// </remarks>
[SecuritySafeCritical]
internal class MutexMultiProcessFileAppender : BaseFileAppender
{
    public static readonly IFileAppenderFactory TheFactory = new Factory();

    private FileStream fileStream;
    private Mutex mutex;

    /// <summary>
    /// Initializes a new instance of the <see cref="MutexMultiProcessFileAppender" /> class.
    /// </summary>
    /// <param name="fileName">Name of the file.</param>
    /// <param name="parameters">The parameters.</param>
    public MutexMultiProcessFileAppender(string fileName, ICreateFileParameters parameters) : base(fileName, parameters)
    {
        try
        {
            mutex = CreateSharableMutex(GetMutexName(fileName));
            fileStream = CreateFileStream(true);
        }
        catch
        {
            // Construction failed part-way; release anything that was
            // acquired before letting the exception propagate.
            if (mutex != null)
            {
                mutex.Close();
                mutex = null;
            }

            if (fileStream != null)
            {
                fileStream.Close();
                fileStream = null;
            }

            throw;
        }
    }

    /// <summary>
    /// Writes the specified bytes.
    /// </summary>
    /// <param name="bytes">The bytes to be written.</param>
    public override void Write(byte[] bytes)
    {
        Write(bytes, 0, bytes.Length);
    }

    /// <summary>
    /// Writes the specified slice of bytes to the end of the file while
    /// holding the cross-process mutex.
    /// </summary>
    /// <param name="bytes">Buffer containing the bytes to write.</param>
    /// <param name="offset">Offset of the first byte to write.</param>
    /// <param name="count">Number of bytes to write.</param>
    public override void Write(byte[] bytes, int offset, int count)
    {
        if (mutex == null)
        {
            // The appender has been closed; drop the write.
            return;
        }

        try
        {
            mutex.WaitOne();
        }
        catch (AbandonedMutexException)
        {
            // Another process was killed without releasing the mutex.
            // Ownership has still been transferred to us, so it is safe
            // to proceed with the write.
            // See: http://msdn.microsoft.com/en-us/library/system.threading.abandonedmutexexception.aspx
        }

        try
        {
            fileStream.Seek(0, SeekOrigin.End);
            fileStream.Write(bytes, offset, count);
            fileStream.Flush();
            FileTouched();
        }
        finally
        {
            mutex.ReleaseMutex();
        }
    }

    /// <summary>
    /// Closes this instance, releasing the mutex and the file stream.
    /// </summary>
    public override void Close()
    {
        InternalLogger.Trace("Closing '{0}'", FileName);

        if (mutex != null)
        {
            mutex.Close();
        }

        if (fileStream != null)
        {
            fileStream.Close();
        }

        mutex = null;
        fileStream = null;
        FileTouched();
    }

    /// <summary>
    /// Flushes this instance.
    /// </summary>
    public override void Flush()
    {
        // Intentionally empty: every Write already flushes the stream.
    }

    /// <summary>
    /// Gets the file info.
    /// </summary>
    /// <returns>The file characteristics, if the file information was retrieved successfully, otherwise null.</returns>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2001:AvoidCallingProblematicMethods", MessageId = "System.Runtime.InteropServices.SafeHandle.DangerousGetHandle", Justification = "Optimization")]
    public override FileCharacteristics GetFileCharacteristics()
    {
        return FileCharacteristicsHelper.Helper.GetFileCharacteristics(FileName, fileStream.SafeFileHandle.DangerousGetHandle());
    }

    /// <summary>
    /// Creates a mutex that can be acquired by any process on the machine.
    /// </summary>
    private static Mutex CreateSharableMutex(string name)
    {
        // Grant full control to everyone so every writer process can
        // acquire the mutex regardless of the account it runs under.
        var security = new MutexSecurity();
        var everyone = new SecurityIdentifier(WellKnownSidType.WorldSid, null);
        security.AddAccessRule(new MutexAccessRule(everyone, MutexRights.FullControl, AccessControlType.Allow));

        // The Mutex constructor either creates a new mutex or opens an
        // existing one, in a thread-safe manner.
        bool createdNew;
        return new Mutex(false, name, out createdNew, security);
    }

    /// <summary>
    /// Derives a session-global mutex name from the target file name.
    /// </summary>
    private static string GetMutexName(string fileName)
    {
        // The Global\ kernel object namespace is used so the mutex can be
        // shared among processes in all sessions.
        const string mutexNamePrefix = @"Global\NLog-FileLock-";
        const int maxMutexNameLength = 260;

        string canonicalPath = Path.GetFullPath(fileName).ToLowerInvariant();

        // Backslash is the kernel namespace separator and must not appear
        // inside the name itself; all other characters are allowed.
        canonicalPath = canonicalPath.Replace('\\', '/');

        // A mutex name must not exceed MAX_PATH (260) characters.
        if (mutexNamePrefix.Length + canonicalPath.Length <= maxMutexNameLength)
        {
            return mutexNamePrefix + canonicalPath;
        }

        // The path is too long for a mutex name: hash the canonical path so
        // the shortened name remains unique per file...
        string hash;
        using (MD5 md5 = MD5.Create())
        {
            hash = Convert.ToBase64String(md5.ComputeHash(Encoding.UTF8.GetBytes(canonicalPath)));
        }

        // ...and append the tail of the path so the name still reveals
        // which file it guards (useful when debugging).
        int cutOffIndex = canonicalPath.Length - (maxMutexNameLength - mutexNamePrefix.Length - hash.Length);
        return mutexNamePrefix + hash + canonicalPath.Substring(cutOffIndex);
    }

    /// <summary>
    /// Factory class.
    /// </summary>
    private class Factory : IFileAppenderFactory
    {
        /// <summary>
        /// Opens the appender for given file name and parameters.
        /// </summary>
        /// <param name="fileName">Name of the file.</param>
        /// <param name="parameters">Creation parameters.</param>
        /// <returns>
        /// Instance of <see cref="BaseFileAppender"/> which can be used to write to the file.
        /// </returns>
        BaseFileAppender IFileAppenderFactory.Open(string fileName, ICreateFileParameters parameters)
        {
            return new MutexMultiProcessFileAppender(fileName, parameters);
        }
    }
}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32.SafeHandles;
using System.Diagnostics.CodeAnalysis;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
using System.Diagnostics;
namespace System.IO.Pipes
{
/// <summary>
/// Named pipe server
/// </summary>
public sealed partial class NamedPipeServerStream : PipeStream
{
    // Use the maximum number of server instances that the system resources allow.
    // Win32 uses 255 for this; it is exposed as -1 for consistency with other
    // "unlimited" sentinel values (e.g. infinite timeouts).
    public const int MaxAllowedServerInstances = -1;

    /// <summary>Creates a duplex, byte-mode, single-instance server with default options.</summary>
    public NamedPipeServerStream(string pipeName)
        : this(pipeName, PipeDirection.InOut, 1, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None)
    {
    }

    /// <summary>Creates a byte-mode, single-instance server with the given direction.</summary>
    public NamedPipeServerStream(string pipeName, PipeDirection direction)
        : this(pipeName, direction, 1, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None)
    {
    }

    /// <summary>Creates a byte-mode server with the given direction and instance limit.</summary>
    public NamedPipeServerStream(string pipeName, PipeDirection direction, int maxNumberOfServerInstances)
        : this(pipeName, direction, maxNumberOfServerInstances, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None)
    {
    }

    /// <summary>Creates a server with the given direction, instance limit and transmission mode.</summary>
    public NamedPipeServerStream(string pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode)
        : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, PipeOptions.None, 0, 0, HandleInheritability.None)
    {
    }

    /// <summary>Creates a server with the given direction, instance limit, transmission mode and options.</summary>
    public NamedPipeServerStream(string pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode, PipeOptions options)
        : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, options, 0, 0, HandleInheritability.None)
    {
    }

    /// <summary>Creates a server with explicit buffer size hints.</summary>
    public NamedPipeServerStream(string pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode, PipeOptions options, int inBufferSize, int outBufferSize)
        : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, options, inBufferSize, outBufferSize, HandleInheritability.None)
    {
    }

    /// <summary>
    /// Full named pipe server constructor
    /// </summary>
    /// <param name="pipeName">Pipe name</param>
    /// <param name="direction">Pipe direction: In, Out or InOut (duplex).
    /// Win32 note: this gets OR'd into dwOpenMode to CreateNamedPipe
    /// </param>
    /// <param name="maxNumberOfServerInstances">Maximum number of server instances. Specify a fixed value between
    /// 1 and 254 (Windows)/greater than 1 (Unix), or use NamedPipeServerStream.MaxAllowedServerInstances to use the
    /// maximum amount allowed by system resources.</param>
    /// <param name="transmissionMode">Byte mode or message mode.
    /// Win32 note: this gets used for dwPipeMode. CreateNamedPipe allows you to specify PIPE_TYPE_BYTE/MESSAGE
    /// and PIPE_READMODE_BYTE/MESSAGE independently, but this sets type and readmode to match.
    /// </param>
    /// <param name="options">PipeOption enum: None, Asynchronous, or Write-through
    /// Win32 note: this gets passed in with dwOpenMode to CreateNamedPipe. Asynchronous corresponds to
    /// FILE_FLAG_OVERLAPPED option. PipeOptions enum doesn't expose FIRST_PIPE_INSTANCE option because
    /// this sets that automatically based on the number of instances specified.
    /// </param>
    /// <param name="inBufferSize">Incoming buffer size, 0 or higher.
    /// Note: this size is always advisory; OS uses a suggestion.
    /// </param>
    /// <param name="outBufferSize">Outgoing buffer size, 0 or higher (see above)</param>
    /// <param name="inheritability">Whether handle is inheritable</param>
    /// <exception cref="ArgumentNullException"><paramref name="pipeName"/> is null.</exception>
    /// <exception cref="ArgumentException"><paramref name="pipeName"/> is empty.</exception>
    /// <exception cref="ArgumentOutOfRangeException">An argument is outside its valid range.</exception>
    private NamedPipeServerStream(string pipeName, PipeDirection direction, int maxNumberOfServerInstances,
            PipeTransmissionMode transmissionMode, PipeOptions options, int inBufferSize, int outBufferSize,
            HandleInheritability inheritability)
        : base(direction, transmissionMode, outBufferSize)
    {
        if (pipeName == null)
        {
            throw new ArgumentNullException(nameof(pipeName));
        }
        if (pipeName.Length == 0)
        {
            throw new ArgumentException(SR.Argument_NeedNonemptyPipeName);
        }
        // Only WriteThrough, Asynchronous and CurrentUserOnly are valid flags.
        if ((options & ~(PipeOptions.WriteThrough | PipeOptions.Asynchronous | PipeOptions.CurrentUserOnly)) != 0)
        {
            throw new ArgumentOutOfRangeException(nameof(options), SR.ArgumentOutOfRange_OptionsInvalid);
        }
        if (inBufferSize < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(inBufferSize), SR.ArgumentOutOfRange_NeedNonNegNum);
        }
        if ((maxNumberOfServerInstances < 1 || maxNumberOfServerInstances > 254) && (maxNumberOfServerInstances != MaxAllowedServerInstances))
        {
            // win32 allows fixed values of 1-254 or 255 to mean max allowed by system. We expose 255 as -1 (unlimited)
            // through the MaxAllowedServerInstances constant. This is consistent e.g. with -1 as infinite timeout, etc.
            // We do this check for consistency on Unix, even though maxNumberOfServerInstances is otherwise ignored.
            throw new ArgumentOutOfRangeException(nameof(maxNumberOfServerInstances), SR.ArgumentOutOfRange_MaxNumServerInstances);
        }
        // inheritability will always be None since this private constructor is only called from other constructors from which
        // inheritability is always set to None. Desktop has a public constructor to allow setting it to something else, but Core
        // doesn't.
        if (inheritability < HandleInheritability.None || inheritability > HandleInheritability.Inheritable)
        {
            throw new ArgumentOutOfRangeException(nameof(inheritability), SR.ArgumentOutOfRange_HandleInheritabilityNoneOrInheritable);
        }
        if ((options & PipeOptions.CurrentUserOnly) != 0)
        {
            IsCurrentUserOnly = true;
        }

        // Platform-specific creation of the underlying pipe (defined in the
        // other half of this partial class).
        Create(pipeName, direction, maxNumberOfServerInstances, transmissionMode,
            options, inBufferSize, outBufferSize, inheritability);
    }

    // Create a NamedPipeServerStream from an existing server pipe handle.
    public NamedPipeServerStream(PipeDirection direction, bool isAsync, bool isConnected, SafePipeHandle safePipeHandle)
        : base(direction, PipeTransmissionMode.Byte, 0)
    {
        if (safePipeHandle == null)
        {
            throw new ArgumentNullException(nameof(safePipeHandle));
        }
        if (safePipeHandle.IsInvalid)
        {
            throw new ArgumentException(SR.Argument_InvalidHandle, nameof(safePipeHandle));
        }
        ValidateHandleIsPipe(safePipeHandle);

        InitializeHandle(safePipeHandle, true, isAsync);
        if (isConnected)
        {
            State = PipeState.Connected;
        }
    }

    // Finalizer falls back to Dispose(false) when the stream was not
    // explicitly disposed.
    ~NamedPipeServerStream()
    {
        Dispose(false);
    }

    /// <summary>Waits asynchronously for a client to connect.</summary>
    public Task WaitForConnectionAsync()
    {
        return WaitForConnectionAsync(CancellationToken.None);
    }

    // APM (Begin/End) wrappers over the Task-based wait.
    public System.IAsyncResult BeginWaitForConnection(AsyncCallback callback, object state) =>
        TaskToApm.Begin(WaitForConnectionAsync(), callback, state);

    public void EndWaitForConnection(IAsyncResult asyncResult) =>
        TaskToApm.End(asyncResult);

    // Server can only connect from Disconnected state
    [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Consistent with security model")]
    private void CheckConnectOperationsServer()
    {
        // we're not checking whether already connected; this allows us to throw IOException
        // "pipe is being closed" if other side is closing (as does win32) or no-op if
        // already connected
        if (State == PipeState.Closed)
        {
            throw Error.GetPipeNotOpen();
        }
        if (InternalHandle != null && InternalHandle.IsClosed) // only check IsClosed if we have a handle
        {
            throw Error.GetPipeNotOpen();
        }
        if (State == PipeState.Broken)
        {
            throw new IOException(SR.IO_PipeBroken);
        }
    }

    // Server is allowed to disconnect from connected and broken states
    private void CheckDisconnectOperations()
    {
        if (State == PipeState.WaitingToConnect)
        {
            throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected);
        }
        if (State == PipeState.Disconnected)
        {
            throw new InvalidOperationException(SR.InvalidOperation_PipeAlreadyDisconnected);
        }
        if (InternalHandle == null && CheckOperationsRequiresSetHandle)
        {
            throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet);
        }
        if ((State == PipeState.Closed) || (InternalHandle != null && InternalHandle.IsClosed))
        {
            throw Error.GetPipeNotOpen();
        }
    }
}
// Users will use this delegate to specify a method to call while impersonating the client
// (see NamedPipeServerStream.RunAsClient).
public delegate void PipeStreamImpersonationWorker();
}
| |
/*
Copyright (c) 2010 by Genstein
This file is (or was originally) part of Trizbort, the Interactive Fiction Mapper.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Drawing;
using PdfSharp.Drawing;
namespace Trizbort
{
// Caching multi-line text renderer for PDFsharp's XGraphics.
// PDFsharp cannot render multi-line strings to PDF, so this class splits,
// word-wraps and positions each line itself, emulating GDI+ DrawString.
// Layout results are cached and rebuilt only when text, position, size,
// format, or the effective font metrics change.
class TextBlock
{
    // The text to lay out; setting it invalidates the cached layout.
    public string Text
    {
        get { return m_text; }
        set
        {
            m_text = value;
            m_invalidLayout = true;
        }
    }
    /// <summary>
    /// Draw a multi-line string as it would be drawn by GDI+.
    /// Compensates for issues and draw-vs-PDF differences in PDFsharp.
    /// The text drawn is the value of the Text property.
    /// </summary>
    /// <param name="graphics">The graphics with which to draw.</param>
    /// <param name="font">The font with which to draw.</param>
    /// <param name="brush">The brush with which to draw.</param>
    /// <param name="pos">The position at which to draw.</param>
    /// <param name="size">The size to which to limit the drawn text; or Vector.Zero for no limit.</param>
    /// <param name="format">The string format to use.</param>
    /// <remarks>
    /// PDFsharp cannot currently render multi-line text to PDF files; it comes out as single line.
    /// This method simulates standard Graphics.DrawString() over PDFsharp.
    /// It always has the effect of StringFormatFlags.LineLimit (which PDFsharp does not support).
    /// </remarks>
    public void Draw(XGraphics graphics, Font font, Brush brush, Vector pos, Vector size, XStringFormat format)
    {
        // do a quick test to see if text is going to get drawn at the same size as last time;
        // if so, assume we don't need to recompute our layout for that reason.
        // (Measuring a fixed string detects changes in effective scale/DPI between calls.)
        var sizeChecker = graphics.MeasureString("M q", font);
        if (sizeChecker != m_sizeChecker || pos != m_pos || m_size != size || m_requestedFormat.Alignment != format.Alignment || m_requestedFormat.LineAlignment != format.LineAlignment || m_requestedFormat.FormatFlags != format.FormatFlags)
        {
            m_invalidLayout = true;
        }
        m_sizeChecker = sizeChecker;
        if (m_invalidLayout)
        {
            // something vital has changed; rebuild our cached layout data
            RebuildCachedLayout(graphics, font, ref pos, ref size, format);
            m_invalidLayout = false;
        }
        var state = graphics.Save();
        if (size != Vector.Zero)
        {
            // Clip to the requested rectangle; Vector.Zero means "no limit".
            graphics.IntersectClip(new RectangleF(pos.X, pos.Y, size.X, size.Y));
        }
        // disable smoothing whilst rendering text;
        // visually this is no different, but is faster
        var smoothingMode = graphics.SmoothingMode;
        graphics.SmoothingMode = XSmoothingMode.HighSpeed;
        var origin = m_origin;
        for (var index=0; index<m_lines.Count; ++index)
        {
            // size.Y is decremented per drawn line; the "> 0" guard keeps the
            // unlimited case (size == Vector.Zero) from ever breaking out.
            if (size.Y > 0 && size.Y < m_lineHeight)
                break; // not enough remaining vertical space for a whole line
            var line = m_lines[index];
            graphics.DrawString(line, font, brush, origin.X, origin.Y, m_actualFormat);
            origin += m_delta;
            size.Y -= m_lineHeight;
        }
        graphics.SmoothingMode = smoothingMode;
        graphics.Restore(state);
    }
    // Recomputes the wrapped lines, per-line origin/delta, and the format
    // actually handed to DrawString (alignment is folded into m_origin).
    private void RebuildCachedLayout(XGraphics graphics, Font font, ref Vector pos, ref Vector size, XStringFormat baseFormat)
    {
        // for diagnostic purposes
        ++s_rebuildCount;
        // store current settings to help us tell if we need a rebuild next time around
        m_requestedFormat = new XStringFormat();
        m_requestedFormat.Alignment = baseFormat.Alignment;
        m_requestedFormat.FormatFlags = baseFormat.FormatFlags;
        m_requestedFormat.LineAlignment = baseFormat.LineAlignment;
        m_actualFormat = new XStringFormat();
        m_actualFormat.Alignment = baseFormat.Alignment;
        m_actualFormat.FormatFlags = baseFormat.FormatFlags;
        m_actualFormat.LineAlignment = baseFormat.LineAlignment;
        m_pos = pos;
        m_size = size;
        var text = m_text;
        // Only word-wrap single-line text that is too wide for the given box;
        // text containing explicit line breaks is used as-is.
        if (text.IndexOf('\n') == -1 && size.X > 0 && size.Y > 0 && graphics.MeasureString(text, font).Width > size.X)
        {
            // wrap single-line text to fit in rectangle
            // measure a space, countering the APIs unwillingness to measure spaces
            var spaceLength = (float)(graphics.MeasureString("M M", font).Width - graphics.MeasureString("M", font).Width * 2);
            var words = new List<Word>();
            foreach (var word in text.Split(' '))
            {
                if (words.Count != 0)
                {
                    words.Add(new Word(" ", spaceLength));
                }
                words.Add(new Word(word, (float)graphics.MeasureString(word, font).Width));
            }
            var lineLength = 0.0f;
            var total = string.Empty;
            var line = string.Empty;
            foreach (var word in words)
            {
                // Start a new line when a real word (not the space separator)
                // would overflow the remaining width and the line is non-empty.
                if (word.Text != " " && word.Length > Math.Max(0, size.X - lineLength) && lineLength > 0)
                {
                    if (line.Length > 0)
                    {
                        if (total.Length > 0)
                        {
                            total += "\n";
                        }
                        total += line;
                        // NOTE(review): lineLength restarts at word + one space width,
                        // slightly over-counting; presumably harmless — confirm.
                        lineLength = word.Length + spaceLength;
                        line = word.Text;
                    }
                }
                else
                {
                    line += word.Text;
                    lineLength += word.Length + spaceLength;
                }
            }
            // Flush the final partial line.
            if (line.Length > 0)
            {
                if (total.Length > 0)
                {
                    total += "\n";
                }
                total += line;
            }
            text = total;
        }
        m_lineHeight = font.GetHeight();
        m_lines.Clear();
        m_lines.AddRange(text.Split('\n'));
        // Convert line alignment into an explicit starting origin + per-line delta,
        // since we draw each line individually with Near alignment.
        switch (m_actualFormat.LineAlignment)
        {
            case XLineAlignment.Near:
            default:
                m_origin = pos;
                m_delta = new Vector(0, m_lineHeight);
                break;
            case XLineAlignment.Far:
                m_origin = new Vector(pos.X, pos.Y + size.Y - m_lineHeight);
                if (size.Y > 0)
                {
                    // Walk upward one line at a time, but never above the top of the box.
                    var count = m_lines.Count;
                    while (m_origin.Y - m_lineHeight >= pos.Y && --count > 0)
                    {
                        m_origin.Y -= m_lineHeight;
                    }
                }
                else
                {
                    // No height limit: simply back up by the full block height.
                    m_origin.Y -= (m_lines.Count - 1) * m_lineHeight;
                }
                m_delta = new Vector(0, m_lineHeight);
                break;
            case XLineAlignment.Center:
                m_origin = new Vector(pos.X, pos.Y + size.Y / 2 - (m_lines.Count - 1) * m_lineHeight / 2 - m_lineHeight / 2);
                m_delta = new Vector(0, m_lineHeight);
                break;
        }
        m_actualFormat.LineAlignment = XLineAlignment.Near;
        // Horizontal alignment stays in m_actualFormat; only the X anchor moves.
        switch (m_actualFormat.Alignment)
        {
            case XStringAlignment.Far:
                m_origin.X = pos.X + size.X;
                break;
            case XStringAlignment.Center:
                m_origin.X = pos.X + size.X / 2;
                break;
        }
    }
    // Diagnostic: number of times any TextBlock has rebuilt its layout.
    public static int RebuildCount
    {
        get { return s_rebuildCount; }
    }
    // A word (or single-space separator) plus its measured width.
    private struct Word
    {
        public Word(string text, float length)
        {
            Text = text;
            Length = length;
        }
        public string Text;
        public float Length;
    }
    private string m_text = string.Empty;
    // cached layout data to speed drawing
    private bool m_invalidLayout = true;
    private XSize m_sizeChecker;        // measurement of "M q" from the last Draw
    private Vector m_pos;               // position used for the cached layout
    private Vector m_size;              // size used for the cached layout
    private Vector m_origin;            // where the first line is drawn
    private Vector m_delta;             // per-line advance
    private float m_lineHeight;
    private XStringFormat m_requestedFormat;  // caller's format, for change detection
    private XStringFormat m_actualFormat;     // format actually passed to DrawString
    private List<string> m_lines = new List<string>();
    // NOTE(review): public only so RebuildCount-style diagnostics elsewhere can reach it;
    // looks like it was intended to be private.
    public static int s_rebuildCount = 0;
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Numerics;
using Xunit;
namespace ComplexTestSupport
{
/// <summary>
/// Shared helpers for System.Numerics.Complex tests: deterministic random
/// value generators (fixed seed for reproducibility), canonical test-value
/// arrays, and tolerant floating-point comparison/verification helpers.
/// </summary>
public static class Support
{
    private static Random s_random;
    // Fixed seed so generated test data is reproducible across runs.
    static Support()
    {
        s_random = new Random(-55);
    }
    public static Random Random
    {
        get { return s_random; }
    }
    // Valid values in double type
    public static Double[] doubleValidValues = new Double[] {
        double.MinValue,
        -1,
        0,
        double.Epsilon,
        1,
        double.MaxValue,
    };
    // Invalid values in double type
    public static Double[] doubleInvalidValues = new Double[] {
        double.NegativeInfinity,
        double.PositiveInfinity,
        double.NaN
    };
    // Typical phase values in double type
    public static Double[] phaseTypicalValues = new Double[] {
        -Math.PI/2,
        0,
        Math.PI/2
    };
    public static String[] supportedStdNumericFormats = new String[] { "C", "E", "F", "G", "N", "P", "R" };
    /// <summary>
    /// Returns a random double in [0, mult), negated when fIsNegative.
    /// </summary>
    private static double GetRandomValue(double mult, bool fIsNegative)
    {
        double randomDouble = (mult * s_random.NextDouble());
        randomDouble %= (Double)(mult);
        return fIsNegative ? -randomDouble : randomDouble;
    }
    public static double GetRandomDoubleValue(bool fIsNegative)
    {
        return GetRandomValue(double.MaxValue, fIsNegative);
    }
    // Returns a value in (-1, 0] or [0, 1) depending on fIsNegative.
    public static double GetSmallRandomDoubleValue(bool fIsNegative)
    {
        return GetRandomValue(1.0, fIsNegative);
    }
    public static Int16 GetRandomInt16Value(bool fIsNegative)
    {
        if (fIsNegative)
        {
            return ((Int16)s_random.Next(Int16.MinValue, 0));
        }
        else
        {
            return ((Int16)s_random.Next(1, Int16.MaxValue));
        }
    }
    public static Int32 GetRandomInt32Value(bool fIsNegative)
    {
        return ((Int32)GetRandomValue(Int32.MaxValue, fIsNegative));
    }
    public static Int64 GetRandomInt64Value(bool fIsNegative)
    {
        return ((Int64)GetRandomValue(Int64.MaxValue, fIsNegative));
    }
    public static Byte GetRandomByteValue()
    {
        return ((Byte)s_random.Next(1, Byte.MaxValue));
    }
#if CLS_Compliant
    // BUGFIX: these previously referenced an undefined identifier "random";
    // the static field is named s_random, so this block failed to compile
    // whenever CLS_Compliant was defined.
    public static SByte GetRandomSByteValue(bool fIsNegative)
    {
        if (fIsNegative)
        {
            return ((SByte) s_random.Next(SByte.MinValue, 0));
        }
        else
        {
            return ((SByte) s_random.Next(1, SByte.MaxValue));
        }
    }
    public static UInt16 GetRandomUInt16Value()
    {
        return ((UInt16)s_random.Next(1, UInt16.MaxValue));
    }
    public static UInt32 GetRandomUInt32Value()
    {
        return ((UInt32)GetRandomValue(UInt32.MaxValue, false));
    }
    public static UInt64 GetRandomUInt64Value()
    {
        return ((UInt64)GetRandomValue(UInt64.MaxValue, false));
    }
#endif
    public static Single GetRandomSingleValue(bool fIsNegative)
    {
        return ((Single)GetRandomValue(Single.MaxValue, fIsNegative));
    }
    public static BigInteger GetRandomBigIntegerValue(bool fIsNegative)
    {
        return ((BigInteger)GetRandomValue(double.MaxValue, fIsNegative));
    }
    public static Decimal GetRandomDecimalValue(bool fIsNegative)
    {
        if (fIsNegative)
        {
            return ((Decimal)new Decimal(
                s_random.Next(Int32.MinValue, Int32.MaxValue),
                s_random.Next(Int32.MinValue, Int32.MaxValue),
                s_random.Next(Int32.MinValue, Int32.MaxValue),
                true,
                (byte)s_random.Next(0, 29)));
        }
        else
        {
            return ((Decimal)new Decimal(
                s_random.Next(Int32.MinValue, Int32.MaxValue),
                s_random.Next(Int32.MinValue, Int32.MaxValue),
                s_random.Next(Int32.MinValue, Int32.MaxValue),
                false,
                (byte)s_random.Next(0, 29)));
        }
    }
    // Random phase magnitude in [0, PI/2), negated when fIsNegative.
    public static double GetRandomPhaseValue(bool fIsNegative)
    {
        return GetRandomValue((Math.PI / 2), fIsNegative);
    }
    /// <summary>
    /// True when d1 and d2 agree to ~6 significant digits. An infinite value
    /// is considered tolerable if the other value is large enough that
    /// multiplying by 10 overflows to the same infinity.
    /// NOTE(review): when d1 == 0 and d2 == 0 the ratio is NaN and this
    /// returns false; callers guard with an exact Equals check first.
    /// </summary>
    public static bool IsDiffTolerable(double d1, double d2)
    {
        if (double.IsInfinity(d1))
        {
            return d1 == (d2 * 10);
        }
        else if (double.IsInfinity(d2))
        {
            return d2 == (d1 * 10);
        }
        else
        {
            double diffRatio = (d1 - d2) / d1;
            diffRatio *= Math.Pow(10, 6);
            diffRatio = Math.Abs(diffRatio);
            return (diffRatio < 1);
        }
    }
    /// <summary>Asserts that the complex number's Real/Imaginary match within tolerance.</summary>
    public static void VerifyRealImaginaryProperties(Complex complex, double real, double imaginary, string message)
    {
        Assert.True(real.Equals((Double)complex.Real) || IsDiffTolerable(complex.Real, real), message);
        Assert.True(imaginary.Equals((Double)complex.Imaginary) || IsDiffTolerable(complex.Imaginary, imaginary), message);
    }
    /// <summary>
    /// Asserts Magnitude/Phase within tolerance, normalizing the expected phase
    /// for NaN, zero, and negative magnitudes first.
    /// </summary>
    public static void VerifyMagnitudePhaseProperties(Complex complex, double magnitude, double phase, string message)
    {
        // The magnitude (m) of a complex number (z = x + yi) is the absolute value - |z| = sqrt(x^2 + y^2)
        // Verification is done using the square of the magnitude since m^2 = x^2 + y^2
        double expectedMagnitudeSqr = magnitude * magnitude;
        double actualMagnitudeSqr = complex.Magnitude * complex.Magnitude;
        Assert.True(expectedMagnitudeSqr.Equals((Double)(actualMagnitudeSqr)) || IsDiffTolerable(actualMagnitudeSqr, expectedMagnitudeSqr), message);
        if (double.IsNaN(magnitude))
        {
            phase = double.NaN;
        }
        else if (magnitude == 0)
        {
            phase = 0;
        }
        else if (magnitude < 0)
        {
            // A negative magnitude flips the direction: shift phase by ±PI.
            phase += (phase < 0) ? Math.PI : -Math.PI;
        }
        Assert.True(phase.Equals((Double)complex.Phase) || IsDiffTolerable(complex.Phase, phase), message);
    }
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
namespace DalSic
{
/// <summary>
/// Strongly-typed collection for the PnGruposUsuario class.
/// (SubSonic-generated ActiveRecord code.)
/// </summary>
[Serializable]
public partial class PnGruposUsuarioCollection : ActiveList<PnGruposUsuario, PnGruposUsuarioCollection>
{
    public PnGruposUsuarioCollection() {}
    /// <summary>
    /// Filters an existing collection based on the set criteria. This is an in-memory filter
    /// Thanks to developingchris for this!
    /// Only Comparison.Equals criteria are evaluated; other comparison kinds are ignored.
    /// </summary>
    /// <returns>PnGruposUsuarioCollection</returns>
    public PnGruposUsuarioCollection Filter()
    {
        // Iterate in reverse so Remove() does not disturb unvisited indices.
        for (int i = this.Count - 1; i > -1; i--)
        {
            PnGruposUsuario o = this[i];
            foreach (SubSonic.Where w in this.wheres)
            {
                bool remove = false;
                // NOTE(review): throws NullReferenceException if the Where column
                // name has no matching property on PnGruposUsuario — confirm callers
                // always use property names here.
                System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
                if (pi.CanRead)
                {
                    object val = pi.GetValue(o, null);
                    switch (w.Comparison)
                    {
                        case SubSonic.Comparison.Equals:
                            if (!val.Equals(w.ParameterValue))
                            {
                                remove = true;
                            }
                            break;
                    }
                }
                if (remove)
                {
                    // Item already removed; stop evaluating further criteria for it.
                    this.Remove(o);
                    break;
                }
            }
        }
        return this;
    }
}
/// <summary>
/// This is an ActiveRecord class which wraps the PN_grupos_usuarios table
/// (group/user membership join table). SubSonic-generated; schema metadata
/// below must stay in sync with the database.
/// </summary>
[Serializable]
public partial class PnGruposUsuario : ActiveRecord<PnGruposUsuario>, IActiveRecord
{
    #region .ctors and Default Settings
    // New, unsaved record with code-level defaults.
    public PnGruposUsuario()
    {
        SetSQLProps();
        InitSetDefaults();
        MarkNew();
    }
    private void InitSetDefaults() { SetDefaults(); }
    // New record; optionally pulls column defaults from the database.
    public PnGruposUsuario(bool useDatabaseDefaults)
    {
        SetSQLProps();
        if(useDatabaseDefaults)
            ForceDefaults();
        MarkNew();
    }
    // Loads an existing record by primary key (id_gu).
    public PnGruposUsuario(object keyID)
    {
        SetSQLProps();
        InitSetDefaults();
        LoadByKey(keyID);
    }
    // Loads an existing record by an arbitrary column/value pair.
    public PnGruposUsuario(string columnName, object columnValue)
    {
        SetSQLProps();
        InitSetDefaults();
        LoadByParam(columnName,columnValue);
    }
    protected static void SetSQLProps() { GetTableSchema(); }
    #endregion
    #region Schema and Query Accessor
    public static Query CreateQuery() { return new Query(Schema); }
    public static TableSchema.Table Schema
    {
        get
        {
            // Lazily initialize the shared schema on first access.
            if (BaseSchema == null)
                SetSQLProps();
            return BaseSchema;
        }
    }
    // Builds the table schema once and registers it with the provider.
    private static void GetTableSchema()
    {
        if(!IsSchemaInitialized)
        {
            //Schema declaration
            TableSchema.Table schema = new TableSchema.Table("PN_grupos_usuarios", TableType.Table, DataService.GetInstance("sicProvider"));
            schema.Columns = new TableSchema.TableColumnCollection();
            schema.SchemaName = @"dbo";
            //columns
            // id_gu: identity primary key
            TableSchema.TableColumn colvarIdGu = new TableSchema.TableColumn(schema);
            colvarIdGu.ColumnName = "id_gu";
            colvarIdGu.DataType = DbType.Int32;
            colvarIdGu.MaxLength = 0;
            colvarIdGu.AutoIncrement = true;
            colvarIdGu.IsNullable = false;
            colvarIdGu.IsPrimaryKey = true;
            colvarIdGu.IsForeignKey = false;
            colvarIdGu.IsReadOnly = false;
            colvarIdGu.DefaultSetting = @"";
            colvarIdGu.ForeignKeyTableName = "";
            schema.Columns.Add(colvarIdGu);
            // id_grupo: FK -> PN_grupos
            TableSchema.TableColumn colvarIdGrupo = new TableSchema.TableColumn(schema);
            colvarIdGrupo.ColumnName = "id_grupo";
            colvarIdGrupo.DataType = DbType.Int32;
            colvarIdGrupo.MaxLength = 0;
            colvarIdGrupo.AutoIncrement = false;
            colvarIdGrupo.IsNullable = false;
            colvarIdGrupo.IsPrimaryKey = false;
            colvarIdGrupo.IsForeignKey = true;
            colvarIdGrupo.IsReadOnly = false;
            colvarIdGrupo.DefaultSetting = @"";
            colvarIdGrupo.ForeignKeyTableName = "PN_grupos";
            schema.Columns.Add(colvarIdGrupo);
            // id_usuario: FK -> PN_usuarios
            TableSchema.TableColumn colvarIdUsuario = new TableSchema.TableColumn(schema);
            colvarIdUsuario.ColumnName = "id_usuario";
            colvarIdUsuario.DataType = DbType.Int32;
            colvarIdUsuario.MaxLength = 0;
            colvarIdUsuario.AutoIncrement = false;
            colvarIdUsuario.IsNullable = false;
            colvarIdUsuario.IsPrimaryKey = false;
            colvarIdUsuario.IsForeignKey = true;
            colvarIdUsuario.IsReadOnly = false;
            colvarIdUsuario.DefaultSetting = @"";
            colvarIdUsuario.ForeignKeyTableName = "PN_usuarios";
            schema.Columns.Add(colvarIdUsuario);
            BaseSchema = schema;
            //add this schema to the provider
            //so we can query it later
            DataService.Providers["sicProvider"].AddSchema("PN_grupos_usuarios",schema);
        }
    }
    #endregion
    #region Props
    [XmlAttribute("IdGu")]
    [Bindable(true)]
    public int IdGu
    {
        get { return GetColumnValue<int>(Columns.IdGu); }
        set { SetColumnValue(Columns.IdGu, value); }
    }
    [XmlAttribute("IdGrupo")]
    [Bindable(true)]
    public int IdGrupo
    {
        get { return GetColumnValue<int>(Columns.IdGrupo); }
        set { SetColumnValue(Columns.IdGrupo, value); }
    }
    [XmlAttribute("IdUsuario")]
    [Bindable(true)]
    public int IdUsuario
    {
        get { return GetColumnValue<int>(Columns.IdUsuario); }
        set { SetColumnValue(Columns.IdUsuario, value); }
    }
    #endregion
    #region ForeignKey Properties
    /// <summary>
    /// Returns a PnUsuario ActiveRecord object related to this PnGruposUsuario
    /// (lazy-loaded: each get issues a FetchByID).
    /// </summary>
    public DalSic.PnUsuario PnUsuario
    {
        get { return DalSic.PnUsuario.FetchByID(this.IdUsuario); }
        set { SetColumnValue("id_usuario", value.IdUsuario); }
    }
    /// <summary>
    /// Returns a PnGrupo ActiveRecord object related to this PnGruposUsuario
    /// (lazy-loaded: each get issues a FetchByID).
    /// </summary>
    public DalSic.PnGrupo PnGrupo
    {
        get { return DalSic.PnGrupo.FetchByID(this.IdGrupo); }
        set { SetColumnValue("id_grupo", value.IdGrupo); }
    }
    #endregion
    //no ManyToMany tables defined (0)
    #region ObjectDataSource support
    /// <summary>
    /// Inserts a record, can be used with the Object Data Source.
    /// Audits under the current HTTP user when available, else the thread principal.
    /// </summary>
    public static void Insert(int varIdGrupo,int varIdUsuario)
    {
        PnGruposUsuario item = new PnGruposUsuario();
        item.IdGrupo = varIdGrupo;
        item.IdUsuario = varIdUsuario;
        if (System.Web.HttpContext.Current != null)
            item.Save(System.Web.HttpContext.Current.User.Identity.Name);
        else
            item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
    }
    /// <summary>
    /// Updates a record, can be used with the Object Data Source.
    /// IsNew = false forces an UPDATE (rather than INSERT) on Save.
    /// </summary>
    public static void Update(int varIdGu,int varIdGrupo,int varIdUsuario)
    {
        PnGruposUsuario item = new PnGruposUsuario();
        item.IdGu = varIdGu;
        item.IdGrupo = varIdGrupo;
        item.IdUsuario = varIdUsuario;
        item.IsNew = false;
        if (System.Web.HttpContext.Current != null)
            item.Save(System.Web.HttpContext.Current.User.Identity.Name);
        else
            item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
    }
    #endregion
    #region Typed Columns
    // Column accessors by ordinal; order matches GetTableSchema above.
    public static TableSchema.TableColumn IdGuColumn
    {
        get { return Schema.Columns[0]; }
    }
    public static TableSchema.TableColumn IdGrupoColumn
    {
        get { return Schema.Columns[1]; }
    }
    public static TableSchema.TableColumn IdUsuarioColumn
    {
        get { return Schema.Columns[2]; }
    }
    #endregion
    #region Columns Struct
    // Raw database column names.
    public struct Columns
    {
        public static string IdGu = @"id_gu";
        public static string IdGrupo = @"id_grupo";
        public static string IdUsuario = @"id_usuario";
    }
    #endregion
    #region Update PK Collections
    #endregion
    #region Deep Save
    #endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
namespace System.Diagnostics
{
/// <summary>
/// Per-(machine, locale) cache of Windows performance-counter metadata.
/// Reads counter name/help string tables and raw performance data from the
/// HKEY_PERFORMANCE_DATA pseudo-registry key, with retry logic because that
/// key is known to transiently return empty or busy results under stress.
/// </summary>
internal sealed class PerformanceCounterLib
{
    private static volatile string s_computerName;
    private PerformanceMonitor _performanceMonitor;
    private string _machineName;
    private string _perfLcid;
    // Shared cache of libraries keyed by "machine:lcid".
    private static volatile Dictionary<String, PerformanceCounterLib> s_libraryTable;
    private Dictionary<int, string> _nameTable;
    private readonly object _nameTableLock = new Object();
    private static Object s_internalSyncObject;
    // Lazily-created lock guarding all static shared state.
    private static Object InternalSyncObject
    {
        get
        {
            if (s_internalSyncObject == null)
            {
                Object o = new Object();
                Interlocked.CompareExchange(ref s_internalSyncObject, o, null);
            }
            return s_internalSyncObject;
        }
    }
    internal PerformanceCounterLib(string machineName, string lcid)
    {
        _machineName = machineName;
        _perfLcid = lcid;
    }
    /// <internalonly/>
    // Cached local computer name (double-checked under InternalSyncObject).
    internal static string ComputerName
    {
        get
        {
            if (s_computerName == null)
            {
                lock (InternalSyncObject)
                {
                    if (s_computerName == null)
                    {
                        s_computerName = Interop.mincore.GetComputerName();
                    }
                }
            }
            return s_computerName;
        }
    }
    // Counter index -> localized counter name, loaded lazily from the registry.
    internal Dictionary<int, string> NameTable
    {
        get
        {
            if (_nameTable == null)
            {
                lock (_nameTableLock)
                {
                    if (_nameTable == null)
                        _nameTable = GetStringTable(false);
                }
            }
            return _nameTable;
        }
    }
    /// <summary>Localized name for a counter index, or "" when unknown.</summary>
    internal string GetCounterName(int index)
    {
        string result;
        return NameTable.TryGetValue(index, out result) ? result : "";
    }
    /// <summary>
    /// Gets (or creates) the cached library for the given machine and culture.
    /// </summary>
    internal static PerformanceCounterLib GetPerformanceCounterLib(string machineName, CultureInfo culture)
    {
        string lcidString = culture.Name.ToLowerInvariant();
        if (machineName.CompareTo(".") == 0)
            machineName = ComputerName.ToLowerInvariant();
        else
            machineName = machineName.ToLowerInvariant();
        string libraryKey = machineName + ":" + lcidString;
        PerformanceCounterLib library;
        // BUGFIX: the lookup and insert previously ran outside the lock, so
        // concurrent callers could mutate the (non-thread-safe) Dictionary
        // simultaneously. Initialization, lookup and insert now all happen
        // under InternalSyncObject.
        lock (InternalSyncObject)
        {
            if (PerformanceCounterLib.s_libraryTable == null)
                PerformanceCounterLib.s_libraryTable = new Dictionary<string, PerformanceCounterLib>();
            if (!PerformanceCounterLib.s_libraryTable.TryGetValue(libraryKey, out library))
            {
                library = new PerformanceCounterLib(machineName, lcidString);
                PerformanceCounterLib.s_libraryTable[libraryKey] = library;
            }
        }
        return library;
    }
    /// <summary>Raw performance data bytes for the given item (e.g. object index).</summary>
    internal byte[] GetPerformanceData(string item)
    {
        if (_performanceMonitor == null)
        {
            lock (InternalSyncObject)
            {
                if (_performanceMonitor == null)
                    _performanceMonitor = new PerformanceMonitor(_machineName);
            }
        }
        return _performanceMonitor.GetData(item);
    }
    /// <summary>
    /// Reads the "Counter" (or "Explain" when isHelp) string table for this
    /// locale from HKEY_PERFORMANCE_DATA, parsing index/name pairs.
    /// Returns an empty table when the data cannot be read.
    /// </summary>
    private Dictionary<int, string> GetStringTable(bool isHelp)
    {
        Dictionary<int, string> stringTable;
        RegistryKey libraryKey;
        libraryKey = Registry.PerformanceData;
        try
        {
            string[] names = null;
            int waitRetries = 14; //((2^13)-1)*10ms == approximately 1.4mins
            int waitSleep = 0;
            // In some stress situations, querying counter values from
            // HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Perflib\009
            // often returns null/empty data back. We should build fault-tolerance logic to
            // make it more reliable because getting null back once doesn't necessarily mean
            // that the data is corrupted, most of the time we would get the data just fine
            // in subsequent tries.
            while (waitRetries > 0)
            {
                try
                {
                    if (!isHelp)
                        names = (string[])libraryKey.GetValue("Counter " + _perfLcid);
                    else
                        names = (string[])libraryKey.GetValue("Explain " + _perfLcid);
                    if ((names == null) || (names.Length == 0))
                    {
                        // Exponential backoff: 0, 10, 20, 40, ... ms.
                        --waitRetries;
                        if (waitSleep == 0)
                            waitSleep = 10;
                        else
                        {
                            System.Threading.Thread.Sleep(waitSleep);
                            waitSleep *= 2;
                        }
                    }
                    else
                        break;
                }
                catch (IOException)
                {
                    // RegistryKey throws if it can't find the value. We want to return an empty table
                    // and throw a different exception higher up the stack.
                    names = null;
                    break;
                }
                catch (InvalidCastException)
                {
                    // Unable to cast object of type 'System.Byte[]' to type 'System.String[]'.
                    // this happens when the registry data store is corrupt and the type is not even REG_MULTI_SZ
                    names = null;
                    break;
                }
            }
            if (names == null)
                stringTable = new Dictionary<int, string>();
            else
            {
                // Data layout: [index0, name0, index1, name1, ...]
                stringTable = new Dictionary<int, string>(names.Length / 2);
                for (int index = 0; index < (names.Length / 2); ++index)
                {
                    string nameString = names[(index * 2) + 1];
                    if (nameString == null)
                        nameString = String.Empty;
                    int key;
                    if (!Int32.TryParse(names[index * 2], NumberStyles.Integer, CultureInfo.InvariantCulture, out key))
                    {
                        if (isHelp)
                        {
                            // Category Help Table
                            throw new InvalidOperationException(SR.Format(SR.CategoryHelpCorrupt, names[index * 2]));
                        }
                        else
                        {
                            // Counter Name Table
                            throw new InvalidOperationException(SR.Format(SR.CounterNameCorrupt, names[index * 2]));
                        }
                    }
                    // Last-write-wins on duplicate indices.
                    stringTable[key] = nameString;
                }
            }
        }
        finally
        {
            libraryKey.Dispose();
        }
        return stringTable;
    }
    // Thin wrapper over the HKEY_PERFORMANCE_DATA key with retry/backoff on
    // the transient errors that key is known to produce.
    internal class PerformanceMonitor
    {
        private RegistryKey _perfDataKey = null;
        private string _machineName;
        internal PerformanceMonitor(string machineName)
        {
            _machineName = machineName;
            Init();
        }
        private void Init()
        {
            _perfDataKey = Registry.PerformanceData;
        }
        // Win32 RegQueryValueEx for perf data could deadlock (for a Mutex) up to 2mins in some
        // scenarios before they detect it and exit gracefully. In the mean time, ERROR_BUSY,
        // ERROR_NOT_READY etc can be seen by other concurrent calls (which is the reason for the
        // wait loop and switch case below). We want to wait most certainly more than a 2min window.
        // The curent wait time of up to 10mins takes care of the known stress deadlock issues. In most
        // cases we wouldn't wait for more than 2mins anyways but in worst cases how much ever time
        // we wait may not be sufficient if the Win32 code keeps running into this deadlock again
        // and again. A condition very rare but possible in theory. We would get back to the user
        // in this case with InvalidOperationException after the wait time expires.
        internal byte[] GetData(string item)
        {
            int waitRetries = 17; //2^16*10ms == approximately 10mins
            int waitSleep = 0;
            byte[] data = null;
            int error = 0;
            while (waitRetries > 0)
            {
                try
                {
                    data = (byte[])_perfDataKey.GetValue(item);
                    return data;
                }
                catch (IOException e)
                {
                    error = Marshal.GetHRForException(e);
                    switch (error)
                    {
                        // Connection-level failures: reopen the key, then fall
                        // through to the retry/backoff path.
                        case Interop.mincore.RPCStatus.RPC_S_CALL_FAILED:
                        case Interop.mincore.Errors.ERROR_INVALID_HANDLE:
                        case Interop.mincore.RPCStatus.RPC_S_SERVER_UNAVAILABLE:
                            Init();
                            goto case Interop.mincore.WaitOptions.WAIT_TIMEOUT;
                        case Interop.mincore.WaitOptions.WAIT_TIMEOUT:
                        case Interop.mincore.Errors.ERROR_NOT_READY:
                        case Interop.mincore.Errors.ERROR_LOCK_FAILED:
                        case Interop.mincore.Errors.ERROR_BUSY:
                            --waitRetries;
                            if (waitSleep == 0)
                            {
                                waitSleep = 10;
                            }
                            else
                            {
                                System.Threading.Thread.Sleep(waitSleep);
                                waitSleep *= 2;
                            }
                            break;
                        default:
                            // Unexpected error: surface immediately.
                            throw new Win32Exception(error);
                    }
                }
                catch (InvalidCastException e)
                {
                    throw new InvalidOperationException(SR.Format(SR.CounterDataCorrupt, _perfDataKey.ToString()), e);
                }
            }
            // Retries exhausted; report the last error seen.
            throw new Win32Exception(error);
        }
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;
namespace System.Xml.XPath
{
internal class XNodeNavigator : XPathNavigator, IXmlLineInfo
{
internal static readonly string xmlPrefixNamespace = XNamespace.Xml.NamespaceName;
internal static readonly string xmlnsPrefixNamespace = XNamespace.Xmlns.NamespaceName;
private const int DocumentContentMask =
(1 << (int)XmlNodeType.Element) |
(1 << (int)XmlNodeType.ProcessingInstruction) |
(1 << (int)XmlNodeType.Comment);
private static readonly int[] s_ElementContentMasks = {
0, // Root
(1 << (int)XmlNodeType.Element), // Element
0, // Attribute
0, // Namespace
(1 << (int)XmlNodeType.CDATA) |
(1 << (int)XmlNodeType.Text), // Text
0, // SignificantWhitespace
0, // Whitespace
(1 << (int)XmlNodeType.ProcessingInstruction), // ProcessingInstruction
(1 << (int)XmlNodeType.Comment), // Comment
(1 << (int)XmlNodeType.Element) |
(1 << (int)XmlNodeType.CDATA) |
(1 << (int)XmlNodeType.Text) |
(1 << (int)XmlNodeType.ProcessingInstruction) |
(1 << (int)XmlNodeType.Comment) // All
};
private const int TextMask =
(1 << (int)XmlNodeType.CDATA) |
(1 << (int)XmlNodeType.Text);
private static XAttribute s_XmlNamespaceDeclaration;
// The navigator position is encoded by the tuple (source, parent).
// Namespace declaration uses (instance, parent element).
// Common XObjects uses (instance, null).
private XObject _source;
private XElement _parent;
private XmlNameTable _nameTable;
// Creates a navigator positioned on the given node; uses the supplied
// name table, or lazily creates one when none is provided.
public XNodeNavigator(XNode node, XmlNameTable nameTable)
{
    _source = node;
    _nameTable = nameTable != null ? nameTable : CreateNameTable();
}
// Copy constructor: clones the (source, parent) position and shares the name table.
public XNodeNavigator(XNodeNavigator other)
{
    _source = other._source;
    _parent = other._parent;
    _nameTable = other._nameTable;
}
// Base URI of the current node; falls back to the parent element for
// namespace-node positions, else empty.
public override string BaseURI
{
    get
    {
        if (_source != null)
        {
            return _source.BaseUri;
        }
        if (_parent != null)
        {
            return _parent.BaseUri;
        }
        return string.Empty;
    }
}
// True only when positioned on an element carrying at least one ordinary
// attribute; namespace declarations are not XPath attributes.
public override bool HasAttributes
{
    get
    {
        XElement element = _source as XElement;
        return element != null
            && element.Attributes().Any(a => !a.IsNamespaceDeclaration);
    }
}
// True when the current container holds at least one node that counts as
// XPath content for that container (per IsContent).
public override bool HasChildren
{
    get
    {
        XContainer container = _source as XContainer;
        return container != null
            && container.Nodes().Any(node => IsContent(container, node));
    }
}
// True for an element written in empty form (<e/>), i.e. with no content.
public override bool IsEmptyElement
{
    get
    {
        XElement e = _source as XElement;
        return e != null && e.IsEmpty;
    }
}
// Local name of the current node, atomized through the name table.
public override string LocalName
{
    get { return _nameTable.Add(GetLocalName()); }
}
// Computes the (non-atomized) local name for the current position.
// Per the field comments above, _parent != null means an attribute is being
// exposed as a namespace node.
string GetLocalName()
{
    XElement e = _source as XElement;
    if (e != null)
    {
        return e.Name.LocalName;
    }
    XAttribute a = _source as XAttribute;
    if (a != null)
    {
        // Default-namespace declaration exposed as a namespace node:
        // report an empty local name for compatibility.
        if (_parent != null && a.Name.NamespaceName.Length == 0)
        {
            return string.Empty; // backcompat
        }
        return a.Name.LocalName;
    }
    XProcessingInstruction p = _source as XProcessingInstruction;
    if (p != null)
    {
        return p.Target;
    }
    // Other node kinds (text, comment, document) have no name.
    return string.Empty;
}
// Qualified name "prefix:local" (just the local name when there is no
// prefix), atomized through the name table.
public override string Name
{
    get
    {
        string prefix = GetPrefix();
        if (prefix.Length == 0)
        {
            return _nameTable.Add(GetLocalName());
        }
        return _nameTable.Add(string.Concat(prefix, ":", GetLocalName()));
    }
}
// Namespace URI of the current node, atomized through the name table.
public override string NamespaceURI
{
    get { return _nameTable.Add(GetNamespaceURI()); }
}
// Computes the (non-atomized) namespace URI for the current position.
string GetNamespaceURI()
{
    XElement e = _source as XElement;
    if (e != null)
    {
        return e.Name.NamespaceName;
    }
    XAttribute a = _source as XAttribute;
    if (a != null)
    {
        // Attribute exposed as a namespace node: namespace nodes have no
        // namespace URI of their own.
        if (_parent != null)
        {
            return string.Empty; // backcompat
        }
        return a.Name.NamespaceName;
    }
    return string.Empty;
}
// Name table used to atomize all names returned by this navigator.
public override XmlNameTable NameTable
{
    get { return _nameTable; }
}
// Maps the underlying XLinq node kind to the XPath data model. Attributes
// that are namespace declarations report XPathNodeType.Namespace; all
// text-like kinds (and anything unrecognized) fall through to Text.
public override XPathNodeType NodeType
{
    get
    {
        if (_source != null)
        {
            switch (_source.NodeType)
            {
                case XmlNodeType.Element:
                    return XPathNodeType.Element;
                case XmlNodeType.Attribute:
                    XAttribute attribute = (XAttribute)_source;
                    return attribute.IsNamespaceDeclaration ? XPathNodeType.Namespace : XPathNodeType.Attribute;
                case XmlNodeType.Document:
                    return XPathNodeType.Root;
                case XmlNodeType.Comment:
                    return XPathNodeType.Comment;
                case XmlNodeType.ProcessingInstruction:
                    return XPathNodeType.ProcessingInstruction;
                default:
                    return XPathNodeType.Text;
            }
        }
        return XPathNodeType.Text;
    }
}
// Namespace prefix of the current node, atomized through the name table.
public override string Prefix
{
    get { return _nameTable.Add(GetPrefix()); }
}
// Computes the (non-atomized) prefix for the current position.
string GetPrefix()
{
    XElement e = _source as XElement;
    if (e != null)
    {
        string prefix = e.GetPrefixOfNamespace(e.Name.Namespace);
        if (prefix != null)
        {
            return prefix;
        }
        return string.Empty;
    }
    XAttribute a = _source as XAttribute;
    if (a != null)
    {
        // Attribute exposed as a namespace node: namespace nodes carry no prefix.
        if (_parent != null)
        {
            return string.Empty; // backcompat
        }
        string prefix = a.GetPrefixOfNamespace(a.Name.Namespace);
        if (prefix != null)
        {
            return prefix;
        }
    }
    return string.Empty;
}
// The XObject this navigator is currently positioned on.
public override object UnderlyingObject
{
    get
    {
        return _source;
    }
}
// XPath string-value of the current node: element/document content, attribute
// value, merged adjacent text (via CollectText), comment text, or PI data.
public override string Value
{
    get
    {
        if (_source != null)
        {
            switch (_source.NodeType)
            {
                case XmlNodeType.Element:
                    return ((XElement)_source).Value;
                case XmlNodeType.Attribute:
                    return ((XAttribute)_source).Value;
                case XmlNodeType.Document:
                    // Document value is the value of its root element.
                    XElement root = ((XDocument)_source).Root;
                    return root != null ? root.Value : string.Empty;
                case XmlNodeType.Text:
                case XmlNodeType.CDATA:
                    return CollectText((XText)_source);
                case XmlNodeType.Comment:
                    return ((XComment)_source).Value;
                case XmlNodeType.ProcessingInstruction:
                    return ((XProcessingInstruction)_source).Data;
                default:
                    return string.Empty;
            }
        }
        return string.Empty;
    }
}
/// <summary>Creates a new navigator positioned at the same node as this one.</summary>
public override XPathNavigator Clone()
{
return new XNodeNavigator(this);
}
/// <summary>Returns true when the supplied navigator is positioned on the same node.</summary>
public override bool IsSamePosition(XPathNavigator navigator)
{
// Only another XNodeNavigator can share a position with this one.
XNodeNavigator that = navigator as XNodeNavigator;
return that != null && IsSamePosition(this, that);
}
/// <summary>Moves this navigator to the position of another navigator over the same store.</summary>
public override bool MoveTo(XPathNavigator navigator)
{
XNodeNavigator that = navigator as XNodeNavigator;
if (that == null)
{
// Cannot adopt the position of a navigator over a different store.
return false;
}
_source = that._source;
_parent = that._parent;
return true;
}
/// <summary>Moves to the named attribute of the current element, if present.</summary>
public override bool MoveToAttribute(string localName, string namespaceName)
{
XElement element = _source as XElement;
if (element == null)
{
return false;
}
foreach (XAttribute candidate in element.Attributes())
{
// Namespace declarations are not attributes in the XPath data model.
if (!candidate.IsNamespaceDeclaration &&
candidate.Name.LocalName == localName &&
candidate.Name.NamespaceName == namespaceName)
{
_source = candidate;
return true;
}
}
return false;
}
/// <summary>Moves to the first child element with the given expanded name.</summary>
public override bool MoveToChild(string localName, string namespaceName)
{
XContainer container = _source as XContainer;
if (container == null)
{
return false;
}
foreach (XElement child in container.Elements())
{
if (child.Name.LocalName == localName &&
child.Name.NamespaceName == namespaceName)
{
_source = child;
return true;
}
}
return false;
}
/// <summary>Moves to the first child whose XPath node type matches <paramref name="type"/>.</summary>
public override bool MoveToChild(XPathNodeType type)
{
XContainer container = _source as XContainer;
if (container == null)
{
return false;
}
int mask = GetElementContentMask(type);
// A document root cannot have text children in the XPath data model.
if ((TextMask & mask) != 0 && container.GetParent() == null && container is XDocument)
{
mask &= ~TextMask;
}
foreach (XNode child in container.Nodes())
{
if (((1 << (int)child.NodeType) & mask) != 0)
{
_source = child;
return true;
}
}
return false;
}
/// <summary>Moves to the first non-namespace attribute of the current element.</summary>
public override bool MoveToFirstAttribute()
{
XElement element = _source as XElement;
if (element == null)
{
return false;
}
foreach (XAttribute candidate in element.Attributes())
{
// Skip xmlns declarations; they are namespace nodes, not attributes.
if (!candidate.IsNamespaceDeclaration)
{
_source = candidate;
return true;
}
}
return false;
}
/// <summary>Moves to the first content child of the current container.</summary>
public override bool MoveToFirstChild()
{
XContainer container = _source as XContainer;
if (container == null)
{
return false;
}
foreach (XNode child in container.Nodes())
{
// IsContent filters out nodes that are not content at the document level.
if (IsContent(container, child))
{
_source = child;
return true;
}
}
return false;
}
/// <summary>
/// Positions the navigator on the first in-scope namespace node of the
/// current element, honoring the requested XPath namespace scope.
/// </summary>
public override bool MoveToFirstNamespace(XPathNamespaceScope scope)
{
XElement e = _source as XElement;
if (e != null)
{
XAttribute a = null;
switch (scope)
{
case XPathNamespaceScope.Local:
// Only declarations written directly on this element.
a = GetFirstNamespaceDeclarationLocal(e);
break;
case XPathNamespaceScope.ExcludeXml:
// All in-scope declarations except the implicit xml prefix.
a = GetFirstNamespaceDeclarationGlobal(e);
while (a != null && a.Name.LocalName == "xml")
{
a = GetNextNamespaceDeclarationGlobal(a);
}
break;
case XPathNamespaceScope.All:
a = GetFirstNamespaceDeclarationGlobal(e);
if (a == null)
{
// Even with no explicit declarations, the xml prefix is
// always in scope; surface the synthetic declaration.
a = GetXmlNamespaceDeclaration();
}
break;
}
if (a != null)
{
// _parent records the owning element so MoveToParent can return
// to it (the declaration may live on an ancestor element).
_source = a;
_parent = e;
return true;
}
}
return false;
}
/// <summary>
/// Not supported: LINQ to XML has no notion of DTD/schema-declared ID
/// attributes, so id() navigation cannot be implemented here.
/// </summary>
public override bool MoveToId(string id)
{
throw new NotSupportedException(SR.NotSupported_MoveToId);
}
/// <summary>Moves to the in-scope namespace node whose prefix is <paramref name="localName"/>.</summary>
public override bool MoveToNamespace(string localName)
{
XElement e = _source as XElement;
if (e != null)
{
if (localName == "xmlns")
{
// The literal prefix "xmlns" is never itself a namespace node.
return false; // backcompat
}
if (localName != null && localName.Length == 0)
{
// The empty prefix addresses the default namespace declaration,
// which is stored under the attribute name "xmlns".
localName = "xmlns"; // backcompat
}
XAttribute a = GetFirstNamespaceDeclarationGlobal(e);
while (a != null)
{
if (a.Name.LocalName == localName)
{
_source = a;
_parent = e;
return true;
}
a = GetNextNamespaceDeclarationGlobal(a);
}
if (localName == "xml")
{
// The xml prefix is implicitly declared on every element.
_source = GetXmlNamespaceDeclaration();
_parent = e;
return true;
}
}
return false;
}
/// <summary>
/// Moves to the next sibling that is content. A text node immediately
/// following another text node is skipped, because adjacent text nodes are
/// merged into a single XPath text node.
/// </summary>
public override bool MoveToNext()
{
XNode currentNode = _source as XNode;
if (currentNode != null)
{
XContainer container = currentNode.GetParent();
if (container != null)
{
XNode next = null;
for (XNode node = currentNode; node != null; node = next)
{
next = node.NextNode;
// End of the sibling list: no following content node.
if (next == null)
{
break;
}
if (IsContent(container, next) && !(node is XText && next is XText))
{
_source = next;
return true;
}
}
}
}
return false;
}
/// <summary>Moves to the next following sibling element with the given expanded name.</summary>
public override bool MoveToNext(string localName, string namespaceName)
{
XNode node = _source as XNode;
if (node == null)
{
return false;
}
foreach (XElement sibling in node.ElementsAfterSelf())
{
if (sibling.Name.LocalName == localName &&
sibling.Name.NamespaceName == namespaceName)
{
_source = sibling;
return true;
}
}
return false;
}
/// <summary>Moves to the next sibling whose XPath node type matches <paramref name="type"/>.</summary>
public override bool MoveToNext(XPathNodeType type)
{
XNode currentNode = _source as XNode;
if (currentNode != null)
{
XContainer container = currentNode.GetParent();
if (container != null)
{
int mask = GetElementContentMask(type);
// A document root cannot have text children in the XPath data model.
if ((TextMask & mask) != 0 && container.GetParent() == null && container is XDocument)
{
mask &= ~TextMask;
}
XNode next = null;
for (XNode node = currentNode; node != null; node = next)
{
next = node.NextNode;
// BUGFIX: stop at the end of the sibling list. The original
// dereferenced next.NodeType without this check and threw
// NullReferenceException when the current node had no following
// sibling (compare the parameterless MoveToNext()).
if (next == null)
{
break;
}
// Adjacent text nodes are logically merged, so never move from
// one text node to an immediately following text node.
if (((1 << (int)next.NodeType) & mask) != 0 && !(node is XText && next is XText))
{
_source = next;
return true;
}
}
}
}
return false;
}
/// <summary>Moves to the next non-namespace attribute of the current attribute's element.</summary>
public override bool MoveToNextAttribute()
{
XAttribute current = _source as XAttribute;
// _parent != null means we are on a namespace node, which has no
// attribute siblings.
if (current == null || _parent != null)
{
return false;
}
XElement owner = (XElement)current.GetParent();
if (owner == null)
{
return false;
}
for (XAttribute candidate = current.NextAttribute; candidate != null; candidate = candidate.NextAttribute)
{
if (!candidate.IsNamespaceDeclaration)
{
_source = candidate;
return true;
}
}
return false;
}
/// <summary>
/// Advances from one namespace node to the next within the requested scope.
/// Valid only when positioned on a namespace node (_parent set) that is not
/// the synthetic xml-prefix declaration (which always sorts last).
/// </summary>
public override bool MoveToNextNamespace(XPathNamespaceScope scope)
{
XAttribute a = _source as XAttribute;
if (a != null && _parent != null && !IsXmlNamespaceDeclaration(a))
{
switch (scope)
{
case XPathNamespaceScope.Local:
// Local scope never leaves the element that owns the node.
if (a.GetParent() != _parent)
{
return false;
}
a = GetNextNamespaceDeclarationLocal(a);
break;
case XPathNamespaceScope.ExcludeXml:
// Skip the xml prefix and any declaration that a nearer
// declaration of the same prefix hides.
do
{
a = GetNextNamespaceDeclarationGlobal(a);
} while (a != null &&
(a.Name.LocalName == "xml" ||
HasNamespaceDeclarationInScope(a, _parent)));
break;
case XPathNamespaceScope.All:
// Skip hidden declarations; once the explicit ones are
// exhausted, surface the implicit xml declaration exactly once.
do
{
a = GetNextNamespaceDeclarationGlobal(a);
} while (a != null &&
HasNamespaceDeclarationInScope(a, _parent));
if (a == null &&
!HasNamespaceDeclarationInScope(GetXmlNamespaceDeclaration(), _parent))
{
a = GetXmlNamespaceDeclaration();
}
break;
}
if (a != null)
{
_source = a;
return true;
}
}
return false;
}
/// <summary>Moves to the parent of the current node.</summary>
public override bool MoveToParent()
{
// A namespace-node position records its owning element in _parent.
if (_parent != null)
{
_source = _parent;
_parent = null;
return true;
}
XNode owner = _source.GetParent();
if (owner == null)
{
return false;
}
_source = owner;
return true;
}
/// <summary>Moves to the previous content sibling of the current node.</summary>
public override bool MoveToPrevious()
{
XNode currentNode = _source as XNode;
if (currentNode == null)
{
return false;
}
XContainer container = currentNode.GetParent();
if (container == null)
{
return false;
}
// Walk the children from the front, remembering the last content node
// seen before the current one; that node is the previous sibling.
XNode lastContent = null;
foreach (XNode child in container.Nodes())
{
if (child == currentNode)
{
if (lastContent == null)
{
return false;
}
_source = lastContent;
return true;
}
if (IsContent(container, child))
{
lastContent = child;
}
}
return false;
}
/// <summary>Returns an XmlReader over the subtree rooted at the current node.</summary>
public override XmlReader ReadSubtree()
{
XContainer container = _source as XContainer;
if (container == null)
{
// Only element and root positions expose a subtree reader.
throw new InvalidOperationException(SR.Format(SR.InvalidOperation_BadNodeType, NodeType));
}
return container.CreateReader();
}
// Line information is available only when the underlying XObject captured
// it at load time.
bool IXmlLineInfo.HasLineInfo()
{
IXmlLineInfo lineInfo = _source as IXmlLineInfo;
return lineInfo != null && lineInfo.HasLineInfo();
}
// 0 signals "no line information", per the IXmlLineInfo contract.
int IXmlLineInfo.LineNumber
{
get
{
IXmlLineInfo lineInfo = _source as IXmlLineInfo;
return lineInfo != null ? lineInfo.LineNumber : 0;
}
}
// 0 signals "no position information", per the IXmlLineInfo contract.
int IXmlLineInfo.LinePosition
{
get
{
IXmlLineInfo lineInfo = _source as IXmlLineInfo;
return lineInfo != null ? lineInfo.LinePosition : 0;
}
}
// XPath merges adjacent text/CDATA nodes into a single text node, so the
// value of n is its own text plus that of every immediately following text
// sibling. Uses a StringBuilder instead of repeated string concatenation
// to avoid O(n^2) behavior on long runs of text nodes.
static string CollectText(XText n)
{
if (n.GetParent() == null)
{
// A detached text node has no siblings to merge.
return n.Value;
}
System.Text.StringBuilder builder = new System.Text.StringBuilder(n.Value);
foreach (XNode node in n.NodesAfterSelf())
{
XText t = node as XText;
if (t == null) break;
builder.Append(t.Value);
}
return builder.ToString();
}
// Builds the default name table, pre-atomizing the strings the navigator
// hands out most often.
static XmlNameTable CreateNameTable()
{
XmlNameTable table = new NameTable();
table.Add(string.Empty);
table.Add(xmlnsPrefixNamespace);
table.Add(xmlPrefixNamespace);
return table;
}
// Inside an element (or any container that itself has a parent) every node
// counts as content; directly under a document only the node kinds in
// DocumentContentMask do (e.g. no text nodes at the root level).
static bool IsContent(XContainer c, XNode n)
{
if (c.GetParent() != null || c is XElement)
{
return true;
}
return ((1 << (int)n.NodeType) & DocumentContentMask) != 0;
}
// Two navigators match when they wrap the same XObject; comparing the
// parents as well distinguishes otherwise-identical detached objects.
static bool IsSamePosition(XNodeNavigator n1, XNodeNavigator n2)
{
return n1._source == n2._source && n1._source.GetParent() == n2._source.GetParent();
}
// Reference equality suffices: the xmlns:xml declaration is a process-wide
// singleton created by GetXmlNamespaceDeclaration.
static bool IsXmlNamespaceDeclaration(XAttribute a)
{
return (object)a == (object)GetXmlNamespaceDeclaration();
}
// Maps an XPathNodeType to a bit mask of matching XmlNodeType values
// (bit i set means XmlNodeType value i matches).
static int GetElementContentMask(XPathNodeType type)
{
return s_ElementContentMasks[(int)type];
}
// Finds the first namespace declaration in scope for e, searching this
// element and then each ancestor in turn.
static XAttribute GetFirstNamespaceDeclarationGlobal(XElement e)
{
XElement scope = e;
do
{
XAttribute declaration = GetFirstNamespaceDeclarationLocal(scope);
if (declaration != null)
{
return declaration;
}
scope = scope.Parent;
} while (scope != null);
return null;
}
// Returns the first xmlns / xmlns:* attribute declared directly on this
// element, or null if it declares none.
static XAttribute GetFirstNamespaceDeclarationLocal(XElement e)
{
foreach (XAttribute candidate in e.Attributes())
{
if (candidate.IsNamespaceDeclaration)
{
return candidate;
}
}
return null;
}
// Advances to the next namespace declaration in document-then-ancestor
// order: first the remaining declarations on the owning element, then the
// ancestor chain.
static XAttribute GetNextNamespaceDeclarationGlobal(XAttribute a)
{
XElement owner = (XElement)a.GetParent();
if (owner == null)
{
// A detached declaration (e.g. the synthetic xml one) has no successor.
return null;
}
XAttribute next = GetNextNamespaceDeclarationLocal(a);
if (next != null)
{
return next;
}
XElement ancestor = owner.Parent;
if (ancestor == null)
{
return null;
}
return GetFirstNamespaceDeclarationGlobal(ancestor);
}
// Scans the attributes following a on its owning element for the next
// namespace declaration; null if a is detached or none remain.
static XAttribute GetNextNamespaceDeclarationLocal(XAttribute a)
{
if (a.Parent == null)
{
return null;
}
for (XAttribute candidate = a.NextAttribute; candidate != null; candidate = candidate.NextAttribute)
{
if (candidate.IsNamespaceDeclaration)
{
return candidate;
}
}
return null;
}
// Lazily creates the singleton xmlns:xml declaration; CompareExchange
// guarantees that only one instance ever becomes visible under races.
static XAttribute GetXmlNamespaceDeclaration()
{
if (s_XmlNamespaceDeclaration == null)
{
System.Threading.Interlocked.CompareExchange(ref s_XmlNamespaceDeclaration, new XAttribute(XNamespace.Xmlns.GetName("xml"), xmlPrefixNamespace), null);
}
return s_XmlNamespaceDeclaration;
}
// True when some element between e (inclusive) and a's owner (exclusive)
// redeclares the same prefix, i.e. a is hidden at e.
static bool HasNamespaceDeclarationInScope(XAttribute a, XElement e)
{
XName name = a.Name;
for (XElement scope = e; scope != null && scope != a.GetParent(); scope = scope.Parent)
{
if (scope.Attribute(name) != null)
{
return true;
}
}
return false;
}
}
struct XPathEvaluator
{
/// <summary>
/// Evaluates an XPath expression against a node, returning either a lazy
/// sequence (for node-set results) or the result coerced to T.
/// </summary>
public object Evaluate<T>(XNode node, string expression, IXmlNamespaceResolver resolver) where T : class
{
XPathNavigator navigator = node.CreateNavigator();
object result = navigator.Evaluate(expression, resolver);
if (result is XPathNodeIterator)
{
return EvaluateIterator<T>((XPathNodeIterator)result);
}
if (!(result is T)) throw new InvalidOperationException(SR.Format(SR.InvalidOperation_UnexpectedEvaluation, result.GetType()));
return (T)result;
}
// Yields each selected node's underlying XObject, expanding a merged
// XPath text node back into its constituent XText siblings.
IEnumerable<T> EvaluateIterator<T>(XPathNodeIterator result)
{
foreach (XPathNavigator navigator in result)
{
object r = navigator.UnderlyingObject;
if (!(r is T)) throw new InvalidOperationException(SR.Format(SR.InvalidOperation_UnexpectedEvaluation, r.GetType()));
yield return (T)r;
XText t = r as XText;
if (t != null && t.GetParent() != null)
{
// The navigator represents a run of adjacent text nodes by the
// first XText of the run; also yield the rest of that run.
// BUGFIX: the original iterated t.GetParent().Nodes() from the
// parent's first child, which re-yielded t itself (a duplicate)
// and could yield text nodes of an earlier, unrelated run.
// Walk forward from t instead.
for (XNode node = t.NextNode; node != null; node = node.NextNode)
{
XText sibling = node as XText;
if (sibling == null) break;
yield return (T)(object)sibling;
}
}
}
}
}
/// <summary>
/// Extension methods
/// </summary>
public static class Extensions
{
/// <summary>
/// Creates an <see cref="XPathNavigator"/> for a given <see cref="XNode"/>
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <returns>An <see cref="XPathNavigator"/></returns>
public static XPathNavigator CreateNavigator(this XNode node)
{
return node.CreateNavigator(null);
}
/// <summary>
/// Creates an <see cref="XPathNavigator"/> for a given <see cref="XNode"/>
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="nameTable">The <see cref="XmlNameTable"/> to be used by
/// the <see cref="XPathNavigator"/></param>
/// <returns>An <see cref="XPathNavigator"/></returns>
public static XPathNavigator CreateNavigator(this XNode node, XmlNameTable nameTable)
{
// nameof keeps the reported parameter name correct under renames.
if (node == null) throw new ArgumentNullException(nameof(node));
if (node is XDocumentType) throw new ArgumentException(SR.Format(SR.Argument_CreateNavigator, XmlNodeType.DocumentType));
XText text = node as XText;
if (text != null)
{
// Text directly under a document is not a navigable XPath node.
if (text.GetParent() is XDocument) throw new ArgumentException(SR.Format(SR.Argument_CreateNavigator, XmlNodeType.Whitespace));
// Reposition onto the representative XText within the parent's content.
node = CalibrateText(text);
}
return new XNodeNavigator(node, nameTable);
}
/// <summary>
/// Evaluates an XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <returns>The result of evaluating the expression which can be typed as bool, double, string or
/// IEnumerable</returns>
public static object XPathEvaluate(this XNode node, string expression)
{
return node.XPathEvaluate(expression, null);
}
/// <summary>
/// Evaluates an XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <param name="resolver">A <see cref="IXmlNamespaceResolver"> for the namespace
/// prefixes used in the XPath expression</see></param>
/// <returns>The result of evaluating the expression which can be typed as bool, double, string or
/// IEnumerable</returns>
public static object XPathEvaluate(this XNode node, string expression, IXmlNamespaceResolver resolver)
{
if (node == null) throw new ArgumentNullException(nameof(node));
return new XPathEvaluator().Evaluate<object>(node, expression, resolver);
}
/// <summary>
/// Select an <see cref="XElement"/> using a XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <returns>An <see cref="XElement"> or null</see></returns>
public static XElement XPathSelectElement(this XNode node, string expression)
{
return node.XPathSelectElement(expression, null);
}
/// <summary>
/// Select an <see cref="XElement"/> using a XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <param name="resolver">A <see cref="IXmlNamespaceResolver"/> for the namespace
/// prefixes used in the XPath expression</param>
/// <returns>An <see cref="XElement"> or null</see></returns>
public static XElement XPathSelectElement(this XNode node, string expression, IXmlNamespaceResolver resolver)
{
return node.XPathSelectElements(expression, resolver).FirstOrDefault();
}
/// <summary>
/// Select a set of <see cref="XElement"/> using a XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <returns>An <see cref="IEnumerable<XElement>"/> corresponding to the resulting set of elements</returns>
public static IEnumerable<XElement> XPathSelectElements(this XNode node, string expression)
{
return node.XPathSelectElements(expression, null);
}
/// <summary>
/// Select a set of <see cref="XElement"/> using a XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <param name="resolver">A <see cref="IXmlNamespaceResolver"/> for the namespace
/// prefixes used in the XPath expression</param>
/// <returns>An <see cref="IEnumerable<XElement>"/> corresponding to the resulting set of elements</returns>
public static IEnumerable<XElement> XPathSelectElements(this XNode node, string expression, IXmlNamespaceResolver resolver)
{
if (node == null) throw new ArgumentNullException(nameof(node));
return (IEnumerable<XElement>)new XPathEvaluator().Evaluate<XElement>(node, expression, resolver);
}
// Returns the XText within the parent's content list that corresponds to
// n; detached text nodes are returned unchanged. Failing to find n in its
// own parent's content indicates tree corruption.
static XText CalibrateText(XText n)
{
XContainer parentNode = n.GetParent();
if (parentNode == null)
{
return n;
}
foreach (XNode node in parentNode.Nodes())
{
XText t = node as XText;
bool isTextNode = t != null;
if (isTextNode && node == n)
{
return t;
}
}
System.Diagnostics.Debug.Assert(false, "Parent node doesn't contain itself.");
return null;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
// Driver for the Avx2.Subtract test over Vector256<UInt64>: runs every
// scenario when AVX2 is available, otherwise verifies the intrinsic
// throws PlatformNotSupportedException. Load/LoadAligned variants are
// additionally gated on Avx.IsSupported.
private static void SubtractUInt64()
{
var test = new SimpleBinaryOpTest__SubtractUInt64();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (Avx.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (Avx.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (Avx.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (Avx.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (Avx.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__SubtractUInt64
{
// Owns pinned, over-allocated byte buffers for the two operands and the
// result so the pointer-based scenarios can read/write at a guaranteed
// alignment (16 or 32 bytes).
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(UInt64[] inArray1, UInt64[] inArray2, UInt64[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt64>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<UInt64>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt64>();
// Each buffer is alignment*2 bytes so an aligned window of the
// required size always exists after rounding the base pointer up.
if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
// Pin the buffers so their addresses are stable for the unsafe code.
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt64, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<UInt64, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
// Aligned views into the pinned buffers.
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
// Rounds buffer up to the next multiple of expectedAlignment (a power of two).
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
// Struct wrapper holding the two vector operands so scenarios can verify
// the intrinsic works with struct fields (direct and via pinned pointers).
private struct TestStruct
{
public Vector256<UInt64> _fld1;
public Vector256<UInt64> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
// Fill both fields with fresh random operands.
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
return testStruct;
}
// Exercises the intrinsic directly on struct fields.
public void RunStructFldScenario(SimpleBinaryOpTest__SubtractUInt64 testClass)
{
var result = Avx2.Subtract(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
// Exercises the intrinsic on struct fields loaded through pinned pointers.
public void RunStructFldScenario_Load(SimpleBinaryOpTest__SubtractUInt64 testClass)
{
fixed (Vector256<UInt64>* pFld1 = &_fld1)
fixed (Vector256<UInt64>* pFld2 = &_fld2)
{
var result = Avx2.Subtract(
Avx.LoadVector256((UInt64*)(pFld1)),
Avx.LoadVector256((UInt64*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
// Vector width in bytes for the AVX2 operation under test.
private static readonly int LargestVectorSize = 32;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<UInt64>>() / sizeof(UInt64);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector256<UInt64>>() / sizeof(UInt64);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt64>>() / sizeof(UInt64);
// Scratch arrays used to generate random operand data for the scenarios.
private static UInt64[] _data1 = new UInt64[Op1ElementCount];
private static UInt64[] _data2 = new UInt64[Op2ElementCount];
// Operands exercised via static fields (ClsVar scenarios).
private static Vector256<UInt64> _clsVar1;
private static Vector256<UInt64> _clsVar2;
// Operands exercised via instance fields (ClassFld scenarios).
private Vector256<UInt64> _fld1;
private Vector256<UInt64> _fld2;
// Pinned, aligned buffers backing the pointer-based scenarios.
private DataTable _dataTable;
// Initializes the static-field operands once with random data so the
// ClsVar scenarios have stable inputs for the process lifetime.
static SimpleBinaryOpTest__SubtractUInt64()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _clsVar1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _clsVar2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
}
// Populates the instance fields and the pinned DataTable buffers with
// fresh random operands for this test instance.
public SimpleBinaryOpTest__SubtractUInt64()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _fld1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _fld2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); }
_dataTable = new DataTable(_data1, _data2, new UInt64[RetElementCount], LargestVectorSize);
}
// True when the AVX2 instruction set is available on the current hardware.
public bool IsSupported => Avx2.IsSupported;
// Cleared by any scenario that observes an incorrect result.
public bool Succeeded { get; set; }
// Exercises Avx2.Subtract with operands read via Unsafe.Read (no
// alignment requirements, no explicit load instruction).
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Avx2.Subtract(
Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Exercises Avx2.Subtract with operands brought in via unaligned
// Avx.LoadVector256.
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Avx2.Subtract(
Avx.LoadVector256((UInt64*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((UInt64*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Exercises Avx2.Subtract with operands brought in via LoadAlignedVector256;
// the DataTable guarantees the required alignment.
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Avx2.Subtract(
Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Invokes Avx2.Subtract through reflection (operands via Unsafe.Read) to
// cover the indirect-call code path in the JIT.
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Avx2).GetMethod(nameof(Avx2.Subtract), new Type[] { typeof(Vector256<UInt64>), typeof(Vector256<UInt64>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt64>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Invokes Avx2.Subtract through reflection with unaligned-load operands.
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Avx2).GetMethod(nameof(Avx2.Subtract), new Type[] { typeof(Vector256<UInt64>), typeof(Vector256<UInt64>) })
.Invoke(null, new object[] {
Avx.LoadVector256((UInt64*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((UInt64*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt64>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Invokes Avx2.Subtract through reflection with aligned-load operands.
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Avx2).GetMethod(nameof(Avx2.Subtract), new Type[] { typeof(Vector256<UInt64>), typeof(Vector256<UInt64>) })
.Invoke(null, new object[] {
Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt64>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Exercises the intrinsic with operands passed directly from static fields.
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Avx2.Subtract(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
// Exercises the intrinsic with static-field operands pinned and loaded
// through Avx.LoadVector256.
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector256<UInt64>* pClsVar1 = &_clsVar1)
fixed (Vector256<UInt64>* pClsVar2 = &_clsVar2)
{
var result = Avx2.Subtract(
Avx.LoadVector256((UInt64*)(pClsVar1)),
Avx.LoadVector256((UInt64*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
// Exercises the intrinsic with operands held in locals (read via Unsafe.Read).
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray2Ptr);
var result = Avx2.Subtract(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
// Exercises the intrinsic with locals initialized via unaligned loads.
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = Avx.LoadVector256((UInt64*)(_dataTable.inArray1Ptr));
var op2 = Avx.LoadVector256((UInt64*)(_dataTable.inArray2Ptr));
var result = Avx2.Subtract(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
// Exercises the intrinsic with locals initialized via aligned loads.
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var op1 = Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray1Ptr));
var op2 = Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray2Ptr));
var result = Avx2.Subtract(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
// Exercises the intrinsic with operand fields of a locally created class instance.
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__SubtractUInt64();
var result = Avx2.Subtract(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
// Same as RunClassLclFldScenario, but the fields are pinned and loaded
// through Avx.LoadVector256.
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__SubtractUInt64();
fixed (Vector256<UInt64>* pFld1 = &test._fld1)
fixed (Vector256<UInt64>* pFld2 = &test._fld2)
{
var result = Avx2.Subtract(
Avx.LoadVector256((UInt64*)(pFld1)),
Avx.LoadVector256((UInt64*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
// Exercises the intrinsic with this instance's own operand fields.
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Avx2.Subtract(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
// Same as RunClassFldScenario, but the instance fields are pinned and
// loaded through Avx.LoadVector256.
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector256<UInt64>* pFld1 = &_fld1)
fixed (Vector256<UInt64>* pFld2 = &_fld2)
{
var result = Avx2.Subtract(
Avx.LoadVector256((UInt64*)(pFld1)),
Avx.LoadVector256((UInt64*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
// Exercises the intrinsic with fields of a locally created struct.
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Avx2.Subtract(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
// Same as RunStructLclFldScenario, but the struct fields are loaded
// through pointers (a stack-allocated struct needs no pinning).
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = Avx2.Subtract(
Avx.LoadVector256((UInt64*)(&test._fld1)),
Avx.LoadVector256((UInt64*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
// Delegates to TestStruct so the intrinsic runs against struct instance fields.
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
// Delegates to TestStruct for the pinned-pointer struct-field variant.
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
// On hardware without AVX2, verifies that invoking the intrinsic throws
// PlatformNotSupportedException rather than executing.
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector256<UInt64> op1, Vector256<UInt64> op2, void* result, [CallerMemberName] string method = "")
{
    // Flatten both vector operands and the raw result buffer into managed
    // arrays, then defer to the array-based validator.
    UInt64[] leftValues = new UInt64[Op1ElementCount];
    UInt64[] rightValues = new UInt64[Op2ElementCount];
    UInt64[] resultValues = new UInt64[RetElementCount];

    Unsafe.WriteUnaligned(ref Unsafe.As<UInt64, byte>(ref leftValues[0]), op1);
    Unsafe.WriteUnaligned(ref Unsafe.As<UInt64, byte>(ref rightValues[0]), op2);
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref resultValues[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

    ValidateResult(leftValues, rightValues, resultValues, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
    // Copy each unmanaged buffer into a managed array, then defer to the
    // array-based validator.
    UInt64[] leftValues = new UInt64[Op1ElementCount];
    UInt64[] rightValues = new UInt64[Op2ElementCount];
    UInt64[] resultValues = new UInt64[RetElementCount];

    Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref leftValues[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref rightValues[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref resultValues[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

    ValidateResult(leftValues, rightValues, resultValues, method);
}
private void ValidateResult(UInt64[] left, UInt64[] right, UInt64[] result, [CallerMemberName] string method = "")
{
    bool succeeded = true;

    // Every output lane must equal the wrap-around (unchecked) difference of
    // the matching input lanes. A single loop over all lanes is equivalent to
    // the usual "check lane 0, then the rest" split.
    for (var i = 0; i < RetElementCount; i++)
    {
        if ((ulong)(left[i] - right[i]) != result[i])
        {
            succeeded = false;
            break;
        }
    }

    if (!succeeded)
    {
        TestLibrary.TestFramework.LogInformation($"{nameof(Avx2)}.{nameof(Avx2.Subtract)}<UInt64>(Vector256<UInt64>, Vector256<UInt64>): {method} failed:");
        TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
        TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
        TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
        TestLibrary.TestFramework.LogInformation(string.Empty);

        Succeeded = false;
    }
}
}
}
| |
using System;
using System.Linq;
using System.Collections.Generic;
using GoogleMapsApi.Entities.Common;
using GoogleMapsApi.StaticMaps.Entities;
using GoogleMapsApi.StaticMaps.Enums;
namespace GoogleMapsApi.StaticMaps
{
/// <summary>
/// Creates a URL to google's static map according to properly filled up StaticMapsRequest
/// http://code.google.com/apis/maps/documentation/staticmaps/
/// </summary>
public class StaticMapsEngine
{
    protected static readonly string BaseUrl;

    // The only scale factors the Static Maps API accepts.
    private static readonly int[] ValidScales = { 1, 2, 4 };

    static StaticMapsEngine()
    {
        BaseUrl = @"maps.google.com/maps/api/staticmap";
    }

    /// <summary>
    /// Builds the full static-map URL for the given request.
    /// </summary>
    /// <param name="request">A populated request; <c>Size</c> is mandatory.</param>
    /// <returns>The complete http(s) URL for the static map image.</returns>
    /// <exception cref="ArgumentException">
    /// Thrown when <c>Scale</c> is not 1, 2 or 4, or when <c>Size</c> is unset.
    /// </exception>
    /// <exception cref="ArgumentOutOfRangeException">
    /// Thrown when an enum value on the request is not recognized.
    /// </exception>
    public string GenerateStaticMapURL(StaticMapRequest request)
    {
        string scheme = request.IsSSL ? "https://" : "http://";
        var parametersList = new QueryStringParametersList();
        if (!string.IsNullOrEmpty(request.ApiKey))
        {
            string apiKey = request.ApiKey;
            parametersList.Add("key", apiKey);
        }
        if (request.Center != null)
        {
            ILocationString center = request.Center;
            string centerLocation = center.LocationString;
            parametersList.Add("center", centerLocation);
        }
        if (request.Zoom != default)
        {
            parametersList.Add("zoom", request.Zoom.ToString());
        }
        if (request.Scale != default)
        {
            if (!ValidScales.Contains(request.Scale))
            {
                throw new ArgumentException("Scale is invalid; must be a value of 1, 2 or 4");
            }
            parametersList.Add("scale", request.Scale.ToString());
        }
        // Size is the only mandatory parameter of the Static Maps API.
        if (request.Size.Width != default || request.Size.Height != default)
        {
            ImageSize imageSize = request.Size;
            parametersList.Add("size", string.Format("{0}x{1}", imageSize.Width, imageSize.Height));
        }
        else
        {
            throw new ArgumentException("Size is invalid");
        }
        if (request.ImageFormat != default)
        {
            string format;
            switch (request.ImageFormat)
            {
                case ImageFormat.PNG8:
                    format = "png8";
                    break;
                case ImageFormat.PNG32:
                    format = "png32";
                    break;
                case ImageFormat.GIF:
                    format = "gif";
                    break;
                case ImageFormat.JPG:
                    format = "jpg";
                    break;
                case ImageFormat.JPG_baseline:
                    format = "jpg-baseline";
                    break;
                default:
                    throw new ArgumentOutOfRangeException("ImageFormat");
            }
            parametersList.Add("format", format);
        }
        if (request.MapType != null)
        {
            string type;
            switch (request.MapType)
            {
                case MapType.Roadmap:
                    type = "roadmap";
                    break;
                case MapType.Satellite:
                    type = "satellite";
                    break;
                case MapType.Terrain:
                    type = "terrain";
                    break;
                case MapType.Hybrid:
                    type = "hybrid";
                    break;
                default:
                    throw new ArgumentOutOfRangeException("MapType");
            }
            parametersList.Add("maptype", type);
        }
        if (request.Style != null)
        {
            MapStyle style = request.Style;
            var styleComponents = new List<string>();
            if (style.MapFeature != default)
            {
                string mapFeature;
                switch (style.MapFeature)
                {
                    case MapFeature.All:
                        mapFeature = "all";
                        break;
                    case MapFeature.Road:
                        mapFeature = "road";
                        break;
                    case MapFeature.Landscape:
                        mapFeature = "landscape";
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
                styleComponents.Add("feature:" + mapFeature);
            }
            if (style.MapElement != default)
            {
                string element;
                switch (style.MapElement)
                {
                    case MapElement.All:
                        element = "all";
                        break;
                    case MapElement.Geometry:
                        element = "geometry";
                        break;
                    case MapElement.Labels:
                        // BUG FIX: was "lables" — the API element name is "labels".
                        element = "labels";
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
                styleComponents.Add("element:" + element);
            }
            string hue = style.HUE;
            if (hue != null)
            {
                styleComponents.Add("hue:" + hue);
            }
            // Format float stylers with the invariant culture so the URL never
            // contains a locale-specific decimal comma.
            float? lightness = style.Lightness;
            if (lightness != null)
            {
                styleComponents.Add("lightness:" + lightness.Value.ToString(System.Globalization.CultureInfo.InvariantCulture));
            }
            float? saturation = style.Saturation;
            if (saturation != null)
            {
                styleComponents.Add("saturation:" + saturation.Value.ToString(System.Globalization.CultureInfo.InvariantCulture));
            }
            float? gamma = style.Gamma;
            if (gamma != null)
            {
                styleComponents.Add("gamma:" + gamma.Value.ToString(System.Globalization.CultureInfo.InvariantCulture));
            }
            bool inverseLightness = style.InverseLightness;
            if (inverseLightness)
            {
                // BUG FIX: was "inverse_lightnes:true" — the Static Maps styler
                // is named "invert_lightness".
                styleComponents.Add("invert_lightness:true");
            }
            MapVisibility mapVisibility = style.MapVisibility;
            if (mapVisibility != default)
            {
                string visibility;
                switch (mapVisibility)
                {
                    case MapVisibility.On:
                        visibility = "on";
                        break;
                    case MapVisibility.Off:
                        visibility = "off";
                        break;
                    case MapVisibility.Simplified:
                        visibility = "simplified";
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
                styleComponents.Add("visibility:" + visibility);
            }
            parametersList.Add("style", string.Join("|", styleComponents));
        }
        IList<Marker> markers = request.Markers;
        if (markers != null)
        {
            // One "markers" parameter is emitted per marker group: style parts
            // first, then the pipe-separated locations.
            foreach (Marker marker in markers)
            {
                var markerStyleParams = new List<string>();
                MarkerStyle markerStyle = marker.Style;
                if (markerStyle != null)
                {
                    if (string.IsNullOrWhiteSpace(markerStyle.Color))
                    {
                        throw new ArgumentException("Marker style color can't be empty");
                    }
                    markerStyleParams.Add("color:" + markerStyle.Color);
                    if (!string.IsNullOrWhiteSpace(markerStyle.Label))
                    {
                        markerStyleParams.Add("label:" + markerStyle.Label);
                    }
                    if (markerStyle.Size != default)
                    {
                        switch (markerStyle.Size)
                        {
                            case MarkerSize.Mid:
                                markerStyleParams.Add("size:mid");
                                break;
                            case MarkerSize.Tiny:
                                markerStyleParams.Add("size:tiny");
                                break;
                            case MarkerSize.Small:
                                markerStyleParams.Add("size:small");
                                break;
                            default:
                                throw new ArgumentOutOfRangeException();
                        }
                    }
                }
                string styleString = string.Join("|", markerStyleParams);
                string locations = string.Join("|", marker.Locations.Select(location => location.LocationString));
                parametersList.Add("markers", string.Format("{0}|{1}", styleString, locations));
            }
        }
        IList<Path> pathes = request.Pathes;
        if (pathes != null)
        {
            // One "path" parameter per path, in the same style|locations shape.
            foreach (Path path in pathes)
            {
                var pathStyleParams = new List<string>();
                PathStyle pathStyle = path.Style;
                if (pathStyle != null)
                {
                    if (string.IsNullOrWhiteSpace(pathStyle.Color))
                    {
                        throw new ArgumentException("Path style color can't be empty");
                    }
                    pathStyleParams.Add("color:" + pathStyle.Color);
                    if (!string.IsNullOrWhiteSpace(pathStyle.FillColor))
                    {
                        pathStyleParams.Add("fillcolor:" + pathStyle.FillColor);
                    }
                    if (pathStyle.Weight != default)
                    {
                        pathStyleParams.Add("weight:" + pathStyle.Weight);
                    }
                }
                string styleString = string.Join("|", pathStyleParams);
                string locations = string.Join("|", path.Locations.Select(location => location.LocationString));
                parametersList.Add("path", string.Format("{0}|{1}", styleString, locations));
            }
        }
        return scheme + BaseUrl + "?" + parametersList.GetQueryStringPostfix();
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyFormData
{
using Microsoft.Rest;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Formdata operations.
/// </summary>
public partial class Formdata : IServiceOperations<AutoRestSwaggerBATFormDataService>, IFormdata
{
    /// <summary>
    /// Initializes a new instance of the Formdata class.
    /// </summary>
    /// <param name='client'>
    /// Reference to the service client.
    /// </param>
    /// <exception cref="System.ArgumentNullException">
    /// Thrown when a required parameter is null
    /// </exception>
    public Formdata(AutoRestSwaggerBATFormDataService client)
    {
        if (client == null)
        {
            throw new System.ArgumentNullException("client");
        }
        Client = client;
    }

    /// <summary>
    /// Gets a reference to the AutoRestSwaggerBATFormDataService
    /// </summary>
    public AutoRestSwaggerBATFormDataService Client { get; private set; }

    /// <summary>
    /// Upload file
    /// </summary>
    /// <remarks>
    /// Sends the stream as one part of a multipart/form-data POST. On any
    /// non-200 status the request and response are disposed and an
    /// <see cref="ErrorException"/> carrying both wrappers is thrown.
    /// </remarks>
    /// <param name='fileContent'>
    /// File to upload.
    /// </param>
    /// <param name='fileName'>
    /// File name to upload. Name has to be spelled exactly as written here.
    /// </param>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="ErrorException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    /// <exception cref="System.ArgumentNullException">
    /// Thrown when a required parameter is null
    /// </exception>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    public async Task<HttpOperationResponse<Stream>> UploadFileWithHttpMessagesAsync(Stream fileContent, string fileName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        if (fileContent == null)
        {
            throw new ValidationException(ValidationRules.CannotBeNull, "fileContent");
        }
        if (fileName == null)
        {
            throw new ValidationException(ValidationRules.CannotBeNull, "fileName");
        }
        // Tracing
        bool _shouldTrace = ServiceClientTracing.IsEnabled;
        string _invocationId = null;
        if (_shouldTrace)
        {
            _invocationId = ServiceClientTracing.NextInvocationId.ToString();
            Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
            tracingParameters.Add("fileContent", fileContent);
            tracingParameters.Add("fileName", fileName);
            tracingParameters.Add("cancellationToken", cancellationToken);
            ServiceClientTracing.Enter(_invocationId, this, "UploadFile", tracingParameters);
        }
        // Construct URL
        var _baseUrl = Client.BaseUri.AbsoluteUri;
        var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "formdata/stream/uploadfile").ToString();
        // Create HTTP transport objects
        var _httpRequest = new System.Net.Http.HttpRequestMessage();
        System.Net.Http.HttpResponseMessage _httpResponse = null;
        _httpRequest.Method = new System.Net.Http.HttpMethod("POST");
        _httpRequest.RequestUri = new System.Uri(_url);
        // Set Headers (custom headers replace any default with the same key)
        if (customHeaders != null)
        {
            foreach(var _header in customHeaders)
            {
                if (_httpRequest.Headers.Contains(_header.Key))
                {
                    _httpRequest.Headers.Remove(_header.Key);
                }
                _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
            }
        }
        // Serialize Request
        string _requestContent = null;
        System.Net.Http.MultipartFormDataContent _multiPartContent = new System.Net.Http.MultipartFormDataContent();
        if (fileContent != null)
        {
            System.Net.Http.StreamContent _fileContent = new System.Net.Http.StreamContent(fileContent);
            _fileContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
            // For FileStream sources, propagate the on-disk name via Content-Disposition.
            FileStream _fileContentAsFileStream = fileContent as FileStream;
            if (_fileContentAsFileStream != null)
            {
                System.Net.Http.Headers.ContentDispositionHeaderValue _contentDispositionHeaderValue = new System.Net.Http.Headers.ContentDispositionHeaderValue("form-data");
                _contentDispositionHeaderValue.Name = "fileContent";
                _contentDispositionHeaderValue.FileName = _fileContentAsFileStream.Name;
                _fileContent.Headers.ContentDisposition = _contentDispositionHeaderValue;
            }
            _multiPartContent.Add(_fileContent, "fileContent");
        }
        if (fileName != null)
        {
            System.Net.Http.StringContent _fileName = new System.Net.Http.StringContent(fileName, System.Text.Encoding.UTF8);
            _multiPartContent.Add(_fileName, "fileName");
        }
        _httpRequest.Content = _multiPartContent;
        // Send Request
        if (_shouldTrace)
        {
            ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
        }
        cancellationToken.ThrowIfCancellationRequested();
        // ResponseHeadersRead so the body can be streamed back to the caller.
        _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, System.Net.Http.HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
        if (_shouldTrace)
        {
            ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
        }
        HttpStatusCode _statusCode = _httpResponse.StatusCode;
        cancellationToken.ThrowIfCancellationRequested();
        string _responseContent = null;
        if ((int)_statusCode != 200)
        {
            var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
            try
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
                if (_errorBody != null)
                {
                    ex.Body = _errorBody;
                }
            }
            catch (JsonException)
            {
                // Ignore the exception: the raw response text is still attached below.
            }
            ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
            ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
            if (_shouldTrace)
            {
                ServiceClientTracing.Error(_invocationId, ex);
            }
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw ex;
        }
        // Create Result
        var _result = new HttpOperationResponse<Stream>();
        _result.Request = _httpRequest;
        _result.Response = _httpResponse;
        // Deserialize Response
        if ((int)_statusCode == 200)
        {
            _result.Body = await _httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Exit(_invocationId, _result);
        }
        return _result;
    }

    /// <summary>
    /// Upload file
    /// </summary>
    /// <remarks>
    /// Sends the stream directly as the PUT request body
    /// (application/octet-stream), without multipart framing.
    /// </remarks>
    /// <param name='fileContent'>
    /// File to upload.
    /// </param>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="ErrorException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    public async Task<HttpOperationResponse<Stream>> UploadFileViaBodyWithHttpMessagesAsync(Stream fileContent, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        if (fileContent == null)
        {
            throw new ValidationException(ValidationRules.CannotBeNull, "fileContent");
        }
        // Tracing
        bool _shouldTrace = ServiceClientTracing.IsEnabled;
        string _invocationId = null;
        if (_shouldTrace)
        {
            _invocationId = ServiceClientTracing.NextInvocationId.ToString();
            Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
            tracingParameters.Add("fileContent", fileContent);
            tracingParameters.Add("cancellationToken", cancellationToken);
            ServiceClientTracing.Enter(_invocationId, this, "UploadFileViaBody", tracingParameters);
        }
        // Construct URL
        var _baseUrl = Client.BaseUri.AbsoluteUri;
        var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "formdata/stream/uploadfile").ToString();
        // Create HTTP transport objects
        var _httpRequest = new System.Net.Http.HttpRequestMessage();
        System.Net.Http.HttpResponseMessage _httpResponse = null;
        _httpRequest.Method = new System.Net.Http.HttpMethod("PUT");
        _httpRequest.RequestUri = new System.Uri(_url);
        // Set Headers (custom headers replace any default with the same key)
        if (customHeaders != null)
        {
            foreach(var _header in customHeaders)
            {
                if (_httpRequest.Headers.Contains(_header.Key))
                {
                    _httpRequest.Headers.Remove(_header.Key);
                }
                _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
            }
        }
        // Serialize Request.
        // fileContent was already validated non-null at method entry, so the
        // generated re-check that threw ArgumentNullException here was
        // unreachable and has been removed.
        string _requestContent = null;
        if (fileContent != Stream.Null)
        {
            _httpRequest.Content = new System.Net.Http.StreamContent(fileContent);
            _httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/octet-stream");
        }
        // Send Request
        if (_shouldTrace)
        {
            ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
        }
        cancellationToken.ThrowIfCancellationRequested();
        // ResponseHeadersRead so the body can be streamed back to the caller.
        _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, System.Net.Http.HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
        if (_shouldTrace)
        {
            ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
        }
        HttpStatusCode _statusCode = _httpResponse.StatusCode;
        cancellationToken.ThrowIfCancellationRequested();
        string _responseContent = null;
        if ((int)_statusCode != 200)
        {
            var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
            try
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
                if (_errorBody != null)
                {
                    ex.Body = _errorBody;
                }
            }
            catch (JsonException)
            {
                // Ignore the exception: the raw response text is still attached below.
            }
            ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
            ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
            if (_shouldTrace)
            {
                ServiceClientTracing.Error(_invocationId, ex);
            }
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw ex;
        }
        // Create Result
        var _result = new HttpOperationResponse<Stream>();
        _result.Request = _httpRequest;
        _result.Response = _httpResponse;
        // Deserialize Response
        if ((int)_statusCode == 200)
        {
            _result.Body = await _httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Exit(_invocationId, _result);
        }
        return _result;
    }
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace NPOI.SS.Format
{
using System;
using NPOI.SS.UserModel;
using System.Text.RegularExpressions;
using System.Collections.Generic;
using System.Drawing;
/**
* Format a value according to the standard Excel behavior. This "standard" is
* not explicitly documented by Microsoft, so the behavior is determined by
* experimentation; see the tests.
*
 * An Excel format has up to four parts, separated by semicolons. Each part
* specifies what to do with particular kinds of values, depending on the number
* of parts given:
*
* - One part (example: <c>[Green]#.##</c>)
* If the value is a number, display according to this one part (example: green text,
* with up to two decimal points). If the value is text, display it as is.
*
* - Two parts (example: <c>[Green]#.##;[Red]#.##</c>)
* If the value is a positive number or zero, display according to the first part (example: green
* text, with up to two decimal points); if it is a negative number, display
* according to the second part (example: red text, with up to two decimal
* points). If the value is text, display it as is.
*
* - Three parts (example: <c>[Green]#.##;[Black]#.##;[Red]#.##</c>)
* If the value is a positive number, display according to the first part (example: green text, with up to
* two decimal points); if it is zero, display according to the second part
* (example: black text, with up to two decimal points); if it is a negative
* number, display according to the third part (example: red text, with up to
* two decimal points). If the value is text, display it as is.
*
* - Four parts (example: <c>[Green]#.##;[Black]#.##;[Red]#.##;[@]</c>)
* If the value is a positive number, display according to the first part (example: green text,
* with up to two decimal points); if it is zero, display according to the
* second part (example: black text, with up to two decimal points); if it is a
* negative number, display according to the third part (example: red text, with
* up to two decimal points). If the value is text, display according to the
* fourth part (example: text in the cell's usual color, with the text value
* surround by brackets).
*
 * In addition to these, there is a general format that is used when no format
 * is specified. This formatting is represented by the {@link #GENERAL_FORMAT}
 * object.
*
* @author Ken Arnold, Industrious Media LLC
*/
public class CellFormat
{
    // The raw Excel format string this object was built from.
    private String format;
    // Format parts by role: positive numbers, zero, negative numbers, text.
    private CellFormatPart posNumFmt;
    private CellFormatPart zeroNumFmt;
    private CellFormatPart negNumFmt;
    private CellFormatPart textFmt;
    // How many semicolon-separated parts the format string contained (1-4).
    private int formatPartCount;
    private static readonly Regex ONE_PART = new Regex(CellFormatPart.FORMAT_PAT.ToString() + "(;|$)", RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace);
    private static readonly CellFormatPart DEFAULT_TEXT_FORMAT =
            new CellFormatPart("@");
    /*
     * Cells that cannot be formatted, e.g. cells that have a date or time
     * format and have an invalid date or time value, are displayed as 255
     * pound signs ("#").
     */
    private const string INVALID_VALUE_FOR_FORMAT =
            "###################################################" +
            "###################################################" +
            "###################################################" +
            "###################################################" +
            "###################################################";
    private const string QUOTE = "\"";
    private static readonly CellFormat GENERAL_FORMAT = new GeneralCellFormat();
    /**
     * Format a value as it would be were no format specified. This is also
     * used when the format specified is <tt>General</tt>.
     */
    public class GeneralCellFormat : CellFormat
    {
        public GeneralCellFormat()
            : base("General")
        {
        }
        public override CellFormatResult Apply(Object value)
        {
            String text = (new CellGeneralFormatter()).Format(value);
            return new CellFormatResult(true, text, Color.Empty);
        }
    }
    /** Maps a format string to its Parsed version for efficiencies sake. */
    private static Dictionary<String, CellFormat> formatCache =
            new Dictionary<String, CellFormat>();
    /**
     * Returns a {@link CellFormat} that applies the given format. Two calls
     * with the same format may or may not return the same object.
     *
     * @param format The format.
     *
     * @return A {@link CellFormat} that applies the given format.
     */
    public static CellFormat GetInstance(String format)
    {
        // Single TryGetValue lookup instead of the former ContainsKey + indexer
        // pair (two hash lookups for every cache hit).
        // NOTE(review): the cache is a plain static Dictionary; concurrent
        // first-time requests for the same format could race on Add — confirm
        // callers are single-threaded before relying on this.
        CellFormat fmt;
        if (!formatCache.TryGetValue(format, out fmt))
        {
            if (format.Equals("General") || format.Equals("@"))
                fmt = GENERAL_FORMAT;
            else
                fmt = new CellFormat(format);
            formatCache.Add(format, fmt);
        }
        return fmt;
    }
    /**
     * Creates a new object.
     *
     * @param format The format.
     */
    private CellFormat(String format)
    {
        this.format = format;
        MatchCollection mc = ONE_PART.Matches(format);
        List<CellFormatPart> parts = new List<CellFormatPart>();
        // Split the format string into its semicolon-separated parts; a part
        // that fails to parse is recorded as null so positions stay aligned.
        foreach (Match m in mc)
        {
            try
            {
                String valueDesc = m.Groups[0].Value;
                // Strip out the semicolon if it's there
                if (valueDesc.EndsWith(";"))
                    valueDesc = valueDesc.Substring(0, valueDesc.Length - 1);
                parts.Add(new CellFormatPart(valueDesc));
            }
            catch (Exception)
            {
                //CellFormatter.logger.Log(Level.WARNING,
                //        "Invalid format: " + CellFormatter.Quote(m.Group()), e);
                parts.Add(null);
            }
        }
        formatPartCount = parts.Count;
        // Assign the parts to their roles depending on how many were given;
        // see the class comment for the one/two/three/four part semantics.
        switch (formatPartCount)
        {
            case 1:
                posNumFmt = parts[0];
                negNumFmt = null;
                zeroNumFmt = null;
                textFmt = DEFAULT_TEXT_FORMAT;
                break;
            case 2:
                posNumFmt = parts[0];
                negNumFmt = parts[1];
                zeroNumFmt = null;
                textFmt = DEFAULT_TEXT_FORMAT;
                break;
            case 3:
                posNumFmt = parts[0];
                negNumFmt = parts[1];
                zeroNumFmt = parts[2];
                textFmt = DEFAULT_TEXT_FORMAT;
                break;
            case 4:
            default:
                posNumFmt = parts[0];
                negNumFmt = parts[1];
                zeroNumFmt = parts[2];
                textFmt = parts[3];
                break;
        }
    }
    /**
     * Returns the result of Applying the format to the given value. If the
     * value is a number (a type of {@link Number} object), the correct number
     * format type is chosen; otherwise it is considered a text object.
     *
     * @param value The value
     *
     * @return The result, in a {@link CellFormatResult}.
     */
    public virtual CellFormatResult Apply(Object value)
    {
        //if (value is Number) {
        if (NPOI.Util.Number.IsNumber(value))
        {
            double val;
            // NOTE(review): the TryParse result is ignored and parsing uses the
            // current culture, so an unparsable ToString() silently becomes 0.0
            // — confirm this round-trip is intended.
            double.TryParse(value.ToString(), out val);
            if (val < 0 &&
                    ((formatPartCount == 2
                            && !posNumFmt.HasCondition && !negNumFmt.HasCondition)
                    || (formatPartCount == 3 && !negNumFmt.HasCondition)
                    || (formatPartCount == 4 && !negNumFmt.HasCondition)))
            {
                // The negative number format has the negative formatting required,
                // e.g. minus sign or brackets, so pass a positive value so that
                // the default leading minus sign is not also output
                return negNumFmt.Apply(-val);
            }
            else
            {
                return GetApplicableFormatPart(val).Apply(val);
            }
        }
        else if (value is DateTime)
        {
            // Don't know (and can't get) the workbook date windowing (1900 or 1904)
            // so assume 1900 date windowing
            Double numericValue = DateUtil.GetExcelDate((DateTime)value);
            if (DateUtil.IsValidExcelDate(numericValue))
            {
                return GetApplicableFormatPart(numericValue).Apply(value);
            }
            else
            {
                throw new ArgumentException("value not a valid Excel date");
            }
        }
        else
        {
            return textFmt.Apply(value);
        }
    }
    /**
     * Returns the result of applying the format to the given date.
     *
     * @param date         The date.
     * @param numericValue The numeric value for the date.
     *
     * @return The result, in a {@link CellFormatResult}.
     */
    private CellFormatResult Apply(DateTime date, double numericValue)
    {
        return GetApplicableFormatPart(numericValue).Apply(date);
    }
    /**
     * Fetches the appropriate value from the cell, and returns the result of
     * Applying it to the appropriate format. For formula cells, the computed
     * value is what is used.
     *
     * @param c The cell.
     *
     * @return The result, in a {@link CellFormatResult}.
     */
    public CellFormatResult Apply(ICell c)
    {
        switch (UltimateType(c))
        {
            case CellType.Blank:
                return Apply("");
            case CellType.Boolean:
                return Apply(c.BooleanCellValue);
            case CellType.Numeric:
                Double value = c.NumericCellValue;
                if (GetApplicableFormatPart(value).CellFormatType == CellFormatType.DATE)
                {
                    if (DateUtil.IsValidExcelDate(value))
                    {
                        return Apply(c.DateCellValue, value);
                    }
                    else
                    {
                        // Invalid dates render as 255 pound signs, like Excel.
                        return Apply(INVALID_VALUE_FOR_FORMAT);
                    }
                }
                else
                {
                    return Apply(value);
                }
            case CellType.String:
                return Apply(c.StringCellValue);
            default:
                return Apply("?");
        }
    }
    /**
     * Returns the {@link CellFormatPart} that applies to the value. Result
     * depends on how many parts the cell format has, the cell value and any
     * conditions. The value must be a {@link Number}.
     *
     * @param value The value.
     * @return The {@link CellFormatPart} that applies to the value.
     */
    private CellFormatPart GetApplicableFormatPart(Object value)
    {
        //if (value is Number) {
        if (NPOI.Util.Number.IsNumber(value))
        {
            double val;
            double.TryParse(value.ToString(), out val);
            if (formatPartCount == 1)
            {
                if (!posNumFmt.HasCondition
                        || (posNumFmt.HasCondition && posNumFmt.Applies(val)))
                {
                    return posNumFmt;
                }
                else
                {
                    return new CellFormatPart("General");
                }
            }
            else if (formatPartCount == 2)
            {
                if ((!posNumFmt.HasCondition && val >= 0)
                        || (posNumFmt.HasCondition && posNumFmt.Applies(val)))
                {
                    return posNumFmt;
                }
                else if (!negNumFmt.HasCondition
                        || (negNumFmt.HasCondition && negNumFmt.Applies(val)))
                {
                    return negNumFmt;
                }
                else
                {
                    // Return ###...### (255 #s) to match Excel 2007 behaviour
                    return new CellFormatPart(QUOTE + INVALID_VALUE_FOR_FORMAT + QUOTE);
                }
            }
            else
            {
                if ((!posNumFmt.HasCondition && val > 0)
                        || (posNumFmt.HasCondition && posNumFmt.Applies(val)))
                {
                    return posNumFmt;
                }
                else if ((!negNumFmt.HasCondition && val < 0)
                        || (negNumFmt.HasCondition && negNumFmt.Applies(val)))
                {
                    return negNumFmt;
                    // Only the first two format parts can have conditions
                }
                else
                {
                    return zeroNumFmt;
                }
            }
        }
        else
        {
            throw new ArgumentException("value must be a Number");
        }
    }
    /**
     * Returns the ultimate cell type, following the results of formulas. If
     * the cell is a {@link Cell#CELL_TYPE_FORMULA}, this returns the result of
     * {@link Cell#getCachedFormulaResultType()}. Otherwise this returns the
     * result of {@link Cell#getCellType()}.
     *
     * @param cell The cell.
     *
     * @return The ultimate type of this cell.
     */
    public static CellType UltimateType(ICell cell)
    {
        CellType type = cell.CellType;
        if (type == CellType.Formula)
            return cell.CachedFormulaResultType;
        else
            return type;
    }
    /**
     * Returns <tt>true</tt> if the other object is a {@link CellFormat} object
     * with the same format.
     *
     * @param obj The other object.
     *
     * @return <tt>true</tt> if the two objects are Equal.
     */
    public override bool Equals(Object obj)
    {
        if (this == obj)
            return true;
        if (obj is CellFormat)
        {
            CellFormat that = (CellFormat)obj;
            return format.Equals(that.format);
        }
        return false;
    }
    /**
     * Returns a hash code for the format.
     *
     * @return A hash code for the format.
     */
    public override int GetHashCode()
    {
        return format.GetHashCode();
    }
    public override string ToString()
    {
        return format;
    }
}
}
| |
#region License
/*
* HttpListener.cs
*
* This code is derived from HttpListener.cs (System.Net) of Mono
* (http://www.mono-project.com).
*
* The MIT License
*
* Copyright (c) 2005 Novell, Inc. (http://www.novell.com)
* Copyright (c) 2012-2021 sta.blockhead
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#endregion
#region Authors
/*
* Authors:
* - Gonzalo Paniagua Javier <gonzalo@novell.com>
*/
#endregion
#region Contributors
/*
* Contributors:
* - Liryna <liryna.stark@gmail.com>
*/
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.Security.Cryptography.X509Certificates;
using System.Security.Principal;
using System.Threading;
// TODO: Logging.
namespace WebSocketSharp.Net
{
/// <summary>
/// Provides a simple, programmatically controlled HTTP listener.
/// </summary>
public sealed class HttpListener : IDisposable
{
#region Private Fields
private AuthenticationSchemes _authSchemes;
private Func<HttpListenerRequest, AuthenticationSchemes> _authSchemeSelector;
private string _certFolderPath;
private Queue<HttpListenerContext> _contextQueue;
private LinkedList<HttpListenerContext> _contextRegistry;
private object _contextRegistrySync;
private static readonly string _defaultRealm;
private bool _disposed;
private bool _ignoreWriteExceptions;
private volatile bool _listening;
private Logger _log;
private string _objectName;
private HttpListenerPrefixCollection _prefixes;
private string _realm;
private bool _reuseAddress;
private ServerSslConfiguration _sslConfig;
private Func<IIdentity, NetworkCredential> _userCredFinder;
private Queue<HttpListenerAsyncResult> _waitQueue;
#endregion
#region Static Constructor
static HttpListener ()
{
_defaultRealm = "SECRET AREA";
}
#endregion
#region Public Constructors
/// <summary>
/// Initializes a new instance of the <see cref="HttpListener"/> class.
/// </summary>
public HttpListener ()
{
_authSchemes = AuthenticationSchemes.Anonymous;
_contextQueue = new Queue<HttpListenerContext> ();
_contextRegistry = new LinkedList<HttpListenerContext> ();
_contextRegistrySync = ((ICollection) _contextRegistry).SyncRoot;
_log = new Logger ();
_objectName = GetType ().ToString ();
_prefixes = new HttpListenerPrefixCollection (this);
_waitQueue = new Queue<HttpListenerAsyncResult> ();
}
#endregion
#region Internal Properties
internal bool ReuseAddress {
get {
return _reuseAddress;
}
set {
_reuseAddress = value;
}
}
#endregion
#region Public Properties
/// <summary>
/// Gets or sets the scheme used to authenticate the clients.
/// </summary>
/// <value>
/// <para>
/// One of the <see cref="WebSocketSharp.Net.AuthenticationSchemes"/>
/// enum values.
/// </para>
/// <para>
/// It represents the scheme used to authenticate the clients.
/// </para>
/// <para>
/// The default value is
/// <see cref="WebSocketSharp.Net.AuthenticationSchemes.Anonymous"/>.
/// </para>
/// </value>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public AuthenticationSchemes AuthenticationSchemes {
get {
if (_disposed)
throw new ObjectDisposedException (_objectName);
return _authSchemes;
}
set {
if (_disposed)
throw new ObjectDisposedException (_objectName);
_authSchemes = value;
}
}
/// <summary>
/// Gets or sets the delegate called to select the scheme used to
/// authenticate the clients.
/// </summary>
/// <remarks>
/// <para>
/// If this property is set, the listener uses the authentication
/// scheme selected by the delegate for each request.
/// </para>
/// <para>
/// Or if this property is not set, the listener uses the value of
/// the <see cref="HttpListener.AuthenticationSchemes"/> property
/// as the authentication scheme for all requests.
/// </para>
/// </remarks>
/// <value>
/// <para>
/// A <c>Func<<see cref="HttpListenerRequest"/>,
/// <see cref="AuthenticationSchemes"/>></c> delegate or
/// <see langword="null"/> if not needed.
/// </para>
/// <para>
/// The delegate references the method used to select
/// an authentication scheme.
/// </para>
/// <para>
/// The default value is <see langword="null"/>.
/// </para>
/// </value>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public Func<HttpListenerRequest, AuthenticationSchemes> AuthenticationSchemeSelector {
get {
if (_disposed)
throw new ObjectDisposedException (_objectName);
return _authSchemeSelector;
}
set {
if (_disposed)
throw new ObjectDisposedException (_objectName);
_authSchemeSelector = value;
}
}
/// <summary>
/// Gets or sets the path to the folder in which stores the certificate
/// files used to authenticate the server on the secure connection.
/// </summary>
/// <remarks>
/// <para>
/// This property represents the path to the folder in which stores
/// the certificate files associated with each port number of added
/// URI prefixes.
/// </para>
/// <para>
/// A set of the certificate files is a pair of <port number>.cer
/// (DER) and <port number>.key (DER, RSA Private Key).
/// </para>
/// <para>
/// If this property is <see langword="null"/> or an empty string,
/// the result of <c>System.Environment.GetFolderPath (<see
/// cref="Environment.SpecialFolder.ApplicationData"/>)</c>
/// is used as the default path.
/// </para>
/// </remarks>
/// <value>
/// <para>
/// A <see cref="string"/> that represents the path to the folder
/// in which stores the certificate files.
/// </para>
/// <para>
/// The default value is <see langword="null"/>.
/// </para>
/// </value>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public string CertificateFolderPath {
get {
if (_disposed)
throw new ObjectDisposedException (_objectName);
return _certFolderPath;
}
set {
if (_disposed)
throw new ObjectDisposedException (_objectName);
_certFolderPath = value;
}
}
/// <summary>
/// Gets or sets a value indicating whether the listener returns
/// exceptions that occur when sending the response to the client.
/// </summary>
/// <value>
/// <para>
/// <c>true</c> if the listener should not return those exceptions;
/// otherwise, <c>false</c>.
/// </para>
/// <para>
/// The default value is <c>false</c>.
/// </para>
/// </value>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public bool IgnoreWriteExceptions {
get {
if (_disposed)
throw new ObjectDisposedException (_objectName);
return _ignoreWriteExceptions;
}
set {
if (_disposed)
throw new ObjectDisposedException (_objectName);
_ignoreWriteExceptions = value;
}
}
/// <summary>
/// Gets a value indicating whether the listener has been started.
/// </summary>
/// <value>
/// <c>true</c> if the listener has been started; otherwise, <c>false</c>.
/// </value>
public bool IsListening {
get {
return _listening;
}
}
/// <summary>
/// Gets a value indicating whether the listener can be used with
/// the current operating system.
/// </summary>
/// <value>
/// <c>true</c>.
/// </value>
public static bool IsSupported {
get {
return true;
}
}
/// <summary>
/// Gets the logging functions.
/// </summary>
/// <remarks>
/// <para>
/// The default logging level is <see cref="LogLevel.Error"/>.
/// </para>
/// <para>
/// If you would like to change it, you should set the <c>Log.Level</c>
/// property to any of the <see cref="LogLevel"/> enum values.
/// </para>
/// </remarks>
/// <value>
/// A <see cref="Logger"/> that provides the logging functions.
/// </value>
public Logger Log {
get {
return _log;
}
}
/// <summary>
/// Gets the URI prefixes handled by the listener.
/// </summary>
/// <value>
/// A <see cref="HttpListenerPrefixCollection"/> that contains the URI
/// prefixes.
/// </value>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public HttpListenerPrefixCollection Prefixes {
get {
if (_disposed)
throw new ObjectDisposedException (_objectName);
return _prefixes;
}
}
/// <summary>
/// Gets or sets the name of the realm associated with the listener.
/// </summary>
/// <remarks>
/// If this property is <see langword="null"/> or an empty string,
/// "SECRET AREA" will be used as the name of the realm.
/// </remarks>
/// <value>
/// <para>
/// A <see cref="string"/> that represents the name of the realm.
/// </para>
/// <para>
/// The default value is <see langword="null"/>.
/// </para>
/// </value>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public string Realm {
get {
if (_disposed)
throw new ObjectDisposedException (_objectName);
return _realm;
}
set {
if (_disposed)
throw new ObjectDisposedException (_objectName);
_realm = value;
}
}
/// <summary>
/// Gets the SSL configuration used to authenticate the server and
/// optionally the client for secure connection.
/// </summary>
/// <value>
/// A <see cref="ServerSslConfiguration"/> that represents the SSL
/// configuration for secure connection.
/// </value>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public ServerSslConfiguration SslConfiguration {
get {
if (_disposed)
throw new ObjectDisposedException (_objectName);
if (_sslConfig == null)
_sslConfig = new ServerSslConfiguration ();
return _sslConfig;
}
}
/// <summary>
/// Gets or sets a value indicating whether, when NTLM authentication is used,
/// the authentication information of first request is used to authenticate
/// additional requests on the same connection.
/// </summary>
/// <remarks>
/// This property is not currently supported and always throws
/// a <see cref="NotSupportedException"/>.
/// </remarks>
/// <value>
/// <c>true</c> if the authentication information of first request is used;
/// otherwise, <c>false</c>.
/// </value>
/// <exception cref="NotSupportedException">
/// Any use of this property.
/// </exception>
public bool UnsafeConnectionNtlmAuthentication {
get {
throw new NotSupportedException ();
}
set {
throw new NotSupportedException ();
}
}
/// <summary>
/// Gets or sets the delegate called to find the credentials for
/// an identity used to authenticate a client.
/// </summary>
/// <value>
/// <para>
/// A <c>Func<<see cref="IIdentity"/>,
/// <see cref="NetworkCredential"/>></c> delegate or
/// <see langword="null"/> if not needed.
/// </para>
/// <para>
/// It references the method used to find the credentials.
/// </para>
/// <para>
/// The default value is <see langword="null"/>.
/// </para>
/// </value>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public Func<IIdentity, NetworkCredential> UserCredentialsFinder {
get {
if (_disposed)
throw new ObjectDisposedException (_objectName);
return _userCredFinder;
}
set {
if (_disposed)
throw new ObjectDisposedException (_objectName);
_userCredFinder = value;
}
}
#endregion
#region Private Methods
private HttpListenerAsyncResult beginGetContext (
AsyncCallback callback, object state
)
{
lock (_contextRegistrySync) {
if (!_listening) {
var msg = _disposed
? "The listener is closed."
: "The listener is stopped.";
throw new HttpListenerException (995, msg);
}
var ares = new HttpListenerAsyncResult (callback, state);
if (_contextQueue.Count == 0) {
_waitQueue.Enqueue (ares);
}
else {
var ctx = _contextQueue.Dequeue ();
ares.Complete (ctx, true);
}
return ares;
}
}
private void cleanupContextQueue (bool force)
{
if (_contextQueue.Count == 0)
return;
if (force) {
_contextQueue.Clear ();
return;
}
var ctxs = _contextQueue.ToArray ();
_contextQueue.Clear ();
foreach (var ctx in ctxs) {
ctx.ErrorStatusCode = 503;
ctx.SendError ();
}
}
private void cleanupContextRegistry ()
{
var cnt = _contextRegistry.Count;
if (cnt == 0)
return;
var ctxs = new HttpListenerContext[cnt];
_contextRegistry.CopyTo (ctxs, 0);
_contextRegistry.Clear ();
foreach (var ctx in ctxs)
ctx.Connection.Close (true);
}
private void cleanupWaitQueue (string message)
{
if (_waitQueue.Count == 0)
return;
var aress = _waitQueue.ToArray ();
_waitQueue.Clear ();
foreach (var ares in aress) {
var ex = new HttpListenerException (995, message);
ares.Complete (ex);
}
}
private void close (bool force)
{
if (!_listening) {
_disposed = true;
return;
}
_listening = false;
cleanupContextQueue (force);
cleanupContextRegistry ();
var msg = "The listener is closed.";
cleanupWaitQueue (msg);
EndPointManager.RemoveListener (this);
_disposed = true;
}
private string getRealm ()
{
var realm = _realm;
return realm != null && realm.Length > 0 ? realm : _defaultRealm;
}
private AuthenticationSchemes selectAuthenticationScheme (
HttpListenerRequest request
)
{
var selector = _authSchemeSelector;
if (selector == null)
return _authSchemes;
try {
return selector (request);
}
catch {
return AuthenticationSchemes.None;
}
}
#endregion
#region Internal Methods
internal bool AuthenticateContext (HttpListenerContext context)
{
var req = context.Request;
var schm = selectAuthenticationScheme (req);
if (schm == AuthenticationSchemes.Anonymous)
return true;
if (schm == AuthenticationSchemes.None) {
context.ErrorStatusCode = 403;
context.ErrorMessage = "Authentication not allowed";
context.SendError ();
return false;
}
var realm = getRealm ();
var user = HttpUtility.CreateUser (
req.Headers["Authorization"],
schm,
realm,
req.HttpMethod,
_userCredFinder
);
var authenticated = user != null && user.Identity.IsAuthenticated;
if (!authenticated) {
context.SendAuthenticationChallenge (schm, realm);
return false;
}
context.User = user;
return true;
}
internal void CheckDisposed ()
{
if (_disposed)
throw new ObjectDisposedException (_objectName);
}
internal bool RegisterContext (HttpListenerContext context)
{
if (!_listening)
return false;
lock (_contextRegistrySync) {
if (!_listening)
return false;
context.Listener = this;
_contextRegistry.AddLast (context);
if (_waitQueue.Count == 0) {
_contextQueue.Enqueue (context);
}
else {
var ares = _waitQueue.Dequeue ();
ares.Complete (context, false);
}
return true;
}
}
internal void UnregisterContext (HttpListenerContext context)
{
lock (_contextRegistrySync)
_contextRegistry.Remove (context);
}
#endregion
#region Public Methods
/// <summary>
/// Shuts down the listener immediately.
/// </summary>
public void Abort ()
{
if (_disposed)
return;
lock (_contextRegistrySync) {
if (_disposed)
return;
close (true);
}
}
/// <summary>
/// Begins getting an incoming request asynchronously.
/// </summary>
/// <remarks>
/// <para>
/// This asynchronous operation must be completed by calling
/// the EndGetContext method.
/// </para>
/// <para>
/// Typically, the EndGetContext method is called by
/// <paramref name="callback"/>.
/// </para>
/// </remarks>
/// <returns>
/// An <see cref="IAsyncResult"/> that represents the status of
/// the asynchronous operation.
/// </returns>
/// <param name="callback">
/// An <see cref="AsyncCallback"/> delegate that references the method to
/// invoke when the asynchronous operation completes.
/// </param>
/// <param name="state">
/// An <see cref="object"/> that represents a user defined object to
/// pass to <paramref name="callback"/>.
/// </param>
/// <exception cref="InvalidOperationException">
/// <para>
/// This listener has no URI prefix on which listens.
/// </para>
/// <para>
/// -or-
/// </para>
/// <para>
/// This listener has not been started or is currently stopped.
/// </para>
/// </exception>
/// <exception cref="HttpListenerException">
/// This method is canceled.
/// </exception>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public IAsyncResult BeginGetContext (AsyncCallback callback, object state)
{
if (_disposed)
throw new ObjectDisposedException (_objectName);
if (_prefixes.Count == 0) {
var msg = "The listener has no URI prefix on which listens.";
throw new InvalidOperationException (msg);
}
if (!_listening) {
var msg = "The listener has not been started.";
throw new InvalidOperationException (msg);
}
return beginGetContext (callback, state);
}
/// <summary>
/// Shuts down the listener.
/// </summary>
public void Close ()
{
if (_disposed)
return;
lock (_contextRegistrySync) {
if (_disposed)
return;
close (false);
}
}
/// <summary>
/// Ends an asynchronous operation to get an incoming request.
/// </summary>
/// <remarks>
/// This method completes an asynchronous operation started by calling
/// the BeginGetContext method.
/// </remarks>
/// <returns>
/// A <see cref="HttpListenerContext"/> that represents a request.
/// </returns>
/// <param name="asyncResult">
/// An <see cref="IAsyncResult"/> instance obtained by calling
/// the BeginGetContext method.
/// </param>
/// <exception cref="ArgumentNullException">
/// <paramref name="asyncResult"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="ArgumentException">
/// <paramref name="asyncResult"/> was not obtained by calling
/// the BeginGetContext method.
/// </exception>
/// <exception cref="InvalidOperationException">
/// This method was already called for <paramref name="asyncResult"/>.
/// </exception>
/// <exception cref="HttpListenerException">
/// This method is canceled.
/// </exception>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public HttpListenerContext EndGetContext (IAsyncResult asyncResult)
{
if (_disposed)
throw new ObjectDisposedException (_objectName);
if (asyncResult == null)
throw new ArgumentNullException ("asyncResult");
var ares = asyncResult as HttpListenerAsyncResult;
if (ares == null) {
var msg = "A wrong IAsyncResult instance.";
throw new ArgumentException (msg, "asyncResult");
}
lock (ares.SyncRoot) {
if (ares.EndCalled) {
var msg = "This IAsyncResult instance cannot be reused.";
throw new InvalidOperationException (msg);
}
ares.EndCalled = true;
}
if (!ares.IsCompleted)
ares.AsyncWaitHandle.WaitOne ();
return ares.Context;
}
/// <summary>
/// Gets an incoming request.
/// </summary>
/// <remarks>
/// This method waits for an incoming request and returns when a request is
/// received.
/// </remarks>
/// <returns>
/// A <see cref="HttpListenerContext"/> that represents a request.
/// </returns>
/// <exception cref="InvalidOperationException">
/// <para>
/// This listener has no URI prefix on which listens.
/// </para>
/// <para>
/// -or-
/// </para>
/// <para>
/// This listener has not been started or is currently stopped.
/// </para>
/// </exception>
/// <exception cref="HttpListenerException">
/// This method is canceled.
/// </exception>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public HttpListenerContext GetContext ()
{
if (_disposed)
throw new ObjectDisposedException (_objectName);
if (_prefixes.Count == 0) {
var msg = "The listener has no URI prefix on which listens.";
throw new InvalidOperationException (msg);
}
if (!_listening) {
var msg = "The listener has not been started.";
throw new InvalidOperationException (msg);
}
var ares = beginGetContext (null, null);
ares.EndCalled = true;
if (!ares.IsCompleted)
ares.AsyncWaitHandle.WaitOne ();
return ares.Context;
}
/// <summary>
/// Starts receiving incoming requests.
/// </summary>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public void Start ()
{
if (_disposed)
throw new ObjectDisposedException (_objectName);
lock (_contextRegistrySync) {
if (_disposed)
throw new ObjectDisposedException (_objectName);
if (_listening)
return;
EndPointManager.AddListener (this);
_listening = true;
}
}
/// <summary>
/// Stops receiving incoming requests.
/// </summary>
/// <exception cref="ObjectDisposedException">
/// This listener has been closed.
/// </exception>
public void Stop ()
{
if (_disposed)
throw new ObjectDisposedException (_objectName);
lock (_contextRegistrySync) {
if (!_listening)
return;
_listening = false;
cleanupContextQueue (false);
cleanupContextRegistry ();
var msg = "The listener is stopped.";
cleanupWaitQueue (msg);
EndPointManager.RemoveListener (this);
}
}
#endregion
#region Explicit Interface Implementations
/// <summary>
/// Releases all resources used by the listener.
/// </summary>
void IDisposable.Dispose ()
{
if (_disposed)
return;
lock (_contextRegistrySync) {
if (_disposed)
return;
close (true);
}
}
#endregion
}
}
| |
#region Copyright & license notice
/*
* Copyright: Copyright (c) 2007 Amazon Technologies, Inc.
* License: Apache License, Version 2.0
*/
#endregion
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
namespace Amazon.WebServices.MechanicalTurk.Advanced
{
/// <summary>
/// Leaky bucket implementation of a request throttler
/// </summary>
public class LeakyBucketRequestThrottler : IRequestThrottler, IDisposable
{
private int curTokenCount = 0; // number of tokens currently in the bucket
private string endpoint; // endpoint associated with throttler
private int capacity = 0; // capacity of the bucket
private int ratePerSecond = 0; // refill rate of tokens per second
private System.Threading.Timer timerRefill;
// FIFO list of enqueued threads waiting for tokens to become available
private List<ManualResetEvent> queue = new List<ManualResetEvent>();
// current instances (endpoint->throttler)
private static Dictionary<string, LeakyBucketRequestThrottler> instances = new Dictionary<string, LeakyBucketRequestThrottler>();
/// <summary>
/// Returns a throttler for a specific service endpoint URL
/// </summary>
/// <param name="serviceEndpoint">URL of the mechanical turk service endpoint</param>
/// <param name="capacity">Number of requests the throttler permits all at once
/// (bucket capacity)</param>
/// <param name="rate">Number of requests the throttler allows per second
/// (average long term)</param>
/// <returns>A <see cref="IRequestThrottler"/> instance</returns>
public static LeakyBucketRequestThrottler GetInstance(string serviceEndpoint, int capacity, int rate)
{
if (serviceEndpoint == null)
{
throw new ArgumentNullException("serviceEndpoint", "Endpoint URL may not be null");
}
if (capacity <= 0)
{
throw new ArgumentException("Capacity must be bigger than zero", "capacity");
}
if (rate > capacity)
{
throw new ArgumentException("Rate must be bigger than capacity", "rate");
}
LeakyBucketRequestThrottler ret = null;
string key = string.Format("{0}{1}{2}", serviceEndpoint, capacity, rate);
if (instances.ContainsKey(key))
{
ret = instances[key];
}
else
{
lock (instances)
{
if (instances.ContainsKey(key))
{
ret = instances[key];
}
else
{
MTurkLog.Debug("Throttling requests to {0} (Capacity: {1}. Rate: {2}/sec)", serviceEndpoint, capacity, rate);
ret = new LeakyBucketRequestThrottler(serviceEndpoint, capacity, rate);
instances[key] = ret;
}
}
}
return ret;
}
/// <summary>
/// Throttles requests to the service endpoint (for sandbox)
/// </summary>
/// <param name="capacity">Number of requests permitted all at once (bucket capacity)</param>
/// <param name="rate">Number of requests allowed per second (average long term)</param>
/// <param name="serviceEndpoint">Web service endpoint for this throttler</param>
private LeakyBucketRequestThrottler(string serviceEndpoint, int capacity, int rate)
{
if (capacity <= 0)
{
throw new ArgumentException("Capacity must be bigger than zero", "capacity");
}
if (rate <= 0)
{
throw new ArgumentException("Rate must be bigger than zero", "rate");
}
this.capacity = (int)capacity;
this.ratePerSecond = (int)rate;
this.curTokenCount = this.capacity;
this.endpoint = serviceEndpoint;
timerRefill = new Timer(new TimerCallback(OnRefill), null, 1000, 1000);
}
/// <summary>
/// Refills the bucket and works the backlog in the order the requests came in
/// </summary>
private void OnRefill(object o)
{
// add tokens to the bucket
if (Add(ratePerSecond) < capacity)
{
MTurkLog.Debug("Refilled {0} tokens to throttle bucket (Current size: {1})", ratePerSecond, curTokenCount);
}
// work backlog in order
lock (queue)
{
if (queue.Count > 0)
{
int num = Math.Min(queue.Count, curTokenCount);
MTurkLog.Debug("Processing {0} throttled requests from backlog (Size: {1})", num, queue.Count);
for (int i = 0; i < num; i++)
{
if (Add(-1) != null)
{
// signal waiting thread to resume sending FIFO
queue[0].Set();
queue.RemoveAt(0);
}
}
}
}
}
/// <summary>
/// Returns null, if no tokens are available and throttling must start
/// </summary>
private int? Add(int i)
{
lock (queue)
{
curTokenCount += i;
if (curTokenCount < 0)
{
curTokenCount = 0;
return null; // indicate that throttling
}
else if (curTokenCount > capacity)
{
// cap to avoid overflow of bucket through refill timer
curTokenCount = capacity;
}
return curTokenCount;
}
}
/// <summary>
/// Starts a throttled request. If it can get a slice from the bucket, then it
/// can run immediately. Otherwise enqueue it and notify it once a slice becomes available.
/// </summary>
public void StartRequest()
{
if (Add(-1) == null)
{
// No more tokens available: enqueue thread
MTurkLog.Debug("Throttling request");
ManualResetEvent evt = new ManualResetEvent(false);
lock (queue)
{
queue.Add(evt);
}
evt.WaitOne();
//Thread.CurrentThread.Suspend();
MTurkLog.Debug("Released throttle on request");
}
}
/// <summary>
/// Returns pertinent information about the trottler configuration
/// (burst/capacity and refresh rate)
/// </summary>
public override string ToString()
{
return string.Format("LeakyBucketRequestThrottler for '{2}' (Burst: {0}, Rate {1})", this.capacity, this.ratePerSecond, this.endpoint);
}
#region IDisposable Members
/// <summary>
/// Disposes the throttler instance and any resources associated with it
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
private void Dispose(bool disposing)
{
if (disposing)
{
lock (instances)
{
string key = string.Format("{0}{1}{2}", endpoint, capacity, ratePerSecond);
LeakyBucketRequestThrottler throttler = instances[key];
MTurkLog.Debug("Disposing {0}", throttler);
if (timerRefill != null)
{
timerRefill.Dispose();
}
instances.Remove(key);
}
}
}
#endregion
}
}
| |
using Geofence.Plugin.Abstractions;
using System;
using System.Collections.Generic;
using Android.Util;
using Android.Gms.Common.Apis;
using Android.Gms.Common;
using Android.App;
using Android.Content;
using Android.OS;
using System.Linq;
using Android.Support.V4.App;
using Android.Media;
using Java.Lang;
using Android.Text;
using System.Runtime.Remoting.Messaging;
using Java.Interop;
using System.Collections.ObjectModel;
using Android.Gms.Location;
namespace Geofence.Plugin
{
/// <summary>
/// Implementation for Feature
/// </summary>
///
public class GeofenceImplementation : Java.Lang.Object, Geofence.Plugin.Abstractions.IGeofence, GoogleApiClient.IConnectionCallbacks, GoogleApiClient.IOnConnectionFailedListener, IResultCallback
{
internal const string GeoReceiverAction = "ACTION_RECEIVE_GEOFENCE";
private Dictionary<string,GeofenceCircularRegion> mRegions = GeofenceStore.SharedInstance.GetAll();
private Dictionary<string, GeofenceResult> mGeofenceResults = new Dictionary<string, GeofenceResult>();
private GeofenceLocation lastKnownGeofenceLocation;
private PendingIntent mGeofencePendingIntent;
private GoogleApiClient mGoogleApiClient;
// Defines the allowable request types
internal enum RequestType
{
Add,
Update,
Delete,
Clear,
Default
}
/// <summary>
/// Get all regions been monitored
/// </summary>
public IReadOnlyDictionary<string, GeofenceCircularRegion> Regions { get { return mRegions; } }
/// <summary>
/// Get geofence state change results.
/// </summary>
public IReadOnlyDictionary<string, GeofenceResult> GeofenceResults { get { return mGeofenceResults; } }
private IList<string> mRequestedRegionIdentifiers;
/// <summary>
/// Get last known location
/// </summary>
public GeofenceLocation LastKnownLocation { get { return lastKnownGeofenceLocation; } }
internal RequestType CurrentRequestType { get; set; }
/// <summary>
/// Checks if region are been monitored
/// </summary>
public bool IsMonitoring { get { return mRegions.Count > 0; } }
/// <summary>
/// Gets or sets a value indicating whether this <see cref="Geofence.Plugin.GeofenceImplementation"/> location has error.
/// </summary>
/// <value><c>true</c> if location has error; otherwise, <c>false</c>.</value>
public bool LocationHasError { get; set; }
public bool RequestNotificationPermission { get; set; }
public bool RequestLocationPermission { get; set; }
//IsMonitoring?RequestType.Add:
private PendingIntent GeofenceTransitionPendingIntent
{
get
{
// If the PendingIntent already exists
if (mGeofencePendingIntent == null)
{
//var intent = new Intent(Android.App.Application.Context, typeof(GeofenceBroadcastReceiver));
// intent.SetAction(string.Format("{0}.{1}", Android.App.Application.Context.PackageName, GeoReceiverAction));
var intent = new Intent(string.Format("{0}.{1}", Android.App.Application.Context.PackageName, GeoReceiverAction));
mGeofencePendingIntent = PendingIntent.GetBroadcast(Android.App.Application.Context, 0, intent, PendingIntentFlags.UpdateCurrent);
}
return mGeofencePendingIntent;
}
}
/// <summary>
/// Android Geofence plugin implementation
/// </summary>
public GeofenceImplementation()
{
//Check if location services are enabled
IsLocationEnabled((bool locationIsEnabled) => {
if(locationIsEnabled)
{
CurrentRequestType = RequestType.Default;
if(IsMonitoring)
{
StartMonitoring(Regions.Values.ToList());
System.Diagnostics.Debug.WriteLine(string.Format("{0} - {1}", CrossGeofence.Id, "Monitoring was restored"));
}
}
else
{
string message = string.Format("{0} - {1}", CrossGeofence.Id, "You need to enabled Location Services");
System.Diagnostics.Debug.WriteLine(message);
CrossGeofence.GeofenceListener.OnError(message);
}
});
}
/// <summary>
/// Asynchronously checks whether the device's location settings satisfy the
/// configured geofence priority. Invokes <paramref name="returnAction"/>
/// with the outcome; reports <c>false</c> when Google Play Services is
/// unavailable or no settings result comes back.
/// </summary>
/// <param name="returnAction">Callback receiving the enabled/disabled result.</param>
public void IsLocationEnabled(Action<bool> returnAction)
{
    InitializeGoogleAPI();
    if (mGoogleApiClient == null)
    {
        // Google Play Services unavailable: treat location as disabled.
        returnAction(false);
        return;
    }
    // Map the cross-platform priority onto the Play Services constants.
    var locationRequestPriority = LocationRequest.PriorityBalancedPowerAccuracy;
    switch (CrossGeofence.GeofencePriority)
    {
        case GeofencePriority.HighAccuracy:
            locationRequestPriority = LocationRequest.PriorityHighAccuracy;
            break;
        case GeofencePriority.LowAccuracy:
            locationRequestPriority = LocationRequest.PriorityLowPower;
            break;
        case GeofencePriority.LowestAccuracy:
            locationRequestPriority = LocationRequest.PriorityNoPower;
            break;
    }
    var locationRequest = new LocationRequest();
    locationRequest.SetPriority(locationRequestPriority);
    locationRequest.SetInterval(CrossGeofence.LocationUpdatesInterval);
    locationRequest.SetFastestInterval(CrossGeofence.FastestLocationUpdatesInterval);
    LocationSettingsRequest.Builder builder = new LocationSettingsRequest.Builder().AddLocationRequest(locationRequest);
    var pendingResult = LocationServices.SettingsApi.CheckLocationSettings(mGoogleApiClient, builder.Build());
    pendingResult.SetResultCallback((LocationSettingsResult locationSettingsResult) =>
    {
        if (locationSettingsResult != null)
        {
            returnAction(locationSettingsResult.Status.StatusCode <= CommonStatusCodes.Success);
        }
        else
        {
            // BUGFIX: previously a null result silently dropped the callback,
            // so the caller never received an answer. Report failure instead.
            returnAction(false);
        }
    });
}
/// <summary>
/// Starts monitoring the specified region. A region whose identifier is
/// already tracked is left untouched; monitoring is then (re)requested.
/// </summary>
/// <param name="region">The circular region to monitor.</param>
public void StartMonitoring(GeofenceCircularRegion region)
{
    bool alreadyTracked = mRegions.ContainsKey(region.Id);
    if (!alreadyTracked)
    {
        mRegions.Add(region.Id, region);
    }
    RequestMonitoringStart();
}
// Adds the pending geofence regions immediately when the Google API client
// is already connected; otherwise starts connecting and defers the add to
// the OnConnected callback via CurrentRequestType.
void RequestMonitoringStart()
{
    if (!mGoogleApiClient.IsConnected)
    {
        // Kick off a connection unless one is already in flight.
        if (!mGoogleApiClient.IsConnecting)
        {
            mGoogleApiClient.Connect();
        }
        // OnConnected will perform the deferred add.
        CurrentRequestType = RequestType.Add;
        return;
    }
    AddGeofences();
}
/// <summary>
/// Starts geofence monitoring on the specified regions, skipping any region
/// whose identifier is already tracked, then requests monitoring to begin.
/// </summary>
/// <param name="regions">The circular regions to monitor.</param>
public void StartMonitoring(IList<GeofenceCircularRegion> regions)
{
    foreach (var candidate in regions)
    {
        if (mRegions.ContainsKey(candidate.Id))
        {
            continue;
        }
        mRegions.Add(candidate.Id, candidate);
    }
    RequestMonitoringStart();
}
// Registers a placeholder result (transition still Unknown) for the given
// region identifier so later transition events can update it.
internal void AddGeofenceResult(string identifier)
{
    var pendingResult = new GeofenceResult
    {
        RegionId = identifier,
        Transition = GeofenceTransition.Unknown
    };
    mGeofenceResults.Add(identifier, pendingResult);
}
/// <summary>
/// Builds a Geofence for every stored region that has at least one
/// transition type enabled and registers them with the GeofencingApi in a
/// single request. Requires a connected Google API client; errors are
/// reported through the geofence listener instead of being thrown.
/// </summary>
public void AddGeofences()
{
try
{
List<Android.Gms.Location.IGeofence> geofenceList = new List<Android.Gms.Location.IGeofence>();
var regions = Regions.Values;
foreach (GeofenceCircularRegion region in regions)
{
// Combine the requested transition flags for this region.
int transitionTypes = 0;
if (region.NotifyOnStay)
{
transitionTypes |= Android.Gms.Location.Geofence.GeofenceTransitionDwell;
}
if (region.NotifyOnEntry)
{
transitionTypes |= Android.Gms.Location.Geofence.GeofenceTransitionEnter;
}
if (region.NotifyOnExit)
{
transitionTypes |= Android.Gms.Location.Geofence.GeofenceTransitionExit;
}
// Regions with no transitions selected are skipped entirely.
if (transitionTypes != 0)
{
geofenceList.Add(new Android.Gms.Location.GeofenceBuilder()
.SetRequestId(region.Id)
.SetCircularRegion(region.Latitude, region.Longitude, (float)region.Radius)
.SetLoiteringDelay((int)region.StayedInThresholdDuration.TotalMilliseconds)
//.SetNotificationResponsiveness(mNotificationResponsivness)
.SetExpirationDuration(Android.Gms.Location.Geofence.NeverExpire)
.SetTransitionTypes(transitionTypes)
.Build());
// Persist the region so monitoring can be restored after an app restart.
if (GeofenceStore.SharedInstance.Get(region.Id) == null)
{
GeofenceStore.SharedInstance.Save(region);
}
CrossGeofence.GeofenceListener.OnMonitoringStarted(region.Id);
}
}
if (geofenceList.Count > 0)
{
// InitialTriggerEnter fires an ENTER event for regions the device is already inside.
Android.Gms.Location.GeofencingRequest request = new Android.Gms.Location.GeofencingRequest.Builder().SetInitialTrigger(Android.Gms.Location.GeofencingRequest.InitialTriggerEnter).AddGeofences(geofenceList).Build();
Android.Gms.Location.LocationServices.GeofencingApi.AddGeofences(mGoogleApiClient, request, GeofenceTransitionPendingIntent).SetResultCallback(this);
CurrentRequestType = RequestType.Default;
}
}
catch (Java.Lang.Exception ex1)
{
// Java-side failures are reported through the listener, not rethrown.
string message = string.Format("{0} - Error: {1}", CrossGeofence.Id, ex1.ToString());
System.Diagnostics.Debug.WriteLine(message);
CrossGeofence.GeofenceListener.OnError(message);
}
catch (System.Exception ex2)
{
// Managed failures follow the same reporting path.
string message = string.Format("{0} - Error: {1}", CrossGeofence.Id, ex2.ToString());
System.Diagnostics.Debug.WriteLine(message);
CrossGeofence.GeofenceListener.OnError(message);
}
}
/// <summary>
/// Stops monitoring a single geofence region by delegating to the
/// list-based overload.
/// </summary>
/// <param name="regionIdentifier">Identifier of the region to stop monitoring.</param>
public void StopMonitoring(string regionIdentifier)
{
    var identifiers = new List<string> { regionIdentifier };
    StopMonitoring(identifiers);
}
// Finalizes a removal request: once no regions remain, notify the listener,
// stop location updates and disconnect the Google API client.
internal void OnMonitoringRemoval()
{
    if (mRegions.Count != 0)
    {
        return;
    }
    CrossGeofence.GeofenceListener.OnMonitoringStopped();
    StopLocationUpdates();
    mGoogleApiClient.Disconnect();
}
// Removes each identified region from local tracking and the persistent
// store, tells Play Services to stop those geofences, then checks whether
// monitoring can be shut down entirely.
private void RemoveGeofences(IList<string> regionIdentifiers)
{
    foreach (var regionId in regionIdentifiers)
    {
        // Drop from the in-memory dictionaries and the persistent store,
        // then tell the listener this region is no longer monitored.
        RemoveRegion(regionId);
        GeofenceStore.SharedInstance.Remove(regionId);
        CrossGeofence.GeofenceListener.OnMonitoringStopped(regionId);
    }
    Android.Gms.Location.LocationServices.GeofencingApi.RemoveGeofences(mGoogleApiClient, regionIdentifiers).SetResultCallback(this);
    // Tear everything down if no regions remain.
    OnMonitoringRemoval();
}
/// <summary>
/// Stops monitoring the specified geofence regions. When the Google API
/// client is not connected yet, the removal is deferred until OnConnected
/// fires.
/// </summary>
/// <param name="regionIdentifiers">Identifiers of the regions to stop monitoring.</param>
public void StopMonitoring(IList<string> regionIdentifiers)
{
    mRequestedRegionIdentifiers = regionIdentifiers;
    bool canRemoveNow = IsMonitoring && mGoogleApiClient.IsConnected;
    if (canRemoveNow)
    {
        RemoveGeofences(regionIdentifiers);
        return;
    }
    // Not connected yet: connect (unless already connecting) and let the
    // OnConnected callback perform the deferred delete.
    if (!mGoogleApiClient.IsConnecting)
    {
        mGoogleApiClient.Connect();
    }
    CurrentRequestType = RequestType.Delete;
}
/// <summary>
/// Stops monitoring all geofence regions. When the Google API client is not
/// connected yet, the clear operation is deferred until OnConnected fires.
/// </summary>
public void StopMonitoringAllRegions()
{
    if (IsMonitoring && mGoogleApiClient.IsConnected)
    {
        RemoveGeofences();
        return;
    }
    // Not connected yet: connect (unless already connecting) and let the
    // OnConnected callback perform the deferred clear.
    if (!mGoogleApiClient.IsConnecting)
    {
        mGoogleApiClient.Connect();
    }
    CurrentRequestType = RequestType.Clear;
}
// Clears ALL monitored regions: wipes the persistent store and in-memory
// state, asks Play Services to drop every geofence registered under our
// PendingIntent, stops location updates, disconnects the API client and
// finally notifies the listener that monitoring stopped.
private void RemoveGeofences()
{
GeofenceStore.SharedInstance.RemoveAll();
mRegions.Clear();
mGeofenceResults.Clear();
Android.Gms.Location.LocationServices.GeofencingApi.RemoveGeofences(mGoogleApiClient, GeofenceTransitionPendingIntent).SetResultCallback(this);
StopLocationUpdates();
mGoogleApiClient.Disconnect();
CrossGeofence.GeofenceListener.OnMonitoringStopped();
}
// Ensures the GoogleApiClient exists and is connecting/connected. Reports an
// error through the listener when Google Play Services is not available.
private void InitializeGoogleAPI()
{
    int availability = GoogleApiAvailability.Instance.IsGooglePlayServicesAvailable(Android.App.Application.Context);
    if (availability != ConnectionResult.Success)
    {
        string message = string.Format("{0} - {1}", CrossGeofence.Id, "Google Play services is unavailable.");
        System.Diagnostics.Debug.WriteLine(message);
        CrossGeofence.GeofenceListener.OnError(message);
        return;
    }
    if (mGoogleApiClient == null)
    {
        // Build the client once, wiring this instance as both the connection
        // callback and the connection-failure listener.
        mGoogleApiClient = new GoogleApiClient.Builder(Android.App.Application.Context).AddApi(Android.Gms.Location.LocationServices.API).AddConnectionCallbacks(this).AddOnConnectionFailedListener(this).Build();
        string message = string.Format("{0} - {1}", CrossGeofence.Id, "Google Play services is available.");
        System.Diagnostics.Debug.WriteLine(message);
    }
    if (!mGoogleApiClient.IsConnected)
    {
        mGoogleApiClient.Connect();
    }
}
/// <summary>
/// Google Play services connection-failure handler: logs the failure and
/// forwards it to the geofence listener.
/// </summary>
/// <param name="result">Connection result describing the failure.</param>
public void OnConnectionFailed(Android.Gms.Common.ConnectionResult result)
{
    string message = string.Format("{0} - {1} {2}", CrossGeofence.Id, "Connection to Google Play services failed with error code ", result.ErrorCode);
    System.Diagnostics.Debug.WriteLine(message);
    CrossGeofence.GeofenceListener.OnError(message);
}
// Updates the cached last-known location from an Android Location fix and
// notifies the listener. No-op when the fix is null.
internal void SetLastKnownLocation(Android.Locations.Location location)
{
    if (location != null)
    {
        if (lastKnownGeofenceLocation == null)
        {
            lastKnownGeofenceLocation = new GeofenceLocation();
        }
        lastKnownGeofenceLocation.Latitude = location.Latitude;
        lastKnownGeofenceLocation.Longitude = location.Longitude;
        // BUGFIX: Location.Time is UTC milliseconds since the Unix epoch.
        // The previous code anchored the epoch as DateTimeKind.Local and
        // truncated to whole seconds, producing a timestamp off by the UTC
        // offset. Anchor at the UTC epoch, keep millisecond precision, then
        // convert to local time.
        lastKnownGeofenceLocation.Date = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddMilliseconds(location.Time).ToLocalTime();
        CrossGeofence.GeofenceListener.OnLocationChanged(lastKnownGeofenceLocation);
    }
}
/// <summary>
/// Google Play services connected handler: publishes the last known
/// location, then executes whichever request (Add/Clear/Delete) was deferred
/// while the client was connecting, and resets the request type.
/// </summary>
/// <param name="connectionHint">Connection hint bundle (unused).</param>
public void OnConnected(Bundle connectionHint)
{
    var lastLocation = Android.Gms.Location.LocationServices.FusedLocationApi.GetLastLocation(mGoogleApiClient);
    SetLastKnownLocation(lastLocation);
    switch (CurrentRequestType)
    {
        case RequestType.Add:
            AddGeofences();
            StartLocationUpdates();
            break;
        case RequestType.Clear:
            RemoveGeofences();
            break;
        case RequestType.Delete:
            if (mRequestedRegionIdentifiers != null)
            {
                RemoveGeofences(mRequestedRegionIdentifiers);
            }
            break;
    }
    CurrentRequestType = RequestType.Default;
}
/// <summary>
/// Callback for GeofencingApi requests: maps the status code to a log
/// message and, on failure while monitoring, flags the error and notifies
/// the listener rather than force-stopping all geofences.
/// </summary>
/// <param name="result">Raw Java result; cast to <c>IResult</c>.</param>
public void OnResult(Java.Lang.Object result)
{
var res = result.JavaCast<IResult>();
int statusCode = res.Status.StatusCode;
string message = string.Empty;
switch (res.Status.StatusCode)
{
case Android.Gms.Location.GeofenceStatusCodes.SuccessCache:
case Android.Gms.Location.GeofenceStatusCodes.Success:
// A successful Add confirms monitoring started for every tracked region.
if (CurrentRequestType == RequestType.Add)
{
message = string.Format("{0} - {1}", CrossGeofence.Id, "Successfully added Geofence.");
foreach (GeofenceCircularRegion region in Regions.Values)
{
CrossGeofence.GeofenceListener.OnMonitoringStarted(region.Id);
}
}
else
{
message = string.Format("{0} - {1}", CrossGeofence.Id, "Geofence Update Received");
}
break;
case Android.Gms.Location.GeofenceStatusCodes.Error:
message = string.Format("{0} - {1}", CrossGeofence.Id, "Error adding Geofence.");
break;
case Android.Gms.Location.GeofenceStatusCodes.GeofenceTooManyGeofences:
message = string.Format("{0} - {1}", CrossGeofence.Id, "Too many geofences.");
break;
case Android.Gms.Location.GeofenceStatusCodes.GeofenceTooManyPendingIntents:
message = string.Format("{0} - {1}", CrossGeofence.Id, "Too many pending intents.");
break;
case Android.Gms.Location.GeofenceStatusCodes.GeofenceNotAvailable:
message = string.Format("{0} - {1}", CrossGeofence.Id, "Geofence not available.");
break;
}
System.Diagnostics.Debug.WriteLine(message);
if (statusCode != Android.Gms.Location.GeofenceStatusCodes.Success && statusCode != Android.Gms.Location.GeofenceStatusCodes.SuccessCache && IsMonitoring)
{
// Rather than force killing all running geofences, delegate action on geofence failures to the application.
// This lets the application decide to ignore the error, perform retry logic, stop monitoring as below, or any other behavior.
// StopMonitoringAllRegions();
((GeofenceImplementation)CrossGeofence.Current).LocationHasError = true;
if (!string.IsNullOrEmpty(message))
CrossGeofence.GeofenceListener.OnError(message);
}
}
/// <summary>
/// Connection-suspended handler: logs the cause and forwards the message to
/// the geofence listener.
/// </summary>
/// <param name="cause">Play Services suspension cause code.</param>
public void OnConnectionSuspended(int cause)
{
    string suspendedMessage = string.Format("{0} - {1} {2}", CrossGeofence.Id, "Connection to Google Play services suspended with error code ", cause);
    System.Diagnostics.Debug.WriteLine(suspendedMessage);
    CrossGeofence.GeofenceListener.OnError(suspendedMessage);
}
// Removes the region and any pending result for the given identifier from
// the in-memory dictionaries. Safe to call with an unknown identifier.
private void RemoveRegion(string regionIdentifier)
{
    // Dictionary.Remove is already a safe no-op (returning false) when the
    // key is absent, so the previous ContainsKey checks only caused a
    // redundant second lookup per dictionary.
    mRegions.Remove(regionIdentifier);
    mGeofenceResults.Remove(regionIdentifier);
}
// Registers for fused-location updates using the configured intervals and
// priority. Exceptions (e.g. location permission denied on Android M+) are
// reported through the listener instead of crashing the app.
internal void StartLocationUpdates()
{
Android.Gms.Location.LocationRequest mLocationRequest = new Android.Gms.Location.LocationRequest();
// Fall back to 30s/5s when the configured intervals are 0 (unset).
mLocationRequest.SetInterval(CrossGeofence.LocationUpdatesInterval == 0 ? 30000 : CrossGeofence.LocationUpdatesInterval);
mLocationRequest.SetFastestInterval(CrossGeofence.FastestLocationUpdatesInterval == 0 ? 5000 : CrossGeofence.FastestLocationUpdatesInterval);
// priorityType is only used for the debug log line below.
string priorityType = "Balanced Power";
switch (CrossGeofence.GeofencePriority)
{
case GeofencePriority.HighAccuracy:
priorityType = "High Accuracy";
mLocationRequest.SetPriority(Android.Gms.Location.LocationRequest.PriorityHighAccuracy);
break;
case GeofencePriority.LowAccuracy:
priorityType = "Low Accuracy";
mLocationRequest.SetPriority(Android.Gms.Location.LocationRequest.PriorityLowPower);
break;
case GeofencePriority.LowestAccuracy:
priorityType = "Lowest Accuracy";
mLocationRequest.SetPriority(Android.Gms.Location.LocationRequest.PriorityNoPower);
break;
case GeofencePriority.MediumAccuracy:
case GeofencePriority.AcceptableAccuracy:
default:
mLocationRequest.SetPriority(Android.Gms.Location.LocationRequest.PriorityBalancedPowerAccuracy);
break;
}
System.Diagnostics.Debug.WriteLine(string.Format("{0} - {1}: {2}", CrossGeofence.Id, "Priority set to", priorityType));
// Only apply a displacement filter when one was configured (> 0 meters).
if (CrossGeofence.SmallestDisplacement > 0)
{
mLocationRequest.SetSmallestDisplacement(CrossGeofence.SmallestDisplacement);
System.Diagnostics.Debug.WriteLine(string.Format("{0} - {1}: {2} meters", CrossGeofence.Id, "Location smallest displacement set to", CrossGeofence.SmallestDisplacement));
}
try
{
Android.Gms.Location.LocationServices.FusedLocationApi.RequestLocationUpdates(mGoogleApiClient, mLocationRequest, GeofenceLocationListener.SharedInstance);
}
catch(System.Exception e)
{
// Do not crash the app if permissions are disabled on Android Marshmallow
System.Diagnostics.Debug.WriteLine(e.Message);
CrossGeofence.GeofenceListener.OnError(e.Message);
}
}
// Unregisters the fused-location updates requested by StartLocationUpdates.
internal void StopLocationUpdates()
{
    var sharedListener = GeofenceLocationListener.SharedInstance;
    Android.Gms.Location.LocationServices.FusedLocationApi.RemoveLocationUpdates(mGoogleApiClient, sharedListener);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Kemel.Orm.Entity;
using Kemel.Orm.Providers;
using Kemel.Orm.Data;
using Kemel.Orm.Schema;
using Kemel.Orm.NQuery.Storage;
namespace Kemel.Orm.Base
{
/// <summary>
/// Generic data-access layer for <typeparamref name="TEtt"/> entities:
/// CRUD for single entities and lists, simple readers, query factories and
/// transaction management.
/// </summary>
/// <typeparam name="TEtt">Entity type handled by this DAL.</typeparam>
public class Dal<TEtt> : ITransactable
    where TEtt : EntityBase, new()
{
    private Provider prvCurrentProvider;

    /// <summary>
    /// Provider resolved once from the first generic argument of the
    /// immediate base type.
    /// NOTE(review): this assumes the concrete DAL derives directly from
    /// Dal&lt;TEtt&gt;; a deeper inheritance chain would resolve the wrong
    /// type argument — confirm against existing subclasses.
    /// </summary>
    public Provider CurrentProvider
    {
        get
        {
            if (prvCurrentProvider == null)
                prvCurrentProvider = Provider.GetProvider(this.GetType().BaseType.GetGenericArguments()[0]);
            return prvCurrentProvider;
        }
    }

    /// <summary>Ambient transaction used by this DAL's operations (may be null).</summary>
    public OrmTransaction Transaction { get; set; }

    private List<ITransactable> lstTransactables = null;

    /// <summary>Participants that receive this DAL's transaction when one is created.</summary>
    public List<ITransactable> Transactables
    {
        get
        {
            if (lstTransactables == null)
                lstTransactables = new List<ITransactable>();
            return lstTransactables;
        }
    }

    #region CUD

    #region Entity

    /// <summary>
    /// Persists the entity according to its state: Added =&gt; Insert,
    /// Deleted =&gt; DeleteById, Modified =&gt; Update. Entities in any other
    /// state are returned untouched.
    /// </summary>
    public virtual TEtt Save(TEtt ettEntity)
    {
        switch (ettEntity.EntityState)
        {
            case EntityItemState.Added:
                this.Insert(ettEntity);
                break;
            case EntityItemState.Deleted:
                this.DeleteById(ettEntity);
                break;
            case EntityItemState.Modified:
                this.Update(ettEntity);
                break;
        }
        return ettEntity;
    }

    /// <summary>
    /// Inserts the entity. When no ambient transaction exists a local one is
    /// created, committed on success and rolled back on failure. Identity
    /// values generated by the database are read back into the entity.
    /// </summary>
    public virtual TEtt Insert(TEtt ettEntity)
    {
        bool createTransaction = false;
        if (this.Transaction == null)
        {
            createTransaction = true;
            this.Transaction = this.CurrentProvider.GetConnection().CreateTransaction();
            this.Transaction.Begin();
        }
        try
        {
            Query query = CurrentProvider.EntityCrudBuilder.GetInsert<TEtt>(ettEntity);
            query.Execute(this.Transaction).NonQuery();
            ettEntity.EntityState = EntityItemState.Unchanged;
            if (TableSchema.FromEntity<TEtt>().IdentityColumns.Count > 0)
            {
                // Read the generated identity value back into the entity.
                Query getIdentity = CurrentProvider.EntityCrudBuilder.GetSelectIdentity<TEtt>();
                object value = getIdentity.Execute(this.Transaction).Scalar();
                Executer.SetIdentityValues<TEtt>(ettEntity, value);
            }
            if (createTransaction)
            {
                this.Transaction.Commit();
                this.Transaction = null;
            }
        }
        catch (Exception)
        {
            if (createTransaction)
            {
                this.Transaction.Rollback();
                this.Transaction = null;
            }
            // BUGFIX: "throw;" preserves the original stack trace; the
            // previous "throw ex;" reset it.
            throw;
        }
        return ettEntity;
    }

    /// <summary>Updates the entity and marks it Unchanged; returns the affected row count.</summary>
    public virtual int Update(TEtt ettEntity)
    {
        Query query = CurrentProvider.EntityCrudBuilder.GetUpdate<TEtt>(ettEntity);
        int rowsAffected = query.Execute(this.Transaction).NonQuery();
        ettEntity.EntityState = EntityItemState.Unchanged;
        return rowsAffected;
    }

    /// <summary>Deletes by full entity match and marks it Unchanged; returns the affected row count.</summary>
    public virtual int Delete(TEtt ettEntity)
    {
        Query query = CurrentProvider.EntityCrudBuilder.GetDelete<TEtt>(ettEntity);
        int rowsAffected = query.Execute(this.Transaction).NonQuery();
        ettEntity.EntityState = EntityItemState.Unchanged;
        return rowsAffected;
    }

    /// <summary>Deletes by the entity's key and marks it Unchanged; returns the affected row count.</summary>
    public virtual int DeleteById(TEtt ettEntity)
    {
        Query query = CurrentProvider.EntityCrudBuilder.GetDeleteById<TEtt>(ettEntity);
        int rowsAffected = query.Execute(this.Transaction).NonQuery();
        ettEntity.EntityState = EntityItemState.Unchanged;
        return rowsAffected;
    }

    /// <summary>
    /// Deletes by raw key value, filtering on the first primary-key column of
    /// the entity's table schema (no filter is applied when the table has no
    /// primary key). Returns the affected row count.
    /// </summary>
    public virtual int DeleteById(object id)
    {
        Query qry = StorageFactory.NQuery.Delete(this.CurrentProvider).Into<TEtt>();
        ColumnSchema[] columns = (qry.IntoTable.TableDefinition as TableSchema).GetPrimaryKeys();
        if (columns != null && columns.Length > 0)
        {
            qry.And(columns[0]).Equal(id);
        }
        int rowsAffected = qry.Execute().NonQuery();
        return rowsAffected;
    }

    #endregion

    #region List<TEtt>

    /// <summary>Saves each entity in the list according to its state.</summary>
    public virtual void Save(List<TEtt> lstEntity)
    {
        if (lstEntity.Count == 0)
            return;
        foreach (TEtt entity in lstEntity)
        {
            this.Save(entity);
        }
    }

    /// <summary>Bulk insert: builds the statement from the first entity and executes it for the list.</summary>
    public virtual int Insert(List<TEtt> lstEntity)
    {
        if (lstEntity.Count == 0)
            return 0;
        Query query = CurrentProvider.EntityCrudBuilder.GetInsert<TEtt>(lstEntity[0]);
        int rowsAffected = query.Execute(this.Transaction).NonQuery<TEtt>(lstEntity);
        return rowsAffected;
    }

    /// <summary>Bulk update: builds the statement from the first entity and executes it for the list.</summary>
    public virtual int Update(List<TEtt> lstEntity)
    {
        if (lstEntity.Count == 0)
            return 0;
        Query query = CurrentProvider.EntityCrudBuilder.GetUpdate<TEtt>(lstEntity[0]);
        int rowsAffected = query.Execute(this.Transaction).NonQuery<TEtt>(lstEntity);
        return rowsAffected;
    }

    /// <summary>Bulk delete by full entity match.</summary>
    public virtual int Delete(List<TEtt> lstEntity)
    {
        if (lstEntity.Count == 0)
            return 0;
        Query query = CurrentProvider.EntityCrudBuilder.GetDelete<TEtt>(lstEntity[0]);
        int rowsAffected = query.Execute(this.Transaction).NonQuery<TEtt>(lstEntity);
        return rowsAffected;
    }

    /// <summary>Bulk delete by key.</summary>
    public virtual int DeleteById(List<TEtt> lstEntity)
    {
        if (lstEntity.Count == 0)
            return 0;
        Query query = CurrentProvider.EntityCrudBuilder.GetDeleteById<TEtt>(lstEntity[0]);
        int rowsAffected = query.Execute(this.Transaction).NonQuery<TEtt>(lstEntity);
        return rowsAffected;
    }

    #endregion

    #endregion

    #region Read

    /// <summary>Reads the entity matching the given entity's key; throws when none exists.</summary>
    public virtual TEtt ReadById(TEtt ett)
    {
        Query qry = CurrentProvider.EntityCrudBuilder.GetSelectById<TEtt>(ett);
        return qry.Execute().List<TEtt>().First();
    }

    /// <summary>
    /// Reads the entity whose first primary-key column equals
    /// <paramref name="id"/>; throws when none exists.
    /// </summary>
    public virtual TEtt ReadById(object id)
    {
        Query qry = StorageFactory.NQuery.Select(this.CurrentProvider).From<TEtt>().AllColumns();
        ColumnSchema[] columns = (qry.Tables[0].TableDefinition as TableSchema).GetPrimaryKeys();
        if (columns != null && columns.Length > 0)
        {
            qry.And(columns[0]).Equal(id);
        }
        return qry.Execute().List<TEtt>().First();
    }

    /// <summary>
    /// Reads entities selected using the template entity (filter semantics
    /// are defined by EntityCrudBuilder.GetSelect).
    /// </summary>
    public virtual List<TEtt> ReadByField(TEtt ett)
    {
        Query qry = CurrentProvider.EntityCrudBuilder.GetSelect<TEtt>(ett);
        return qry.Execute().List<TEtt>();
    }

    /// <summary>Reads every entity of the table.</summary>
    public virtual List<TEtt> ReadAll()
    {
        Query qry = CurrentProvider.EntityCrudBuilder.GetSelect<TEtt>();
        return qry.Execute().List<TEtt>();
    }

    #endregion

    /// <summary>Creates a transaction (shared with all Transactables) and begins it.</summary>
    public void BeginTransaction()
    {
        this.CreateTransaction();
        this.Transaction.Begin();
    }

    /// <summary>
    /// Creates a new transaction on the current provider's connection and
    /// propagates it to every registered transactable participant.
    /// </summary>
    public OrmTransaction CreateTransaction()
    {
        OrmConnection connection = this.CurrentProvider.GetConnection();
        this.Transaction = connection.CreateTransaction();
        foreach (ITransactable item in this.Transactables)
        {
            item.Transaction = this.Transaction;
        }
        return this.Transaction;
    }

    #region Query

    /// <summary>
    /// Initialize Select query.
    /// </summary>
    /// <returns>A Select query bound to the current provider.</returns>
    protected Query Select()
    {
        return StorageFactory.NQuery.Select(this.CurrentProvider);
    }

    /// <summary>
    /// Initialize Insert query.
    /// </summary>
    /// <returns>An Insert query bound to the current provider.</returns>
    protected Query Insert()
    {
        return StorageFactory.NQuery.Insert(this.CurrentProvider);
    }

    /// <summary>
    /// Initialize Update query.
    /// </summary>
    /// <returns>An Update query bound to the current provider.</returns>
    protected Query Update()
    {
        return StorageFactory.NQuery.Update(this.CurrentProvider);
    }

    /// <summary>
    /// Initialize Delete query.
    /// </summary>
    /// <returns>A Delete query bound to the current provider.</returns>
    protected Query Delete()
    {
        return StorageFactory.NQuery.Delete(this.CurrentProvider);
    }

    /// <summary>
    /// Initialize Procedure query.
    /// </summary>
    /// <returns>A Procedure query bound to the current provider.</returns>
    protected Query Procedure(string procedureName)
    {
        return StorageFactory.NQuery.Procedure(procedureName, this.CurrentProvider);
    }

    /// <summary>
    /// Initialize Procedure query.
    /// NOTE(review): unlike the other factories this overload does not pass
    /// CurrentProvider — confirm that is intentional.
    /// </summary>
    /// <returns>A Procedure query for the entity type.</returns>
    protected Query Procedure<TEntity>()
        where TEntity : EntityBase
    {
        return StorageFactory.NQuery.Procedure<TEntity>();
    }

    #endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Data.Common;
using System.Data.ProviderBase;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using System.Diagnostics.CodeAnalysis;
using System.Transactions;
using Microsoft.SqlServer.Server;
using System.Reflection;
using System.IO;
using System.Globalization;
using System.Security;
namespace System.Data.SqlClient
{
public sealed partial class SqlConnection : DbConnection, ICloneable
{
private bool _AsyncCommandInProgress;
// SQLStatistics support
internal SqlStatistics _statistics;
private bool _collectstats;
private bool _fireInfoMessageEventOnUserErrors; // False by default
// root task associated with current async invocation
private Tuple<TaskCompletionSource<DbConnectionInternal>, Task> _currentCompletion;
private SqlCredential _credential;
private string _connectionString;
private int _connectRetryCount;
private string _accessToken; // Access Token to be used for token based authententication
// connection resiliency
private readonly object _reconnectLock = new object();
internal Task _currentReconnectionTask;
private Task _asyncWaitingForReconnection; // current async task waiting for reconnection in non-MARS connections
private Guid _originalConnectionId = Guid.Empty;
private CancellationTokenSource _reconnectionCancellationSource;
internal SessionData _recoverySessionData;
internal bool _suppressStateChangeForReconnection;
private int _reconnectCount;
// diagnostics listener
private static readonly DiagnosticListener s_diagnosticListener = new DiagnosticListener(SqlClientDiagnosticListenerExtensions.DiagnosticListenerName);
// Transient Fault handling flag. This is needed to convey to the downstream mechanism of connection establishment, if Transient Fault handling should be used or not
// The downstream handling of Connection open is the same for idle connection resiliency. Currently we want to apply transient fault handling only to the connections opened
// using SqlConnection.Open() method.
internal bool _applyTransientFaultHandling = false;
// Constructs a connection from a connection string. The string is assigned
// first so the parsed ConnectionOptions exist before properties are cached.
public SqlConnection(string connectionString) : this()
{
ConnectionString = connectionString; // setting connection string first so that ConnectionOption is available
CacheConnectionStringProperties();
}
// Constructs a connection from a connection string plus an explicit
// SqlCredential. The credential cannot be combined with a connection string
// that itself carries a clear user id/password or integrated security.
public SqlConnection(string connectionString, SqlCredential credential) : this()
{
ConnectionString = connectionString;
if (credential != null)
{
// The following checks are necessary as setting Credential property will call CheckAndThrowOnInvalidCombinationOfConnectionStringAndSqlCredential
// CheckAndThrowOnInvalidCombinationOfConnectionStringAndSqlCredential will throw InvalidOperationException rather than ArgumentException
// Need to call setter on Credential property rather than setting _credential directly as pool groups need to be checked
SqlConnectionString connectionOptions = (SqlConnectionString)ConnectionOptions;
if (UsesClearUserIdOrPassword(connectionOptions))
{
throw ADP.InvalidMixedArgumentOfSecureAndClearCredential();
}
if (UsesIntegratedSecurity(connectionOptions))
{
throw ADP.InvalidMixedArgumentOfSecureCredentialAndIntegratedSecurity();
}
Credential = credential;
}
// else
// credential == null: we should not set "Credential" as this will do additional validation check and
// checking pool groups which is not necessary. All necessary operation is already done by calling "ConnectionString = connectionString"
CacheConnectionStringProperties();
}
// Clone constructor: copies the connection string, credential (with a
// read-only copy of the SecureString password) and access token from an
// existing connection.
private SqlConnection(SqlConnection connection)
{
GC.SuppressFinalize(this);
CopyFrom(connection);
_connectionString = connection._connectionString;
if (connection._credential != null)
{
// The password must be copied and frozen before reuse in a new credential.
SecureString password = connection._credential.Password.Copy();
password.MakeReadOnly();
_credential = new SqlCredential(connection._credential.UserId, password);
}
_accessToken = connection._accessToken;
CacheConnectionStringProperties();
}
// This method will be called once the connection string is set or changed.
// It caches frequently-read values (currently the connect retry count) from
// the parsed connection options.
private void CacheConnectionStringProperties()
{
    if (ConnectionOptions is SqlConnectionString connString)
    {
        _connectRetryCount = connString.ConnectRetryCount;
    }
}
//
// PUBLIC PROPERTIES
//
// Starts/stops collection of statistics data and verifies the current state.
//
// devnote: start/stop should not be performed using a property since it
// requires execution of code, but the shape is kept for compatibility.
//
// Enabling on an open connection creates a SqlStatistics instance if needed,
// stamps the open time and attaches it to the parser; disabling detaches it
// and stamps the close time.
//
public bool StatisticsEnabled
{
    get => _collectstats;
    set
    {
        if (value)
        {
            // start: only wire statistics to the parser while the connection
            // is open; otherwise just record the flag for later.
            if (ConnectionState.Open == State)
            {
                if (null == _statistics)
                {
                    _statistics = new SqlStatistics();
                    ADP.TimerCurrent(out _statistics._openTimestamp);
                }
                Debug.Assert(Parser != null, "Where's the parser?");
                Parser.Statistics = _statistics;
            }
        }
        else if (null != _statistics)
        {
            // stop: detach from the parser and record the close timestamp.
            if (ConnectionState.Open == State)
            {
                TdsParser parser = Parser;
                Debug.Assert(parser != null, "Where's the parser?");
                parser.Statistics = null;
                ADP.TimerCurrent(out _statistics._closeTimestamp);
            }
        }
        _collectstats = value;
    }
}
// Tracks whether an asynchronous command is currently executing on this connection.
internal bool AsyncCommandInProgress
{
    get { return _AsyncCommandInProgress; }
    set { _AsyncCommandInProgress = value; }
}
// Does this connection use Integrated Security? (false when no options parsed)
private bool UsesIntegratedSecurity(SqlConnectionString opt)
{
    return opt != null && opt.IntegratedSecurity;
}
// Does this connection use the old style of clear userID or Password in the
// connection string? (false when no options parsed)
private bool UsesClearUserIdOrPassword(SqlConnectionString opt)
{
    if (opt == null)
    {
        return false;
    }
    return !string.IsNullOrEmpty(opt.UserID) || !string.IsNullOrEmpty(opt.Password);
}
// Transaction binding behavior parsed from the connection string.
internal SqlConnectionString.TransactionBindingEnum TransactionBinding
{
get => ((SqlConnectionString)ConnectionOptions).TransactionBinding;
}
// Type system version parsed from the connection string.
internal SqlConnectionString.TypeSystem TypeSystem
{
get => ((SqlConnectionString)ConnectionOptions).TypeSystemVersion;
}
// Assembly version associated with the configured type system version.
internal Version TypeSystemAssemblyVersion
{
get => ((SqlConnectionString)ConnectionOptions).TypeSystemAssemblyVersion;
}
// Connect retry interval (seconds) parsed from the connection string.
internal int ConnectRetryInterval
{
get => ((SqlConnectionString)ConnectionOptions).ConnectRetryInterval;
}
// Connection string. Setting first validates the new value against any
// previously assigned credential or access token, so an invalid combination
// never becomes observable state.
public override string ConnectionString
{
get
{
return ConnectionString_Get();
}
set
{
// Parse and validate before applying: a credential forbids clear
// user id/password or integrated security keys; an access token has
// its own conflicting-key rules.
if (_credential != null || _accessToken != null)
{
SqlConnectionString connectionOptions = new SqlConnectionString(value);
if (_credential != null)
{
CheckAndThrowOnInvalidCombinationOfConnectionStringAndSqlCredential(connectionOptions);
}
else
{
CheckAndThrowOnInvalidCombinationOfConnectionOptionAndAccessToken(connectionOptions);
}
}
ConnectionString_Set(new SqlConnectionPoolKey(value, _credential, _accessToken));
_connectionString = value; // Change _connectionString value only after value is validated
CacheConnectionStringProperties();
}
}
// Connect timeout (seconds) from the connection string, or the driver
// default when no connection string has been parsed yet.
public override int ConnectionTimeout
{
    get
    {
        SqlConnectionString constr = (SqlConnectionString)ConnectionOptions;
        if (constr != null)
        {
            return constr.ConnectTimeout;
        }
        return SqlConnectionString.DEFAULT.Connect_Timeout;
    }
}
// AccessToken: To be used for token based authentication.
// The token is hidden (getter returns null) once the connection is opened
// unless "Persist Security Info" is true, and cannot be set after use.
public string AccessToken
{
get
{
// When a connection is connecting or is ever opened, make AccessToken available only if "Persist Security Info" is set to true
// otherwise, return null
SqlConnectionString connectionOptions = (SqlConnectionString)UserConnectionOptions;
return InnerConnection.ShouldHidePassword && connectionOptions != null && !connectionOptions.PersistSecurityInfo ? null : _accessToken;
}
set
{
// If a connection is connecting or is ever opened, AccessToken cannot be set
if (!InnerConnection.AllowSetConnectionString)
{
throw ADP.OpenConnectionPropertySet("AccessToken", InnerConnection.State);
}
if (value != null)
{
// Check if the usage of AccessToken has any conflict with the keys used in connection string and credential
CheckAndThrowOnInvalidCombinationOfConnectionOptionAndAccessToken((SqlConnectionString)ConnectionOptions);
}
// Need to call ConnectionString_Set to do proper pool group check
ConnectionString_Set(new SqlConnectionPoolKey(_connectionString, credential: _credential, accessToken: value));
_accessToken = value;
}
}
// Current database. When the connection is open, the inner connection is
// asked for its current catalog (it may have been changed server-side);
// otherwise the Initial Catalog from the connection string is returned.
public override string Database
{
    get
    {
        if (InnerConnection is SqlInternalConnection innerConnection)
        {
            return innerConnection.CurrentDatabase;
        }
        SqlConnectionString constr = (SqlConnectionString)ConnectionOptions;
        return constr != null ? constr.InitialCatalog : SqlConnectionString.DEFAULT.Initial_Catalog;
    }
}
// Current data source. When the connection is open, the inner connection is
// asked for its current data source; otherwise the value from the
// connection string (or the driver default) is returned.
public override string DataSource
{
    get
    {
        if (InnerConnection is SqlInternalConnection innerConnection)
        {
            return innerConnection.CurrentDataSource;
        }
        SqlConnectionString constr = (SqlConnectionString)ConnectionOptions;
        return constr != null ? constr.DataSource : SqlConnectionString.DEFAULT.Data_Source;
    }
}
// Current packet size. When the connection is open, the inner TDS connection
// is asked for its negotiated packet size (it may differ from the requested
// one); otherwise the value from the connection string (or the driver
// default) is returned.
public int PacketSize
{
    get
    {
        if (InnerConnection is SqlInternalConnectionTds innerConnection)
        {
            return innerConnection.PacketSize;
        }
        SqlConnectionString constr = (SqlConnectionString)ConnectionOptions;
        return constr != null ? constr.PacketSize : SqlConnectionString.DEFAULT.Packet_Size;
    }
}
// Client connection id of the inner TDS connection. While a reconnection is
// still in flight the id of the original (broken) connection is reported;
// Guid.Empty when the connection was never opened.
public Guid ClientConnectionId
{
    get
    {
        if (InnerConnection is SqlInternalConnectionTds innerConnection)
        {
            return innerConnection.ClientConnectionId;
        }
        Task reconnectTask = _currentReconnectionTask;
        if (reconnectTask != null && !reconnectTask.IsCompleted)
        {
            return _originalConnectionId;
        }
        return Guid.Empty;
    }
}
// Server version reported by the open TDS connection.
public override string ServerVersion
{
get => GetOpenTdsConnection().ServerVersion;
}
// Current connection state. While a reconnection task is in flight the connection
// still reports Open, hiding the transient drop from callers.
public override ConnectionState State
{
get
{
Task reconnectTask = _currentReconnectionTask;
if (reconnectTask != null && !reconnectTask.IsCompleted)
{
return ConnectionState.Open;
}
return InnerConnection.State;
}
}
// Statistics object for this connection; null when statistics collection is disabled.
internal SqlStatistics Statistics
{
get => _statistics;
}
// Workstation identifier sent to the server at login.
public string WorkstationId
{
get
{
// If not supplied by the user, the default value is the MachineName
// Note: In Longhorn you'll be able to rename a machine without
// rebooting. Therefore, don't cache this machine name.
SqlConnectionString constr = (SqlConnectionString)ConnectionOptions;
string result = constr?.WorkstationId ?? Environment.MachineName;
return result;
}
}
// SQL authentication credential (user id + SecureString password) used instead of
// placing the user id/password in the connection string.
public SqlCredential Credential
{
get
{
SqlCredential result = _credential;
// When a connection is connecting or is ever opened, make credential available only if "Persist Security Info" is set to true
// otherwise, return null
SqlConnectionString connectionOptions = (SqlConnectionString)UserConnectionOptions;
if (InnerConnection.ShouldHidePassword && connectionOptions != null && !connectionOptions.PersistSecurityInfo)
{
result = null;
}
return result;
}
set
{
// If a connection is connecting or is ever opened, user id/password cannot be set
if (!InnerConnection.AllowSetConnectionString)
{
throw ADP.OpenConnectionPropertySet(nameof(Credential), InnerConnection.State);
}
// check if the usage of credential has any conflict with the keys used in connection string
if (value != null)
{
CheckAndThrowOnInvalidCombinationOfConnectionStringAndSqlCredential((SqlConnectionString)ConnectionOptions);
// A credential and an access token are mutually exclusive authentication mechanisms.
if (_accessToken != null)
{
throw ADP.InvalidMixedUsageOfCredentialAndAccessToken();
}
}
_credential = value;
// Need to call ConnectionString_Set to do proper pool group check
ConnectionString_Set(new SqlConnectionPoolKey(_connectionString, _credential, accessToken: _accessToken));
}
}
// CheckAndThrowOnInvalidCombinationOfConnectionStringAndSqlCredential: check if the usage of credential has any conflict
// with the keys used in connection string
// If there is any conflict, it throws InvalidOperationException
// This is used in the setter of ConnectionString and Credential properties.
private void CheckAndThrowOnInvalidCombinationOfConnectionStringAndSqlCredential(SqlConnectionString connectionOptions)
{
// Clear-text User ID / Password keywords conflict with a SqlCredential.
if (UsesClearUserIdOrPassword(connectionOptions))
{
throw ADP.InvalidMixedUsageOfSecureAndClearCredential();
}
// Integrated Security=true conflicts with SQL authentication via SqlCredential.
if (UsesIntegratedSecurity(connectionOptions))
{
throw ADP.InvalidMixedUsageOfSecureCredentialAndIntegratedSecurity();
}
}
// CheckAndThrowOnInvalidCombinationOfConnectionOptionAndAccessToken: check if the usage of AccessToken has any conflict
// with the keys used in connection string and credential
// If there is any conflict, it throws InvalidOperationException
// This is to be used setter of ConnectionString and AccessToken properties
private void CheckAndThrowOnInvalidCombinationOfConnectionOptionAndAccessToken(SqlConnectionString connectionOptions)
{
// Clear-text User ID / Password keywords conflict with token-based authentication.
if (UsesClearUserIdOrPassword(connectionOptions))
{
throw ADP.InvalidMixedUsageOfAccessTokenAndUserIDPassword();
}
// Integrated Security=true conflicts with token-based authentication.
if (UsesIntegratedSecurity(connectionOptions))
{
throw ADP.InvalidMixedUsageOfAccessTokenAndIntegratedSecurity();
}
// Check if the usage of AccessToken has the conflict with credential
if (_credential != null)
{
throw ADP.InvalidMixedUsageOfCredentialAndAccessToken();
}
}
// Factory used by DbConnection infrastructure to create provider-specific objects.
protected override DbProviderFactory DbProviderFactory
{
get => SqlClientFactory.Instance;
}
// SqlCredential: Pair User Id and password in SecureString which are to be used for SQL authentication
//
// PUBLIC EVENTS
//
// Raised for server informational messages (severity below TdsEnums.MIN_ERROR_CLASS);
// see OnError/OnInfoMessage below for how messages are routed here.
public event SqlInfoMessageEventHandler InfoMessage;
// When true, user errors raise the InfoMessage event instead of throwing.
public bool FireInfoMessageEventOnUserErrors
{
get => _fireInfoMessageEventOnUserErrors;
set => _fireInfoMessageEventOnUserErrors = value;
}
// Approx. number of times that the internal connection has been reconnected
// (incremented unchecked in ReconnectAsync, so it may wrap).
internal int ReconnectCount
{
get => _reconnectCount;
}
// Set during reconnection (see ReconnectAsync) so TryOpen replaces the inner connection instead of reusing it.
internal bool ForceNewConnection { get; set; }
// Suppresses StateChange notifications while a connection-resiliency reconnect is
// closing/reopening the inner connection, so users don't observe the transient drop.
protected override void OnStateChange(StateChangeEventArgs stateChange)
{
if (!_suppressStateChangeForReconnection)
{
base.OnStateChange(stateChange);
}
}
//
// PUBLIC METHODS
//
// Begins an unnamed local transaction at the default isolation level.
public new SqlTransaction BeginTransaction()
{
// this is just a delegate. The actual method tracks executiontime
return BeginTransaction(IsolationLevel.Unspecified, null);
}
// Begins an unnamed local transaction at the given isolation level.
public new SqlTransaction BeginTransaction(IsolationLevel iso)
{
// this is just a delegate. The actual method tracks executiontime
return BeginTransaction(iso, null);
}
// Begins a named local transaction at the default isolation level.
public SqlTransaction BeginTransaction(string transactionName)
{
// Use transaction names only on the outermost pair of nested
// BEGIN...COMMIT or BEGIN...ROLLBACK statements. Transaction names
// are ignored for nested BEGIN's. The only way to rollback a nested
// transaction is to have a save point from a SAVE TRANSACTION call.
return BeginTransaction(IsolationLevel.Unspecified, transactionName);
}
// DbConnection plumbing: delegates to the strongly-typed BeginTransaction overload.
[SuppressMessage("Microsoft.Reliability", "CA2004:RemoveCallsToGCKeepAlive")]
protected override DbTransaction BeginDbTransaction(IsolationLevel isolationLevel)
{
DbTransaction transaction = BeginTransaction(isolationLevel);
// InnerConnection doesn't maintain a ref on the outer connection (this) and
// subsequently leaves open the possibility that the outer connection could be GC'ed before the SqlTransaction
// is fully hooked up (leaving a DbTransaction with a null connection property). Ensure that this is reachable
// until the completion of BeginTransaction with KeepAlive
GC.KeepAlive(this);
return transaction;
}
// Begins a local transaction; waits out any pending reconnection first and retries
// once if connection resiliency restored the connection mid-begin.
public SqlTransaction BeginTransaction(IsolationLevel iso, string transactionName)
{
WaitForPendingReconnection();
SqlStatistics statistics = null;
try
{
statistics = SqlStatistics.StartTimer(Statistics);
SqlTransaction transaction;
bool isFirstAttempt = true;
do
{
transaction = GetOpenTdsConnection().BeginSqlTransaction(iso, transactionName, isFirstAttempt); // do not reconnect twice
Debug.Assert(isFirstAttempt || !transaction.InternalTransaction.ConnectionHasBeenRestored, "Restored connection on non-first attempt");
isFirstAttempt = false;
} while (transaction.InternalTransaction.ConnectionHasBeenRestored);
// The GetOpenConnection line above doesn't keep a ref on the outer connection (this),
// and it could be collected before the inner connection can hook it to the transaction, resulting in
// a transaction with a null connection property. Use GC.KeepAlive to ensure this doesn't happen.
GC.KeepAlive(this);
return transaction;
}
finally
{
SqlStatistics.StopTimer(statistics);
}
}
// Switches the current database, repairing the inner connection first if
// connection resiliency is enabled.
public override void ChangeDatabase(string database)
{
SqlStatistics statistics = null;
RepairInnerConnection();
try
{
statistics = SqlStatistics.StartTimer(Statistics);
InnerConnection.ChangeDatabase(database);
}
finally
{
SqlStatistics.StopTimer(statistics);
}
}
// Empties every connection pool owned by the SqlClient connection factory.
public static void ClearAllPools() => SqlConnectionFactory.SingletonInstance.ClearAllPools();
// Empties the pool associated with the given connection. A connection that never
// had a connection string applied has no pool, so nothing is done in that case.
public static void ClearPool(SqlConnection connection)
{
    ADP.CheckArgumentNull(connection, nameof(connection));

    DbConnectionOptions options = connection.UserConnectionOptions;
    if (options == null)
    {
        return;
    }
    SqlConnectionFactory.SingletonInstance.ClearPool(connection);
}
// Closes the inner (physical/pooled) connection without touching outer-connection state.
private void CloseInnerConnection()
{
// CloseConnection() now handles the lock
// The SqlInternalConnectionTds is set to OpenBusy during close, once this happens the cast below will fail and
// the command will no longer be cancelable. It might be desirable to be able to cancel the close operation, but this is
// outside of the scope of Whidbey RTM. See (SqlCommand::Cancel) for other lock.
InnerConnection.CloseConnection(this, ConnectionFactory);
}
// Closes the connection: cancels any in-flight reconnection and pending async open,
// closes the inner connection, and emits diagnostic Before/After/Error events when
// the connection was actually open.
public override void Close()
{
ConnectionState previousState = State;
Guid operationId = default(Guid);
Guid clientConnectionId = default(Guid);
// during the call to Dispose() there is a redundant call to
// Close(). because of this, the second time Close() is invoked the
// connection is already in a closed state. this doesn't seem to be a
// problem except for logging, as we'll get duplicate Before/After/Error
// log entries
if (previousState != ConnectionState.Closed)
{
operationId = s_diagnosticListener.WriteConnectionCloseBefore(this);
// we want to cache the ClientConnectionId for After/Error logging, as when the connection
// is closed then we will lose this identifier
//
// note: caching this is only for diagnostics logging purposes
clientConnectionId = ClientConnectionId;
}
SqlStatistics statistics = null;
Exception e = null;
try
{
statistics = SqlStatistics.StartTimer(Statistics);
Task reconnectTask = _currentReconnectionTask;
if (reconnectTask != null && !reconnectTask.IsCompleted)
{
// Abort the reconnection attempt; Close wins.
CancellationTokenSource cts = _reconnectionCancellationSource;
if (cts != null)
{
cts.Cancel();
}
AsyncHelper.WaitForCompletion(reconnectTask, 0, null, rethrowExceptions: false); // we do not need to deal with possible exceptions in reconnection
if (State != ConnectionState.Open)
{// if we cancelled before the connection was opened
OnStateChange(DbConnectionInternal.StateChangeClosed);
}
}
CancelOpenAndWait();
CloseInnerConnection();
GC.SuppressFinalize(this);
if (null != Statistics)
{
ADP.TimerCurrent(out _statistics._closeTimestamp);
}
}
catch (Exception ex)
{
// Capture for diagnostics, then rethrow without disturbing the stack trace.
e = ex;
throw;
}
finally
{
SqlStatistics.StopTimer(statistics);
// we only want to log this if the previous state of the
// connection is open, as that's the valid use-case
if (previousState != ConnectionState.Closed)
{
if (e != null)
{
s_diagnosticListener.WriteConnectionCloseError(operationId, clientConnectionId, this, e);
}
else
{
s_diagnosticListener.WriteConnectionCloseAfter(operationId, clientConnectionId, this);
}
}
}
}
// Creates a new SqlCommand with no command text, bound to this connection.
public new SqlCommand CreateCommand() => new SqlCommand(null, this);
// Provider-specific dispose hook: drops secrets and, on finalization of a
// non-pooled connection, releases the parser state object's callback handle.
private void DisposeMe(bool disposing)
{
_credential = null;
_accessToken = null;
if (!disposing)
{
// For non-pooled connections we need to make sure that if the SqlConnection was not closed,
// then we release the GCHandle on the stateObject to allow it to be GCed
// For pooled connections, we will rely on the pool reclaiming the connection
var innerConnection = (InnerConnection as SqlInternalConnectionTds);
if ((innerConnection != null) && (!innerConnection.ConnectionOptions.Pooling))
{
var parser = innerConnection.Parser;
if ((parser != null) && (parser._physicalStateObj != null))
{
parser._physicalStateObj.DecrementPendingCallbacks(release: false);
}
}
}
}
// Opens the connection synchronously, wrapping the attempt in statistics timing
// and diagnostic Before/After/Error events.
public override void Open()
{
Guid operationId = s_diagnosticListener.WriteConnectionOpenBefore(this);
PrepareStatisticsForNewConnection();
SqlStatistics statistics = null;
Exception e = null;
try
{
statistics = SqlStatistics.StartTimer(Statistics);
// TryOpen(null) means synchronous open; a pending result here is an internal error.
if (!TryOpen(null))
{
throw ADP.InternalError(ADP.InternalErrorCode.SynchronousConnectReturnedPending);
}
}
catch (Exception ex)
{
// Capture for diagnostics, then rethrow without disturbing the stack trace.
e = ex;
throw;
}
finally
{
SqlStatistics.StopTimer(statistics);
if (e != null)
{
s_diagnosticListener.WriteConnectionOpenError(operationId, this, e);
}
else
{
s_diagnosticListener.WriteConnectionOpenAfter(operationId, this);
}
}
}
// On non-MARS connections, records the single task that is allowed to wait for the
// pending reconnection; a second concurrent waiter is a MARS-style usage and throws.
internal void RegisterWaitingForReconnect(Task waitingTask)
{
// MARS connections support multiple concurrent waiters; nothing to register.
if (((SqlConnectionString)ConnectionOptions).MARS)
{
return;
}
Interlocked.CompareExchange(ref _asyncWaitingForReconnection, waitingTask, null);
if (_asyncWaitingForReconnection != waitingTask)
{ // somebody else managed to register
throw SQL.MARSUnspportedOnConnection();
}
}
// Connection-resiliency reconnect loop: attempts to reopen the connection up to
// _connectRetryCount times, with ConnectRetryInterval seconds between attempts.
// Returns when reconnected or cancelled; throws when all attempts fail or the next
// attempt would exceed the command timeout. timeout is in seconds; <= 0 means none.
private async Task ReconnectAsync(int timeout)
{
try
{
long commandTimeoutExpiration = 0;
if (timeout > 0)
{
commandTimeoutExpiration = ADP.TimerCurrent() + ADP.TimerFromSeconds(timeout);
}
CancellationTokenSource cts = new CancellationTokenSource();
_reconnectionCancellationSource = cts;
CancellationToken ctoken = cts.Token;
int retryCount = _connectRetryCount; // take a snapshot: could be changed by modifying the connection string
for (int attempt = 0; attempt < retryCount; attempt++)
{
if (ctoken.IsCancellationRequested)
{
return;
}
try
{
try
{
// Force TryOpen to replace (not reuse) the inner connection for this attempt.
ForceNewConnection = true;
await OpenAsync(ctoken).ConfigureAwait(false);
// On success, increment the reconnect count - we don't really care if it rolls over since it is approx.
_reconnectCount = unchecked(_reconnectCount + 1);
#if DEBUG
Debug.Assert(_recoverySessionData._debugReconnectDataApplied, "Reconnect data was not applied !");
#endif
}
finally
{
ForceNewConnection = false;
}
return;
}
catch (SqlException e)
{
if (attempt == retryCount - 1)
{
throw SQL.CR_AllAttemptsFailed(e, _originalConnectionId);
}
if (timeout > 0 && ADP.TimerRemaining(commandTimeoutExpiration) < ADP.TimerFromSeconds(ConnectRetryInterval))
{
throw SQL.CR_NextAttemptWillExceedQueryTimeout(e, _originalConnectionId);
}
}
await Task.Delay(1000 * ConnectRetryInterval, ctoken).ConfigureAwait(false);
}
}
finally
{
_recoverySessionData = null;
_suppressStateChangeForReconnection = false;
}
// Every loop iteration either returns or throws; falling out of the loop is a logic error.
Debug.Fail("Should not reach this point");
}
// Validates the physical connection and, when it is found dead and the session is
// recoverable, kicks off (or returns the already-running) reconnection task.
// Returns a not-yet-completed reconnect task, or null when no reconnect is needed.
// beforeDisconnect (optional) runs before the dead connection is abandoned.
// timeout is in seconds and is forwarded to ReconnectAsync.
internal Task ValidateAndReconnect(Action beforeDisconnect, int timeout)
{
    Task runningReconnect = _currentReconnectionTask;
    // This loop in the end will return not completed reconnect task or null
    while (runningReconnect != null && runningReconnect.IsCompleted)
    {
        // clean current reconnect task (if it is the same one we checked
        Interlocked.CompareExchange<Task>(ref _currentReconnectionTask, null, runningReconnect);
        // make sure nobody started new task (if which case we did not clean it)
        runningReconnect = _currentReconnectionTask;
    }
    if (runningReconnect == null)
    {
        if (_connectRetryCount > 0)
        {
            SqlInternalConnectionTds tdsConn = GetOpenTdsConnection();
            if (tdsConn._sessionRecoveryAcknowledged)
            {
                TdsParserStateObject stateObj = tdsConn.Parser._physicalStateObj;
                if (!stateObj.ValidateSNIConnection())
                {
                    if (tdsConn.Parser._sessionPool != null)
                    {
                        if (tdsConn.Parser._sessionPool.ActiveSessionsCount > 0)
                        {
                            // >1 MARS session: cannot recover transparently, surface the error.
                            if (beforeDisconnect != null)
                            {
                                beforeDisconnect();
                            }
                            OnError(SQL.CR_UnrecoverableClient(ClientConnectionId), true, null);
                        }
                    }
                    SessionData cData = tdsConn.CurrentSessionData;
                    cData.AssertUnrecoverableStateCountIsCorrect();
                    if (cData._unrecoverableStatesCount == 0)
                    {
                        bool callDisconnect = false;
                        lock (_reconnectLock)
                        {
                            tdsConn.CheckEnlistedTransactionBinding();
                            runningReconnect = _currentReconnectionTask; // double check after obtaining the lock
                            if (runningReconnect == null)
                            {
                                if (cData._unrecoverableStatesCount == 0)
                                { // could change since the first check, but now is stable since connection is know to be broken
                                    _originalConnectionId = ClientConnectionId;
                                    _recoverySessionData = cData;
                                    if (beforeDisconnect != null)
                                    {
                                        beforeDisconnect();
                                    }
                                    try
                                    {
                                        _suppressStateChangeForReconnection = true;
                                        tdsConn.DoomThisConnection();
                                    }
                                    catch (SqlException)
                                    {
                                        // best effort: the connection is already broken
                                    }
                                    // use Task.Factory.StartNew with state overload instead of Task.Run to avoid anonymous closure context capture in method scope and avoid the unneeded allocation
                                    // BUGFIX: StartNew over an async delegate produces a Task<Task>; Unwrap so that the
                                    // stored task completes when the reconnection finishes, not when ReconnectAsync first yields.
                                    runningReconnect = Task.Factory.StartNew(state => ReconnectAsync((int)state), timeout, CancellationToken.None, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default).Unwrap();
                                    // if current reconnect is not null, somebody already started reconnection task - some kind of race condition
                                    Debug.Assert(_currentReconnectionTask == null, "Duplicate reconnection tasks detected");
                                    _currentReconnectionTask = runningReconnect;
                                }
                            }
                            else
                            {
                                callDisconnect = true;
                            }
                        }
                        // Invoke the callback outside the lock when another thread already owns the reconnect.
                        if (callDisconnect && beforeDisconnect != null)
                        {
                            beforeDisconnect();
                        }
                    }
                    else
                    {
                        // Session holds unrecoverable state; reconnection would lose it.
                        if (beforeDisconnect != null)
                        {
                            beforeDisconnect();
                        }
                        OnError(SQL.CR_UnrecoverableServer(ClientConnectionId), true, null);
                    }
                } // ValidateSNIConnection
            } // sessionRecoverySupported
        } // connectRetryCount>0
    }
    else
    { // runningReconnect = null
        if (beforeDisconnect != null)
        {
            beforeDisconnect();
        }
    }
    return runningReconnect;
}
// this is straightforward, but expensive method to do connection resiliency - it take locks and all preparations as for TDS request
partial void RepairInnerConnection()
{
WaitForPendingReconnection();
// With connection resiliency disabled there is nothing to repair.
if (_connectRetryCount == 0)
{
return;
}
SqlInternalConnectionTds tdsConn = InnerConnection as SqlInternalConnectionTds;
if (tdsConn != null)
{
tdsConn.ValidateConnectionForExecute(null);
tdsConn.GetSessionAndReconnectIfNeeded((SqlConnection)this);
}
}
// Blocks until any in-flight reconnection finishes; reconnection failures are
// swallowed here (they surface elsewhere).
private void WaitForPendingReconnection()
{
Task reconnectTask = _currentReconnectionTask;
if (reconnectTask != null && !reconnectTask.IsCompleted)
{
AsyncHelper.WaitForCompletion(reconnectTask, 0, null, rethrowExceptions: false);
}
}
// Cancels a pending OpenAsync (if any) and blocks until its retry continuation has
// finished, so Close never races with an in-flight open.
private void CancelOpenAndWait()
{
// copy from member to avoid changes by background thread
var completion = _currentCompletion;
if (completion != null)
{
completion.Item1.TrySetCanceled();
((IAsyncResult)completion.Item2).AsyncWaitHandle.WaitOne();
}
Debug.Assert(_currentCompletion == null, "After waiting for an async call to complete, there should be no completion source");
}
// Opens the connection asynchronously. A fast path completes inline when TryOpen
// succeeds synchronously; otherwise an OpenAsyncRetry continuation finishes the
// open when the pool signals the completion source. Diagnostic events mirror Open().
public override Task OpenAsync(CancellationToken cancellationToken)
{
Guid operationId = s_diagnosticListener.WriteConnectionOpenBefore(this);
PrepareStatisticsForNewConnection();
SqlStatistics statistics = null;
try
{
statistics = SqlStatistics.StartTimer(Statistics);
// The ambient transaction is stowed in the completion source's AsyncState for the retry path.
System.Transactions.Transaction transaction = ADP.GetCurrentTransaction();
TaskCompletionSource<DbConnectionInternal> completion = new TaskCompletionSource<DbConnectionInternal>(transaction);
TaskCompletionSource<object> result = new TaskCompletionSource<object>();
if (s_diagnosticListener.IsEnabled(SqlClientDiagnosticListenerExtensions.SqlAfterOpenConnection) ||
s_diagnosticListener.IsEnabled(SqlClientDiagnosticListenerExtensions.SqlErrorOpenConnection))
{
result.Task.ContinueWith((t) =>
{
if (t.Exception != null)
{
s_diagnosticListener.WriteConnectionOpenError(operationId, this, t.Exception);
}
else
{
s_diagnosticListener.WriteConnectionOpenAfter(operationId, this);
}
}, TaskScheduler.Default);
}
if (cancellationToken.IsCancellationRequested)
{
result.SetCanceled();
return result.Task;
}
bool completed;
try
{
completed = TryOpen(completion);
}
catch (Exception e)
{
s_diagnosticListener.WriteConnectionOpenError(operationId, this, e);
result.SetException(e);
return result.Task;
}
if (completed)
{
result.SetResult(null);
}
else
{
// Pending open: hook cancellation and schedule the retry continuation.
CancellationTokenRegistration registration = new CancellationTokenRegistration();
if (cancellationToken.CanBeCanceled)
{
registration = cancellationToken.Register(s => ((TaskCompletionSource<DbConnectionInternal>)s).TrySetCanceled(), completion);
}
OpenAsyncRetry retry = new OpenAsyncRetry(this, completion, result, registration);
_currentCompletion = new Tuple<TaskCompletionSource<DbConnectionInternal>, Task>(completion, result.Task);
completion.Task.ContinueWith(retry.Retry, TaskScheduler.Default);
return result.Task;
}
return result.Task;
}
catch (Exception ex)
{
s_diagnosticListener.WriteConnectionOpenError(operationId, this, ex);
throw;
}
finally
{
SqlStatistics.StopTimer(statistics);
}
}
// Returns the list of available metadata collections.
public override DataTable GetSchema() => GetSchema(DbMetaDataCollectionNames.MetaDataCollections, null);
// Returns the named metadata collection without restrictions.
public override DataTable GetSchema(string collectionName) => GetSchema(collectionName, null);
// Returns the named metadata collection filtered by the given restriction values.
public override DataTable GetSchema(string collectionName, string[] restrictionValues) =>
    InnerConnection.GetSchema(ConnectionFactory, PoolGroup, this, collectionName, restrictionValues);
// Continuation state for OpenAsync's pending path: when the pool's completion source
// fires, Retry finishes (or fails/cancels) the user-visible open task.
private class OpenAsyncRetry
{
private readonly SqlConnection _parent;
private readonly TaskCompletionSource<DbConnectionInternal> _retry;
private readonly TaskCompletionSource<object> _result;
private readonly CancellationTokenRegistration _registration;
public OpenAsyncRetry(SqlConnection parent, TaskCompletionSource<DbConnectionInternal> retry, TaskCompletionSource<object> result, CancellationTokenRegistration registration)
{
_parent = parent;
_retry = retry;
_result = result;
_registration = registration;
}
// Runs as a continuation of _retry.Task; completes _result accordingly.
internal void Retry(Task<DbConnectionInternal> retryTask)
{
_registration.Dispose();
try
{
SqlStatistics statistics = null;
try
{
statistics = SqlStatistics.StartTimer(_parent.Statistics);
if (retryTask.IsFaulted)
{
_parent.CloseInnerConnection();
_parent._currentCompletion = null;
_result.SetException(retryTask.Exception.InnerException);
}
else if (retryTask.IsCanceled)
{
_parent.CloseInnerConnection();
_parent._currentCompletion = null;
_result.SetCanceled();
}
else
{
bool result;
// protect continuation from races with close and cancel
lock (_parent.InnerConnection)
{
result = _parent.TryOpen(_retry);
}
if (result)
{
_parent._currentCompletion = null;
_result.SetResult(null);
}
else
{
// A second pending result after the pool signalled completion is an internal error.
_parent.CloseInnerConnection();
_parent._currentCompletion = null;
_result.SetException(ADP.ExceptionWithStackTrace(ADP.InternalError(ADP.InternalErrorCode.CompletedConnectReturnedPending)));
}
}
}
finally
{
SqlStatistics.StopTimer(statistics);
}
}
catch (Exception e)
{
_parent.CloseInnerConnection();
_parent._currentCompletion = null;
_result.SetException(e);
}
}
}
// Ensures a statistics object exists (or is carried over) when statistics are
// enabled or a diagnostics listener wants command/open events; otherwise no-op.
private void PrepareStatisticsForNewConnection()
{
    bool wantStatistics = StatisticsEnabled
        || s_diagnosticListener.IsEnabled(SqlClientDiagnosticListenerExtensions.SqlAfterExecuteCommand)
        || s_diagnosticListener.IsEnabled(SqlClientDiagnosticListenerExtensions.SqlAfterOpenConnection);
    if (!wantStatistics)
    {
        return;
    }
    if (_statistics == null)
    {
        _statistics = new SqlStatistics();
    }
    else
    {
        // Reuse the existing object so cumulative counters survive reopen.
        _statistics.ContinueOnNewConnection();
    }
}
// Core open path shared by Open/OpenAsync. retry == null means synchronous open.
// Returns true when the inner connection is open on return; false when the open is
// pending and will be completed via the retry completion source.
private bool TryOpen(TaskCompletionSource<DbConnectionInternal> retry)
{
SqlConnectionString connectionOptions = (SqlConnectionString)ConnectionOptions;
// Transient-fault handling only applies to synchronous opens with retries configured.
_applyTransientFaultHandling = (retry == null && connectionOptions != null && connectionOptions.ConnectRetryCount > 0);
if (ForceNewConnection)
{
if (!InnerConnection.TryReplaceConnection(this, ConnectionFactory, retry, UserConnectionOptions))
{
return false;
}
}
else
{
if (!InnerConnection.TryOpenConnection(this, ConnectionFactory, retry, UserConnectionOptions))
{
return false;
}
}
// does not require GC.KeepAlive(this) because of ReRegisterForFinalize below.
var tdsInnerConnection = (SqlInternalConnectionTds)InnerConnection;
Debug.Assert(tdsInnerConnection.Parser != null, "Where's the parser?");
if (!tdsInnerConnection.ConnectionOptions.Pooling)
{
// For non-pooled connections, we need to make sure that the finalizer does actually run to avoid leaking SNI handles
GC.ReRegisterForFinalize(this);
}
// The _statistics can change with StatisticsEnabled. Copying to a local variable before checking for a null value.
SqlStatistics statistics = _statistics;
if (StatisticsEnabled ||
(s_diagnosticListener.IsEnabled(SqlClientDiagnosticListenerExtensions.SqlAfterExecuteCommand) && statistics != null))
{
ADP.TimerCurrent(out _statistics._openTimestamp);
tdsInnerConnection.Parser.Statistics = _statistics;
}
else
{
tdsInnerConnection.Parser.Statistics = null;
_statistics = null; // in case of previous Open/Close/reset_CollectStats sequence
}
return true;
}
//
// INTERNAL PROPERTIES
//
// True when the open TDS connection has a local transaction; throws when closed.
internal bool HasLocalTransaction
{
    get => GetOpenTdsConnection().HasLocalTransaction;
}
// True when a local transaction was started via the API (BeginTransaction).
internal bool HasLocalTransactionFromAPI
{
get
{
Task reconnectTask = _currentReconnectionTask;
if (reconnectTask != null && !reconnectTask.IsCompleted)
{
return false; //we will not go into reconnection if we are inside the transaction
}
return GetOpenTdsConnection().HasLocalTransactionFromAPI;
}
}
// True when the server is SQL Server 2008 (Katmai) or newer.
internal bool IsKatmaiOrNewer
{
get
{
if (_currentReconnectionTask != null)
{ // holds true even if task is completed
return true; // if CR is enabled, connection, if established, will be Katmai+
}
return GetOpenTdsConnection().IsKatmaiOrNewer;
}
}
// TDS parser of the open inner connection; throws when the connection is closed.
internal TdsParser Parser
{
    get => GetOpenTdsConnection().Parser;
}
//
// INTERNAL METHODS
//
// Called before command execution: clears a completed reconnect-waiter registration,
// allows execution to proceed (and wait later) when a reconnection is in flight,
// and otherwise validates the open connection for the given command.
internal void ValidateConnectionForExecute(string method, SqlCommand command)
{
Task asyncWaitingForReconnection = _asyncWaitingForReconnection;
if (asyncWaitingForReconnection != null)
{
if (!asyncWaitingForReconnection.IsCompleted)
{
// Only one outstanding command may wait for reconnection on a non-MARS connection.
throw SQL.MARSUnspportedOnConnection();
}
else
{
Interlocked.CompareExchange(ref _asyncWaitingForReconnection, null, asyncWaitingForReconnection);
}
}
if (_currentReconnectionTask != null)
{
// Re-read the field: it can be cleared concurrently between the two checks.
Task currentReconnectionTask = _currentReconnectionTask;
if (currentReconnectionTask != null && !currentReconnectionTask.IsCompleted)
{
return; // execution will wait for this task later
}
}
SqlInternalConnectionTds innerConnection = GetOpenTdsConnection(method);
innerConnection.ValidateConnectionForExecute(command);
}
// Surround name in brackets and then escape any end bracket to protect against SQL Injection.
// NOTE: if the user escapes it themselves it will not work, but this was the case in V1 as well
// as native OleDb and Odbc.
// Null/empty names are passed through unchanged.
internal static string FixupDatabaseTransactionName(string name) =>
    string.IsNullOrEmpty(name) ? name : SqlServerEscapeHelper.EscapeIdentifier(name);
// If wrapCloseInAction is defined, then the action it defines will be run with the connection close action passed in as a parameter
// The close action also supports being run asynchronously
// Central error dispatch: optionally breaks (closes) the connection, then either
// throws (real errors) or raises InfoMessage (informational severity).
internal void OnError(SqlException exception, bool breakConnection, Action<Action> wrapCloseInAction)
{
Debug.Assert(exception != null && exception.Errors.Count != 0, "SqlConnection: OnError called with null or empty exception!");
if (breakConnection && (ConnectionState.Open == State))
{
if (wrapCloseInAction != null)
{
// Capture the close count so a deferred close is a no-op if the connection
// was already closed (and possibly reopened) in the meantime.
int capturedCloseCount = _closeCount;
Action closeAction = () =>
{
if (capturedCloseCount == _closeCount)
{
Close();
}
};
wrapCloseInAction(closeAction);
}
else
{
Close();
}
}
if (exception.Class >= TdsEnums.MIN_ERROR_CLASS)
{
// It is an error, and should be thrown. Class of TdsEnums.MIN_ERROR_CLASS or above is an error,
// below TdsEnums.MIN_ERROR_CLASS denotes an info message.
throw exception;
}
else
{
// If it is a class < TdsEnums.MIN_ERROR_CLASS, it is a warning collection - so pass to handler
this.OnInfoMessage(new SqlInfoMessageEventArgs(exception));
}
}
//
// PRIVATE METHODS
//
// Returns the inner TDS connection, throwing ADP.ClosedConnectionError when the
// connection is not open.
internal SqlInternalConnectionTds GetOpenTdsConnection()
{
    if (InnerConnection is SqlInternalConnectionTds innerConnection)
    {
        return innerConnection;
    }
    throw ADP.ClosedConnectionError();
}
// Returns the inner TDS connection, throwing an "open connection required" error
// naming the calling method when the connection is not open.
internal SqlInternalConnectionTds GetOpenTdsConnection(string method)
{
    if (InnerConnection is SqlInternalConnectionTds innerConnection)
    {
        return innerConnection;
    }
    throw ADP.OpenConnectionRequired(method, InnerConnection.State);
}
// Raises InfoMessage, discarding the "was a handler notified" result.
internal void OnInfoMessage(SqlInfoMessageEventArgs imevent)
{
    OnInfoMessage(imevent, out _);
}
// Raises InfoMessage; notified reports whether a handler was invoked. Catchable
// exceptions thrown by user handlers are deliberately swallowed.
internal void OnInfoMessage(SqlInfoMessageEventArgs imevent, out bool notified)
{
SqlInfoMessageEventHandler handler = InfoMessage;
if (null != handler)
{
notified = true;
try
{
handler(this, imevent);
}
catch (Exception e)
{
// Only fatal/security exceptions propagate out of a user event handler.
if (!ADP.IsCatchableOrSecurityExceptionType(e))
{
throw;
}
}
}
else
{
notified = false;
}
}
// Changes the SQL login password for the user identified by connectionString.
// Not supported with Integrated Security or AttachDBFilename; the affected pool is
// cleared afterwards (by the private overload) since pooled connections hold the old password.
public static void ChangePassword(string connectionString, string newPassword)
{
    if (string.IsNullOrEmpty(connectionString))
    {
        // BUGFIX: previously reported nameof(newPassword) for a missing connection string
        // (compare the SqlCredential overload below, which names connectionString correctly).
        throw SQL.ChangePasswordArgumentMissing(nameof(connectionString));
    }
    if (string.IsNullOrEmpty(newPassword))
    {
        throw SQL.ChangePasswordArgumentMissing(nameof(newPassword));
    }
    if (TdsEnums.MAXLEN_NEWPASSWORD < newPassword.Length)
    {
        throw ADP.InvalidArgumentLength(nameof(newPassword), TdsEnums.MAXLEN_NEWPASSWORD);
    }
    SqlConnectionPoolKey key = new SqlConnectionPoolKey(connectionString, credential: null, accessToken: null);
    SqlConnectionString connectionOptions = SqlConnectionFactory.FindSqlConnectionOptions(key);
    if (connectionOptions.IntegratedSecurity)
    {
        throw SQL.ChangePasswordConflictsWithSSPI();
    }
    if (!string.IsNullOrEmpty(connectionOptions.AttachDBFilename))
    {
        throw SQL.ChangePasswordUseOfUnallowedKey(SqlConnectionString.KEY.AttachDBFilename);
    }
    ChangePassword(connectionString, connectionOptions, null, newPassword, null);
}
// Changes the SQL login password for the user in the given credential, supplying
// the new password as a read-only SecureString. Same restrictions as the
// string-based overload, plus the connection string must not carry clear credentials.
public static void ChangePassword(string connectionString, SqlCredential credential, SecureString newSecurePassword)
{
if (string.IsNullOrEmpty(connectionString))
{
throw SQL.ChangePasswordArgumentMissing(nameof(connectionString));
}
// check credential; not necessary to check the length of password in credential as the check is done by SqlCredential class
if (credential == null)
{
throw SQL.ChangePasswordArgumentMissing(nameof(credential));
}
if (newSecurePassword == null || newSecurePassword.Length == 0)
{
throw SQL.ChangePasswordArgumentMissing(nameof(newSecurePassword));
}
if (!newSecurePassword.IsReadOnly())
{
throw ADP.MustBeReadOnly(nameof(newSecurePassword));
}
if (TdsEnums.MAXLEN_NEWPASSWORD < newSecurePassword.Length)
{
throw ADP.InvalidArgumentLength(nameof(newSecurePassword), TdsEnums.MAXLEN_NEWPASSWORD);
}
SqlConnectionPoolKey key = new SqlConnectionPoolKey(connectionString, credential, accessToken: null);
SqlConnectionString connectionOptions = SqlConnectionFactory.FindSqlConnectionOptions(key);
// Check for connection string values incompatible with SqlCredential
if (!string.IsNullOrEmpty(connectionOptions.UserID) || !string.IsNullOrEmpty(connectionOptions.Password))
{
throw ADP.InvalidMixedArgumentOfSecureAndClearCredential();
}
if (connectionOptions.IntegratedSecurity)
{
throw SQL.ChangePasswordConflictsWithSSPI();
}
if (!string.IsNullOrEmpty(connectionOptions.AttachDBFilename))
{
throw SQL.ChangePasswordUseOfUnallowedKey(SqlConnectionString.KEY.AttachDBFilename);
}
ChangePassword(connectionString, connectionOptions, credential, null, newSecurePassword);
}
// Performs the actual password change by constructing a throw-away internal
// connection with the new password, then clears the pool keyed by the old password.
private static void ChangePassword(string connectionString, SqlConnectionString connectionOptions, SqlCredential credential, string newPassword, SecureString newSecurePassword)
{
// note: This is the only case where we directly construct the internal connection, passing in the new password.
// Normally we would simply create a regular connection and open it, but there is no other way to pass the
// new password down to the constructor. This would have an unwanted impact on the connection pool.
SqlInternalConnectionTds con = null;
try
{
con = new SqlInternalConnectionTds(null, connectionOptions, credential, null, newPassword, newSecurePassword, false);
}
finally
{
if (con != null)
con.Dispose();
}
// Pooled connections authenticated with the old password are now stale; drop them.
SqlConnectionPoolKey key = new SqlConnectionPoolKey(connectionString, credential, accessToken: null);
SqlConnectionFactory.SingletonInstance.ClearPool(key);
}
//
// SQL DEBUGGING SUPPORT
//
// this only happens once per connection
// SxS: using named file mapping APIs
// Chains a cleanup continuation onto outerTask that drops the weak reference to
// `value` (e.g. a data reader) when the operation completes.
internal void RegisterForConnectionCloseNotification<T>(ref Task<T> outerTask, object value, int tag)
{
// Connection exists, schedule removal, will be added to ref collection after calling ValidateAndReconnect
outerTask = outerTask.ContinueWith(task =>
{
RemoveWeakReference(value);
return task;
}, TaskScheduler.Default).Unwrap();
}
// Resets all statistics counters; no-op when statistics collection is disabled.
public void ResetStatistics()
{
if (null != Statistics)
{
Statistics.Reset();
if (ConnectionState.Open == State)
{
// update timestamp;
ADP.TimerCurrent(out _statistics._openTimestamp);
}
}
}
// Returns a snapshot of the connection statistics; when collection is disabled,
// returns a fresh (all-zero) statistics dictionary.
public IDictionary RetrieveStatistics()
{
    if (null == Statistics)
    {
        return new SqlStatistics().GetDictionary();
    }
    UpdateStatistics();
    return Statistics.GetDictionary();
}
// Refreshes the close timestamp (when open) and delegates aggregation to SqlStatistics.
private void UpdateStatistics()
{
if (ConnectionState.Open == State)
{
// update timestamp
ADP.TimerCurrent(out _statistics._closeTimestamp);
}
// delegate the rest of the work to the SqlStatistics class
Statistics.UpdateStatistics();
}
// ICloneable: copies connection string/pool-group state via the copy constructor (see CopyFrom).
object ICloneable.Clone() => new SqlConnection(this);
// Copy-constructor helper used by Clone: copies options and pool group, and starts
// the clone in the appropriate closed state (never-opened vs previously-opened).
private void CopyFrom(SqlConnection connection)
{
ADP.CheckArgumentNull(connection, nameof(connection));
_userConnectionOptions = connection.UserConnectionOptions;
_poolGroup = connection.PoolGroup;
if (DbConnectionClosedNeverOpened.SingletonInstance == connection._innerConnection)
{
_innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
}
else
{
_innerConnection = DbConnectionClosedPreviouslyOpened.SingletonInstance;
}
}
// UDT SUPPORT
/// <summary>
/// Resolves an assembly reference during UDT type loading. References to
/// "Microsoft.SqlServer.Types" are pinned to <c>TypeSystemAssemblyVersion</c>.
/// </summary>
/// <param name="asmRef">Assembly reference to resolve; its Version may be rewritten.</param>
/// <param name="throwOnError">When true, load failures propagate to the caller.</param>
/// <returns>
/// The loaded assembly, or null when loading fails with a catchable exception and
/// <paramref name="throwOnError"/> is false.
/// </returns>
private Assembly ResolveTypeAssembly(AssemblyName asmRef, bool throwOnError)
{
    Debug.Assert(TypeSystemAssemblyVersion != null, "TypeSystemAssembly should be set !");
    if (string.Equals(asmRef.Name, "Microsoft.SqlServer.Types", StringComparison.OrdinalIgnoreCase))
    {
        asmRef.Version = TypeSystemAssemblyVersion;
    }
    try
    {
        return Assembly.Load(asmRef);
    }
    catch (Exception e)
    {
        // Swallow only catchable exceptions, and only when the caller asked for best-effort.
        if (throwOnError || !ADP.IsCatchableExceptionType(e))
        {
            throw;
        }
        // Fixed: removed a stray empty statement ("};") and the redundant else block.
        return null;
    }
}
// Ensures the UDT metadata carries a resolved CLR Type, loading it on demand from
// the assembly-qualified name. When fThrow is true, resolution failures surface as
// exceptions (either from Type.GetType or as UDTUnexpectedResult).
internal void CheckGetExtendedUDTInfo(SqlMetaDataPriv metaData, bool fThrow)
{
    if (metaData.udt?.Type == null)
    { // If null, we have not obtained extended info.
        Debug.Assert(!string.IsNullOrEmpty(metaData.udt?.AssemblyQualifiedName), "Unexpected state on GetUDTInfo");
        // Parameter throwOnError determines whether exception from Assembly.Load is thrown.
        metaData.udt.Type =
            Type.GetType(typeName: metaData.udt.AssemblyQualifiedName, assemblyResolver: asmRef => ResolveTypeAssembly(asmRef, fThrow), typeResolver: null, throwOnError: fThrow);
        if (fThrow && metaData.udt.Type == null)
        {
            // Type.GetType returned null without throwing; convert to the provider's error.
            throw SQL.UDTUnexpectedResult(metaData.udt.AssemblyQualifiedName);
        }
    }
}
/// <summary>
/// Converts a raw UDT wire value into its CLR object form.
/// </summary>
/// <param name="value">Raw value (a serialized byte[] unless SQL null).</param>
/// <param name="metaData">Column metadata carrying the resolved UDT CLR type.</param>
/// <param name="returnDBNull">When true, SQL null maps to DBNull.Value instead of the UDT's Null property.</param>
/// <returns>The deserialized UDT instance, its static Null value, or DBNull.Value.</returns>
internal object GetUdtValue(object value, SqlMetaDataPriv metaData, bool returnDBNull)
{
    if (returnDBNull && ADP.IsNull(value))
    {
        return DBNull.Value;
    }
    // Since the serializer doesn't handle nulls...
    if (ADP.IsNull(value))
    {
        // Map SQL null to the UDT's static "Null" property via reflection.
        Type t = metaData.udt?.Type;
        Debug.Assert(t != null, "Unexpected null of udtType on GetUdtValue!");
        object nullValue = t.InvokeMember("Null", BindingFlags.Public | BindingFlags.GetProperty | BindingFlags.Static, null, null, Array.Empty<object>(), CultureInfo.InvariantCulture);
        Debug.Assert(nullValue != null);
        return nullValue;
    }
    // Fixed: dispose the MemoryStream deterministically (it was previously leaked).
    using (MemoryStream stm = new MemoryStream((byte[])value))
    {
        object o = SerializationHelperSql9.Deserialize(stm, metaData.udt?.Type);
        Debug.Assert(o != null, "object could NOT be created");
        return o;
    }
}
/// <summary>Serializes a UDT instance, discarding the format and max-size outputs.</summary>
internal byte[] GetBytes(object o)
{
    // Fixed: dropped the dead "Format format = Format.Native" initialization
    // (it was immediately overwritten by the out parameter); discards make the
    // intent explicit.
    return GetBytes(o, out _, out _);
}
/// <summary>
/// Serializes a UDT instance into its binary wire format.
/// </summary>
/// <param name="o">UDT instance; its type (or a base type) must carry SqlUdtInfo metadata.</param>
/// <param name="format">Receives the declared serialization format.</param>
/// <param name="maxSize">Receives the declared maximum byte size.</param>
internal byte[] GetBytes(object o, out Format format, out int maxSize)
{
    SqlUdtInfo udtInfo = GetInfoFromType(o.GetType());
    maxSize = udtInfo.MaxByteSize;
    format = udtInfo.SerializationFormat;
    if (maxSize < -1 || maxSize >= ushort.MaxValue)
    {
        throw new InvalidOperationException(o.GetType() + ": invalid Size");
    }
    // Pre-size the stream when a non-negative max size is declared.
    using (MemoryStream buffer = new MemoryStream(maxSize < 0 ? 0 : maxSize))
    {
        SerializationHelperSql9.Serialize(buffer, o);
        return buffer.ToArray();
    }
}
/// <summary>
/// Walks the inheritance chain of <paramref name="t"/> looking for SqlUdtInfo
/// metadata; throws when no type in the chain carries it.
/// </summary>
private SqlUdtInfo GetInfoFromType(Type t)
{
    Debug.Assert(t != null, "Type object cant be NULL");
    Type orig = t;
    for (Type current = t; current != null; current = current.BaseType)
    {
        SqlUdtInfo attr = SqlUdtInfo.TryGetFromType(current);
        if (attr != null)
        {
            return attr;
        }
    }
    // Report the originally requested type, not the last base type inspected.
    throw SQL.UDTInvalidSqlType(orig.AssemblyQualifiedName);
}
}
}
| |
/* Copyright (c) 2012-2016 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using Antlr4.Runtime.Sharpen;
namespace Antlr4.Runtime.Dfa
{
/// <author>Sam Harwell</author>
/// <summary>
/// An edge map backed by a sorted key array and a parallel value list, sized for
/// DFA states with only a handful of outgoing edges. <see cref="Put"/> appends in
/// place when the new key lands at the end and capacity remains; otherwise it
/// returns a NEW map (a resized SparseEdgeMap or an ArrayEdgeMap), so callers must
/// always use the return value of mutating operations.
/// </summary>
public sealed class SparseEdgeMap<T> : AbstractEdgeMap<T>
    where T : class
{
    // Capacity of the sparse storage before Put() grows or upgrades the map.
    private const int DefaultMaxSize = 5;
    // Sorted keys; only the first Count entries are meaningful.
    private readonly int[] keys;
    // Values parallel to 'keys'; values.Count is the authoritative element count.
    private readonly List<T> values;

    public SparseEdgeMap(int minIndex, int maxIndex)
        : this(minIndex, maxIndex, DefaultMaxSize)
    {
    }

    public SparseEdgeMap(int minIndex, int maxIndex, int maxSparseSize)
        : base(minIndex, maxIndex)
    {
        this.keys = new int[maxSparseSize];
        this.values = new List<T>(maxSparseSize);
    }

    // Copy constructor used by Put/Remove to produce a resized or shifted copy.
    // Locks the source so its keys/values stay mutually consistent during the copy.
    private SparseEdgeMap(Antlr4.Runtime.Dfa.SparseEdgeMap<T> map, int maxSparseSize)
        : base(map.minIndex, map.maxIndex)
    {
        lock (map)
        {
            if (maxSparseSize < map.values.Count)
            {
                throw new ArgumentException();
            }
            keys = Arrays.CopyOf(map.keys, maxSparseSize);
            values = new List<T>(maxSparseSize);
            values.AddRange(map.Values);
        }
    }

    // Raw key storage; entries at index >= Count are unspecified.
    public int[] Keys
    {
        get
        {
            return keys;
        }
    }

    public IList<T> Values
    {
        get
        {
            return values;
        }
    }

    // Capacity of the sparse representation (length of the key array).
    public int MaxSparseSize
    {
        get
        {
            return keys.Length;
        }
    }

    public override int Count
    {
        get
        {
            return values.Count;
        }
    }

    public override bool IsEmpty
    {
        get
        {
            return values.Count == 0;
        }
    }

    public override bool ContainsKey(int key)
    {
        // Values are non-null by construction (Put(null) removes), so a null
        // lookup result means "absent".
        return this[key] != null;
    }

    public override T this[int key]
    {
        get
        {
            // Special property of this collection: values are only even added to
            // the end, else a new object is returned from put(). Therefore no lock
            // is required in this method.
            int index = System.Array.BinarySearch(keys, 0, Count, key);
            if (index < 0)
            {
                return null;
            }
            return values[index];
        }
    }

    public override AbstractEdgeMap<T> Put(int key, T value)
    {
        if (key < minIndex || key > maxIndex)
        {
            // Out-of-range keys are silently ignored.
            return this;
        }
        if (value == null)
        {
            // Storing null is equivalent to removal.
            return Remove(key);
        }
        lock (this)
        {
            int index = System.Array.BinarySearch(keys, 0, Count, key);
            if (index >= 0)
            {
                // replace existing entry
                values[index] = value;
                return this;
            }
            System.Diagnostics.Debug.Assert(index < 0 && value != null);
            // BinarySearch returns the bitwise complement of the insertion point.
            int insertIndex = -index - 1;
            if (Count < MaxSparseSize && insertIndex == Count)
            {
                // stay sparse and add new entry
                keys[insertIndex] = key;
                values.Add(value);
                return this;
            }
            int desiredSize = Count >= MaxSparseSize ? MaxSparseSize * 2 : MaxSparseSize;
            int space = maxIndex - minIndex + 1;
            // SparseEdgeMap only uses less memory than ArrayEdgeMap up to half the size of the symbol space
            if (desiredSize >= space / 2)
            {
                // Dense enough: upgrade to an array-backed map.
                ArrayEdgeMap<T> arrayMap = new ArrayEdgeMap<T>(minIndex, maxIndex);
                arrayMap = ((ArrayEdgeMap<T>)arrayMap.PutAll(this));
                arrayMap.Put(key, value);
                return arrayMap;
            }
            else
            {
                // Copy into a (possibly larger) sparse map, shifting keys at and
                // after insertIndex one slot right to make room for the new pair.
                Antlr4.Runtime.Dfa.SparseEdgeMap<T> resized = new Antlr4.Runtime.Dfa.SparseEdgeMap<T>(this, desiredSize);
                System.Array.Copy(resized.keys, insertIndex, resized.keys, insertIndex + 1, Count - insertIndex);
                resized.keys[insertIndex] = key;
                resized.values.Insert(insertIndex, value);
                return resized;
            }
        }
    }

    public override AbstractEdgeMap<T> Remove(int key)
    {
        lock (this)
        {
            int index = System.Array.BinarySearch(keys, 0, Count, key);
            if (index < 0)
            {
                // Key absent: nothing to remove.
                return this;
            }
            // Copy-on-write: build a new map with the entry at 'index' compacted away.
            Antlr4.Runtime.Dfa.SparseEdgeMap<T> result = new Antlr4.Runtime.Dfa.SparseEdgeMap<T>(this, MaxSparseSize);
            System.Array.Copy(result.keys, index + 1, result.keys, index, Count - index - 1);
            result.values.RemoveAt(index);
            return result;
        }
    }

    public override AbstractEdgeMap<T> Clear()
    {
        if (IsEmpty)
        {
            return this;
        }
        // An empty map is represented by the shared-shape EmptyEdgeMap type.
        return new EmptyEdgeMap<T>(minIndex, maxIndex);
    }

#if NET45PLUS
    public override IReadOnlyDictionary<int, T> ToMap()
#else
    public override IDictionary<int, T> ToMap()
#endif
    {
        if (IsEmpty)
        {
            return Sharpen.Collections.EmptyMap<int, T>();
        }
        // Lock so keys/values cannot drift apart mid-snapshot.
        lock (this)
        {
#if COMPACT
            IDictionary<int, T> result = new SortedList<int, T>();
#elif PORTABLE && !NET45PLUS
            IDictionary<int, T> result = new Dictionary<int, T>();
#else
            IDictionary<int, T> result = new SortedDictionary<int, T>();
#endif
            for (int i = 0; i < Count; i++)
            {
                result[keys[i]] = values[i];
            }
#if NET45PLUS
            return new ReadOnlyDictionary<int, T>(result);
#else
            return result;
#endif
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.IO.Abstractions;
using System.Linq;
using System.Windows.Input;
using Autofac;
using GalaSoft.MvvmLight;
using GalaSoft.MvvmLight.Command;
using GalaSoft.MvvmLight.Ioc;
using PicklesDoc.Pickles.ObjectModel;
using PicklesDoc.Pickles.UserInterface.Mvvm;
using PicklesDoc.Pickles.UserInterface.Settings;
namespace PicklesDoc.Pickles.UserInterface.ViewModel
{
/// <summary>
/// This class contains properties that the main View can data bind to.
/// <para>
/// Use the <strong>mvvminpc</strong> snippet to add bindable properties to this ViewModel.
/// </para>
/// <para>
/// You can also use Blend to data bind with the tool's support.
/// </para>
/// <para>
/// See http://www.galasoft.ch/mvvm
/// </para>
/// </summary>
public class MainViewModel : ViewModelBase
{
    // Selectable collections backing the documentation/test-result format pickers.
    private readonly MultiSelectableCollection<DocumentationFormat> documentationFormats;
    private readonly SelectableCollection<TestResultsFormat> testResultsFormats;
    // Commands bound to UI buttons.
    private readonly RelayCommand browseForFeatureFolderCommand;
    private readonly RelayCommand browseForOutputFolderCommand;
    private readonly RelayCommand browseForTestResultsFileCommand;
    private readonly RelayCommand generateCommand;
    private readonly RelayCommand openOutputDirectory;
    // Settings persistence and file-system abstraction (injected for testability).
    private readonly IMainModelSerializer mainModelSerializer;
    private readonly IFileSystem fileSystem;
    private string picklesVersion = typeof(Feature).Assembly.GetName().Version.ToString();
    private string featureFolder;
    private string outputFolder;
    private string projectName;
    private string projectVersion;
    private string testResultsFile;
    private CultureInfo selectedLanguage;
    private bool includeTests;
    private bool isRunning;
    // Validation flags, recomputed in MainWindowViewModelPropertyChanged.
    private bool isFeatureDirectoryValid;
    private bool isOutputDirectoryValid;
    private bool isProjectNameValid;
    private bool isProjectVersionValid;
    private bool isTestResultsFileValid;
    private bool isTestResultsFormatValid;
    private bool isLanguageValid = true;
    private bool createDirectoryForEachOutputFormat;
    private readonly CultureInfo[] neutralCultures;
    private bool isDocumentationFormatValid;

    /// <summary>
    /// Initializes the view model: populates both format collections (pre-selecting
    /// the first entry of each), wires commands and change notifications, and defaults
    /// the language to English.
    /// </summary>
    public MainViewModel(IMainModelSerializer mainModelSerializer, IFileSystem fileSystem)
    {
        this.documentationFormats = new MultiSelectableCollection<DocumentationFormat>(Enum.GetValues(typeof(DocumentationFormat)).Cast<DocumentationFormat>());
        this.documentationFormats.First().IsSelected = true;
        this.documentationFormats.SelectionChanged += this.DocumentationFormatsOnCollectionChanged;
        this.testResultsFormats = new SelectableCollection<TestResultsFormat>(Enum.GetValues(typeof(TestResultsFormat)).Cast<TestResultsFormat>());
        this.testResultsFormats.First().IsSelected = true;
        this.testResultsFormats.SelectionChanged += this.TestResultsFormatsOnCollectionChanged;
        this.browseForFeatureFolderCommand = new RelayCommand(this.DoBrowseForFeature);
        this.browseForOutputFolderCommand = new RelayCommand(this.DoBrowseForOutputFolder);
        this.browseForTestResultsFileCommand = new RelayCommand(this.DoBrowseForTestResultsFile);
        this.generateCommand = new RelayCommand(this.DoGenerate, this.CanGenerate);
        this.openOutputDirectory = new RelayCommand(this.DoOpenOutputDirectory, this.CanOpenOutputDirectory);
        // Validation reacts to our own property changes (see MainWindowViewModelPropertyChanged).
        this.PropertyChanged += this.MainWindowViewModelPropertyChanged;
        this.neutralCultures = CultureInfo.GetCultures(CultureTypes.NeutralCultures);
        this.selectedLanguage = CultureInfo.GetCultureInfo("en");
        this.mainModelSerializer = mainModelSerializer;
        this.fileSystem = fileSystem;
    }

    // Revalidates the test-results format whenever its selection changes.
    private void TestResultsFormatsOnCollectionChanged(object sender, EventArgs notifyCollectionChangedEventArgs)
    {
        this.IsTestResultsFormatValid = Enum.IsDefined(typeof(TestResultsFormat), this.testResultsFormats.Selected);
    }

    // At least one documentation format must be selected for generation to proceed.
    private void DocumentationFormatsOnCollectionChanged(object sender, EventArgs notifyCollectionChangedEventArgs)
    {
        this.IsDocumentationFormatValid = this.documentationFormats.Selected.Any();
    }

    // NOTE: the simple properties below delegate to MvvmLight's Set(), which raises
    // PropertyChanged only when the value actually changes.
    public string PicklesVersion
    {
        get { return this.picklesVersion; }
        set
        {
            Set(() => this.PicklesVersion, ref this.picklesVersion, value);
        }
    }

    public string FeatureFolder
    {
        get { return this.featureFolder; }
        set
        {
            Set(() => this.FeatureFolder, ref this.featureFolder, value);
        }
    }

    public string OutputFolder
    {
        get { return this.outputFolder; }
        set
        {
            Set(() => this.OutputFolder, ref this.outputFolder, value);
        }
    }

    public MultiSelectableCollection<DocumentationFormat> DocumentationFormatValues
    {
        get { return this.documentationFormats; }
    }

    public string ProjectName
    {
        get { return this.projectName; }
        set
        {
            Set(() => this.ProjectName, ref this.projectName, value);
        }
    }

    public string ProjectVersion
    {
        get { return this.projectVersion; }
        set
        {
            Set(() => this.ProjectVersion, ref this.projectVersion, value);
        }
    }

    // Semicolon-separated list of test result file paths (see DoBrowseForTestResultsFile).
    public string TestResultsFile
    {
        get { return this.testResultsFile; }
        set
        {
            Set(() => this.TestResultsFile, ref this.testResultsFile, value);
        }
    }

    public SelectableCollection<TestResultsFormat> TestResultsFormatValues
    {
        get { return this.testResultsFormats; }
    }

    public CultureInfo SelectedLanguage
    {
        get { return this.selectedLanguage; }
        set
        {
            Set(() => this.SelectedLanguage, ref this.selectedLanguage, value);
        }
    }

    public IEnumerable<CultureInfo> LanguageValues
    {
        get { return this.neutralCultures; }
    }

    public bool IncludeTests
    {
        get { return this.includeTests; }
        set
        {
            Set(() => this.IncludeTests, ref this.includeTests, value);
        }
    }

    public ICommand GeneratePickles
    {
        get { return this.generateCommand; }
    }

    public ICommand BrowseForFeatureFolder
    {
        get { return this.browseForFeatureFolderCommand; }
    }

    public ICommand BrowseForOutputFolder
    {
        get { return this.browseForOutputFolderCommand; }
    }

    public ICommand BrowseForTestResultsFile
    {
        get { return this.browseForTestResultsFileCommand; }
    }

    public RelayCommand OpenOutputDirectory
    {
        get { return this.openOutputDirectory; }
    }

    public bool IsRunning
    {
        get { return this.isRunning; }
        set
        {
            Set(() => this.IsRunning, ref this.isRunning, value);
        }
    }

    public bool IsFeatureDirectoryValid
    {
        get
        {
            return this.isFeatureDirectoryValid;
        }
        set
        {
            Set(() => this.IsFeatureDirectoryValid, ref this.isFeatureDirectoryValid, value);
        }
    }

    public bool IsDocumentationFormatValid
    {
        get
        {
            return this.isDocumentationFormatValid;
        }
        set
        {
            Set(() => this.IsDocumentationFormatValid, ref this.isDocumentationFormatValid, value);
        }
    }

    public bool IsOutputDirectoryValid
    {
        get { return this.isOutputDirectoryValid; }
        set
        {
            Set(() => this.IsOutputDirectoryValid, ref this.isOutputDirectoryValid, value);
        }
    }

    public bool IsProjectNameValid
    {
        get { return this.isProjectNameValid; }
        set
        {
            Set(() => this.IsProjectNameValid, ref this.isProjectNameValid, value);
        }
    }

    public bool IsProjectVersionValid
    {
        get { return this.isProjectVersionValid; }
        set
        {
            Set(() => this.IsProjectVersionValid, ref this.isProjectVersionValid, value);
        }
    }

    public bool IsTestResultsFileValid
    {
        get { return this.isTestResultsFileValid; }
        set
        {
            Set(() => this.IsTestResultsFileValid, ref this.isTestResultsFileValid, value);
        }
    }

    public bool IsTestResultsFormatValid
    {
        get { return this.isTestResultsFormatValid; }
        set
        {
            Set(() => this.IsTestResultsFormatValid, ref this.isTestResultsFormatValid, value);
        }
    }

    public bool IsLanguageValid
    {
        get { return this.isLanguageValid; }
        set
        {
            Set(() => this.IsLanguageValid, ref this.isLanguageValid, value);
        }
    }

    public bool CreateDirectoryForEachOutputFormat
    {
        get
        {
            return this.createDirectoryForEachOutputFormat;
        }
        set
        {
            Set(() => this.CreateDirectoryForEachOutputFormat, ref this.createDirectoryForEachOutputFormat, value);
        }
    }

    /// <summary>Snapshots the current UI state into a MainModel and persists it.</summary>
    public void SaveToSettings()
    {
        MainModel mainModel = new MainModel
        {
            FeatureDirectory = this.featureFolder,
            OutputDirectory = this.outputFolder,
            ProjectName = this.projectName,
            ProjectVersion = this.projectVersion,
            IncludeTestResults = this.includeTests,
            TestResultsFile = this.testResultsFile,
            TestResultsFormat = this.testResultsFormats.Selected,
            SelectedLanguageLcid = this.selectedLanguage.LCID,
            DocumentationFormats = this.documentationFormats.Where(item => item.IsSelected).Select(item => item.Item).ToArray(),
            CreateDirectoryForEachOutputFormat = this.createDirectoryForEachOutputFormat
        };
        this.mainModelSerializer.Write(mainModel);
    }

    /// <summary>
    /// Restores UI state from persisted settings; a missing settings file leaves the
    /// defaults untouched.
    /// </summary>
    public void LoadFromSettings()
    {
        MainModel mainModel = this.mainModelSerializer.Read();
        if (mainModel == null)
        {
            return;
        }
        this.FeatureFolder = mainModel.FeatureDirectory;
        this.OutputFolder = mainModel.OutputDirectory;
        this.ProjectName = mainModel.ProjectName;
        this.ProjectVersion = mainModel.ProjectVersion;
        this.IncludeTests = mainModel.IncludeTestResults;
        this.TestResultsFile = mainModel.TestResultsFile;
        // SelectableCollection expects exactly one selected item; set flags on all entries.
        foreach (var item in this.TestResultsFormatValues)
        {
            if (item.Item == mainModel.TestResultsFormat)
            {
                item.IsSelected = true;
            }
            else
            {
                item.IsSelected = false;
            }
        }
        // May be null if the stored LCID no longer matches a neutral culture.
        this.SelectedLanguage = this.neutralCultures.Where(lv => lv.LCID == mainModel.SelectedLanguageLcid).FirstOrDefault();
        foreach (var item in this.documentationFormats)
        {
            item.IsSelected = mainModel.DocumentationFormats.Contains(item.Item);
        }
        this.CreateDirectoryForEachOutputFormat = mainModel.CreateDirectoryForEachOutputFormat;
    }

    // Central validation: reacts to our own PropertyChanged events and updates the
    // Is*Valid flags plus command enablement.
    // NOTE(review): the case labels are hard-coded strings; nameof(...) would be
    // refactor-safe - consider updating.
    private void MainWindowViewModelPropertyChanged(object sender, PropertyChangedEventArgs e)
    {
        switch (e.PropertyName)
        {
            case "FeatureFolder":
            {
                if (this.fileSystem.Directory.Exists(this.featureFolder))
                {
                    this.IsFeatureDirectoryValid = true;
                }
                else
                {
                    this.IsFeatureDirectoryValid = false;
                }
                break;
            }
            case "OutputFolder":
            {
                if (this.fileSystem.Directory.Exists(this.outputFolder))
                {
                    this.IsOutputDirectoryValid = true;
                }
                else
                {
                    this.IsOutputDirectoryValid = false;
                }
                this.openOutputDirectory.RaiseCanExecuteChanged();
                break;
            }
            case "TestResultsFile":
            {
                // Multiple result files are separated by ';'; every one must exist.
                if (this.testResultsFile == null || this.testResultsFile.Split(';').All(trf => this.fileSystem.File.Exists(trf)))
                {
                    this.IsTestResultsFileValid = true;
                }
                else
                {
                    this.IsTestResultsFileValid = false;
                }
                break;
            }
            case "ProjectName":
            {
                this.IsProjectNameValid = !string.IsNullOrWhiteSpace(this.projectName);
                break;
            }
            case "ProjectVersion":
            {
                this.IsProjectVersionValid = !string.IsNullOrWhiteSpace(this.projectVersion);
                break;
            }
            case "IsRunning":
            case "IsFeatureDirectoryValid":
            case "IsOutputDirectoryValid":
            case "IsProjectNameValid":
            case "IsProjectVersionValid":
            case "IsTestResultsFileValid":
            case "IsTestResultsFormatValid":
            case "IsLanguageValid":
            case "IncludeTests":
            case "IsDocumentationFormatValid":
            {
                // Any validation change can affect whether generation is allowed.
                this.generateCommand.RaiseCanExecuteChanged();
                break;
            }
        }
        //this.SaveToSettings();
    }

    // Generation is allowed only when idle and all applicable inputs validate;
    // test-results checks are waived when tests are excluded.
    private bool CanGenerate()
    {
        return !this.isRunning
            && this.isFeatureDirectoryValid
            && this.isOutputDirectoryValid
            && this.isProjectNameValid
            && this.isProjectVersionValid
            && (this.isTestResultsFileValid || !this.includeTests)
            && (this.isTestResultsFormatValid || !this.includeTests)
            && this.isDocumentationFormatValid
            && this.isLanguageValid;
    }

    // Runs generation on a BackgroundWorker so the UI stays responsive.
    private void DoGenerate()
    {
        this.IsRunning = true;
        var backgroundWorker = new BackgroundWorker();
        backgroundWorker.DoWork += (sender, args) => this.DoWork();
        backgroundWorker.RunWorkerCompleted += (sender, args) =>
        {
            this.IsRunning = false;
        };
        backgroundWorker.RunWorkerAsync();
    }

    // Builds a fresh Autofac container per selected documentation format and runs
    // the Pickles Runner with a configuration assembled from the view-model state.
    // NOTE(review): passing null to AddTestResultFiles when tests are excluded
    // presumably means "none" - confirm Configuration tolerates null. When
    // IncludeTests is true, testResultsFile is assumed non-null (guarded by CanGenerate).
    private void DoWork()
    {
        foreach (DocumentationFormat documentationFormat in this.documentationFormats.Selected)
        {
            var builder = new ContainerBuilder();
            builder.RegisterAssemblyTypes(typeof(Runner).Assembly);
            builder.Register<IFileSystem>(_ => this.fileSystem).SingleInstance();
            builder.RegisterModule<PicklesModule>();
            var container = builder.Build();
            var configuration = container.Resolve<Configuration>();
            configuration.FeatureFolder = fileSystem.DirectoryInfo.FromDirectoryName(this.featureFolder);
            if (this.createDirectoryForEachOutputFormat)
            {
                // Each format writes into its own subdirectory named after the format.
                configuration.OutputFolder = fileSystem.DirectoryInfo.FromDirectoryName(fileSystem.Path.Combine(outputFolder, documentationFormat.ToString("G")));
            }
            else
            {
                configuration.OutputFolder = fileSystem.DirectoryInfo.FromDirectoryName(this.outputFolder);
            }
            configuration.SystemUnderTestName = this.projectName;
            configuration.SystemUnderTestVersion = this.projectVersion;
            configuration.AddTestResultFiles(this.IncludeTests ? this.testResultsFile.Split(';').Select(trf => fileSystem.FileInfo.FromFileName(trf)).ToArray() : null);
            configuration.TestResultsFormat = this.testResultsFormats.Selected;
            configuration.Language = this.selectedLanguage != null ? this.selectedLanguage.TwoLetterISOLanguageName : CultureInfo.CurrentUICulture.TwoLetterISOLanguageName;
            configuration.DocumentationFormat = documentationFormat;
            var runner = container.Resolve<Runner>();
            runner.Run(container);
        }
    }

    // Multi-select file picker; selections are stored as a ';'-joined string.
    private void DoBrowseForTestResultsFile()
    {
        var dlg = new Ookii.Dialogs.Wpf.VistaOpenFileDialog();
        dlg.Multiselect = true;
        var result = dlg.ShowDialog();
        if (result == true) this.TestResultsFile = string.Join(";", dlg.FileNames);
    }

    private void DoBrowseForFeature()
    {
        var dlg = new Ookii.Dialogs.Wpf.VistaFolderBrowserDialog();
        var result = dlg.ShowDialog();
        if (result == true) this.FeatureFolder = dlg.SelectedPath;
    }

    private void DoBrowseForOutputFolder()
    {
        var dlg = new Ookii.Dialogs.Wpf.VistaFolderBrowserDialog();
        var result = dlg.ShowDialog();
        if (result == true) this.OutputFolder = dlg.SelectedPath;
    }

    // Opens the output folder in Windows Explorer.
    private void DoOpenOutputDirectory()
    {
        Process.Start(this.outputFolder);
    }

    private bool CanOpenOutputDirectory()
    {
        return this.isOutputDirectoryValid;
    }
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using sys = System;
namespace Google.Ads.GoogleAds.V10.Resources
{
/// <summary>Resource name for the <c>AdGroupAsset</c> resource.</summary>
public sealed partial class AdGroupAssetName : gax::IResourceName, sys::IEquatable<AdGroupAssetName>
{
/// <summary>The possible contents of <see cref="AdGroupAssetName"/>.</summary>
public enum ResourceNameType
{
    /// <summary>An unparsed resource name.</summary>
    Unparsed = 0,

    /// <summary>
    /// A resource name with pattern
    /// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>.
    /// </summary>
    CustomerAdGroupAssetFieldType = 1,
}

// Path template for the single known pattern. The tilde-delimited trailing segment
// is modeled as ONE template parameter and split/joined manually elsewhere in this class.
private static gax::PathTemplate s_customerAdGroupAssetFieldType = new gax::PathTemplate("customers/{customer_id}/adGroupAssets/{ad_group_id_asset_id_field_type}");
/// <summary>Creates a <see cref="AdGroupAssetName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="AdGroupAssetName"/> containing the provided
/// <paramref name="unparsedResourceName"/>.
/// </returns>
public static AdGroupAssetName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
    new AdGroupAssetName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

/// <summary>
/// Creates a <see cref="AdGroupAssetName"/> with the pattern
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="fieldTypeId">The <c>FieldType</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="AdGroupAssetName"/> constructed from the provided ids.</returns>
public static AdGroupAssetName FromCustomerAdGroupAssetFieldType(string customerId, string adGroupId, string assetId, string fieldTypeId) =>
    new AdGroupAssetName(ResourceNameType.CustomerAdGroupAssetFieldType, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), assetId: gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)), fieldTypeId: gax::GaxPreconditions.CheckNotNullOrEmpty(fieldTypeId, nameof(fieldTypeId)));

/// <summary>
/// Formats the IDs into the string representation of this <see cref="AdGroupAssetName"/> with pattern
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="fieldTypeId">The <c>FieldType</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="AdGroupAssetName"/> with pattern
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>.
/// </returns>
public static string Format(string customerId, string adGroupId, string assetId, string fieldTypeId) =>
    FormatCustomerAdGroupAssetFieldType(customerId, adGroupId, assetId, fieldTypeId);

/// <summary>
/// Formats the IDs into the string representation of this <see cref="AdGroupAssetName"/> with pattern
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="fieldTypeId">The <c>FieldType</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="AdGroupAssetName"/> with pattern
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>.
/// </returns>
public static string FormatCustomerAdGroupAssetFieldType(string customerId, string adGroupId, string assetId, string fieldTypeId) =>
    // The last path segment packs three ids joined by '~' into a single template parameter.
    s_customerAdGroupAssetFieldType.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), $"{(gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)))}~{(gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)))}~{(gax::GaxPreconditions.CheckNotNullOrEmpty(fieldTypeId, nameof(fieldTypeId)))}");
/// <summary>Parses the given resource name string into a new <see cref="AdGroupAssetName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description>
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>
/// </description>
/// </item>
/// </list>
/// </remarks>
/// <param name="adGroupAssetName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="AdGroupAssetName"/> if successful.</returns>
public static AdGroupAssetName Parse(string adGroupAssetName) => Parse(adGroupAssetName, false);

/// <summary>
/// Parses the given resource name string into a new <see cref="AdGroupAssetName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description>
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>
/// </description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="adGroupAssetName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="AdGroupAssetName"/> if successful.</returns>
public static AdGroupAssetName Parse(string adGroupAssetName, bool allowUnparsed) =>
    TryParse(adGroupAssetName, allowUnparsed, out AdGroupAssetName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

/// <summary>
/// Tries to parse the given resource name string into a new <see cref="AdGroupAssetName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description>
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>
/// </description>
/// </item>
/// </list>
/// </remarks>
/// <param name="adGroupAssetName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="AdGroupAssetName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string adGroupAssetName, out AdGroupAssetName result) =>
    TryParse(adGroupAssetName, false, out result);

/// <summary>
/// Tries to parse the given resource name string into a new <see cref="AdGroupAssetName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description>
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>
/// </description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="adGroupAssetName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="AdGroupAssetName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string adGroupAssetName, bool allowUnparsed, out AdGroupAssetName result)
{
    gax::GaxPreconditions.CheckNotNull(adGroupAssetName, nameof(adGroupAssetName));
    gax::TemplatedResourceName resourceName;
    if (s_customerAdGroupAssetFieldType.TryParseName(adGroupAssetName, out resourceName))
    {
        // The trailing template segment packs three ids as "{ad_group_id}~{asset_id}~{field_type}".
        string[] split1 = ParseSplitHelper(resourceName[1], new char[] { '~', '~', });
        if (split1 == null)
        {
            result = null;
            return false;
        }
        result = FromCustomerAdGroupAssetFieldType(resourceName[0], split1[0], split1[1], split1[2]);
        return true;
    }
    if (allowUnparsed)
    {
        // Fall back to storing the raw name when the caller permits unparsed resources.
        if (gax::UnparsedResourceName.TryParse(adGroupAssetName, out gax::UnparsedResourceName unparsedResourceName))
        {
            result = FromUnparsed(unparsedResourceName);
            return true;
        }
    }
    result = null;
    return false;
}
/// <summary>
/// Splits <paramref name="s"/> into <c>separators.Length + 1</c> non-empty segments,
/// consuming one separator (in order) between consecutive segments. Returns
/// <c>null</c> when a separator is missing or any segment would be empty.
/// </summary>
private static string[] ParseSplitHelper(string s, char[] separators)
{
    int partCount = separators.Length + 1;
    string[] parts = new string[partCount];
    int start = 0;
    for (int part = 0; part < partCount; part++)
    {
        int end;
        if (part < separators.Length)
        {
            end = s.IndexOf(separators[part], start);
        }
        else
        {
            // The final segment runs to the end of the string.
            end = s.Length;
        }
        // Fail on a missing separator or an empty segment.
        if (end < 0 || end == start)
        {
            return null;
        }
        parts[part] = s.Substring(start, end - start);
        start = end + 1;
    }
    return parts;
}
private AdGroupAssetName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string adGroupId = null, string assetId = null, string customerId = null, string fieldTypeId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
AdGroupId = adGroupId;
AssetId = assetId;
CustomerId = customerId;
FieldTypeId = fieldTypeId;
}
/// <summary>
/// Constructs a new instance of a <see cref="AdGroupAssetName"/> class from the component parts of pattern
/// <c>customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}</c>
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="fieldTypeId">The <c>FieldType</c> ID. Must not be <c>null</c> or empty.</param>
// Every component is validated eagerly via GaxPreconditions before being
// passed to the chained storage constructor, which only stores the values.
public AdGroupAssetName(string customerId, string adGroupId, string assetId, string fieldTypeId) : this(ResourceNameType.CustomerAdGroupAssetFieldType, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), assetId: gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)), fieldTypeId: gax::GaxPreconditions.CheckNotNullOrEmpty(fieldTypeId, nameof(fieldTypeId)))
{
}
// All component properties are get-only: an AdGroupAssetName is immutable
// after construction.
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>AdGroup</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string AdGroupId { get; }
/// <summary>
/// The <c>Asset</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string AssetId { get; }
/// <summary>
/// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string CustomerId { get; }
/// <summary>
/// The <c>FieldType</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string FieldTypeId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
// True for every ResourceNameType except Unparsed.
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
    // Unparsed names round-trip through the raw unparsed representation.
    if (Type == ResourceNameType.Unparsed)
    {
        return UnparsedResource.ToString();
    }
    // Known pattern: expand customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}.
    if (Type == ResourceNameType.CustomerAdGroupAssetFieldType)
    {
        return s_customerAdGroupAssetFieldType.Expand(CustomerId, $"{AdGroupId}~{AssetId}~{FieldTypeId}");
    }
    throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
/// <summary>Returns a hash code for this resource name.</summary>
// Hash is derived from the canonical string form, keeping it consistent with
// the string-based Equals below.
public override int GetHashCode() => ToString().GetHashCode();
/// <summary>Determines whether this resource name equals <paramref name="obj"/> (by canonical string form).</summary>
public override bool Equals(object obj) => Equals(obj as AdGroupAssetName);
/// <summary>Determines whether this resource name equals <paramref name="other"/> (by canonical string form).</summary>
public bool Equals(AdGroupAssetName other) => ToString() == other?.ToString();
/// <summary>Equality operator; <c>null</c>-safe, delegates to <see cref="Equals(AdGroupAssetName)"/>.</summary>
public static bool operator ==(AdGroupAssetName a, AdGroupAssetName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <summary>Inequality operator; negation of <see cref="operator =="/>.</summary>
public static bool operator !=(AdGroupAssetName a, AdGroupAssetName b) => !(a == b);
}
// Partial class adding strongly-typed resource-name views over the string
// resource-name properties. Convention for all three: the getter returns
// null for an unset (empty) string, and the setter stores "" for null.
public partial class AdGroupAsset
{
/// <summary>
/// <see cref="AdGroupAssetName"/>-typed view over the <see cref="ResourceName"/> resource name property.
/// </summary>
internal AdGroupAssetName ResourceNameAsAdGroupAssetName
{
get => string.IsNullOrEmpty(ResourceName) ? null : AdGroupAssetName.Parse(ResourceName, allowUnparsed: true);
set => ResourceName = value?.ToString() ?? "";
}
/// <summary>
/// <see cref="AdGroupName"/>-typed view over the <see cref="AdGroup"/> resource name property.
/// </summary>
internal AdGroupName AdGroupAsAdGroupName
{
get => string.IsNullOrEmpty(AdGroup) ? null : AdGroupName.Parse(AdGroup, allowUnparsed: true);
set => AdGroup = value?.ToString() ?? "";
}
/// <summary><see cref="AssetName"/>-typed view over the <see cref="Asset"/> resource name property.</summary>
internal AssetName AssetAsAssetName
{
get => string.IsNullOrEmpty(Asset) ? null : AssetName.Parse(Asset, allowUnparsed: true);
set => Asset = value?.ToString() ?? "";
}
}
}
| |
/*``The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved via the world wide web at http://www.erlang.org/.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
*
* The Initial Developer of the Original Code is Ericsson Utvecklings AB.
* Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
* AB. All Rights Reserved.''
*
* Converted from Java to C# by Vlad Dumitrescu (vlad_Dumitrescu@hotmail.com)
*/
namespace Otp.Erlang
{
using System;
/*
* Provides a C# representation of Erlang binaries. Anything that
* can be represented as a sequence of bytes can be made into an
* Erlang binary.
**/
[Serializable]
public class Binary:Erlang.Object
{
    // Binary contents; always a private defensive copy.
    private byte[] bin;

    /*
    * Create a binary from a byte array.
    *
    * @param bin the array of bytes from which to create the binary.
    **/
    public Binary(byte[] bin)
    {
        // Defensive copy: later mutation of the caller's array must not
        // affect this binary.
        this.bin = new byte[bin.Length];
        Array.Copy(bin, 0, this.bin, 0, bin.Length);
    }

    /*
    * Create a binary from a stream containing a binary encoded in
    * Erlang external format.
    *
    * @param buf the stream containing the encoded binary.
    *
    * @exception DecodeException if the buffer does not
    * contain a valid external representation of an Erlang binary.
    **/
    public Binary(OtpInputStream buf)
    {
        this.bin = buf.read_binary();
    }

    /*
    * Create a binary from an arbitrary C# Object. The object must be
    * serializable by SupportClass.Serialize.
    *
    * @param o the object to serialize and create this binary from.
    **/
    public Binary(System.Object o)
    {
        try
        {
            this.bin = toByteArray(o);
        }
        catch (System.IO.IOException)
        {
            throw new System.ArgumentException("Object must implement Serializable");
        }
    }

    // Serializes an object into a fresh byte array via SupportClass.Serialize.
    // Returns null for a null input.
    private static byte[] toByteArray(System.Object o)
    {
        if (o == null)
            return null;
        // The streams are local to this call, so no synchronization is needed.
        System.IO.MemoryStream baos = new System.IO.MemoryStream();
        System.IO.BinaryWriter oos = new System.IO.BinaryWriter(baos);
        SupportClass.Serialize(oos, o);
        oos.Flush();
        return baos.ToArray();
    }

    // Deserializes an object from a byte array via SupportClass.Deserialize.
    // Best-effort by design: returns null for null input or on any
    // deserialization failure (a binary need not contain a serialized object).
    private static System.Object fromByteArray(byte[] buf)
    {
        if (buf == null)
            return null;
        try
        {
            System.IO.MemoryStream bais = new System.IO.MemoryStream(buf);
            System.IO.BinaryReader ois = new System.IO.BinaryReader(bais);
            return SupportClass.Deserialize(ois);
        }
        catch (System.Exception)
        {
            // Deliberately swallowed; fall through to the null return below.
        }
        return null;
    }

    /*
    * Get the byte array from a binary.
    *
    * NOTE: this returns the internal array, not a copy; mutating it mutates
    * the binary.
    *
    * @return the byte array containing the bytes for this binary.
    **/
    public virtual byte[] binaryValue()
    {
        return bin;
    }

    /*
    * Get the size of the binary.
    *
    * @return the number of bytes contained in the binary.
    **/
    public virtual int size()
    {
        return bin.Length;
    }

    /*
    * Get the C# Object from the binary. If the binary contains a
    * serialized object, then this method will recreate the object.
    *
    * @return the Object represented by this binary, or null if
    * the binary does not represent an Object.
    **/
    public virtual System.Object getObject()
    {
        return fromByteArray(this.bin);
    }

    /*
    * Get the string representation of this binary object. A binary is
    * printed as #Bin<N>, where N is the number of bytes
    * contained in the object.
    *
    * @return the Erlang string representation of this binary.
    **/
    public override System.String ToString()
    {
        return "#Bin<" + bin.Length + ">";
    }

    /*
    * Convert this binary to the equivalent Erlang external representation.
    *
    * @param buf an output stream to which the encoded binary should be
    * written.
    **/
    public override void encode(OtpOutputStream buf)
    {
        buf.write_binary(this.bin);
    }

    /*
    * Determine if two binaries are equal. Binaries are equal if they have
    * the same length and the array of bytes is identical.
    *
    * @param o the binary to compare to.
    *
    * @return true if the byte arrays contain the same bytes, false
    * otherwise.
    **/
    public override bool Equals(System.Object o)
    {
        if (!(o is Binary))
            return false;
        Binary other = (Binary) o;
        // size() is virtual, so subclasses keep their say in the comparison.
        int size = this.size();
        if (size != other.size())
            return false;
        for (int i = 0; i < size; i++)
        {
            if (this.bin[i] != other.bin[i])
                return false;
        }
        return true;
    }

    /*
    * Compute a content-based hash code, consistent with Equals(): binaries
    * with identical bytes produce identical hash codes. (This previously
    * returned the constant 1, which made every binary collide in
    * hash-based collections.)
    **/
    public override int GetHashCode()
    {
        int hash = 17;
        for (int i = 0; i < bin.Length; i++)
        {
            hash = unchecked(hash * 31 + bin[i]);
        }
        return hash;
    }

    /*
    * Create a deep copy of this binary: the clone gets its own copy of the
    * byte array.
    **/
    public override System.Object clone()
    {
        Binary newBin = (Binary) (base.clone());
        newBin.bin = new byte[bin.Length];
        bin.CopyTo(newBin.bin, 0);
        return newBin;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using UnityEngine;
namespace KERBALISM
{
// a window containing a panel
// a window containing a panel; lifecycle is Open() -> Update() (builds the
// panel each frame via the refresh callback) -> On_gui() (draws it), and the
// window counts as closed whenever 'panel' is null.
public sealed class Window
{
// true while this window holds the "KerbalismWindowLock" click-through lock
private bool clickThroughLocked = false;
// control types locked while the mouse is over the window, to prevent
// clicks/keys from falling through to the game UI underneath
private const ControlTypes WindowLockTypes = ControlTypes.MANNODE_ADDEDIT | ControlTypes.MANNODE_DELETE | ControlTypes.MAP_UI |
ControlTypes.TARGETING | ControlTypes.VESSEL_SWITCHING | ControlTypes.TWEAKABLES | ControlTypes.EDITOR_UI | ControlTypes.EDITOR_SOFT_LOCK | ControlTypes.UI;
// - width: window width in pixel
// - left: initial window horizontal position
// - top: initial window vertical position
public Window(uint width, uint left, uint top)
{
// generate unique id
win_id = Lib.RandomInt(int.MaxValue);
// setup window geometry (height 0: it is computed from the panel in On_gui)
win_rect = new Rect((float)left, (float)top, (float)width, 0.0f);
// setup dragbox geometry
drag_rect = new Rect(0.0f, 0.0f, (float)width, Styles.ScaleFloat(20.0f));
// initialize tooltip utility
tooltip = new Tooltip();
}
// store the panel-content builder; the window becomes visible once Update()
// runs it and the resulting panel is non-empty
public void Open(Action<Panel> refresh)
{
this.refresh = refresh;
}
// close the window: release input locks and drop the panel and its builder
public void Close()
{
// clear input locks
InputLockManager.RemoveControlLock("KerbalismWindowLock");
InputLockManager.RemoveControlLock("KerbalismMainGUILock");
refresh = null;
panel = null;
}
// rebuild the panel content from the refresh callback, once per call
public void Update()
{
if (refresh != null)
{
// initialize or clear panel
if (panel == null) panel = new Panel();
else panel.Clear();
// refresh panel content
refresh(panel);
// if panel is empty, close the window
if (panel.Empty())
{
Close();
}
}
}
// draw the window and manage input locks; must be called from an IMGUI pass
public void On_gui()
{
// window is considered closed if panel is null
if (panel == null) return;
// adapt window size to panel
// - clamp to 80% of screen width/height
win_rect.width = Math.Min(panel.Width(), Screen.width * 0.8f);
win_rect.height = Math.Min(Styles.ScaleFloat(20.0f) + panel.Height(), Screen.height * 0.8f);
// clamp the window to the screen, so it can't be dragged outside
float offset_x = Math.Max(0.0f, -win_rect.xMin) + Math.Min(0.0f, Screen.width - win_rect.xMax);
float offset_y = Math.Max(0.0f, -win_rect.yMin) + Math.Min(0.0f, Screen.height - win_rect.yMax);
win_rect.xMin += offset_x;
win_rect.xMax += offset_x;
win_rect.yMin += offset_y;
win_rect.yMax += offset_y;
// draw the window
win_rect = GUILayout.Window(win_id, win_rect, Draw_window, "", Styles.win);
// get mouse over state; Input.mousePosition has an inverted y axis
// relative to GUI space, hence the Screen.height - y conversion
//bool mouse_over = win_rect.Contains(Event.current.mousePosition);
bool mouse_over = win_rect.Contains(new Vector2(Input.mousePosition.x, Screen.height - Input.mousePosition.y));
// disable camera mouse scrolling on mouse over
if (mouse_over)
{
GameSettings.AXIS_MOUSEWHEEL.primary.scale = 0.0f;
}
// Disable Click through: acquire the lock on enter, release it on leave
if (mouse_over && !clickThroughLocked)
{
InputLockManager.SetControlLock(WindowLockTypes, "KerbalismWindowLock");
clickThroughLocked = true;
}
if (!mouse_over && clickThroughLocked)
{
InputLockManager.RemoveControlLock("KerbalismWindowLock");
clickThroughLocked = false;
}
}
// window body drawn by GUILayout.Window; the int parameter (window id) is unused
void Draw_window(int _)
{
// render window title with a close icon on the right
GUILayout.BeginHorizontal(Styles.title_container);
GUILayout.Label(Textures.empty, Styles.left_icon);
GUILayout.Label(panel.Title().ToUpper(), Styles.title_text);
GUILayout.Label(Textures.close, Styles.right_icon);
bool b = Lib.IsClicked();
GUILayout.EndHorizontal();
// close only after EndHorizontal so the IMGUI layout stays balanced
if (b) { Close(); return; }
// start scrolling view
scroll_pos = GUILayout.BeginScrollView(scroll_pos, HighLogic.Skin.horizontalScrollbar, HighLogic.Skin.verticalScrollbar);
// render panel content
panel.Render();
// end scroll view
GUILayout.EndScrollView();
// draw tooltip
tooltip.Draw(win_rect);
// right click close the window
if (Event.current.type == EventType.MouseDown
&& Event.current.button == 1)
{
Close();
}
// enable dragging
GUI.DragWindow(drag_rect);
}
// true if the given GUI-space position lies inside the window rect
public bool Contains(Vector2 pos)
{
return win_rect.Contains(pos);
}
// move the window to the given screen position, keeping its current size
public void Position(uint x, uint y)
{
win_rect.Set((float)x, (float)y, win_rect.width, win_rect.height);
}
// current horizontal position (left edge), truncated to pixels
public uint Left()
{
return (uint)win_rect.xMin;
}
// current vertical position (top edge), truncated to pixels
public uint Top()
{
return (uint)win_rect.yMin;
}
// type of the currently displayed panel, or unknown when the window is closed
public Panel.PanelType PanelType
{
get
{
if (panel == null)
return Panel.PanelType.unknown;
else
return panel.paneltype;
}
}
// store window id
private readonly int win_id;
// store window geometry
private Rect win_rect;
// store dragbox geometry
private Rect drag_rect;
// used by scroll window mechanics
private Vector2 scroll_pos;
// tooltip utility
private Tooltip tooltip;
// panel (null = window closed)
private Panel panel;
// refresh function that rebuilds the panel each Update()
private Action<Panel> refresh;
}
} // KERBALISM
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace CloudNinja.AuthorizationApi.Web.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
// number of elements generated for arrays, collections and dictionaries
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type, or <c>null</c> if generation fails.</returns>
public object GenerateObject(Type type)
{
return GenerateObject(type, new Dictionary<Type, object>());
}
// Core dispatcher. createdObjectReferences maps already-created complex
// types to their instances so circular references terminate.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
// order matters: simple types first, then arrays, then generics
// (which handle generic collections/dictionaries), then the
// non-generic collection interfaces, then enums and POCOs
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
/// <summary>Handles any generic type: nullables, key/value pairs, tuples, generic collections/dictionaries, queryables and generic POCOs.</summary>
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
// materialize interface requests as List<T>
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
// materialize interface requests as Dictionary<K,V>
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
/// <summary>Creates a Tuple of up to 8 components; returns null only if every component fails to generate.</summary>
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
/// <summary>True if the open generic definition is one of the Tuple`1..Tuple`8 types.</summary>
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
/// <summary>Creates a KeyValuePair; returns null only if both the key and the value fail to generate.</summary>
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
/// <summary>Creates an array of the requested size; returns null if every element came back null.</summary>
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
/// <summary>Creates and fills a dictionary instance via reflection over its Add/Contains methods.</summary>
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
// NOTE(review): assumes the type exposes Add/TryAdd and Contains/ContainsKey;
// a type with neither would make the Invoke calls below throw
// NullReferenceException (caught by the caller's catch-all).
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
// skip duplicate keys rather than letting Add throw
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
/// <summary>Returns the first declared value of the enum, or null for an empty enum.</summary>
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
/// <summary>Creates an IQueryable/IQueryable{T} by generating a backing list/array and wrapping it with AsQueryable.</summary>
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
// bind the generic AsQueryable(IEnumerable<T>) overload via reflection
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
/// <summary>Creates and fills a collection instance via reflection over its Add method; returns null if every element came back null.</summary>
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
/// <summary>Generates a value of the underlying type; boxing makes it a valid Nullable{T}.</summary>
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
/// <summary>Instantiates a POCO and populates its public settable properties and fields.</summary>
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
// register BEFORE populating members, so a member of the same type
// reuses this instance instead of recursing forever
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
/// <summary>Fills every writable public instance property with a generated value.</summary>
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
/// <summary>Fills every public instance field with a generated value.</summary>
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
// Table-driven factory for primitive/simple types; _index makes successive
// samples of the same type distinguishable (e.g. "sample string 1", "2", ...).
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
// true when the table has a factory for the exact type
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
// look up the factory; callers must check CanGenerateObject first
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace Microsoft.Xml.Schema
{
using System;
using System.Collections;
using System.Globalization;
using System.Text;
using System.IO;
using System.Diagnostics;
internal sealed partial class Parser
{
private SchemaType _schemaType; // flavor being parsed; may start as None and be inferred from the root element
private XmlNameTable _nameTable;
private SchemaNames _schemaNames;
private ValidationEventHandler _eventHandler;
private XmlNamespaceManager _namespaceManager;
private XmlReader _reader;
private PositionInfo _positionInfo; // line/column info of the reader, for error reporting
private bool _isProcessNamespaces; // true when the reader supplied no namespace manager and we track namespaces ourselves
private int _schemaXmlDepth = 0; // reader depth of the schema root element
private int _markupDepth; // depth at which appinfo/documentation markup begins; int.MaxValue when not inside markup
private SchemaBuilder _builder; // XsdBuilder or XdrBuilder, chosen in StartParsing
private XmlSchema _schema; // result of the XSD path
private SchemaInfo _xdrSchema; // result of the XDR path
private XmlResolver _xmlResolver = null; //to be used only by XDRBuilder
//xs:Annotation perf fix
private XmlDocument _dummyDocument;
private bool _processMarkup; // whether nodes below _markupDepth should be materialized as XmlNodes
private XmlNode _parentNode; // current parent while building annotation markup
private XmlNamespaceManager _annotationNSManager; // lazily created for annotation markup
private string _xmlns; // atomized "xmlns" string from the name table
//Whitespace check for text nodes
private XmlCharType _xmlCharType = XmlCharType.Instance;
// Creates a parser for the given schema flavor (XSD or XDR, or None to infer
// from the root element); validation problems are reported via eventHandler.
public Parser(SchemaType schemaType, XmlNameTable nameTable, SchemaNames schemaNames, ValidationEventHandler eventHandler)
{
_schemaType = schemaType;
_nameTable = nameTable;
_schemaNames = schemaNames;
_eventHandler = eventHandler;
// default resolver from the app-wide XmlReaderSection config; used only by XDRBuilder
_xmlResolver = Microsoft.Xml.XmlConfiguration.XmlReaderSection.CreateDefaultResolver();
_processMarkup = true;
// used to create XmlNodes when materializing xs:annotation markup
_dummyDocument = new XmlDocument();
}
// Drives the pull parse: processes the current node, then advances, until the
// builder signals completion (ParseReaderNode returns false) or input ends.
// Returns the schema flavor actually parsed.
public SchemaType Parse(XmlReader reader, string targetNamespace)
{
StartParsing(reader, targetNamespace);
while (ParseReaderNode() && reader.Read()) { }
return FinishParsing();
}
// Positions the reader on the schema root element, validates that the root
// matches the expected schema flavor (throws XmlSchemaException otherwise),
// and creates the corresponding builder (XsdBuilder or XdrBuilder).
public void StartParsing(XmlReader reader, string targetNamespace)
{
_reader = reader;
_positionInfo = PositionInfo.GetPositionInfo(reader);
_namespaceManager = reader.NamespaceManager;
if (_namespaceManager == null)
{
// reader supplies no namespace manager: track namespace scopes ourselves
_namespaceManager = new XmlNamespaceManager(_nameTable);
_isProcessNamespaces = true;
}
else
{
_isProcessNamespaces = false;
}
// advance to the first element, i.e. the schema root
while (reader.NodeType != XmlNodeType.Element && reader.Read()) { }
_markupDepth = int.MaxValue;
_schemaXmlDepth = reader.Depth;
SchemaType rootType = _schemaNames.SchemaTypeFromRoot(reader.LocalName, reader.NamespaceURI);
string code;
if (!CheckSchemaRoot(rootType, out code))
{
throw new XmlSchemaException(code, reader.BaseURI, _positionInfo.LineNumber, _positionInfo.LinePosition);
}
if (_schemaType == SchemaType.XSD)
{
_schema = new XmlSchema();
_schema.BaseUri = new Uri(reader.BaseURI, UriKind.RelativeOrAbsolute);
_builder = new XsdBuilder(reader, _namespaceManager, _schema, _nameTable, _schemaNames, _eventHandler);
}
else
{
// CheckSchemaRoot only admits XSD or XDR at this point
Debug.Assert(_schemaType == SchemaType.XDR);
_xdrSchema = new SchemaInfo();
_xdrSchema.SchemaType = SchemaType.XDR;
_builder = new XdrBuilder(reader, _namespaceManager, _xdrSchema, targetNamespace, _nameTable, _schemaNames, _eventHandler);
((XdrBuilder)_builder).XmlResolver = _xmlResolver;
}
}
// Validates that the detected root element type is compatible with the
// parser's configured schema flavor. When the parser was created with
// SchemaType.None the flavor is inferred from the root. Returns false with a
// resource-string code in 'code' when the root is unacceptable.
private bool CheckSchemaRoot(SchemaType rootType, out string code)
{
    code = null;
    if (_schemaType == SchemaType.None)
    {
        _schemaType = rootType;
    }
    if (rootType == SchemaType.XSD)
    {
        if (_schemaType != SchemaType.XSD)
        {
            code = ResXml.Sch_MixSchemaTypes;
            return false;
        }
        return true;
    }
    if (rootType == SchemaType.XDR)
    {
        if (_schemaType == SchemaType.XSD)
        {
            code = ResXml.Sch_XSDSchemaOnly;
            return false;
        }
        if (_schemaType != SchemaType.XDR)
        {
            code = ResXml.Sch_MixSchemaTypes;
            return false;
        }
        return true;
    }
    if (rootType == SchemaType.DTD || rootType == SchemaType.None)
    {
        // No root element that this parser can handle was detected.
        code = ResXml.Sch_SchemaRootExpected;
        if (_schemaType == SchemaType.XSD)
        {
            code = ResXml.Sch_XSDSchemaRootExpected;
        }
        return false;
    }
    // unreachable for any defined SchemaType value
    Debug.Assert(false);
    return true;
}
// Returns the schema flavor that was actually parsed; this may have been
// inferred from the root element when the parser started with SchemaType.None.
public SchemaType FinishParsing()
{
return _schemaType;
}
// The XSD schema produced by parsing (assigned only on the XSD path).
public XmlSchema XmlSchema
{
get { return _schema; }
}
// Resolver handed to the XDR builder in StartParsing; write-only by design.
internal XmlResolver XmlResolver
{
set
{
_xmlResolver = value;
}
}
// The XDR schema info produced by parsing (assigned only on the XDR path).
public SchemaInfo XdrSchema
{
get { return _xdrSchema; }
}
/// <summary>
/// Processes the node the reader is currently positioned on, feeding it to the
/// schema builder. Returns false once the end element at the schema root depth
/// (_schemaXmlDepth) has been consumed; returns true while more nodes remain.
/// </summary>
public bool ParseReaderNode()
{
    // Nodes deeper than _markupDepth are the free-form content of
    // xs:appinfo / xs:documentation; they are collected as XmlNode markup
    // (or silently skipped when _processMarkup is false).
    if (_reader.Depth > _markupDepth)
    {
        if (_processMarkup)
        {
            ProcessAppInfoDocMarkup(false);
        }
        return true;
    }
    else if (_reader.NodeType == XmlNodeType.Element)
    {
        if (_builder.ProcessElement(_reader.Prefix, _reader.LocalName, _reader.NamespaceURI))
        {
            _namespaceManager.PushScope();
            if (_reader.MoveToFirstAttribute())
            {
                do
                {
                    _builder.ProcessAttribute(_reader.Prefix, _reader.LocalName, _reader.NamespaceURI, _reader.Value);
                    // Track in-scope xmlns declarations when namespace processing is on.
                    if (Ref.Equal(_reader.NamespaceURI, _schemaNames.NsXmlNs) && _isProcessNamespaces)
                    {
                        _namespaceManager.AddNamespace(_reader.Prefix.Length == 0 ? string.Empty : _reader.LocalName, _reader.Value);
                    }
                }
                while (_reader.MoveToNextAttribute());
                _reader.MoveToElement(); // get back to the element
            }
            _builder.StartChildren();
            if (_reader.IsEmptyElement)
            {
                // Empty element: close its scope immediately.
                _namespaceManager.PopScope();
                _builder.EndChildren();
                if (_reader.Depth == _schemaXmlDepth)
                {
                    return false; // done
                }
            }
            else if (!_builder.IsContentParsed())
            { //AppInfo and Documentation
                // Switch to markup-collection mode for this subtree; lazily create
                // the namespace manager used while loading annotation markup.
                _markupDepth = _reader.Depth;
                _processMarkup = true;
                if (_annotationNSManager == null)
                {
                    _annotationNSManager = new XmlNamespaceManager(_nameTable);
                    _xmlns = _nameTable.Add("xmlns");
                }
                ProcessAppInfoDocMarkup(true);
            }
        }
        else if (!_reader.IsEmptyElement)
        { //UnsupportedElement in that context
            _markupDepth = _reader.Depth;
            _processMarkup = false; //Hack to not process unsupported elements
        }
    }
    else if (_reader.NodeType == XmlNodeType.Text)
    { //Check for whitespace
        if (!_xmlCharType.IsOnlyWhitespace(_reader.Value))
        {
            _builder.ProcessCData(_reader.Value);
        }
    }
    else if (_reader.NodeType == XmlNodeType.EntityReference ||
        _reader.NodeType == XmlNodeType.SignificantWhitespace ||
        _reader.NodeType == XmlNodeType.CDATA)
    {
        _builder.ProcessCData(_reader.Value);
    }
    else if (_reader.NodeType == XmlNodeType.EndElement)
    {
        if (_reader.Depth == _markupDepth)
        {
            // End of an appinfo/documentation element: hand the collected child
            // markup (accumulated under _parentNode) to the builder at once.
            if (_processMarkup)
            {
                Debug.Assert(_parentNode != null);
                XmlNodeList list = _parentNode.ChildNodes;
                XmlNode[] markup = new XmlNode[list.Count];
                for (int i = 0; i < list.Count; i++)
                {
                    markup[i] = list[i];
                }
                _builder.ProcessMarkup(markup);
                _namespaceManager.PopScope();
                _builder.EndChildren();
            }
            _markupDepth = int.MaxValue; // leave markup-collection mode
        }
        else
        {
            _namespaceManager.PopScope();
            _builder.EndChildren();
        }
        if (_reader.Depth == _schemaXmlDepth)
        {
            return false; // done
        }
    }
    return true;
}
/// <summary>
/// Loads the node the reader is positioned on into the XmlDocument fragment that
/// accumulates the markup content of xs:appinfo / xs:documentation. When
/// <paramref name="root"/> is true the current element becomes the fragment root.
/// Non-element leaf nodes fall through (via goto default) to be appended to
/// _parentNode.
/// </summary>
private void ProcessAppInfoDocMarkup(bool root)
{
    //First time reader is positioned on AppInfo or Documentation element
    XmlNode currentNode = null;

    switch (_reader.NodeType)
    {
        case XmlNodeType.Element:
            _annotationNSManager.PushScope();
            // LoadElementNode appends the element itself and adjusts _parentNode.
            currentNode = LoadElementNode(root);
            // Dev10 (TFS) #479761: The following code was to address the issue of where an in-scope namespace declaration attribute
            // was not added when an element follows an empty element. This fix will result in persisting schema in a consistent form
            // although it does not change the semantic meaning of the schema.
            // Since it is as a breaking change and Dev10 needs to maintain the backward compatibility, this fix is being reverted.
            // if (reader.IsEmptyElement) {
            //     annotationNSManager.PopScope();
            // }
            break;

        case XmlNodeType.Text:
            currentNode = _dummyDocument.CreateTextNode(_reader.Value);
            goto default;

        case XmlNodeType.SignificantWhitespace:
            currentNode = _dummyDocument.CreateSignificantWhitespace(_reader.Value);
            goto default;

        case XmlNodeType.CDATA:
            currentNode = _dummyDocument.CreateCDataSection(_reader.Value);
            goto default;

        case XmlNodeType.EntityReference:
            currentNode = _dummyDocument.CreateEntityReference(_reader.Name);
            goto default;

        case XmlNodeType.Comment:
            currentNode = _dummyDocument.CreateComment(_reader.Value);
            goto default;

        case XmlNodeType.ProcessingInstruction:
            currentNode = _dummyDocument.CreateProcessingInstruction(_reader.Name, _reader.Value);
            goto default;

        case XmlNodeType.EndEntity:
            break;

        case XmlNodeType.Whitespace:
            // Insignificant whitespace is dropped from the collected markup.
            break;

        case XmlNodeType.EndElement:
            // Close the current element's namespace scope and move back up.
            _annotationNSManager.PopScope();
            _parentNode = _parentNode.ParentNode;
            break;

        default: //other possible node types: Document/DocType/DocumentFrag/Entity/Notation/Xmldecl cannot appear as children of xs:appInfo or xs:doc
            Debug.Assert(currentNode != null);
            Debug.Assert(_parentNode != null);
            _parentNode.AppendChild(currentNode);
            break;
    }
}
/// <summary>
/// Creates an XmlElement for the element the reader is positioned on and, for
/// non-root elements, copies its attributes and synthesizes any xmlns declaration
/// attributes needed to make the fragment self-contained, then appends it to
/// _parentNode. The root element only becomes the new _parentNode.
/// </summary>
private XmlElement LoadElementNode(bool root)
{
    Debug.Assert(_reader.NodeType == XmlNodeType.Element);

    XmlReader r = _reader;
    bool fEmptyElement = r.IsEmptyElement;

    XmlElement element = _dummyDocument.CreateElement(r.Prefix, r.LocalName, r.NamespaceURI);
    element.IsEmpty = fEmptyElement;

    if (root)
    {
        _parentNode = element;
    }
    else
    {
        // First pass over the attributes: copy each one and record any xmlns
        // declarations into the annotation namespace manager.
        XmlAttributeCollection attributes = element.Attributes;
        if (r.MoveToFirstAttribute())
        {
            do
            {
                if (Ref.Equal(r.NamespaceURI, _schemaNames.NsXmlNs))
                { //Namespace Attribute
                    _annotationNSManager.AddNamespace(r.Prefix.Length == 0 ? string.Empty : _reader.LocalName, _reader.Value);
                }
                XmlAttribute attr = LoadAttributeNode();
                attributes.Append(attr);
            } while (r.MoveToNextAttribute());
        }
        r.MoveToElement();

        // If the element's own prefix is not declared in the collected fragment,
        // synthesize an xmlns attribute from the in-scope schema namespaces.
        string ns = _annotationNSManager.LookupNamespace(r.Prefix);
        if (ns == null)
        {
            XmlAttribute attr = CreateXmlNsAttribute(r.Prefix, _namespaceManager.LookupNamespace(r.Prefix));
            attributes.Append(attr);
        }
        else if (ns.Length == 0)
        { //string.Empty prefix is mapped to string.Empty NS by default
            string elemNS = _namespaceManager.LookupNamespace(r.Prefix);
            if (elemNS != string.Empty)
            {
                XmlAttribute attr = CreateXmlNsAttribute(r.Prefix, elemNS);
                attributes.Append(attr);
            }
        }

        // Second pass over the attributes (MoveToNextAttribute after MoveToElement
        // restarts at the first attribute): add xmlns declarations for prefixed
        // attributes whose prefix is not yet declared within the fragment.
        while (r.MoveToNextAttribute())
        {
            if (r.Prefix.Length != 0)
            {
                string attNS = _annotationNSManager.LookupNamespace(r.Prefix);
                if (attNS == null)
                {
                    XmlAttribute attr = CreateXmlNsAttribute(r.Prefix, _namespaceManager.LookupNamespace(r.Prefix));
                    attributes.Append(attr);
                }
            }
        }
        r.MoveToElement();

        _parentNode.AppendChild(element);
        // Descend only into non-empty elements; the matching EndElement pops back up.
        if (!r.IsEmptyElement)
        {
            _parentNode = element;
        }
    }
    return element;
}
/// <summary>
/// Creates an xmlns declaration attribute ("xmlns" for the empty prefix,
/// "xmlns:prefix" otherwise) with the given namespace value, and records the
/// mapping in the annotation namespace manager.
/// </summary>
private XmlAttribute CreateXmlNsAttribute(string prefix, string value)
{
    XmlAttribute attr = prefix.Length == 0
        ? _dummyDocument.CreateAttribute(string.Empty, _xmlns, XmlReservedNs.NsXmlNs)
        : _dummyDocument.CreateAttribute(_xmlns, prefix, XmlReservedNs.NsXmlNs);

    attr.AppendChild(_dummyDocument.CreateTextNode(value));
    _annotationNSManager.AddNamespace(prefix, value);
    return attr;
}
/// <summary>
/// Builds an XmlAttribute for the attribute the reader is positioned on,
/// materializing its value as text nodes and (possibly nested) entity references.
/// Throws for any other node type encountered inside the attribute value.
/// </summary>
private XmlAttribute LoadAttributeNode()
{
    Debug.Assert(_reader.NodeType == XmlNodeType.Attribute);

    XmlReader reader = _reader;
    XmlAttribute attribute = _dummyDocument.CreateAttribute(reader.Prefix, reader.LocalName, reader.NamespaceURI);

    while (reader.ReadAttributeValue())
    {
        if (reader.NodeType == XmlNodeType.Text)
        {
            attribute.AppendChild(_dummyDocument.CreateTextNode(reader.Value));
        }
        else if (reader.NodeType == XmlNodeType.EntityReference)
        {
            attribute.AppendChild(LoadEntityReferenceInAttribute());
        }
        else
        {
            throw XmlLoader.UnexpectedNodeType(reader.NodeType);
        }
    }
    return attribute;
}
/// <summary>
/// Builds an XmlEntityReference for the entity reference the reader is positioned
/// on inside an attribute value. If the reader can resolve entities, the expanded
/// content is read and appended (recursing for nested entity references) until
/// the matching EndEntity node is seen.
/// </summary>
private XmlEntityReference LoadEntityReferenceInAttribute()
{
    Debug.Assert(_reader.NodeType == XmlNodeType.EntityReference);
    XmlEntityReference eref = _dummyDocument.CreateEntityReference(_reader.LocalName);
    // Without entity resolution the reference stays unexpanded.
    if (!_reader.CanResolveEntity)
    {
        return eref;
    }
    _reader.ResolveEntity();

    while (_reader.ReadAttributeValue())
    {
        switch (_reader.NodeType)
        {
            case XmlNodeType.Text:
                eref.AppendChild(_dummyDocument.CreateTextNode(_reader.Value));
                continue;

            case XmlNodeType.EndEntity:
                // An entity that expanded to nothing still gets an empty text child.
                if (eref.ChildNodes.Count == 0)
                {
                    eref.AppendChild(_dummyDocument.CreateTextNode(String.Empty));
                }
                return eref;

            case XmlNodeType.EntityReference:
                eref.AppendChild(LoadEntityReferenceInAttribute());
                break;

            default:
                throw XmlLoader.UnexpectedNodeType(_reader.NodeType);
        }
    }
    return eref;
}
};
} // namespace Microsoft.Xml
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using JCG = J2N.Collections.Generic;
namespace Lucene.Net.Util.Automaton
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// Just holds a set of <see cref="T:int[]"/> states, plus a corresponding
/// <see cref="T:int[]"/> count per state. Used by
/// <see cref="BasicOperations.Determinize(Automaton)"/>.
/// <para/>
/// NOTE: This was SortedIntSet in Lucene
/// </summary>
internal sealed class SortedInt32Set : IEquatable<SortedInt32Set>, IEquatable<SortedInt32Set.FrozenInt32Set>
{
    // Sorted state numbers currently in the set; only the first `upto` entries
    // are valid while the array representation is active.
    internal int[] values;
    // counts[i] is the reference count for values[i] (parallel to `values`).
    internal int[] counts;
    // Number of valid entries in values/counts.
    internal int upto;
    // Cached hash, recomputed by ComputeHash(); GetHashCode() just returns it.
    private int hashCode;

    // If we hold more than this many states, we switch from
    // O(N^2) linear ops to O(N log(N)) TreeMap
    private const int TREE_MAP_CUTOVER = 30;

    // state -> refcount mapping used once the set grows past TREE_MAP_CUTOVER
    // (sorted so iteration order matches the array representation).
    private readonly IDictionary<int, int> map = new JCG.SortedDictionary<int, int>();

    // True while `map` (rather than values/counts) is the authoritative store.
    private bool useTreeMap;

    // The determinized state this set maps to; assigned by callers.
    internal State state;

    /// <summary>Creates an empty set with initial capacity for <paramref name="capacity"/> states.</summary>
    public SortedInt32Set(int capacity)
    {
        values = new int[capacity];
        counts = new int[capacity];
    }

    // Adds this state to the set
    /// <summary>Increments the reference count of state <paramref name="num"/>, inserting it (sorted) if absent.</summary>
    public void Incr(int num)
    {
        if (useTreeMap)
        {
            int key = num;
            if (!map.TryGetValue(key, out int val))
            {
                map[key] = 1;
            }
            else
            {
                map[key] = 1 + val;
            }
            return;
        }

        if (upto == values.Length)
        {
            values = ArrayUtil.Grow(values, 1 + upto);
            counts = ArrayUtil.Grow(counts, 1 + upto);
        }

        for (int i = 0; i < upto; i++)
        {
            if (values[i] == num)
            {
                counts[i]++;
                return;
            }
            else if (num < values[i])
            {
                // insert here: shift the tail right by one to keep `values` sorted
                int j = upto - 1;
                while (j >= i)
                {
                    values[1 + j] = values[j];
                    counts[1 + j] = counts[j];
                    j--;
                }
                values[i] = num;
                counts[i] = 1;
                upto++;
                return;
            }
        }

        // append (num is larger than every existing value)
        values[upto] = num;
        counts[upto] = 1;
        upto++;

        // Past the cutover size, copy everything into the tree map and switch over.
        if (upto == TREE_MAP_CUTOVER)
        {
            useTreeMap = true;
            for (int i = 0; i < upto; i++)
            {
                map[values[i]] = counts[i];
            }
        }
    }

    // Removes this state from the set, if count decrs to 0
    /// <summary>Decrements the reference count of state <paramref name="num"/>, removing it when it reaches zero.</summary>
    public void Decr(int num)
    {
        if (useTreeMap)
        {
            int count = map[num];
            if (count == 1)
            {
                map.Remove(num);
            }
            else
            {
                map[num] = count - 1;
            }
            // Fall back to simple arrays once we touch zero again
            if (map.Count == 0)
            {
                useTreeMap = false;
                upto = 0;
            }
            return;
        }

        for (int i = 0; i < upto; i++)
        {
            if (values[i] == num)
            {
                counts[i]--;
                if (counts[i] == 0)
                {
                    // Remove the entry by shifting the tail left by one.
                    int limit = upto - 1;
                    while (i < limit)
                    {
                        values[i] = values[i + 1];
                        counts[i] = counts[i + 1];
                        i++;
                    }
                    upto = limit;
                }
                return;
            }
        }
        // `num` must be present when Decr is called.
        Debug.Assert(false);
    }

    /// <summary>
    /// Recomputes <see cref="hashCode"/> from the current members; in tree-map mode
    /// this also copies the keys back into <see cref="values"/> (growing as needed).
    /// NOTE(review): in tree-map mode `counts` is not refreshed here — presumably
    /// callers only consume values/hashCode after this; verify against callers.
    /// </summary>
    public void ComputeHash()
    {
        if (useTreeMap)
        {
            if (map.Count > values.Length)
            {
                int size = ArrayUtil.Oversize(map.Count, RamUsageEstimator.NUM_BYTES_INT32);
                values = new int[size];
                counts = new int[size];
            }
            hashCode = map.Count;
            upto = 0;
            foreach (int state in map.Keys)
            {
                hashCode = 683 * hashCode + state;
                values[upto++] = state;
            }
        }
        else
        {
            hashCode = upto;
            for (int i = 0; i < upto; i++)
            {
                hashCode = 683 * hashCode + values[i];
            }
        }
    }

    /// <summary>Snapshots the current members into an immutable FrozenInt32Set using the cached hash and state.</summary>
    public FrozenInt32Set ToFrozenInt32Set() // LUCENENET TODO: This didn't exist in the original
    {
        int[] c = new int[upto];
        Array.Copy(values, 0, c, 0, upto);
        return new FrozenInt32Set(c, this.hashCode, this.state);
    }

    /// <summary>Snapshots the current members into an immutable FrozenInt32Set bound to <paramref name="state"/>.</summary>
    public FrozenInt32Set Freeze(State state)
    {
        int[] c = new int[upto];
        Array.Copy(values, 0, c, 0, upto);
        return new FrozenInt32Set(c, hashCode, state);
    }

    // Returns the hash last computed by ComputeHash() (not recomputed here).
    public override int GetHashCode()
    {
        return hashCode;
    }

    // Equality is only defined against FrozenInt32Set: same hash and identical
    // member values (counts are intentionally ignored).
    public override bool Equals(object other)
    {
        if (other == null)
        {
            return false;
        }
        if (!(other is FrozenInt32Set))
        {
            return false;
        }
        FrozenInt32Set other2 = (FrozenInt32Set)other;
        if (hashCode != other2.hashCode)
        {
            return false;
        }
        if (other2.values.Length != upto)
        {
            return false;
        }
        for (int i = 0; i < upto; i++)
        {
            if (other2.values[i] != values[i])
            {
                return false;
            }
        }
        return true;
    }

    // Not implemented upstream; kept to satisfy IEquatable<SortedInt32Set>.
    public bool Equals(SortedInt32Set other) // LUCENENET TODO: This didn't exist in the original
    {
        throw new NotImplementedException("SortedIntSet Equals");
    }

    // Same semantics as Equals(object) but strongly typed.
    public bool Equals(FrozenInt32Set other) // LUCENENET TODO: This didn't exist in the original
    {
        if (other == null)
        {
            return false;
        }
        if (hashCode != other.hashCode)
        {
            return false;
        }
        if (other.values.Length != upto)
        {
            return false;
        }
        for (int i = 0; i < upto; i++)
        {
            if (other.values[i] != values[i])
            {
                return false;
            }
        }
        return true;
    }

    /// <summary>Renders the set as "[value:count value:count ...]" for debugging.</summary>
    public override string ToString()
    {
        StringBuilder sb = (new StringBuilder()).Append('[');
        for (int i = 0; i < upto; i++)
        {
            if (i > 0)
            {
                sb.Append(' ');
            }
            sb.Append(values[i]).Append(':').Append(counts[i]);
        }
        sb.Append(']');
        return sb.ToString();
    }

    /// <summary>
    /// Immutable snapshot of a SortedInt32Set's member values (no counts), plus the
    /// hash and determinized state it maps to.
    /// <para/>
    /// NOTE: This was FrozenIntSet in Lucene
    /// </summary>
    public sealed class FrozenInt32Set : IEquatable<SortedInt32Set>, IEquatable<FrozenInt32Set>
    {
        // Sorted member state numbers (frozen).
        internal readonly int[] values;
        // Hash computed by the source set (or from `num` in the single-value ctor).
        internal readonly int hashCode;
        internal readonly State state;

        public FrozenInt32Set(int[] values, int hashCode, State state)
        {
            this.values = values;
            this.hashCode = hashCode;
            this.state = state;
        }

        /// <summary>Single-member set; hash matches ComputeHash() of a one-element set (683 * 1 + num).</summary>
        public FrozenInt32Set(int num, State state)
        {
            this.values = new int[] { num };
            this.state = state;
            this.hashCode = 683 + num;
        }

        public override int GetHashCode()
        {
            return hashCode;
        }

        // Accepts either another frozen set or a (hashed) SortedInt32Set;
        // equal when hashes and member values match.
        public override bool Equals(object other)
        {
            if (other == null)
            {
                return false;
            }
            if (other is FrozenInt32Set)
            {
                FrozenInt32Set other2 = (FrozenInt32Set)other;
                if (hashCode != other2.hashCode)
                {
                    return false;
                }
                if (other2.values.Length != values.Length)
                {
                    return false;
                }
                for (int i = 0; i < values.Length; i++)
                {
                    if (other2.values[i] != values[i])
                    {
                        return false;
                    }
                }
                return true;
            }
            else if (other is SortedInt32Set)
            {
                SortedInt32Set other3 = (SortedInt32Set)other;
                if (hashCode != other3.hashCode)
                {
                    return false;
                }
                if (other3.values.Length != values.Length)
                {
                    return false;
                }
                for (int i = 0; i < values.Length; i++)
                {
                    if (other3.values[i] != values[i])
                    {
                        return false;
                    }
                }
                return true;
            }
            return false;
        }

        // NOTE(review): unlike Equals(object), this compares against
        // other.values.Length rather than other.upto — confirm callers always
        // pass a set whose values array is exactly sized (e.g. after Freeze).
        public bool Equals(SortedInt32Set other) // LUCENENET TODO: This didn't exist in the original
        {
            if (other == null)
            {
                return false;
            }
            if (hashCode != other.hashCode)
            {
                return false;
            }
            if (other.values.Length != values.Length)
            {
                return false;
            }
            for (int i = 0; i < values.Length; i++)
            {
                if (other.values[i] != values[i])
                {
                    return false;
                }
            }
            return true;
        }

        public bool Equals(FrozenInt32Set other) // LUCENENET TODO: This didn't exist in the original
        {
            if (other == null)
            {
                return false;
            }
            if (hashCode != other.hashCode)
            {
                return false;
            }
            if (other.values.Length != values.Length)
            {
                return false;
            }
            for (int i = 0; i < values.Length; i++)
            {
                if (other.values[i] != values[i])
                {
                    return false;
                }
            }
            return true;
        }

        /// <summary>Renders the set as "[value value ...]" for debugging.</summary>
        public override string ToString()
        {
            StringBuilder sb = (new StringBuilder()).Append('[');
            for (int i = 0; i < values.Length; i++)
            {
                if (i > 0)
                {
                    sb.Append(' ');
                }
                sb.Append(values[i]);
            }
            sb.Append(']');
            return sb.ToString();
        }
    }
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace NPOI.HSSF.Extractor
{
using System;
using System.Text;
using System.IO;
using System.Collections;
using NPOI.HSSF.UserModel;
using NPOI.HSSF.Record;
using NPOI.POIFS.FileSystem;
using NPOI;
using NPOI.HPSF;
using NPOI.HSSF.EventUserModel;
using NPOI.HSSF.Model;
//using NPOI.HSSF.Util;
using NPOI.SS.Util;
using System.Globalization;
/// <summary>
/// A text extractor for Excel files, that is based
/// on the hssf eventusermodel api.
/// It will typically use less memory than
/// ExcelExtractor, but may not provide
/// the same richness of formatting.
/// Returns the textual content of the file, suitable for
/// indexing by something like Lucene, but not really
/// intended for display to the user.
/// </summary>
public class EventBasedExcelExtractor : POIOLE2TextExtractor
{
    // Source workbook file system; events are streamed from it on each Text access.
    private POIFSFileSystem fs;
    private bool includeSheetNames = true;
    private bool formulasNotResults = false;

    public EventBasedExcelExtractor(POIFSFileSystem fs)
        : base(null)
    {
        this.fs = fs;
    }

    /// <summary>
    /// Would return the document information metadata for the document,
    /// if we supported it
    /// </summary>
    /// <value>The doc summary information.</value>
    public override DocumentSummaryInformation DocSummaryInformation
    {
        get {
            throw new NotImplementedException("Metadata extraction not supported in streaming mode, please use ExcelExtractor");
        }
    }

    /// <summary>
    /// Would return the summary information metadata for the document,
    /// if we supported it
    /// </summary>
    /// <value>The summary information.</value>
    public override SummaryInformation SummaryInformation
    {
        get
        {
            throw new NotImplementedException("Metadata extraction not supported in streaming mode, please use ExcelExtractor");
        }
    }

    /// <summary>
    /// Should sheet names be included? Default is true
    /// </summary>
    /// <value>if set to <c>true</c> [include sheet names].</value>
    public bool IncludeSheetNames
    {
        get
        {
            return this.includeSheetNames;
        }
        set
        {
            this.includeSheetNames = value;
        }
    }

    /// <summary>
    /// Should we return the formula itself, and not
    /// the result it produces? Default is false
    /// </summary>
    /// <value>if set to <c>true</c> [formulas not results].</value>
    public bool FormulasNotResults
    {
        get
        {
            return this.formulasNotResults;
        }
        set
        {
            this.formulasNotResults = value;
        }
    }

    /// <summary>
    /// Retrieves the text contents of the file
    /// </summary>
    /// <value>All the text from the document, always terminated with a newline.</value>
    public override String Text
    {
        get
        {
            String text = null;
            try
            {
                TextListener tl = TriggerExtraction();

                text = tl.text.ToString();
                if (!text.EndsWith("\n", StringComparison.Ordinal))
                {
                    text = text + "\n";
                }
            }
            catch (IOException)
            {
                // NOTE(review): this catch only rethrows; I/O errors propagate unchanged.
                throw;
            }

            return text;
        }
    }

    /// <summary>
    /// Triggers the extraction: streams all workbook records through a
    /// format-tracking listener that accumulates the text.
    /// </summary>
    /// <returns>The listener holding the extracted text.</returns>
    private TextListener TriggerExtraction()
    {
        TextListener tl = new TextListener(includeSheetNames,formulasNotResults);
        FormatTrackingHSSFListener ft = new FormatTrackingHSSFListener(tl);
        tl.ft = ft;

        // Register and process
        HSSFEventFactory factory = new HSSFEventFactory();
        HSSFRequest request = new HSSFRequest();
        request.AddListenerForAllRecords(ft);
        factory.ProcessWorkbookEvents(request, fs);

        return tl;
    }

    // Event listener that turns HSSF records into tab/newline-separated text.
    private class TextListener : IHSSFListener
    {
        // Set by TriggerExtraction; used to format numeric/date cells.
        public FormatTrackingHSSFListener ft;
        // Shared-strings table, captured when its record streams past.
        private SSTRecord sstRecord;

        private IList sheetNames = new ArrayList();
        // Accumulated output text.
        public StringBuilder text = new StringBuilder();
        private int sheetNum = -1;
        private int rowNum;

        // Set when a formula's string result is stored in the following StringRecord.
        private bool outputNextStringValue = false;
        private int nextRow = -1;

        private bool includeSheetNames;
        private bool formulasNotResults;

        public TextListener(bool includeSheetNames, bool formulasNotResults)
        {
            this.includeSheetNames = includeSheetNames;
            this.formulasNotResults = formulasNotResults;
        }

        /// <summary>
        /// Process an HSSF Record. Called when a record occurs in an HSSF file.
        /// Appends cell text to <see cref="text"/>, separating cells in the same
        /// row with tabs and rows with newlines.
        /// </summary>
        /// <param name="record"></param>
        public void ProcessRecord(Record record)
        {
            String thisText = null;
            int thisRow = -1;

            switch (record.Sid)
            {
                case BoundSheetRecord.sid:
                    // Sheet names arrive up front, before the per-sheet BOF records.
                    BoundSheetRecord sr = (BoundSheetRecord)record;
                    sheetNames.Add(sr.Sheetname);
                    break;
                case BOFRecord.sid:
                    BOFRecord bof = (BOFRecord)record;
                    if (bof.Type == BOFRecordType.Worksheet)
                    {
                        sheetNum++;
                        rowNum = -1;

                        if (includeSheetNames)
                        {
                            if (text.Length > 0) text.Append("\n");
                            text.Append(sheetNames[sheetNum]);
                        }
                    }
                    break;
                case SSTRecord.sid:
                    sstRecord = (SSTRecord)record;
                    break;

                case FormulaRecord.sid:
                    FormulaRecord frec = (FormulaRecord)record;
                    thisRow = frec.Row;

                    if (formulasNotResults)
                    {
                        thisText = HSSFFormulaParser.ToFormulaString((HSSFWorkbook)null, frec.ParsedExpression);
                    }
                    else
                    {
                        if (frec.HasCachedResultString)
                        {
                            // Formula result is a string
                            // This is stored in the next record
                            outputNextStringValue = true;
                            nextRow = frec.Row;
                        }
                        else
                        {
                            thisText = FormatNumberDateCell(frec, frec.Value);
                        }
                    }
                    break;
                case StringRecord.sid:
                    if (outputNextStringValue)
                    {
                        // String for formula
                        StringRecord srec = (StringRecord)record;
                        thisText = srec.String;
                        thisRow = nextRow;
                        outputNextStringValue = false;
                    }
                    break;
                case LabelRecord.sid:
                    LabelRecord lrec = (LabelRecord)record;
                    thisRow = lrec.Row;
                    thisText = lrec.Value;
                    break;
                case LabelSSTRecord.sid:
                    // Cell text lives in the shared-strings table captured earlier.
                    LabelSSTRecord lsrec = (LabelSSTRecord)record;
                    thisRow = lsrec.Row;
                    if (sstRecord == null)
                    {
                        throw new Exception("No SST record found");
                    }
                    thisText = sstRecord.GetString(lsrec.SSTIndex).ToString();
                    break;
                case NoteRecord.sid:
                    NoteRecord nrec = (NoteRecord)record;
                    thisRow = nrec.Row;
                    // TODO: Find object to match nrec.GetShapeId()
                    break;
                case NumberRecord.sid:
                    NumberRecord numrec = (NumberRecord)record;
                    thisRow = numrec.Row;
                    thisText = FormatNumberDateCell(numrec, numrec.Value);
                    break;
                default:
                    break;
            }

            if (thisText != null)
            {
                if (thisRow != rowNum)
                {
                    // New row: newline separator (except at the very start).
                    rowNum = thisRow;
                    if (text.Length > 0)
                        text.Append("\n");
                }
                else
                {
                    // Same row: tab separator between cells.
                    text.Append("\t");
                }
                text.Append(thisText);
            }
        }

        /// <summary>
        /// Formats a number or date cell, be that a real number, or the
        /// answer to a formula
        /// </summary>
        /// <param name="cell">The cell.</param>
        /// <param name="value">The value.</param>
        /// <returns>The formatted cell text.</returns>
        private String FormatNumberDateCell(CellValueRecordInterface cell, double value)
        {
            // Get the built in format, if there is one
            int formatIndex = ft.GetFormatIndex(cell);
            String formatString = ft.GetFormatString(cell);

            if (formatString == null)
            {
                return value.ToString(CultureInfo.InvariantCulture);
            }
            else
            {
                // Is it a date?
                if (NPOI.SS.UserModel.DateUtil.IsADateFormat(formatIndex, formatString) &&
                        NPOI.SS.UserModel.DateUtil.IsValidExcelDate(value))
                {
                    // Java wants M not m for month
                    formatString = formatString.Replace('m', 'M');
                    // Change \- into -, if it's there
                    formatString = formatString.Replace("\\\\-", "-");

                    // Format as a date
                    DateTime d = NPOI.SS.UserModel.DateUtil.GetJavaDate(value, false);
                    SimpleDateFormat df = new SimpleDateFormat(formatString);
                    return df.Format(d, CultureInfo.CurrentCulture);
                }
                else
                {
                    if (formatString == "General")
                    {
                        // Some sort of weird default
                        return value.ToString(CultureInfo.InvariantCulture);
                    }

                    // Format as a number
                    DecimalFormat df = new DecimalFormat(formatString);
                    return df.Format(value, CultureInfo.CurrentCulture);
                }
            }
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if AMD64 || ARM64 || (BIT32 && !ARM)
#define HAS_CUSTOM_BLOCKS
#endif
using System.Diagnostics;
using System.Runtime;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using Internal.Runtime.CompilerServices;
#pragma warning disable SA1121 // explicitly using type aliases instead of built-in types
#if BIT64
using nint = System.Int64;
using nuint = System.UInt64;
#else
using nint = System.Int32;
using nuint = System.UInt32;
#endif
namespace System
{
public static partial class Buffer
{
// Copies from one primitive array to another primitive array without
// respecting types. This calls memmove internally. The count and
// offset parameters here are in bytes. If you want to use traditional
// array element indices and counts, use Array.Copy.
// Copies from one primitive array to another primitive array without
// respecting types. This calls memmove internally. The count and
// offset parameters here are in bytes. If you want to use traditional
// array element indices and counts, use Array.Copy.
public static unsafe void BlockCopy(Array src, int srcOffset, Array dst, int dstOffset, int count)
{
    if (src == null)
        throw new ArgumentNullException(nameof(src));
    if (dst == null)
        throw new ArgumentNullException(nameof(dst));

    // Byte length of the source: element count times element size, except for
    // byte[] where LongLength already is the byte length.
    nuint uSrcLen = (nuint)src.LongLength;
    if (src.GetType() != typeof(byte[]))
    {
        if (!IsPrimitiveTypeArray(src))
            throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(src));
        uSrcLen *= (nuint)src.GetElementSize();
    }

    nuint uDstLen = uSrcLen;
    if (src != dst)
    {
        uDstLen = (nuint)dst.LongLength;
        if (dst.GetType() != typeof(byte[]))
        {
            if (!IsPrimitiveTypeArray(dst))
                throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(dst));
            uDstLen *= (nuint)dst.GetElementSize();
        }
    }

    if (srcOffset < 0)
        throw new ArgumentOutOfRangeException(nameof(srcOffset), SR.ArgumentOutOfRange_MustBeNonNegInt32);
    if (dstOffset < 0)
        throw new ArgumentOutOfRangeException(nameof(dstOffset), SR.ArgumentOutOfRange_MustBeNonNegInt32);
    if (count < 0)
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_MustBeNonNegInt32);

    nuint uCount = (nuint)count;
    nuint uSrcOffset = (nuint)srcOffset;
    nuint uDstOffset = (nuint)dstOffset;

    // Range check in unsigned arithmetic: offset and count are each validated
    // non-negative Int32 values above, so their nuint sum cannot wrap.
    if ((uSrcLen < uSrcOffset + uCount) || (uDstLen < uDstOffset + uCount))
        throw new ArgumentException(SR.Argument_InvalidOffLen);

    // memmove semantics: correct even when src and dst are the same array.
    Memmove(ref Unsafe.AddByteOffset(ref dst.GetRawArrayData(), uDstOffset), ref Unsafe.AddByteOffset(ref src.GetRawArrayData(), uSrcOffset), uCount);
}
/// <summary>
/// Returns the length of the given primitive array in bytes
/// (element count times element size); throws for 2GB+ arrays.
/// </summary>
public static int ByteLength(Array array)
{
    // Is the array present?
    if (array == null)
        throw new ArgumentNullException(nameof(array));

    // Is it of primitive types?
    if (!IsPrimitiveTypeArray(array))
        throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(array));

    nuint byteLength = (nuint)array.LongLength * (nuint)array.GetElementSize();

    // This API is exposed both as Buffer.ByteLength and also used indirectly in argument
    // checks for Buffer.GetByte/SetByte.
    //
    // If somebody called Get/SetByte on 2GB+ arrays, there is a decent chance that
    // the computation of the index has overflowed. Thus we intentionally always
    // throw on 2GB+ arrays in Get/SetByte argument checks (even for indices <2GB)
    // to prevent people from running into a trap silently.
    return checked((int)byteLength);
}
/// <summary>
/// Reads the byte at byte offset <paramref name="index"/> from a primitive array.
/// </summary>
public static byte GetByte(Array array, int index)
{
    // array argument validation done via ByteLength
    int byteLength = ByteLength(array);

    // Single unsigned compare also rejects negative indices.
    if ((uint)index >= (uint)byteLength)
        throw new ArgumentOutOfRangeException(nameof(index));

    return Unsafe.Add<byte>(ref array.GetRawArrayData(), index);
}
/// <summary>
/// Writes <paramref name="value"/> at byte offset <paramref name="index"/> into a primitive array.
/// </summary>
public static void SetByte(Array array, int index, byte value)
{
    // array argument validation done via ByteLength
    int byteLength = ByteLength(array);

    // Single unsigned compare also rejects negative indices.
    if ((uint)index >= (uint)byteLength)
        throw new ArgumentOutOfRangeException(nameof(index));

    Unsafe.Add<byte>(ref array.GetRawArrayData(), index) = value;
}
// This method has different signature for x64 and other platforms and is done for performance reasons.
/// <summary>Zero-fills <paramref name="len"/> bytes starting at <paramref name="dest"/>.</summary>
internal static unsafe void ZeroMemory(byte* dest, nuint len) =>
    SpanHelpers.ClearWithoutReferences(ref *dest, len);
// The attributes on this method are chosen for best JIT performance.
// Please do not edit unless intentional.
/// <summary>
/// Copies <paramref name="sourceBytesToCopy"/> bytes from <paramref name="source"/> to
/// <paramref name="destination"/>; throws when the destination buffer is too small.
/// Overlapping buffers are handled (memmove semantics).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
[CLSCompliant(false)]
public static unsafe void MemoryCopy(void* source, void* destination, long destinationSizeInBytes, long sourceBytesToCopy)
{
    if (sourceBytesToCopy > destinationSizeInBytes)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.sourceBytesToCopy);
    }

    // checked: a negative count would otherwise silently wrap to a huge nuint.
    Memmove((byte*)destination, (byte*)source, checked((nuint)sourceBytesToCopy));
}
// The attributes on this method are chosen for best JIT performance.
// Please do not edit unless intentional.
/// <summary>
/// Unsigned-count overload of <see cref="MemoryCopy(void*, void*, long, long)"/>:
/// copies <paramref name="sourceBytesToCopy"/> bytes with memmove semantics,
/// throwing when the destination buffer is too small.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
[CLSCompliant(false)]
public static unsafe void MemoryCopy(void* source, void* destination, ulong destinationSizeInBytes, ulong sourceBytesToCopy)
{
    if (sourceBytesToCopy > destinationSizeInBytes)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.sourceBytesToCopy);
    }

    // checked: on 32-bit, a count above nuint.MaxValue must throw rather than truncate.
    Memmove((byte*)destination, (byte*)source, checked((nuint)sourceBytesToCopy));
}
// This method has different signature for x64 and other platforms and is done for performance reasons.
/// <summary>
/// Copies <paramref name="len"/> bytes from <paramref name="src"/> to
/// <paramref name="dest"/> with memmove semantics. Overlapping buffers and
/// copies larger than MemmoveNativeThreshold are delegated to the native
/// memmove; everything else is copied inline with fixed-size block moves.
/// </summary>
// This method has different signature for x64 and other platforms and is done for performance reasons.
internal static unsafe void Memmove(byte* dest, byte* src, nuint len)
{
    // P/Invoke into the native version when the buffers are overlapping.
    // Unsigned subtraction detects overlap in either direction: if the distance
    // between the pointers (mod 2^N) is smaller than len, the ranges intersect.
    if (((nuint)dest - (nuint)src < len) || ((nuint)src - (nuint)dest < len))
    {
        goto PInvoke;
    }

    byte* srcEnd = src + len;
    byte* destEnd = dest + len;

    if (len <= 16) goto MCPY02;
    if (len > 64) goto MCPY05;

    MCPY00:
    // Copy bytes which are multiples of 16 and leave the remainder for MCPY01 to handle.
    Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
    *(Block16*)dest = *(Block16*)src;                   // [0,16]
#elif BIT64
    *(long*)dest = *(long*)src;
    *(long*)(dest + 8) = *(long*)(src + 8);             // [0,16]
#else
    *(int*)dest = *(int*)src;
    *(int*)(dest + 4) = *(int*)(src + 4);
    *(int*)(dest + 8) = *(int*)(src + 8);
    *(int*)(dest + 12) = *(int*)(src + 12);             // [0,16]
#endif
    if (len <= 32) goto MCPY01;
#if HAS_CUSTOM_BLOCKS
    *(Block16*)(dest + 16) = *(Block16*)(src + 16);     // [0,32]
#elif BIT64
    *(long*)(dest + 16) = *(long*)(src + 16);
    *(long*)(dest + 24) = *(long*)(src + 24);           // [0,32]
#else
    *(int*)(dest + 16) = *(int*)(src + 16);
    *(int*)(dest + 20) = *(int*)(src + 20);
    *(int*)(dest + 24) = *(int*)(src + 24);
    *(int*)(dest + 28) = *(int*)(src + 28);             // [0,32]
#endif
    if (len <= 48) goto MCPY01;
#if HAS_CUSTOM_BLOCKS
    *(Block16*)(dest + 32) = *(Block16*)(src + 32);     // [0,48]
#elif BIT64
    *(long*)(dest + 32) = *(long*)(src + 32);
    *(long*)(dest + 40) = *(long*)(src + 40);           // [0,48]
#else
    *(int*)(dest + 32) = *(int*)(src + 32);
    *(int*)(dest + 36) = *(int*)(src + 36);
    *(int*)(dest + 40) = *(int*)(src + 40);
    *(int*)(dest + 44) = *(int*)(src + 44);             // [0,48]
#endif

    MCPY01:
    // Unconditionally copy the last 16 bytes using destEnd and srcEnd and return.
    // (The trailing block may overlap what was just written; that is harmless
    // since the buffers themselves do not overlap.)
    Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
    *(Block16*)(destEnd - 16) = *(Block16*)(srcEnd - 16);
#elif BIT64
    *(long*)(destEnd - 16) = *(long*)(srcEnd - 16);
    *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
    *(int*)(destEnd - 16) = *(int*)(srcEnd - 16);
    *(int*)(destEnd - 12) = *(int*)(srcEnd - 12);
    *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
    *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
    return;

    MCPY02:
    // Copy the first 8 bytes and then unconditionally copy the last 8 bytes and return.
    // (len & 24) == 0 means len < 8 here, since len <= 16 on this path.
    if ((len & 24) == 0) goto MCPY03;
    Debug.Assert(len >= 8 && len <= 16);
#if BIT64
    *(long*)dest = *(long*)src;
    *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
    *(int*)dest = *(int*)src;
    *(int*)(dest + 4) = *(int*)(src + 4);
    *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
    *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
    return;

    MCPY03:
    // Copy the first 4 bytes and then unconditionally copy the last 4 bytes and return.
    if ((len & 4) == 0) goto MCPY04;
    Debug.Assert(len >= 4 && len < 8);
    *(int*)dest = *(int*)src;
    *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
    return;

    MCPY04:
    // Copy the first byte. For pending bytes, do an unconditionally copy of the last 2 bytes and return.
    Debug.Assert(len < 4);
    if (len == 0) return;
    *dest = *src;
    if ((len & 2) == 0) return;
    *(short*)(destEnd - 2) = *(short*)(srcEnd - 2);
    return;

    MCPY05:
    // PInvoke to the native version when the copy length exceeds the threshold.
    if (len > MemmoveNativeThreshold)
    {
        goto PInvoke;
    }

    // Copy 64-bytes at a time until the remainder is less than 64.
    // If remainder is greater than 16 bytes, then jump to MCPY00. Otherwise, unconditionally copy the last 16 bytes and return.
    Debug.Assert(len > 64 && len <= MemmoveNativeThreshold);
    nuint n = len >> 6;

    MCPY06:
#if HAS_CUSTOM_BLOCKS
    *(Block64*)dest = *(Block64*)src;
#elif BIT64
    *(long*)dest = *(long*)src;
    *(long*)(dest + 8) = *(long*)(src + 8);
    *(long*)(dest + 16) = *(long*)(src + 16);
    *(long*)(dest + 24) = *(long*)(src + 24);
    *(long*)(dest + 32) = *(long*)(src + 32);
    *(long*)(dest + 40) = *(long*)(src + 40);
    *(long*)(dest + 48) = *(long*)(src + 48);
    *(long*)(dest + 56) = *(long*)(src + 56);
#else
    *(int*)dest = *(int*)src;
    *(int*)(dest + 4) = *(int*)(src + 4);
    *(int*)(dest + 8) = *(int*)(src + 8);
    *(int*)(dest + 12) = *(int*)(src + 12);
    *(int*)(dest + 16) = *(int*)(src + 16);
    *(int*)(dest + 20) = *(int*)(src + 20);
    *(int*)(dest + 24) = *(int*)(src + 24);
    *(int*)(dest + 28) = *(int*)(src + 28);
    *(int*)(dest + 32) = *(int*)(src + 32);
    *(int*)(dest + 36) = *(int*)(src + 36);
    *(int*)(dest + 40) = *(int*)(src + 40);
    *(int*)(dest + 44) = *(int*)(src + 44);
    *(int*)(dest + 48) = *(int*)(src + 48);
    *(int*)(dest + 52) = *(int*)(src + 52);
    *(int*)(dest + 56) = *(int*)(src + 56);
    *(int*)(dest + 60) = *(int*)(src + 60);
#endif
    dest += 64;
    src += 64;
    n--;
    if (n != 0) goto MCPY06;

    len %= 64;
    if (len > 16) goto MCPY00;
#if HAS_CUSTOM_BLOCKS
    *(Block16*)(destEnd - 16) = *(Block16*)(srcEnd - 16);
#elif BIT64
    *(long*)(destEnd - 16) = *(long*)(srcEnd - 16);
    *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
    *(int*)(destEnd - 16) = *(int*)(srcEnd - 16);
    *(int*)(destEnd - 12) = *(int*)(srcEnd - 12);
    *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
    *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
    return;

    PInvoke:
    _Memmove(dest, src, len);
}
// Typed element copy: dispatches to the raw byte memmove for blittable T, or to
// the GC-aware bulk move when T is (or contains) an object reference, so that
// every pointer-sized write goes through the card-table write barrier.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Memmove<T>(ref T destination, ref T source, nuint elementCount)
{
    // Total payload size in bytes.
    nuint byteCount = elementCount * (nuint)Unsafe.SizeOf<T>();

    if (RuntimeHelpers.IsReferenceOrContainsReferences<T>())
    {
        // Non-blittable: must preserve GC reference tracking.
        BulkMoveWithWriteBarrier(
            ref Unsafe.As<T, byte>(ref destination),
            ref Unsafe.As<T, byte>(ref source),
            byteCount);
    }
    else
    {
        // Blittable: a plain byte-wise move is safe.
        Memmove(
            ref Unsafe.As<T, byte>(ref destination),
            ref Unsafe.As<T, byte>(ref source),
            byteCount);
    }
}
// Managed (ref-based) memmove. Non-overlapping buffers are forward-copied in
// place using size-bucketed block copies (labels MCPY00..MCPY06); overlapping
// buffers and lengths above MemmoveNativeThreshold fall back to the native
// memmove via _Memmove.
private static void Memmove(ref byte dest, ref byte src, nuint len)
{
    // P/Invoke into the native version when the buffers are overlapping.
    // The unsigned compare of ByteOffset catches overlap in either direction:
    // a negative offset wraps to a huge nuint and fails the `< len` test.
    if (((nuint)Unsafe.ByteOffset(ref src, ref dest) < len) || ((nuint)Unsafe.ByteOffset(ref dest, ref src) < len))
    {
        goto BuffersOverlap;
    }

    // One-past-the-end refs, used below for the "unconditional tail copy" trick
    // (copying the last N bytes relative to the end tolerates any remainder).
    // Use "(IntPtr)(nint)len" to avoid overflow checking on the explicit cast to IntPtr
    ref byte srcEnd = ref Unsafe.Add(ref src, (IntPtr)(nint)len);
    ref byte destEnd = ref Unsafe.Add(ref dest, (IntPtr)(nint)len);

    if (len <= 16)
        goto MCPY02;
    if (len > 64)
        goto MCPY05;

MCPY00:
    // Copy bytes which are multiples of 16 and leave the remainder for MCPY01 to handle.
    Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
    Unsafe.As<byte, Block16>(ref dest) = Unsafe.As<byte, Block16>(ref src); // [0,16]
#elif BIT64
    Unsafe.As<byte, long>(ref dest) = Unsafe.As<byte, long>(ref src);
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 8)); // [0,16]
#else
    Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 4));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 8));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 12)); // [0,16]
#endif
    if (len <= 32)
        goto MCPY01;
#if HAS_CUSTOM_BLOCKS
    Unsafe.As<byte, Block16>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref src, 16)); // [0,32]
#elif BIT64
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 16));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 24)); // [0,32]
#else
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 16));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 20)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 20));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 24));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 28)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 28)); // [0,32]
#endif
    if (len <= 48)
        goto MCPY01;
#if HAS_CUSTOM_BLOCKS
    Unsafe.As<byte, Block16>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref src, 32)); // [0,48]
#elif BIT64
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 32));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 40)); // [0,48]
#else
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 32));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 36)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 36));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 40));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 44)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 44)); // [0,48]
#endif

MCPY01:
    // Unconditionally copy the last 16 bytes using destEnd and srcEnd and return.
    // (May re-copy bytes already written above; that is harmless since the
    // buffers do not overlap.)
    Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
    Unsafe.As<byte, Block16>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref srcEnd, -16));
#elif BIT64
    Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -16));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -8));
#else
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -16));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -12));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -8));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
#endif
    return;

MCPY02:
    // Copy the first 8 bytes and then unconditionally copy the last 8 bytes and return.
    // (len & 24) == 0 here means len < 8 (given len <= 16), handled by MCPY03/MCPY04.
    if ((len & 24) == 0)
        goto MCPY03;
    Debug.Assert(len >= 8 && len <= 16);
#if BIT64
    Unsafe.As<byte, long>(ref dest) = Unsafe.As<byte, long>(ref src);
    Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -8));
#else
    Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 4));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -8));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
#endif
    return;

MCPY03:
    // Copy the first 4 bytes and then unconditionally copy the last 4 bytes and return.
    if ((len & 4) == 0)
        goto MCPY04;
    Debug.Assert(len >= 4 && len < 8);
    Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
    return;

MCPY04:
    // Copy the first byte. For pending bytes, do an unconditionally copy of the last 2 bytes and return.
    Debug.Assert(len < 4);
    if (len == 0)
        return;
    dest = src; // single-byte copy (ref assignment of the byte value)
    if ((len & 2) == 0)
        return;
    Unsafe.As<byte, short>(ref Unsafe.Add(ref destEnd, -2)) = Unsafe.As<byte, short>(ref Unsafe.Add(ref srcEnd, -2));
    return;

MCPY05:
    // PInvoke to the native version when the copy length exceeds the threshold.
    if (len > MemmoveNativeThreshold)
    {
        goto PInvoke;
    }
    // Copy 64-bytes at a time until the remainder is less than 64.
    // If remainder is greater than 16 bytes, then jump to MCPY00. Otherwise, unconditionally copy the last 16 bytes and return.
    Debug.Assert(len > 64 && len <= MemmoveNativeThreshold);
    nuint n = len >> 6; // number of full 64-byte chunks

MCPY06:
#if HAS_CUSTOM_BLOCKS
    Unsafe.As<byte, Block64>(ref dest) = Unsafe.As<byte, Block64>(ref src);
#elif BIT64
    Unsafe.As<byte, long>(ref dest) = Unsafe.As<byte, long>(ref src);
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 8));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 16));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 24));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 32));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 40));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 48)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 48));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 56)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 56));
#else
    Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 4));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 8));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 12));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 16));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 20)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 20));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 24));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 28)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 28));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 32));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 36)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 36));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 40));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 44)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 44));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 48)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 48));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 52)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 52));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 56)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 56));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 60)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 60));
#endif
    dest = ref Unsafe.Add(ref dest, 64);
    src = ref Unsafe.Add(ref src, 64);
    n--;
    if (n != 0)
        goto MCPY06;

    len %= 64;
    if (len > 16)
        goto MCPY00;
#if HAS_CUSTOM_BLOCKS
    Unsafe.As<byte, Block16>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref srcEnd, -16));
#elif BIT64
    Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -16));
    Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -8));
#else
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -16));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -12));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -8));
    Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
#endif
    return;

BuffersOverlap:
    // If the buffers overlap perfectly, there's no point to copying the data.
    if (Unsafe.AreSame(ref dest, ref src))
    {
        return;
    }

PInvoke:
    _Memmove(ref dest, ref src, len);
}
// Non-inlinable wrapper around the QCall that avoids polluting the fast path
// with P/Invoke prolog/epilog.
[MethodImpl(MethodImplOptions.NoInlining)]
private static unsafe void _Memmove(byte* dest, byte* src, nuint len) =>
    __Memmove(dest, src, len);
// Non-inlinable wrapper around the QCall that avoids polluting the fast path
// with P/Invoke prolog/epilog. Pins both refs (a single fixed statement may
// declare multiple pointers of the same type) and forwards to the raw QCall.
[MethodImpl(MethodImplOptions.NoInlining)]
private static unsafe void _Memmove(ref byte dest, ref byte src, nuint len)
{
    fixed (byte* pDest = &dest, pSrc = &src)
    {
        __Memmove(pDest, pSrc, len);
    }
}
#if HAS_CUSTOM_BLOCKS
// Opaque fixed-size structs used only as copy units: assigning a Block16/Block64
// moves 16/64 bytes in a single struct-copy statement (see the MCPY labels above).
[StructLayout(LayoutKind.Sequential, Size = 16)]
private struct Block16 { }

[StructLayout(LayoutKind.Sequential, Size = 64)]
private struct Block64 { }
#endif // HAS_CUSTOM_BLOCKS
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.Compute.V1.Tests
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedNetworksClientTest
{
    // NOTE(review): this file is generated ("DO NOT EDIT!"); the comments below
    // were added for review readability only and will be lost on regeneration.

    // Get(GetNetworkRequest): the strict mock's canned Network is returned as-is.
    [xunit::FactAttribute]
    public void GetRequestObject()
    {
        moq::Mock<Networks.NetworksClient> mockGrpcClient = new moq::Mock<Networks.NetworksClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetNetworkRequest request = new GetNetworkRequest
        {
            Project = "projectaa6ff846",
            Network = "networkd22ce091",
        };
        Network expectedResponse = new Network
        {
            Id = 11672635353343658936UL,
            Mtu = 1280318054,
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            CreationTimestamp = "creation_timestamp235e59a1",
            IPv4Range = "I_pv4_range613b129f",
            Peerings =
            {
                new NetworkPeering(),
            },
            GatewayIPv4 = "gateway_i_pv47f9ce361",
            AutoCreateSubnetworks = false,
            Subnetworks =
            {
                "subnetworks81f34af0",
            },
            Description = "description2cf9da67",
            SelfLink = "self_link7e87f12d",
            RoutingConfig = new NetworkRoutingConfig(),
        };
        mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        NetworksClient client = new NetworksClientImpl(mockGrpcClient.Object, null);
        Network response = client.Get(request);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // GetAsync(GetNetworkRequest): both the CallSettings and CancellationToken
    // overloads must yield the mocked response.
    [xunit::FactAttribute]
    public async stt::Task GetRequestObjectAsync()
    {
        moq::Mock<Networks.NetworksClient> mockGrpcClient = new moq::Mock<Networks.NetworksClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetNetworkRequest request = new GetNetworkRequest
        {
            Project = "projectaa6ff846",
            Network = "networkd22ce091",
        };
        Network expectedResponse = new Network
        {
            Id = 11672635353343658936UL,
            Mtu = 1280318054,
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            CreationTimestamp = "creation_timestamp235e59a1",
            IPv4Range = "I_pv4_range613b129f",
            Peerings =
            {
                new NetworkPeering(),
            },
            GatewayIPv4 = "gateway_i_pv47f9ce361",
            AutoCreateSubnetworks = false,
            Subnetworks =
            {
                "subnetworks81f34af0",
            },
            Description = "description2cf9da67",
            SelfLink = "self_link7e87f12d",
            RoutingConfig = new NetworkRoutingConfig(),
        };
        mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Network>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        NetworksClient client = new NetworksClientImpl(mockGrpcClient.Object, null);
        Network responseCallSettings = await client.GetAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        Network responseCancellationToken = await client.GetAsync(request, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }

    // Get(project, network): the flattened-argument overload builds the same
    // request object and returns the mocked response.
    [xunit::FactAttribute]
    public void Get()
    {
        moq::Mock<Networks.NetworksClient> mockGrpcClient = new moq::Mock<Networks.NetworksClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetNetworkRequest request = new GetNetworkRequest
        {
            Project = "projectaa6ff846",
            Network = "networkd22ce091",
        };
        Network expectedResponse = new Network
        {
            Id = 11672635353343658936UL,
            Mtu = 1280318054,
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            CreationTimestamp = "creation_timestamp235e59a1",
            IPv4Range = "I_pv4_range613b129f",
            Peerings =
            {
                new NetworkPeering(),
            },
            GatewayIPv4 = "gateway_i_pv47f9ce361",
            AutoCreateSubnetworks = false,
            Subnetworks =
            {
                "subnetworks81f34af0",
            },
            Description = "description2cf9da67",
            SelfLink = "self_link7e87f12d",
            RoutingConfig = new NetworkRoutingConfig(),
        };
        mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        NetworksClient client = new NetworksClientImpl(mockGrpcClient.Object, null);
        Network response = client.Get(request.Project, request.Network);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // GetAsync(project, network): flattened async overloads (CallSettings and
    // CancellationToken variants).
    [xunit::FactAttribute]
    public async stt::Task GetAsync()
    {
        moq::Mock<Networks.NetworksClient> mockGrpcClient = new moq::Mock<Networks.NetworksClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetNetworkRequest request = new GetNetworkRequest
        {
            Project = "projectaa6ff846",
            Network = "networkd22ce091",
        };
        Network expectedResponse = new Network
        {
            Id = 11672635353343658936UL,
            Mtu = 1280318054,
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            CreationTimestamp = "creation_timestamp235e59a1",
            IPv4Range = "I_pv4_range613b129f",
            Peerings =
            {
                new NetworkPeering(),
            },
            GatewayIPv4 = "gateway_i_pv47f9ce361",
            AutoCreateSubnetworks = false,
            Subnetworks =
            {
                "subnetworks81f34af0",
            },
            Description = "description2cf9da67",
            SelfLink = "self_link7e87f12d",
            RoutingConfig = new NetworkRoutingConfig(),
        };
        mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Network>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        NetworksClient client = new NetworksClientImpl(mockGrpcClient.Object, null);
        Network responseCallSettings = await client.GetAsync(request.Project, request.Network, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        Network responseCancellationToken = await client.GetAsync(request.Project, request.Network, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }

    // GetEffectiveFirewalls(GetEffectiveFirewallsNetworkRequest): request-object overload.
    [xunit::FactAttribute]
    public void GetEffectiveFirewallsRequestObject()
    {
        moq::Mock<Networks.NetworksClient> mockGrpcClient = new moq::Mock<Networks.NetworksClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetEffectiveFirewallsNetworkRequest request = new GetEffectiveFirewallsNetworkRequest
        {
            Project = "projectaa6ff846",
            Network = "networkd22ce091",
        };
        NetworksGetEffectiveFirewallsResponse expectedResponse = new NetworksGetEffectiveFirewallsResponse
        {
            Firewalls = { new Firewall(), },
            FirewallPolicys =
            {
                new NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(),
            },
        };
        mockGrpcClient.Setup(x => x.GetEffectiveFirewalls(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        NetworksClient client = new NetworksClientImpl(mockGrpcClient.Object, null);
        NetworksGetEffectiveFirewallsResponse response = client.GetEffectiveFirewalls(request);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // GetEffectiveFirewallsAsync(GetEffectiveFirewallsNetworkRequest): async
    // request-object overloads (CallSettings and CancellationToken variants).
    [xunit::FactAttribute]
    public async stt::Task GetEffectiveFirewallsRequestObjectAsync()
    {
        moq::Mock<Networks.NetworksClient> mockGrpcClient = new moq::Mock<Networks.NetworksClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetEffectiveFirewallsNetworkRequest request = new GetEffectiveFirewallsNetworkRequest
        {
            Project = "projectaa6ff846",
            Network = "networkd22ce091",
        };
        NetworksGetEffectiveFirewallsResponse expectedResponse = new NetworksGetEffectiveFirewallsResponse
        {
            Firewalls = { new Firewall(), },
            FirewallPolicys =
            {
                new NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(),
            },
        };
        mockGrpcClient.Setup(x => x.GetEffectiveFirewallsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<NetworksGetEffectiveFirewallsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        NetworksClient client = new NetworksClientImpl(mockGrpcClient.Object, null);
        NetworksGetEffectiveFirewallsResponse responseCallSettings = await client.GetEffectiveFirewallsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        NetworksGetEffectiveFirewallsResponse responseCancellationToken = await client.GetEffectiveFirewallsAsync(request, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }

    // GetEffectiveFirewalls(project, network): flattened-argument overload.
    [xunit::FactAttribute]
    public void GetEffectiveFirewalls()
    {
        moq::Mock<Networks.NetworksClient> mockGrpcClient = new moq::Mock<Networks.NetworksClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetEffectiveFirewallsNetworkRequest request = new GetEffectiveFirewallsNetworkRequest
        {
            Project = "projectaa6ff846",
            Network = "networkd22ce091",
        };
        NetworksGetEffectiveFirewallsResponse expectedResponse = new NetworksGetEffectiveFirewallsResponse
        {
            Firewalls = { new Firewall(), },
            FirewallPolicys =
            {
                new NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(),
            },
        };
        mockGrpcClient.Setup(x => x.GetEffectiveFirewalls(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        NetworksClient client = new NetworksClientImpl(mockGrpcClient.Object, null);
        NetworksGetEffectiveFirewallsResponse response = client.GetEffectiveFirewalls(request.Project, request.Network);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // GetEffectiveFirewallsAsync(project, network): flattened async overloads.
    [xunit::FactAttribute]
    public async stt::Task GetEffectiveFirewallsAsync()
    {
        moq::Mock<Networks.NetworksClient> mockGrpcClient = new moq::Mock<Networks.NetworksClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetEffectiveFirewallsNetworkRequest request = new GetEffectiveFirewallsNetworkRequest
        {
            Project = "projectaa6ff846",
            Network = "networkd22ce091",
        };
        NetworksGetEffectiveFirewallsResponse expectedResponse = new NetworksGetEffectiveFirewallsResponse
        {
            Firewalls = { new Firewall(), },
            FirewallPolicys =
            {
                new NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(),
            },
        };
        mockGrpcClient.Setup(x => x.GetEffectiveFirewallsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<NetworksGetEffectiveFirewallsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        NetworksClient client = new NetworksClientImpl(mockGrpcClient.Object, null);
        NetworksGetEffectiveFirewallsResponse responseCallSettings = await client.GetEffectiveFirewallsAsync(request.Project, request.Network, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        NetworksGetEffectiveFirewallsResponse responseCancellationToken = await client.GetEffectiveFirewallsAsync(request.Project, request.Network, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Reflection;
using System.Diagnostics;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Reflection.Runtime.General;
using System.Reflection.Runtime.TypeInfos;
using System.Reflection.Runtime.ParameterInfos;
using System.Reflection.Runtime.CustomAttributes;
using Internal.Reflection.Core.Execution;
using Internal.Reflection.Tracing;
namespace System.Reflection.Runtime.EventInfos
{
//
// The runtime's implementation of EventInfo.
//
[DebuggerDisplay("{_debugName}")]
internal abstract partial class RuntimeEventInfo : EventInfo, ITraceableTypeMember
{
    protected RuntimeEventInfo(RuntimeTypeInfo contextTypeInfo, RuntimeTypeInfo reflectedType)
    {
        ContextTypeInfo = contextTypeInfo;
        ReflectedTypeInfo = reflectedType;
    }

    // The "add" accessor, resolved lazily and cached in _lazyAdder.
    // An event with no adder is malformed metadata, hence the throw.
    public sealed override MethodInfo AddMethod
    {
        get
        {
#if ENABLE_REFLECTION_TRACE
            if (ReflectionTrace.Enabled)
                ReflectionTrace.EventInfo_AddMethod(this);
#endif
            MethodInfo adder = _lazyAdder;
            if (adder == null)
            {
                adder = GetEventMethod(EventMethodSemantics.Add);
                if (adder != null)
                    return _lazyAdder = adder;

                throw new BadImageFormatException(); // Added is a required method.
            }
            return adder;
        }
    }

    public sealed override Type DeclaringType
    {
        get
        {
#if ENABLE_REFLECTION_TRACE
            if (ReflectionTrace.Enabled)
                ReflectionTrace.EventInfo_DeclaringType(this);
#endif
            return ContextTypeInfo;
        }
    }

    // "Other" (non add/remove/raise) accessor methods are not supported on this runtime.
    public sealed override MethodInfo[] GetOtherMethods(bool nonPublic)
    {
        throw new PlatformNotSupportedException();
    }

    public abstract override bool HasSameMetadataDefinitionAs(MemberInfo other);

    public sealed override Module Module
    {
        get
        {
            return DefiningTypeInfo.Module;
        }
    }

    public sealed override String Name
    {
        get
        {
#if ENABLE_REFLECTION_TRACE
            if (ReflectionTrace.Enabled)
                ReflectionTrace.EventInfo_Name(this);
#endif
            return MetadataName;
        }
    }

    public sealed override Type ReflectedType
    {
        get
        {
            return ReflectedTypeInfo;
        }
    }

    // The "raise" accessor. Not cached, and may be null (GetEventMethod returns
    // null when no such accessor exists).
    public sealed override MethodInfo RaiseMethod
    {
        get
        {
#if ENABLE_REFLECTION_TRACE
            if (ReflectionTrace.Enabled)
                ReflectionTrace.EventInfo_RaiseMethod(this);
#endif
            return GetEventMethod(EventMethodSemantics.Fire);
        }
    }

    // The "remove" accessor, resolved lazily and cached in _lazyRemover
    // (mirrors AddMethod above).
    public sealed override MethodInfo RemoveMethod
    {
        get
        {
#if ENABLE_REFLECTION_TRACE
            if (ReflectionTrace.Enabled)
                ReflectionTrace.EventInfo_RemoveMethod(this);
#endif
            MethodInfo remover = _lazyRemover;
            if (remover == null)
            {
                remover = GetEventMethod(EventMethodSemantics.Remove);
                if (remover != null)
                    return _lazyRemover = remover;

                throw new BadImageFormatException(); // Removed is a required method.
            }
            return remover;
        }
    }

    // Renders "<handler-type-string> <event-name>" using the adder's first
    // parameter as the handler type.
    public sealed override String ToString()
    {
        MethodInfo addMethod = this.AddMethod;
        ParameterInfo[] parameters = addMethod.GetParametersNoCopy();
        if (parameters.Length == 0)
            throw new InvalidOperationException(); // Legacy: Why is a ToString() intentionally throwing an exception?
        RuntimeParameterInfo runtimeParameterInfo = (RuntimeParameterInfo)(parameters[0]);
        return runtimeParameterInfo.ParameterTypeString + " " + this.Name;
    }

    String ITraceableTypeMember.MemberName
    {
        get
        {
            return MetadataName;
        }
    }

    Type ITraceableTypeMember.ContainingType
    {
        get
        {
            return ContextTypeInfo;
        }
    }

    // Populates _debugName (shown via [DebuggerDisplay]) when running in
    // developer-experience mode or a DEBUG build; returns `this` for chaining.
    protected RuntimeEventInfo WithDebugName()
    {
        bool populateDebugNames = DeveloperExperienceState.DeveloperExperienceModeEnabled;
#if DEBUG
        populateDebugNames = true;
#endif
        if (!populateDebugNames)
            return this;

        if (_debugName == null)
        {
            _debugName = "Constructing..."; // Protect against any inadvertent reentrancy.
            _debugName = MetadataName;
        }
        return this;
    }

    // Types that derive from RuntimeEventInfo must implement the following public surface area members
    public abstract override EventAttributes Attributes { get; }
    public abstract override IEnumerable<CustomAttributeData> CustomAttributes { get; }
    public abstract override bool Equals(Object obj);
    public abstract override int GetHashCode();
    public abstract override Type EventHandlerType { get; }
    public abstract override int MetadataToken { get; }

    protected enum EventMethodSemantics
    {
        Add,
        Remove,
        Fire
    }

    /// <summary>
    /// Override to return the Method that corresponds to the specified semantic.
    /// Return null if no method is to be found.
    /// </summary>
    protected abstract MethodInfo GetEventMethod(EventMethodSemantics whichMethod);

    /// <summary>
    /// Override to provide the metadata based name of an event. (Different from the Name
    /// property in that it does not go into the reflection trace logic.)
    /// </summary>
    protected abstract string MetadataName { get; }

    /// <summary>
    /// Return the DefiningTypeInfo as a RuntimeTypeInfo (instead of as a format specific type info)
    /// </summary>
    protected abstract RuntimeTypeInfo DefiningTypeInfo { get; }

    protected readonly RuntimeTypeInfo ContextTypeInfo;
    protected readonly RuntimeTypeInfo ReflectedTypeInfo;

    // Lazily-initialized accessor caches; volatile so a published value is seen
    // by other threads (worst case under a race: GetEventMethod runs twice).
    private volatile MethodInfo _lazyAdder;
    private volatile MethodInfo _lazyRemover;

    private String _debugName;
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Prism.Events;
using Xunit;
namespace Prism.Tests.Events
{
public class PubSubEventFixture
{
[Fact]
public void EnsureSubscriptionListIsEmptyAfterPublishingAMessage()
{
    // Arrange: subscribe an action that nothing else keeps alive, then collect it.
    TestablePubSubEvent<string> eventUnderTest = new TestablePubSubEvent<string>();
    SubscribeExternalActionWithoutReference(eventUnderTest);
    GC.Collect();

    // Act: publishing prunes subscriptions whose targets have been collected.
    eventUnderTest.Publish("testPayload");

    // Assert
    Assert.True(eventUnderTest.BaseSubscriptions.Count == 0, "Subscriptionlist is not empty");
}
[Fact]
public void EnsureSubscriptionListIsNotEmptyWithoutPublishOrSubscribe()
{
    // Arrange: a collected subscriber stays in the list until something prunes it.
    TestablePubSubEvent<string> eventUnderTest = new TestablePubSubEvent<string>();
    SubscribeExternalActionWithoutReference(eventUnderTest);

    // Act: only collect; no Publish/Subscribe runs the pruning logic.
    GC.Collect();

    // Assert
    Assert.True(eventUnderTest.BaseSubscriptions.Count == 1, "Subscriptionlist is empty");
}
[Fact]
public void EnsureSubscriptionListIsEmptyAfterSubscribeAgainAMessage()
{
    // Arrange: first subscription's target is collected, then a second one is added.
    TestablePubSubEvent<string> eventUnderTest = new TestablePubSubEvent<string>();
    SubscribeExternalActionWithoutReference(eventUnderTest);
    GC.Collect();
    SubscribeExternalActionWithoutReference(eventUnderTest);

    // Act: pruning removes the dead subscription, leaving only the live one.
    eventUnderTest.Prune();

    // Assert
    Assert.True(eventUnderTest.BaseSubscriptions.Count == 1, "Subscriptionlist is empty");
}
// Subscribes a delegate whose target object is not referenced anywhere else, so
// the subscription is kept alive only by the event's internal weak reference.
private static void SubscribeExternalActionWithoutReference(TestablePubSubEvent<string> pubSubEvent) =>
    pubSubEvent.Subscribe(new ExternalAction().ExecuteAction);
[Fact]
public void CanSubscribeAndRaiseEvent()
{
    // Arrange: subscribe with an always-true filter on the publisher thread.
    var eventUnderTest = new TestablePubSubEvent<string>();
    bool wasRaised = false;
    eventUnderTest.Subscribe(_ => wasRaised = true, ThreadOption.PublisherThread, true, _ => true);

    // Act
    eventUnderTest.Publish(null);

    // Assert
    Assert.True(wasRaised);
}
[Fact]
public void CanSubscribeAndRaiseEventNonGeneric()
{
    // Arrange: the non-generic event carries no payload.
    var eventUnderTest = new TestablePubSubEvent();
    bool wasRaised = false;
    eventUnderTest.Subscribe(() => wasRaised = true, ThreadOption.PublisherThread, true);

    // Act
    eventUnderTest.Publish();

    // Assert
    Assert.True(wasRaised);
}
[Fact]
public void CanSubscribeAndRaiseCustomEvent()
{
    // Arrange
    TestablePubSubEvent<Payload> customEvent = new TestablePubSubEvent<Payload>();
    Payload publishedPayload = new Payload();
    ActionHelper subscriber = new ActionHelper();
    customEvent.Subscribe(subscriber.Action);

    // Act
    customEvent.Publish(publishedPayload);

    // Assert: the exact payload instance reaches the subscriber.
    Assert.Same(subscriber.ActionArg<Payload>(), publishedPayload);
}
[Fact]
public void CanHaveMultipleSubscribersAndRaiseCustomEvent()
{
    // Arrange: two independent subscribers on the same event.
    TestablePubSubEvent<Payload> customEvent = new TestablePubSubEvent<Payload>();
    Payload publishedPayload = new Payload();
    ActionHelper firstSubscriber = new ActionHelper();
    ActionHelper secondSubscriber = new ActionHelper();
    customEvent.Subscribe(firstSubscriber.Action);
    customEvent.Subscribe(secondSubscriber.Action);

    // Act
    customEvent.Publish(publishedPayload);

    // Assert: both subscribers receive the same payload instance.
    Assert.Same(firstSubscriber.ActionArg<Payload>(), publishedPayload);
    Assert.Same(secondSubscriber.ActionArg<Payload>(), publishedPayload);
}
[Fact]
public void CanHaveMultipleSubscribersAndRaiseEvent()
{
    // Arrange: two subscribers on the non-generic (payload-less) event.
    TestablePubSubEvent customEvent = new TestablePubSubEvent();
    ActionHelper firstSubscriber = new ActionHelper();
    ActionHelper secondSubscriber = new ActionHelper();
    customEvent.Subscribe(firstSubscriber.Action);
    customEvent.Subscribe(secondSubscriber.Action);

    // Act
    customEvent.Publish();

    // Assert: both callbacks fired.
    Assert.True(firstSubscriber.ActionCalled);
    Assert.True(secondSubscriber.ActionCalled);
}
[Fact]
public void SubscribeTakesExecuteDelegateThreadOptionAndFilter()
{
    // NOTE(review): despite the name, this case uses the default Subscribe
    // overload only; filter behavior is exercised by the tests below.
    var pubSubEvent = new TestablePubSubEvent<string>();
    var handler = new ActionHelper();
    pubSubEvent.Subscribe(handler.Action);

    pubSubEvent.Publish("test");

    Assert.Equal("test", handler.ActionArg<string>());
}
[Fact]
public void FilterEnablesActionTarget()
{
    // Keep-alive subscriptions: only the subscriber whose filter returns true
    // may observe the publication.
    var pubSubEvent = new TestablePubSubEvent<string>();
    var passingFilter = new MockFilter { FilterReturnValue = true };
    var passingHandler = new ActionHelper();
    var blockingFilter = new MockFilter { FilterReturnValue = false };
    var blockedHandler = new ActionHelper();
    pubSubEvent.Subscribe(passingHandler.Action, ThreadOption.PublisherThread, true, passingFilter.FilterString);
    pubSubEvent.Subscribe(blockedHandler.Action, ThreadOption.PublisherThread, true, blockingFilter.FilterString);

    pubSubEvent.Publish("test");

    Assert.True(passingHandler.ActionCalled);
    Assert.False(blockedHandler.ActionCalled);
}
[Fact]
public void FilterEnablesActionTarget_Weak()
{
    // Same scenario using the weak-reference Subscribe overload.
    var pubSubEvent = new TestablePubSubEvent<string>();
    var passingFilter = new MockFilter { FilterReturnValue = true };
    var passingHandler = new ActionHelper();
    var blockingFilter = new MockFilter { FilterReturnValue = false };
    var blockedHandler = new ActionHelper();
    pubSubEvent.Subscribe(passingHandler.Action, passingFilter.FilterString);
    pubSubEvent.Subscribe(blockedHandler.Action, blockingFilter.FilterString);

    pubSubEvent.Publish("test");

    Assert.True(passingHandler.ActionCalled);
    Assert.False(blockedHandler.ActionCalled);
}
[Fact]
// Verifies the default Subscribe overload dispatches on the publisher's
// current synchronization context (i.e. synchronously on the calling thread).
// NOTE(review): the test installs a SynchronizationContext and never restores
// the previous one — confirm this cannot bleed into other tests on the runner.
public void SubscribeDefaultsThreadOptionAndNoFilter()
{
TestablePubSubEvent<string> pubSubEvent = new TestablePubSubEvent<string>();
SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
SynchronizationContext calledSyncContext = null;
var myAction = new ActionHelper()
{
ActionToExecute =
// Captures the context the handler actually ran under.
() => calledSyncContext = SynchronizationContext.Current
};
pubSubEvent.Subscribe(myAction.Action);
pubSubEvent.Publish("test");
Assert.Equal(SynchronizationContext.Current, calledSyncContext);
}
[Fact]
// Payload-less analogue of the test above.
public void SubscribeDefaultsThreadOptionAndNoFilterNonGeneric()
{
var pubSubEvent = new TestablePubSubEvent();
SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
SynchronizationContext calledSyncContext = null;
var myAction = new ActionHelper()
{
ActionToExecute =
() => calledSyncContext = SynchronizationContext.Current
};
pubSubEvent.Subscribe(myAction.Action);
pubSubEvent.Publish();
Assert.Equal(SynchronizationContext.Current, calledSyncContext);
}
[Fact]
public void ShouldUnsubscribeFromPublisherThread()
{
    // Unsubscribing by delegate must remove a publisher-thread subscription.
    var pubSubEvent = new TestablePubSubEvent<string>();
    var handler = new ActionHelper();
    pubSubEvent.Subscribe(handler.Action, ThreadOption.PublisherThread);
    Assert.True(pubSubEvent.Contains(handler.Action));

    pubSubEvent.Unsubscribe(handler.Action);

    Assert.False(pubSubEvent.Contains(handler.Action));
}
[Fact]
public void ShouldUnsubscribeFromPublisherThreadNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    var handler = new ActionHelper();
    pubSubEvent.Subscribe(handler.Action, ThreadOption.PublisherThread);
    Assert.True(pubSubEvent.Contains(handler.Action));

    pubSubEvent.Unsubscribe(handler.Action);

    Assert.False(pubSubEvent.Contains(handler.Action));
}
[Fact]
public void UnsubscribeShouldNotFailWithNonSubscriber()
{
    // Unsubscribing a delegate that was never registered must be a no-op.
    var pubSubEvent = new TestablePubSubEvent<string>();
    Action<string> neverSubscribed = delegate { };

    pubSubEvent.Unsubscribe(neverSubscribed);
}
[Fact]
public void UnsubscribeShouldNotFailWithNonSubscriberNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    Action neverSubscribed = delegate { };

    pubSubEvent.Unsubscribe(neverSubscribed);
}
[Fact]
public void ShouldUnsubscribeFromBackgroundThread()
{
    // Unsubscribing by delegate must also work for background-thread subscriptions.
    var pubSubEvent = new TestablePubSubEvent<string>();
    var handler = new ActionHelper();
    pubSubEvent.Subscribe(handler.Action, ThreadOption.BackgroundThread);
    Assert.True(pubSubEvent.Contains(handler.Action));

    pubSubEvent.Unsubscribe(handler.Action);

    Assert.False(pubSubEvent.Contains(handler.Action));
}
[Fact]
public void ShouldUnsubscribeFromBackgroundThreadNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    var handler = new ActionHelper();
    pubSubEvent.Subscribe(handler.Action, ThreadOption.BackgroundThread);
    Assert.True(pubSubEvent.Contains(handler.Action));

    pubSubEvent.Unsubscribe(handler.Action);

    Assert.False(pubSubEvent.Contains(handler.Action));
}
[Fact]
public void ShouldUnsubscribeFromUIThread()
{
    // UI-thread subscriptions require a SynchronizationContext on the event.
    var pubSubEvent = new TestablePubSubEvent<string>();
    pubSubEvent.SynchronizationContext = new SynchronizationContext();
    var handler = new ActionHelper();
    pubSubEvent.Subscribe(handler.Action, ThreadOption.UIThread);
    Assert.True(pubSubEvent.Contains(handler.Action));

    pubSubEvent.Unsubscribe(handler.Action);

    Assert.False(pubSubEvent.Contains(handler.Action));
}
[Fact]
public void ShouldUnsubscribeFromUIThreadNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    pubSubEvent.SynchronizationContext = new SynchronizationContext();
    var handler = new ActionHelper();
    pubSubEvent.Subscribe(handler.Action, ThreadOption.UIThread);
    Assert.True(pubSubEvent.Contains(handler.Action));

    pubSubEvent.Unsubscribe(handler.Action);

    Assert.False(pubSubEvent.Contains(handler.Action));
}
[Fact]
public void ShouldUnsubscribeASingleDelegate()
{
    // Subscribing the same delegate twice and unsubscribing once must remove
    // exactly one of the two subscriptions.
    var pubSubEvent = new TestablePubSubEvent<string>();
    int invocations = 0;
    var handler = new ActionHelper { ActionToExecute = () => invocations++ };
    pubSubEvent.Subscribe(handler.Action);
    pubSubEvent.Subscribe(handler.Action);

    pubSubEvent.Publish(null);
    Assert.Equal<int>(2, invocations);

    invocations = 0;
    pubSubEvent.Unsubscribe(handler.Action);
    pubSubEvent.Publish(null);
    Assert.Equal<int>(1, invocations);
}
[Fact]
public void ShouldUnsubscribeASingleDelegateNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    int invocations = 0;
    var handler = new ActionHelper { ActionToExecute = () => invocations++ };
    pubSubEvent.Subscribe(handler.Action);
    pubSubEvent.Subscribe(handler.Action);

    pubSubEvent.Publish();
    Assert.Equal<int>(2, invocations);

    invocations = 0;
    pubSubEvent.Unsubscribe(handler.Action);
    pubSubEvent.Publish();
    Assert.Equal<int>(1, invocations);
}
[Fact]
// A default (weak-reference) subscription must not keep its target alive:
// once the target is collected, publishing must not invoke it.
// The locals are nulled before GC.Collect so nothing roots the target.
public async Task ShouldNotExecuteOnGarbageCollectedDelegateReferenceWhenNotKeepAlive()
{
var PubSubEvent = new TestablePubSubEvent<string>();
ExternalAction externalAction = new ExternalAction();
PubSubEvent.Subscribe(externalAction.ExecuteAction);
PubSubEvent.Publish("testPayload");
Assert.Equal("testPayload", externalAction.PassedValue);
WeakReference actionEventReference = new WeakReference(externalAction);
externalAction = null;
// Delay gives any pending references a chance to clear before forcing a GC.
await Task.Delay(100);
GC.Collect();
Assert.False(actionEventReference.IsAlive);
// Publishing after collection must not throw even though the target is gone.
PubSubEvent.Publish("testPayload");
}
[Fact]
// Payload-less analogue of the test above.
public async Task ShouldNotExecuteOnGarbageCollectedDelegateReferenceWhenNotKeepAliveNonGeneric()
{
var pubSubEvent = new TestablePubSubEvent();
var externalAction = new ExternalAction();
pubSubEvent.Subscribe(externalAction.ExecuteAction);
pubSubEvent.Publish();
Assert.True(externalAction.Executed);
var actionEventReference = new WeakReference(externalAction);
externalAction = null;
await Task.Delay(100);
GC.Collect();
Assert.False(actionEventReference.IsAlive);
pubSubEvent.Publish();
}
[Fact]
// The filter delegate is held weakly too: once its target is collected the
// subscription must stop delivering (a dead filter cannot return true).
public async Task ShouldNotExecuteOnGarbageCollectedFilterReferenceWhenNotKeepAlive()
{
var PubSubEvent = new TestablePubSubEvent<string>();
bool wasCalled = false;
var actionEvent = new ActionHelper() { ActionToExecute = () => wasCalled = true };
ExternalFilter filter = new ExternalFilter();
PubSubEvent.Subscribe(actionEvent.Action, ThreadOption.PublisherThread, false, filter.AlwaysTrueFilter);
PubSubEvent.Publish("testPayload");
Assert.True(wasCalled);
wasCalled = false;
WeakReference filterReference = new WeakReference(filter);
filter = null;
await Task.Delay(100);
GC.Collect();
Assert.False(filterReference.IsAlive);
PubSubEvent.Publish("testPayload");
Assert.False(wasCalled);
}
[Fact]
public void CanAddSubscriptionWhileEventIsFiring()
{
    // A handler that subscribes another handler during Publish must not corrupt
    // the subscription list; the new subscription is visible afterwards.
    var pubSubEvent = new TestablePubSubEvent<string>();
    var lateHandler = new ActionHelper();
    var subscribingHandler = new ActionHelper
    {
        ActionToExecute = () => pubSubEvent.Subscribe(lateHandler.Action)
    };
    pubSubEvent.Subscribe(subscribingHandler.Action);
    Assert.False(pubSubEvent.Contains(lateHandler.Action));

    pubSubEvent.Publish(null);

    Assert.True(pubSubEvent.Contains(lateHandler.Action));
}
[Fact]
public void CanAddSubscriptionWhileEventIsFiringNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    var lateHandler = new ActionHelper();
    var subscribingHandler = new ActionHelper
    {
        ActionToExecute = () => pubSubEvent.Subscribe(lateHandler.Action)
    };
    pubSubEvent.Subscribe(subscribingHandler.Action);
    Assert.False(pubSubEvent.Contains(lateHandler.Action));

    pubSubEvent.Publish();

    Assert.True(pubSubEvent.Contains(lateHandler.Action));
}
[Fact]
// Regression test: an inline (anonymous) delegate subscribed with
// keepAlive=false must still fire after a GC. The delegate captures the local
// 'published', so the closure must remain reachable for the publish to work.
// Restructuring these delegates could change capture semantics — left as-is.
public void InlineDelegateDeclarationsDoesNotGetCollectedIncorrectlyWithWeakReferences()
{
var PubSubEvent = new TestablePubSubEvent<string>();
bool published = false;
PubSubEvent.Subscribe(delegate { published = true; }, ThreadOption.PublisherThread, false, delegate { return true; });
GC.Collect();
PubSubEvent.Publish(null);
Assert.True(published);
}
[Fact]
// Payload-less analogue of the test above.
public void InlineDelegateDeclarationsDoesNotGetCollectedIncorrectlyWithWeakReferencesNonGeneric()
{
var pubSubEvent = new TestablePubSubEvent();
bool published = false;
pubSubEvent.Subscribe(delegate { published = true; }, ThreadOption.PublisherThread, false);
GC.Collect();
pubSubEvent.Publish();
Assert.True(published);
}
[Fact]
// With keepAlive=true the event must root the subscriber: even after the test
// drops its only reference and forces collection, the target stays alive and
// continues to receive publications.
public void ShouldNotGarbageCollectDelegateReferenceWhenUsingKeepAlive()
{
var PubSubEvent = new TestablePubSubEvent<string>();
var externalAction = new ExternalAction();
PubSubEvent.Subscribe(externalAction.ExecuteAction, ThreadOption.PublisherThread, true);
WeakReference actionEventReference = new WeakReference(externalAction);
externalAction = null;
GC.Collect();
GC.Collect();
Assert.True(actionEventReference.IsAlive);
PubSubEvent.Publish("testPayload");
Assert.Equal("testPayload", ((ExternalAction)actionEventReference.Target).PassedValue);
}
[Fact]
// Payload-less analogue of the test above.
public void ShouldNotGarbageCollectDelegateReferenceWhenUsingKeepAliveNonGeneric()
{
var pubSubEvent = new TestablePubSubEvent();
var externalAction = new ExternalAction();
pubSubEvent.Subscribe(externalAction.ExecuteAction, ThreadOption.PublisherThread, true);
WeakReference actionEventReference = new WeakReference(externalAction);
externalAction = null;
GC.Collect();
GC.Collect();
Assert.True(actionEventReference.IsAlive);
pubSubEvent.Publish();
Assert.True(((ExternalAction)actionEventReference.Target).Executed);
}
[Fact]
public void RegisterReturnsTokenThatCanBeUsedToUnsubscribe()
{
    // The token returned by Subscribe must unsubscribe the matching delegate.
    var pubSubEvent = new TestablePubSubEvent<string>();
    var handler = new ActionHelper();
    var token = pubSubEvent.Subscribe(handler.Action);

    pubSubEvent.Unsubscribe(token);

    Assert.False(pubSubEvent.Contains(handler.Action));
}
[Fact]
public void RegisterReturnsTokenThatCanBeUsedToUnsubscribeNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    var handler = new ActionHelper();
    var token = pubSubEvent.Subscribe(handler.Action);

    pubSubEvent.Unsubscribe(token);

    Assert.False(pubSubEvent.Contains(handler.Action));
}
[Fact]
public void ContainsShouldSearchByToken()
{
    // Contains(token) must track the subscription's lifetime, including removal
    // performed via the delegate rather than the token.
    var pubSubEvent = new TestablePubSubEvent<string>();
    var handler = new ActionHelper();
    var token = pubSubEvent.Subscribe(handler.Action);
    Assert.True(pubSubEvent.Contains(token));

    pubSubEvent.Unsubscribe(handler.Action);

    Assert.False(pubSubEvent.Contains(token));
}
[Fact]
public void ContainsShouldSearchByTokenNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    var handler = new ActionHelper();
    var token = pubSubEvent.Subscribe(handler.Action);
    Assert.True(pubSubEvent.Contains(token));

    pubSubEvent.Unsubscribe(handler.Action);

    Assert.False(pubSubEvent.Contains(token));
}
[Fact]
public void SubscribeDefaultsToPublisherThread()
{
    // Subscribing with only a keepAlive flag must create a plain
    // EventSubscription (publisher-thread dispatch), not a dispatcher- or
    // background-thread variant.
    var pubSubEvent = new TestablePubSubEvent<string>();
    Action<string> action = delegate { };
    // FIX: the returned token was assigned to an unused local; discard it.
    _ = pubSubEvent.Subscribe(action, true);
    Assert.Equal(1, pubSubEvent.BaseSubscriptions.Count);
    Assert.Equal(typeof(EventSubscription<string>), pubSubEvent.BaseSubscriptions.ElementAt(0).GetType());
}
[Fact]
public void SubscribeDefaultsToPublisherThreadNonGeneric()
{
    var pubSubEvent = new TestablePubSubEvent();
    Action action = delegate { };
    _ = pubSubEvent.Subscribe(action, true);
    Assert.Equal(1, pubSubEvent.BaseSubscriptions.Count);
    Assert.Equal(typeof(EventSubscription), pubSubEvent.BaseSubscriptions.ElementAt(0).GetType());
}
// Subscriber filter whose target can be collected; admits every payload.
public class ExternalFilter
{
    public bool AlwaysTrueFilter(string value) => true;
}
// Subscriber target whose lifetime the GC tests track via WeakReference.
public class ExternalAction
{
    // Last payload passed to the generic overload.
    public string PassedValue;
    // True once either overload has run.
    public bool Executed = false;

    public void ExecuteAction(string value)
    {
        PassedValue = value;
        Executed = true;
    }

    public void ExecuteAction() => Executed = true;
}
// Exposes the protected subscription list so tests can inspect it.
class TestablePubSubEvent<TPayload> : PubSubEvent<TPayload>
{
    public ICollection<IEventSubscription> BaseSubscriptions => base.Subscriptions;
}
class TestablePubSubEvent : PubSubEvent
{
    public ICollection<IEventSubscription> BaseSubscriptions => base.Subscriptions;
}
// Marker payload type used by the custom-event tests.
public class Payload { }
}
// Records invocations of its Action overloads, captures the last argument,
// and optionally runs a caller-supplied callback on each invocation.
public class ActionHelper
{
    public bool ActionCalled;
    public Action ActionToExecute = null;
    private object _lastArgument;

    // Argument captured by the most recent Action(...) call, cast to T.
    public T ActionArg<T>() => (T)_lastArgument;

    public void Action(PubSubEventFixture.Payload arg) => Action((object)arg);

    public void Action(string arg) => Action((object)arg);

    public void Action(object arg)
    {
        _lastArgument = arg;
        ActionCalled = true;
        ActionToExecute?.Invoke();
    }

    public void Action()
    {
        ActionCalled = true;
        ActionToExecute?.Invoke();
    }
}
// Filter stub whose verdict is fixed by the test via FilterReturnValue.
public class MockFilter
{
    public bool FilterReturnValue;

    public bool FilterString(string arg) => FilterReturnValue;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Xml;
using System.Xml.Schema;
using System.Reflection;
using System.Reflection.Emit;
using System.Collections;
using System.Collections.Generic;
using System.Security;
#if NET_NATIVE
using Internal.Runtime.Augments;
#endif
namespace System.Runtime.Serialization
{
#if USE_REFEMIT || NET_NATIVE
public delegate object XmlFormatClassReaderDelegate(XmlReaderDelegator xmlReader, XmlObjectSerializerReadContext context, XmlDictionaryString[] memberNames, XmlDictionaryString[] memberNamespaces);
public delegate object XmlFormatCollectionReaderDelegate(XmlReaderDelegator xmlReader, XmlObjectSerializerReadContext context, XmlDictionaryString itemName, XmlDictionaryString itemNamespace, CollectionDataContract collectionContract);
public delegate void XmlFormatGetOnlyCollectionReaderDelegate(XmlReaderDelegator xmlReader, XmlObjectSerializerReadContext context, XmlDictionaryString itemName, XmlDictionaryString itemNamespace, CollectionDataContract collectionContract);
public sealed class XmlFormatReaderGenerator
#else
internal delegate object XmlFormatClassReaderDelegate(XmlReaderDelegator xmlReader, XmlObjectSerializerReadContext context, XmlDictionaryString[] memberNames, XmlDictionaryString[] memberNamespaces);
internal delegate object XmlFormatCollectionReaderDelegate(XmlReaderDelegator xmlReader, XmlObjectSerializerReadContext context, XmlDictionaryString itemName, XmlDictionaryString itemNamespace, CollectionDataContract collectionContract);
internal delegate void XmlFormatGetOnlyCollectionReaderDelegate(XmlReaderDelegator xmlReader, XmlObjectSerializerReadContext context, XmlDictionaryString itemName, XmlDictionaryString itemNamespace, CollectionDataContract collectionContract);
internal sealed class XmlFormatReaderGenerator
#endif
{
#if !NET_NATIVE
[SecurityCritical]
/// <SecurityNote>
/// Critical - holds instance of CriticalHelper which keeps state that was produced within an assert
/// </SecurityNote>
private CriticalHelper _helper;
/// <SecurityNote>
/// Critical - initializes SecurityCritical field 'helper'
/// </SecurityNote>
[SecurityCritical]
// Creates the generator together with its SecurityCritical helper, which
// performs all of the actual IL emission for the reader delegates.
public XmlFormatReaderGenerator()
{
_helper = new CriticalHelper();
}
/// <SecurityNote>
/// Critical - accesses SecurityCritical helper class 'CriticalHelper'
/// </SecurityNote>
[SecurityCritical]
// Emits and returns a delegate that deserializes instances of the class
// described by 'classContract'. Thin SecurityCritical facade over the helper.
public XmlFormatClassReaderDelegate GenerateClassReader(ClassDataContract classContract)
{
return _helper.GenerateClassReader(classContract);
}
/// <SecurityNote>
/// Critical - accesses SecurityCritical helper class 'CriticalHelper'
/// </SecurityNote>
[SecurityCritical]
// Emits a delegate that deserializes a (settable) collection contract.
public XmlFormatCollectionReaderDelegate GenerateCollectionReader(CollectionDataContract collectionContract)
{
return _helper.GenerateCollectionReader(collectionContract);
}
/// <SecurityNote>
/// Critical - accesses SecurityCritical helper class 'CriticalHelper'
/// </SecurityNote>
[SecurityCritical]
// Emits a delegate that populates an existing (get-only) collection in place.
public XmlFormatGetOnlyCollectionReaderDelegate GenerateGetOnlyCollectionReader(CollectionDataContract collectionContract)
{
return _helper.GenerateGetOnlyCollectionReader(collectionContract);
}
/// <SecurityNote>
/// Review - handles all aspects of IL generation including initializing the DynamicMethod.
/// changes to how IL generated could affect how data is deserialized and what gets access to data,
/// therefore we mark it for review so that changes to generation logic are reviewed.
/// </SecurityNote>
private class CriticalHelper
{
private CodeGenerator _ilg;                // IL emitter for the reader currently being generated
private LocalBuilder _objectLocal;         // local holding the object under deserialization
private Type _objectType;                  // declared CLR type of the object under deserialization
private ArgBuilder _xmlReaderArg;          // delegate arg 0: the XmlReaderDelegator
private ArgBuilder _contextArg;            // delegate arg 1: the XmlObjectSerializerReadContext
private ArgBuilder _memberNamesArg;        // delegate arg 2: member name dictionary strings
private ArgBuilder _memberNamespacesArg;   // delegate arg 3: member namespace dictionary strings
private ArgBuilder _collectionContractArg; // delegate arg 4: bound only for collection readers
// Emits the IL body of a class reader: create the instance, register it with
// the context, run [OnDeserializing], read the members, run [OnDeserialized],
// then convert the result to the delegate's return type.
public XmlFormatClassReaderDelegate GenerateClassReader(ClassDataContract classContract)
{
_ilg = new CodeGenerator();
bool memberAccessFlag = classContract.RequiresMemberAccessForRead(null);
try
{
_ilg.BeginMethod("Read" + classContract.StableName.Name + "FromXml", Globals.TypeOfXmlFormatClassReaderDelegate, memberAccessFlag);
}
catch (SecurityException securityException)
{
if (memberAccessFlag)
{
// Re-runs the member-access demand with the caught exception so a more
// descriptive serialization error is raised.
classContract.RequiresMemberAccessForRead(securityException);
}
else
{
throw;
}
}
InitArgs();
CreateObject(classContract);
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.AddNewObjectMethod, _objectLocal);
InvokeOnDeserializing(classContract);
// NOTE(review): objectId is never assigned, so the 'if (objectId == null)'
// below always runs and the conversion block is unconditional.
LocalBuilder objectId = null;
ReadClass(classContract);
InvokeOnDeserialized(classContract);
if (objectId == null)
{
_ilg.Load(_objectLocal);
// Do a conversion back from DateTimeOffsetAdapter to DateTimeOffset after deserialization.
// DateTimeOffsetAdapter is used here for deserialization purposes to bypass the ISerializable implementation
// on DateTimeOffset; which does not work in partial trust.
if (classContract.UnderlyingType == Globals.TypeOfDateTimeOffsetAdapter)
{
_ilg.ConvertValue(_objectLocal.LocalType, Globals.TypeOfDateTimeOffsetAdapter);
_ilg.Call(XmlFormatGeneratorStatics.GetDateTimeOffsetMethod);
_ilg.ConvertValue(Globals.TypeOfDateTimeOffset, _ilg.CurrentMethod.ReturnType);
}
//Copy the KeyValuePairAdapter<K,T> to a KeyValuePair<K,T>.
else if (classContract.IsKeyValuePairAdapter)
{
_ilg.Call(classContract.GetKeyValuePairMethodInfo);
_ilg.ConvertValue(Globals.TypeOfKeyValuePair.MakeGenericType(classContract.KeyValuePairGenericArguments), _ilg.CurrentMethod.ReturnType);
}
else
{
_ilg.ConvertValue(_objectLocal.LocalType, _ilg.CurrentMethod.ReturnType);
}
}
return (XmlFormatClassReaderDelegate)_ilg.EndMethod();
}
// Emits a reader that builds a new collection instance and returns it.
public XmlFormatCollectionReaderDelegate GenerateCollectionReader(CollectionDataContract collectionContract)
{
_ilg = GenerateCollectionReaderHelper(collectionContract, false /*isGetOnlyCollection*/);
ReadCollection(collectionContract);
_ilg.Load(_objectLocal);
_ilg.ConvertValue(_objectLocal.LocalType, _ilg.CurrentMethod.ReturnType);
return (XmlFormatCollectionReaderDelegate)_ilg.EndMethod();
}
// Emits a reader that fills an existing get-only collection; the delegate
// returns void, so no result conversion is emitted.
public XmlFormatGetOnlyCollectionReaderDelegate GenerateGetOnlyCollectionReader(CollectionDataContract collectionContract)
{
_ilg = GenerateCollectionReaderHelper(collectionContract, true /*isGetOnlyCollection*/);
ReadGetOnlyCollection(collectionContract);
return (XmlFormatGetOnlyCollectionReaderDelegate)_ilg.EndMethod();
}
// Shared setup for both collection readers: begins the dynamic method (name
// and delegate type depend on isGetOnlyCollection), binds the common args,
// and binds arg 4 to the collection contract.
private CodeGenerator GenerateCollectionReaderHelper(CollectionDataContract collectionContract, bool isGetOnlyCollection)
{
    _ilg = new CodeGenerator();
    bool memberAccessFlag = collectionContract.RequiresMemberAccessForRead(null);
    try
    {
        // CLEANUP: the two branches only differed by the method-name suffix and
        // the delegate type; the non-get-only path also appended a dead
        // '+ string.Empty'. Folded into a single BeginMethod call.
        string methodName = "Read" + collectionContract.StableName.Name + "FromXml" + (isGetOnlyCollection ? "IsGetOnly" : "");
        Type delegateType = isGetOnlyCollection
            ? Globals.TypeOfXmlFormatGetOnlyCollectionReaderDelegate
            : Globals.TypeOfXmlFormatCollectionReaderDelegate;
        _ilg.BeginMethod(methodName, delegateType, memberAccessFlag);
    }
    catch (SecurityException securityException)
    {
        if (memberAccessFlag)
        {
            // Re-runs the member-access demand so a descriptive error is raised.
            collectionContract.RequiresMemberAccessForRead(securityException);
        }
        else
        {
            throw;
        }
    }
    InitArgs();
    _collectionContractArg = _ilg.GetArg(4);
    return _ilg;
}
// Binds the four leading arguments common to every reader delegate signature:
// reader, context, member names, member namespaces. Collection readers bind
// arg 4 separately in GenerateCollectionReaderHelper.
private void InitArgs()
{
_xmlReaderArg = _ilg.GetArg(0);
_contextArg = _ilg.GetArg(1);
_memberNamesArg = _ilg.GetArg(2);
_memberNamespacesArg = _ilg.GetArg(3);
}
// Emits IL that instantiates the object to deserialize into _objectLocal:
// DBNull uses its singleton, non-attributed types use their constructor (or
// initobj for value types), everything else goes through GetUninitializedObject.
private void CreateObject(ClassDataContract classContract)
{
Type type = _objectType = classContract.UnderlyingType;
// Attributed value types are tracked as ValueType so the uninitialized
// instance can be boxed; non-attributed value types are initobj'd directly.
if (type.GetTypeInfo().IsValueType && !classContract.IsNonAttributedType)
type = Globals.TypeOfValueType;
_objectLocal = _ilg.DeclareLocal(type, "objectDeserialized");
if (classContract.UnderlyingType == Globals.TypeOfDBNull)
{
_ilg.LoadMember(Globals.TypeOfDBNull.GetField("Value"));
_ilg.Stloc(_objectLocal);
}
else if (classContract.IsNonAttributedType)
{
if (type.GetTypeInfo().IsValueType)
{
_ilg.Ldloca(_objectLocal);
_ilg.InitObj(type);
}
else
{
_ilg.New(classContract.GetNonAttributedTypeConstructor());
_ilg.Stloc(_objectLocal);
}
}
else
{
// Bypasses constructors entirely; members are populated afterwards.
_ilg.Call(null, XmlFormatGeneratorStatics.GetUninitializedObjectMethod, DataContract.GetIdForInitialization(classContract));
_ilg.ConvertValue(Globals.TypeOfObject, type);
_ilg.Stloc(_objectLocal);
}
}
// Emits calls to [OnDeserializing] callbacks, base contracts first so the
// callback order matches the inheritance chain.
private void InvokeOnDeserializing(ClassDataContract classContract)
{
if (classContract.BaseContract != null)
InvokeOnDeserializing(classContract.BaseContract);
if (classContract.OnDeserializing != null)
{
_ilg.LoadAddress(_objectLocal);
_ilg.ConvertAddress(_objectLocal.LocalType, _objectType);
_ilg.Load(_contextArg);
_ilg.LoadMember(XmlFormatGeneratorStatics.GetStreamingContextMethod);
_ilg.Call(classContract.OnDeserializing);
}
}
// Emits calls to [OnDeserialized] callbacks, base contracts first.
private void InvokeOnDeserialized(ClassDataContract classContract)
{
if (classContract.BaseContract != null)
InvokeOnDeserialized(classContract.BaseContract);
if (classContract.OnDeserialized != null)
{
_ilg.LoadAddress(_objectLocal);
_ilg.ConvertAddress(_objectLocal.LocalType, _objectType);
_ilg.Load(_contextArg);
_ilg.LoadMember(XmlFormatGeneratorStatics.GetStreamingContextMethod);
_ilg.Call(classContract.OnDeserialized);
}
}
// Reads all members of the class; extension data is not supported here,
// hence the null extensionDataLocal.
private void ReadClass(ClassDataContract classContract)
{
ReadMembers(classContract, null /*extensionDataLocal*/);
}
// Emits the element-reading loop: for each XML element, resolve the member
// index (with or without required-member tracking), switch to the matching
// member case, and afterwards verify no required member is missing.
private void ReadMembers(ClassDataContract classContract, LocalBuilder extensionDataLocal)
{
int memberCount = classContract.MemberNames.Length;
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.IncrementItemCountMethod, memberCount);
// memberIndex starts at -1 because GetMemberIndex resumes from memberIndex+1.
LocalBuilder memberIndexLocal = _ilg.DeclareLocal(Globals.TypeOfInt, "memberIndex", -1);
int firstRequiredMember;
bool[] requiredMembers = GetRequiredMembers(classContract, out firstRequiredMember);
bool hasRequiredMembers = (firstRequiredMember < memberCount);
LocalBuilder requiredIndexLocal = hasRequiredMembers ? _ilg.DeclareLocal(Globals.TypeOfInt, "requiredIndex", firstRequiredMember) : null;
object forReadElements = _ilg.For(null, null, null);
_ilg.Call(null, XmlFormatGeneratorStatics.MoveToNextElementMethod, _xmlReaderArg);
_ilg.IfFalseBreak(forReadElements);
if (hasRequiredMembers)
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.GetMemberIndexWithRequiredMembersMethod, _xmlReaderArg, _memberNamesArg, _memberNamespacesArg, memberIndexLocal, requiredIndexLocal, extensionDataLocal);
else
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.GetMemberIndexMethod, _xmlReaderArg, _memberNamesArg, _memberNamespacesArg, memberIndexLocal, extensionDataLocal);
Label[] memberLabels = _ilg.Switch(memberCount);
ReadMembers(classContract, requiredMembers, memberLabels, memberIndexLocal, requiredIndexLocal);
_ilg.EndSwitch();
_ilg.EndFor();
if (hasRequiredMembers)
{
// requiredIndex < memberCount after the loop means a required member was never seen.
_ilg.If(requiredIndexLocal, Cmp.LessThan, memberCount);
_ilg.Call(null, XmlFormatGeneratorStatics.ThrowRequiredMemberMissingExceptionMethod, _xmlReaderArg, memberIndexLocal, requiredIndexLocal, _memberNamesArg);
_ilg.EndIf();
}
}
// Emits one switch case per data member (base-contract members first) and
// returns the running member count so derived contracts continue numbering.
private int ReadMembers(ClassDataContract classContract, bool[] requiredMembers, Label[] memberLabels, LocalBuilder memberIndexLocal, LocalBuilder requiredIndexLocal)
{
    int memberCount = (classContract.BaseContract == null) ? 0 : ReadMembers(classContract.BaseContract, requiredMembers,
        memberLabels, memberIndexLocal, requiredIndexLocal);
    for (int i = 0; i < classContract.Members.Count; i++, memberCount++)
    {
        DataMember dataMember = classContract.Members[i];
        Type memberType = dataMember.MemberType;
        _ilg.Case(memberLabels[memberCount], dataMember.Name);
        if (dataMember.IsRequired)
        {
            // Advance the required-member cursor to the next required slot so the
            // missing-member check after the read loop stays accurate.
            int nextRequiredIndex = memberCount + 1;
            for (; nextRequiredIndex < requiredMembers.Length; nextRequiredIndex++)
                if (requiredMembers[nextRequiredIndex])
                    break;
            _ilg.Set(requiredIndexLocal, nextRequiredIndex);
        }
        LocalBuilder value = null;
        if (dataMember.IsGetOnlyCollection)
        {
            // Get-only collections are populated in place: load the existing
            // instance, record it in the context, then deserialize into it.
            _ilg.LoadAddress(_objectLocal);
            _ilg.LoadMember(dataMember.MemberInfo);
            value = _ilg.DeclareLocal(memberType, dataMember.Name + "Value");
            _ilg.Stloc(value);
            _ilg.Call(_contextArg, XmlFormatGeneratorStatics.StoreCollectionMemberInfoMethod, value);
            ReadValue(memberType, dataMember.Name, classContract.StableName.Namespace);
        }
        else
        {
            value = ReadValue(memberType, dataMember.Name, classContract.StableName.Namespace);
            _ilg.LoadAddress(_objectLocal);
            _ilg.ConvertAddress(_objectLocal.LocalType, _objectType);
            _ilg.Ldloc(value);
            _ilg.StoreMember(dataMember.MemberInfo);
        }
#if FEATURE_LEGACYNETCF
        // The DataContractSerializer in the full framework doesn't support unordered elements:
        // deserialization will fail if the data members in the XML are not sorted alphabetically.
        // But the NetCF DataContractSerializer does support unordered element. To maintain compatibility
        // with Mango we always search for the member from the beginning of the member list.
        // We set memberIndexLocal to -1 because GetMemberIndex always starts from memberIndex+1.
        if (CompatibilitySwitches.IsAppEarlierThanWindowsPhone8)
            _ilg.Set(memberIndexLocal, (int)-1); // BUG FIX: was 'ilg', which does not exist (field is '_ilg') and broke the FEATURE_LEGACYNETCF build
        else
#endif // FEATURE_LEGACYNETCF
        _ilg.Set(memberIndexLocal, memberCount);
        _ilg.EndCase();
    }
    return memberCount;
}
// Builds a flat required-membership map (base-contract members first) and
// reports the index of the first required member; firstRequiredMember equals
// the member count when no member is required.
private bool[] GetRequiredMembers(ClassDataContract contract, out int firstRequiredMember)
{
    int totalMembers = contract.MemberNames.Length;
    bool[] requiredMap = new bool[totalMembers];
    GetRequiredMembers(contract, requiredMap);
    firstRequiredMember = 0;
    while (firstRequiredMember < totalMembers && !requiredMap[firstRequiredMember])
        firstRequiredMember++;
    return requiredMap;
}
// Fills the map starting where the base contract's members end; returns the
// next free index so derived contracts continue the numbering.
private int GetRequiredMembers(ClassDataContract contract, bool[] requiredMembers)
{
    int index = (contract.BaseContract == null) ? 0 : GetRequiredMembers(contract.BaseContract, requiredMembers);
    List<DataMember> members = contract.Members;
    foreach (DataMember member in members)
    {
        requiredMembers[index++] = member.IsRequired;
    }
    return index;
}
// Emits IL that reads one value of 'type' from the current element, handling
// nil elements, object references (ref/id), Nullable<> unwrapping/rewrapping,
// and the primitive fast path; returns the local holding the result.
private LocalBuilder ReadValue(Type type, string name, string ns)
{
LocalBuilder value = _ilg.DeclareLocal(type, "valueRead");
LocalBuilder nullableValue = null;
// Strip Nullable<> layers; 'nullables' records how many must be rewrapped.
int nullables = 0;
while (type.GetTypeInfo().IsGenericType && type.GetGenericTypeDefinition() == Globals.TypeOfNullable)
{
nullables++;
type = type.GetGenericArguments()[0];
}
PrimitiveDataContract primitiveContract = PrimitiveDataContract.GetPrimitiveDataContract(type);
if ((primitiveContract != null && primitiveContract.UnderlyingType != Globals.TypeOfObject) || nullables != 0 || type.GetTypeInfo().IsValueType)
{
LocalBuilder objectId = _ilg.DeclareLocal(Globals.TypeOfString, "objectIdRead");
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.ReadAttributesMethod, _xmlReaderArg);
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.ReadIfNullOrRefMethod, _xmlReaderArg, type, DataContract.IsTypeSerializable(type));
_ilg.Stloc(objectId);
// Deserialize null
_ilg.If(objectId, Cmp.EqualTo, Globals.NullObjectId);
if (nullables != 0)
{
// A nil element maps to a default-initialized (empty) Nullable<>.
_ilg.LoadAddress(value);
_ilg.InitObj(value.LocalType);
}
else if (type.GetTypeInfo().IsValueType)
ThrowValidationException(SR.Format(SR.ValueTypeCannotBeNull, DataContract.GetClrTypeFullName(type)));
else
{
_ilg.Load(null);
_ilg.Stloc(value);
}
// Deserialize value
// Compare against Globals.NewObjectId, which is set to string.Empty
_ilg.ElseIfIsEmptyString(objectId);
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.GetObjectIdMethod);
_ilg.Stloc(objectId);
if (type.GetTypeInfo().IsValueType)
{
// Value types cannot participate in reference tracking.
_ilg.IfNotIsEmptyString(objectId);
ThrowValidationException(SR.Format(SR.ValueTypeCannotHaveId, DataContract.GetClrTypeFullName(type)));
_ilg.EndIf();
}
if (nullables != 0)
{
// Read into an inner local of the unwrapped type; rewrapped below.
nullableValue = value;
value = _ilg.DeclareLocal(type, "innerValueRead");
}
if (primitiveContract != null && primitiveContract.UnderlyingType != Globals.TypeOfObject)
{
_ilg.Call(_xmlReaderArg, primitiveContract.XmlFormatReaderMethod);
_ilg.Stloc(value);
if (!type.GetTypeInfo().IsValueType)
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.AddNewObjectMethod, value);
}
else
{
InternalDeserialize(value, type, name, ns);
}
// Deserialize ref
_ilg.Else();
if (type.GetTypeInfo().IsValueType)
ThrowValidationException(SR.Format(SR.ValueTypeCannotHaveRef, DataContract.GetClrTypeFullName(type)));
else
{
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.GetExistingObjectMethod, objectId, type, name, ns);
_ilg.ConvertValue(Globals.TypeOfObject, type);
_ilg.Stloc(value);
}
_ilg.EndIf();
if (nullableValue != null)
{
// Rewrap the inner value into the declared Nullable<> type unless nil.
_ilg.If(objectId, Cmp.NotEqualTo, Globals.NullObjectId);
WrapNullableObject(value, nullableValue, nullables);
_ilg.EndIf();
value = nullableValue;
}
}
else
{
InternalDeserialize(value, type, name, ns);
}
return value;
}
// Emits a call to the context's InternalDeserialize for non-primitive values:
// pushes context, reader, the declared type's contract id and handle, plus
// the member name/namespace, then converts the object result to 'type'.
private void InternalDeserialize(LocalBuilder value, Type type, string name, string ns)
{
_ilg.Load(_contextArg);
_ilg.Load(_xmlReaderArg);
Type declaredType = type;
_ilg.Load(DataContract.GetId(declaredType.TypeHandle));
_ilg.Ldtoken(declaredType);
_ilg.Load(name);
_ilg.Load(ns);
_ilg.Call(XmlFormatGeneratorStatics.InternalDeserializeMethod);
_ilg.ConvertValue(Globals.TypeOfObject, type);
_ilg.Stloc(value);
}
// Emits IL that wraps the inner (unwrapped) value back into the declared
// Nullable<> type: for N nested nullables, constructs N-1 intermediate
// Nullable<> layers, then the outermost one directly into outerValue.
private void WrapNullableObject(LocalBuilder innerValue, LocalBuilder outerValue, int nullables)
{
Type innerType = innerValue.LocalType, outerType = outerValue.LocalType;
_ilg.LoadAddress(outerValue);
_ilg.Load(innerValue);
for (int i = 1; i < nullables; i++)
{
Type type = Globals.TypeOfNullable.MakeGenericType(innerType);
_ilg.New(type.GetConstructor(new Type[] { innerType }));
innerType = type;
}
// Calls the outer Nullable<T>(T) constructor on the address loaded above.
_ilg.Call(outerType.GetConstructor(new Type[] { innerType }));
}
// Emits the deserialization body for a (settable) collection contract. Strategy:
// 1. Substitute a concrete type for interface-typed collections.
// 2. Construct/initialize the instance (except arrays, created once size is known).
// 3. If the context reports size == -1, read items into a growing array or via Add
//    until the end element; otherwise loop exactly 'size' times.
// Certain primitive arrays short-circuit through TryReadPrimitiveArray.
// The result is left in _objectLocal.
private void ReadCollection(CollectionDataContract collectionContract)
{
Type type = collectionContract.UnderlyingType;
Type itemType = collectionContract.ItemType;
bool isArray = (collectionContract.Kind == CollectionKind.Array);
ConstructorInfo constructor = collectionContract.Constructor;
// Interfaces cannot be instantiated: pick a concrete stand-in
// (Dictionary<K,V>, Hashtable, or a plain array of the item type).
if (type.GetTypeInfo().IsInterface)
{
switch (collectionContract.Kind)
{
case CollectionKind.GenericDictionary:
type = Globals.TypeOfDictionaryGeneric.MakeGenericType(itemType.GetGenericArguments());
constructor = type.GetConstructor(BindingFlags.Instance | BindingFlags.Public, Array.Empty<Type>());
break;
case CollectionKind.Dictionary:
type = Globals.TypeOfHashtable;
constructor = XmlFormatGeneratorStatics.HashtableCtor;
break;
case CollectionKind.Collection:
case CollectionKind.GenericCollection:
case CollectionKind.Enumerable:
case CollectionKind.GenericEnumerable:
case CollectionKind.List:
case CollectionKind.GenericList:
type = itemType.MakeArrayType();
isArray = true;
break;
}
}
string itemName = collectionContract.ItemName;
string itemNs = collectionContract.StableName.Namespace;
_objectLocal = _ilg.DeclareLocal(type, "objectDeserialized");
if (!isArray)
{
// Value-type collections are initialized in place; reference types are
// constructed and registered with the context for object-reference tracking.
if (type.GetTypeInfo().IsValueType)
{
_ilg.Ldloca(_objectLocal);
_ilg.InitObj(type);
}
else
{
_ilg.New(constructor);
_ilg.Stloc(_objectLocal);
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.AddNewObjectMethod, _objectLocal);
}
}
LocalBuilder size = _ilg.DeclareLocal(Globals.TypeOfInt, "arraySize");
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.GetArraySizeMethod);
_ilg.Stloc(size);
LocalBuilder objectId = _ilg.DeclareLocal(Globals.TypeOfString, "objectIdRead");
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.GetObjectIdMethod);
_ilg.Stloc(objectId);
// Fast path: bulk-read supported primitive arrays; falls through to the
// element-by-element path when the reader cannot bulk-read.
bool canReadPrimitiveArray = false;
if (isArray && TryReadPrimitiveArray(type, itemType, size))
{
canReadPrimitiveArray = true;
_ilg.IfNot();
}
// size == -1: length unknown; read until the collection's end element.
_ilg.If(size, Cmp.EqualTo, -1);
LocalBuilder growingCollection = null;
if (isArray)
{
// Start with a small array and let EnsureArraySize grow it as needed.
growingCollection = _ilg.DeclareLocal(type, "growingCollection");
_ilg.NewArray(itemType, 32);
_ilg.Stloc(growingCollection);
}
LocalBuilder i = _ilg.DeclareLocal(Globals.TypeOfInt, "i");
object forLoop = _ilg.For(i, 0, Int32.MaxValue);
IsStartElement(_memberNamesArg, _memberNamespacesArg);
_ilg.If();
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.IncrementItemCountMethod, 1);
LocalBuilder value = ReadCollectionItem(collectionContract, itemType, itemName, itemNs);
if (isArray)
{
MethodInfo ensureArraySizeMethod = XmlFormatGeneratorStatics.EnsureArraySizeMethod.MakeGenericMethod(itemType);
_ilg.Call(null, ensureArraySizeMethod, growingCollection, i);
_ilg.Stloc(growingCollection);
_ilg.StoreArrayElement(growingCollection, i, value);
}
else
StoreCollectionValue(_objectLocal, value, collectionContract);
_ilg.Else();
// Not an item start element: either the collection's end element (stop) or
// an unexpected node (skip unknown / throw).
IsEndElement();
_ilg.If();
_ilg.Break(forLoop);
_ilg.Else();
HandleUnexpectedItemInCollection(i);
_ilg.EndIf();
_ilg.EndIf();
_ilg.EndFor();
if (isArray)
{
// Trim the over-allocated growing array to the actual item count and register it.
MethodInfo trimArraySizeMethod = XmlFormatGeneratorStatics.TrimArraySizeMethod.MakeGenericMethod(itemType);
_ilg.Call(null, trimArraySizeMethod, growingCollection, i);
_ilg.Stloc(_objectLocal);
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.AddNewObjectWithIdMethod, objectId, _objectLocal);
}
_ilg.Else();
// size known up front: pre-size the array and read exactly 'size' items.
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.IncrementItemCountMethod, size);
if (isArray)
{
_ilg.NewArray(itemType, size);
_ilg.Stloc(_objectLocal);
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.AddNewObjectMethod, _objectLocal);
}
LocalBuilder j = _ilg.DeclareLocal(Globals.TypeOfInt, "j");
_ilg.For(j, 0, size);
IsStartElement(_memberNamesArg, _memberNamespacesArg);
_ilg.If();
LocalBuilder itemValue = ReadCollectionItem(collectionContract, itemType, itemName, itemNs);
if (isArray)
_ilg.StoreArrayElement(_objectLocal, j, itemValue);
else
StoreCollectionValue(_objectLocal, itemValue, collectionContract);
_ilg.Else();
HandleUnexpectedItemInCollection(j);
_ilg.EndIf();
_ilg.EndFor();
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.CheckEndOfArrayMethod, _xmlReaderArg, size, _memberNamesArg, _memberNamespacesArg);
_ilg.EndIf();
if (canReadPrimitiveArray)
{
// Bulk read succeeded: just register the array with its object id.
_ilg.Else();
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.AddNewObjectWithIdMethod, objectId, _objectLocal);
_ilg.EndIf();
}
}
// Emits the deserialization body for a get-only collection: the existing instance is
// obtained from the context (never constructed here). A null instance while items are
// present is an error. Arrays are filled in place with an overflow guard; other
// collections receive items via their Add method.
private void ReadGetOnlyCollection(CollectionDataContract collectionContract)
{
Type type = collectionContract.UnderlyingType;
Type itemType = collectionContract.ItemType;
bool isArray = (collectionContract.Kind == CollectionKind.Array);
string itemName = collectionContract.ItemName;
string itemNs = collectionContract.StableName.Namespace;
_objectLocal = _ilg.DeclareLocal(type, "objectDeserialized");
// Fetch the pre-existing collection instance from the deserialization context.
_ilg.Load(_contextArg);
_ilg.LoadMember(XmlFormatGeneratorStatics.GetCollectionMemberMethod);
_ilg.ConvertValue(Globals.TypeOfObject, type);
_ilg.Stloc(_objectLocal);
//check that items are actually going to be deserialized into the collection
IsStartElement(_memberNamesArg, _memberNamespacesArg);
_ilg.If();
_ilg.If(_objectLocal, Cmp.EqualTo, null);
_ilg.Call(null, XmlFormatGeneratorStatics.ThrowNullValueReturnedForGetOnlyCollectionExceptionMethod, type);
_ilg.Else();
LocalBuilder size = _ilg.DeclareLocal(Globals.TypeOfInt, "arraySize");
if (isArray)
{
_ilg.Load(_objectLocal);
_ilg.Call(XmlFormatGeneratorStatics.GetArrayLengthMethod);
_ilg.Stloc(size);
}
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.AddNewObjectMethod, _objectLocal);
LocalBuilder i = _ilg.DeclareLocal(Globals.TypeOfInt, "i");
object forLoop = _ilg.For(i, 0, Int32.MaxValue);
IsStartElement(_memberNamesArg, _memberNamespacesArg);
_ilg.If();
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.IncrementItemCountMethod, 1);
LocalBuilder value = ReadCollectionItem(collectionContract, itemType, itemName, itemNs);
if (isArray)
{
// Guard: writing past the pre-existing array's length throws.
_ilg.If(size, Cmp.EqualTo, i);
_ilg.Call(null, XmlFormatGeneratorStatics.ThrowArrayExceededSizeExceptionMethod, size, type);
_ilg.Else();
_ilg.StoreArrayElement(_objectLocal, i, value);
_ilg.EndIf();
}
else
StoreCollectionValue(_objectLocal, value, collectionContract);
_ilg.Else();
// Not an item element: end element stops the loop; anything else is unexpected.
IsEndElement();
_ilg.If();
_ilg.Break(forLoop);
_ilg.Else();
HandleUnexpectedItemInCollection(i);
_ilg.EndIf();
_ilg.EndIf();
_ilg.EndFor();
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.CheckEndOfArrayMethod, _xmlReaderArg, size, _memberNamesArg, _memberNamespacesArg);
_ilg.EndIf();
_ilg.EndIf();
}
// Emits IL that tries to read the whole array with one of XmlReaderDelegator's
// optimized TryRead*Array helpers. Returns true when a helper exists for 'itemType'
// (and the call was emitted), false otherwise; the helper's own boolean result is
// left on the IL stack for the caller's IfNot().
private bool TryReadPrimitiveArray(Type type, Type itemType, LocalBuilder size)
{
    if (PrimitiveDataContract.GetPrimitiveDataContract(itemType) == null)
        return false;

    // Map the item's TypeCode to the matching bulk-read helper name, if any.
    string helperName = null;
    TypeCode typeCode = itemType.GetTypeCode();
    if (typeCode == TypeCode.Boolean)
        helperName = "TryReadBooleanArray";
    else if (typeCode == TypeCode.DateTime)
        helperName = "TryReadDateTimeArray";
    else if (typeCode == TypeCode.Decimal)
        helperName = "TryReadDecimalArray";
    else if (typeCode == TypeCode.Int32)
        helperName = "TryReadInt32Array";
    else if (typeCode == TypeCode.Int64)
        helperName = "TryReadInt64Array";
    else if (typeCode == TypeCode.Single)
        helperName = "TryReadSingleArray";
    else if (typeCode == TypeCode.Double)
        helperName = "TryReadDoubleArray";

    if (helperName == null)
        return false;

    // Stack: reader, context, member names/namespaces, expected size, address of result array.
    _ilg.Load(_xmlReaderArg);
    _ilg.Load(_contextArg);
    _ilg.Load(_memberNamesArg);
    _ilg.Load(_memberNamespacesArg);
    _ilg.Load(size);
    _ilg.Ldloca(_objectLocal);
    _ilg.Call(typeof(XmlReaderDelegator).GetMethod(helperName, Globals.ScanAllMembers));
    return true;
}
// Emits IL that reads one collection item. Dictionary-shaped collections go through
// the item contract's ReadXmlValue (key/value pair); every other kind reads a plain
// value via ReadValue. Returns the local holding the item.
private LocalBuilder ReadCollectionItem(CollectionDataContract collectionContract, Type itemType, string itemName, string itemNs)
{
    bool isDictionary = collectionContract.Kind == CollectionKind.Dictionary
        || collectionContract.Kind == CollectionKind.GenericDictionary;
    if (!isDictionary)
    {
        return ReadValue(itemType, itemName, itemNs);
    }

    _ilg.Call(_contextArg, XmlFormatGeneratorStatics.ResetAttributesMethod);
    LocalBuilder itemLocal = _ilg.DeclareLocal(itemType, "valueRead");
    // itemContract.ReadXmlValue(reader, context), then cast object -> itemType.
    _ilg.Load(_collectionContractArg);
    _ilg.Call(XmlFormatGeneratorStatics.GetItemContractMethod);
    _ilg.Load(_xmlReaderArg);
    _ilg.Load(_contextArg);
    _ilg.Call(XmlFormatGeneratorStatics.ReadXmlValueMethod);
    _ilg.ConvertValue(Globals.TypeOfObject, itemType);
    _ilg.Stloc(itemLocal);
    return itemLocal;
}
// Emits IL that adds 'value' to 'collection'. Dictionary contracts are split into
// key/value locals (via the pair contract's two members) and added with a
// two-argument Add; other collections use the single-argument Add. A non-void Add
// return value is popped to keep the IL stack balanced.
private void StoreCollectionValue(LocalBuilder collection, LocalBuilder value, CollectionDataContract collectionContract)
{
if (collectionContract.Kind == CollectionKind.GenericDictionary || collectionContract.Kind == CollectionKind.Dictionary)
{
ClassDataContract keyValuePairContract = DataContract.GetDataContract(value.LocalType) as ClassDataContract;
if (keyValuePairContract == null)
{
DiagnosticUtility.DebugAssert("Failed to create contract for KeyValuePair type");
}
// Members[0] is the pair's Key member, Members[1] its Value member.
DataMember keyMember = keyValuePairContract.Members[0];
DataMember valueMember = keyValuePairContract.Members[1];
LocalBuilder pairKey = _ilg.DeclareLocal(keyMember.MemberType, keyMember.Name);
LocalBuilder pairValue = _ilg.DeclareLocal(valueMember.MemberType, valueMember.Name);
_ilg.LoadAddress(value);
_ilg.LoadMember(keyMember.MemberInfo);
_ilg.Stloc(pairKey);
_ilg.LoadAddress(value);
_ilg.LoadMember(valueMember.MemberInfo);
_ilg.Stloc(pairValue);
_ilg.Call(collection, collectionContract.AddMethod, pairKey, pairValue);
if (collectionContract.AddMethod.ReturnType != Globals.TypeOfVoid)
_ilg.Pop();
}
else
{
_ilg.Call(collection, collectionContract.AddMethod, value);
if (collectionContract.AddMethod.ReturnType != Globals.TypeOfVoid)
_ilg.Pop();
}
}
// Emits IL for a node that is neither the expected item element nor the collection's
// end: unknown start elements are skipped and the loop counter decremented (so they
// do not count as items); anything else throws an unexpected-state exception.
private void HandleUnexpectedItemInCollection(LocalBuilder iterator)
{
IsStartElement();
_ilg.If();
_ilg.Call(_contextArg, XmlFormatGeneratorStatics.SkipUnknownElementMethod, _xmlReaderArg);
_ilg.Dec(iterator);
_ilg.Else();
ThrowUnexpectedStateException(XmlNodeType.Element);
_ilg.EndIf();
}
// Emits IL: xmlReader.IsStartElement(names, namespaces); boolean result left on the IL stack.
private void IsStartElement(ArgBuilder nameArg, ArgBuilder nsArg)
{
_ilg.Call(_xmlReaderArg, XmlFormatGeneratorStatics.IsStartElementMethod2, nameArg, nsArg);
}
// Emits IL: xmlReader.IsStartElement(); boolean result left on the IL stack.
private void IsStartElement()
{
_ilg.Call(_xmlReaderArg, XmlFormatGeneratorStatics.IsStartElementMethod0);
}
// Emits IL that compares the reader's current NodeType to EndElement, leaving the
// boolean comparison result on the IL stack.
private void IsEndElement()
{
_ilg.Load(_xmlReaderArg);
_ilg.LoadMember(XmlFormatGeneratorStatics.NodeTypeProperty);
_ilg.Load(XmlNodeType.EndElement);
_ilg.Ceq();
}
// Emits IL that creates the standard "unexpected state" exception for the given
// expected node type and throws it.
private void ThrowUnexpectedStateException(XmlNodeType expectedState)
{
_ilg.Call(null, XmlFormatGeneratorStatics.CreateUnexpectedStateExceptionMethod, expectedState, _xmlReaderArg);
_ilg.Throw();
}
// Emits IL that loads 'msg' and throws a SerializationException built from it.
// NOTE(review): 'values' is never used — callers pre-format the message (e.g. via
// SR.Format) before calling; confirm before removing the parameter.
private void ThrowValidationException(string msg, params object[] values)
{
    _ilg.Load(msg);
    ThrowValidationException();
}
// Emits IL that turns the message currently on the IL stack into a
// SerializationException (via helper) and throws it.
private void ThrowValidationException()
{
//SerializationException is internal in SL and so cannot be directly invoked from DynamicMethod
//So use helper function to create SerializationException
_ilg.Call(XmlFormatGeneratorStatics.CreateSerializationExceptionMethod);
_ilg.Throw();
}
}
#endif
// Allocates an instance of 'type' without running a constructor when possible;
// falls back to Activator.CreateInstance (which DOES run the default constructor)
// if FormatterServices cannot be located via reflection.
[SecuritySafeCritical]
static internal object UnsafeGetUninitializedObject(Type type)
{
#if !NET_NATIVE
return TryGetUninitializedObjectWithFormatterServices(type) ?? Activator.CreateInstance(type);
#else
// .NET Native: reflection-free allocation through the runtime augments API.
return RuntimeAugments.NewObject(type.TypeHandle);
#endif
}
/// <SecurityNote>
/// Critical - Elevates by calling GetUninitializedObject which has a LinkDemand
/// Safe - marked as such so that it's callable from transparent generated IL. Takes id as parameter which
/// is guaranteed to be in internal serialization cache.
/// </SecurityNote>
[SecuritySafeCritical]
#if USE_REFEMIT
public static object UnsafeGetUninitializedObject(int id)
#else
static internal object UnsafeGetUninitializedObject(int id)
#endif
{
// Resolve the cached contract by id, then allocate its initialization type
// without running a constructor.
var type = DataContract.GetDataContractForInitialization(id).TypeForInitialization;
return UnsafeGetUninitializedObject(type);
}
// Looks up System.Runtime.Serialization.FormatterServices.GetUninitializedObject by
// reflection in the core library assembly and uses it to allocate 'type' without
// running a constructor. Returns null when the type or method cannot be found.
static internal object TryGetUninitializedObjectWithFormatterServices(Type type)
{
    var coreAssembly = typeof(string).GetTypeInfo().Assembly;
    var formatterServices = coreAssembly.GetType("System.Runtime.Serialization.FormatterServices");
    if (formatterServices == null)
    {
        return null;
    }

    var getUninitializedObject = formatterServices.GetMethod(
        "GetUninitializedObject",
        BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static);
    if (getUninitializedObject == null)
    {
        return null;
    }

    return getUninitializedObject.Invoke(null, new object[] { type });
}
}
}
| |
//
// Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using JetBrains.Annotations;
using NLog.Common;
using NLog.Config;
using NLog.Internal;
using NLog.Targets;
using NLog.Internal.Fakeables;
using System.Reflection;
#if SILVERLIGHT
using System.Windows;
#endif
/// <summary>
/// Creates and manages instances of <see cref="T:NLog.Logger" /> objects.
/// </summary>
public class LogFactory : IDisposable
{
#if !SILVERLIGHT
private const int ReconfigAfterFileChangedTimeout = 1000;
private static TimeSpan defaultFlushTimeout = TimeSpan.FromSeconds(15);
private Timer reloadTimer;
private readonly MultiFileWatcher watcher;
#endif
private static IAppDomain currentAppDomain;
private readonly object syncRoot = new object();
private LoggingConfiguration config;
private LogLevel globalThreshold = LogLevel.MinLevel;
private bool configLoaded;
// TODO: logsEnabled property might be possible to be encapsulated into LogFactory.LogsEnabler class.
private int logsEnabled;
private readonly LoggerCache loggerCache = new LoggerCache();
/// <summary>
/// Occurs when logging <see cref="Configuration" /> changes.
/// </summary>
public event EventHandler<LoggingConfigurationChangedEventArgs> ConfigurationChanged;
#if !SILVERLIGHT
/// <summary>
/// Occurs when logging <see cref="Configuration" /> gets reloaded.
/// </summary>
public event EventHandler<LoggingConfigurationReloadedEventArgs> ConfigurationReloaded;
#endif
/// <summary>
/// Initializes a new instance of the <see cref="LogFactory" /> class.
/// </summary>
public LogFactory()
{
#if !SILVERLIGHT
// Watch configuration files so edits are picked up at runtime; ConfigFileChanged
// and currentAppDomain_DomainUnload are defined elsewhere in this class.
this.watcher = new MultiFileWatcher();
this.watcher.OnChange += this.ConfigFileChanged;
// Clean up when the hosting AppDomain unloads.
CurrentAppDomain.DomainUnload += currentAppDomain_DomainUnload;
#endif
}
/// <summary>
/// Initializes a new instance of the <see cref="LogFactory" /> class.
/// </summary>
/// <param name="config">The config.</param>
public LogFactory(LoggingConfiguration config)
: this()
{
// Assigning through the property (not the field) runs the full setter path:
// InitializeAll, file watching and the ConfigurationChanged notification.
this.Configuration = config;
}
/// <summary>
/// Gets the current <see cref="IAppDomain"/>.
/// </summary>
public static IAppDomain CurrentAppDomain
{
    get
    {
        // Lazily default to a wrapper around the real AppDomain.CurrentDomain.
        if (currentAppDomain == null)
        {
            currentAppDomain = AppDomainWrapper.CurrentDomain;
        }

        return currentAppDomain;
    }

    set
    {
        currentAppDomain = value;
    }
}
/// <summary>
/// Gets or sets a value indicating whether exceptions should be thrown.
/// </summary>
/// <value>A value of <c>true</c> if exception should be thrown; otherwise, <c>false</c>.</value>
/// <remarks>By default exceptions are not thrown under any circumstances.</remarks>
public bool ThrowExceptions { get; set; } // defaults to false (CLR bool default): errors are logged internally, not thrown
/// <summary>
/// Gets or sets the current logging configuration. After setting this property all
/// existing loggers will be re-configured, so that there is no need to call <see cref="ReconfigExistingLoggers" />
/// manually.
/// </summary>
public LoggingConfiguration Configuration
{
// Getter lazily loads configuration exactly once (guarded by configLoaded):
// app.config section first, then candidate file names, then wires up file
// watching and initializes targets.
get
{
lock (this.syncRoot)
{
if (this.configLoaded)
{
return this.config;
}
// Mark loaded up front so a failed load is not retried on every access.
this.configLoaded = true;
#if !SILVERLIGHT
if (this.config == null)
{
// Try to load default configuration.
this.config = XmlLoggingConfiguration.AppConfig;
}
#endif
// Retest the condition as we might have loaded a config.
if (this.config == null)
{
foreach (string configFile in GetCandidateConfigFileNames())
{
#if SILVERLIGHT
Uri configFileUri = new Uri(configFile, UriKind.Relative);
if (Application.GetResourceStream(configFileUri) != null)
{
LoadLoggingConfiguration(configFile);
break;
}
#else
if (File.Exists(configFile))
{
LoadLoggingConfiguration(configFile);
break;
}
#endif
}
}
if (this.config != null)
{
#if !SILVERLIGHT
config.Dump();
try
{
this.watcher.Watch(this.config.FileNamesToWatch);
}
catch (Exception exception)
{
// Best-effort: watching failures must not prevent logging from starting.
InternalLogger.Warn("Cannot start file watching: {0}. File watching is disabled", exception);
}
#endif
this.config.InitializeAll();
LogConfigurationInitialized();
}
return this.config;
}
}
// Setter swaps configurations: stops watching, flushes and closes the old
// config, installs the new one, reconfigures cached loggers, resumes
// watching, and raises ConfigurationChanged.
set
{
#if !SILVERLIGHT
try
{
this.watcher.StopWatching();
}
catch (Exception exception)
{
if (exception.MustBeRethrown())
{
throw;
}
InternalLogger.Error("Cannot stop file watching: {0}", exception);
}
#endif
lock (this.syncRoot)
{
LoggingConfiguration oldConfig = this.config;
if (oldConfig != null)
{
InternalLogger.Info("Closing old configuration.");
#if !SILVERLIGHT
this.Flush();
#endif
oldConfig.Close();
}
this.config = value;
this.configLoaded = true;
if (this.config != null)
{
config.Dump();
this.config.InitializeAll();
this.ReconfigExistingLoggers();
#if !SILVERLIGHT
try
{
this.watcher.Watch(this.config.FileNamesToWatch);
}
catch (Exception exception)
{
if (exception.MustBeRethrown())
{
throw;
}
InternalLogger.Warn("Cannot start file watching: {0}", exception);
}
#endif
}
this.OnConfigurationChanged(new LoggingConfigurationChangedEventArgs(value, oldConfig));
}
}
}
/// <summary>
/// Gets or sets the global log threshold. Log events below this threshold are not logged.
/// </summary>
public LogLevel GlobalThreshold
{
get
{
return this.globalThreshold;
}
set
{
// Reconfigure all cached loggers under the factory lock so the new
// minimum level takes effect immediately.
lock (this.syncRoot)
{
this.globalThreshold = value;
this.ReconfigExistingLoggers();
}
}
}
/// <summary>
/// Gets the default culture info to use as <see cref="LogEventInfo.FormatProvider"/>.
/// </summary>
/// <value>
/// Specific culture info or null to use <see cref="CultureInfo.CurrentCulture"/>
/// </value>
[CanBeNull]
public CultureInfo DefaultCultureInfo
{
    get
    {
        // Delegate to the active configuration; null when no configuration is
        // loaded or when it does not specify a culture (callers then fall back
        // to CultureInfo.CurrentCulture).
        var activeConfiguration = this.Configuration;
        if (activeConfiguration == null)
        {
            return null;
        }

        return activeConfiguration.DefaultCultureInfo;
    }
}
// Records, on the internal (diagnostic) logger, that a configuration became active,
// together with the NLog assembly version.
private void LogConfigurationInitialized()
{
InternalLogger.Info("Configuration initialized.");
InternalLogger.LogAssemblyVersion(typeof(ILogger).Assembly);
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting
/// unmanaged resources.
/// </summary>
public void Dispose()
{
this.Dispose(true);
// Standard dispose pattern; no finalizer is visible in this class, but
// suppression keeps derived classes with finalizers correct.
GC.SuppressFinalize(this);
}
/// <summary>
/// Creates a logger that discards all log messages.
/// </summary>
/// <returns>Null logger instance.</returns>
public Logger CreateNullLogger()
{
    // One (empty) target-chain slot per defined log level -> nothing is ever written.
    var emptyTargets = new TargetWithFilterChain[LogLevel.MaxLevel.Ordinal + 1];
    var nullLogger = new Logger();
    nullLogger.Initialize(string.Empty, new LoggerConfiguration(emptyTargets, false), this);
    return nullLogger;
}
/// <summary>
/// Gets the logger with the name of the current class.
/// </summary>
/// <returns>The logger.</returns>
/// <remarks>This is a slow-running method.
/// Make sure you're not doing this in a loop.</remarks>
// NoInlining keeps the StackFrame(1) pointing at the real caller.
[MethodImpl(MethodImplOptions.NoInlining)]
public Logger GetCurrentClassLogger()
{
#if SILVERLIGHT
var frame = new StackFrame(1);
#else
var frame = new StackFrame(1, false);
#endif
// NOTE(review): DeclaringType can be null (e.g. for lambdas or global methods),
// which would throw NullReferenceException here — confirm whether callers can hit that.
return this.GetLogger(frame.GetMethod().DeclaringType.FullName);
}
/// <summary>
/// Gets a custom logger with the name of the current class. Use <paramref name="loggerType"/> to pass the type of the needed Logger.
/// </summary>
/// <param name="loggerType">The type of the logger to create. The type must inherit from
/// NLog.Logger.</param>
/// <returns>The logger of type <paramref name="loggerType"/>.</returns>
/// <remarks>This is a slow-running method. Make sure you are not calling this method in a
/// loop.</remarks>
// NoInlining keeps the StackFrame(1) pointing at the real caller.
[MethodImpl(MethodImplOptions.NoInlining)]
public Logger GetCurrentClassLogger(Type loggerType)
{
#if !SILVERLIGHT
var frame = new StackFrame(1, false);
#else
var frame = new StackFrame(1);
#endif
// NOTE(review): DeclaringType can be null (e.g. for lambdas or global methods),
// which would throw NullReferenceException here — confirm whether callers can hit that.
return this.GetLogger(frame.GetMethod().DeclaringType.FullName, loggerType);
}
/// <summary>
/// Gets the specified named logger.
/// </summary>
/// <param name="name">Name of the logger.</param>
/// <returns>The logger reference. Multiple calls to <c>GetLogger</c> with the same argument
/// are not guaranteed to return the same logger reference.</returns>
public Logger GetLogger(string name)
{
// The cache key includes the logger type so the same name can map to
// different Logger subclasses via the typed overload.
return this.GetLogger(new LoggerCacheKey(name, typeof(Logger)));
}
/// <summary>
/// Gets the specified named logger. Use <paramref name="loggerType"/> to pass the type of the needed Logger.
/// </summary>
/// <param name="name">Name of the logger.</param>
/// <param name="loggerType">The type of the logger to create. The type must inherit from <see cref="Logger" />.</param>
/// <returns>The logger of type <paramref name="loggerType"/>. Multiple calls to <c>GetLogger</c> with the
/// same argument aren't guaranteed to return the same logger reference.</returns>
public Logger GetLogger(string name, Type loggerType)
{
// Same cache as GetLogger(string); (name, loggerType) together form the key.
return this.GetLogger(new LoggerCacheKey(name, loggerType));
}
/// <summary>
/// Loops through all loggers previously returned by GetLogger and recalculates their
/// target and filter list. Useful after modifying the configuration programmatically
/// to ensure that all loggers have been properly configured.
/// </summary>
public void ReconfigExistingLoggers()
{
    if (this.config != null)
    {
        this.config.InitializeAll();
    }

    // Snapshot the cache first so SetConfiguration cannot trigger
    // "Collection was modified" during enumeration.
    var cachedLoggers = new List<Logger>(loggerCache.Loggers);
    foreach (var cachedLogger in cachedLoggers)
    {
        cachedLogger.SetConfiguration(this.GetConfigurationForLogger(cachedLogger.Name, this.config));
    }
}
#if !SILVERLIGHT
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
public void Flush()
{
// Uses the factory-wide default timeout (15 seconds).
this.Flush(defaultFlushTimeout);
}
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="timeout">Maximum time to allow for the flush. Any messages after that time
/// will be discarded.</param>
public void Flush(TimeSpan timeout)
{
try
{
// Bridge the asynchronous flush to a synchronous call bounded by 'timeout'.
AsyncHelpers.RunSynchronously(cb => this.Flush(cb, timeout));
}
catch (Exception e)
{
if (ThrowExceptions)
{
throw;
}
// By default flushing must not crash the application; log and continue.
InternalLogger.Error(e.ToString());
}
}
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="timeoutMilliseconds">Maximum time to allow for the flush. Any messages
/// after that time will be discarded.</param>
public void Flush(int timeoutMilliseconds)
{
// Convenience overload; converts to TimeSpan and delegates.
this.Flush(TimeSpan.FromMilliseconds(timeoutMilliseconds));
}
#endif
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation.</param>
public void Flush(AsyncContinuation asyncContinuation)
{
// No explicit timeout: effectively waits indefinitely.
this.Flush(asyncContinuation, TimeSpan.MaxValue);
}
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation.</param>
/// <param name="timeoutMilliseconds">Maximum time to allow for the flush. Any messages
/// after that time will be discarded.</param>
public void Flush(AsyncContinuation asyncContinuation, int timeoutMilliseconds)
{
// Convenience overload; converts to TimeSpan and delegates.
this.Flush(asyncContinuation, TimeSpan.FromMilliseconds(timeoutMilliseconds));
}
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation.</param>
/// <param name="timeout">Maximum time to allow for the flush. Any messages after that time will be discarded.</param>
public void Flush(AsyncContinuation asyncContinuation, TimeSpan timeout)
{
try
{
InternalLogger.Trace("LogFactory.Flush({0})", timeout);
var loggingConfiguration = this.Configuration;
if (loggingConfiguration != null)
{
InternalLogger.Trace("Flushing all targets...");
// Wrap the continuation so it completes with a timeout error if targets are too slow.
loggingConfiguration.FlushAllTargets(AsyncHelpers.WithTimeout(asyncContinuation, timeout));
}
else
{
// Nothing to flush; complete the continuation successfully.
asyncContinuation(null);
}
}
catch (Exception e)
{
if (ThrowExceptions)
{
throw;
}
InternalLogger.Error(e.ToString());
}
}
/// <summary>
/// Decreases the log enable counter and if it reaches -1 the logs are disabled.
/// </summary>
/// <remarks>
/// Logging is enabled if the number of <see cref="ResumeLogging"/> calls is greater than
/// or equal to <see cref="SuspendLogging"/> calls.
/// </remarks>
/// <returns>An object that implements IDisposable whose Dispose() method re-enables logging.
/// To be used with C# <c>using ()</c> statement.</returns>
// Kept for backward compatibility; forwards to SuspendLogging().
[Obsolete("Use SuspendLogging() instead.")]
public IDisposable DisableLogging()
{
return SuspendLogging();
}
/// <summary>
/// Increases the log enable counter and if it reaches 0 the logs are re-enabled.
/// </summary>
/// <remarks>
/// Logging is enabled if the number of <see cref="ResumeLogging"/> calls is greater than
/// or equal to <see cref="SuspendLogging"/> calls.</remarks>
[Obsolete("Use ResumeLogging() instead.")]
public void EnableLogging()
{
// Kept for backward compatibility; forwards to ResumeLogging().
ResumeLogging();
}
/// <summary>
/// Decreases the log enable counter and if it reaches -1 the logs are disabled.
/// </summary>
/// <remarks>
/// Logging is enabled if the number of <see cref="ResumeLogging"/> calls is greater than
/// or equal to <see cref="SuspendLogging"/> calls.
/// </remarks>
/// <returns>An object that implements IDisposable whose Dispose() method re-enables logging.
/// To be used with C# <c>using ()</c> statement.</returns>
public IDisposable SuspendLogging()
{
    lock (this.syncRoot)
    {
        // Crossing from 0 to -1 turns logging off; reconfigure so cached
        // loggers drop their target chains.
        this.logsEnabled--;
        if (this.logsEnabled == -1)
        {
            this.ReconfigExistingLoggers();
        }
    }

    // Disposing the returned token calls ResumeLogging() once.
    return new LogEnabler(this);
}
/// <summary>
/// Increases the log enable counter and if it reaches 0 the logs are re-enabled.
/// </summary>
/// <remarks>Logging is enabled if the number of <see cref="ResumeLogging"/> calls is greater
/// than or equal to <see cref="SuspendLogging"/> calls.</remarks>
public void ResumeLogging()
{
lock (this.syncRoot)
{
this.logsEnabled++;
// Crossing from -1 back to 0 re-enables logging; rebuild logger configurations.
if (this.logsEnabled == 0)
{
this.ReconfigExistingLoggers();
}
}
}
/// <summary>
/// Returns <see langword="true" /> if logging is currently enabled.
/// </summary>
/// <returns>A value of <see langword="true" /> if logging is currently enabled,
/// <see langword="false"/> otherwise.</returns>
/// <remarks>Logging is enabled if the number of <see cref="ResumeLogging"/> calls is greater
/// than or equal to <see cref="SuspendLogging"/> calls.</remarks>
public bool IsLoggingEnabled()
{
// logsEnabled starts at 0 (enabled); each SuspendLogging decrements it,
// each ResumeLogging increments it.
return this.logsEnabled >= 0;
}
/// <summary>
/// Invoke the Changed event; called whenever list changes
/// </summary>
/// <param name="e">Event arguments.</param>
protected virtual void OnConfigurationChanged(LoggingConfigurationChangedEventArgs e)
{
    // Copy the delegate to a local so a concurrent unsubscribe cannot null it
    // between the check and the invocation.
    var handler = this.ConfigurationChanged;
    if (handler != null)
    {
        handler(this, e);
    }
}
#if !SILVERLIGHT
// Timer callback: re-reads the configuration after a watched file change.
// 'state' is the configuration that was active when the timer was scheduled;
// the reload is abandoned if the active configuration changed in the meantime.
internal void ReloadConfigOnTimer(object state)
{
LoggingConfiguration configurationToReload = (LoggingConfiguration)state;
InternalLogger.Info("Reloading configuration...");
lock (this.syncRoot)
{
if (this.reloadTimer != null)
{
this.reloadTimer.Dispose();
this.reloadTimer = null;
}
if (IsDisposing)
{
//timer was disposed already.
this.watcher.Dispose();
return;
}
// Pause watching during the reload so our own reads don't retrigger it.
this.watcher.StopWatching();
try
{
if (this.Configuration != configurationToReload)
{
throw new NLogConfigurationException("Config changed in between. Not reloading.");
}
LoggingConfiguration newConfig = configurationToReload.Reload();
//problem: XmlLoggingConfiguration.Initialize eats exception with invalid XML. Also XmlLoggingConfiguration.Reload never returns null.
//therefore we check the InitializeSucceeded property.
var xmlConfig = newConfig as XmlLoggingConfiguration;
if (xmlConfig != null)
{
if (!xmlConfig.InitializeSucceeded.HasValue || !xmlConfig.InitializeSucceeded.Value)
{
throw new NLogConfigurationException("Configuration.Reload() failed. Invalid XML?");
}
}
if (newConfig != null)
{
// Installing via the property resumes watching and raises ConfigurationChanged.
this.Configuration = newConfig;
if (this.ConfigurationReloaded != null)
{
this.ConfigurationReloaded(this, new LoggingConfigurationReloadedEventArgs(true, null));
}
}
else
{
throw new NLogConfigurationException("Configuration.Reload() returned null. Not reloading.");
}
}
catch (Exception exception)
{
if (exception is NLogConfigurationException)
{
InternalLogger.Warn(exception.Message);
}
else if (exception.MustBeRethrown())
{
throw;
}
// On failure: keep watching the old files and report the failed reload.
this.watcher.Watch(configurationToReload.FileNamesToWatch);
var configurationReloadedDelegate = this.ConfigurationReloaded;
if (configurationReloadedDelegate != null)
{
configurationReloadedDelegate(this, new LoggingConfigurationReloadedEventArgs(false, exception));
}
}
}
}
#endif
// Builds, per log level, the chain of TargetWithFilterChain entries that apply to
// logger 'name' by walking 'rules' (and, recursively, their child rules) in order.
// targetsByLevel[i] holds the chain head for level i, lastTargetsByLevel[i] the tail
// used for appending, and suppressedLevels[i] marks levels already claimed by a
// 'final' rule so later rules skip them.
private void GetTargetsByLevelForLogger(string name, IEnumerable<LoggingRule> rules, TargetWithFilterChain[] targetsByLevel, TargetWithFilterChain[] lastTargetsByLevel, bool[] suppressedLevels)
{
//no "System.InvalidOperationException: Collection was modified"
var loggingRules = new List<LoggingRule>(rules);
foreach (LoggingRule rule in loggingRules)
{
if (!rule.NameMatches(name))
{
continue;
}
for (int i = 0; i <= LogLevel.MaxLevel.Ordinal; ++i)
{
// Skip levels below the global threshold, already-finalized levels,
// and levels the rule itself does not enable.
if (i < this.GlobalThreshold.Ordinal || suppressedLevels[i] || !rule.IsLoggingEnabledForLevel(LogLevel.FromOrdinal(i)))
{
continue;
}
// A 'final' rule stops later rules from contributing to this level.
if (rule.Final)
suppressedLevels[i] = true;
foreach (Target target in rule.Targets.ToList())
{
var awf = new TargetWithFilterChain(target, rule.Filters);
if (lastTargetsByLevel[i] != null)
{
lastTargetsByLevel[i].NextInChain = awf;
}
else
{
targetsByLevel[i] = awf;
}
lastTargetsByLevel[i] = awf;
}
}
// Recursively analyze the child rules.
this.GetTargetsByLevelForLogger(name, rule.ChildRules, targetsByLevel, lastTargetsByLevel, suppressedLevels);
}
// Pre-compute whether stack-trace capture is needed for each resulting chain.
for (int i = 0; i <= LogLevel.MaxLevel.Ordinal; ++i)
{
TargetWithFilterChain tfc = targetsByLevel[i];
if (tfc != null)
{
tfc.PrecalculateStackTraceUsage();
}
}
}
// Computes the per-level target chains for logger 'name' from 'configuration'
// (empty chains when logging is suspended or no configuration exists), dumps the
// result to the internal logger, and wraps it in a LoggerConfiguration.
internal LoggerConfiguration GetConfigurationForLogger(string name, LoggingConfiguration configuration)
{
TargetWithFilterChain[] targetsByLevel = new TargetWithFilterChain[LogLevel.MaxLevel.Ordinal + 1];
TargetWithFilterChain[] lastTargetsByLevel = new TargetWithFilterChain[LogLevel.MaxLevel.Ordinal + 1];
bool[] suppressedLevels = new bool[LogLevel.MaxLevel.Ordinal + 1];
if (configuration != null && this.IsLoggingEnabled())
{
this.GetTargetsByLevelForLogger(name, configuration.LoggingRules, targetsByLevel, lastTargetsByLevel, suppressedLevels);
}
// Diagnostic dump of the computed chains, one line per level.
InternalLogger.Debug("Targets for {0} by level:", name);
for (int i = 0; i <= LogLevel.MaxLevel.Ordinal; ++i)
{
StringBuilder sb = new StringBuilder();
sb.AppendFormat(CultureInfo.InvariantCulture, "{0} =>", LogLevel.FromOrdinal(i));
for (TargetWithFilterChain afc = targetsByLevel[i]; afc != null; afc = afc.NextInChain)
{
sb.AppendFormat(CultureInfo.InvariantCulture, " {0}", afc.Target.Name);
if (afc.FilterChain.Count > 0)
{
sb.AppendFormat(CultureInfo.InvariantCulture, " ({0} filters)", afc.FilterChain.Count);
}
}
InternalLogger.Debug(sb.ToString());
}
// ExceptionLoggingOldStyle is obsolete; suppress the warning for this one read.
#pragma warning disable 618
return new LoggerConfiguration(targetsByLevel, configuration != null && configuration.ExceptionLoggingOldStyle);
#pragma warning restore 618
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing"><c>True</c> to release both managed and unmanaged resources;
/// <c>false</c> to release only unmanaged resources.</param>
protected virtual void Dispose(bool disposing)
{
#if !SILVERLIGHT
    if (disposing)
    {
        // Guard against a watcher that was never created (e.g. construction failed
        // part-way); the reloadTimer below is already guarded the same way.
        if (this.watcher != null)
        {
            this.watcher.Dispose();
        }
        if (this.reloadTimer != null)
        {
            this.reloadTimer.Dispose();
            this.reloadTimer = null;
        }
    }
#endif
}
/// <summary>
/// Enumerates, in probing order, the candidate file names from which the NLog
/// configuration may be loaded. Lazily evaluated; candidates are not checked
/// for existence here — callers are expected to do that.
/// </summary>
private static IEnumerable<string> GetCandidateConfigFileNames()
{
#if SILVERLIGHT
    yield return "NLog.config";
#else
    // NLog.config from application directory
    if (CurrentAppDomain.BaseDirectory != null)
    {
        yield return Path.Combine(CurrentAppDomain.BaseDirectory, "NLog.config");
    }
    // Current config file with .config renamed to .nlog
    string cf = CurrentAppDomain.ConfigurationFile;
    if (cf != null)
    {
        yield return Path.ChangeExtension(cf, ".nlog");
        // .nlog file based on the non-vshost version of the current config file
        const string vshostSubStr = ".vshost.";
        if (cf.Contains(vshostSubStr))
        {
            yield return Path.ChangeExtension(cf.Replace(vshostSubStr, "."), ".nlog");
        }
        // NLog.config inside each of the AppDomain's private bin paths (entries may be null)
        IEnumerable<string> privateBinPaths = CurrentAppDomain.PrivateBinPath;
        if (privateBinPaths != null)
        {
            foreach (var path in privateBinPaths)
            {
                if (path != null)
                {
                    yield return Path.Combine(path, "NLog.config");
                }
            }
        }
    }
    // Get path to NLog.dll.nlog only if the assembly is not in the GAC
    var nlogAssembly = typeof(LogFactory).Assembly;
    if (!nlogAssembly.GlobalAssemblyCache)
    {
        if (!string.IsNullOrEmpty(nlogAssembly.Location))
        {
            yield return nlogAssembly.Location + ".nlog";
        }
    }
#endif
}
/// <summary>
/// Gets (or creates and caches) the logger for the given cache key, honoring
/// the concrete logger type requested via GetLogger / GetCurrentClassLogger.
/// Falls back to a default <see cref="Logger"/> when the requested type cannot
/// be instantiated (unless <c>ThrowExceptions</c> is set).
/// </summary>
/// <param name="cacheKey">Name plus concrete logger type identifying the logger.</param>
/// <returns>A cached or newly created logger instance.</returns>
private Logger GetLogger(LoggerCacheKey cacheKey)
{
    lock (this.syncRoot)
    {
        Logger existingLogger = loggerCache.Retrieve(cacheKey);
        if (existingLogger != null)
        {
            // Logger is still in cache and referenced.
            return existingLogger;
        }
        Logger newLogger;
        if (cacheKey.ConcreteType != null && cacheKey.ConcreteType != typeof(Logger))
        {
            var fullName = cacheKey.ConcreteType.FullName;
            try
            {
                // Creating an instance of a static class isn't possible, and also not
                // wanted (a static class cannot inherit from Logger).
                if (cacheKey.ConcreteType.IsStaticClass())
                {
                    var errorMessage = string.Format("GetLogger / GetCurrentClassLogger got '{0}' as loggerType which is a static class; it cannot be instantiated and cannot inherit from Logger",
                        fullName);
                    InternalLogger.Error(errorMessage);
                    if (ThrowExceptions)
                    {
                        throw new NLogRuntimeException(errorMessage);
                    }
                    newLogger = CreateDefaultLogger(ref cacheKey);
                }
                else
                {
                    var instance = FactoryHelper.CreateInstance(cacheKey.ConcreteType);
                    newLogger = instance as Logger;
                    if (newLogger == null)
                    {
                        // Well, it's not a Logger, and we should return a Logger.
                        var errorMessage = string.Format("GetLogger / GetCurrentClassLogger got '{0}' as loggerType which doesn't inherit from Logger", fullName);
                        InternalLogger.Error(errorMessage);
                        if (ThrowExceptions)
                        {
                            throw new NLogRuntimeException(errorMessage);
                        }
                        // Creating default instance of logger if instance of specified type cannot be created.
                        newLogger = CreateDefaultLogger(ref cacheKey);
                    }
                }
            }
            catch (Exception ex)
            {
                if (ex.MustBeRethrown())
                {
                    throw;
                }
                var errorMessage = string.Format("GetLogger / GetCurrentClassLogger. Cannot create instance of type '{0}'. It should have a default constructor.", fullName);
                if (ThrowExceptions)
                {
                    throw new NLogRuntimeException(errorMessage, ex);
                }
                InternalLogger.Error(errorMessage + " Exception : {0}", ex);
                // Creating default instance of logger if instance of specified type cannot be created.
                newLogger = CreateDefaultLogger(ref cacheKey);
            }
        }
        else
        {
            newLogger = new Logger();
        }
        if (cacheKey.ConcreteType != null)
        {
            newLogger.Initialize(cacheKey.Name, this.GetConfigurationForLogger(cacheKey.Name, this.Configuration), this);
        }
        // TODO: Clarify what is the intention when cacheKey.ConcreteType = null.
        // At the moment, a logger typeof(Logger) will be created but the ConcreteType
        // will remain null and inserted into the cache.
        // Should we set cacheKey.ConcreteType = typeof(Logger) for default loggers?
        loggerCache.InsertOrUpdate(cacheKey, newLogger);
        return newLogger;
    }
}
/// <summary>
/// Creates a plain <see cref="Logger"/> and rewrites the cache key so it is
/// keyed on the default logger type instead of the originally requested one.
/// </summary>
/// <param name="cacheKey">Cache key; replaced with one carrying typeof(Logger).</param>
/// <returns>The newly created default logger.</returns>
private static Logger CreateDefaultLogger(ref LoggerCacheKey cacheKey)
{
    cacheKey = new LoggerCacheKey(cacheKey.Name, typeof(Logger));
    return new Logger();
}
#if !SILVERLIGHT
private void ConfigFileChanged(object sender, EventArgs args)
{
    InternalLogger.Info("Configuration file change detected! Reloading in {0}ms...", LogFactory.ReconfigAfterFileChangedTimeout);
    // File watchers may fire several notifications for a single save. Debounce by
    // (re)arming a one-shot timer, so the reload happens only once, a fixed delay
    // after the last notification comes in.
    lock (this.syncRoot)
    {
        if (this.reloadTimer != null)
        {
            this.reloadTimer.Change(
                LogFactory.ReconfigAfterFileChangedTimeout,
                Timeout.Infinite);
        }
        else
        {
            this.reloadTimer = new Timer(
                this.ReloadConfigOnTimer,
                this.Configuration,
                LogFactory.ReconfigAfterFileChangedTimeout,
                Timeout.Infinite);
        }
    }
}
#endif
/// <summary>
/// Loads the logging configuration from the given XML file and stores it as the
/// current configuration.
/// </summary>
/// <param name="configFile">Path to the XML configuration file to load.</param>
private void LoadLoggingConfiguration(string configFile)
{
    InternalLogger.Debug("Loading config from {0}", configFile);
    this.config = new XmlLoggingConfiguration(configFile);
}
#if !SILVERLIGHT
/// <summary>
/// Indicates whether this log factory is currently being disposed.
/// </summary>
private bool IsDisposing;
private void currentAppDomain_DomainUnload(object sender, EventArgs e)
{
    // Stop the reload timer when the AppDomain unloads. If it fired afterwards we
    // would get System.AppDomainUnloadedException:
    // "Attempted to access an unloaded AppDomain."
    lock (this.syncRoot)
    {
        IsDisposing = true;
        var timer = this.reloadTimer;
        if (timer != null)
        {
            this.reloadTimer = null;
            timer.Dispose();
        }
    }
}
#endif
/// <summary>
/// Logger cache key.
/// </summary>
internal class LoggerCacheKey : IEquatable<LoggerCacheKey>
{
    // Logger name part of the key.
    public string Name { get; private set; }
    // Concrete Logger subclass part of the key; may be null for default loggers
    // (see the TODO in GetLogger).
    public Type ConcreteType { get; private set; }
    public LoggerCacheKey(string name, Type concreteType)
    {
        this.Name = name;
        this.ConcreteType = concreteType;
    }
    /// <summary>
    /// Serves as a hash function for a particular type.
    /// </summary>
    /// <returns>
    /// A hash code for the current <see cref="T:System.Object"/>.
    /// </returns>
    public override int GetHashCode()
    {
        // ConcreteType (and, defensively, Name) may be null — default loggers are
        // cached with a null concrete type — so avoid a NullReferenceException here.
        int typeHash = this.ConcreteType != null ? this.ConcreteType.GetHashCode() : 0;
        int nameHash = this.Name != null ? this.Name.GetHashCode() : 0;
        return typeHash ^ nameHash;
    }
    /// <summary>
    /// Determines if two objects are equal in value.
    /// </summary>
    /// <param name="obj">Other object to compare to.</param>
    /// <returns>True if objects are equal, false otherwise.</returns>
    public override bool Equals(object obj)
    {
        // Delegate to the strongly-typed overload instead of duplicating the logic.
        return this.Equals(obj as LoggerCacheKey);
    }
    /// <summary>
    /// Determines if two objects of the same type are equal in value.
    /// </summary>
    /// <param name="key">Other object to compare to.</param>
    /// <returns>True if objects are equal, false otherwise.</returns>
    public bool Equals(LoggerCacheKey key)
    {
        if (ReferenceEquals(key, null))
        {
            return false;
        }
        return (this.ConcreteType == key.ConcreteType) && (key.Name == this.Name);
    }
}
/// <summary>
/// Logger cache.
/// </summary>
private class LoggerCache
{
    // Weak references hold Logger instances, i.e. effectively a
    // Dictionary<LoggerCacheKey, Logger> that does not keep loggers alive.
    private readonly Dictionary<LoggerCacheKey, WeakReference> loggerCache =
        new Dictionary<LoggerCacheKey, WeakReference>();
    /// <summary>
    /// Adds the logger under the given key, replacing any previous entry.
    /// </summary>
    /// <param name="cacheKey">Key identifying the logger.</param>
    /// <param name="logger">Logger instance to cache (weakly referenced).</param>
    public void InsertOrUpdate(LoggerCacheKey cacheKey, Logger logger)
    {
        loggerCache[cacheKey] = new WeakReference(logger);
    }
    /// <summary>
    /// Looks up a cached logger. Returns null when the key is unknown or the
    /// logger has already been garbage-collected.
    /// </summary>
    public Logger Retrieve(LoggerCacheKey cacheKey)
    {
        WeakReference reference;
        return loggerCache.TryGetValue(cacheKey, out reference)
            ? reference.Target as Logger
            : null;
    }
    /// <summary>
    /// Gets a snapshot of all cached loggers that are still alive.
    /// </summary>
    public IEnumerable<Logger> Loggers
    {
        get { return GetLoggers(); }
    }
    private IEnumerable<Logger> GetLoggers()
    {
        // Materialize into a list, skipping entries whose target was collected.
        var alive = new List<Logger>(loggerCache.Count);
        foreach (WeakReference reference in loggerCache.Values)
        {
            var target = reference.Target as Logger;
            if (target != null)
            {
                alive.Add(target);
            }
        }
        return alive;
    }
}
/// <summary>
/// Enables logging in <see cref="IDisposable.Dispose"/> implementation.
/// </summary>
private class LogEnabler : IDisposable
{
    // Factory whose logging is resumed when this scope is disposed.
    private LogFactory owner;
    /// <summary>
    /// Initializes a new instance of the <see cref="LogEnabler" /> class.
    /// </summary>
    /// <param name="factory">The factory to resume logging on.</param>
    public LogEnabler(LogFactory factory)
    {
        this.owner = factory;
    }
    /// <summary>
    /// Enables logging by calling <c>ResumeLogging</c> on the associated factory.
    /// </summary>
    void IDisposable.Dispose()
    {
        this.owner.ResumeLogging();
    }
}
}
}
| |
--- /dev/null 2016-01-28 15:56:29.000000000 -0500
+++ src/System.Console/src/SR.cs 2016-01-28 15:57:05.566763000 -0500
@@ -0,0 +1,389 @@
+using System.Resources;
+
+namespace FxResources.System.Console
+{
+ internal class SR
+ {
+ }
+}
+
+namespace System
+{
+ internal static class SR
+ {
+ private static ResourceManager s_resourceManager;
+
+ private const String s_resourcesName = "FxResources.System.Console.SR";
+
+ internal static String InvalidOperation_ConsoleKeyAvailableOnFile
+ {
+ get
+ {
+ return SR.GetResourceString("InvalidOperation_ConsoleKeyAvailableOnFile", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_ConsoleTitleTooLong
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_ConsoleTitleTooLong", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_ConsoleBufferBoundaries
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_ConsoleBufferBoundaries", null);
+ }
+ }
+
+
+ internal static String Arg_InvalidConsoleColor
+ {
+ get
+ {
+ return SR.GetResourceString("Arg_InvalidConsoleColor", null);
+ }
+ }
+
+ internal static String Argument_InvalidOffLen
+ {
+ get
+ {
+ return SR.GetResourceString("Argument_InvalidOffLen", null);
+ }
+ }
+
+ internal static String ArgumentNull_Buffer
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentNull_Buffer", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_ConsoleKey
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_ConsoleKey", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_ConsoleWindowBufferSize
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_ConsoleWindowBufferSize", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_ConsoleWindowSize_Size
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_ConsoleWindowSize_Size", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_FileLengthTooBig
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_FileLengthTooBig", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_NeedNonNegNum
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_NeedNonNegNum", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_NeedPosNum
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_NeedPosNum", null);
+ }
+ }
+
+ internal static String IndexOutOfRange_IORaceCondition
+ {
+ get
+ {
+ return SR.GetResourceString("IndexOutOfRange_IORaceCondition", null);
+ }
+ }
+
+ internal static String InvalidOperation_ConsoleReadKeyOnFile
+ {
+ get
+ {
+ return SR.GetResourceString("InvalidOperation_ConsoleReadKeyOnFile", null);
+ }
+ }
+
+ internal static String InvalidOperation_EmptyStack
+ {
+ get
+ {
+ return SR.GetResourceString("InvalidOperation_EmptyStack", null);
+ }
+ }
+
+ internal static String InvalidOperation_PrintF
+ {
+ get
+ {
+ return SR.GetResourceString("InvalidOperation_PrintF", null);
+ }
+ }
+
+ internal static String IO_AlreadyExists_Name
+ {
+ get
+ {
+ return SR.GetResourceString("IO_AlreadyExists_Name", null);
+ }
+ }
+
+ internal static String IO_FileExists_Name
+ {
+ get
+ {
+ return SR.GetResourceString("IO_FileExists_Name", null);
+ }
+ }
+
+ internal static String IO_FileNotFound
+ {
+ get
+ {
+ return SR.GetResourceString("IO_FileNotFound", null);
+ }
+ }
+
+ internal static String IO_FileNotFound_FileName
+ {
+ get
+ {
+ return SR.GetResourceString("IO_FileNotFound_FileName", null);
+ }
+ }
+
+ internal static String IO_NoConsole
+ {
+ get
+ {
+ return SR.GetResourceString("IO_NoConsole", null);
+ }
+ }
+
+ internal static String IO_PathNotFound_NoPathName
+ {
+ get
+ {
+ return SR.GetResourceString("IO_PathNotFound_NoPathName", null);
+ }
+ }
+
+ internal static String IO_PathNotFound_Path
+ {
+ get
+ {
+ return SR.GetResourceString("IO_PathNotFound_Path", null);
+ }
+ }
+
+ internal static String IO_PathTooLong
+ {
+ get
+ {
+ return SR.GetResourceString("IO_PathTooLong", null);
+ }
+ }
+
+ internal static String IO_SharingViolation_File
+ {
+ get
+ {
+ return SR.GetResourceString("IO_SharingViolation_File", null);
+ }
+ }
+
+ internal static String IO_SharingViolation_NoFileName
+ {
+ get
+ {
+ return SR.GetResourceString("IO_SharingViolation_NoFileName", null);
+ }
+ }
+
+ internal static String IO_TermInfoInvalid
+ {
+ get
+ {
+ return SR.GetResourceString("IO_TermInfoInvalid", null);
+ }
+ }
+
+ internal static String NotSupported_UnreadableStream
+ {
+ get
+ {
+ return SR.GetResourceString("NotSupported_UnreadableStream", null);
+ }
+ }
+
+ internal static String NotSupported_UnseekableStream
+ {
+ get
+ {
+ return SR.GetResourceString("NotSupported_UnseekableStream", null);
+ }
+ }
+
+ internal static String NotSupported_UnwritableStream
+ {
+ get
+ {
+ return SR.GetResourceString("NotSupported_UnwritableStream", null);
+ }
+ }
+
+ internal static String ObjectDisposed_FileClosed
+ {
+ get
+ {
+ return SR.GetResourceString("ObjectDisposed_FileClosed", null);
+ }
+ }
+
+ internal static String PersistedFiles_NoHomeDirectory
+ {
+ get
+ {
+ return SR.GetResourceString("PersistedFiles_NoHomeDirectory", null);
+ }
+ }
+
+ internal static String PlatformNotSupported_GettingColor
+ {
+ get
+ {
+ return SR.GetResourceString("PlatformNotSupported_GettingColor", null);
+ }
+ }
+
+ private static ResourceManager ResourceManager
+ {
+ get
+ {
+ if (SR.s_resourceManager == null)
+ {
+ SR.s_resourceManager = new ResourceManager(SR.ResourceType);
+ }
+ return SR.s_resourceManager;
+ }
+ }
+
+ internal static Type ResourceType
+ {
+ get
+ {
+ return typeof(FxResources.System.Console.SR);
+ }
+ }
+
+ internal static String UnauthorizedAccess_IODenied_NoPathName
+ {
+ get
+ {
+ return SR.GetResourceString("UnauthorizedAccess_IODenied_NoPathName", null);
+ }
+ }
+
+ internal static String UnauthorizedAccess_IODenied_Path
+ {
+ get
+ {
+ return SR.GetResourceString("UnauthorizedAccess_IODenied_Path", null);
+ }
+ }
+
+ internal static String UnknownError_Num
+ {
+ get
+ {
+ return SR.GetResourceString("UnknownError_Num", null);
+ }
+ }
+
+ internal static String Format(String resourceFormat, params Object[] args)
+ {
+ if (args == null)
+ {
+ return resourceFormat;
+ }
+ if (!SR.UsingResourceKeys())
+ {
+ return String.Format(resourceFormat, args);
+ }
+ return String.Concat(resourceFormat, String.Join(", ", args));
+ }
+
+ internal static String Format(String resourceFormat, Object p1)
+ {
+ if (!SR.UsingResourceKeys())
+ {
+ return String.Format(resourceFormat, p1);
+ }
+ return String.Join(", ", new Object[] { resourceFormat, p1 });
+ }
+
+ internal static String Format(String resourceFormat, Object p1, Object p2)
+ {
+ if (!SR.UsingResourceKeys())
+ {
+ return String.Format(resourceFormat, p1, p2);
+ }
+ return String.Join(", ", new Object[] { resourceFormat, p1, p2 });
+ }
+
+ internal static String Format(String resourceFormat, Object p1, Object p2, Object p3)
+ {
+ if (!SR.UsingResourceKeys())
+ {
+ return String.Format(resourceFormat, p1, p2, p3);
+ }
+ return String.Join(", ", new Object[] { resourceFormat, p1, p2, p3 });
+ }
+
+ internal static String GetResourceString(String resourceKey, String defaultString)
+ {
+ String str = null;
+ try
+ {
+ str = SR.ResourceManager.GetString(resourceKey);
+ }
+ catch (MissingManifestResourceException missingManifestResourceException)
+ {
+ }
+ if (defaultString != null && resourceKey.Equals(str))
+ {
+ return defaultString;
+ }
+ return str;
+ }
+
+ private static Boolean UsingResourceKeys()
+ {
+ return false;
+ }
+ }
+}
| |
/*
* Infoplus API
*
* Infoplus API.
*
* OpenAPI spec version: v1.0
* Contact: api@infopluscommerce.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
namespace Infoplus.Model
{
/// <summary>
/// OrderSourceReservation — a reserved quantity of inventory tied to an order source.
/// </summary>
[DataContract]
public partial class OrderSourceReservation : IEquatable<OrderSourceReservation>
{
    /// <summary>
    /// Initializes a new instance of the <see cref="OrderSourceReservation" /> class.
    /// Parameterless constructor reserved for the JSON deserializer.
    /// </summary>
    [JsonConstructorAttribute]
    protected OrderSourceReservation() { }
    /// <summary>
    /// Initializes a new instance of the <see cref="OrderSourceReservation" /> class.
    /// </summary>
    /// <param name="OrderSourceId">OrderSourceId (required).</param>
    /// <param name="ReservedQuantity">ReservedQuantity (required).</param>
    /// <param name="Sku">Sku.</param>
    public OrderSourceReservation(int? OrderSourceId = null, int? ReservedQuantity = null, string Sku = null)
    {
        // "OrderSourceId" is required (must not be null).
        if (OrderSourceId == null)
        {
            throw new InvalidDataException("OrderSourceId is a required property for OrderSourceReservation and cannot be null");
        }
        this.OrderSourceId = OrderSourceId;
        // "ReservedQuantity" is required (must not be null).
        if (ReservedQuantity == null)
        {
            throw new InvalidDataException("ReservedQuantity is a required property for OrderSourceReservation and cannot be null");
        }
        this.ReservedQuantity = ReservedQuantity;
        this.Sku = Sku;
    }
    /// <summary>
    /// Gets or Sets Id (server-assigned; read-only to callers)
    /// </summary>
    [DataMember(Name="id", EmitDefaultValue=false)]
    public int? Id { get; private set; }
    /// <summary>
    /// Gets or Sets OrderSourceId
    /// </summary>
    [DataMember(Name="orderSourceId", EmitDefaultValue=false)]
    public int? OrderSourceId { get; set; }
    /// <summary>
    /// Gets or Sets CreateDate (server-assigned; read-only to callers)
    /// </summary>
    [DataMember(Name="createDate", EmitDefaultValue=false)]
    public DateTime? CreateDate { get; private set; }
    /// <summary>
    /// Gets or Sets ModifyDate (server-assigned; read-only to callers)
    /// </summary>
    [DataMember(Name="modifyDate", EmitDefaultValue=false)]
    public DateTime? ModifyDate { get; private set; }
    /// <summary>
    /// Gets or Sets ReservedQuantity
    /// </summary>
    [DataMember(Name="reservedQuantity", EmitDefaultValue=false)]
    public int? ReservedQuantity { get; set; }
    /// <summary>
    /// Gets or Sets Sku
    /// </summary>
    [DataMember(Name="sku", EmitDefaultValue=false)]
    public string Sku { get; set; }
    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var builder = new StringBuilder();
        builder.Append("class OrderSourceReservation {\n");
        builder.Append("  Id: ").Append(Id).Append("\n");
        builder.Append("  OrderSourceId: ").Append(OrderSourceId).Append("\n");
        builder.Append("  CreateDate: ").Append(CreateDate).Append("\n");
        builder.Append("  ModifyDate: ").Append(ModifyDate).Append("\n");
        builder.Append("  ReservedQuantity: ").Append(ReservedQuantity).Append("\n");
        builder.Append("  Sku: ").Append(Sku).Append("\n");
        builder.Append("}\n");
        return builder.ToString();
    }
    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }
    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        // Delegates to the strongly-typed overload; a non-OrderSourceReservation
        // argument becomes null via "as" and compares unequal.
        return this.Equals(obj as OrderSourceReservation);
    }
    /// <summary>
    /// Returns true if OrderSourceReservation instances are equal
    /// </summary>
    /// <param name="other">Instance of OrderSourceReservation to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(OrderSourceReservation other)
    {
        if (other == null)
            return false;
        // object.Equals handles Nullable<T> and string uniformly, including the
        // cases where one or both sides are null.
        return object.Equals(this.Id, other.Id)
            && object.Equals(this.OrderSourceId, other.OrderSourceId)
            && object.Equals(this.CreateDate, other.CreateDate)
            && object.Equals(this.ModifyDate, other.ModifyDate)
            && object.Equals(this.ReservedQuantity, other.ReservedQuantity)
            && object.Equals(this.Sku, other.Sku);
    }
    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        // Standard prime-multiply accumulation; null members contribute nothing.
        unchecked // Overflow is fine, just wrap
        {
            int hash = 41;
            if (this.Id != null)
            {
                hash = hash * 59 + this.Id.GetHashCode();
            }
            if (this.OrderSourceId != null)
            {
                hash = hash * 59 + this.OrderSourceId.GetHashCode();
            }
            if (this.CreateDate != null)
            {
                hash = hash * 59 + this.CreateDate.GetHashCode();
            }
            if (this.ModifyDate != null)
            {
                hash = hash * 59 + this.ModifyDate.GetHashCode();
            }
            if (this.ReservedQuantity != null)
            {
                hash = hash * 59 + this.ReservedQuantity.GetHashCode();
            }
            if (this.Sku != null)
            {
                hash = hash * 59 + this.Sku.GetHashCode();
            }
            return hash;
        }
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.