context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace CustomerPortal.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
    // Maps each supported data-annotation attribute type to a function that renders
    // its help-page text from the attribute instance.
    // Modify this to support more data annotation attributes.
    private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
    {
        { typeof(RequiredAttribute), a => "Required" },
        { typeof(RangeAttribute), a =>
            {
                RangeAttribute range = (RangeAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
            }
        },
        { typeof(MaxLengthAttribute), a =>
            {
                MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
            }
        },
        { typeof(MinLengthAttribute), a =>
            {
                MinLengthAttribute minLength = (MinLengthAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
            }
        },
        { typeof(StringLengthAttribute), a =>
            {
                StringLengthAttribute strLength = (StringLengthAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
            }
        },
        { typeof(DataTypeAttribute), a =>
            {
                DataTypeAttribute dataType = (DataTypeAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
            }
        },
        { typeof(RegularExpressionAttribute), a =>
            {
                RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
            }
        },
    };
    // Built-in, human-readable documentation for common simple types. A type present
    // here is treated as a "simple type" by GetOrCreateModelDescription.
    // Modify this to add more default documentations.
    private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
    {
        { typeof(Int16), "integer" },
        { typeof(Int32), "integer" },
        { typeof(Int64), "integer" },
        { typeof(UInt16), "unsigned integer" },
        { typeof(UInt32), "unsigned integer" },
        { typeof(UInt64), "unsigned integer" },
        { typeof(Byte), "byte" },
        { typeof(Char), "character" },
        { typeof(SByte), "signed byte" },
        { typeof(Uri), "URI" },
        { typeof(Single), "decimal number" },
        { typeof(Double), "decimal number" },
        { typeof(Decimal), "decimal number" },
        { typeof(String), "string" },
        { typeof(Guid), "globally unique identifier" },
        { typeof(TimeSpan), "time interval" },
        { typeof(DateTime), "date" },
        { typeof(DateTimeOffset), "date" },
        { typeof(Boolean), "boolean" },
    };
    // Lazily resolved so the (possibly expensive) service lookup happens only on
    // first use, not at construction time.
    private Lazy<IModelDocumentationProvider> _documentationProvider;
    /// <summary>
    /// Initializes a new instance of the <see cref="ModelDescriptionGenerator"/> class.
    /// </summary>
    /// <param name="config">The HttpConfiguration whose documentation provider is used.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="config"/> is null.</exception>
    public ModelDescriptionGenerator(HttpConfiguration config)
    {
        if (config == null)
        {
            throw new ArgumentNullException("config");
        }
        // The configured documentation provider may not implement IModelDocumentationProvider;
        // the "as" cast then yields null and model documentation is simply omitted.
        _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
        GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
    }
    // Cache of every description generated so far, keyed by model name
    // (case-insensitive). Also used to detect duplicate model names.
    public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
    // May be null when no IModelDocumentationProvider is registered.
    private IModelDocumentationProvider DocumentationProvider
    {
        get
        {
            return _documentationProvider.Value;
        }
    }
    /// <summary>
    /// Returns the cached <see cref="ModelDescription"/> for <paramref name="modelType"/>,
    /// or generates (and caches) one by classifying the type as simple, enum,
    /// collection, dictionary, key/value pair, or complex.
    /// </summary>
    /// <param name="modelType">The type to describe. Nullable&lt;T&gt; is unwrapped to T.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="modelType"/> is null.</exception>
    /// <exception cref="InvalidOperationException">Thrown when two different types map to the same model name.</exception>
    public ModelDescription GetOrCreateModelDescription(Type modelType)
    {
        if (modelType == null)
        {
            throw new ArgumentNullException("modelType");
        }
        // Describe Nullable<T> as its underlying type T.
        Type underlyingType = Nullable.GetUnderlyingType(modelType);
        if (underlyingType != null)
        {
            modelType = underlyingType;
        }
        ModelDescription modelDescription;
        string modelName = ModelNameHelper.GetModelName(modelType);
        if (GeneratedModels.TryGetValue(modelName, out modelDescription))
        {
            // A cached entry under the same name must belong to the same type;
            // otherwise two types collide on one model name.
            if (modelType != modelDescription.ModelType)
            {
                throw new InvalidOperationException(
                    String.Format(
                        CultureInfo.CurrentCulture,
                        "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                        "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                        modelName,
                        modelDescription.ModelType.FullName,
                        modelType.FullName));
            }
            return modelDescription;
        }
        // Classification order matters: simple types and enums first, then generic
        // collection/dictionary shapes, then arrays and non-generic collections,
        // and finally everything else as a complex type.
        if (DefaultTypeDocumentation.ContainsKey(modelType))
        {
            return GenerateSimpleTypeModelDescription(modelType);
        }
        if (modelType.IsEnum)
        {
            return GenerateEnumTypeModelDescription(modelType);
        }
        if (modelType.IsGenericType)
        {
            Type[] genericArguments = modelType.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                if (enumerableType.IsAssignableFrom(modelType))
                {
                    return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                }
            }
            if (genericArguments.Length == 2)
            {
                Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                if (dictionaryType.IsAssignableFrom(modelType))
                {
                    return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                }
                Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                if (keyValuePairType.IsAssignableFrom(modelType))
                {
                    return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                }
            }
        }
        if (modelType.IsArray)
        {
            Type elementType = modelType.GetElementType();
            return GenerateCollectionModelDescription(modelType, elementType);
        }
        if (modelType == typeof(NameValueCollection))
        {
            return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
        }
        if (typeof(IDictionary).IsAssignableFrom(modelType))
        {
            return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
        }
        if (typeof(IEnumerable).IsAssignableFrom(modelType))
        {
            return GenerateCollectionModelDescription(modelType, typeof(object));
        }
        return GenerateComplexTypeModelDescription(modelType);
    }
    // Resolves the display name of a member: [JsonProperty] wins, then [DataMember]
    // (only when the declaring type is a data contract), then the CLR member name.
    // Change this to provide different name for the member.
    private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
    {
        JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
        if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
        {
            return jsonProperty.PropertyName;
        }
        if (hasDataContractAttribute)
        {
            DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
            if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
            {
                return dataMember.Name;
            }
        }
        return member.Name;
    }
    // Decides whether a member appears on the help page, honoring the common
    // serializer-ignore attributes and data-contract opt-in semantics.
    private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
    {
        JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
        XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
        IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
        NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
        ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
        // Enum members opt in via [EnumMember]; other members via [DataMember].
        bool hasMemberAttribute = member.DeclaringType.IsEnum ?
            member.GetCustomAttribute<EnumMemberAttribute>() != null :
            member.GetCustomAttribute<DataMemberAttribute>() != null;
        // Display member only if all the followings are true:
        // no JsonIgnoreAttribute
        // no XmlIgnoreAttribute
        // no IgnoreDataMemberAttribute
        // no NonSerializedAttribute
        // no ApiExplorerSettingsAttribute with IgnoreApi set to true
        // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
        return jsonIgnore == null &&
            xmlIgnore == null &&
            ignoreDataMember == null &&
            nonSerialized == null &&
            (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
            (!hasDataContractAttribute || hasMemberAttribute);
    }
    // Returns the built-in text for known simple types, otherwise asks the
    // documentation provider. May return null when neither source has text.
    private string CreateDefaultDocumentation(Type type)
    {
        string documentation;
        if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
        {
            return documentation;
        }
        if (DocumentationProvider != null)
        {
            documentation = DocumentationProvider.GetDocumentation(type);
        }
        return documentation;
    }
    // Collects the recognized data-annotation attributes on a member, renders their
    // help text, sorts them (Required first, then alphabetically), and appends them
    // to the parameter model.
    private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
    {
        List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
        IEnumerable<Attribute> attributes = property.GetCustomAttributes();
        foreach (Attribute attribute in attributes)
        {
            Func<object, string> textGenerator;
            if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
            {
                annotations.Add(
                    new ParameterAnnotation
                    {
                        AnnotationAttribute = attribute,
                        Documentation = textGenerator(attribute)
                    });
            }
        }
        // Rearrange the annotations
        annotations.Sort((x, y) =>
        {
            // Special-case RequiredAttribute so that it shows up on top
            if (x.AnnotationAttribute is RequiredAttribute)
            {
                return -1;
            }
            if (y.AnnotationAttribute is RequiredAttribute)
            {
                return 1;
            }
            // Sort the rest based on alphabetic order of the documentation
            return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
        });
        foreach (ParameterAnnotation annotation in annotations)
        {
            propertyModel.Annotations.Add(annotation);
        }
    }
    // Describes a collection by describing its element type; note the collection
    // description itself is not cached in GeneratedModels.
    private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
    {
        ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
        if (collectionModelDescription != null)
        {
            return new CollectionModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                ElementDescription = collectionModelDescription
            };
        }
        return null;
    }
    private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
    {
        ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            Documentation = CreateDefaultDocumentation(modelType)
        };
        // Register BEFORE walking members: a self-referential type (e.g. a tree node)
        // then hits the cache on re-entry instead of recursing forever.
        GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
        bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
        PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
        foreach (PropertyInfo property in properties)
        {
            if (ShouldDisplayMember(property, hasDataContractAttribute))
            {
                ParameterDescription propertyModel = new ParameterDescription
                {
                    Name = GetMemberName(property, hasDataContractAttribute)
                };
                if (DocumentationProvider != null)
                {
                    propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                }
                GenerateAnnotations(property, propertyModel);
                complexModelDescription.Properties.Add(propertyModel);
                propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
            }
        }
        FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
        foreach (FieldInfo field in fields)
        {
            if (ShouldDisplayMember(field, hasDataContractAttribute))
            {
                // NOTE(review): unlike properties above, fields do not get
                // GenerateAnnotations called — data-annotation attributes on public
                // fields are not shown. Confirm this is intentional.
                ParameterDescription propertyModel = new ParameterDescription
                {
                    Name = GetMemberName(field, hasDataContractAttribute)
                };
                if (DocumentationProvider != null)
                {
                    propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                }
                complexModelDescription.Properties.Add(propertyModel);
                propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
            }
        }
        return complexModelDescription;
    }
    // Describes a dictionary by describing its key and value types; not cached.
    private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
    {
        ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
        ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
        return new DictionaryModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            KeyModelDescription = keyModelDescription,
            ValueModelDescription = valueModelDescription
        };
    }
    private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
    {
        EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            Documentation = CreateDefaultDocumentation(modelType)
        };
        bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
        foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
        {
            if (ShouldDisplayMember(field, hasDataContractAttribute))
            {
                EnumValueDescription enumValue = new EnumValueDescription
                {
                    Name = field.Name,
                    Value = field.GetRawConstantValue().ToString()
                };
                if (DocumentationProvider != null)
                {
                    enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                }
                enumDescription.Values.Add(enumValue);
            }
        }
        // Cached only once fully populated (enum members cannot trigger re-entry).
        GeneratedModels.Add(enumDescription.Name, enumDescription);
        return enumDescription;
    }
    // Describes a KeyValuePair<K,V> by describing K and V; not cached.
    private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
    {
        ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
        ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
        return new KeyValuePairModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            KeyModelDescription = keyModelDescription,
            ValueModelDescription = valueModelDescription
        };
    }
    // Describes one of the well-known simple types and caches the result.
    private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
    {
        SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            Documentation = CreateDefaultDocumentation(modelType)
        };
        GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);
        return simpleModelDescription;
    }
}
}
| |
// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace ApplicationGateway
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// NetworkSecurityGroupsOperations operations.
/// </summary>
internal partial class NetworkSecurityGroupsOperations : IServiceOperations<NetworkClient>, INetworkSecurityGroupsOperations
{
/// <summary>
/// Initializes a new instance of the NetworkSecurityGroupsOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal NetworkSecurityGroupsOperations(NetworkClient client)
{
    // Validate and assign in a single step; the thrown parameter name
    // matches the original explicit guard clause.
    Client = client ?? throw new System.ArgumentNullException("client");
}
/// <summary>
/// Gets a reference to the NetworkClient
/// </summary>
public NetworkClient Client { get; private set; }
/// <summary>
/// Deletes the specified network security group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='networkSecurityGroupName'>
/// The name of the network security group.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Kick off the long-running delete, then let the client poll the
    // operation until it reaches a terminal state.
    var beginResponse = await BeginDeleteWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPostOrDeleteOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified network security group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='networkSecurityGroupName'>
/// The name of the network security group.
/// </param>
/// <param name='expand'>
/// Expands referenced resources.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<NetworkSecurityGroup>> GetWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, string expand = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before doing any work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (networkSecurityGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "networkSecurityGroupName");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // API version is pinned by the generator; regenerate to target a newer version.
    string apiVersion = "2016-12-01";
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("networkSecurityGroupName", networkSecurityGroupName);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("expand", expand);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
    }
    // Construct URL
    // Path parameters are substituted escaped; query parameters are appended below.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{networkSecurityGroupName}", System.Uri.EscapeDataString(networkSecurityGroupName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (expand != null)
    {
        _queryParameters.Add(string.Format("$expand={0}", System.Uri.EscapeDataString(expand)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    // Caller-supplied headers override any defaults set above.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    // GET has no body; _requestContent stays null and is only used for error reporting.
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Best effort: surface the service-provided CloudError body; if the body
        // is not valid JSON, fall back to the generic status-code message.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // On failure, ownership of the transport objects is not transferred to
        // the caller, so dispose them here before throwing.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    // On success the raw request/response are handed to the caller inside the result.
    var _result = new AzureOperationResponse<NetworkSecurityGroup>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<NetworkSecurityGroup>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Creates or updates a network security group in the specified resource
/// group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='networkSecurityGroupName'>
/// The name of the network security group.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the create or update network security group
/// operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<NetworkSecurityGroup>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, NetworkSecurityGroup parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Issue the initial PUT, then poll the long-running operation until it completes.
    var initialResponse = await BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, parameters, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPutOrPatchOperationResultAsync(initialResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets all network security groups in a subscription.
/// </summary>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<NetworkSecurityGroup>>> ListAllWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required client state before doing any work.
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // API version is pinned by the generator; regenerate to target a newer version.
    string apiVersion = "2016-12-01";
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "ListAll", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.Network/networkSecurityGroups").ToString();
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    // Caller-supplied headers override any defaults set above.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    // GET has no body; _requestContent stays null and is only used for error reporting.
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Best effort: surface the service-provided CloudError body; if the body
        // is not valid JSON, fall back to the generic status-code message.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // On failure, ownership of the transport objects is not transferred to
        // the caller, so dispose them here before throwing.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<NetworkSecurityGroup>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    // Returns only the first page; the NextPageLink on the body drives subsequent pages.
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<NetworkSecurityGroup>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Gets all network security groups in a resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<NetworkSecurityGroup>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
string apiVersion = "2016-12-01";
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<IPage<NetworkSecurityGroup>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<NetworkSecurityGroup>>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Deletes the specified network security group. This is the "begin" half of a
/// long-running operation: 202 (accepted, in progress), 200 and 204 are all success codes.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='networkSecurityGroupName'>
/// The name of the network security group.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <returns>
/// A response object containing the response headers (delete returns no body).
/// </returns>
public async Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before any network work is done.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (networkSecurityGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "networkSecurityGroupName");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // REST API version this client was generated against.
    string apiVersion = "2016-12-01";
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("networkSecurityGroupName", networkSecurityGroupName);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "BeginDelete", tracingParameters);
    }
    // Construct URL: path parameters are URL-escaped before substitution.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{networkSecurityGroupName}", System.Uri.EscapeDataString(networkSecurityGroupName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("DELETE");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: client request id, accept-language, then caller-supplied
    // headers (caller-supplied values replace any existing header of the same name).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request (DELETE carries no body)
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // 202/200/204 are the documented success codes for the long-running delete;
    // everything else is surfaced as a CloudException.
    if ((int)_statusCode != 202 && (int)_statusCode != 200 && (int)_statusCode != 204)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: the raw response text is still attached below.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Dispose transport objects on the failure path; the caller never sees them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result (no body to deserialize for delete)
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Creates or updates a network security group in the specified resource
/// group. This is the "begin" half of a long-running operation: 200 (updated)
/// and 201 (created) are both success codes and both carry the resulting resource.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='networkSecurityGroupName'>
/// The name of the network security group.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the create or update network security group
/// operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <returns>
/// A response object containing the created/updated network security group and response headers.
/// </returns>
public async Task<AzureOperationResponse<NetworkSecurityGroup>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, NetworkSecurityGroup parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before any network work is done.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (networkSecurityGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "networkSecurityGroupName");
    }
    if (parameters == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // REST API version this client was generated against.
    string apiVersion = "2016-12-01";
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("networkSecurityGroupName", networkSecurityGroupName);
        tracingParameters.Add("parameters", parameters);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "BeginCreateOrUpdate", tracingParameters);
    }
    // Construct URL: path parameters are URL-escaped before substitution.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{networkSecurityGroupName}", System.Uri.EscapeDataString(networkSecurityGroupName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("PUT");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: client request id, accept-language, then caller-supplied
    // headers (caller-supplied values replace any existing header of the same name).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: the NSG definition goes in the PUT body as JSON.
    string _requestContent = null;
    if(parameters != null)
    {
        _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings);
        _httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
        _httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
    }
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // 200 (updated) and 201 (created) are the success codes;
    // everything else is surfaced as a CloudException.
    if ((int)_statusCode != 201 && (int)_statusCode != 200)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: the raw response text is still attached below.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Dispose transport objects on the failure path; the caller never sees them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<NetworkSecurityGroup>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response. 201 and 200 carry an identical body shape, so the two
    // previously-duplicated branches are merged into one.
    if ((int)_statusCode == 201 || (int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<NetworkSecurityGroup>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Gets all network security groups in a subscription (follow-up page).
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// This is a complete URL and is used verbatim (no escaping or base-URI join).
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <returns>
/// A response object containing the response body (the next page of results) and response headers.
/// </returns>
public async Task<AzureOperationResponse<IPage<NetworkSecurityGroup>>> ListAllNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before any network work is done.
    if (nextPageLink == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("nextPageLink", nextPageLink);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "ListAllNext", tracingParameters);
    }
    // Construct URL: the service-provided continuation link is the full request URL.
    string _url = "{nextLink}";
    _url = _url.Replace("{nextLink}", nextPageLink);
    List<string> _queryParameters = new List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: client request id, accept-language, then caller-supplied
    // headers (caller-supplied values replace any existing header of the same name).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request (GET carries no body)
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Anything other than 200 OK is surfaced as a CloudException, carrying the
    // service's CloudError body when it can be parsed.
    if ((int)_statusCode != 200)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: the raw response text is still attached below.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Dispose transport objects on the failure path; the caller never sees them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<NetworkSecurityGroup>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response: the wire format is Page<T>, exposed to callers as IPage<T>.
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<NetworkSecurityGroup>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Gets all network security groups in a resource group (follow-up page).
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// This is a complete URL and is used verbatim (no escaping or base-URI join).
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <returns>
/// A response object containing the response body (the next page of results) and response headers.
/// </returns>
public async Task<AzureOperationResponse<IPage<NetworkSecurityGroup>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before any network work is done.
    if (nextPageLink == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("nextPageLink", nextPageLink);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
    }
    // Construct URL: the service-provided continuation link is the full request URL.
    string _url = "{nextLink}";
    _url = _url.Replace("{nextLink}", nextPageLink);
    List<string> _queryParameters = new List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: client request id, accept-language, then caller-supplied
    // headers (caller-supplied values replace any existing header of the same name).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request (GET carries no body)
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Anything other than 200 OK is surfaced as a CloudException, carrying the
    // service's CloudError body when it can be parsed.
    if ((int)_statusCode != 200)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: the raw response text is still attached below.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Dispose transport objects on the failure path; the caller never sees them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<NetworkSecurityGroup>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response: the wire format is Page<T>, exposed to callers as IPage<T>.
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<NetworkSecurityGroup>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
}
}
| |
using System;
using System.Linq;
using System.Diagnostics;
using System.Collections.Generic;
using System.IO;
using System.Drawing;
using BizHawk.Common;
using BizHawk.Client.Common;
using BizHawk.Client.EmuHawk.Filters;
using BizHawk.Bizware.BizwareGL;
using BizHawk.Bizware.BizwareGL.Drivers.OpenTK;
using OpenTK;
using OpenTK.Graphics;
namespace BizHawk.Client.EmuHawk.FilterManager
{
/// <summary>
/// How a surface in the filter pipeline is meant to be used:
/// not yet decided, readable as a texture, or writable as a render target.
/// </summary>
public enum SurfaceDisposition
{
    Unspecified, Texture, RenderTarget
}
/// <summary>
/// Describes the pixel dimensions of a surface flowing through the filter program.
/// </summary>
public class SurfaceFormat
{
    public SurfaceFormat(Size size)
    {
        Size = size;
    }

    /// <summary>Width and height of the surface; fixed at construction.</summary>
    public Size Size { get; private set; }
}
/// <summary>
/// Pairs a surface's format with its disposition, describing the state of a
/// surface at some point in the filter pipeline.
/// </summary>
public class SurfaceState
{
    public SurfaceState()
    {
    }

    public SurfaceState(SurfaceFormat surfaceFormat, SurfaceDisposition surfaceDisposition = SurfaceDisposition.Unspecified)
    {
        SurfaceFormat = surfaceFormat;
        SurfaceDisposition = surfaceDisposition;
    }

    /// <summary>Dimensions of the surface.</summary>
    public SurfaceFormat SurfaceFormat;

    /// <summary>Whether the surface is currently a texture, a render target, or undecided.</summary>
    public SurfaceDisposition SurfaceDisposition;
}
/// <summary>
/// Supplies render targets of a requested size to the filter program
/// (implementations may allocate or pool them — not visible from here).
/// </summary>
public interface IRenderTargetProvider
{
    RenderTarget Get(Size size);
}
/// <summary>
/// An ordered chain of filters plus the compiled sequence of steps to run them.
/// </summary>
public class FilterProgram
{
    // All filters, in execution order.
    public List<BaseFilter> Filters = new List<BaseFilter>();
    // Name -> filter lookup; entries with the same name overwrite each other (see AddFilter).
    Dictionary<string, BaseFilter> FilterNameIndex = new Dictionary<string, BaseFilter>();
    // The compiled step sequence.
    public List<ProgramStep> Program = new List<ProgramStep>();
/// <summary>
/// Looks up a filter by the name it was registered under via AddFilter;
/// returns null when no filter has that name.
/// </summary>
public BaseFilter this[string name]
{
    get
    {
        BaseFilter found;
        return FilterNameIndex.TryGetValue(name, out found) ? found : null;
    }
}
/// <summary>
/// Kinds of steps a compiled filter program can contain.
/// </summary>
public enum ProgramStepType
{
    // Run a filter; ProgramStep.Args is an int (see ProgramStep.ToString).
    Run,
    // Switch to a new target; ProgramStep.Args is a Size (see ProgramStep.ToString).
    NewTarget,
    // Switch to the final output target.
    FinalTarget
}
// Services made available to filters during initialization/execution:
public IGuiRenderer GuiRenderer;
public IGL GL;
public IRenderTargetProvider RenderTargetProvider;
// NOTE(review): the channel argument is currently ignored — every channel
// resolves to CurrRenderTarget. Confirm whether per-channel targets were intended.
public RenderTarget GetRenderTarget(string channel = "default") { return CurrRenderTarget; }
// The render target currently bound for the program.
public RenderTarget CurrRenderTarget;
/// <summary>
/// Obtains a scratch render target of the given dimensions from the provider.
/// </summary>
public RenderTarget GetTempTarget(int width, int height)
{
    var requested = new Size(width, height);
    return RenderTargetProvider.Get(requested);
}
/// <summary>
/// Appends a filter to the chain and registers it in the name index.
/// Filters registered under the same name — including the default "" for
/// unnamed filters — overwrite each other's index entry.
/// </summary>
public void AddFilter(BaseFilter filter, string name = "")
{
    Filters.Add(filter);
    FilterNameIndex[name] = filter;
}
/// <summary>
/// Receives a point in the coordinate space of the output of the filter program and untransforms it back to input points
/// </summary>
public Vector2 UntransformPoint(string channel, Vector2 point)
{
    // Walk the chain from the last filter back to the first, undoing each transform.
    int idx = Filters.Count;
    while (--idx >= 0)
    {
        point = Filters[idx].UntransformPoint(channel, point);
    }
    // A 0x0 output size is not mathematically sensible here; calling under that
    // condition should be considered a bug.
    return point;
}
/// <summary>
/// Receives a point in the input space of the filter program and transforms it through to output points
/// </summary>
public Vector2 TransformPoint(string channel, Vector2 point)
{
    // Push the point forward through every filter, first to last.
    foreach (var f in Filters)
    {
        point = f.TransformPoint(channel, point);
    }
    // A 0x0 output size is not mathematically sensible here; calling under that
    // condition should be considered a bug. (A zero-size clamp was previously
    // sketched here but intentionally left disabled.)
    return point;
}
public class ProgramStep
{
public ProgramStep(ProgramStepType type, object args, string comment = null)
{
this.Type = type;
this.Args = args;
this.Comment = comment;
}
public ProgramStepType Type;
public object Args;
public string Comment;
public override string ToString()
{
if (Type == ProgramStepType.Run)
return string.Format("Run {0} ({1})", (int)Args, Comment);
if (Type == ProgramStepType.NewTarget)
return string.Format("NewTarget {0}", (Size)Args);
if (Type == ProgramStepType.FinalTarget)
return string.Format("FinalTarget");
return null;
}
}
public void Compile(string channel, Size insize, Size outsize, bool finalTarget)
{
RETRY:
Program.Clear();
//prep filters for initialization
foreach (var f in Filters)
{
f.BeginInitialization(this);
f.Initialize();
}
//propagate input size forwards through filter chain to allow a 'flex' filter to determine what its input will be
Size presize = insize;
for (int i = 0; i < Filters.Count; i++)
{
var filter = Filters[i];
presize = filter.PresizeInput(channel, presize);
}
//propagate output size backwards through filter chain to allow a 'flex' filter to determine its output based on the desired output needs
presize = outsize;
for (int i = Filters.Count - 1; i >= 0; i--)
{
var filter = Filters[i];
presize = filter.PresizeOutput(channel, presize);
}
SurfaceState currState = null;
for (int i = 0; i < Filters.Count; i++)
{
BaseFilter f = Filters[i];
//check whether this filter needs input. if so, notify it of the current pipeline state
var iosi = f.FindInput(channel);
if (iosi != null)
{
iosi.SurfaceFormat = currState.SurfaceFormat;
f.SetInputFormat(channel, currState);
if (f.IsNOP)
{
continue;
}
//check if the desired disposition needs to change from texture to render target
//(if so, insert a render filter)
if (iosi.SurfaceDisposition == SurfaceDisposition.RenderTarget && currState.SurfaceDisposition == SurfaceDisposition.Texture)
{
var renderer = new Render();
Filters.Insert(i, renderer);
goto RETRY;
}
//check if the desired disposition needs to change from a render target to a texture
//(if so, the current render target gets resolved, and made no longer current
else if (iosi.SurfaceDisposition == SurfaceDisposition.Texture && currState.SurfaceDisposition == SurfaceDisposition.RenderTarget)
{
var resolver = new Resolve();
Filters.Insert(i, resolver);
goto RETRY;
}
}
//now, the filter will have set its output state depending on its input state. check if it outputs:
iosi = f.FindOutput(channel);
if (iosi != null)
{
if (currState == null)
{
currState = new SurfaceState();
currState.SurfaceFormat = iosi.SurfaceFormat;
currState.SurfaceDisposition = iosi.SurfaceDisposition;
}
else
{
//if output disposition is unspecified, change it to whatever we've got right now
if (iosi.SurfaceDisposition == SurfaceDisposition.Unspecified)
{
iosi.SurfaceDisposition = currState.SurfaceDisposition;
}
bool newTarget = false;
if (iosi.SurfaceFormat.Size != currState.SurfaceFormat.Size)
newTarget = true;
else if (currState.SurfaceDisposition == SurfaceDisposition.Texture && iosi.SurfaceDisposition == SurfaceDisposition.RenderTarget)
newTarget = true;
if (newTarget)
{
currState = new SurfaceState();
iosi.SurfaceFormat = currState.SurfaceFormat = iosi.SurfaceFormat;
iosi.SurfaceDisposition = currState.SurfaceDisposition = iosi.SurfaceDisposition;
Program.Add(new ProgramStep(ProgramStepType.NewTarget, currState.SurfaceFormat.Size));
}
else
{
currState.SurfaceDisposition = iosi.SurfaceDisposition;
}
}
}
Program.Add(new ProgramStep(ProgramStepType.Run, i, f.GetType().Name));
} //filter loop
//if the current output disposition is a texture, we need to render it
if (currState.SurfaceDisposition == SurfaceDisposition.Texture)
{
var renderer = new Render();
Filters.Insert(Filters.Count, renderer);
goto RETRY;
}
//patch the program so that the final rendertarget set operation is the framebuffer instead
if (finalTarget)
{
for (int i = Program.Count - 1; i >= 0; i--)
{
var ps = Program[i];
if (ps.Type == ProgramStepType.NewTarget)
{
var size = (Size)ps.Args;
Debug.Assert(size == outsize);
ps.Type = ProgramStepType.FinalTarget;
ps.Args = size;
break;
}
}
}
}
}
}
| |
namespace AzureTypedStorage
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Table;
/// <summary>
/// Thin wrapper around an Azure Storage <see cref="CloudTable"/> that forwards every
/// member to the wrapped instance while exposing it through the project's typed
/// table interfaces. Contains no logic of its own beyond delegation, except for the
/// three typed convenience operations at the bottom (Insert / InsertOrReplace / Remove).
/// Each operation family appears in sync, APM (Begin/End) and TAP (Async) forms.
/// </summary>
public class CloudTableImpl<TElement> : IQuerableCloudTable<TElement>, IExecutableCloudTable, IExecutableCloudTable<TElement>, IEditableCloudTable<TElement>, ICloudTable<TElement>
where TElement : ITableEntity, new()
{
// The wrapped table; every member below forwards to this instance.
private readonly CloudTable _instance;
public CloudTableImpl(CloudTable instance)
{
_instance = instance;
}
// --- Single-operation execution (sync / APM / TAP) ---
public TableResult Execute(
TableOperation operation,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.Execute(operation, requestOptions, operationContext);
}
// Typed variant, reachable only through the IExecutableCloudTable<TElement> interface.
// NOTE(review): default parameter values on an explicit interface implementation are
// never applied by callers (they call through the interface declaration) — confirm
// the interface declares the same defaults.
TableResult<TElement> IExecutableCloudTable<TElement>.Execute(
TableOperation operation,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return new TableResult<TElement>(_instance.Execute(operation, requestOptions, operationContext));
}
public ICancellableAsyncResult BeginExecute(TableOperation operation, AsyncCallback callback, object state)
{
return _instance.BeginExecute(operation, callback, state);
}
public ICancellableAsyncResult BeginExecute(
TableOperation operation,
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginExecute(operation, requestOptions, operationContext, callback, state);
}
public TableResult EndExecute(IAsyncResult asyncResult)
{
return _instance.EndExecute(asyncResult);
}
public Task<TableResult> ExecuteAsync(TableOperation operation)
{
return _instance.ExecuteAsync(operation);
}
public Task<TableResult> ExecuteAsync(TableOperation operation, CancellationToken cancellationToken)
{
return _instance.ExecuteAsync(operation, cancellationToken);
}
public Task<TableResult> ExecuteAsync(TableOperation operation, TableRequestOptions requestOptions, OperationContext operationContext)
{
return _instance.ExecuteAsync(operation, requestOptions, operationContext);
}
public Task<TableResult> ExecuteAsync(
TableOperation operation,
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.ExecuteAsync(operation, requestOptions, operationContext, cancellationToken);
}
// --- Batch execution (sync / APM / TAP) ---
public IList<TableResult> ExecuteBatch(
TableBatchOperation batch,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteBatch(batch, requestOptions, operationContext);
}
public ICancellableAsyncResult BeginExecuteBatch(TableBatchOperation batch, AsyncCallback callback, object state)
{
return _instance.BeginExecuteBatch(batch, callback, state);
}
public ICancellableAsyncResult BeginExecuteBatch(
TableBatchOperation batch,
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteBatch(batch, requestOptions, operationContext, callback, state);
}
public IList<TableResult> EndExecuteBatch(IAsyncResult asyncResult)
{
return _instance.EndExecuteBatch(asyncResult);
}
public Task<IList<TableResult>> ExecuteBatchAsync(TableBatchOperation batch)
{
return _instance.ExecuteBatchAsync(batch);
}
public Task<IList<TableResult>> ExecuteBatchAsync(TableBatchOperation batch, CancellationToken cancellationToken)
{
return _instance.ExecuteBatchAsync(batch, cancellationToken);
}
public Task<IList<TableResult>> ExecuteBatchAsync(TableBatchOperation batch, TableRequestOptions requestOptions, OperationContext operationContext)
{
return _instance.ExecuteBatchAsync(batch, requestOptions, operationContext);
}
public Task<IList<TableResult>> ExecuteBatchAsync(
TableBatchOperation batch,
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.ExecuteBatchAsync(batch, requestOptions, operationContext, cancellationToken);
}
// --- Untyped queries returning DynamicTableEntity ---
public IEnumerable<DynamicTableEntity> ExecuteQuery(
TableQuery query,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteQuery(query, requestOptions, operationContext);
}
public TableQuerySegment<DynamicTableEntity> ExecuteQuerySegmented(
TableQuery query,
TableContinuationToken token,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteQuerySegmented(query, token, requestOptions, operationContext);
}
public ICancellableAsyncResult BeginExecuteQuerySegmented(
TableQuery query,
TableContinuationToken token,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteQuerySegmented(query, token, callback, state);
}
public ICancellableAsyncResult BeginExecuteQuerySegmented(
TableQuery query,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteQuerySegmented(query, token, requestOptions, operationContext, callback, state);
}
public TableQuerySegment<DynamicTableEntity> EndExecuteQuerySegmented(IAsyncResult asyncResult)
{
return _instance.EndExecuteQuerySegmented(asyncResult);
}
public Task<TableQuerySegment<DynamicTableEntity>> ExecuteQuerySegmentedAsync(TableQuery query, TableContinuationToken token)
{
return _instance.ExecuteQuerySegmentedAsync(query, token);
}
public Task<TableQuerySegment<DynamicTableEntity>> ExecuteQuerySegmentedAsync(TableQuery query, TableContinuationToken token, CancellationToken cancellationToken)
{
return _instance.ExecuteQuerySegmentedAsync(query, token, cancellationToken);
}
public Task<TableQuerySegment<DynamicTableEntity>> ExecuteQuerySegmentedAsync(
TableQuery query,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext)
{
return _instance.ExecuteQuerySegmentedAsync(query, token, requestOptions, operationContext);
}
public Task<TableQuerySegment<DynamicTableEntity>> ExecuteQuerySegmentedAsync(
TableQuery query,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.ExecuteQuerySegmentedAsync(query, token, requestOptions, operationContext, cancellationToken);
}
// --- Untyped queries materialized through an EntityResolver ---
public IEnumerable<TResult> ExecuteQuery<TResult>(
TableQuery query,
EntityResolver<TResult> resolver,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteQuery(query, resolver, requestOptions, operationContext);
}
public TableQuerySegment<TResult> ExecuteQuerySegmented<TResult>(
TableQuery query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteQuerySegmented(query, resolver, token, requestOptions, operationContext);
}
public ICancellableAsyncResult BeginExecuteQuerySegmented<TResult>(
TableQuery query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteQuerySegmented(query, resolver, token, callback, state);
}
public ICancellableAsyncResult BeginExecuteQuerySegmented<TResult>(
TableQuery query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteQuerySegmented(query, resolver, token, requestOptions, operationContext, callback, state);
}
public Task<TableQuerySegment<TResult>> ExecuteQuerySegmentedAsync<TResult>(TableQuery query, EntityResolver<TResult> resolver, TableContinuationToken token)
{
return _instance.ExecuteQuerySegmentedAsync(query, resolver, token);
}
public Task<TableQuerySegment<TResult>> ExecuteQuerySegmentedAsync<TResult>(
TableQuery query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
CancellationToken cancellationToken)
{
return _instance.ExecuteQuerySegmentedAsync(query, resolver, token, cancellationToken);
}
public Task<TableQuerySegment<TResult>> ExecuteQuerySegmentedAsync<TResult>(
TableQuery query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext)
{
return _instance.ExecuteQuerySegmentedAsync(query, resolver, token, requestOptions, operationContext);
}
public Task<TableQuerySegment<TResult>> ExecuteQuerySegmentedAsync<TResult>(
TableQuery query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.ExecuteQuerySegmentedAsync(query, resolver, token, requestOptions, operationContext, cancellationToken);
}
// --- Typed (TElement) queries ---
public TableQuery<TElement> CreateQuery()
{
return _instance.CreateQuery<TElement>();
}
public IEnumerable<TElement> ExecuteQuery(
TableQuery<TElement> query,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteQuery(query, requestOptions, operationContext);
}
public TableQuerySegment<TElement> ExecuteQuerySegmented(
TableQuery<TElement> query,
TableContinuationToken token,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteQuerySegmented(query, token, requestOptions, operationContext);
}
public ICancellableAsyncResult BeginExecuteQuerySegmented(
TableQuery<TElement> query,
TableContinuationToken token,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteQuerySegmented(query, token, callback, state);
}
public ICancellableAsyncResult BeginExecuteQuerySegmented(
TableQuery<TElement> query,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteQuerySegmented(query, token, requestOptions, operationContext, callback, state);
}
// NOTE(review): the method-level type parameter TElement shadows the class type
// parameter (compiler warning CS0693) and is unused — only TResult is forwarded.
// Renaming it would be source- and binary-compatible; verify before changing.
public TableQuerySegment<TResult> EndExecuteQuerySegmented<TElement, TResult>(IAsyncResult asyncResult)
{
return _instance.EndExecuteQuerySegmented<TResult>(asyncResult);
}
public Task<TableQuerySegment<TElement>> ExecuteQuerySegmentedAsync(TableQuery<TElement> query, TableContinuationToken token)
{
return _instance.ExecuteQuerySegmentedAsync(query, token);
}
public Task<TableQuerySegment<TElement>> ExecuteQuerySegmentedAsync(
TableQuery<TElement> query,
TableContinuationToken token,
CancellationToken cancellationToken)
{
return _instance.ExecuteQuerySegmentedAsync(query, token, cancellationToken);
}
public Task<TableQuerySegment<TElement>> ExecuteQuerySegmentedAsync(
TableQuery<TElement> query,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext)
{
return _instance.ExecuteQuerySegmentedAsync(query, token, requestOptions, operationContext);
}
public Task<TableQuerySegment<TElement>> ExecuteQuerySegmentedAsync(
TableQuery<TElement> query,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.ExecuteQuerySegmentedAsync(query, token, requestOptions, operationContext, cancellationToken);
}
// --- Typed queries materialized through an EntityResolver ---
public IEnumerable<TResult> ExecuteQuery<TResult>(
TableQuery<TElement> query,
EntityResolver<TResult> resolver,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteQuery(query, resolver, requestOptions, operationContext);
}
public TableQuerySegment<TResult> ExecuteQuerySegmented<TResult>(
TableQuery<TElement> query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
return _instance.ExecuteQuerySegmented(query, resolver, token, requestOptions, operationContext);
}
public ICancellableAsyncResult BeginExecuteQuerySegmented<TResult>(
TableQuery<TElement> query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteQuerySegmented(query, resolver, token, callback, state);
}
public ICancellableAsyncResult BeginExecuteQuerySegmented<TResult>(
TableQuery<TElement> query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginExecuteQuerySegmented(query, resolver, token, requestOptions, operationContext, callback, state);
}
public TableQuerySegment<TResult> EndExecuteQuerySegmented<TResult>(IAsyncResult asyncResult)
{
return _instance.EndExecuteQuerySegmented<TResult>(asyncResult);
}
public Task<TableQuerySegment<TResult>> ExecuteQuerySegmentedAsync<TResult>(
TableQuery<TElement> query,
EntityResolver<TResult> resolver,
TableContinuationToken token)
{
return _instance.ExecuteQuerySegmentedAsync(query, resolver, token);
}
public Task<TableQuerySegment<TResult>> ExecuteQuerySegmentedAsync<TResult>(
TableQuery<TElement> query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
CancellationToken cancellationToken)
{
return _instance.ExecuteQuerySegmentedAsync(query, resolver, token, cancellationToken);
}
public Task<TableQuerySegment<TResult>> ExecuteQuerySegmentedAsync<TResult>(
TableQuery<TElement> query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext)
{
return _instance.ExecuteQuerySegmentedAsync(query, resolver, token, requestOptions, operationContext);
}
public Task<TableQuerySegment<TResult>> ExecuteQuerySegmentedAsync<TResult>(
TableQuery<TElement> query,
EntityResolver<TResult> resolver,
TableContinuationToken token,
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.ExecuteQuerySegmentedAsync(query, resolver, token, requestOptions, operationContext, cancellationToken);
}
// --- Table lifecycle: Create ---
public void Create(TableRequestOptions requestOptions = null, OperationContext operationContext = null)
{
_instance.Create(requestOptions, operationContext);
}
public ICancellableAsyncResult BeginCreate(AsyncCallback callback, object state)
{
return _instance.BeginCreate(callback, state);
}
public ICancellableAsyncResult BeginCreate(
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginCreate(requestOptions, operationContext, callback, state);
}
public void EndCreate(IAsyncResult asyncResult)
{
_instance.EndCreate(asyncResult);
}
public Task CreateAsync()
{
return _instance.CreateAsync();
}
public Task CreateAsync(CancellationToken cancellationToken)
{
return _instance.CreateAsync(cancellationToken);
}
public Task CreateAsync(TableRequestOptions requestOptions, OperationContext operationContext)
{
return _instance.CreateAsync(requestOptions, operationContext);
}
public Task CreateAsync(
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.CreateAsync(requestOptions, operationContext, cancellationToken);
}
// --- Table lifecycle: CreateIfNotExists ---
public bool CreateIfNotExists(TableRequestOptions requestOptions = null, OperationContext operationContext = null)
{
return _instance.CreateIfNotExists(requestOptions, operationContext);
}
public ICancellableAsyncResult BeginCreateIfNotExists(AsyncCallback callback, object state)
{
return _instance.BeginCreateIfNotExists(callback, state);
}
public ICancellableAsyncResult BeginCreateIfNotExists(
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginCreateIfNotExists(requestOptions, operationContext, callback, state);
}
public bool EndCreateIfNotExists(IAsyncResult asyncResult)
{
return _instance.EndCreateIfNotExists(asyncResult);
}
public Task<bool> CreateIfNotExistsAsync()
{
return _instance.CreateIfNotExistsAsync();
}
public Task<bool> CreateIfNotExistsAsync(CancellationToken cancellationToken)
{
return _instance.CreateIfNotExistsAsync(cancellationToken);
}
public Task<bool> CreateIfNotExistsAsync(TableRequestOptions requestOptions, OperationContext operationContext)
{
return _instance.CreateIfNotExistsAsync(requestOptions, operationContext);
}
public Task<bool> CreateIfNotExistsAsync(
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.CreateIfNotExistsAsync(requestOptions, operationContext, cancellationToken);
}
// --- Table lifecycle: Delete ---
public void Delete(TableRequestOptions requestOptions = null, OperationContext operationContext = null)
{
_instance.Delete(requestOptions, operationContext);
}
public ICancellableAsyncResult BeginDelete(AsyncCallback callback, object state)
{
return _instance.BeginDelete(callback, state);
}
public ICancellableAsyncResult BeginDelete(
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginDelete(requestOptions, operationContext, callback, state);
}
public void EndDelete(IAsyncResult asyncResult)
{
_instance.EndDelete(asyncResult);
}
public Task DeleteAsync()
{
return _instance.DeleteAsync();
}
public Task DeleteAsync(CancellationToken cancellationToken)
{
return _instance.DeleteAsync(cancellationToken);
}
public Task DeleteAsync(TableRequestOptions requestOptions, OperationContext operationContext)
{
return _instance.DeleteAsync(requestOptions, operationContext);
}
public Task DeleteAsync(
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.DeleteAsync(requestOptions, operationContext, cancellationToken);
}
// --- Table lifecycle: DeleteIfExists ---
public bool DeleteIfExists(TableRequestOptions requestOptions = null, OperationContext operationContext = null)
{
return _instance.DeleteIfExists(requestOptions, operationContext);
}
public ICancellableAsyncResult BeginDeleteIfExists(AsyncCallback callback, object state)
{
return _instance.BeginDeleteIfExists(callback, state);
}
public ICancellableAsyncResult BeginDeleteIfExists(
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginDeleteIfExists(requestOptions, operationContext, callback, state);
}
public bool EndDeleteIfExists(IAsyncResult asyncResult)
{
return _instance.EndDeleteIfExists(asyncResult);
}
public Task<bool> DeleteIfExistsAsync()
{
return _instance.DeleteIfExistsAsync();
}
public Task<bool> DeleteIfExistsAsync(CancellationToken cancellationToken)
{
return _instance.DeleteIfExistsAsync(cancellationToken);
}
public Task<bool> DeleteIfExistsAsync(TableRequestOptions requestOptions, OperationContext operationContext)
{
return _instance.DeleteIfExistsAsync(requestOptions, operationContext);
}
public Task<bool> DeleteIfExistsAsync(
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.DeleteIfExistsAsync(requestOptions, operationContext, cancellationToken);
}
// --- Table lifecycle: Exists ---
public bool Exists(TableRequestOptions requestOptions = null, OperationContext operationContext = null)
{
return _instance.Exists(requestOptions, operationContext);
}
public ICancellableAsyncResult BeginExists(AsyncCallback callback, object state)
{
return _instance.BeginExists(callback, state);
}
public ICancellableAsyncResult BeginExists(
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginExists(requestOptions, operationContext, callback, state);
}
public bool EndExists(IAsyncResult asyncResult)
{
return _instance.EndExists(asyncResult);
}
public Task<bool> ExistsAsync()
{
return _instance.ExistsAsync();
}
public Task<bool> ExistsAsync(CancellationToken cancellationToken)
{
return _instance.ExistsAsync(cancellationToken);
}
public Task<bool> ExistsAsync(TableRequestOptions requestOptions, OperationContext operationContext)
{
return _instance.ExistsAsync(requestOptions, operationContext);
}
public Task<bool> ExistsAsync(
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.ExistsAsync(requestOptions, operationContext, cancellationToken);
}
// --- Permissions ---
public TablePermissions GetPermissions(TableRequestOptions requestOptions = null, OperationContext operationContext = null)
{
return _instance.GetPermissions(requestOptions, operationContext);
}
public ICancellableAsyncResult BeginGetPermissions(AsyncCallback callback, object state)
{
return _instance.BeginGetPermissions(callback, state);
}
public ICancellableAsyncResult BeginGetPermissions(
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginGetPermissions(requestOptions, operationContext, callback, state);
}
public TablePermissions EndGetPermissions(IAsyncResult asyncResult)
{
return _instance.EndGetPermissions(asyncResult);
}
public Task<TablePermissions> GetPermissionsAsync()
{
return _instance.GetPermissionsAsync();
}
public Task<TablePermissions> GetPermissionsAsync(CancellationToken cancellationToken)
{
return _instance.GetPermissionsAsync(cancellationToken);
}
public Task<TablePermissions> GetPermissionsAsync(TableRequestOptions requestOptions, OperationContext operationContext)
{
return _instance.GetPermissionsAsync(requestOptions, operationContext);
}
public Task<TablePermissions> GetPermissionsAsync(
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.GetPermissionsAsync(requestOptions, operationContext, cancellationToken);
}
public void SetPermissions(
TablePermissions permissions,
TableRequestOptions requestOptions = null,
OperationContext operationContext = null)
{
_instance.SetPermissions(permissions, requestOptions, operationContext);
}
public ICancellableAsyncResult BeginSetPermissions(TablePermissions permissions, AsyncCallback callback, object state)
{
return _instance.BeginSetPermissions(permissions, callback, state);
}
public ICancellableAsyncResult BeginSetPermissions(
TablePermissions permissions,
TableRequestOptions requestOptions,
OperationContext operationContext,
AsyncCallback callback,
object state)
{
return _instance.BeginSetPermissions(permissions, requestOptions, operationContext, callback, state);
}
public void EndSetPermissions(IAsyncResult asyncResult)
{
_instance.EndSetPermissions(asyncResult);
}
public Task SetPermissionsAsync(TablePermissions permissions)
{
return _instance.SetPermissionsAsync(permissions);
}
public Task SetPermissionsAsync(TablePermissions permissions, CancellationToken cancellationToken)
{
return _instance.SetPermissionsAsync(permissions, cancellationToken);
}
public Task SetPermissionsAsync(
TablePermissions permissions,
TableRequestOptions requestOptions,
OperationContext operationContext)
{
return _instance.SetPermissionsAsync(permissions, requestOptions, operationContext);
}
public Task SetPermissionsAsync(
TablePermissions permissions,
TableRequestOptions requestOptions,
OperationContext operationContext,
CancellationToken cancellationToken)
{
return _instance.SetPermissionsAsync(permissions, requestOptions, operationContext, cancellationToken);
}
// --- Shared access signatures & pass-through properties ---
public string GetSharedAccessSignature(
SharedAccessTablePolicy policy,
string accessPolicyIdentifier,
string startPartitionKey,
string startRowKey,
string endPartitionKey,
string endRowKey)
{
return _instance.GetSharedAccessSignature(policy, accessPolicyIdentifier, startPartitionKey, startRowKey, endPartitionKey, endRowKey);
}
public CloudTableClient ServiceClient
{
get
{
return _instance.ServiceClient;
}
}
public string Name
{
get
{
return _instance.Name;
}
}
public Uri Uri
{
get
{
return _instance.Uri;
}
}
// --- Typed convenience operations (the only members that add behavior beyond delegation) ---
/// <summary>Executes a TableOperation.Insert for the given entity.</summary>
public TableResult Insert(TElement element)
{
return _instance.Execute(TableOperation.Insert(element));
}
/// <summary>Executes a TableOperation.InsertOrReplace (upsert) for the given entity.</summary>
public TableResult InsertOrReplace(TElement element)
{
return _instance.Execute(TableOperation.InsertOrReplace(element));
}
/// <summary>Executes a TableOperation.Delete for the given entity.</summary>
public TableResult Remove(TElement element)
{
return _instance.Execute(TableOperation.Delete(element));
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Globalization;
using System.IO;
using System.Text.Unicode;
using Xunit;
namespace System.Text.Encodings.Web.Tests
{
public partial class JavaScriptStringEncoderTests
{
[Fact]
public void TestSurrogate_Relaxed()
{
    // A valid surrogate pair (U+1F4A9) must be emitted as two \uXXXX escapes
    // by both the string-returning and the TextWriter overload.
    JavaScriptEncoder encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping;

    Assert.Equal("\\uD83D\\uDCA9", encoder.Encode("\U0001f4a9"));

    using (var writer = new StringWriter())
    {
        encoder.Encode(writer, "\U0001f4a9");
        Assert.Equal("\\uD83D\\uDCA9", writer.GetStringBuilder().ToString());
    }
}
[Fact]
public void Relaxed_EquivalentToAll_WithExceptions()
{
    // Arrange
    JavaScriptStringEncoder controlEncoder = new JavaScriptStringEncoder(UnicodeRanges.All);
    JavaScriptStringEncoder testEncoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Characters the relaxed encoder deliberately treats differently from UnicodeRanges.All.
    const string divergentChars = "\"&<>+'`";

    // Act & assert
    for (int i = 0; i <= char.MaxValue; i++)
    {
        // Unpaired surrogates are exercised by the dedicated bad-surrogate tests.
        if (IsSurrogateCodePoint(i))
        {
            continue;
        }

        string input = new string((char)i, 1);
        if (divergentChars.IndexOf((char)i) >= 0)
        {
            Assert.NotEqual(controlEncoder.JavaScriptStringEncode(input), testEncoder.JavaScriptStringEncode(input));
        }
        else
        {
            Assert.Equal(controlEncoder.JavaScriptStringEncode(input), testEncoder.JavaScriptStringEncode(input));
        }
    }
}
[Fact]
public void JavaScriptStringEncode_Relaxed_StillEncodesForbiddenChars_Simple_Escaping()
{
    // These two cases would naturally be InlineData on the Theory below.
    // Unfortunately the xUnit logger fails to escape these inputs when logging results,
    // so the suite fails despite the assertions passing. Until that is fixed upstream,
    // drive the Theory body manually from this plain Fact.
    var cases = new[] { ("\b", @"\b"), ("\f", @"\f") };
    foreach (var (input, expected) in cases)
    {
        JavaScriptStringEncode_Relaxed_StillEncodesForbiddenChars_Simple(input, expected);
    }
}
[Theory]
[InlineData("\"", "\\\"")]
[InlineData("\\", @"\\")]
[InlineData("\n", @"\n")]
[InlineData("\t", @"\t")]
[InlineData("\r", @"\r")]
public void JavaScriptStringEncode_Relaxed_StillEncodesForbiddenChars_Simple(string input, string expected)
{
    // Even the relaxed encoder must escape characters forbidden inside JSON strings.
    JavaScriptStringEncoder encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    Assert.Equal(expected, encoder.JavaScriptStringEncode(input));
}
[Fact]
public void JavaScriptStringEncode_Relaxed_StillEncodesForbiddenChars_Extended()
{
    // Arrange
    JavaScriptStringEncoder encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Computes the expected relaxed-mode escaping for a single BMP code unit.
    string ExpectedFor(int codeUnit, string input)
    {
        if (IsSurrogateCodePoint(codeUnit))
        {
            return "\uFFFD"; // unpaired surrogate -> Unicode replacement char
        }

        switch (input)
        {
            case "\b": return @"\b";
            case "\t": return @"\t";
            case "\n": return @"\n";
            case "\f": return @"\f";
            case "\r": return @"\r";
            case "\\": return @"\\";
            case "\"": return "\\\"";
        }

        // Control chars and undefined (or otherwise disallowed) chars must be \uXXXX-escaped.
        bool mustEncode = codeUnit <= 0x001F
            || (0x007F <= codeUnit && codeUnit <= 0x9F)
            || !UnicodeHelpers.IsCharacterDefined((char)codeUnit);

        return mustEncode
            ? string.Format(CultureInfo.InvariantCulture, @"\u{0:X4}", codeUnit)
            : input; // no encoding
    }

    // Act & assert - BMP chars
    for (int i = 0; i <= 0xFFFF; i++)
    {
        string input = new string((char)i, 1);
        Assert.Equal(ExpectedFor(i, input), encoder.JavaScriptStringEncode(input));
    }

    // Act & assert - astral chars (always escaped as a surrogate pair)
    for (int i = 0x10000; i <= 0x10FFFF; i++)
    {
        string input = char.ConvertFromUtf32(i);
        string expected = string.Format(CultureInfo.InvariantCulture, @"\u{0:X4}\u{1:X4}", (uint)input[0], (uint)input[1]);
        Assert.Equal(expected, encoder.JavaScriptStringEncode(input));
    }
}
[Fact]
public void JavaScriptStringEncode_BadSurrogates_ReturnsUnicodeReplacementChar_Relaxed()
{
    // Arrange: relaxed escaping allows all code points, so only broken surrogates change.
    // Layout: "a<unpaired leading>b<unpaired trailing>c<trailing before leading>d<unpaired trailing><valid>e<high at end of string>"
    const string input = "a\uD800b\uDFFFc\uDFFF\uD800d\uDFFF\uD800\uDFFFe\uD800";
    const string expected = "a\uFFFDb\uFFFDc\uFFFD\uFFFDd\uFFFD\\uD800\\uDFFFe\uFFFD"; // the well-formed 'D800 DFFF' pair survives
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act
    var actual = encoder.JavaScriptStringEncode(input);

    // Assert
    Assert.Equal(expected, actual);
}
[Fact]
public void JavaScriptStringEncode_EmptyStringInput_ReturnsEmptyString_Relaxed()
{
    // Arrange
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act & assert: the empty string round-trips unchanged.
    Assert.Equal("", encoder.JavaScriptStringEncode(""));
}
[Fact]
public void JavaScriptStringEncode_InputDoesNotRequireEncoding_ReturnsOriginalStringInstance_Relaxed()
{
    // Arrange
    string input = "Hello, there!";

    // Act & assert: nothing needs escaping, so the exact same instance is returned.
    Assert.Same(input, JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping.JavaScriptStringEncode(input));
}
[Fact]
public void JavaScriptStringEncode_NullInput_Throws_Relaxed()
{
    // Arrange
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act & assert
    Assert.Throws<ArgumentNullException>(() => encoder.JavaScriptStringEncode(null));
}
[Fact]
public void JavaScriptStringEncode_WithCharsRequiringEncodingAtBeginning_Relaxed()
{
    // Arrange
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act & assert: the leading backslash is escaped, the rest passes through.
    Assert.Equal(@"\\Hello, there!", encoder.JavaScriptStringEncode("\\Hello, there!"));
}
[Fact]
public void JavaScriptStringEncode_WithCharsRequiringEncodingAtEnd_Relaxed()
{
    // Arrange
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act & assert: the trailing backslash is escaped, the rest passes through.
    Assert.Equal(@"Hello, there!\\", encoder.JavaScriptStringEncode("Hello, there!\\"));
}
[Fact]
public void JavaScriptStringEncode_WithCharsRequiringEncodingInMiddle_Relaxed()
{
    // Arrange
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act & assert: the embedded backslash is escaped, the rest passes through.
    Assert.Equal(@"Hello, \\there!", encoder.JavaScriptStringEncode("Hello, \\there!"));
}
[Fact]
public void JavaScriptStringEncode_WithCharsRequiringEncodingInterspersed_Relaxed()
{
    // Arrange
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act & assert: each forbidden char ('\' and '"') is escaped independently.
    Assert.Equal("Hello, \\\\there\\\"!", encoder.JavaScriptStringEncode("Hello, \\there\"!"));
}
[Fact]
public void JavaScriptStringEncode_CharArray_Relaxed()
{
    // Arrange
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;
    using var writer = new StringWriter();

    // Act: encode only the 5-char window starting at index 3 ("lo\wo").
    encoder.JavaScriptStringEncode("Hello\\world!".ToCharArray(), 3, 5, writer);

    // Assert
    Assert.Equal(@"lo\\wo", writer.ToString());
}
[Fact]
public void JavaScriptStringEncode_StringSubstring_Relaxed()
{
    // Arrange
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;
    using var writer = new StringWriter();

    // Act: encode only the 5-char substring starting at index 3 ("lo\wo").
    encoder.JavaScriptStringEncode("Hello\\world!", 3, 5, writer);

    // Assert
    Assert.Equal(@"lo\\wo", writer.ToString());
}
[Theory]
[InlineData("\"", "\\\"")]
[InlineData("'", "'")]
public void JavaScriptStringEncode_Quotes_Relaxed(string input, string expected)
{
    // Arrange: double quotes must be escaped for JSON; single quotes pass through.
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act
    var actual = encoder.JavaScriptStringEncode(input);

    // Assert
    Assert.Equal(expected, actual);
}
[Theory]
[InlineData("hello+world", "hello+world")]
[InlineData("hello<world>", "hello<world>")]
[InlineData("hello&world", "hello&world")]
public void JavaScriptStringEncode_DoesOutputHtmlSensitiveCharacters_Relaxed(string input, string expected)
{
    // Arrange: unlike the default encoder, the relaxed escaper leaves
    // HTML-sensitive characters ('+', '<', '>', '&') untouched.
    var encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act
    var actual = encoder.JavaScriptStringEncode(input);

    // Assert
    Assert.Equal(expected, actual);
}
[Fact]
public void JavaScriptStringEncode_AboveAscii_Relaxed()
{
    // Arrange
    JavaScriptStringEncoder encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;
    // Act & assert
    for (int i = 0x128; i <= 0xFFFF; i++)
    {
        if (IsSurrogateCodePoint(i))
        {
            continue; // surrogates don't matter here
        }
        UnicodeCategory category = char.GetUnicodeCategory((char)i);
        // NOTE(review): this condition keeps ONLY NonSpacingMark characters, but the
        // original comment claimed it skipped "undefined characters like U+0378, or
        // spacing characters like U+2028" — the comment and condition disagree.
        // Confirm whether the test was meant to exercise a broader set of categories.
        if (category != UnicodeCategory.NonSpacingMark)
        {
            continue; // everything except non-spacing combining marks is skipped
        }
        // Non-spacing marks reaching this point are expected to pass through unescaped.
        string javaScriptStringEncoded = encoder.JavaScriptStringEncode(char.ConvertFromUtf32(i));
        Assert.True(char.ConvertFromUtf32(i) == javaScriptStringEncoded, i.ToString());
    }
}
[Fact]
public void JavaScriptStringEncode_ControlCharacters_Relaxed()
{
    // Arrange
    JavaScriptStringEncoder encoder = JavaScriptStringEncoder.UnsafeRelaxedJsonEscaping;

    // Act & assert: C0 control characters must always be \u00XX-escaped,
    // even by the relaxed escaper.
    for (int i = 0; i <= 0x1F; i++)
    {
        // Skip characters that are escaped using '\\' since they are covered in other tests.
        if (i == '\b' || i == '\f' || i == '\n' || i == '\r' || i == '\t')
        {
            continue;
        }
        string javaScriptStringEncoded = encoder.JavaScriptStringEncode(char.ConvertFromUtf32(i));
        // Pass the invariant culture explicitly, consistent with every other
        // string.Format call in this file (CA1305).
        string expected = string.Format(CultureInfo.InvariantCulture, "\\u00{0:X2}", i);
        Assert.Equal(expected, javaScriptStringEncoded);
    }
}
}
}
| |
#region License and Terms
// MoreLINQ - Extensions to LINQ to Objects
// Copyright (c) 2010 Leopold Bushkin. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
namespace MoreLinq.Test
{
using System;
using System.Collections.Generic;
using NUnit.Framework;
/// <summary>
/// Tests that verify the behavior of the RandomSubset() operator
/// </summary>
[TestFixture]
public class RandomSubsetTest
{
    /// <summary>
    /// Verify that RandomSubset() behaves in a lazy manner.
    /// </summary>
    [Test]
    public void TestRandomSubsetIsLazy()
    {
        // BreakingSequence throws when enumerated, so completing without an
        // exception proves that neither overload eagerly consumes its source.
        new BreakingSequence<int>().RandomSubset(10);
        new BreakingSequence<int>().RandomSubset(10, new Random());
    }
    /// <summary>
    /// Verify that invoking RandomSubset with a subset size less than 0 results in an exception.
    /// </summary>
    [Test]
    public void TestRandomSubsetNegativeSubsetSize()
    {
        AssertThrowsArgument.OutOfRangeException("subsetSize", () =>
            Enumerable.Range(1, 10).RandomSubset(-5));
    }
    /// <summary>
    /// Verify that invoking the seeded RandomSubset overload with a subset size less than 0 results in an exception.
    /// </summary>
    [Test]
    public void TestRandomSubsetNegativeSubsetSize2()
    {
        AssertThrowsArgument.OutOfRangeException("subsetSize", () =>
            Enumerable.Range(1, 10).RandomSubset(-1, new Random()));
    }
    /// <summary>
    /// Verify that the 0-size random subset of the empty set is the empty set.
    /// </summary>
    [Test]
    public void TestRandomSubsetOfEmptySequence()
    {
        var sequence = Enumerable.Empty<int>();
        var result = sequence.RandomSubset(0); // we can only get subsets <= sequence.Count()
        Assert.AreEqual(0, result.Count());
    }
    /// <summary>
    /// Verify that RandomSubset can produce a random subset of equal length to the original sequence.
    /// </summary>
    [Test]
    public void TestRandomSubsetSameLengthAsSequence()
    {
        const int count = 100;
        var sequence = Enumerable.Range(1, count);
        var resultA = sequence.RandomSubset(count);
        var resultB = sequence.RandomSubset(count, new Random(12345));
        // ensure random subset is always a complete reordering of original sequence
        Assert.AreEqual(count, resultA.Distinct().Count());
        Assert.AreEqual(count, resultB.Distinct().Count());
    }
    /// <summary>
    /// Verify that RandomSubset can produce a random subset shorter than the original sequence.
    /// </summary>
    [Test]
    public void TestRandomSubsetShorterThanSequence()
    {
        const int count = 100;
        const int subsetSize = 20;
        var sequence = Enumerable.Range(1, count);
        var resultA = sequence.RandomSubset(subsetSize);
        var resultB = sequence.RandomSubset(subsetSize, new Random(12345));
        // ensure random subset is always a distinct subset of original sequence
        Assert.AreEqual(subsetSize, resultA.Distinct().Count());
        Assert.AreEqual(subsetSize, resultB.Distinct().Count());
    }
    /// <summary>
    /// Verify that attempting to fetch a random subset longer than the original sequence
    /// results in an exception. Only thrown when the resulting random sequence is enumerated.
    /// </summary>
    [Test]
    public void TestRandomSubsetLongerThanSequence()
    {
        const int count = 100;
        const int subsetSize = count + 5;
        var sequence = Enumerable.Range(1, count);
        AssertThrowsArgument.OutOfRangeException("subsetSize", () =>
        {
            sequence.RandomSubset(subsetSize).Consume();
        });
    }
    /// <summary>
    /// Verify that attempting to fetch a random subset longer than the original sequence
    /// results in an exception (seeded overload). Only thrown when the resulting random
    /// sequence is enumerated.
    /// </summary>
    [Test]
    public void TestRandomSubsetLongerThanSequence2()
    {
        const int count = 100;
        const int subsetSize = count + 5;
        var sequence = Enumerable.Range(1, count);
        AssertThrowsArgument.OutOfRangeException("subsetSize", () =>
        {
            sequence.RandomSubset(subsetSize, new Random(1234)).Consume();
        });
    }
    /// <summary>
    /// Verify that RandomSubset does not exhibit selection bias in the subsets it returns.
    /// </summary>
    /// <remarks>
    /// It's actually a complicated matter to ensure that a random process does not exhibit
    /// any kind of bias. In this test, we want to make sure that the probability of any
    /// particular subset being returned is roughly the same as any other. Here's how.
    ///
    /// This test selects a random subset of length N from an ascending sequence 1..N.
    /// It then adds up the values of the random result into an accumulator array. After many
    /// iterations, we would hope that each index of the accumulator array approach the same
    /// value. Of course, in the real world, there will be some variance, and the values will
    /// never be the same. However, we can compute the relative standard deviation (RSD) of the
    /// variance. As the number of trials increases, the RSD should continue decreasing
    /// asymptotically towards zero. By running several iterations of increasing trial size,
    /// we can assert that the RSD continually decreases, approaching zero.
    ///
    /// For math geeks who read this:
    /// A decreasing RSD demonstrates that the random subsets form a cumulative distribution
    /// approaching unity (1.0). Which, given that the original sequence was monotonic, implies
    /// there cannot be a selection bias in the returned subsets - quod erat demonstrandum (QED).
    /// </remarks>
    [Test]
    [Explicit]
    public void TestRandomSubsetIsUnbiased()
    {
        const int count = 20;
        var sequence = Enumerable.Range(1, count);
        var rsdTrials = new[] { 1000, 10000, 100000, 500000, 10000000 };
        var rsdResults = new[] { 0.0, 0.0, 0.0, 0.0, 0.0 };
        var trialIndex = 0;
        foreach (var trialSize in rsdTrials)
        {
            var biasAccumulator = Enumerable.Repeat(0.0, count).ToArray();
            for (var i = 0; i < trialSize; i++)
            {
                var index = 0;
                var result = sequence.RandomSubset(count);
                foreach (var itemA in result)
                    biasAccumulator[index++] += itemA;
            }
            rsdResults[trialIndex++] = RelativeStandardDeviation(biasAccumulator);
        }
        // ensure that with increasing trial size the RSD% continually decreases
        for (var j = 0; j < rsdResults.Length - 1; j++)
            Assert.Less(rsdResults[j + 1], rsdResults[j]);
        // ensure that the RSD% for the largest (10M) trial size is < 1.0
        // (this threshold is somewhat arbitrary)
        Assert.Less(rsdResults.Last(), 1.0);
        // for sanity, we output the RSD% values as a cross-check, the expected result should be
        // that the RSD% rapidly decreases and eventually drops below 1.0
        Console.WriteLine("RSD% = {0:0.00000}, {1:0.00000}, {2:0.00000}, {3:0.00000}, {4:0.00000}",
            rsdResults[0], rsdResults[1], rsdResults[2], rsdResults[3], rsdResults[4]);
    }
    /// <summary>
    /// Verify that RandomSubsets is idempotent with respect to the original sequence.
    /// </summary>
    /// <remarks>
    /// Internally, RandomSubsets perform some in-place operations on a copy of the sequence.
    /// This attempts to verify that the original sequence remains unaltered after a random
    /// subset is returned and enumerated.
    /// </remarks>
    [Test]
    public void TestRandomSubsetIsIdempotent()
    {
        const int count = 100;
        const int subsetSize = count;
        var sequence = Enumerable.Range(1, count).ToArray();
        var sequenceClone = sequence.ToArray();
        var resultA = sequence.RandomSubset(subsetSize);
        var resultB = sequence.RandomSubset(subsetSize);
        // force complete enumeration of random subsets
        resultA.Consume();
        resultB.Consume();
        // verify the original sequence is untouched
        Assert.That(sequence, Is.EqualTo(sequenceClone));
    }
    /// <summary>
    /// Verify that RandomSubset produces subset where all elements belongs to original sequence.
    /// </summary>
    [Test]
    public void TestRandomSubsetReturnsOriginalSequenceElements()
    {
        const int count = 100;
        var sequence = Enumerable.Range(1, count);
        var result = sequence.RandomSubset(count, new Random(12345));
        // we do not test overload without seed because it can return original sequence
        Assert.That(sequence, Is.Not.EqualTo(result));
        // ensure random subset returns exactly the same elements of original sequence
        Assert.That(sequence, Is.EqualTo(result.OrderBy(x => x)));
    }
    // Relative standard deviation (RSD) as a percentage of the mean.
    // NOTE: 'values' is enumerated more than once; callers pass materialized arrays.
    static double RelativeStandardDeviation(IEnumerable<double> values)
    {
        var average = values.Average();
        var standardDeviation = StandardDeviationInternal(values, average);
        return (standardDeviation * 100.0) / average;
    }
    // Population standard deviation around the supplied precomputed mean.
    static double StandardDeviationInternal(IEnumerable<double> values, double average)
    {
        return Math.Sqrt(values.Select(value => Math.Pow(value - average, 2.0)).Average());
    }
}
}
| |
namespace SLeek
{
// Designer half of the InventoryConsole partial class. Everything inside
// InitializeComponent is generated by the Windows Forms designer; make UI
// changes through the designer rather than by hand-editing this file.
partial class InventoryConsole
{
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.IContainer components = null;
    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing && (components != null))
        {
            components.Dispose();
        }
        base.Dispose(disposing);
    }
    #region Component Designer generated code
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        this.components = new System.ComponentModel.Container();
        this.ilsInventory = new System.Windows.Forms.ImageList(this.components);
        this.splitContainer1 = new System.Windows.Forms.SplitContainer();
        this.treInventory = new System.Windows.Forms.TreeView();
        this.tstInventory = new System.Windows.Forms.ToolStrip();
        this.tbtnNew = new System.Windows.Forms.ToolStripDropDownButton();
        this.tmnuNewFolder = new System.Windows.Forms.ToolStripMenuItem();
        this.toolStripMenuItem1 = new System.Windows.Forms.ToolStripSeparator();
        this.tmnuNewLandmark = new System.Windows.Forms.ToolStripMenuItem();
        this.tmnuNewNotecard = new System.Windows.Forms.ToolStripMenuItem();
        this.tmnuNewScript = new System.Windows.Forms.ToolStripMenuItem();
        this.toolStripSeparator1 = new System.Windows.Forms.ToolStripSeparator();
        this.tbtnOrganize = new System.Windows.Forms.ToolStripDropDownButton();
        this.tmnuCut = new System.Windows.Forms.ToolStripMenuItem();
        this.tmnuCopy = new System.Windows.Forms.ToolStripMenuItem();
        this.tmnuPaste = new System.Windows.Forms.ToolStripMenuItem();
        this.toolStripMenuItem2 = new System.Windows.Forms.ToolStripSeparator();
        this.tmnuRename = new System.Windows.Forms.ToolStripMenuItem();
        this.toolStripMenuItem3 = new System.Windows.Forms.ToolStripSeparator();
        this.tmnuDelete = new System.Windows.Forms.ToolStripMenuItem();
        this.tbtnSort = new System.Windows.Forms.ToolStripDropDownButton();
        this.splitContainer1.Panel1.SuspendLayout();
        this.splitContainer1.SuspendLayout();
        this.tstInventory.SuspendLayout();
        this.SuspendLayout();
        //
        // ilsInventory
        //
        this.ilsInventory.ColorDepth = System.Windows.Forms.ColorDepth.Depth32Bit;
        this.ilsInventory.ImageSize = new System.Drawing.Size(16, 16);
        this.ilsInventory.TransparentColor = System.Drawing.Color.Transparent;
        //
        // splitContainer1
        //
        this.splitContainer1.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
        this.splitContainer1.Dock = System.Windows.Forms.DockStyle.Fill;
        this.splitContainer1.Location = new System.Drawing.Point(0, 0);
        this.splitContainer1.Name = "splitContainer1";
        //
        // splitContainer1.Panel1
        //
        this.splitContainer1.Panel1.Controls.Add(this.treInventory);
        this.splitContainer1.Panel1.Controls.Add(this.tstInventory);
        this.splitContainer1.Size = new System.Drawing.Size(632, 432);
        this.splitContainer1.SplitterDistance = 316;
        this.splitContainer1.TabIndex = 2;
        //
        // treInventory
        //
        this.treInventory.Dock = System.Windows.Forms.DockStyle.Fill;
        this.treInventory.ImageIndex = 0;
        this.treInventory.ImageList = this.ilsInventory;
        this.treInventory.Location = new System.Drawing.Point(0, 25);
        this.treInventory.Name = "treInventory";
        this.treInventory.SelectedImageIndex = 0;
        this.treInventory.Size = new System.Drawing.Size(312, 403);
        this.treInventory.TabIndex = 3;
        this.treInventory.AfterCollapse += new System.Windows.Forms.TreeViewEventHandler(this.treInventory_AfterCollapse);
        this.treInventory.AfterSelect += new System.Windows.Forms.TreeViewEventHandler(this.treInventory_AfterSelect);
        this.treInventory.AfterExpand += new System.Windows.Forms.TreeViewEventHandler(this.treInventory_AfterExpand);
        //
        // tstInventory
        //
        this.tstInventory.GripStyle = System.Windows.Forms.ToolStripGripStyle.Hidden;
        this.tstInventory.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
        this.tbtnNew,
        this.toolStripSeparator1,
        this.tbtnOrganize,
        this.tbtnSort});
        this.tstInventory.Location = new System.Drawing.Point(0, 0);
        this.tstInventory.Name = "tstInventory";
        this.tstInventory.RenderMode = System.Windows.Forms.ToolStripRenderMode.System;
        this.tstInventory.Size = new System.Drawing.Size(312, 25);
        this.tstInventory.TabIndex = 2;
        this.tstInventory.Text = "toolStrip1";
        //
        // tbtnNew
        //
        this.tbtnNew.AutoToolTip = false;
        this.tbtnNew.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] {
        this.tmnuNewFolder,
        this.toolStripMenuItem1,
        this.tmnuNewLandmark,
        this.tmnuNewNotecard,
        this.tmnuNewScript});
        this.tbtnNew.Enabled = false;
        this.tbtnNew.Image = global::SLeek.Properties.Resources.add_16;
        this.tbtnNew.ImageTransparentColor = System.Drawing.Color.Magenta;
        this.tbtnNew.Name = "tbtnNew";
        this.tbtnNew.Size = new System.Drawing.Size(57, 22);
        this.tbtnNew.Text = "New";
        //
        // tmnuNewFolder
        //
        this.tmnuNewFolder.Image = global::SLeek.Properties.Resources.folder_closed_16;
        this.tmnuNewFolder.Name = "tmnuNewFolder";
        this.tmnuNewFolder.Size = new System.Drawing.Size(152, 22);
        this.tmnuNewFolder.Text = "Folder";
        this.tmnuNewFolder.Click += new System.EventHandler(this.tmnuNewFolder_Click);
        //
        // toolStripMenuItem1
        //
        this.toolStripMenuItem1.Name = "toolStripMenuItem1";
        this.toolStripMenuItem1.Size = new System.Drawing.Size(149, 6);
        //
        // tmnuNewLandmark
        //
        this.tmnuNewLandmark.Enabled = false;
        this.tmnuNewLandmark.Name = "tmnuNewLandmark";
        this.tmnuNewLandmark.Size = new System.Drawing.Size(152, 22);
        this.tmnuNewLandmark.Text = "Landmark";
        //
        // tmnuNewNotecard
        //
        this.tmnuNewNotecard.Image = global::SLeek.Properties.Resources.documents_16;
        this.tmnuNewNotecard.Name = "tmnuNewNotecard";
        this.tmnuNewNotecard.Size = new System.Drawing.Size(152, 22);
        this.tmnuNewNotecard.Text = "Notecard";
        this.tmnuNewNotecard.Click += new System.EventHandler(this.tmnuNewNotecard_Click);
        //
        // tmnuNewScript
        //
        this.tmnuNewScript.Image = global::SLeek.Properties.Resources.lsl_scripts_16;
        this.tmnuNewScript.Name = "tmnuNewScript";
        this.tmnuNewScript.Size = new System.Drawing.Size(152, 22);
        this.tmnuNewScript.Text = "Script";
        this.tmnuNewScript.Click += new System.EventHandler(this.tmnuNewScript_Click);
        //
        // toolStripSeparator1
        //
        this.toolStripSeparator1.Name = "toolStripSeparator1";
        this.toolStripSeparator1.Size = new System.Drawing.Size(6, 25);
        //
        // tbtnOrganize
        //
        this.tbtnOrganize.AutoToolTip = false;
        this.tbtnOrganize.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] {
        this.tmnuCut,
        this.tmnuCopy,
        this.tmnuPaste,
        this.toolStripMenuItem2,
        this.tmnuRename,
        this.toolStripMenuItem3,
        this.tmnuDelete});
        this.tbtnOrganize.Enabled = false;
        this.tbtnOrganize.Image = global::SLeek.Properties.Resources.applications_16;
        this.tbtnOrganize.ImageTransparentColor = System.Drawing.Color.Magenta;
        this.tbtnOrganize.Name = "tbtnOrganize";
        this.tbtnOrganize.Size = new System.Drawing.Size(79, 22);
        this.tbtnOrganize.Text = "Organize";
        //
        // tmnuCut
        //
        this.tmnuCut.Image = global::SLeek.Properties.Resources.cut_16;
        this.tmnuCut.Name = "tmnuCut";
        this.tmnuCut.Size = new System.Drawing.Size(152, 22);
        this.tmnuCut.Text = "Cut";
        this.tmnuCut.Click += new System.EventHandler(this.tmnuCut_Click);
        //
        // tmnuCopy
        //
        this.tmnuCopy.Enabled = false;
        this.tmnuCopy.Image = global::SLeek.Properties.Resources.copy_16;
        this.tmnuCopy.Name = "tmnuCopy";
        this.tmnuCopy.Size = new System.Drawing.Size(152, 22);
        this.tmnuCopy.Text = "Copy";
        //
        // tmnuPaste
        //
        this.tmnuPaste.Image = global::SLeek.Properties.Resources.paste_16;
        this.tmnuPaste.Name = "tmnuPaste";
        this.tmnuPaste.Size = new System.Drawing.Size(152, 22);
        this.tmnuPaste.Text = "Paste";
        this.tmnuPaste.Click += new System.EventHandler(this.tmnuPaste_Click);
        //
        // toolStripMenuItem2
        //
        this.toolStripMenuItem2.Name = "toolStripMenuItem2";
        this.toolStripMenuItem2.Size = new System.Drawing.Size(149, 6);
        //
        // tmnuRename
        //
        this.tmnuRename.Enabled = false;
        this.tmnuRename.Name = "tmnuRename";
        this.tmnuRename.Size = new System.Drawing.Size(152, 22);
        this.tmnuRename.Text = "Rename";
        this.tmnuRename.Click += new System.EventHandler(this.tmnuRename_Click);
        //
        // toolStripMenuItem3
        //
        this.toolStripMenuItem3.Name = "toolStripMenuItem3";
        this.toolStripMenuItem3.Size = new System.Drawing.Size(149, 6);
        //
        // tmnuDelete
        //
        this.tmnuDelete.Image = global::SLeek.Properties.Resources.delete_16;
        this.tmnuDelete.Name = "tmnuDelete";
        this.tmnuDelete.Size = new System.Drawing.Size(152, 22);
        this.tmnuDelete.Text = "Delete";
        this.tmnuDelete.Click += new System.EventHandler(this.tmnuDelete_Click);
        //
        // tbtnSort
        //
        // NOTE(review): tbtnSort reuses the copy_16 icon — possibly a placeholder;
        // confirm the intended artwork via the designer.
        this.tbtnSort.AutoToolTip = false;
        this.tbtnSort.Image = global::SLeek.Properties.Resources.copy_16;
        this.tbtnSort.ImageTransparentColor = System.Drawing.Color.Magenta;
        this.tbtnSort.Name = "tbtnSort";
        this.tbtnSort.Size = new System.Drawing.Size(56, 22);
        this.tbtnSort.Text = "Sort";
        //
        // InventoryConsole
        //
        this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
        this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
        this.Controls.Add(this.splitContainer1);
        this.Font = new System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
        this.Name = "InventoryConsole";
        this.Size = new System.Drawing.Size(632, 432);
        this.splitContainer1.Panel1.ResumeLayout(false);
        this.splitContainer1.Panel1.PerformLayout();
        this.splitContainer1.ResumeLayout(false);
        this.tstInventory.ResumeLayout(false);
        this.tstInventory.PerformLayout();
        this.ResumeLayout(false);
    }
    #endregion
    // Control references assigned by InitializeComponent above.
    private System.Windows.Forms.ImageList ilsInventory;
    private System.Windows.Forms.SplitContainer splitContainer1;
    private System.Windows.Forms.TreeView treInventory;
    private System.Windows.Forms.ToolStrip tstInventory;
    private System.Windows.Forms.ToolStripDropDownButton tbtnNew;
    private System.Windows.Forms.ToolStripMenuItem tmnuNewFolder;
    private System.Windows.Forms.ToolStripSeparator toolStripMenuItem1;
    private System.Windows.Forms.ToolStripMenuItem tmnuNewLandmark;
    private System.Windows.Forms.ToolStripMenuItem tmnuNewNotecard;
    private System.Windows.Forms.ToolStripSeparator toolStripSeparator1;
    private System.Windows.Forms.ToolStripDropDownButton tbtnOrganize;
    private System.Windows.Forms.ToolStripMenuItem tmnuCut;
    private System.Windows.Forms.ToolStripMenuItem tmnuCopy;
    private System.Windows.Forms.ToolStripMenuItem tmnuPaste;
    private System.Windows.Forms.ToolStripSeparator toolStripMenuItem2;
    private System.Windows.Forms.ToolStripMenuItem tmnuRename;
    private System.Windows.Forms.ToolStripSeparator toolStripMenuItem3;
    private System.Windows.Forms.ToolStripMenuItem tmnuDelete;
    private System.Windows.Forms.ToolStripDropDownButton tbtnSort;
    private System.Windows.Forms.ToolStripMenuItem tmnuNewScript;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Runtime.InteropServices;
namespace System.Data.OleDb
{
#if DEBUG
using Globalization;
using Text;
#endif
// Per-binding status codes; presumably mirrors the native OLE DB DBBINDSTATUS
// enumeration, so the numeric values must stay in sync with the native headers.
internal enum DBBindStatus
{
    OK = 0,
    BADORDINAL = 1,
    UNSUPPORTEDCONVERSION = 2,
    BADBINDINFO = 3,
    BADSTORAGEFLAGS = 4,
    NOINTERFACE = 5,
    MULTIPLESTORAGE = 6
}
#if false
typedef struct tagDBPARAMBINDINFO {
LPOLESTR pwszDataSourceType;
LPOLESTR pwszName;
DBLENGTH ulParamSize;
DBPARAMFLAGS dwFlags;
BYTE bPrecision;
BYTE bScale;
}
#endif
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
// Managed mirror of the OLE DB DBPARAMBINDINFO structure (reference layout in the
// #if false block above). Field order, types, and packing must match the native
// definition exactly — do not reorder or retype members.
internal struct tagDBPARAMBINDINFO
{
    internal IntPtr pwszDataSourceType; // LPOLESTR
    internal IntPtr pwszName;           // LPOLESTR
    internal IntPtr ulParamSize;        // DBLENGTH (pointer-sized, per the native layout)
    internal int dwFlags;               // DBPARAMFLAGS
    internal byte bPrecision;
    internal byte bScale;
#if DEBUG
    // Debug-only diagnostic dump. NOTE(review): pwszName is not printed —
    // presumably intentional, but confirm if richer diagnostics are wanted.
    public override string ToString()
    {
        StringBuilder builder = new StringBuilder();
        builder.Append("tagDBPARAMBINDINFO").Append(Environment.NewLine);
        if (IntPtr.Zero != pwszDataSourceType)
        {
            builder.Append("pwszDataSourceType =").Append(Marshal.PtrToStringUni(pwszDataSourceType)).Append(Environment.NewLine);
        }
        builder.Append("\tulParamSize =" + ulParamSize.ToInt64().ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tdwFlags =0x" + dwFlags.ToString("X4", CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tPrecision =" + bPrecision.ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tScale =" + bScale.ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        return builder.ToString();
    }
#endif
}
#if false
typedef struct tagDBBINDING {
DBORDINAL iOrdinal;
DBBYTEOFFSET obValue;
DBBYTEOFFSET obLength;
DBBYTEOFFSET obStatus;
ITypeInfo *pTypeInfo;
DBOBJECT *pObject;
DBBINDEXT *pBindExt;
DBPART dwPart;
DBMEMOWNER dwMemOwner;
DBPARAMIO eParamIO;
DBLENGTH cbMaxLen;
DWORD dwFlags;
DBTYPE wType;
BYTE bPrecision;
BYTE bScale;
}
#endif
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
// Managed mirror of the OLE DB DBBINDING structure (reference layout in the
// #if false block above). Sequential layout and packing are ABI — do not reorder.
internal sealed class tagDBBINDING
{
    internal IntPtr iOrdinal;  // DBORDINAL
    internal IntPtr obValue;   // DBBYTEOFFSET: offset of the value in the buffer
    internal IntPtr obLength;  // DBBYTEOFFSET: offset of the length field
    internal IntPtr obStatus;  // DBBYTEOFFSET: offset of the status field
    internal IntPtr pTypeInfo;
    internal IntPtr pObject;
    internal IntPtr pBindExt;
    internal int dwPart;       // DBPART
    internal int dwMemOwner;   // DBMEMOWNER
    internal int eParamIO;     // DBPARAMIO
    internal IntPtr cbMaxLen;  // DBLENGTH
    internal int dwFlags;
    internal short wType;      // DBTYPE
    internal byte bPrecision;
    internal byte bScale;
    internal tagDBBINDING()
    {
    }
#if DEBUG
    // Debug-only diagnostic dump (not every field is printed).
    public override string ToString()
    {
        StringBuilder builder = new StringBuilder();
        builder.Append("tagDBBINDING").Append(Environment.NewLine);
        builder.Append("\tOrdinal =" + iOrdinal.ToInt64().ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tValueOffset =" + obValue.ToInt64().ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tLengthOffset=" + obLength.ToInt64().ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tStatusOffset=" + obStatus.ToInt64().ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tMaxLength =" + cbMaxLen.ToInt64().ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tDB_Type =" + ODB.WLookup(wType)).Append(Environment.NewLine);
        builder.Append("\tPrecision =" + bPrecision.ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        builder.Append("\tScale =" + bScale.ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
        return builder.ToString();
    }
#endif
}
#if false
typedef struct tagDBCOLUMNACCESS {
void *pData;
DBID columnid;
DBLENGTH cbDataLen;
DBSTATUS dwStatus;
DBLENGTH cbMaxLen;
DB_DWRESERVE dwReserved;
DBTYPE wType;
BYTE bPrecision;
BYTE bScale;
}
#endif
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
// Managed mirror of the OLE DB DBCOLUMNACCESS structure (reference layout above).
// Field order/packing are ABI — do not reorder.
internal struct tagDBCOLUMNACCESS
{
    internal IntPtr pData;
    internal tagDBIDX columnid;  // inline DBID (flattened as tagDBIDX)
    internal IntPtr cbDataLen;   // DBLENGTH
    internal int dwStatus;       // DBSTATUS
    internal IntPtr cbMaxLen;    // DBLENGTH
    internal IntPtr dwReserved;  // DB_DWRESERVE
    internal short wType;        // DBTYPE
    internal byte bPrecision;
    internal byte bScale;
}
#if false
typedef struct tagDBID {
/* [switch_is][switch_type] */ union {
/* [case()] */ GUID guid;
/* [case()] */ GUID *pguid;
/* [default] */ /* Empty union arm */
} uGuid;
DBKIND eKind;
/* [switch_is][switch_type] */ union {
/* [case()] */ LPOLESTR pwszName;
/* [case()] */ ULONG ulPropid;
/* [default] */ /* Empty union arm */
} uName;
}
#endif
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
// Value-type flattening of the native DBID (reference layout above): the uGuid
// union collapses to a Guid and the uName union to a single pointer-sized field.
internal struct tagDBIDX
{
    internal Guid uGuid;
    internal int eKind;        // DBKIND: discriminates how the unions are interpreted
    internal IntPtr ulPropid;  // union of LPOLESTR pwszName / ULONG ulPropid
}
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
// Reference-type twin of tagDBIDX with the identical field layout — presumably
// used where a heap-allocated DBID is required for marshaling.
internal sealed class tagDBID
{
    internal Guid uGuid;
    internal int eKind;
    internal IntPtr ulPropid;
}
#if false
typedef struct tagDBLITERALINFO {
LPOLESTR pwszLiteralValue;
LPOLESTR pwszInvalidChars;
LPOLESTR pwszInvalidStartingChars;
DBLITERAL lt;
BOOL fSupported;
ULONG cchMaxLen;
}
#endif
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
// Managed mirror of the OLE DB DBLITERALINFO structure (reference layout above).
internal sealed class tagDBLITERALINFO
{
    [MarshalAs(UnmanagedType.LPWStr)]
    internal string pwszLiteralValue = null;
    [MarshalAs(UnmanagedType.LPWStr)]
    internal string pwszInvalidChars = null;
    [MarshalAs(UnmanagedType.LPWStr)]
    internal string pwszInvalidStartingChars = null;
    // NOTE(review): the native member is spelled 'lt' (DBLITERAL) in the reference
    // layout above but 'it' here; the layout is unaffected, but confirm the name.
    internal int it;
    internal int fSupported; // BOOL
    internal int cchMaxLen;  // ULONG
    internal tagDBLITERALINFO()
    {
    }
}
#if false
typedef struct tagDBPROPSET {
/* [size_is] */ DBPROP *rgProperties;
ULONG cProperties;
GUID guidPropertySet;
}
#endif
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
// Managed mirror of the OLE DB DBPROPSET structure (reference layout above).
internal sealed class tagDBPROPSET
{
    internal IntPtr rgProperties; // pointer to an unmanaged DBPROP array
    internal int cProperties;
    internal Guid guidPropertySet;
    internal tagDBPROPSET()
    {
    }
    // Sets the count and property-set GUID; rgProperties stays IntPtr.Zero and
    // must be attached by the caller before use.
    internal tagDBPROPSET(int propertyCount, Guid propertySet)
    {
        cProperties = propertyCount;
        guidPropertySet = propertySet;
    }
}
#if false
typedef struct tagDBPROP {
DBPROPID dwPropertyID;
DBPROPOPTIONS dwOptions;
DBPROPSTATUS dwStatus;
DBID colid;
VARIANT vValue;
}
#endif
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
// Managed mirror of the OLE DB DBPROP structure (reference layout above).
internal sealed class tagDBPROP
{
    internal int dwPropertyID;                  // DBPROPID
    internal int dwOptions;                     // DBPROPOPTIONS
    internal OleDbPropertyStatus dwStatus;      // DBPROPSTATUS
    internal tagDBIDX columnid;                 // DBID
    // Variant
    [MarshalAs(UnmanagedType.Struct)] internal object vValue;
    internal tagDBPROP()
    {
    }
    // Convenience constructor: maps 'required' onto the DBPROPOPTIONS flag pair.
    internal tagDBPROP(int propertyID, bool required, object value)
    {
        dwPropertyID = propertyID;
        dwOptions = ((required) ? ODB.DBPROPOPTIONS_REQUIRED : ODB.DBPROPOPTIONS_OPTIONAL);
        vValue = value;
    }
}
#if false
typedef struct tagDBPARAMS {
void *pData;
DB_UPARAMS cParamSets;
HACCESSOR hAccessor;
}
#endif
// Managed mirror of the native tagDBPARAMS shown above: parameter data
// buffer, parameter-set count, and the accessor handle used to interpret
// the buffer.
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
internal sealed class tagDBPARAMS
{
// Pointer to the caller-allocated parameter data buffer.
internal IntPtr pData;
// Number of parameter sets in pData.
internal int cParamSets;
// Accessor handle (HACCESSOR) describing the buffer layout.
internal IntPtr hAccessor;
internal tagDBPARAMS()
{
}
}
#if false
typedef struct tagDBCOLUMNINFO {
LPOLESTR pwszName;
ITypeInfo *pTypeInfo;
DBORDINAL iOrdinal;
DBCOLUMNFLAGS dwFlags;
DBLENGTH ulColumnSize;
DBTYPE wType;
BYTE bPrecision;
BYTE bScale;
DBID columnid;
}
#endif
// Managed mirror of the native tagDBCOLUMNINFO shown above: metadata for a
// single result-set column (name, ordinal, flags, size, type, precision,
// scale, and column id). IntPtr is used for DBORDINAL/DBLENGTH so the
// field width tracks the platform pointer size.
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
internal sealed class tagDBCOLUMNINFO
{
// Column name.
[MarshalAs(UnmanagedType.LPWStr)]
internal string pwszName = null;
//[MarshalAs(UnmanagedType.Interface)]
// ITypeInfo* — kept as a raw pointer rather than a marshaled interface.
internal IntPtr pTypeInfo = (IntPtr)0;
// Column ordinal (DBORDINAL).
internal IntPtr iOrdinal = (IntPtr)0;
// DBCOLUMNFLAGS bit mask.
internal int dwFlags = 0;
// Maximum column length (DBLENGTH).
internal IntPtr ulColumnSize = (IntPtr)0;
// Data type indicator (DBTYPE).
internal short wType = 0;
internal byte bPrecision = 0;
internal byte bScale = 0;
// Column identifier (inline DBID).
internal tagDBIDX columnid;
internal tagDBCOLUMNINFO()
{
}
#if DEBUG
// Diagnostic dump of every field; debug builds only.
public override string ToString()
{
StringBuilder builder = new StringBuilder();
builder.Append("tagDBCOLUMNINFO: " + Convert.ToString(pwszName, CultureInfo.InvariantCulture)).Append(Environment.NewLine);
builder.Append("\t" + iOrdinal.ToInt64().ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
builder.Append("\t" + "0x" + dwFlags.ToString("X8", CultureInfo.InvariantCulture)).Append(Environment.NewLine);
builder.Append("\t" + ulColumnSize.ToInt64().ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
builder.Append("\t" + "0x" + wType.ToString("X2", CultureInfo.InvariantCulture)).Append(Environment.NewLine);
builder.Append("\t" + bPrecision.ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
builder.Append("\t" + bScale.ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
builder.Append("\t" + columnid.eKind.ToString(CultureInfo.InvariantCulture)).Append(Environment.NewLine);
return builder.ToString();
}
#endif
}
#if false
typedef struct tagDBPROPINFOSET {
/* [size_is] */ PDBPROPINFO rgPropertyInfos;
ULONG cPropertyInfos;
GUID guidPropertySet;
}
#endif
// Managed mirror of the native tagDBPROPINFOSET shown above: a counted
// array of DBPROPINFO structures plus the property-set GUID.
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
internal sealed class tagDBPROPINFOSET
{
// Pointer to an unmanaged array of cPropertyInfos DBPROPINFO structures.
internal IntPtr rgPropertyInfos;
// Number of entries in rgPropertyInfos.
internal int cPropertyInfos;
// GUID identifying the property set.
internal Guid guidPropertySet;
internal tagDBPROPINFOSET()
{
}
}
#if false
typedef struct tagDBPROPINFO {
LPOLESTR pwszDescription;
DBPROPID dwPropertyID;
DBPROPFLAGS dwFlags;
VARTYPE vtType;
VARIANT vValues;
}
#endif
// Managed mirror of the native tagDBPROPINFO shown above: description,
// id, flags, VARTYPE, and a VARIANT of allowed values for one property.
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
internal sealed class tagDBPROPINFO
{
// Human-readable property description.
[MarshalAs(UnmanagedType.LPWStr)] internal string pwszDescription;
// Property identifier (DBPROPID).
internal int dwPropertyID;
// DBPROPFLAGS bit mask.
internal int dwFlags;
// VARTYPE of the property value.
internal short vtType;
// NOTE(review): the native typedef names this field "vValues"; the
// managed name "vValue" differs. Marshaling is positional so this is
// harmless, but keep the name stable for other managed references.
[MarshalAs(UnmanagedType.Struct)] internal object vValue;
internal tagDBPROPINFO()
{
}
}
#if false
typedef struct tagDBPROPIDSET {
/* [size_is] */ DBPROPID *rgPropertyIDs;
ULONG cPropertyIDs;
GUID guidPropertySet;
}
#endif
// Managed mirror of the native tagDBPROPIDSET shown above: a counted array
// of property ids plus the property-set GUID. Declared as a value type so
// it can be embedded in arrays passed to the provider.
#if (WIN32 && !ARCH_arm)
[StructLayoutAttribute(LayoutKind.Sequential, Pack = 2)]
#else
[StructLayout(LayoutKind.Sequential, Pack = 8)]
#endif
internal struct tagDBPROPIDSET
{
// Pointer to an unmanaged array of cPropertyIDs DBPROPID values.
internal IntPtr rgPropertyIDs;
// Number of entries in rgPropertyIDs.
internal int cPropertyIDs;
// GUID identifying the property set.
internal Guid guidPropertySet;
}
}
| |
using HomeSeerAPI;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
using static System.FormattableString;
namespace Hspi
{
/// <summary>
/// Class to store PlugIn Configuration
/// </summary>
/// <seealso cref="System.IDisposable" />
internal class PluginConfig : IDisposable
{
    /// <summary>
    /// Raised (via <see cref="FireConfigChanged"/>) after the configuration changes.
    /// </summary>
    public event EventHandler<EventArgs> ConfigChanged;

    /// <summary>
    /// Initializes a new instance of the <see cref="PluginConfig"/> class,
    /// loading all previously saved devices and settings from the INI file.
    /// </summary>
    /// <param name="HS">The homeseer application.</param>
    public PluginConfig(IHSApplication HS)
    {
        this.HS = HS;

        debugLogging = GetValue(DebugLoggingKey, false);

        string deviceIdsConcatString = GetValue(DeviceIds, string.Empty);
        var deviceIds = deviceIdsConcatString.Split(DeviceIdsSeparator);

        foreach (var deviceId in deviceIds)
        {
            if (string.IsNullOrWhiteSpace(deviceId))
            {
                continue;
            }

            // A parse failure leaves deviceIP null; the device is still loaded
            // so the user can fix the address later from the settings page.
            string ipAddressString = GetValue(IPAddressKey, string.Empty, deviceId);
            IPAddress.TryParse(ipAddressString, out var deviceIP);

            // Per-type resolution, falling back to the built-in defaults.
            var resolution = new Dictionary<DeviceType, double>();
            foreach (var item in System.Enum.GetValues(typeof(DeviceType)))
            {
                DeviceType deviceType = (DeviceType)item;
                resolution.Add(deviceType, GetValue(ResolutionKey(deviceType), GetDefaultResolution(deviceType), deviceId));
            }

            // Enabled ports are persisted as a comma-separated list; ignore
            // anything that does not parse as an integer.
            string enabledPortsString = GetValue(PortsEnabledKey, string.Empty, deviceId);
            var enabledPorts = new SortedSet<int>();
            foreach (var portString in enabledPortsString.Split(PortsEnabledSeparator))
            {
                if (int.TryParse(portString, NumberStyles.Any, CultureInfo.InvariantCulture, out var port))
                {
                    enabledPorts.Add(port);
                }
            }

            // Each device type is persisted as its own boolean key.
            var enabledTypes = new SortedSet<DeviceType>();
            foreach (var item in System.Enum.GetValues(typeof(DeviceType)))
            {
                if (GetValue(item.ToString(), false, deviceId))
                {
                    enabledTypes.Add((DeviceType)item);
                }
            }

            string name = GetValue(NameKey, string.Empty, deviceId);
            string username = GetValue(UserNameKey, string.Empty, deviceId);
            string passwordEncrypted = GetValue(PasswordKey, string.Empty, deviceId);
            string password = HS.DecryptString(passwordEncrypted, EncryptPassword);

            devices.Add(deviceId, new MPowerDevice(deviceId, name, deviceIP, username, password, enabledTypes, resolution, enabledPorts));
        }
    }

    /// <summary>
    /// Gets a point-in-time snapshot of the configured devices, keyed by device id.
    /// </summary>
    /// <value>
    /// An immutable copy; later Add/Remove calls do not affect it.
    /// </value>
    public ImmutableDictionary<string, MPowerDevice> Devices
    {
        get
        {
            configLock.EnterReadLock();
            try
            {
                return devices.ToImmutableDictionary();
            }
            finally
            {
                configLock.ExitReadLock();
            }
        }
    }

    /// <summary>
    /// Gets or sets a value indicating whether debug logging is enabled.
    /// The setter also persists the value to the INI file.
    /// </summary>
    /// <value>
    /// <c>true</c> if [debug logging]; otherwise, <c>false</c>.
    /// </value>
    public bool DebugLogging
    {
        get
        {
            configLock.EnterReadLock();
            try
            {
                return debugLogging;
            }
            finally
            {
                configLock.ExitReadLock();
            }
        }
        set
        {
            configLock.EnterWriteLock();
            try
            {
                SetValue(DebugLoggingKey, value);
                debugLogging = value;
            }
            finally
            {
                configLock.ExitWriteLock();
            }
        }
    }

    // INI key used to persist the resolution of one device type.
    private static string ResolutionKey(DeviceType deviceType)
    {
        return deviceType.ToString() + "Resolution";
    }

    /// <summary>
    /// Adds (or replaces) a device and persists all of its settings.
    /// </summary>
    /// <param name="device">The device to store.</param>
    public void AddDevice(MPowerDevice device)
    {
        configLock.EnterWriteLock();
        try
        {
            devices[device.Id] = device;
            SetValue(NameKey, device.Name, device.Id);
            SetValue(IPAddressKey, device.DeviceIP.ToString(), device.Id);
            SetValue(UserNameKey, device.Username, device.Id);
            // Password is stored encrypted, never as plain text.
            SetValue(PasswordKey, HS.EncryptString(device.Password, EncryptPassword), device.Id);
            foreach (var item in System.Enum.GetValues(typeof(DeviceType)))
            {
                SetValue(item.ToString(), device.EnabledTypes.Contains((DeviceType)item), device.Id);
            }
            foreach (var pair in device.Resolution)
            {
                SetValue(ResolutionKey(pair.Key), pair.Value, device.Id);
            }
            if (device.EnabledPorts.Count > 0)
            {
                SetValue(PortsEnabledKey, device.EnabledPorts
                    .Select(x => x.ToString(CultureInfo.InvariantCulture))
                    .Aggregate((x, y) => x + PortsEnabledSeparator + y), device.Id);
            }
            else
            {
                SetValue(PortsEnabledKey, string.Empty, device.Id);
            }
            // Keep the master id list in sync with the devices dictionary.
            SetValue(DeviceIds, devices.Keys.Aggregate((x, y) => x + DeviceIdsSeparator + y));
        }
        finally
        {
            configLock.ExitWriteLock();
        }
    }

    /// <summary>
    /// Removes a device and clears its INI section.
    /// </summary>
    /// <param name="deviceId">Id of the device to remove.</param>
    public void RemoveDevice(string deviceId)
    {
        configLock.EnterWriteLock();
        try
        {
            devices.Remove(deviceId);
            if (devices.Count > 0)
            {
                SetValue(DeviceIds, devices.Keys.Aggregate((x, y) => x + DeviceIdsSeparator + y));
            }
            else
            {
                // Aggregate throws on an empty sequence, so write the empty
                // list explicitly.
                SetValue(DeviceIds, string.Empty);
            }
            HS.ClearINISection(deviceId, FileName);
        }
        finally
        {
            configLock.ExitWriteLock();
        }
    }

    // Reads a value from the default section.
    private T GetValue<T>(string key, T defaultValue)
    {
        return GetValue(key, defaultValue, DefaultSection);
    }

    // Reads a value from the given INI section, converting the stored string
    // to T with the invariant culture. Any missing key or conversion failure
    // deliberately falls back to defaultValue (best-effort configuration load).
    private T GetValue<T>(string key, T defaultValue, string section)
    {
        string stringValue = HS.GetINISetting(section, key, null, FileName);
        if (stringValue != null)
        {
            try
            {
                T result = (T)System.Convert.ChangeType(stringValue, typeof(T), CultureInfo.InvariantCulture);
                return result;
            }
            catch (Exception)
            {
                return defaultValue;
            }
        }
        return defaultValue;
    }

    // Writes a value to the default section.
    private void SetValue<T>(string key, T value)
    {
        SetValue<T>(key, value, DefaultSection);
    }

    // Writes a value to the given INI section, serialized with the invariant
    // culture so it round-trips through GetValue above.
    private void SetValue<T>(string key, T value, string section)
    {
        string stringValue = System.Convert.ToString(value, CultureInfo.InvariantCulture);
        HS.SaveINISetting(section, key, stringValue, FileName);
    }

    /// <summary>
    /// Fires event that configuration changed.
    /// </summary>
    public void FireConfigChanged()
    {
        // Null-conditional invoke reads the delegate field exactly once,
        // avoiding the race where the last handler unsubscribes between a
        // null check and the invocation.
        ConfigChanged?.Invoke(this, EventArgs.Empty);
    }

    /// <summary>
    /// Gets the default measurement resolution for a device type.
    /// </summary>
    public static double GetDefaultResolution(DeviceType deviceType)
    {
        switch (deviceType)
        {
            case DeviceType.Output:
                return 1;
            case DeviceType.Power:
                return 0.01;
            case DeviceType.Current:
                return 0.01;
            case DeviceType.Voltage:
                return 0.1;
            case DeviceType.PowerFactor:
                return 0.01;
            case DeviceType.Energy:
                return 0.01;
            default:
                return 0.01;
        }
    }

    /// <summary>
    /// Gets the display units for a device type (empty when unitless).
    /// </summary>
    public static string GetUnits(DeviceType deviceType)
    {
        switch (deviceType)
        {
            case DeviceType.Output:
                return string.Empty;
            case DeviceType.Power:
                return "Watts";
            case DeviceType.Current:
                return "Amps";
            case DeviceType.Voltage:
                return "Volts";
            case DeviceType.PowerFactor:
                return string.Empty;
            case DeviceType.Energy:
                return "KW Hours";
            default:
                return string.Empty;
        }
    }

    #region IDisposable Support

    protected virtual void Dispose(bool disposing)
    {
        if (!disposedValue)
        {
            if (disposing)
            {
                configLock.Dispose();
            }
            disposedValue = true;
        }
    }

    // This code added to correctly implement the disposable pattern.
    public void Dispose()
    {
        Dispose(true);
        // CA1816: prevents a finalizer in a derived type from running after
        // an explicit Dispose.
        GC.SuppressFinalize(this);
    }

    #endregion IDisposable Support

    private const string NameKey = "Name";
    private const string UserNameKey = "Username";
    private const string PasswordKey = "Password";
    // NOTE: the key keeps the historical misspelling "DevicesIds" on purpose;
    // renaming it would orphan existing users' INI files.
    private const string DeviceIds = "DevicesIds";
    private const string DebugLoggingKey = "DebugLogging";
    private readonly static string FileName = Invariant($"{Path.GetFileName(System.Reflection.Assembly.GetEntryAssembly().Location)}.ini");
    private const string IPAddressKey = "IPAddress";
    private const string DefaultSection = "Settings";
    private const string EncryptPassword = "Not sure what is more secure";
    private const string PortsEnabledKey = "PortsEnabled";
    private const char DeviceIdsSeparator = '|';
    private const char PortsEnabledSeparator = ',';
    private readonly Dictionary<string, MPowerDevice> devices = new Dictionary<string, MPowerDevice>();
    private readonly IHSApplication HS;
    private bool debugLogging;
    private bool disposedValue = false;
    private readonly ReaderWriterLockSlim configLock = new ReaderWriterLockSlim();
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
namespace Microsoft.Azure.Management.Scheduler
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Extension methods for JobCollectionsOperations.
/// </summary>
public static partial class JobCollectionsOperationsExtensions
{
// NOTE: This is AutoRest-generated code. The synchronous wrappers below
// intentionally use Task.Factory.StartNew(..., TaskScheduler.Default)
// .Unwrap().GetAwaiter().GetResult() rather than calling the async method
// directly: starting the delegate on the thread pool keeps continuations
// off any caller SynchronizationContext (reducing deadlock risk when a
// caller blocks), and GetAwaiter().GetResult() rethrows the original
// exception instead of wrapping it in an AggregateException.
/// <summary>
/// Gets all job collections under specified subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static IPage<JobCollectionDefinition> ListBySubscription(this IJobCollectionsOperations operations)
{
return Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).ListBySubscriptionAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all job collections under specified subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<JobCollectionDefinition>> ListBySubscriptionAsync(this IJobCollectionsOperations operations, CancellationToken cancellationToken = default(CancellationToken))
{
// ConfigureAwait(false) throughout: library code, no need to resume on
// the caller's context. The using disposes the HTTP response wrapper.
using (var _result = await operations.ListBySubscriptionWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets all job collections under specified resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
public static IPage<JobCollectionDefinition> ListByResourceGroup(this IJobCollectionsOperations operations, string resourceGroupName)
{
return Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).ListByResourceGroupAsync(resourceGroupName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all job collections under specified resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<JobCollectionDefinition>> ListByResourceGroupAsync(this IJobCollectionsOperations operations, string resourceGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByResourceGroupWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets a job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
public static JobCollectionDefinition Get(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName)
{
return Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).GetAsync(resourceGroupName, jobCollectionName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets a job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<JobCollectionDefinition> GetAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, jobCollectionName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Provisions a new job collection or updates an existing job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='jobCollection'>
/// The job collection definition.
/// </param>
public static JobCollectionDefinition CreateOrUpdate(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, JobCollectionDefinition jobCollection)
{
return Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).CreateOrUpdateAsync(resourceGroupName, jobCollectionName, jobCollection), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Provisions a new job collection or updates an existing job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='jobCollection'>
/// The job collection definition.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<JobCollectionDefinition> CreateOrUpdateAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, JobCollectionDefinition jobCollection, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, jobCollectionName, jobCollection, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Patches an existing job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='jobCollection'>
/// The job collection definition.
/// </param>
public static JobCollectionDefinition Patch(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, JobCollectionDefinition jobCollection)
{
return Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).PatchAsync(resourceGroupName, jobCollectionName, jobCollection), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Patches an existing job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='jobCollection'>
/// The job collection definition.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<JobCollectionDefinition> PatchAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, JobCollectionDefinition jobCollection, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.PatchWithHttpMessagesAsync(resourceGroupName, jobCollectionName, jobCollection, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes a job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
public static void Delete(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName)
{
Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).DeleteAsync(resourceGroupName, jobCollectionName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteWithHttpMessagesAsync(resourceGroupName, jobCollectionName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Deletes a job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
public static void BeginDelete(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName)
{
Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).BeginDeleteAsync(resourceGroupName, jobCollectionName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginDeleteAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, jobCollectionName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Enables all of the jobs in the job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
public static void Enable(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName)
{
Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).EnableAsync(resourceGroupName, jobCollectionName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Enables all of the jobs in the job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task EnableAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.EnableWithHttpMessagesAsync(resourceGroupName, jobCollectionName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Enables all of the jobs in the job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
public static void BeginEnable(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName)
{
Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).BeginEnableAsync(resourceGroupName, jobCollectionName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Enables all of the jobs in the job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginEnableAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.BeginEnableWithHttpMessagesAsync(resourceGroupName, jobCollectionName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Disables all of the jobs in the job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
public static void Disable(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName)
{
Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).DisableAsync(resourceGroupName, jobCollectionName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Disables all of the jobs in the job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DisableAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DisableWithHttpMessagesAsync(resourceGroupName, jobCollectionName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Disables all of the jobs in the job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
public static void BeginDisable(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName)
{
Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).BeginDisableAsync(resourceGroupName, jobCollectionName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Disables all of the jobs in the job collection.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='jobCollectionName'>
/// The job collection name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginDisableAsync(this IJobCollectionsOperations operations, string resourceGroupName, string jobCollectionName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.BeginDisableWithHttpMessagesAsync(resourceGroupName, jobCollectionName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets all job collections under specified subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<JobCollectionDefinition> ListBySubscriptionNext(this IJobCollectionsOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).ListBySubscriptionNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all job collections under specified subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<JobCollectionDefinition>> ListBySubscriptionNextAsync(this IJobCollectionsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListBySubscriptionNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets all job collections under specified resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<JobCollectionDefinition> ListByResourceGroupNext(this IJobCollectionsOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IJobCollectionsOperations)s).ListByResourceGroupNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all job collections under specified resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<JobCollectionDefinition>> ListByResourceGroupNextAsync(this IJobCollectionsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByResourceGroupNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
| |
using Godot;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using GodotTools.Build;
using GodotTools.Core;
using GodotTools.Internals;
using JetBrains.Annotations;
using static GodotTools.Internals.Globals;
using Directory = GodotTools.Utils.Directory;
using File = GodotTools.Utils.File;
using OS = GodotTools.Utils.OS;
using Path = System.IO.Path;
namespace GodotTools.Export
{
public class ExportPlugin : EditorExportPlugin
{
// Bit flags matching the "mono/export/i18n_codesets" project setting
// (hint string "CJK,MidEast,Other,Rare,West" — see RegisterExportSettings);
// selects which Mono I18N satellite assemblies are bundled with an export.
[Flags]
enum I18NCodesets : long
{
None = 0,
CJK = 1,
MidEast = 2,
Other = 4,
Rare = 8,
West = 16,
All = CJK | MidEast | Other | Rare | West
}
/// <summary>
/// Adds the Mono I18N assemblies selected by the
/// "mono/export/i18n_codesets" project setting to the export set,
/// mapping each assembly name to its DLL path under the BCL directory.
/// </summary>
private void AddI18NAssemblies(Godot.Collections.Dictionary<string, string> assemblies, string bclDir)
{
    var enabled = (I18NCodesets)ProjectSettings.GetSetting("mono/export/i18n_codesets");

    if (enabled == I18NCodesets.None)
        return;

    void Include(string assemblyName) =>
        assemblies.Add(assemblyName, Path.Combine(bclDir, $"{assemblyName}.dll"));

    // The base I18N assembly is required whenever any codeset is enabled.
    Include("I18N");

    if (enabled.HasFlag(I18NCodesets.CJK))
        Include("I18N.CJK");
    if (enabled.HasFlag(I18NCodesets.MidEast))
        Include("I18N.MidEast");
    if (enabled.HasFlag(I18NCodesets.Other))
        Include("I18N.Other");
    if (enabled.HasFlag(I18NCodesets.Rare))
        Include("I18N.Rare");
    if (enabled.HasFlag(I18NCodesets.West))
        Include("I18N.West");
}
public void RegisterExportSettings()
{
// TODO: These would be better as export preset options, but that doesn't seem to be supported yet
GlobalDef("mono/export/include_scripts_content", false);
GlobalDef("mono/export/export_assemblies_inside_pck", true);
GlobalDef("mono/export/i18n_codesets", I18NCodesets.All);
ProjectSettings.AddPropertyInfo(new Godot.Collections.Dictionary
{
["type"] = Variant.Type.Int,
["name"] = "mono/export/i18n_codesets",
["hint"] = PropertyHint.Flags,
["hint_string"] = "CJK,MidEast,Other,Rare,West"
});
GlobalDef("mono/export/aot/enabled", false);
GlobalDef("mono/export/aot/full_aot", false);
GlobalDef("mono/export/aot/use_interpreter", true);
// --aot or --aot=opt1,opt2 (use 'mono --aot=help AuxAssembly.dll' to list AOT options)
GlobalDef("mono/export/aot/extra_aot_options", Array.Empty<string>());
// --optimize/-O=opt1,opt2 (use 'mono --list-opt'' to list optimize options)
GlobalDef("mono/export/aot/extra_optimizer_options", Array.Empty<string>());
GlobalDef("mono/export/aot/android_toolchain_path", "");
}
private string maybeLastExportError;
private void AddFile(string srcPath, string dstPath, bool remap = false)
{
// Add file to the PCK
AddFile(dstPath.Replace("\\", "/"), File.ReadAllBytes(srcPath), remap);
}
// With this method we can override how a file is exported in the PCK
public override void _ExportFile(string path, string type, string[] features)
{
base._ExportFile(path, type, features);
if (type != Internal.CSharpLanguageType)
return;
if (Path.GetExtension(path) != Internal.CSharpLanguageExtension)
throw new ArgumentException($"Resource of type {Internal.CSharpLanguageType} has an invalid file extension: {path}", nameof(path));
// TODO What if the source file is not part of the game's C# project
bool includeScriptsContent = (bool)ProjectSettings.GetSetting("mono/export/include_scripts_content");
if (!includeScriptsContent)
{
// We don't want to include the source code on exported games.
// Sadly, Godot prints errors when adding an empty file (nothing goes wrong, it's just noise).
// Because of this, we add a file which contains a line break.
AddFile(path, System.Text.Encoding.UTF8.GetBytes("\n"), remap: false);
// Tell the Godot exporter that we already took care of the file
Skip();
}
}
public override void _ExportBegin(string[] features, bool isDebug, string path, int flags)
{
base._ExportBegin(features, isDebug, path, flags);
try
{
_ExportBeginImpl(features, isDebug, path, flags);
}
catch (Exception e)
{
maybeLastExportError = e.Message;
// 'maybeLastExportError' cannot be null or empty if there was an error, so we
// must consider the possibility of exceptions being thrown without a message.
if (string.IsNullOrEmpty(maybeLastExportError))
maybeLastExportError = $"Exception thrown: {e.GetType().Name}";
GD.PushError($"Failed to export project: {maybeLastExportError}");
Console.Error.WriteLine(e);
// TODO: Do something on error once _ExportBegin supports failing.
}
}
private void _ExportBeginImpl(string[] features, bool isDebug, string path, int flags)
{
_ = flags; // Unused
if (!File.Exists(GodotSharpDirs.ProjectSlnPath))
return;
if (!DeterminePlatformFromFeatures(features, out string platform))
throw new NotSupportedException("Target platform not supported");
string outputDir = new FileInfo(path).Directory?.FullName ??
throw new FileNotFoundException("Base directory not found");
string buildConfig = isDebug ? "ExportDebug" : "ExportRelease";
if (!BuildManager.BuildProjectBlocking(buildConfig, platform: platform))
throw new Exception("Failed to build project");
// Add dependency assemblies
var assemblies = new Godot.Collections.Dictionary<string, string>();
string projectDllName = GodotSharpEditor.ProjectAssemblyName;
string projectDllSrcDir = Path.Combine(GodotSharpDirs.ResTempAssembliesBaseDir, buildConfig);
string projectDllSrcPath = Path.Combine(projectDllSrcDir, $"{projectDllName}.dll");
assemblies[projectDllName] = projectDllSrcPath;
string bclDir = DeterminePlatformBclDir(platform);
if (platform == OS.Platforms.Android)
{
string godotAndroidExtProfileDir = GetBclProfileDir("godot_android_ext");
string monoAndroidAssemblyPath = Path.Combine(godotAndroidExtProfileDir, "Mono.Android.dll");
if (!File.Exists(monoAndroidAssemblyPath))
throw new FileNotFoundException("Assembly not found: 'Mono.Android'", monoAndroidAssemblyPath);
assemblies["Mono.Android"] = monoAndroidAssemblyPath;
}
else if (platform == OS.Platforms.HTML5)
{
// Ideally these would be added automatically since they're referenced by the wasm BCL assemblies.
// However, at least in the case of 'WebAssembly.Net.Http' for some reason the BCL assemblies
// reference a different version even though the assembly is the same, for some weird reason.
var wasmFrameworkAssemblies = new[] { "WebAssembly.Bindings", "WebAssembly.Net.WebSockets" };
foreach (string thisWasmFrameworkAssemblyName in wasmFrameworkAssemblies)
{
string thisWasmFrameworkAssemblyPath = Path.Combine(bclDir, thisWasmFrameworkAssemblyName + ".dll");
if (!File.Exists(thisWasmFrameworkAssemblyPath))
throw new FileNotFoundException($"Assembly not found: '{thisWasmFrameworkAssemblyName}'", thisWasmFrameworkAssemblyPath);
assemblies[thisWasmFrameworkAssemblyName] = thisWasmFrameworkAssemblyPath;
}
// Assemblies that can have a different name in a newer version. Newer version must come first and it has priority.
(string newName, string oldName)[] wasmFrameworkAssembliesOneOf = new[]
{
("System.Net.Http.WebAssemblyHttpHandler", "WebAssembly.Net.Http")
};
foreach (var thisWasmFrameworkAssemblyName in wasmFrameworkAssembliesOneOf)
{
string thisWasmFrameworkAssemblyPath = Path.Combine(bclDir, thisWasmFrameworkAssemblyName.newName + ".dll");
if (File.Exists(thisWasmFrameworkAssemblyPath))
{
assemblies[thisWasmFrameworkAssemblyName.newName] = thisWasmFrameworkAssemblyPath;
}
else
{
thisWasmFrameworkAssemblyPath = Path.Combine(bclDir, thisWasmFrameworkAssemblyName.oldName + ".dll");
if (!File.Exists(thisWasmFrameworkAssemblyPath))
{
throw new FileNotFoundException("Expected one of the following assemblies but none were found: " +
$"'{thisWasmFrameworkAssemblyName.newName}' / '{thisWasmFrameworkAssemblyName.oldName}'",
thisWasmFrameworkAssemblyPath);
}
assemblies[thisWasmFrameworkAssemblyName.oldName] = thisWasmFrameworkAssemblyPath;
}
}
}
var initialAssemblies = assemblies.Duplicate();
internal_GetExportedAssemblyDependencies(initialAssemblies, buildConfig, bclDir, assemblies);
AddI18NAssemblies(assemblies, bclDir);
string outputDataDir = null;
if (PlatformHasTemplateDir(platform))
outputDataDir = ExportDataDirectory(features, platform, isDebug, outputDir);
string apiConfig = isDebug ? "Debug" : "Release";
string resAssembliesDir = Path.Combine(GodotSharpDirs.ResAssembliesBaseDir, apiConfig);
bool assembliesInsidePck = (bool)ProjectSettings.GetSetting("mono/export/export_assemblies_inside_pck") || outputDataDir == null;
if (!assembliesInsidePck)
{
string outputDataGameAssembliesDir = Path.Combine(outputDataDir, "Assemblies");
if (!Directory.Exists(outputDataGameAssembliesDir))
Directory.CreateDirectory(outputDataGameAssembliesDir);
}
foreach (var assembly in assemblies)
{
void AddToAssembliesDir(string fileSrcPath)
{
if (assembliesInsidePck)
{
string fileDstPath = Path.Combine(resAssembliesDir, fileSrcPath.GetFile());
AddFile(fileSrcPath, fileDstPath);
}
else
{
Debug.Assert(outputDataDir != null);
string fileDstPath = Path.Combine(outputDataDir, "Assemblies", fileSrcPath.GetFile());
File.Copy(fileSrcPath, fileDstPath);
}
}
string assemblySrcPath = assembly.Value;
string assemblyPathWithoutExtension = Path.ChangeExtension(assemblySrcPath, null);
string pdbSrcPath = assemblyPathWithoutExtension + ".pdb";
AddToAssembliesDir(assemblySrcPath);
if (File.Exists(pdbSrcPath))
AddToAssembliesDir(pdbSrcPath);
}
// AOT compilation
bool aotEnabled = platform == OS.Platforms.iOS || (bool)ProjectSettings.GetSetting("mono/export/aot/enabled");
if (aotEnabled)
{
string aotToolchainPath = null;
if (platform == OS.Platforms.Android)
aotToolchainPath = (string)ProjectSettings.GetSetting("mono/export/aot/android_toolchain_path");
if (aotToolchainPath == string.Empty)
aotToolchainPath = null; // Don't risk it being used as current working dir
// TODO: LLVM settings are hard-coded and disabled for now
var aotOpts = new AotOptions
{
EnableLLVM = false,
LLVMOnly = false,
LLVMPath = "",
LLVMOutputPath = "",
FullAot = platform == OS.Platforms.iOS || (bool)(ProjectSettings.GetSetting("mono/export/aot/full_aot") ?? false),
UseInterpreter = (bool)ProjectSettings.GetSetting("mono/export/aot/use_interpreter"),
ExtraAotOptions = (string[])ProjectSettings.GetSetting("mono/export/aot/extra_aot_options") ?? Array.Empty<string>(),
ExtraOptimizerOptions = (string[])ProjectSettings.GetSetting("mono/export/aot/extra_optimizer_options") ?? Array.Empty<string>(),
ToolchainPath = aotToolchainPath
};
AotBuilder.CompileAssemblies(this, aotOpts, features, platform, isDebug, bclDir, outputDir, outputDataDir, assemblies);
}
}
public override void _ExportEnd()
{
base._ExportEnd();
string aotTempDir = Path.Combine(Path.GetTempPath(), $"godot-aot-{Process.GetCurrentProcess().Id}");
if (Directory.Exists(aotTempDir))
Directory.Delete(aotTempDir, recursive: true);
// TODO: Just a workaround until the export plugins can be made to abort with errors
if (!string.IsNullOrEmpty(maybeLastExportError)) // Check empty as well, because it's set to empty after hot-reloading
{
string lastExportError = maybeLastExportError;
maybeLastExportError = null;
GodotSharpEditor.Instance.ShowErrorDialog(lastExportError, "Failed to export C# project");
}
}
[NotNull]
private static string ExportDataDirectory(string[] features, string platform, bool isDebug, string outputDir)
{
string target = isDebug ? "release_debug" : "release";
// NOTE: Bits is ok for now as all platforms with a data directory only have one or two architectures.
// However, this may change in the future if we add arm linux or windows desktop templates.
string bits = features.Contains("64") ? "64" : "32";
string TemplateDirName() => $"data.mono.{platform}.{bits}.{target}";
string templateDirPath = Path.Combine(Internal.FullTemplatesDir, TemplateDirName());
bool validTemplatePathFound = true;
if (!Directory.Exists(templateDirPath))
{
validTemplatePathFound = false;
if (isDebug)
{
target = "debug"; // Support both 'release_debug' and 'debug' for the template data directory name
templateDirPath = Path.Combine(Internal.FullTemplatesDir, TemplateDirName());
validTemplatePathFound = true;
if (!Directory.Exists(templateDirPath))
validTemplatePathFound = false;
}
}
if (!validTemplatePathFound)
throw new FileNotFoundException("Data template directory not found", templateDirPath);
string outputDataDir = Path.Combine(outputDir, DetermineDataDirNameForProject());
if (Directory.Exists(outputDataDir))
Directory.Delete(outputDataDir, recursive: true); // Clean first
Directory.CreateDirectory(outputDataDir);
foreach (string dir in Directory.GetDirectories(templateDirPath, "*", SearchOption.AllDirectories))
{
Directory.CreateDirectory(Path.Combine(outputDataDir, dir.Substring(templateDirPath.Length + 1)));
}
foreach (string file in Directory.GetFiles(templateDirPath, "*", SearchOption.AllDirectories))
{
File.Copy(file, Path.Combine(outputDataDir, file.Substring(templateDirPath.Length + 1)));
}
return outputDataDir;
}
private static bool PlatformHasTemplateDir(string platform)
{
// OSX export templates are contained in a zip, so we place our custom template inside it and let Godot do the rest.
return !new[] { OS.Platforms.MacOS, OS.Platforms.Android, OS.Platforms.iOS, OS.Platforms.HTML5 }.Contains(platform);
}
private static bool DeterminePlatformFromFeatures(IEnumerable<string> features, out string platform)
{
foreach (var feature in features)
{
if (OS.PlatformNameMap.TryGetValue(feature, out platform))
return true;
}
platform = null;
return false;
}
private static string GetBclProfileDir(string profile)
{
string templatesDir = Internal.FullTemplatesDir;
return Path.Combine(templatesDir, "bcl", profile);
}
private static string DeterminePlatformBclDir(string platform)
{
string templatesDir = Internal.FullTemplatesDir;
string platformBclDir = Path.Combine(templatesDir, "bcl", platform);
if (!File.Exists(Path.Combine(platformBclDir, "mscorlib.dll")))
{
string profile = DeterminePlatformBclProfile(platform);
platformBclDir = Path.Combine(templatesDir, "bcl", profile);
if (!File.Exists(Path.Combine(platformBclDir, "mscorlib.dll")))
{
if (PlatformRequiresCustomBcl(platform))
throw new FileNotFoundException($"Missing BCL (Base Class Library) for platform: {platform}");
platformBclDir = typeof(object).Assembly.Location.GetBaseDir(); // Use the one we're running on
}
}
return platformBclDir;
}
/// <summary>
/// Determines whether the BCL bundled with the Godot editor can be used for the target platform,
/// or if it requires a custom BCL that must be distributed with the export templates.
/// </summary>
private static bool PlatformRequiresCustomBcl(string platform)
{
if (new[] { OS.Platforms.Android, OS.Platforms.iOS, OS.Platforms.HTML5 }.Contains(platform))
return true;
// The 'net_4_x' BCL is not compatible between Windows and the other platforms.
// We use the names 'net_4_x_win' and 'net_4_x' to differentiate between the two.
bool isWinOrUwp = new[]
{
OS.Platforms.Windows,
OS.Platforms.UWP
}.Contains(platform);
return OS.IsWindows ? !isWinOrUwp : isWinOrUwp;
}
private static string DeterminePlatformBclProfile(string platform)
{
switch (platform)
{
case OS.Platforms.Windows:
case OS.Platforms.UWP:
return "net_4_x_win";
case OS.Platforms.MacOS:
case OS.Platforms.LinuxBSD:
case OS.Platforms.Server:
case OS.Platforms.Haiku:
return "net_4_x";
case OS.Platforms.Android:
return "monodroid";
case OS.Platforms.iOS:
return "monotouch";
case OS.Platforms.HTML5:
return "wasm";
default:
throw new NotSupportedException($"Platform not supported: {platform}");
}
}
private static string DetermineDataDirNameForProject()
{
var appName = (string)ProjectSettings.GetSetting("application/config/name");
string appNameSafe = appName.ToSafeDirName();
return $"data_{appNameSafe}";
}
[MethodImpl(MethodImplOptions.InternalCall)]
private static extern void internal_GetExportedAssemblyDependencies(Godot.Collections.Dictionary<string, string> initialAssemblies,
string buildConfig, string customBclDir, Godot.Collections.Dictionary<string, string> dependencyAssemblies);
}
}
| |
// Copyright 2007-2010 The Apache Software Foundation.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
using Burrows.Tests.Framework;
namespace Burrows.Tests.Pipeline
{
using System;
using System.Diagnostics;
using Magnum.Extensions;
using Burrows.Pipeline;
using Burrows.Pipeline.Configuration;
using Burrows.Pipeline.Inspectors;
using Messages;
using NUnit.Framework;
using Rhino.Mocks;
using TestConsumers;
// NUnit tests covering consumer subscription/unsubscription behavior of the
// inbound message pipeline: plain consumers, selective consumers, correlated
// consumers, and unsubscribe tokens.
[TestFixture]
public class When_subscribing_a_consumer_to_the_pipeline
{
    [SetUp]
    public void Setup()
    {
        _pipeline = InboundPipelineConfigurator.CreateDefault(null);
    }

    // Pipeline under test; recreated before every test by Setup().
    private IInboundMessagePipeline _pipeline;

    [Test]
    public void A_bunch_of_mixed_subscriber_types_should_work()
    {
        var consumer = new IndiscriminantConsumer<PingMessage>();
        var consumerYes = new ParticularConsumer(true);
        var consumerNo = new ParticularConsumer(false);

        Stopwatch firstTime = Stopwatch.StartNew();
        var unsubscribeToken = _pipeline.ConnectInstance(consumer);
        firstTime.Stop();

        Stopwatch secondTime = Stopwatch.StartNew();
        // '+=' combines the unsubscribe delegates so one call removes all three.
        unsubscribeToken += _pipeline.ConnectInstance(consumerYes);
        secondTime.Stop();

        unsubscribeToken += _pipeline.ConnectInstance(consumerNo);

        Trace.WriteLine(string.Format("First time: {0}, Second Time: {1}", firstTime.Elapsed, secondTime.Elapsed));

        PipelineViewer.Trace(_pipeline);

        var message = new PingMessage();

        _pipeline.Dispatch(message);

        Assert.AreEqual(message, consumer.Consumed);
        Assert.AreEqual(message, consumerYes.Consumed);
        Assert.AreEqual(null, consumerNo.Consumed);

        unsubscribeToken();

        var nextMessage = new PingMessage();

        _pipeline.Dispatch(nextMessage);

        // After unsubscribing, the new message must not be delivered: each
        // consumer still holds the first message.
        Assert.AreEqual(message, consumer.Consumed);
        Assert.AreEqual(message, consumerYes.Consumed);
    }

    [Test]
    public void A_component_should_be_subscribed_to_the_pipeline()
    {
        var consumer = new TestMessageConsumer<PingMessage>();

        _pipeline.ConnectConsumer<TestMessageConsumer<PingMessage>>(() => consumer);

        PipelineViewer.Trace(_pipeline);

        var message = new PingMessage();

        _pipeline.ShouldHaveSubscriptionFor<PingMessage>();

        _pipeline.Dispatch(message);

        TestMessageConsumer<PingMessage>.AnyShouldHaveReceivedMessage(message, 1.Seconds());
    }

    [Test]
    public void A_selective_component_should_properly_handle_the_love()
    {
        ParticularConsumer consumer = MockRepository.GenerateMock<ParticularConsumer>();

        _pipeline.ConnectConsumer<ParticularConsumer>(() => consumer);

        PipelineViewer.Trace(_pipeline);

        var message = new PingMessage();

        // A selective consumer is asked to Accept before Consume is invoked.
        consumer.Expect(x => x.Accept(message)).Return(true);
        consumer.Expect(x => x.Consume(message));

        _pipeline.Dispatch(message);

        consumer.VerifyAllExpectations();
    }

    [Test]
    public void A_component_should_be_subscribed_to_multiple_messages_on_the_pipeline()
    {
        PingPongConsumer consumer = MockRepository.GenerateMock<PingPongConsumer>();

        _pipeline.ConnectConsumer<PingPongConsumer>(() => consumer);
        PipelineViewer.Trace(_pipeline);

        var ping = new PingMessage();
        consumer.Expect(x => x.Consume(ping));
        _pipeline.Dispatch(ping);

        var pong = new PongMessage(ping.CorrelationId);
        consumer.Expect(x => x.Consume(pong));
        _pipeline.Dispatch(pong);

        consumer.VerifyAllExpectations();
    }

    [Test]
    public void The_subscription_should_be_added()
    {
        var consumer = new IndiscriminantConsumer<PingMessage>();

        Stopwatch firstTime = Stopwatch.StartNew();
        _pipeline.ConnectInstance(consumer);
        firstTime.Stop();

        var message = new PingMessage();

        _pipeline.Dispatch(message);

        Assert.AreEqual(message, consumer.Consumed);
    }

    [Test]
    public void Correlated_subscriptions_should_make_happy_sounds()
    {
        var message = new PingMessage();

        // Only the consumer correlated to this message's id should receive it.
        var consumer = new TestCorrelatedConsumer<PingMessage, Guid>(message.CorrelationId);
        var negativeConsumer = new TestCorrelatedConsumer<PingMessage, Guid>(Guid.Empty);

        var token = _pipeline.ConnectInstance(consumer);
        token += _pipeline.ConnectInstance(negativeConsumer);

        PipelineViewer.Trace(_pipeline);

        _pipeline.Dispatch(message);

        consumer.ShouldHaveReceivedMessage(message, 0.Seconds());
        negativeConsumer.ShouldNotHaveReceivedMessage(message, 0.Seconds());

        token();

        PipelineViewer.Trace(_pipeline);
    }

    // Benchmark only; excluded from normal runs via [Explicit].
    [Test, Explicit]
    public void Correlated_subscription_benchmark()
    {
        var consumer = new TestCorrelatedConsumer<PingMessage, Guid>(Guid.NewGuid());
        UnsubscribeAction token = _pipeline.ConnectInstance(consumer);
        token();

        Stopwatch overall = Stopwatch.StartNew();

        for (int i = 0; i < 10000; i++)
        {
            token = _pipeline.ConnectInstance(consumer);
            token();
        }

        overall.Stop();

        Trace.WriteLine("Elapsed Time: " + overall.Elapsed);
    }

    [Test]
    public void The_subscription_should_be_added_for_selective_consumers()
    {
        var consumer = new ParticularConsumer(false);

        _pipeline.ConnectInstance(consumer);

        var message = new PingMessage();

        _pipeline.Dispatch(message);

        // The consumer declined the message (constructed with 'false').
        Assert.AreEqual(null, consumer.Consumed);
    }

    [Test]
    public void The_subscription_should_be_added_for_selective_consumers_that_are_interested()
    {
        var consumer = new ParticularConsumer(true);

        _pipeline.ConnectInstance(consumer);

        var message = new PingMessage();

        _pipeline.Dispatch(message);

        Assert.AreEqual(message, consumer.Consumed);
    }

    [Test]
    public void The_wrong_type_of_message_should_not_blow_up_the_test()
    {
        var consumer = new IndiscriminantConsumer<PingMessage>();

        _pipeline.ConnectInstance(consumer);

        var message = new PongMessage();

        _pipeline.Dispatch(message);

        Assert.AreEqual(null, consumer.Consumed);
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Testing;
using Test.Utilities;
using Xunit;
using VerifyCS = Test.Utilities.CSharpCodeFixVerifier<
Microsoft.CodeQuality.Analyzers.ApiDesignGuidelines.UsePropertiesWhereAppropriateAnalyzer,
Microsoft.CodeQuality.CSharp.Analyzers.ApiDesignGuidelines.CSharpUsePropertiesWhereAppropriateFixer>;
using VerifyVB = Test.Utilities.VisualBasicCodeFixVerifier<
Microsoft.CodeQuality.Analyzers.ApiDesignGuidelines.UsePropertiesWhereAppropriateAnalyzer,
Microsoft.CodeQuality.VisualBasic.Analyzers.ApiDesignGuidelines.BasicUsePropertiesWhereAppropriateFixer>;
namespace Microsoft.CodeQuality.Analyzers.ApiDesignGuidelines.UnitTests
{
// Tests for the CA1024 ("Use properties where appropriate") analyzer, covering
// C# and Visual Basic. Each test compiles the embedded source string and checks
// that diagnostics are (or are not) reported at the expected locations.
public class UsePropertiesWhereAppropriateTests
{
    [Fact]
    public async Task CSharp_CA1024NoDiagnosticCases()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
using System;
using System.Collections;

public class GenericType<T>
{
}

public class Base
{
    public virtual int GetSomething()
    {
        return 0;
    }

    public virtual int GetOverloadedMethod()
    {
        return 1;
    }

    public virtual int GetOverloadedMethod(int i)
    {
        return i;
    }
}

public class Class1 : Base
{
    private string fileName = """";

    // 1) Returns void
    public void GetWronglyNamedMethod()
    {
    }

    // 2) Not a method
    public string LogFile
    {
        get { return fileName; }
    }

    // 3) Returns an array type
    public int[] GetValues()
    {
        return null;
    }

    // 4) Has parameters
    public int[] GetMethodWithParameters(int p)
    {
        return new int[] { p };
    }

    // 5a) Name doesn't start with a 'Get'
    public int SomeMethod()
    {
        return 0;
    }

    // 5b) First compound word is not 'Get'
    public int GetterMethod()
    {
        return 0;
    }

    // 6) Generic method
    public object GetGenericMethod<T>()
    {
        return new GenericType<T>();
    }

    // 7) Override
    public override int GetSomething()
    {
        return 1;
    }

    // 8) Method with overloads
    public override int GetOverloadedMethod()
    {
        return 1;
    }

    public override int GetOverloadedMethod(int i)
    {
        return i;
    }

    // 9) Methods with special name
    public override int GetHashCode()
    {
        return 0;
    }

    public IEnumerator GetEnumerator()
    {
        return null;
    }

    public ref string GetPinnableReference() // If the method isn't ref-returning, there will be a diagnostic.
    {
        return ref fileName;
    }

    // 10) Method with invocation expressions
    public int GetSomethingWithInvocation()
    {
        Console.WriteLine(this);
        return 0;
    }

    // 11) Method named 'Get'
    public string Get()
    {
        return fileName;
    }

    // 12) Private method
    private string GetSomethingPrivate()
    {
        return fileName;
    }

    // 13) Internal method
    internal string GetSomethingInternal()
    {
        return fileName;
    }
}

public class Class2
{
    private string fileName = """";

    public ref readonly string GetPinnableReference() // If the method isn't ref-returning, there will be a diagnostic.
    {
        return ref fileName;
    }
}
");
    }

    [Fact]
    public async Task CSharp_CA1024DiagnosticCases()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
public class Class
{
    private string fileName = ""data.txt"";

    public string GetFileName()
    {
        return fileName;
    }

    public string Get_FileName2()
    {
        return fileName;
    }

    public string Get123()
    {
        return fileName;
    }

    protected string GetFileNameProtected()
    {
        return fileName;
    }

    public int GetPinnableReference() // Not a ref-return method.
    {
        return 0;
    }
}
",
        GetCA1024CSharpResultAt(6, 19, "GetFileName"),
        GetCA1024CSharpResultAt(11, 19, "Get_FileName2"),
        GetCA1024CSharpResultAt(16, 19, "Get123"),
        GetCA1024CSharpResultAt(21, 22, "GetFileNameProtected"),
        GetCA1024CSharpResultAt(26, 16, "GetPinnableReference"));
    }

    [Fact, WorkItem(1432, "https://github.com/dotnet/roslyn-analyzers/issues/1432")]
    public async Task CSharp_CA1024NoDiagnosticCases_Internal()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
public class Class
{
    private string fileName = ""data.txt"";

    internal string GetFileName()
    {
        return fileName;
    }

    private string Get_FileName2()
    {
        return fileName;
    }

    private class InnerClass
    {
        private string fileName = ""data.txt"";

        public string Get123()
        {
            return fileName;
        }
    }
}
");
    }

    [Fact]
    public async Task VisualBasic_CA1024NoDiagnosticCases()
    {
        await VerifyVB.VerifyAnalyzerAsync(@"
Imports System.Collections

Public Class Base
    Public Overridable Function GetSomething() As Integer
        Return 0
    End Function
End Class

Public Class Class1
    Inherits Base

    Private fileName As String

    ' 1) Returns void
    Public Sub GetWronglyNamedMethod()
    End Sub

    ' 2) Not a method
    Public ReadOnly Property LogFile() As String
        Get
            Return fileName
        End Get
    End Property

    ' 3) Returns an array type
    Public Function GetValues() As Integer()
        Return Nothing
    End Function

    ' 4) Has parameters
    Public Function GetMethodWithParameters(p As Integer) As Integer()
        Return New Integer() {p}
    End Function

    ' 5a) Name doesn't start with a 'Get'
    Public Function SomeMethod() As Integer
        Return 0
    End Function

    ' 5b) First compound word is not 'Get'
    Public Function GetterMethod() As Integer
        Return 0
    End Function

    ' 6) Generic method
    Public Function GetGenericMethod(Of T)() As Object
        Return New GenericType(Of T)()
    End Function

    ' 7) Override
    Public Overrides Function GetSomething() As Integer
        Return 1
    End Function

    ' 8) Method with overloads
    Public Function GetOverloadedMethod() As Integer
        Return 1
    End Function

    Public Function GetOverloadedMethod(i As Integer) As Integer
        Return i
    End Function

    ' 9) Methods with special name
    Public Overloads Function GetHashCode() As Integer
        Return 0
    End Function

    Public Function GetEnumerator() As IEnumerator
        Return Nothing
    End Function

    ' 10) Method with invocation expressions
    Public Function GetSomethingWithInvocation() As Integer
        System.Console.WriteLine(Me)
        Return 0
    End Function

    ' 11) Method named 'Get'
    Public Function [Get]() As String
        Return fileName
    End Function

    ' 12) Private method
    Private Function GetSomethingPrivate() As String
        Return fileName
    End Function

    ' 13) Friend method
    Friend Function GetSomethingInternal() As String
        Return fileName
    End Function
End Class

Public Class GenericType(Of T)
End Class
");
    }

    [Fact]
    public async Task CSharp_CA1024NoDiagnosticOnUnboundMethodCaller()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
using System;

public class class1
{
    public int GetSomethingWithUnboundInvocation()
    {
        Console.WriteLine(this);
        return 0;
    }
}
");
    }

    [Fact]
    public async Task VisualBasic_CA1024NoDiagnosticOnUnboundMethodCaller()
    {
        await VerifyVB.VerifyAnalyzerAsync(@"
Imports System

Public Class class1
    Public Function GetSomethingWithUnboundInvocation() As Integer
        Console.WriteLine(Me)
        Return 0
    End Function
End Class
");
    }

    [Fact]
    public async Task VisualBasic_CA1024DiagnosticCases()
    {
        await VerifyVB.VerifyAnalyzerAsync(@"
Public Class Class1
    Private fileName As String

    Public Function GetFileName() As String
        Return filename
    End Function

    Public Function Get_FileName2() As String
        Return filename
    End Function

    Public Function Get123() As String
        Return filename
    End Function

    Protected Function GetFileNameProtected() As String
        Return filename
    End Function
End Class
",
        GetCA1024BasicResultAt(5, 21, "GetFileName"),
        GetCA1024BasicResultAt(9, 21, "Get_FileName2"),
        GetCA1024BasicResultAt(13, 21, "Get123"),
        GetCA1024BasicResultAt(17, 24, "GetFileNameProtected"));
    }

    [Fact, WorkItem(1432, "https://github.com/dotnet/roslyn-analyzers/issues/1432")]
    public async Task VisualBasic_CA1024NoDiagnosticCases_Internal()
    {
        await VerifyVB.VerifyAnalyzerAsync(@"
Public Class Class1
    Private fileName As String

    Friend Function GetFileName() As String
        Return filename
    End Function

    Private Function Get_FileName2() As String
        Return filename
    End Function

    Private Class InnerClass
        Private fileName As String

        Public Function Get123() As String
            Return filename
        End Function
    End Class
End Class
");
    }

    [Fact, WorkItem(1551, "https://github.com/dotnet/roslyn-analyzers/issues/1551")]
    public async Task CA1024_ExplicitInterfaceImplementation_NoDiagnostic()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
public interface ISomething
{
    object GetContent();
}

public class Something : ISomething
{
    object ISomething.GetContent()
    {
        return null;
    }
}
");
    }

    [Fact, WorkItem(1551, "https://github.com/dotnet/roslyn-analyzers/issues/1551")]
    public async Task CA1024_ImplicitInterfaceImplementation_NoDiagnostic()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
public interface ISomething
{
    object GetContent();
}

public class Something : ISomething
{
    public object GetContent()
    {
        return null;
    }
}
");
    }

    [Fact, WorkItem(3877, "https://github.com/dotnet/roslyn-analyzers/issues/3877")]
    public async Task CA1024_ReturnsTask_NoDiagnostic()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
using System.Threading.Tasks;

public class Something
{
    public Task GetTask() => default(Task);
    public Task<int> GetGenericTask() => default(Task<int>);
    public ValueTask GetValueTask() => default(ValueTask);
    public ValueTask<int> GetGenericValueTask() => default(ValueTask<int>);
}
");
        await VerifyVB.VerifyAnalyzerAsync(@"
Imports System.Threading.Tasks

Public Class Something
    Public Function GetTask() As Task
        Return Nothing
    End Function

    Public Function GetGenericTask() As Task(Of Integer)
        Return Nothing
    End Function

    Public Function GetValueTask() As ValueTask
        Return Nothing
    End Function

    Public Function GetGenericValueTask() As ValueTask(Of Integer)
        Return Nothing
    End Function
End Class
");
    }

    [Fact, WorkItem(4623, "https://github.com/dotnet/roslyn-analyzers/issues/4623")]
    public async Task AwaiterPattern_INotifyCompletion_NoDiagnostic()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
using System;
using System.Runtime.CompilerServices;

public class DummyAwaiter : INotifyCompletion
{
    public object GetResult() => null;

    public bool IsCompleted => false;

    public void OnCompleted(Action continuation) => throw null;
}");
    }

    [Fact, WorkItem(4623, "https://github.com/dotnet/roslyn-analyzers/issues/4623")]
    public async Task AwaiterPattern_ICriticalNotifyCompletion_NoDiagnostic()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
using System;
using System.Runtime.CompilerServices;

public class DummyAwaiter : ICriticalNotifyCompletion
{
    public object GetResult() => null;

    public bool IsCompleted => false;

    public void OnCompleted(Action continuation) => throw null;
    public void UnsafeOnCompleted(Action continuation) => throw null;
}");
    }

    [Fact, WorkItem(4623, "https://github.com/dotnet/roslyn-analyzers/issues/4623")]
    public async Task AwaitablePattern_NoDiagnostic()
    {
        await VerifyCS.VerifyAnalyzerAsync(@"
using System;
using System.Runtime.CompilerServices;

public class DummyAwaitable
{
    public DummyAwaiter GetAwaiter() => new DummyAwaiter();
}

public class DummyAwaiter : INotifyCompletion
{
    public void GetResult()
    {
    }

    public bool IsCompleted => false;

    public void OnCompleted(Action continuation) => throw null;
}");
    }

    // Builds the expected CA1024 diagnostic for C# test sources at the given
    // (line, column) of the embedded source string.
    private static DiagnosticResult GetCA1024CSharpResultAt(int line, int column, string methodName)
#pragma warning disable RS0030 // Do not used banned APIs
        => VerifyCS.Diagnostic()
            .WithLocation(line, column)
#pragma warning restore RS0030 // Do not used banned APIs
            .WithArguments(methodName);

    // Builds the expected CA1024 diagnostic for Visual Basic test sources.
    private static DiagnosticResult GetCA1024BasicResultAt(int line, int column, string methodName)
#pragma warning disable RS0030 // Do not used banned APIs
        => VerifyVB.Diagnostic()
            .WithLocation(line, column)
#pragma warning restore RS0030 // Do not used banned APIs
            .WithArguments(methodName);
}
}
| |
// Copyright 2011 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Microsoft.Data.OData.Atom
{
#region Namespaces
using System;
using System.Diagnostics;
#endregion Namespaces
/// <summary>
/// Helper methods related to the ATOM Format
/// </summary>
internal static class AtomUtils
{
    /// <summary>The length of the media type for ATOM payloads (application/atom+xml).</summary>
    private const int MimeApplicationAtomXmlLength = 20;

    /// <summary>The length of the media type for ATOM payloads when terminated by a ';' (application/atom+xml;).</summary>
    private const int MimeApplicationAtomXmlLengthWithSemicolon = 21;

    /// <summary>The length of the media type for links referencing a single entry (application/atom+xml;type=entry).</summary>
    private const int MimeApplicationAtomXmlTypeEntryLength = 31;

    /// <summary>The length of the media type for links referencing a collection of entries (application/atom+xml;type=feed).</summary>
    private const int MimeApplicationAtomXmlTypeFeedLength = 30;

    /// <summary>Parameter string for the media type for links referencing a single entry.</summary>
    private const string MimeApplicationAtomXmlTypeEntryParameter = ";" + MimeConstants.MimeTypeParameterName + "=" + MimeConstants.MimeTypeParameterValueEntry;

    /// <summary>Parameter string for the media type for links referencing a collection of entries.</summary>
    private const string MimeApplicationAtomXmlTypeFeedParameter = ";" + MimeConstants.MimeTypeParameterName + "=" + MimeConstants.MimeTypeParameterValueFeed;

    /// <summary>
    /// Creates the value for the navigation property's link relation attribute.
    /// </summary>
    /// <param name="navigationLink">The link representing the navigation property for which the relation value is created.</param>
    /// <returns>The relation attribute value for the navigation property's link relation.</returns>
    internal static string ComputeODataNavigationLinkRelation(ODataNavigationLink navigationLink)
    {
        DebugUtils.CheckNoExternalCallers();
        Debug.Assert(navigationLink != null, "navigationLink != null");
        Debug.Assert(navigationLink.Name != null, "navigationLink.Name != null");

        // <odata-namespace>/<related-segment>/<property-name>
        return AtomConstants.ODataNamespace + "/" + AtomConstants.ODataNavigationPropertiesRelatedSegmentName + "/" + navigationLink.Name;
    }

    /// <summary>
    /// Creates the value for the navigation property's type attribute.
    /// </summary>
    /// <param name="navigationLink">The link representing the navigation property for which the type value is created.</param>
    /// <returns>The type attribute value for the navigation property.</returns>
    internal static string ComputeODataNavigationLinkType(ODataNavigationLink navigationLink)
    {
        DebugUtils.CheckNoExternalCallers();
        Debug.Assert(navigationLink != null, "navigationLink != null");
        Debug.Assert(navigationLink.IsCollection.HasValue, "navigationLink.IsCollection.HasValue");

        // "application/atom+xml;type=feed" for collections, "application/atom+xml;type=entry" for single entries.
        if (navigationLink.IsCollection.Value)
        {
            return MimeConstants.MimeApplicationAtomXmlTypeFeed;
        }

        return MimeConstants.MimeApplicationAtomXmlTypeEntry;
    }

    /// <summary>
    /// Creates the value for the navigation property's association link relation attribute.
    /// </summary>
    /// <param name="associationLink">The link representing the navigation property's association for which the relation value is created.</param>
    /// <returns>The relation attribute value for the navigation property's association link relation.</returns>
    internal static string ComputeODataAssociationLinkRelation(ODataAssociationLink associationLink)
    {
        DebugUtils.CheckNoExternalCallers();
        Debug.Assert(associationLink != null, "link != null");
        Debug.Assert(associationLink.Name != null, "link.Name != null");

        // <odata-namespace>/<association-related-segment>/<property-name>
        return AtomConstants.ODataNamespace + "/" + AtomConstants.ODataNavigationPropertiesAssociationRelatedSegmentName + "/" + associationLink.Name;
    }

    /// <summary>
    /// Creates the value for the stream property's link relation attribute.
    /// </summary>
    /// <param name="streamProperty">The stream property to create the relation for.</param>
    /// <param name="forEditLink">'true' if the relation is computed for an edit link; otherwise 'false'.</param>
    /// <returns>The relation attribute value for the stream property's link relation.</returns>
    internal static string ComputeStreamPropertyRelation(ODataProperty streamProperty, bool forEditLink)
    {
        DebugUtils.CheckNoExternalCallers();
        Debug.Assert(streamProperty != null, "streamProperty != null");
        Debug.Assert(!string.IsNullOrEmpty(streamProperty.Name), "!string.IsNullOrEmpty(streamProperty.Name)");

        // Edit links and read (media resource) links use different middle segments.
        string segmentName;
        if (forEditLink)
        {
            segmentName = AtomConstants.ODataStreamPropertyEditMediaSegmentName;
        }
        else
        {
            segmentName = AtomConstants.ODataStreamPropertyMediaResourceSegmentName;
        }

        return AtomConstants.ODataNamespace + "/" + segmentName + "/" + streamProperty.Name;
    }

    /// <summary>
    /// Unescape the <paramref name="relation"/> attribute value for ATOM link element.
    /// </summary>
    /// <param name="relation">ATOM link relation attribute value.</param>
    /// <returns>
    /// The unescaped relation attribute string if it's a valid URI.
    /// null if relation attribute is not a valid URI.
    /// </returns>
    internal static string UnescapeAtomLinkRelationAttribute(string relation)
    {
        DebugUtils.CheckNoExternalCallers();

        if (string.IsNullOrEmpty(relation))
        {
            return null;
        }

        // Only absolute URIs are accepted; relative values yield null.
        Uri parsedUri;
        if (!Uri.TryCreate(relation, UriKind.RelativeOrAbsolute, out parsedUri) || !parsedUri.IsAbsoluteUri)
        {
            return null;
        }

        return parsedUri.GetComponents(UriComponents.AbsoluteUri, UriFormat.SafeUnescaped);
    }

    /// <summary>
    /// Return name following the specified <paramref name="namespacePrefix"/> in the <paramref name="relation"/>.
    /// </summary>
    /// <param name="relation">ATOM link relation attribute value, unescaped parsed URI string.</param>
    /// <param name="namespacePrefix">Value which the rel attribute should start with.</param>
    /// <returns>
    /// The name if the <paramref name="relation"/> starts with the given <paramref name="namespacePrefix"/>.
    /// If the <paramref name="relation"/> value does not start with the <paramref name="namespacePrefix"/> a null value is returned.
    /// </returns>
    internal static string GetNameFromAtomLinkRelationAttribute(string relation, string namespacePrefix)
    {
        DebugUtils.CheckNoExternalCallers();
        Debug.Assert(
            relation == null || relation == UnescapeAtomLinkRelationAttribute(relation),
            "The relation attribute was not unescaped, it is necessary to first call the UnescapeAtomLinkRelationAttribute method.");
        Debug.Assert(namespacePrefix != null, "namespacePrefix != null");

        if (relation == null || !relation.StartsWith(namespacePrefix, StringComparison.Ordinal))
        {
            return null;
        }

        // Strip the prefix; the remainder is the (possibly empty) name.
        return relation.Substring(namespacePrefix.Length);
    }

    /// <summary>
    /// Determines whether the type of a navigation link has one of the expected standard values.
    /// </summary>
    /// <param name="navigationLinkType">The navigation link type to check.</param>
    /// <param name="hasEntryType">true if the navigation link type has a 'type' parameter with the value 'entry'; otherwise false.</param>
    /// <param name="hasFeedType">true if the navigation link type has a 'type' parameter with the value 'feed'; otherwise false.</param>
    /// <returns>true if the navigation link type is the expected application/atom+xml; otherwise false.</returns>
    internal static bool IsExactNavigationLinkTypeMatch(string navigationLinkType, out bool hasEntryType, out bool hasFeedType)
    {
        DebugUtils.CheckNoExternalCallers();
        Debug.Assert(!string.IsNullOrEmpty(navigationLinkType), "!string.IsNullOrEmpty(navigationLinkType)");

        hasEntryType = false;
        hasFeedType = false;

        // Ordinal comparison on purpose: this is the fast path and ordinal is cheaper than ignore-case.
        if (!navigationLinkType.StartsWith(MimeConstants.MimeApplicationAtomXml, StringComparison.Ordinal))
        {
            return false;
        }

        // The overall length distinguishes the four accepted shapes without further scanning.
        int typeLength = navigationLinkType.Length;

        if (typeLength == MimeApplicationAtomXmlLength)
        {
            // Exactly "application/atom+xml".
            return true;
        }

        if (typeLength == MimeApplicationAtomXmlLengthWithSemicolon)
        {
            // "application/atom+xml;" - a single trailing ';' is also accepted.
            return navigationLinkType[typeLength - 1] == ';';
        }

        if (typeLength == MimeApplicationAtomXmlTypeEntryLength)
        {
            // Compare the tail against ";type=entry" without allocating a substring.
            hasEntryType = string.Compare(
                MimeApplicationAtomXmlTypeEntryParameter,
                0,
                navigationLinkType,
                MimeApplicationAtomXmlLength,
                MimeApplicationAtomXmlTypeEntryParameter.Length,
                StringComparison.Ordinal) == 0;
            return hasEntryType;
        }

        if (typeLength == MimeApplicationAtomXmlTypeFeedLength)
        {
            // Compare the tail against ";type=feed" without allocating a substring.
            hasFeedType = string.Compare(
                MimeApplicationAtomXmlTypeFeedParameter,
                0,
                navigationLinkType,
                MimeApplicationAtomXmlLength,
                MimeApplicationAtomXmlTypeFeedParameter.Length,
                StringComparison.Ordinal) == 0;
            return hasFeedType;
        }

        return false;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.WebSites
{
using Azure;
using Management;
using Rest;
using Rest.Azure;
using Models;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for WebSiteManagementClient.
/// </summary>
public static partial class WebSiteManagementClientExtensions
{
// NOTE(review): This class is AutoRest-generated; hand edits will be lost on
// regeneration. Every synchronous method below blocks the calling thread via
// GetAwaiter().GetResult() on its async counterpart - prefer the *Async
// overloads from async code to avoid sync-over-async blocking.
/// <summary>
/// Gets the source controls available for Azure websites.
/// </summary>
/// <remarks>
/// Gets the source controls available for Azure websites.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static IPage<SourceControl> ListSourceControls(this IWebSiteManagementClient operations)
{
// Blocking wrapper over ListSourceControlsAsync (generated pattern).
return operations.ListSourceControlsAsync().GetAwaiter().GetResult();
}
/// <summary>
/// Gets the source controls available for Azure websites.
/// </summary>
/// <remarks>
/// Gets the source controls available for Azure websites.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<SourceControl>> ListSourceControlsAsync(this IWebSiteManagementClient operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListSourceControlsWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Updates source control token
/// </summary>
/// <remarks>
/// Updates source control token
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='sourceControlType'>
/// Type of source control
/// </param>
/// <param name='requestMessage'>
/// Source control token information
/// </param>
public static SourceControl UpdateSourceControl(this IWebSiteManagementClient operations, string sourceControlType, SourceControl requestMessage)
{
return operations.UpdateSourceControlAsync(sourceControlType, requestMessage).GetAwaiter().GetResult();
}
/// <summary>
/// Updates source control token
/// </summary>
/// <remarks>
/// Updates source control token
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='sourceControlType'>
/// Type of source control
/// </param>
/// <param name='requestMessage'>
/// Source control token information
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<SourceControl> UpdateSourceControlAsync(this IWebSiteManagementClient operations, string sourceControlType, SourceControl requestMessage, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.UpdateSourceControlWithHttpMessagesAsync(sourceControlType, requestMessage, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Check if a resource name is available.
/// </summary>
/// <remarks>
/// Check if a resource name is available.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='name'>
/// Resource name to verify.
/// </param>
/// <param name='type'>
/// Resource type used for verification. Possible values include: 'Site',
/// 'Slot', 'HostingEnvironment'
/// </param>
/// <param name='isFqdn'>
/// Is fully qualified domain name.
/// </param>
public static ResourceNameAvailability CheckNameAvailability(this IWebSiteManagementClient operations, string name, string type, bool? isFqdn = default(bool?))
{
return operations.CheckNameAvailabilityAsync(name, type, isFqdn).GetAwaiter().GetResult();
}
/// <summary>
/// Check if a resource name is available.
/// </summary>
/// <remarks>
/// Check if a resource name is available.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='name'>
/// Resource name to verify.
/// </param>
/// <param name='type'>
/// Resource type used for verification. Possible values include: 'Site',
/// 'Slot', 'HostingEnvironment'
/// </param>
/// <param name='isFqdn'>
/// Is fully qualified domain name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ResourceNameAvailability> CheckNameAvailabilityAsync(this IWebSiteManagementClient operations, string name, string type, bool? isFqdn = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CheckNameAvailabilityWithHttpMessagesAsync(name, type, isFqdn, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get a list of available geographical regions.
/// </summary>
/// <remarks>
/// Get a list of available geographical regions.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='sku'>
/// Name of SKU used to filter the regions. Possible values include: 'Free',
/// 'Shared', 'Basic', 'Standard', 'Premium', 'Dynamic'
/// </param>
public static IPage<GeoRegion> ListGeoRegions(this IWebSiteManagementClient operations, string sku = default(string))
{
return operations.ListGeoRegionsAsync(sku).GetAwaiter().GetResult();
}
/// <summary>
/// Get a list of available geographical regions.
/// </summary>
/// <remarks>
/// Get a list of available geographical regions.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='sku'>
/// Name of SKU used to filter the regions. Possible values include: 'Free',
/// 'Shared', 'Basic', 'Standard', 'Premium', 'Dynamic'
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<GeoRegion>> ListGeoRegionsAsync(this IWebSiteManagementClient operations, string sku = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListGeoRegionsWithHttpMessagesAsync(sku, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// List all premier add-on offers.
/// </summary>
/// <remarks>
/// List all premier add-on offers.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static IPage<PremierAddOnOffer> ListPremierAddOnOffers(this IWebSiteManagementClient operations)
{
return operations.ListPremierAddOnOffersAsync().GetAwaiter().GetResult();
}
/// <summary>
/// List all premier add-on offers.
/// </summary>
/// <remarks>
/// List all premier add-on offers.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<PremierAddOnOffer>> ListPremierAddOnOffersAsync(this IWebSiteManagementClient operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListPremierAddOnOffersWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get the publishing credentials for the subscription owner.
/// </summary>
/// <remarks>
/// Get the publishing credentials for the subscription owner.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static User GetPublishingCredentials(this IWebSiteManagementClient operations)
{
return operations.GetPublishingCredentialsAsync().GetAwaiter().GetResult();
}
/// <summary>
/// Get the publishing credentials for the subscription owner.
/// </summary>
/// <remarks>
/// Get the publishing credentials for the subscription owner.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<User> GetPublishingCredentialsAsync(this IWebSiteManagementClient operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetPublishingCredentialsWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Update the publishing credentials for the subscription owner.
/// </summary>
/// <remarks>
/// Update the publishing credentials for the subscription owner.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='requestMessage'>
/// A request message with the new publishing credentials.
/// </param>
public static User UpdatePublishingCredentials(this IWebSiteManagementClient operations, User requestMessage)
{
return operations.UpdatePublishingCredentialsAsync(requestMessage).GetAwaiter().GetResult();
}
/// <summary>
/// Update the publishing credentials for the subscription owner.
/// </summary>
/// <remarks>
/// Update the publishing credentials for the subscription owner.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='requestMessage'>
/// A request message with the new publishing credentials.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<User> UpdatePublishingCredentialsAsync(this IWebSiteManagementClient operations, User requestMessage, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.UpdatePublishingCredentialsWithHttpMessagesAsync(requestMessage, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// List all SKUs.
/// </summary>
/// <remarks>
/// List all SKUs.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static SkuInfos ListSkus(this IWebSiteManagementClient operations)
{
return operations.ListSkusAsync().GetAwaiter().GetResult();
}
/// <summary>
/// List all SKUs.
/// </summary>
/// <remarks>
/// List all SKUs.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<SkuInfos> ListSkusAsync(this IWebSiteManagementClient operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListSkusWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Move resources between resource groups.
/// </summary>
/// <remarks>
/// Move resources between resource groups.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// Name of the resource group to which the resource belongs.
/// </param>
/// <param name='moveResourceEnvelope'>
/// Object that represents the resource to move.
/// </param>
public static void Move(this IWebSiteManagementClient operations, string resourceGroupName, CsmMoveResourceEnvelope moveResourceEnvelope)
{
operations.MoveAsync(resourceGroupName, moveResourceEnvelope).GetAwaiter().GetResult();
}
/// <summary>
/// Move resources between resource groups.
/// </summary>
/// <remarks>
/// Move resources between resource groups.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// Name of the resource group to which the resource belongs.
/// </param>
/// <param name='moveResourceEnvelope'>
/// Object that represents the resource to move.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task MoveAsync(this IWebSiteManagementClient operations, string resourceGroupName, CsmMoveResourceEnvelope moveResourceEnvelope, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.MoveWithHttpMessagesAsync(resourceGroupName, moveResourceEnvelope, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Validate if a resource can be created.
/// </summary>
/// <remarks>
/// Validate if a resource can be created.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// Name of the resource group to which the resource belongs.
/// </param>
/// <param name='validateRequest'>
/// Request with the resources to validate.
/// </param>
public static ValidateResponse Validate(this IWebSiteManagementClient operations, string resourceGroupName, ValidateRequest validateRequest)
{
return operations.ValidateAsync(resourceGroupName, validateRequest).GetAwaiter().GetResult();
}
/// <summary>
/// Validate if a resource can be created.
/// </summary>
/// <remarks>
/// Validate if a resource can be created.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// Name of the resource group to which the resource belongs.
/// </param>
/// <param name='validateRequest'>
/// Request with the resources to validate.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ValidateResponse> ValidateAsync(this IWebSiteManagementClient operations, string resourceGroupName, ValidateRequest validateRequest, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ValidateWithHttpMessagesAsync(resourceGroupName, validateRequest, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Validate whether a resource can be moved.
/// </summary>
/// <remarks>
/// Validate whether a resource can be moved.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// Name of the resource group to which the resource belongs.
/// </param>
/// <param name='moveResourceEnvelope'>
/// Object that represents the resource to move.
/// </param>
public static void ValidateMove(this IWebSiteManagementClient operations, string resourceGroupName, CsmMoveResourceEnvelope moveResourceEnvelope)
{
operations.ValidateMoveAsync(resourceGroupName, moveResourceEnvelope).GetAwaiter().GetResult();
}
/// <summary>
/// Validate whether a resource can be moved.
/// </summary>
/// <remarks>
/// Validate whether a resource can be moved.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// Name of the resource group to which the resource belongs.
/// </param>
/// <param name='moveResourceEnvelope'>
/// Object that represents the resource to move.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task ValidateMoveAsync(this IWebSiteManagementClient operations, string resourceGroupName, CsmMoveResourceEnvelope moveResourceEnvelope, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.ValidateMoveWithHttpMessagesAsync(resourceGroupName, moveResourceEnvelope, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the source controls available for Azure websites.
/// </summary>
/// <remarks>
/// Gets the source controls available for Azure websites.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<SourceControl> ListSourceControlsNext(this IWebSiteManagementClient operations, string nextPageLink)
{
return operations.ListSourceControlsNextAsync(nextPageLink).GetAwaiter().GetResult();
}
/// <summary>
/// Gets the source controls available for Azure websites.
/// </summary>
/// <remarks>
/// Gets the source controls available for Azure websites.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<SourceControl>> ListSourceControlsNextAsync(this IWebSiteManagementClient operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListSourceControlsNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get a list of available geographical regions.
/// </summary>
/// <remarks>
/// Get a list of available geographical regions.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<GeoRegion> ListGeoRegionsNext(this IWebSiteManagementClient operations, string nextPageLink)
{
return operations.ListGeoRegionsNextAsync(nextPageLink).GetAwaiter().GetResult();
}
/// <summary>
/// Get a list of available geographical regions.
/// </summary>
/// <remarks>
/// Get a list of available geographical regions.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<GeoRegion>> ListGeoRegionsNextAsync(this IWebSiteManagementClient operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListGeoRegionsNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// List all premier add-on offers.
/// </summary>
/// <remarks>
/// List all premier add-on offers.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<PremierAddOnOffer> ListPremierAddOnOffersNext(this IWebSiteManagementClient operations, string nextPageLink)
{
return operations.ListPremierAddOnOffersNextAsync(nextPageLink).GetAwaiter().GetResult();
}
/// <summary>
/// List all premier add-on offers.
/// </summary>
/// <remarks>
/// List all premier add-on offers.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<PremierAddOnOffer>> ListPremierAddOnOffersNextAsync(this IWebSiteManagementClient operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListPremierAddOnOffersNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Runtime.CompilerServices;
using EditorBrowsableState = System.ComponentModel.EditorBrowsableState;
using EditorBrowsableAttribute = System.ComponentModel.EditorBrowsableAttribute;
#pragma warning disable 0809 //warning CS0809: Obsolete member 'Span<T>.Equals(object)' overrides non-obsolete member 'object.Equals(object)'
namespace System
{
/// <summary>
/// ReadOnlySpan represents a contiguous region of arbitrary memory. Unlike arrays, it can point to either managed
/// or native memory, or to memory allocated on the stack. It is type- and memory-safe.
/// </summary>
public struct ReadOnlySpan<T>
{
/// <summary>
/// Creates a new read-only span over the entirety of the target array.
/// </summary>
/// <param name="array">The target array.</param>
/// <exception cref="System.ArgumentNullException">Thrown when <paramref name="array"/> is a null
/// reference (Nothing in Visual Basic).</exception>
/// <exception cref="System.ArrayTypeMismatchException">Thrown when <paramref name="array"/> is covariant and array's type is not exactly T[].</exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ReadOnlySpan(T[] array)
{
    if (array == null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
    }

    // Span covers the whole array: pinnable reference plus the per-type
    // adjustment from the object header to the first element.
    _pinnable = Unsafe.As<Pinnable<T>>(array);
    _byteOffset = SpanHelpers.PerTypeValues<T>.ArrayAdjustment;
    _length = array.Length;
}
/// <summary>
/// Creates a new read-only span over the portion of the target array beginning
/// at 'start' index and covering the remainder of the array.
/// </summary>
/// <param name="array">The target array.</param>
/// <param name="start">The index at which to begin the read-only span.</param>
/// <exception cref="System.ArgumentNullException">Thrown when <paramref name="array"/> is a null
/// reference (Nothing in Visual Basic).</exception>
/// <exception cref="System.ArrayTypeMismatchException">Thrown when <paramref name="array"/> is covariant and array's type is not exactly T[].</exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified <paramref name="start"/> is not in the range (<0 or >=Length).
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ReadOnlySpan(T[] array, int start)
{
    if (array == null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
    }

    // Unsigned comparison rejects both negative 'start' and start > Length in one test.
    int availableLength = array.Length;
    if ((uint)start > (uint)availableLength)
    {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.start);
    }

    _length = availableLength - start;
    _pinnable = Unsafe.As<Pinnable<T>>(array);
    _byteOffset = SpanHelpers.PerTypeValues<T>.ArrayAdjustment.Add<T>(start);
}
/// <summary>
/// Creates a new read-only span over the portion of the target array beginning
/// at 'start' index and ending at 'end' index (exclusive).
/// </summary>
/// <param name="array">The target array.</param>
/// <param name="start">The index at which to begin the read-only span.</param>
/// <param name="length">The number of items in the read-only span.</param>
/// <exception cref="System.ArgumentNullException">Thrown when <paramref name="array"/> is a null
/// reference (Nothing in Visual Basic).</exception>
/// <exception cref="System.ArrayTypeMismatchException">Thrown when <paramref name="array"/> is covariant and array's type is not exactly T[].</exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified <paramref name="start"/> or end index is not in the range (<0 or >=Length).
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ReadOnlySpan(T[] array, int start, int length)
{
if (array == null)
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
// Unsigned comparisons validate 0 <= start <= array.Length and
// 0 <= length <= array.Length - start in two branch-free checks.
if ((uint)start > (uint)array.Length || (uint)length > (uint)(array.Length - start))
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.start);
_length = length;
_pinnable = Unsafe.As<Pinnable<T>>(array);
_byteOffset = SpanHelpers.PerTypeValues<T>.ArrayAdjustment.Add<T>(start);
}
/// <summary>
/// Creates a new read-only span over the target unmanaged buffer. Clearly this
/// is quite dangerous, because we are creating arbitrarily typed T's
/// out of a void*-typed block of memory. And the length is not checked.
/// But if this creation is correct, then all subsequent uses are correct.
/// </summary>
/// <param name="pointer">An unmanaged pointer to memory.</param>
/// <param name="length">The number of <typeparamref name="T"/> elements the memory contains.</param>
/// <exception cref="System.ArgumentException">
/// Thrown when <typeparamref name="T"/> is reference type or contains pointers and hence cannot be stored in unmanaged memory.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified <paramref name="length"/> is negative.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe ReadOnlySpan(void* pointer, int length)
{
// Reference types (or structs containing references) must not live in
// unmanaged memory: the GC could not track those references.
if (SpanHelpers.IsReferenceOrContainsReferences<T>())
ThrowHelper.ThrowArgumentException_InvalidTypeWithPointersNotSupported(typeof(T));
if (length < 0)
// Fix: report the parameter that is actually invalid here. The previous
// code threw ArgumentOutOfRangeException naming 'start', but this
// constructor has no 'start' parameter — the offending argument is 'length'.
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.length);
_length = length;
// Native-pointer spans have no pinnable object; _byteOffset holds the raw pointer.
_pinnable = null;
_byteOffset = new IntPtr(pointer);
}
/// <summary>
/// Create a new read-only span over a portion of a regular managed object. This can be useful
/// if part of a managed object represents a "fixed array." This is dangerous because neither the
/// <paramref name="length"/> is checked, nor <paramref name="obj"/> being null, nor the fact that
/// "rawPointer" actually lies within <paramref name="obj"/>.
/// </summary>
/// <param name="obj">The managed object that contains the data to span over.</param>
/// <param name="objectData">A reference to data within that object.</param>
/// <param name="length">The number of <typeparamref name="T"/> elements the memory contains.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static ReadOnlySpan<T> DangerousCreate(object obj, ref T objectData, int length)
{
// No validation by design (see summary): obj may be null, objectData may lie
// outside obj, and length is trusted. Callers take full responsibility.
Pinnable<T> pinnable = Unsafe.As<Pinnable<T>>(obj);
// Byte distance from the object's first data field to the requested element.
IntPtr byteOffset = Unsafe.ByteOffset<T>(ref pinnable.Data, ref objectData);
return new ReadOnlySpan<T>(pinnable, byteOffset, length);
}
// Constructor for internal use only. Trusts the caller: no argument validation
// beyond the debug-only length assertion.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal ReadOnlySpan(Pinnable<T> pinnable, IntPtr byteOffset, int length)
{
Debug.Assert(length >= 0);
_length = length;
_pinnable = pinnable;
_byteOffset = byteOffset;
}
/// <summary>
/// The number of items in the read-only span.
/// </summary>
public int Length => _length;
/// <summary>
/// Returns true if Length is 0.
/// </summary>
public bool IsEmpty => _length == 0;
/// <summary>
/// Returns the specified element of the read-only span.
/// </summary>
/// <param name="index">The zero-based index of the element to retrieve.</param>
/// <returns>The element at the given index.</returns>
/// <exception cref="System.IndexOutOfRangeException">
/// Thrown when index less than 0 or index greater than or equal to Length
/// </exception>
public T this[int index]
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get
{
// Unsigned comparison folds the negative-index and too-large checks into one.
if ((uint)index >= ((uint)_length))
ThrowHelper.ThrowIndexOutOfRangeException();
// _pinnable == null means the span wraps a native pointer (see field comments);
// otherwise the element lives inside a managed object at _byteOffset.
if (_pinnable == null)
unsafe { return Unsafe.Add<T>(ref Unsafe.AsRef<T>(_byteOffset.ToPointer()), index); }
else
return Unsafe.Add<T>(ref Unsafe.AddByteOffset<T>(ref _pinnable.Data, _byteOffset), index);
}
}
/// <summary>
/// Copies the contents of this read-only span into the destination span. If the
/// source and destination overlap, this method behaves as if the original values
/// were copied to a temporary location before the destination is overwritten.
/// </summary>
/// <param name="destination">The span to copy items into.</param>
/// <exception cref="System.ArgumentException">
/// Thrown when the destination Span is shorter than the source Span.
/// </exception>
public void CopyTo(Span<T> destination)
{
// Delegate to the non-throwing variant; surface failure as ArgumentException.
bool copied = TryCopyTo(destination);
if (copied)
{
return;
}
ThrowHelper.ThrowArgumentException_DestinationTooShort();
}
/// <summary>
/// Copies the contents of this read-only span into the destination span. If the source
/// and destination overlap, this method behaves as if the original values were copied to
/// a temporary location before the destination is overwritten.
///
/// <returns>If the destination span is shorter than the source span, this method
/// returns false and no data is written to the destination.</returns>
/// </summary>
/// <param name="destination">The span to copy items into.</param>
public bool TryCopyTo(Span<T> destination)
{
int length = _length;
int destLength = destination.Length;
// Empty source: trivially successful, nothing to write.
if ((uint)length == 0)
return true;
if ((uint)length > (uint)destLength)
return false;
ref T src = ref DangerousGetPinnableReference();
ref T dst = ref destination.DangerousGetPinnableReference();
// Overlap handling is delegated to SpanHelpers.CopyTo, per the contract above.
SpanHelpers.CopyTo<T>(ref dst, destLength, ref src, length);
return true;
}
/// <summary>
/// Returns true if left and right point at the same memory and have the same length. Note that
/// this does *not* check to see if the *contents* are equal.
/// </summary>
public static bool operator ==(ReadOnlySpan<T> left, ReadOnlySpan<T> right)
{
// Identity comparison: same length and same starting reference.
return left._length == right._length && Unsafe.AreSame<T>(ref left.DangerousGetPinnableReference(), ref right.DangerousGetPinnableReference());
}
/// <summary>
/// Returns false if left and right point at the same memory and have the same length. Note that
/// this does *not* check to see if the *contents* are equal.
/// </summary>
public static bool operator !=(ReadOnlySpan<T> left, ReadOnlySpan<T> right) => !(left == right);
/// <summary>
/// This method is not supported as spans cannot be boxed. To compare two spans, use operator==.
/// <exception cref="System.NotSupportedException">
/// Always thrown by this method.
/// </exception>
/// </summary>
[Obsolete("Equals() on Span will always throw an exception. Use == instead.")]
[EditorBrowsable(EditorBrowsableState.Never)]
public override bool Equals(object obj)
{
throw new NotSupportedException(SR.CannotCallEqualsOnSpan);
}
/// <summary>
/// This method is not supported as spans cannot be boxed.
/// <exception cref="System.NotSupportedException">
/// Always thrown by this method.
/// </exception>
/// </summary>
[Obsolete("GetHashCode() on Span will always throw an exception.")]
[EditorBrowsable(EditorBrowsableState.Never)]
public override int GetHashCode()
{
throw new NotSupportedException(SR.CannotCallGetHashCodeOnSpan);
}
/// <summary>
/// Defines an implicit conversion of an array to a <see cref="ReadOnlySpan{T}"/>
/// </summary>
public static implicit operator ReadOnlySpan<T>(T[] array) => new ReadOnlySpan<T>(array);
/// <summary>
/// Defines an implicit conversion of a <see cref="ArraySegment{T}"/> to a <see cref="ReadOnlySpan{T}"/>
/// </summary>
public static implicit operator ReadOnlySpan<T>(ArraySegment<T> arraySegment) => new ReadOnlySpan<T>(arraySegment.Array, arraySegment.Offset, arraySegment.Count);
/// <summary>
/// Forms a slice out of the given read-only span, beginning at 'start'.
/// </summary>
/// <param name="start">The index at which to begin this slice.</param>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified <paramref name="start"/> index is not in range (<0 or >=Length).
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ReadOnlySpan<T> Slice(int start)
{
// start == _length is allowed and yields an empty span.
if ((uint)start > (uint)_length)
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.start);
IntPtr newOffset = _byteOffset.Add<T>(start);
int length = _length - start;
return new ReadOnlySpan<T>(_pinnable, newOffset, length);
}
/// <summary>
/// Forms a slice out of the given read-only span, beginning at 'start', of given length
/// </summary>
/// <param name="start">The index at which to begin this slice.</param>
/// <param name="length">The desired length for the slice.</param>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified <paramref name="start"/> or end index is not in range (<0 or >=Length).
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ReadOnlySpan<T> Slice(int start, int length)
{
// Same unsigned-comparison pattern as the array constructors:
// validates 0 <= start <= _length and 0 <= length <= _length - start.
if ((uint)start > (uint)_length || (uint)length > (uint)(_length - start))
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.start);
IntPtr newOffset = _byteOffset.Add<T>(start);
return new ReadOnlySpan<T>(_pinnable, newOffset, length);
}
/// <summary>
/// Copies the contents of this read-only span into a new array. This heap
/// allocates, so should generally be avoided, however it is sometimes
/// necessary to bridge the gap with APIs written in terms of arrays.
/// </summary>
public T[] ToArray()
{
// Reuse the shared empty array rather than allocating a zero-length one.
if (_length == 0)
return SpanHelpers.PerTypeValues<T>.EmptyArray;
T[] result = new T[_length];
CopyTo(result);
return result;
}
/// <summary>
/// Returns a 0-length read-only span whose base is the null pointer.
/// </summary>
public static ReadOnlySpan<T> Empty => default(ReadOnlySpan<T>);
/// <summary>
/// Returns a reference to the 0th element of the Span. If the Span is empty, returns a reference to the location where the 0th element
/// would have been stored. Such a reference can be used for pinning but must never be dereferenced.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ref T DangerousGetPinnableReference()
{
// Same two representations as the indexer: raw pointer vs. managed object + offset.
if (_pinnable == null)
unsafe { return ref Unsafe.AsRef<T>(_byteOffset.ToPointer()); }
else
return ref Unsafe.AddByteOffset<T>(ref _pinnable.Data, _byteOffset);
}
// These expose the internal representation for Span-related apis use only.
internal Pinnable<T> Pinnable => _pinnable;
internal IntPtr ByteOffset => _byteOffset;
//
// If the Span was constructed from an object,
//
// _pinnable = that object (unsafe-casted to a Pinnable<T>)
// _byteOffset = offset in bytes from "ref _pinnable.Data" to "ref span[0]"
//
// If the Span was constructed from a native pointer,
//
// _pinnable = null
// _byteOffset = the pointer
//
private readonly Pinnable<T> _pinnable;
private readonly IntPtr _byteOffset;
private readonly int _length; // element count, always >= 0
}
}
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
* ***************************************************************************/
using System;
using System.ComponentModel.Design;
using System.Runtime.InteropServices;
using Microsoft.VisualStudioTools.Navigation;
using Microsoft.VisualStudioTools.Project;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.ComponentModelHost;
using Microsoft.VisualStudio.Editor;
using Microsoft.VisualStudio.OLE.Interop;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudio.Text.Editor;
using Microsoft.VisualStudio.TextManager.Interop;
namespace Microsoft.VisualStudioTools {
/// <summary>
/// Base Visual Studio package for language implementations. Registers the
/// language's LibraryManager as a service and registers itself with the OLE
/// component manager to receive idle-time callbacks (FDoIdle).
/// </summary>
public abstract class CommonPackage : Package, IOleComponent {
private uint _componentID;                // id returned by FRegisterComponent; 0 = not registered
private LibraryManager _libraryManager;   // created lazily by CreateService
private IOleComponentManager _compMgr;    // cached in Initialize, used during idle processing
#region Language-specific abstracts
// Service type under which the library manager is registered; may return null
// to opt out of library manager support.
public abstract Type GetLibraryManagerType();
internal abstract LibraryManager CreateLibraryManager(CommonPackage package);
public abstract bool IsRecognizedFile(string filename);
// TODO:
// public abstract bool TryGetStartupFileAndDirectory(out string filename, out string dir);
#endregion
static CommonPackage() {
// ensure the UI thread is initialized
UIThread.Instance.Run(() => { });
}
internal CommonPackage() {
IServiceContainer container = this as IServiceContainer;
ServiceCreatorCallback callback = new ServiceCreatorCallback(CreateService);
//container.AddService(GetLanguageServiceType(), callback, true);
// NOTE(review): virtual call from a constructor — GetLibraryManagerType() runs
// before any derived constructor body; confirm overrides don't rely on
// derived-class state.
var libraryManager = GetLibraryManagerType();
if (libraryManager != null)
{
container.AddService(GetLibraryManagerType(), callback, true);
}
}
protected override void Dispose(bool disposing) {
try {
// Undo the component registration performed in Initialize().
if (_componentID != 0) {
IOleComponentManager mgr = GetService(typeof(SOleComponentManager)) as IOleComponentManager;
if (mgr != null) {
mgr.FRevokeComponent(_componentID);
}
_componentID = 0;
}
if (null != _libraryManager) {
_libraryManager.Dispose();
_libraryManager = null;
}
} finally {
base.Dispose(disposing);
}
}
// Service factory: lazily creates the library manager the first time the
// service container asks for its type; returns null for unknown services.
private object CreateService(IServiceContainer container, Type serviceType) {
if (GetLibraryManagerType() == serviceType) {
return _libraryManager = CreateLibraryManager(this);
}
return null;
}
/// <summary>
/// Gets the current IWpfTextView that is the active document.
/// </summary>
/// <returns>The active editor view, or null if it cannot be determined.</returns>
public static IWpfTextView GetActiveTextView() {
var monitorSelection = (IVsMonitorSelection)Package.GetGlobalService(typeof(SVsShellMonitorSelection));
if (monitorSelection == null) {
return null;
}
object curDocument;
if (ErrorHandler.Failed(monitorSelection.GetCurrentElementValue((uint)VSConstants.VSSELELEMID.SEID_DocumentFrame, out curDocument))) {
// TODO: Report error
return null;
}
IVsWindowFrame frame = curDocument as IVsWindowFrame;
if (frame == null) {
// TODO: Report error
return null;
}
object docView = null;
if (ErrorHandler.Failed(frame.GetProperty((int)__VSFPROPID.VSFPROPID_DocView, out docView))) {
// TODO: Report error
return null;
}
if (docView is IVsCodeWindow) {
IVsTextView textView;
if (ErrorHandler.Failed(((IVsCodeWindow)docView).GetPrimaryView(out textView))) {
// TODO: Report error
return null;
}
// Adapt the legacy IVsTextView to the new editor's IWpfTextView.
var model = (IComponentModel)GetGlobalService(typeof(SComponentModel));
var adapterFactory = model.GetService<IVsEditorAdaptersFactoryService>();
var wpfTextView = adapterFactory.GetWpfTextView(textView);
return wpfTextView;
}
return null;
}
// MEF component model for the current VS instance.
public static IComponentModel ComponentModel {
get {
return (IComponentModel)GetGlobalService(typeof(SComponentModel));
}
}
// Returns the solution's startup project as a CommonProjectNode, or null.
internal static CommonProjectNode GetStartupProject() {
var buildMgr = (IVsSolutionBuildManager)Package.GetGlobalService(typeof(IVsSolutionBuildManager));
IVsHierarchy hierarchy;
if (buildMgr != null && ErrorHandler.Succeeded(buildMgr.get_StartupProject(out hierarchy)) && hierarchy != null) {
return hierarchy.GetProject().GetCommonProject();
}
return null;
}
protected override void Initialize() {
// Register for idle-time callbacks so FDoIdle gets invoked.
var componentManager = _compMgr = (IOleComponentManager)GetService(typeof(SOleComponentManager));
OLECRINFO[] crinfo = new OLECRINFO[1];
crinfo[0].cbSize = (uint)Marshal.SizeOf(typeof(OLECRINFO));
crinfo[0].grfcrf = (uint)_OLECRF.olecrfNeedIdleTime;
crinfo[0].grfcadvf = (uint)_OLECADVF.olecadvfModal | (uint)_OLECADVF.olecadvfRedrawOff | (uint)_OLECADVF.olecadvfWarningsOff;
crinfo[0].uIdleTimeInterval = 0;
ErrorHandler.ThrowOnFailure(componentManager.FRegisterComponent(this, crinfo, out _componentID));
base.Initialize();
}
#region IOleComponent Members
public int FContinueMessageLoop(uint uReason, IntPtr pvLoopData, MSG[] pMsgPeeked) {
return 1;
}
// Called by the shell during idle time; forwards to the library manager and
// any OnIdle subscribers.
public int FDoIdle(uint grfidlef) {
if (null != _libraryManager) {
_libraryManager.OnIdle(_compMgr);
}
// Snapshot the delegate to avoid a race with unsubscription.
var onIdle = OnIdle;
if (onIdle != null) {
onIdle(this, new ComponentManagerEventArgs(_compMgr));
}
return 0;
}
internal event EventHandler<ComponentManagerEventArgs> OnIdle;
public int FPreTranslateMessage(MSG[] pMsg) {
return 0;
}
public int FQueryTerminate(int fPromptUser) {
return 1;
}
public int FReserved1(uint dwReserved, uint message, IntPtr wParam, IntPtr lParam) {
return 1;
}
public IntPtr HwndGetWindow(uint dwWhich, uint dwReserved) {
return IntPtr.Zero;
}
public void OnActivationChange(IOleComponent pic, int fSameComponent, OLECRINFO[] pcrinfo, int fHostIsActivating, OLECHOSTINFO[] pchostinfo, uint dwReserved) {
}
public void OnAppActivate(int fActive, uint dwOtherThreadID) {
}
public void OnEnterState(uint uStateID, int fEnter) {
}
public void OnLoseActivation() {
}
public void Terminate() {
}
#endregion
}
/// <summary>
/// Event arguments carrying the IOleComponentManager associated with an idle event.
/// </summary>
class ComponentManagerEventArgs : EventArgs {
private readonly IOleComponentManager _componentManager;

public ComponentManagerEventArgs(IOleComponentManager compMgr) {
_componentManager = compMgr;
}

/// <summary>
/// The component manager supplied when this event was raised.
/// </summary>
public IOleComponentManager ComponentManager {
get { return _componentManager; }
}
}
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using System;
using System.Collections.Generic;
using System.Linq;
#if UNITY_EDITOR
using UnityEditor;
#endif
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.SceneSystem
{
/// <summary>
/// Configuration profile settings for setting up scene system.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality/Toolkit/Profiles/Mixed Reality Scene System Profile", fileName = "MixedRealitySceneSystemProfile", order = (int)CreateProfileMenuItemIndices.SceneSystem)]
[MixedRealityServiceProfile(typeof(IMixedRealitySceneSystem))]
[HelpURL("https://docs.microsoft.com/windows/mixed-reality/mrtk-unity/features/scene-system/scene-system-getting-started")]
public class MixedRealitySceneSystemProfile : BaseMixedRealityProfile
{
/// <summary>
/// Internal class used to cache lighting settings associated with a scene.
/// Instances are serialized into the profile asset (see cachedLightingSettings).
/// </summary>
[Serializable]
internal sealed class CachedLightingSettings
{
public string SceneName;
public RuntimeRenderSettings RenderSettings;
public RuntimeLightingSettings LightingSettings;
public RuntimeSunlightSettings SunlightSettings;
// Local time the cache entry was written (set in SetLightingCache).
public DateTime TimeStamp;
}
// True only when the feature is enabled AND a manager scene is actually assigned.
public bool UseManagerScene { get { return useManagerScene && !managerScene.IsEmpty; } }
// True only when the feature is enabled AND at least one lighting scene exists.
public bool UseLightingScene { get { return useLightingScene && lightingScenes.Count > 0; } }
public SceneInfo ManagerScene => managerScene;
// NOTE(review): indexes lightingScenes directly — callers should check
// UseLightingScene first; confirm no caller reaches this with an empty list.
public SceneInfo DefaultLightingScene { get { return lightingScenes[defaultLightingSceneIndex]; } }
public IEnumerable<SceneInfo> LightingScenes { get { return lightingScenes; } }
public IEnumerable<SceneInfo> ContentScenes { get { return contentScenes; } }
public IEnumerable<string> ContentTags { get { return contentTags; } }
public int NumLightingScenes { get { return lightingScenes.Count; } }
public int NumContentScenes { get { return contentScenes.Count; } }
// Lazily unwraps the serialized SystemType entries into plain System.Type values.
public IEnumerable<Type> PermittedLightingSceneComponentTypes
{
get
{
foreach (SystemType systemType in permittedLightingSceneComponentTypes) { yield return systemType.Type; }
}
}
#if UNITY_EDITOR
public bool EditorManageBuildSettings => editorManageBuildSettings;
public bool EditorManageLoadedScenes => editorManageLoadedScenes;
public bool EditorEnforceSceneOrder => editorEnforceSceneOrder;
public bool EditorEnforceLightingSceneTypes => editorEnforceLightingSceneTypes;
public bool EditorLightingCacheOutOfDate => editorLightingCacheOutOfDate;
public bool EditorLightingCacheUpdateRequested { get; set; }
#endif
[SerializeField]
private bool useManagerScene = true;
[SerializeField]
private SceneInfo managerScene = default(SceneInfo);
[SerializeField]
private bool useLightingScene = true;
[SerializeField]
private int defaultLightingSceneIndex = 0;
[SerializeField]
private List<SceneInfo> lightingScenes = new List<SceneInfo>();
[SerializeField]
private List<SceneInfo> contentScenes = new List<SceneInfo>();
// Component types allowed in lighting scenes when editorEnforceLightingSceneTypes is on.
[SerializeField]
private SystemType[] permittedLightingSceneComponentTypes = new SystemType[] {
new SystemType(typeof(Transform)),
new SystemType(typeof(GameObject)),
new SystemType(typeof(Light)),
new SystemType(typeof(ReflectionProbe)),
new SystemType(typeof(LightProbeGroup)),
new SystemType(typeof(LightProbeProxyVolume)),
};
// These will be hidden by the default inspector.
[SerializeField]
[Tooltip("Cached content tags found in your content scenes")]
private List<string> contentTags = new List<string>();
// These will be hidden by the default inspector.
[SerializeField]
[Tooltip("Cached lighting settings from your lighting scenes")]
private List<CachedLightingSettings> cachedLightingSettings = new List<CachedLightingSettings>();
#region editor settings
// CS414 is disabled during this section because these properties are being used in the editor
// scenario - when this file is build for player scenario, these serialized fields still exist
// but are not used.
#pragma warning disable 414
[SerializeField]
[Tooltip("If true, the service will update your build settings automatically, ensuring that all manager, lighting and content scenes are added. Disable this if you want total control over build settings.")]
private bool editorManageBuildSettings = true;
[SerializeField]
[Tooltip("If true, the service will ensure manager scene is displayed first in scene hierarchy, followed by lighting and then content. Disable this if you want total control over scene hierarchy.")]
private bool editorEnforceSceneOrder = true;
[SerializeField]
[Tooltip("If true, service will ensure that manager scenes and lighting scenes are always loaded. Disable if you want total control over which scenes are loaded in editor.")]
private bool editorManageLoadedScenes = true;
[SerializeField]
[Tooltip("If true, service will ensure that only lighting-related components are allowed in lighting scenes. Disable if you want total control over the content of lighting scenes.")]
private bool editorEnforceLightingSceneTypes = true;
[SerializeField]
private bool editorLightingCacheOutOfDate = false;
#pragma warning restore 414
#endregion
// Looks up a lighting scene by name and, when found, its cached lighting /
// render / sunlight settings. Returns true only when a cache entry exists.
public bool GetLightingSceneSettings(
string lightingSceneName,
out SceneInfo lightingScene,
out RuntimeLightingSettings lightingSettings,
out RuntimeRenderSettings renderSettings,
out RuntimeSunlightSettings sunlightSettings)
{
lightingSettings = default(RuntimeLightingSettings);
renderSettings = default(RuntimeRenderSettings);
sunlightSettings = default(RuntimeSunlightSettings);
lightingScene = SceneInfo.Empty;
for (int i = 0; i < lightingScenes.Count; i++)
{
if (lightingScenes[i].Name == lightingSceneName)
{
lightingScene = lightingScenes[i];
break;
}
}
if (lightingScene.IsEmpty)
{ // If we didn't find a lighting scene, don't bother looking for a cache
return false;
}
bool foundCache = false;
for (int i = 0; i < cachedLightingSettings.Count; i++)
{
CachedLightingSettings cache = cachedLightingSettings[i];
if (cache.SceneName == lightingSceneName)
{
lightingSettings = cache.LightingSettings;
renderSettings = cache.RenderSettings;
sunlightSettings = cache.SunlightSettings;
foundCache = true;
break;
}
}
return foundCache;
}
// Yields the names of all content scenes whose Tag matches exactly.
public IEnumerable<string> GetContentSceneNamesByTag(string tag)
{
foreach (SceneInfo contentScene in contentScenes)
{
if (contentScene.Tag == tag)
yield return contentScene.Name;
}
}
#if UNITY_EDITOR
#region validation
// Editor-only sanity pass: de-duplicates scene lists, keeps manager/lighting/
// content lists disjoint, rebuilds the tag cache, and saves the asset if anything changed.
private void OnValidate()
{
if (Application.isPlaying || EditorApplication.isCompiling)
{
return;
}
bool saveChanges = false;
// Remove any duplicate entries from our lighting and content scene lists
saveChanges |= (RemoveOrClearDuplicateEntries(lightingScenes) || RemoveOrClearDuplicateEntries(contentScenes));
// Ensure that manager scenes are not contained in content or lighting scenes
saveChanges |= (UseManagerScene && (RemoveScene(lightingScenes, managerScene) || RemoveScene(contentScenes, managerScene)));
// Ensure that content scenes are not included in lighting scenes
saveChanges |= (UseLightingScene && RemoveScenes(lightingScenes, contentScenes));
// Build our content tags
List<string> newContentTags = new List<string>();
foreach (SceneInfo contentScene in contentScenes)
{
if (string.IsNullOrEmpty(contentScene.Tag))
{
continue;
}
if (contentScene.Tag == "Untagged")
{
continue;
}
if (!newContentTags.Contains(contentScene.Tag))
{
newContentTags.Add(contentScene.Tag);
}
}
// See if our content tags have changed
if (!contentTags.SequenceEqual(newContentTags))
{
contentTags = newContentTags;
saveChanges = true;
}
defaultLightingSceneIndex = Mathf.Clamp(defaultLightingSceneIndex, 0, lightingScenes.Count - 1);
if (saveChanges)
{ // We need to tie this directly to lighting scenes somehow
editorLightingCacheOutOfDate = true;
// Make sure our changes are saved to disk!
AssetDatabase.Refresh();
EditorUtility.SetDirty(this);
AssetDatabase.SaveAssets();
}
}
/// <summary>
/// Clears cached lighting settings.
/// Used to ensure we don't end up with 'dead' cached data.
/// </summary>
public void ClearLightingCache()
{
cachedLightingSettings.Clear();
}
/// <summary>
/// Used to update the cached lighting / render settings.
/// Since extracting them is complex and requires scene loading, I thought it best to avoid having the profile do it.
/// </summary>
/// <param name="sceneInfo">The scene these settings belong to.</param>
public void SetLightingCache(SceneInfo sceneInfo, RuntimeLightingSettings lightingSettings, RuntimeRenderSettings renderSettings, RuntimeSunlightSettings sunlightSettings)
{
CachedLightingSettings settings = new CachedLightingSettings();
settings.SceneName = sceneInfo.Name;
settings.LightingSettings = lightingSettings;
settings.RenderSettings = renderSettings;
settings.SunlightSettings = sunlightSettings;
// Local time; used only for relative ordering via GetEarliestLightingCacheTimestamp.
settings.TimeStamp = DateTime.Now;
cachedLightingSettings.Add(settings);
editorLightingCacheOutOfDate = false;
}
/// <summary>
/// Sets editorLightingCacheOutOfDate to true and saves the profile.
/// </summary>
public void SetLightingCacheDirty()
{
editorLightingCacheOutOfDate = true;
AssetDatabase.Refresh();
EditorUtility.SetDirty(this);
AssetDatabase.SaveAssets();
}
// Returns the oldest cache timestamp, or DateTime.MinValue when the cache is empty.
public DateTime GetEarliestLightingCacheTimestamp()
{
if (cachedLightingSettings.Count <= 0)
{
return DateTime.MinValue;
}
DateTime earliestTimeStamp = DateTime.MaxValue;
foreach (CachedLightingSettings settings in cachedLightingSettings)
{
if (settings.TimeStamp < earliestTimeStamp)
{
earliestTimeStamp = settings.TimeStamp;
}
}
return earliestTimeStamp;
}
// Blanks out (sets to SceneInfo.Empty) every entry of sceneList whose Asset
// appears in scenesToRemove. Returns true if anything changed.
private static bool RemoveScenes(List<SceneInfo> sceneList, List<SceneInfo> scenesToRemove)
{
bool changed = false;
for (int i = sceneList.Count - 1; i >= 0; i--)
{
if (sceneList[i].IsEmpty)
{
continue;
}
foreach (SceneInfo sceneToRemove in scenesToRemove)
{
if (sceneToRemove.IsEmpty)
{
continue;
}
if (sceneList[i].Asset == sceneToRemove.Asset)
{
Debug.LogWarning("Removing scene " + sceneToRemove.Name + " from scene list.");
sceneList[i] = SceneInfo.Empty;
changed = true;
break;
}
}
}
return changed;
}
// Blanks out every entry of sceneList that refers to sceneToRemove's asset.
private static bool RemoveScene(List<SceneInfo> sceneList, SceneInfo sceneToRemove)
{
bool changed = false;
for (int i = sceneList.Count - 1; i >= 0; i--)
{
if (sceneList[i].IsEmpty)
{
continue;
}
if (sceneList[i].Asset == sceneToRemove.Asset)
{
Debug.LogWarning("Removing manager scene " + sceneToRemove.Name + " from scene list.");
sceneList[i] = SceneInfo.Empty;
changed = true;
}
}
return changed;
}
// Replaces duplicate entries (same Path) with SceneInfo.Empty instead of removing
// them, so the serialized array length is preserved.
private static bool RemoveOrClearDuplicateEntries(List<SceneInfo> sceneList)
{
HashSet<string> scenePaths = new HashSet<string>();
bool changed = false;
for (int i = 0; i < sceneList.Count; i++)
{
if (sceneList[i].IsEmpty)
{
continue;
}
if (!scenePaths.Add(sceneList[i].Path))
{ // If we encounter a duplicate, just set it to empty.
// This will ensure we don't get duplicates when we add new elements to the array.
Debug.LogWarning("Found duplicate entry in scene list at " + i + ", removing");
sceneList[i] = SceneInfo.Empty;
changed = true;
}
}
return changed;
}
#endregion
#endif
}
}
| |
// <copyright file="SocketWrapper.cs" company="WebDriver Committers">
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.IO;
using System.Net;
using System.Net.Security;
using System.Net.Sockets;
using System.Runtime.Remoting.Messaging;
using System.Security.Authentication;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
namespace OpenQA.Selenium.Safari.Internal
{
/// <summary>
/// Provides a wrapper around a <see cref="System.Net.Sockets.Socket"/>.
/// </summary>
public class SocketWrapper : ISocket
{
// The wrapped socket supplied at construction.
private readonly Socket underlyingSocket;
// Presumably guards double-dispose in Dispose(bool), defined later in this class — TODO confirm.
private bool disposed;
// Stream over the socket; created in the constructor for connected sockets and
// replaced with an SslStream by Authenticate().
private Stream stream;
/// <summary>
/// Initializes a new instance of the <see cref="SocketWrapper"/> class.
/// </summary>
/// <param name="socket">The <see cref="Socket"/> to wrap.</param>
public SocketWrapper(Socket socket)
{
this.underlyingSocket = socket;
// A stream only exists for already-connected sockets; for listener sockets
// the stream remains null until a connection is established elsewhere.
if (this.underlyingSocket.Connected)
{
this.stream = new NetworkStream(this.underlyingSocket);
}
}
/// <summary>
/// Event raised when a connection is accepted by the socket.
/// </summary>
public event EventHandler<AcceptEventArgs> Accepted;
/// <summary>
/// Event raised when an error occurs accepting a connection.
/// </summary>
public event EventHandler<ErrorEventArgs> AcceptError;
/// <summary>
/// Event raised when data is sent through the socket.
/// </summary>
public event EventHandler Sent;
/// <summary>
/// Event raised when there is an error sending data.
/// </summary>
public event EventHandler<ErrorEventArgs> SendError;
/// <summary>
/// Event raised when data is received by the socket.
/// </summary>
public event EventHandler<ReceivedEventArgs> Received;
/// <summary>
/// Event raised when there is an error receiving data.
/// </summary>
public event EventHandler<ErrorEventArgs> ReceiveError;
/// <summary>
/// Event raised when authentication is completed over the socket.
/// </summary>
public event EventHandler Authenticated;
/// <summary>
/// Event raised when there is an error authenticating over the socket.
/// </summary>
public event EventHandler<ErrorEventArgs> AuthenticateError;
/// <summary>
/// Gets a value indicating whether the socket is connected.
/// </summary>
public bool Connected
{
get { return this.underlyingSocket.Connected; }
}
/// <summary>
/// Gets the remote IP address of the socket connection, or null if the remote
/// end point is not an <see cref="IPEndPoint"/>.
/// </summary>
public string RemoteIPAddress
{
get
{
var endpoint = this.underlyingSocket.RemoteEndPoint as IPEndPoint;
return endpoint != null ? endpoint.Address.ToString() : null;
}
}
/// <summary>
/// Gets a stream for reading and writing data. May be null when the wrapped
/// socket was not connected at construction time (see constructor).
/// </summary>
public Stream Stream
{
get { return this.stream; }
}
/// <summary>
/// Accepts a connection for the socket.
/// </summary>
public void Accept()
{
    // Asynchronous: completion is reported through the Accepted event and
    // failure through AcceptError (see OnClientConnect).
    this.underlyingSocket.BeginAccept(this.OnClientConnect, null);
}
/// <summary>
/// Sends data over the socket.
/// </summary>
/// <param name="buffer">The data to be sent.</param>
public void Send(byte[] buffer)
{
    // Asynchronous write; completion raises Sent, failure raises SendError.
    // NOTE(review): assumes this.stream is non-null, i.e. the socket was
    // connected when wrapped (see constructor) — confirm callers guarantee this.
    this.stream.BeginWrite(buffer, 0, buffer.Length, this.OnDataSend, null);
}
/// <summary>
/// Receives data over the socket.
/// </summary>
/// <param name="buffer">The buffer into which the data will be read.</param>
/// <param name="offset">The offset into the buffer at which the data will be read.</param>
public void Receive(byte[] buffer, int offset)
{
    // Read at most the space remaining after offset. The previous code
    // passed buffer.Length as the count, so for any non-zero offset
    // offset + count exceeded the buffer and BeginRead threw
    // ArgumentOutOfRangeException before any data was read.
    // Completion raises Received, failure raises ReceiveError (see OnDataReceive).
    this.stream.BeginRead(buffer, offset, buffer.Length - offset, this.OnDataReceive, buffer);
}
/// <summary>
/// Authenticates over the socket.
/// </summary>
/// <param name="certificate">An <see cref="X509Certificate2"/> that specifies authentication information.</param>
public void Authenticate(X509Certificate2 certificate)
{
    // Wrap the current stream in an SslStream (leaveInnerStreamOpen: false)
    // and swap it in, so subsequent Send/Receive go through TLS.
    var ssl = new SslStream(this.stream, false);
    this.stream = ssl;
    // SECURITY NOTE(review): SslProtocols.Tls pins the handshake to TLS 1.0,
    // which is deprecated; SslProtocols.None (OS default) is the recommended
    // value — verify client compatibility before changing.
    ssl.BeginAuthenticateAsServer(certificate, false, SslProtocols.Tls, false, this.OnAuthenticate, ssl);
}
/// <summary>
/// Closes the socket connection.
/// </summary>
public void Close()
{
    // Close the stream first (it wraps the socket), then the socket itself.
    Stream activeStream = this.stream;
    if (activeStream != null)
    {
        activeStream.Close();
    }

    Socket socket = this.underlyingSocket;
    if (socket != null)
    {
        socket.Close();
    }
}
/// <summary>
/// Binds the socket to a local end point.
/// </summary>
/// <param name="localEndPoint">The local end point to which to bind the socket.</param>
public void Bind(EndPoint localEndPoint)
{
    // Thin pass-through to Socket.Bind; exceptions surface to the caller.
    this.underlyingSocket.Bind(localEndPoint);
}
/// <summary>
/// Starts listening to data received over the socket.
/// </summary>
/// <param name="backlog">The number of pending connections to process.</param>
public void Listen(int backlog)
{
    // Thin pass-through to Socket.Listen; the socket must be bound first.
    this.underlyingSocket.Listen(backlog);
}
/// <summary>
/// Releases all resources used by the <see cref="SocketWrapper"/>.
/// </summary>
public void Dispose()
{
    // Standard dispose pattern: delegate to Dispose(bool), then suppress
    // finalization (defensive — no finalizer is visible in this class).
    this.Dispose(true);
    GC.SuppressFinalize(this);
}
/// <summary>
/// Raises the Accepted event.
/// </summary>
/// <param name="e">An <see cref="AcceptEventArgs"/> that contains the event data.</param>
protected void OnAccepted(AcceptEventArgs e)
{
    // Snapshot the delegate so a concurrent unsubscribe between the null
    // check and the invocation cannot cause a NullReferenceException
    // (these raisers run on async completion callbacks).
    EventHandler<AcceptEventArgs> handler = this.Accepted;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Raises the AcceptError event.
/// </summary>
/// <param name="e">An <see cref="ErrorEventArgs"/> that contains the event data.</param>
protected void OnAcceptError(ErrorEventArgs e)
{
    // Snapshot the delegate to avoid the check-then-invoke race with a
    // concurrent unsubscribe.
    EventHandler<ErrorEventArgs> handler = this.AcceptError;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Raises the Sent event.
/// </summary>
/// <param name="e">An <see cref="EventArgs"/> that contains the event data.</param>
protected void OnSent(EventArgs e)
{
    // Snapshot the delegate to avoid the check-then-invoke race with a
    // concurrent unsubscribe.
    EventHandler handler = this.Sent;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Raises the SendError event.
/// </summary>
/// <param name="e">An <see cref="ErrorEventArgs"/> that contains the event data.</param>
protected void OnSendError(ErrorEventArgs e)
{
    // Snapshot the delegate to avoid the check-then-invoke race with a
    // concurrent unsubscribe.
    EventHandler<ErrorEventArgs> handler = this.SendError;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Raises the Received event.
/// </summary>
/// <param name="e">A <see cref="ReceivedEventArgs"/> that contains the event data.</param>
protected void OnReceived(ReceivedEventArgs e)
{
    // Snapshot the delegate to avoid the check-then-invoke race with a
    // concurrent unsubscribe.
    EventHandler<ReceivedEventArgs> handler = this.Received;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Raises the ReceiveError event.
/// </summary>
/// <param name="e">An <see cref="ErrorEventArgs"/> that contains the event data.</param>
protected void OnReceiveError(ErrorEventArgs e)
{
    // Snapshot the delegate to avoid the check-then-invoke race with a
    // concurrent unsubscribe.
    EventHandler<ErrorEventArgs> handler = this.ReceiveError;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Raises the Authenticated event.
/// </summary>
/// <param name="e">An <see cref="EventArgs"/> that contains the event data.</param>
protected void OnAuthenticated(EventArgs e)
{
    // Snapshot the delegate to avoid the check-then-invoke race with a
    // concurrent unsubscribe.
    EventHandler handler = this.Authenticated;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Raises the AuthenticateError event.
/// </summary>
/// <param name="e">An <see cref="ErrorEventArgs"/> that contains the event data.</param>
protected void OnAuthenticateError(ErrorEventArgs e)
{
    // Snapshot the delegate to avoid the check-then-invoke race with a
    // concurrent unsubscribe.
    EventHandler<ErrorEventArgs> handler = this.AuthenticateError;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Releases the unmanaged resources used by the <see cref="SocketWrapper"/> and optionally
/// releases the managed resources.
/// </summary>
/// <param name="disposing"><see langword="true"/> to release managed and resources;
/// <see langword="false"/> to only release unmanaged resources.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing && !this.disposed)
    {
        // The flag is also checked by OnClientConnect so a still-pending
        // BeginAccept callback does not touch the disposed socket.
        this.disposed = true;
        if (this.stream != null)
        {
            this.stream.Dispose();
        }
        if (this.underlyingSocket != null)
        {
            this.underlyingSocket.Close();
        }
    }
}
private void OnAuthenticate(IAsyncResult asyncResult)
{
    // Completion callback for BeginAuthenticateAsServer (state is the SslStream).
    SslStream sslStream = asyncResult.AsyncState as SslStream;
    try
    {
        // EndAuthenticateAsServer throws on a failed handshake. Previously it
        // sat outside the try, so the failure escaped as an unhandled exception
        // on the callback thread instead of raising AuthenticateError.
        sslStream.EndAuthenticateAsServer(asyncResult);
        this.OnAuthenticated(EventArgs.Empty);
    }
    catch (Exception ex)
    {
        // Covers handshake failures and exceptions thrown by Authenticated handlers.
        this.OnAuthenticateError(new ErrorEventArgs(ex));
    }
}
private void OnDataReceive(IAsyncResult asyncResult)
{
    // Completion callback for BeginRead started in Receive().
    try
    {
        int bytesRead = this.stream.EndRead(asyncResult);
        // The caller's buffer was threaded through as the async state (see Receive).
        byte[] buffer = asyncResult.AsyncState as byte[];
        this.OnReceived(new ReceivedEventArgs(bytesRead, buffer));
    }
    catch (Exception ex)
    {
        // Covers both read failures and exceptions thrown by Received handlers.
        this.OnReceiveError(new ErrorEventArgs(ex));
    }
}
private void OnClientConnect(IAsyncResult asyncResult)
{
    // This logic is mildly convoluted, and requires some explanation.
    // The socket can be closed (disposed) while there is still a
    // pending accept. This will cause an exception if we try to reference
    // the disposed socket. To mitigate this, we set a flag when Dispose()
    // is called so that we don't try to access a disposed socket.
    if (!this.disposed)
    {
        try
        {
            // EndAccept can throw (e.g. the listener was closed after the
            // disposed check above). Keep it inside the try so failures are
            // reported through AcceptError rather than escaping unhandled
            // on the callback thread.
            SocketWrapper actual = new SocketWrapper(this.underlyingSocket.EndAccept(asyncResult));
            this.OnAccepted(new AcceptEventArgs(actual));
        }
        catch (Exception ex)
        {
            this.OnAcceptError(new ErrorEventArgs(ex));
        }
    }
}
private void OnDataSend(IAsyncResult asyncResult)
{
    // Completion callback for BeginWrite started in Send().
    try
    {
        // EndWrite throws if the write failed. Previously it sat outside the
        // try, so a failed send escaped as an unhandled exception instead of
        // raising SendError (OnDataReceive already wraps EndRead this way).
        this.stream.EndWrite(asyncResult);
        this.OnSent(EventArgs.Empty);
    }
    catch (Exception ex)
    {
        // Covers both write failures and exceptions thrown by Sent handlers.
        this.OnSendError(new ErrorEventArgs(ex));
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// ConcurrentBag.cs
//
// An unordered collection that allows duplicates and that provides add and get operations.
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;
namespace System.Collections.Concurrent
{
/// <summary>
/// Represents a thread-safe, unordered collection of objects.
/// </summary>
/// <typeparam name="T">Specifies the type of elements in the bag.</typeparam>
/// <remarks>
/// <para>
/// Bags are useful for storing objects when ordering doesn't matter, and unlike sets, bags support
/// duplicates. <see cref="ConcurrentBag{T}"/> is a thread-safe bag implementation, optimized for
/// scenarios where the same thread will be both producing and consuming data stored in the bag.
/// </para>
/// <para>
/// <see cref="ConcurrentBag{T}"/> accepts null reference (Nothing in Visual Basic) as a valid
/// value for reference types.
/// </para>
/// <para>
/// All public and protected members of <see cref="ConcurrentBag{T}"/> are thread-safe and may be used
/// concurrently from multiple threads.
/// </para>
/// </remarks>
[DebuggerTypeProxy(typeof(IProducerConsumerCollectionDebugView<>))]
[DebuggerDisplay("Count = {Count}")]
public class ConcurrentBag<T> : IProducerConsumerCollection<T>, IReadOnlyCollection<T>
{
// ThreadLocalList object that contains the data per thread
private ThreadLocal<ThreadLocalList> _locals;
// This head and tail pointers points to the first and last local lists, to allow enumeration on the thread locals objects
private volatile ThreadLocalList _headList, _tailList;
// A flag used to tell the operations thread that it must synchronize the operation, this flag is set/unset within
// GlobalListsLock lock
private bool _needSync;
/// <summary>
/// Initializes a new instance of the <see cref="ConcurrentBag{T}"/>
/// class.
/// </summary>
public ConcurrentBag()
{
Initialize(null);
}
/// <summary>
/// Initializes a new instance of the <see cref="ConcurrentBag{T}"/>
/// class that contains elements copied from the specified collection.
/// </summary>
/// <param name="collection">The collection whose elements are copied to the new <see
/// cref="ConcurrentBag{T}"/>.</param>
/// <exception cref="ArgumentNullException"><paramref name="collection"/> is a null reference
/// (Nothing in Visual Basic).</exception>
public ConcurrentBag(IEnumerable<T> collection)
{
if (collection == null)
{
throw new ArgumentNullException("collection", SR.ConcurrentBag_Ctor_ArgumentNullException);
}
Initialize(collection);
}
/// <summary>
/// Local helper function to initialize a new bag object
/// </summary>
/// <param name="collection">An enumeration containing items with which to initialize this bag.</param>
private void Initialize(IEnumerable<T> collection)
{
    _locals = new ThreadLocal<ThreadLocalList>();
    // Copy the collection to the bag
    if (collection != null)
    {
        // Constructor-time population: the bag is not yet visible to other
        // threads, so items are appended directly (updateCount: false —
        // no steal-count reconciliation is needed yet).
        ThreadLocalList list = GetThreadList(true);
        foreach (T item in collection)
        {
            list.Add(item, false);
        }
    }
}
/// <summary>
/// Adds an object to the <see cref="ConcurrentBag{T}"/>.
/// </summary>
/// <param name="item">The object to be added to the
/// <see cref="ConcurrentBag{T}"/>. The value can be a null reference
/// (Nothing in Visual Basic) for reference types.</param>
public void Add(T item)
{
    // Get the local list for that thread, create a new list if this thread doesn't exist
    //(first time to call add)
    ThreadLocalList list = GetThreadList(true);
    // Delegates the synchronization decision (lock vs. lock-free) to AddInternal.
    AddInternal(list, item);
}
/// <summary>
/// Adds an item to the given thread-local list, taking the list's lock only
/// when the list is small enough for a stealing thread to conflict, or when
/// a freeze of the whole bag has been requested.
/// </summary>
/// <param name="list">The thread-local list owned by the calling thread.</param>
/// <param name="item">The item to add.</param>
private void AddInternal(ThreadLocalList list, T item)
{
    bool lockTaken = false;
    try
    {
        // Publish "Add in progress" so stealers/freezers spin-wait on us
        // (see CanSteal / WaitAllOperations).
#pragma warning disable 0420
        Interlocked.Exchange(ref list._currentOp, (int)ListOperation.Add);
#pragma warning restore 0420
        //Synchronization cases:
        // if the list count is less than two to avoid conflict with any stealing thread
        // if _needSync is set, this means there is a thread that needs to freeze the bag
        if (list.Count < 2 || _needSync)
        {
            // reset it back to zero to avoid deadlock with stealing thread
            list._currentOp = (int)ListOperation.None;
            Monitor.Enter(list, ref lockTaken);
        }
        list.Add(item, lockTaken);
    }
    finally
    {
        list._currentOp = (int)ListOperation.None;
        if (lockTaken)
        {
            Monitor.Exit(list);
        }
    }
}
/// <summary>
/// Attempts to add an object to the <see cref="ConcurrentBag{T}"/>.
/// </summary>
/// <param name="item">The object to be added to the
/// <see cref="ConcurrentBag{T}"/>. The value can be a null reference
/// (Nothing in Visual Basic) for reference types.</param>
/// <returns>Always returns true</returns>
bool IProducerConsumerCollection<T>.TryAdd(T item)
{
Add(item);
return true;
}
/// <summary>
/// Attempts to remove and return an object from the <see
/// cref="ConcurrentBag{T}"/>.
/// </summary>
/// <param name="result">When this method returns, <paramref name="result"/> contains the object
/// removed from the <see cref="ConcurrentBag{T}"/> or the default value
/// of <typeparamref name="T"/> if the operation failed.</param>
/// <returns>true if an object was removed successfully; otherwise, false.</returns>
public bool TryTake(out T result)
{
return TryTakeOrPeek(out result, true);
}
/// <summary>
/// Attempts to return an object from the <see cref="ConcurrentBag{T}"/>
/// without removing it.
/// </summary>
/// <param name="result">When this method returns, <paramref name="result"/> contains an object from
/// the <see cref="ConcurrentBag{T}"/> or the default value of
/// <typeparamref name="T"/> if the operation failed.</param>
/// <returns>true if and object was returned successfully; otherwise, false.</returns>
public bool TryPeek(out T result)
{
return TryTakeOrPeek(out result, false);
}
/// <summary>
/// Local helper function to Take or Peek an item from the bag
/// </summary>
/// <param name="result">To receive the item retrieved from the bag</param>
/// <param name="take">True means Take operation, false means Peek operation</param>
/// <returns>True if succeeded, false otherwise</returns>
private bool TryTakeOrPeek(out T result, bool take)
{
    // Get the local list for this thread; null if the thread doesn't exist
    // (this thread never added before).
    ThreadLocalList list = GetThreadList(false);
    if (list == null || list.Count == 0)
    {
        // Nothing local — try to steal from another thread's list.
        return Steal(out result, take);
    }
    bool lockTaken = false;
    try
    {
        if (take) // Take operation
        {
            // Publish "Take in progress" so stealers/freezers spin-wait on us.
#pragma warning disable 0420
            Interlocked.Exchange(ref list._currentOp, (int)ListOperation.Take);
#pragma warning restore 0420
            //Synchronization cases:
            // if the list count is less than or equal two to avoid conflict with any stealing thread
            // if _needSync is set, this means there is a thread that needs to freeze the bag
            if (list.Count <= 2 || _needSync)
            {
                // reset it back to zero to avoid deadlock with stealing thread
                list._currentOp = (int)ListOperation.None;
                Monitor.Enter(list, ref lockTaken);
                // Double check the count and steal if it became empty
                if (list.Count == 0)
                {
                    // Release the lock before stealing
                    if (lockTaken)
                    {
                        try { }
                        finally
                        {
                            lockTaken = false; // reset lockTaken to avoid calling Monitor.Exit again in the finally block
                            Monitor.Exit(list);
                        }
                    }
                    return Steal(out result, true);
                }
            }
            list.Remove(out result);
        }
        else
        {
            if (!list.Peek(out result))
            {
                return Steal(out result, false);
            }
        }
    }
    finally
    {
        list._currentOp = (int)ListOperation.None;
        if (lockTaken)
        {
            Monitor.Exit(list);
        }
    }
    return true;
}
/// <summary>
/// Local helper function to retrieve a thread local list by a thread object
/// </summary>
/// <param name="forceCreate">Create a new list if the thread does not exist</param>
/// <returns>The local list object, or null if none exists and <paramref name="forceCreate"/> is false</returns>
private ThreadLocalList GetThreadList(bool forceCreate)
{
    ThreadLocalList list = _locals.Value;
    if (list != null)
    {
        return list;
    }
    else if (forceCreate)
    {
        // Acquire the lock to update the _tailList pointer
        lock (GlobalListsLock)
        {
            if (_headList == null)
            {
                list = new ThreadLocalList(Environment.CurrentManagedThreadId);
                _headList = list;
                _tailList = list;
            }
            else
            {
                // Prefer recycling a list abandoned by a dead thread whose
                // managed id was reused (see GetUnownedList) over growing
                // the linked list of per-thread lists.
                list = GetUnownedList();
                if (list == null)
                {
                    list = new ThreadLocalList(Environment.CurrentManagedThreadId);
                    _tailList._nextList = list;
                    _tailList = list;
                }
            }
            _locals.Value = list;
        }
    }
    else
    {
        return null;
    }
    Debug.Assert(list != null);
    return list;
}
/// <summary>
/// Try to reuse an unowned list if one exists.
/// Unowned lists are lists whose owner threads have been aborted or terminated;
/// reusing them is a workaround to avoid memory leaks.
/// </summary>
/// <returns>The list object, or null if all lists are owned</returns>
private ThreadLocalList GetUnownedList()
{
    //the global lock must be held at this point
    Debug.Assert(Monitor.IsEntered(GlobalListsLock));
    int currentThreadId = Environment.CurrentManagedThreadId;
    ThreadLocalList currentList = _headList;
    while (currentList != null)
    {
        // A list recorded under the current managed thread id can only be
        // found here if its previous owner died and the id was reused — a
        // live owner would have resolved the list via _locals in GetThreadList.
        if (currentList._ownerThreadId == currentThreadId)
        {
            return currentList;
        }
        currentList = currentList._nextList;
    }
    return null;
}
/// <summary>
/// Local helper method to steal an item from any other non-empty thread list.
/// It scans all thread-local lists, recording each list's version; if any list
/// changed during the scan (its version moved), the changed lists are retried
/// until a stable pass completes without finding an item.
/// </summary>
/// <param name="result">To receive the item retrieved from the bag</param>
/// <param name="take">Whether to remove or peek.</param>
/// <returns>True if succeeded, false otherwise.</returns>
private bool Steal(out T result, bool take)
{
#if FEATURE_TRACING
if (take)
CDSCollectionETWBCLProvider.Log.ConcurrentBag_TryTakeSteals();
else
CDSCollectionETWBCLProvider.Log.ConcurrentBag_TryPeekSteals();
#endif
bool loop;
List<int> versionsList = new List<int>(); // save the lists version
do
{
versionsList.Clear(); //clear the list from the previous iteration
loop = false;
ThreadLocalList currentList = _headList;
while (currentList != null)
{
versionsList.Add(currentList._version);
if (currentList._head != null && TrySteal(currentList, out result, take))
{
return true;
}
currentList = currentList._nextList;
}
// verify versioning, if other items are added to this list since we last visit it, we should retry
currentList = _headList;
foreach (int version in versionsList)
{
if (version != currentList._version) //oops state changed
{
loop = true;
if (currentList._head != null && TrySteal(currentList, out result, take))
return true;
}
currentList = currentList._nextList;
}
} while (loop);
result = default(T);
return false;
}
/// <summary>
/// local helper function tries to steal an item from given local list
/// </summary>
/// <summary>
/// local helper function tries to steal an item from given local list
/// </summary>
private bool TrySteal(ThreadLocalList list, out T result, bool take)
{
    // Lock the victim list so its owner (and other stealers) synchronize with us.
    lock (list)
    {
        if (CanSteal(list))
        {
            list.Steal(out result, take);
            return true;
        }
        // The list turned out to be empty (or drained while we waited).
        result = default(T);
        return false;
    }
}
/// <summary>
/// Local helper function to check the list if it became empty after acquiring the lock
/// and wait if there is unsynchronized Add/Take operation in the list to be done
/// </summary>
/// <param name="list">The list to steal</param>
/// <returns>True if can steal, false otherwise</returns>
private static bool CanSteal(ThreadLocalList list)
{
    // If the list is small and its owner has an unsynchronized Add/Take in
    // flight, spin until that operation completes so the count can be trusted.
    if (list.Count <= 2 && list._currentOp != (int)ListOperation.None)
    {
        SpinWait spinner = new SpinWait();
        while (list._currentOp != (int)ListOperation.None)
        {
            spinner.SpinOnce();
        }
    }
    return list.Count > 0;
}
/// <summary>
/// Copies the <see cref="ConcurrentBag{T}"/> elements to an existing
/// one-dimensional <see cref="T:System.Array">Array</see>, starting at the specified array
/// index.
/// </summary>
/// <param name="array">The one-dimensional <see cref="T:System.Array">Array</see> that is the
/// destination of the elements copied from the
/// <see cref="ConcurrentBag{T}"/>. The <see
/// cref="T:System.Array">Array</see> must have zero-based indexing.</param>
/// <param name="index">The zero-based index in <paramref name="array"/> at which copying
/// begins.</param>
/// <exception cref="ArgumentNullException"><paramref name="array"/> is a null reference (Nothing in
/// Visual Basic).</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> is less than
/// zero.</exception>
/// <exception cref="ArgumentException"><paramref name="index"/> is equal to or greater than the
/// length of the <paramref name="array"/>
/// -or- the number of elements in the source <see
/// cref="ConcurrentBag{T}"/> is greater than the available space from
/// <paramref name="index"/> to the end of the destination <paramref name="array"/>.</exception>
public void CopyTo(T[] array, int index)
{
if (array == null)
{
throw new ArgumentNullException("array", SR.ConcurrentBag_CopyTo_ArgumentNullException);
}
if (index < 0)
{
throw new ArgumentOutOfRangeException
("index", SR.ConcurrentBag_CopyTo_ArgumentOutOfRangeException);
}
// Short path if the bag is empty
if (_headList == null)
return;
bool lockTaken = false;
try
{
FreezeBag(ref lockTaken);
ToList().CopyTo(array, index);
}
finally
{
UnfreezeBag(lockTaken);
}
}
/// <summary>
/// Copies the elements of the <see cref="T:System.Collections.ICollection"/> to an <see
/// cref="T:System.Array"/>, starting at a particular
/// <see cref="T:System.Array"/> index.
/// </summary>
/// <param name="array">The one-dimensional <see cref="T:System.Array">Array</see> that is the
/// destination of the elements copied from the
/// <see cref="ConcurrentBag{T}"/>. The <see
/// cref="T:System.Array">Array</see> must have zero-based indexing.</param>
/// <param name="index">The zero-based index in <paramref name="array"/> at which copying
/// begins.</param>
/// <exception cref="ArgumentNullException"><paramref name="array"/> is a null reference (Nothing in
/// Visual Basic).</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> is less than
/// zero.</exception>
/// <exception cref="ArgumentException">
/// <paramref name="array"/> is multidimensional. -or-
/// <paramref name="array"/> does not have zero-based indexing. -or-
/// <paramref name="index"/> is equal to or greater than the length of the <paramref name="array"/>
/// -or- The number of elements in the source <see cref="T:System.Collections.ICollection"/> is
/// greater than the available space from <paramref name="index"/> to the end of the destination
/// <paramref name="array"/>. -or- The type of the source <see
/// cref="T:System.Collections.ICollection"/> cannot be cast automatically to the type of the
/// destination <paramref name="array"/>.
/// </exception>
void ICollection.CopyTo(Array array, int index)
{
if (array == null)
{
throw new ArgumentNullException("array", SR.ConcurrentBag_CopyTo_ArgumentNullException);
}
bool lockTaken = false;
try
{
FreezeBag(ref lockTaken);
((ICollection)ToList()).CopyTo(array, index);
}
finally
{
UnfreezeBag(lockTaken);
}
}
/// <summary>
/// Copies the <see cref="ConcurrentBag{T}"/> elements to a new array.
/// </summary>
/// <returns>A new array containing a snapshot of elements copied from the <see
/// cref="ConcurrentBag{T}"/>.</returns>
public T[] ToArray()
{
// Short path if the bag is empty
if (_headList == null)
return Array.Empty<T>();
bool lockTaken = false;
try
{
FreezeBag(ref lockTaken);
return ToList().ToArray();
}
finally
{
UnfreezeBag(lockTaken);
}
}
/// <summary>
/// Returns an enumerator that iterates through the <see
/// cref="ConcurrentBag{T}"/>.
/// </summary>
/// <returns>An enumerator for the contents of the <see
/// cref="ConcurrentBag{T}"/>.</returns>
/// <remarks>
/// The enumeration represents a moment-in-time snapshot of the contents
/// of the bag. It does not reflect any updates to the collection after
/// <see cref="GetEnumerator"/> was called. The enumerator is safe to use
/// concurrently with reads from and writes to the bag.
/// </remarks>
public IEnumerator<T> GetEnumerator()
{
// Short path if the bag is empty
if (_headList == null)
return ((IEnumerable<T>)Array.Empty<T>()).GetEnumerator();
bool lockTaken = false;
try
{
FreezeBag(ref lockTaken);
return ToList().GetEnumerator();
}
finally
{
UnfreezeBag(lockTaken);
}
}
/// <summary>
/// Returns an enumerator that iterates through the <see
/// cref="ConcurrentBag{T}"/>.
/// </summary>
/// <returns>An enumerator for the contents of the <see
/// cref="ConcurrentBag{T}"/>.</returns>
/// <remarks>
/// The items enumerated represent a moment-in-time snapshot of the contents
/// of the bag. It does not reflect any update to the collection after
/// <see cref="GetEnumerator"/> was called.
/// </remarks>
IEnumerator IEnumerable.GetEnumerator()
{
    // Defer to the strongly typed enumerator; the snapshot semantics
    // documented there apply here as well.
    return this.GetEnumerator();
}
/// <summary>
/// Gets the number of elements contained in the <see cref="ConcurrentBag{T}"/>.
/// </summary>
/// <value>The number of elements contained in the <see cref="ConcurrentBag{T}"/>.</value>
/// <remarks>
/// The count returned represents a moment-in-time snapshot of the contents
/// of the bag. It does not reflect any updates to the collection after
/// <see cref="GetEnumerator"/> was called.
/// </remarks>
public int Count
{
get
{
// Short path if the bag is empty
if (_headList == null)
return 0;
bool lockTaken = false;
try
{
FreezeBag(ref lockTaken);
return GetCountInternal();
}
finally
{
UnfreezeBag(lockTaken);
}
}
}
/// <summary>
/// Gets a value that indicates whether the <see cref="ConcurrentBag{T}"/> is empty.
/// </summary>
/// <value>true if the <see cref="ConcurrentBag{T}"/> is empty; otherwise, false.</value>
public bool IsEmpty
{
get
{
if (_headList == null)
return true;
bool lockTaken = false;
try
{
FreezeBag(ref lockTaken);
ThreadLocalList currentList = _headList;
while (currentList != null)
{
if (currentList._head != null)
//at least this list is not empty, we return false
{
return false;
}
currentList = currentList._nextList;
}
return true;
}
finally
{
UnfreezeBag(lockTaken);
}
}
}
/// <summary>
/// Gets a value indicating whether access to the <see cref="T:System.Collections.ICollection"/> is
/// synchronized with the SyncRoot.
/// </summary>
/// <value>true if access to the <see cref="T:System.Collections.ICollection"/> is synchronized
/// with the SyncRoot; otherwise, false. For <see cref="ConcurrentBag{T}"/>, this property always
/// returns false.</value>
bool ICollection.IsSynchronized
{
get { return false; }
}
/// <summary>
/// Gets an object that can be used to synchronize access to the <see
/// cref="T:System.Collections.ICollection"/>. This property is not supported.
/// </summary>
/// <exception cref="T:System.NotSupportedException">The SyncRoot property is not supported.</exception>
object ICollection.SyncRoot
{
get
{
throw new NotSupportedException(SR.ConcurrentCollection_SyncRoot_NotSupported);
}
}
/// <summary>
/// A global lock object, used in two cases:
/// 1- To maintain the _tailList pointer for each new list addition process ( first time a thread called Add )
/// 2- To freeze the bag in GetEnumerator, CopyTo, ToArray and Count members
/// </summary>
private object GlobalListsLock
{
get
{
Debug.Assert(_locals != null);
return _locals;
}
}
#region Freeze bag helper methods
/// <summary>
/// Local helper method to freeze all bag operations, it
/// 1- Acquires the global lock so no other thread can freeze the bag and no new
/// thread-local list can be added
/// 2- Then acquires all local lists locks to prevent steal and synchronized operations
/// 3- Waits for all un-synchronized operations to be done
/// </summary>
/// <param name="lockTaken">Retrieve the lock taken result for the global lock, to be passed to Unfreeze method</param>
private void FreezeBag(ref bool lockTaken)
{
    Debug.Assert(!Monitor.IsEntered(GlobalListsLock));
    // global lock to be safe against multi threads calls count and corrupt _needSync
    Monitor.Enter(GlobalListsLock, ref lockTaken);
    // This will force any future add/take operation to be synchronized
    _needSync = true;
    //Acquire all local lists locks
    AcquireAllLocks();
    // Wait for all un-synchronized operation to be done
    WaitAllOperations();
}
/// <summary>
/// Local helper method to unfreeze the bag from a frozen state
/// </summary>
/// <param name="lockTaken">The lock taken result from the Freeze method</param>
private void UnfreezeBag(bool lockTaken)
{
    // Reverse of FreezeBag: release the per-list locks, re-enable
    // unsynchronized operations, then release the global lock (only if
    // FreezeBag actually acquired it).
    ReleaseAllLocks();
    _needSync = false;
    if (lockTaken)
    {
        Monitor.Exit(GlobalListsLock);
    }
}
/// <summary>
/// local helper method to acquire all local lists locks
/// </summary>
private void AcquireAllLocks()
{
Debug.Assert(Monitor.IsEntered(GlobalListsLock));
bool lockTaken = false;
ThreadLocalList currentList = _headList;
while (currentList != null)
{
// Try/Finally block to avoid thread abort between acquiring the lock and setting the taken flag
try
{
Monitor.Enter(currentList, ref lockTaken);
}
finally
{
if (lockTaken)
{
currentList._lockTaken = true;
lockTaken = false;
}
}
currentList = currentList._nextList;
}
}
/// <summary>
/// Local helper method to release all local lists locks
/// </summary>
private void ReleaseAllLocks()
{
ThreadLocalList currentList = _headList;
while (currentList != null)
{
if (currentList._lockTaken)
{
currentList._lockTaken = false;
Monitor.Exit(currentList);
}
currentList = currentList._nextList;
}
}
/// <summary>
/// Local helper function to wait all unsynchronized operations
/// </summary>
private void WaitAllOperations()
{
Debug.Assert(Monitor.IsEntered(GlobalListsLock));
ThreadLocalList currentList = _headList;
while (currentList != null)
{
if (currentList._currentOp != (int)ListOperation.None)
{
SpinWait spinner = new SpinWait();
while (currentList._currentOp != (int)ListOperation.None)
{
spinner.SpinOnce();
}
}
currentList = currentList._nextList;
}
}
/// <summary>
/// Local helper function to get the bag count, the caller should call it from Freeze/Unfreeze block
/// </summary>
/// <returns>The current bag count</returns>
private int GetCountInternal()
{
Debug.Assert(Monitor.IsEntered(GlobalListsLock));
int count = 0;
ThreadLocalList currentList = _headList;
while (currentList != null)
{
checked
{
count += currentList.Count;
}
currentList = currentList._nextList;
}
return count;
}
/// <summary>
/// Local helper function to return the bag item in a list, this is mainly used by CopyTo and ToArray
/// This is not thread safe, should be called in Freeze/UnFreeze bag block
/// </summary>
/// <returns>List the contains the bag items</returns>
private List<T> ToList()
{
Debug.Assert(Monitor.IsEntered(GlobalListsLock));
List<T> list = new List<T>();
ThreadLocalList currentList = _headList;
while (currentList != null)
{
Node currentNode = currentList._head;
while (currentNode != null)
{
list.Add(currentNode._value);
currentNode = currentNode._next;
}
currentList = currentList._nextList;
}
return list;
}
#endregion
#region Inner Classes
/// <summary>
/// A class that represents a node in the lock thread list
/// </summary>
internal class Node
{
public Node(T value)
{
_value = value;
}
public readonly T _value;
public Node _next;
public Node _prev;
}
/// <summary>
/// A class that represents the lock thread list
/// </summary>
internal class ThreadLocalList
{
// Head node in the list, null means the list is empty
internal volatile Node _head;
// Tail node for the list
private volatile Node _tail;
// The current list operation
internal volatile int _currentOp;
// The list count from the Add/Take perspective
private int _count;
// The stealing count
internal int _stealCount;
// Next list in the dictionary values
internal volatile ThreadLocalList _nextList;
// Set if the local lock is taken
internal bool _lockTaken;
// The owner thread for this list
internal int _ownerThreadId;
// the version of the list, incremented only when the list changed from empty to non empty state
internal volatile int _version;
/// <summary>
/// ThreadLocalList constructor
/// </summary>
/// <param name="ownerThreadId">The managed id of the owner thread for this list</param>
internal ThreadLocalList(int ownerThreadId)
{
    _ownerThreadId = ownerThreadId;
}
/// <summary>
/// Add new item to head of the list
/// </summary>
/// <param name="item">The item to add.</param>
/// <param name="updateCount">Whether to update the count.</param>
internal void Add(T item, bool updateCount)
{
checked
{
_count++;
}
Node node = new Node(item);
if (_head == null)
{
Debug.Assert(_tail == null);
_head = node;
_tail = node;
_version++; // changing from empty state to non empty state
}
else
{
node._next = _head;
_head._prev = node;
_head = node;
}
if (updateCount) // update the count to avoid overflow if this add is synchronized
{
_count = _count - _stealCount;
_stealCount = 0;
}
}
/// <summary>
/// Remove an item from the head of the list
/// </summary>
/// <param name="result">The removed item</param>
internal void Remove(out T result)
{
Debug.Assert(_head != null);
Node head = _head;
_head = _head._next;
if (_head != null)
{
_head._prev = null;
}
else
{
_tail = null;
}
_count--;
result = head._value;
}
/// <summary>
/// Peek an item from the head of the list
/// </summary>
/// <param name="result">the peeked item</param>
/// <returns>True if succeeded, false otherwise</returns>
internal bool Peek(out T result)
{
Node head = _head;
if (head != null)
{
result = head._value;
return true;
}
result = default(T);
return false;
}
/// <summary>
/// Steal an item from the tail of the list
/// </summary>
/// <param name="result">the removed item</param>
/// <param name="remove">remove or peek flag</param>
internal void Steal(out T result, bool remove)
{
Node tail = _tail;
Debug.Assert(tail != null);
if (remove) // Take operation
{
_tail = _tail._prev;
if (_tail != null)
{
_tail._next = null;
}
else
{
_head = null;
}
// Increment the steal count
_stealCount++;
}
result = tail._value;
}
/// <summary>
/// Gets the total list count; not thread safe — may report an incorrect value
/// when read concurrently with adds or steals.
/// </summary>
internal int Count
{
    // Unsynchronized snapshot: local adds minus steals not yet folded back in.
    get { return _count - _stealCount; }
}
}
#endregion
}
/// <summary>
/// List operations for ConcurrentBag
/// </summary>
internal enum ListOperation
{
    None, // no list operation
    Add,  // an Add operation
    Take  // a Take operation
};
}
| |
using System;
using System.IO;
using System.Security.Cryptography;
//using ICSharpCode.SharpZipLib.Zip;
//using ICSharpCode.SharpZipLib.Checksums;
using System.Configuration;
using System.Collections;
using System.Windows.Forms;
using DowReplayManager.NET.Types;
namespace DowReplayManager.NET.Handlers
{
/// <summary>
/// Pairs a replay file name with the MD5 hash of its contents
/// (populated by ReplayManager.PollPlaybackFolder).
/// </summary>
public class ReplayHash
{
    public string Filename; // full path of the replay file
    public byte[] HashCode; // MD5 digest of the file contents
}
/// <summary>
/// Manages Dawn of War replay (*.rec) files: migrating them from the game's
/// Playback folder into a managed store, renaming them (both on disk and the
/// name embedded in the replay file), and making them available to the game again.
/// </summary>
public class ReplayManager
{
    public const string MANAGER_REPLAYFILERENAME_FAIL = "Unable to rename the replay file\n'{0}' to\n'{1}'.";
    public const string MANAGER_REPLAYAVAIL_FAIL = "Unable to make the replay file '{0}' available.";

    #region Properties
    /// <summary>How replays are stored: a zip archive or plain files on disk.</summary>
    public enum ArchiveType
    {
        ZipCompression,
        FileStore
    }

    private ArchiveType archive;
    /// <summary>Gets or sets the storage mode used by <see cref="AddReplays"/>.</summary>
    public ArchiveType Archive
    {
        get { return archive; }
        set { archive = value; }
    }

    public string DoWPlaybackFolder;     // DoW's own Playback folder (read from app config)
    public string ReplayManagerFile;     // full path of the Replays.zip archive
    public string ReplayManagerFilePath; // folder holding the managed replay files
    public Logging log = null;           // assumed to be assigned by the owner before use — TODO confirm

    private int compressionlevel;
    /// <summary>Gets or sets the zip compression level (unused while the zip path is disabled).</summary>
    public int CompressionLevel
    {
        get { return compressionlevel; }
        set { compressionlevel = value; }
    }

    private DowReplayManager.NET.Readers.StoreReader storeReader;
    /// <summary>Gets or sets the reader that persists replay metadata (rec.dat).</summary>
    public DowReplayManager.NET.Readers.StoreReader StoreReader
    {
        get { return storeReader; }
        set { storeReader = value; }
    }
    #endregion

    public ReplayManager()
    {
    }

    /// <summary>
    /// Creates a manager rooted next to the given path, ensuring the Replays folder exists.
    /// </summary>
    /// <param name="path">A path whose directory part becomes the manager root.</param>
    /// <param name="filename">Initial manager file name (superseded by Replays.zip below).</param>
    public ReplayManager(string path, string filename)
    {
        // NOTE(review): ConfigurationSettings is obsolete; ConfigurationManager.AppSettings is the
        // modern equivalent but requires a System.Configuration.dll reference — left as-is.
        DoWPlaybackFolder = ConfigurationSettings.AppSettings["DoWPlaybackFolder"].ToString();
        // Strip the file-name part (everything from the last '\') to get the containing folder.
        path = path.Remove(path.LastIndexOf(@"\"), path.Length - path.LastIndexOf(@"\"));
        ReplayManagerFilePath = path;
        ReplayManagerFile = filename;
        // This should always exist... if it doesn't, create it.
        if (!Directory.Exists(path + @"\Replays"))
        {
            Directory.CreateDirectory(path + @"\Replays");
        }
        ReplayManagerFilePath = ReplayManagerFilePath + @"\Replays";
        ReplayManagerFile = ReplayManagerFilePath + @"\Replays.zip";
    }

    /// <summary>
    /// Adds the supplied files to the existing or newly created ReplayManager archive.
    /// </summary>
    /// <param name="filenames">Array of full-path filenames of the replays to add</param>
    public void AddReplays(object[] filenames)
    {
        StoreReader.ReplayManagerPath = ReplayManagerFilePath;
        if (Archive == ArchiveType.FileStore)
        {
            MigrateReplays(filenames);
            StoreReader.AddReplays(filenames);
            StoreReader.SaveReplays("rec.dat");
            CullReplays(filenames);
        }
        else
        {
            // Zip-compression path is intentionally disabled (SharpZipLib dependency removed).
        }
    }

    /// <summary>
    /// Copies the given replay files into the managed Replays folder,
    /// prompting the user before overwriting an existing file.
    /// </summary>
    private void MigrateReplays(object[] filenames)
    {
        foreach (string file in filenames)
        {
            string destination = ReplayManagerFilePath + @"\" + Path.GetFileName(file);
            if (!File.Exists(destination))
            {
                File.Copy(file, destination, false);
            }
            else
            {
                DialogResult result = MessageBox.Show(null,
                    "The replay " + Path.GetFileName(file) + " already exists. Do you wish to overwrite?",
                    "Overwrite existing...",
                    MessageBoxButtons.YesNoCancel,
                    MessageBoxIcon.Question);
                if (result == DialogResult.Yes)
                {
                    File.Copy(file, destination, true);
                }
            }
        }
    }

    /// <summary>Deletes the source replay files after they have been migrated into the store.</summary>
    private void CullReplays(object[] filenames)
    {
        foreach (string file in filenames)
        {
            File.Delete(file);
        }
    }

    /// <summary>
    /// Poll the DoW Playback folder for new replays.
    /// </summary>
    /// <param name="resursive">Unused (kept for interface compatibility).</param>
    /// <returns>An array of <see cref="ReplayHash"/> objects holding the filename and MD5 hash of each replay</returns>
    public object[] PollPlaybackFolder(bool resursive)
    {
        string[] files = null;
        try
        {
            files = Directory.GetFiles(DoWPlaybackFolder, "*.rec");
        }
        catch
        {
            MessageBox.Show(null, "It appears as if your DoWPlaybackFolder setting is incorrect. Please change it in the DoWRM.exe.Config file.", "Playback folder incorrect...", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
        System.Collections.ArrayList list = new System.Collections.ArrayList();
        if (files != null)
        {
            // One MD5 instance serves all files; ComputeHash resets its state per call.
            using (MD5 md5 = new MD5CryptoServiceProvider())
            {
                foreach (string file in files)
                {
                    // ReadAllBytes reads the whole file; the old Stream.Read call could return
                    // fewer bytes than requested and silently hash a partial buffer.
                    byte[] data = File.ReadAllBytes(file);
                    ReplayHash replayHash = new ReplayHash();
                    replayHash.Filename = file;
                    replayHash.HashCode = md5.ComputeHash(data);
                    list.Add(replayHash);
                }
            }
        }
        return list.ToArray();
    }

    /// <summary>
    /// Renames a replay file within the managed Replays folder.
    /// </summary>
    /// <returns>True on success, false if the move failed.</returns>
    public bool RenameReplayFile(string oldFilename, string newFilename)
    {
        oldFilename = ReplayManagerFilePath + @"\" + oldFilename;
        newFilename = ReplayManagerFilePath + @"\" + newFilename;
        try
        {
            File.Move(oldFilename, newFilename);
            return true;
        }
        catch (Exception)
        {
            // Failure is reported via the return value; callers show MANAGER_REPLAYFILERENAME_FAIL.
            return false;
        }
    }

    /// <summary>
    /// Renames the replay's internal (embedded) name, patching the FOLDINFO and DATABASE
    /// length fields to account for the size change (names are UTF-16, 2 bytes per char).
    /// </summary>
    /// <param name="replay">Parsed replay whose offsets (FOLDINFOPOS etc.) locate the fields to patch.</param>
    /// <param name="newName">New name to use for the replay</param>
    /// <returns>True on success, false if any I/O step failed.</returns>
    public bool RenameReplay(Replay replay, string newName)
    {
        try
        {
            // Load the entire replay file.
            byte[] data = File.ReadAllBytes(replay.Filename);
            // Size delta in bytes caused by the name change (UTF-16: two bytes per char).
            int delta = (newName.Length - replay.Name.Length) * 2;
            byte[] buffer = new byte[replay.FileSize + delta];
            using (MemoryStream writer = new MemoryStream(buffer))
            {
                // Recompute the two length fields affected by the name change.
                int database_len = replay.DATABASE + delta;
                int foldinfo_len = replay.FOLDINFO + delta;
                // Copy everything up to the FOLDINFO length field, then write the patched value.
                writer.Write(data, 0, replay.FOLDINFOPOS);
                byte[] foldinfo_byte = BitConverter.GetBytes(foldinfo_len);
                writer.Write(foldinfo_byte, 0, foldinfo_byte.Length);
                // Copy up to the DATABASE length field, then write the patched value.
                writer.Write(data, replay.FOLDINFOPOS + 4, (replay.DATABASEPOS - replay.FOLDINFOPOS - 4));
                byte[] database_byte = BitConverter.GetBytes(database_len);
                writer.Write(database_byte, 0, database_byte.Length);
                // Copy up to the name-length field, then write the new name length and the name.
                writer.Write(data, replay.DATABASEPOS + 4, (replay.REPLAYLENPOS - replay.DATABASEPOS - 4));
                byte[] replay_byte = BitConverter.GetBytes(newName.Length);
                writer.Write(replay_byte, 0, replay_byte.Length);
                byte[] byte_name = new System.Text.UnicodeEncoding().GetBytes(newName);
                writer.Write(byte_name, 0, byte_name.Length);
                // Copy the remainder of the file after the old name.
                writer.Write(data, (replay.REPLAYLENPOS + 4 + (replay.Name.Length * 2)),
                    (replay.FileSize - ((replay.REPLAYLENPOS + 4) + replay.Name.Length * 2)));
            }
            using (BinaryWriter binwriter = new BinaryWriter(File.Create(replay.Filename)))
            {
                binwriter.Write(buffer);
            }
            return true;
        }
        catch
        {
            return false;
        }
    }

    /// <summary>
    /// Toggles a replay's presence in the DoW Playback folder: removes it if present,
    /// otherwise copies it there from the managed store.
    /// </summary>
    /// <returns>True on success, false if any file operation failed.</returns>
    public bool MakeAvailable(string filename)
    {
        try
        {
            // If it exists... get rid of it.
            log.Write(LogType.Info, 5, "Checking if '" + filename + "' exists in " + DoWPlaybackFolder);
            if (File.Exists(DoWPlaybackFolder + @"\" + Path.GetFileName(filename)))
            {
                log.Write(LogType.Info, 5, "Removing '" + Path.GetFileName(filename) + "' from DoW Playback folder");
                File.Delete(DoWPlaybackFolder + @"\" + Path.GetFileName(filename));
            }
            // If it doesn't, put it in the playback folder.
            else
            {
                log.Write(LogType.Info, 5, "Making '" + Path.GetFileName(filename) + "' Available in DoW Playback folder");
                File.Copy(filename, DoWPlaybackFolder + @"\" + Path.GetFileName(filename));
            }
            return true;
        }
        catch (Exception x)
        {
            // Log the message as well as the stack trace so failures are diagnosable.
            log.Write(LogType.Error, 1, "MakeAvailable() " + x.Message + "\n" + x.StackTrace);
            return false;
        }
    }

    /// <summary>
    /// Lists the *.rec files currently in the DoW Playback folder.
    /// </summary>
    /// <returns>A hashtable keyed by file name (values unused), or null on failure.</returns>
    public Hashtable GetAvailable()
    {
        try
        {
            Hashtable hash = new Hashtable();
            DirectoryInfo dir = new DirectoryInfo(DoWPlaybackFolder);
            foreach (FileInfo file in dir.GetFiles("*.rec"))
            {
                hash.Add(file.Name, null);
            }
            return hash;
        }
        catch
        {
            return null;
        }
    }

    /// <summary>
    /// Permanently remove replay files from both the DoW Playback folder and the Replays store.
    /// </summary>
    /// <param name="filenames">Bare file names (not full paths) of the replays to delete.</param>
    /// <returns>True if totally successful</returns>
    public bool DeleteReplays(object[] filenames)
    {
        try
        {
            foreach (string file in filenames)
            {
                // Delete the file if it's in the Playback folder.
                if (File.Exists(DoWPlaybackFolder + @"\" + file))
                    File.Delete(DoWPlaybackFolder + @"\" + file);
                // Delete the file in the store.
                if (File.Exists(ReplayManagerFilePath + @"\" + file))
                    File.Delete(ReplayManagerFilePath + @"\" + file);
            }
            return true;
        }
        catch
        {
            return false;
        }
    }

    // NOTE(review): the old SharpZipLib-based Deflate/Inflate/GetReplay implementations that
    // lived here as commented-out code have been removed; recover them from source control if
    // the zip-compression path is ever revived.
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
/*============================================================
**
** Class: DebuggerAttributes
**
**
** Purpose: Attributes for debugger
**
**
===========================================================*/
namespace System.Diagnostics
{
using System;
using System.Runtime.InteropServices;
[Serializable]
[AttributeUsage( AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Method | AttributeTargets.Constructor, Inherited = false )]
public sealed class DebuggerStepThroughAttribute : Attribute
{
    /// <summary>Marker attribute: the debugger should step through, not into, the attributed code.</summary>
    public DebuggerStepThroughAttribute() { }
}
[Serializable]
[AttributeUsage( AttributeTargets.Method | AttributeTargets.Constructor, Inherited = false )]
public sealed class DebuggerStepperBoundaryAttribute : Attribute
{
    /// <summary>Marker attribute for methods/constructors acting as a stepping boundary for the debugger.</summary>
    public DebuggerStepperBoundaryAttribute() { }
}
[Serializable]
[AttributeUsage( AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Constructor, Inherited = false )]
public sealed class DebuggerHiddenAttribute : Attribute
{
    /// <summary>Marker attribute: hide the attributed member from the debugger.</summary>
    public DebuggerHiddenAttribute() { }
}
[Serializable]
[AttributeUsage( AttributeTargets.Class | AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Constructor | AttributeTargets.Struct, Inherited = false )]
public sealed class DebuggerNonUserCodeAttribute : Attribute
{
    /// <summary>Marker attribute: the attributed code is not user code from the debugger's point of view.</summary>
    public DebuggerNonUserCodeAttribute() { }
}
// Attribute class used by the compiler to mark modules.
// If present, debugging information for everything in the assembly was generated
// by the compiler and will be preserved by the Runtime so that the debugger can
// provide full functionality in the case of JIT attach. If not present, the
// compiler may or may not have included debugging information, and the Runtime
// won't preserve it, making debugging after a JIT attach difficult.
[AttributeUsage( AttributeTargets.Assembly | AttributeTargets.Module, AllowMultiple = false )]
public sealed class DebuggableAttribute : Attribute
{
    /// <summary>Bit flags describing how the attributed module may be debugged.</summary>
    [Flags]
    public enum DebuggingModes
    {
        None = 0x000,
        Default = 0x001,
        IgnoreSymbolStoreSequencePoints = 0x002,
        EnableEditAndContinue = 0x004,
        DisableOptimizations = 0x100,
    }

    private DebuggingModes m_debuggingModes;

    /// <summary>Builds the flag set from the two legacy boolean knobs.</summary>
    public DebuggableAttribute( bool isJITTrackingEnabled, bool isJITOptimizerDisabled )
    {
        m_debuggingModes = DebuggingModes.None;
        if (isJITTrackingEnabled)
        {
            m_debuggingModes |= DebuggingModes.Default;
        }
        if (isJITOptimizerDisabled)
        {
            m_debuggingModes |= DebuggingModes.DisableOptimizations;
        }
    }

    /// <summary>Stores the supplied flag set verbatim (no validation).</summary>
    public DebuggableAttribute( DebuggingModes modes )
    {
        m_debuggingModes = modes;
    }

    /// <summary>True when the Default (JIT tracking) flag is set.</summary>
    public bool IsJITTrackingEnabled
    {
        get { return (m_debuggingModes & DebuggingModes.Default) != 0; }
    }

    /// <summary>True when the DisableOptimizations flag is set.</summary>
    public bool IsJITOptimizerDisabled
    {
        get { return (m_debuggingModes & DebuggingModes.DisableOptimizations) != 0; }
    }

    /// <summary>The raw flag set.</summary>
    public DebuggingModes DebuggingFlags
    {
        get { return m_debuggingModes; }
    }
}
// DebuggerBrowsableState states are defined as follows:
//     Never       never show this element
//     Expanded    expansion of the class is done, so that all visible internal members are shown
//     Collapsed   expansion of the class is not performed. Internal visible members are hidden
//     RootHidden  the target element itself should not be shown, but should instead be
//                 automatically expanded to have its members displayed.
// Default value is Collapsed.
// Please also change the code which validates DebuggerBrowsableState variable (in this file)
// if you change this enum.
public enum DebuggerBrowsableState
{
    Never = 0,
    //Expanded is not supported in this release
    //Expanded = 1,
    Collapsed = 2,  // note the gap: value 1 is reserved for Expanded
    RootHidden = 3,
}
// the one currently supported with the csee.dat
// (mcee.dat, autoexp.dat) file.
[AttributeUsage( AttributeTargets.Field | AttributeTargets.Property, AllowMultiple = false )]
public sealed class DebuggerBrowsableAttribute : Attribute
{
    private DebuggerBrowsableState m_state;

    /// <summary>Controls how the attributed field or property is displayed in debugger windows.</summary>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when state is outside the declared enum range.</exception>
    public DebuggerBrowsableAttribute( DebuggerBrowsableState state )
    {
        // Reject anything outside [Never, RootHidden]; keep this check in sync with the enum.
        bool outOfRange = state < DebuggerBrowsableState.Never || state > DebuggerBrowsableState.RootHidden;
        if (outOfRange)
        {
#if EXCEPTION_STRINGS
            throw new ArgumentOutOfRangeException( "state" );
#else
            throw new ArgumentOutOfRangeException();
#endif
        }
        m_state = state;
    }

    /// <summary>The validated browsable state.</summary>
    public DebuggerBrowsableState State
    {
        get { return m_state; }
    }
}
// DebuggerTypeProxyAttribute
[AttributeUsage( AttributeTargets.Struct | AttributeTargets.Class | AttributeTargets.Assembly, AllowMultiple = true )]
public sealed class DebuggerTypeProxyAttribute : Attribute
{
    private string m_typeName;
    private string m_targetName;
    private Type m_target;

    /// <summary>Names the proxy type by its assembly-qualified name; the Type must be non-null.</summary>
    public DebuggerTypeProxyAttribute( Type type )
    {
        if (type == null)
        {
#if EXCEPTION_STRINGS
            throw new ArgumentNullException( "type" );
#else
            throw new ArgumentNullException();
#endif
        }
        m_typeName = type.AssemblyQualifiedName;
    }

    /// <summary>Names the proxy type by string; no validation is performed.</summary>
    public DebuggerTypeProxyAttribute( string typeName )
    {
        m_typeName = typeName;
    }

    /// <summary>Assembly-qualified (or literal) name of the proxy type.</summary>
    public string ProxyTypeName
    {
        get { return m_typeName; }
    }

    /// <summary>Target type; setting it also records the assembly-qualified name.</summary>
    public Type Target
    {
        get { return m_target; }
        set
        {
            if (value == null)
            {
#if EXCEPTION_STRINGS
                throw new ArgumentNullException( "value" );
#else
                throw new ArgumentNullException();
#endif
            }
            m_targetName = value.AssemblyQualifiedName;
            m_target = value;
        }
    }

    /// <summary>Target type name; settable independently of <see cref="Target"/>.</summary>
    public string TargetTypeName
    {
        get { return m_targetName; }
        set { m_targetName = value; }
    }
}
// This attribute controls what is displayed for the given class or field in the
// debugger's data windows. The single constructor argument is the string shown in
// the value column for instances of the type; text between { and } can be a field,
// property or method (C# additionally allows a general expression with implicit
// access to the 'this' pointer only — no aliases, locals, or pointers; attributes
// on referenced properties are not processed).
[AttributeUsage( AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Delegate | AttributeTargets.Enum | AttributeTargets.Field | AttributeTargets.Property | AttributeTargets.Assembly, AllowMultiple = true )]
public sealed class DebuggerDisplayAttribute : Attribute
{
    private string m_name;
    private string m_value;
    private string m_type;
    private string m_targetName;
    private Type m_target;

    /// <summary>A null display string is normalized to empty rather than rejected.</summary>
    public DebuggerDisplayAttribute( string value )
    {
        m_value = (value == null) ? "" : value;
        m_name = "";
        m_type = "";
    }

    /// <summary>The display format string (never null).</summary>
    public string Value
    {
        get { return m_value; }
    }

    /// <summary>Optional name shown in the name column; defaults to empty.</summary>
    public string Name
    {
        get { return m_name; }
        set { m_name = value; }
    }

    /// <summary>Optional text shown in the type column; defaults to empty.</summary>
    public string Type
    {
        get { return m_type; }
        set { m_type = value; }
    }

    /// <summary>Target type; setting it also records the assembly-qualified name.</summary>
    public Type Target
    {
        get { return m_target; }
        set
        {
            if (value == null)
            {
#if EXCEPTION_STRINGS
                throw new ArgumentNullException( "value" );
#else
                throw new ArgumentNullException();
#endif
            }
            m_targetName = value.AssemblyQualifiedName;
            m_target = value;
        }
    }

    /// <summary>Target type name; settable independently of <see cref="Target"/>.</summary>
    public string TargetTypeName
    {
        get { return m_targetName; }
        set { m_targetName = value; }
    }
}
/// <summary>
/// Signifies that the attributed type has a visualizer which is pointed
/// to by the parameter type name strings.
/// </summary>
[AttributeUsage( AttributeTargets.Struct | AttributeTargets.Class | AttributeTargets.Assembly, AllowMultiple = true )]
public sealed class DebuggerVisualizerAttribute : Attribute
{
    private string m_visualizerObjectSourceName;
    private string m_visualizerName;
    private string m_description;
    private string m_targetName;
    private Type m_target;

    // Shared null guard used by every Type-accepting entry point; preserves the
    // EXCEPTION_STRINGS conditional-compilation behavior of the original checks.
    private static void RequireNonNull( object arg, string name )
    {
        if (arg == null)
        {
#if EXCEPTION_STRINGS
            throw new ArgumentNullException( name );
#else
            throw new ArgumentNullException();
#endif
        }
    }

    /// <summary>Visualizer named by string; no validation.</summary>
    public DebuggerVisualizerAttribute( string visualizerTypeName )
    {
        m_visualizerName = visualizerTypeName;
    }

    /// <summary>Visualizer and object source both named by string; no validation.</summary>
    public DebuggerVisualizerAttribute( string visualizerTypeName, string visualizerObjectSourceTypeName )
    {
        m_visualizerName = visualizerTypeName;
        m_visualizerObjectSourceName = visualizerObjectSourceTypeName;
    }

    /// <summary>Visualizer named by string, object source as a non-null Type.</summary>
    public DebuggerVisualizerAttribute( string visualizerTypeName, Type visualizerObjectSource )
    {
        RequireNonNull( visualizerObjectSource, "visualizerObjectSource" );
        m_visualizerName = visualizerTypeName;
        m_visualizerObjectSourceName = visualizerObjectSource.AssemblyQualifiedName;
    }

    /// <summary>Visualizer as a non-null Type.</summary>
    public DebuggerVisualizerAttribute( Type visualizer )
    {
        RequireNonNull( visualizer, "visualizer" );
        m_visualizerName = visualizer.AssemblyQualifiedName;
    }

    /// <summary>Visualizer and object source both as non-null Types.</summary>
    public DebuggerVisualizerAttribute( Type visualizer, Type visualizerObjectSource )
    {
        RequireNonNull( visualizer, "visualizer" );
        RequireNonNull( visualizerObjectSource, "visualizerObjectSource" );
        m_visualizerName = visualizer.AssemblyQualifiedName;
        m_visualizerObjectSourceName = visualizerObjectSource.AssemblyQualifiedName;
    }

    /// <summary>Visualizer as a non-null Type, object source named by string.</summary>
    public DebuggerVisualizerAttribute( Type visualizer, string visualizerObjectSourceTypeName )
    {
        RequireNonNull( visualizer, "visualizer" );
        m_visualizerName = visualizer.AssemblyQualifiedName;
        m_visualizerObjectSourceName = visualizerObjectSourceTypeName;
    }

    /// <summary>Name of the visualizer object source type (may be null).</summary>
    public string VisualizerObjectSourceTypeName
    {
        get { return m_visualizerObjectSourceName; }
    }

    /// <summary>Name of the visualizer type.</summary>
    public string VisualizerTypeName
    {
        get { return m_visualizerName; }
    }

    /// <summary>Free-form description of the visualizer.</summary>
    public string Description
    {
        get { return m_description; }
        set { m_description = value; }
    }

    /// <summary>Target type; setting it also records the assembly-qualified name.</summary>
    public Type Target
    {
        get { return m_target; }
        set
        {
            RequireNonNull( value, "value" );
            m_targetName = value.AssemblyQualifiedName;
            m_target = value;
        }
    }

    /// <summary>Target type name; settable independently of <see cref="Target"/>.</summary>
    public string TargetTypeName
    {
        get { return m_targetName; }
        set { m_targetName = value; }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
using System;
using System.Collections.Generic;
using ParquetSharp.External;
using ParquetSharp.Hadoop;
namespace ParquetSharp.Hadoop.MapRed
{
// NOTE(review): this class is a direct C# port of parquet-mr's DeprecatedParquetInputFormat;
// Java-style member naming and the Hadoop shim types are retained intentionally.
/// <summary>
/// Adapts the new-API ParquetInputFormat to the deprecated Hadoop mapred API by
/// wrapping its splits and record reader.
/// </summary>
public class DeprecatedParquetInputFormat<V> : org.apache.hadoop.mapred.FileInputFormat<object, Container<V>>
{
    // The real (new-API) input format all work is delegated to.
    protected ParquetInputFormat<V> realInputFormat = new ParquetInputFormat<V>();
    /// <summary>Creates a mapred-style record reader wrapping the real Parquet reader.</summary>
    public RecordReader<object, Container<V>> getRecordReader(InputSplit split, JobConf job,
        Reporter reporter)
    {
        return new RecordReaderWrapper(split, job, reporter);
    }
    /// <summary>
    /// Computes input splits. With task-side metadata enabled the base FileInputFormat
    /// splits are used directly; otherwise footers are read here and the real input
    /// format computes the splits, which are then wrapped for the old API.
    /// </summary>
    public InputSplit[] getSplits(JobConf job, int numSplits)
    {
        if (isTaskSideMetaData(job))
        {
            return base.getSplits(job, numSplits);
        }
        List<Footer> footers = getFooters(job);
        List<ParquetInputSplit> splits = realInputFormat.getSplits(job, footers);
        if (splits == null)
        {
            return null;
        }
        // Wrap each ParquetInputSplit so it can travel through the mapred API.
        InputSplit[] resultSplits = new InputSplit[splits.Count];
        int i = 0;
        foreach (ParquetInputSplit split in splits)
        {
            resultSplits[i++] = new ParquetInputSplitWrapper(split);
        }
        return resultSplits;
    }
    /// <summary>Reads the Parquet footers for all input files of the job.</summary>
    public List<Footer> getFooters(JobConf job)
    {
        return realInputFormat.getFooters(job, asList(base.listStatus(job)));
    }
    /// <summary>
    /// Old-API record reader that delegates to ParquetRecordReader. The key is always
    /// null; values are handed back through a shared Container instance.
    /// </summary>
    private class RecordReaderWrapper : RecordReader<object, Container<V>>
    {
        private ParquetRecordReader<V> realReader;
        private long splitLen; // for getPos()
        // Shared value holder returned by createValue(); set once the first record is read.
        private Container<V> valueContainer = null;
        // True when the constructor has pre-read the first record that next() must replay.
        private bool firstRecord = false;
        private bool eof = false;
        public RecordReaderWrapper(
            InputSplit oldSplit, JobConf oldJobConf, Reporter reporter)
        {
            splitLen = oldSplit.getLength();
            try
            {
                realReader = new ParquetRecordReader<V>(
                    ParquetInputFormat<V>.getReadSupportInstance(oldJobConf),
                    ParquetInputFormat<V>.getFilter(oldJobConf));
                // Accept either our wrapper or a plain FileSplit; anything else is a caller bug.
                if (oldSplit is ParquetInputSplitWrapper)
                {
                    realReader.initialize(((ParquetInputSplitWrapper)oldSplit).realSplit, oldJobConf, reporter);
                }
                else if (oldSplit is FileSplit)
                {
                    realReader.initialize((FileSplit)oldSplit, oldJobConf, reporter);
                }
                else {
                    throw new ArgumentException(
                        "Invalid split (not a FileSplit or ParquetInputSplitWrapper): " + oldSplit);
                }
                // read once to gain access to key and value objects
                if (realReader.nextKeyValue())
                {
                    firstRecord = true;
                    valueContainer = new Container<V>();
                    valueContainer.set(realReader.getCurrentValue());
                }
                else {
                    eof = true;
                }
            }
            catch (InterruptedException e)
            {
                // Re-assert the interrupt status, then surface the failure as an I/O error.
                Thread.interrupted();
                throw new IOException(e);
            }
        }
        public void close()
        {
            realReader.close();
        }
        // Keys are unused in this format; always null.
        public object createKey()
        {
            return null;
        }
        public Container<V> createValue()
        {
            return valueContainer;
        }
        // Approximate position: progress scaled by the split length.
        public long getPos()
        {
            return (long)(splitLen * getProgress());
        }
        public float getProgress()
        {
            try
            {
                return realReader.getProgress();
            }
            catch (InterruptedException e)
            {
                Thread.interrupted();
                throw new IOException(e);
            }
        }
        /// <summary>Advances to the next record; replays the record pre-read in the constructor first.</summary>
        public bool next(object key, Container<V> value)
        {
            if (eof)
            {
                return false;
            }
            if (firstRecord)
            { // key & value are already read.
                firstRecord = false;
                return true;
            }
            try
            {
                if (realReader.nextKeyValue())
                {
                    if (value != null) value.set(realReader.getCurrentValue());
                    return true;
                }
            }
            catch (InterruptedException e)
            {
                throw new IOException(e);
            }
            eof = true; // strictly not required, just for consistency
            return false;
        }
    }
    /// <summary>True when split planning should happen task-side (the default).</summary>
    public static bool isTaskSideMetaData(JobConf job)
    {
        return job.getBoolean(ParquetInputFormat<V>.TASK_SIDE_METADATA, true);
    }
    /// <summary>Serializable old-API wrapper around a ParquetInputSplit.</summary>
    private class ParquetInputSplitWrapper : InputSplit
    {
        internal ParquetInputSplit realSplit;
        // Parameterless ctor required for Writable-style deserialization via readFields.
        public ParquetInputSplitWrapper() { }
        public ParquetInputSplitWrapper(ParquetInputSplit realSplit)
        {
            this.realSplit = realSplit;
        }
        public override long getLength()
        {
            return realSplit.getLength();
        }
        public string[] getLocations()
        {
            return realSplit.getLocations();
        }
        public void readFields(DataInput @in)
        {
            realSplit = new ParquetInputSplit();
            realSplit.readFields(@in);
        }
        public void write(DataOutput @out)
        {
            realSplit.write(@out);
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO.PortsTests;
using System.Linq;
using System.Threading;
using Legacy.Support;
using Xunit;
namespace System.IO.Ports.Tests
{
public class SerialStream_BeginRead : PortsTest
{
// The number of random bytes to receive for read method testing
private const int numRndBytesToRead = 16;
// The number of random bytes to receive for large input buffer testing
private const int largeNumRndBytesToRead = 2048;
// When we test Read and do not care about actually reading anything we must still
// create a byte array to pass into the method; the following is the size of the
// byte array used in this situation
private const int defaultByteArraySize = 1;
private const int defaultByteOffset = 0;
private const int defaultByteCount = 1;
// The maximum buffer size when an exception is expected
private const int maxBufferSizeForException = 255;
// The maximum buffer size when an exception is not expected
private const int maxBufferSize = 8;
// Maximum time (ms) to wait for processing the read command to complete
private const int MAX_WAIT_READ_COMPLETE = 1000;
#region Test Cases
// Null buffer must be rejected.
[ConditionalFact(nameof(HasOneSerialPort))]
public void Buffer_Null()
{
    VerifyReadException<ArgumentNullException>(null, 0, 1);
}
// Negative offsets must be rejected: -1, a random negative value, and int.MinValue.
[ConditionalFact(nameof(HasOneSerialPort))]
public void Offset_NEG1()
{
    VerifyReadException<ArgumentOutOfRangeException>(new byte[defaultByteArraySize], -1, defaultByteCount);
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void Offset_NEGRND()
{
    var rndGen = new Random(-55);
    VerifyReadException<ArgumentOutOfRangeException>(new byte[defaultByteArraySize], rndGen.Next(int.MinValue, 0), defaultByteCount);
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void Offset_MinInt()
{
    VerifyReadException<ArgumentOutOfRangeException>(new byte[defaultByteArraySize], int.MinValue, defaultByteCount);
}
// Negative counts must be rejected: -1, a random negative value, and int.MinValue.
[ConditionalFact(nameof(HasOneSerialPort))]
public void Count_NEG1()
{
    VerifyReadException<ArgumentOutOfRangeException>(new byte[defaultByteArraySize], defaultByteOffset, -1);
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void Count_NEGRND()
{
    var rndGen = new Random(-55);
    VerifyReadException<ArgumentOutOfRangeException>(new byte[defaultByteArraySize], defaultByteOffset, rndGen.Next(int.MinValue, 0));
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void Count_MinInt()
{
    VerifyReadException<ArgumentOutOfRangeException>(new byte[defaultByteArraySize], defaultByteOffset, int.MinValue);
}
// offset + count exactly one past the end of the buffer must throw.
[ConditionalFact(nameof(HasOneSerialPort))]
public void OffsetCount_EQ_Length_Plus_1()
{
    var rndGen = new Random(-55);
    int bufferLength = rndGen.Next(1, maxBufferSizeForException);
    int offset = rndGen.Next(0, bufferLength);
    int count = bufferLength + 1 - offset;
    VerifyReadException<ArgumentException>(new byte[bufferLength], offset, count);
}
// offset + count well past the end of the buffer must throw.
[ConditionalFact(nameof(HasOneSerialPort))]
public void OffsetCount_GT_Length()
{
    var rndGen = new Random(-55);
    int bufferLength = rndGen.Next(1, maxBufferSizeForException);
    int offset = rndGen.Next(0, bufferLength);
    int count = rndGen.Next(bufferLength + 1 - offset, int.MaxValue);
    VerifyReadException<ArgumentException>(new byte[bufferLength], offset, count);
}
// offset alone past the end of the buffer must throw.
[ConditionalFact(nameof(HasOneSerialPort))]
public void Offset_GT_Length()
{
    var rndGen = new Random(-55);
    int bufferLength = rndGen.Next(1, maxBufferSizeForException);
    int offset = rndGen.Next(bufferLength, int.MaxValue);
    int count = defaultByteCount;
    VerifyReadException<ArgumentException>(new byte[bufferLength], offset, count);
}
// count alone larger than the buffer must throw.
[ConditionalFact(nameof(HasOneSerialPort))]
public void Count_GT_Length()
{
    var rndGen = new Random(-55);
    int bufferLength = rndGen.Next(1, maxBufferSizeForException);
    int offset = defaultByteOffset;
    int count = rndGen.Next(bufferLength + 1, int.MaxValue);
    VerifyReadException<ArgumentException>(new byte[bufferLength], offset, count);
}
// Valid boundary: offset + count lands exactly on the end of the buffer.
[ConditionalFact(nameof(HasNullModem))]
public void OffsetCount_EQ_Length()
{
    var rndGen = new Random(-55);
    int bufferLength = rndGen.Next(1, maxBufferSize);
    int offset = rndGen.Next(0, bufferLength - 1);
    int count = bufferLength - offset;
    VerifyRead(new byte[bufferLength], offset, count);
}
// Valid boundary: reading a single byte into the last slot of the buffer.
[ConditionalFact(nameof(HasNullModem))]
public void Offset_EQ_Length_Minus_1()
{
    var rndGen = new Random(-55);
    int bufferLength = rndGen.Next(1, maxBufferSize);
    int offset = bufferLength - 1;
    var count = 1;
    VerifyRead(new byte[bufferLength], offset, count);
}
// Valid boundary: count covering the entire buffer from offset 0.
[ConditionalFact(nameof(HasNullModem))]
public void Count_EQ_Length()
{
    var rndGen = new Random(-55);
    int bufferLength = rndGen.Next(1, maxBufferSize);
    var offset = 0;
    int count = bufferLength;
    VerifyRead(new byte[bufferLength], offset, count);
}
// Reads a large (2048-byte) buffer in one go.
[ConditionalFact(nameof(HasNullModem))]
public void LargeInputBuffer()
{
    int bufferLength = largeNumRndBytesToRead;
    var offset = 0;
    int count = bufferLength;
    VerifyRead(new byte[bufferLength], offset, count, largeNumRndBytesToRead);
}
// Verifies that BeginRead invokes the supplied AsyncCallback and that the
// IAsyncResult transitions to completed once data arrives from the other port.
[ConditionalFact(nameof(HasNullModem))]
public void Callback()
{
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
    {
        var callbackHandler = new CallbackHandler();
        int elapsedTime;
        Debug.WriteLine("Verifying BeginRead with a callback specified");
        com1.Open();
        com2.Open();
        IAsyncResult readAsyncResult = com1.BaseStream.BeginRead(new byte[numRndBytesToRead], 0, numRndBytesToRead,
            callbackHandler.Callback, null);
        callbackHandler.BeginReadAsyncResult = readAsyncResult;
        Assert.Null(readAsyncResult.AsyncState);
        Assert.False(readAsyncResult.CompletedSynchronously, "Should not have completed sync (read)");
        Assert.False(readAsyncResult.IsCompleted, "Should not have completed yet");
        com2.Write(new byte[numRndBytesToRead], 0, numRndBytesToRead);
        // callbackHandler.ReadAysncResult guarantees that the callback has been called; however,
        // it does not guarantee that the code calling the callback has finished its processing
        IAsyncResult callbackReadAsyncResult = callbackHandler.ReadAysncResult;
        // Now we have to wait for the callbackHandler to complete
        elapsedTime = 0;
        while (!callbackReadAsyncResult.IsCompleted && elapsedTime < MAX_WAIT_READ_COMPLETE)
        {
            Thread.Sleep(10);
            elapsedTime += 10;
        }
        Assert.Null(callbackReadAsyncResult.AsyncState);
        Assert.False(callbackReadAsyncResult.CompletedSynchronously, "Should not have completed sync (cback)");
        Assert.True(callbackReadAsyncResult.IsCompleted, "Should have completed (cback)");
        Assert.Null(readAsyncResult.AsyncState);
        Assert.False(readAsyncResult.CompletedSynchronously, "Should not have completed sync (read)");
        Assert.True(readAsyncResult.IsCompleted, "Should have completed (read)");
    }
}
[ConditionalFact(nameof(HasNullModem))]
public void Callback_EndReadonCallback()
{
    // Same as Callback(), but the CallbackHandler is constructed with com1, which
    // makes it call EndRead from inside the callback itself.
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
    {
        var callbackHandler = new CallbackHandler(com1);
        int elapsedTime;
        Debug.WriteLine("Verifying BeginRead with a callback that calls EndRead");
        com1.Open();
        com2.Open();
        IAsyncResult readAsyncResult = com1.BaseStream.BeginRead(new byte[numRndBytesToRead], 0, numRndBytesToRead,
            callbackHandler.Callback, null);
        callbackHandler.BeginReadAsyncResult = readAsyncResult;
        // No data has been written yet, so the read must still be pending.
        Assert.Null(readAsyncResult.AsyncState);
        Assert.False(readAsyncResult.CompletedSynchronously);
        Assert.False(readAsyncResult.IsCompleted);
        com2.Write(new byte[numRndBytesToRead], 0, numRndBytesToRead);
        // Reading callbackHandler.ReadAysncResult guarantees that the callback has been
        // called; however it does not guarantee that the code calling the callback has
        // finished its processing.
        IAsyncResult callbackReadAsyncResult = callbackHandler.ReadAysncResult;
        // Now we have to wait (bounded by MAX_WAIT_READ_COMPLETE) for the operation
        // to report completion.
        elapsedTime = 0;
        while (!callbackReadAsyncResult.IsCompleted && elapsedTime < MAX_WAIT_READ_COMPLETE)
        {
            Thread.Sleep(10);
            elapsedTime += 10;
        }
        Assert.Null(callbackReadAsyncResult.AsyncState);
        Assert.False(callbackReadAsyncResult.CompletedSynchronously, "Should not have completed sync (cback)");
        Assert.True(callbackReadAsyncResult.IsCompleted, "Should have completed (cback)");
        Assert.Null(readAsyncResult.AsyncState);
        Assert.False(readAsyncResult.CompletedSynchronously, "Should not have completed sync (read)");
        Assert.True(readAsyncResult.IsCompleted, "Should have completed (read)");
    }
}
[ConditionalFact(nameof(HasNullModem))]
public void Callback_State()
{
    // Same as Callback(), but also passes a state object (this test instance) and
    // verifies it is surfaced unchanged through IAsyncResult.AsyncState.
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
    {
        var callbackHandler = new CallbackHandler();
        int elapsedTime;
        Debug.WriteLine("Verifying BeginRead with a callback and state specified");
        com1.Open();
        com2.Open();
        IAsyncResult readAsyncResult = com1.BaseStream.BeginRead(new byte[numRndBytesToRead], 0, numRndBytesToRead,
            callbackHandler.Callback, this);
        callbackHandler.BeginReadAsyncResult = readAsyncResult;
        // No data has been written yet, so the read must still be pending.
        Assert.Equal(this, readAsyncResult.AsyncState);
        Assert.False(readAsyncResult.CompletedSynchronously);
        Assert.False(readAsyncResult.IsCompleted);
        com2.Write(new byte[numRndBytesToRead], 0, numRndBytesToRead);
        // Reading callbackHandler.ReadAysncResult guarantees that the callback has been
        // called; however it does not guarantee that the code calling the callback has
        // finished its processing.
        IAsyncResult callbackReadAsyncResult = callbackHandler.ReadAysncResult;
        // Now we have to wait (bounded by MAX_WAIT_READ_COMPLETE) for the operation
        // to report completion.
        elapsedTime = 0;
        while (!callbackReadAsyncResult.IsCompleted && elapsedTime < MAX_WAIT_READ_COMPLETE)
        {
            Thread.Sleep(10);
            elapsedTime += 10;
        }
        Assert.Equal(this, callbackReadAsyncResult.AsyncState);
        Assert.False(callbackReadAsyncResult.CompletedSynchronously);
        Assert.True(callbackReadAsyncResult.IsCompleted);
        Assert.Equal(this, readAsyncResult.AsyncState);
        Assert.False(readAsyncResult.CompletedSynchronously);
        Assert.True(readAsyncResult.IsCompleted);
    }
}
#endregion
#region Verification for Test Cases
// Opens the first available port and asserts that BeginRead with the given
// (buffer, offset, count) arguments throws the expected exception type T.
// Fix: corrected the "buffer.Lenght" typo in the diagnostic output.
private void VerifyReadException<T>(byte[] buffer, int offset, int count) where T : Exception
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        int bufferLength = null == buffer ? 0 : buffer.Length;
        Debug.WriteLine("Verifying read method throws {0} buffer.Length={1}, offset={2}, count={3}",
            typeof(T), bufferLength, offset, count);
        com.Open();
        Action a = () => com.BaseStream.BeginRead(buffer, offset, count, null, null);
        Assert.Throws<T>(a);
    }
}
// Convenience overload: reads the default number of random bytes.
private void VerifyRead(byte[] buffer, int offset, int count) =>
    VerifyRead(buffer, offset, count, numRndBytesToRead);
// Writes numberOfBytesToRead random bytes from com2 and verifies that com1 can
// read them back through BeginRead/EndRead into the given region of buffer.
// Fix: corrected the "buffer.Lenght" typo in the diagnostic output.
private void VerifyRead(byte[] buffer, int offset, int count, int numberOfBytesToRead)
{
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
    {
        var rndGen = new Random(-55);
        var bytesToWrite = new byte[numberOfBytesToRead];

        // Generate the random payload that com2 will transmit.
        for (var i = 0; i < bytesToWrite.Length; i++)
        {
            var randByte = (byte)rndGen.Next(0, 256);
            bytesToWrite[i] = randByte;
        }

        // Pre-fill the receive buffer with random bytes so VerifyBuffer can detect
        // any writes outside the [offset, offset + count) window.
        rndGen.NextBytes(buffer);

        Debug.WriteLine("Verifying read method buffer.Length={0}, offset={1}, count={2} with {3} random chars", buffer.Length, offset, count, bytesToWrite.Length);

        com1.ReadTimeout = 500;
        com1.Open();
        com2.Open();
        VerifyBytesReadOnCom1FromCom2(com1, com2, bytesToWrite, buffer, offset, count);
    }
}
// Core verification loop: com2 transmits bytesToWrite; com1 drains its input buffer
// with repeated BeginRead/EndRead calls into rcvBuffer[offset..offset+count) and
// each iteration validates the IAsyncResults, the untouched buffer regions, and
// the remaining BytesToRead count. Finally the reassembled data must equal the
// transmitted data.
private void VerifyBytesReadOnCom1FromCom2(SerialPort com1, SerialPort com2, byte[] bytesToWrite, byte[] rcvBuffer, int offset, int count)
{
    var buffer = new byte[bytesToWrite.Length];
    int totalBytesRead;
    int bytesToRead;
    var oldRcvBuffer = (byte[])rcvBuffer.Clone();
    var callbackHandler = new CallbackHandler();

    com2.Write(bytesToWrite, 0, bytesToWrite.Length);
    com1.ReadTimeout = 500;

    // Wait for the transmission to arrive: bytes * ~10 bits-per-byte / baud rate,
    // plus a small fixed padding.
    Thread.Sleep((int)(((bytesToWrite.Length * 10.0) / com1.BaudRate) * 1000) + 250);

    totalBytesRead = 0;
    bytesToRead = com1.BytesToRead;

    do
    {
        IAsyncResult readAsyncResult = com1.BaseStream.BeginRead(rcvBuffer, offset, count,
            callbackHandler.Callback, this);
        readAsyncResult.AsyncWaitHandle.WaitOne();
        // Publishing the result unblocks the callback handler's validation.
        callbackHandler.BeginReadAsyncResult = readAsyncResult;

        int bytesRead = com1.BaseStream.EndRead(readAsyncResult);

        // Validate the IAsyncResult the callback received...
        IAsyncResult asyncResult = callbackHandler.ReadAysncResult;
        Assert.Equal(this, asyncResult.AsyncState);
        Assert.False(asyncResult.CompletedSynchronously);
        Assert.True(asyncResult.IsCompleted);

        // ...and the one BeginRead returned.
        Assert.Equal(this, readAsyncResult.AsyncState);
        Assert.False(readAsyncResult.CompletedSynchronously);
        Assert.True(readAsyncResult.IsCompleted);

        // The read must return either `count` bytes (buffer-limited) or everything
        // that was buffered on the port.
        if ((bytesToRead > bytesRead && count != bytesRead) ||
            (bytesToRead <= bytesRead && bytesRead != bytesToRead))
        {
            // If we have not read all of the characters that we should have
            Fail("ERROR!!!: Read did not return all of the characters that were in SerialPort buffer");
        }

        if (bytesToWrite.Length < totalBytesRead + bytesRead)
        {
            // If we have read in more characters than we expect
            Fail("ERROR!!!: We have received more characters then were sent");
        }

        // Only bytes inside [offset, offset + bytesRead) may have changed.
        VerifyBuffer(rcvBuffer, oldRcvBuffer, offset, bytesRead);

        Array.Copy(rcvBuffer, offset, buffer, totalBytesRead, bytesRead);
        totalBytesRead += bytesRead;

        if (bytesToWrite.Length - totalBytesRead != com1.BytesToRead)
        {
            Fail("ERROR!!!: Expected BytesToRead={0} actual={1}", bytesToWrite.Length - totalBytesRead, com1.BytesToRead);
        }

        oldRcvBuffer = (byte[])rcvBuffer.Clone();
        bytesToRead = com1.BytesToRead;
    } while (0 != com1.BytesToRead); // While there are more bytes to read

    // Compare the bytes that were written with the ones we read
    Assert.Equal(bytesToWrite, buffer.Take(bytesToWrite.Length).ToArray());
}
// Asserts the read only modified bytes inside [offset, offset + count): every
// byte before the offset and every byte after offset + count must be untouched.
private void VerifyBuffer(byte[] actualBuffer, byte[] expectedBuffer, int offset, int count)
{
    for (var index = 0; index < actualBuffer.Length; index++)
    {
        // Bytes inside the read window are allowed to change.
        if (index >= offset && index < offset + count)
            continue;

        if (actualBuffer[index] != expectedBuffer[index])
        {
            Fail("ERROR!!!: Expected {0} in buffer at {1} actual {2}", (int)expectedBuffer[index], index, (int)actualBuffer[index]);
        }
    }
}
// Synchronizes the async-callback thread with the test thread. The callback blocks
// until the test publishes the IAsyncResult returned by BeginRead, then validates
// it (optionally calling EndRead when a SerialPort was supplied).
// Fixes: locks on a private gate object instead of `this` (lock(this) anti-pattern),
// and corrects three inverted Fail messages — each check fires when the result is
// NOT completed, so the expectation is that it *is* completed.
private class CallbackHandler
{
    private readonly object _lock = new object();
    private IAsyncResult _readAysncResult;
    private IAsyncResult _beginReadAsyncResult;
    private readonly SerialPort _com;

    public CallbackHandler() : this(null) { }

    // When com is non-null the handler calls EndRead on it from inside the callback.
    public CallbackHandler(SerialPort com)
    {
        _com = com;
    }

    public void Callback(IAsyncResult readAysncResult)
    {
        lock (_lock)
        {
            _readAysncResult = readAysncResult;
            Assert.True(readAysncResult.IsCompleted, "IAsyncResult passed into callback is not completed");

            // Wait until the test thread has published BeginRead's IAsyncResult so
            // it can be validated below.
            while (null == _beginReadAsyncResult)
            {
                Monitor.Wait(_lock);
            }

            if (null != _beginReadAsyncResult && !_beginReadAsyncResult.IsCompleted)
            {
                Fail("Err_7907azpu Expected IAsyncResult returned from begin read to be completed");
            }

            if (null != _com)
            {
                _com.BaseStream.EndRead(_beginReadAsyncResult);

                if (!_beginReadAsyncResult.IsCompleted)
                {
                    Fail("Err_6498afead Expected IAsyncResult returned from begin read to be completed");
                }

                if (!readAysncResult.IsCompleted)
                {
                    Fail("Err_1398ehpo Expected IAsyncResult passed into callback to be completed");
                }
            }

            // Wake up any thread blocked in ReadAysncResult.
            Monitor.Pulse(_lock);
        }
    }

    // Blocks until the callback has run, then returns the IAsyncResult it received.
    public IAsyncResult ReadAysncResult
    {
        get
        {
            lock (_lock)
            {
                while (null == _readAysncResult)
                {
                    Monitor.Wait(_lock);
                }

                return _readAysncResult;
            }
        }
    }

    // The IAsyncResult returned by BeginRead; setting it unblocks the callback.
    public IAsyncResult BeginReadAsyncResult
    {
        get
        {
            return _beginReadAsyncResult;
        }
        set
        {
            lock (_lock)
            {
                _beginReadAsyncResult = value;
                Monitor.Pulse(_lock);
            }
        }
    }
}
#endregion
}
}
| |
using System.Security;
using System;
using System.Runtime.InteropServices;
/// <summary>
/// Tests for Marshal.GetLastWin32Error: verifies that the last error code set by
/// P/Invoke calls marked SetLastError = true is observable from managed code.
/// </summary>
[SecuritySafeCritical]
public class MarshalGetLastWin32Error
{
    #region Private Fields
    // Length range for the random file name in PosTest2; ~300 chars exceeds MAX_PATH,
    // forcing CreateFile to fail and set a last-error code.
    private const int c_MIN_STRING_LENGTH = 300;
    private const int c_MAX_STRING_LENGTH = 301;
    // Win32 CreateFile constants.
    private const int CREATE_ALWAYS = unchecked((int)0x2);
    private const int FILE_ATTRIBUTE_NORMAL = unchecked((int)0x00000080L);
    #endregion

    #region Public Methods
    /// <summary>
    /// Runs the scenarios appropriate for the current platform.
    /// </summary>
    /// <returns>true when every scenario passed.</returns>
    public bool RunTests()
    {
        bool retVal = true;

        TestLibrary.TestFramework.LogInformation("[Positive]");
        if (TestLibrary.Utilities.IsWindows)
        {
            retVal = PosTest1() && retVal;
            retVal = PosTest2() && retVal;
            retVal = PosTest3() && retVal;
        }
#if !WinCoreSys
        else
        {
            retVal = PosMacTest1() && retVal;
        }
#endif
        return retVal;
    }

    #region Positive Test Cases
    /// <summary>
    /// A failing CopyFile (the source file does not exist) must leave a non-zero
    /// code retrievable through Marshal.GetLastWin32Error.
    /// </summary>
    public bool PosTest1()
    {
        bool retVal = true;
        string filePath = null;

        TestLibrary.TestFramework.BeginScenario("PosTest1: Call GetLastWin32Error for P/Invoke functions with SetLastError flags sets to true");

        try
        {
            // The source file does not exist, so this call fails and sets the last error.
            CopyFile("MarshalGetLastWin32Error_DoesnotExist1.tx", "MarshalGetLastWin32Error_DoesnotExist2.txt", true);
            if (Marshal.GetLastWin32Error() == 0)
            {
                // Fix: the original message was garbled and stated the opposite of the check.
                TestLibrary.TestFramework.LogError("001.1", "Call GetLastWin32Error for P/Invoke functions with SetLastError flags sets to true returned 0 even though the call failed");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("001.0", "Unexpected exception: " + e);
            TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] filePath = " + filePath);
            TestLibrary.TestFramework.LogInformation(e.StackTrace);
            retVal = false;
        }
        finally
        {
            // Reset so later scenarios start from a clean last-error state.
            SetLastError(0);
        }

        return retVal;
    }

    /// <summary>
    /// GetProcessHeap sets the last error only on failure; a CreateFile call with an
    /// over-long random path must fail and produce a non-zero last error.
    /// </summary>
    public bool PosTest2()
    {
        bool retVal = true;
        string filePath = null;

        TestLibrary.TestFramework.BeginScenario("PosTest2: Call GetLastWin32Error for P/Invoke functions with SetLastError flags sets to true");

        try
        {
            /*
             * GetProcessHeap sets last error only if it fails.
             * In case of success, the value is not set. So we are checking only failure scenario here.
             */
            IntPtr pHandle = GetProcessHeap();
            // Fix: IntPtr is a value type, so `pHandle == null` was always false (CS0472);
            // compare against IntPtr.Zero instead.
            if ((pHandle == IntPtr.Zero) && (Marshal.GetLastWin32Error() == 0))
            {
                TestLibrary.TestFramework.LogError("002.1", "Call GetLastWin32Error for P/Invoke functions with SetLastError flags sets to true returned 0 even if call failed");
                retVal = false;
            }

            // A ~300-character file name exceeds MAX_PATH, so CreateFile fails.
            filePath = TestLibrary.Generator.GetString(-55, true, c_MIN_STRING_LENGTH, c_MAX_STRING_LENGTH);
            CreateFile(filePath, 0, 0, IntPtr.Zero, CREATE_ALWAYS, 0, IntPtr.Zero);
            if (Marshal.GetLastWin32Error() == 0)
            {
                TestLibrary.TestFramework.LogError("002.2", "Call GetLastWin32Error for P/Invoke functions with SetLastError flags sets to true returns 0");
                TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] filePath = " + filePath);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("002.0", "Unexpected exception: " + e);
            TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] filePath = " + filePath);
            TestLibrary.TestFramework.LogInformation(e.StackTrace);
            retVal = false;
        }
        finally
        {
            SetLastError(0);
        }

        return retVal;
    }

    /// <summary>
    /// A value passed to SetLastError must round-trip through Marshal.GetLastWin32Error.
    /// </summary>
    public bool PosTest3()
    {
        bool retVal = true;
        uint desiredCode = 0;

        TestLibrary.TestFramework.BeginScenario("PosTest3: Call GetLastWin32Error for P/Invoke functions with after call SetLastError");

        try
        {
            desiredCode = (uint)TestLibrary.Generator.GetInt32(-55);
            SetLastError(desiredCode);
            int actualCode = Marshal.GetLastWin32Error();

            // Fix: compare bit patterns. A desiredCode above int.MaxValue comes back as
            // a negative int; the original uint-vs-int comparison (both promoted to
            // long) would misreport such values as mismatches.
            if (desiredCode != (uint)actualCode)
            {
                TestLibrary.TestFramework.LogError("003.1", "Call GetLastWin32Error for P/Invoke functions with after call SetLastError returns wrong value");
                TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] desiredCode = " + desiredCode + ", actualCode = " + actualCode);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("003.0", "Unexpected exception: " + e);
            TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] desiredCode = " + desiredCode);
            TestLibrary.TestFramework.LogInformation(e.StackTrace);
            retVal = false;
        }

        return retVal;
    }

#if !WinCoreSys
    /// <summary>
    /// On non-Windows platforms a successful getenv call must leave the last error at 0.
    /// </summary>
    public bool PosMacTest1()
    {
        bool retVal = true;
        string pwd = null;

        TestLibrary.TestFramework.BeginScenario("PosMacTest1: Call GetLastWin32Error for P/Invoke functions with SetLastError flags sets to true");

        try
        {
            // Fix: getenv returns a pointer into the process environment block; it is
            // marshaled manually so the runtime does not free memory owned by libc.
            pwd = Marshal.PtrToStringAnsi(getenv("PWD"));
            if (Marshal.GetLastWin32Error() != 0)
            {
                // Fix: message previously garbled ("dose not ... successed").
                TestLibrary.TestFramework.LogError("004.1", "Call GetLastWin32Error for P/Invoke functions with SetLastError flags sets to true returned a non-zero value after a successful call");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("004.0", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }
#endif
    #endregion
    #endregion

    /// <summary>
    /// Entry point. Returns 100 on success (test-harness convention), 0 on failure.
    /// </summary>
    public static int Main()
    {
        MarshalGetLastWin32Error test = new MarshalGetLastWin32Error();

        TestLibrary.TestFramework.BeginTestCase("MarshalGetLastWin32Error");

        if (test.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    #region Private Methods
#if !WinCoreSys
    // Fix: declared to return IntPtr instead of string. With a string return type the
    // interop marshaler would copy the text and then free the native pointer — but
    // getenv returns memory owned by libc, so that free corrupts the heap.
    [SecurityCritical]
    [DllImport("/usr/lib/libc.dylib")]
    private static extern IntPtr getenv(string key);
#endif

    [DllImport("kernel32.dll", SetLastError = true)]
    private extern static void SetLastError(uint dwErrCode);

    [DllImport("kernel32.dll", SetLastError = true)]
    private extern static IntPtr CreateFile(
        string lpFileName,
        uint dwDesiredAccess,
        uint dwShareMode,
        IntPtr lpSecurityAttributes,
        uint dwCreationDisposition,
        uint dwFlagsAndAttributes,
        IntPtr hTemplateFile);

    [DllImport("Kernel32.dll", SetLastError=true)]
    private extern static IntPtr GetProcessHeap();

    [DllImport("Kernel32.dll", SetLastError = true)]
    private extern static bool CopyFile(string lpExistingFileName, string lpNewFileName, bool bFailIfExists);
    #endregion
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Input.Bindings;
using osu.Framework.MathUtils;
using osu.Framework.Threading;
using osu.Game.Beatmaps;
using osu.Game.Input.Bindings;
using osu.Game.Overlays.OSD;
using osu.Game.Rulesets.Mods;
namespace osu.Game.Overlays
{
/// <summary>
/// Handles playback of the global music track.
/// </summary>
public class MusicController : Component, IKeyBindingHandler<GlobalAction>
{
    [Resolved]
    private BeatmapManager beatmaps { get; set; }

    /// <summary>
    /// The playlist of available beatmap sets, in playback order (read-only view).
    /// </summary>
    public IBindableList<BeatmapSetInfo> BeatmapSets => beatmapSets;

    private readonly BindableList<BeatmapSetInfo> beatmapSets = new BindableList<BeatmapSetInfo>();

    /// <summary>
    /// Whether playback was last stopped by an explicit user pause (via <see cref="TogglePause"/>).
    /// </summary>
    public bool IsUserPaused { get; private set; }

    /// <summary>
    /// Fired when the global <see cref="WorkingBeatmap"/> has changed.
    /// Includes direction information for display purposes.
    /// </summary>
    public event Action<WorkingBeatmap, TrackChangeDirection> TrackChanged;

    [Resolved]
    private IBindable<WorkingBeatmap> beatmap { get; set; }

    [Resolved]
    private IBindable<IReadOnlyList<Mod>> mods { get; set; }

    [Resolved(canBeNull: true)]
    private OnScreenDisplay onScreenDisplay { get; set; }

    [BackgroundDependencyLoader]
    private void load()
    {
        // Populate the playlist with all usable sets in a random order, then keep
        // it in sync with additions/removals from the beatmap store.
        beatmapSets.AddRange(beatmaps.GetAllUsableBeatmapSets().OrderBy(_ => RNG.Next()));
        beatmaps.ItemAdded += handleBeatmapAdded;
        beatmaps.ItemRemoved += handleBeatmapRemoved;
    }

    protected override void LoadComplete()
    {
        beatmap.BindValueChanged(beatmapChanged, true);
        // Mods may carry clock adjustments, so reapply them whenever mods change.
        mods.BindValueChanged(_ => ResetTrackAdjustments(), true);
        base.LoadComplete();
    }

    /// <summary>
    /// Change the position of a <see cref="BeatmapSetInfo"/> in the current playlist.
    /// </summary>
    /// <param name="beatmapSetInfo">The beatmap to move.</param>
    /// <param name="index">The new position.</param>
    public void ChangeBeatmapSetPosition(BeatmapSetInfo beatmapSetInfo, int index)
    {
        beatmapSets.Remove(beatmapSetInfo);
        beatmapSets.Insert(index, beatmapSetInfo);
    }

    /// <summary>
    /// Returns whether the current beatmap track is playing.
    /// </summary>
    public bool IsPlaying => current?.Track.IsRunning ?? false;

    // Playlist mutations are deferred via Schedule() so they run on this
    // component's update context rather than the store's notification thread.
    private void handleBeatmapAdded(BeatmapSetInfo set) =>
        Schedule(() => beatmapSets.Add(set));

    private void handleBeatmapRemoved(BeatmapSetInfo set) =>
        Schedule(() => beatmapSets.RemoveAll(s => s.ID == set.ID));

    private ScheduledDelegate seekDelegate;

    /// <summary>
    /// Seek the current track to the given position (track time units — presumably
    /// milliseconds; confirm against Track.Seek). Rapid successive calls are
    /// coalesced by cancelling the previously scheduled seek.
    /// </summary>
    public void SeekTo(double position)
    {
        seekDelegate?.Cancel();
        seekDelegate = Schedule(() =>
        {
            if (!beatmap.Disabled)
                current?.Track.Seek(position);
        });
    }

    /// <summary>
    /// Start playing the current track (if not already playing).
    /// </summary>
    public void Play()
    {
        if (!IsPlaying)
            TogglePause();
    }

    /// <summary>
    /// Toggle pause / play.
    /// </summary>
    /// <returns>Whether the operation was successful.</returns>
    public bool TogglePause()
    {
        var track = current?.Track;

        if (track == null)
        {
            // No track loaded: begin playback by advancing to the next track,
            // unless beatmap changes are currently disabled.
            if (beatmap.Disabled)
                return false;

            next(true);
            return true;
        }

        if (track.IsRunning)
        {
            IsUserPaused = true;
            track.Stop();
        }
        else
        {
            track.Start();
            IsUserPaused = false;
        }

        return true;
    }

    /// <summary>
    /// Play the previous track.
    /// </summary>
    /// <returns>Whether the operation was successful.</returns>
    public bool PrevTrack()
    {
        queuedDirection = TrackChangeDirection.Prev;

        // The set preceding the current one; wraps around to the last set.
        // NOTE(review): dereferences `current` without a null check — verify callers
        // never invoke this before the first track is assigned.
        var playable = BeatmapSets.TakeWhile(i => i.ID != current.BeatmapSetInfo.ID).LastOrDefault() ?? BeatmapSets.LastOrDefault();

        if (playable != null)
        {
            if (beatmap is Bindable<WorkingBeatmap> working)
                working.Value = beatmaps.GetWorkingBeatmap(playable.Beatmaps.First(), beatmap.Value);
            beatmap.Value.Track.Restart();

            return true;
        }

        return false;
    }

    /// <summary>
    /// Play the next random or playlist track.
    /// </summary>
    /// <returns>Whether the operation was successful.</returns>
    public bool NextTrack() => next();

    // instant: when true, skips queueing a "Next" direction (used for the initial
    // track started from TogglePause).
    private bool next(bool instant = false)
    {
        if (!instant)
            queuedDirection = TrackChangeDirection.Next;

        // The set following the current one; wraps around to the first set.
        // NOTE(review): dereferences `current` without a null check — see PrevTrack.
        var playable = BeatmapSets.SkipWhile(i => i.ID != current.BeatmapSetInfo.ID).Skip(1).FirstOrDefault() ?? BeatmapSets.FirstOrDefault();

        if (playable != null)
        {
            if (beatmap is Bindable<WorkingBeatmap> working)
                working.Value = beatmaps.GetWorkingBeatmap(playable.Beatmaps.First(), beatmap.Value);
            beatmap.Value.Track.Restart();

            return true;
        }

        return false;
    }

    // The working beatmap whose track is currently under this controller's control.
    private WorkingBeatmap current;

    // Direction queued by Prev/NextTrack, consumed by beatmapChanged for display.
    private TrackChangeDirection? queuedDirection;

    private void beatmapChanged(ValueChangedEvent<WorkingBeatmap> beatmap)
    {
        TrackChangeDirection direction = TrackChangeDirection.None;

        if (current != null)
        {
            bool audioEquals = beatmap.NewValue?.BeatmapInfo?.AudioEquals(current.BeatmapInfo) ?? false;

            if (audioEquals)
                direction = TrackChangeDirection.None;
            else if (queuedDirection.HasValue)
            {
                direction = queuedDirection.Value;
                queuedDirection = null;
            }
            else
            {
                // Figure out the best direction based on order in the playlist:
                // compare the playlist indices of the old and new sets.
                var last = BeatmapSets.TakeWhile(b => b.ID != current.BeatmapSetInfo?.ID).Count();
                var next = beatmap.NewValue == null ? -1 : BeatmapSets.TakeWhile(b => b.ID != beatmap.NewValue.BeatmapSetInfo?.ID).Count();

                direction = last > next ? TrackChangeDirection.Prev : TrackChangeDirection.Next;
            }
        }

        current = beatmap.NewValue;
        TrackChanged?.Invoke(current, direction);

        ResetTrackAdjustments();

        queuedDirection = null;
    }

    /// <summary>
    /// Clears all speed adjustments on the current track, then reapplies those
    /// required by the active mods.
    /// </summary>
    public void ResetTrackAdjustments()
    {
        var track = current?.Track;
        if (track == null)
            return;

        track.ResetSpeedAdjustments();

        foreach (var mod in mods.Value.OfType<IApplicableToClock>())
            mod.ApplyToClock(track);
    }

    protected override void Dispose(bool isDisposing)
    {
        base.Dispose(isDisposing);

        // Unsubscribe so the manager's events cannot keep this component alive.
        if (beatmaps != null)
        {
            beatmaps.ItemAdded -= handleBeatmapAdded;
            beatmaps.ItemRemoved -= handleBeatmapRemoved;
        }
    }

    public bool OnPressed(GlobalAction action)
    {
        // Ignore music hotkeys while beatmap changes are disabled (e.g. in gameplay).
        if (beatmap.Disabled)
            return false;

        switch (action)
        {
            case GlobalAction.MusicPlay:
                if (TogglePause())
                    onScreenDisplay?.Display(new MusicControllerToast(IsPlaying ? "Play track" : "Pause track"));
                return true;

            case GlobalAction.MusicNext:
                if (NextTrack())
                    onScreenDisplay?.Display(new MusicControllerToast("Next track"));
                return true;

            case GlobalAction.MusicPrev:
                if (PrevTrack())
                    onScreenDisplay?.Display(new MusicControllerToast("Previous track"));
                return true;
        }

        return false;
    }

    public bool OnReleased(GlobalAction action) => false;

    // On-screen toast shown when playback is changed via a global hotkey.
    public class MusicControllerToast : Toast
    {
        public MusicControllerToast(string action)
            : base("Music Playback", action, string.Empty)
        {
        }
    }
}
/// <summary>
/// The perceived direction of a track change, used for display purposes.
/// </summary>
public enum TrackChangeDirection
{
    /// <summary>No perceptible change (e.g. same audio track).</summary>
    None,
    /// <summary>Moved forward in the playlist.</summary>
    Next,
    /// <summary>Moved backward in the playlist.</summary>
    Prev
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.Compute.V1.Tests
{
/// <summary>Generated unit tests.</summary>
// NOTE: this class lives in a generated file ("Generated code. DO NOT EDIT!");
// only comments are added here — regenerate rather than hand-edit the code.
public sealed class GeneratedInstanceGroupManagersClientTest
{
    // Verifies the synchronous Get overload that takes a request object.
    [xunit::FactAttribute]
    public void GetRequestObject()
    {
        // Arrange: strict mock of the underlying gRPC client returning a canned response.
        moq::Mock<InstanceGroupManagers.InstanceGroupManagersClient> mockGrpcClient = new moq::Mock<InstanceGroupManagers.InstanceGroupManagersClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForZoneOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetInstanceGroupManagerRequest request = new GetInstanceGroupManagerRequest
        {
            Zone = "zone255f4ea8",
            Project = "projectaa6ff846",
            InstanceGroupManager = "instance_group_manager71b45dfc",
        };
        InstanceGroupManager expectedResponse = new InstanceGroupManager
        {
            Id = 11672635353343658936UL,
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            Zone = "zone255f4ea8",
            CreationTimestamp = "creation_timestamp235e59a1",
            StatefulPolicy = new StatefulPolicy(),
            TargetSize = -93132225,
            InstanceGroup = "instance_group6bf5a5ef",
            Region = "regionedb20d96",
            Versions =
            {
                new InstanceGroupManagerVersion(),
            },
            CurrentActions = new InstanceGroupManagerActionsSummary(),
            UpdatePolicy = new InstanceGroupManagerUpdatePolicy(),
            Status = new InstanceGroupManagerStatus(),
            Fingerprint = "fingerprint009e6052",
            InstanceTemplate = "instance_template6cae3083",
            TargetPools =
            {
                "target_pools6fc69e1f",
            },
            BaseInstanceName = "base_instance_name7c1f304c",
            Description = "description2cf9da67",
            NamedPorts = { new NamedPort(), },
            SelfLink = "self_link7e87f12d",
            AutoHealingPolicies =
            {
                new InstanceGroupManagerAutoHealingPolicy(),
            },
            DistributionPolicy = new DistributionPolicy(),
        };
        mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        InstanceGroupManagersClient client = new InstanceGroupManagersClientImpl(mockGrpcClient.Object, null);
        // Act + Assert: the client must return the gRPC response unmodified.
        InstanceGroupManager response = client.Get(request);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // Verifies the asynchronous Get overload that takes a request object, via both
    // CallSettings and CancellationToken entry points.
    [xunit::FactAttribute]
    public async stt::Task GetRequestObjectAsync()
    {
        // Arrange: strict mock of the underlying gRPC client returning a canned response.
        moq::Mock<InstanceGroupManagers.InstanceGroupManagersClient> mockGrpcClient = new moq::Mock<InstanceGroupManagers.InstanceGroupManagersClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForZoneOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetInstanceGroupManagerRequest request = new GetInstanceGroupManagerRequest
        {
            Zone = "zone255f4ea8",
            Project = "projectaa6ff846",
            InstanceGroupManager = "instance_group_manager71b45dfc",
        };
        InstanceGroupManager expectedResponse = new InstanceGroupManager
        {
            Id = 11672635353343658936UL,
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            Zone = "zone255f4ea8",
            CreationTimestamp = "creation_timestamp235e59a1",
            StatefulPolicy = new StatefulPolicy(),
            TargetSize = -93132225,
            InstanceGroup = "instance_group6bf5a5ef",
            Region = "regionedb20d96",
            Versions =
            {
                new InstanceGroupManagerVersion(),
            },
            CurrentActions = new InstanceGroupManagerActionsSummary(),
            UpdatePolicy = new InstanceGroupManagerUpdatePolicy(),
            Status = new InstanceGroupManagerStatus(),
            Fingerprint = "fingerprint009e6052",
            InstanceTemplate = "instance_template6cae3083",
            TargetPools =
            {
                "target_pools6fc69e1f",
            },
            BaseInstanceName = "base_instance_name7c1f304c",
            Description = "description2cf9da67",
            NamedPorts = { new NamedPort(), },
            SelfLink = "self_link7e87f12d",
            AutoHealingPolicies =
            {
                new InstanceGroupManagerAutoHealingPolicy(),
            },
            DistributionPolicy = new DistributionPolicy(),
        };
        mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<InstanceGroupManager>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        InstanceGroupManagersClient client = new InstanceGroupManagersClientImpl(mockGrpcClient.Object, null);
        // Act + Assert: both async entry points must surface the same response.
        InstanceGroupManager responseCallSettings = await client.GetAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        InstanceGroupManager responseCancellationToken = await client.GetAsync(request, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }

    // Verifies the synchronous Get overload that takes flattened string arguments.
    [xunit::FactAttribute]
    public void Get()
    {
        // Arrange: strict mock of the underlying gRPC client returning a canned response.
        moq::Mock<InstanceGroupManagers.InstanceGroupManagersClient> mockGrpcClient = new moq::Mock<InstanceGroupManagers.InstanceGroupManagersClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForZoneOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetInstanceGroupManagerRequest request = new GetInstanceGroupManagerRequest
        {
            Zone = "zone255f4ea8",
            Project = "projectaa6ff846",
            InstanceGroupManager = "instance_group_manager71b45dfc",
        };
        InstanceGroupManager expectedResponse = new InstanceGroupManager
        {
            Id = 11672635353343658936UL,
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            Zone = "zone255f4ea8",
            CreationTimestamp = "creation_timestamp235e59a1",
            StatefulPolicy = new StatefulPolicy(),
            TargetSize = -93132225,
            InstanceGroup = "instance_group6bf5a5ef",
            Region = "regionedb20d96",
            Versions =
            {
                new InstanceGroupManagerVersion(),
            },
            CurrentActions = new InstanceGroupManagerActionsSummary(),
            UpdatePolicy = new InstanceGroupManagerUpdatePolicy(),
            Status = new InstanceGroupManagerStatus(),
            Fingerprint = "fingerprint009e6052",
            InstanceTemplate = "instance_template6cae3083",
            TargetPools =
            {
                "target_pools6fc69e1f",
            },
            BaseInstanceName = "base_instance_name7c1f304c",
            Description = "description2cf9da67",
            NamedPorts = { new NamedPort(), },
            SelfLink = "self_link7e87f12d",
            AutoHealingPolicies =
            {
                new InstanceGroupManagerAutoHealingPolicy(),
            },
            DistributionPolicy = new DistributionPolicy(),
        };
        mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        InstanceGroupManagersClient client = new InstanceGroupManagersClientImpl(mockGrpcClient.Object, null);
        // Act + Assert: the flattened overload must build an equivalent request.
        InstanceGroupManager response = client.Get(request.Project, request.Zone, request.InstanceGroupManager);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // Verifies the asynchronous Get overload that takes flattened string arguments,
    // via both CallSettings and CancellationToken entry points.
    [xunit::FactAttribute]
    public async stt::Task GetAsync()
    {
        // Arrange: strict mock of the underlying gRPC client returning a canned response.
        moq::Mock<InstanceGroupManagers.InstanceGroupManagersClient> mockGrpcClient = new moq::Mock<InstanceGroupManagers.InstanceGroupManagersClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForZoneOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetInstanceGroupManagerRequest request = new GetInstanceGroupManagerRequest
        {
            Zone = "zone255f4ea8",
            Project = "projectaa6ff846",
            InstanceGroupManager = "instance_group_manager71b45dfc",
        };
        InstanceGroupManager expectedResponse = new InstanceGroupManager
        {
            Id = 11672635353343658936UL,
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            Zone = "zone255f4ea8",
            CreationTimestamp = "creation_timestamp235e59a1",
            StatefulPolicy = new StatefulPolicy(),
            TargetSize = -93132225,
            InstanceGroup = "instance_group6bf5a5ef",
            Region = "regionedb20d96",
            Versions =
            {
                new InstanceGroupManagerVersion(),
            },
            CurrentActions = new InstanceGroupManagerActionsSummary(),
            UpdatePolicy = new InstanceGroupManagerUpdatePolicy(),
            Status = new InstanceGroupManagerStatus(),
            Fingerprint = "fingerprint009e6052",
            InstanceTemplate = "instance_template6cae3083",
            TargetPools =
            {
                "target_pools6fc69e1f",
            },
            BaseInstanceName = "base_instance_name7c1f304c",
            Description = "description2cf9da67",
            NamedPorts = { new NamedPort(), },
            SelfLink = "self_link7e87f12d",
            AutoHealingPolicies =
            {
                new InstanceGroupManagerAutoHealingPolicy(),
            },
            DistributionPolicy = new DistributionPolicy(),
        };
        mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<InstanceGroupManager>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        InstanceGroupManagersClient client = new InstanceGroupManagersClientImpl(mockGrpcClient.Object, null);
        // Act + Assert: both async entry points must surface the same response.
        InstanceGroupManager responseCallSettings = await client.GetAsync(request.Project, request.Zone, request.InstanceGroupManager, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        InstanceGroupManager responseCancellationToken = await client.GetAsync(request.Project, request.Zone, request.InstanceGroupManager, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }
}
}
| |
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Build.Framework;
using Microsoft.Build.Utilities;
using System.Diagnostics;
using System.IO;
using System.Xml.Linq;
using System.Collections;
using System.Resources;
using Microsoft.Cci;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
namespace Microsoft.Build.Net.CoreRuntimeTask
{
public sealed class ResourceHandlingTask : Task
{
// Cached metadata for one generated .resw file. Instances are persisted in the
// task's state file via BinaryFormatter, so field names must stay stable.
[Serializable()]
public sealed class ResWInfo
{
// Last write time (UTC) of the .resw file when it was generated; compared on later builds.
public DateTime ResWTimeUtc;
// Full path of the generated .resw file.
public string ResWPath;
// Resource index the file belongs to (assembly name minus a trailing ".resources").
public string ResourceIndexName;
// Neutral resource language of the owning assembly, or "" when not applicable.
public string NeutralResourceLanguage;
}
// Per-assembly cache entry: the assembly's timestamp plus the .resw files that
// were extracted from it. Serialized into the state file; keep field names stable.
[Serializable()]
public sealed class PortableLibraryResourceStateInfo
{
// Last write time (UTC) of the portable library when its resources were extracted.
public DateTime PLibTimeUtc;
// True when the assembly carried the special FxResources framework resource blob.
public bool ContainsFrameworkResources;
// The .resw files produced from this assembly.
public List<ResWInfo> ResWInfoList;
}
// Incremental-build state persisted between task invocations via the state file.
[Serializable()]
public sealed class ResourceHandlingState
{
// Runtime-only wiring; must not be captured in the serialized state.
[NonSerialized]
private TaskLoggingHelper _logger;
// Cache entries keyed by assembly path.
public Dictionary<string, PortableLibraryResourceStateInfo> PortableLibraryStatesLookup = new Dictionary<string, PortableLibraryResourceStateInfo>();
public void SetLogger(TaskLoggingHelper logger) { _logger = logger; }
// Returns true when the cached .resw files for assemblyPath are still valid:
// the assembly and every cached .resw must exist with unchanged UTC timestamps.
// On success the cached list and framework flag are returned via the out parameters.
public bool IsUpToDate(string assemblyPath, out bool containsFrameworkResources, out List<ResWInfo> reswInfoList)
{
reswInfoList = null;
containsFrameworkResources = false;
if (PortableLibraryStatesLookup == null)
{
// A deserialized state could carry a null dictionary; repair it and report stale.
PortableLibraryStatesLookup = new Dictionary<string, PortableLibraryResourceStateInfo>();
return false;
}
if (PortableLibraryStatesLookup.Count == 0)
{
return false;
}
try
{
if (assemblyPath == null || !File.Exists(assemblyPath))
{
return false;
}
PortableLibraryResourceStateInfo info;
if (!PortableLibraryStatesLookup.TryGetValue(assemblyPath, out info))
{
return false;
}
FileInfo fiPlib = new FileInfo(assemblyPath);
// Exact timestamp match required: any change to the assembly invalidates the cache.
if (!fiPlib.LastWriteTimeUtc.Equals(info.PLibTimeUtc))
{
_logger.LogMessage(MessageImportance.Low, Resources.Message_CachedReswNotUpToDateAssemblyNewer, assemblyPath);
return false;
}
if (info.ResWInfoList == null)
{
return false;
}
else
{
// Every cached .resw must still exist and be untouched since extraction.
foreach (ResWInfo reswInfo in info.ResWInfoList)
{
if (reswInfo.ResWPath == null || !File.Exists(reswInfo.ResWPath))
{
_logger.LogMessage(MessageImportance.Low, Resources.Message_CachedReswNotExists, assemblyPath, reswInfo.ResWPath);
return false;
}
FileInfo fiResW = new FileInfo(reswInfo.ResWPath);
if (!fiResW.LastWriteTimeUtc.Equals(reswInfo.ResWTimeUtc))
{
_logger.LogMessage(MessageImportance.Low, Resources.Message_CachedReswNotUpToDate, reswInfo.ResWPath);
return false;
}
}
}
// Everything checked out; log each cached file being reused.
foreach (ResWInfo reswInfo in info.ResWInfoList)
{
_logger.LogMessage(MessageImportance.Low, Resources.Message_UsingCachedResw, reswInfo.ResWPath, assemblyPath);
}
reswInfoList = info.ResWInfoList;
containsFrameworkResources = info.ContainsFrameworkResources;
return true;
}
catch (Exception e)
{
// Any unexpected failure is treated as "not up to date" rather than failing the build.
_logger.LogMessage(MessageImportance.Low, Resources.Error_UnspecifiedCheckUpToDate, assemblyPath, e.Message);
return false;
}
}
// Records (or overwrites) the cache entry for assemblyPath; failures are logged, never thrown.
public void Save(string assemblyPath, DateTime plibTimeUtc, bool containsFrameworkResources, List<ResWInfo> reswInfoList)
{
try
{
PortableLibraryStatesLookup[assemblyPath] = new PortableLibraryResourceStateInfo() { PLibTimeUtc = plibTimeUtc, ContainsFrameworkResources = containsFrameworkResources, ResWInfoList = reswInfoList};
}
catch (Exception e)
{
_logger.LogMessage(MessageImportance.Low, Resources.Error_UnspecifiedSaveState, assemblyPath, e.Message);
}
}
}
// Input assemblies (.dll) to scan for embedded .resources blobs.
[Required]
public ITaskItem[] AssemblyList { get; set; }
// Root folder that receives the generated .resw files.
[Required]
public string OutResWPath { get; set; }
// Path of the incremental-build state file (read at start of Execute, rewritten at end).
[Required]
public string StateFile { get; set; }
// When true, .resw items from framework-resource assemblies are omitted from ReswFileList.
public bool SkipFrameworkResources { get; set; }
// Generated .resw items, annotated with ResourceIndexName/NeutralResourceLanguage metadata.
[Output]
public ITaskItem[] ReswFileList { get; set; }
// Currently always empty; reserved for assemblies that could not be processed.
[Output]
public ITaskItem[] UnprocessedAssemblyList { get; set; }
// CCI metadata host used to load assemblies; created and disposed inside Execute().
private MetadataReaderHost _host;
private ResourceHandlingState _state = null;
private List<ITaskItem> _mainAssemblies;
private List<ITaskItem> _satelliteAssemblies;
// Case-insensitive set of item specs already seen, used to skip duplicate inputs.
private HashSet<String> _processedAssemblies;
// MSBuild entry point. Extracts .resw files from each input assembly (reusing the
// cached state where still valid), populates the output item lists, and persists
// the updated state file. Returns false only when extraction throws.
public override bool Execute()
{
ReswFileList = null;
UnprocessedAssemblyList = null;
List<ITaskItem> unprocessedAssemblyList = new List<ITaskItem>();
List<ITaskItem> reswList = new List<ITaskItem>();
_state = ReadStateFile(StateFile);
if (_state == null)
{
// No usable state file: start from scratch (everything will be re-extracted).
_state = new ResourceHandlingState();
}
_state.SetLogger(Log);
using (_host = new PeReader.DefaultHost())
{
try
{
// Separate main assemblies and satellite assemblies so main assemblies get processed first
_mainAssemblies = new List<ITaskItem>();
_satelliteAssemblies = new List<ITaskItem>();
_processedAssemblies = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
foreach (ITaskItem item in AssemblyList)
{
// Skip duplicate input items (paths compared case-insensitively).
if (_processedAssemblies.Contains(item.ItemSpec))
{
continue;
}
_processedAssemblies.Add(item.ItemSpec);
// Only .dll inputs are considered; anything else is ignored entirely.
if (item.ItemSpec.EndsWith(".dll", StringComparison.OrdinalIgnoreCase))
{
if (item.ItemSpec.EndsWith(".resources.dll", StringComparison.OrdinalIgnoreCase))
{
_satelliteAssemblies.Add(item);
}
else
{
_mainAssemblies.Add(item);
}
}
}
foreach (ITaskItem assemblyFilePath in _mainAssemblies.Concat(_satelliteAssemblies))
{
List<ResWInfo> resWInfoList = null;
bool containsFrameworkResources = false;
// Re-extract only when the cache says the assembly or its .resw outputs changed.
if (!_state.IsUpToDate(assemblyFilePath.ItemSpec, out containsFrameworkResources, out resWInfoList))
{
resWInfoList = ExtractAssemblyResWList(assemblyFilePath.ItemSpec, out containsFrameworkResources);
if (resWInfoList != null)
{
FileInfo fiAssembly = new FileInfo(assemblyFilePath.ItemSpec);
_state.Save(assemblyFilePath.ItemSpec, fiAssembly.LastWriteTimeUtc, containsFrameworkResources, resWInfoList);
}
}
if (resWInfoList != null)
{
foreach (ResWInfo reswInfo in resWInfoList)
{
TaskItem newTaskItem = new TaskItem(reswInfo.ResWPath);
newTaskItem.SetMetadata("ResourceIndexName", reswInfo.ResourceIndexName);
if (!String.IsNullOrEmpty(reswInfo.NeutralResourceLanguage))
{
newTaskItem.SetMetadata("NeutralResourceLanguage", reswInfo.NeutralResourceLanguage);
}
if (!containsFrameworkResources)
{
newTaskItem.SetMetadata("OriginalItemSpec", reswInfo.ResWPath); // Original GenerateResource behavior creates this additional metadata item on processed non-framework assemblies
reswList.Add(newTaskItem);
}
else if (!SkipFrameworkResources)
{
reswList.Add(newTaskItem);
}
}
}
}
UnprocessedAssemblyList = unprocessedAssemblyList.ToArray(); // For now this list will always be empty
ReswFileList = reswList.ToArray();
WriteStateFile(StateFile, _state);
}
catch (Exception e)
{
Log.LogError(Resources.Error_ResourceExtractionFailed, e.Message);
return false;
}
}
return true;
}
/// <summary>
/// Deserializes the incremental-build state from <paramref name="stateFile"/>.
/// Returns null when the file is missing, unreadable, or does not contain a
/// ResourceHandlingState; the caller then starts with a fresh state.
/// </summary>
private ResourceHandlingState ReadStateFile(string stateFile)
{
    try
    {
        if (String.IsNullOrEmpty(stateFile) || !File.Exists(stateFile))
        {
            return null;
        }
        // Open read-only: this method only consumes the file, and a read-only open
        // succeeds even when the file lacks write permission (the previous default
        // FileAccess.ReadWrite would throw and silently discard the cache).
        using (FileStream fs = new FileStream(stateFile, FileMode.Open, FileAccess.Read))
        {
            // NOTE(review): BinaryFormatter is insecure against untrusted payloads and
            // deprecated in modern .NET; tolerable here only because this file is
            // produced by this same task, but consider a safer format going forward.
            BinaryFormatter formatter = new BinaryFormatter();
            object deserializedObject = formatter.Deserialize(fs);
            ResourceHandlingState state = deserializedObject as ResourceHandlingState;
            if (state == null && deserializedObject != null)
            {
                // File deserialized into an unexpected type: treat as corrupted.
                Log.LogMessage(MessageImportance.Normal, Resources.Message_UnspecifiedStateFileCorrupted, stateFile);
            }
            return state;
        }
    }
    catch (Exception e)
    {
        // Any failure just disables the incremental cache; never fail the build here.
        Log.LogMessage(MessageImportance.Low, Resources.Message_UnspecifiedReadStateFile, e.Message);
        return null;
    }
}
/// <summary>
/// Returns true when <paramref name="path"/> resides directly in the configured
/// output folder (OutResWPath), comparing directories case-insensitively.
/// </summary>
private bool IsAtOutputFolder(string path)
{
    try
    {
        char[] separators = new char[] { Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar };
        string normalizedOutput = OutResWPath.TrimEnd(separators);
        string parentFolder = Path.GetDirectoryName(path);
        return parentFolder.Equals(normalizedOutput, StringComparison.OrdinalIgnoreCase);
    }
    catch
    {
        // Malformed paths simply don't match.
        return false;
    }
}
/// <summary>
/// Serializes the incremental-build state to <paramref name="stateFile"/>,
/// overwriting any existing file. Failures are logged at low importance and
/// never fail the build.
/// </summary>
private void WriteStateFile(string stateFile, ResourceHandlingState state)
{
    try
    {
        if (String.IsNullOrEmpty(stateFile))
        {
            return;
        }
        using (FileStream stream = new FileStream(stateFile, FileMode.Create))
        {
            new BinaryFormatter().Serialize(stream, state);
        }
    }
    catch (Exception e)
    {
        Log.LogMessage(MessageImportance.Low, Resources.Message_UnspecifiedSaveStateFile, e.Message);
    }
}
// Writes the .resources data in 'stream' out as a .resw file and returns its cache
// record, or null when a non-framework assembly has neither a culture nor a
// NeutralResourcesLanguageAttribute to place the output under.
private ResWInfo ExtractResourcesFromStream(Stream stream, IAssembly assembly, string resourceFileName, bool containsFrameworkResources)
{
string reswFilePath;
string resourceIndexName;
string neutralResourceLanguage = "";
if (containsFrameworkResources)
{
// Framework resources land directly in the output folder and are pinned to en-US.
reswFilePath = OutResWPath + Path.AltDirectorySeparatorChar + resourceFileName + ".resw";
resourceIndexName = resourceFileName;
neutralResourceLanguage = "en-US";
}
else
{
string culturePath = "";
string culture = assembly.Culture;
string assemblyName = assembly.Name.Value;
if (!String.IsNullOrEmpty(culture))
{
// Satellite assembly: output under a per-culture subfolder.
culturePath = culture + Path.DirectorySeparatorChar;
}
else if (TryGetNeutralResourcesLanguageAttribute(assembly, out neutralResourceLanguage))
{
culturePath = neutralResourceLanguage + Path.DirectorySeparatorChar;
}
// Must have NeutralResourcesLanguageAttribute
// warning MSB3817: The assembly "<FullPath>\ClassLibrary1.dll" does not have a NeutralResourcesLanguageAttribute on it.
// To be used in an app package, portable libraries must define a NeutralResourcesLanguageAttribute on their main assembly
// (ie, the one containing code, not a satellite assembly).
else
{
return null;
}
// Strip a trailing ".<culture>" suffix from the resource name if present.
if (resourceFileName.EndsWith("." + culture, StringComparison.OrdinalIgnoreCase))
{
resourceFileName = resourceFileName.Remove(resourceFileName.Length - (culture.Length + 1));
}
// Drop a trailing ".resources" (10 characters) from the assembly name to form the index name.
resourceIndexName = assemblyName.EndsWith(".resources", StringComparison.OrdinalIgnoreCase) ? assemblyName.Remove(assemblyName.Length - 10) : assemblyName;
// NOTE(review): unlike the framework branch above, no separator is inserted between
// OutResWPath and resourceIndexName here — this appears to assume OutResWPath ends
// with a separator; confirm against how callers supply OutResWPath.
reswFilePath = OutResWPath + resourceIndexName + Path.DirectorySeparatorChar + culturePath + resourceFileName + ".resw";
if (!Directory.Exists(Directory.GetParent(reswFilePath).ToString()))
{
Directory.CreateDirectory(Directory.GetParent(reswFilePath).ToString());
}
}
WriteResW(stream, reswFilePath);
// Capture the freshly written file's timestamp for later up-to-date checks.
FileInfo fiResW = new FileInfo(reswFilePath);
return new ResWInfo() { ResWPath = reswFilePath, ResWTimeUtc = fiResW.LastWriteTimeUtc, ResourceIndexName = resourceIndexName, NeutralResourceLanguage = neutralResourceLanguage };
}
/// <summary>
/// Scans the assembly's custom attributes for NeutralResourcesLanguageAttribute
/// and extracts its first (string) constructor argument.
/// </summary>
/// <param name="assembly">Assembly whose attributes are inspected.</param>
/// <param name="neutralResourceLanguage">Receives the culture name, or "" when absent/unparsable.</param>
/// <returns>true when the attribute was found with a parsable string argument.</returns>
private bool TryGetNeutralResourcesLanguageAttribute(IAssembly assembly, out String neutralResourceLanguage)
{
    neutralResourceLanguage = "";
    foreach (ICustomAttribute attribute in assembly.AssemblyAttributes)
    {
        if (!TypeHelper.GetTypeName(attribute.Type, NameFormattingOptions.None).Equals("System.Resources.NeutralResourcesLanguageAttribute"))
        {
            continue;
        }
        // Any() short-circuits where Count() would enumerate the whole argument list.
        if (!attribute.Arguments.Any())
        {
            continue; // Attribute present but argument-less; keep scanning (matches prior behavior).
        }
        IMetadataConstant metadataConstant = attribute.Arguments.First() as IMetadataConstant;
        if (metadataConstant == null)
        {
            return false; // Unable to parse
        }
        Object value = metadataConstant.Value;
        if (!(value is String))
        {
            return false; // Expected to be a string
        }
        neutralResourceLanguage = (String)value;
        return true;
    }
    return false;
}
/// <summary>
/// Converts a binary .resources stream into a .resw (ResX) file at the given path,
/// copying every entry across verbatim.
/// </summary>
private void WriteResW(Stream stream, string reswFilePath)
{
    using (ResourceReader reader = new ResourceReader(stream))
    using (ResXResourceWriter writer = new ResXResourceWriter(reswFilePath))
    {
        foreach (DictionaryEntry entry in reader)
        {
            writer.AddResource((string)entry.Key, entry.Value);
        }
    }
}
// Loads the assembly with CCI and extracts every embedded .resources blob into a
// .resw file. Returns null when the file is not a readable assembly or exposes no
// resources; sets containsFrameworkResources (and stops scanning) when the special
// "FxResources.<name>.SR.resources" blob is found.
private List<ResWInfo> ExtractAssemblyResWList(string assemblyFilePath, out bool containsFrameworkResources)
{
containsFrameworkResources = false;
IAssembly assembly = _host.LoadUnitFrom(assemblyFilePath) as IAssembly;
if (assembly == null || assembly == Dummy.Assembly)
{
return null;
}
if (assembly.Resources == null)
{
return null;
}
List<ResWInfo> reswInfoList = new List<ResWInfo>();
string frameworkResourcesName = "FxResources." + assembly.Name.Value + ".SR.resources";
foreach (IResourceReference resourceReference in assembly.Resources)
{
// Only embedded (non-file-linked) .resources blobs are extracted.
if (!resourceReference.Resource.IsInExternalFile && resourceReference.Name.Value.EndsWith(".resources", StringComparison.OrdinalIgnoreCase))
{
const int BUFFERSIZE = 4096;
byte[] buffer = new byte[BUFFERSIZE];
int index = 0;
using (MemoryStream ms = new MemoryStream(BUFFERSIZE))
{
// Copy the byte-at-a-time CCI data sequence into the MemoryStream in 4K chunks.
foreach (byte b in resourceReference.Resource.Data)
{
if (index == BUFFERSIZE)
{
ms.Write(buffer, 0, BUFFERSIZE);
index = 0;
}
buffer[index++] = b;
}
// Flush the final partial chunk and rewind before handing the stream off.
ms.Write(buffer, 0, index);
ms.Seek(0, SeekOrigin.Begin);
// Trim the trailing ".resources" (10 characters) to get the logical resource name.
string resourceFileName = resourceReference.Name.Value.Remove(resourceReference.Name.Value.Length - 10);
bool isFrameworkResource = resourceReference.Name.Value.Equals(frameworkResourcesName, StringComparison.OrdinalIgnoreCase);
ResWInfo reswInfo = ExtractResourcesFromStream(ms, assembly, resourceFileName, isFrameworkResource);
if (reswInfo != null)
{
reswInfoList.Add(reswInfo);
}
if (isFrameworkResource)
{
// The framework blob is terminal: no further resources are scanned for this assembly.
containsFrameworkResources = true;
return reswInfoList;
}
}
}
}
return reswInfoList;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Microsoft.Win32.SafeHandles;
using System.Collections.Generic;
using Xunit;
namespace System.IO.MemoryMappedFiles.Tests
{
/// <summary>
/// Tests for MemoryMappedFile.CreateFromFile.
/// </summary>
public class MemoryMappedFileTests_CreateFromFile : MemoryMappedFilesTestBase
{
/// <summary>
/// Tests invalid arguments to the CreateFromFile path parameter.
/// </summary>
[Fact]
public void InvalidArguments_Path()
{
    // A null path must be rejected, with the right parameter name, by every path-based overload.
    Action[] overloadsWithNullPath =
    {
        () => MemoryMappedFile.CreateFromFile(null),
        () => MemoryMappedFile.CreateFromFile(null, FileMode.Open),
        () => MemoryMappedFile.CreateFromFile(null, FileMode.Open, CreateUniqueMapName()),
        () => MemoryMappedFile.CreateFromFile(null, FileMode.Open, CreateUniqueMapName(), 4096),
        () => MemoryMappedFile.CreateFromFile(null, FileMode.Open, CreateUniqueMapName(), 4096, MemoryMappedFileAccess.Read)
    };
    foreach (Action overload in overloadsWithNullPath)
    {
        Assert.Throws<ArgumentNullException>("path", overload);
    }
}
/// <summary>
/// Tests invalid arguments to the CreateFromFile fileStream parameter.
/// </summary>
[Fact]
public void InvalidArguments_FileStream()
{
    // A null FileStream must be rejected with the right parameter name.
    FileStream nullStream = null;
    Assert.Throws<ArgumentNullException>("fileStream", () => MemoryMappedFile.CreateFromFile(nullStream, CreateUniqueMapName(), 4096, MemoryMappedFileAccess.Read, HandleInheritability.None, true));
}
/// <summary>
/// Tests invalid arguments to the CreateFromFile mode parameter.
/// </summary>
[Fact]
public void InvalidArguments_Mode()
{
// FileMode out of range
Assert.Throws<ArgumentOutOfRangeException>("mode", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), (FileMode)42));
Assert.Throws<ArgumentOutOfRangeException>("mode", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), (FileMode)42, null));
Assert.Throws<ArgumentOutOfRangeException>("mode", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), (FileMode)42, null, 4096));
Assert.Throws<ArgumentOutOfRangeException>("mode", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), (FileMode)42, null, 4096, MemoryMappedFileAccess.ReadWrite));
// FileMode.Append never allowed
Assert.Throws<ArgumentException>("mode", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.Append));
Assert.Throws<ArgumentException>("mode", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.Append, null));
Assert.Throws<ArgumentException>("mode", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.Append, null, 4096));
Assert.Throws<ArgumentException>("mode", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.Append, null, 4096, MemoryMappedFileAccess.ReadWrite));
// FileMode.CreateNew/Create/OpenOrCreate can't be used with default capacity, as the file will be empty
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.CreateNew));
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.Create));
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.OpenOrCreate));
// FileMode.Truncate can't be used with default capacity, as resulting file will be empty
// (Truncate requires an existing file, hence the TempFile here.)
using (TempFile file = new TempFile(GetTestFilePath()))
{
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Truncate));
}
}
/// <summary>
/// Tests invalid arguments to the CreateFromFile access parameter.
/// </summary>
[Fact]
public void InvalidArguments_Access()
{
// Out of range access values with a path
Assert.Throws<ArgumentOutOfRangeException>("access", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.Open, CreateUniqueMapName(), 4096, (MemoryMappedFileAccess)(-2)));
Assert.Throws<ArgumentOutOfRangeException>("access", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.Open, CreateUniqueMapName(), 4096, (MemoryMappedFileAccess)(42)));
// Write-only access is not allowed on maps (only on views)
Assert.Throws<ArgumentException>("access", () => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.Open, CreateUniqueMapName(), 4096, MemoryMappedFileAccess.Write));
// Test the same things, but with a FileStream instead of a path
using (TempFile file = new TempFile(GetTestFilePath()))
using (FileStream fs = File.Open(file.Path, FileMode.Open))
{
// Out of range values with a stream
Assert.Throws<ArgumentOutOfRangeException>("access", () => MemoryMappedFile.CreateFromFile(fs, CreateUniqueMapName(), 4096, (MemoryMappedFileAccess)(-2), HandleInheritability.None, true));
Assert.Throws<ArgumentOutOfRangeException>("access", () => MemoryMappedFile.CreateFromFile(fs, CreateUniqueMapName(), 4096, (MemoryMappedFileAccess)(42), HandleInheritability.None, true));
// Write-only access is not allowed
Assert.Throws<ArgumentException>("access", () => MemoryMappedFile.CreateFromFile(fs, CreateUniqueMapName(), 4096, MemoryMappedFileAccess.Write, HandleInheritability.None, true));
}
}
/// <summary>
/// Tests various values of FileAccess used to construct a FileStream and MemoryMappedFileAccess used
/// to construct a map over that stream. The combinations should all be valid.
/// </summary>
[Theory]
[InlineData(FileAccess.ReadWrite, MemoryMappedFileAccess.Read)]
[InlineData(FileAccess.ReadWrite, MemoryMappedFileAccess.ReadWrite)]
[InlineData(FileAccess.ReadWrite, MemoryMappedFileAccess.CopyOnWrite)]
[InlineData(FileAccess.Read, MemoryMappedFileAccess.Read)]
[InlineData(FileAccess.Read, MemoryMappedFileAccess.CopyOnWrite)]
public void FileAccessAndMapAccessCombinations_Valid(FileAccess fileAccess, MemoryMappedFileAccess mmfAccess)
{
    const int Capacity = 4096;
    // A stream opened with the given FileAccess should accept a map with the given map access.
    using (TempFile tempFile = new TempFile(GetTestFilePath(), Capacity))
    using (FileStream stream = new FileStream(tempFile.Path, FileMode.Open, fileAccess))
    using (MemoryMappedFile map = MemoryMappedFile.CreateFromFile(stream, null, Capacity, mmfAccess, HandleInheritability.None, true))
    {
        ValidateMemoryMappedFile(map, Capacity, mmfAccess);
    }
}
/// <summary>
/// Tests various values of FileAccess used to construct a FileStream and MemoryMappedFileAccess used
/// to construct a map over that stream on Windows. The combinations should all be invalid, resulting in exception.
/// </summary>
[PlatformSpecific(PlatformID.Windows)]
[Theory]
[InlineData(FileAccess.Read, MemoryMappedFileAccess.ReadWrite)]
[InlineData(FileAccess.Read, MemoryMappedFileAccess.ReadExecute)]
[InlineData(FileAccess.Read, MemoryMappedFileAccess.ReadWriteExecute)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.Read)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.ReadWrite)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.CopyOnWrite)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.ReadExecute)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.ReadWriteExecute)]
[InlineData(FileAccess.ReadWrite, MemoryMappedFileAccess.ReadExecute)] // this and the next are explicitly left off of the Unix test due to differences in Unix permissions
[InlineData(FileAccess.ReadWrite, MemoryMappedFileAccess.ReadWriteExecute)]
public void FileAccessAndMapAccessCombinations_Invalid_Windows(FileAccess fileAccess, MemoryMappedFileAccess mmfAccess)
{
    // On Windows, creating the file mapping does the permissions checks, so the exception comes from CreateFromFile.
    const int Capacity = 4096;
    using (TempFile tempFile = new TempFile(GetTestFilePath(), Capacity))
    using (FileStream stream = new FileStream(tempFile.Path, FileMode.Open, fileAccess))
    {
        Assert.Throws<UnauthorizedAccessException>(() => MemoryMappedFile.CreateFromFile(stream, null, Capacity, mmfAccess, HandleInheritability.None, true));
    }
}
/// <summary>
/// Tests various values of FileAccess used to construct a FileStream and MemoryMappedFileAccess used
/// to construct a map over that stream on Unix. The combinations should all be invalid, resulting in exception.
/// </summary>
[PlatformSpecific(PlatformID.AnyUnix)]
[Theory]
[InlineData(FileAccess.Read, MemoryMappedFileAccess.ReadWrite)]
[InlineData(FileAccess.Read, MemoryMappedFileAccess.ReadExecute)]
[InlineData(FileAccess.Read, MemoryMappedFileAccess.ReadWriteExecute)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.Read)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.ReadWrite)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.CopyOnWrite)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.ReadExecute)]
[InlineData(FileAccess.Write, MemoryMappedFileAccess.ReadWriteExecute)]
public void FileAccessAndMapAccessCombinations_Invalid_Unix(FileAccess fileAccess, MemoryMappedFileAccess mmfAccess)
{
// On Unix we don't actually create the OS map until the view is created; this results in the permissions
// error being thrown from CreateView* instead of from CreateFromFile.
const int Capacity = 4096;
using (TempFile file = new TempFile(GetTestFilePath(), Capacity))
using (FileStream fs = new FileStream(file.Path, FileMode.Open, fileAccess))
// Creating the map itself succeeds on Unix; only the view creation below is expected to fail.
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(fs, null, Capacity, mmfAccess, HandleInheritability.None, true))
{
Assert.Throws<UnauthorizedAccessException>(() => mmf.CreateViewAccessor());
}
}
/// <summary>
/// Tests invalid arguments to the CreateFromFile mapName parameter.
/// </summary>
[Fact]
public void InvalidArguments_MapName()
{
    using (TempFile file = new TempFile(GetTestFilePath()))
    {
        // Empty string is an invalid map name
        // (the fourth, byte-identical duplicate of the five-argument call was removed as redundant)
        Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, string.Empty));
        Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, string.Empty, 4096));
        Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, string.Empty, 4096, MemoryMappedFileAccess.Read));
        // An empty name is likewise rejected when creating over an existing FileStream.
        using (FileStream fs = File.Open(file.Path, FileMode.Open))
        {
            Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(fs, string.Empty, 4096, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, true));
        }
    }
}
/// <summary>
/// Test to verify that map names are left unsupported on Unix.
/// </summary>
[PlatformSpecific(PlatformID.AnyUnix)]
[Theory]
[MemberData("CreateValidMapNames")]
public void MapNamesNotSupported_Unix(string mapName)
{
    const int Capacity = 4096;
    using (TempFile file = new TempFile(GetTestFilePath(), Capacity))
    {
        // Every overload that accepts a map name must reject a non-null name on Unix.
        // (a byte-identical duplicate of the access-taking call was removed as redundant)
        Assert.Throws<PlatformNotSupportedException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, mapName));
        Assert.Throws<PlatformNotSupportedException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, mapName, Capacity));
        Assert.Throws<PlatformNotSupportedException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, mapName, Capacity, MemoryMappedFileAccess.ReadWrite));
        using (FileStream fs = File.Open(file.Path, FileMode.Open))
        {
            // Use Capacity rather than a magic 4096 for consistency with the calls above.
            Assert.Throws<PlatformNotSupportedException>(() => MemoryMappedFile.CreateFromFile(fs, mapName, Capacity, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, true));
        }
    }
}
/// <summary>
/// Tests invalid arguments to the CreateFromFile capacity parameter, for both the
/// path-based and FileStream-based overloads.
/// </summary>
[Fact]
public void InvalidArguments_Capacity()
{
using (TempFile file = new TempFile(GetTestFilePath()))
{
// Out of range values for capacity
Assert.Throws<ArgumentOutOfRangeException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, CreateUniqueMapName(), -1));
Assert.Throws<ArgumentOutOfRangeException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, CreateUniqueMapName(), -1, MemoryMappedFileAccess.Read));
// Positive capacity required when creating a map from an empty file
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, null, 0, MemoryMappedFileAccess.Read));
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, CreateUniqueMapName(), 0, MemoryMappedFileAccess.Read));
// With Read, the capacity can't be larger than the backing file's size.
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, CreateUniqueMapName(), 1, MemoryMappedFileAccess.Read));
// Now with a FileStream...
using (FileStream fs = File.Open(file.Path, FileMode.Open))
{
// The subsequent tests are only valid we if we start with an empty FileStream, which we should have.
// This also verifies the previous failed tests didn't change the length of the file.
Assert.Equal(0, fs.Length);
// Out of range values for capacity
Assert.Throws<ArgumentOutOfRangeException>(() => MemoryMappedFile.CreateFromFile(fs, null, -1, MemoryMappedFileAccess.Read, HandleInheritability.None, true));
// Default (0) capacity with an empty file
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(fs, null, 0, MemoryMappedFileAccess.Read, HandleInheritability.None, true));
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(fs, CreateUniqueMapName(), 0, MemoryMappedFileAccess.Read, HandleInheritability.None, true));
// Larger capacity than the underlying file, but read-only such that we can't expand the file
// (note: SetLength grows the file from here on; the checks above relied on it being empty)
fs.SetLength(4096);
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(fs, null, 8192, MemoryMappedFileAccess.Read, HandleInheritability.None, true));
Assert.Throws<ArgumentException>(() => MemoryMappedFile.CreateFromFile(fs, CreateUniqueMapName(), 8192, MemoryMappedFileAccess.Read, HandleInheritability.None, true));
// Capacity can't be less than the file size (for such cases a view can be created with the smaller size)
Assert.Throws<ArgumentOutOfRangeException>("capacity", () => MemoryMappedFile.CreateFromFile(fs, null, 1, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, true));
}
// Capacity can't be less than the file size
Assert.Throws<ArgumentOutOfRangeException>("capacity", () => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, CreateUniqueMapName(), 1, MemoryMappedFileAccess.Read));
}
}
/// <summary>
/// Tests invalid arguments to the CreateFromFile inheritability parameter.
/// </summary>
[Theory]
[InlineData((HandleInheritability)(-1))]
[InlineData((HandleInheritability)(42))]
public void InvalidArguments_Inheritability(HandleInheritability inheritability)
{
    // Values outside the HandleInheritability enum must be rejected with the right parameter name.
    using (TempFile tempFile = new TempFile(GetTestFilePath()))
    using (FileStream stream = File.Open(tempFile.Path, FileMode.Open))
    {
        Assert.Throws<ArgumentOutOfRangeException>("inheritability", () => MemoryMappedFile.CreateFromFile(stream, CreateUniqueMapName(), 4096, MemoryMappedFileAccess.ReadWrite, inheritability, true));
    }
}
/// <summary>
/// Test various combinations of arguments to CreateFromFile, focusing on the Open and OpenOrCreate modes,
/// and validating the creating maps each time they're created.
/// </summary>
[Theory]
[MemberData("MemberData_ValidArgumentCombinationsWithPath",
new FileMode[] { FileMode.Open, FileMode.OpenOrCreate },
new string[] { null, "CreateUniqueMapName()" },
new long[] { 1, 256, -1 /*pagesize*/, 10000 },
new MemoryMappedFileAccess[] { MemoryMappedFileAccess.Read, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileAccess.CopyOnWrite })]
// NOTE(review): the 'access' value supplied by the data matrix is never used below —
// only the access-less overloads are exercised; confirm whether a fifth call using
// the access-taking overload was intended here.
public void ValidArgumentCombinationsWithPath_ModesOpenOrCreate(
FileMode mode, string mapName, long capacity, MemoryMappedFileAccess access)
{
// Test each of the four path-based CreateFromFile overloads
using (TempFile file = new TempFile(GetTestFilePath(), capacity))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path))
{
ValidateMemoryMappedFile(mmf, capacity);
}
using (TempFile file = new TempFile(GetTestFilePath(), capacity))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path, mode))
{
ValidateMemoryMappedFile(mmf, capacity);
}
using (TempFile file = new TempFile(GetTestFilePath(), capacity))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path, mode, mapName))
{
ValidateMemoryMappedFile(mmf, capacity);
}
using (TempFile file = new TempFile(GetTestFilePath(), capacity))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path, mode, mapName, capacity))
{
ValidateMemoryMappedFile(mmf, capacity);
}
// Finally, re-test the last overload, this time with an empty file to start
using (TempFile file = new TempFile(GetTestFilePath()))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path, mode, mapName, capacity))
{
ValidateMemoryMappedFile(mmf, capacity);
}
}
/// <summary>
/// Test various combinations of arguments to CreateFromFile, focusing on the CreateNew mode,
/// and validating the creating maps each time they're created.
/// </summary>
[Theory]
[MemberData("MemberData_ValidArgumentCombinationsWithPath",
    new FileMode[] { FileMode.CreateNew },
    new string[] { null, "CreateUniqueMapName()" },
    new long[] { 1, 256, -1 /*pagesize*/, 10000 },
    new MemoryMappedFileAccess[] { MemoryMappedFileAccess.Read, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileAccess.CopyOnWrite })]
public void ValidArgumentCombinationsWithPath_ModeCreateNew(
    FileMode mode, string mapName, long capacity, MemoryMappedFileAccess access)
{
    // FileMode.CreateNew produces a brand-new (empty) file, so only the
    // capacity-taking overloads apply: the default capacity cannot be used
    // with an empty file.
    using (MemoryMappedFile map = MemoryMappedFile.CreateFromFile(GetTestFilePath(), mode, mapName, capacity))
    {
        ValidateMemoryMappedFile(map, capacity);
    }
    using (MemoryMappedFile map = MemoryMappedFile.CreateFromFile(GetTestFilePath(), mode, mapName, capacity, access))
    {
        ValidateMemoryMappedFile(map, capacity, access);
    }
}
/// <summary>
/// Test various combinations of arguments to CreateFromFile, focusing on the Create and Truncate modes,
/// and validating the creating maps each time they're created.
/// </summary>
[Theory]
[MemberData("MemberData_ValidArgumentCombinationsWithPath",
new FileMode[] { FileMode.Create, FileMode.Truncate },
new string[] { null, "CreateUniqueMapName()" },
new long[] { 1, 256, -1 /*pagesize*/, 10000 },
new MemoryMappedFileAccess[] { MemoryMappedFileAccess.Read, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileAccess.CopyOnWrite })]
public void ValidArgumentCombinationsWithPath_ModesCreateOrTruncate(
FileMode mode, string mapName, long capacity, MemoryMappedFileAccess access)
{
// For FileMode.Create/Truncate, try existing files. Only the overloads that take a capacity are valid because
// both of these modes will cause the input file to be made empty, and an empty file doesn't work with the default capacity.
using (TempFile file = new TempFile(GetTestFilePath(), capacity))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path, mode, mapName, capacity))
{
ValidateMemoryMappedFile(mmf, capacity);
}
using (TempFile file = new TempFile(GetTestFilePath(), capacity))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path, mode, mapName, capacity, access))
{
ValidateMemoryMappedFile(mmf, capacity, access);
}
// For FileMode.Create, also try letting it create a new file (Truncate needs the file to have existed)
if (mode == FileMode.Create)
{
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(GetTestFilePath(), mode, mapName, capacity))
{
ValidateMemoryMappedFile(mmf, capacity);
}
}
}
/// <summary>
/// Provides input data to the ValidArgumentCombinationsWithPath tests, yielding the full matrix
/// of combinations of input values provided, except for those that are known to be unsupported
/// (e.g. non-null map names on Unix), and with appropriate values substituted in for placeholders
/// listed in the MemberData attribute (e.g. actual system page size instead of -1).
/// </summary>
/// <param name="modes">The modes to yield.</param>
/// <param name="mapNames">
/// The names to yield.
/// non-null may be excluded based on platform.
/// "CreateUniqueMapName()" will be translated to an invocation of that method.
/// </param>
/// <param name="capacities">The capacities to yield. -1 will be translated to system page size.</param>
/// <param name="accesses">
/// The accesses to yield. Non-writable accesses will be skipped if the current mode doesn't support it.
/// </param>
public static IEnumerable<object[]> MemberData_ValidArgumentCombinationsWithPath(
    FileMode[] modes, string[] mapNames, long[] capacities, MemoryMappedFileAccess[] accesses)
{
    foreach (FileMode mode in modes)
    {
        // Modes that wipe or create the file need write access; hoisted out of the inner loops.
        bool requiresWritableAccess = mode == FileMode.Create || mode == FileMode.CreateNew || mode == FileMode.Truncate;
        foreach (string rawMapName in mapNames)
        {
            if (rawMapName != null && !MapNamesSupported)
            {
                continue;
            }
            string mapName = rawMapName == "CreateUniqueMapName()" ? CreateUniqueMapName() : rawMapName;
            foreach (long rawCapacity in capacities)
            {
                long capacity = rawCapacity == -1 ? s_pageSize.Value : rawCapacity;
                foreach (MemoryMappedFileAccess access in accesses)
                {
                    if (requiresWritableAccess && !IsWritable(access))
                    {
                        continue;
                    }
                    yield return new object[] { mode, mapName, capacity, access };
                }
            }
        }
    }
}
/// <summary>
/// Test various combinations of arguments to the CreateFromFile overload that accepts a FileStream.
/// </summary>
[Theory]
[MemberData("MemberData_ValidArgumentCombinationsWithStream",
new string[] { null, "CreateUniqueMapName()" },
new long[] { 1, 256, -1 /*pagesize*/, 10000 },
new MemoryMappedFileAccess[] { MemoryMappedFileAccess.Read, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileAccess.CopyOnWrite },
new HandleInheritability[] { HandleInheritability.None, HandleInheritability.Inheritable },
new bool[] { false, true })]
public void ValidArgumentCombinationsWithStream(
string mapName, long capacity, MemoryMappedFileAccess access, HandleInheritability inheritability, bool leaveOpen)
{
// Create a file of the right size, then create the map for it.
using (TempFile file = new TempFile(GetTestFilePath(), capacity))
using (FileStream fs = File.Open(file.Path, FileMode.Open))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(fs, mapName, capacity, access, inheritability, leaveOpen))
{
ValidateMemoryMappedFile(mmf, capacity, access, inheritability);
}
// Start with an empty file and let the map grow it to the right size. This requires write access.
if (IsWritable(access))
{
using (FileStream fs = File.Create(GetTestFilePath()))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(fs, mapName, capacity, access, inheritability, leaveOpen))
{
ValidateMemoryMappedFile(mmf, capacity, access, inheritability);
}
}
}
/// <summary>
/// Provides input data to the ValidArgumentCombinationsWithStream tests, yielding the full matrix
/// of combinations of the provided input values, skipping combinations that are known to be
/// unsupported (e.g. non-null map names on Unix) and substituting real values for placeholders
/// listed in the MemberData attribute (e.g. actual system page size instead of -1).
/// </summary>
/// <param name="mapNames">
/// The names to yield.
/// non-null may be excluded based on platform.
/// "CreateUniqueMapName()" will be translated to an invocation of that method.
/// </param>
/// <param name="capacities">The capacities to yield. -1 will be translated to system page size.</param>
/// <param name="accesses">The accesses to yield.</param>
/// <param name="inheritabilities">The inheritabilities to yield.</param>
/// <param name="leaveOpens">The leaveOpen values to yield.</param>
public static IEnumerable<object[]> MemberData_ValidArgumentCombinationsWithStream(
    string[] mapNames, long[] capacities, MemoryMappedFileAccess[] accesses, HandleInheritability[] inheritabilities, bool[] leaveOpens)
{
    foreach (string rawName in mapNames)
    {
        // Named maps aren't supported on all platforms; skip those combinations there.
        if (rawName != null && !MapNamesSupported)
        {
            continue;
        }
        string resolvedName = rawName == "CreateUniqueMapName()" ? CreateUniqueMapName() : rawName;
        foreach (long rawCapacity in capacities)
        {
            // -1 is a placeholder for the actual system page size.
            long resolvedCapacity = rawCapacity == -1 ? s_pageSize.Value : rawCapacity;
            foreach (MemoryMappedFileAccess access in accesses)
            {
                foreach (HandleInheritability inheritability in inheritabilities)
                {
                    foreach (bool leaveOpen in leaveOpens)
                    {
                        yield return new object[] { resolvedName, resolvedCapacity, access, inheritability, leaveOpen };
                    }
                }
            }
        }
    }
}
/// <summary>
/// Test that a map using the default capacity (0) grows to the size of the underlying file.
/// </summary>
[Fact]
public void DefaultCapacityIsFileLength()
{
const int DesiredCapacity = 8192;
const int DefaultCapacity = 0;
// With path: capacity 0 means "use the file's current length".
using (TempFile file = new TempFile(GetTestFilePath(), DesiredCapacity))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, null, DefaultCapacity))
{
ValidateMemoryMappedFile(mmf, DesiredCapacity);
}
// With stream: same expectation through the FileStream-based overload.
using (TempFile file = new TempFile(GetTestFilePath(), DesiredCapacity))
using (FileStream fs = File.Open(file.Path, FileMode.Open))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(fs, null, DefaultCapacity, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, true))
{
ValidateMemoryMappedFile(mmf, DesiredCapacity);
}
}
/// <summary>
/// Test that appropriate exceptions are thrown creating a map with a non-existent file and a mode
/// that requires the file to exist.
/// </summary>
[Theory]
[InlineData(FileMode.Truncate)]
[InlineData(FileMode.Open)]
public void FileDoesNotExist(FileMode mode)
{
// Each overload should fail the same way; GetTestFilePath() returns a path that was never created.
Assert.Throws<FileNotFoundException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath()));
Assert.Throws<FileNotFoundException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath(), mode));
Assert.Throws<FileNotFoundException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath(), mode, null));
Assert.Throws<FileNotFoundException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath(), mode, null, 4096));
Assert.Throws<FileNotFoundException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath(), mode, null, 4096, MemoryMappedFileAccess.ReadWrite));
}
/// <summary>
/// Test that appropriate exceptions are thrown creating a map with an existing file and a mode
/// that requires the file to not exist.
/// </summary>
[Fact]
public void FileAlreadyExists()
{
using (TempFile file = new TempFile(GetTestFilePath()))
{
// FileMode.CreateNew invalid when the file already exists
Assert.Throws<IOException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.CreateNew));
Assert.Throws<IOException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.CreateNew, CreateUniqueMapName()));
Assert.Throws<IOException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.CreateNew, CreateUniqueMapName(), 4096));
Assert.Throws<IOException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.CreateNew, CreateUniqueMapName(), 4096, MemoryMappedFileAccess.ReadWrite));
}
}
/// <summary>
/// Test exceptional behavior when trying to create a map for a file that's currently in use.
/// </summary>
[Fact]
public void FileInUse()
{
// Already opened with a FileStream
using (TempFile file = new TempFile(GetTestFilePath(), 4096))
using (FileStream fs = File.Open(file.Path, FileMode.Open))
{
Assert.Throws<IOException>(() => MemoryMappedFile.CreateFromFile(file.Path));
}
// Already opened with another map
using (TempFile file = new TempFile(GetTestFilePath(), 4096))
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path))
{
Assert.Throws<IOException>(() => MemoryMappedFile.CreateFromFile(file.Path));
}
}
/// <summary>
/// Test the exceptional behavior of *Execute access levels when the underlying file
/// was not opened with execute rights.
/// </summary>
[PlatformSpecific(PlatformID.Windows)] // Unix model for executable differs from Windows
[Theory]
[InlineData(MemoryMappedFileAccess.ReadExecute)]
[InlineData(MemoryMappedFileAccess.ReadWriteExecute)]
public void FileNotOpenedForExecute(MemoryMappedFileAccess access)
{
using (TempFile file = new TempFile(GetTestFilePath(), 4096))
{
// The FileStream created by the map doesn't have GENERIC_EXECUTE set
Assert.Throws<UnauthorizedAccessException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, null, 4096, access));
// The FileStream opened explicitly doesn't have GENERIC_EXECUTE set
using (FileStream fs = File.Open(file.Path, FileMode.Open))
{
Assert.Throws<UnauthorizedAccessException>(() => MemoryMappedFile.CreateFromFile(fs, null, 4096, access, HandleInheritability.None, true));
}
}
}
/// <summary>
/// Test the behavior of various access levels when working with a read-only file.
/// Read access should succeed; any access that includes writing should be denied.
/// </summary>
[Theory]
[InlineData(MemoryMappedFileAccess.Read)]
[InlineData(MemoryMappedFileAccess.ReadWrite)]
[InlineData(MemoryMappedFileAccess.CopyOnWrite)]
public void ReadOnlyFile(MemoryMappedFileAccess access)
{
    const int Capacity = 4096;
    using (TempFile file = new TempFile(GetTestFilePath(), Capacity))
    {
        FileAttributes priorAttributes = File.GetAttributes(file.Path);
        File.SetAttributes(file.Path, FileAttributes.ReadOnly);
        try
        {
            if (access != MemoryMappedFileAccess.Read)
            {
                // Any access that includes write permission is denied for a read-only file.
                Assert.Throws<UnauthorizedAccessException>(() => MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, null, Capacity, access));
            }
            else
            {
                // Pure read access is compatible with a read-only file.
                using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, null, Capacity, access))
                {
                    ValidateMemoryMappedFile(mmf, Capacity, MemoryMappedFileAccess.Read);
                }
            }
        }
        finally
        {
            // Restore the attributes so the temp file can be cleaned up.
            File.SetAttributes(file.Path, priorAttributes);
        }
    }
}
/// <summary>
/// Test to ensure that leaveOpen is appropriately respected, either leaving the FileStream open
/// or closing it on disposal.
/// </summary>
[Theory]
[InlineData(true)]
[InlineData(false)]
public void LeaveOpenRespected_Basic(bool leaveOpen)
{
const int Capacity = 4096;
using (TempFile file = new TempFile(GetTestFilePath()))
using (FileStream fs = File.Open(file.Path, FileMode.Open))
{
// Handle should still be open
SafeFileHandle handle = fs.SafeFileHandle;
Assert.False(handle.IsClosed);
// Create and close the map
MemoryMappedFile.CreateFromFile(fs, null, Capacity, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, leaveOpen).Dispose();
// The handle should now be open iff leaveOpen
Assert.NotEqual(leaveOpen, handle.IsClosed);
}
}
/// <summary>
/// Test to ensure that leaveOpen is appropriately respected even when views created from the map
/// are still alive: disposing the map closes (or leaves open) the FileStream, while the views
/// remain usable.
/// </summary>
[Theory]
[InlineData(true)]
[InlineData(false)]
public void LeaveOpenRespected_OutstandingViews(bool leaveOpen)
{
const int Capacity = 4096;
using (TempFile file = new TempFile(GetTestFilePath()))
using (FileStream fs = File.Open(file.Path, FileMode.Open))
{
// Handle should still be open
SafeFileHandle handle = fs.SafeFileHandle;
Assert.False(handle.IsClosed);
// Create the map, create each of the views, then close the map
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(fs, null, Capacity, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, leaveOpen))
using (MemoryMappedViewAccessor acc = mmf.CreateViewAccessor(0, Capacity))
using (MemoryMappedViewStream s = mmf.CreateViewStream(0, Capacity))
{
// Explicitly close the map. The handle should now be open iff leaveOpen.
mmf.Dispose();
Assert.NotEqual(leaveOpen, handle.IsClosed);
// But the views should still be usable.
ValidateMemoryMappedViewAccessor(acc, Capacity, MemoryMappedFileAccess.ReadWrite);
ValidateMemoryMappedViewStream(s, Capacity, MemoryMappedFileAccess.ReadWrite);
}
}
}
/// <summary>
/// Test to validate we can create multiple maps from the same FileStream, and that data written
/// through one map is observable through the other.
/// </summary>
[Fact]
public void MultipleMapsForTheSameFileStream()
{
    const int Capacity = 4096;
    using (TempFile file = new TempFile(GetTestFilePath(), Capacity))
    using (FileStream fs = new FileStream(file.Path, FileMode.Open))
    using (MemoryMappedFile mmf1 = MemoryMappedFile.CreateFromFile(fs, null, Capacity, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, true))
    using (MemoryMappedFile mmf2 = MemoryMappedFile.CreateFromFile(fs, null, Capacity, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, true))
    using (MemoryMappedViewAccessor acc1 = mmf1.CreateViewAccessor())
    using (MemoryMappedViewAccessor acc2 = mmf2.CreateViewAccessor())
    {
        // Both views over the same stream must report the same capacity.
        Assert.Equal(acc1.Capacity, acc2.Capacity);
        var rng = new Random();
        for (int iteration = 1; iteration <= 10; iteration++)
        {
            // Ping-pong between the two accessors: write through one, read through the other.
            int offset = rng.Next((int)acc1.Capacity - 1);
            bool evenIteration = iteration % 2 == 0;
            MemoryMappedViewAccessor writer = evenIteration ? acc1 : acc2;
            MemoryMappedViewAccessor reader = evenIteration ? acc2 : acc1;
            writer.Write(offset, (byte)iteration);
            writer.Flush();
            Assert.Equal(iteration, reader.ReadByte(offset));
        }
    }
}
/// <summary>
/// Test to verify that the map's size increases the underlying file size if the map's capacity is larger.
/// </summary>
[Fact]
public void FileSizeExpandsToCapacity()
{
const int InitialCapacity = 256;
using (TempFile file = new TempFile(GetTestFilePath(), InitialCapacity))
{
// Create a map with a larger capacity, and verify the file has expanded.
MemoryMappedFile.CreateFromFile(file.Path, FileMode.Open, null, InitialCapacity * 2).Dispose();
using (FileStream fs = File.OpenRead(file.Path))
{
Assert.Equal(InitialCapacity * 2, fs.Length);
}
// Do the same thing again but with a FileStream.
using (FileStream fs = File.Open(file.Path, FileMode.Open))
{
MemoryMappedFile.CreateFromFile(fs, null, InitialCapacity * 4, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, true).Dispose();
Assert.Equal(InitialCapacity * 4, fs.Length);
}
}
}
/// <summary>
/// Test the exceptional behavior when attempting to create a map so large it's not supported.
/// </summary>
[PlatformSpecific(~PlatformID.OSX)] // Because of the file-based backing, OS X pops up a warning dialog about being out-of-space (even though we clean up immediately)
[Fact]
public void TooLargeCapacity()
{
using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.CreateNew))
{
// long.MaxValue is far beyond what any platform can back with a file.
Assert.Throws<IOException>(() => MemoryMappedFile.CreateFromFile(fs, null, long.MaxValue, MemoryMappedFileAccess.ReadWrite, HandleInheritability.None, true));
}
}
/// <summary>
/// Test to verify map names are handled appropriately, causing a conflict when they're active but
/// reusable in a sequential manner.
/// </summary>
[PlatformSpecific(PlatformID.Windows)]
[Theory]
[MemberData("CreateValidMapNames")]
public void ReusingNames_Windows(string name)
{
const int Capacity = 4096;
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.CreateNew, name, Capacity))
{
ValidateMemoryMappedFile(mmf, Capacity);
// While the name is in use, creating another map with the same name must fail.
Assert.Throws<IOException>(() => MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.CreateNew, name, Capacity));
}
// After the first map is disposed, the name is free to be reused.
using (MemoryMappedFile mmf = MemoryMappedFile.CreateFromFile(GetTestFilePath(), FileMode.CreateNew, name, Capacity))
{
ValidateMemoryMappedFile(mmf, Capacity);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
// This file defines many COM dual interfaces which are legacy and
// cannot be changed. Tolerate possible obsoletion.
//
#pragma warning disable CS0618 // Type or member is obsolete
namespace System.DirectoryServices.AccountManagement
{
using System.Runtime.InteropServices;
using System;
using System.Security;
using System.Security.Permissions;
using System.Text;
// Holds byte representations of well-known Active Directory container GUIDs.
internal class Constants
{
// Private constructor: this type is a constants holder and is never instantiated.
private Constants() { }
// NOTE(review): "COMPUTRS" looks like a long-standing typo for "COMPUTERS",
// but the field name is kept as-is because callers elsewhere reference it.
internal static Byte[] GUID_USERS_CONTAINER_BYTE = new Byte[] { 0xa9, 0xd1, 0xca, 0x15, 0x76, 0x88, 0x11, 0xd1, 0xad, 0xed, 0x00, 0xc0, 0x4f, 0xd8, 0xd5, 0xcd };
internal static Byte[] GUID_COMPUTRS_CONTAINER_BYTE = new Byte[] { 0xaa, 0x31, 0x28, 0x25, 0x76, 0x88, 0x11, 0xd1, 0xad, 0xed, 0x00, 0xc0, 0x4f, 0xd8, 0xd5, 0xcd };
internal static Byte[] GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_BYTE = new Byte[] { 0x22, 0xb7, 0x0c, 0x67, 0xd5, 0x6e, 0x4e, 0xfb, 0x91, 0xe9, 0x30, 0x0f, 0xca, 0x3d, 0xc1, 0xaa };
}
// P/Invoke declarations that are safe to call from any caller (no dangerous side effects).
[SuppressUnmanagedCodeSecurityAttribute]
internal class SafeNativeMethods
{
    // To stop the compiler from autogenerating a constructor for this class
    private SafeNativeMethods() { }

    /// <summary>Returns the thread identifier of the calling thread (kernel32 GetCurrentThreadId).</summary>
    [DllImport("kernel32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "GetCurrentThreadId", CharSet = CharSet.Unicode)]
    public static extern int GetCurrentThreadId();

    /// <summary>Converts an NTSTATUS code returned by an LSA function into a Win32 error code.</summary>
    [DllImport("advapi32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "LsaNtStatusToWinError", CharSet = CharSet.Unicode)]
    public static extern int LsaNtStatusToWinError(int ntStatus);
}
[SuppressUnmanagedCodeSecurityAttribute]
internal class UnsafeNativeMethods
{
// To stop the compiler from autogenerating a constructor for this class
private UnsafeNativeMethods() { }
// Raw P/Invoke for activeds.dll's ADsOpenObject; only exposed through the wrapper below.
[DllImport(ExternDll.Activeds, ExactSpelling = true, EntryPoint = "ADsOpenObject", CharSet = System.Runtime.InteropServices.CharSet.Unicode)]
private static extern int IntADsOpenObject(string path, string userName, string password, int flags, [In, Out] ref Guid iid, [Out, MarshalAs(UnmanagedType.Interface)] out object ppObject);
// Binds to the ADSI object at 'path', returning an HRESULT and the requested COM
// interface (identified by 'iid') in ppObject. If the activeds.dll entry point is
// missing (ADSI not installed), surfaces a clearer InvalidOperationException instead.
[System.Security.SecurityCritical]
public static int ADsOpenObject(string path, string userName, string password, int flags, [In, Out] ref Guid iid, [Out, MarshalAs(UnmanagedType.Interface)] out object ppObject)
{
try
{
return IntADsOpenObject(path, userName, password, flags, ref iid, out ppObject);
}
catch (EntryPointNotFoundException)
{
// Entry point not found means ADSI isn't available on this machine.
throw new InvalidOperationException(StringResources.AdsiNotInstalled);
}
}
//
// ADSI Interopt
//
// Password-encoding options used with IADsObjectOptions (ADS_PASSWORD_ENCODING_ENUM).
internal enum ADS_PASSWORD_ENCODING_ENUM
{
ADS_PASSWORD_ENCODE_REQUIRE_SSL = 0,
ADS_PASSWORD_ENCODE_CLEAR = 1
}
// Option selectors for IAdsObjectOptions.GetOption/PutOption (ADS_OPTION_ENUM).
// Values mirror the native ADSI definitions and must not be renumbered.
internal enum ADS_OPTION_ENUM
{
ADS_OPTION_SERVERNAME = 0,
ADS_OPTION_REFERRALS = 1,
ADS_OPTION_PAGE_SIZE = 2,
ADS_OPTION_SECURITY_MASK = 3,
ADS_OPTION_MUTUAL_AUTH_STATUS = 4,
ADS_OPTION_QUOTA = 5,
ADS_OPTION_PASSWORD_PORTNUMBER = 6,
ADS_OPTION_PASSWORD_METHOD = 7,
ADS_OPTION_ACCUMULATIVE_MODIFICATION = 8,
ADS_OPTION_SKIP_SID_LOOKUP = 9
}
// COM dual interface for DN-with-binary attribute values.
// Member order defines the vtable layout and must not be changed.
[ComImport, Guid("7E99C0A2-F935-11D2-BA96-00C04FB6D0D1"), InterfaceTypeAttribute(ComInterfaceType.InterfaceIsDual)]
public interface IADsDNWithBinary
{
object BinaryValue { get; set; }
string DNString { get; set; }
}
// COM dual interface representing a 64-bit integer split into high/low 32-bit parts.
[ComImport, Guid("9068270b-0939-11D1-8be1-00c04fd8d503"), InterfaceTypeAttribute(ComInterfaceType.InterfaceIsDual)]
public interface IADsLargeInteger
{
int HighPart { get; set; }
int LowPart { get; set; }
}
// Coclass for creating an ADSI LargeInteger instance via COM activation.
[SuppressUnmanagedCodeSecurityAttribute]
[ComImport, Guid("927971f5-0939-11d1-8be1-00c04fd8d503")]
public class ADsLargeInteger
{
}
// COM dual interface exposing per-object ADSI options (see ADS_OPTION_ENUM).
[ComImport, Guid("46f14fda-232b-11d1-a808-00c04fd8d5a8"), InterfaceTypeAttribute(ComInterfaceType.InterfaceIsDual)]
public interface IAdsObjectOptions
{
[return: MarshalAs(UnmanagedType.Struct)]
Object GetOption(
[In]
int option);
void PutOption(
[In]
int option,
[In, MarshalAs(UnmanagedType.Struct)]
Object vProp);
}
// COM dual interface for the base ADSI object (IADs).
// Member order defines the vtable layout and must not be changed.
[ComImport, Guid("FD8256D0-FD15-11CE-ABC4-02608C9E7553"), InterfaceTypeAttribute(ComInterfaceType.InterfaceIsDual)]
public interface IADs
{
// Identity properties exposed by every ADSI object.
string Name
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string Class
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string GUID
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string ADsPath
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string Parent
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string Schema
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
// Cache management: GetInfo refreshes the property cache, SetInfo commits it.
void GetInfo();
void SetInfo();
// Single- and multi-valued property access by name.
[return: MarshalAs(UnmanagedType.Struct)]
Object Get(
[In, MarshalAs(UnmanagedType.BStr)]
string bstrName);
void Put(
[In, MarshalAs(UnmanagedType.BStr)]
string bstrName,
[In, MarshalAs(UnmanagedType.Struct)]
Object vProp);
[return: MarshalAs(UnmanagedType.Struct)]
Object GetEx(
[In, MarshalAs(UnmanagedType.BStr)]
String bstrName);
void PutEx(
[In, MarshalAs(UnmanagedType.U4)]
int lnControlCode,
[In, MarshalAs(UnmanagedType.BStr)]
string bstrName,
[In, MarshalAs(UnmanagedType.Struct)]
Object vProp);
void GetInfoEx(
[In, MarshalAs(UnmanagedType.Struct)]
Object vProperties,
[In, MarshalAs(UnmanagedType.U4)]
int lnReserved);
}
// COM dual interface for ADSI group objects (IADsGroup).
// The leading members duplicate IADs because COM interface inheritance is
// expressed by repeating the base vtable; member order must not be changed.
[ComImport, Guid("27636b00-410f-11cf-b1ff-02608c9e7553"), InterfaceTypeAttribute(ComInterfaceType.InterfaceIsDual)]
public interface IADsGroup
{
string Name
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string Class
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string GUID
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string ADsPath
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string Parent
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
string Schema
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
}
void GetInfo();
void SetInfo();
[return: MarshalAs(UnmanagedType.Struct)]
Object Get(
[In, MarshalAs(UnmanagedType.BStr)]
string bstrName);
void Put(
[In, MarshalAs(UnmanagedType.BStr)]
string bstrName,
[In, MarshalAs(UnmanagedType.Struct)]
Object vProp);
[return: MarshalAs(UnmanagedType.Struct)]
Object GetEx(
[In, MarshalAs(UnmanagedType.BStr)]
String bstrName);
void PutEx(
[In, MarshalAs(UnmanagedType.U4)]
int lnControlCode,
[In, MarshalAs(UnmanagedType.BStr)]
string bstrName,
[In, MarshalAs(UnmanagedType.Struct)]
Object vProp);
void GetInfoEx(
[In, MarshalAs(UnmanagedType.Struct)]
Object vProperties,
[In, MarshalAs(UnmanagedType.U4)]
int lnReserved);
// Group-specific members begin here.
string Description
{
[return: MarshalAs(UnmanagedType.BStr)]
get;
[param: MarshalAs(UnmanagedType.BStr)]
set;
}
// Enumerates, tests, adds and removes group members (by ADsPath).
IADsMembers Members();
bool IsMember([In, MarshalAs(UnmanagedType.BStr)] string bstrMember);
void Add([In, MarshalAs(UnmanagedType.BStr)] string bstrNewItem);
void Remove([In, MarshalAs(UnmanagedType.BStr)] string bstrItemToBeRemoved);
}
// COM dual interface over a collection of group members; supports enumeration
// via _NewEnum and server-side filtering via Filter. Vtable order must not change.
[ComImport, Guid("451a0030-72ec-11cf-b03b-00aa006e0975"), InterfaceTypeAttribute(ComInterfaceType.InterfaceIsDual)]
public interface IADsMembers
{
int Count
{
[return: MarshalAs(UnmanagedType.U4)]
get;
}
object _NewEnum
{
[return: MarshalAs(UnmanagedType.Interface)]
get;
}
object Filter
{
[return: MarshalAs(UnmanagedType.Struct)]
get;
[param: MarshalAs(UnmanagedType.Struct)]
set;
}
}
// Coclass for creating an ADSI Pathname object via COM activation (used with IADsPathname).
[SuppressUnmanagedCodeSecurityAttribute]
[ComImport, Guid("080d0d78-f421-11d0-a36e-00c04fb950dc")]
public class Pathname
{
}
// COM dual interface for parsing, formatting and escaping ADSI paths.
// Vtable order must not change.
[ComImport, Guid("d592aed4-f420-11d0-a36e-00c04fb950dc"), InterfaceTypeAttribute(ComInterfaceType.InterfaceIsDual)]
public interface IADsPathname
{
void Set(
[In, MarshalAs(UnmanagedType.BStr)] string bstrADsPath,
[In, MarshalAs(UnmanagedType.U4)] int lnSetType
);
void SetDisplayType(
[In, MarshalAs(UnmanagedType.U4)] int lnDisplayType
);
[return: MarshalAs(UnmanagedType.BStr)]
string Retrieve(
[In, MarshalAs(UnmanagedType.U4)] int lnFormatType
);
[return: MarshalAs(UnmanagedType.U4)]
int GetNumElements();
[return: MarshalAs(UnmanagedType.BStr)]
string
GetElement(
[In, MarshalAs(UnmanagedType.U4)] int lnElementIndex
);
void AddLeafElement(
[In, MarshalAs(UnmanagedType.BStr)] string bstrLeafElement
);
void RemoveLeafElement();
[return: MarshalAs(UnmanagedType.Struct)]
Object CopyPath();
[return: MarshalAs(UnmanagedType.BStr)]
string GetEscapedElement(
[In, MarshalAs(UnmanagedType.U4)] int lnReserved,
[In, MarshalAs(UnmanagedType.BStr)] string bstrInStr
);
int EscapedMode
{
[return: MarshalAs(UnmanagedType.U4)]
get;
[param: MarshalAs(UnmanagedType.U4)]
set;
}
}
//
// DSInteropt
//
/*
typedef enum
{
DsRole_RoleStandaloneWorkstation,
DsRole_RoleMemberWorkstation,
DsRole_RoleStandaloneServer,
DsRole_RoleMemberServer,
DsRole_RoleBackupDomainController,
DsRole_RolePrimaryDomainController,
DsRole_WorkstationWithSharedAccountDomain,
DsRole_ServerWithSharedAccountDomain,
DsRole_MemberWorkstationWithSharedAccountDomain,
DsRole_MemberServerWithSharedAccountDomain
}DSROLE_MACHINE_ROLE;
*/
// Managed mirror of the native DSROLE_MACHINE_ROLE enum above; values must stay in this order.
public enum DSROLE_MACHINE_ROLE
{
DsRole_RoleStandaloneWorkstation,
DsRole_RoleMemberWorkstation,
DsRole_RoleStandaloneServer,
DsRole_RoleMemberServer,
DsRole_RoleBackupDomainController,
DsRole_RolePrimaryDomainController,
DsRole_WorkstationWithSharedAccountDomain,
DsRole_ServerWithSharedAccountDomain,
DsRole_MemberWorkstationWithSharedAccountDomain,
DsRole_MemberServerWithSharedAccountDomain
}
/*
typedef enum
{
DsRolePrimaryDomainInfoBasic,
DsRoleUpgradeStatus,
DsRoleOperationState,
DsRolePrimaryDomainInfoBasicEx
}DSROLE_PRIMARY_DOMAIN_INFO_LEVEL;
*/
// Managed mirror of DSROLE_PRIMARY_DOMAIN_INFO_LEVEL. Note the managed values are
// explicitly 1-based, unlike the 0-based native typedef shown above.
public enum DSROLE_PRIMARY_DOMAIN_INFO_LEVEL
{
DsRolePrimaryDomainInfoBasic = 1,
DsRoleUpgradeStatus = 2,
DsRoleOperationState = 3,
DsRolePrimaryDomainInfoBasicEx = 4
}
/*
typedef struct _DSROLE_PRIMARY_DOMAIN_INFO_BASIC {
DSROLE_MACHINE_ROLE MachineRole;
ULONG Flags;
LPWSTR DomainNameFlat;
LPWSTR DomainNameDns;
LPWSTR DomainForestName;
GUID DomainGuid;
} DSROLE_PRIMARY_DOMAIN_INFO_BASIC, *PDSROLE_PRIMARY_DOMAIN_INFO_BASIC;
*/
// Managed mirror of the native struct above; sequential layout and field order
// must match the native definition exactly for marshaling to work.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class DSROLE_PRIMARY_DOMAIN_INFO_BASIC
{
public DSROLE_MACHINE_ROLE MachineRole;
public uint Flags;
[MarshalAs(UnmanagedType.LPWStr)]
public string DomainNameFlat;
[MarshalAs(UnmanagedType.LPWStr)]
public string DomainNameDns;
[MarshalAs(UnmanagedType.LPWStr)]
public string DomainForestName;
public Guid DomainGuid = new Guid();
}
/*
DWORD DsRoleGetPrimaryDomainInformation(
LPCWSTR lpServer,
DSROLE_PRIMARY_DOMAIN_INFO_LEVEL InfoLevel,
PBYTE* Buffer
); */
// Returns a Win32 error code; Buffer must be released with DsRoleFreeMemory.
[DllImport("dsrole.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "DsRoleGetPrimaryDomainInformation", CharSet = CharSet.Unicode)]
public static extern int DsRoleGetPrimaryDomainInformation(
[MarshalAs(UnmanagedType.LPTStr)] string lpServer,
[In] DSROLE_PRIMARY_DOMAIN_INFO_LEVEL InfoLevel,
out IntPtr Buffer);
/*typedef struct _DOMAIN_CONTROLLER_INFO {
LPTSTR DomainControllerName;
LPTSTR DomainControllerAddress;
ULONG DomainControllerAddressType;
GUID DomainGuid;
LPTSTR DomainName;
LPTSTR DnsForestName;
ULONG Flags;
LPTSTR DcSiteName;
LPTSTR ClientSiteName;
} DOMAIN_CONTROLLER_INFO, *PDOMAIN_CONTROLLER_INFO; */
// Managed mirror of DOMAIN_CONTROLLER_INFO; sequential layout must match the native struct.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class DomainControllerInfo
{
public string DomainControllerName = null;
public string DomainControllerAddress = null;
public int DomainControllerAddressType = 0;
public Guid DomainGuid = new Guid();
public string DomainName = null;
public string DnsForestName = null;
public int Flags = 0;
public string DcSiteName = null;
public string ClientSiteName = null;
}
/*
void DsRoleFreeMemory(
PVOID Buffer
);
*/
// Frees buffers allocated by DsRoleGetPrimaryDomainInformation.
[DllImport("dsrole.dll")]
public static extern int DsRoleFreeMemory(
[In] IntPtr buffer);
/*DWORD DsGetDcName(
LPCTSTR ComputerName,
LPCTSTR DomainName,
GUID* DomainGuid,
LPCTSTR SiteName,
ULONG Flags,
PDOMAIN_CONTROLLER_INFO* DomainControllerInfo
);*/
// Locates a domain controller; domainControllerInfo points at a native
// DOMAIN_CONTROLLER_INFO that must be released with NetApiBufferFree.
[DllImport("logoncli.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "DsGetDcNameW", CharSet = CharSet.Unicode)]
public static extern int DsGetDcName(
[In] string computerName,
[In] string domainName,
[In] IntPtr domainGuid,
[In] string siteName,
[In] int flags,
[Out] out IntPtr domainControllerInfo);
/* typedef struct _WKSTA_INFO_100 {
DWORD wki100_platform_id;
LMSTR wki100_computername;
LMSTR wki100_langroup;
DWORD wki100_ver_major;
DWORD wki100_ver_minor;
} WKSTA_INFO_100, *PWKSTA_INFO_100; */
// Managed mirror of WKSTA_INFO_100; sequential layout must match the native struct.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class WKSTA_INFO_100
{
public int wki100_platform_id = 0;
public string wki100_computername = null;
public string wki100_langroup = null;
public int wki100_ver_major = 0;
public int wki100_ver_minor = 0;
};
// Retrieves workstation info at the requested level; buffer must be freed with NetApiBufferFree.
[DllImport("wkscli.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "NetWkstaGetInfo", CharSet = CharSet.Unicode)]
public static extern int NetWkstaGetInfo(string server, int level, ref IntPtr buffer);
// Frees buffers allocated by the Net* APIs (e.g. NetWkstaGetInfo, DsGetDcName).
[DllImport("netutils.dll")]
public static extern int NetApiBufferFree(
[In] IntPtr buffer);
//
// SID
//
// SID conversion/inspection helpers from advapi32. Pointers returned by the
// GetSid* accessors point INTO the supplied SID and must not be freed separately.
[DllImport("advapi32.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "ConvertSidToStringSidW", CharSet = CharSet.Unicode)]
public static extern bool ConvertSidToStringSid(IntPtr sid, ref string stringSid);
// NOTE(review): the SID allocated by ConvertStringSidToSid must be released with LocalFree.
[DllImport("advapi32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "ConvertStringSidToSidW", CharSet = CharSet.Unicode)]
public static extern bool ConvertStringSidToSid(string stringSid, ref IntPtr sid);
[DllImport("advapi32.dll")]
public static extern int GetLengthSid(IntPtr sid);
[DllImport("advapi32.dll", SetLastError = true)]
public static extern bool IsValidSid(IntPtr sid);
[DllImport("advapi32.dll")]
public static extern IntPtr GetSidIdentifierAuthority(IntPtr sid);
[DllImport("advapi32.dll")]
public static extern IntPtr GetSidSubAuthority(IntPtr sid, int index);
[DllImport("advapi32.dll")]
public static extern IntPtr GetSidSubAuthorityCount(IntPtr sid);
[DllImport("advapi32.dll")]
public static extern bool EqualDomainSid(IntPtr pSid1, IntPtr pSid2, ref bool equal);
[DllImport("advapi32.dll", SetLastError = true)]
public static extern bool CopySid(int destinationLength, IntPtr pSidDestination, IntPtr pSidSource);
[DllImport("kernel32.dll")]
public static extern IntPtr LocalFree(IntPtr ptr);
// Splits a "domain\user" or UPN-style name into user and domain components.
[DllImport("Credui.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "CredUIParseUserNameW", CharSet = CharSet.Unicode)]
public static extern int CredUIParseUserName(
string pszUserName,
StringBuilder pszUser,
System.UInt32 ulUserMaxChars,
StringBuilder pszDomain,
System.UInt32 ulDomainMaxChars
);
// These constants were taken from the wincred.h file
public const int CRED_MAX_USERNAME_LENGTH = 514;
public const int CRED_MAX_DOMAIN_TARGET_LENGTH = 338;
/*
BOOL LookupAccountSid(
LPCTSTR lpSystemName,
PSID lpSid,
LPTSTR lpName,
LPDWORD cchName,
LPTSTR lpReferencedDomainName,
LPDWORD cchReferencedDomainName,
PSID_NAME_USE peUse
);
*/
// Resolves a SID to an account name and domain; the ref lengths follow the
// usual Win32 "size in, required size out" convention.
[DllImport("advapi32.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "LookupAccountSidW", CharSet = CharSet.Unicode)]
public static extern bool LookupAccountSid(
string computerName,
IntPtr sid,
StringBuilder name,
ref int nameLength,
StringBuilder domainName,
ref int domainNameLength,
ref int usage);
//
// AuthZ functions
//
// Flag values for AuthzInitializeResourceManager (from authz.h).
internal sealed class AUTHZ_RM_FLAG
{
// Private constructor: constants holder, never instantiated.
private AUTHZ_RM_FLAG() { }
public static int AUTHZ_RM_FLAG_NO_AUDIT = 0x1;
public static int AUTHZ_RM_FLAG_INITIALIZE_UNDER_IMPERSONATION = 0x2;
public static int AUTHZ_VALID_RM_INIT_FLAGS = (AUTHZ_RM_FLAG_NO_AUDIT | AUTHZ_RM_FLAG_INITIALIZE_UNDER_IMPERSONATION);
}
[DllImport("authz.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "AuthzInitializeResourceManager", CharSet = CharSet.Unicode)]
static extern public bool AuthzInitializeResourceManager(
int flags,
IntPtr pfnAccessCheck,
IntPtr pfnComputeDynamicGroups,
IntPtr pfnFreeDynamicGroups,
string name,
out IntPtr rm
);
/*
BOOL WINAPI AuthzInitializeContextFromSid(
DWORD Flags,
PSID UserSid,
AUTHZ_RESOURCE_MANAGER_HANDLE AuthzResourceManager,
PLARGE_INTEGER pExpirationTime,
LUID Identifier,
PVOID DynamicGroupArgs,
PAUTHZ_CLIENT_CONTEXT_HANDLE pAuthzClientContext
);
*/
[DllImport("authz.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "AuthzInitializeContextFromSid", CharSet = CharSet.Unicode)]
static extern public bool AuthzInitializeContextFromSid(
int Flags,
IntPtr UserSid,
IntPtr AuthzResourceManager,
IntPtr pExpirationTime,
LUID Identitifier,
IntPtr DynamicGroupArgs,
out IntPtr pAuthzClientContext
);
/*
[DllImport("authz.dll", SetLastError=true, CallingConvention=CallingConvention.StdCall, EntryPoint="AuthzInitializeContextFromToken", CharSet=CharSet.Unicode)]
static extern public bool AuthzInitializeContextFromToken(
int Flags,
IntPtr TokenHandle,
IntPtr AuthzResourceManager,
IntPtr pExpirationTime,
LUID Identitifier,
IntPtr DynamicGroupArgs,
out IntPtr pAuthzClientContext
);
*/
// P/Invoke: authz!AuthzGetInformationFromContext — queries information (e.g. group
// SIDs) from an AuthZ client context. Call once with BufferSize 0 to obtain the
// required size in pSizeRequired, then again with an allocated Buffer.
// Modifier order normalized to "public static extern".
[DllImport("authz.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "AuthzGetInformationFromContext", CharSet = CharSet.Unicode)]
public static extern bool AuthzGetInformationFromContext(
IntPtr hAuthzClientContext,
int InfoClass, // AUTHZ_CONTEXT_INFORMATION_CLASS value
int BufferSize,
out int pSizeRequired,
IntPtr Buffer
);
// P/Invoke: authz!AuthzFreeContext — releases a client context obtained from
// AuthzInitializeContextFromSid. Modifier order normalized to "public static extern".
[DllImport("authz.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "AuthzFreeContext", CharSet = CharSet.Unicode)]
public static extern bool AuthzFreeContext(
IntPtr AuthzClientContext
);
// P/Invoke: authz!AuthzFreeResourceManager — releases a resource-manager handle
// obtained from AuthzInitializeResourceManager. Modifier order normalized.
[DllImport("authz.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "AuthzFreeResourceManager", CharSet = CharSet.Unicode)]
public static extern bool AuthzFreeResourceManager(
IntPtr rm
);
// Managed mirror of the Win32 LUID structure (a locally unique 64-bit identifier
// split into two 32-bit halves). Field order must match the native layout.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public struct LUID
{
public int low; // LowPart
public int high; // HighPart
}
// Managed mirror of the Win32 TOKEN_GROUPS header: a count followed by a pointer
// into the variable-length SID_AND_ATTRIBUTES array that follows it natively.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class TOKEN_GROUPS
{
public int groupCount = 0; // GroupCount
public IntPtr groups = IntPtr.Zero; // first SID_AND_ATTRIBUTES entry
}
// Managed mirror of the Win32 SID_AND_ATTRIBUTES structure.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class SID_AND_ATTR
{
public IntPtr pSid = IntPtr.Zero; // PSID
public int attrs = 0; // attribute flags for the SID
}
//
// Token
//
// Managed mirror of the Win32 TOKEN_USER structure: the user account associated
// with an access token.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class TOKEN_USER
{
public SID_AND_ATTR sidAndAttributes = new SID_AND_ATTR();
}
// Managed mirror of the Win32 SID_IDENTIFIER_AUTHORITY structure: the six-byte
// top-level authority of a SID.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class SID_IDENTIFIER_AUTHORITY
{
public byte b1 = 0;
public byte b2 = 0;
public byte b3 = 0;
public byte b4 = 0;
public byte b5 = 0;
public byte b6 = 0;
}
// Managed mirror of LSA_OBJECT_ATTRIBUTES, passed to LsaOpenPolicy. Per the LSA
// API contract the structure contents are reserved; callers pass it zeroed.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class LSA_OBJECT_ATTRIBUTES
{
public int length = 0;
public IntPtr rootDirectory = IntPtr.Zero;
public IntPtr objectName = IntPtr.Zero;
public int attributes = 0;
public IntPtr securityDescriptor = IntPtr.Zero;
public IntPtr securityQualityOfService = IntPtr.Zero;
}
// Managed mirror of POLICY_ACCOUNT_DOMAIN_INFO, returned by
// LsaQueryInformationPolicy for the account-domain information class.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class POLICY_ACCOUNT_DOMAIN_INFO
{
public LSA_UNICODE_STRING domainName = new LSA_UNICODE_STRING();
public IntPtr domainSid = IntPtr.Zero; // PSID of the account domain
}
// Managed mirror of LSA_UNICODE_STRING with the buffer kept as a raw pointer
// (lengths are in bytes, and the native buffer is not NUL-terminated).
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class LSA_UNICODE_STRING
{
public ushort length = 0; // used length, in bytes
public ushort maximumLength = 0; // allocated length, in bytes
public IntPtr buffer = IntPtr.Zero; // PWSTR (not necessarily NUL-terminated)
}
// Variant of LSA_UNICODE_STRING whose buffer is marshaled as a managed string,
// for call sites where the runtime can marshal the text directly.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class LSA_UNICODE_STRING_Managed
{
public ushort length = 0; // used length, in bytes
public ushort maximumLength = 0; // allocated length, in bytes
public string buffer;
}
// Managed mirror of LSA_TRANSLATED_NAME, produced by LsaLookupSids.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class LSA_TRANSLATED_NAME
{
public int use = 0; // SID_NAME_USE value
public LSA_UNICODE_STRING name = new LSA_UNICODE_STRING();
public int domainIndex = 0; // index into the LSA_REFERENCED_DOMAIN_LIST
}
// Managed mirror of LSA_REFERENCED_DOMAIN_LIST, produced by LsaLookupSids:
// a count plus a pointer to an array of LSA_TRUST_INFORMATION entries.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class LSA_REFERENCED_DOMAIN_LIST
{
// To stop the compiler from autogenerating a constructor for this class
private LSA_REFERENCED_DOMAIN_LIST() { }
public int entries = 0;
public IntPtr domains = IntPtr.Zero; // LSA_TRUST_INFORMATION[] (native)
}
// Managed mirror of LSA_TRUST_INFORMATION (trusted-domain name and SID).
// The SID field is private because this wrapper never reads it; it exists only
// to keep the sequential layout in sync with the native structure.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public sealed class LSA_TRUST_INFORMATION
{
public LSA_UNICODE_STRING name = new LSA_UNICODE_STRING();
private IntPtr _pSid = IntPtr.Zero;
}
// P/Invoke: advapi32!OpenThreadToken — opens the access token associated with a
// thread. Returns false on failure (SetLastError = true). Modifier order
// normalized to "public static extern".
[DllImport("advapi32.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "OpenThreadToken", CharSet = CharSet.Unicode)]
public static extern bool OpenThreadToken(
IntPtr threadHandle,
int desiredAccess, // TOKEN_* access mask
bool openAsSelf,
ref IntPtr tokenHandle // receives the token; release with CloseHandle
);
// P/Invoke: advapi32!OpenProcessToken — opens the access token associated with a
// process. Returns false on failure (SetLastError = true). Modifier order normalized.
[DllImport("advapi32.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "OpenProcessToken", CharSet = CharSet.Unicode)]
public static extern bool OpenProcessToken(
IntPtr processHandle,
int desiredAccess, // TOKEN_* access mask
ref IntPtr tokenHandle // receives the token; release with CloseHandle
);
// P/Invoke: kernel32!CloseHandle — closes a kernel object handle. Modifier order normalized.
[DllImport("kernel32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "CloseHandle", CharSet = CharSet.Unicode)]
public static extern bool CloseHandle(IntPtr handle);
// P/Invoke: kernel32!GetCurrentThread — returns a pseudo-handle for the calling
// thread (does not need to be closed). Modifier order normalized.
[DllImport("kernel32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "GetCurrentThread", CharSet = CharSet.Unicode)]
public static extern IntPtr GetCurrentThread();
// P/Invoke: kernel32!GetCurrentProcess — returns a pseudo-handle for the calling
// process (does not need to be closed). Modifier order normalized.
[DllImport("kernel32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "GetCurrentProcess", CharSet = CharSet.Unicode)]
public static extern IntPtr GetCurrentProcess();
// P/Invoke: advapi32!GetTokenInformation — retrieves a class of information (user,
// groups, ...) from an access token. Call with bufferSize 0 to obtain the required
// size in returnLength. Modifier order normalized to "public static extern".
[DllImport("advapi32.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "GetTokenInformation", CharSet = CharSet.Unicode)]
public static extern bool GetTokenInformation(
IntPtr tokenHandle,
int tokenInformationClass, // TOKEN_INFORMATION_CLASS value
IntPtr buffer, // caller-allocated output buffer
int bufferSize,
ref int returnLength // bytes written / required
);
// P/Invoke: advapi32!LsaOpenPolicy — opens a handle to the LSA Policy object.
// Returns an NTSTATUS (0 == STATUS_SUCCESS), not a Win32 error code.
// Modifier order normalized to "public static extern".
[DllImport("advapi32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "LsaOpenPolicy", CharSet = CharSet.Unicode)]
public static extern int LsaOpenPolicy(
IntPtr lsaUnicodeString, // target system name (LSA_UNICODE_STRING*), or IntPtr.Zero for local
IntPtr lsaObjectAttributes, // reserved; pass a zeroed LSA_OBJECT_ATTRIBUTES
int desiredAccess, // POLICY_* access mask
ref IntPtr policyHandle); // receives the policy handle; release with LsaClose
// P/Invoke: advapi32!LsaQueryInformationPolicy — retrieves policy information
// (e.g. POLICY_ACCOUNT_DOMAIN_INFO). The returned buffer must be released with
// LsaFreeMemory. Returns an NTSTATUS. Modifier order normalized.
[DllImport("advapi32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "LsaQueryInformationPolicy", CharSet = CharSet.Unicode)]
public static extern int LsaQueryInformationPolicy(
IntPtr policyHandle,
int policyInformationClass, // POLICY_INFORMATION_CLASS value
ref IntPtr buffer // receives an LSA-allocated buffer
);
// P/Invoke: advapi32!LsaLookupSids — translates an array of SIDs into names and a
// referenced-domain list. Both output buffers are LSA-allocated and must be
// released with LsaFreeMemory. Returns an NTSTATUS (0 == STATUS_SUCCESS).
[DllImport("advapi32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "LsaLookupSids", CharSet = CharSet.Unicode)]
public static extern int LsaLookupSids(
IntPtr policyHandle,
int count, // number of entries in sids
IntPtr[] sids, // PSID array to translate
out IntPtr referencedDomains, // receives LSA_REFERENCED_DOMAIN_LIST*
out IntPtr names // receives LSA_TRANSLATED_NAME[] (native)
);
// P/Invoke: advapi32!LsaFreeMemory — frees buffers allocated by LSA functions
// (LsaLookupSids, LsaQueryInformationPolicy). Returns an NTSTATUS. Modifier order normalized.
[DllImport("advapi32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "LsaFreeMemory", CharSet = CharSet.Unicode)]
public static extern int LsaFreeMemory(IntPtr buffer);
// P/Invoke: advapi32!LsaClose — closes a policy handle opened by LsaOpenPolicy.
// Returns an NTSTATUS. Modifier order normalized.
[DllImport("advapi32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "LsaClose", CharSet = CharSet.Unicode)]
public static extern int LsaClose(IntPtr policyHandle);
//
// Impersonation
//
// P/Invoke: advapi32!LogonUserW — logs a user on and returns an access token.
// The native return is a BOOL marshaled here as int (0 == failure; check
// Marshal.GetLastWin32Error). Modifier order normalized to "public static extern".
[DllImport("advapi32.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "LogonUserW", CharSet = CharSet.Unicode)]
public static extern int LogonUser(
string lpszUsername,
string lpszDomain,
string lpszPassword,
int dwLogonType, // LOGON32_LOGON_* value
int dwLogonProvider, // LOGON32_PROVIDER_* value
ref IntPtr phToken); // receives the token; release with CloseHandle
// P/Invoke: advapi32!ImpersonateLoggedOnUser — impersonates the user represented
// by hToken on the calling thread (native BOOL marshaled as int; 0 == failure).
// Modifier order normalized.
[DllImport("advapi32.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall, EntryPoint = "ImpersonateLoggedOnUser", CharSet = CharSet.Unicode)]
public static extern int ImpersonateLoggedOnUser(IntPtr hToken);
// P/Invoke: advapi32!RevertToSelf — ends impersonation begun by
// ImpersonateLoggedOnUser (native BOOL marshaled as int; 0 == failure).
// Modifier order normalized.
[DllImport("Advapi32.dll", CallingConvention = CallingConvention.StdCall, EntryPoint = "RevertToSelf", CharSet = CharSet.Unicode)]
public static extern int RevertToSelf();
// FORMAT_MESSAGE_* flag values for FormatMessageW (winbase.h).
public const int FORMAT_MESSAGE_ALLOCATE_BUFFER = 0x00000100,
FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200,
FORMAT_MESSAGE_FROM_STRING = 0x00000400,
FORMAT_MESSAGE_FROM_HMODULE = 0x00000800,
FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000,
FORMAT_MESSAGE_ARGUMENT_ARRAY = 0x00002000,
FORMAT_MESSAGE_MAX_WIDTH_MASK = 0x000000FF; // width mask, not a flag
// P/Invoke: kernel32!FormatMessageW — formats a system or module message (used to
// turn Win32 error codes into text). Returns the number of characters written,
// or 0 on failure.
[DllImport("kernel32.dll", CharSet = System.Runtime.InteropServices.CharSet.Unicode)]
public static extern int FormatMessageW(int dwFlags, IntPtr lpSource, int dwMessageId,
int dwLanguageId, StringBuilder lpBuffer, int nSize, IntPtr arguments);
}
}
| |
namespace Xbehave.Test
{
using System;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using Xbehave.Test.Infrastructure;
using Xunit;
using Xunit.Abstractions;
// In order to release allocated resources
// As a developer
// I want to register objects for disposal after a scenario has run
// Verifies that disposables registered via .Using(c) during scenario steps are
// disposed after the scenario, in reverse registration order, including when
// steps fail or throw part-way through.
public class ObjectDisposalFeature : Feature
{
// Runs before each scenario: clears the shared static event log so each
// scenario's assertions see only its own disposal/teardown events.
[Background]
public void Background() =>
"Given no events have occurred"
.x(() => typeof(ObjectDisposalFeature).ClearTestEvents());
// Disposables registered in one step, three steps, or an async step must all be
// disposed in reverse registration order once the scenario completes.
[Scenario]
[Example(typeof(AStepWithThreeDisposables))]
[Example(typeof(ThreeStepsWithDisposables))]
[Example(typeof(AnAsyncStepWithThreeDisposables))]
public void ManyDisposablesInASingleStep(Type feature, ITestResultMessage[] results)
{
$"Given {feature}"
.x(() => { });
"When running the scenario"
.x(() => results = this.Run<ITestResultMessage>(feature));
"And there should be no failures"
.x(() => Assert.All(results, result => Assert.IsAssignableFrom<ITestPassed>(result)));
"And the disposables should each have been disposed in reverse order"
.x(() => Assert.Equal(new[] { "disposed3", "disposed2", "disposed1" }, typeof(ObjectDisposalFeature).GetTestEvents()));
}
// Even when every Dispose call throws, all disposables must still be disposed
// (in reverse order) and the failure surfaces as the final result.
// NOTE(review): the step text "Then the there should be" contains a typo, but it
// is runtime-visible output and is deliberately left unchanged here.
[Scenario]
public void ADisposableWhichThrowExceptionsWhenDisposed(Type feature, ITestResultMessage[] results)
{
"Given a step with three disposables which throw exceptions when disposed"
.x(() => feature = typeof(StepWithThreeBadDisposables));
"When running the scenario"
.x(() => results = this.Run<ITestResultMessage>(feature));
"Then the there should be at least two results"
.x(() => Assert.InRange(results.Length, 2, int.MaxValue));
"And the first n-1 results should be passes"
.x(() => Assert.All(results.Reverse().Skip(1), result => Assert.IsAssignableFrom<ITestPassed>(result)));
"And the last result should be a failure"
.x(() => Assert.IsAssignableFrom<ITestFailed>(results.Last()));
"And the disposables should be disposed in reverse order"
.x(() => Assert.Equal(new[] { "disposed3", "disposed2", "disposed1" }, typeof(ObjectDisposalFeature).GetTestEvents()));
}
// A failing (or incomplete) step must not prevent disposal of already
// registered disposables.
[Scenario]
[Example(typeof(StepsFollowedByAFailingStep))]
[Example(typeof(StepFailsToComplete))]
public void FailingSteps(Type feature, ITestResultMessage[] results)
{
$"Given {feature}"
.x(() => { });
"When running the scenario"
.x(() => results = this.Run<ITestResultMessage>(feature));
"Then there should be one failure"
.x(() => Assert.Single(results.OfType<ITestFailed>()));
"And the disposables should be disposed in reverse order"
.x(() => Assert.Equal(new[] { "disposed3", "disposed2", "disposed1" }, typeof(ObjectDisposalFeature).GetTestEvents()));
}
// Disposables and explicit Teardown callbacks share a single cleanup sequence,
// executed in reverse registration order.
[Scenario]
public void DisposablesAndTeardowns(Type feature, ITestResultMessage[] results)
{
"Given steps with disposables and teardowns"
.x(() => feature = typeof(StepsWithDisposablesAndTeardowns));
"When running the scenario"
.x(() => results = this.Run<ITestResultMessage>(feature));
"And there should be no failures"
.x(() => Assert.All(results, result => Assert.IsAssignableFrom<ITestPassed>(result)));
"And the disposables and teardowns should be disposed/executed in reverse order"
.x(() => Assert.Equal(new[] { "teardown4", "disposed3", "teardown2", "disposed1" }, typeof(ObjectDisposalFeature).GetTestEvents()));
}
// Registering a null disposable must be a no-op rather than a failure.
[Scenario]
public void NullDisposable() =>
"Given a null body"
.x(c => ((IDisposable)null).Using(c));
// Fixture: three disposables registered inside a single step.
private static class AStepWithThreeDisposables
{
[Scenario]
public static void Scenario(Disposable disposable0, Disposable disposable1, Disposable disposable2)
{
"Given some disposables"
.x(c =>
{
disposable0 = new Disposable(1).Using(c);
disposable1 = new Disposable(2).Using(c);
disposable2 = new Disposable(3).Using(c);
});
"When using the disposables"
.x(() =>
{
disposable0.Use();
disposable1.Use();
disposable2.Use();
});
}
}
// Fixture: disposables whose Dispose throws (BadDisposable).
private static class StepWithThreeBadDisposables
{
[Scenario]
public static void Scenario(Disposable disposable0, Disposable disposable1, Disposable disposable2)
{
"Given some disposables"
.x(c =>
{
disposable0 = new BadDisposable(1).Using(c);
disposable1 = new BadDisposable(2).Using(c);
disposable2 = new BadDisposable(3).Using(c);
});
"When using the disposables"
.x(() =>
{
disposable0.Use();
disposable1.Use();
disposable2.Use();
});
}
}
// Fixture: one disposable registered per step, across three steps.
private static class ThreeStepsWithDisposables
{
[Scenario]
public static void Scenario(Disposable disposable0, Disposable disposable1, Disposable disposable2)
{
"Given a disposable"
.x(c => disposable0 = new Disposable(1).Using(c));
"And another disposable"
.x(c => disposable1 = new Disposable(2).Using(c));
"And another disposable"
.x(c => disposable2 = new Disposable(3).Using(c));
"When using the disposables"
.x(() =>
{
disposable0.Use();
disposable1.Use();
disposable2.Use();
});
}
}
// Fixture: successful disposable-registering steps followed by a failing assertion step.
private static class StepsFollowedByAFailingStep
{
[Scenario]
public static void Scenario(Disposable disposable0, Disposable disposable1, Disposable disposable2)
{
"Given a disposable"
.x(c => disposable0 = new Disposable(1).Using(c));
"And another disposable"
.x(c => disposable1 = new Disposable(2).Using(c));
"And another disposable"
.x(c => disposable2 = new Disposable(3).Using(c));
"When using the disposables"
.x(() =>
{
disposable0.Use();
disposable1.Use();
disposable2.Use();
});
"Then something happens"
.x(() => Assert.Equal(0, 1));
}
}
// Fixture: the registering step itself throws after registering its disposables.
private static class StepFailsToComplete
{
[Scenario]
public static void Scenario() =>
"Given some disposables"
.x(c =>
{
new Disposable(1).Using(c);
new Disposable(2).Using(c);
new Disposable(3).Using(c);
throw new InvalidOperationException();
});
}
// Fixture: disposables registered inside an async step body.
private static class AnAsyncStepWithThreeDisposables
{
[Scenario]
public static void Scenario(Disposable disposable0, Disposable disposable1, Disposable disposable2)
{
"Given some disposables"
.x(async c =>
{
await Task.Yield();
disposable0 = new Disposable(1).Using(c);
disposable1 = new Disposable(2).Using(c);
disposable2 = new Disposable(3).Using(c);
});
"When using the disposables"
.x(() =>
{
disposable0.Use();
disposable1.Use();
disposable2.Use();
});
}
}
// Fixture: interleaves .Using(c) disposables with explicit .Teardown callbacks;
// the event names encode the expected overall cleanup order (4, 3, 2, 1).
private static class StepsWithDisposablesAndTeardowns
{
[Scenario]
public static void Scenario()
{
"Given something"
.x(c => new Disposable(1).Using(c))
.Teardown(() => typeof(ObjectDisposalFeature).SaveTestEvent("teardown2"));
"And something else"
.x(c => new Disposable(3).Using(c))
.Teardown(() => typeof(ObjectDisposalFeature).SaveTestEvent("teardown4"));
}
}
// Test double: records "disposed<number>" in the shared event log when disposed,
// and throws ObjectDisposedException if used after disposal.
private class Disposable : IDisposable
{
private readonly int number;
private bool isDisposed;
public Disposable(int number) => this.number = number;
// Finalizer passes disposing: false, so finalization records no event —
// only explicit disposal is observable by the assertions above.
~Disposable()
{
this.Dispose(false);
}
public void Use()
{
if (this.isDisposed)
{
throw new ObjectDisposedException(this.GetType().FullName);
}
}
public void Dispose()
{
this.Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
var @event = string.Concat("disposed", this.number.ToString(CultureInfo.InvariantCulture));
typeof(ObjectDisposalFeature).SaveTestEvent(@event);
this.isDisposed = true;
}
}
}
// Test double: records the disposal event (via the base class) and then throws,
// to verify cleanup continues past a throwing Dispose.
private sealed class BadDisposable : Disposable
{
public BadDisposable(int number)
: base(number)
{
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
if (disposing)
{
throw new NotImplementedException();
}
}
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Management.Sql.LegacySdk;
using Microsoft.Azure.Management.Sql.LegacySdk.Models;
namespace Microsoft.Azure.Management.Sql.LegacySdk
{
/// <summary>
/// The Windows Azure SQL Database management API provides a RESTful set of
/// web services that interact with Windows Azure SQL Database services to
/// manage your databases. The API enables users to create, retrieve,
/// update, and delete databases and servers.
/// </summary>
// Tool-generated sync/async extension wrappers over IRecommendedElasticPoolOperations.
// Each synchronous method blocks the calling thread on its async counterpart
// (generated sync-over-async pattern); prefer the *Async overloads in async code.
public static partial class RecommendedElasticPoolOperationsExtensions
{
/// <summary>
/// Returns information about an Azure SQL Recommended Elastic Pool.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Recommended Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server for which the
/// Azure SQL Recommended Elastic Pool is.
/// </param>
/// <param name='recommendedElasticPoolName'>
/// Required. The name of the Azure SQL Recommended Elastic Pool to be
/// retrieved.
/// </param>
/// <returns>
/// Represents the response to a Get Azure Sql Recommended Resource
/// pool request.
/// </returns>
public static RecommendedElasticPoolGetResponse Get(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string recommendedElasticPoolName)
{
// Generated sync wrapper: blocks until the async call completes.
return Task.Factory.StartNew((object s) =>
{
return ((IRecommendedElasticPoolOperations)s).GetAsync(resourceGroupName, serverName, recommendedElasticPoolName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about an Azure SQL Recommended Elastic Pool.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Recommended Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server for which the
/// Azure SQL Recommended Elastic Pool is.
/// </param>
/// <param name='recommendedElasticPoolName'>
/// Required. The name of the Azure SQL Recommended Elastic Pool to be
/// retrieved.
/// </param>
/// <returns>
/// Represents the response to a Get Azure Sql Recommended Resource
/// pool request.
/// </returns>
public static Task<RecommendedElasticPoolGetResponse> GetAsync(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string recommendedElasticPoolName)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.GetAsync(resourceGroupName, serverName, recommendedElasticPoolName, CancellationToken.None);
}
/// <summary>
/// Returns information about an Azure SQL Database inside of an Azure
/// Sql Recommended Elastic Pool.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server on which the
/// database is hosted.
/// </param>
/// <param name='recommendedElasticPoolName'>
/// Required. The name of the Azure SQL Database Elastic Pool to be
/// retrieved.
/// </param>
/// <param name='databaseName'>
/// Required. The name of the Azure SQL Database to be retrieved.
/// </param>
/// <returns>
/// Represents the response to a Get Azure Sql Database request.
/// </returns>
public static DatabaseGetResponse GetDatabases(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string recommendedElasticPoolName, string databaseName)
{
// Generated sync wrapper: blocks until the async call completes.
return Task.Factory.StartNew((object s) =>
{
return ((IRecommendedElasticPoolOperations)s).GetDatabasesAsync(resourceGroupName, serverName, recommendedElasticPoolName, databaseName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about an Azure SQL Database inside of an Azure
/// Sql Recommended Elastic Pool.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server on which the
/// database is hosted.
/// </param>
/// <param name='recommendedElasticPoolName'>
/// Required. The name of the Azure SQL Database Elastic Pool to be
/// retrieved.
/// </param>
/// <param name='databaseName'>
/// Required. The name of the Azure SQL Database to be retrieved.
/// </param>
/// <returns>
/// Represents the response to a Get Azure Sql Database request.
/// </returns>
public static Task<DatabaseGetResponse> GetDatabasesAsync(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string recommendedElasticPoolName, string databaseName)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.GetDatabasesAsync(resourceGroupName, serverName, recommendedElasticPoolName, databaseName, CancellationToken.None);
}
/// <summary>
/// Returns information about Azure SQL Recommended Elastic Pools.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Recommended Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server in which Azure
/// SQL Recommended Elastic Pools are hosted.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Recommended Elastic
/// Pool request.
/// </returns>
public static RecommendedElasticPoolListResponse List(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName)
{
// Generated sync wrapper: blocks until the async call completes.
return Task.Factory.StartNew((object s) =>
{
return ((IRecommendedElasticPoolOperations)s).ListAsync(resourceGroupName, serverName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about Azure SQL Recommended Elastic Pools.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Recommended Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server in which Azure
/// SQL Recommended Elastic Pools are hosted.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Recommended Elastic
/// Pool request.
/// </returns>
public static Task<RecommendedElasticPoolListResponse> ListAsync(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.ListAsync(resourceGroupName, serverName, CancellationToken.None);
}
/// <summary>
/// Returns information about an Azure SQL Database inside of an Azure
/// Sql Recommended Elastic Pool.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server on which the
/// database is hosted.
/// </param>
/// <param name='recommendedElasticPoolName'>
/// Required. The name of the Azure SQL Recommended Elastic Pool to be
/// retrieved.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Database request.
/// </returns>
public static DatabaseListResponse ListDatabases(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string recommendedElasticPoolName)
{
// Generated sync wrapper: blocks until the async call completes.
return Task.Factory.StartNew((object s) =>
{
return ((IRecommendedElasticPoolOperations)s).ListDatabasesAsync(resourceGroupName, serverName, recommendedElasticPoolName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about an Azure SQL Database inside of an Azure
/// Sql Recommended Elastic Pool.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server on which the
/// database is hosted.
/// </param>
/// <param name='recommendedElasticPoolName'>
/// Required. The name of the Azure SQL Recommended Elastic Pool to be
/// retrieved.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Database request.
/// </returns>
public static Task<DatabaseListResponse> ListDatabasesAsync(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string recommendedElasticPoolName)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.ListDatabasesAsync(resourceGroupName, serverName, recommendedElasticPoolName, CancellationToken.None);
}
/// <summary>
/// Returns information about Azure SQL Recommended Elastic Pools.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Recommended Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server in which Azure
/// SQL Recommended Elastic Pools are hosted.
/// </param>
/// <param name='expand'>
/// Required. The comma separated list of child objects that we want to
/// expand on in response.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Recommended Elastic
/// Pool request.
/// </returns>
public static RecommendedElasticPoolListResponse ListExpanded(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string expand)
{
// Generated sync wrapper: blocks until the async call completes.
return Task.Factory.StartNew((object s) =>
{
return ((IRecommendedElasticPoolOperations)s).ListExpandedAsync(resourceGroupName, serverName, expand);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about Azure SQL Recommended Elastic Pools.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Recommended Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server in which Azure
/// SQL Recommended Elastic Pools are hosted.
/// </param>
/// <param name='expand'>
/// Required. The comma separated list of child objects that we want to
/// expand on in response.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Recommended Elastic
/// Pool request.
/// </returns>
public static Task<RecommendedElasticPoolListResponse> ListExpandedAsync(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string expand)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.ListExpandedAsync(resourceGroupName, serverName, expand, CancellationToken.None);
}
/// <summary>
/// Returns information about a recommended elastic pool's metrics.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server.
/// </param>
/// <param name='recommendedElasticPoolName'>
/// Required. The name of the Azure SQL Recommended Elastic Pool to be
/// retrieved.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Recommended Elastic
/// Pool metrics request.
/// </returns>
public static RecommendedElasticPoolListMetricsResponse ListMetrics(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string recommendedElasticPoolName)
{
// Generated sync wrapper: blocks until the async call completes.
return Task.Factory.StartNew((object s) =>
{
return ((IRecommendedElasticPoolOperations)s).ListMetricsAsync(resourceGroupName, serverName, recommendedElasticPoolName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about a recommended elastic pool's metrics.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IRecommendedElasticPoolOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server.
/// </param>
/// <param name='recommendedElasticPoolName'>
/// Required. The name of the Azure SQL Recommended Elastic Pool to be
/// retrieved.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Recommended Elastic
/// Pool metrics request.
/// </returns>
public static Task<RecommendedElasticPoolListMetricsResponse> ListMetricsAsync(this IRecommendedElasticPoolOperations operations, string resourceGroupName, string serverName, string recommendedElasticPoolName)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.ListMetricsAsync(resourceGroupName, serverName, recommendedElasticPoolName, CancellationToken.None);
}
}
}
| |
using System.Diagnostics;
using System;
using System.Management;
using System.Collections;
using Microsoft.VisualBasic;
using System.Data.SqlClient;
using System.Web.UI.Design;
using System.Data;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.IO;
using System.Configuration;
using System.Collections.Specialized;
using CrystalDecisions.Shared;
using CrystalDecisions.CrystalReports.Engine;
namespace ACSGhana.Web.Framework
{
namespace Reporting
{
// Holds the individual components of a SQL Server connection.
public struct ConnectionPartValues
{
public string ServerName;
public string DatabaseName;
public string UserName;
public string Password; // NOTE(review): stored in plain text — confirm handling upstream
public bool IntegratedSecurity; // presumably Windows auth when true — verify against callers
}
public class ReportServices
{
// Exports the report to a temporary file (via GenerateExport) and returns an
// open read stream over that file. The caller is responsible for disposing the
// returned stream.
public static FileStream GenerateExportStream(CrystalDecisions.CrystalReports.Engine.ReportDocument selectedReport, CrystalDecisions.Shared.ExportFormatType eft)
{
string exportedPath = GenerateExport(selectedReport, eft);
return new FileStream(exportedPath, FileMode.Open);
}
// Convenience overload: exports using the report's own file name as the base
// name (an empty ExportFileName delegates naming to the 3-argument overload).
public static string GenerateExport(CrystalDecisions.CrystalReports.Engine.ReportDocument selectedReport, CrystalDecisions.Shared.ExportFormatType eft)
{
return GenerateExport(selectedReport, eft, string.Empty);
}
// Exports the given Crystal report to a file in the configured temp directory
// and returns the full path of the file written. ExportFileName, when non-empty,
// overrides the base file name (extension is chosen from the export format).
// Fixes relative to the original: removed the unused 'contentType' local, and
// the HTML branch now builds its result path with Path.Combine (the original
// used string.Format("{0}{1}\\{2}") which omitted the separator between the
// temp directory and the report sub-folder) — consistent with the non-HTML branch.
public static string GenerateExport(CrystalDecisions.CrystalReports.Engine.ReportDocument selectedReport, CrystalDecisions.Shared.ExportFormatType eft, string ExportFileName)
{
selectedReport.ExportOptions.ExportFormatType = eft;
// Make sure asp.net has create and delete permissions in the directory
string tempDir = HttpContext.Current.Server.MapPath(ConfigurationManager.AppSettings["tempDir"]);
string tempFileName = string.Concat(Path.GetFileNameWithoutExtension(selectedReport.FileName), ".");
if (!string.IsNullOrEmpty(ExportFileName))
{
tempFileName = string.Concat(ExportFileName, ".");
}
// Append the extension matching the requested export format.
switch (eft)
{
case CrystalDecisions.Shared.ExportFormatType.PortableDocFormat:
tempFileName += "pdf";
break;
case CrystalDecisions.Shared.ExportFormatType.WordForWindows:
tempFileName += "doc";
break;
case CrystalDecisions.Shared.ExportFormatType.Excel:
tempFileName += "xls";
break;
case CrystalDecisions.Shared.ExportFormatType.HTML32:
case CrystalDecisions.Shared.ExportFormatType.HTML40:
tempFileName += "htm";
// HTML export needs its base folder and file name supplied up front.
CrystalDecisions.Shared.HTMLFormatOptions hop = new CrystalDecisions.Shared.HTMLFormatOptions();
hop.HTMLBaseFolderName = tempDir;
hop.HTMLFileName = tempFileName;
selectedReport.ExportOptions.FormatOptions = hop;
break;
}
CrystalDecisions.Shared.DiskFileDestinationOptions dfo = new CrystalDecisions.Shared.DiskFileDestinationOptions();
dfo.DiskFileName = Path.Combine(tempDir, tempFileName);
selectedReport.ExportOptions.DestinationOptions = dfo;
selectedReport.ExportOptions.ExportDestinationType = CrystalDecisions.Shared.ExportDestinationType.DiskFile;
selectedReport.Export();
selectedReport.Close();
string tempFileNameUsed;
if (eft == CrystalDecisions.Shared.ExportFormatType.HTML32 || eft == CrystalDecisions.Shared.ExportFormatType.HTML40)
{
// HTML export is written into a sub-folder named after the report file
// (its .rpt extension stripped).
string leafDir = Path.GetFileNameWithoutExtension(selectedReport.FilePath);
tempFileNameUsed = Path.Combine(Path.Combine(tempDir, leafDir), tempFileName);
}
else
{
tempFileNameUsed = Path.Combine(tempDir, tempFileName);
}
return tempFileNameUsed;
}
public static void ExportReport(CrystalDecisions.CrystalReports.Engine.ReportDocument selectedReport, CrystalDecisions.Shared.ExportFormatType eft, ref HttpResponse TargetResponse)
{
ExportReport(selectedReport, eft, TargetResponse, null);
}
public static void ExportReport(CrystalDecisions.CrystalReports.Engine.ReportDocument selectedReport, CrystalDecisions.Shared.ExportFormatType eft, string ExportFileName)
{
ExportReport(selectedReport, eft, null, ExportFileName);
}
public static void ExportReport(CrystalDecisions.CrystalReports.Engine.ReportDocument selectedReport, CrystalDecisions.Shared.ExportFormatType eft, HttpResponse TargetResponse, string ExportFileName)
{
string contentType = "";
HttpResponse currentReponse = TargetResponse;
if (currentReponse == null)
{
currentReponse = HttpContext.Current.Response;
}
string tempFileNameUsed = GenerateExport(selectedReport, eft, ExportFileName);
switch (eft)
{
case ExportFormatType.Excel:
contentType = "application/vnd.ms-excel";
break;
case ExportFormatType.PortableDocFormat:
contentType = "application/pdf";
break;
case ExportFormatType.RichText:
contentType = "application/rtf";
break;
case ExportFormatType.WordForWindows:
contentType = "application/doc";
break;
case ExportFormatType.HTML32:
contentType = "application/text";
break;
case ExportFormatType.HTML40:
contentType = "application/text";
break;
}
currentReponse.ClearContent();
currentReponse.ClearHeaders();
currentReponse.ContentType = contentType;
currentReponse.WriteFile(tempFileNameUsed);
currentReponse.Flush();
currentReponse.Close();
try
{
System.IO.File.Delete(tempFileNameUsed);
}
catch (System.Exception)
{
}
}
public static void SetReportParameters(ReportDocument TargetReport, NameValueCollection Params)
{
try
{
if (@Params.Count > 0)
{
ParameterFieldDefinitions crParameterFieldDefinitions = TargetReport.DataDefinition.ParameterFields;
foreach (ParameterFieldDefinition parafld in crParameterFieldDefinitions)
{
if (parafld.DiscreteOrRangeKind.ToString() == "DiscreteValue")
{
ParameterDiscreteValue discreteVal = new ParameterDiscreteValue();
ParameterValues curvalues = new ParameterValues();
discreteVal.Value = @Params[parafld.ParameterFieldName];
if (discreteVal.Value != null)
{
curvalues.Add(discreteVal);
parafld.ApplyCurrentValues(curvalues);
}
else
{
}
}
}
}
}
catch (System.Exception)
{
}
}
public static ConnectionPartValues GetConnectionInfo(string ConnectionString)
{
try
{
string strConn = ConnectionString;
string[] strArrConn = strConn.Split(';');
ConnectionPartValues strPartValue = new ConnectionPartValues();
foreach (string itm in strArrConn)
{
if (itm.Trim().StartsWith("Data Source="))
{
strPartValue.ServerName = itm.Split('=')[1];
}
if (itm.Trim().StartsWith("Initial Catalog="))
{
strPartValue.DatabaseName = itm.Split('=')[1];
}
if (itm.Trim().StartsWith("UID="))
{
strPartValue.UserName = itm.Split('=')[1];
}
if (itm.Trim().StartsWith("User ID="))
{
strPartValue.UserName = itm.Split('=')[1];
}
if (itm.Trim().StartsWith("Password="))
{
strPartValue.Password = itm.Split('=')[1];
}
if (itm.Trim().StartsWith("Integrated Security="))
{
strPartValue.IntegratedSecurity = true;
}
if (itm.Trim().StartsWith("Trusted Connection="))
{
strPartValue.IntegratedSecurity = true;
}
if (itm.Trim().EndsWith("SSPI"))
{
strPartValue.IntegratedSecurity = true;
}
}
return strPartValue;
}
catch (System.Exception)
{
return null;
}
}
public static void ParseReport(ref ReportDocument TargetReport, string NewConnectionString, NameValueCollection QueryString)
{
TableLogOnInfo crTableLogonInfo;
ConnectionInfo crConnectionInfo = new ConnectionInfo();
crTableLogonInfo = new TableLogOnInfo();
ConnectionPartValues partNames = GetConnectionInfo(NewConnectionString);
crConnectionInfo.ServerName = partNames.ServerName;
crConnectionInfo.DatabaseName = partNames.DatabaseName;
if (! partNames.IntegratedSecurity)
{
crConnectionInfo.UserID = partNames.UserName;
crConnectionInfo.Password = partNames.Password;
}
else
{
crConnectionInfo.IntegratedSecurity = true;
}
foreach (CrystalDecisions.CrystalReports.Engine.Table sourceTable in TargetReport.Database.Tables)
{
crTableLogonInfo.ConnectionInfo = crConnectionInfo;
sourceTable.ApplyLogOnInfo(crTableLogonInfo);
string strLocation = sourceTable.Location;
sourceTable.Location = crTableLogonInfo.ConnectionInfo.DatabaseName + ".dbo." + strLocation.Substring(strLocation.LastIndexOf(".") + 1);
}
SetReportParameters(TargetReport, QueryString);
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Threading;
using Abp.Configuration.Startup;
using Abp.Dependency;
using Abp.Extensions;
namespace Abp.Localization.Dictionaries
{
/// <summary>
/// This class is used to build a localization source
/// which works on memory based dictionaries to find strings.
/// </summary>
/// <summary>
/// A localization source that resolves strings from in-memory dictionaries
/// supplied by an <see cref="ILocalizationDictionaryProvider"/>.
/// </summary>
public class DictionaryBasedLocalizationSource : IDictionaryBasedLocalizationSource
{
    /// <summary>
    /// Unique Name of the source.
    /// </summary>
    public string Name { get; private set; }

    public ILocalizationDictionaryProvider DictionaryProvider { get { return _dictionaryProvider; } }

    protected ILocalizationConfiguration LocalizationConfiguration { get; private set; }

    private readonly ILocalizationDictionaryProvider _dictionaryProvider;

    /// <summary>
    /// Creates a new dictionary based localization source.
    /// </summary>
    /// <param name="name">Unique name of the source</param>
    /// <param name="dictionaryProvider">Provider of the dictionaries to search</param>
    public DictionaryBasedLocalizationSource(string name, ILocalizationDictionaryProvider dictionaryProvider)
    {
        if (name.IsNullOrEmpty())
        {
            throw new ArgumentNullException("name");
        }
        if (dictionaryProvider == null)
        {
            throw new ArgumentNullException("dictionaryProvider");
        }
        Name = name;
        _dictionaryProvider = dictionaryProvider;
    }

    /// <inheritdoc/>
    public virtual void Initialize(ILocalizationConfiguration configuration, IIocResolver iocResolver)
    {
        LocalizationConfiguration = configuration;
        DictionaryProvider.Initialize(Name);
    }

    /// <inheritdoc/>
    public string GetString(string name)
    {
        return GetString(name, CultureInfo.CurrentUICulture);
    }

    /// <inheritdoc/>
    public string GetString(string name, CultureInfo culture)
    {
        var localized = GetStringOrNull(name, culture);
        return localized != null
            ? localized
            : ReturnGivenNameOrThrowException(name, culture);
    }

    public string GetStringOrNull(string name, bool tryDefaults = true)
    {
        return GetStringOrNull(name, CultureInfo.CurrentUICulture, tryDefaults);
    }

    public string GetStringOrNull(string name, CultureInfo culture, bool tryDefaults = true)
    {
        var allDictionaries = DictionaryProvider.Dictionaries;

        // 1) Exact culture match, country code included (e.g. "tr-TR").
        ILocalizationDictionary exactDictionary;
        if (allDictionaries.TryGetValue(culture.Name, out exactDictionary))
        {
            var exactEntry = exactDictionary.GetOrNull(name);
            if (exactEntry != null)
            {
                return exactEntry.Value;
            }
        }

        if (!tryDefaults)
        {
            return null;
        }

        // 2) Language-only fallback (e.g. "tr" for "tr-TR").
        if (culture.Name.Contains("-"))
        {
            ILocalizationDictionary languageDictionary;
            if (allDictionaries.TryGetValue(GetBaseCultureName(culture.Name), out languageDictionary))
            {
                var languageEntry = languageDictionary.GetOrNull(name);
                if (languageEntry != null)
                {
                    return languageEntry.Value;
                }
            }
        }

        // 3) Default dictionary of the source.
        var fallbackDictionary = DictionaryProvider.DefaultDictionary;
        if (fallbackDictionary == null)
        {
            return null;
        }

        var fallbackEntry = fallbackDictionary.GetOrNull(name);
        return fallbackEntry == null ? null : fallbackEntry.Value;
    }

    /// <inheritdoc/>
    public IReadOnlyList<LocalizedString> GetAllStrings(bool includeDefaults = true)
    {
        return GetAllStrings(CultureInfo.CurrentUICulture, includeDefaults);
    }

    /// <inheritdoc/>
    public IReadOnlyList<LocalizedString> GetAllStrings(CultureInfo culture, bool includeDefaults = true)
    {
        var allDictionaries = DictionaryProvider.Dictionaries;

        // Merge from least to most specific so that more specific cultures
        // overwrite entries of less specific ones.
        var mergedStrings = new Dictionary<string, LocalizedString>();

        if (includeDefaults)
        {
            // Start with the default dictionary of the source.
            var fallbackDictionary = DictionaryProvider.DefaultDictionary;
            if (fallbackDictionary != null)
            {
                foreach (var entry in fallbackDictionary.GetAllStrings())
                {
                    mergedStrings[entry.Name] = entry;
                }
            }

            // Then overwrite with the language-only dictionary, if the culture has a country code.
            if (culture.Name.Contains("-"))
            {
                ILocalizationDictionary languageDictionary;
                if (allDictionaries.TryGetValue(GetBaseCultureName(culture.Name), out languageDictionary))
                {
                    foreach (var entry in languageDictionary.GetAllStrings())
                    {
                        mergedStrings[entry.Name] = entry;
                    }
                }
            }
        }

        // Finally, overwrite with the exact-culture dictionary.
        ILocalizationDictionary exactDictionary;
        if (allDictionaries.TryGetValue(culture.Name, out exactDictionary))
        {
            foreach (var entry in exactDictionary.GetAllStrings())
            {
                mergedStrings[entry.Name] = entry;
            }
        }

        return mergedStrings.Values.ToImmutableList();
    }

    /// <summary>
    /// Extends the source with given dictionary.
    /// </summary>
    /// <param name="dictionary">Dictionary to extend the source</param>
    public virtual void Extend(ILocalizationDictionary dictionary)
    {
        DictionaryProvider.Extend(dictionary);
    }

    protected virtual string ReturnGivenNameOrThrowException(string name, CultureInfo culture)
    {
        return LocalizationSourceHelper.ReturnGivenNameOrThrowException(LocalizationConfiguration, Name, name, culture);
    }

    // "pt-BR" -> "pt"; a culture name without a country part is returned unchanged.
    private static string GetBaseCultureName(string cultureName)
    {
        if (!cultureName.Contains("-"))
        {
            return cultureName;
        }
        return cultureName.Left(cultureName.IndexOf("-", StringComparison.Ordinal));
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    // Driver for the Avx2.BroadcastScalarToVector256<SByte> test: runs every scenario
    // when the intrinsic is supported, otherwise verifies the unsupported path throws.
    // NOTE: this file is auto-generated from a template (see header); changes belong in the template.
    private static void BroadcastScalarToVector256SByte()
    {
        var test = new SimpleUnaryOpTest__BroadcastScalarToVector256SByte();

        if (test.IsSupported)
        {
            // Validates basic functionality works, using Unsafe.Read
            test.RunBasicScenario_UnsafeRead();

            if (Sse2.IsSupported)
            {
                // Validates basic functionality works, using Load
                test.RunBasicScenario_Load();

                // Validates basic functionality works, using LoadAligned
                test.RunBasicScenario_LoadAligned();
            }

            // Validates calling via reflection works, using Unsafe.Read
            test.RunReflectionScenario_UnsafeRead();

            if (Sse2.IsSupported)
            {
                // Validates calling via reflection works, using Load
                test.RunReflectionScenario_Load();

                // Validates calling via reflection works, using LoadAligned
                test.RunReflectionScenario_LoadAligned();
            }

            // Validates passing a static member works
            test.RunClsVarScenario();

            // Validates passing a local works, using Unsafe.Read
            test.RunLclVarScenario_UnsafeRead();

            if (Sse2.IsSupported)
            {
                // Validates passing a local works, using Load
                test.RunLclVarScenario_Load();

                // Validates passing a local works, using LoadAligned
                test.RunLclVarScenario_LoadAligned();
            }

            // Validates passing the field of a local works
            test.RunLclFldScenario();

            // Validates passing an instance member works
            test.RunFldScenario();
        }
        else
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
// Test harness for Avx2.BroadcastScalarToVector256<SByte>: broadcasts element 0 of a
// Vector128<SByte> to every element of a Vector256<SByte> and validates the result.
// NOTE: this file is auto-generated from a template (see header); changes belong in the template.
public sealed unsafe class SimpleUnaryOpTest__BroadcastScalarToVector256SByte
{
    // Size in bytes of the result vector (Vector256).
    private const int VectorSize = 32;

    private const int Op1ElementCount = VectorSize / sizeof(SByte);
    private const int RetElementCount = VectorSize / sizeof(SByte);

    // Scratch source data, re-randomized before each copy into an operand.
    private static SByte[] _data = new SByte[Op1ElementCount];

    // Static operand for the class-variable scenario.
    private static Vector128<SByte> _clsVar;

    // Instance operand for the field scenario.
    private Vector128<SByte> _fld;

    // Unmanaged in/out buffers used by the pointer-based scenarios.
    private SimpleUnaryOpTest__DataTable<SByte, SByte> _dataTable;

    // Seeds the static operand with random sbyte values.
    static SimpleUnaryOpTest__BroadcastScalarToVector256SByte()
    {
        var random = new Random();

        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (sbyte)(random.Next(0, sbyte.MaxValue)); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _clsVar), ref Unsafe.As<SByte, byte>(ref _data[0]), VectorSize);
    }

    // Seeds the instance operand and the data table with (independent) random sbyte values.
    public SimpleUnaryOpTest__BroadcastScalarToVector256SByte()
    {
        Succeeded = true;

        var random = new Random();

        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (sbyte)(random.Next(0, sbyte.MaxValue)); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _fld), ref Unsafe.As<SByte, byte>(ref _data[0]), VectorSize);

        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (sbyte)(random.Next(0, sbyte.MaxValue)); }
        _dataTable = new SimpleUnaryOpTest__DataTable<SByte, SByte>(_data, new SByte[RetElementCount], VectorSize);
    }

    // The intrinsic under test requires AVX2.
    public bool IsSupported => Avx2.IsSupported;

    // Set to false by ValidateResult on any mismatch (or by RunUnsupportedScenario).
    public bool Succeeded { get; set; }

    // Reads the operand from unmanaged memory via Unsafe.Read.
    public void RunBasicScenario_UnsafeRead()
    {
        var result = Avx2.BroadcastScalarToVector256<SByte>(
            Unsafe.Read<Vector128<SByte>>(_dataTable.inArrayPtr)
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    // Reads the operand via Sse2.LoadVector128 (unaligned load).
    public void RunBasicScenario_Load()
    {
        var result = Avx2.BroadcastScalarToVector256<SByte>(
            Sse2.LoadVector128((SByte*)(_dataTable.inArrayPtr))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    // Reads the operand via Sse2.LoadAlignedVector128 (aligned load).
    public void RunBasicScenario_LoadAligned()
    {
        var result = Avx2.BroadcastScalarToVector256<SByte>(
            Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArrayPtr))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    // Invokes the generic intrinsic through reflection, operand via Unsafe.Read.
    public void RunReflectionScenario_UnsafeRead()
    {
        var result = typeof(Avx2).GetMethod(nameof(Avx2.BroadcastScalarToVector256))
                         .MakeGenericMethod( new Type[] { typeof(SByte) })
                         .Invoke(null, new object[] {
                            Unsafe.Read<Vector128<SByte>>(_dataTable.inArrayPtr)
                         });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector256<SByte>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    // Invokes the generic intrinsic through reflection, operand via unaligned load.
    public void RunReflectionScenario_Load()
    {
        var result = typeof(Avx2).GetMethod(nameof(Avx2.BroadcastScalarToVector256))
                         .MakeGenericMethod( new Type[] { typeof(SByte) })
                         .Invoke(null, new object[] {
                            Sse2.LoadVector128((SByte*)(_dataTable.inArrayPtr))
                         });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector256<SByte>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    // Invokes the generic intrinsic through reflection, operand via aligned load.
    public void RunReflectionScenario_LoadAligned()
    {
        var result = typeof(Avx2).GetMethod(nameof(Avx2.BroadcastScalarToVector256))
                         .MakeGenericMethod( new Type[] { typeof(SByte) })
                         .Invoke(null, new object[] {
                            Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArrayPtr))
                         });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector256<SByte>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    // Passes the static field as the operand.
    public void RunClsVarScenario()
    {
        var result = Avx2.BroadcastScalarToVector256<SByte>(
            _clsVar
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_clsVar, _dataTable.outArrayPtr);
    }

    // Passes a local variable (read via Unsafe.Read) as the operand.
    public void RunLclVarScenario_UnsafeRead()
    {
        var firstOp = Unsafe.Read<Vector128<SByte>>(_dataTable.inArrayPtr);
        var result = Avx2.BroadcastScalarToVector256<SByte>(firstOp);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }

    // Passes a local variable (unaligned load) as the operand.
    public void RunLclVarScenario_Load()
    {
        var firstOp = Sse2.LoadVector128((SByte*)(_dataTable.inArrayPtr));
        var result = Avx2.BroadcastScalarToVector256<SByte>(firstOp);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }

    // Passes a local variable (aligned load) as the operand.
    public void RunLclVarScenario_LoadAligned()
    {
        var firstOp = Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArrayPtr));
        var result = Avx2.BroadcastScalarToVector256<SByte>(firstOp);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }

    // Passes the field of a freshly constructed local instance as the operand.
    public void RunLclFldScenario()
    {
        var test = new SimpleUnaryOpTest__BroadcastScalarToVector256SByte();
        var result = Avx2.BroadcastScalarToVector256<SByte>(test._fld);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(test._fld, _dataTable.outArrayPtr);
    }

    // Passes this instance's field as the operand.
    public void RunFldScenario()
    {
        var result = Avx2.BroadcastScalarToVector256<SByte>(_fld);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_fld, _dataTable.outArrayPtr);
    }

    // On hardware without AVX2 the intrinsic must throw PlatformNotSupportedException.
    public void RunUnsupportedScenario()
    {
        Succeeded = false;

        try
        {
            RunBasicScenario_UnsafeRead();
        }
        catch (PlatformNotSupportedException)
        {
            Succeeded = true;
        }
    }

    // Copies the operand vector and the unmanaged result into arrays, then validates.
    private void ValidateResult(Vector128<SByte> firstOp, void* result, [CallerMemberName] string method = "")
    {
        SByte[] inArray = new SByte[Op1ElementCount];
        SByte[] outArray = new SByte[RetElementCount];

        Unsafe.Write(Unsafe.AsPointer(ref inArray[0]), firstOp);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

        ValidateResult(inArray, outArray, method);
    }

    // Copies both unmanaged buffers into arrays, then validates.
    private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "")
    {
        SByte[] inArray = new SByte[Op1ElementCount];
        SByte[] outArray = new SByte[RetElementCount];

        Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

        ValidateResult(inArray, outArray, method);
    }

    // Every result element must equal firstOp[0]; logs details and clears Succeeded on mismatch.
    private void ValidateResult(SByte[] firstOp, SByte[] result, [CallerMemberName] string method = "")
    {
        if (firstOp[0] != result[0])
        {
            Succeeded = false;
        }
        else
        {
            for (var i = 1; i < RetElementCount; i++)
            {
                if ((firstOp[0] != result[i]))
                {
                    Succeeded = false;
                    break;
                }
            }
        }

        if (!Succeeded)
        {
            Console.WriteLine($"{nameof(Avx2)}.{nameof(Avx2.BroadcastScalarToVector256)}<SByte>(Vector128<SByte>): {method} failed:");
            Console.WriteLine($"  firstOp: ({string.Join(", ", firstOp)})");
            Console.WriteLine($"   result: ({string.Join(", ", result)})");
            Console.WriteLine();
        }
    }
}
}
| |
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
using ZXing.Aztec;
using ZXing.Datamatrix;
using ZXing.IMB;
using ZXing.Maxicode;
using ZXing.OneD;
using ZXing.PDF417;
using ZXing.QrCode;
namespace ZXing
{
/// <summary>
/// MultiFormatReader is a convenience class and the main entry point into the library for most uses.
/// By default it attempts to decode all barcode formats that the library supports. Optionally, you
/// can provide a hints object to request different behavior, for example only decoding QR codes.
/// </summary>
/// <author>Sean Owen</author>
/// <author>dswitkin@google.com (Daniel Switkin)</author>
/// <author>www.Redivivus.in (suraj.supekar@redivivus.in) - Ported from ZXING Java Source</author>
public sealed class MultiFormatReader : Reader
{
    // Hints in effect for the current decode state (set via the Hints property).
    private IDictionary<DecodeHintType, object> hints;
    // Cached readers built from the hints; reused across decodeWithState calls.
    private IList<Reader> readers;

    /// <summary> This version of decode honors the intent of Reader.decode(BinaryBitmap) in that it
    /// passes null as a hint to the decoders. However, that makes it inefficient to call repeatedly.
    /// Use setHints() followed by decodeWithState() for continuous scan applications.
    ///
    /// </summary>
    /// <param name="image">The pixel data to decode
    /// </param>
    /// <returns> The contents of the image
    /// </returns>
    /// <throws> ReaderException Any errors which occurred </throws>
    public Result decode(BinaryBitmap image)
    {
        // Resetting Hints to null rebuilds the default reader set (see the Hints setter).
        Hints = null;
        return decodeInternal(image);
    }

    /// <summary> Decode an image using the hints provided. Does not honor existing state.
    ///
    /// </summary>
    /// <param name="image">The pixel data to decode
    /// </param>
    /// <param name="hints">The hints to use, clearing the previous state.
    /// </param>
    /// <returns> The contents of the image
    /// </returns>
    /// <throws> ReaderException Any errors which occurred </throws>
    public Result decode(BinaryBitmap image, IDictionary<DecodeHintType, object> hints)
    {
        Hints = hints;
        return decodeInternal(image);
    }

    /// <summary> Decode an image using the state set up by calling setHints() previously. Continuous scan
    /// clients will get a <b>large</b> speed increase by using this instead of decode().
    ///
    /// </summary>
    /// <param name="image">The pixel data to decode
    /// </param>
    /// <returns> The contents of the image
    /// </returns>
    /// <throws> ReaderException Any errors which occurred </throws>
    public Result decodeWithState(BinaryBitmap image)
    {
        // Make sure to set up the default state so we don't crash
        if (readers == null)
        {
            Hints = null;
        }
        return decodeInternal(image);
    }

    /// <summary> This method adds state to the MultiFormatReader. By setting the hints once, subsequent calls
    /// to decodeWithState(image) can reuse the same set of readers without reallocating memory. This
    /// is important for performance in continuous scan clients.
    ///
    /// </summary>
    public IDictionary<DecodeHintType, object> Hints
    {
        set
        {
            hints = value;

            var tryHarder = value != null && value.ContainsKey(DecodeHintType.TRY_HARDER);
            var formats = value == null || !value.ContainsKey(DecodeHintType.POSSIBLE_FORMATS) ? null : (IList<BarcodeFormat>)value[DecodeHintType.POSSIBLE_FORMATS];
            if (formats != null)
            {
                // Build a reader list matching exactly the requested POSSIBLE_FORMATS.
                bool addOneDReader =
                   formats.Contains(BarcodeFormat.All_1D) ||
                   formats.Contains(BarcodeFormat.UPC_A) ||
                   formats.Contains(BarcodeFormat.UPC_E) ||
                   formats.Contains(BarcodeFormat.EAN_13) ||
                   formats.Contains(BarcodeFormat.EAN_8) ||
                   formats.Contains(BarcodeFormat.CODABAR) ||
                   formats.Contains(BarcodeFormat.CODE_39) ||
                   formats.Contains(BarcodeFormat.CODE_93) ||
                   formats.Contains(BarcodeFormat.CODE_128) ||
                   formats.Contains(BarcodeFormat.ITF) ||
                   formats.Contains(BarcodeFormat.RSS_14) ||
                   formats.Contains(BarcodeFormat.RSS_EXPANDED);

                readers = new List<Reader>();

                // Put 1D readers upfront in "normal" mode
                if (addOneDReader && !tryHarder)
                {
                    readers.Add(new MultiFormatOneDReader(value));
                }
                if (formats.Contains(BarcodeFormat.QR_CODE))
                {
                    readers.Add(new QRCodeReader());
                }
                if (formats.Contains(BarcodeFormat.DATA_MATRIX))
                {
                    readers.Add(new DataMatrixReader());
                }
                if (formats.Contains(BarcodeFormat.AZTEC))
                {
                    readers.Add(new AztecReader());
                }
                if (formats.Contains(BarcodeFormat.PDF_417))
                {
                    readers.Add(new PDF417Reader());
                }
                if (formats.Contains(BarcodeFormat.MAXICODE))
                {
                    readers.Add(new MaxiCodeReader());
                }
                if (formats.Contains(BarcodeFormat.IMB))
                {
                    readers.Add(new IMBReader());
                }
                // At end in "try harder" mode
                if (addOneDReader && tryHarder)
                {
                    readers.Add(new MultiFormatOneDReader(value));
                }
            }

            // No formats requested (or none matched): fall back to the full default reader set.
            if (readers == null ||
                readers.Count == 0)
            {
                readers = readers ?? new List<Reader>();

                if (!tryHarder)
                {
                    readers.Add(new MultiFormatOneDReader(value));
                }

                readers.Add(new QRCodeReader());
                readers.Add(new DataMatrixReader());
                readers.Add(new AztecReader());
                readers.Add(new PDF417Reader());
                readers.Add(new MaxiCodeReader());

                if (tryHarder)
                {
                    readers.Add(new MultiFormatOneDReader(value));
                }
            }
        }
    }

    /// <summary>
    /// resets all specific readers
    /// </summary>
    public void reset()
    {
        if (readers != null)
        {
            foreach (var reader in readers)
            {
                reader.reset();
            }
        }
    }

    // Runs the configured readers over the image; optionally retries on the
    // inverted image when the ALSO_INVERTED hint is set to true.
    private Result decodeInternal(BinaryBitmap image)
    {
        if (readers != null)
        {
            var rpCallback = hints != null && hints.ContainsKey(DecodeHintType.NEED_RESULT_POINT_CALLBACK)
                                ? (ResultPointCallback)hints[DecodeHintType.NEED_RESULT_POINT_CALLBACK]
                                : null;

            var result = decodeInternal(image, rpCallback);
            if (result != null)
                return result;

            if (hints != null
                && hints.ContainsKey(DecodeHintType.ALSO_INVERTED)
                && true.Equals(hints[DecodeHintType.ALSO_INVERTED]))
            {
                // Calling all readers again with inverted image
                image.BlackMatrix.flip();
                result = decodeInternal(image, rpCallback);
                if (result != null)
                    return result;
            }
        }
        return null;
    }

    // Tries each reader in turn; returns the first successful result or null.
    private Result decodeInternal(BinaryBitmap image, ResultPointCallback rpCallback)
    {
        for (var index = 0; index < readers.Count; index++)
        {
            var reader = readers[index];
            reader.reset();
            var result = reader.decode(image, hints);
            if (result != null)
            {
                // found a barcode, pushing the successful reader up front
                // I assume that the same type of barcode is read multiple times
                // so the reordering of the readers list should speed up the next reading
                // a little bit
                readers.RemoveAt(index);
                readers.Insert(0, reader);
                return result;
            }
            if (rpCallback != null)
                rpCallback(null);
        }
        return null;
    }
}
}
| |
// Copyright (c) 2021 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
using System;
using System.Collections;
using System.Text;
using Alachisoft.NCache.Caching;
using Alachisoft.NCache.Common;
using System.Collections.Generic;
using Alachisoft.NCache.Common.Pooling;
using Runtime = Alachisoft.NCache.Runtime;
using System.Diagnostics;
using Alachisoft.NCache.SocketServer.RuntimeLogging;
using Alachisoft.NCache.Common.Monitoring;
using Alachisoft.NCache.SocketServer.Util;
namespace Alachisoft.NCache.SocketServer.Command
{
class BulkDeleteCommand : CommandBase
{
// Parsed form of an incoming bulk-delete protobuf command (filled by ParseCommand).
protected struct CommandInfo
{
    public string RequestId;         // request id as received from the client
    public object[] Keys;            // cache keys to delete
    public BitSet FlagMap;           // operation flags (e.g. WriteThru); pooled, released in ExecuteCommand's finally
    public short DsItemsRemovedId;   // callback id for datasource-removed notifications; -1 when none registered
    public string ProviderName;      // backing-store provider name passed to Cache.Delete
    public long ClientLastViewId;    // client's last known view id, forwarded in the operation context
    public string IntendedRecipient; // target node when the operation is directed; may be empty
}
private OperationResult _removeBulkResult = OperationResult.Success;
CommandInfo cmdInfo;
// Overall success/failure of the bulk delete as observed by the command framework.
internal override OperationResult OperationResult
{
    get => _removeBulkResult;
}
// Builds the human-readable parameter summary used by API logging.
public override string GetCommandParameters(out string commandName)
{
    commandName = "BulkDelete";

    var details = new StringBuilder();
    details.Append("Command Keys: ").Append(cmdInfo.Keys.Length).Append(" ; ");
    if (cmdInfo.FlagMap != null)
    {
        details.Append("WriteThru: ").Append(cmdInfo.FlagMap.IsBitSet(BitSetConstants.WriteThru));
    }
    return details.ToString();
}
// Executes the bulk delete: parses the protobuf command, invokes Cache.Delete for all
// keys, and queues the version-dependent response. Failures are serialized back to the
// client as exception responses; API usage is logged best-effort in the inner finally.
public override void ExecuteCommand(ClientManager clientManager, Alachisoft.NCache.Common.Protobuf.Command command)
{
    int overload;
    string exception = null;
    Stopwatch stopWatch = new Stopwatch();
    stopWatch.Start();
    try
    {
        try
        {
            overload = command.MethodOverload;
            cmdInfo = ParseCommand(command, clientManager);
        }
        catch (Exception exc)
        {
            _removeBulkResult = OperationResult.Failure;
            // NOTE(review): immatureId == "-2" suppresses the error response — presumably a
            // request without a usable id yet; confirm against the command framework.
            if (!base.immatureId.Equals("-2"))
            {
                //PROTOBUF:RESPONSE
                _serializedResponsePackets.Add(Alachisoft.NCache.Common.Util.ResponseHelper.SerializeExceptionResponseWithType(exc, command.requestID, command.commandID, clientManager.ClientVersion));
            }
            return;
        }
        try
        {
            NCache nCache = clientManager.CmdExecuter as NCache;
            // Only build a notification entry when the client registered a datasource-removed callback.
            Notifications cbEnrty = null;
            if (cmdInfo.DsItemsRemovedId != -1)
            {
                cbEnrty = new Notifications(clientManager.ClientID, -1, -1, -1, -1, cmdInfo.DsItemsRemovedId,
                    Runtime.Events.EventDataFilter.None, Runtime.Events.EventDataFilter.None); //DataFilter not required
            }
            // Assemble the operation context from the parsed command and the client connection.
            OperationContext operationContext = new OperationContext(OperationContextFieldName.OperationType, OperationContextOperationType.CacheOperation);
            operationContext.Add(OperationContextFieldName.RaiseCQNotification, true);
            operationContext.Add(OperationContextFieldName.ClientLastViewId, cmdInfo.ClientLastViewId);
            CommandsUtil.PopulateClientIdInContext(ref operationContext, clientManager.ClientAddress);
            if (!string.IsNullOrEmpty(cmdInfo.IntendedRecipient))
                operationContext.Add(OperationContextFieldName.IntendedRecipient, cmdInfo.IntendedRecipient);
            operationContext.Add(OperationContextFieldName.ClientId, clientManager.ClientID);
            operationContext.Add(OperationContextFieldName.ClientOperationTimeout, clientManager.RequestTimeout);
            operationContext.CancellationToken = CancellationToken;

            nCache.Cache.Delete(cmdInfo.Keys, cmdInfo.FlagMap, cbEnrty, cmdInfo.ProviderName, operationContext);
            stopWatch.Stop();

            Alachisoft.NCache.Common.Protobuf.Response response = new Alachisoft.NCache.Common.Protobuf.Response();
            Alachisoft.NCache.Common.Protobuf.BulkDeleteResponse bulkDeleteResponse = new Alachisoft.NCache.Common.Protobuf.BulkDeleteResponse();
            // Clients 5.0+ receive the bare bulkDeleteResponse; older clients expect it
            // wrapped in a Response envelope.
            if (clientManager.ClientVersion >= 5000)
            {
                bulkDeleteResponse.intendedRecipient = cmdInfo.IntendedRecipient;
                Common.Util.ResponseHelper.SetResponse(bulkDeleteResponse, command.requestID, command.commandID);
                _serializedResponsePackets.Add(Common.Util.ResponseHelper.SerializeResponse(bulkDeleteResponse, Common.Protobuf.Response.Type.DELETE_BULK));
            }
            else
            {
                //PROTOBUF:RESPONSE
                response.intendedRecipient = cmdInfo.IntendedRecipient;
                response.bulkDeleteResponse = bulkDeleteResponse;
                Common.Util.ResponseHelper.SetResponse(response, command.requestID, command.commandID, Common.Protobuf.Response.Type.DELETE_BULK);
                _serializedResponsePackets.Add(Alachisoft.NCache.Common.Util.ResponseHelper.SerializeResponse(response));
            }
        }
        catch (OperationCanceledException ex)
        {
            exception = ex.ToString();
            Dispose();
        }
        catch (Exception exc)
        {
            _removeBulkResult = OperationResult.Failure;
            exception = exc.ToString();
            //PROTOBUF:RESPONSE
            _serializedResponsePackets.Add(Alachisoft.NCache.Common.Util.ResponseHelper.SerializeExceptionResponseWithType(exc, command.requestID, command.commandID, clientManager.ClientVersion));
        }
        finally
        {
            // Best-effort API usage logging; never let logging failures affect the command.
            try
            {
                TimeSpan executionTime = stopWatch.Elapsed;
                if (Alachisoft.NCache.Management.APILogging.APILogManager.APILogManger != null && Alachisoft.NCache.Management.APILogging.APILogManager.EnableLogging)
                {
                    APILogItemBuilder log = new APILogItemBuilder(MethodsName.DELETEBULK.ToLower());
                    log.GenerateBulkDeleteAPILogItem(cmdInfo.Keys.Length, cmdInfo.FlagMap, cmdInfo.ProviderName, cmdInfo.DsItemsRemovedId, overload, exception, executionTime, clientManager.ClientID.ToLower(), clientManager.ClientSocketId.ToString());
                }
            }
            catch
            {
            }
        }
    }
    finally
    {
        // Fix: guard against a null FlagMap — when ParseCommand throws, cmdInfo is a
        // default struct and the unconditional MarkFree call raised NullReferenceException.
        if (cmdInfo.FlagMap != null)
        {
            cmdInfo.FlagMap.MarkFree(NCModulesConstants.SocketServer);
        }
    }
}
public override void IncrementCounter(Alachisoft.NCache.SocketServer.Statistics.StatisticsCounter collector, long value)
{
    // Fold this bulk-delete's elapsed milliseconds into the running average.
    // A null collector means performance statistics are disabled.
    collector?.IncrementMsecPerDelBulkAvg(value);
}
//PROTOBUF
private CommandInfo ParseCommand(Alachisoft.NCache.Common.Protobuf.Command command, ClientManager clientManager)
{
    // Translate the wire-level protobuf bulk-delete command into the
    // internal CommandInfo representation used by ExecuteCommand.
    Alachisoft.NCache.Common.Protobuf.BulkDeleteCommand deleteCommand = command.bulkDeleteCommand;

    // The flag BitSet is leased from the client's fake pool and must later be
    // released via MarkFree(NCModulesConstants.SocketServer).
    BitSet flags = BitSet.CreateAndMarkInUse(clientManager.CacheFakePool, NCModulesConstants.SocketServer);
    flags.Data = (byte)deleteCommand.flag;

    CommandInfo parsedInfo = new CommandInfo();
    parsedInfo.Keys = new ArrayList(deleteCommand.keys).ToArray();
    parsedInfo.DsItemsRemovedId = (short)deleteCommand.datasourceItemRemovedCallbackId;
    parsedInfo.FlagMap = flags;
    parsedInfo.RequestId = deleteCommand.requestId.ToString();
    // An empty provider name on the wire is normalized to null.
    parsedInfo.ProviderName = string.IsNullOrEmpty(deleteCommand.providerName) ? null : deleteCommand.providerName;
    parsedInfo.ClientLastViewId = command.clientLastViewId;
    return parsedInfo;
}
//private CommandInfo ParseCommand(ref string command)
//{
// CommandInfo cmdInfo = new CommandInfo();
// int beginQuoteIndex = 0, endQuoteIndex = 0;
// base.UpdateDelimIndexes(ref command, '"', ref beginQuoteIndex, ref endQuoteIndex);
// base.UpdateDelimIndexes(ref command, '"', ref beginQuoteIndex, ref endQuoteIndex);
// int size = Convert.ToInt32(command.Substring(beginQuoteIndex + 1, endQuoteIndex - beginQuoteIndex - 1));
// cmdInfo.Keys = new object[size];
// for (int i = 0; i < size; i++)
// {
// base.UpdateDelimIndexes(ref command, '"', ref beginQuoteIndex, ref endQuoteIndex);
// //if (beginQuoteIndex + 1 == endQuoteIndex) throw new ArgumentNullException("keys[" + i.ToString() + "]");
// cmdInfo.Keys[i] = command.Substring(beginQuoteIndex + 1, endQuoteIndex - beginQuoteIndex - 1);
// }
// base.UpdateDelimIndexes(ref command, '"', ref beginQuoteIndex, ref endQuoteIndex);
// cmdInfo.RequestId = command.Substring(beginQuoteIndex + 1, endQuoteIndex - beginQuoteIndex - 1);
// base.immatureId = cmdInfo.RequestId;
// base.UpdateDelimIndexes(ref command, '"', ref beginQuoteIndex, ref endQuoteIndex);
// cmdInfo.FlagMap = new BitSet(Convert.ToByte(command.Substring(beginQuoteIndex + 1, endQuoteIndex - beginQuoteIndex - 1)));
// base.UpdateDelimIndexes(ref command, '"', ref beginQuoteIndex, ref endQuoteIndex);
// cmdInfo.DsItemsRemovedId = Convert.ToInt16(command.Substring(beginQuoteIndex + 1, endQuoteIndex - beginQuoteIndex - 1));
// return cmdInfo;
//}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.Serialization;
using System.Windows;
using System.Windows.Controls;
using Microsoft.Phone.Controls;
//using Windows.Devices.Geolocation;
//using Windows.UI.Core;
using WPCordovaClassLib;
using WPCordovaClassLib.Cordova;
using WPCordovaClassLib.Cordova.Commands;
using WPCordovaClassLib.Cordova.JSON;
using GoogleAds;
namespace Cordova.Extension.Commands
{
/// <summary>
/// Google AdMob wrapper for showing banner and interstitial adverts.
/// </summary>
public sealed class AdMob : BaseCommand
{
    #region Const
    // ad size
    // only banner and smart banner supported on windows phones, see:
    // https://developers.google.com/mobile-ads-sdk/docs/admob/wp/banner
    public const string ADSIZE_BANNER = "BANNER";
    public const string ADSIZE_SMART_BANNER = "SMART_BANNER";
    //public const string ADSIZE_MEDIUM_RECTANGLE = "MEDIUM_RECTANGLE";
    //public const string ADSIZE_FULL_BANNER = "FULL_BANNER";
    //public const string ADSIZE_LEADERBOARD = "LEADERBOARD";
    //public const string ADSIZE_SKYSCRAPER = "SKYSCRAPPER";
    //public const string ADSIZE_CUSTOM = "CUSTOM";
    // ad event names fired back into the JavaScript layer
    public const string EVENT_AD_LOADED = "onAdLoaded";
    public const string EVENT_AD_FAILLOAD = "onAdFailLoad";
    public const string EVENT_AD_PRESENT = "onAdPresent";
    public const string EVENT_AD_LEAVEAPP = "onAdLeaveApp";
    public const string EVENT_AD_DISMISS = "onAdDismiss";
    public const string EVENT_AD_WILLPRESENT = "onAdWillPresent";
    public const string EVENT_AD_WILLDISMISS = "onAdWillDismiss";
    // ad type
    public const string ADTYPE_BANNER = "banner";
    public const string ADTYPE_INTERSTITIAL = "interstitial";
    public const string ADTYPE_NATIVE = "native";
    // option keys accepted in the JSON options object
    public const string OPT_ADID = "adId";
    public const string OPT_AUTO_SHOW = "autoShow";
    public const string OPT_IS_TESTING = "isTesting";
    public const string OPT_LOG_VERBOSE = "logVerbose";
    public const string OPT_AD_SIZE = "adSize";
    public const string OPT_WIDTH = "width";
    public const string OPT_HEIGHT = "height";
    public const string OPT_OVERLAP = "overlap";
    public const string OPT_ORIENTATION_RENEW = "orientationRenew";
    public const string OPT_POSITION = "position";
    public const string OPT_X = "x";
    public const string OPT_Y = "y";
    public const string OPT_BANNER_ID = "bannerId";
    public const string OPT_INTERSTITIAL_ID = "interstitialId";
    private const string TEST_BANNER_ID = "ca-app-pub-6869992474017983/9375997553";
    private const string TEST_INTERSTITIAL_ID = "ca-app-pub-6869992474017983/1355127956";
    // banner positions
    public const int NO_CHANGE = 0;
    public const int TOP_LEFT = 1;
    public const int TOP_CENTER = 2;
    public const int TOP_RIGHT = 3;
    public const int LEFT = 4;
    public const int CENTER = 5;
    public const int RIGHT = 6;
    public const int BOTTOM_LEFT = 7;
    public const int BOTTOM_CENTER = 8;
    public const int BOTTOM_RIGHT = 9;
    public const int POS_XY = 10;
    #endregion

    #region Members
    private bool isTesting = false;
    private bool logVerbose = false;
    private string bannerId = "";
    private string interstitialId = "";
    private AdFormats adSize = AdFormats.SmartBanner;
    private int adWidth = 320;
    private int adHeight = 50;
    private bool overlap = false;
    private bool orientationRenew = true;
    private int adPosition = BOTTOM_CENTER;
    private int posX = 0;
    private int posY = 0;
    private bool autoShowBanner = true;
    private bool autoShowInterstitial = false;
    private bool bannerVisible = false;
    private const string UI_LAYOUT_ROOT = "LayoutRoot";
    private const string UI_CORDOVA_VIEW = "CordovaView";
    private const int BANNER_HEIGHT_PORTRAIT = 50;
    private const int BANNER_HEIGHT_LANDSCAPE = 32;
    private RowDefinition row = null;
    private AdView bannerAd = null;
    private InterstitialAd interstitialAd = null;
    // View dimensions captured before the banner first deducts space, so the
    // original size can be restored when the banner is hidden.
    private double initialViewHeight = 0.0;
    private double initialViewWidth = 0.0;
    #endregion

    /// <summary>
    /// Maps an ad-size option string to a GoogleAds format. Anything other
    /// than "BANNER" falls back to the smart banner (auto size).
    /// </summary>
    static AdFormats adSizeFromString(String size) {
        if (ADSIZE_BANNER.Equals (size)) {
            return AdFormats.Banner; // Banner (320x50, Phones and Tablets)
        } else {
            return AdFormats.SmartBanner; // Smart banner (auto size, Phones and Tablets)
        }
    }

    #region Public methods

    /// <summary>
    /// Cordova entry point: applies plugin options from a JSON options object
    /// (first element of the serialized argument array).
    /// </summary>
    public void setOptions(string args) {
        if(logVerbose) Debug.WriteLine("AdMob.setOptions: " + args);
        try {
            string[] inputs = JsonHelper.Deserialize<string[]>(args);
            if (inputs != null && inputs.Length >= 1) {
                var options = JsonHelper.Deserialize<AdMobOptions>(inputs[0]);
                __setOptions(options);
            }
        } catch (Exception ex) {
            DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
            return;
        }
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
    }

    /// <summary>
    /// Cordova entry point: creates (or re-requests) the banner ad. Falls back
    /// to the test banner id when no adId option is supplied.
    /// </summary>
    public void createBanner(string args)
    {
        if (logVerbose) Debug.WriteLine("AdMob.createBanner: " + args);
        try
        {
            string[] inputs = JsonHelper.Deserialize<string[]>(args);
            if (inputs != null && inputs.Length >= 1)
            {
                var options = JsonHelper.Deserialize<AdMobOptions>(inputs[0]);
                if (options != null)
                {
                    __setOptions(options);
                    string adId = TEST_BANNER_ID;
                    bool autoShow = true;
                    if (!string.IsNullOrEmpty(options.adId))
                        adId = options.adId;
                    //if (options.ContainsKey(OPT_AUTO_SHOW))
                    //    autoShow = Convert.ToBoolean(options[OPT_AUTO_SHOW]);
                    __createBanner(adId, autoShow);
                }
            }
        }
        catch (Exception ex)
        {
            DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
            return;
        }
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
    }

    /// <summary>
    /// Cordova entry point: hides and destroys the banner, detaching all
    /// event handlers. Safe to call when no banner was ever created.
    /// </summary>
    public void removeBanner(string args)
    {
        if (logVerbose) Debug.WriteLine("AdMob.removeBanner: " + args);
        // Asynchronous UI threading call
        Deployment.Current.Dispatcher.BeginInvoke(() =>
        {
            __hideBanner();
            // FIX: guard against removeBanner() being invoked before
            // createBanner(); the original dereferenced a null bannerAd here.
            if (bannerAd != null)
            {
                // Remove event handlers
                bannerAd.FailedToReceiveAd -= banner_onAdFailLoad;
                bannerAd.LeavingApplication -= banner_onAdLeaveApp;
                bannerAd.ReceivedAd -= banner_onAdLoaded;
                bannerAd.ShowingOverlay -= banner_onAdPresent;
                bannerAd.DismissingOverlay -= banner_onAdDismiss;
                bannerAd = null;
            }
        });
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
    }

    /// <summary>
    /// Cordova entry point: loads an interstitial ad for later display.
    /// Requires an adId option unless isTesting is set.
    /// </summary>
    public void prepareInterstitial(string args)
    {
        if (logVerbose) Debug.WriteLine("AdMob.prepareInterstitial: " + args);
        string adId = "";
        bool autoShow = false;
        try
        {
            string[] inputs = JsonHelper.Deserialize<string[]>(args);
            if (inputs != null && inputs.Length >= 1)
            {
                var options = JsonHelper.Deserialize<AdMobOptions>(inputs[0]);
                if (options != null)
                {
                    __setOptions(options);
                    if (!string.IsNullOrEmpty(options.adId))
                    {
                        adId = options.adId;
                        //if (options.ContainsKey(OPT_AUTO_SHOW))
                        //    autoShow = Convert.ToBoolean(options[OPT_AUTO_SHOW]);
                        __prepareInterstitial(adId, autoShow);
                    }
                }
            }
        }
        catch (Exception ex)
        {
            DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
            return;
        }
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
    }

    /// <summary>Cordova entry point: hides the banner without destroying it.</summary>
    public void hideBanner(string args)
    {
        if (logVerbose) Debug.WriteLine("AdMob.hideBanner: " + args);
        __hideBanner();
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
    }

    /// <summary>
    /// Cordova entry point: shows the prepared interstitial, or prepares one
    /// with auto-show enabled if none is loaded yet.
    /// </summary>
    public void showInterstitial(string args)
    {
        if (logVerbose) Debug.WriteLine("AdMob.showInterstitial: " + args);
        if (interstitialAd != null)
        {
            __showInterstitial();
        }
        else
        {
            __prepareInterstitial(interstitialId, true);
        }
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
    }
    #endregion

    #region Private methods

    // Copies every option that is present in the deserialized options object
    // into the corresponding member; absent options keep their current value.
    private void __setOptions(AdMobOptions options)
    {
        if (options == null)
            return;
        if (options.isTesting.HasValue)
            isTesting = options.isTesting.Value;
        if (options.logVerbose.HasValue)
            logVerbose = options.logVerbose.Value;
        if (options.overlap.HasValue)
            overlap = options.overlap.Value;
        if (options.orientationRenew.HasValue)
            orientationRenew = options.orientationRenew.Value;
        if (options.position.HasValue)
            adPosition = options.position.Value;
        if (options.x.HasValue)
            posX = options.x.Value;
        if (options.y.HasValue)
            posY = options.y.Value;
        if (options.bannerId != null)
            bannerId = options.bannerId;
        if (options.interstitialId != null)
            interstitialId = options.interstitialId;
        if (options.adSize != null)
            adSize = adSizeFromString( options.adSize );
        if (options.width.HasValue)
            adWidth = options.width.Value;
        if (options.height.HasValue)
            adHeight = options.height.Value;
    }

    // Creates the AdView on the UI thread (reusing an existing one), requests
    // an ad, and optionally shows it as soon as it is requested.
    private void __createBanner(string adId, bool autoShow) {
        if (isTesting)
            adId = TEST_BANNER_ID;
        if ((adId!=null) && (adId.Length > 0))
            bannerId = adId;
        else
            adId = bannerId;
        autoShowBanner = autoShow;
        // Asynchronous UI threading call
        Deployment.Current.Dispatcher.BeginInvoke(() => {
            if(bannerAd == null) {
                bannerAd = new AdView {
                    Format = adSize,
                    AdUnitID = bannerId
                };
                // Add event handlers
                bannerAd.FailedToReceiveAd += banner_onAdFailLoad;
                bannerAd.LeavingApplication += banner_onAdLeaveApp;
                bannerAd.ReceivedAd += banner_onAdLoaded;
                bannerAd.ShowingOverlay += banner_onAdPresent;
                bannerAd.DismissingOverlay += banner_onAdDismiss;
            }
            bannerVisible = false;
            AdRequest adRequest = new AdRequest();
            adRequest.ForceTesting = isTesting;
            bannerAd.LoadAd( adRequest );
            if(autoShowBanner) {
                __showBanner(adPosition, posX, posY);
            }
        });
    }

    // Cordova entry point (dispatched by name): shows the banner at one of
    // the predefined positions; expects [position] in the argument array.
    private void showBanner(string args) {
        if(logVerbose) Debug.WriteLine("AdMob.showBanner: " + args);
        try {
            string[] inputs = JsonHelper.Deserialize<string[]>(args);
            if (inputs != null && inputs.Length >= 1) {
                int position = Convert.ToInt32(inputs[0]);
                __showBanner(position, 0, 0);
            }
        } catch (Exception ex) {
            DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
            return;
        }
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
    }

    // Cordova entry point (dispatched by name): shows the banner at explicit
    // coordinates; expects [x, y] in the argument array.
    private void showBannerAtXY(string args) {
        if(logVerbose) Debug.WriteLine("AdMob.showBannerAtXY: " + args);
        try {
            string[] inputs = JsonHelper.Deserialize<string[]>(args);
            // FIX: both x and y are read, so two elements are required; the
            // original checked Length >= 1 and could throw IndexOutOfRange.
            if (inputs != null && inputs.Length >= 2) {
                int x = Convert.ToInt32(inputs[0]);
                int y = Convert.ToInt32(inputs[1]);
                __showBanner(POS_XY, x, y);
            }
        } catch (Exception ex) {
            DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION, ex.Message));
            return;
        }
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK));
    }

    // Attaches the banner to the page layout, either overlapping the Cordova
    // view or splitting the layout grid, then makes it visible.
    private void __showBanner(int argPos, int argX, int argY) {
        if (bannerAd == null) {
            if(logVerbose) Debug.WriteLine("banner is null, call createBanner() first.");
            return;
        }
        // Asynchronous UI threading call
        Deployment.Current.Dispatcher.BeginInvoke(() => {
            PhoneApplicationFrame frame;
            PhoneApplicationPage page;
            CordovaView view;
            Grid grid;
            if (TryCast(Application.Current.RootVisual, out frame) &&
                TryCast(frame.Content, out page) &&
                TryCast(page.FindName(UI_CORDOVA_VIEW), out view) &&
                TryCast(page.FindName(UI_LAYOUT_ROOT), out grid)) {
                if(grid.Children.Contains(bannerAd)) grid.Children.Remove(bannerAd);
                if(overlap) {
                    __showBannerOverlap(grid, adPosition);
                } else {
                    if(! bannerVisible) {
                        // Remember the untouched view size so it can be
                        // restored when the banner is hidden.
                        initialViewHeight = view.ActualHeight;
                        initialViewWidth = view.ActualWidth;
                        frame.OrientationChanged += onOrientationChanged;
                    }
                    __showBannerSplit(grid, view, adPosition);
                    setCordovaViewHeight(frame, view);
                }
                bannerAd.Visibility = Visibility.Visible;
                bannerVisible = true;
            }
        });
    }

    // Aligns the banner over the Cordova view. Positions 1..9 encode a 3x3
    // grid: column = (pos-1) % 3, row = (pos-1) / 3.
    private void __showBannerOverlap(Grid grid, int position) {
        switch ((position - 1) % 3) {
            case 0:
                bannerAd.HorizontalAlignment = HorizontalAlignment.Left;
                break;
            case 1:
                bannerAd.HorizontalAlignment = HorizontalAlignment.Center;
                break;
            case 2:
                bannerAd.HorizontalAlignment = HorizontalAlignment.Right;
                break;
        }
        switch ((position - 1) / 3) {
            case 0:
                bannerAd.VerticalAlignment = VerticalAlignment.Top;
                break;
            case 1:
                bannerAd.VerticalAlignment = VerticalAlignment.Center;
                break;
            case 2:
                bannerAd.VerticalAlignment = VerticalAlignment.Bottom;
                break;
        }
        grid.Children.Add (bannerAd);
    }

    // Inserts a dedicated grid row for the banner above or below the Cordova
    // view so the two do not overlap.
    private void __showBannerSplit(Grid grid, CordovaView view, int position) {
        if(row == null) {
            row = new RowDefinition();
            row.Height = GridLength.Auto;
        }
        grid.Children.Add(bannerAd);
        switch((position-1)/3) {
            case 0:
                // Top row positions: banner above the view.
                grid.RowDefinitions.Insert(0,row);
                Grid.SetRow(bannerAd, 0);
                Grid.SetRow(view, 1);
                break;
            case 1:
            case 2:
                // Middle/bottom positions: banner below the view.
                grid.RowDefinitions.Add(row);
                Grid.SetRow(bannerAd, 1);
                break;
        }
    }

    // Detaches the banner from the layout and restores the Cordova view size.
    private void __hideBanner() {
        // Asynchronous UI threading call
        Deployment.Current.Dispatcher.BeginInvoke(() => {
            // FIX: nothing to hide when the banner was never created; the
            // original dereferenced a null bannerAd below.
            if (bannerAd == null) return;
            PhoneApplicationFrame frame;
            PhoneApplicationPage page;
            CordovaView view;
            Grid grid;
            if (TryCast(Application.Current.RootVisual, out frame) &&
                TryCast(frame.Content, out page) &&
                TryCast(page.FindName(UI_CORDOVA_VIEW), out view) &&
                TryCast(page.FindName(UI_LAYOUT_ROOT), out grid)) {
                grid.Children.Remove(bannerAd);
                grid.RowDefinitions.Remove(row);
                row = null;
                bannerAd.Visibility = Visibility.Collapsed;
                bannerVisible = false;
                if(! overlap) {
                    frame.OrientationChanged -= onOrientationChanged;
                    setCordovaViewHeight(frame, view);
                }
            }
        });
    }

    // Creates the interstitial on the UI thread and starts loading an ad.
    private void __prepareInterstitial(string adId, bool autoShow) {
        if (isTesting)
            adId = TEST_INTERSTITIAL_ID;
        if ((adId != null) && (adId.Length > 0)) {
            interstitialId = adId;
        } else {
            adId = interstitialId;
        }
        autoShowInterstitial = autoShow;
        // Asynchronous UI threading call
        Deployment.Current.Dispatcher.BeginInvoke(() => {
            interstitialAd = new InterstitialAd( interstitialId );
            // Add event listeners
            interstitialAd.ReceivedAd += interstitial_onAdLoaded;
            interstitialAd.FailedToReceiveAd += interstitial_onAdFailLoad;
            interstitialAd.ShowingOverlay += interstitial_onAdPresent;
            interstitialAd.DismissingOverlay += interstitial_onAdDismiss;
            AdRequest adRequest = new AdRequest();
            adRequest.ForceTesting = isTesting;
            interstitialAd.LoadAd(adRequest);
        });
    }

    // Displays the loaded interstitial on the UI thread.
    private void __showInterstitial() {
        if (interstitialAd == null) {
            if(logVerbose) Debug.WriteLine("interstitial is null, call prepareInterstitial() first.");
            return;
        }
        Deployment.Current.Dispatcher.BeginInvoke(() => {
            interstitialAd.ShowAd ();
        });
    }

    // Events --------

    // Device orientation changed: recompute the Cordova view height so the
    // banner row keeps the correct size in the new orientation.
    private void onOrientationChanged(object sender, OrientationChangedEventArgs e)
    {
        // Asynchronous UI threading call
        Deployment.Current.Dispatcher.BeginInvoke(() => {
            PhoneApplicationFrame frame;
            PhoneApplicationPage page;
            CordovaView view;
            Grid grid;
            if (TryCast(Application.Current.RootVisual, out frame) &&
                TryCast(frame.Content, out page) &&
                TryCast(page.FindName(UI_CORDOVA_VIEW), out view) &&
                TryCast(page.FindName(UI_LAYOUT_ROOT), out grid)) {
                setCordovaViewHeight(frame, view);
            }
        });
    }

    /// Set cordova view height based on banner height and frame orientation.
    /// In landscape the captured initial width becomes the available height.
    private void setCordovaViewHeight(PhoneApplicationFrame frame, CordovaView view) {
        bool deduct = bannerVisible && (! overlap);
        if (frame.Orientation == PageOrientation.Portrait ||
            frame.Orientation == PageOrientation.PortraitDown ||
            frame.Orientation == PageOrientation.PortraitUp) {
            view.Height = initialViewHeight - (deduct ? BANNER_HEIGHT_PORTRAIT : 0);
        } else {
            view.Height = initialViewWidth - (deduct ? BANNER_HEIGHT_LANDSCAPE : 0);
        }
        fireEvent ("window", "resize", null);
    }

    // Banner events
    private void banner_onAdFailLoad(object sender, AdErrorEventArgs args) {
        fireAdErrorEvent (EVENT_AD_FAILLOAD, ADTYPE_BANNER, getErrCode(args.ErrorCode), getErrStr(args.ErrorCode));
    }
    private void banner_onAdLoaded(object sender, AdEventArgs args) {
        fireAdEvent (EVENT_AD_LOADED, ADTYPE_BANNER);
        if( (! bannerVisible) && autoShowBanner ) {
            __showBanner(adPosition, posX, posY);
        }
    }
    private void banner_onAdPresent(object sender, AdEventArgs args) {
        fireAdEvent (EVENT_AD_PRESENT, ADTYPE_BANNER);
    }
    private void banner_onAdLeaveApp(object sender, AdEventArgs args) {
        fireAdEvent (EVENT_AD_LEAVEAPP, ADTYPE_BANNER);
    }
    private void banner_onAdDismiss(object sender, AdEventArgs args) {
        fireAdEvent (EVENT_AD_DISMISS, ADTYPE_BANNER);
    }

    // Interstitial events
    private void interstitial_onAdFailLoad(object sender, AdErrorEventArgs args) {
        fireAdErrorEvent (EVENT_AD_FAILLOAD, ADTYPE_INTERSTITIAL, getErrCode(args.ErrorCode), getErrStr(args.ErrorCode));
    }
    private void interstitial_onAdLoaded(object sender, AdEventArgs args) {
        fireAdEvent (EVENT_AD_LOADED, ADTYPE_INTERSTITIAL);
        if (autoShowInterstitial) {
            __showInterstitial ();
        }
    }
    private void interstitial_onAdPresent(object sender, AdEventArgs args) {
        fireAdEvent (EVENT_AD_PRESENT, ADTYPE_INTERSTITIAL);
    }
    private void interstitial_onAdDismiss(object sender, AdEventArgs args) {
        fireAdEvent (EVENT_AD_DISMISS, ADTYPE_INTERSTITIAL);
    }

    // Maps an SDK error code to the numeric code reported to JavaScript;
    // -1 for anything unrecognized.
    private int getErrCode(AdErrorCode errorCode) {
        switch(errorCode) {
            case AdErrorCode.InternalError: return 0;
            case AdErrorCode.InvalidRequest: return 1;
            case AdErrorCode.NetworkError: return 2;
            case AdErrorCode.NoFill: return 3;
            case AdErrorCode.Cancelled: return 4;
            case AdErrorCode.StaleInterstitial: return 5;
            case AdErrorCode.NoError: return 6;
        }
        return -1;
    }

    // Human-readable message matching getErrCode.
    private string getErrStr(AdErrorCode errorCode) {
        switch(errorCode) {
            case AdErrorCode.InternalError: return "Internal error";
            case AdErrorCode.InvalidRequest: return "Invalid request";
            case AdErrorCode.NetworkError: return "Network error";
            case AdErrorCode.NoFill: return "No fill";
            case AdErrorCode.Cancelled: return "Cancelled";
            case AdErrorCode.StaleInterstitial: return "Stale interstitial";
            case AdErrorCode.NoError: return "No error";
        }
        return "Unknown";
    }

    // Fires a document-level ad event into the JavaScript layer.
    private void fireAdEvent(string adEvent, string adType) {
        string json = "{'adNetwork':'AdMob','adType':'" + adType + "','adEvent':'" + adEvent + "'}";
        fireEvent("document", adEvent, json);
    }

    // Fires a document-level ad error event with code and reason.
    private void fireAdErrorEvent(string adEvent, string adType, int errCode, string errMsg) {
        string json = "{'adNetwork':'AdMob','adType':'" + adType
            + "','adEvent':'" + adEvent + "','error':" + errCode + ",'reason':'" + errMsg + "'}";
        fireEvent("document", adEvent, json);
    }

    // Injects a script into the Cordova browser to dispatch either a window
    // UI event or a cordova document event carrying optional JSON data.
    private void fireEvent(string obj, string eventName, string jsonData) {
        if(logVerbose) Debug.WriteLine( eventName );
        string js = "";
        if("window".Equals(obj)) {
            js = "var evt=document.createEvent('UIEvents');evt.initUIEvent('" + eventName
                + "',true,false,window,0);window.dispatchEvent(evt);";
        } else {
            js = "javascript:cordova.fireDocumentEvent('" + eventName + "'";
            if(jsonData != null) {
                js += "," + jsonData;
            }
            js += ");";
        }
        Deployment.Current.Dispatcher.BeginInvoke(() => {
            PhoneApplicationFrame frame;
            PhoneApplicationPage page;
            CordovaView view;
            if (TryCast(Application.Current.RootVisual, out frame) &&
                TryCast(frame.Content, out page) &&
                TryCast(page.FindName(UI_CORDOVA_VIEW), out view)) {
                // Asynchronous threading call
                view.Browser.Dispatcher.BeginInvoke(() =>{
                    try {
                        view.Browser.InvokeScript("eval", new string[] { js });
                    } catch {
                        // Script injection can fail while the browser is
                        // navigating; log only, as events are best-effort.
                        if(logVerbose) Debug.WriteLine("AdMob.fireEvent: Failed to invoke script: " + js);
                    }
                });
            }
        });
    }
    #endregion

    // Safe cast helper: true (with result set) when obj is a T.
    static bool TryCast<T>(object obj, out T result) where T : class {
        result = obj as T;
        return result != null;
    }
}
}
| |
#region MIT License
/*
* Copyright (c) 2005-2008 Jonathan Mark Porter. http://physics2d.googlepages.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#endregion
#if UseDouble
using Scalar = System.Double;
#else
using Scalar = System.Single;
#endif
using System;
using System.Runtime.InteropServices;
using System.Collections.Generic;
using System.Text;
using AdvanceMath;
using Tao.OpenGl;
namespace Graphics2DDotNet
{
/// <summary>
/// Scalar-agnostic wrappers over the Tao OpenGL bindings (single or double
/// precision, selected by the UseDouble symbol) plus a thread-safe queue of
/// deferred GL resource deletions that must run on the GL thread.
/// </summary>
public static class GlHelper
{
    /// <summary>
    /// A pending deletion request: the GL names to free and the refresh
    /// (context generation) in which they were created.
    /// </summary>
    class DeleteInfo
    {
        public int refresh;
        public int[] names;
        public DeleteInfo(int refresh, int[] names)
        {
            this.refresh = refresh;
            this.names = names;
        }
    }

    // Refresh value of the most recent DoDelete pass; requests older than
    // this refer to a context that no longer exists and are dropped.
    static int lastRefresh = -2;
#if UseDouble
    public const int GlScalar = Gl.GL_DOUBLE;
#else
    public const int GlScalar = Gl.GL_FLOAT;
#endif
    static readonly object syncRoot = new object();
    static readonly List<DeleteInfo> buffersARB = new List<DeleteInfo>();
    static readonly List<DeleteInfo> textures = new List<DeleteInfo>();
    static readonly List<DeleteInfo> glLists = new List<DeleteInfo>();

    /// <summary>Scalar-precision glScale.</summary>
    public static void GlScale(Scalar x, Scalar y, Scalar z)
    {
#if UseDouble
        Gl.glScaled(x, y, z);
#else
        Gl.glScalef(x, y, z);
#endif
    }

    /// <summary>Scalar-precision glTranslate.</summary>
    public static void GlTranslate(Scalar x, Scalar y, Scalar z)
    {
#if UseDouble
        Gl.glTranslated(x, y, z);
#else
        Gl.glTranslatef(x, y, z);
#endif
    }

    /// <summary>Scalar-precision glRotate (angle in degrees, per GL).</summary>
    public static void GlRotate(Scalar angle, Scalar x, Scalar y, Scalar z)
    {
#if UseDouble
        Gl.glRotated(angle, x, y, z);
#else
        Gl.glRotatef(angle, x, y, z);
#endif
    }

    /// <summary>Emits a 2D vertex.</summary>
    public static void GlVertex(Vector2D vertex)
    {
#if UseDouble
        Gl.glVertex2d(vertex.X, vertex.Y);
#else
        Gl.glVertex2f(vertex.X, vertex.Y);
#endif
    }

    /// <summary>Emits a 3D vertex.</summary>
    public static void GlVertex(Vector3D vertex)
    {
#if UseDouble
        Gl.glVertex3d(vertex.X, vertex.Y, vertex.Z);
#else
        Gl.glVertex3f(vertex.X, vertex.Y, vertex.Z);
#endif
    }

    /// <summary>Emits a 4D (homogeneous) vertex.</summary>
    public static void GlVertex(Vector4D vertex)
    {
#if UseDouble
        Gl.glVertex4d(vertex.X, vertex.Y, vertex.Z, vertex.W);
#else
        Gl.glVertex4f(vertex.X, vertex.Y, vertex.Z, vertex.W);
#endif
    }

    /// <summary>Loads a 4x4 matrix (transposed to GL column-major order).</summary>
    public static void GlLoadMatrix(Matrix4x4 matrix)
    {
        Scalar[] array = new Scalar[16];
        Matrix4x4.CopyTranspose(ref matrix, array);
        GlLoadMatrix(array);
    }

    /// <summary>Loads a 3x3 matrix expanded to a 4x4 GL matrix.</summary>
    public static void GlLoadMatrix(Matrix3x3 matrix)
    {
        Scalar[] array = new Scalar[16];
        Matrix3x3.Copy2DToOpenGlMatrix(ref matrix, array);
        GlLoadMatrix(array);
    }

    /// <summary>Loads a 2x3 matrix expanded to a 4x4 GL matrix.</summary>
    public static void GlLoadMatrix(Matrix2x3 matrix)
    {
        Scalar[] array = new Scalar[16];
        Matrix2x3.Copy2DToOpenGlMatrix(ref matrix, array);
        GlLoadMatrix(array);
    }

    /// <summary>Loads a raw 16-element column-major matrix.</summary>
    public static void GlLoadMatrix(Scalar[] array)
    {
#if UseDouble
        Gl.glLoadMatrixd(array);
#else
        Gl.glLoadMatrixf(array);
#endif
    }

    /// <summary>Multiplies by a 4x4 matrix (transposed to GL order).</summary>
    public static void GlMultMatrix(Matrix4x4 matrix)
    {
        Scalar[] array = new Scalar[16];
        Matrix4x4.CopyTranspose(ref matrix, array);
        GlMultMatrix(array);
    }

    /// <summary>Multiplies by a 3x3 matrix expanded to 4x4.</summary>
    public static void GlMultMatrix(Matrix3x3 matrix)
    {
        Scalar[] array = new Scalar[16];
        Matrix3x3.Copy2DToOpenGlMatrix(ref matrix, array);
        GlMultMatrix(array);
    }

    /// <summary>Multiplies by a 2x3 matrix expanded to 4x4.</summary>
    public static void GlMultMatrix(Matrix2x3 matrix)
    {
        Scalar[] array = new Scalar[16];
        Matrix2x3.Copy2DToOpenGlMatrix(ref matrix, array);
        GlMultMatrix(array);
    }

    /// <summary>Multiplies by a raw 16-element column-major matrix.</summary>
    public static void GlMultMatrix(Scalar[] array)
    {
#if UseDouble
        Gl.glMultMatrixd(array);
#else
        Gl.glMultMatrixf(array);
#endif
    }

    /// <summary>Reads the current modelview matrix into array.</summary>
    public static void GlGetModelViewMatrix(Scalar[] array)
    {
#if UseDouble
        Gl.glGetDoublev(Gl.GL_MODELVIEW_MATRIX, array);
#else
        Gl.glGetFloatv(Gl.GL_MODELVIEW_MATRIX, array);
#endif
    }

    /// <summary>
    /// Uploads buffer data from a managed array, pinning it so the GL call
    /// sees a stable address.
    /// </summary>
    public static void GlBufferData(int target, Array array, int size, int usage)
    {
        GCHandle handle = GCHandle.Alloc(array, GCHandleType.Pinned);
        try
        {
            Gl.glBufferData(
                target,
                new IntPtr(size),
                handle.AddrOfPinnedObject(),
                usage);
        }
        finally
        {
            handle.Free();
        }
    }

    /// <summary>ARB variant of <see cref="GlBufferData"/>.</summary>
    public static void GlBufferDataARB(int target, Array array, int size, int usage)
    {
        GCHandle handle = GCHandle.Alloc(array, GCHandleType.Pinned);
        try
        {
            Gl.glBufferDataARB(
                target,
                new IntPtr(size),
                handle.AddrOfPinnedObject(),
                usage);
        }
        finally
        {
            handle.Free();
        }
    }

    /// <summary>
    /// Sets the vertex pointer from a managed array, pinning it for the call.
    /// NOTE(review): the pin is released on return, so GL must copy or the
    /// array must stay reachable until drawing completes — original behavior
    /// preserved.
    /// </summary>
    public static void GlVertexPointer(int size, int type, int stride, Array array)
    {
        GCHandle handle = GCHandle.Alloc(array, GCHandleType.Pinned);
        try
        {
            Gl.glVertexPointer(
                size,
                type,
                stride,
                handle.AddrOfPinnedObject());
        }
        finally
        {
            handle.Free();
        }
    }

    /// <summary>Scalar-precision glColor3.</summary>
    public static void GlColor3(Scalar red, Scalar green, Scalar blue)
    {
#if UseDouble
        Gl.glColor3d(red, green, blue);
#else
        Gl.glColor3f(red, green, blue);
#endif
    }

    /// <summary>Scalar-precision glColor4.</summary>
    public static void GlColor4(Scalar red, Scalar green, Scalar blue, Scalar alpha)
    {
#if UseDouble
        Gl.glColor4d(red, green, blue, alpha);
#else
        Gl.glColor4f(red, green, blue, alpha);
#endif
    }

    /// <summary>
    /// Queues ARB buffer names for deletion on the GL thread; stale requests
    /// (from an already-refreshed context) are ignored.
    /// </summary>
    public static void GlDeleteBuffersARB(int refresh, int[] names)
    {
        lock (syncRoot)
        {
            if (refresh >= lastRefresh)
            {
                buffersARB.Add(new DeleteInfo(refresh, names));
            }
        }
    }

    /// <summary>Queues texture names for deletion on the GL thread.</summary>
    public static void GlDeleteTextures(int refresh, int[] names)
    {
        lock (syncRoot)
        {
            if (refresh >= lastRefresh)
            {
                textures.Add(new DeleteInfo(refresh, names));
            }
        }
    }

    /// <summary>Queues a display-list range for deletion on the GL thread.</summary>
    public static void GlDeleteLists(int refresh, int name, int range)
    {
        lock (syncRoot)
        {
            if (refresh >= lastRefresh)
            {
                // names[0] = first list name, names[1] = range.
                glLists.Add(new DeleteInfo(refresh, new int[] { name, range }));
            }
        }
    }

    // Deletes queued buffers whose refresh matches the current context;
    // all entries (matching or stale) are then discarded.
    static void DoGlDeleteBuffersARB(int refresh)
    {
        if (buffersARB.Count > 0)
        {
            for (int index = 0; index < buffersARB.Count; ++index)
            {
                DeleteInfo info = buffersARB[index];
                if (info.refresh == refresh)
                {
                    Gl.glDeleteBuffersARB(info.names.Length, info.names);
                }
            }
            buffersARB.Clear();
        }
    }

    // Deletes queued textures whose refresh matches the current context.
    static void DoGlDeleteTextures(int refresh)
    {
        if (textures.Count > 0)
        {
            // FIX: the original iterated the buffersARB queue here (copy-paste
            // bug), deleting buffer names as textures and never draining the
            // texture queue. Iterate the textures queue instead.
            for (int index = 0; index < textures.Count; ++index)
            {
                DeleteInfo info = textures[index];
                if (info.refresh == refresh)
                {
                    Gl.glDeleteTextures(info.names.Length, info.names);
                }
            }
            textures.Clear();
        }
    }

    // Deletes queued display lists whose refresh matches the current context.
    static void DoGlDeleteLists(int refresh)
    {
        if (glLists.Count > 0)
        {
            for (int index = 0; index < glLists.Count; ++index)
            {
                DeleteInfo info = glLists[index];
                if (info.refresh == refresh)
                {
                    Gl.glDeleteLists(info.names[0], info.names[1]);
                }
            }
            glLists.Clear();
        }
    }

    /// <summary>
    /// Runs all queued deletions for the given refresh. Must be called on the
    /// thread that owns the GL context.
    /// </summary>
    internal static void DoDelete(int refresh)
    {
        lock (syncRoot)
        {
            lastRefresh = refresh;
            DoGlDeleteBuffersARB(refresh);
            DoGlDeleteTextures(refresh);
            DoGlDeleteLists(refresh);
        }
    }
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using Microsoft.Extensions.DependencyInjection;
using Orleans.ApplicationParts;
using Orleans.CodeGeneration;
using Orleans.Metadata;
using Orleans.Runtime;
namespace Orleans.GrainReferences
{
/// <summary>
/// The central point for creating <see cref="GrainReference"/> instances.
/// </summary>
public sealed class GrainReferenceActivator
{
    // Guards all writes to _activators; reads are lock-free (see below).
    private readonly object _lockObj = new object();
    private readonly IServiceProvider _serviceProvider;
    // Candidate providers, consulted in registration order by CreateActivator.
    private readonly IGrainReferenceActivatorProvider[] _providers;
    // Copy-on-write cache keyed by (grain type, interface type). Readers
    // dereference this field without locking; writers never mutate the
    // current dictionary — they publish a new one under _lockObj.
    private Dictionary<(GrainType, GrainInterfaceType), Entry> _activators = new Dictionary<(GrainType, GrainInterfaceType), Entry>();
    public GrainReferenceActivator(
        IServiceProvider serviceProvider,
        IEnumerable<IGrainReferenceActivatorProvider> providers)
    {
        _serviceProvider = serviceProvider;
        _providers = providers.ToArray();
    }
    // Creates a reference to the given grain, resolving (and caching) the
    // activator for its (grain type, interface type) pair on first use.
    public GrainReference CreateReference(GrainId grainId, GrainInterfaceType interfaceType)
    {
        // Lock-free fast path: safe because _activators is replaced
        // wholesale, never mutated in place.
        if (!_activators.TryGetValue((grainId.Type, interfaceType), out var entry))
        {
            entry = CreateActivator(grainId.Type, interfaceType);
        }
        var result = entry.Activator.CreateReference(grainId);
        return result;
    }
    // Slow path: finds a provider for the pair and publishes an updated
    // cache. Throws InvalidOperationException when no provider matches.
    private Entry CreateActivator(GrainType grainType, GrainInterfaceType interfaceType)
    {
        lock (_lockObj)
        {
            // Double-check under the lock: another thread may have already
            // published an entry for this pair.
            if (!_activators.TryGetValue((grainType, interfaceType), out var entry))
            {
                IGrainReferenceActivator activator = null;
                foreach (var provider in _providers)
                {
                    if (provider.TryGet(grainType, interfaceType, out activator))
                    {
                        break;
                    }
                }
                if (activator is null)
                {
                    throw new InvalidOperationException($"Unable to find an {nameof(IGrainReferenceActivatorProvider)} for grain type {grainType}");
                }
                entry = new Entry(activator);
                // Copy-on-write publish: build a new dictionary containing
                // the new entry and swap the reference atomically.
                _activators = new Dictionary<(GrainType, GrainInterfaceType), Entry>(_activators) { [(grainType, interfaceType)] = entry };
            }
            return entry;
        }
    }
    // Immutable cache slot wrapping a resolved activator.
    private readonly struct Entry
    {
        public Entry(IGrainReferenceActivator activator)
        {
            this.Activator = activator;
        }
        public IGrainReferenceActivator Activator { get; }
    }
}
/// <summary>
/// Provides grain-reference activators for the default (untyped) grain
/// interface only; typed interfaces are left to other providers.
/// </summary>
internal class UntypedGrainReferenceActivatorProvider : IGrainReferenceActivatorProvider
{
    private readonly IServiceProvider _serviceProvider;
    // Lazily resolved and cached on first successful TryGet call.
    private IGrainReferenceRuntime _grainReferenceRuntime;

    public UntypedGrainReferenceActivatorProvider(IServiceProvider serviceProvider)
    {
        _serviceProvider = serviceProvider;
    }

    public bool TryGet(GrainType grainType, GrainInterfaceType interfaceType, out IGrainReferenceActivator activator)
    {
        if (interfaceType.IsDefault)
        {
            // Resolve the runtime once and keep it for subsequent calls.
            IGrainReferenceRuntime runtime = _grainReferenceRuntime;
            if (runtime is null)
            {
                runtime = _serviceProvider.GetRequiredService<IGrainReferenceRuntime>();
                _grainReferenceRuntime = runtime;
            }

            var shared = new GrainReferenceShared(grainType, interfaceType, runtime, InvokeMethodOptions.None);
            activator = new UntypedGrainReferenceActivator(shared);
            return true;
        }

        activator = default;
        return false;
    }

    // Activator that produces plain (untyped) GrainReference instances.
    private class UntypedGrainReferenceActivator : IGrainReferenceActivator
    {
        private readonly GrainReferenceShared _shared;

        public UntypedGrainReferenceActivator(GrainReferenceShared shared)
        {
            _shared = shared;
        }

        public GrainReference CreateReference(GrainId grainId) => GrainReference.FromGrainId(_shared, grainId);
    }
}
internal class ImrRpcProvider
{
    private readonly TypeConverter _typeConverter;
    private readonly Dictionary<GrainInterfaceType, (Type ReferenceType, Type InvokerType)> _mapping;

    public ImrRpcProvider(
        IServiceProvider serviceProvider,
        IApplicationPartManager appParts,
        GrainInterfaceTypeResolver resolver,
        TypeConverter typeConverter)
    {
        _typeConverter = typeConverter;
        _mapping = new Dictionary<GrainInterfaceType, (Type ReferenceType, Type InvokerType)>();

        // Index every known grain interface by its resolved interface-type id.
        var interfaces = appParts.CreateAndPopulateFeature<GrainInterfaceFeature>();
        foreach (var description in interfaces.Interfaces)
        {
            var id = resolver.GetGrainInterfaceType(description.InterfaceType);
            _mapping[id] = (description.ReferenceType, description.InvokerType);
        }
    }

    /// <summary>
    /// Looks up the reference/invoker implementation types for an interface type,
    /// closing open generic implementations over the interface's type arguments when present.
    /// </summary>
    public bool TryGet(GrainInterfaceType interfaceType, out (Type ReferenceType, Type InvokerType) result)
    {
        GrainInterfaceType lookupId;
        Type[] typeArguments;

        if (GenericGrainInterfaceType.TryParse(interfaceType, out var genericId))
        {
            // Generic interface: look up by the open generic id and keep the arguments for later.
            lookupId = genericId.GetGenericGrainType().Value;
            typeArguments = genericId.GetArguments(_typeConverter);
        }
        else
        {
            lookupId = interfaceType;
            typeArguments = null;
        }

        if (!_mapping.TryGetValue(lookupId, out var mapping))
        {
            result = default;
            return false;
        }

        var (referenceType, invokerType) = mapping;

        if (typeArguments != null)
        {
            referenceType = referenceType.MakeGenericType(typeArguments);
            invokerType = invokerType.MakeGenericType(typeArguments);
        }

        result = (referenceType, invokerType);
        return true;
    }
}
internal class ImrGrainMethodInvokerProvider
{
    private readonly ImrRpcProvider _rpcProvider;
    private readonly IServiceProvider _serviceProvider;
    private readonly ConcurrentDictionary<GrainInterfaceType, IGrainMethodInvoker> _invokers = new ConcurrentDictionary<GrainInterfaceType, IGrainMethodInvoker>();

    public ImrGrainMethodInvokerProvider(ImrRpcProvider rpcProvider, IServiceProvider serviceProvider)
    {
        _rpcProvider = rpcProvider;
        _serviceProvider = serviceProvider;
    }

    /// <summary>
    /// Gets (creating and caching on first use) the method invoker for the specified interface type.
    /// Returns false when no invoker implementation is registered for the interface.
    /// </summary>
    public bool TryGet(GrainInterfaceType interfaceType, out IGrainMethodInvoker invoker)
    {
        if (_invokers.TryGetValue(interfaceType, out invoker))
        {
            return true;
        }

        if (!_rpcProvider.TryGet(interfaceType, out var types))
        {
            invoker = default;
            return false;
        }

        // GetOrAdd (instead of a racy check-then-set with the indexer) guarantees that all
        // callers observe the same cached instance even when multiple threads race here.
        // The value factory may still run more than once, but only one result is published.
        invoker = _invokers.GetOrAdd(
            interfaceType,
            _ => (IGrainMethodInvoker)ActivatorUtilities.CreateInstance(_serviceProvider, types.InvokerType));
        return true;
    }
}
internal class ImrGrainReferenceActivatorProvider : IGrainReferenceActivatorProvider
{
    private readonly IServiceProvider _serviceProvider;
    private readonly GrainPropertiesResolver _propertiesResolver;
    private readonly ImrRpcProvider _rpcProvider;

    // Resolved from the container on first use and cached thereafter.
    private IGrainReferenceRuntime _grainReferenceRuntime;

    public ImrGrainReferenceActivatorProvider(
        IServiceProvider serviceProvider,
        GrainPropertiesResolver propertiesResolver,
        ImrRpcProvider rpcProvider)
    {
        _serviceProvider = serviceProvider;
        _propertiesResolver = propertiesResolver;
        _rpcProvider = rpcProvider;
    }

    /// <summary>
    /// Supplies an activator when an invoker/reference implementation is registered
    /// for the given interface type; fails otherwise.
    /// </summary>
    public bool TryGet(GrainType grainType, GrainInterfaceType interfaceType, out IGrainReferenceActivator activator)
    {
        if (!_rpcProvider.TryGet(interfaceType, out var types))
        {
            activator = default;
            return false;
        }

        // The "unordered" grain property (when present and "true") toggles InvokeMethodOptions.Unordered.
        var properties = _propertiesResolver.GetGrainProperties(grainType);
        bool unordered =
            properties.Properties.TryGetValue(WellKnownGrainTypeProperties.Unordered, out var unorderedString)
            && string.Equals("true", unorderedString, StringComparison.OrdinalIgnoreCase);
        var invokeMethodOptions = unordered ? InvokeMethodOptions.Unordered : InvokeMethodOptions.None;

        var runtime = _grainReferenceRuntime ??= _serviceProvider.GetRequiredService<IGrainReferenceRuntime>();
        var shared = new GrainReferenceShared(grainType, interfaceType, runtime, invokeMethodOptions);
        activator = new ImrGrainReferenceActivator(types.ReferenceType, shared);
        return true;
    }

    private class ImrGrainReferenceActivator : IGrainReferenceActivator
    {
        private readonly Type _referenceType;
        private readonly GrainReferenceShared _shared;

        public ImrGrainReferenceActivator(Type referenceType, GrainReferenceShared shared)
        {
            _referenceType = referenceType;
            _shared = shared;
        }

        public GrainReference CreateReference(GrainId grainId)
        {
            // The generated reference types are constructed via their non-public
            // (GrainReferenceShared, key) constructor, hence reflection here.
            object instance = Activator.CreateInstance(
                type: _referenceType,
                bindingAttr: BindingFlags.Instance | BindingFlags.NonPublic,
                binder: null,
                args: new object[] { _shared, grainId.Key },
                culture: CultureInfo.InvariantCulture);
            return (GrainReference)instance;
        }
    }
}
/// <summary>
/// Provides <see cref="IGrainReferenceActivator"/> instances for (grain type, interface type) pairs.
/// </summary>
public interface IGrainReferenceActivatorProvider
{
    /// <summary>Attempts to get an activator for the given grain type and interface type.</summary>
    bool TryGet(GrainType grainType, GrainInterfaceType interfaceType, out IGrainReferenceActivator activator);
}
/// <summary>
/// Creates <see cref="GrainReference"/> instances.
/// </summary>
public interface IGrainReferenceActivator
{
    /// <summary>Creates a reference to the grain with the specified identity.</summary>
    public GrainReference CreateReference(GrainId grainId);
}
}
| |
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
namespace Microsoft.Zelig.CodeGeneration.IR.CompilationSteps
{
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using Microsoft.Zelig.Runtime.TypeSystem;
/// <summary>
/// Tracks, for each method body (control-flow graph), the call operators originating
/// from it and targeting it, and uses that information to decide on and execute inlining.
/// </summary>
public sealed class CallsDataBase
{
    // Per-method record of call sites plus the inlining decision/clone for that method.
    internal class Entry
    {
        //
        // State
        //
        int m_version;                                              // m_cfg.Version at the time of the last AnalyzeInlining run (-1 = never analyzed).
        ControlFlowGraphStateForCodeTransformation m_cfg;           // The method body this entry describes.
        ControlFlowGraphStateForCodeTransformation m_clonedCFG;     // Clone used as the inlining source; null when the method was judged not inlinable.
        List< CallOperator > m_callsFromThisMethod;                 // Call operators inside m_cfg.
        List< CallOperator > m_callsToThisMethod;                   // Call operators elsewhere that target m_cfg's method.
        //
        // Constructor Methods
        //
        internal Entry( ControlFlowGraphStateForCodeTransformation cfg )
        {
            m_cfg = cfg;
            m_version = -1;
            m_callsFromThisMethod = new List< CallOperator >();
            m_callsToThisMethod = new List< CallOperator >();
        }
        //
        // Helper Methods
        //

        // Clears the recorded call sites (both directions) before a fresh Analyze pass.
        internal void Reset()
        {
            m_callsFromThisMethod.Clear();
            m_callsToThisMethod .Clear();
        }
        // NOTE(review): these two methods lock on 'this'; a private gate object would be
        // safer, though Entry is internal so external lock contention is unlikely.
        internal void AddCallFromThisMethod( CallOperator call )
        {
            lock(this)
            {
                m_callsFromThisMethod.Add( call );
            }
        }
        internal void AddCallToThisMethod( CallOperator call )
        {
            lock(this)
            {
                m_callsToThisMethod.Add( call );
            }
        }
        // Decides whether this method should be inlined, caching the decision per CFG version.
        // A method is inlined when explicitly flagged Inline, or when the body is trivial by
        // the heuristics below (noop, empty constructor, simple getter/setter, tiny call-free body).
        // When inlinable, a clone of the CFG is captured to serve as the inlining source.
        internal void AnalyzeInlining()
        {
            // Only re-analyze when the CFG has changed since the last run.
            if(m_version != m_cfg.Version)
            {
                m_version = m_cfg.Version;
                bool fInline;
                MethodRepresentation md = m_cfg.Method;
                if(md.HasBuildTimeFlag( MethodRepresentation.BuildTimeAttributes.NoInline ))
                {
                    fInline = false;
                }
                else if(md.HasBuildTimeFlag( MethodRepresentation.BuildTimeAttributes.Inline ))
                {
                    fInline = true;
                }
                else
                {
                    // No explicit flag: classify the operators reachable from the entry
                    // block (exit block excluded) and apply size/shape heuristics.
                    bool fCallToConstructor = false;
                    int iCall = 0;
                    int iGetter = 0;
                    int iSetter = 0;
                    int iOther = 0;
                    var set = SetFactory.NewWithReferenceEquality< BasicBlock >();
                    var queue = new Queue < BasicBlock >();
                    var bbStart = m_cfg.NormalizedEntryBasicBlock;
                    var bbEnd = m_cfg.NormalizedExitBasicBlock;
                    set .Insert ( bbStart );
                    queue.Enqueue( bbStart );
                    // Breadth-first walk over the reachable basic blocks.
                    while(queue.Count > 0)
                    {
                        var bb = queue.Dequeue();
                        foreach(var edge in bb.Successors)
                        {
                            var bbNext = edge.Successor;
                            // NOTE(review): assumes GrowOnlySet.Insert returns true when the
                            // element was ALREADY present, so '== false' enqueues only
                            // newly-seen blocks -- confirm against the Insert contract.
                            if(bbNext != bbEnd && set.Insert( bbNext ) == false)
                            {
                                queue.Enqueue( bbNext );
                            }
                        }
                        foreach(Operator op in bb.Operators)
                        {
                            if(op is CallOperator)
                            {
                                CallOperator call = (CallOperator)op;
                                if(call.TargetMethod is ConstructorMethodRepresentation)
                                {
                                    fCallToConstructor = true;
                                }
                                else
                                {
                                    iCall++;
                                }
                            }
                            else if(op is LoadFieldOperator)
                            {
                                iGetter++;
                            }
                            else if(op is StoreFieldOperator)
                            {
                                iSetter++;
                            }
                            else if(op is NopOperator ||
                                    op is UnconditionalControlOperator ||
                                    op is AbstractAssignmentOperator ||
                                    op is ReturnControlOperator )
                            {
                                // Ignore them.
                            }
                            else
                            {
                                iOther++;
                            }
                        }
                    }
                    if(!fCallToConstructor && iGetter == 0 && iSetter == 0 && iOther == 0)
                    {
                        fInline = true; // Noop method.
                    }
                    else if(fCallToConstructor && iGetter == 0 && iSetter == 0 && iOther == 0)
                    {
                        fInline = true; // Empty constructor.
                    }
                    else if(!fCallToConstructor && iGetter == 1 && iSetter == 0 && iOther < 2)
                    {
                        fInline = true; // Simple getter.
                    }
                    else if(!fCallToConstructor && iGetter == 0 && iSetter == 1 && iOther == 0)
                    {
                        fInline = true; // Simple setter.
                    }
                    else if(!fCallToConstructor && iGetter == 0 && iSetter == 0 && iCall == 0 && iOther < 4)
                    {
                        fInline = true; // Simple method with no calls.
                    }
                    else
                    {
                        fInline = false;
                    }
                }
                if(fInline)
                {
                    // Capture a clone now so later inlining works from a stable copy.
                    m_clonedCFG = m_cfg.Clone( null );
                }
                else
                {
                    m_clonedCFG = null;
                }
            }
        }
        // Forces a clone to exist (used for forced inlining regardless of the heuristics).
        internal void EnsureCloned()
        {
            if(m_clonedCFG == null)
            {
                m_clonedCFG = m_cfg.Clone( null );
            }
        }
        // Moves eligible call sites targeting this (inlinable) method into 'workList',
        // keyed by the caller's CFG, and records the touched caller CFGs in 'touched'.
        // Skips self-recursive calls, calls already on this method's inlining path
        // (loop prevention), and virtual/indirect calls; removes handled/dead entries
        // from m_callsToThisMethod.
        internal void QueueInlining( GrowOnlySet< ControlFlowGraphStateForCodeTransformation > touched ,
                                     GrowOnlyHashTable< ControlFlowGraphStateForCodeTransformation, List< CallOperator > > workList )
        {
            if(m_clonedCFG != null)
            {
                // Iterate backwards so RemoveAt(pos) is safe.
                for(int pos = m_callsToThisMethod.Count; --pos >= 0; )
                {
                    bool fRemove = false;
                    CallOperator call = m_callsToThisMethod[pos];
                    // 'while(true)' is used as a breakable scope: each 'break' falls
                    // through to the removal check below.
                    while(true)
                    {
                        BasicBlock bb = call.BasicBlock;
                        if(bb == null)
                        {
                            //
                            // Dead operator, just remove it.
                            //
                            fRemove = true;
                            break;
                        }
                        var cfg = (ControlFlowGraphStateForCodeTransformation)bb.Owner;
                        if(cfg == m_cfg)
                        {
                            //
                            // Can't self-inline.
                            //
                            break;
                        }
                        //
                        // Detect inlining loops and stop them.
                        //
                        var an = call.GetAnnotation< InliningPathAnnotation >();
                        if(an != null)
                        {
                            if(ArrayUtility.FindInNotNullArray( an.Path, m_clonedCFG.Method ) >= 0)
                            {
                                break;
                            }
                        }
                        var callType = call.CallType;
                        if(callType == CallOperator.CallKind.Virtual ||
                           callType == CallOperator.CallKind.Indirect )
                        {
                            break;
                        }
                        var cfgInlineSite = (ControlFlowGraphStateForCodeTransformation)call.BasicBlock.Owner;
                        touched.Insert( cfgInlineSite );
                        HashTableWithListFactory.AddUnique( workList, cfgInlineSite, call );
                        fRemove = true;
                        break;
                    }
                    if(fRemove)
                    {
                        m_callsToThisMethod.RemoveAt( pos );
                    }
                }
            }
        }
        //
        // Access Methods
        //
        internal ControlFlowGraphStateForCodeTransformation Cfg
        {
            get
            {
                return m_cfg;
            }
        }
        internal ControlFlowGraphStateForCodeTransformation ClonedCfg
        {
            get
            {
                return m_clonedCFG;
            }
        }
        internal List< CallOperator > CallFromThisMethod
        {
            get
            {
                return m_callsFromThisMethod;
            }
        }
        internal List< CallOperator > CallToThisMethod
        {
            get
            {
                return m_callsToThisMethod;
            }
        }
    }
    //
    // State
    //
    private GrowOnlyHashTable< ControlFlowGraphStateForCodeTransformation, Entry > m_entries;       // One Entry per known method body.
    private GrowOnlySet < CallOperator > m_forcedInlines;                                           // Call sites explicitly queued for forced inlining.
    //
    // Constructor Methods
    //
    public CallsDataBase()
    {
        m_entries = HashTableFactory.NewWithReferenceEquality< ControlFlowGraphStateForCodeTransformation, Entry >();
        m_forcedInlines = SetFactory .NewWithReferenceEquality< CallOperator >();
    }
    //
    // Helper Methods
    //

    // Records 'call' both as an outgoing call of its owning method and (when the
    // target's code is available) as an incoming call of the target method.
    public void RegisterCallSite( CallOperator call )
    {
        var cfgFrom = (ControlFlowGraphStateForCodeTransformation) call.BasicBlock.Owner;
        var cfgTo = TypeSystemForCodeTransformation.GetCodeForMethod( call.TargetMethod );
        Entry enFrom = GetEntry( cfgFrom, true );
        enFrom.AddCallFromThisMethod( call );
        if(cfgTo != null)
        {
            Entry enTo = GetEntry( cfgTo, true );
            enTo.AddCallToThisMethod( call );
        }
    }
    // Requests that 'call' be inlined regardless of the heuristics.
    public void QueueForForcedInlining( CallOperator call )
    {
        lock(TypeSystemForCodeTransformation.Lock) // It's called from multiple threads during parallel phase executions.
        {
            m_forcedInlines.Insert( call );
        }
    }
    public void ClearCallSites()
    {
        m_entries.Clear();
    }
    public void ResetCallSites()
    {
        foreach(Entry en in m_entries.Values)
        {
            en.Reset();
        }
    }
    // Returns the recorded call sites targeting 'md', or null when its code is unknown.
    public List< CallOperator > CallsToMethod( MethodRepresentation md )
    {
        var cfg = TypeSystemForCodeTransformation.GetCodeForMethod( md );
        Entry en = GetEntry( cfg, false );
        return (en != null) ? en.CallToThisMethod : null;
    }
    // Returns the recorded call sites inside 'md', or null when its code is unknown.
    public List< CallOperator > CallsFromMethod( MethodRepresentation md )
    {
        var cfg = TypeSystemForCodeTransformation.GetCodeForMethod( md );
        Entry en = GetEntry( cfg, false );
        return (en != null) ? en.CallFromThisMethod : null;
    }
    //--//

    // Rebuilds the call-site database by scanning every flow graph in parallel.
    public void Analyze( TypeSystemForCodeTransformation typeSystem )
    {
        ResetCallSites();
        ParallelTransformationsHandler.EnumerateFlowGraphs( typeSystem, delegate( ControlFlowGraphStateForCodeTransformation cfg )
        {
            foreach(var call in cfg.FilterOperators< CallOperator >())
            {
                RegisterCallSite( call );
            }
        } );
    }
    // Runs the inlining decision (Entry.AnalyzeInlining) for every known method.
    public void AnalyzeForInlining()
    {
        foreach(Entry en in m_entries.Values)
        {
            en.AnalyzeInlining();
        }
    }
    // Repeatedly collects eligible call sites (heuristic + forced) into a per-caller
    // work list and inlines them in parallel, until no more work is produced.
    // Returns the set of caller CFGs that were modified.
    public GrowOnlySet< ControlFlowGraphStateForCodeTransformation > ExecuteInlining( TypeSystemForCodeTransformation typeSystem )
    {
        var workList = HashTableFactory.NewWithReferenceEquality< ControlFlowGraphStateForCodeTransformation, List< CallOperator > >();
        var touched = SetFactory .NewWithReferenceEquality< ControlFlowGraphStateForCodeTransformation >();
        var touched2 = touched.CloneSettings();
        while(true)
        {
            workList.Clear();
            touched2.Clear();
            foreach(Entry en in m_entries.Values)
            {
                en.QueueInlining( touched2, workList );
            }
            // Forced inlines bypass the heuristics: ensure a clone exists and queue them too.
            foreach(CallOperator call in m_forcedInlines)
            {
                if(!call.IsDetached)
                {
                    var cfgInlineSite = (ControlFlowGraphStateForCodeTransformation)call.BasicBlock.Owner;
                    var cfgTarget = TypeSystemForCodeTransformation.GetCodeForMethod( call.TargetMethod );
                    touched2.Insert( cfgInlineSite );
                    var en = GetEntry( cfgTarget, true );
                    en.EnsureCloned();
                    HashTableWithListFactory.AddUnique( workList, cfgInlineSite, call );
                }
            }
            if(workList.Count == 0)
            {
                break;
            }
            //--//

            // Lock the flow information of every inlining source before the parallel phase.
            var lockList = HashTableFactory.NewWithReferenceEquality< ControlFlowGraphStateForCodeTransformation, IDisposable >();
            foreach(var cfg in workList.Keys)
            {
                foreach(var call in workList[cfg])
                {
                    var cfgTarget = TypeSystemForCodeTransformation.GetCodeForMethod( call.TargetMethod );
                    Entry en;
                    m_entries.TryGetValue( cfgTarget, out en );
                    var cfgCloned = en.ClonedCfg;
                    // NOTE(review): the guard checks ContainsKey(cfgCloned) but the lock is
                    // stored under key cfgTarget -- the keys disagree, so the guard can never
                    // see the stored entry. Confirm which key is intended before relying on
                    // the de-duplication here.
                    if(lockList.ContainsKey( cfgCloned ) == false)
                    {
                        var bbEntry = cfgCloned.NormalizedEntryBasicBlock;
                        var bbExit = cfgCloned.NormalizedExitBasicBlock;
                        lockList[cfgTarget] = cfgCloned.LockFlowInformation();
                    }
                }
            }
            // Perform the inlining per caller CFG in parallel, then clean up each modified CFG.
            ParallelTransformationsHandler.EnumerateFlowGraphs( typeSystem, delegate( ControlFlowGraphStateForCodeTransformation cfg )
            {
                List< CallOperator > calls;
                bool fGot = false;
                if(workList.TryGetValue( cfg, out calls ))
                {
                    foreach(var call in calls)
                    {
                        var cfgTarget = TypeSystemForCodeTransformation.GetCodeForMethod( call.TargetMethod );
                        Entry en;
                        m_entries.TryGetValue( cfgTarget, out en );
                        cfg.TraceToFile( "InlineCall" );
                        using(ControlFlowGraphState.AddExceptionToThreadMethodLock( call.TargetMethod ))
                        {
                            Transformations.InlineCall.Execute( call, en.ClonedCfg, null );
                        }
                        cfg.TraceToFile( "InlineCall-Post" );
                        fGot = true;
                    }
                    if(fGot)
                    {
                        Transformations.CommonMethodRedundancyElimination.Execute( cfg );
                        cfg.DropDeadVariables();
                    }
                }
            } );
            foreach(var disp in lockList.Values)
            {
                disp.Dispose();
            }
            touched.Merge( touched2 );
        }
        return touched;
    }
    //--//

    // Looks up (and optionally creates) the Entry for 'cfg'.
    // Returns null when no entry exists and fAllocate is false.
    Entry GetEntry( ControlFlowGraphStateForCodeTransformation cfg ,
                    bool fAllocate )
    {
        lock(TypeSystemForCodeTransformation.Lock) // It's called from multiple threads during parallel phase executions.
        {
            Entry entry;
            if(m_entries.TryGetValue( cfg, out entry ) == false && fAllocate)
            {
                entry = new Entry( cfg );
                m_entries[cfg] = entry;
            }
            return entry;
        }
    }
}
}
| |
using UnityEngine;
namespace UnitySampleAssets.ImageEffects
{
[ExecuteInEditMode]
[RequireComponent(typeof (Camera))]
[AddComponentMenu("Image Effects/Depth of Field (Lens Blur, Scatter, DX11)")]
// Depth-of-field image effect with two paths: a disc (Poisson-style) blur, and a
// DX11 bokeh-scatter path that collects bright pixels into a compute buffer and
// splats a bokeh texture over them via DrawProceduralIndirect.
public class DepthOfFieldScatter : PostEffectsBase
{
    public bool visualizeFocus = false;          // Render the circle-of-confusion instead of the blurred image.
    public float focalLength = 10.0f;            // Focus distance in world units (used when focalTransform is null).
    public float focalSize = 0.05f;
    public float aperture = 11.5f;
    public Transform focalTransform = null;      // Optional transform whose position defines the focus distance.
    public float maxBlurSize = 2.0f;
    public bool highResolution = false;          // Work at full resolution instead of half resolution.
    public enum BlurType
    {
        DiscBlur = 0,
        DX11 = 1,
    }
    public enum BlurSampleCount
    {
        Low = 0,
        Medium = 1,
        High = 2,
    }
    public BlurType blurType = BlurType.DiscBlur;
    public BlurSampleCount blurSampleCount = BlurSampleCount.High;
    public bool nearBlur = false;                // Also blur the foreground (in front of the focal plane).
    public float foregroundOverlap = 1.0f;
    public Shader dofHdrShader;
    private Material dofHdrMaterial = null;
    public Shader dx11BokehShader;
    private Material dx11bokehMaterial;
    public float dx11BokehThreshhold = 0.5f;
    public float dx11SpawnHeuristic = 0.0875f;
    public Texture2D dx11BokehTexture = null;    // Sprite splatted per bokeh point in the DX11 path.
    public float dx11BokehScale = 1.2f;
    public float dx11BokehIntensity = 2.5f;
    private float focalDistance01 = 10.0f;       // Focus distance normalized to [0,1] viewport depth.
    private ComputeBuffer cbDrawArgs;            // Indirect-draw arguments (vertex count copied from cbPoints).
    private ComputeBuffer cbPoints;              // Append buffer of collected bokeh candidate points.
    private float internalBlurWidth = 1.0f;
    // Validates shaders/materials and (for the DX11 path) the compute buffers.
    protected override bool CheckResources()
    {
        CheckSupport(true); // only requires depth, not HDR
        dofHdrMaterial = CheckShaderAndCreateMaterial(dofHdrShader, dofHdrMaterial);
        if (supportDX11 && blurType == BlurType.DX11)
        {
            dx11bokehMaterial = CheckShaderAndCreateMaterial(dx11BokehShader, dx11bokehMaterial);
            CreateComputeResources();
        }
        if (!isSupported)
            ReportAutoDisable();
        return isSupported;
    }
    private void OnEnable()
    {
        // The effect reads the camera depth texture, so request it.
        GetComponent<Camera>().depthTextureMode |= DepthTextureMode.Depth;
    }
    private void OnDisable()
    {
        ReleaseComputeResources();
        if (dofHdrMaterial) DestroyImmediate(dofHdrMaterial);
        dofHdrMaterial = null;
        if (dx11bokehMaterial) DestroyImmediate(dx11bokehMaterial);
        dx11bokehMaterial = null;
    }
    private void ReleaseComputeResources()
    {
        if (cbDrawArgs != null) cbDrawArgs.Release();
        cbDrawArgs = null;
        if (cbPoints != null) cbPoints.Release();
        cbPoints = null;
    }
    private void CreateComputeResources()
    {
        if (cbDrawArgs == null)
        {
            // DrawIndirect args layout: vertex count (filled by CopyCount), instance count, offsets.
            cbDrawArgs = new ComputeBuffer(1, 16, ComputeBufferType.DrawIndirect);
            var args = new int[4];
            args[0] = 0;
            args[1] = 1;
            args[2] = 0;
            args[3] = 0;
            cbDrawArgs.SetData(args);
        }
        if (cbPoints == null)
        {
            // 90000 candidate points, 12 bytes position + 16 bytes color per point.
            cbPoints = new ComputeBuffer(90000, 12 + 16, ComputeBufferType.Append);
        }
    }
    // Converts a world-space focus distance into the [0,1] range used by the shader.
    private float FocalDistance01(float worldDist)
    {
        return
            GetComponent<Camera>().WorldToViewportPoint((worldDist - GetComponent<Camera>().nearClipPlane)*GetComponent<Camera>().transform.forward +
                                                        GetComponent<Camera>().transform.position).z/(GetComponent<Camera>().farClipPlane - GetComponent<Camera>().nearClipPlane);
    }
    // Writes the circle-of-confusion into fromTo's alpha channel. When fgDilate is set
    // (and near blur is enabled), the foreground COC is captured, blurred via temp1/temp2,
    // and merged with the background COC.
    private void WriteCoc(RenderTexture fromTo, RenderTexture temp1, RenderTexture temp2, bool fgDilate)
    {
        dofHdrMaterial.SetTexture("_FgOverlap", null);
        if (nearBlur && fgDilate)
        {
            // capture fg coc
            Graphics.Blit(fromTo, temp2, dofHdrMaterial, 4);
            // special blur
            float fgAdjustment = internalBlurWidth*foregroundOverlap;
            dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, fgAdjustment, 0.0f, fgAdjustment));
            Graphics.Blit(temp2, temp1, dofHdrMaterial, 2);
            dofHdrMaterial.SetVector("_Offsets", new Vector4(fgAdjustment, 0.0f, 0.0f, fgAdjustment));
            Graphics.Blit(temp1, temp2, dofHdrMaterial, 2);
            // "merge up" with background COC
            dofHdrMaterial.SetTexture("_FgOverlap", temp2);
            Graphics.Blit(fromTo, fromTo, dofHdrMaterial, 13);
        }
        else
        {
            // capture full coc in alpha channel (fromTo is not read, but bound to detect screen flip)
            Graphics.Blit(fromTo, fromTo, dofHdrMaterial, 0);
        }
    }
    // Main entry point: clamps parameters, computes the normalized focus distance,
    // then runs one of three paths: COC visualization, DX11 bokeh scatter
    // (high or low resolution), or the disc blur.
    // The numeric pass indices used throughout are specific to the dofHdr shader.
    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        if (!CheckResources())
        {
            Graphics.Blit(source, destination);
            return;
        }
        // clamp & prepare values so they make sense
        if (aperture < 0.0f) aperture = 0.0f;
        if (maxBlurSize < 0.1f) maxBlurSize = 0.1f;
        focalSize = Mathf.Clamp(focalSize, 0.0f, 2.0f);
        internalBlurWidth = Mathf.Max(maxBlurSize, 0.0f);
        // focal & coc calculations
        focalDistance01 = (focalTransform)
                              ? (GetComponent<Camera>().WorldToViewportPoint(focalTransform.position)).z/(GetComponent<Camera>().farClipPlane)
                              : FocalDistance01(focalLength);
        dofHdrMaterial.SetVector("_CurveParams", new Vector4(1.0f, focalSize, aperture/10.0f, focalDistance01));
        // possible render texture helpers
        RenderTexture rtLow = null;
        RenderTexture rtLow2 = null;
        RenderTexture rtSuperLow1 = null;
        RenderTexture rtSuperLow2 = null;
        float fgBlurDist = internalBlurWidth*foregroundOverlap;
        if (visualizeFocus)
        {
            //
            // 2.
            // visualize coc
            //
            //
            rtLow = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
            rtLow2 = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
            WriteCoc(source, rtLow, rtLow2, true);
            Graphics.Blit(source, destination, dofHdrMaterial, 16);
        }
        else if ((blurType == BlurType.DX11) && dx11bokehMaterial)
        {
            //
            // 1.
            // optimized dx11 bokeh scatter
            //
            //
            if (highResolution)
            {
                internalBlurWidth = internalBlurWidth < 0.1f ? 0.1f : internalBlurWidth;
                fgBlurDist = internalBlurWidth*foregroundOverlap;
                rtLow = RenderTexture.GetTemporary(source.width, source.height, 0, source.format);
                var dest2 = RenderTexture.GetTemporary(source.width, source.height, 0, source.format);
                // capture COC
                WriteCoc(source, null, null, false);
                // blur a bit so we can do a frequency check
                rtSuperLow1 = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
                rtSuperLow2 = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
                Graphics.Blit(source, rtSuperLow1, dofHdrMaterial, 15);
                dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, 1.5f, 0.0f, 1.5f));
                Graphics.Blit(rtSuperLow1, rtSuperLow2, dofHdrMaterial, 19);
                dofHdrMaterial.SetVector("_Offsets", new Vector4(1.5f, 0.0f, 0.0f, 1.5f));
                Graphics.Blit(rtSuperLow2, rtSuperLow1, dofHdrMaterial, 19);
                // capture fg coc
                if (nearBlur)
                    Graphics.Blit(source, rtSuperLow2, dofHdrMaterial, 4);
                dx11bokehMaterial.SetTexture("_BlurredColor", rtSuperLow1);
                dx11bokehMaterial.SetFloat("_SpawnHeuristic", dx11SpawnHeuristic);
                dx11bokehMaterial.SetVector("_BokehParams",
                                            new Vector4(dx11BokehScale, dx11BokehIntensity,
                                                        Mathf.Clamp(dx11BokehThreshhold, 0.005f, 4.0f),
                                                        internalBlurWidth));
                dx11bokehMaterial.SetTexture("_FgCocMask", nearBlur ? rtSuperLow2 : null);
                // collect bokeh candidates and replace with a darker pixel
                Graphics.SetRandomWriteTarget(1, cbPoints);
                Graphics.Blit(source, rtLow, dx11bokehMaterial, 0);
                Graphics.ClearRandomWriteTargets();
                // fg coc blur happens here (after collect!)
                if (nearBlur)
                {
                    dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, fgBlurDist, 0.0f, fgBlurDist));
                    Graphics.Blit(rtSuperLow2, rtSuperLow1, dofHdrMaterial, 2);
                    dofHdrMaterial.SetVector("_Offsets", new Vector4(fgBlurDist, 0.0f, 0.0f, fgBlurDist));
                    Graphics.Blit(rtSuperLow1, rtSuperLow2, dofHdrMaterial, 2);
                    // merge fg coc with bg coc
                    Graphics.Blit(rtSuperLow2, rtLow, dofHdrMaterial, 3);
                }
                // NEW: LAY OUT ALPHA on destination target so we get nicer outlines for the high rez version
                Graphics.Blit(rtLow, dest2, dofHdrMaterial, 20);
                // box blur (easier to merge with bokeh buffer)
                dofHdrMaterial.SetVector("_Offsets", new Vector4(internalBlurWidth, 0.0f, 0.0f, internalBlurWidth));
                Graphics.Blit(rtLow, source, dofHdrMaterial, 5);
                dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, internalBlurWidth, 0.0f, internalBlurWidth));
                Graphics.Blit(source, dest2, dofHdrMaterial, 21);
                // apply bokeh candidates
                Graphics.SetRenderTarget(dest2);
                // Copy the number of appended points into the indirect-draw vertex count.
                ComputeBuffer.CopyCount(cbPoints, cbDrawArgs, 0);
                dx11bokehMaterial.SetBuffer("pointBuffer", cbPoints);
                dx11bokehMaterial.SetTexture("_MainTex", dx11BokehTexture);
                dx11bokehMaterial.SetVector("_Screen",
                                            new Vector3(1.0f/(1.0f*source.width), 1.0f/(1.0f*source.height),
                                                        internalBlurWidth));
                dx11bokehMaterial.SetPass(2);
                Graphics.DrawProceduralIndirect(MeshTopology.Points, cbDrawArgs, 0);
                Graphics.Blit(dest2, destination); // hackaround for DX11 high resolution flipfun (OPTIMIZEME)
                RenderTexture.ReleaseTemporary(dest2);
                RenderTexture.ReleaseTemporary(rtSuperLow1);
                RenderTexture.ReleaseTemporary(rtSuperLow2);
            }
            else
            {
                rtLow = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
                rtLow2 = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
                fgBlurDist = internalBlurWidth*foregroundOverlap;
                // capture COC & color in low resolution
                WriteCoc(source, null, null, false);
                source.filterMode = FilterMode.Bilinear;
                Graphics.Blit(source, rtLow, dofHdrMaterial, 6);
                // blur a bit so we can do a frequency check
                rtSuperLow1 = RenderTexture.GetTemporary(rtLow.width >> 1, rtLow.height >> 1, 0, rtLow.format);
                rtSuperLow2 = RenderTexture.GetTemporary(rtLow.width >> 1, rtLow.height >> 1, 0, rtLow.format);
                Graphics.Blit(rtLow, rtSuperLow1, dofHdrMaterial, 15);
                dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, 1.5f, 0.0f, 1.5f));
                Graphics.Blit(rtSuperLow1, rtSuperLow2, dofHdrMaterial, 19);
                dofHdrMaterial.SetVector("_Offsets", new Vector4(1.5f, 0.0f, 0.0f, 1.5f));
                Graphics.Blit(rtSuperLow2, rtSuperLow1, dofHdrMaterial, 19);
                RenderTexture rtLow3 = null;
                if (nearBlur)
                {
                    // capture fg coc
                    rtLow3 = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
                    Graphics.Blit(source, rtLow3, dofHdrMaterial, 4);
                }
                dx11bokehMaterial.SetTexture("_BlurredColor", rtSuperLow1);
                dx11bokehMaterial.SetFloat("_SpawnHeuristic", dx11SpawnHeuristic);
                dx11bokehMaterial.SetVector("_BokehParams",
                                            new Vector4(dx11BokehScale, dx11BokehIntensity,
                                                        Mathf.Clamp(dx11BokehThreshhold, 0.005f, 4.0f),
                                                        internalBlurWidth));
                dx11bokehMaterial.SetTexture("_FgCocMask", rtLow3);
                // collect bokeh candidates and replace with a darker pixel
                Graphics.SetRandomWriteTarget(1, cbPoints);
                Graphics.Blit(rtLow, rtLow2, dx11bokehMaterial, 0);
                Graphics.ClearRandomWriteTargets();
                RenderTexture.ReleaseTemporary(rtSuperLow1);
                RenderTexture.ReleaseTemporary(rtSuperLow2);
                // fg coc blur happens here (after collect!)
                if (nearBlur)
                {
                    dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, fgBlurDist, 0.0f, fgBlurDist));
                    Graphics.Blit(rtLow3, rtLow, dofHdrMaterial, 2);
                    dofHdrMaterial.SetVector("_Offsets", new Vector4(fgBlurDist, 0.0f, 0.0f, fgBlurDist));
                    Graphics.Blit(rtLow, rtLow3, dofHdrMaterial, 2);
                    // merge fg coc with bg coc
                    Graphics.Blit(rtLow3, rtLow2, dofHdrMaterial, 3);
                }
                // box blur (easier to merge with bokeh buffer)
                dofHdrMaterial.SetVector("_Offsets", new Vector4(internalBlurWidth, 0.0f, 0.0f, internalBlurWidth));
                Graphics.Blit(rtLow2, rtLow, dofHdrMaterial, 5);
                dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, internalBlurWidth, 0.0f, internalBlurWidth));
                Graphics.Blit(rtLow, rtLow2, dofHdrMaterial, 5);
                // apply bokeh candidates
                Graphics.SetRenderTarget(rtLow2);
                // Copy the number of appended points into the indirect-draw vertex count.
                ComputeBuffer.CopyCount(cbPoints, cbDrawArgs, 0);
                dx11bokehMaterial.SetBuffer("pointBuffer", cbPoints);
                dx11bokehMaterial.SetTexture("_MainTex", dx11BokehTexture);
                dx11bokehMaterial.SetVector("_Screen",
                                            new Vector3(1.0f/(1.0f*rtLow2.width), 1.0f/(1.0f*rtLow2.height),
                                                        internalBlurWidth));
                dx11bokehMaterial.SetPass(1);
                Graphics.DrawProceduralIndirect(MeshTopology.Points, cbDrawArgs, 0);
                // upsample & combine
                dofHdrMaterial.SetTexture("_LowRez", rtLow2);
                dofHdrMaterial.SetTexture("_FgOverlap", rtLow3);
                dofHdrMaterial.SetVector("_Offsets",
                                         ((1.0f*source.width)/(1.0f*rtLow2.width))*internalBlurWidth*Vector4.one);
                Graphics.Blit(source, destination, dofHdrMaterial, 9);
                if (rtLow3) RenderTexture.ReleaseTemporary(rtLow3);
            }
        }
        else
        {
            //
            // 2.
            // poisson disc style blur in low resolution
            //
            //
            rtLow = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
            rtLow2 = RenderTexture.GetTemporary(source.width >> 1, source.height >> 1, 0, source.format);
            source.filterMode = FilterMode.Bilinear;
            if (highResolution) internalBlurWidth *= 2.0f;
            WriteCoc(source, rtLow, rtLow2, true);
            int blurPass = (blurSampleCount == BlurSampleCount.High || blurSampleCount == BlurSampleCount.Medium)
                               ? 17
                               : 11;
            if (highResolution)
            {
                dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, internalBlurWidth, 0.025f, internalBlurWidth));
                Graphics.Blit(source, destination, dofHdrMaterial, blurPass);
            }
            else
            {
                dofHdrMaterial.SetVector("_Offsets", new Vector4(0.0f, internalBlurWidth, 0.1f, internalBlurWidth));
                // blur
                Graphics.Blit(source, rtLow, dofHdrMaterial, 6);
                Graphics.Blit(rtLow, rtLow2, dofHdrMaterial, blurPass);
                // cheaper blur in high resolution, upsample and combine
                dofHdrMaterial.SetTexture("_LowRez", rtLow2);
                dofHdrMaterial.SetTexture("_FgOverlap", null);
                dofHdrMaterial.SetVector("_Offsets",
                                         Vector4.one*((1.0f*source.width)/(1.0f*rtLow2.width))*internalBlurWidth);
                Graphics.Blit(source, destination, dofHdrMaterial, blurSampleCount == BlurSampleCount.High ? 18 : 12);
            }
        }
        if (rtLow) RenderTexture.ReleaseTemporary(rtLow);
        if (rtLow2) RenderTexture.ReleaseTemporary(rtLow2);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Linq;
using System.Security.Cryptography.Asn1;
using System.Security.Cryptography.Pkcs.Asn1;
using System.Security.Cryptography.X509Certificates;
using Internal.Cryptography;
namespace System.Security.Cryptography.Pkcs
{
public sealed class Rfc3161TimestampRequest
{
// DER encoding of this request; copied out by Encode/TryEncode.
private byte[] _encodedBytes;
// Decoded ASN.1 representation backing the read-only properties below.
private Rfc3161TimeStampReq _parsedData;
// Instances are only created internally (e.g. by the static CreateFrom* factories).
private Rfc3161TimestampRequest()
{
}
/// <summary>Version number from the decoded request.</summary>
public int Version => _parsedData.Version;
/// <summary>The hash value being timestamped (messageImprint.hashedMessage).</summary>
public ReadOnlyMemory<byte> GetMessageHash() => _parsedData.MessageImprint.HashedMessage;
/// <summary>OID of the hash algorithm that produced the message hash.</summary>
public Oid HashAlgorithmId => _parsedData.MessageImprint.HashAlgorithm.Algorithm;
/// <summary>Requested TSA policy OID (may be null).</summary>
public Oid RequestedPolicyId => _parsedData.ReqPolicy;
/// <summary>Whether the request asks the TSA to include its signing certificate.</summary>
public bool RequestSignerCertificate => _parsedData.CertReq;
/// <summary>The request nonce, if one was supplied.</summary>
public ReadOnlyMemory<byte>? GetNonce() => _parsedData.Nonce;
/// <summary>True when the decoded request carries any extensions.</summary>
public bool HasExtensions => _parsedData.Extensions?.Length > 0;
/// <summary>
/// Copies the request's extensions (if any) into a new <see cref="X509ExtensionCollection"/>.
/// </summary>
public X509ExtensionCollection GetExtensions()
{
    var coll = new X509ExtensionCollection();

    if (HasExtensions)
    {
        foreach (X509ExtensionAsn rawExtension in _parsedData.Extensions)
        {
            // Currently there are no extensions defined.
            // Should this dip into CryptoConfig or other extensible
            // mechanisms for the CopyTo rich type uplift?
            coll.Add(
                new X509Extension(
                    rawExtension.ExtnId,
                    rawExtension.ExtnValue.ToArray(),
                    rawExtension.Critical));
        }
    }

    return coll;
}
/// <summary>
/// Processes a DER-encoded TimeStampResp, throwing on any failure and returning
/// the validated token on success.
/// </summary>
public Rfc3161TimestampToken ProcessResponse(ReadOnlyMemory<byte> source, out int bytesConsumed)
{
    bool accepted = ProcessResponse(
        source,
        out Rfc3161TimestampToken token,
        out Rfc3161RequestResponseStatus status,
        out int localBytesRead,
        shouldThrow: true);

    if (accepted)
    {
        Debug.Assert(status == Rfc3161RequestResponseStatus.Accepted);
        bytesConsumed = localBytesRead;
        return token;
    }

    // With shouldThrow:true any failure should already have thrown inside the worker.
    Debug.Fail($"AcceptResponse should have thrown or returned true (status={status})");
    throw new CryptographicException();
}
// Decodes a TimeStampResp from 'source', validates its PKI status, extracts the
// timestamp token, and checks the token against this request (ValidateResponse).
// When shouldThrow is true every failure throws CryptographicException; otherwise
// the failure category is reported through 'status' and the method returns false.
private bool ProcessResponse(
    ReadOnlyMemory<byte> source,
    out Rfc3161TimestampToken token,
    out Rfc3161RequestResponseStatus status,
    out int bytesConsumed,
    bool shouldThrow)
{
    status = Rfc3161RequestResponseStatus.Unknown;
    token = null;
    Rfc3161TimeStampResp resp;
    try
    {
        AsnReader reader = new AsnReader(source, AsnEncodingRules.DER);
        // Measure the full encoded value before decoding so bytesConsumed covers
        // exactly one TimeStampResp.
        int localBytesRead = reader.PeekEncodedValue().Length;
        Rfc3161TimeStampResp.Decode(reader, out resp);
        bytesConsumed = localBytesRead;
    }
    catch (CryptographicException) when (!shouldThrow)
    {
        bytesConsumed = 0;
        status = Rfc3161RequestResponseStatus.DoesNotParse;
        return false;
    }
    // bytesRead will be set past this point
    PkiStatus pkiStatus = (PkiStatus)resp.Status.Status;
    if (pkiStatus != PkiStatus.Granted &&
        pkiStatus != PkiStatus.GrantedWithMods)
    {
        // The TSA rejected the request.
        if (shouldThrow)
        {
            throw new CryptographicException(
                SR.Format(
                    SR.Cryptography_TimestampReq_Failure,
                    pkiStatus,
                    resp.Status.FailInfo.GetValueOrDefault()));
        }
        status = Rfc3161RequestResponseStatus.RequestFailed;
        return false;
    }
    if (!Rfc3161TimestampToken.TryDecode(resp.TimeStampToken.GetValueOrDefault(), out token, out _))
    {
        if (shouldThrow)
        {
            throw new CryptographicException(SR.Cryptography_TimestampReq_BadResponse);
        }
        // NOTE(review): bytesConsumed is reset to 0 here even though a complete
        // response was read above -- confirm this is intended for the DoesNotParse case.
        bytesConsumed = 0;
        status = Rfc3161RequestResponseStatus.DoesNotParse;
        return false;
    }
    status = ValidateResponse(token, shouldThrow);
    return status == Rfc3161RequestResponseStatus.Accepted;
}
/// <summary>Returns a copy of the DER-encoded request.</summary>
public byte[] Encode() => _encodedBytes.CloneByteArray();
/// <summary>
/// Attempts to copy the DER-encoded request into <paramref name="destination"/>.
/// Fails (writing 0 to <paramref name="bytesWritten"/>) when the destination is too small.
/// </summary>
public bool TryEncode(Span<byte> destination, out int bytesWritten)
{
    ReadOnlySpan<byte> encoded = _encodedBytes;

    // TryCopyTo fails exactly when destination.Length < encoded.Length.
    if (!encoded.TryCopyTo(destination))
    {
        bytesWritten = 0;
        return false;
    }

    bytesWritten = encoded.Length;
    return true;
}
/// <summary>
/// Creates a timestamp request for the signature value of an existing
/// <see cref="SignerInfo"/>, per RFC 3161 Appendix A.
/// </summary>
public static Rfc3161TimestampRequest CreateFromSignerInfo(
    SignerInfo signerInfo,
    HashAlgorithmName hashAlgorithm,
    Oid requestedPolicyId = null,
    ReadOnlyMemory<byte>? nonce = null,
    bool requestSignerCertificates = false,
    X509ExtensionCollection extensions = null)
{
    if (signerInfo == null)
        throw new ArgumentNullException(nameof(signerInfo));

    // https://tools.ietf.org/html/rfc3161, Appendix A.
    //
    // The value of messageImprint field within TimeStampToken shall be a
    // hash of the value of signature field within SignerInfo for the
    // signedData being time-stamped.
    byte[] signature = signerInfo.GetSignature();

    return CreateFromData(
        signature,
        hashAlgorithm,
        requestedPolicyId,
        nonce,
        requestSignerCertificates,
        extensions);
}
/// <summary>
/// Builds a timestamp request by hashing <paramref name="data"/> with
/// <paramref name="hashAlgorithm"/> and timestamping the digest.
/// </summary>
public static Rfc3161TimestampRequest CreateFromData(
    ReadOnlySpan<byte> data,
    HashAlgorithmName hashAlgorithm,
    Oid requestedPolicyId = null,
    ReadOnlyMemory<byte>? nonce = null,
    bool requestSignerCertificates = false,
    X509ExtensionCollection extensions = null)
{
    // Hash the caller's data, then defer to the digest-based factory.
    using (IncrementalHash hasher = IncrementalHash.CreateHash(hashAlgorithm))
    {
        hasher.AppendData(data);

        return CreateFromHash(
            hasher.GetHashAndReset(),
            hashAlgorithm,
            requestedPolicyId,
            nonce,
            requestSignerCertificates,
            extensions);
    }
}
/// <summary>
/// Builds a timestamp request for a pre-computed digest, translating the
/// framework algorithm name into its Object Identifier.
/// </summary>
public static Rfc3161TimestampRequest CreateFromHash(
    ReadOnlyMemory<byte> hash,
    HashAlgorithmName hashAlgorithm,
    Oid requestedPolicyId = null,
    ReadOnlyMemory<byte>? nonce = null,
    bool requestSignerCertificates = false,
    X509ExtensionCollection extensions = null)
{
    // Map the algorithm name to its dotted OID, then defer to the OID overload.
    string oidValue = PkcsHelpers.GetOidFromHashAlgorithm(hashAlgorithm);
    Oid hashAlgorithmId = new Oid(oidValue, oidValue);

    return CreateFromHash(
        hash,
        hashAlgorithmId,
        requestedPolicyId,
        nonce,
        requestSignerCertificates,
        extensions);
}
/// <summary>
/// Create a timestamp request using a pre-computed hash value.
/// </summary>
/// <param name="hash">The pre-computed hash value to be timestamped.</param>
/// <param name="hashAlgorithmId">
/// The Object Identifier (OID) for the hash algorithm which produced <paramref name="hash"/>.
/// </param>
/// <param name="requestedPolicyId">
/// The Object Identifier (OID) for a timestamp policy the Timestamp Authority (TSA) should use,
/// or <c>null</c> to express no preference.
/// </param>
/// <param name="nonce">
/// An optional nonce (number used once) to uniquely identify this request to pair it with the response.
/// The value is interpreted as an unsigned big-endian integer and may be normalized to the encoding format.
/// </param>
/// <param name="requestSignerCertificates">
/// Indicates whether the Timestamp Authority (TSA) must (<c>true</c>) or must not (<c>false</c>) include
/// the signing certificate in the issued timestamp token.
/// </param>
/// <param name="extensions">RFC3161 extensions to present with the request.</param>
/// <returns>
/// An <see cref="Rfc3161TimestampRequest"/> representing the chosen values.
/// </returns>
/// <seealso cref="Encode"/>
/// <seealso cref="TryEncode"/>
public static Rfc3161TimestampRequest CreateFromHash(
ReadOnlyMemory<byte> hash,
Oid hashAlgorithmId,
Oid requestedPolicyId = null,
ReadOnlyMemory<byte>? nonce = null,
bool requestSignerCertificates = false,
X509ExtensionCollection extensions = null)
{
// Normalize the nonce:
// The nonce is encoded as a DER INTEGER interpreted as unsigned big-endian,
// so it must have a minimal, non-negative two's-complement representation.
if (nonce.HasValue)
{
ReadOnlyMemory<byte> nonceMemory = nonce.Value;
ReadOnlySpan<byte> nonceSpan = nonceMemory.Span;
// If it's empty, or it would be negative, insert the requisite byte.
// (A leading 0x00 keeps a high first byte from reading as a negative INTEGER.)
if (nonceSpan.Length == 0 || nonceSpan[0] >= 0x80)
{
byte[] temp = new byte[nonceSpan.Length + 1];
nonceSpan.CopyTo(temp.AsSpan(1));
nonce = temp;
}
else
{
int slice = 0;
// Find all extra leading 0x00 values and trim them off.
while (slice < nonceSpan.Length && nonceSpan[slice] == 0)
{
slice++;
}
// Back up one if it was all zero, or we turned the number negative.
if (slice == nonceSpan.Length || nonceSpan[slice] >= 0x80)
{
slice--;
}
nonce = nonceMemory.Slice(slice);
}
}
// Build the TimeStampReq structure per RFC 3161 section 2.4.1.
var req = new Rfc3161TimeStampReq
{
Version = 1,
MessageImprint = new MessageImprint
{
HashAlgorithm =
{
Algorithm = hashAlgorithmId,
Parameters = AlgorithmIdentifierAsn.ExplicitDerNull,
},
HashedMessage = hash,
},
ReqPolicy = requestedPolicyId,
CertReq = requestSignerCertificates,
Nonce = nonce,
};
if (extensions != null)
{
req.Extensions =
extensions.OfType<X509Extension>().Select(e => new X509ExtensionAsn(e)).ToArray();
}
// The RFC implies DER (see TryParse), and DER is the most widely understood given that
// CER isn't specified.
const AsnEncodingRules ruleSet = AsnEncodingRules.DER;
using (AsnWriter writer = new AsnWriter(ruleSet))
{
req.Encode(writer);
byte[] encodedBytes = writer.Encode();
// Make sure everything normalizes
// (round-trip through Decode so _parsedData matches the encoded form exactly).
req = Rfc3161TimeStampReq.Decode(encodedBytes, ruleSet);
return new Rfc3161TimestampRequest
{
_encodedBytes = writer.Encode(),
_parsedData = req,
};
}
}
/// <summary>
/// Attempts to parse a DER-encoded TimeStampReq from <paramref name="encodedBytes"/>.
/// </summary>
/// <param name="encodedBytes">Buffer beginning with an encoded request.</param>
/// <param name="request">The parsed request on success; <c>null</c> on failure.</param>
/// <param name="bytesConsumed">Bytes used by the request; 0 on failure.</param>
/// <returns><c>true</c> when parsing succeeded.</returns>
public static bool TryDecode(
    ReadOnlyMemory<byte> encodedBytes,
    out Rfc3161TimestampRequest request,
    out int bytesConsumed)
{
    // RFC 3161 doesn't have a concise statement that TimeStampReq will
    // be DER encoded, but under the email protocol (3.1), file protocol (3.2),
    // socket protocol (3.3) and HTTP protocol (3.4) they all say DER for the
    // transmission.
    //
    // Since nothing says BER, assume DER only.
    try
    {
        AsnReader reader = new AsnReader(encodedBytes, AsnEncodingRules.DER);
        ReadOnlyMemory<byte> encodedRequest = reader.PeekEncodedValue();
        Rfc3161TimeStampReq.Decode(reader, out Rfc3161TimeStampReq parsed);

        request = new Rfc3161TimestampRequest
        {
            _parsedData = parsed,
            _encodedBytes = encodedRequest.ToArray(),
        };

        bytesConsumed = encodedRequest.Length;
        return true;
    }
    catch (CryptographicException)
    {
        request = null;
        bytesConsumed = 0;
        return false;
    }
}
// Applies the RFC 3161 acceptance criteria to a decoded token: the message
// imprint must match this request's hash, the token must be version 1, a
// requested nonce must be echoed back, and certificate presence must match
// CertReq. Returns the first failing status (or throws when shouldThrow).
private Rfc3161RequestResponseStatus ValidateResponse(
Rfc3161TimestampToken token,
bool shouldThrow)
{
Debug.Assert(token != null);
// This method validates the acceptance criteria sprinkled throughout the
// field descriptions in https://tools.ietf.org/html/rfc3161#section-2.4.1 and
// https://tools.ietf.org/html/rfc3161#section-2.4.2
// The token must be over the same hash (and algorithm) this request sent.
if (!token.VerifyHash(GetMessageHash().Span, HashAlgorithmId.Value))
{
if (shouldThrow)
{
throw new CryptographicException(SR.Cryptography_BadHashValue);
}
return Rfc3161RequestResponseStatus.HashMismatch;
}
Rfc3161TimestampTokenInfo tokenInfo = token.TokenInfo;
// We only understand V1 messaging and validation
if (tokenInfo.Version != 1)
{
if (shouldThrow)
{
throw new CryptographicException(SR.Cryptography_TimestampReq_BadResponse);
}
return Rfc3161RequestResponseStatus.VersionTooNew;
}
// reqPolicy is what the policy SHOULD be, so we can't reject it here.
ReadOnlyMemory<byte>? requestNonce = GetNonce();
ReadOnlyMemory<byte>? responseNonce = tokenInfo.GetNonce();
// The RFC says that if a nonce was in the request it MUST be present in
// the response and it MUST be equal.
//
// It does not say that if no nonce was requested that the response MUST NOT include one, so
// don't check anything if no nonce was requested.
if (requestNonce != null)
{
if (responseNonce == null ||
!requestNonce.Value.Span.SequenceEqual(responseNonce.Value.Span))
{
if (shouldThrow)
{
throw new CryptographicException(SR.Cryptography_TimestampReq_BadNonce);
}
return Rfc3161RequestResponseStatus.NonceMismatch;
}
}
SignedCms tokenCms = token.AsSignedCms();
if (RequestSignerCertificate)
{
// If the certificate was requested it
// A) MUST be present in token.AsSignedCms().Certificates
// B) the ESSCertID(2) identifier MUST be correct.
//
// Other certificates are permitted, and will not be validated.
if (tokenCms.SignerInfos[0].Certificate == null)
{
if (shouldThrow)
{
throw new CryptographicException(SR.Cryptography_TimestampReq_NoCertFound);
}
return Rfc3161RequestResponseStatus.RequestedCertificatesMissing;
}
}
else
{
// If no certificate was requested then the CMS Certificates collection
// MUST be empty.
if (tokenCms.Certificates.Count != 0)
{
if (shouldThrow)
{
throw new CryptographicException(SR.Cryptography_TimestampReq_UnexpectedCertFound);
}
return Rfc3161RequestResponseStatus.UnexpectedCertificates;
}
}
return Rfc3161RequestResponseStatus.Accepted;
}
}
}
| |
/* ====================================================================
Copyright (C) 2004-2008 fyiReporting Software, LLC
Copyright (C) 2011 Peter Gill <peter@majorsilence.com>
This file is part of the fyiReporting RDL project.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, email info@fyireporting.com or visit
the website www.fyiReporting.com.
*/
using System;
using System.Xml;
using System.Data;
using System.Collections;
namespace fyiReporting.Data
{
/// <summary>
/// WebServiceCommand: an <see cref="IDbCommand"/> implementation describing a
/// web service invocation. CommandText is a ';'-separated list of key=value
/// pairs: Url (or File), Service, Operation, and optionally Columns (a
/// ','-separated list). Url, Service, Operation and RepeatField may each be
/// overridden at runtime by a parameter named "name" or "@name", or by a
/// parameter whose name equals the configured value.
/// </summary>
public class WebServiceCommand : IDbCommand
{
    WebServiceConnection _wsc;      // connection we're running under
    string _cmd;                    // command to execute
    int _Timeout;                   // timeout limit on invoking webservice (only applies to invoking service)
    // parsed constituents of the command
    string _Url;                    // url of the wsdl file
    string _Service;                // service name
    string _Operation;              // operation name
    string _RepeatField;            // Specifies the name of the array that should be repeated (only 1 can be)
    ArrayList _Columns;             // Columns specified for the request
    DataParameterCollection _Parameters = new DataParameterCollection();

    public WebServiceCommand(WebServiceConnection conn)
    {
        _wsc = conn;
    }

    /// <summary>
    /// Finds the runtime override parameter for a named setting: first a
    /// parameter named "name", then "@name", then one whose name is the
    /// setting's currently configured value. Returns null when no override
    /// parameter exists.
    /// </summary>
    IDbDataParameter GetOverrideParameter(string name, string configuredValue)
    {
        IDbDataParameter dp = _Parameters[name] as IDbDataParameter;
        if (dp == null)
            dp = _Parameters["@" + name] as IDbDataParameter;
        if (dp == null)
            dp = _Parameters[configuredValue] as IDbDataParameter;
        return dp;
    }

    /// <summary>Url of the WSDL file; may be overridden by a parameter.</summary>
    internal string Url
    {
        get
        {
            IDbDataParameter dp = GetOverrideParameter("Url", _Url);
            // don't pass null; pass existing value
            return (dp != null && dp.Value != null) ? dp.Value.ToString() : _Url;
        }
        set { _Url = value; }
    }

    /// <summary>Operation name; may be overridden by a parameter.</summary>
    internal string Operation
    {
        get
        {
            IDbDataParameter dp = GetOverrideParameter("Operation", _Operation);
            // don't pass null; pass existing value
            return (dp != null && dp.Value != null) ? dp.Value.ToString() : _Operation;
        }
        set { _Operation = value; }
    }

    /// <summary>
    /// Name of the (single) array field to repeat; may be overridden by a parameter.
    /// </summary>
    internal string RepeatField
    {
        get
        {
            IDbDataParameter dp = GetOverrideParameter("RepeatField", _RepeatField);
            // FIX: previously this returned null whenever an override parameter
            // supplied a value; now it returns the parameter's value, matching
            // the Url/Operation/Service pattern ("don't pass null").
            return (dp != null && dp.Value != null) ? dp.Value.ToString() : _RepeatField;
        }
        set { _RepeatField = value; }
    }

    /// <summary>Service name; may be overridden by a parameter.</summary>
    internal string Service
    {
        get
        {
            IDbDataParameter dp = GetOverrideParameter("Service", _Service);
            // don't pass null; pass existing value
            return (dp != null && dp.Value != null) ? dp.Value.ToString() : _Service;
        }
        set { _Service = value; }
    }

    /// <summary>Columns parsed from CommandText; null when none were given.</summary>
    internal ArrayList Columns
    {
        get { return _Columns; }
        set { _Columns = value; }
    }

    #region IDbCommand Members
    public void Cancel()
    {
        throw new NotImplementedException("Cancel not implemented");
    }

    public void Prepare()
    {
        return; // Prepare is a noop
    }

    public System.Data.CommandType CommandType
    {
        get
        {
            throw new NotImplementedException("CommandType not implemented");
        }
        set
        {
            throw new NotImplementedException("CommandType not implemented");
        }
    }

    /// <summary>
    /// Creates a reader that invokes the web service. Only SingleResult and
    /// SchemaOnly behaviors are supported.
    /// </summary>
    public IDataReader ExecuteReader(System.Data.CommandBehavior behavior)
    {
        if (!(behavior == CommandBehavior.SingleResult ||
            behavior == CommandBehavior.SchemaOnly))
            throw new ArgumentException("ExecuteReader supports SingleResult and SchemaOnly only.");
        return new WebServiceDataReader(behavior, _wsc, this);
    }

    IDataReader System.Data.IDbCommand.ExecuteReader()
    {
        return ExecuteReader(System.Data.CommandBehavior.SingleResult);
    }

    public object ExecuteScalar()
    {
        throw new NotImplementedException("ExecuteScalar not implemented");
    }

    public int ExecuteNonQuery()
    {
        throw new NotImplementedException("ExecuteNonQuery not implemented");
    }

    /// <summary>Timeout (applies only to invoking the web service).</summary>
    public int CommandTimeout
    {
        get { return _Timeout; }
        set { _Timeout = value; }
    }

    public IDbDataParameter CreateParameter()
    {
        return new WebServiceDataParameter();
    }

    public IDbConnection Connection
    {
        get
        {
            return this._wsc;
        }
        set
        {
            throw new NotImplementedException("Setting Connection not implemented");
        }
    }

    public System.Data.UpdateRowSource UpdatedRowSource
    {
        get
        {
            throw new NotImplementedException("UpdatedRowSource not implemented");
        }
        set
        {
            throw new NotImplementedException("UpdatedRowSource not implemented");
        }
    }

    /// <summary>
    /// Gets or sets the command. Setting parses "key=value" pairs separated by
    /// ';' and requires Url (or File), Service, and Operation to be present.
    /// </summary>
    /// <exception cref="ArgumentException">
    /// Thrown for an unknown key or when a required key is missing.
    /// </exception>
    public string CommandText
    {
        get
        {
            return this._cmd;
        }
        set
        {
            // Parse the command string for keyword value pairs separated by ';'
            string[] args = value.Split(';');
            string url = null;
            string operation = null;
            string service = null;
            string[] columns = null;
            foreach (string arg in args)
            {
                string[] param = arg.Trim().Split('=');
                if (param.Length != 2)
                    continue;   // silently ignore malformed pairs (pre-existing behavior)
                // Invariant lowering so key matching is stable in any culture
                // (e.g. Turkish 'I' would otherwise break "OPERATION").
                string key = param[0].Trim().ToLowerInvariant();
                string val = param[1];
                switch (key)
                {
                    case "url":
                    case "file":
                        url = val;
                        break;
                    case "service":
                        service = val;
                        break;
                    case "operation":
                        operation = val;
                        break;
                    case "columns":
                        // column list is separated by ','
                        columns = val.Trim().Split(',');
                        break;
                    default:
                        throw new ArgumentException(string.Format("{0} is an unknown parameter key", param[0]));
                }
            }
            // User must specify the Url, Service and Operation
            if (url == null || operation == null || service == null)
                throw new ArgumentException("CommandText requires 'Url', 'Service', and 'Operation' parameters.");
            _cmd = value;
            _Url = url;
            _Operation = operation;
            _Service = service;
            if (columns != null)
                _Columns = new ArrayList(columns);
        }
    }

    public IDataParameterCollection Parameters
    {
        get
        {
            return _Parameters;
        }
    }

    public IDbTransaction Transaction
    {
        get
        {
            throw new NotImplementedException("Transaction not implemented");
        }
        set
        {
            throw new NotImplementedException("Transaction not implemented");
        }
    }
    #endregion

    #region IDisposable Members
    public void Dispose()
    {
        // nothing to dispose of
    }
    #endregion
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.Providers.Streams.AzureQueue;
using Orleans.Providers.Streams.Common;
using Orleans.Runtime;
using Orleans.Serialization;
using Orleans.Streams;
using TestExtensions;
using Xunit;
using Xunit.Abstractions;
using Orleans.Internal;
namespace Tester.AzureUtils.Streaming
{
// Integration tests for the Orleans Azure Queue stream adapter: sends batches
// of mixed int/string events through the adapter and verifies delivery, cache
// population, and cursor iteration. Requires Azure storage (skippable).
[Collection(TestEnvironmentFixture.DefaultCollection)]
[TestCategory("Azure"), TestCategory("Streaming")]
public class AzureQueueAdapterTests : AzureStorageBasicTests, IAsyncLifetime
{
private readonly ITestOutputHelper output;
private readonly TestEnvironmentFixture fixture;
private const int NumBatches = 20;
private const int NumMessagesPerBatch = 20;
public static readonly string AZURE_QUEUE_STREAM_PROVIDER_NAME = "AQAdapterTests";
private readonly ILoggerFactory loggerFactory;
private static readonly SafeRandom Random = new SafeRandom();
// Unique queue names per run so concurrent test executions don't collide.
private static List<string> azureQueueNames = AzureQueueUtilities.GenerateQueueNames($"AzureQueueAdapterTests-{Guid.NewGuid()}", 8);
public AzureQueueAdapterTests(ITestOutputHelper output, TestEnvironmentFixture fixture)
{
this.output = output;
this.fixture = fixture;
this.loggerFactory = this.fixture.Services.GetService<ILoggerFactory>();
BufferPool.InitGlobalBufferPool(new SiloMessagingOptions());
}
public Task InitializeAsync() => Task.CompletedTask;
// Deletes the Azure queues used by the test, when a connection string is configured.
public async Task DisposeAsync()
{
if (!string.IsNullOrWhiteSpace(TestDefaultConfiguration.DataConnectionString))
{
await AzureQueueStreamProviderUtils.DeleteAllUsedAzureQueues(this.loggerFactory, azureQueueNames, new AzureQueueOptions().ConfigureTestDefaults());
}
}
[SkippableFact, TestCategory("Functional"), TestCategory("Halo")]
public async Task SendAndReceiveFromAzureQueue()
{
var options = new AzureQueueOptions
{
MessageVisibilityTimeout = TimeSpan.FromSeconds(30),
QueueNames = azureQueueNames
};
options.ConfigureTestDefaults();
var serializationManager = this.fixture.Services.GetService<SerializationManager>();
var clusterOptions = this.fixture.Services.GetRequiredService<IOptions<ClusterOptions>>();
var queueCacheOptions = new SimpleQueueCacheOptions();
var queueDataAdapter = new AzureQueueDataAdapterV2(serializationManager);
var adapterFactory = new AzureQueueAdapterFactory(
AZURE_QUEUE_STREAM_PROVIDER_NAME,
options,
queueCacheOptions,
queueDataAdapter,
this.fixture.Services,
clusterOptions,
serializationManager,
loggerFactory);
adapterFactory.Init();
await SendAndReceiveFromQueueAdapter(adapterFactory);
}
// Drives the adapter end-to-end: spins up one reader task per queue, sends
// NumBatches batches across two streams, then validates counts and cache cursors.
private async Task SendAndReceiveFromQueueAdapter(IQueueAdapterFactory adapterFactory)
{
IQueueAdapter adapter = await adapterFactory.CreateAdapter();
IQueueAdapterCache cache = adapterFactory.GetQueueAdapterCache();
// Create receiver per queue
IStreamQueueMapper mapper = adapterFactory.GetStreamQueueMapper();
Dictionary<QueueId, IQueueAdapterReceiver> receivers = mapper.GetAllQueues().ToDictionary(queueId => queueId, adapter.CreateReceiver);
Dictionary<QueueId, IQueueCache> caches = mapper.GetAllQueues().ToDictionary(queueId => queueId, cache.CreateQueueCache);
await Task.WhenAll(receivers.Values.Select(receiver => receiver.Initialize(TimeSpan.FromSeconds(5))));
// test using 2 streams
Guid streamId1 = Guid.NewGuid();
Guid streamId2 = Guid.NewGuid();
int receivedBatches = 0;
var streamsPerQueue = new ConcurrentDictionary<QueueId, HashSet<StreamId>>();
// reader threads (at most 2 active queues because only two streams)
// NOTE(review): receivedBatches is read in the loop condition without a
// volatile/Interlocked read while writers use Interlocked.Add — assumed
// adequate for this test's termination; confirm if flakiness appears.
var work = new List<Task>();
foreach( KeyValuePair<QueueId, IQueueAdapterReceiver> receiverKvp in receivers)
{
QueueId queueId = receiverKvp.Key;
var receiver = receiverKvp.Value;
var qCache = caches[queueId];
Task task = Task.Factory.StartNew(() =>
{
while (receivedBatches < NumBatches)
{
var messages = receiver.GetQueueMessagesAsync(QueueAdapterConstants.UNLIMITED_GET_QUEUE_MSG).Result.ToArray();
if (!messages.Any())
{
continue;
}
foreach (IBatchContainer message in messages)
{
// Record which streams were observed on this queue.
streamsPerQueue.AddOrUpdate(queueId,
id => new HashSet<StreamId> { message.StreamId },
(id, set) =>
{
set.Add(message.StreamId);
return set;
});
this.output.WriteLine("Queue {0} received message on stream {1}", queueId,
message.StreamId);
Assert.Equal(NumMessagesPerBatch / 2, message.GetEvents<int>().Count()); // "Half the events were ints"
Assert.Equal(NumMessagesPerBatch / 2, message.GetEvents<string>().Count()); // "Half the events were strings"
}
Interlocked.Add(ref receivedBatches, messages.Length);
qCache.AddToCache(messages);
}
});
work.Add(task);
}
// send events
List<object> events = CreateEvents(NumMessagesPerBatch);
// Alternate batches between the two streams.
work.Add(Task.Factory.StartNew(() => Enumerable.Range(0, NumBatches)
.Select(i => i % 2 == 0 ? streamId1 : streamId2)
.ToList()
.ForEach(streamId =>
adapter.QueueMessageBatchAsync(StreamId.Create(streamId.ToString(), streamId),
events.Take(NumMessagesPerBatch).ToArray(), null, RequestContextExtensions.Export(this.fixture.SerializationManager)).Wait())));
await Task.WhenAll(work);
// Make sure we got back everything we sent
Assert.Equal(NumBatches, receivedBatches);
// check to see if all the events are in the cache and we can enumerate through them
StreamSequenceToken firstInCache = new EventSequenceTokenV2(0);
foreach (KeyValuePair<QueueId, HashSet<StreamId>> kvp in streamsPerQueue)
{
var receiver = receivers[kvp.Key];
var qCache = caches[kvp.Key];
foreach (StreamId streamGuid in kvp.Value)
{
// read all messages in cache for stream
IQueueCacheCursor cursor = qCache.GetCacheCursor(streamGuid, firstInCache);
int messageCount = 0;
StreamSequenceToken tenthInCache = null;
StreamSequenceToken lastToken = firstInCache;
while (cursor.MoveNext())
{
Exception ex;
messageCount++;
IBatchContainer batch = cursor.GetCurrent(out ex);
this.output.WriteLine("Token: {0}", batch.SequenceToken);
// Tokens must be non-decreasing as we walk the cache.
Assert.True(batch.SequenceToken.CompareTo(lastToken) >= 0, $"order check for event {messageCount}");
lastToken = batch.SequenceToken;
if (messageCount == 10)
{
tenthInCache = batch.SequenceToken;
}
}
this.output.WriteLine("On Queue {0} we received a total of {1} message on stream {2}", kvp.Key, messageCount, streamGuid);
Assert.Equal(NumBatches / 2, messageCount);
Assert.NotNull(tenthInCache);
// read all messages from the 10th
cursor = qCache.GetCacheCursor(streamGuid, tenthInCache);
messageCount = 0;
while (cursor.MoveNext())
{
messageCount++;
}
this.output.WriteLine("On Queue {0} we received a total of {1} message on stream {2}", kvp.Key, messageCount, streamGuid);
const int expected = NumBatches / 2 - 10 + 1; // all except the first 10, including the 10th (10 + 1)
Assert.Equal(expected, messageCount);
}
}
}
// Builds 'count' events alternating between ints and their string forms.
private List<object> CreateEvents(int count)
{
return Enumerable.Range(0, count).Select(i =>
{
if (i % 2 == 0)
{
return Random.Next(int.MaxValue) as object;
}
return Random.Next(int.MaxValue).ToString(CultureInfo.InvariantCulture);
}).ToList();
}
// Generates a time-based cluster id for test deployments.
internal static string MakeClusterId()
{
const string DeploymentIdFormat = "cluster-{0}";
string now = DateTime.UtcNow.ToString("yyyy-MM-dd-hh-mm-ss-ffff");
return string.Format(DeploymentIdFormat, now);
}
}
}
| |
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using OmniSharp.Models;
using OmniSharp.Options;
using OmniSharp.Roslyn.CSharp.Services.Refactoring;
using OmniSharp.Services;
using OmniSharp.Tests;
using TestUtility.Annotate;
using TestUtility.Fake;
using Xunit;
namespace OmniSharp.Roslyn.CSharp.Tests
{
// Tests for the OmniSharp FixUsings service: verifies that missing using
// directives are added (types, framework methods, extension methods, LINQ),
// duplicates/unused usings are removed, and ambiguous types are reported
// rather than resolved. Each test compares the fixed buffer to an expected
// buffer, ignoring newline style.
public class FixUsingsFacts
{
// All test buffers are registered in the workspace under this name.
private readonly string fileName = "test.cs";
private readonly LoggerFactory _loggerFactory;
private readonly ILogger<FixUsingsFacts> _logger;
private readonly IOmnisharpAssemblyLoader _loader;
public FixUsingsFacts()
{
_loggerFactory = new LoggerFactory();
_loggerFactory.AddConsole();
_logger = _loggerFactory.CreateLogger<FixUsingsFacts>();
_loader = new AnnotateAssemblyLoader(_logger);
}
[Fact]
public async Task FixUsings_AddsUsingSingle()
{
const string fileContents = @"namespace nsA
{
public class classX{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new classX();
}
}
}";
string expectedFileContents = @"using nsA;
namespace nsA
{
public class classX{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new classX();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
[Fact]
public async Task FixUsings_AddsUsingSingleForFrameworkMethod()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()
{
Guid.NewGuid();
}
}
}";
string expectedFileContents = @"using System;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Guid.NewGuid();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
[Fact]
public async Task FixUsings_AddsUsingSingleForFrameworkClass()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()()
{
var s = new StringBuilder();
}
}
}";
string expectedFileContents = @"using System.Text;
namespace OmniSharp
{
public class class1
{
public void method1()()
{
var s = new StringBuilder();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
[Fact]
public async Task FixUsings_AddsUsingMultiple()
{
const string fileContents = @"namespace nsA
{
public class classX{}
}
namespace nsB
{
public class classY{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new classX();
var c2 = new classY();
}
}
}";
string expectedFileContents = @"using nsA;
using nsB;
namespace nsA
{
public class classX{}
}
namespace nsB
{
public class classY{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new classX();
var c2 = new classY();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
[Fact]
public async Task FixUsings_AddsUsingMultipleForFramework()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()
{
Guid.NewGuid();
var sb = new StringBuilder();
}
}
}";
string expectedFileContents = @"using System;
using System.Text;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Guid.NewGuid();
var sb = new StringBuilder();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// The '$' marker in the fixture marks the expected location of the
// ambiguity diagnostic.
[Fact]
public async Task FixUsings_ReturnsAmbiguousResult()
{
const string fileContents = @"
namespace nsA
{
public class classX{}
}
namespace nsB
{
public class classX{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new $classX();
}
}
}";
// NOTE(review): RemovePercentMarker is applied here although the fixture
// uses a '$' marker (removed separately below) — presumably a no-op;
// confirm against TestHelpers' marker conventions.
var classLineColumn = TestHelpers.GetLineAndColumnFromDollar(TestHelpers.RemovePercentMarker(fileContents));
var fileContentNoDollarMarker = TestHelpers.RemoveDollarMarker(fileContents);
var expectedUnresolved = new List<QuickFix>();
expectedUnresolved.Add(new QuickFix()
{
Line = classLineColumn.Line,
Column = classLineColumn.Column,
FileName = fileName,
Text = "`classX` is ambiguous"
});
await AssertUnresolvedReferences(fileContentNoDollarMarker, expectedUnresolved);
}
[Fact]
public async Task FixUsings_ReturnsNoUsingsForAmbiguousResult()
{
const string fileContents = @"namespace nsA {
public class classX{}
}
namespace nsB {
public class classX{}
}
namespace OmniSharp {
public class class1
{
public method1()
{
var c1 = new classX();
}
}
}";
await AssertBufferContents(fileContents, fileContents);
}
[Fact]
public async Task FixUsings_AddsUsingForExtension()
{
const string fileContents = @"namespace nsA {
public static class StringExtension {
public static void Whatever(this string astring) {}
}
}
namespace OmniSharp {
public class class1
{
public method1()
{
""string"".Whatever();
}
}
}";
string expectedFileContents = @"using nsA;
namespace nsA {
public static class StringExtension {
public static void Whatever(this string astring) {}
}
}
namespace OmniSharp {
public class class1
{
public method1()
{
""string"".Whatever();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
[Fact(Skip = "Need to find a way to load System.Linq in to test host.")]
public async Task FixUsings_AddsUsingLinqMethodSyntax()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()
{
List<string> first = new List<string>();
var testing = first.Where(s => s == ""abc"");
}
}
}";
string expectedFileContents = @"using System.Collections.Generic;
using System.Linq;
namespace OmniSharp
{
public class class1
{
public void method1()
{
List<string> first = new List<string>();
var testing = first.Where(s => s == ""abc"");
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
[Fact]
public async Task FixUsings_AddsUsingLinqQuerySyntax()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()
{
int[] numbers = { 5, 4, 1, 3, 9, 8, 6, 7, 2, 0 };
var lowNums =
from n in numbers
where n < 5
select n;
}
}
}";
string expectedFileContents = @"using System.Linq;
namespace OmniSharp
{
public class class1
{
public void method1()
{
int[] numbers = { 5, 4, 1, 3, 9, 8, 6, 7, 2, 0 };
var lowNums =
from n in numbers
where n < 5
select n;
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
[Fact]
public async Task FixUsings_RemoveDuplicateUsing()
{
const string fileContents = @"using System;
using System;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Guid.NewGuid();
}
}
}";
const string expectedFileContents = @"using System;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Guid.NewGuid();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
[Fact]
public async Task FixUsings_RemoveUnusedUsing()
{
const string fileContents = @"using System;
using System.Linq;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Guid.NewGuid();
}
}
}";
const string expectedFileContents = @"using System;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Guid.NewGuid();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// Runs FixUsings on the buffer and asserts the result equals the expected
// buffer, ignoring CRLF/LF differences.
private async Task AssertBufferContents(string fileContents, string expectedFileContents)
{
var response = await RunFixUsings(fileContents);
Assert.Equal(FlattenNewLines(expectedFileContents), FlattenNewLines(response.Buffer));
}
// Normalizes CRLF to LF so comparisons are newline-style independent.
private string FlattenNewLines(string input)
{
return input.Replace("\r\n", "\n");
}
// Runs FixUsings and asserts the reported ambiguous results match the
// expected QuickFix list element-by-element.
private async Task AssertUnresolvedReferences(string fileContents, List<QuickFix> expectedUnresolved)
{
var response = await RunFixUsings(fileContents);
var qfList = response.AmbiguousResults.ToList();
Assert.Equal(qfList.Count(), expectedUnresolved.Count());
var i = 0;
foreach (var expectedQuickFix in expectedUnresolved)
{
Assert.Equal(qfList[i].Line, expectedQuickFix.Line);
Assert.Equal(qfList[i].Column, expectedQuickFix.Column);
Assert.Equal(qfList[i].FileName, expectedQuickFix.FileName);
Assert.Equal(qfList[i].Text, expectedQuickFix.Text);
i++;
}
}
// Creates a workspace containing 'fileContents' as test.cs and invokes the
// FixUsingService on it, returning the service's response.
private async Task<FixUsingsResponse> RunFixUsings(string fileContents)
{
var host = TestHelpers.CreatePluginHost(new[] { typeof(FixUsingService).GetTypeInfo().Assembly });
var workspace = await TestHelpers.CreateSimpleWorkspace(host, fileContents, fileName);
var fakeOptions = new FakeOmniSharpOptions();
fakeOptions.Options = new OmniSharpOptions(new FormattingOptions() { NewLine = "\n" });
var providers = host.GetExports<ICodeActionProvider>();
var controller = new FixUsingService(workspace, new FakeLoggerFactory(), _loader, providers);
var request = new FixUsingsRequest
{
FileName = fileName,
Buffer = fileContents
};
return await controller.Handle(request);
}
}
}
| |
//
// SCSharp.Mpq.Fnt
//
// Authors:
// Chris Toshok (toshok@gmail.com)
//
// Copyright 2006-2010 Chris Toshok
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.IO;
using System.Collections.Generic;
using System.Text;
namespace SCSharp
{
// A single decoded font glyph: its pixel data (a [row, column] array of
// palette entries) plus the offsets needed to position it when rendering.
public class Glyph
{
    byte[,] pixels;
    int glyphWidth;
    int glyphHeight;
    int offsetX;
    int offsetY;

    internal Glyph (int width, int height, int xoffset, int yoffset,
                    byte[,] bitmap)
    {
        pixels = bitmap;
        glyphWidth = width;
        glyphHeight = height;
        offsetX = xoffset;
        offsetY = yoffset;
    }

    public byte[,] Bitmap {
        get { return pixels; }
    }

    public int Width {
        get { return glyphWidth; }
    }

    public int Height {
        get { return glyphHeight; }
    }

    public int XOffset {
        get { return offsetX; }
    }

    public int YOffset {
        get { return offsetY; }
    }
}
/// <summary>
/// Loader for a StarCraft .fnt bitmap font resource read from an MPQ
/// archive.  Reads the header and offset table up front, then decodes
/// RLE-compressed glyph bitmaps on demand and caches them per character.
/// </summary>
public class Fnt : MpqResource {
// Kept open after ReadFromStream: GetGlyph seeks back into it to
// decode glyphs lazily.
Stream stream;
public Fnt ()
{
}
/// <summary>
/// Reads the font header and the glyph offset table from the stream.
/// Glyph bitmaps are not decoded here; they are decoded lazily by
/// GetGlyph.  Must be called before using the indexer.
/// </summary>
public void ReadFromStream (Stream stream)
{
this.stream = stream;
ReadFontHeader ();
ReadGlyphOffsets ();
glyphs = new Dictionary<int,Glyph> ();
}
// Header layout: 4-byte name, low/high character index (1 byte each),
// max glyph width/height (1 byte each), then a 4-byte field this
// reader ignores.
void ReadFontHeader ()
{
/*uint name =*/ Util.ReadDWord (stream);
lowIndex = Util.ReadByte (stream);
highIndex = Util.ReadByte (stream);
// Defensive swap in case the file stores the bounds reversed.
if (lowIndex > highIndex) {
byte tmp = lowIndex;
lowIndex = highIndex;
highIndex = tmp;
}
maxWidth = Util.ReadByte (stream);
maxHeight = Util.ReadByte (stream);
/*uint unknown =*/ Util.ReadDWord (stream);
}
// Absolute stream position of each character's glyph record.
Dictionary<uint,uint> offsets;
void ReadGlyphOffsets ()
{
offsets = new Dictionary<uint,uint> ();
// NOTE(review): this loop is exclusive of highIndex, but the indexer
// below accepts index == highIndex; asking for that last glyph would
// throw KeyNotFoundException inside GetGlyph.  One of the two bounds
// looks off by one -- confirm against the FNT format before changing.
for (uint c = lowIndex; c < highIndex; c++)
offsets.Add (c, Util.ReadDWord (stream));
}
// Returns the cached glyph for character code c, decoding it on first
// use.  Glyph record: 4 header bytes (width, height, x offset,
// y offset) followed by an RLE stream where each byte's high 5 bits
// are a run length of blank (0) pixels and its low 3 bits are a single
// palette entry.  The bitmap is filled right-to-left, bottom-to-top.
Glyph GetGlyph (int c)
{
if (glyphs.ContainsKey (c))
return glyphs[c];
stream.Position = offsets[(uint)c];
byte letterWidth = Util.ReadByte (stream);
byte letterHeight = Util.ReadByte (stream);
byte letterXOffset = Util.ReadByte (stream);
byte letterYOffset = Util.ReadByte (stream);
byte[,] bitmap = new byte[letterHeight, letterWidth];
int x, y;
x = letterWidth - 1;
y = letterHeight - 1;
while (true) {
byte b = Util.ReadByte (stream);
int count = (b & 0xF8) >> 3;
byte cmap_entry = (byte)(b & 0x07);
// Emit the run of `count` blank pixels, wrapping to the previous
// row at the left edge; decoding ends when the bitmap is full.
for (int i = 0; i < count; i ++) {
bitmap[y,x] = 0;
x--;
if (x < 0) {
x = letterWidth - 1;
y--;
if (y < 0)
goto done;
}
}
// ...then one pixel with the palette entry from the low 3 bits.
bitmap[y,x] = (byte)cmap_entry;
x--;
if (x < 0) {
x = letterWidth - 1;
y--;
if (y < 0)
goto done;
}
}
done:
glyphs.Add (c,
new Glyph (letterWidth,
letterHeight,
letterXOffset,
letterYOffset,
bitmap));
return glyphs[c];
}
/// <summary>
/// Glyph lookup by character code; valid codes are
/// lowIndex..highIndex as read from the font header.
/// </summary>
public Glyph this [int index] {
get {
if (index < lowIndex || index > highIndex)
throw new ArgumentOutOfRangeException ("index",
String.Format ("value of {0} out of range of {1}-{2}", index, lowIndex, highIndex));
return GetGlyph (index);
}
}
// Width in pixels used when rendering a space.
// NOTE(review): 109 is ASCII 'm', but the code indexes 109-1 = 108
// ('l'); either the inline comment or the index looks wrong -- confirm
// against the rendering code before changing.
public int SpaceSize {
get { return this[109-1].Width; /* 109 = ascii for 'm' */ }
}
// Line height in pixels: the font's maximum glyph height.
public int LineSize {
get { return maxHeight; }
}
// Widest glyph in the font, per the header.
public int MaxWidth {
get { return maxWidth; }
}
// Tallest glyph in the font, per the header.
public int MaxHeight {
get { return maxHeight; }
}
// Lazily-populated cache of decoded glyphs keyed by character code;
// created in ReadFromStream.
Dictionary<int,Glyph> glyphs;
byte highIndex;
byte lowIndex;
byte maxWidth;
byte maxHeight;
// Debug helper: prints every glyph in the font as ASCII art.
// (Same exclusive-of-highIndex bound as ReadGlyphOffsets.)
public void DumpGlyphs()
{
for (int c = lowIndex; c < highIndex; c++) {
Console.WriteLine ("Letter: {0}", c);
DumpGlyph (c);
}
}
// Debug helper: prints one glyph, '#' for set pixels and ' ' for
// blank.  Both loops run high-to-low, mirroring the decode order.
public void DumpGlyph (int c)
{
Glyph g = GetGlyph (c);
byte[,] bitmap = g.Bitmap;
for (int y = g.Height - 1; y >= 0 ; y --) {
for (int x = g.Width - 1; x >= 0; x --) {
if (bitmap[y,x] == 0)
Console.Write (" ");
else
Console.Write ("#");
}
Console.WriteLine ();
}
Console.WriteLine ();
}
}
}
| |
using System;
using Microsoft.WindowsAzure.Storage;
using Microsoft.Extensions.Logging;
using Microsoft.WindowsAzure.Storage.Auth;
using Microsoft.WindowsAzure.Storage.Queue;
using Newtonsoft.Json;
using AzureQueueApp.Models;
using GenFu;
using System.Threading.Tasks;
using System.Threading;
using System.Collections.Generic;
using System.Linq;
namespace AzureQueueApp
{
/*
Common interface for this application
*/
/// <summary>
/// Common interface for this application's queue operations.
/// </summary>
public interface IApplication
{
/// <summary>Completes async setup (creates the queue if missing) and returns the ready instance.</summary>
Task<IApplication> InitializeAsync();
// removes/processes a batch of up to 20 tickets at once
Task BatchRemove();
// changes the content of a message in the queue
Task ChangeMessage();
// clears all messages from the queue
Task ClearMessages();
// gets the (approximate) length of the queue
Task GetLength();
// inserts a single message into the queue
Task InsertMessage();
// peeks a single message from the queue without removing it
Task PeekMessage();
// removes a message from the queue (get + delete)
Task RemoveMessage();
}
/// <summary>
/// Azure Storage Queue demo: inserts, peeks, updates, and removes
/// JSON-serialized <c>TicketRequest</c> messages on a single queue.
/// Create instances via <see cref="CreateAsync"/> so the queue is
/// guaranteed to exist before any operation runs.
/// </summary>
public class Application : IApplication
{
    /// <summary>
    /// Factory method: constructs the application and completes its
    /// asynchronous queue initialization in one awaitable step.
    /// </summary>
    public static Task<IApplication> CreateAsync(AzureStorageOptions options)
    {
        var app = new Application(options);
        return app.InitializeAsync();
    }

    // Private ctor: forces creation through CreateAsync so that
    // InitializeAsync always runs before the queue is used.
    Application(AzureStorageOptions options)
    {
        this.options = options;
        storageCredentials = new StorageCredentials(options.AccountName, options.AccountKey);
        Logger.Get().LogInformation("Azure configuration");
        // NOTE(review): this logs the storage account key in clear text;
        // consider masking it before running outside a demo environment.
        Logger.Get().LogInformation($"account: {options.AccountName} key: {options.AccountKey} queue: {options.QueueName}");
    }

    /// <summary>
    /// Connects to the storage account and creates the configured queue
    /// if it does not already exist.
    /// </summary>
    public async Task<IApplication> InitializeAsync()
    {
        bool useHttps = true;
        // Retrieve storage account from credentials
        storageAccount = new CloudStorageAccount(storageCredentials, useHttps);
        // Create the queue client
        queueClient = storageAccount.CreateCloudQueueClient();
        // Retrieve a reference to a queue
        queue = queueClient.GetQueueReference(options.QueueName);
        // Create the queue if it doesn't already exist
        bool created = await queue.CreateIfNotExistsAsync();
        Logger.Get().LogInformation($"Queue {queue.Name} CreateIfNotExists={created}");
        return this;
    }

    /*
    https://azure.microsoft.com/en-us/documentation/articles/storage-dotnet-how-to-use-queues/#leverage-additional-options-for-de-queuing-messages
    There are two ways you can customize message retrieval from a queue. First, you can get a batch of messages (up to 32). Second, you can set a longer or shorter invisibility timeout, allowing your code more or less time to fully process each message. The following code example uses the GetMessages method to get 20 messages in one call. Then it processes each message using a foreach loop. It also sets the invisibility timeout to five minutes for each message. Note that the 5 minutes starts for all messages at the same time, so after 5 minutes have passed since the call to GetMessages, any messages which have not been deleted will become visible again.
    */
    /// <summary>
    /// Dequeues up to 20 messages with a 5-minute invisibility window,
    /// processes each one, and deletes it.  Failures on one message are
    /// logged and do not stop the rest of the batch.
    /// </summary>
    public async Task BatchRemove()
    {
        Logger.Get().LogInformation("BatchRemove");
        // get 20 messages async
        // https://channel9.msdn.com/Shows/Azure-Friday/Azure-Queues-103-Batch-Processing-with-Mark-Simms
        IEnumerable<CloudQueueMessage> messages = await queue.GetMessagesAsync(20, TimeSpan.FromMinutes(5), null, null);
        if (messages.Any())
        {
            int counter = 1;
            foreach (var message in messages)
            {
                try
                {
                    Logger.Get().LogInformation($"Processing ticket {counter}");
                    TicketRequest ticket = JsonConvert.DeserializeObject<TicketRequest>(message.AsString);
                    LogTicketRequest(ticket);
                    // Simulated processing time; Task.Delay instead of
                    // blocking a thread-pool thread with Thread.Sleep.
                    await Task.Delay(1000);
                    Logger.Get().LogInformation("Finished processing ticket");
                    await queue.DeleteMessageAsync(message);
                    Logger.Get().LogInformation("Message removed");
                    counter++;
                }
                catch (Exception ex)
                {
                    Logger.Get().LogError($"Error: {ex.Message}");
                }
            }
            Logger.Get().LogInformation("All messages processed");
        }
        else
        {
            Logger.Get().LogWarning($"The {queue.Name} appears to be empty");
        }
    }

    /// <summary>
    /// Dequeues the next message, adds one free ticket to its payload,
    /// and writes the updated content (and visibility) back to the queue.
    /// </summary>
    public async Task ChangeMessage()
    {
        Logger.Get().LogInformation("ChangeMessage");
        // Get (dequeue) the next message; unlike PeekMessage this makes
        // it temporarily invisible so we can safely update it.
        CloudQueueMessage message = await queue.GetMessageAsync();
        if (message != null)
        {
            TicketRequest ticket = JsonConvert.DeserializeObject<TicketRequest>(message.AsString);
            LogTicketRequest(ticket);
            // add a free ticket :)
            ticket.NumberOfTickets = ticket.NumberOfTickets + 1;
            string json = JsonConvert.SerializeObject(ticket);
            message.SetMessageContent(json);
            await queue.UpdateMessageAsync(message,
                TimeSpan.FromSeconds(60.0),
                MessageUpdateFields.Content | MessageUpdateFields.Visibility);
            // log changed ticket
            LogTicketRequest(ticket);
        }
        else
        {
            Logger.Get().LogWarning($"The {queue.Name} appears to be empty");
        }
    }

    /// <summary>Deletes every message currently in the queue.</summary>
    public async Task ClearMessages()
    {
        Logger.Get().LogInformation("ClearMessages");
        await queue.ClearAsync();
        Logger.Get().LogInformation("Cleared");
    }

    /*
    https://azure.microsoft.com/en-us/documentation/articles/storage-dotnet-how-to-use-queues/
    You can get an estimate of the number of messages in a queue. The FetchAttributes method asks the Queue service to retrieve the queue attributes, including the message count. The ApproximateMessageCount property returns the last value retrieved by the FetchAttributes method, without calling the Queue service.
    */
    /// <summary>Logs the approximate number of messages in the queue.</summary>
    public async Task GetLength()
    {
        Logger.Get().LogInformation("GetLength");
        // Fetch the queue attributes.
        await queue.FetchAttributesAsync();
        // Retrieve the cached approximate message count.
        int? cachedMessageCount = queue.ApproximateMessageCount;
        Logger.Get().LogInformation($"Number of messages in {queue.Name} queue: {cachedMessageCount}");
    }

    /// <summary>
    /// Generates a random <c>TicketRequest</c> with GenFu and enqueues it
    /// as a JSON message.
    /// </summary>
    public async Task InsertMessage()
    {
        Logger.Get().LogInformation("InsertMessage");
        // GenFu configuration specific for our example
        A.Configure<TicketRequest>()
            .Fill(t => t.OrderDate)
            .AsFutureDate();
        A.Configure<TicketRequest>()
            .Fill(t => t.NumberOfTickets)
            .WithinRange(1, 10);
        A.Configure<TicketRequest>()
            .Fill(t => t.Email)
            .AsEmailAddressForDomain("example.com");
        TicketRequest ticket = A.New<TicketRequest>();
        string json = JsonConvert.SerializeObject(ticket);
        // Create a message and add it to the queue.
        CloudQueueMessage message = new CloudQueueMessage(json);
        // Enqueue via the synchronous API on the thread pool as a
        // deliberate workaround; see the linked SDK issue.
        await Task.Factory.StartNew(() => queue.AddMessage(message));
        // @see https://github.com/Azure/azure-storage-net/issues/220
        // await queue.AddMessageAsync(message);
        LogTicketRequest(ticket);
    }

    /// <summary>
    /// Peeks at the next message without removing it or changing its
    /// visibility, and logs its content.
    /// </summary>
    public async Task PeekMessage()
    {
        Logger.Get().LogInformation("PeekMessage");
        // Peek at the next message
        CloudQueueMessage msg = await queue.PeekMessageAsync();
        if (msg != null)
        {
            TicketRequest ticket = JsonConvert.DeserializeObject<TicketRequest>(msg.AsString);
            LogTicketRequest(ticket);
        }
        else
        {
            Logger.Get().LogWarning($"The {queue.Name} appears to be empty");
        }
    }

    /*
    https://azure.microsoft.com/en-us/documentation/articles/storage-dotnet-how-to-use-queues/
    Your code de-queues a message from a queue in two steps. When you call GetMessage, you get the next message in a queue. A message returned from GetMessage becomes invisible to any other code reading messages from this queue. By default, this message stays invisible for 30 seconds. To finish removing the message from the queue, you must also call DeleteMessage. This two-step process of removing a message assures that if your code fails to process a message due to hardware or software failure, another instance of your code can get the same message and try again. Your code calls DeleteMessage right after the message has been processed.
    */
    /// <summary>
    /// Dequeues the next message, processes it, and deletes it
    /// (the standard two-step get/delete pattern).
    /// </summary>
    public async Task RemoveMessage()
    {
        Logger.Get().LogInformation("RemoveMessage");
        // Get the next message
        CloudQueueMessage message = await queue.GetMessageAsync();
        if (message != null)
        {
            TicketRequest ticket = JsonConvert.DeserializeObject<TicketRequest>(message.AsString);
            LogTicketRequest(ticket);
            Logger.Get().LogInformation("Processing ticket");
            // Simulated processing time; Task.Delay instead of blocking
            // a thread-pool thread with Thread.Sleep.
            await Task.Delay(1000);
            Logger.Get().LogInformation("Finished processing ticket");
            await queue.DeleteMessageAsync(message);
            Logger.Get().LogInformation("Message removed");
        }
        else
        {
            Logger.Get().LogWarning($"The {queue.Name} appears to be empty");
        }
    }

    // Logs a ticket's key fields, or a warning if deserialization
    // returned null.
    private void LogTicketRequest(TicketRequest ticket)
    {
        if (ticket == null)
        {
            Logger.Get().LogWarning("Failed to deserialize ticket");
            return;
        }
        Logger.Get().LogInformation($"Ticket: #{ticket.TicketId} for: {ticket.Email} date: {ticket.OrderDate} total: {ticket.NumberOfTickets}");
    }

    private readonly AzureStorageOptions options;
    private readonly StorageCredentials storageCredentials;
    // Set during InitializeAsync; null until then.
    private CloudStorageAccount storageAccount;
    private CloudQueueClient queueClient;
    private CloudQueue queue;
}
}
| |
// ********************************************************************************************************
// Product Name: DotSpatial.Projection
// Description: The basic module for MapWindow version 6.0
// ********************************************************************************************************
//
// The Original Code is from MapWindow.dll version 6.0
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 8/14/2009 4:52:35 PM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
// Name | Date | Comment
// --------------------|------------|------------------------------------------------------------
// Ted Dunsford | 5/3/2010 | Updated project to DotSpatial.Projection and license to LGPL
// ********************************************************************************************************
#pragma warning disable 1591
namespace DotSpatial.Projections.ProjectedCategories
{
/// <summary>
/// NationalGridsNewZealand
/// </summary>
public class NationalGridsNewZealand : CoordinateSystemCategory
{
#region Private Variables
public readonly ProjectionInfo ChathamIslands1979MapGrid;
public readonly ProjectionInfo NZGD1949AmuriCircuit;
public readonly ProjectionInfo NZGD1949BayofPlentyCircuit;
public readonly ProjectionInfo NZGD1949BluffCircuit;
public readonly ProjectionInfo NZGD1949BullerCircuit;
public readonly ProjectionInfo NZGD1949CollingwoodCircuit;
public readonly ProjectionInfo NZGD1949GawlerCircuit;
public readonly ProjectionInfo NZGD1949GreyCircuit;
public readonly ProjectionInfo NZGD1949HawkesBayCircuit;
public readonly ProjectionInfo NZGD1949HokitikaCircuit;
public readonly ProjectionInfo NZGD1949JacksonsBayCircuit;
public readonly ProjectionInfo NZGD1949KarameaCircuit;
public readonly ProjectionInfo NZGD1949LindisPeakCircuit;
public readonly ProjectionInfo NZGD1949MarlboroughCircuit;
public readonly ProjectionInfo NZGD1949MountEdenCircuit;
public readonly ProjectionInfo NZGD1949MountNicholasCircuit;
public readonly ProjectionInfo NZGD1949MountPleasantCircuit;
public readonly ProjectionInfo NZGD1949MountYorkCircuit;
public readonly ProjectionInfo NZGD1949NelsonCircuit;
public readonly ProjectionInfo NZGD1949NorthTaieriCircuit;
public readonly ProjectionInfo NZGD1949ObservationPointCircuit;
public readonly ProjectionInfo NZGD1949OkaritoCircuit;
public readonly ProjectionInfo NZGD1949PovertyBayCircuit;
public readonly ProjectionInfo NZGD1949TaranakiCircuit;
public readonly ProjectionInfo NZGD1949TimaruCircuit;
public readonly ProjectionInfo NZGD1949TuhirangiCircuit;
public readonly ProjectionInfo NZGD1949UTMZone58S;
public readonly ProjectionInfo NZGD1949UTMZone59S;
public readonly ProjectionInfo NZGD1949UTMZone60S;
public readonly ProjectionInfo NZGD1949WairarapaCircuit;
public readonly ProjectionInfo NZGD1949WanganuiCircuit;
public readonly ProjectionInfo NZGD1949WellingtonCircuit;
public readonly ProjectionInfo NZGD2000AmuriCircuit;
public readonly ProjectionInfo NZGD2000BayofPlentyCircuit;
public readonly ProjectionInfo NZGD2000BluffCircuit;
public readonly ProjectionInfo NZGD2000BullerCircuit;
public readonly ProjectionInfo NZGD2000ChathamIslandCircuit;
public readonly ProjectionInfo NZGD2000CollingwoodCircuit;
public readonly ProjectionInfo NZGD2000GawlerCircuit;
public readonly ProjectionInfo NZGD2000GreyCircuit;
public readonly ProjectionInfo NZGD2000HawkesBayCircuit;
public readonly ProjectionInfo NZGD2000HokitikaCircuit;
public readonly ProjectionInfo NZGD2000JacksonsBayCircuit;
public readonly ProjectionInfo NZGD2000KarameaCircuit;
public readonly ProjectionInfo NZGD2000LindisPeakCircuit;
public readonly ProjectionInfo NZGD2000MarlboroughCircuit;
public readonly ProjectionInfo NZGD2000MountEdenCircuit;
public readonly ProjectionInfo NZGD2000MountNicholasCircuit;
public readonly ProjectionInfo NZGD2000MountPleasantCircuit;
public readonly ProjectionInfo NZGD2000MountYorkCircuit;
public readonly ProjectionInfo NZGD2000NelsonCircuit;
public readonly ProjectionInfo NZGD2000NewZealandTransverseMercator;
public readonly ProjectionInfo NZGD2000NorthTaieriCircuit;
public readonly ProjectionInfo NZGD2000ObservationPointCircuit;
public readonly ProjectionInfo NZGD2000OkaritoCircuit;
public readonly ProjectionInfo NZGD2000PovertyBayCircuit;
public readonly ProjectionInfo NZGD2000TaranakiCircuit;
public readonly ProjectionInfo NZGD2000TimaruCircuit;
public readonly ProjectionInfo NZGD2000TuhirangiCircuit;
public readonly ProjectionInfo NZGD2000UTMZone58S;
public readonly ProjectionInfo NZGD2000UTMZone59S;
public readonly ProjectionInfo NZGD2000UTMZone60S;
public readonly ProjectionInfo NZGD2000WairarapaCircuit;
public readonly ProjectionInfo NZGD2000WanganuiCircuit;
public readonly ProjectionInfo NZGD2000WellingtonCircuit;
public readonly ProjectionInfo NewZealandMapGrid;
public readonly ProjectionInfo NewZealandNorthIsland;
public readonly ProjectionInfo NewZealandSouthIsland;
#endregion
#region Constructors
/// <summary>
/// Creates a new instance of NationalGridsNewZealand
/// </summary>
public NationalGridsNewZealand()
{
// Build each ProjectionInfo from its proj4 definition string.
// NZGD1949 circuits use the International ellipsoid; NZGD2000 circuits use GRS80.
ChathamIslands1979MapGrid = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-44 +lon_0=-176.5 +k=0.999600 +x_0=350000 +y_0=650000 +ellps=intl +units=m +no_defs ");
NewZealandMapGrid = ProjectionInfo.FromProj4String("+proj=nzmg +lat_0=-41 +lon_0=173 +x_0=2510000 +y_0=6023150 +ellps=intl +datum=nzgd49 +units=m +no_defs ");
NewZealandNorthIsland = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-39 +lon_0=175.5 +k=1.000000 +x_0=274319.5243848086 +y_0=365759.3658464114 +ellps=intl +to_meter=0.9143984146160287 +no_defs ");
NewZealandSouthIsland = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-44 +lon_0=171.5 +k=1.000000 +x_0=457199.2073080143 +y_0=457199.2073080143 +ellps=intl +to_meter=0.9143984146160287 +no_defs ");
NZGD1949AmuriCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-42.68911658333333 +lon_0=173.0101333888889 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949BayofPlentyCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-37.76124980555556 +lon_0=176.46619725 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949BluffCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-46.60000961111111 +lon_0=168.342872 +k=1.000000 +x_0=300002.66 +y_0=699999.58 +ellps=intl +units=m +no_defs ");
NZGD1949BullerCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.81080286111111 +lon_0=171.5812600555556 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949CollingwoodCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-40.71475905555556 +lon_0=172.6720465 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949GawlerCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-43.74871155555556 +lon_0=171.3607484722222 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949GreyCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-42.33369427777778 +lon_0=171.5497713055556 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949HawkesBayCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-39.65092930555556 +lon_0=176.6736805277778 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949HokitikaCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-42.88632236111111 +lon_0=170.9799935 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949JacksonsBayCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-43.97780288888889 +lon_0=168.606267 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949KarameaCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.28991152777778 +lon_0=172.1090281944444 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949LindisPeakCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-44.73526797222222 +lon_0=169.4677550833333 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949MarlboroughCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.54448666666666 +lon_0=173.8020741111111 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949MountEdenCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-36.87986527777778 +lon_0=174.7643393611111 +k=0.999900 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949MountNicholasCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-45.13290258333333 +lon_0=168.3986411944444 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949MountPleasantCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-43.59063758333333 +lon_0=172.7271935833333 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949MountYorkCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-45.56372616666666 +lon_0=167.7388617777778 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949NelsonCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.27454472222222 +lon_0=173.2993168055555 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949NorthTaieriCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-45.86151336111111 +lon_0=170.2825891111111 +k=0.999960 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949ObservationPointCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-45.81619661111111 +lon_0=170.6285951666667 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949OkaritoCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-43.11012813888889 +lon_0=170.2609258333333 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949PovertyBayCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-38.62470277777778 +lon_0=177.8856362777778 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949TaranakiCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-39.13575830555556 +lon_0=174.22801175 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949TimaruCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-44.40222036111111 +lon_0=171.0572508333333 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949TuhirangiCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-39.51247038888889 +lon_0=175.6400368055556 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949UTMZone58S = ProjectionInfo.FromProj4String("+proj=utm +zone=58 +south +ellps=intl +units=m +no_defs ");
NZGD1949UTMZone59S = ProjectionInfo.FromProj4String("+proj=utm +zone=59 +south +ellps=intl +units=m +no_defs ");
NZGD1949UTMZone60S = ProjectionInfo.FromProj4String("+proj=utm +zone=60 +south +ellps=intl +units=m +no_defs ");
NZGD1949WairarapaCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-40.92553263888889 +lon_0=175.6473496666667 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949WanganuiCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-40.24194713888889 +lon_0=175.4880996111111 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD1949WellingtonCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.30131963888888 +lon_0=174.7766231111111 +k=1.000000 +x_0=300000 +y_0=700000 +ellps=intl +units=m +no_defs ");
NZGD2000AmuriCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-42.68888888888888 +lon_0=173.01 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000BayofPlentyCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-37.76111111111111 +lon_0=176.4661111111111 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000BluffCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-46.6 +lon_0=168.3427777777778 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000BullerCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.81055555555555 +lon_0=171.5811111111111 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000ChathamIslandCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-44 +lon_0=-176.5 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000CollingwoodCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-40.71472222222223 +lon_0=172.6719444444444 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000GawlerCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-43.74861111111111 +lon_0=171.3605555555555 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000GreyCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-42.33361111111111 +lon_0=171.5497222222222 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000HawkesBayCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-39.65083333333333 +lon_0=176.6736111111111 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000HokitikaCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-42.88611111111111 +lon_0=170.9797222222222 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000JacksonsBayCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-43.97777777777778 +lon_0=168.6061111111111 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000KarameaCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.28972222222222 +lon_0=172.1088888888889 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000LindisPeakCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-44.735 +lon_0=169.4675 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000MarlboroughCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.54444444444444 +lon_0=173.8019444444444 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000MountEdenCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-36.87972222222222 +lon_0=174.7641666666667 +k=0.999900 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000MountNicholasCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-45.13277777777778 +lon_0=168.3986111111111 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000MountPleasantCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-43.59055555555556 +lon_0=172.7269444444445 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000MountYorkCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-45.56361111111111 +lon_0=167.7386111111111 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000NelsonCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.27444444444444 +lon_0=173.2991666666667 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000NewZealandTransverseMercator = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=0 +lon_0=173 +k=0.999600 +x_0=1600000 +y_0=10000000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000NorthTaieriCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-45.86138888888889 +lon_0=170.2825 +k=0.999960 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000ObservationPointCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-45.81611111111111 +lon_0=170.6283333333333 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000OkaritoCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-43.11 +lon_0=170.2608333333333 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000PovertyBayCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-38.62444444444444 +lon_0=177.8855555555556 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000TaranakiCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-39.13555555555556 +lon_0=174.2277777777778 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000TimaruCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-44.40194444444445 +lon_0=171.0572222222222 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000TuhirangiCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-39.51222222222222 +lon_0=175.64 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000UTMZone58S = ProjectionInfo.FromProj4String("+proj=utm +zone=58 +south +ellps=GRS80 +units=m +no_defs ");
NZGD2000UTMZone59S = ProjectionInfo.FromProj4String("+proj=utm +zone=59 +south +ellps=GRS80 +units=m +no_defs ");
NZGD2000UTMZone60S = ProjectionInfo.FromProj4String("+proj=utm +zone=60 +south +ellps=GRS80 +units=m +no_defs ");
NZGD2000WairarapaCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-40.92527777777777 +lon_0=175.6472222222222 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000WanganuiCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-40.24194444444444 +lon_0=175.4880555555555 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
NZGD2000WellingtonCircuit = ProjectionInfo.FromProj4String("+proj=tmerc +lat_0=-41.3011111111111 +lon_0=174.7763888888889 +k=1.000000 +x_0=400000 +y_0=800000 +ellps=GRS80 +units=m +no_defs ");
// Assign the ESRI-style name of each projected coordinate system.
ChathamIslands1979MapGrid.Name = "Chatham_Islands_1979_Map_Grid";
NewZealandMapGrid.Name = "GD_1949_New_Zealand_Map_Grid";
NewZealandNorthIsland.Name = "New_Zealand_North_Island";
NewZealandSouthIsland.Name = "New_Zealand_South_Island";
NZGD1949AmuriCircuit.Name = "NZGD_1949_Amuri_Circuit";
NZGD1949BayofPlentyCircuit.Name = "NZGD_1949_Bay_of_Plenty_Circuit";
NZGD1949BluffCircuit.Name = "NZGD_1949_Bluff_Circuit";
NZGD1949BullerCircuit.Name = "NZGD_1949_Buller_Circuit";
NZGD1949CollingwoodCircuit.Name = "NZGD_1949_Collingwood_Circuit";
NZGD1949GawlerCircuit.Name = "NZGD_1949_Gawler_Circuit";
NZGD1949GreyCircuit.Name = "NZGD_1949_Grey_Circuit";
NZGD1949HawkesBayCircuit.Name = "NZGD_1949_Hawkes_Bay_Circuit";
NZGD1949HokitikaCircuit.Name = "NZGD_1949_Hokitika_Circuit";
NZGD1949JacksonsBayCircuit.Name = "NZGD_1949_Jacksons_Bay_Circuit";
NZGD1949KarameaCircuit.Name = "NZGD_1949_Karamea_Circuit";
NZGD1949LindisPeakCircuit.Name = "NZGD_1949_Lindis_Peak_Circuit";
NZGD1949MarlboroughCircuit.Name = "NZGD_1949_Marlborough_Circuit";
NZGD1949MountEdenCircuit.Name = "NZGD_1949_Mount_Eden_Circuit";
NZGD1949MountNicholasCircuit.Name = "NZGD_1949_Mount_Nicholas_Circuit";
NZGD1949MountPleasantCircuit.Name = "NZGD_1949_Mount_Pleasant_Circuit";
NZGD1949MountYorkCircuit.Name = "NZGD_1949_Mount_York_Circuit";
NZGD1949NelsonCircuit.Name = "NZGD_1949_Nelson_Circuit";
NZGD1949NorthTaieriCircuit.Name = "NZGD_1949_North_Taieri_Circuit";
NZGD1949ObservationPointCircuit.Name = "NZGD_1949_Observation_Point_Circuit";
NZGD1949OkaritoCircuit.Name = "NZGD_1949_Okarito_Circuit";
NZGD1949PovertyBayCircuit.Name = "NZGD_1949_Poverty_Bay_Circuit";
NZGD1949TaranakiCircuit.Name = "NZGD_1949_Taranaki_Circuit";
NZGD1949TimaruCircuit.Name = "NZGD_1949_Timaru_Circuit";
NZGD1949TuhirangiCircuit.Name = "NZGD_1949_Tuhirangi_Circuit";
NZGD1949UTMZone58S.Name = "NZGD_1949_UTM_Zone_58S";
NZGD1949UTMZone59S.Name = "NZGD_1949_UTM_Zone_59S";
NZGD1949UTMZone60S.Name = "NZGD_1949_UTM_Zone_60S";
NZGD1949WairarapaCircuit.Name = "NZGD_1949_Wairarapa_Circuit";
NZGD1949WanganuiCircuit.Name = "NZGD_1949_Wanganui_Circuit";
NZGD1949WellingtonCircuit.Name = "NZGD_1949_Wellington_Circuit";
NZGD2000AmuriCircuit.Name = "NZGD_2000_Amuri_Circuit";
NZGD2000BayofPlentyCircuit.Name = "NZGD_2000_Bay_of_Plenty_Circuit";
NZGD2000BluffCircuit.Name = "NZGD_2000_Bluff_Circuit";
NZGD2000BullerCircuit.Name = "NZGD_2000_Buller_Circuit";
NZGD2000ChathamIslandCircuit.Name = "NZGD_2000_Chatham_Island_Circuit";
NZGD2000CollingwoodCircuit.Name = "NZGD_2000_Collingwood_Circuit";
NZGD2000GawlerCircuit.Name = "NZGD_2000_Gawler_Circuit";
NZGD2000GreyCircuit.Name = "NZGD_2000_Grey_Circuit";
NZGD2000HawkesBayCircuit.Name = "NZGD_2000_Hawkes_Bay_Circuit";
NZGD2000HokitikaCircuit.Name = "NZGD_2000_Hokitika_Circuit";
NZGD2000JacksonsBayCircuit.Name = "NZGD_2000_Jacksons_Bay_Circuit";
NZGD2000KarameaCircuit.Name = "NZGD_2000_Karamea_Circuit";
NZGD2000LindisPeakCircuit.Name = "NZGD_2000_Lindis_Peak_Circuit";
NZGD2000MarlboroughCircuit.Name = "NZGD_2000_Marlborough_Circuit";
NZGD2000MountEdenCircuit.Name = "NZGD_2000_Mount_Eden_Circuit";
NZGD2000MountNicholasCircuit.Name = "NZGD_2000_Mount_Nicholas_Circuit";
NZGD2000MountPleasantCircuit.Name = "NZGD_2000_Mount_Pleasant_Circuit";
NZGD2000MountYorkCircuit.Name = "NZGD_2000_Mount_York_Circuit";
NZGD2000NelsonCircuit.Name = "NZGD_2000_Nelson_Circuit";
NZGD2000NewZealandTransverseMercator.Name = "NZGD_2000_New_Zealand_Transverse_Mercator";
NZGD2000NorthTaieriCircuit.Name = "NZGD_2000_North_Taieri_Circuit";
NZGD2000ObservationPointCircuit.Name = "NZGD_2000_Observation_Point_Circuit";
NZGD2000OkaritoCircuit.Name = "NZGD_2000_Okarito_Circuit";
NZGD2000PovertyBayCircuit.Name = "NZGD_2000_Poverty_Bay_Circuit";
NZGD2000TaranakiCircuit.Name = "NZGD_2000_Taranaki_Circuit";
NZGD2000TimaruCircuit.Name = "NZGD_2000_Timaru_Circuit";
NZGD2000TuhirangiCircuit.Name = "NZGD_2000_Tuhirangi_Circuit";
NZGD2000UTMZone58S.Name = "NZGD_2000_UTM_Zone_58S";
NZGD2000UTMZone59S.Name = "NZGD_2000_UTM_Zone_59S";
NZGD2000UTMZone60S.Name = "NZGD_2000_UTM_Zone_60S";
NZGD2000WairarapaCircuit.Name = "NZGD_2000_Wairarapa_Circuit";
NZGD2000WanganuiCircuit.Name = "NZGD_2000_Wanganui_Circuit";
NZGD2000WellingtonCircuit.Name = "NZGD_2000_Wellington_Circuit";
// Assign the underlying geographic coordinate system name of each projection.
ChathamIslands1979MapGrid.GeographicInfo.Name = "GCS_Chatham_Islands_1979";
NewZealandMapGrid.GeographicInfo.Name = "GCS_New_Zealand_1949";
NewZealandNorthIsland.GeographicInfo.Name = "GCS_New_Zealand_1949";
NewZealandSouthIsland.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949AmuriCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949BayofPlentyCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949BluffCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949BullerCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949CollingwoodCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949GawlerCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949GreyCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949HawkesBayCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949HokitikaCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949JacksonsBayCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949KarameaCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949LindisPeakCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949MarlboroughCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949MountEdenCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949MountNicholasCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949MountPleasantCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949MountYorkCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949NelsonCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949NorthTaieriCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949ObservationPointCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949OkaritoCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949PovertyBayCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949TaranakiCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949TimaruCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949TuhirangiCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949UTMZone58S.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949UTMZone59S.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949UTMZone60S.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949WairarapaCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949WanganuiCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD1949WellingtonCircuit.GeographicInfo.Name = "GCS_New_Zealand_1949";
NZGD2000AmuriCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000BayofPlentyCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000BluffCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000BullerCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000ChathamIslandCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000CollingwoodCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000GawlerCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000GreyCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000HawkesBayCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000HokitikaCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000JacksonsBayCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000KarameaCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000LindisPeakCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000MarlboroughCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000MountEdenCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000MountNicholasCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000MountPleasantCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000MountYorkCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000NelsonCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000NewZealandTransverseMercator.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000NorthTaieriCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000ObservationPointCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000OkaritoCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000PovertyBayCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000TaranakiCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000TimaruCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000TuhirangiCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000UTMZone58S.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000UTMZone59S.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000UTMZone60S.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000WairarapaCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000WanganuiCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
NZGD2000WellingtonCircuit.GeographicInfo.Name = "GCS_NZGD_2000";
// Assign the datum name of each projection.
ChathamIslands1979MapGrid.GeographicInfo.Datum.Name = "D_Chatham_Islands_1979";
NewZealandMapGrid.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NewZealandNorthIsland.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NewZealandSouthIsland.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949AmuriCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949BayofPlentyCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949BluffCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949BullerCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949CollingwoodCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949GawlerCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949GreyCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949HawkesBayCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949HokitikaCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949JacksonsBayCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949KarameaCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949LindisPeakCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949MarlboroughCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949MountEdenCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949MountNicholasCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949MountPleasantCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949MountYorkCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949NelsonCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949NorthTaieriCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949ObservationPointCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949OkaritoCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949PovertyBayCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949TaranakiCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949TimaruCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949TuhirangiCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949UTMZone58S.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949UTMZone59S.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949UTMZone60S.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949WairarapaCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949WanganuiCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD1949WellingtonCircuit.GeographicInfo.Datum.Name = "D_New_Zealand_1949";
NZGD2000AmuriCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000BayofPlentyCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000BluffCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000BullerCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000ChathamIslandCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000CollingwoodCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000GawlerCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000GreyCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000HawkesBayCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000HokitikaCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000JacksonsBayCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000KarameaCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000LindisPeakCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000MarlboroughCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000MountEdenCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000MountNicholasCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000MountPleasantCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000MountYorkCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000NelsonCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000NewZealandTransverseMercator.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000NorthTaieriCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000ObservationPointCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000OkaritoCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000PovertyBayCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000TaranakiCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000TimaruCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000TuhirangiCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000UTMZone58S.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000UTMZone59S.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000UTMZone60S.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000WairarapaCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000WanganuiCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
NZGD2000WellingtonCircuit.GeographicInfo.Datum.Name = "D_NZGD_2000";
}
#endregion
}
}
#pragma warning restore 1591
| |
/*
* Copyright 2014 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Text.RegularExpressions;
namespace ZXing.Client.Result
{
/// <summary>
/// Detects a result that is likely a vehicle identification number.
/// @author Sean Owen
/// </summary>
public class VINResultParser : ResultParser
{
#if NETFX_CORE || PORTABLE || UNITY || NETSTANDARD1_0 || NETSTANDARD1_1 || NETSTANDARD1_2
    private static readonly Regex IOQ = new Regex("[IOQ]");
    private static readonly Regex AZ09 = new Regex(@"\A(?:" + "[A-Z0-9]{17}" + @")\z");
#else
    // Compiled regexes where the target framework supports compilation.
    private static readonly Regex IOQ = new Regex("[IOQ]", RegexOptions.Compiled);
    private static readonly Regex AZ09 = new Regex(@"\A(?:" + "[A-Z0-9]{17}" + @")\z", RegexOptions.Compiled);
#endif

    /// <summary>
    /// attempt to parse the raw result to the specific type
    /// </summary>
    /// <param name="result">the decoded barcode to inspect</param>
    /// <returns>a VINParsedResult when the content is a plausible VIN; otherwise null</returns>
    public override ParsedResult parse(ZXing.Result result)
    {
        try
        {
            // VINs are only ever encoded as Code 39.
            if (result.BarcodeFormat != BarcodeFormat.CODE_39)
            {
                return null;
            }
            // Drop the letters a VIN may never contain, then require exactly
            // 17 alphanumeric characters and a valid check digit.
            var vin = IOQ.Replace(result.Text, "").Trim();
            if (!AZ09.Match(vin).Success)
            {
                return null;
            }
            if (!checkChecksum(vin))
            {
                return null;
            }
            var wmi = vin.Substring(0, 3); // world manufacturer identifier
            return new VINParsedResult(
                vin,
                wmi,
                vin.Substring(3, 6),
                vin.Substring(9, 8),
                countryCode(wmi),
                vin.Substring(3, 5),
                modelYear(vin[9]),
                vin[10],
                vin.Substring(11));
        }
        catch
        {
            // Any unexpected failure (unmapped character, short text, ...)
            // simply means "this is not a VIN".
            return null;
        }
    }

    // Validates the check digit at index 8 against the weighted sum of
    // all 17 transliterated character values, mod 11.
    private static bool checkChecksum(String vin)
    {
        var weightedSum = 0;
        for (var position = 1; position <= vin.Length; position++)
        {
            weightedSum += vinPositionWeight(position) * vinCharValue(vin[position - 1]);
        }
        return vin[8] == checkChar(weightedSum % 11);
    }

    // Transliteration table: maps a VIN character to its checksum value.
    // Throws ArgumentException for characters a valid VIN cannot contain.
    private static int vinCharValue(char c)
    {
        if (c >= '0' && c <= '9')
        {
            return c - '0';
        }
        if (c >= 'A' && c <= 'I')
        {
            return 1 + (c - 'A');
        }
        if (c >= 'J' && c <= 'R')
        {
            return 1 + (c - 'J');
        }
        if (c >= 'S' && c <= 'Z')
        {
            return 2 + (c - 'S');
        }
        throw new ArgumentException(c.ToString());
    }

    // Weight applied to each 1-based character position in the checksum;
    // position 9 is the check digit itself and carries weight 0.
    private static int vinPositionWeight(int position)
    {
        if (position >= 1 && position <= 7)
        {
            return 9 - position;
        }
        if (position == 8)
        {
            return 10;
        }
        if (position == 9)
        {
            return 0;
        }
        if (position >= 10 && position <= 17)
        {
            return 19 - position;
        }
        throw new ArgumentException();
    }

    // Expected check character for a given (sum % 11) remainder: 0-9 map to
    // digits, 10 maps to 'X'.
    private static char checkChar(int remainder)
    {
        if (remainder < 10)
        {
            return (char)('0' + remainder);
        }
        if (remainder == 10)
        {
            return 'X';
        }
        throw new ArgumentException();
    }

    // Decodes the model-year character (position 10); the ranges are disjoint
    // so the order of the checks does not affect the result.
    private static int modelYear(char c)
    {
        if (c >= 'A' && c <= 'D')
        {
            return 2010 + (c - 'A');
        }
        if (c >= 'E' && c <= 'H')
        {
            return 1984 + (c - 'E');
        }
        if (c >= 'J' && c <= 'N')
        {
            return 1988 + (c - 'J');
        }
        if (c == 'P')
        {
            return 1993;
        }
        if (c >= 'R' && c <= 'T')
        {
            return 1994 + (c - 'R');
        }
        if (c >= 'V' && c <= 'Y')
        {
            return 1997 + (c - 'V');
        }
        if (c >= '1' && c <= '9')
        {
            return 2001 + (c - '1');
        }
        throw new ArgumentException(c.ToString());
    }

    // Maps the first two WMI characters to a country code, or null when the
    // prefix is not recognized.
    private static String countryCode(String wmi)
    {
        var c1 = wmi[0];
        var c2 = wmi[1];
        if (c1 == '1' || c1 == '4' || c1 == '5')
        {
            return "US";
        }
        if (c1 == '2')
        {
            return "CA";
        }
        if (c1 == '3' && c2 >= 'A' && c2 <= 'W')
        {
            return "MX";
        }
        if (c1 == '9' && ((c2 >= 'A' && c2 <= 'E') || (c2 >= '3' && c2 <= '9')))
        {
            return "BR";
        }
        if (c1 == 'J' && c2 >= 'A' && c2 <= 'T')
        {
            return "JP";
        }
        if (c1 == 'K' && c2 >= 'L' && c2 <= 'R')
        {
            return "KO";
        }
        if (c1 == 'L')
        {
            return "CN";
        }
        if (c1 == 'M' && c2 >= 'A' && c2 <= 'E')
        {
            return "IN";
        }
        if (c1 == 'S')
        {
            if (c2 >= 'A' && c2 <= 'M')
            {
                return "UK";
            }
            return (c2 >= 'N' && c2 <= 'T') ? "DE" : null;
        }
        if (c1 == 'V')
        {
            if (c2 >= 'F' && c2 <= 'R')
            {
                return "FR";
            }
            return (c2 >= 'S' && c2 <= 'W') ? "ES" : null;
        }
        if (c1 == 'W')
        {
            return "DE";
        }
        if (c1 == 'X' && (c2 == '0' || (c2 >= '3' && c2 <= '9')))
        {
            return "RU";
        }
        if (c1 == 'Z' && c2 >= 'A' && c2 <= 'R')
        {
            return "IT";
        }
        return null;
    }
}
}
| |
//
// Mono.Unix/UnixFile.cs
//
// Authors:
// Jonathan Pryor (jonpryor@vt.edu)
//
// (C) 2004 Jonathan Pryor
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.IO;
using System.Text;
using Mono.Unix;
namespace Mono.Unix {
// Pairs the two ends of a Unix pipe as managed streams.
public struct UnixPipes
{
// Wraps an already-open read stream and write stream as a pipe pair.
public UnixPipes (UnixStream reading, UnixStream writing)
{
Reading = reading;
Writing = writing;
}
// Stream for the read end of the pipe.
public UnixStream Reading;
// Stream for the write end of the pipe.
public UnixStream Writing;
}
public sealed /* static */ class UnixFile
{
private UnixFile () {}
public static bool CanAccess (string path, AccessMode mode)
{
int r = Syscall.access (path, mode);
return r == 0;
}
public static void Delete (string path)
{
int r = Syscall.unlink (path);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static bool Exists (string path)
{
int r = Syscall.access (path, AccessMode.F_OK);
if (r == 0)
return true;
return false;
}
public static long GetConfigurationValue (string path, PathConf name)
{
Syscall.SetLastError ((Error) 0);
long r = Syscall.pathconf (path, name);
if (r == -1 && Syscall.GetLastError() != (Error) 0)
UnixMarshal.ThrowExceptionForLastError ();
return r;
}
public static DateTime GetLastAccessTime (string path)
{
return new UnixFileInfo (path).LastAccessTime;
}
public static Stat GetFileStatus (string path)
{
Stat stat;
int r = Syscall.stat (path, out stat);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
return stat;
}
public static DateTime GetLastWriteTime (string path)
{
return new UnixFileInfo(path).LastWriteTime;
}
public static DateTime GetLastStatusChangeTime (string path)
{
return new UnixFileInfo (path).LastStatusChangeTime;
}
public static FilePermissions GetPermissions (string path)
{
return new UnixFileInfo (path).Permissions;
}
public static string ReadLink (string path)
{
string r = TryReadLink (path);
if (r == null)
UnixMarshal.ThrowExceptionForLastError ();
return r;
}
public static string TryReadLink (string path)
{
// Who came up with readlink(2)? There doesn't seem to be a way to
// properly handle it.
StringBuilder sb = new StringBuilder (512);
int r = Syscall.readlink (path, sb);
if (r == -1)
return null;
return sb.ToString (0, r);
}
public static void SetPermissions (string path, FilePermissions perms)
{
int r = Syscall.chmod (path, perms);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static UnixStream Create (string path)
{
FilePermissions mode = // 0644
FilePermissions.S_IRUSR | FilePermissions.S_IWUSR |
FilePermissions.S_IRGRP | FilePermissions.S_IROTH;
return Create (path, mode);
}
public static UnixStream Create (string path, FilePermissions mode)
{
int fd = Syscall.creat (path, mode);
if (fd < 0)
UnixMarshal.ThrowExceptionForLastError ();
return new UnixStream (fd);
}
public static UnixPipes CreatePipes ()
{
int reading, writing;
int r = Syscall.pipe (out reading, out writing);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
return new UnixPipes (new UnixStream (reading), new UnixStream (writing));
}
public static UnixStream Open (string path, OpenFlags flags)
{
int fd = Syscall.open (path, flags);
if (fd < 0)
UnixMarshal.ThrowExceptionForLastError ();
return new UnixStream (fd);
}
public static UnixStream Open (string path, OpenFlags flags, FilePermissions mode)
{
int fd = Syscall.open (path, flags, mode);
if (fd < 0)
UnixMarshal.ThrowExceptionForLastError ();
return new UnixStream (fd);
}
public static UnixStream Open (string path, FileMode mode)
{
OpenFlags flags = UnixConvert.ToOpenFlags (mode, FileAccess.ReadWrite);
int fd = Syscall.open (path, flags);
if (fd < 0)
UnixMarshal.ThrowExceptionForLastError ();
return new UnixStream (fd);
}
public static UnixStream Open (string path, FileMode mode, FileAccess access)
{
OpenFlags flags = UnixConvert.ToOpenFlags (mode, access);
int fd = Syscall.open (path, flags);
if (fd < 0)
UnixMarshal.ThrowExceptionForLastError ();
return new UnixStream (fd);
}
public static UnixStream Open (string path, FileMode mode, FileAccess access, FilePermissions perms)
{
OpenFlags flags = UnixConvert.ToOpenFlags (mode, access);
int fd = Syscall.open (path, flags, perms);
if (fd < 0)
UnixMarshal.ThrowExceptionForLastError ();
return new UnixStream (fd);
}
public static UnixStream OpenRead (string path)
{
return Open (path, FileMode.Open, FileAccess.Read);
}
public static UnixStream OpenWrite (string path)
{
return Open (path, FileMode.OpenOrCreate, FileAccess.Write);
}
public static void SetOwner (string path, uint owner, uint group)
{
int r = Syscall.chown (path, owner, group);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static void SetOwner (string path, string owner)
{
Passwd pw = Syscall.getpwnam (owner);
if (pw == null)
throw new ArgumentException (Locale.GetText ("invalid username"), "owner");
uint uid = pw.pw_uid;
uint gid = pw.pw_gid;
SetOwner (path, uid, gid);
}
public static void SetOwner (string path, string owner, string group)
{
uint uid = UnixUser.GetUserId (owner);
uint gid = UnixGroup.GetGroupId (group);
SetOwner (path, uid, gid);
}
public static void SetLinkOwner (string path, uint owner, uint group)
{
int r = Syscall.lchown (path, owner, group);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static void SetLinkOwner (string path, string owner)
{
Passwd pw = Syscall.getpwnam (owner);
if (pw == null)
throw new ArgumentException (Locale.GetText ("invalid username"), "owner");
uint uid = pw.pw_uid;
uint gid = pw.pw_gid;
SetLinkOwner (path, uid, gid);
}
public static void SetLinkOwner (string path, string owner, string group)
{
uint uid = UnixUser.GetUserId (owner);
uint gid = UnixGroup.GetGroupId (group);
SetLinkOwner (path, uid, gid);
}
public static void AdviseNormalAccess (int fd, long offset, long len)
{
int r = Syscall.posix_fadvise (fd, offset, len,
PosixFadviseAdvice.POSIX_FADV_NORMAL);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static void AdviseNormalAccess (int fd)
{
AdviseNormalAccess (fd, 0, 0);
}
public static void AdviseNormalAccess (FileStream file, long offset, long len)
{
AdviseNormalAccess (file.Handle.ToInt32(), offset, len);
}
public static void AdviseNormalAccess (FileStream file)
{
AdviseNormalAccess (file.Handle.ToInt32());
}
public static void AdviseNormalAccess (UnixStream stream, long offset, long len)
{
AdviseNormalAccess (stream.Handle, offset, len);
}
public static void AdviseNormalAccess (UnixStream stream)
{
AdviseNormalAccess (stream.Handle);
}
public static void AdviseSequentialAccess (int fd, long offset, long len)
{
int r = Syscall.posix_fadvise (fd, offset, len,
PosixFadviseAdvice.POSIX_FADV_SEQUENTIAL);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static void AdviseSequentialAccess (int fd)
{
AdviseSequentialAccess (fd, 0, 0);
}
public static void AdviseSequentialAccess (FileStream file, long offset, long len)
{
AdviseSequentialAccess (file.Handle.ToInt32(), offset, len);
}
public static void AdviseSequentialAccess (FileStream file)
{
AdviseSequentialAccess (file.Handle.ToInt32());
}
public static void AdviseSequentialAccess (UnixStream stream, long offset, long len)
{
AdviseSequentialAccess (stream.Handle, offset, len);
}
public static void AdviseSequentialAccess (UnixStream stream)
{
AdviseSequentialAccess (stream.Handle);
}
public static void AdviseRandomAccess (int fd, long offset, long len)
{
int r = Syscall.posix_fadvise (fd, offset, len,
PosixFadviseAdvice.POSIX_FADV_RANDOM);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static void AdviseRandomAccess (int fd)
{
AdviseRandomAccess (fd, 0, 0);
}
public static void AdviseRandomAccess (FileStream file, long offset, long len)
{
AdviseRandomAccess (file.Handle.ToInt32(), offset, len);
}
public static void AdviseRandomAccess (FileStream file)
{
AdviseRandomAccess (file.Handle.ToInt32());
}
public static void AdviseRandomAccess (UnixStream stream, long offset, long len)
{
AdviseRandomAccess (stream.Handle, offset, len);
}
public static void AdviseRandomAccess (UnixStream stream)
{
AdviseRandomAccess (stream.Handle);
}
public static void AdviseNeedAccess (int fd, long offset, long len)
{
int r = Syscall.posix_fadvise (fd, offset, len,
PosixFadviseAdvice.POSIX_FADV_WILLNEED);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static void AdviseNeedAccess (int fd)
{
AdviseNeedAccess (fd, 0, 0);
}
public static void AdviseNeedAccess (FileStream file, long offset, long len)
{
AdviseNeedAccess (file.Handle.ToInt32(), offset, len);
}
public static void AdviseNeedAccess (FileStream file)
{
AdviseNeedAccess (file.Handle.ToInt32());
}
public static void AdviseNeedAccess (UnixStream stream, long offset, long len)
{
AdviseNeedAccess (stream.Handle, offset, len);
}
public static void AdviseNeedAccess (UnixStream stream)
{
AdviseNeedAccess (stream.Handle);
}
public static void AdviseNoAccess (int fd, long offset, long len)
{
int r = Syscall.posix_fadvise (fd, offset, len,
PosixFadviseAdvice.POSIX_FADV_DONTNEED);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static void AdviseNoAccess (int fd)
{
AdviseNoAccess (fd, 0, 0);
}
public static void AdviseNoAccess (FileStream file, long offset, long len)
{
AdviseNoAccess (file.Handle.ToInt32(), offset, len);
}
public static void AdviseNoAccess (FileStream file)
{
AdviseNoAccess (file.Handle.ToInt32());
}
public static void AdviseNoAccess (UnixStream stream, long offset, long len)
{
AdviseNoAccess (stream.Handle, offset, len);
}
public static void AdviseNoAccess (UnixStream stream)
{
AdviseNoAccess (stream.Handle);
}
public static void AdviseOnceAccess (int fd, long offset, long len)
{
int r = Syscall.posix_fadvise (fd, offset, len,
PosixFadviseAdvice.POSIX_FADV_NOREUSE);
UnixMarshal.ThrowExceptionForLastErrorIf (r);
}
public static void AdviseOnceAccess (int fd)
{
AdviseOnceAccess (fd, 0, 0);
}
public static void AdviseOnceAccess (FileStream file, long offset, long len)
{
AdviseOnceAccess (file.Handle.ToInt32(), offset, len);
}
public static void AdviseOnceAccess (FileStream file)
{
AdviseOnceAccess (file.Handle.ToInt32());
}
public static void AdviseOnceAccess (UnixStream stream, long offset, long len)
{
AdviseOnceAccess (stream.Handle, offset, len);
}
public static void AdviseOnceAccess (UnixStream stream)
{
AdviseOnceAccess (stream.Handle);
}
}
}
// vim: noexpandtab
| |
//
// BaseTrackListView.cs
//
// Author:
// Aaron Bockover <abockover@novell.com>
//
// Copyright (C) 2007-2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using Mono.Unix;
using Gtk;
using Hyena.Data;
using Hyena.Data.Gui;
using Hyena.Gui;
using Banshee.Collection.Database;
using Banshee.Sources;
using Banshee.ServiceStack;
using Banshee.MediaEngine;
using Banshee.Playlist;
using Banshee.Gui;
namespace Banshee.Collection.Gui
{
    // List view over TrackInfo models that adds playback integration:
    // playing-track highlighting and selection-follow, row activation to
    // play, context/keyboard actions, and playlist drag-reorder support.
    public class BaseTrackListView : SearchableListView<TrackInfo>
    {
        public BaseTrackListView () : base ()
        {
            RulesHint = true;
            // Rows render faded when the track's Enabled property is false
            // and bold when its IsPlaying property is true.
            RowOpaquePropertyName = "Enabled";
            RowBoldPropertyName = "IsPlaying";
            // Track playback state so the playing row can be re-selected
            // (StartOfStream) and redrawn (StateChange).
            ServiceManager.PlayerEngine.ConnectEvent (
                OnPlayerEvent, PlayerEvent.StartOfStream | PlayerEvent.StateChange);
            ForceDragSourceSet = true;
            IsEverReorderable = true;
            // Activating a row triggers the global PlayTrack action, but
            // only when the active source's model is the one shown here.
            RowActivated += (o, a) => {
                var source = ServiceManager.SourceManager.ActiveSource as ITrackModelSource;
                if (source != null && source.TrackModel == Model) {
                    ServiceManager.Get<InterfaceActionService> ().TrackActions["PlayTrack"].Activate ();
                }
            };
            // A drag that ends with no target while the pointer is still
            // inside the list is most likely a failed reorder attempt on a
            // sorted playlist; explain how to enable manual ordering.
            DragFailed += (o, a) => {
                int x, y;
                GetPointer (out x, out y);
                bool inside_list = (x >= 0 && y >= 0) && (x < Allocation.Width && y < Allocation.Height);
                if (inside_list && a.Result == DragResult.NoTarget) {
                    PlaylistSource playlist = ServiceManager.SourceManager.ActiveSource as PlaylistSource;
                    if (playlist != null && !IsReorderable) {
                        Hyena.Log.Information (
                            Catalog.GetString ("Cannot Reorder While Sorted"),
                            Catalog.GetString ("To put the playlist in manual sort mode, click the currently sorted column header until the sort arrow goes away."),
                            true
                        );
                    }
                }
            };
        }
        // Wraps an existing native GObject handle (standard Gtk# pattern).
        protected BaseTrackListView (IntPtr raw) : base (raw)
        {
        }
        // Search hits are selected, not merely scrolled into view.
        public override bool SelectOnRowFound {
            get { return true; }
        }
        // Drags from this view offer both an in-process model selection and
        // a plain URI list (for drops onto external applications).
        private static TargetEntry [] source_targets = new TargetEntry [] {
            ListViewDragDropTarget.ModelSelection,
            Banshee.Gui.DragDrop.DragDropTarget.UriList
        };
        protected override TargetEntry [] DragDropSourceEntries {
            get { return source_targets; }
        }
        protected override bool OnKeyPressEvent (Gdk.EventKey press)
        {
            // Have o act the same as enter - activate the selection
            if (GtkUtilities.NoImportantModifiersAreSet () && press.Key == Gdk.Key.o && ActivateSelection ()) {
                return true;
            }
            return base.OnKeyPressEvent (press);
        }
        // Keyboard menu key / Shift+F10: show the track context menu.
        protected override bool OnPopupMenu ()
        {
            ServiceManager.Get<InterfaceActionService> ().TrackActions["TrackContextMenuAction"].Activate ();
            return true;
        }
        // Last seen filter query; used to detect query edits on reload.
        private string user_query;
        protected override void OnModelReloaded ()
        {
            base.OnModelReloaded ();
            var model = Model as IFilterable;
            if (model != null && user_query != model.UserQuery) {
                // Make sure selected tracks are visible as the user edits the query.
                CenterOnSelection ();
                user_query = model.UserQuery;
            }
        }
        private void OnPlayerEvent (PlayerEventArgs args)
        {
            if (args.Event == PlayerEvent.StartOfStream) {
                UpdateSelection ();
            } else if (args.Event == PlayerEvent.StateChange) {
                // Repaint so play/pause state changes update row rendering.
                QueueDraw ();
            }
        }
        // Track that was playing the last time UpdateSelection ran.
        private TrackInfo current_track;
        // Moves the (single) selection to the newly playing track, but only
        // when the selection was previously tracking playback; a user-made
        // multi-row or unrelated selection is left alone.
        private void UpdateSelection ()
        {
            TrackInfo old_track = current_track;
            current_track = ServiceManager.PlayerEngine.CurrentTrack;
            var track_model = Model as TrackListModel;
            if (track_model == null) {
                return;
            }
            // Don't disturb an explicit multi-row selection.
            if (Selection.Count > 1) {
                return;
            }
            int old_index = Selection.FirstIndex;
            TrackInfo selected_track = Selection.Count == 1 ? track_model[old_index] : null;
            // If the selected row isn't the previously playing track, the
            // user selected it deliberately; leave it selected.
            if (selected_track != null && !selected_track.TrackEqual (old_track)) {
                return;
            }
            int current_index = track_model.IndexOf (current_track);
            if (current_index == -1) {
                return;
            }
            Selection.Clear (false);
            Selection.QuietSelect (current_index);
            Selection.FocusedIndex = current_index;
            // Only auto-scroll when the old selection was visible (or there
            // was none), so a user browsing elsewhere isn't yanked away.
            if (old_index == -1 || IsRowVisible (old_index)) {
                CenterOn (current_index);
            }
        }
        #region Drag and Drop
        protected override void OnDragSourceSet ()
        {
            base.OnDragSourceSet ();
            Drag.SourceSetIconName (this, "audio-x-generic");
        }
        // Handles drops originating from this same view: reorders the
        // selected tracks within the active playlist.
        protected override bool OnDragDrop (Gdk.DragContext context, int x, int y, uint time_)
        {
            y = TranslateToListY (y);
            if (Gtk.Drag.GetSourceWidget (context) == this) {
                PlaylistSource playlist = ServiceManager.SourceManager.ActiveSource as PlaylistSource;
                if (playlist != null) {
                    //Gtk.Drag.
                    int row = GetModelRowAt (0, y);
                    // Dropping on the lower half of a row inserts after it.
                    if (row != GetModelRowAt (0, y + ChildSize.Height / 2)) {
                        row += 1;
                    }
                    if (playlist.TrackModel.Selection.Contains (row)) {
                        // can't drop within the selection
                        return false;
                    }
                    playlist.ReorderSelectedTracks (row);
                    return true;
                }
            }
            return false;
        }
        // Supplies drag data for external drops: the selected tracks' URIs
        // as a CRLF-separated UTF-8 uri-list.
        protected override void OnDragDataGet (Gdk.DragContext context, SelectionData selection_data, uint info, uint time)
        {
            if (info == Banshee.Gui.DragDrop.DragDropTarget.UriList.Info) {
                ITrackModelSource track_source = ServiceManager.SourceManager.ActiveSource as ITrackModelSource;
                if (track_source != null) {
                    System.Text.StringBuilder sb = new System.Text.StringBuilder ();
                    foreach (TrackInfo track in track_source.TrackModel.SelectedItems) {
                        sb.Append (track.Uri);
                        sb.Append ("\r\n");
                    }
                    byte [] data = System.Text.Encoding.UTF8.GetBytes (sb.ToString ());
                    selection_data.Set (context.ListTargets ()[0], 8, data, data.Length);
                }
            }
        }
        #endregion
    }
}
| |
/*
* Copyright (c) 2006, Clutch, Inc.
* Original Author: Jeff Cesnik
* All rights reserved.
*
* - Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Neither the name of the openmetaverse.org nor the names
* of its contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using OpenSim.Framework;
using System;
using System.Net;
using System.Net.Sockets;
namespace OpenMetaverse
{
/// <summary>
/// Base UDP server
/// </summary>
    public abstract class OpenSimUDPBase
    {
        private static readonly ILog m_log = LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
        /// <summary>
        /// This method is called when an incoming packet is received
        /// </summary>
        /// <param name="buffer">Incoming packet buffer</param>
        public abstract void PacketReceived(UDPPacketBuffer buffer);
        /// <summary>UDP port to bind to in server mode</summary>
        protected int m_udpPort;
        /// <summary>Local IP address to bind to in server mode</summary>
        protected IPAddress m_localBindAddress;
        /// <summary>UDP socket, used in either client or server mode</summary>
        private Socket m_udpSocket;
        /// <summary>Flag to process packets asynchronously or synchronously</summary>
        private bool m_asyncPacketHandling;
        /// <summary>
        /// Are we to use object pool(s) to reduce memory churn when receiving data?
        /// </summary>
        public bool UsePools { get; protected set; }
        /// <summary>
        /// Pool to use for handling data. May be null if UsePools = false;
        /// </summary>
        protected OpenSim.Framework.Pool<UDPPacketBuffer> Pool { get; private set; }
        /// <summary>Returns true if the server is currently listening for inbound packets, otherwise false</summary>
        public bool IsRunningInbound { get; private set; }
        /// <summary>Returns true if the server is currently sending outbound packets, otherwise false</summary>
        /// <remarks>If IsRunningOut = false, then any request to send a packet is simply dropped.</remarks>
        public bool IsRunningOutbound { get; private set; }
        /// <summary>
        /// Number of UDP receives.
        /// </summary>
        public int UdpReceives { get; private set; }
        /// <summary>
        /// Number of UDP sends
        /// </summary>
        public int UdpSends { get; private set; }
        /// <summary>
        /// Number of receives over which to establish a receive time average.
        /// </summary>
        private readonly static int s_receiveTimeSamples = 500;
        /// <summary>
        /// Current number of samples taken to establish a receive time average.
        /// </summary>
        private int m_currentReceiveTimeSamples;
        /// <summary>
        /// Cumulative receive time for the sample so far.
        /// </summary>
        private int m_receiveTicksInCurrentSamplePeriod;
        /// <summary>
        /// The average time taken for each require receive in the last sample.
        /// </summary>
        public float AverageReceiveTicksForLastSamplePeriod { get; private set; }
        /// <summary>
        /// Default constructor
        /// </summary>
        /// <param name="bindAddress">Local IP address to bind the server to</param>
        /// <param name="port">Port to listening for incoming UDP packets on</param>
        /// /// <param name="usePool">Are we to use an object pool to get objects for handing inbound data?</param>
        public OpenSimUDPBase(IPAddress bindAddress, int port)
        {
            // Note: the socket itself is not created here; see StartInbound().
            m_localBindAddress = bindAddress;
            m_udpPort = port;
        }
        /// <summary>
        /// Start inbound UDP packet handling.
        /// </summary>
        /// <param name="recvBufferSize">The size of the receive buffer for
        /// the UDP socket. This value is passed up to the operating system
        /// and used in the system networking stack. Use zero to leave this
        /// value as the default</param>
        /// <param name="asyncPacketHandling">Set this to true to start
        /// receiving more packets while current packet handler callbacks are
        /// still running. Setting this to false will complete each packet
        /// callback before the next packet is processed</param>
        /// <remarks>This method will attempt to set the SIO_UDP_CONNRESET flag
        /// on the socket to get newer versions of Windows to behave in a sane
        /// manner (not throwing an exception when the remote side resets the
        /// connection). This call is ignored on Mono where the flag is not
        /// necessary</remarks>
        public void StartInbound(int recvBufferSize, bool asyncPacketHandling)
        {
            m_asyncPacketHandling = asyncPacketHandling;
            // Idempotent: a second call while already running is a no-op.
            if (!IsRunningInbound)
            {
                m_log.DebugFormat("[UDPBASE]: Starting inbound UDP loop");
                // Windows ioctl code for SIO_UDP_CONNRESET (see remarks above).
                const int SIO_UDP_CONNRESET = -1744830452;
                IPEndPoint ipep = new IPEndPoint(m_localBindAddress, m_udpPort);
                m_log.DebugFormat(
                    "[UDPBASE]: Binding UDP listener using internal IP address config {0}:{1}",
                    ipep.Address, ipep.Port);
                m_udpSocket = new Socket(
                    AddressFamily.InterNetwork,
                    SocketType.Dgram,
                    ProtocolType.Udp);
                try
                {
                    // Raise the default TTL so packets survive more hops;
                    // best-effort only.
                    if(m_udpSocket.Ttl < 128)
                    {
                        m_udpSocket.Ttl = 128;
                    }
                }
                catch(SocketException)
                {
                    m_log.Debug("[UDPBASE]: Failed to increase default TTL");
                }
                try
                {
                    // This udp socket flag is not supported under mono,
                    // so we'll catch the exception and continue
                    m_udpSocket.IOControl(SIO_UDP_CONNRESET, new byte[] { 0 }, null);
                    m_log.Debug("[UDPBASE]: SIO_UDP_CONNRESET flag set");
                }
                catch (SocketException)
                {
                    m_log.Debug("[UDPBASE]: SIO_UDP_CONNRESET flag not supported on this platform, ignoring");
                }
                if (recvBufferSize != 0)
                    m_udpSocket.ReceiveBufferSize = recvBufferSize;
                m_udpSocket.Bind(ipep);
                IsRunningInbound = true;
                // kick off an async receive. The Start() method will return, the
                // actual receives will occur asynchronously and will be caught in
                // AsyncEndRecieve().
                AsyncBeginReceive();
            }
        }
        /// <summary>
        /// Start outbound UDP packet handling.
        /// </summary>
        public void StartOutbound()
        {
            m_log.DebugFormat("[UDPBASE]: Starting outbound UDP loop");
            IsRunningOutbound = true;
        }
        /// <summary>
        /// Stop inbound packet handling and close the socket. Safe to call
        /// when the listener is not running.
        /// </summary>
        public void StopInbound()
        {
            if (IsRunningInbound)
            {
                m_log.DebugFormat("[UDPBASE]: Stopping inbound UDP loop");
                // Clearing the flag first lets in-flight receive callbacks
                // bail out cleanly in AsyncEndReceive.
                IsRunningInbound = false;
                m_udpSocket.Close();
            }
        }
        /// <summary>
        /// Stop outbound packet handling; subsequent send requests are
        /// expected to be dropped by callers that check IsRunningOutbound.
        /// </summary>
        public void StopOutbound()
        {
            m_log.DebugFormat("[UDPBASE]: Stopping outbound UDP loop");
            IsRunningOutbound = false;
        }
        /// <summary>
        /// Enable buffer pooling. Returns true if pooling was newly enabled,
        /// false if it was already on.
        /// </summary>
        protected virtual bool EnablePools()
        {
            if (!UsePools)
            {
                Pool = new Pool<UDPPacketBuffer>(() => new UDPPacketBuffer(), 500);
                UsePools = true;
                return true;
            }
            return false;
        }
        /// <summary>
        /// Disable buffer pooling. Returns true if pooling was newly
        /// disabled, false if it was already off.
        /// </summary>
        protected virtual bool DisablePools()
        {
            if (UsePools)
            {
                UsePools = false;
                // We won't null out the pool to avoid a race condition with code that may be in the middle of using it.
                return true;
            }
            return false;
        }
        /// <summary>
        /// Post a single asynchronous receive on the socket; completion is
        /// handled by AsyncEndReceive. If Windows reports a connection
        /// reset, retry posting the receive until it sticks.
        /// </summary>
        private void AsyncBeginReceive()
        {
            UDPPacketBuffer buf;
            // FIXME: Disabled for now as this causes issues with reused packet objects interfering with each other
            // on Windows with m_asyncPacketHandling = true, though this has not been seen on Linux.
            // Possibly some unexpected issue with fetching UDP data concurrently with multiple threads.  Requires more investigation.
            // if (UsePools)
            //     buf = Pool.GetObject();
            // else
            buf = new UDPPacketBuffer();
            if (IsRunningInbound)
            {
                try
                {
                    // kick off an async read
                    m_udpSocket.BeginReceiveFrom(
                        //wrappedBuffer.Instance.Data,
                        buf.Data,
                        0,
                        UDPPacketBuffer.BUFFER_SIZE,
                        SocketFlags.None,
                        ref buf.RemoteEndPoint,
                        AsyncEndReceive,
                        //wrappedBuffer);
                        buf);
                }
                catch (SocketException e)
                {
                    if (e.SocketErrorCode == SocketError.ConnectionReset)
                    {
                        m_log.Warn("[UDPBASE]: SIO_UDP_CONNRESET was ignored, attempting to salvage the UDP listener on port " + m_udpPort);
                        bool salvaged = false;
                        while (!salvaged)
                        {
                            try
                            {
                                m_udpSocket.BeginReceiveFrom(
                                    //wrappedBuffer.Instance.Data,
                                    buf.Data,
                                    0,
                                    UDPPacketBuffer.BUFFER_SIZE,
                                    SocketFlags.None,
                                    ref buf.RemoteEndPoint,
                                    AsyncEndReceive,
                                    //wrappedBuffer);
                                    buf);
                                salvaged = true;
                            }
                            catch (SocketException) { }
                            // Socket closed under us: the listener is gone,
                            // stop retrying.
                            catch (ObjectDisposedException) { return; }
                        }
                        m_log.Warn("[UDPBASE]: Salvaged the UDP listener on port " + m_udpPort);
                    }
                }
                catch (ObjectDisposedException e)
                {
                    m_log.Error(
                        string.Format("[UDPBASE]: Error processing UDP begin receive {0}.  Exception  ", UdpReceives), e);
                }
                catch (Exception e)
                {
                    m_log.Error(
                        string.Format("[UDPBASE]: Error processing UDP begin receive {0}.  Exception  ", UdpReceives), e);
                }
            }
        }
        /// <summary>
        /// Completion callback for AsyncBeginReceive: finishes the read,
        /// dispatches the packet to PacketReceived, updates receive-time
        /// stats, and re-posts the next receive (before the handler in
        /// async mode, after it in sync mode).
        /// </summary>
        private void AsyncEndReceive(IAsyncResult iar)
        {
            // Asynchronous receive operations will complete here through the call
            // to AsyncBeginReceive
            if (IsRunningInbound)
            {
                UdpReceives++;
                // Asynchronous mode will start another receive before the
                // callback for this packet is even fired. Very parallel :-)
                if (m_asyncPacketHandling)
                    AsyncBeginReceive();
                try
                {
                    // get the buffer that was created in AsyncBeginReceive
                    // this is the received data
                    UDPPacketBuffer buffer = (UDPPacketBuffer)iar.AsyncState;
                    int startTick = Environment.TickCount;
                    // get the length of data actually read from the socket, store it with the
                    // buffer
                    buffer.DataLength = m_udpSocket.EndReceiveFrom(iar, ref buffer.RemoteEndPoint);
                    // call the abstract method PacketReceived(), passing the buffer that
                    // has just been filled from the socket read.
                    PacketReceived(buffer);
                    // If more than one thread can be calling AsyncEndReceive() at once (e.g. if m_asyncPacketHandler)
                    // then a particular stat may be inaccurate due to a race condition.  We won't worry about this
                    // since this should be rare and won't cause a runtime problem.
                    if (m_currentReceiveTimeSamples >= s_receiveTimeSamples)
                    {
                        AverageReceiveTicksForLastSamplePeriod
                            = (float)m_receiveTicksInCurrentSamplePeriod / s_receiveTimeSamples;
                        m_receiveTicksInCurrentSamplePeriod = 0;
                        m_currentReceiveTimeSamples = 0;
                    }
                    else
                    {
                        m_receiveTicksInCurrentSamplePeriod += Environment.TickCount - startTick;
                        m_currentReceiveTimeSamples++;
                    }
                }
                catch (SocketException se)
                {
                    m_log.Error(
                        string.Format(
                            "[UDPBASE]: Error processing UDP end receive {0}, socket error code {1}.  Exception  ",
                            UdpReceives, se.ErrorCode),
                        se);
                }
                catch (ObjectDisposedException e)
                {
                    m_log.Error(
                        string.Format("[UDPBASE]: Error processing UDP end receive {0}.  Exception  ", UdpReceives), e);
                }
                catch (Exception e)
                {
                    m_log.Error(
                        string.Format("[UDPBASE]: Error processing UDP end receive {0}.  Exception  ", UdpReceives), e);
                }
                finally
                {
                    // if (UsePools)
                    //     Pool.ReturnObject(buffer);
                    // Synchronous mode waits until the packet callback completes
                    // before starting the receive to fetch another packet
                    if (!m_asyncPacketHandling)
                        AsyncBeginReceive();
                }
            }
        }
        /// <summary>
        /// Begin an asynchronous send of the given buffer to its
        /// RemoteEndPoint. Socket errors and disposal are deliberately
        /// swallowed (the socket may be closed concurrently).
        /// </summary>
        public void AsyncBeginSend(UDPPacketBuffer buf)
        {
            // if (IsRunningOutbound)
            // {
                try
                {
                    m_udpSocket.BeginSendTo(
                        buf.Data,
                        0,
                        buf.DataLength,
                        SocketFlags.None,
                        buf.RemoteEndPoint,
                        AsyncEndSend,
                        buf);
                }
                catch (SocketException) { }
                catch (ObjectDisposedException) { }
            // }
        }
        /// <summary>
        /// Completion callback for AsyncBeginSend: finishes the send and
        /// bumps the send counter; errors are deliberately swallowed.
        /// </summary>
        void AsyncEndSend(IAsyncResult result)
        {
            try
            {
                // UDPPacketBuffer buf = (UDPPacketBuffer)result.AsyncState;
                m_udpSocket.EndSendTo(result);
                UdpSends++;
            }
            catch (SocketException) { }
            catch (ObjectDisposedException) { }
        }
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Collections.Tests
{
public class ReadOnlyCollectionBaseTests
{
private static MyReadOnlyCollectionBase CreateCollection()
{
var fooArray = new Foo[100];
for (int i = 0; i < 100; i++)
{
fooArray[i] = new Foo(i, i.ToString());
}
return new MyReadOnlyCollectionBase(fooArray);
}
[Fact]
public static void TestSyncRoot()
{
MyReadOnlyCollectionBase collection = CreateCollection();
Assert.False(collection.SyncRoot is ArrayList);
Assert.Same(collection.SyncRoot, collection.SyncRoot);
}
[Fact]
public static void TestAddRange_Count()
{
MyReadOnlyCollectionBase collection = CreateCollection();
Assert.Equal(100, collection.Count);
}
[Fact]
public static void TestCopyTo_ZeroIndex()
{
MyReadOnlyCollectionBase collection = CreateCollection();
var copyArray = new Foo[100];
collection.CopyTo(copyArray, 0);
Assert.Equal(100, copyArray.Length);
for (int i = 0; i < 100; i++)
{
Assert.Equal(i, copyArray[i].IntValue);
Assert.Equal(i.ToString(), copyArray[i].StringValue);
}
}
[Fact]
public static void TestCopyTo_NonZeroIndex()
{
MyReadOnlyCollectionBase collection = CreateCollection();
var copyArray = new Foo[200];
collection.CopyTo(copyArray, 100);
Assert.Equal(200, copyArray.Length);
for (int i = 0; i < 100; i++)
{
Assert.Equal(i, copyArray[100 + i].IntValue);
Assert.Equal(i.ToString(), copyArray[100 + i].StringValue);
}
}
[Fact]
public static void TestCopyTo_Invalid()
{
MyReadOnlyCollectionBase collection = CreateCollection();
Assert.Throws<ArgumentNullException>(() => collection.CopyTo(null, 0)); // Array is null
Assert.Throws<ArgumentException>(() => collection.CopyTo(new Foo[100], 50)); // Index + collection.Count > array.Length
Assert.Throws<ArgumentOutOfRangeException>(() => collection.CopyTo(new Foo[100], -1)); // Index < 0
}
[Fact]
public static void TestGetEnumerator()
{
MyReadOnlyCollectionBase collection = CreateCollection();
IEnumerator enumerator = collection.GetEnumerator();
// Calling current should throw when the enumerator has not started enumerating
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
int counter = 0;
while (enumerator.MoveNext())
{
Foo current = (Foo)enumerator.Current;
Assert.Equal(counter, current.IntValue);
Assert.Equal(counter.ToString(), current.StringValue);
counter++;
}
Assert.Equal(collection.Count, counter);
// Calling current should throw when the enumerator has finished enumerating
Assert.Throws<InvalidOperationException>(() => (Foo)enumerator.Current);
// Calling current should throw when the enumerator is reset
enumerator.Reset();
Assert.Throws<InvalidOperationException>(() => (Foo)enumerator.Current);
}
[Fact]
public static void TestIsSynchronized()
{
MyReadOnlyCollectionBase collection = CreateCollection();
Assert.False(((ICollection)collection).IsSynchronized);
}
[Fact]
public static void TestIListMethods()
{
MyReadOnlyCollectionBase collection = CreateCollection();
for (int i = 0; i < 100; i++)
{
Assert.Equal(i, collection[i].IntValue);
Assert.Equal(i.ToString(), collection[i].StringValue);
Assert.Equal(i, collection.IndexOf(new Foo(i, i.ToString())));
Assert.True(collection.Contains(new Foo(i, i.ToString())));
}
}
[Fact]
public static void TestIListProperties()
{
MyReadOnlyCollectionBase collection = CreateCollection();
Assert.True(collection.IsFixedSize);
Assert.True(collection.IsReadOnly);
}
[Fact]
public static void TestVirtualMethods()
{
VirtualTestReadOnlyCollection collectionBase = new VirtualTestReadOnlyCollection();
Assert.Equal(collectionBase.Count, int.MinValue);
Assert.Null(collectionBase.GetEnumerator());
}
// ReadOnlyCollectionBase is provided to be used as the base class for strongly typed collections. Lets use one of our own here.
// This collection only allows the type Foo
// Strongly typed ReadOnlyCollectionBase subclass used by the tests above;
// it only ever stores Foo instances.
private class MyReadOnlyCollectionBase : ReadOnlyCollectionBase
{
    // Seed the protected InnerList up front; the collection is read-only afterwards.
    public MyReadOnlyCollectionBase(Foo[] values)
    {
        InnerList.AddRange(values);
    }

    // Typed indexer over the untyped InnerList.
    public Foo this[int indx] => (Foo)InnerList[indx];

    // Delegates to the base class's explicit ICollection.CopyTo implementation.
    public void CopyTo(Array array, int index) => ((ICollection)this).CopyTo(array, index);

    // Exposes the base class's explicit ICollection.SyncRoot implementation.
    public virtual object SyncRoot => ((ICollection)this).SyncRoot;

    public int IndexOf(Foo f) => ((IList)InnerList).IndexOf(f);

    public bool Contains(Foo f) => ((IList)InnerList).Contains(f);

    public bool IsFixedSize => true;

    public bool IsReadOnly => true;
}
// Overrides every virtual member of ReadOnlyCollectionBase with sentinel
// results so tests can confirm the overrides are honored verbatim.
private class VirtualTestReadOnlyCollection : ReadOnlyCollectionBase
{
    // Sentinel: no real collection would ever report a negative count.
    public override int Count => int.MinValue;

    // Sentinel: a null enumerator signals the override was used.
    public override IEnumerator GetEnumerator() => null;
}
// Simple value-like element type for the collection tests: two Foo instances
// are equal when both their int and string payloads match.
private class Foo
{
    public Foo()
    {
    }

    public Foo(int intValue, string stringValue)
    {
        IntValue = intValue;
        StringValue = stringValue;
    }

    public int IntValue { get; set; }

    public string StringValue { get; set; }

    public override bool Equals(object obj)
    {
        Foo other = obj as Foo;
        if (other == null)
            return false;
        return other.IntValue == IntValue && other.StringValue == StringValue;
    }

    // Hashing only the int keeps equal objects hashing equally, which is all
    // the contract requires.
    public override int GetHashCode() => IntValue;
}
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Caching;
using osu.Framework.Extensions.EnumExtensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.Sprites;
using osu.Framework.Input.Events;
using osu.Framework.Layout;
using osu.Game.Graphics;
using osu.Game.Graphics.Containers;
using osu.Game.Graphics.Sprites;
using osu.Game.Graphics.UserInterface;
using osuTK;
using osuTK.Graphics;
namespace osu.Game.Overlays
{
/// <summary>
/// A collapsible, fixed-width group of settings controls with a titled header
/// and a toggle button. Implements <see cref="IExpandable"/> so a parent
/// <see cref="IExpandingContainer"/> can expand/contract all groups at once.
/// </summary>
public class SettingsToolboxGroup : Container, IExpandable
{
public const int CONTAINER_WIDTH = 270;
private const float transition_duration = 250;
private const int border_thickness = 2;
private const int header_height = 30;
private const int corner_radius = 5;
private const float fade_duration = 800;
private const float inactive_alpha = 0.5f;
// Invalidated on draw-size changes (see OnInvalidate) so Update() can
// re-evaluate whether the header text still fits.
private readonly Cached headerTextVisibilityCache = new Cached();
private readonly FillFlowContainer content;
private readonly IconButton button;
/// <summary>
/// Whether this group's content is currently expanded (visible).
/// </summary>
public BindableBool Expanded { get; } = new BindableBool(true);
private Color4 expandedColour;
private readonly OsuSpriteText headerText;
/// <summary>
/// Create a new instance.
/// </summary>
/// <param name="title">The title to be displayed in the header of this group.</param>
public SettingsToolboxGroup(string title)
{
AutoSizeAxes = Axes.Y;
Width = CONTAINER_WIDTH;
Masking = true;
CornerRadius = corner_radius;
BorderColour = Color4.Black;
BorderThickness = border_thickness;
InternalChildren = new Drawable[]
{
// Semi-transparent black backdrop behind header and content.
new Box
{
RelativeSizeAxes = Axes.Both,
Colour = Color4.Black,
Alpha = 0.5f,
},
new FillFlowContainer
{
Direction = FillDirection.Vertical,
RelativeSizeAxes = Axes.X,
AutoSizeAxes = Axes.Y,
Children = new Drawable[]
{
new Container
{
Name = @"Header",
Origin = Anchor.TopCentre,
Anchor = Anchor.TopCentre,
RelativeSizeAxes = Axes.X,
Height = header_height,
Children = new Drawable[]
{
headerText = new OsuSpriteText
{
Origin = Anchor.CentreLeft,
Anchor = Anchor.CentreLeft,
Text = title.ToUpperInvariant(),
Font = OsuFont.GetFont(weight: FontWeight.Bold, size: 17),
Padding = new MarginPadding { Left = 10, Right = 30 },
},
// Toggles the Expanded bindable; colour reflects state (see LoadComplete).
button = new IconButton
{
Origin = Anchor.Centre,
Anchor = Anchor.CentreRight,
Position = new Vector2(-15, 0),
Icon = FontAwesome.Solid.Bars,
Scale = new Vector2(0.75f),
Action = () => Expanded.Toggle(),
},
}
},
// Content collapses by animating its height to zero (see LoadComplete).
content = new FillFlowContainer
{
Name = @"Content",
Origin = Anchor.TopCentre,
Anchor = Anchor.TopCentre,
Direction = FillDirection.Vertical,
RelativeSizeAxes = Axes.X,
AutoSizeDuration = transition_duration,
AutoSizeEasing = Easing.OutQuint,
AutoSizeAxes = Axes.Y,
Padding = new MarginPadding(15),
Spacing = new Vector2(0, 15),
}
}
},
};
}
// Mark the header-visibility check stale whenever our draw size changes,
// without recomputing it synchronously here.
protected override bool OnInvalidate(Invalidation invalidation, InvalidationSource source)
{
if (invalidation.HasFlagFast(Invalidation.DrawSize))
headerTextVisibilityCache.Invalidate();
return base.OnInvalidate(invalidation, source);
}
protected override void Update()
{
base.Update();
if (!headerTextVisibilityCache.IsValid)
// These toolbox groups may be contracted to only show icons.
// For now, let's hide the header to avoid text truncation weirdness in such cases.
headerText.FadeTo(headerText.DrawWidth < DrawWidth ? 1 : 0, 150, Easing.OutQuint);
}
// Optional parent that expands/contracts all contained groups together.
[Resolved(canBeNull: true)]
private IExpandingContainer expandingContainer { get; set; }
// Tracks whether our current expanded state was forced by the parent
// container, so we only auto-collapse what we auto-expanded.
private bool expandedByContainer;
protected override void LoadComplete()
{
base.LoadComplete();
// Follow the parent container's expanded state, but never collapse a
// group the user expanded manually.
expandingContainer?.Expanded.BindValueChanged(containerExpanded =>
{
if (containerExpanded.NewValue && !Expanded.Value)
{
Expanded.Value = true;
expandedByContainer = true;
}
else if (!containerExpanded.NewValue && expandedByContainer)
{
Expanded.Value = false;
expandedByContainer = false;
}
updateActiveState();
}, true);
// Animate content height on expand/collapse and tint the toggle button.
Expanded.BindValueChanged(v =>
{
content.ClearTransforms();
if (v.NewValue)
content.AutoSizeAxes = Axes.Y;
else
{
content.AutoSizeAxes = Axes.None;
content.ResizeHeightTo(0, transition_duration, Easing.OutQuint);
}
button.FadeColour(Expanded.Value ? expandedColour : Color4.White, 200, Easing.InOutQuint);
}, true);
// NOTE(review): 600ms delay presumably keeps the group fully visible for a
// moment after load before fading to the inactive alpha — confirm intent.
this.Delay(600).Schedule(updateActiveState);
}
// NOTE(review): returns false (unhandled) and deliberately skips
// base.OnHover, unlike OnHoverLost below which calls base — confirm this
// asymmetry is intended.
protected override bool OnHover(HoverEvent e)
{
updateActiveState();
return false;
}
protected override void OnHoverLost(HoverLostEvent e)
{
updateActiveState();
base.OnHoverLost(e);
}
[BackgroundDependencyLoader]
private void load(OsuColour colours)
{
expandedColour = colours.Yellow;
}
// Fully opaque while hovered or while the parent container is expanded;
// otherwise fade to the dimmed inactive alpha.
private void updateActiveState()
{
this.FadeTo(IsHovered || expandingContainer?.Expanded.Value == true ? 1 : inactive_alpha, fade_duration, Easing.OutQuint);
}
// Children added to this group land in the auto-sized content flow rather
// than directly in this container.
protected override Container<Drawable> Content => content;
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// In the desktop version of the framework, this file is generated from ProviderBase\DbConnectionHelper.cs
// #line 1 "e:\\fxdata\\src\\ndp\\fx\\src\\data\\system\\data\\providerbase\\dbconnectionhelper.cs"
using System.Data.Common;
using System.Data.ProviderBase;
using System.Diagnostics;
using System.Threading;
namespace System.Data.SqlClient
{
// Connection-state plumbing for SqlConnection. State is represented by
// swapping the _innerConnection object between sentinel instances
// (never-opened, previously-opened, busy, connecting, ...); some swaps are
// done atomically via Interlocked.CompareExchange.
public sealed partial class SqlConnection : DbConnection
{
// Single factory shared by every SqlConnection instance.
private static readonly DbConnectionFactory s_connectionFactory = SqlConnectionFactory.SingletonInstance;
private DbConnectionOptions _userConnectionOptions;
private DbConnectionPoolGroup _poolGroup;
// Current state object; replaced as the connection opens/closes.
private DbConnectionInternal _innerConnection;
// Bumped (unchecked) on every open -> closed transition; see SetInnerConnectionEvent.
private int _closeCount;
public SqlConnection() : base()
{
// No finalizable state is held directly on this object.
GC.SuppressFinalize(this);
// Start in the "closed, never opened" sentinel state.
_innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
}
// Copies connection options/pool group from another connection, preserving
// the never-opened sentinel when the source was never opened.
private void CopyFrom(SqlConnection connection)
{
ADP.CheckArgumentNull(connection, "connection");
_userConnectionOptions = connection.UserConnectionOptions;
_poolGroup = connection.PoolGroup;
if (DbConnectionClosedNeverOpened.SingletonInstance == connection._innerConnection)
{
_innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
}
else
{
_innerConnection = DbConnectionClosedPreviouslyOpened.SingletonInstance;
}
}
internal int CloseCount
{
get
{
return _closeCount;
}
}
internal DbConnectionFactory ConnectionFactory
{
get
{
return s_connectionFactory;
}
}
// Effective (parsed) options come from the pool group; null when no pool
// group has been established yet.
internal DbConnectionOptions ConnectionOptions
{
get
{
System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = PoolGroup;
return ((null != poolGroup) ? poolGroup.ConnectionOptions : null);
}
}
// Returns the user's connection string, with the password stripped when the
// current inner-connection state requires hiding it.
private string ConnectionString_Get()
{
bool hidePassword = InnerConnection.ShouldHidePassword;
DbConnectionOptions connectionOptions = UserConnectionOptions;
return ((null != connectionOptions) ? connectionOptions.UsersConnectionString(hidePassword) : "");
}
private void ConnectionString_Set(string value)
{
DbConnectionPoolKey key = new DbConnectionPoolKey(value);
ConnectionString_Set(key);
}
// Setting the connection string is only permitted while the inner
// connection allows it (i.e. closed); the state is swapped through a "busy"
// sentinel to guard against concurrent setters, and resets to never-opened.
private void ConnectionString_Set(DbConnectionPoolKey key)
{
DbConnectionOptions connectionOptions = null;
System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = ConnectionFactory.GetConnectionPoolGroup(key, null, ref connectionOptions);
DbConnectionInternal connectionInternal = InnerConnection;
bool flag = connectionInternal.AllowSetConnectionString;
if (flag)
{
flag = SetInnerConnectionFrom(DbConnectionClosedBusy.SingletonInstance, connectionInternal);
if (flag)
{
_userConnectionOptions = connectionOptions;
_poolGroup = poolGroup;
_innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
}
}
if (!flag)
{
// Either disallowed by state or lost the race with another state change.
throw ADP.OpenConnectionPropertySet(ADP.ConnectionString, connectionInternal.State);
}
}
internal DbConnectionInternal InnerConnection
{
get
{
return _innerConnection;
}
}
internal System.Data.ProviderBase.DbConnectionPoolGroup PoolGroup
{
get
{
return _poolGroup;
}
set
{
Debug.Assert(null != value, "null poolGroup");
_poolGroup = value;
}
}
internal DbConnectionOptions UserConnectionOptions
{
get
{
return _userConnectionOptions;
}
}
// Aborts an open connection: atomically marks it previously-opened and
// dooms the old inner connection so it cannot be returned to a pool.
internal void Abort(Exception e)
{
DbConnectionInternal innerConnection = _innerConnection;
if (ConnectionState.Open == innerConnection.State)
{
Interlocked.CompareExchange(ref _innerConnection, DbConnectionClosedPreviouslyOpened.SingletonInstance, innerConnection);
innerConnection.DoomThisConnection();
}
}
internal void AddWeakReference(object value, int tag)
{
InnerConnection.AddWeakReference(value, tag);
}
// Creates a provider command already wired to this connection.
override protected DbCommand CreateDbCommand()
{
DbCommand command = null;
DbProviderFactory providerFactory = ConnectionFactory.ProviderFactory;
command = providerFactory.CreateCommand();
command.Connection = this;
return command;
}
override protected void Dispose(bool disposing)
{
if (disposing)
{
// Drop user options/pool group before closing the inner connection.
_userConnectionOptions = null;
_poolGroup = null;
Close();
}
// DisposeMe is implemented in the provider-specific partial (if at all).
DisposeMe(disposing);
base.Dispose(disposing);
}
partial void RepairInnerConnection();
internal void NotifyWeakReference(int message)
{
InnerConnection.NotifyWeakReference(message);
}
// Called mid-connect: verifies a non-empty connection string is available.
internal void PermissionDemand()
{
Debug.Assert(DbConnectionClosedConnecting.SingletonInstance == _innerConnection, "not connecting");
System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = PoolGroup;
DbConnectionOptions connectionOptions = ((null != poolGroup) ? poolGroup.ConnectionOptions : null);
if ((null == connectionOptions) || connectionOptions.IsEmpty)
{
throw ADP.NoConnectionString();
}
DbConnectionOptions userConnectionOptions = UserConnectionOptions;
Debug.Assert(null != userConnectionOptions, "null UserConnectionOptions");
}
internal void RemoveWeakReference(object value)
{
InnerConnection.RemoveWeakReference(value);
}
// Non-atomic swap that also raises StateChange events for open<->closed
// transitions and bumps the close count on open -> closed.
internal void SetInnerConnectionEvent(DbConnectionInternal to)
{
Debug.Assert(null != _innerConnection, "null InnerConnection");
Debug.Assert(null != to, "to null InnerConnection");
// Only the Open bit matters for the transition comparison below.
ConnectionState originalState = _innerConnection.State & ConnectionState.Open;
ConnectionState currentState = to.State & ConnectionState.Open;
if ((originalState != currentState) && (ConnectionState.Closed == currentState))
{
// unchecked: wrap-around is acceptable for this counter.
unchecked { _closeCount++; }
}
_innerConnection = to;
if (ConnectionState.Closed == originalState && ConnectionState.Open == currentState)
{
OnStateChange(DbConnectionInternal.StateChangeOpen);
}
else if (ConnectionState.Open == originalState && ConnectionState.Closed == currentState)
{
OnStateChange(DbConnectionInternal.StateChangeClosed);
}
else
{
Debug.Assert(false, "unexpected state switch");
if (originalState != currentState)
{
OnStateChange(new StateChangeEventArgs(originalState, currentState));
}
}
}
// Atomic compare-and-swap of the inner connection; returns true only when
// the current value was 'from' and has been replaced by 'to'.
internal bool SetInnerConnectionFrom(DbConnectionInternal to, DbConnectionInternal from)
{
Debug.Assert(null != _innerConnection, "null InnerConnection");
Debug.Assert(null != from, "from null InnerConnection");
Debug.Assert(null != to, "to null InnerConnection");
bool result = (from == Interlocked.CompareExchange<DbConnectionInternal>(ref _innerConnection, to, from));
return result;
}
// Unconditional swap; callers must already hold exclusivity over the state.
internal void SetInnerConnectionTo(DbConnectionInternal to)
{
Debug.Assert(null != _innerConnection, "null InnerConnection");
Debug.Assert(null != to, "to null InnerConnection");
_innerConnection = to;
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System.Collections.Generic;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.OptionalModules.Scripting.Minimodule.WorldX;
namespace OpenSim.Region.OptionalModules.Scripting.Minimodule
{
// MRM (Minimodule) facade over a simulator Scene. Events follow a lazy
// subscription pattern: the underlying EventManager hook is attached only
// while at least one external listener is registered, so idle modules don't
// pay for scene event traffic.
public class World : System.MarshalByRefObject, IWorld, IWorldAudio
{
private readonly Scene m_internalScene;
// Credential threaded through to every wrapper object we hand out.
private readonly ISecurityCredential m_security;
private readonly Heightmap m_heights;
private readonly ObjectAccessor m_objs;
public World(Scene internalScene, ISecurityCredential securityCredential)
{
m_security = securityCredential;
m_internalScene = internalScene;
m_heights = new Heightmap(m_internalScene);
m_objs = new ObjectAccessor(m_internalScene, securityCredential);
}
#region Events
#region OnNewUser
private event OnNewUserDelegate _OnNewUser;
// True while we are subscribed to the scene's OnNewPresence event.
private bool _OnNewUserActive;
public event OnNewUserDelegate OnNewUser
{
add
{
// Attach the scene hook on the first listener only.
if (!_OnNewUserActive)
{
_OnNewUserActive = true;
m_internalScene.EventManager.OnNewPresence += EventManager_OnNewPresence;
}
_OnNewUser += value;
}
remove
{
_OnNewUser -= value;
// Detach the scene hook once the last listener is gone.
if (_OnNewUser == null)
{
_OnNewUserActive = false;
m_internalScene.EventManager.OnNewPresence -= EventManager_OnNewPresence;
}
}
}
// Wraps the new presence in an SPAvatar and forwards to module listeners.
void EventManager_OnNewPresence(ScenePresence presence)
{
if (_OnNewUser != null)
{
NewUserEventArgs e = new NewUserEventArgs();
e.Avatar = new SPAvatar(m_internalScene, presence.UUID, m_security);
_OnNewUser(this, e);
}
}
#endregion
#region OnChat
private event OnChatDelegate _OnChat;
// True while we are subscribed to the scene's chat events.
private bool _OnChatActive;
public IWorldAudio Audio
{
get { return this; }
}
public event OnChatDelegate OnChat
{
add
{
// One subscription covers both client- and world-originated chat.
if (!_OnChatActive)
{
_OnChatActive = true;
m_internalScene.EventManager.OnChatFromClient += EventManager_OnChatFromClient;
m_internalScene.EventManager.OnChatFromWorld += EventManager_OnChatFromWorld;
}
_OnChat += value;
}
remove
{
_OnChat -= value;
if (_OnChat == null)
{
_OnChatActive = false;
m_internalScene.EventManager.OnChatFromClient -= EventManager_OnChatFromClient;
m_internalScene.EventManager.OnChatFromWorld -= EventManager_OnChatFromWorld;
}
}
}
void EventManager_OnChatFromWorld(object sender, OSChatMessage chat)
{
if (_OnChat != null)
{
HandleChatPacket(chat);
return;
}
}
// Translates an OSChatMessage into a ChatEventArgs, distinguishing chat
// from a scene object vs. chat from an avatar by which sender field is set.
private void HandleChatPacket(OSChatMessage chat)
{
if (string.IsNullOrEmpty(chat.Message))
return;
// Object?
if (chat.Sender == null && chat.SenderObject != null)
{
ChatEventArgs e = new ChatEventArgs();
e.Sender = new SOPObject(m_internalScene, ((SceneObjectPart) chat.SenderObject).LocalId, m_security);
e.Text = chat.Message;
e.Channel = chat.Channel;
_OnChat(this, e);
return;
}
// Avatar?
if (chat.Sender != null && chat.SenderObject == null)
{
ChatEventArgs e = new ChatEventArgs();
e.Sender = new SPAvatar(m_internalScene, chat.SenderUUID, m_security);
e.Text = chat.Message;
e.Channel = chat.Channel;
_OnChat(this, e);
return;
}
// Skip if other
}
void EventManager_OnChatFromClient(object sender, OSChatMessage chat)
{
if (_OnChat != null)
{
HandleChatPacket(chat);
return;
}
}
#endregion
#endregion
public IObjectAccessor Objects
{
get { return m_objs; }
}
// Snapshot of all land parcels, wrapped as LOParcel accessors.
public IParcel[] Parcels
{
get
{
List<ILandObject> m_los = m_internalScene.LandChannel.AllParcels();
List<IParcel> m_parcels = new List<IParcel>(m_los.Count);
foreach (ILandObject landObject in m_los)
{
m_parcels.Add(new LOParcel(m_internalScene, landObject.LandData.LocalID));
}
return m_parcels.ToArray();
}
}
// Snapshot of all scene presences, wrapped as SPAvatar accessors.
public IAvatar[] Avatars
{
get
{
List<EntityBase> ents = m_internalScene.Entities.GetAllByType<ScenePresence>();
IAvatar[] rets = new IAvatar[ents.Count];
for (int i = 0; i < ents.Count; i++)
{
EntityBase ent = ents[i];
rets[i] = new SPAvatar(m_internalScene, ent.UUID, m_security);
}
return rets;
}
}
public IHeightmap Terrain
{
get { return m_heights; }
}
#region Implementation of IWorldAudio
// Triggers a sound at a position via the scene's sound module, if present;
// silently does nothing when no ISoundModule is registered.
public void PlaySound(UUID audio, Vector3 position, double volume)
{
ISoundModule soundModule = m_internalScene.RequestModuleInterface<ISoundModule>();
if (soundModule != null)
{
soundModule.TriggerSound(audio, UUID.Zero, UUID.Zero, UUID.Zero, volume, position,
m_internalScene.RegionInfo.RegionHandle, 0);
}
}
// Overload with full (1.0) volume.
public void PlaySound(UUID audio, Vector3 position)
{
ISoundModule soundModule = m_internalScene.RequestModuleInterface<ISoundModule>();
if (soundModule != null)
{
soundModule.TriggerSound(audio, UUID.Zero, UUID.Zero, UUID.Zero, 1.0, position,
m_internalScene.RegionInfo.RegionHandle, 0);
}
}
#endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace ChallengeAccepted.Api.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
// Number of elements generated for arrays/collections/dictionaries.
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
// createdObjectReferences tracks complex objects already built, breaking
// circular references (see GenerateComplexObject).
return GenerateObject(type, new Dictionary<Type, object>());
}
// Dispatches on the shape of the type (simple/array/generic/dictionary/
// collection/queryable/enum/POCO). Order matters: generic types are handled
// before the non-generic IDictionary/IList checks.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
// Handles all closed generic types: Nullable, KeyValuePair, Tuple, the
// well-known collection interfaces, and finally generic POCOs.
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
// Interface requested: materialize as List<T>.
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
// Interface requested: materialize as Dictionary<K,V>.
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
// Builds a Tuple via its (T1, T2, ...) constructor.
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
// NOTE(review): despite the name, this flag means "every element came
// back null" — a tuple is still created when at least one succeeds.
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
// Builds a KeyValuePair<K,V>; gives up only when both halves failed.
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
// Builds an array of 'size' generated elements; null when no element could
// be generated at all.
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
// Populates a dictionary via reflection on its Add/Contains(Key) methods;
// non-generic dictionaries get object keys/values. Duplicate generated keys
// are skipped rather than causing an Add failure.
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
// Returns the first declared enum value, or null for an empty enum.
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
// Builds a list (generic) or object[] (non-generic) and wraps it with
// Queryable.AsQueryable.
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
// Bind the generic AsQueryable<T>(IEnumerable<T>) overload explicitly.
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
// Populates any collection exposing an Add method; null when every
// generated element was null.
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
// Nullable<T> is represented by a (boxed) value of the underlying type.
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
// Builds a POCO: registers the instance in createdObjectReferences BEFORE
// populating members so circular references resolve to the same instance.
// Reference types require a public default constructor.
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
// Fills every writable public instance property with generated data.
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
// Fills every public instance field with generated data.
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
// Table-driven factory for primitive/simple types. _index increments per
// generated value so successive samples differ where the factory uses it.
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
namespace ICSimulator
{
// Backpressure contract between adjacent ring elements: the downstream
// element grants (or denies) one buffer credit for flit f. 'sender'
// identifies the requesting element type (NIC vs. IRI) so the receiver can
// pick the right buffer/credit pool; 'bubble' is the number of credits that
// must remain free after the grant (larger bubbles for lower-priority
// traffic, based on how callers in this file pass 0/1/2).
public interface IBufRingBackpressure
{
bool getCredit(Flit f, object sender, int bubble);
}
/// <summary>
/// Per-node router for the buffered-ring network. It only bridges between
/// the node and its attached NICs (one per network copy); all switching is
/// done by the NICs/IRIs themselves, so the router has no per-cycle work.
/// </summary>
public class BufRingNetwork_Router : Router
{
    BufRingNetwork_NIC[] _nics;
    int _nic_count;

    public BufRingNetwork_Router(Coord c) : base(c)
    {
        _nic_count = 0;
        _nics = new BufRingNetwork_NIC[Config.bufrings_n];
    }

    protected override void _doStep()
    {
        // No per-cycle work; the network steps the NICs/IRIs directly.
    }

    // Injection is possible iff some attached NIC has a free inject slot.
    public override bool canInjectFlit(Flit f)
    {
        for (int i = 0; i < _nic_count; i++)
        {
            if (_nics[i].Inject == null)
                return true;
        }
        return false;
    }

    // Place the flit in a free NIC inject slot, probing round-robin from a
    // random starting NIC to spread load across network copies.
    public override void InjectFlit(Flit f)
    {
        int start = Simulator.rand.Next(_nic_count);
        for (int offset = 0; offset < _nic_count; offset++)
        {
            int idx = (offset + start) % _nic_count;
            if (_nics[idx].Inject == null)
            {
                _nics[idx].Inject = f;
                return;
            }
        }
        throw new Exception("Could not inject flit -- no free slots!");
    }

    // Called by a NIC once its pending flit actually enters the ring.
    public void statsIJ(Flit f)
    {
        statsInjectFlit(f);
    }

    // Deliver a flit that has reached its destination node, recording
    // per-flit (and, on the last flit, per-packet) ejection stats.
    public void acceptFlit(Flit f)
    {
        statsEjectFlit(f);
        if (f.packet.nrOfArrivedFlits + 1 == f.packet.nrOfFlits)
            statsEjectPacket(f.packet);
        m_n.receiveFlit(f);
    }

    public void addNIC(BufRingNetwork_NIC nic)
    {
        _nics[_nic_count++] = nic;
    }
}
// Network interface controller (ring stop) on a local ring. Holds a small
// buffer for through traffic, a single injection slot fed by the router,
// and a credit count representing free slots in its own buffer as seen by
// the upstream neighbor.
public class BufRingNetwork_NIC : IBufRingBackpressure
{
BufRingNetwork_Router _router;
// Next element on the ring (another NIC or the local ring's IRI); its
// permission (a credit) is required before driving the output link.
IBufRingBackpressure _downstream;
// Single injection slot; set by the router, drained in doStep.
Flit _inject;
Link _in, _out;
// Buffer for flits passing through this stop.
Queue<Flit> _buf;
// Free slots remaining in _buf, handed out as credits to upstream.
int _credits;
public Flit Inject { get { return _inject; } set { _inject = value; } }
// Grant a credit iff more than 'bubble' credits would remain free; the
// bubble reserves slack for higher-priority traffic (callers pass 0 for
// through traffic, 2 for injections).
public bool getCredit(Flit f, object sender, int bubble)
{
if (_credits > bubble) {
_credits--;
return true;
}
else
return false;
}
// localring/id identify the stop's position; only the buffer and credit
// pool are initialized here (links/router are wired up by the network).
public BufRingNetwork_NIC(int localring, int id)
{
_buf = new Queue<Flit>();
_credits = Config.bufrings_localbuf;
_inject = null;
}
public void setRouter(BufRingNetwork_Router router)
{
_router = router;
}
// One simulation cycle: eject or buffer the arriving flit, forward
// buffered through traffic (which has priority), then inject if the
// output is still free. Returns true iff any flit moved; the network
// uses that to detect a stalled (deadlocked) state.
public bool doStep()
{
bool somethingMoved = false;
// handle input from ring
if (_in.Out != null) {
Flit f = _in.Out;
_in.Out = null;
somethingMoved = true;
if (f.packet.dest.ID == _router.coord.ID) {
// Ejected here, so the buffer slot it was credited is free again.
_credits++;
_router.acceptFlit(f);
}
else {
_buf.Enqueue(f);
Simulator.stats.bufrings_nic_enqueue.Add();
}
}
// handle through traffic
if (_buf.Count > 0) {
Flit f = _buf.Peek();
// bubble == 0: through traffic may consume the downstream's last slot.
if (_downstream.getCredit(f, this, 0)) {
_buf.Dequeue();
_credits++;
_out.In = f;
Simulator.stats.bufrings_nic_dequeue.Add();
somethingMoved = true;
}
}
// handle injections (only if through traffic did not claim the output)
if (_out.In == null && _inject != null) {
// bubble == 2: injections must leave slack so through traffic can
// always drain downstream.
if (_downstream.getCredit(_inject, this, 2)) {
_out.In = _inject;
_router.statsIJ(_inject);
_inject = null;
Simulator.stats.bufrings_nic_inject.Add();
somethingMoved = true;
}
}
if (_inject != null)
Simulator.stats.bufrings_nic_starve.Add();
if (_out.In != null)
Simulator.stats.bufrings_link_traverse[1].Add();
Simulator.stats.bufrings_nic_occupancy.Add(_buf.Count);
return somethingMoved;
}
public void setInput(Link l)
{
_in = l;
}
public void setOutput(Link l, IBufRingBackpressure downstream)
{
_out = l;
_downstream = downstream;
}
// Map a flat node ID to its (local ring, position on that ring) pair.
public static void Map(int ID, out int localring, out int localid)
{
localring = ID/Config.bufrings_branching;
localid = ID%Config.bufrings_branching;
}
// Deadlock recovery: drain every queued/pending flit into 'flits' and
// reset the credit pool to its initial value.
public void nuke(Queue<Flit> flits)
{
while (_buf.Count > 0) {
flits.Enqueue(_buf.Dequeue());
}
if (_inject != null)
flits.Enqueue(_inject);
_credits = Config.bufrings_localbuf;
_inject = null;
}
}
// Inter-Ring Interface: the junction between one local ring and the global
// ring. Maintains four buffers/credit pools, one per traffic class:
//   G  - staying on the global ring,      L  - staying on the local ring,
//   GL - transferring global -> local,    LG - transferring local -> global.
public class BufRingNetwork_IRI : IBufRingBackpressure
{
Link _gin, _gout, _lin, _lout;
Queue<Flit> _bufGL, _bufLG, _bufL, _bufG;
int _creditGL, _creditLG, _creditL, _creditG;
// Index of the local ring this IRI serves.
int _id;
IBufRingBackpressure _downstreamL, _downstreamG;
// Grant a credit from the pool matching the requester's position (global
// ring if the sender is an IRI, local ring if it is a NIC) and the flit's
// destination (transfer pool if it must switch rings here). The bubble
// check works as in the NIC: only grant if more than 'bubble' credits
// would remain.
public bool getCredit(Flit f, object sender, int bubble)
{
int ring, localid;
BufRingNetwork_NIC.Map(f.packet.dest.ID, out ring, out localid);
if (sender is BufRingNetwork_IRI) { // on global ring
if (ring != _id) {
// Continues around the global ring.
if (_creditG > bubble) {
_creditG--;
return true;
}
else
return false;
}
else {
// Destined for our local ring: global-to-local transfer buffer.
if (_creditGL > bubble) {
_creditGL--;
return true;
}
else
return false;
}
}
else if (sender is BufRingNetwork_NIC) { // on local ring
if (ring != _id) {
// Leaving our local ring: local-to-global transfer buffer.
if (_creditLG > bubble) {
_creditLG--;
return true;
}
else
return false;
}
else {
// Stays on our local ring.
if (_creditL > bubble) {
_creditL--;
return true;
}
else
return false;
}
}
return false;
}
public BufRingNetwork_IRI(int localring)
{
// This IRI topology assumes a two-level network with B rings of B nodes.
if (Config.N != Config.bufrings_branching*Config.bufrings_branching)
throw new Exception("Wrong size for 2-level bufrings network! Check that N = (bufrings_branching)^2.");
_id = localring;
_bufGL = new Queue<Flit>();
_bufLG = new Queue<Flit>();
_bufL = new Queue<Flit>();
_bufG = new Queue<Flit>();
_creditGL = Config.bufrings_G2L;
_creditLG = Config.bufrings_L2G;
_creditL = Config.bufrings_localbuf;
_creditG = Config.bufrings_globalbuf;
}
public void setGlobalInput(Link l) { _gin = l; }
public void setGlobalOutput(Link l, IBufRingBackpressure b) { _gout = l; _downstreamG = b; }
public void setLocalInput(Link l) { _lin = l; }
public void setLocalOutput(Link l, IBufRingBackpressure b) { _lout = l; _downstreamL = b; }
// One simulation cycle: sort each arriving flit into the buffer for its
// traffic class, then drive each output, giving on-ring traffic (bubble 0)
// priority over ring-transfer traffic (bubble 1). Returns true iff any
// flit moved.
public bool doStep()
{
bool somethingMoved = false;
// handle inputs
// global input
if (_gin.Out != null) {
Flit f = _gin.Out;
_gin.Out = null;
somethingMoved = true;
int ring, localid;
BufRingNetwork_NIC.Map(f.packet.dest.ID, out ring, out localid);
if (ring == _id) {
_bufGL.Enqueue(f);
Simulator.stats.bufrings_iri_enqueue_gl[0].Add();
}
else {
_bufG.Enqueue(f);
Simulator.stats.bufrings_iri_enqueue_g[0].Add();
}
}
// local input
if (_lin.Out != null) {
Flit f = _lin.Out;
_lin.Out = null;
somethingMoved = true;
int ring, localid;
BufRingNetwork_NIC.Map(f.packet.dest.ID, out ring, out localid);
if (ring == _id) {
_bufL.Enqueue(f);
Simulator.stats.bufrings_iri_enqueue_l[0].Add();
}
else {
_bufLG.Enqueue(f);
Simulator.stats.bufrings_iri_enqueue_lg[0].Add();
}
}
// handle outputs
// global output (on-ring traffic)
if (_gout.In == null && _bufG.Count > 0) {
Flit f = _bufG.Peek();
if (_downstreamG.getCredit(f, this, 0)) {
_bufG.Dequeue();
Simulator.stats.bufrings_iri_dequeue_g[0].Add();
_creditG++;
_gout.In = f;
somethingMoved = true;
}
}
// global output (transfer traffic)
if (_gout.In == null && _bufLG.Count > 0) {
Flit f = _bufLG.Peek();
if (_downstreamG.getCredit(f, this, 1)) {
_bufLG.Dequeue();
Simulator.stats.bufrings_iri_dequeue_lg[0].Add();
_creditLG++;
_gout.In = f;
somethingMoved = true;
}
}
// local output (on-ring traffic)
if (_lout.In == null && _bufL.Count > 0) {
Flit f = _bufL.Peek();
if (_downstreamL.getCredit(f, this, 0)) {
_bufL.Dequeue();
Simulator.stats.bufrings_iri_dequeue_l[0].Add();
_creditL++;
_lout.In = f;
somethingMoved = true;
}
}
// local output (transfer traffic)
if (_lout.In == null && _bufGL.Count > 0) {
Flit f = _bufGL.Peek();
if (_downstreamL.getCredit(f, this, 1)) {
_bufGL.Dequeue();
Simulator.stats.bufrings_iri_dequeue_gl[0].Add();
_creditGL++;
_lout.In = f;
somethingMoved = true;
}
}
if (_gout.In != null)
Simulator.stats.bufrings_link_traverse[0].Add();
if (_lout.In != null)
Simulator.stats.bufrings_link_traverse[1].Add();
Simulator.stats.bufrings_iri_occupancy_g[0].Add(_bufG.Count);
Simulator.stats.bufrings_iri_occupancy_l[0].Add(_bufL.Count);
Simulator.stats.bufrings_iri_occupancy_gl[0].Add(_bufGL.Count);
Simulator.stats.bufrings_iri_occupancy_lg[0].Add(_bufLG.Count);
return somethingMoved;
}
// Deadlock recovery: drain all four buffers into 'flits' and reset every
// credit pool to its initial value.
public void nuke(Queue<Flit> flits)
{
while (_bufGL.Count > 0)
flits.Enqueue(_bufGL.Dequeue());
while (_bufG.Count > 0)
flits.Enqueue(_bufG.Dequeue());
while (_bufL.Count > 0)
flits.Enqueue(_bufL.Dequeue());
while (_bufLG.Count > 0)
flits.Enqueue(_bufLG.Dequeue());
_creditGL = Config.bufrings_G2L;
_creditLG = Config.bufrings_L2G;
_creditL = Config.bufrings_localbuf;
_creditG = Config.bufrings_globalbuf;
}
}
// Two-level buffered-ring network: bufrings_n independent copies, each
// consisting of B local rings of B NICs (B = bufrings_branching) joined by
// one global ring of B IRIs.
public class BufRingNetwork : Network
{
BufRingNetwork_NIC[] _nics;
BufRingNetwork_IRI[] _iris;
BufRingNetwork_Router[] _routers;
public BufRingNetwork(int dimX, int dimY) : base(dimX, dimY)
{
X = dimX;
Y = dimY;
}
// Build the whole topology: nodes/routers, then for each network copy the
// local rings of NICs, the IRIs, and the global ring, all connected with
// Links registered in 'links'.
public override void setup()
{
// boilerplate
nodes = new Node[Config.N];
cache = new CmpCache();
ParseFinish(Config.finish);
workload = new Workload(Config.traceFilenames);
mapping = new NodeMapping_AllCPU_SharedCache();
links = new List<Link>();
_routers = new BufRingNetwork_Router[Config.N];
// create routers and nodes
for (int n = 0; n < Config.N; n++)
{
Coord c = new Coord(n);
nodes[n] = new Node(mapping, c);
_routers[n] = new BufRingNetwork_Router(c);
_routers[n].setNode(nodes[n]);
nodes[n].setRouter(_routers[n]);
}
int B = Config.bufrings_branching;
// create the NICs and IRIs
_nics = new BufRingNetwork_NIC[Config.N * Config.bufrings_n];
_iris = new BufRingNetwork_IRI[B * Config.bufrings_n];
// for each copy of the network...
for (int copy = 0; copy < Config.bufrings_n; copy++) {
// for each local ring...
for (int ring = 0; ring < B; ring++) {
// create global ring interface
_iris[copy*B + ring] = new BufRingNetwork_IRI(ring);
// create local NICs (ring stops)
for (int local = 0; local < B; local++)
_nics[copy*Config.N + ring*B + local] = new BufRingNetwork_NIC(ring, local);
// connect consecutive NICs with links (latency - 1 because the
// NIC/IRI step itself accounts for one cycle)
for (int local = 1; local < B; local++)
{
Link l = new Link(Config.bufrings_locallat - 1);
links.Add(l);
_nics[copy*Config.N + ring*B + local - 1].setOutput(l,
_nics[copy*Config.N + ring*B + local]);
_nics[copy*Config.N + ring*B + local].setInput(l);
}
// close the local ring through the IRI: last NIC -> IRI -> first NIC
Link iriIn = new Link(Config.bufrings_locallat - 1), iriOut = new Link(Config.bufrings_locallat - 1);
links.Add(iriIn);
links.Add(iriOut);
_nics[copy*Config.N + ring*B + B-1].setOutput(iriIn,
_iris[copy*B + ring]);
_nics[copy*Config.N + ring*B + 0].setInput(iriOut);
_iris[copy*B + ring].setLocalInput(iriIn);
_iris[copy*B + ring].setLocalOutput(iriOut,
_nics[copy*Config.N + ring*B + 0]);
}
// connect IRIs with links to make up global ring
for (int ring = 0; ring < B; ring++) {
Link globalLink = new Link(Config.bufrings_globallat - 1);
links.Add(globalLink);
_iris[copy*B + ring].setGlobalOutput(globalLink,
_iris[copy*B + (ring+1)%B]);
_iris[copy*B + (ring+1)%B].setGlobalInput(globalLink);
}
// add the corresponding NIC to each node/router
for (int id = 0; id < Config.N; id++) {
int ring, local;
BufRingNetwork_NIC.Map(id, out ring, out local);
_routers[id].addNIC(_nics[copy * Config.N + ring*B + local]);
_nics[copy * Config.N + ring*B + local].setRouter(_routers[id]);
}
}
}
// One simulation cycle: step nodes, then NICs, then IRIs, then links.
// If some NIC has a starved injection and nothing at all moved this
// cycle, the network is considered deadlocked and is nuked.
public override void doStep()
{
bool somethingMoved = false;
doStats();
for (int n = 0; n < Config.N; n++)
nodes[n].doStep();
// step the network sim: first the ring stops (NICs), then the IRIs
foreach (BufRingNetwork_NIC nic in _nics)
if (nic.doStep())
somethingMoved = true;
foreach (BufRingNetwork_IRI iri in _iris)
if (iri.doStep())
somethingMoved = true;
bool stalled = false;
foreach (BufRingNetwork_NIC nic in _nics)
if (nic.Inject != null)
stalled = true;
// now, step each link
foreach (Link l in links)
l.doStep();
if (stalled && !somethingMoved)
nuke();
}
// Deadlock recovery: collect every in-flight flit from NICs, IRIs, and
// links (resetting all credits), then deliver the flits directly to their
// destination routers, bypassing the network.
void nuke()
{
Console.WriteLine("NUKE! Cycle {0}.", Simulator.CurrentRound);
Simulator.stats.bufrings_nuke.Add();
// first, collect all flits from the network and reset credits, etc
Queue<Flit> flits = new Queue<Flit>();
foreach (BufRingNetwork_NIC nic in _nics)
nic.nuke(flits);
foreach (BufRingNetwork_IRI iri in _iris)
iri.nuke(flits);
foreach (Link l in links)
if (l.Out != null) {
flits.Enqueue(l.Out);
l.Out = null;
}
// now deliver all collected flits
while (flits.Count > 0) {
Flit f = flits.Dequeue();
_routers[f.packet.dest.ID].acceptFlit(f);
}
}
public override void close()
{
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.KeyVault
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Microsoft.Azure.KeyVault.WebKey;
/// <summary>
/// Performs cryptographic key operations and vault operations against the
/// Key Vault service.
/// </summary>
public partial interface IKeyVaultClient : IDisposable
{
/// <summary>
/// The base URI of the service.
/// </summary>
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
JsonSerializerSettings SerializationSettings { get; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
JsonSerializerSettings DeserializationSettings { get; }
/// <summary>
/// Credentials needed for the client to connect to Azure.
/// </summary>
ServiceClientCredentials Credentials { get; }
/// <summary>
/// Client Api Version.
/// </summary>
string ApiVersion { get; }
/// <summary>
/// Gets or sets the preferred language for the response.
/// </summary>
string AcceptLanguage { get; set; }
/// <summary>
/// Gets or sets the retry timeout in seconds for Long Running
/// Operations. Default value is 30.
/// </summary>
int? LongRunningOperationRetryTimeout { get; set; }
/// <summary>
/// When set to true a unique x-ms-client-request-id value is
/// generated and included in each request. Default is true.
/// </summary>
bool? GenerateClientRequestId { get; set; }
/// <summary>
/// Creates a new, named, key in the specified vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='kty'>
/// The type of key to create. Valid key types, see JsonWebKeyType.
/// Supported JsonWebKey key types (kty) for Elliptic Curve, RSA,
/// HSM, Octet. Possible values include: 'EC', 'RSA', 'RSA-HSM', 'oct'
/// </param>
/// <param name='keySize'>
/// The key size in bytes. e.g. 1024 or 2048.
/// </param>
/// <param name='keyOps'>
/// </param>
/// <param name='keyAttributes'>
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyBundle>> CreateKeyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string kty, int? keySize = default(int?), IList<string> keyOps = default(IList<string>), KeyAttributes keyAttributes = default(KeyAttributes), IDictionary<string, string> tags = default(IDictionary<string, string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Imports a key into the specified vault
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='key'>
/// The Json web key
/// </param>
/// <param name='hsm'>
/// Whether to import as a hardware key (HSM) or software key
/// </param>
/// <param name='keyAttributes'>
/// The key management attributes
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyBundle>> ImportKeyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, JsonWebKey key, bool? hsm = default(bool?), KeyAttributes keyAttributes = default(KeyAttributes), IDictionary<string, string> tags = default(IDictionary<string, string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes the specified key
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyBundle>> DeleteKeyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates the Key Attributes associated with the specified key
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='keyVersion'>
/// The version of the key
/// </param>
/// <param name='keyOps'>
/// Json web key operations. For more information on possible key
/// operations, see JsonWebKeyOperation.
/// </param>
/// <param name='keyAttributes'>
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyBundle>> UpdateKeyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string keyVersion, IList<string> keyOps = default(IList<string>), KeyAttributes keyAttributes = default(KeyAttributes), IDictionary<string, string> tags = default(IDictionary<string, string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Retrieves the public portion of a key plus its attributes
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='keyVersion'>
/// The version of the key
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyBundle>> GetKeyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string keyVersion, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List the versions of the specified key
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='maxresults'>
/// Maximum number of results to return.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<KeyItem>>> GetKeyVersionsWithHttpMessagesAsync(string vaultBaseUrl, string keyName, int? maxresults = default(int?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List keys in the specified vault
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='maxresults'>
/// Maximum number of results to return.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<KeyItem>>> GetKeysWithHttpMessagesAsync(string vaultBaseUrl, int? maxresults = default(int?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Requests that a backup of the specified key be downloaded to the
/// client.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<BackupKeyResult>> BackupKeyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Restores the backup key in to a vault
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyBundleBackup'>
/// the backup blob associated with a key bundle
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyBundle>> RestoreKeyWithHttpMessagesAsync(string vaultBaseUrl, byte[] keyBundleBackup, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Encrypts an arbitrary sequence of bytes using an encryption key
/// that is stored in Azure Key Vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='keyVersion'>
/// The version of the key
/// </param>
/// <param name='algorithm'>
/// algorithm identifier. Possible values include: 'RSA-OAEP', 'RSA1_5'
/// </param>
/// <param name='value'>
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyOperationResult>> EncryptWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string keyVersion, string algorithm, byte[] value, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Decrypts a single block of encrypted data
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='keyVersion'>
/// The version of the key
/// </param>
/// <param name='algorithm'>
/// algorithm identifier. Possible values include: 'RSA-OAEP', 'RSA1_5'
/// </param>
/// <param name='value'>
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyOperationResult>> DecryptWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string keyVersion, string algorithm, byte[] value, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Creates a signature from a digest using the specified key in the
/// vault
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='keyVersion'>
/// The version of the key
/// </param>
/// <param name='algorithm'>
/// The signing/verification algorithm identifier. For more
/// information on possible algorithm types, see
/// JsonWebKeySignatureAlgorithm. Possible values include: 'RS256',
/// 'RS384', 'RS512', 'RSNULL'
/// </param>
/// <param name='value'>
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyOperationResult>> SignWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string keyVersion, string algorithm, byte[] value, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Verifies a signature using the specified key
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='keyVersion'>
/// The version of the key
/// </param>
/// <param name='algorithm'>
/// The signing/verification algorithm. For more information on
/// possible algorithm types, see JsonWebKeySignatureAlgorithm.
/// Possible values include: 'RS256', 'RS384', 'RS512', 'RSNULL'
/// </param>
/// <param name='digest'>
/// The digest used for signing
/// </param>
/// <param name='signature'>
/// The signature to be verified
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyVerifyResult>> VerifyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string keyVersion, string algorithm, byte[] digest, byte[] signature, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Wraps a symmetric key using the specified key
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='keyVersion'>
/// The version of the key
/// </param>
/// <param name='algorithm'>
/// algorithm identifier. Possible values include: 'RSA-OAEP', 'RSA1_5'
/// </param>
/// <param name='value'>
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyOperationResult>> WrapKeyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string keyVersion, string algorithm, byte[] value, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Unwraps a symmetric key using the specified key in the vault that
/// has initially been used for wrapping the key.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='keyName'>
/// The name of the key
/// </param>
/// <param name='keyVersion'>
/// The version of the key
/// </param>
/// <param name='algorithm'>
/// algorithm identifier. Possible values include: 'RSA-OAEP', 'RSA1_5'
/// </param>
/// <param name='value'>
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<KeyOperationResult>> UnwrapKeyWithHttpMessagesAsync(string vaultBaseUrl, string keyName, string keyVersion, string algorithm, byte[] value, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Sets a secret in the specified vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='secretName'>
/// The name of the secret in the given vault
/// </param>
/// <param name='value'>
/// The value of the secret
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='contentType'>
/// Type of the secret value such as a password
/// </param>
/// <param name='secretAttributes'>
/// The secret management attributes
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<SecretBundle>> SetSecretWithHttpMessagesAsync(string vaultBaseUrl, string secretName, string value, IDictionary<string, string> tags = default(IDictionary<string, string>), string contentType = default(string), SecretAttributes secretAttributes = default(SecretAttributes), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes a secret from the specified vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='secretName'>
/// The name of the secret in the given vault
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<SecretBundle>> DeleteSecretWithHttpMessagesAsync(string vaultBaseUrl, string secretName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates the attributes associated with the specified secret
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='secretName'>
/// The name of the secret in the given vault
/// </param>
/// <param name='secretVersion'>
/// The version of the secret
/// </param>
/// <param name='contentType'>
/// Type of the secret value such as a password
/// </param>
/// <param name='secretAttributes'>
/// The secret management attributes
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<SecretBundle>> UpdateSecretWithHttpMessagesAsync(string vaultBaseUrl, string secretName, string secretVersion, string contentType = default(string), SecretAttributes secretAttributes = default(SecretAttributes), IDictionary<string, string> tags = default(IDictionary<string, string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets a secret.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='secretName'>
/// The name of the secret in the given vault
/// </param>
/// <param name='secretVersion'>
/// The version of the secret
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<SecretBundle>> GetSecretWithHttpMessagesAsync(string vaultBaseUrl, string secretName, string secretVersion, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List secrets in the specified vault
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='maxresults'>
/// Maximum number of secrets to return.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<SecretItem>>> GetSecretsWithHttpMessagesAsync(string vaultBaseUrl, int? maxresults = default(int?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List the versions of the specified secret
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='secretName'>
/// The name of the secret in the given vault
/// </param>
/// <param name='maxresults'>
/// Maximum number of results to return.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<SecretItem>>> GetSecretVersionsWithHttpMessagesAsync(string vaultBaseUrl, string secretName, int? maxresults = default(int?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List certificates in the specified vault
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='maxresults'>
/// Maximum number of results to return.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<CertificateItem>>> GetCertificatesWithHttpMessagesAsync(string vaultBaseUrl, int? maxresults = default(int?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes a certificate from the specified vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate in the given vault
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateBundle>> DeleteCertificateWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Sets the certificate contacts for the specified vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='contacts'>
/// The contacts for the vault certificates.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<Contacts>> SetCertificateContactsWithHttpMessagesAsync(string vaultBaseUrl, Contacts contacts, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the certificate contacts for the specified vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<Contacts>> GetCertificateContactsWithHttpMessagesAsync(string vaultBaseUrl, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes the certificate contacts for the specified vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<Contacts>> DeleteCertificateContactsWithHttpMessagesAsync(string vaultBaseUrl, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List certificate issuers for the specified vault.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='maxresults'>
/// Maximum number of results to return.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<CertificateIssuerItem>>> GetCertificateIssuersWithHttpMessagesAsync(string vaultBaseUrl, int? maxresults = default(int?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Sets the specified certificate issuer.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='issuerName'>
/// The name of the issuer.
/// </param>
/// <param name='provider'>
/// The issuer provider.
/// </param>
/// <param name='credentials'>
/// The credentials to be used for the issuer.
/// </param>
/// <param name='organizationDetails'>
/// Details of the organization as provided to the issuer.
/// </param>
/// <param name='attributes'>
/// Attributes of the issuer object.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IssuerBundle>> SetCertificateIssuerWithHttpMessagesAsync(string vaultBaseUrl, string issuerName, string provider, IssuerCredentials credentials = default(IssuerCredentials), OrganizationDetails organizationDetails = default(OrganizationDetails), IssuerAttributes attributes = default(IssuerAttributes), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates the specified certificate issuer.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='issuerName'>
/// The name of the issuer.
/// </param>
/// <param name='provider'>
/// The issuer provider.
/// </param>
/// <param name='credentials'>
/// The credentials to be used for the issuer.
/// </param>
/// <param name='organizationDetails'>
/// Details of the organization as provided to the issuer.
/// </param>
/// <param name='attributes'>
/// Attributes of the issuer object.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IssuerBundle>> UpdateCertificateIssuerWithHttpMessagesAsync(string vaultBaseUrl, string issuerName, string provider = default(string), IssuerCredentials credentials = default(IssuerCredentials), OrganizationDetails organizationDetails = default(OrganizationDetails), IssuerAttributes attributes = default(IssuerAttributes), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the specified certificate issuer.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='issuerName'>
/// The name of the issuer.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IssuerBundle>> GetCertificateIssuerWithHttpMessagesAsync(string vaultBaseUrl, string issuerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes the specified certificate issuer.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='issuerName'>
/// The name of the issuer.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IssuerBundle>> DeleteCertificateIssuerWithHttpMessagesAsync(string vaultBaseUrl, string issuerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Creates a new certificate version. If this is the first version,
/// the certificate resource is created.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate
/// </param>
/// <param name='certificatePolicy'>
/// The management policy for the certificate
/// </param>
/// <param name='certificateAttributes'>
/// The attributes of the certificate (optional)
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateOperation>> CreateCertificateWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, CertificatePolicy certificatePolicy = default(CertificatePolicy), CertificateAttributes certificateAttributes = default(CertificateAttributes), IDictionary<string, string> tags = default(IDictionary<string, string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Imports a certificate into the specified vault
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate
/// </param>
/// <param name='base64EncodedCertificate'>
/// Base64 encoded representation of the certificate object to import.
/// This certificate needs to contain the private key.
/// </param>
/// <param name='password'>
/// If the private key in base64EncodedCertificate is encrypted, the
/// password used for encryption
/// </param>
/// <param name='certificatePolicy'>
/// The management policy for the certificate
/// </param>
/// <param name='certificateAttributes'>
/// The attributes of the certificate (optional)
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateBundle>> ImportCertificateWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, string base64EncodedCertificate, string password = default(string), CertificatePolicy certificatePolicy = default(CertificatePolicy), CertificateAttributes certificateAttributes = default(CertificateAttributes), IDictionary<string, string> tags = default(IDictionary<string, string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List the versions of a certificate.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate
/// </param>
/// <param name='maxresults'>
/// Maximum number of results to return.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<CertificateItem>>> GetCertificateVersionsWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, int? maxresults = default(int?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the policy for a certificate.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate in the given vault.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificatePolicy>> GetCertificatePolicyWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates the policy for a certificate. Set appropriate members in
/// the certificatePolicy that must be updated. Leave others as null.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate in the given vault.
/// </param>
/// <param name='certificatePolicy'>
/// The policy for the certificate.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificatePolicy>> UpdateCertificatePolicyWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, CertificatePolicy certificatePolicy, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates the attributes associated with the specified certificate
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate in the given vault
/// </param>
/// <param name='certificateVersion'>
/// The version of the certificate
/// </param>
/// <param name='certificatePolicy'>
/// The management policy for the certificate
/// </param>
/// <param name='certificateAttributes'>
/// The attributes of the certificate (optional)
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateBundle>> UpdateCertificateWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, string certificateVersion, CertificatePolicy certificatePolicy = default(CertificatePolicy), CertificateAttributes certificateAttributes = default(CertificateAttributes), IDictionary<string, string> tags = default(IDictionary<string, string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets a Certificate.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate in the given vault
/// </param>
/// <param name='certificateVersion'>
/// The version of the certificate
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateBundle>> GetCertificateWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, string certificateVersion, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates a certificate operation.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate
/// </param>
/// <param name='cancellationRequested'>
/// Indicates if cancellation was requested on the certificate
/// operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateOperation>> UpdateCertificateOperationWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, bool cancellationRequested, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the certificate operation response.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateOperation>> GetCertificateOperationWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes the certificate operation.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateOperation>> DeleteCertificateOperationWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Merges a certificate or a certificate chain with a key pair
/// existing on the server.
/// </summary>
/// <param name='vaultBaseUrl'>
/// The vault name, e.g. https://myvault.vault.azure.net
/// </param>
/// <param name='certificateName'>
/// The name of the certificate
/// </param>
/// <param name='x509Certificates'>
/// The certificate or the certificate chain to merge
/// </param>
/// <param name='certificateAttributes'>
/// The attributes of the certificate (optional)
/// </param>
/// <param name='tags'>
/// Application-specific metadata in the form of key-value pairs
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<CertificateBundle>> MergeCertificateWithHttpMessagesAsync(string vaultBaseUrl, string certificateName, IList<byte[]> x509Certificates, CertificateAttributes certificateAttributes = default(CertificateAttributes), IDictionary<string, string> tags = default(IDictionary<string, string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List the versions of the specified key
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<KeyItem>>> GetKeyVersionsNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List keys in the specified vault
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<KeyItem>>> GetKeysNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List secrets in the specified vault
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<SecretItem>>> GetSecretsNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List the versions of the specified secret
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<SecretItem>>> GetSecretVersionsNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List certificates in the specified vault
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<CertificateItem>>> GetCertificatesNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List certificate issuers for the specified vault.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<CertificateIssuerItem>>> GetCertificateIssuersNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List the versions of a certificate.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<CertificateItem>>> GetCertificateVersionsNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.Text;
using Roslyn.Test.Utilities;
using Roslyn.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.UnitTests.MetadataAsSource
{
public abstract partial class AbstractMetadataAsSourceTests
{
public const string DefaultMetadataSource = "public class C {}";
public const string DefaultSymbolMetadataName = "C";
internal class TestContext : IDisposable
{
private readonly TestWorkspace _workspace;
private readonly IMetadataAsSourceFileService _metadataAsSourceService;
private readonly ITextBufferFactoryService _textBufferFactoryService;
public static async Task<TestContext> CreateAsync(string projectLanguage = null, IEnumerable<string> metadataSources = null, bool includeXmlDocComments = false, string sourceWithSymbolReference = null)
{
projectLanguage = projectLanguage ?? LanguageNames.CSharp;
metadataSources = metadataSources ?? SpecializedCollections.EmptyEnumerable<string>();
metadataSources = !metadataSources.Any()
? new[] { AbstractMetadataAsSourceTests.DefaultMetadataSource }
: metadataSources;
var workspace = await CreateWorkspaceAsync(projectLanguage, metadataSources, includeXmlDocComments, sourceWithSymbolReference);
return new TestContext(workspace);
}
public TestContext(TestWorkspace workspace)
{
_workspace = workspace;
_metadataAsSourceService = _workspace.GetService<IMetadataAsSourceFileService>();
_textBufferFactoryService = _workspace.GetService<ITextBufferFactoryService>();
}
public Solution CurrentSolution
{
get { return _workspace.CurrentSolution; }
}
public Project DefaultProject
{
get { return this.CurrentSolution.Projects.First(); }
}
public Task<MetadataAsSourceFile> GenerateSourceAsync(ISymbol symbol, Project project = null)
{
project = project ?? this.DefaultProject;
// Generate and hold onto the result so it can be disposed of with this context
return _metadataAsSourceService.GetGeneratedFileAsync(project, symbol);
}
public async Task<MetadataAsSourceFile> GenerateSourceAsync(string symbolMetadataName = null, Project project = null)
{
symbolMetadataName = symbolMetadataName ?? AbstractMetadataAsSourceTests.DefaultSymbolMetadataName;
project = project ?? this.DefaultProject;
// Get an ISymbol corresponding to the metadata name
var compilation = await project.GetCompilationAsync();
var diagnostics = compilation.GetDiagnostics().ToArray();
Assert.Equal(0, diagnostics.Length);
var symbol = await ResolveSymbolAsync(symbolMetadataName, compilation);
// Generate and hold onto the result so it can be disposed of with this context
var result = await _metadataAsSourceService.GetGeneratedFileAsync(project, symbol);
return result;
}
private static string GetSpaceSeparatedTokens(string source)
{
var tokens = source.Split(new[] { ' ', '\r', '\n', '\t' }, StringSplitOptions.RemoveEmptyEntries).Select(s => s.Trim()).Where(s => s != string.Empty);
return string.Join(" ", tokens);
}
public void VerifyResult(MetadataAsSourceFile file, string expected, bool compareTokens = true)
{
var actual = File.ReadAllText(file.FilePath).Trim();
var actualSpan = file.IdentifierLocation.SourceSpan;
if (compareTokens)
{
// Compare tokens and verify location relative to the generated tokens
expected = GetSpaceSeparatedTokens(expected);
actual = GetSpaceSeparatedTokens(actual.Insert(actualSpan.Start, "[|").Insert(actualSpan.End + 2, "|]"));
}
else
{
// Compare exact texts and verify that the location returned is exactly that
// indicated by expected
TextSpan expectedSpan;
MarkupTestFile.GetSpan(expected.TrimStart().TrimEnd(), out expected, out expectedSpan);
Assert.Equal(expectedSpan.Start, actualSpan.Start);
Assert.Equal(expectedSpan.End, actualSpan.End);
}
Assert.Equal(expected, actual);
}
public async Task GenerateAndVerifySourceAsync(string symbolMetadataName, string expected, bool compareTokens = true, Project project = null)
{
var result = await GenerateSourceAsync(symbolMetadataName, project);
VerifyResult(result, expected, compareTokens);
}
public void VerifyDocumentReused(MetadataAsSourceFile a, MetadataAsSourceFile b)
{
Assert.Same(a.FilePath, b.FilePath);
}
public void VerifyDocumentNotReused(MetadataAsSourceFile a, MetadataAsSourceFile b)
{
Assert.NotSame(a.FilePath, b.FilePath);
}
public void Dispose()
{
try
{
_metadataAsSourceService.CleanupGeneratedFiles();
}
finally
{
_workspace.Dispose();
}
}
public async Task<ISymbol> ResolveSymbolAsync(string symbolMetadataName, Compilation compilation = null)
{
if (compilation == null)
{
compilation = await this.DefaultProject.GetCompilationAsync();
var diagnostics = compilation.GetDiagnostics().ToArray();
Assert.Equal(0, diagnostics.Length);
}
foreach (var reference in compilation.References)
{
var assemblySymbol = (IAssemblySymbol)compilation.GetAssemblyOrModuleSymbol(reference);
var namedTypeSymbol = assemblySymbol.GetTypeByMetadataName(symbolMetadataName);
if (namedTypeSymbol != null)
{
return namedTypeSymbol;
}
else
{
// The symbol name could possibly be referring to the member of a named
// type. Parse the member symbol name.
var lastDotIndex = symbolMetadataName.LastIndexOf('.');
if (lastDotIndex < 0)
{
// The symbol name is not a member name and the named type was not found
// in this assembly
continue;
}
// The member symbol name itself could contain a dot (e.g. '.ctor'), so make
// sure we don't cut that off
while (lastDotIndex > 0 && symbolMetadataName[lastDotIndex - 1] == '.')
{
--lastDotIndex;
}
var memberSymbolName = symbolMetadataName.Substring(lastDotIndex + 1);
var namedTypeName = symbolMetadataName.Substring(0, lastDotIndex);
namedTypeSymbol = assemblySymbol.GetTypeByMetadataName(namedTypeName);
if (namedTypeSymbol != null)
{
var memberSymbol = namedTypeSymbol.GetMembers()
.Where(member => member.MetadataName == memberSymbolName)
.FirstOrDefault();
if (memberSymbol != null)
{
return memberSymbol;
}
}
}
}
return null;
}
private static bool ContainsVisualBasicKeywords(string input)
{
return
input.Contains("Class") ||
input.Contains("Structure") ||
input.Contains("Namespace") ||
input.Contains("Sub") ||
input.Contains("Function") ||
input.Contains("Dim");
}
private static string DeduceLanguageString(string input)
{
return ContainsVisualBasicKeywords(input)
? LanguageNames.VisualBasic : LanguageNames.CSharp;
}
private static Task<TestWorkspace> CreateWorkspaceAsync(string projectLanguage, IEnumerable<string> metadataSources, bool includeXmlDocComments, string sourceWithSymbolReference)
{
var xmlString = string.Concat(@"
<Workspace>
<Project Language=""", projectLanguage, @""" CommonReferences=""true"">");
metadataSources = metadataSources ?? new[] { AbstractMetadataAsSourceTests.DefaultMetadataSource };
foreach (var source in metadataSources)
{
var metadataLanguage = DeduceLanguageString(source);
xmlString = string.Concat(xmlString, string.Format(@"
<MetadataReferenceFromSource Language=""{0}"" CommonReferences=""true"" IncludeXmlDocComments=""{2}"">
<Document FilePath=""MetadataDocument"">
{1}
</Document>
</MetadataReferenceFromSource>",
metadataLanguage,
SecurityElement.Escape(source),
includeXmlDocComments.ToString()));
}
if (sourceWithSymbolReference != null)
{
xmlString = string.Concat(xmlString, string.Format(@"
<Document FilePath=""SourceDocument"">
{0}
</Document>",
sourceWithSymbolReference));
}
xmlString = string.Concat(xmlString, @"
</Project>
</Workspace>");
return TestWorkspace.CreateAsync(xmlString);
}
internal Document GetDocument(MetadataAsSourceFile file)
{
using (var reader = new StreamReader(file.FilePath))
{
var textBuffer = _textBufferFactoryService.CreateTextBuffer(reader, _textBufferFactoryService.TextContentType);
Assert.True(_metadataAsSourceService.TryAddDocumentToWorkspace(file.FilePath, textBuffer));
return textBuffer.AsTextContainer().GetRelatedDocuments().Single();
}
}
internal async Task<ISymbol> GetNavigationSymbolAsync()
{
var testDocument = _workspace.Documents.Single(d => d.FilePath == "SourceDocument");
var document = _workspace.CurrentSolution.GetDocument(testDocument.Id);
var syntaxRoot = await document.GetSyntaxRootAsync();
var semanticModel = await document.GetSemanticModelAsync();
return semanticModel.GetSymbolInfo(syntaxRoot.FindNode(testDocument.SelectedSpans.Single())).Symbol;
}
// Immutable holder pairing a generated metadata-as-source file with the test run
// that produced it.
private class GenerationResult
{
// The generated source file produced by the metadata-as-source service.
public readonly MetadataAsSourceFile File;
public GenerationResult(MetadataAsSourceFile file)
{
this.File = file;
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq; using System.Text;
using Wintellect.PowerCollections;
/// <summary>
/// A calendar event, ordered by date, then title, then location.
/// </summary>
class Event : IComparable
{
    public DateTime date;
    public String title;
    public String location;

    public Event(DateTime date, String title, String location)
    {
        this.date = date;
        this.title = title;
        this.location = location;
    }

    /// <summary>
    /// Compares by date, then title, then location.
    /// Fix: the original computed all three comparisons eagerly and threw
    /// NullReferenceException for a null/non-Event argument; comparisons are now
    /// short-circuited and an invalid argument raises ArgumentException per the
    /// IComparable contract.
    /// </summary>
    public int CompareTo(object obj)
    {
        Event other = obj as Event;
        if (other == null)
        {
            throw new ArgumentException("Object is not an Event");
        }

        int byDate = this.date.CompareTo(other.date);
        if (byDate != 0)
        {
            return byDate;
        }

        int byTitle = this.title.CompareTo(other.title);
        if (byTitle != 0)
        {
            return byTitle;
        }

        return this.location.CompareTo(other.location);
    }

    /// <summary>
    /// Formats the event as "yyyy-MM-ddTHH:mm:ss | title" with " | location"
    /// appended only when a non-empty location is set.
    /// </summary>
    public override string ToString()
    {
        StringBuilder result = new StringBuilder();
        result.Append(date.ToString("yyyy-MM-ddTHH:mm:ss"));
        result.Append(" | " + title);
        if (!string.IsNullOrEmpty(location))
        {
            result.Append(" | " + location);
        }
        return result.ToString();
    }
}
/// <summary>
/// Console command processor for the event calendar: reads commands
/// (AddEvent / DeleteEvents / ListEvents / End) from standard input, buffers all
/// responses, and prints them once processing stops.
/// </summary>
class Program
{
    // All command results are buffered here and written once at the end of Main.
    static StringBuilder output = new StringBuilder();

    // Centralizes the user-visible message formats.
    static class Messages
    {
        public static void EventAdded()
        {
            output.Append("Event added\n");
        }

        public static void EventDeleted(int x)
        {
            if (x == 0)
            {
                NoEventsFound();
            }
            else
            {
                output.AppendFormat("{0} events deleted\n", x);
            }
        }

        public static void NoEventsFound()
        {
            output.Append("No events found\n");
        }

        public static void PrintEvent(Event eventToPrint)
        {
            if (eventToPrint != null)
            {
                output.Append(eventToPrint + "\n");
            }
        }
    }

    // Stores events indexed both by lower-cased title (for deletion) and in
    // date order (for listing).
    class EventHolder
    {
        MultiDictionary<string, Event> byTitle = new MultiDictionary<string, Event>(true);
        OrderedBag<Event> byDate = new OrderedBag<Event>();

        public void AddEvent(DateTime date, string title, string location)
        {
            Event newEvent = new Event(date, title, location);
            byTitle.Add(title.ToLower(), newEvent);
            byDate.Add(newEvent);
            Messages.EventAdded();
        }

        // Removes every event whose title matches case-insensitively and reports the count.
        public void DeleteEvents(string titleToDelete)
        {
            string title = titleToDelete.ToLower();
            int removed = 0;
            foreach (var eventToRemove in byTitle[title])
            {
                removed++;
                byDate.Remove(eventToRemove);
            }
            byTitle.Remove(title);
            Messages.EventDeleted(removed);
        }

        // Prints up to 'count' events occurring at or after 'date'; reports when none match.
        public void ListEvents(DateTime date, int count)
        {
            OrderedBag<Event>.View eventsToShow = byDate.RangeFrom(new Event(date, "", ""), true);
            int showed = 0;
            foreach (var eventToShow in eventsToShow)
            {
                if (showed == count)
                {
                    break;
                }
                Messages.PrintEvent(eventToShow);
                showed++;
            }
            if (showed == 0)
            {
                Messages.NoEventsFound();
            }
        }
    }

    static EventHolder events = new EventHolder();

    static void Main(string[] args)
    {
        while (ExecuteNextCommand())
        {
        }
        Console.WriteLine(output);
    }

    // Reads one input line and dispatches on its first character:
    // A=AddEvent, D=DeleteEvents, L=ListEvents, E=End.
    // Returns false when processing should stop.
    private static bool ExecuteNextCommand()
    {
        string command = Console.ReadLine();

        // Fix: the original indexed command[0] unconditionally and crashed with a
        // NullReferenceException at end of input (ReadLine returns null); treat
        // null or empty input as a request to stop.
        if (string.IsNullOrEmpty(command))
        {
            return false;
        }

        switch (command[0])
        {
            case 'A':
                AddEvent(command);
                return true;
            case 'D':
                DeleteEvents(command);
                return true;
            case 'L':
                ListEvents(command);
                return true;
            default:
                // 'E' (End) and anything unrecognized both stop processing.
                return false;
        }
    }

    // "ListEvents {date} | {count}"
    private static void ListEvents(string command)
    {
        int pipeIndex = command.IndexOf('|');
        DateTime date = GetDate(command, "ListEvents");
        string countString = command.Substring(pipeIndex + 1);
        int count = int.Parse(countString);
        events.ListEvents(date, count);
    }

    // "DeleteEvents {title}"
    private static void DeleteEvents(string command)
    {
        string title = command.Substring("DeleteEvents".Length + 1);
        events.DeleteEvents(title);
    }

    // "AddEvent {date} | {title}[ | {location}]"
    private static void AddEvent(string command)
    {
        DateTime date;
        string title;
        string location;
        GetParameters(command, "AddEvent", out date, out title, out location);
        events.AddEvent(date, title, location);
    }

    // Splits an AddEvent command into date, title and optional location.
    // With a single '|' separator the location defaults to "".
    private static void GetParameters(string commandForExecution, string commandType, out DateTime dateAndTime, out string eventTitle, out string eventLocation)
    {
        dateAndTime = GetDate(commandForExecution, commandType);
        int firstPipeIndex = commandForExecution.IndexOf('|');
        int lastPipeIndex = commandForExecution.LastIndexOf('|');
        if (firstPipeIndex == lastPipeIndex)
        {
            eventTitle = commandForExecution.Substring(firstPipeIndex + 1).Trim();
            eventLocation = "";
        }
        else
        {
            eventTitle = commandForExecution.Substring(firstPipeIndex + 1, lastPipeIndex - firstPipeIndex - 1).Trim();
            eventLocation = commandForExecution.Substring(lastPipeIndex + 1).Trim();
        }
    }

    // Parses the fixed-width (20 character) date immediately following "{commandType} ".
    private static DateTime GetDate(string command, string commandType)
    {
        return DateTime.Parse(command.Substring(commandType.Length + 1, 20));
    }
}
| |
//-----------------------------------------------------------------------------
// <copyright file="SmtpNtlmAuthenticationModule.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------------
namespace System.Net.Mail
{
using System;
using System.Collections;
using System.IO;
using System.Net;
using System.Security.Permissions;
using System.ComponentModel;
using System.Security.Authentication.ExtendedProtection;
/// <summary>
/// SMTP "gssapi" SASL authentication module built on the Negotiate security
/// package. One NTAuthentication security context is kept per session cookie
/// in <see cref="sessions"/>; all access to the table is serialized by locking it.
/// </summary>
internal class SmtpNegotiateAuthenticationModule : ISmtpAuthenticationModule
{
// Maps sessionCookie -> NTAuthentication context for in-progress handshakes.
Hashtable sessions = new Hashtable();
internal SmtpNegotiateAuthenticationModule()
{
}
#region ISmtpAuthenticationModule Members
// Security this method will access NetworkCredential properties that demand UnmanagedCode and Environment Permission
[EnvironmentPermission(SecurityAction.Assert, Unrestricted=true)]
[SecurityPermission(SecurityAction.Assert, Flags=SecurityPermissionFlag.UnmanagedCode)]
// Produces the next blob of the Negotiate handshake for the given server
// challenge. Returns null when a new session has no credential (or when the
// security-layer exchange fails inside GetSecurityLayerOutgoingBlob);
// otherwise returns an Authorization carrying the Base64 response and the
// context's completion state.
public Authorization Authenticate(string challenge, NetworkCredential credential, object sessionCookie, string spn, ChannelBinding channelBindingToken)
{
if(Logging.On)Logging.Enter(Logging.Web, this, "Authenticate", null);
try {
lock (this.sessions)
{
// Look up (or lazily create) the security context for this session.
NTAuthentication clientContext = this.sessions[sessionCookie] as NTAuthentication;
if (clientContext == null)
{
// A brand-new session cannot start a handshake without a credential.
if(credential == null){
return null;
}
this.sessions[sessionCookie] =
clientContext =
new NTAuthentication(false, "Negotiate", credential, spn,
ContextFlags.Connection | ContextFlags.InitIntegrity, channelBindingToken);
}
byte[] byteResp;
string resp = null;
if (!clientContext.IsCompleted) {
// If auth is not yet completed keep producing
// challenge responses with GetOutgoingBlob
SecurityStatus statusCode;
byte[] decodedChallenge = null;
if (challenge != null) {
decodedChallenge =
Convert.FromBase64String(challenge);
}
byteResp = clientContext.GetOutgoingBlob(
decodedChallenge,
false,
out statusCode);
// Not sure why this is here; keeping it: a context that completed with
// no outgoing blob answers with a bare CRLF.
if (clientContext.IsCompleted && byteResp == null) {
resp = "\r\n";
}
if (byteResp != null) {
resp = Convert.ToBase64String(byteResp);
}
} else {
// If auth completed and still have a challenge then
// server may be doing "correct" form of GSSAPI SASL.
// Validate incoming and produce outgoing SASL security
// layer negotiate message.
resp = GetSecurityLayerOutgoingBlob(
challenge,
clientContext);
}
return new Authorization(resp, clientContext.IsCompleted);
}
}
finally {
if(Logging.On)Logging.Exit(Logging.Web, this, "Authenticate", null);
}
}
// SASL mechanism name this module answers for.
public string AuthenticationType
{
get
{
return "gssapi";
}
}
// Removes the session's security context from the table (under the lock) and
// closes it; closing happens after the lock is released.
public void CloseContext(object sessionCookie) {
NTAuthentication clientContext = null;
lock (sessions) {
clientContext = sessions[sessionCookie] as NTAuthentication;
if (clientContext != null) {
sessions.Remove(sessionCookie);
}
}
if (clientContext != null) {
clientContext.CloseContext();
}
}
#endregion
// Function for SASL security layer negotiation after
// authorization completes.
//
// Returns null for failure, Base64 encoded string on
// success.
private string GetSecurityLayerOutgoingBlob(
string challenge,
NTAuthentication clientContext) {
// must have a security layer challenge
if (challenge == null)
return null;
// "unwrap" challenge
byte[] input = Convert.FromBase64String(challenge);
int len;
try {
len = clientContext.VerifySignature(input, 0, input.Length);
}
catch (Win32Exception) {
// any decrypt failure is an auth failure
return null;
}
// Per RFC 2222 Section 7.2.2:
// the client should then expect the server to issue a
// token in a subsequent challenge. The client passes
// this token to GSS_Unwrap and interprets the first
// octet of cleartext as a bit-mask specifying the
// security layers supported by the server and the
// second through fourth octets as the maximum size
// output_message to send to the server.
// Section 7.2.3
// The security layer and their corresponding bit-masks
// are as follows:
// 1 No security layer
// 2 Integrity protection
// Sender calls GSS_Wrap with conf_flag set to FALSE
// 4 Privacy protection
// Sender calls GSS_Wrap with conf_flag set to TRUE
//
// Exchange 2007 and our client only support
// "No security layer". Therefore verify first byte is value 1
// and the 2nd-4th bytes are value zero since token size is not
// applicable when there is no security layer.
if (len < 4 || // expect 4 bytes
input[0] != 1 || // first value 1
input[1] != 0 || // rest value 0
input[2] != 0 ||
input[3] != 0) {
return null;
}
// Continuing with RFC 2222 section 7.2.2:
// The client then constructs data, with the first octet
// containing the bit-mask specifying the selected security
// layer, the second through fourth octets containing in
// network byte order the maximum size output_message the client
// is able to receive, and the remaining octets containing the
// authorization identity.
//
// So now this contructs the "wrapped" response. The response is
// payload is identical to the received server payload and the
// "authorization identity" is not supplied as it is unnecessary.
// let MakeSignature figure out length of output
byte[] output = null;
try {
len = clientContext.MakeSignature(input, 0, 4, ref output);
}
catch (Win32Exception) {
// any signing failure is an auth failure
return null;
}
// return Base64 encoded string of signed payload
return Convert.ToBase64String(output, 0, len);
}
}
}
| |
// * **************************************************************************
// * Copyright (c) McCreary, Veselka, Bragg & Allen, P.C.
// * This source code is subject to terms and conditions of the MIT License.
// * A copy of the license can be found in the License.txt file
// * at the root of this distribution.
// * By using this source code in any fashion, you are agreeing to be bound by
// * the terms of the MIT License.
// * You must not remove this notice from this software.
// * **************************************************************************
using System;
using System.Linq;
using FluentAssert;
using FluentWebControls.Extensions;
using FluentWebControls.Tests.Extensions;
using LinqToHtml;
using NUnit.Framework;
namespace FluentWebControls.Tests
{
// Unit tests for TextAreaData: verifies the rendered textarea HTML — label
// wiring, value escaping, min/max length attributes, and the required marker.
// NOTE: the fixture wires its steps via method-group references passed to
// Test.Verify, so the snake_case helper names below are part of the test's
// readable output.
public class TextAreaDataTest
{
[TestFixture]
public class When_asked_to_create_a_textarea_for_a_property
{
private string _id;
private bool _isRequired;
private string _label;
private int? _maxLength;
private int? _maxValue;
private int? _minLength;
private int? _minValue;
// Parsed HTML produced by the "when" step; interrogated by the assertions.
private HTMLDocument _result;
private Type _type;
private string _value;
[SetUp]
// Resets all fixture state so each test starts from the same defaults.
public void BeforeEachTest()
{
_isRequired = false;
_minLength = null;
_maxLength = null;
_minValue = null;
_maxValue = null;
_type = typeof(string);
_id = "theId";
_value = "value";
}
[Test]
public void Given_a_label()
{
Test.Verify(
with_a_label,
when_asked_to_create_a_textarea_for_a_property,
should_include_a_label_with_the_label_text,
should_set_the_label_for_attribute_to_the_id_of_the_textarea
);
}
[Test]
public void Given_a_non_empty_value_without_characters_that_need_to_be_escaped()
{
Test.Verify(
with_a_non_empty_value_without_characters_that_need_to_be_escaped,
when_asked_to_create_a_textarea_for_a_property,
should_set_the_content_to_the_value
);
}
[Test]
public void Given_a_null_value()
{
Test.Verify(
with_a_null_value,
when_asked_to_create_a_textarea_for_a_property,
should_set_the_content_to_empty_string
);
}
[Test]
public void Given_a_value_containing_characters_that_need_to_be_escaped()
{
Test.Verify(
with_a_value_containing_characters_that_need_to_be_escaped,
when_asked_to_create_a_textarea_for_a_property,
should_set_the_content_to_the_escaped_value
);
}
[Test]
public void Given_the_Property_does_not_have_a_maximum_length()
{
Test.Verify(
with_a_property_that_does_not_have_a_maximum_length,
when_asked_to_create_a_textarea_for_a_property,
should_not_mark_the_textarea_with_the_maximum_length
);
}
[Test]
public void Given_the_Property_does_not_have_a_minimum_length()
{
Test.Verify(
with_a_property_that_does_not_have_a_minimum_length,
when_asked_to_create_a_textarea_for_a_property,
should_not_mark_the_textarea_with_the_minimum_length
);
}
[Test]
public void Given_the_Property_has_a_maximum_length()
{
Test.Verify(
with_a_property_that_has_a_maximum_length,
when_asked_to_create_a_textarea_for_a_property,
should_mark_the_textarea_with_the_maximum_length
);
}
[Test]
public void Given_the_Property_has_a_minimum_length()
{
Test.Verify(
with_a_property_that_has_a_minimum_length,
when_asked_to_create_a_textarea_for_a_property,
should_mark_the_textarea_with_the_minimum_length
);
}
[Test]
public void Given_the_Property_is_not_required()
{
Test.Verify(
with_a_property_that_is_not_required,
when_asked_to_create_a_textarea_for_a_property,
should_not_mark_the_textarea_as_required,
should_not_add_the_visual_required_indicator
);
}
[Test]
public void Given_the_Property_is_required()
{
Test.Verify(
with_a_property_that_is_required,
when_asked_to_create_a_textarea_for_a_property,
should_mark_the_textarea_as_required,
should_add_the_visual_required_indicator
);
}
// The id the textarea is rendered with (camelCased form of _id).
protected string Id => _id.ToCamelCase();
// First <label> tag in the parsed result, if any.
private HTMLTag Label
{
get { return _result.ChildTags.FirstOrDefault(x => x.Type == "label"); }
}
// The <em> tag used as the visual "required" marker, if present.
private HTMLTag RequiredIndicator
{
get { return _result.ChildTags.FirstOrDefault(x => x.Type == "em"); }
}
// The textarea element itself: the first child carrying a "class" attribute.
private HTMLTag Tag => _result.ChildTags.WithAttributeNamed("class").First();
private void should_add_the_visual_required_indicator()
{
var requiredIndicator = RequiredIndicator;
requiredIndicator.ShouldNotBeNull();
requiredIndicator.Content.ShouldBeEqualTo("*");
}
private void should_include_a_label_with_the_label_text()
{
var label = Label;
label.ShouldNotBeNull();
label.Content.ShouldNotBeNull();
label.Content.ShouldBeEqualTo(_label);
}
private void should_mark_the_textarea_as_required()
{
var @class = Tag.Attributes.FirstOrDefault(x => x.Name == "class");
@class.ShouldNotBeNull();
// ReSharper disable once PossibleNullReferenceException
@class.Value.ShouldBeEqualTo("required textbox");
}
private void should_mark_the_textarea_with_the_maximum_length()
{
var maxLength = Tag.Attributes.FirstOrDefault(x => x.Name == "maxlength");
maxLength.ShouldNotBeNull();
// ReSharper disable once PossibleNullReferenceException
maxLength.Value.ShouldBeEqualTo(_maxLength.ToString());
}
private void should_mark_the_textarea_with_the_minimum_length()
{
var minLength = Tag.Attributes.FirstOrDefault(x => x.Name == "minlength");
minLength.ShouldNotBeNull();
// ReSharper disable once PossibleNullReferenceException
minLength.Value.ShouldBeEqualTo(_minLength.ToString());
}
private void should_not_add_the_visual_required_indicator()
{
var requiredIndicator = RequiredIndicator;
requiredIndicator.ShouldBeNull();
}
private void should_not_mark_the_textarea_as_required()
{
var @class = Tag.Attributes.FirstOrDefault(x => x.Name == "class");
@class.ShouldNotBeNull();
// ReSharper disable once PossibleNullReferenceException
@class.Value.ShouldBeEqualTo("textbox");
}
private void should_not_mark_the_textarea_with_the_maximum_length()
{
var maxLength = Tag.Attributes.FirstOrDefault(x => x.Name == "maxlength");
maxLength.ShouldBeNull();
}
private void should_not_mark_the_textarea_with_the_minimum_length()
{
var minLength = Tag.Attributes.FirstOrDefault(x => x.Name == "minlength");
minLength.ShouldBeNull();
}
private void should_set_the_content_to_empty_string()
{
var content = Tag.Content;
content.ShouldBeEqualTo("");
}
private void should_set_the_content_to_the_escaped_value()
{
var content = Tag.RawContent;
content.ShouldBeEqualTo(_value.EscapeForHtml());
}
private void should_set_the_content_to_the_value()
{
var content = Tag.Content;
content.ShouldBeEqualTo(_value);
}
private void should_set_the_label_for_attribute_to_the_id_of_the_textarea()
{
var label = Label;
var @for = label.Attributes.FirstOrDefault(x => x.Name == "for");
@for.ShouldNotBeNull();
var tag = Tag;
var id = tag.Attributes.FirstOrDefault(x => x.Name == "id");
// ReSharper disable PossibleNullReferenceException
@for.Value.ShouldBeEqualTo(id.Value);
// ReSharper restore PossibleNullReferenceException
}
// Builds the TextAreaData under test from the fixture state and parses its
// HTML into _result (wrapped in <all> so the fragment has a single root).
private void when_asked_to_create_a_textarea_for_a_property()
{
var propertyMetaData = PropertyMetaDataMocker.CreateStub(Id, _isRequired, _minLength, _maxLength, _minValue, _maxValue, _type);
var textArea = new TextAreaData(_value)
.WithValidationFrom(propertyMetaData)
.WithLabel(_label)
.WithId(Id);
var resultHtml = textArea.ToString();
_result = HTMLParser.Parse("<all>" + resultHtml + "</all>");
}
private void with_a_label()
{
_label = "Name:";
}
private void with_a_non_empty_value_without_characters_that_need_to_be_escaped()
{
_value = "The quick brown fox";
}
private void with_a_null_value()
{
_value = null;
}
private void with_a_property_that_does_not_have_a_maximum_length()
{
_maxLength = null;
}
private void with_a_property_that_does_not_have_a_minimum_length()
{
_minLength = null;
}
private void with_a_property_that_has_a_maximum_length()
{
_maxLength = 10;
}
private void with_a_property_that_has_a_minimum_length()
{
_minLength = 6;
}
private void with_a_property_that_is_not_required()
{
_isRequired = false;
}
private void with_a_property_that_is_required()
{
_isRequired = true;
}
private void with_a_value_containing_characters_that_need_to_be_escaped()
{
_value = "<&>";
}
}
}
}
| |
namespace Antlr.Runtime
{
using ConditionalAttribute = System.Diagnostics.ConditionalAttribute;
using Console = System.Console;
using IDebugEventListener = Antlr.Runtime.Debug.IDebugEventListener;
// Pluggable handler for DFA "special states": returns the next state for special
// state s against the given input, or -1 when no viable transition exists.
internal delegate int SpecialStateTransitionHandler( DFA dfa, int s, IIntStream input );
/// <summary>
/// Table-driven deterministic finite automaton used by generated recognizers to
/// predict which alternative to take. The per-state tables below are populated
/// by generated subclasses (typically via the Unpack* helpers).
/// </summary>
internal class DFA
{
// eot[s]: target state of the EOT (end-of-token) edge; negative when absent.
protected short[] eot;
// eof[s]: index used to look up the accept value on end-of-file; negative when absent.
protected short[] eof;
// min[s]/max[s]: lowest/highest input char with a normal transition out of state s.
protected char[] min;
protected char[] max;
// accept[s]: predicted alternative (>= 1) when s is an accept state.
protected short[] accept;
// special[s]: special-state index handled by SpecialStateTransition; negative otherwise.
protected short[] special;
// transition[s][c - min[s]]: next state for input char c; negative means no edge.
protected short[][] transition;
protected int decisionNumber;
protected BaseRecognizer recognizer;
public readonly bool debug = false;
public DFA()
: this( new SpecialStateTransitionHandler( SpecialStateTransitionDefault ) )
{
}
public DFA( SpecialStateTransitionHandler specialStateTransition )
{
// Fall back to the default handler (always -1) when none is supplied.
this.SpecialStateTransition = specialStateTransition ?? new SpecialStateTransitionHandler( SpecialStateTransitionDefault );
}
public virtual string Description
{
get
{
return "n/a";
}
}
/// <summary>
/// Runs the DFA from state 0 over the input stream and returns the predicted
/// alternative (the accept value reached), or 0 after reporting
/// no-viable-alternative. The input position is always restored via
/// Mark/Rewind before returning.
/// </summary>
public virtual int Predict( IIntStream input )
{
if ( debug )
{
Console.Error.WriteLine( "Enter DFA.predict for decision " + decisionNumber );
}
int mark = input.Mark(); // remember where decision started in input
int s = 0; // we always start at s0
try
{
for ( ; ; )
{
if ( debug )
Console.Error.WriteLine( "DFA " + decisionNumber + " state " + s + " LA(1)=" + (char)input.LA( 1 ) + "(" + input.LA( 1 ) +
"), index=" + input.Index );
// Special states delegate to the pluggable handler (e.g. predicates).
int specialState = special[s];
if ( specialState >= 0 )
{
if ( debug )
{
Console.Error.WriteLine( "DFA " + decisionNumber +
" state " + s + " is special state " + specialState );
}
s = SpecialStateTransition( this, specialState, input );
if ( debug )
{
Console.Error.WriteLine( "DFA " + decisionNumber +
" returns from special state " + specialState + " to " + s );
}
if ( s == -1 )
{
NoViableAlt( s, input );
return 0;
}
input.Consume();
continue;
}
// Accept state: prediction is done.
if ( accept[s] >= 1 )
{
if ( debug )
Console.Error.WriteLine( "accept; predict " + accept[s] + " from state " + s );
return accept[s];
}
// look for a normal char transition
char c = (char)input.LA( 1 ); // -1 == \uFFFF, all tokens fit in 65000 space
if ( c >= min[s] && c <= max[s] )
{
int snext = transition[s][c - min[s]]; // move to next state
if ( snext < 0 )
{
// was in range but not a normal transition
// must check EOT, which is like the else clause.
// eot[s]>=0 indicates that an EOT edge goes to another
// state.
if ( eot[s] >= 0 )
{ // EOT Transition to accept state?
if ( debug )
Console.Error.WriteLine( "EOT transition" );
s = eot[s];
input.Consume();
// TODO: I had this as return accept[eot[s]]
// which assumed here that the EOT edge always
// went to an accept...faster to do this, but
// what about predicated edges coming from EOT
// target?
continue;
}
NoViableAlt( s, input );
return 0;
}
s = snext;
input.Consume();
continue;
}
// Char out of range: try the EOT edge before giving up.
if ( eot[s] >= 0 )
{ // EOT Transition?
if ( debug )
Console.Error.WriteLine( "EOT transition" );
s = eot[s];
input.Consume();
continue;
}
if ( c == unchecked( (char)TokenTypes.EndOfFile ) && eof[s] >= 0 )
{ // EOF Transition to accept state?
if ( debug )
Console.Error.WriteLine( "accept via EOF; predict " + accept[eof[s]] + " from " + eof[s] );
return accept[eof[s]];
}
// not in range and not EOF/EOT, must be invalid symbol
if ( debug )
{
Console.Error.WriteLine( "min[" + s + "]=" + min[s] );
Console.Error.WriteLine( "max[" + s + "]=" + max[s] );
Console.Error.WriteLine( "eot[" + s + "]=" + eot[s] );
Console.Error.WriteLine( "eof[" + s + "]=" + eof[s] );
for ( int p = 0; p < transition[s].Length; p++ )
{
Console.Error.Write( transition[s][p] + " " );
}
Console.Error.WriteLine();
}
NoViableAlt( s, input );
return 0;
}
}
finally
{
// Prediction never consumes input from the caller's perspective.
input.Rewind( mark );
}
}
// Reports a no-viable-alternative: while backtracking just flags failure;
// otherwise raises NoViableAltException (after notifying Error).
protected virtual void NoViableAlt( int s, IIntStream input )
{
if ( recognizer.state.backtracking > 0 )
{
recognizer.state.failed = true;
return;
}
NoViableAltException nvae =
new NoViableAltException( Description,
decisionNumber,
s,
input );
Error( nvae );
throw nvae;
}
// Hook for generated subclasses to observe/report prediction errors; default is a no-op.
public virtual void Error( NoViableAltException nvae )
{
}
public SpecialStateTransitionHandler SpecialStateTransition
{
get;
private set;
}
//public virtual int specialStateTransition( int s, IntStream input )
//{
//    return -1;
//}
// Default special-state handler: no viable transition.
static int SpecialStateTransitionDefault( DFA dfa, int s, IIntStream input )
{
return -1;
}
// Run-length decodes a packed table: the string is (count, value) char pairs,
// emitting 'value' repeated 'count' times, as shorts.
public static short[] UnpackEncodedString( string encodedString )
{
// walk first to find how big it is.
int size = 0;
for ( int i = 0; i < encodedString.Length; i += 2 )
{
size += encodedString[i];
}
short[] data = new short[size];
int di = 0;
for ( int i = 0; i < encodedString.Length; i += 2 )
{
char n = encodedString[i];
char v = encodedString[i + 1];
// add v n times to data
for ( int j = 1; j <= n; j++ )
{
data[di++] = (short)v;
}
}
return data;
}
// Same run-length decoding as UnpackEncodedString, but yields chars (unsigned).
public static char[] UnpackEncodedStringToUnsignedChars( string encodedString )
{
// walk first to find how big it is.
int size = 0;
for ( int i = 0; i < encodedString.Length; i += 2 )
{
size += encodedString[i];
}
char[] data = new char[size];
int di = 0;
for ( int i = 0; i < encodedString.Length; i += 2 )
{
char n = encodedString[i];
char v = encodedString[i + 1];
// add v n times to data
for ( int j = 1; j <= n; j++ )
{
data[di++] = v;
}
}
return data;
}
// Compiled in only when ANTLR_DEBUG is defined (Conditional attribute).
[Conditional("ANTLR_DEBUG")]
protected virtual void DebugRecognitionException(RecognitionException ex)
{
IDebugEventListener dbg = recognizer.DebugListener;
if (dbg != null)
dbg.RecognitionException(ex);
}
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Reflection;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif
namespace Newtonsoft.Json.Utilities
{
internal static class TypeExtensions
{
#if NETFX_CORE || PORTABLE
// Default lookup mask for the desktop-shaped reflection shims below.
// NOTE(review): mutable static, but never reassigned in this view — looks intended as a constant; confirm.
private static BindingFlags DefaultFlags = BindingFlags.Public | BindingFlags.Static | BindingFlags.Instance;
// Desktop-style accessor shims for platforms that expose only PropertyInfo.GetMethod/SetMethod.
public static MethodInfo GetGetMethod(this PropertyInfo propertyInfo)
{
return propertyInfo.GetGetMethod(false);
}
// Returns the getter, or null when it is non-public and nonPublic was not requested.
public static MethodInfo GetGetMethod(this PropertyInfo propertyInfo, bool nonPublic)
{
MethodInfo getMethod = propertyInfo.GetMethod;
if (getMethod != null && (getMethod.IsPublic || nonPublic))
return getMethod;
return null;
}
public static MethodInfo GetSetMethod(this PropertyInfo propertyInfo)
{
return propertyInfo.GetSetMethod(false);
}
// Returns the setter, or null when it is non-public and nonPublic was not requested.
public static MethodInfo GetSetMethod(this PropertyInfo propertyInfo, bool nonPublic)
{
MethodInfo setMethod = propertyInfo.SetMethod;
if (setMethod != null && (setMethod.IsPublic || nonPublic))
return setMethod;
return null;
}
public static bool IsSubclassOf(this Type type, Type c)
{
return type.GetTypeInfo().IsSubclassOf(c);
}
public static bool IsAssignableFrom(this Type type, Type c)
{
return type.GetTypeInfo().IsAssignableFrom(c.GetTypeInfo());
}
#endif
// Bridges Delegate.Method (desktop) and Delegate.GetMethodInfo() (WinRT/portable).
public static MethodInfo Method(this Delegate d)
{
#if !(NETFX_CORE || PORTABLE)
return d.Method;
#else
return d.GetMethodInfo();
#endif
}
// Classifies a MemberInfo. On platforms without MemberInfo.MemberType, the
// category is derived from the concrete runtime type; anything unmatched
// (e.g. constructors, nested types) falls into MemberTypes.Other.
public static MemberTypes MemberType(this MemberInfo memberInfo)
{
#if !(NETFX_CORE || PORTABLE || PORTABLE40)
return memberInfo.MemberType;
#else
if (memberInfo is PropertyInfo)
return MemberTypes.Property;
else if (memberInfo is FieldInfo)
return MemberTypes.Field;
else if (memberInfo is EventInfo)
return MemberTypes.Event;
else if (memberInfo is MethodInfo)
return MemberTypes.Method;
else
return MemberTypes.Other;
#endif
}
// The following shims expose Type properties as extension methods so callers can
// compile unchanged on platforms where these members live on TypeInfo instead.
public static bool ContainsGenericParameters(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.ContainsGenericParameters;
#else
return type.GetTypeInfo().ContainsGenericParameters;
#endif
}
public static bool IsInterface(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsInterface;
#else
return type.GetTypeInfo().IsInterface;
#endif
}
public static bool IsGenericType(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsGenericType;
#else
return type.GetTypeInfo().IsGenericType;
#endif
}
public static bool IsGenericTypeDefinition(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsGenericTypeDefinition;
#else
return type.GetTypeInfo().IsGenericTypeDefinition;
#endif
}
public static Type BaseType(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.BaseType;
#else
return type.GetTypeInfo().BaseType;
#endif
}
public static bool IsEnum(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsEnum;
#else
return type.GetTypeInfo().IsEnum;
#endif
}
public static bool IsClass(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsClass;
#else
return type.GetTypeInfo().IsClass;
#endif
}
public static bool IsSealed(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsSealed;
#else
return type.GetTypeInfo().IsSealed;
#endif
}
#if PORTABLE40
// Desktop-shaped GetProperty overload: filters all properties (per bindingFlags)
// by optional name, property type and exact index-parameter signature. The
// placeholder arguments mirror the desktop Type.GetProperty parameter positions.
public static PropertyInfo GetProperty(this Type type, string name, BindingFlags bindingFlags, object placeholder1, Type propertyType, IList<Type> indexParameters, object placeholder2)
{
IList<PropertyInfo> propertyInfos = type.GetProperties(bindingFlags);
return propertyInfos.Where(p =>
{
if (name != null && name != p.Name)
return false;
if (propertyType != null && propertyType != p.PropertyType)
return false;
if (indexParameters != null)
{
if (!p.GetIndexParameters().Select(ip => ip.ParameterType).SequenceEqual(indexParameters))
return false;
}
return true;
}).SingleOrDefault();
}
// Filters members by optional name and member kind; bindingFlags is forwarded to GetMembers.
public static IEnumerable<MemberInfo> GetMember(this Type type, string name, MemberTypes memberType, BindingFlags bindingFlags)
{
return type.GetMembers(bindingFlags).Where(m =>
{
if (name != null && name != m.Name)
return false;
if (m.MemberType() != memberType)
return false;
return true;
});
}
#endif
#if (NETFX_CORE || PORTABLE)
// Maps desktop MethodInfo.GetBaseDefinition onto the runtime-reflection equivalent.
public static MethodInfo GetBaseDefinition(this MethodInfo method)
{
return method.GetRuntimeBaseDefinition();
}
#endif
#if (NETFX_CORE || PORTABLE)
public static bool IsDefined(this Type type, Type attributeType, bool inherit)
{
return type.GetTypeInfo().CustomAttributes.Any(a => a.AttributeType == attributeType);
}
public static MethodInfo GetMethod(this Type type, string name)
{
return type.GetMethod(name, DefaultFlags);
}
public static MethodInfo GetMethod(this Type type, string name, BindingFlags bindingFlags)
{
return type.GetTypeInfo().GetDeclaredMethod(name);
}
public static MethodInfo GetMethod(this Type type, IList<Type> parameterTypes)
{
return type.GetMethod(null, parameterTypes);
}
public static MethodInfo GetMethod(this Type type, string name, IList<Type> parameterTypes)
{
return type.GetMethod(name, DefaultFlags, null, parameterTypes, null);
}
public static MethodInfo GetMethod(this Type type, string name, BindingFlags bindingFlags, object placeHolder1, IList<Type> parameterTypes, object placeHolder2)
{
return type.GetTypeInfo().DeclaredMethods.Where(m =>
{
if (name != null && m.Name != name)
return false;
if (!TestAccessibility(m, bindingFlags))
return false;
return m.GetParameters().Select(p => p.ParameterType).SequenceEqual(parameterTypes);
}).SingleOrDefault();
}
public static PropertyInfo GetProperty(this Type type, string name, BindingFlags bindingFlags, object placeholder1, Type propertyType, IList<Type> indexParameters, object placeholder2)
{
return type.GetTypeInfo().DeclaredProperties.Where(p =>
{
if (name != null && name != p.Name)
return false;
if (propertyType != null && propertyType != p.PropertyType)
return false;
if (indexParameters != null)
{
if (!p.GetIndexParameters().Select(ip => ip.ParameterType).SequenceEqual(indexParameters))
return false;
}
return true;
}).SingleOrDefault();
}
public static IEnumerable<MemberInfo> GetMember(this Type type, string name, MemberTypes memberType, BindingFlags bindingFlags)
{
return type.GetTypeInfo().GetMembersRecursive().Where(m =>
{
if (name != null && name != m.Name)
return false;
if (m.MemberType() != memberType)
return false;
if (!TestAccessibility(m, bindingFlags))
return false;
return true;
});
}
public static IEnumerable<ConstructorInfo> GetConstructors(this Type type)
{
return type.GetConstructors(DefaultFlags);
}
public static IEnumerable<ConstructorInfo> GetConstructors(this Type type, BindingFlags bindingFlags)
{
return type.GetConstructors(bindingFlags, null);
}
private static IEnumerable<ConstructorInfo> GetConstructors(this Type type, BindingFlags bindingFlags, IList<Type> parameterTypes)
{
return type.GetTypeInfo().DeclaredConstructors.Where(c =>
{
if (!TestAccessibility(c, bindingFlags))
return false;
if (parameterTypes != null && !c.GetParameters().Select(p => p.ParameterType).SequenceEqual(parameterTypes))
return false;
return true;
});
}
public static ConstructorInfo GetConstructor(this Type type, IList<Type> parameterTypes)
{
return type.GetConstructor(DefaultFlags, null, parameterTypes, null);
}
public static ConstructorInfo GetConstructor(this Type type, BindingFlags bindingFlags, object placeholder1, IList<Type> parameterTypes, object placeholder2)
{
return type.GetConstructors(bindingFlags, parameterTypes).SingleOrDefault();
}
public static MemberInfo[] GetMember(this Type type, string member)
{
return type.GetMember(member, DefaultFlags);
}
public static MemberInfo[] GetMember(this Type type, string member, BindingFlags bindingFlags)
{
return type.GetTypeInfo().GetMembersRecursive().Where(m => m.Name == member && TestAccessibility(m, bindingFlags)).ToArray();
}
public static MemberInfo GetField(this Type type, string member)
{
    // Field lookup by name with the default binding flags.
    return GetField(type, member, DefaultFlags);
}
public static MemberInfo GetField(this Type type, string member, BindingFlags bindingFlags)
{
// NOTE(review): bindingFlags is ignored here — only fields declared directly
// on this type are searched (inherited fields are not found, and no
// visibility/static filtering is applied). Confirm callers do not rely on
// flag semantics for this overload.
return type.GetTypeInfo().GetDeclaredField(member);
}
public static IEnumerable<PropertyInfo> GetProperties(this Type type, BindingFlags bindingFlags)
{
    // DeclaredOnly restricts the search to this type; otherwise the whole
    // inheritance chain is walked. Accessibility filtering happens last.
    TypeInfo typeInfo = type.GetTypeInfo();
    IList<PropertyInfo> candidates = bindingFlags.HasFlag(BindingFlags.DeclaredOnly)
        ? typeInfo.DeclaredProperties.ToList()
        : typeInfo.GetPropertiesRecursive();
    return candidates.Where(p => TestAccessibility(p, bindingFlags));
}
private static IList<MemberInfo> GetMembersRecursive(this TypeInfo type)
{
    // Collects members declared on the type and all of its base types.
    // A name seen on a more-derived type shadows base declarations: only the
    // first member found per name is kept (this also drops later overloads of
    // the same name, matching the original behavior). A HashSet tracks seen
    // names so the scan is O(n) instead of the original O(n^2) list re-scan.
    IList<MemberInfo> members = new List<MemberInfo>();
    HashSet<string> seenNames = new HashSet<string>();
    TypeInfo t = type;
    while (t != null)
    {
        foreach (MemberInfo member in t.DeclaredMembers)
        {
            if (seenNames.Add(member.Name))
                members.Add(member);
        }
        t = (t.BaseType != null) ? t.BaseType.GetTypeInfo() : null;
    }
    return members;
}
private static IList<PropertyInfo> GetPropertiesRecursive(this TypeInfo type)
{
    // Collects properties declared on the type and all of its base types,
    // keeping only the first (most-derived) property per name. A HashSet
    // tracks seen names so the scan is O(n) instead of the original O(n^2)
    // list re-scan.
    IList<PropertyInfo> properties = new List<PropertyInfo>();
    HashSet<string> seenNames = new HashSet<string>();
    TypeInfo t = type;
    while (t != null)
    {
        foreach (PropertyInfo member in t.DeclaredProperties)
        {
            if (seenNames.Add(member.Name))
                properties.Add(member);
        }
        t = (t.BaseType != null) ? t.BaseType.GetTypeInfo() : null;
    }
    return properties;
}
private static IList<FieldInfo> GetFieldsRecursive(this TypeInfo type)
{
    // Collects fields declared on the type and all of its base types, keeping
    // only the first (most-derived) field per name. A HashSet tracks seen
    // names so the scan is O(n) instead of the original O(n^2) list re-scan.
    IList<FieldInfo> fields = new List<FieldInfo>();
    HashSet<string> seenNames = new HashSet<string>();
    TypeInfo t = type;
    while (t != null)
    {
        foreach (FieldInfo member in t.DeclaredFields)
        {
            if (seenNames.Add(member.Name))
                fields.Add(member);
        }
        t = (t.BaseType != null) ? t.BaseType.GetTypeInfo() : null;
    }
    return fields;
}
public static IEnumerable<MethodInfo> GetMethods(this Type type, BindingFlags bindingFlags)
{
// NOTE(review): bindingFlags is ignored — only methods declared directly on
// this type are returned (no inherited methods, no visibility/static
// filtering). Verify callers do not depend on flag semantics here.
return type.GetTypeInfo().DeclaredMethods;
}
public static PropertyInfo GetProperty(this Type type, string name)
{
    // Property lookup by name with the default binding flags.
    return GetProperty(type, name, DefaultFlags);
}
public static PropertyInfo GetProperty(this Type type, string name, BindingFlags bindingFlags)
{
// NOTE(review): bindingFlags is ignored — only properties declared directly
// on this type are searched (inherited properties are not found). Confirm
// this is intentional before relying on flag-based filtering.
return type.GetTypeInfo().GetDeclaredProperty(name);
}
public static IEnumerable<FieldInfo> GetFields(this Type type)
{
    // Field enumeration with the default binding flags.
    return GetFields(type, DefaultFlags);
}
public static IEnumerable<FieldInfo> GetFields(this Type type, BindingFlags bindingFlags)
{
    // DeclaredOnly restricts the search to this type; otherwise the whole
    // inheritance chain is walked. Accessibility filtering happens last.
    TypeInfo typeInfo = type.GetTypeInfo();
    IList<FieldInfo> candidates = bindingFlags.HasFlag(BindingFlags.DeclaredOnly)
        ? typeInfo.DeclaredFields.ToList()
        : typeInfo.GetFieldsRecursive();
    return candidates.Where(f => TestAccessibility(f, bindingFlags)).ToList();
}
private static bool TestAccessibility(PropertyInfo member, BindingFlags bindingFlags)
{
    // A property is considered accessible when either of its accessors
    // satisfies the binding flags.
    return (member.GetMethod != null && TestAccessibility(member.GetMethod, bindingFlags))
        || (member.SetMethod != null && TestAccessibility(member.SetMethod, bindingFlags));
}
private static bool TestAccessibility(MemberInfo member, BindingFlags bindingFlags)
{
    // Dispatch to the overload for the concrete member kind.
    FieldInfo field = member as FieldInfo;
    if (field != null)
        return TestAccessibility(field, bindingFlags);

    MethodBase method = member as MethodBase;
    if (method != null)
        return TestAccessibility(method, bindingFlags);

    PropertyInfo property = member as PropertyInfo;
    if (property != null)
        return TestAccessibility(property, bindingFlags);

    throw new Exception("Unexpected member type.");
}
private static bool TestAccessibility(FieldInfo member, BindingFlags bindingFlags)
{
    // The field must satisfy both the visibility flag and the
    // instance/static flag.
    bool visibilityMatches = member.IsPublic
        ? bindingFlags.HasFlag(BindingFlags.Public)
        : bindingFlags.HasFlag(BindingFlags.NonPublic);
    bool scopeMatches = member.IsStatic
        ? bindingFlags.HasFlag(BindingFlags.Static)
        : bindingFlags.HasFlag(BindingFlags.Instance);
    return visibilityMatches && scopeMatches;
}
private static bool TestAccessibility(MethodBase member, BindingFlags bindingFlags)
{
    // The method/constructor must satisfy both the visibility flag and the
    // instance/static flag.
    bool visibilityMatches = member.IsPublic
        ? bindingFlags.HasFlag(BindingFlags.Public)
        : bindingFlags.HasFlag(BindingFlags.NonPublic);
    bool scopeMatches = member.IsStatic
        ? bindingFlags.HasFlag(BindingFlags.Static)
        : bindingFlags.HasFlag(BindingFlags.Instance);
    return visibilityMatches && scopeMatches;
}
public static Type[] GetGenericArguments(this Type type)
{
    // Generic type arguments as exposed by TypeInfo.
    TypeInfo typeInfo = type.GetTypeInfo();
    return typeInfo.GenericTypeArguments;
}
public static IEnumerable<Type> GetInterfaces(this Type type)
{
    // Interfaces implemented by the type, via TypeInfo.
    TypeInfo typeInfo = type.GetTypeInfo();
    return typeInfo.ImplementedInterfaces;
}
public static IEnumerable<MethodInfo> GetMethods(this Type type)
{
    // Methods declared directly on the type (inherited methods excluded).
    TypeInfo typeInfo = type.GetTypeInfo();
    return typeInfo.DeclaredMethods;
}
#endif
// Platform shim: on the full framework the property lives on Type itself,
// on WinRT/portable profiles it is exposed via TypeInfo.
public static bool IsAbstract(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsAbstract;
#else
return type.GetTypeInfo().IsAbstract;
#endif
}
// Platform shim: on the full framework the property lives on Type itself,
// on WinRT/portable profiles it is exposed via TypeInfo.
public static bool IsVisible(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsVisible;
#else
return type.GetTypeInfo().IsVisible;
#endif
}
// Platform shim: on the full framework the property lives on Type itself,
// on WinRT/portable profiles it is exposed via TypeInfo.
public static bool IsValueType(this Type type)
{
#if !(NETFX_CORE || PORTABLE)
return type.IsValueType;
#else
return type.GetTypeInfo().IsValueType;
#endif
}
// Walks the inheritance chain looking for a type whose FullName equals
// fullTypeName; if none matches, falls back to scanning implemented
// interfaces. Returns the matching type via the out parameter.
public static bool AssignableToTypeName(this Type type, string fullTypeName, out Type match)
{
Type current = type;
while (current != null)
{
if (string.Equals(current.FullName, fullTypeName, StringComparison.Ordinal))
{
match = current;
return true;
}
current = current.BaseType();
}
// NOTE(review): interfaces are compared by short Name while the class chain
// above uses FullName, and on an interface hit `match` is the original type
// rather than the interface — confirm this asymmetry is intentional before
// changing it.
foreach (Type i in type.GetInterfaces())
{
if (string.Equals(i.Name, fullTypeName, StringComparison.Ordinal))
{
match = type;
return true;
}
}
match = null;
return false;
}
public static bool AssignableToTypeName(this Type type, string fullTypeName)
{
    // Convenience overload that discards the matched type.
    Type ignored;
    return AssignableToTypeName(type, fullTypeName, out ignored);
}
public static MethodInfo GetGenericMethod(this Type type, string name, params Type[] parameterTypes)
{
    // First method with the requested name whose parameter list matches the
    // supplied types, or null when none does.
    return type.GetMethods()
        .Where(method => method.Name == name)
        .FirstOrDefault(method => method.HasParameters(parameterTypes));
}
public static bool HasParameters(this MethodInfo method, params Type[] parameterTypes)
{
    // Parameter types are compared via their string representation;
    // SequenceEqual also covers the length check.
    IEnumerable<string> actualSignature = method.GetParameters().Select(p => p.ParameterType.ToString());
    IEnumerable<string> expectedSignature = parameterTypes.Select(t => t.ToString());
    return actualSignature.SequenceEqual(expectedSignature);
}
public static IEnumerable<Type> GetAllInterfaces(this Type target)
{
    // Each directly implemented interface, followed by the interfaces that
    // interface itself implements. Duplicates are possible and preserved.
    return target.GetInterfaces()
        .SelectMany(directInterface => new[] { directInterface }.Concat(directInterface.GetInterfaces()));
}
public static IEnumerable<MethodInfo> GetAllMethods(this Type target)
{
    // Methods from every reachable interface, then from the target itself.
    List<Type> sources = target.GetAllInterfaces().ToList();
    sources.Add(target);
    return sources.SelectMany(source => source.GetMethods());
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Linq;
using FluentAssertions.Equivalency.Tracing;
using Newtonsoft.Json;
using Xunit;
using Xunit.Sdk;
namespace FluentAssertions.Equivalency.Specs
{
public class DictionarySpecs
{
private class NonGenericDictionary : IDictionary
{
    // Every member forwards straight to the backing dictionary; the class
    // exists only to provide an IDictionary-only surface for the specs.
    private readonly IDictionary dictionary = new Dictionary<object, object>();

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();

    public void CopyTo(Array array, int index) => dictionary.CopyTo(array, index);

    public int Count => dictionary.Count;

    public bool IsSynchronized => dictionary.IsSynchronized;

    public object SyncRoot => dictionary.SyncRoot;

    public void Add(object key, object value) => dictionary.Add(key, value);

    public void Clear() => dictionary.Clear();

    public bool Contains(object key) => dictionary.Contains(key);

    public IDictionaryEnumerator GetEnumerator() => dictionary.GetEnumerator();

    public void Remove(object key) => dictionary.Remove(key);

    public bool IsFixedSize => dictionary.IsFixedSize;

    public bool IsReadOnly => dictionary.IsReadOnly;

    public object this[object key]
    {
        get => dictionary[key];
        set => dictionary[key] = value;
    }

    public ICollection Keys => dictionary.Keys;

    public ICollection Values => dictionary.Values;
}
private class GenericDictionaryNotImplementingIDictionary<TKey, TValue> : IDictionary<TKey, TValue>
{
    // Forwards everything to a backing Dictionary<TKey, TValue>; deliberately
    // does NOT implement the non-generic IDictionary interface.
    private readonly Dictionary<TKey, TValue> dictionary = new Dictionary<TKey, TValue>();

    // Shorthand for the backing store viewed through its collection interface.
    private ICollection<KeyValuePair<TKey, TValue>> AsCollection =>
        dictionary;

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();

    public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator() => dictionary.GetEnumerator();

    void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> item) => AsCollection.Add(item);

    public void Clear() => dictionary.Clear();

    bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> item) => AsCollection.Contains(item);

    void ICollection<KeyValuePair<TKey, TValue>>.CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex) =>
        AsCollection.CopyTo(array, arrayIndex);

    bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> item) => AsCollection.Remove(item);

    public int Count => dictionary.Count;

    public bool IsReadOnly => AsCollection.IsReadOnly;

    public bool ContainsKey(TKey key) => dictionary.ContainsKey(key);

    public void Add(TKey key, TValue value) => dictionary.Add(key, value);

    public bool Remove(TKey key) => dictionary.Remove(key);

    public bool TryGetValue(TKey key, out TValue value) => dictionary.TryGetValue(key, out value);

    public TValue this[TKey key]
    {
        get => dictionary[key];
        set => dictionary[key] = value;
    }

    public ICollection<TKey> Keys => dictionary.Keys;

    public ICollection<TValue> Values => dictionary.Values;
}
/// <summary>
/// FakeItEasy can probably handle this in a couple lines, but then it would not be portable.
/// </summary>
// Inherits Dictionary<int, object> AND implements IDictionary<string, object>
// on top of it, mapping string keys to int keys by parsing. Used by the specs
// to prove that a subject implementing multiple dictionary interfaces is
// rejected with a descriptive error.
private class ClassWithTwoDictionaryImplementations : Dictionary<int, object>, IDictionary<string, object>
{
// Projects the int-keyed pairs into string-keyed pairs on the fly.
IEnumerator<KeyValuePair<string, object>> IEnumerable<KeyValuePair<string, object>>.GetEnumerator()
{
return
((ICollection<KeyValuePair<int, object>>)this).Select(
item =>
new KeyValuePair<string, object>(
item.Key.ToString(CultureInfo.InvariantCulture),
item.Value)).GetEnumerator();
}
public void Add(KeyValuePair<string, object> item)
{
((ICollection<KeyValuePair<int, object>>)this).Add(new KeyValuePair<int, object>(Parse(item.Key), item.Value));
}
public bool Contains(KeyValuePair<string, object> item)
{
return
((ICollection<KeyValuePair<int, object>>)this).Contains(
new KeyValuePair<int, object>(Parse(item.Key), item.Value));
}
// Copies a string-keyed projection of the int-keyed pairs into the array.
public void CopyTo(KeyValuePair<string, object>[] array, int arrayIndex)
{
((ICollection<KeyValuePair<int, object>>)this).Select(
item =>
new KeyValuePair<string, object>(item.Key.ToString(CultureInfo.InvariantCulture), item.Value))
.ToArray()
.CopyTo(array, arrayIndex);
}
public bool Remove(KeyValuePair<string, object> item)
{
return
((ICollection<KeyValuePair<int, object>>)this).Remove(
new KeyValuePair<int, object>(Parse(item.Key), item.Value));
}
bool ICollection<KeyValuePair<string, object>>.IsReadOnly =>
((ICollection<KeyValuePair<int, object>>)this).IsReadOnly;
// The string-keyed members below delegate to the inherited int-keyed
// dictionary after parsing the key.
public bool ContainsKey(string key)
{
return ContainsKey(Parse(key));
}
public void Add(string key, object value)
{
Add(Parse(key), value);
}
public bool Remove(string key)
{
return Remove(Parse(key));
}
public bool TryGetValue(string key, out object value)
{
return TryGetValue(Parse(key), out value);
}
public object this[string key]
{
get => this[Parse(key)];
set => this[Parse(key)] = value;
}
ICollection<string> IDictionary<string, object>.Keys
{
get { return Keys.Select(_ => _.ToString(CultureInfo.InvariantCulture)).ToList(); }
}
ICollection<object> IDictionary<string, object>.Values => Values;
// Converts a string key to the underlying int key; throws FormatException
// for keys that are not integers.
private int Parse(string key)
{
return int.Parse(key, CultureInfo.InvariantCulture);
}
}
public class UserRolesLookupElement
{
    // Backing store; exposed through Roles as a freshly projected dictionary
    // whose values are lazy sequences.
    private readonly Dictionary<Guid, List<string>> innerRoles = new Dictionary<Guid, List<string>>();

    public virtual Dictionary<Guid, IEnumerable<string>> Roles =>
        innerRoles.ToDictionary(pair => pair.Key, pair => pair.Value.Select(role => role));

    public void Add(Guid userId, params string[] roles) => innerRoles[userId] = roles.ToList();
}
// Simple holder used by the specs to test equivalency of a dictionary-typed
// member (including the null-member cases).
public class ClassWithMemberDictionary
{
public Dictionary<string, string> Dictionary { get; set; }
}
public class SomeBaseKeyClass : IEquatable<SomeBaseKeyClass>
{
    // Dictionary key type whose identity is fully determined by Id.
    public SomeBaseKeyClass(int id) => Id = id;

    public int Id { get; }

    public override int GetHashCode() => Id;

    public bool Equals(SomeBaseKeyClass other) => other is not null && Id == other.Id;

    public override bool Equals(object obj) => Equals(obj as SomeBaseKeyClass);

    public override string ToString() => $"BaseKey {Id}";
}
// Derived key type used to exercise key-type compatibility between
// dictionaries; adds no members of its own.
public class SomeDerivedKeyClass : SomeBaseKeyClass
{
public SomeDerivedKeyClass(int id)
: base(id)
{
}
}
// A type implementing only generic IDictionary<TKey, TValue> (not the
// non-generic IDictionary) must still be compared as a dictionary.
[Fact]
public void When_a_dictionary_does_not_implement_the_dictionary_interface_it_should_still_be_treated_as_a_dictionary()
{
// Arrange
IDictionary<string, int> dictionary = new GenericDictionaryNotImplementingIDictionary<string, int>
{
["hi"] = 1
};
ICollection<KeyValuePair<string, int>> collection =
new List<KeyValuePair<string, int>> { new KeyValuePair<string, int>("hi", 1) };
// Act
Action act = () => dictionary.Should().BeEquivalentTo(collection);
// Assert
act.Should().NotThrow();
}
// Read-only dictionaries are compared by key, so insertion order must not matter.
[Fact]
public void When_a_read_only_dictionary_matches_the_expectation_it_should_succeed()
{
// Arrange
IReadOnlyDictionary<string, IEnumerable<string>> dictionary =
new ReadOnlyDictionary<string, IEnumerable<string>>(
new Dictionary<string, IEnumerable<string>>()
{
["Key2"] = new[] { "Value2" },
["Key1"] = new[] { "Value1" }
});
// Act
Action act = () => dictionary.Should().BeEquivalentTo(new Dictionary<string, IEnumerable<string>>()
{
["Key1"] = new[] { "Value1" },
["Key2"] = new[] { "Value2" }
});
// Assert
act.Should().NotThrow();
}
// A value mismatch in a read-only dictionary must fail and name the offending
// key and index in the message.
[Fact]
public void When_a_read_only_dictionary_does_not_match_the_expectation_it_should_throw()
{
// Arrange
IReadOnlyDictionary<string, IEnumerable<string>> dictionary =
new ReadOnlyDictionary<string, IEnumerable<string>>(
new Dictionary<string, IEnumerable<string>>()
{
["Key2"] = new[] { "Value2" },
["Key1"] = new[] { "Value1" }
});
// Act
Action act = () => dictionary.Should().BeEquivalentTo(new Dictionary<string, IEnumerable<string>>()
{
["Key2"] = new[] { "Value3" },
["Key1"] = new[] { "Value1" }
});
// Assert
act.Should().Throw<XunitException>().WithMessage("Expected dictionary[Key2][0]*Value3*Value2*");
}
// A null subject must produce a proper assertion failure (with the reason),
// not a NullReferenceException.
[Fact]
public void When_a_dictionary_is_compared_to_null_it_should_not_throw_a_NullReferenceException()
{
// Arrange
Dictionary<int, int> subject = null;
Dictionary<int, int> expectation = new Dictionary<int, int>();
// Act
Action act = () => subject.Should().BeEquivalentTo(expectation, "because we do expect a valid dictionary");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected*not to be*null*valid dictionary*");
}
// Two null dictionaries are equivalent.
[Fact]
public void When_a_null_dictionary_is_compared_to_null_it_should_not_throw()
{
// Arrange
Dictionary<int, int> subject = null;
Dictionary<int, int> expectation = null;
// Act
Action act = () => subject.Should().BeEquivalentTo(expectation);
// Assert
act.Should().NotThrow();
}
// BeEquivalentTo on a dictionary must return a constraint usable for
// chained assertions (And.ContainKey).
[Fact]
public void When_a_dictionary_is_compared_to_a_dictionary_it_should_allow_chaining()
{
// Arrange
Dictionary<int, int> subject = new Dictionary<int, int> { [42] = 1337 };
Dictionary<int, int> expectation = new Dictionary<int, int> { [42] = 1337 };
// Act
Action act = () => subject.Should().BeEquivalentTo(expectation)
.And.ContainKey(42);
// Assert
act.Should().NotThrow();
}
// The overload taking an options lambda must also support chaining.
[Fact]
public void When_a_dictionary_is_compared_to_a_dictionary_with_a_config_it_should_allow_chaining()
{
// Arrange
Dictionary<int, int> subject = new Dictionary<int, int> { [42] = 1337 };
Dictionary<int, int> expectation = new Dictionary<int, int> { [42] = 1337 };
// Act
Action act = () => subject.Should().BeEquivalentTo(expectation, opt => opt)
.And.ContainKey(42);
// Assert
act.Should().NotThrow();
}
// Dictionary-typed members of an object graph are compared by key, not by
// insertion order.
[Fact]
public void When_a_dictionary_property_is_detected_it_should_ignore_the_order_of_the_pairs()
{
// Arrange
var expected = new
{
Customers = new Dictionary<string, string>
{
["Key2"] = "Value2",
["Key1"] = "Value1"
}
};
var subject = new
{
Customers = new Dictionary<string, string>
{
["Key1"] = "Value1",
["Key2"] = "Value2"
}
};
// Act
Action act = () => subject.Should().BeEquivalentTo(expected);
// Assert
act.Should().NotThrow();
}
// A list of key-value pairs is compared against a dictionary expectation
// keyed by the pair keys; a value mismatch must fail with the key in the path.
// NOTE(review): the method name says "should_succeed" but the body asserts a
// failure (values 1 vs 2) — the name looks stale; confirm and rename.
[Fact]
public void When_a_collection_of_key_value_pairs_is_equivalent_to_the_dictionary_it_should_succeed()
{
// Arrange
var collection = new List<KeyValuePair<string, int>> { new("hi", 1) };
// Act / Assert
Action act = () => collection.Should().BeEquivalentTo(new Dictionary<string, int>()
{
{ "hi", 2 }
});
act.Should().Throw<XunitException>().WithMessage("Expected collection[hi]*to be 2, but found 1.*");
}
// With RespectingRuntimeTypes, object-typed variables holding dictionaries
// are compared as dictionaries.
[Fact]
public void
When_a_generic_dictionary_is_typed_as_object_and_runtime_typing_has_is_specified_it_should_use_the_runtime_type()
{
// Arrange
object object1 = new Dictionary<string, string> { ["greeting"] = "hello" };
object object2 = new Dictionary<string, string> { ["greeting"] = "hello" };
// Act
Action act = () => object1.Should().BeEquivalentTo(object2, opts => opts.RespectingRuntimeTypes());
// Assert
act.Should().NotThrow("the runtime type is a dictionary and the dictionaries are equivalent");
}
// Even without explicit options, object-typed dictionary subjects are
// compared by their runtime dictionary type.
[Fact]
public void When_a_generic_dictionary_is_typed_as_object_it_should_respect_the_runtime_typed()
{
// Arrange
object object1 = new Dictionary<string, string> { ["greeting"] = "hello" };
object object2 = new Dictionary<string, string> { ["greeting"] = "hello" };
// Act
Action act = () => object1.Should().BeEquivalentTo(object2);
// Assert
act.Should().NotThrow();
}
// Same as the generic case above, but for a non-generic IDictionary subject.
[Fact]
public void
When_a_non_generic_dictionary_is_typed_as_object_and_runtime_typing_is_specified_the_runtime_type_should_be_respected()
{
// Arrange
object object1 = new NonGenericDictionary { ["greeting"] = "hello" };
object object2 = new NonGenericDictionary { ["greeting"] = "hello" };
// Act
Action act = () => object1.Should().BeEquivalentTo(object2, opts => opts.RespectingRuntimeTypes());
// Assert
act.Should().NotThrow("the runtime type is a dictionary and the dictionaries are equivalent");
}
// Even a successful comparison must emit trace output when WithTracing is on.
[Fact]
public void
When_a_non_generic_dictionary_is_decided_to_be_equivalent_to_expected_trace_is_still_written()
{
// Arrange
object object1 = new NonGenericDictionary { ["greeting"] = "hello" };
object object2 = new NonGenericDictionary { ["greeting"] = "hello" };
var traceWriter = new StringBuilderTraceWriter();
// Act
object1.Should().BeEquivalentTo(object2, opts => opts.RespectingRuntimeTypes().WithTracing(traceWriter));
// Assert
string trace = traceWriter.ToString();
trace.Should().Contain("Recursing into dictionary item greeting at object1");
}
// Two empty non-generic dictionaries typed as object compare as equivalent.
[Fact]
public void When_a_non_generic_dictionary_is_typed_as_object_it_should_respect_the_runtime_type()
{
// Arrange
object object1 = new NonGenericDictionary();
object object2 = new NonGenericDictionary();
// Act
Action act = () => object1.Should().BeEquivalentTo(object2);
// Assert
act.Should().NotThrow();
}
// A subject implementing more than one IDictionary<,> closed interface is
// ambiguous and must be rejected with a descriptive ArgumentException.
[Fact]
public void When_an_object_implements_two_IDictionary_interfaces_it_should_fail_descriptively()
{
// Arrange
var object1 = (object)new ClassWithTwoDictionaryImplementations();
var object2 = (object)new ClassWithTwoDictionaryImplementations();
// Act
Action act = () => object1.Should().BeEquivalentTo(object2);
// Assert
act.Should().Throw<ArgumentException>()
.WithMessage("*expectation*implements multiple dictionary types*");
}
// With runtime typing the values (Car vs Customer) are compared by their
// actual types, which differ, so the assertion must fail.
[Fact]
public void
When_asserting_equivalence_of_dictionaries_and_configured_to_respect_runtime_type_it_should_respect_the_runtime_type()
{
// Arrange
IDictionary dictionary1 = new NonGenericDictionary { [2001] = new Car() };
IDictionary dictionary2 = new NonGenericDictionary { [2001] = new Customer() };
// Act
Action act =
() =>
dictionary1.Should().BeEquivalentTo(dictionary2,
opts => opts.RespectingRuntimeTypes());
// Assert
act.Should().Throw<XunitException>("the types have different properties");
}
// Without runtime typing, only the declared value type (CustomerType) is
// compared, so extra members of the derived expectation are ignored.
[Fact]
public void When_asserting_equivalence_of_dictionaries_it_should_respect_the_declared_type()
{
// Arrange
var actual = new Dictionary<int, CustomerType> { [0] = new CustomerType("123") };
var expectation = new Dictionary<int, CustomerType> { [0] = new DerivedCustomerType("123") };
// Act
Action act = () => actual.Should().BeEquivalentTo(expectation);
// Assert
act.Should().NotThrow("because it should ignore the properties of the derived type");
}
// Passing a null options lambda is a caller error and must throw
// ArgumentNullException naming the "config" parameter.
[Fact]
public void When_injecting_a_null_config_it_should_throw()
{
// Arrange
var actual = new Dictionary<int, CustomerType>();
var expectation = new Dictionary<int, CustomerType>();
// Act
Action act = () => actual.Should().BeEquivalentTo(expectation, config: null);
// Assert
act.Should().ThrowExactly<ArgumentNullException>()
.WithParameterName("config");
}
// With runtime typing plus member comparison, the derived expectation value
// has extra members the subject lacks, so the assertion must fail.
[Fact]
public void
When_asserting_equivalence_of_generic_dictionaries_and_configured_to_use_runtime_properties_it_should_respect_the_runtime_type()
{
// Arrange
var actual = new Dictionary<int, CustomerType> { [0] = new CustomerType("123") };
var expectation = new Dictionary<int, CustomerType> { [0] = new DerivedCustomerType("123") };
// Act
Action act =
() =>
actual.Should().BeEquivalentTo(expectation, opts => opts
.RespectingRuntimeTypes()
.ComparingByMembers<CustomerType>()
);
// Assert
act.Should().Throw<XunitException>("the runtime types have different properties");
}
// An object-keyed subject can be checked against string keys, but a plain
// object key never equals the expected string key, so the key is reported missing.
[Fact]
public void
When_asserting_equivalence_of_generic_dictionaries_and_the_expectation_key_type_is_assignable_from_the_subjects_it_should_fail_if_incompatible()
{
// Arrange
var actual = new Dictionary<object, string> { [new object()] = "hello" };
var expected = new Dictionary<string, string> { ["greeting"] = "hello" };
// Act
Action act = () => actual.Should().BeEquivalentTo(expected);
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected actual*to contain key \"greeting\"*");
}
// object-keyed subject whose actual keys are strings matches a string-keyed
// expectation.
[Fact]
public void When_the_subjects_key_type_is_compatible_with_the_expected_key_type_it_should_match()
{
// Arrange
var dictionary1 = new Dictionary<object, string> { ["greeting"] = "hello" };
var dictionary2 = new Dictionary<string, string> { ["greeting"] = "hello" };
// Act
Action act = () => dictionary1.Should().BeEquivalentTo(dictionary2);
// Assert
act.Should().NotThrow("the keys are still strings");
}
// int keys cannot satisfy a string-keyed expectation; the failure must name
// the expected key type.
[Fact]
public void When_the_subjects_key_type_is_not_compatible_with_the_expected_key_type_it_should_throw()
{
// Arrange
var actual = new Dictionary<int, string> { [1234] = "hello" };
var expectation = new Dictionary<string, string> { ["greeting"] = "hello" };
// Act
Action act = () => actual.Should().BeEquivalentTo(expectation);
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected actual to be a dictionary or collection of key-value pairs that is keyed to type System.String*");
}
// Value collections inside a generic dictionary must flow through the
// enumerable equivalency step with their element type intact, so the
// differing lists are detected.
[Fact]
public void
When_asserting_equivalence_of_generic_dictionaries_the_type_information_should_be_preserved_for_other_equivalency_steps()
{
// Arrange
var userId = Guid.NewGuid();
var dictionary1 = new Dictionary<Guid, IEnumerable<string>> { [userId] = new List<string> { "Admin", "Special" } };
var dictionary2 = new Dictionary<Guid, IEnumerable<string>> { [userId] = new List<string> { "Admin", "Other" } };
// Act
Action act = () => dictionary1.Should().BeEquivalentTo(dictionary2);
// Assert
act.Should().Throw<XunitException>();
}
// Non-generic dictionaries carry untyped values; the value difference must
// still surface in the failure message.
[Fact]
public void
When_asserting_equivalence_of_non_generic_dictionaries_the_lack_of_type_information_should_be_preserved_for_other_equivalency_steps()
{
// Arrange
var userId = Guid.NewGuid();
var dictionary1 = new NonGenericDictionary { [userId] = new List<string> { "Admin", "Special" } };
var dictionary2 = new NonGenericDictionary { [userId] = new List<string> { "Admin", "Other" } };
// Act
Action act = () => dictionary1.Should().BeEquivalentTo(dictionary2);
// Assert
act.Should().Throw<XunitException>()
.WithMessage("*Other*Special*");
}
// A derived subject value compared against a base-typed expectation is
// checked on the declared (base) members only.
[Fact]
public void When_asserting_the_equivalence_of_generic_dictionaries_it_should_respect_the_declared_type()
{
// Arrange
var actual = new Dictionary<int, CustomerType>
{
[0] = new DerivedCustomerType("123")
};
var expectation = new Dictionary<int, CustomerType> { [0] = new CustomerType("123") };
// Act
Action act = () => actual.Should().BeEquivalentTo(expectation);
// Assert
act.Should().NotThrow("the objects are equivalent according to the members on the declared type");
}
// Two objects whose dictionary member is null on both sides are equivalent.
[Fact]
public void When_the_both_properties_are_null_it_should_not_throw()
{
// Arrange
var expected = new ClassWithMemberDictionary
{
Dictionary = null
};
var subject = new ClassWithMemberDictionary
{
Dictionary = null
};
// Act
Action act = () => subject.Should().BeEquivalentTo(expected);
// Assert
act.Should().NotThrow<XunitException>();
}
// Lazily-projected dictionary values (Roles) must keep their element typing
// so the differing role at index 1 is reported with its full path.
[Fact]
public void
When_the_dictionary_values_are_handled_by_the_enumerable_equivalency_step_the_type_information_should_be_preserved()
{
// Arrange
var userId = Guid.NewGuid();
var actual = new UserRolesLookupElement();
actual.Add(userId, "Admin", "Special");
var expected = new UserRolesLookupElement();
expected.Add(userId, "Admin", "Other");
// Act
Action act = () => actual.Should().BeEquivalentTo(expected);
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected*Roles[*][1]*Other*Special*");
}
// A subject dictionary member missing an expected key must fail, reporting
// the expected item count and the reason.
[Fact]
public void When_the_other_dictionary_does_not_contain_enough_items_it_should_throw()
{
// Arrange
var expected = new
{
Customers = new Dictionary<string, string>
{
["Key1"] = "Value1",
["Key2"] = "Value2"
}
};
var subject = new
{
Customers = new Dictionary<string, string>
{
["Key1"] = "Value1"
}
};
// Act
Action act = () => subject.Should().BeEquivalentTo(expected, "because we are expecting two keys");
// Assert
act.Should().Throw<XunitException>().WithMessage(
"Expected*Customers*dictionary*2 item(s)*expecting two keys*but*misses*");
}
// A non-dictionary subject member compared against a dictionary expectation
// must fail with a message naming the expected key type.
[Fact]
public void When_the_other_property_is_not_a_dictionary_it_should_throw()
{
// Arrange
var subject = new
{
Customers = "I am a string"
};
var expected = new
{
Customers = new Dictionary<string, string>
{
["Key2"] = "Value2",
["Key1"] = "Value1"
}
};
// Act
Action act = () => subject.Should().BeEquivalentTo(expected);
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected property subject.Customers to be a dictionary or collection of key-value pairs that is keyed to type System.String*");
}
// A populated subject member compared against a null expectation member must
// fail and include the reason.
[Fact]
public void When_the_other_property_is_null_it_should_throw()
{
// Arrange
var subject = new ClassWithMemberDictionary
{
Dictionary = new Dictionary<string, string>
{
["Key2"] = "Value2",
["Key1"] = "Value1"
}
};
var expected = new ClassWithMemberDictionary
{
Dictionary = null
};
// Act
Action act = () => subject.Should().BeEquivalentTo(expected, "because we are not expecting anything");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("*property*Dictionary*to be <null> because we are not expecting anything, but found *{*}*");
}
// A subject with fewer entries must fail and enumerate the missing key(s).
[Fact]
public void When_subject_dictionary_asserted_to_be_equivalent_have_less_elements_fails_describing_missing_keys()
{
// Arrange
var dictionary1 = new Dictionary<string, string>
{
["greeting"] = "hello"
};
var dictionary2 = new Dictionary<string, string>
{
["greeting"] = "hello",
["farewell"] = "goodbye"
};
// Act
Action action = () => dictionary1.Should().BeEquivalentTo(dictionary2);
// Assert
action.Should().Throw<XunitException>()
.WithMessage("Expected dictionary1*to be a dictionary with 2 item(s), but it misses key(s) {\"farewell\"}*");
}
// Missing class-typed keys are rendered via their ToString ("BaseKey 2")
// in the failure message.
[Fact]
public void When_subject_dictionary_with_class_keys_asserted_to_be_equivalent_have_less_elements_other_dictionary_derived_class_keys_fails_describing_missing_keys()
{
// Arrange
var dictionary1 = new Dictionary<SomeBaseKeyClass, string>
{
[new SomeDerivedKeyClass(1)] = "hello"
};
var dictionary2 = new Dictionary<SomeDerivedKeyClass, string>
{
[new SomeDerivedKeyClass(1)] = "hello",
[new SomeDerivedKeyClass(2)] = "hello"
};
// Act
Action action = () => dictionary1.Should().BeEquivalentTo(dictionary2);
// Assert
action.Should().Throw<XunitException>()
.WithMessage("Expected*to be a dictionary with 2 item(s), but*misses key(s) {BaseKey 2}*");
}
// A subject with extra entries must fail and enumerate the additional key(s),
// including the caller-supplied reason.
[Fact]
public void When_subject_dictionary_asserted_to_be_equivalent_have_more_elements_fails_describing_additional_keys()
{
// Arrange
var expectation = new Dictionary<string, string>
{
["greeting"] = "hello"
};
var subject = new Dictionary<string, string>
{
["greeting"] = "hello",
["farewell"] = "goodbye"
};
// Act
Action action = () => subject.Should().BeEquivalentTo(expectation, "because we expect one pair");
// Assert
action.Should().Throw<XunitException>()
.WithMessage("Expected subject*to be a dictionary with 1 item(s) because we expect one pair, but*additional key(s) {\"farewell\"}*");
}
// Comparing a derived-keyed subject against a base-keyed expectation fails
// up-front on key-type incompatibility (base keys are not derived keys).
[Fact]
public void When_subject_dictionary_with_class_keys_asserted_to_be_equivalent_and_other_dictionary_derived_class_keys_fails_because_of_types_incompatibility()
{
// Arrange
var dictionary1 = new Dictionary<SomeBaseKeyClass, string>
{
[new SomeDerivedKeyClass(1)] = "hello"
};
var dictionary2 = new Dictionary<SomeDerivedKeyClass, string>
{
[new SomeDerivedKeyClass(1)] = "hello",
[new SomeDerivedKeyClass(2)] = "hello"
};
// Act
Action action = () => dictionary2.Should().BeEquivalentTo(dictionary1);
// Assert
action.Should().Throw<XunitException>()
.WithMessage("Expected dictionary2 to be a dictionary or collection of key-value pairs that is keyed to type FluentAssertions.Equivalency.Specs.DictionarySpecs+SomeBaseKeyClass.*");
}
// Key comparison is case-sensitive: "GREETING" is both an additional key and
// the cause of two missing keys; both lists must appear in the message.
[Fact]
public void When_subject_dictionary_asserted_to_be_equivalent_have_less_elements_but_some_missing_and_some_additional_elements_fails_describing_missing_and_additional_keys()
{
// Arrange
var dictionary1 = new Dictionary<string, string>
{
["GREETING"] = "hello"
};
var dictionary2 = new Dictionary<string, string>
{
["greeting"] = "hello",
["farewell"] = "goodbye"
};
// Act
Action action = () => dictionary1.Should().BeEquivalentTo(dictionary2);
// Assert
action.Should().Throw<XunitException>()
.WithMessage("Expected*to be a dictionary with 2 item(s), but*misses key(s)*{\"greeting\", \"farewell\"}*additional key(s) {\"GREETING\"}*");
}
// Mirror of the previous case with subject and expectation swapped: both the
// missing and the additional keys must be reported.
[Fact]
public void When_subject_dictionary_asserted_to_be_equivalent_have_more_elements_but_some_missing_and_some_additional_elements_fails_describing_missing_and_additional_keys()
{
// Arrange
var dictionary1 = new Dictionary<string, string>
{
["GREETING"] = "hello"
};
var dictionary2 = new Dictionary<string, string>
{
["greeting"] = "hello",
["farewell"] = "goodbye"
};
// Act
Action action = () => dictionary2.Should().BeEquivalentTo(dictionary1);
// Assert
action.Should().Throw<XunitException>()
.WithMessage("Expected*to be a dictionary with 1 item(s), but*misses key(s) {\"GREETING\"}*additional key(s) {\"greeting\", \"farewell\"}*");
}
// Nullable values (including null) match by key regardless of insertion order.
[Fact]
public void When_two_equivalent_dictionaries_are_compared_directly_as_if_it_is_a_collection_it_should_succeed()
{
// Arrange
var result = new Dictionary<string, int?>
{
["C"] = null,
["B"] = 0,
["A"] = 0
};
// Act
Action act = () => result.Should().BeEquivalentTo(new Dictionary<string, int?>
{
["A"] = 0,
["B"] = 0,
["C"] = null
});
// Assert
act.Should().NotThrow();
}
// Plain value-typed dictionaries match by key regardless of insertion order.
[Fact]
public void When_two_equivalent_dictionaries_are_compared_directly_it_should_succeed()
{
// Arrange
var result = new Dictionary<string, int>
{
["C"] = 0,
["B"] = 0,
["A"] = 0
};
// Act
Action act = () => result.Should().BeEquivalentTo(new Dictionary<string, int>
{
["A"] = 0,
["B"] = 0,
["C"] = 0
});
// Assert
act.Should().NotThrow();
}
// Null values inside a dictionary member must compare equal without crashing.
[Fact]
public void When_two_nested_dictionaries_contain_null_values_it_should_not_crash()
{
// Arrange
var projection = new
{
ReferencedEquipment = new Dictionary<int, string>
{
[1] = null
}
};
var persistedProjection = new
{
ReferencedEquipment = new Dictionary<int, string>
{
[1] = null
}
};
// Act
Action act = () => persistedProjection.Should().BeEquivalentTo(projection);
// Assert
act.Should().NotThrow();
}
// A value mismatch inside a dictionary member must fail with the full member
// path and the string-difference index in the message.
[Fact]
public void When_two_nested_dictionaries_do_not_match_it_should_throw()
{
// Arrange
var projection = new
{
ReferencedEquipment = new Dictionary<int, string>
{
[1] = "Bla1"
}
};
var persistedProjection = new
{
ReferencedEquipment = new Dictionary<int, string>
{
[1] = "Bla2"
}
};
// Act
Action act = () => persistedProjection.Should().BeEquivalentTo(projection);
// Assert
act.Should().Throw<XunitException>().WithMessage(
"Expected*ReferencedEquipment[1]*Bla1*Bla2*2*index 3*");
}
[Fact]
public void When_a_dictionary_is_missing_a_key_it_should_report_the_specific_key()
{
    // Arrange
    // NOTE: the subject must stay named "actual" — the expected failure message
    // echoes the caller identifier ("Expected actual*").
    var actual = new Dictionary<string, string>
    {
        ["a"] = "x",
        ["b"] = "x"
    };
    var expected = new Dictionary<string, string>
    {
        ["a"] = "x",
        ["c"] = "x" // key mismatch
    };

    // Act
    Action act = () => actual.Should().BeEquivalentTo(expected, "because we're expecting {0}", "c");

    // Assert: the missing key and the formatted reason both appear in the message.
    act.Should().Throw<XunitException>().WithMessage(
        "Expected actual*key*c*because we're expecting c*");
}
[Fact]
public void When_a_nested_dictionary_value_doesnt_match_it_should_throw()
{
    // Arrange: Json.NET deserializes the nested object as a JObject of JValues,
    // which is not equivalent to a plain Dictionary<string, object>.
    const string json = @"{
""NestedDictionary"": {
""StringProperty"": ""string"",
""IntProperty"": 123
}
}";
    var expected = new Dictionary<string, object>
    {
        {
            "NestedDictionary", new Dictionary<string, object>
            {
                { "StringProperty", "string" },
                { "IntProperty", 123 }
            }
        }
    };

    // Act
    var actual = JsonConvert.DeserializeObject<Dictionary<string, object>>(json);
    Action act = () => actual.Should().BeEquivalentTo(expected);

    // Assert: the type mismatch (String vs JValue) is reported.
    act.Should().Throw<XunitException>()
        .WithMessage("Expected*String*JValue*");
}
[Fact]
public void When_a_custom_rule_is_applied_on_a_dictionary_it_should_apply_it_on_the_values()
{
    // Arrange: values differ slightly but stay within the 0.1 tolerance.
    var subject = new Dictionary<string, double>
    {
        ["a"] = 1.2345,
        ["b"] = 2.4567,
        ["c"] = 5.6789,
        ["s"] = 3.333
    };
    var expectation = new Dictionary<string, double>
    {
        ["a"] = 1.2348,
        ["b"] = 2.4561,
        ["c"] = 5.679,
        ["s"] = 3.333
    };

    // Act / Assert: every double value is compared approximately instead of exactly.
    subject.Should().BeEquivalentTo(expectation, options => options
        .Using<double>(ctx => ctx.Subject.Should().BeApproximately(ctx.Expectation, 0.1))
        .WhenTypeIs<double>());
}
}
}
| |
namespace EIDSS.Reports.BaseControls.Aggregate
{
partial class AdmUnitReport
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // Only managed components need explicit disposal.
        if (components != null)
        {
            components.Dispose();
        }
    }
    base.Dispose(disposing);
}
#region Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
    // Designer-generated initialization: creates all bands/tables/data components,
    // applies localized resources, and wires the report's data source.
    System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(AdmUnitReport));
    // Instantiate every band, table, cell and data component of the report.
    this.Detail = new DevExpress.XtraReports.UI.DetailBand();
    this.xrTable2 = new DevExpress.XtraReports.UI.XRTable();
    this.xrTableRow2 = new DevExpress.XtraReports.UI.XRTableRow();
    this.xrTableCell8 = new DevExpress.XtraReports.UI.XRTableCell();
    this.xrTableCell5 = new DevExpress.XtraReports.UI.XRTableCell();
    this.xrTableCell6 = new DevExpress.XtraReports.UI.XRTableCell();
    this.xrTableCell7 = new DevExpress.XtraReports.UI.XRTableCell();
    this.PageHeader = new DevExpress.XtraReports.UI.PageHeaderBand();
    this.PageFooter = new DevExpress.XtraReports.UI.PageFooterBand();
    this.m_DataSet = new EIDSS.Reports.BaseControls.Aggregate.AdmUnitDataSet();
    this.m_Adaper = new EIDSS.Reports.BaseControls.Aggregate.AdmUnitDataSetTableAdapters.HeaderAdapter();
    this.topMarginBand1 = new DevExpress.XtraReports.UI.TopMarginBand();
    this.bottomMarginBand1 = new DevExpress.XtraReports.UI.BottomMarginBand();
    this.ReportHeader = new DevExpress.XtraReports.UI.ReportHeaderBand();
    this.xrTable1 = new DevExpress.XtraReports.UI.XRTable();
    this.xrTableRow1 = new DevExpress.XtraReports.UI.XRTableRow();
    this.AdmUnitHeaderCell = new DevExpress.XtraReports.UI.XRTableCell();
    this.xrTableCell3 = new DevExpress.XtraReports.UI.XRTableCell();
    // Suspend initialization events until all properties are assigned.
    ((System.ComponentModel.ISupportInitialize)(this.xrTable2)).BeginInit();
    ((System.ComponentModel.ISupportInitialize)(this.m_DataSet)).BeginInit();
    ((System.ComponentModel.ISupportInitialize)(this.xrTable1)).BeginInit();
    ((System.ComponentModel.ISupportInitialize)(this)).BeginInit();
    //
    // Detail
    //
    this.Detail.Controls.AddRange(new DevExpress.XtraReports.UI.XRControl[] {
        this.xrTable2});
    resources.ApplyResources(this.Detail, "Detail");
    this.Detail.Name = "Detail";
    this.Detail.Padding = new DevExpress.XtraPrinting.PaddingInfo(0, 0, 0, 0, 100F);
    this.Detail.StylePriority.UseFont = false;
    this.Detail.StylePriority.UseTextAlignment = false;
    //
    // xrTable2
    //
    this.xrTable2.Borders = ((DevExpress.XtraPrinting.BorderSide)(((DevExpress.XtraPrinting.BorderSide.Left | DevExpress.XtraPrinting.BorderSide.Right)
    | DevExpress.XtraPrinting.BorderSide.Bottom)));
    resources.ApplyResources(this.xrTable2, "xrTable2");
    this.xrTable2.Name = "xrTable2";
    this.xrTable2.Rows.AddRange(new DevExpress.XtraReports.UI.XRTableRow[] {
        this.xrTableRow2});
    this.xrTable2.StylePriority.UseBorders = false;
    //
    // xrTableRow2
    //
    this.xrTableRow2.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
        this.xrTableCell8,
        this.xrTableCell5,
        this.xrTableCell6,
        this.xrTableCell7});
    this.xrTableRow2.Name = "xrTableRow2";
    resources.ApplyResources(this.xrTableRow2, "xrTableRow2");
    //
    // xrTableCell8
    //
    this.xrTableCell8.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
        new DevExpress.XtraReports.UI.XRBinding("Text", null, "HeaderTable.AdmUnitName")});
    this.xrTableCell8.Name = "xrTableCell8";
    resources.ApplyResources(this.xrTableCell8, "xrTableCell8");
    //
    // xrTableCell5
    //
    this.xrTableCell5.Borders = ((DevExpress.XtraPrinting.BorderSide)((DevExpress.XtraPrinting.BorderSide.Left | DevExpress.XtraPrinting.BorderSide.Bottom)));
    this.xrTableCell5.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
        new DevExpress.XtraReports.UI.XRBinding("Text", null, "HeaderTable.StartDate", "{0:dd/MM/yyyy}")});
    this.xrTableCell5.Name = "xrTableCell5";
    this.xrTableCell5.StylePriority.UseBorders = false;
    this.xrTableCell5.StylePriority.UseTextAlignment = false;
    resources.ApplyResources(this.xrTableCell5, "xrTableCell5");
    //
    // xrTableCell6
    //
    this.xrTableCell6.Borders = DevExpress.XtraPrinting.BorderSide.Bottom;
    this.xrTableCell6.Name = "xrTableCell6";
    this.xrTableCell6.StylePriority.UseBorders = false;
    resources.ApplyResources(this.xrTableCell6, "xrTableCell6");
    //
    // xrTableCell7
    //
    this.xrTableCell7.Borders = ((DevExpress.XtraPrinting.BorderSide)((DevExpress.XtraPrinting.BorderSide.Right | DevExpress.XtraPrinting.BorderSide.Bottom)));
    this.xrTableCell7.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
        new DevExpress.XtraReports.UI.XRBinding("Text", null, "HeaderTable.FinishDate", "{0:dd/MM/yyyy}")});
    this.xrTableCell7.Name = "xrTableCell7";
    this.xrTableCell7.StylePriority.UseBorders = false;
    this.xrTableCell7.StylePriority.UseTextAlignment = false;
    resources.ApplyResources(this.xrTableCell7, "xrTableCell7");
    //
    // PageHeader
    //
    resources.ApplyResources(this.PageHeader, "PageHeader");
    this.PageHeader.Name = "PageHeader";
    this.PageHeader.Padding = new DevExpress.XtraPrinting.PaddingInfo(0, 0, 0, 0, 100F);
    //
    // PageFooter
    //
    resources.ApplyResources(this.PageFooter, "PageFooter");
    this.PageFooter.Name = "PageFooter";
    this.PageFooter.Padding = new DevExpress.XtraPrinting.PaddingInfo(0, 0, 0, 0, 100F);
    //
    // m_DataSet
    //
    this.m_DataSet.DataSetName = "AdmUnitDataSet";
    this.m_DataSet.SchemaSerializationMode = System.Data.SchemaSerializationMode.IncludeSchema;
    //
    // m_Adaper
    //
    this.m_Adaper.ClearBeforeFill = true;
    //
    // topMarginBand1
    //
    resources.ApplyResources(this.topMarginBand1, "topMarginBand1");
    this.topMarginBand1.Name = "topMarginBand1";
    //
    // bottomMarginBand1
    //
    resources.ApplyResources(this.bottomMarginBand1, "bottomMarginBand1");
    this.bottomMarginBand1.Name = "bottomMarginBand1";
    //
    // ReportHeader
    //
    this.ReportHeader.Controls.AddRange(new DevExpress.XtraReports.UI.XRControl[] {
        this.xrTable1});
    resources.ApplyResources(this.ReportHeader, "ReportHeader");
    this.ReportHeader.Name = "ReportHeader";
    this.ReportHeader.StylePriority.UseTextAlignment = false;
    //
    // xrTable1
    //
    resources.ApplyResources(this.xrTable1, "xrTable1");
    this.xrTable1.Name = "xrTable1";
    this.xrTable1.Rows.AddRange(new DevExpress.XtraReports.UI.XRTableRow[] {
        this.xrTableRow1});
    this.xrTable1.StylePriority.UseFont = false;
    //
    // xrTableRow1
    //
    this.xrTableRow1.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
        this.AdmUnitHeaderCell,
        this.xrTableCell3});
    this.xrTableRow1.Name = "xrTableRow1";
    resources.ApplyResources(this.xrTableRow1, "xrTableRow1");
    //
    // AdmUnitHeaderCell
    //
    this.AdmUnitHeaderCell.Name = "AdmUnitHeaderCell";
    resources.ApplyResources(this.AdmUnitHeaderCell, "AdmUnitHeaderCell");
    //
    // xrTableCell3
    //
    this.xrTableCell3.Name = "xrTableCell3";
    this.xrTableCell3.StylePriority.UseFont = false;
    resources.ApplyResources(this.xrTableCell3, "xrTableCell3");
    //
    // AdmUnitReport
    //
    this.Bands.AddRange(new DevExpress.XtraReports.UI.Band[] {
        this.Detail,
        this.PageHeader,
        this.PageFooter,
        this.topMarginBand1,
        this.bottomMarginBand1,
        this.ReportHeader});
    this.Borders = ((DevExpress.XtraPrinting.BorderSide)((((DevExpress.XtraPrinting.BorderSide.Left | DevExpress.XtraPrinting.BorderSide.Top)
    | DevExpress.XtraPrinting.BorderSide.Right)
    | DevExpress.XtraPrinting.BorderSide.Bottom)));
    // Bind the report to the typed data set via the header table adapter.
    this.DataAdapter = this.m_Adaper;
    this.DataMember = "HeaderTable";
    this.DataSource = this.m_DataSet;
    resources.ApplyResources(this, "$this");
    this.Version = "13.1";
    // Resume initialization events now that all properties are assigned.
    ((System.ComponentModel.ISupportInitialize)(this.xrTable2)).EndInit();
    ((System.ComponentModel.ISupportInitialize)(this.m_DataSet)).EndInit();
    ((System.ComponentModel.ISupportInitialize)(this.xrTable1)).EndInit();
    ((System.ComponentModel.ISupportInitialize)(this)).EndInit();
}
#endregion
private DevExpress.XtraReports.UI.DetailBand Detail;
private DevExpress.XtraReports.UI.PageHeaderBand PageHeader;
private DevExpress.XtraReports.UI.PageFooterBand PageFooter;
private AdmUnitDataSet m_DataSet;
private EIDSS.Reports.BaseControls.Aggregate.AdmUnitDataSetTableAdapters.HeaderAdapter m_Adaper;
private DevExpress.XtraReports.UI.TopMarginBand topMarginBand1;
private DevExpress.XtraReports.UI.BottomMarginBand bottomMarginBand1;
private DevExpress.XtraReports.UI.ReportHeaderBand ReportHeader;
private DevExpress.XtraReports.UI.XRTable xrTable1;
private DevExpress.XtraReports.UI.XRTableRow xrTableRow1;
private DevExpress.XtraReports.UI.XRTableCell AdmUnitHeaderCell;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell3;
private DevExpress.XtraReports.UI.XRTable xrTable2;
private DevExpress.XtraReports.UI.XRTableRow xrTableRow2;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell5;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell6;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell7;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell8;
}
}
| |
// =================================================================================================
// ADOBE SYSTEMS INCORPORATED
// Copyright 2006 Adobe Systems Incorporated
// All Rights Reserved
//
// NOTICE: Adobe permits you to use, modify, and distribute this file in accordance with the terms
// of the Adobe license agreement accompanying it.
// =================================================================================================
using System;
using Com.Adobe.Xmp;
using Com.Adobe.Xmp.Impl.Xpath;
using Com.Adobe.Xmp.Options;
using Com.Adobe.Xmp.Properties;
using Sharpen;
namespace Com.Adobe.Xmp.Impl
{
/// <summary>The <code>XMPIterator</code> implementation.</summary>
/// <remarks>
/// The <code>XMPIterator</code> implementation.
/// Iterates the XMP Tree according to a set of options.
/// During the iteration the XMPMeta-object must not be changed.
/// Calls to <code>skipSubtree()</code> / <code>skipSiblings()</code> will affect the iteration.
/// </remarks>
/// <since>29.06.2006</since>
public class XMPIteratorImpl : XMPIterator
{
/// <summary>stores the iterator options</summary>
private IteratorOptions options;
/// <summary>the base namespace of the property path, will be changed during the iteration</summary>
private string baseNS = null;
/// <summary>flag to indicate that skipSiblings() has been called.</summary>
protected internal bool skipSiblings = false;
/// <summary>flag to indicate that skipSubtree() has been called.</summary>
protected internal bool skipSubtree = false;
/// <summary>the node iterator doing the work</summary>
private Iterator nodeIterator = null;
/// <summary>Constructor with optional initial values.</summary>
/// <remarks>
/// Constructor with optional initial values. If <code>propName</code> is provided,
/// <code>schemaNS</code> must also be provided.
/// </remarks>
/// <param name="xmp">the iterated metadata object.</param>
/// <param name="schemaNS">the iteration is reduced to this schema (optional)</param>
/// <param name="propPath">the iteration is reduced to this property within the <code>schemaNS</code></param>
/// <param name="options">
/// advanced iteration options, see
/// <see cref="Com.Adobe.Xmp.Options.IteratorOptions"/>
/// </param>
/// <exception cref="Com.Adobe.Xmp.XMPException">If the node defined by the parameters does not exist.</exception>
public XMPIteratorImpl(XMPMetaImpl xmp, string schemaNS, string propPath, IteratorOptions options)
{
    // Make sure that options are defined, falling back to the defaults.
    this.options = options ?? new IteratorOptions();
    bool hasSchema = schemaNS != null && schemaNS.Length > 0;
    bool hasProperty = propPath != null && propPath.Length > 0;
    // Determine the start node of the iteration from the schema/property filter.
    XMPNode startNode = null;
    string initialPath = null;
    if (!hasSchema && !hasProperty)
    {
        // No filter: the complete tree will be iterated.
        startNode = xmp.GetRoot();
    }
    else if (hasSchema && hasProperty)
    {
        // Schema and property provided: start at that property node.
        XMPPath path = XMPPathParser.ExpandXPath(schemaNS, propPath);
        // The base path is the property path without its leaf segment.
        XMPPath basePath = new XMPPath();
        for (int i = 0; i < path.Size() - 1; i++)
        {
            basePath.Add(path.GetSegment(i));
        }
        startNode = XMPNodeUtils.FindNode(xmp.GetRoot(), path, false, null);
        baseNS = schemaNS;
        initialPath = basePath.ToString();
    }
    else if (hasSchema)
    {
        // Only the schema provided: start at the schema node.
        startNode = XMPNodeUtils.FindSchemaNode(xmp.GetRoot(), schemaNS, false);
    }
    else
    {
        // A property without a schema is an error.
        throw new XMPException("Schema namespace URI is required", XMPErrorConstants.Badschema);
    }
    // Create the worker iterator for the chosen start node.
    if (startNode == null)
    {
        // Nothing to iterate: use an empty iterator.
        nodeIterator = Sharpen.Collections.EmptyList().Iterator();
    }
    else if (this.options.IsJustChildren())
    {
        nodeIterator = new XMPIteratorImpl.NodeIteratorChildren(this, startNode, initialPath);
    }
    else
    {
        nodeIterator = new XMPIteratorImpl.NodeIterator(this, startNode, initialPath, 1);
    }
}
/// <seealso cref="Com.Adobe.Xmp.XMPIterator.SkipSubtree()"/>
public virtual void SkipSubtree()
{
    // Flag is consumed by the node iterators on their next step.
    skipSubtree = true;
}
/// <seealso cref="Com.Adobe.Xmp.XMPIterator.SkipSiblings()"/>
public virtual void SkipSiblings()
{
    // Skipping the siblings implies skipping the remaining subtree as well.
    SkipSubtree();
    skipSiblings = true;
}
/// <seealso cref="Sharpen.Iterator{E}.HasNext()"/>
public virtual bool HasNext()
{
    // Delegate to the underlying node iterator.
    return this.nodeIterator.HasNext();
}
/// <seealso cref="Sharpen.Iterator{E}.Next()"/>
public virtual object Next()
{
    // Delegate to the underlying node iterator.
    return this.nodeIterator.Next();
}
/// <seealso cref="Sharpen.Iterator{E}.Remove()"/>
public virtual void Remove()
{
    // The XMP tree must not be modified during iteration.
    throw new NotSupportedException("The XMPIterator does not support remove().");
}
/// <returns>Exposes the options for inner class.</returns>
protected internal virtual IteratorOptions GetOptions()
{
    // Accessor for the inner iterator classes.
    return this.options;
}
/// <returns>Exposes the base namespace for the inner class.</returns>
protected internal virtual string GetBaseNS()
{
    return this.baseNS;
}
/// <param name="baseNS">sets the baseNS from the inner class.</param>
protected internal virtual void SetBaseNS(string baseNS)
{
    // Updated by the inner iterators whenever a schema node is entered.
    this.baseNS = baseNS;
}
/// <summary>The <code>XMPIterator</code> implementation.</summary>
/// <remarks>
/// The <code>XMPIterator</code> implementation.
/// It first returns the node itself, then recursively the children and qualifiers of the node.
/// </remarks>
/// <since>29.06.2006</since>
private class NodeIterator : Iterator
{
/// <summary>iteration state</summary>
protected internal const int IterateNode = 0;
/// <summary>iteration state</summary>
protected internal const int IterateChildren = 1;
/// <summary>iteration state</summary>
protected internal const int IterateQualifier = 2;
/// <summary>the state of the iteration</summary>
private int state = XMPIteratorImpl.NodeIterator.IterateNode;
/// <summary>the currently visited node</summary>
private XMPNode visitedNode;
/// <summary>the recursively accumulated path</summary>
private string path;
/// <summary>the iterator that goes through the children and qualifier list</summary>
private Iterator childrenIterator = null;
/// <summary>index of node with parent, only interesting for arrays</summary>
private int index = 0;
/// <summary>the iterator for each child</summary>
private Iterator subIterator = Sharpen.Collections.EmptyList().Iterator();
/// <summary>the cached <code>PropertyInfo</code> to return</summary>
private XMPPropertyInfo returnProperty = null;
/// <summary>Default constructor</summary>
public NodeIterator(XMPIteratorImpl _enclosing)
{
    // Bare constructor used by subclasses that drive the iteration themselves.
    this._enclosing = _enclosing;
}
/// <summary>Constructor for the node iterator.</summary>
/// <param name="visitedNode">the currently visited node</param>
/// <param name="parentPath">the accumulated path of the node</param>
/// <param name="index">the index within the parent node (only for arrays)</param>
public NodeIterator(XMPIteratorImpl _enclosing, XMPNode visitedNode, string parentPath, int index)
{
    this._enclosing = _enclosing;
    this.visitedNode = visitedNode;
    this.state = XMPIteratorImpl.NodeIterator.IterateNode;
    // Entering a schema node changes the base namespace reported for its subtree.
    if (visitedNode.GetOptions().IsSchemaNode())
    {
        this._enclosing.SetBaseNS(visitedNode.GetName());
    }
    // Root and schema nodes get a null path; all other nodes extend the parent path.
    this.path = this.AccumulatePath(visitedNode, parentPath, index);
}
/// <summary>Prepares the next node to return if not already done.</summary>
/// <seealso cref="Sharpen.Iterator{E}.HasNext()"/>
public virtual bool HasNext()
{
    // Drives the three-phase state machine: IterateNode -> IterateChildren -> IterateQualifier.
    // Prepares returnProperty as a side effect so that Next() can hand it out.
    if (this.returnProperty != null)
    {
        // hasNext has been called before
        return true;
    }
    // find next node
    if (this.state == XMPIteratorImpl.NodeIterator.IterateNode)
    {
        // phase 1: report the visited node itself (ReportNode advances the state)
        return this.ReportNode();
    }
    else
    {
        if (this.state == XMPIteratorImpl.NodeIterator.IterateChildren)
        {
            // phase 2: walk the children; the iterator is created lazily on first use
            if (this.childrenIterator == null)
            {
                this.childrenIterator = this.visitedNode.IterateChildren();
            }
            bool hasNext = this.IterateChildrenMethod(this.childrenIterator);
            if (!hasNext && this.visitedNode.HasQualifier() && !this._enclosing.GetOptions().IsOmitQualifiers())
            {
                // children exhausted: switch to the qualifier phase and retry
                this.state = XMPIteratorImpl.NodeIterator.IterateQualifier;
                this.childrenIterator = null;
                hasNext = this.HasNext();
            }
            return hasNext;
        }
        else
        {
            // phase 3: walk the qualifiers, again with a lazily created iterator
            if (this.childrenIterator == null)
            {
                this.childrenIterator = this.visitedNode.IterateQualifier();
            }
            return this.IterateChildrenMethod(this.childrenIterator);
        }
    }
}
/// <summary>Sets the returnProperty as next item or recurses into <code>hasNext()</code>.</summary>
/// <returns>Returns if there is a next item to return.</returns>
protected internal virtual bool ReportNode()
{
    // Once the node itself has been handled, the iteration continues with its children.
    this.state = XMPIteratorImpl.NodeIterator.IterateChildren;
    // The root node, and non-leaf nodes when only leaves are wanted, are not
    // reported themselves — recurse straight into HasNext() instead.
    if (this.visitedNode.GetParent() == null || (this._enclosing.GetOptions().IsJustLeafnodes() && this.visitedNode.HasChildren()))
    {
        return this.HasNext();
    }
    this.returnProperty = this.CreatePropertyInfo(this.visitedNode, this._enclosing.GetBaseNS(), this.path);
    return true;
}
/// <summary>Handles the iteration of the children or qualifiers</summary>
/// <param name="iterator">an iterator</param>
/// <returns>Returns if there are more elements available.</returns>
private bool IterateChildrenMethod(Iterator iterator)
{
    // Advances through the given child/qualifier list, delegating to a sub
    // iterator per child; prepares returnProperty when an item is available.
    if (this._enclosing.skipSiblings)
    {
        // a pending skipSiblings() request aborts the current sub iteration;
        // the flag is consumed (reset) here
        // setSkipSiblings(false);
        this._enclosing.skipSiblings = false;
        this.subIterator = Sharpen.Collections.EmptyList().Iterator();
    }
    // create sub iterator for every child,
    // if its the first child visited or the former child is finished
    if ((!this.subIterator.HasNext()) && iterator.HasNext())
    {
        XMPNode child = (XMPNode)iterator.Next();
        // index is 1-based and only meaningful for array items
        this.index++;
        this.subIterator = new XMPIteratorImpl.NodeIterator(this._enclosing, child, this.path, this.index);
    }
    if (this.subIterator.HasNext())
    {
        // hand the prepared property up to the caller
        this.returnProperty = (XMPPropertyInfo)this.subIterator.Next();
        return true;
    }
    else
    {
        return false;
    }
}
/// <summary>Calls hasNext() and returns the prepared node.</summary>
/// <remarks>
/// Calls hasNext() and returns the prepared node. Afterwards it is set to null.
/// The existence of returnProperty indicates if there is a next node, otherwise
/// an exception is thrown.
/// </remarks>
/// <seealso cref="Sharpen.Iterator{E}.Next()"/>
public virtual object Next()
{
    // HasNext() prepares returnProperty; consume and clear it here.
    if (!this.HasNext())
    {
        throw new NoSuchElementException("There are no more nodes to return");
    }
    XMPPropertyInfo result = this.returnProperty;
    this.returnProperty = null;
    return result;
}
/// <summary>Not supported.</summary>
/// <seealso cref="Sharpen.Iterator{E}.Remove()"/>
public virtual void Remove()
{
    // Removal would mutate the XMP tree during iteration; not allowed.
    throw new NotSupportedException();
}
/// <param name="currNode">the node that will be added to the path.</param>
/// <param name="parentPath">the path up to this node.</param>
/// <param name="currentIndex">the current array index if an array is traversed</param>
/// <returns>Returns the updated path.</returns>
protected internal virtual string AccumulatePath(XMPNode currNode, string parentPath, int currentIndex)
{
    // The root node and schema nodes do not contribute to the path.
    if (currNode.GetParent() == null || currNode.GetOptions().IsSchemaNode())
    {
        return null;
    }
    // Array items are addressed by 1-based index, everything else by name.
    string separator;
    string segmentName;
    if (currNode.GetParent().GetOptions().IsArray())
    {
        separator = string.Empty;
        segmentName = "[" + currentIndex.ToString() + "]";
    }
    else
    {
        separator = "/";
        segmentName = currNode.GetName();
    }
    if (parentPath == null || parentPath.Length == 0)
    {
        // First segment: no separator needed.
        return segmentName;
    }
    if (this._enclosing.GetOptions().IsJustLeafname())
    {
        // Only the leaf name is wanted; strip the leading '?' of qualifier names.
        return !segmentName.StartsWith("?") ? segmentName : Sharpen.Runtime.Substring(segmentName, 1);
    }
    return parentPath + separator + segmentName;
}
/// <summary>Creates a property info object from an <code>XMPNode</code>.</summary>
/// <param name="node">an <code>XMPNode</code></param>
/// <param name="baseNS">the base namespace to report</param>
/// <param name="path">the full property path</param>
/// <returns>Returns a <code>XMPProperty</code>-object that serves representation of the node.</returns>
protected internal virtual XMPPropertyInfo CreatePropertyInfo(XMPNode node, string baseNS, string path)
{
    // Schema nodes carry no value of their own.
    string nodeValue = node.GetOptions().IsSchemaNode() ? null : node.GetValue();
    return new _XMPPropertyInfo_450(node, baseNS, path, nodeValue);
}
// Port of a Java anonymous class: an immutable snapshot of one node exposed
// through the XMPPropertyInfo interface.
private sealed class _XMPPropertyInfo_450 : XMPPropertyInfo
{
    public _XMPPropertyInfo_450(XMPNode node, string baseNS, string path, string value)
    {
        this.node = node;
        this.baseNS = baseNS;
        this.path = path;
        this.value = value;
    }

    public string GetNamespace()
    {
        if (!node.GetOptions().IsSchemaNode())
        {
            // determine namespace of leaf node
            QName qname = new QName(node.GetName());
            return XMPMetaFactory.GetSchemaRegistry().GetNamespaceURI(qname.GetPrefix());
        }
        else
        {
            // schema nodes report the captured base namespace directly
            return baseNS;
        }
    }

    public string GetPath()
    {
        return path;
    }

    public string GetValue()
    {
        return value;
    }

    public PropertyOptions GetOptions()
    {
        // options are read live from the underlying node, not snapshotted
        return node.GetOptions();
    }

    public string GetLanguage()
    {
        // the language is not reported
        return null;
    }

    private readonly XMPNode node;

    private readonly string baseNS;

    private readonly string path;

    private readonly string value;
}
/// <returns>the childrenIterator</returns>
protected internal virtual Iterator GetChildrenIterator()
{
    // Exposes the current children/qualifier iterator to subclasses.
    return childrenIterator;
}
/// <param name="childrenIterator">the childrenIterator to set</param>
protected internal virtual void SetChildrenIterator(Iterator childrenIterator)
{
    // Allows subclasses to replace the children/qualifier iterator.
    this.childrenIterator = childrenIterator;
}
/// <returns>Returns the returnProperty.</returns>
protected internal virtual XMPPropertyInfo GetReturnProperty()
{
    // The property prepared by HasNext(), or null when none is pending.
    return returnProperty;
}
/// <param name="returnProperty">the returnProperty to set</param>
protected internal virtual void SetReturnProperty(XMPPropertyInfo returnProperty)
{
    // Stores the property that Next() will hand out.
    this.returnProperty = returnProperty;
}
private readonly XMPIteratorImpl _enclosing;
}
/// <summary>
/// This iterator is derived from the default <code>NodeIterator</code>,
/// and is only used for the option
/// <see cref="Com.Adobe.Xmp.Options.IteratorOptions.JustChildren"/>
/// .
/// </summary>
/// <since>02.10.2006</since>
private class NodeIteratorChildren : XMPIteratorImpl.NodeIterator
{
private string parentPath;
private Iterator childrenIterator;
private int index = 0;
/// <summary>Constructor</summary>
/// <param name="parentNode">the node which children shall be iterated.</param>
/// <param name="parentPath">the full path of the former node without the leaf node.</param>
public NodeIteratorChildren(XMPIteratorImpl _enclosing, XMPNode parentNode, string parentPath)
    : base(_enclosing)
{
    this._enclosing = _enclosing;
    // entering a schema node switches the reported base namespace
    if (parentNode.GetOptions().IsSchemaNode())
    {
        this._enclosing.SetBaseNS(parentNode.GetName());
    }
    // the parent contributes the path prefix; its children are walked lazily
    this.parentPath = this.AccumulatePath(parentNode, parentPath, 1);
    this.childrenIterator = parentNode.IterateChildren();
}
/// <summary>Prepares the next node to return if not already done.</summary>
/// <seealso cref="Sharpen.Iterator{E}.HasNext()"/>
public override bool HasNext()
{
    // Just-children variant: reports each direct child once, never recursing
    // into grandchildren or qualifiers.
    if (this.GetReturnProperty() != null)
    {
        // hasNext has been called before
        return true;
    }
    else
    {
        if (this._enclosing.skipSiblings)
        {
            // skipSiblings() ends a just-children iteration immediately
            return false;
        }
        else
        {
            if (this.childrenIterator.HasNext())
            {
                XMPNode child = (XMPNode)this.childrenIterator.Next();
                // index is 1-based; only meaningful for array items
                this.index++;
                string path = null;
                if (child.GetOptions().IsSchemaNode())
                {
                    // schema nodes change the base namespace and carry no path
                    this._enclosing.SetBaseNS(child.GetName());
                }
                else
                {
                    if (child.GetParent() != null)
                    {
                        // for all but the root node and schema nodes
                        path = this.AccumulatePath(child, this.parentPath, this.index);
                    }
                }
                // report next property, skip not-leaf nodes in case options is set
                if (!this._enclosing.GetOptions().IsJustLeafnodes() || !child.HasChildren())
                {
                    this.SetReturnProperty(this.CreatePropertyInfo(child, this._enclosing.GetBaseNS(), path));
                    return true;
                }
                else
                {
                    // recurse until a reportable child is found or the list ends
                    return this.HasNext();
                }
            }
            else
            {
                return false;
            }
        }
    }
}
private readonly XMPIteratorImpl _enclosing;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net.Http;
using System.Net.Http.WinHttpHandlerUnitTests;
using Microsoft.DotNet.RemoteExecutor;
using Xunit;
using Xunit.Abstractions;
namespace System.Net.Http.Tests
{
public class HttpWindowsProxyTest
{
private readonly ITestOutputHelper _output;
private const string FakeProxyString = "http://proxy.contoso.com";
private const string insecureProxyUri = "http://proxy.insecure.com";
private const string secureProxyUri = "http://proxy.secure.com";
private const string secureAndInsecureProxyUri = "http://proxy.secure-and-insecure.com";
private const string fooHttp = "http://foo.com";
private const string fooHttps = "https://foo.com";
private const string fooWs = "ws://foo.com";
private const string fooWss = "wss://foo.com";
// Capture the xUnit output sink for diagnostics.
public HttpWindowsProxyTest(ITestOutputHelper output) => _output = output;
[Theory]
[MemberData(nameof(ProxyParsingData))]
public void HttpProxy_WindowsProxy_Manual_Loaded(string rawProxyString, string rawInsecureUri, string rawSecureUri)
{
    // Runs in a separate process so the fake registry state cannot leak between tests.
    RemoteExecutor.Invoke((proxyValue, expectedInsecure, expectedSecure) =>
    {
        FakeRegistry.Reset();
        Assert.False(HttpWindowsProxy.TryCreate(out IWebProxy p));

        FakeRegistry.WinInetProxySettings.Proxy = proxyValue;

        // The helper must report manual (static) settings, not auto-config.
        var proxyHelper = new WinInetProxyHelper();
        Assert.Null(proxyHelper.AutoConfigUrl);
        Assert.Equal(proxyValue, proxyHelper.Proxy);
        Assert.False(proxyHelper.AutoSettingsUsed);
        Assert.True(proxyHelper.ManualSettingsUsed);

        Assert.True(HttpWindowsProxy.TryCreate(out p));
        Assert.NotNull(p);

        // http/ws traffic resolves to the insecure proxy; https/wss to the secure one.
        Uri insecure = string.IsNullOrEmpty(expectedInsecure) ? null : new Uri(expectedInsecure);
        Uri secure = string.IsNullOrEmpty(expectedSecure) ? null : new Uri(expectedSecure);
        Assert.Equal(insecure, p.GetProxy(new Uri(fooHttp)));
        Assert.Equal(secure, p.GetProxy(new Uri(fooHttps)));
        Assert.Equal(insecure, p.GetProxy(new Uri(fooWs)));
        Assert.Equal(secure, p.GetProxy(new Uri(fooWss)));
    }, rawProxyString, rawInsecureUri ?? string.Empty, rawSecureUri ?? string.Empty).Dispose();
}
[Theory]
[MemberData(nameof(ProxyParsingData))]
public void HttpProxy_WindowsProxy_PAC_Loaded(string rawProxyString, string rawInsecureUri, string rawSecureUri)
{
    // Runs in a separate process so the fake registry/test state cannot leak between tests.
    RemoteExecutor.Invoke((proxyValue, expectedInsecure, expectedSecure) =>
    {
        TestControl.ResetAll();
        Assert.False(HttpWindowsProxy.TryCreate(out IWebProxy p));

        // Configure auto-config (PAC) only; no static proxy yet.
        FakeRegistry.WinInetProxySettings.AutoConfigUrl = "http://127.0.0.1/proxy.pac";
        var proxyHelper = new WinInetProxyHelper();
        Assert.Null(proxyHelper.Proxy);
        Assert.Equal(FakeRegistry.WinInetProxySettings.AutoConfigUrl, proxyHelper.AutoConfigUrl);
        Assert.False(proxyHelper.ManualSettingsUsed);
        Assert.True(proxyHelper.AutoSettingsUsed);

        Assert.True(HttpWindowsProxy.TryCreate(out p));
        Assert.NotNull(p);

        // With a HttpWindowsProxy created configured to use auto-config, now set Proxy so when it
        // attempts to resolve a proxy, it resolves our string.
        FakeRegistry.WinInetProxySettings.Proxy = proxyValue;
        proxyHelper = new WinInetProxyHelper();
        Assert.Equal(proxyValue, proxyHelper.Proxy);

        // http/ws traffic resolves to the insecure proxy; https/wss to the secure one.
        Uri insecure = string.IsNullOrEmpty(expectedInsecure) ? null : new Uri(expectedInsecure);
        Uri secure = string.IsNullOrEmpty(expectedSecure) ? null : new Uri(expectedSecure);
        Assert.Equal(insecure, p.GetProxy(new Uri(fooHttp)));
        Assert.Equal(secure, p.GetProxy(new Uri(fooHttps)));
        Assert.Equal(insecure, p.GetProxy(new Uri(fooWs)));
        Assert.Equal(secure, p.GetProxy(new Uri(fooWss)));
    }, rawProxyString, rawInsecureUri ?? string.Empty, rawSecureUri ?? string.Empty).Dispose();
}
// Rows: raw WinInet proxy string, expected insecure (http/ws) proxy, expected secure (https/wss) proxy.
public static TheoryData<string, string, string> ProxyParsingData =>
    new TheoryData<string, string, string>
    {
        { "http://proxy.insecure.com", insecureProxyUri, null },
        { "http=http://proxy.insecure.com", insecureProxyUri, null },
        { "http=proxy.insecure.com", insecureProxyUri, null },
        { "http://proxy.insecure.com http://proxy.wrong.com", insecureProxyUri, null },
        { "https=proxy.secure.com http=proxy.insecure.com", insecureProxyUri, secureProxyUri },
        { "https://proxy.secure.com\nhttp://proxy.insecure.com", insecureProxyUri, secureProxyUri },
        { "https=proxy.secure.com\nhttp=proxy.insecure.com", insecureProxyUri, secureProxyUri },
        { "https://proxy.secure.com;http://proxy.insecure.com", insecureProxyUri, secureProxyUri },
        { "https=proxy.secure.com;http=proxy.insecure.com", insecureProxyUri, secureProxyUri },
        { ";http=proxy.insecure.com;;", insecureProxyUri, null },
        { " http=proxy.insecure.com ", insecureProxyUri, null },
        { "http=proxy.insecure.com;http=proxy.wrong.com", insecureProxyUri, null },
        // NOTE: a second, byte-identical "http=http://proxy.insecure.com" row was removed
        // here; it duplicated the entry near the top and only produced a redundant theory case.
        { "https://proxy.secure.com", null, secureProxyUri },
        { "https=proxy.secure.com", null, secureProxyUri },
        { "https=https://proxy.secure.com", null, secureProxyUri },
        { "http=https://proxy.secure.com", null, secureProxyUri },
        { "https=http://proxy.insecure.com", insecureProxyUri, null },
        { "proxy.secure-and-insecure.com", secureAndInsecureProxyUri, secureAndInsecureProxyUri },
    };
[Theory]
[InlineData("localhost:1234", "http://localhost:1234/")]
[InlineData("123.123.123.123", "http://123.123.123.123/")]
public void HttpProxy_WindowsProxy_Loaded(string rawProxyString, string expectedUri)
{
    RemoteExecutor.Invoke((proxyValue, expectedString) =>
    {
        FakeRegistry.Reset();
        FakeRegistry.WinInetProxySettings.Proxy = proxyValue;

        // Constructing the helper reads the (faked) registry settings.
        var proxyHelper = new WinInetProxyHelper();

        Assert.True(HttpWindowsProxy.TryCreate(out IWebProxy p));
        Assert.NotNull(p);

        // A bare host[:port] proxy string is normalized to an http:// URI for both schemes.
        Assert.Equal(expectedString, p.GetProxy(new Uri(fooHttp)).ToString());
        Assert.Equal(expectedString, p.GetProxy(new Uri(fooHttps)).ToString());
    }, rawProxyString, expectedUri).Dispose();
}
[Theory]
[InlineData("http://localhost/", true)]
[InlineData("http://127.0.0.1/", true)]
[InlineData("http://128.0.0.1/", false)]
[InlineData("http://[::1]/", true)]
[InlineData("http://foo/", true)]
[InlineData("http://www.foo.com/", true)]
[InlineData("http://WWW.FOO.COM/", true)]
[InlineData("http://foo.com/", false)]
[InlineData("http://bar.com/", true)]
[InlineData("http://BAR.COM/", true)]
[InlineData("http://162.1.1.1/", true)]
[InlineData("http://[2a01:5b40:0:248::52]/", false)]
[InlineData("http://[2002::11]/", true)]
[InlineData("http://[2607:f8b0:4005:80a::200e]/", true)]
[InlineData("http://[2607:f8B0:4005:80A::200E]/", true)]
[InlineData("http://b\u00e9b\u00e9.eu/", true)]
[InlineData("http://www.b\u00e9b\u00e9.eu/", true)]
public void HttpProxy_Local_Bypassed(string name, bool shouldBypass)
{
    RemoteExecutor.Invoke((url, expected) =>
    {
        bool shouldBeBypassed = bool.Parse(expected);

        FakeRegistry.Reset();
        FakeRegistry.WinInetProxySettings.Proxy = insecureProxyUri;
        FakeRegistry.WinInetProxySettings.ProxyBypass = "23.23.86.44;*.foo.com;<local>;BAR.COM; ; 162*;[2002::11];[*:f8b0:4005:80a::200e]; http://www.xn--mnchhausen-9db.at;http://*.xn--bb-bjab.eu;http://xn--bb-bjab.eu;";

        IWebProxy webProxy;
        Assert.True(HttpWindowsProxy.TryCreate(out webProxy));
        Assert.NotNull(webProxy);

        // A null proxy for the URI means the bypass list matched it.
        Assert.Equal(shouldBeBypassed, webProxy.GetProxy(new Uri(url)) == null);
    }, name, shouldBypass.ToString()).Dispose();
}
[Theory]
[InlineData("", 0)]
[InlineData(" ", 0)]
[InlineData(" ; ; ", 0)]
[InlineData("http://127.0.0.1/", 1)]
[InlineData("[::]", 1)]
public void HttpProxy_Local_Parsing(string bypass, int count)
{
    RemoteExecutor.Invoke((bypassValue, expected) =>
    {
        int expectedCount = Convert.ToInt32(expected);

        FakeRegistry.Reset();
        FakeRegistry.WinInetProxySettings.Proxy = insecureProxyUri;
        FakeRegistry.WinInetProxySettings.ProxyBypass = bypassValue;

        IWebProxy webProxy;
        Assert.True(HttpWindowsProxy.TryCreate(out webProxy));
        Assert.NotNull(webProxy);

        HttpWindowsProxy windowsProxy = webProxy as HttpWindowsProxy;
        Assert.NotNull(windowsProxy);

        // An empty/whitespace-only bypass string yields no list at all,
        // not an empty one.
        if (expectedCount <= 0)
        {
            Assert.Null(windowsProxy.BypassList);
        }
        else
        {
            Assert.Equal(expectedCount, windowsProxy.BypassList.Count);
        }
    }, bypass, count.ToString()).Dispose();
}
[Theory]
[InlineData("http://")]
[InlineData("http=")]
[InlineData("http://;")]
[InlineData("http=;")]
[InlineData(" ; ")]
public void HttpProxy_InvalidWindowsProxy_Null(string rawProxyString)
{
    RemoteExecutor.Invoke((proxyString) =>
    {
        FakeRegistry.Reset();

        // With no registry settings present, no proxy can be created at all.
        IWebProxy webProxy;
        Assert.False(HttpWindowsProxy.TryCreate(out webProxy));

        FakeRegistry.WinInetProxySettings.Proxy = proxyString;
        WinInetProxyHelper helper = new WinInetProxyHelper();

        // An unparsable proxy string still yields a proxy object...
        Assert.True(HttpWindowsProxy.TryCreate(out webProxy));
        Assert.NotNull(webProxy);

        // ...but one that proxies nothing, for any scheme.
        foreach (string target in new string[] { fooHttp, fooHttps, fooWs, fooWss })
        {
            Assert.Null(webProxy.GetProxy(new Uri(target)));
        }
    }, rawProxyString).Dispose();
}
[Theory]
[MemberData(nameof(HttpProxy_Multi_Data))]
public void HttpProxy_Multi_Success(bool manualConfig, string proxyConfig, string url, string expected)
{
    RemoteExecutor.Invoke((manualConfigValue, proxyConfigValue, urlValue, expectedValue) =>
    {
        bool useManualConfig = bool.Parse(manualConfigValue);
        Uri requestUri = new Uri(urlValue);
        string[] expectedUris = expectedValue.Split(';', StringSplitOptions.RemoveEmptyEntries);

        TestControl.ResetAll();

        if (useManualConfig)
        {
            FakeRegistry.WinInetProxySettings.Proxy = proxyConfigValue;
        }
        else
        {
            FakeRegistry.WinInetProxySettings.AutoConfigUrl = "http://dummy.com";
        }

        Assert.True(HttpWindowsProxy.TryCreate(out IWebProxy webProxy));
        HttpWindowsProxy windowsProxy = Assert.IsType<HttpWindowsProxy>(webProxy);

        if (!useManualConfig)
        {
            // HttpWindowsProxy was constructed in autoconfig mode; Proxy now
            // feeds the fakes for all subsequent per-URL calls.
            FakeRegistry.WinInetProxySettings.Proxy = proxyConfigValue;
        }

        MultiProxy multi = windowsProxy.GetMultiProxy(requestUri);

        foreach (string expectedProxy in expectedUris)
        {
            Uri expectedProxyUri = new Uri(expectedProxy);

            // The per-request enumerator and the global view advance in lockstep.
            Assert.True(multi.ReadNext(out Uri uri, out _));
            Assert.Equal(expectedProxyUri, uri);
            Assert.Equal(expectedProxyUri, webProxy.GetProxy(requestUri));
        }

        Assert.False(multi.ReadNext(out _, out _));
    }, manualConfig.ToString(), proxyConfig, url, expected).Dispose();
}
public static IEnumerable<object[]> HttpProxy_Multi_Data()
{
    // Each case runs once with manual proxy configuration and once with autoconfig.
    foreach (bool manualConfig in new[] { true, false })
    {
        yield return new object[] { manualConfig, "http://proxy.com", "http://request.com", "http://proxy.com" };
        yield return new object[] { manualConfig, "http://proxy.com https://secure-proxy.com", "http://request.com", "http://proxy.com" };
        yield return new object[] { manualConfig, "http://proxy-a.com https://secure-proxy.com http://proxy-b.com", "http://request.com", "http://proxy-a.com;http://proxy-b.com" };
        yield return new object[] { manualConfig, "http://proxy-a.com https://secure-proxy.com http://proxy-b.com", "https://request.com", "http://secure-proxy.com" };
        yield return new object[] { manualConfig, "http://proxy-a.com https://secure-proxy-a.com http://proxy-b.com https://secure-proxy-b.com https://secure-proxy-c.com", "https://request.com", "http://secure-proxy-a.com;http://secure-proxy-b.com;http://secure-proxy-c.com" };
    }
}
[Theory]
[InlineData(false)]
[InlineData(true)]
public void HttpProxy_Multi_ConcurrentUse_Success(bool manualConfig)
{
    const string MultiProxyConfig = "http://proxy-a.com http://proxy-b.com http://proxy-c.com";

    RemoteExecutor.Invoke(manualValue =>
    {
        bool useManualConfig = bool.Parse(manualValue);
        Uri requestUri = new Uri("http://request.com");
        Uri firstProxy = new Uri("http://proxy-a.com");
        Uri secondProxy = new Uri("http://proxy-b.com");
        Uri thirdProxy = new Uri("http://proxy-c.com");

        TestControl.ResetAll();

        if (useManualConfig)
        {
            FakeRegistry.WinInetProxySettings.Proxy = MultiProxyConfig;
        }
        else
        {
            FakeRegistry.WinInetProxySettings.AutoConfigUrl = "http://dummy.com";
        }

        Assert.True(HttpWindowsProxy.TryCreate(out IWebProxy webProxy));
        HttpWindowsProxy windowsProxy = Assert.IsType<HttpWindowsProxy>(webProxy);

        if (!useManualConfig)
        {
            // HttpWindowsProxy was constructed in autoconfig mode; Proxy now
            // feeds the fakes for all subsequent per-URL calls.
            FakeRegistry.WinInetProxySettings.Proxy = MultiProxyConfig;
        }

        MultiProxy enumeratorA = windowsProxy.GetMultiProxy(requestUri);
        MultiProxy enumeratorB = windowsProxy.GetMultiProxy(requestUri);

        // All three views (two enumerators plus GetProxy) start on the first proxy.
        Assert.True(enumeratorA.ReadNext(out Uri uriA, out _));
        Assert.True(enumeratorB.ReadNext(out Uri uriB, out _));
        Assert.Equal(firstProxy, uriA);
        Assert.Equal(firstProxy, uriB);
        Assert.Equal(firstProxy, webProxy.GetProxy(requestUri));

        // ...and all three advance together to the second proxy.
        Assert.True(enumeratorA.ReadNext(out uriA, out _));
        Assert.True(enumeratorB.ReadNext(out uriB, out _));
        Assert.Equal(secondProxy, uriA);
        Assert.Equal(secondProxy, uriB);
        Assert.Equal(secondProxy, webProxy.GetProxy(requestUri));

        // Advancing only enumeratorA reaches the third proxy.
        Assert.True(enumeratorA.ReadNext(out uriA, out _));
        Assert.Equal(thirdProxy, uriA);
        Assert.Equal(thirdProxy, webProxy.GetProxy(requestUri));

        // One more advance exhausts enumeratorA. enumeratorB, still sitting on
        // the second proxy, is then exhausted too: when it tries the third
        // proxy it finds it already marked as failed.
        Assert.False(enumeratorA.ReadNext(out uriA, out _));
        Assert.False(enumeratorB.ReadNext(out uriB, out _));

        // GetProxy falls back to the proxy closest to being re-enabled: the first.
        Assert.Equal(firstProxy, webProxy.GetProxy(requestUri));

        // A fresh MultiProxy likewise yields only that proxy and nothing else.
        MultiProxy enumeratorC = windowsProxy.GetMultiProxy(requestUri);
        Assert.True(enumeratorC.ReadNext(out Uri uriC, out _));
        Assert.Equal(firstProxy, uriC);
        Assert.False(enumeratorC.ReadNext(out uriC, out _));
    }, manualConfig.ToString()).Dispose();
}
}
}
| |
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections;
using Alachisoft.NCache.Common.Net;
using Alachisoft.NGroups.Stack;
namespace Alachisoft.NGroups
{
/// <summary>
/// Base class that every channel implementation derives from.
/// <p><b>Author:</b> Chris Koiak, Bela Ban</p>
/// <p><b>Date:</b> 12/03/2003</p>
/// </summary>
public abstract class Channel : Transport
{
    // Option identifiers accepted by setOpt()/getOpt().
    public const int BLOCK = 0;
    public const int SUSPECT = 2;
    public const int LOCAL = 3;
    public const int GET_STATE_EVENTS = 4;
    public const int AUTO_RECONNECT = 5;
    public const int AUTO_GETSTATE = 6;

    /// <summary>
    /// When set, every received event is delivered here (see <see cref="UpHandler"/>).
    /// </summary>
    protected UpHandler up_handler = null;

    /// <summary>
    /// Observer notified of channel lifecycle changes such as closing.
    /// </summary>
    protected ChannelListener channel_listener = null;

    /// <summary>
    /// Creates a channel over the predefined protocol stack.
    /// </summary>
    protected Channel() { }

    /// <summary>
    /// Creates a channel whose stack is described by the supplied properties.
    /// </summary>
    /// <param name="properties">Properties of the channel stack.</param>
    protected Channel(Object properties) { }

    #region "properties"

    /// <summary>
    /// True once the protocol stack has been created; the channel may still
    /// not be connected.
    /// </summary>
    public abstract bool IsOpen { get; }

    /// <summary>
    /// True while the channel is connected to a group (implies it is open);
    /// only then can it be used to send and receive messages.
    /// </summary>
    public abstract bool IsConnected { get; }

    /// <summary>
    /// Number of messages waiting to be removed by receive(); -1 when the
    /// queue/channel is closed or disconnected. The count may change between
    /// this call and a subsequent receive() (e.g. from another thread).
    /// </summary>
    public virtual int NumMessages
    {
        get { return -1; }
    }

    /// <summary>
    /// The current view. This does not retrieve a new view -- use receive()
    /// for that. Only available after a successful connect(); the result on
    /// an unconnected channel is implementation defined (may be null). Null
    /// on a closed channel or when view events are not enabled via setOpt().
    /// </summary>
    public abstract View View { get; }

    /// <summary>
    /// The channel's own address, generated by the underlying transport and
    /// opaque to callers; usable as a destination in send(). May be null on
    /// an unconnected channel and is null on a closed one.
    /// </summary>
    public abstract Address LocalAddress { get; }

    /// <summary>
    /// The address of the group the channel is a member of (the argument
    /// given to connect()); null on a closed channel.
    /// </summary>
    public abstract string ChannelName { get; }

    /// <summary>
    /// Routes all events (except connect/disconnect, state retrieval and the
    /// like) directly to the given handler, making the channel a pass-through
    /// medium for building blocks layered on top of it. Connection management
    /// and state transfer remain the channel's responsibility.
    /// </summary>
    public virtual UpHandler UpHandler
    {
        set { up_handler = value; }
    }

    /// <summary>
    /// Registers an observer for channel events such as connect, disconnect
    /// or close (e.g. a PullPushAdapter stopping when the channel closes).
    /// </summary>
    public virtual ChannelListener ChannelListener
    {
        set { channel_listener = value; }
    }

    #endregion

    /// <summary>
    /// Connects the channel to a group, creating it if necessary.
    /// </summary>
    /// <param name="channel_name">Group to connect to (or create).</param>
    public abstract void connect(string channel_name, string subGroup_name, bool isStartedAsMirror, bool twoPhaseInitialization);

    /// <summary>
    /// Performs the second connect phase when two-phase initialization is used.
    /// </summary>
    public abstract void connectPhase2();

    /// <summary>
    /// Disconnects the channel from its group.
    /// </summary>
    public abstract void disconnect();

    /// <summary>
    /// Disconnects and closes the channel.
    /// </summary>
    public abstract void close();

    /// <summary>
    /// Re-opens a closed channel.
    /// </summary>
    public abstract void open();

    /// <summary>
    /// Sends a message through the channel.
    /// </summary>
    /// <param name="msg">Message to be sent.</param>
    public abstract void send(Message msg);

    /// <summary>
    /// Convenience overload: builds a Message(dst, src, obj) and sends it.
    /// </summary>
    /// <param name="dst">Destination address; null sends to all current group members.</param>
    /// <param name="src">Sender's address; usually null, letting the transport fill it in.</param>
    /// <param name="obj">Serializable payload; if not serializable the message buffer will be null.</param>
    public abstract void send(Address dst, Address src, object obj);

    /// <summary>
    /// Passes an event down the protocol stack. No-op by default.
    /// </summary>
    /// <param name="evt">Event to be passed down the stack.</param>
    public virtual void down(Event evt) { }

    /// <summary>
    /// Receives the next event from the channel.
    /// </summary>
    /// <param name="timeout">Time (ms) to wait for a message.</param>
    /// <returns>The next event received by the channel.</returns>
    public abstract Object receive(long timeout);

    /// <summary>
    /// Same as receive() but does not remove the event from the queue.
    /// </summary>
    /// <param name="timeout">Time (ms) to wait for a message.</param>
    /// <returns>The next event received by the channel.</returns>
    public abstract Event peek(long timeout);

    /// <summary>
    /// Sets one of the channel options (BLOCK, SUSPECT, ...).
    /// </summary>
    /// <param name="option">The option identifier.</param>
    /// <param name="value">The value the option should be set to.</param>
    public abstract void setOpt(int option, Object value);

    /// <summary>
    /// Gets an option's value; may be called on an unconnected channel and
    /// returns null on a closed one.
    /// </summary>
    /// <param name="option">The option to look up.</param>
    /// <returns>The object associated with the option.</returns>
    public abstract object getOpt(int option);

    /// <summary>
    /// Acknowledges a block() callback (or BlockEvent from receive()). After
    /// sending BlockOk, no messages should be sent until a new view arrives.
    /// Has no effect on a closed channel.
    /// </summary>
    public abstract void blockOk();

    /// <summary>
    /// Cluster statistics collector used to publish values (e.g. over WMI).
    /// </summary>
    public abstract PerfStatsCollector ClusterStatCollector { get; }

    /// <summary>
    /// Maps an option identifier to its symbolic name for diagnostics.
    /// </summary>
    public static string option2String(int option)
    {
        switch (option)
        {
            case BLOCK: return "BLOCK";
            case SUSPECT: return "SUSPECT";
            case LOCAL: return "LOCAL";
            case GET_STATE_EVENTS: return "GET_STATE_EVENTS";
            case AUTO_RECONNECT: return "AUTO_RECONNECT";
            case AUTO_GETSTATE: return "AUTO_GETSTATE";
            default: return "unknown (" + option + ')';
        }
    }
}
}
| |
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using NSec.Cryptography;
namespace NSec.Experimental.Asn1
{
// ITU-T X.690 5.0 DER
internal ref struct Asn1Reader
{
    // Maximum supported SEQUENCE nesting depth (one stack slot per level below).
    internal const int MaxDepth = 7;

#pragma warning disable 0414
    // Fixed-size stack of unread regions, one slot per nesting level.
    // The slots are addressed like an inline array via
    // Unsafe.Add(ref _stack0, _depth), which assumes the seven fields are
    // laid out sequentially in declaration order.
    // NOTE(review): layout-dependent trick -- confirm it holds on every
    // target runtime before restructuring these fields.
    private Span _stack0;
    private Span _stack1;
    private Span _stack2;
    private Span _stack3;
    private Span _stack4;
    private Span _stack5;
    private Span _stack6;
#pragma warning restore 0414

    // The complete input; Span values below are (start, length) windows into it.
    private ReadOnlySpan<byte> _buffer;
    // Index of the currently active stack slot (current nesting depth).
    private int _depth;
    // Sticky failure flag: once set, every subsequent read also fails.
    private bool _failed;

    // Positions the reader at the start of the buffer, depth 0, no failure.
    public Asn1Reader(
        ReadOnlySpan<byte> buffer)
    {
        _stack0 = new Span(buffer);
        _stack1 = default;
        _stack2 = default;
        _stack3 = default;
        _stack4 = default;
        _stack5 = default;
        _stack6 = default;
        _buffer = buffer;
        _depth = 0;
        _failed = false;
    }

    // True while no malformed input has been encountered.
    public readonly bool Success => !_failed;

    // True when parsing succeeded and the entire buffer was consumed at depth 0.
    public readonly bool SuccessComplete => !_failed && _depth == 0 && _stack0.IsEmpty;

    // Reads a SEQUENCE header (tag 0x30) and descends into its contents:
    // the element's content region becomes the new current stack slot.
    public void BeginSequence()
    {
        Span span = Read(0x30);
        if (_failed)
        {
            Fail();
        }
        else
        {
            _depth++;
            if (_depth == MaxDepth)
            {
                throw Error.InvalidOperation_InternalError(); // overflow
            }
            Unsafe.Add(ref _stack0, _depth) = span;
        }
    }

    // Reads a BIT STRING (tag 0x03). The first content octet is the count of
    // unused bits in the final octet; only 0 (a whole number of bytes) is
    // accepted here. Returns the content bytes after that prefix.
    public ReadOnlySpan<byte> BitString()
    {
        ReadOnlySpan<byte> bytes = Read(0x03).ApplyTo(_buffer);
        ReadOnlySpan<byte> value = default;
        if (_failed || bytes.IsEmpty || bytes[0] != 0)
        {
            Fail();
        }
        else
        {
            value = bytes.Slice(1);
        }
        return value;
    }

    // Reads a BOOLEAN (tag 0x01). DER requires exactly one content byte,
    // 0x00 (false) or 0xFF (true); anything else fails the reader.
    public bool Bool()
    {
        ReadOnlySpan<byte> bytes = Read(0x01).ApplyTo(_buffer);
        bool value = default;
        if (_failed || bytes.Length != 1 || (bytes[0] != 0x00 && bytes[0] != 0xFF))
        {
            Fail();
        }
        else
        {
            value = (bytes[0] != 0x00);
        }
        return value;
    }

    // Leaves the current SEQUENCE. Fails if the current level still has
    // unread bytes: elements must be consumed exactly.
    public void End()
    {
        if (_failed || !Unsafe.Add(ref _stack0, _depth).IsEmpty)
        {
            Fail();
        }
        else
        {
            if (_depth == 0)
            {
                throw Error.InvalidOperation_InternalError(); // underflow
            }
            _depth--;
        }
    }

    // Reads an INTEGER (tag 0x02) that must fit in 32 bits.
    public int Integer32()
    {
        ReadOnlySpan<byte> bytes = Read(0x02).ApplyTo(_buffer);
        int value = default;
        if (_failed || IsInvalidInteger(bytes, sizeof(int)))
        {
            Fail();
        }
        else
        {
            // Seed with all-ones when the sign bit of the first byte is set
            // (two's-complement sign extension), then accumulate the
            // big-endian content bytes.
            value = -(bytes[0] >> 7);
            for (int i = 0; i < bytes.Length; i++)
            {
                value = (value << 8) | bytes[i];
            }
        }
        return value;
    }

    // Reads an INTEGER (tag 0x02) that must fit in 64 bits.
    public long Integer64()
    {
        ReadOnlySpan<byte> bytes = Read(0x02).ApplyTo(_buffer);
        long value = default;
        if (_failed || IsInvalidInteger(bytes, sizeof(long)))
        {
            Fail();
        }
        else
        {
            // Same sign-extension-then-accumulate scheme as Integer32,
            // in 64-bit arithmetic.
            value = -(bytes[0] >> 7);
            for (int i = 0; i < bytes.Length; i++)
            {
                value = (value << 8) | bytes[i];
            }
        }
        return value;
    }

    // Reads a NULL (tag 0x05), which must have empty content.
    public void Null()
    {
        Span span = Read(0x05);
        if (_failed || !span.IsEmpty)
        {
            Fail();
        }
    }

    // Reads an OBJECT IDENTIFIER (tag 0x06); returns its raw, undecoded content.
    public ReadOnlySpan<byte> ObjectIdentifier()
    {
        return Read(0x06).ApplyTo(_buffer);
    }

    // Reads an OCTET STRING (tag 0x04) and returns its content bytes.
    public ReadOnlySpan<byte> OctetString()
    {
        return Read(0x04).ApplyTo(_buffer);
    }

    // Poisons the reader: all further reads fail and return defaults.
    private void Fail()
    {
        _failed = true;
        _depth = 0;
        _stack0 = default;
    }

    // An INTEGER encoding is invalid when empty, wider than maxSize bytes,
    // or not minimal: a leading 0x00 is redundant unless needed to clear the
    // sign bit, and a leading 0xFF is redundant unless needed to set it.
    private readonly bool IsInvalidInteger(
        ReadOnlySpan<byte> bytes,
        int maxSize)
    {
        return bytes.Length == 0
            || bytes.Length > maxSize
            || bytes.Length > 1 && bytes[0] == 0x00 && (bytes[1] & 0x80) == 0x00
            || bytes.Length > 1 && bytes[0] == 0xFF && (bytes[1] & 0x80) == 0x80;
    }

    // Parses one TLV element with the given tag from the current slot.
    // On success, advances the slot past the element and returns the content
    // region; on any malformation, poisons the reader and returns empty.
    private Span Read(
        int tag)
    {
        Span span = Unsafe.Add(ref _stack0, _depth);
        ReadOnlySpan<byte> bytes = span.ApplyTo(_buffer);

        // Need at least tag + first length octet, and the tag must match.
        if (_failed || bytes.Length < 2 || bytes[0] != tag)
        {
            goto failed;
        }
        int start = 2;
        int length = 0;
        if ((bytes[1] & ~0x7F) == 0)
        {
            // Short form: the length fits in the low 7 bits of one octet.
            length = bytes[1];
        }
        else
        {
            // Long form: the low 7 bits give the number of length octets.
            // Reject counts that cannot fit an int, run past the element,
            // or begin with a zero octet (non-minimal encoding).
            int count = bytes[1] & 0x7F;
            if (count < 1 || count > sizeof(int) || count > bytes.Length - 2 || bytes[2] == 0)
            {
                goto failed;
            }
            while (count-- > 0)
            {
                length = (length << 8) | bytes[start++];
            }
            // DER: lengths below 0x80 must use the short form. This also
            // rejects negative values produced by a set top bit.
            if (length < 0x80)
            {
                goto failed;
            }
        }
        // The declared content must fit in the remaining bytes of this level.
        if (length > bytes.Length - start)
        {
            goto failed;
        }
        Unsafe.Add(ref _stack0, _depth) = span.Slice(start + length);
        return span.Slice(start, length);
    failed:
        Fail();
        return default;
    }

    // A (start, length) window into _buffer. Stored by value so the reader
    // can keep region state in plain fields instead of ref-struct spans.
    private readonly struct Span
    {
        private readonly int _start;
        private readonly int _length;

        // Window covering the whole buffer.
        public Span(ReadOnlySpan<byte> buffer)
            : this(0, buffer.Length)
        {
        }

        private Span(int start, int length)
        {
            _start = start;
            _length = length;
        }

        public bool IsEmpty => _length == 0;

        public int Length => _length;

        public int Start => _start;

        // Materializes this window over the given buffer.
        public ReadOnlySpan<byte> ApplyTo(ReadOnlySpan<byte> buffer)
        {
            return buffer.Slice(_start, _length);
        }

        // Drops the first `start` bytes of the window.
        public Span Slice(int start)
        {
            Debug.Assert(start >= 0 && start <= _length);
            return new Span(_start + start, _length - start);
        }

        // Takes `length` bytes beginning `start` bytes into the window.
        public Span Slice(int start, int length)
        {
            Debug.Assert(start >= 0 && start <= _length);
            Debug.Assert(length >= 0 && length <= _length - start);
            return new Span(_start + start, length);
        }
    }
}
}
| |
//
// Options.cs
//
// Authors:
// Jonathan Pryor <jpryor@novell.com>
//
// Copyright (C) 2008 Novell (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// Compile With:
// gmcs -debug+ -r:System.Core Options.cs -o:NDesk.Options.dll
// gmcs -debug+ -d:LINQ -r:System.Core Options.cs -o:NDesk.Options.dll
//
// The LINQ version just changes the implementation of
// OptionSet.Parse(IEnumerable<string>), and confers no semantic changes.
//
// A Getopt::Long-inspired option parsing library for C#.
//
// NDesk.Options.OptionSet is built upon a key/value table, where the
// key is an option format string and the value is a delegate that is
// invoked when the format string is matched.
//
// Option format strings:
// Regex-like BNF Grammar:
// name: .+
// type: [=:]
// sep: ( [^{}]+ | '{' .+ '}' )?
// aliases: ( name type sep ) ( '|' name type sep )*
//
// Each '|'-delimited name is an alias for the associated action. If the
// format string ends in a '=', it has a required value. If the format
// string ends in a ':', it has an optional value. If neither '=' or ':'
// is present, no value is supported. `=' or `:' need only be defined on one
// alias, but if they are provided on more than one they must be consistent.
//
// Each alias portion may also end with a "key/value separator", which is used
// to split option values if the option accepts > 1 value. If not specified,
// it defaults to '=' and ':'. If specified, it can be any character except
// '{' and '}' OR the *string* between '{' and '}'. If no separator should be
// used (i.e. the separate values should be distinct arguments), then "{}"
// should be used as the separator.
//
// Options are extracted either from the current option by looking for
// the option name followed by an '=' or ':', or is taken from the
// following option IFF:
// - The current option does not contain a '=' or a ':'
// - The current option requires a value (i.e. not a Option type of ':')
//
// The `name' used in the option format string does NOT include any leading
// option indicator, such as '-', '--', or '/'. All three of these are
// permitted/required on any named option.
//
// Option bundling is permitted so long as:
// - '-' is used to start the option group
// - all of the bundled options are a single character
// - at most one of the bundled options accepts a value, and the value
// provided starts from the next character to the end of the string.
//
// This allows specifying '-a -b -c' as '-abc', and specifying '-D name=value'
// as '-Dname=value'.
//
// Option processing is disabled by specifying "--". All options after "--"
// are returned by OptionSet.Parse() unchanged and unprocessed.
//
// Unprocessed options are returned from OptionSet.Parse().
//
// Examples:
// int verbose = 0;
// OptionSet p = new OptionSet ()
// .Add ("v", v => ++verbose)
// .Add ("name=|value=", v => Console.WriteLine (v));
// p.Parse (new string[]{"-v", "--v", "/v", "-name=A", "/name", "B", "extra"});
//
// The above would parse the argument string array, and would invoke the
// lambda expression three times, setting `verbose' to 3 when complete.
// It would also print out "A" and "B" to standard output.
// The returned array would contain the string "extra".
//
// C# 3.0 collection initializers are supported and encouraged:
// var p = new OptionSet () {
// { "h|?|help", v => ShowHelp () },
// };
//
// System.ComponentModel.TypeConverter is also supported, allowing the use of
// custom data types in the callback type; TypeConverter.ConvertFromString()
// is used to convert the value option to an instance of the specified
// type:
//
// var p = new OptionSet () {
// { "foo=", (Foo f) => Console.WriteLine (f.ToString ()) },
// };
//
// Random other tidbits:
// - Boolean options (those w/o '=' or ':' in the option format string)
// are explicitly enabled if they are followed with '+', and explicitly
// disabled if they are followed with '-':
// string a = null;
// var p = new OptionSet () {
// { "a", s => a = s },
// };
//  p.Parse (new string[]{"-a"});     // sets a != null
//  p.Parse (new string[]{"-a+"});    // sets a != null
//  p.Parse (new string[]{"-a-"});    // sets a == null
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Globalization;
using System.IO;
using System.Runtime.Serialization;
using System.Security.Permissions;
using System.Text;
using System.Text.RegularExpressions;
#if LINQ
using System.Linq;
#endif
#if TEST
using NDesk.Options;
#endif
namespace NDesk.Options {
public class OptionValueCollection : IList, IList<string> {

    // Backing storage for the parsed option values.
    readonly List<string> items = new List<string> ();
    // Parse context; supplies the Option being parsed for validation.
    readonly OptionContext context;

    internal OptionValueCollection (OptionContext c)
    {
        context = c;
    }

    #region ICollection
    void ICollection.CopyTo (Array array, int index) => ((ICollection) items).CopyTo (array, index);
    bool ICollection.IsSynchronized => ((ICollection) items).IsSynchronized;
    object ICollection.SyncRoot => ((ICollection) items).SyncRoot;
    #endregion

    #region ICollection<T>
    public void Add (string item) => items.Add (item);
    public void Clear () => items.Clear ();
    public bool Contains (string item) => items.Contains (item);
    public void CopyTo (string[] array, int arrayIndex) => items.CopyTo (array, arrayIndex);
    public bool Remove (string item) => items.Remove (item);
    public int Count => items.Count;
    public bool IsReadOnly => false;
    #endregion

    #region IEnumerable
    IEnumerator IEnumerable.GetEnumerator () => items.GetEnumerator ();
    #endregion

    #region IEnumerable<T>
    public IEnumerator<string> GetEnumerator () => items.GetEnumerator ();
    #endregion

    #region IList
    int IList.Add (object value) => ((IList) items).Add (value);
    bool IList.Contains (object value) => ((IList) items).Contains (value);
    int IList.IndexOf (object value) => ((IList) items).IndexOf (value);
    void IList.Insert (int index, object value) => ((IList) items).Insert (index, value);
    void IList.Remove (object value) => ((IList) items).Remove (value);
    void IList.RemoveAt (int index) => ((IList) items).RemoveAt (index);
    bool IList.IsFixedSize => false;
    object IList.this [int index] {
        get => this [index];
        set => ((IList) items)[index] = value;
    }
    #endregion

    #region IList<T>
    public int IndexOf (string item) => items.IndexOf (item);
    public void Insert (int index, string item) => items.Insert (index, item);
    public void RemoveAt (int index) => items.RemoveAt (index);

    // Validates that reading `index` is legal for the current option: the
    // index must be below the option's MaxValueCount, and a Required option
    // must actually have a value at that position.
    private void AssertValid (int index)
    {
        if (context.Option == null)
            throw new InvalidOperationException ("OptionContext.Option is null.");
        if (index >= context.Option.MaxValueCount)
            throw new ArgumentOutOfRangeException ("index");
        if (context.Option.OptionValueType == OptionValueType.Required &&
                index >= items.Count)
            throw new OptionException (string.Format (
                    context.OptionSet.MessageLocalizer ("Missing required value for option '{0}'."), context.OptionName),
                context.OptionName);
    }

    // Optional values may be absent: after validation, reads past Count
    // yield null rather than throwing.
    public string this [int index] {
        get {
            AssertValid (index);
            return index >= items.Count ? null : items [index];
        }
        set {
            items [index] = value;
        }
    }
    #endregion

    public List<string> ToList () => new List<string> (items);

    public string[] ToArray () => items.ToArray ();

    public override string ToString () => string.Join (", ", items.ToArray ());
}
public class OptionContext {

    public OptionContext (OptionSet set)
    {
        OptionSet = set;
        OptionValues = new OptionValueCollection (this);
    }

    /// <summary>The option currently being parsed, if any.</summary>
    public Option Option { get; set; }

    /// <summary>The name the current option was invoked with.</summary>
    public string OptionName { get; set; }

    /// <summary>The index of the current argument in the argument list.</summary>
    public int OptionIndex { get; set; }

    /// <summary>The OptionSet performing the parse.</summary>
    public OptionSet OptionSet { get; }

    /// <summary>The values collected for the current option.</summary>
    public OptionValueCollection OptionValues { get; }
}
/// <summary>
/// Whether an option accepts a value, derived from the prototype suffix
/// in Option.ParsePrototype: no terminator, ':', or '='.
/// </summary>
public enum OptionValueType {
    // No value may be supplied (prototype has neither '=' nor ':').
    None,
    // A value may follow but is not required (prototype ends in ':').
    Optional,
    // A value must be supplied (prototype ends in '=').
    Required,
}
/// <summary>
/// Base class for a single command-line option. The constructor parses a
/// prototype string such as <c>"name|n="</c> or <c>"debug:"</c> into the set of
/// aliases, the <see cref="OptionValueType"/>, and (for multi-value options)
/// the key/value separators embedded in <c>{...}</c> groups.
/// </summary>
public abstract class Option {
	string prototype, description;
	// Aliases split from the prototype on '|', with any '='/':' suffix stripped.
	string[] names;
	OptionValueType type;
	// Maximum number of values this option accepts.
	int count;
	// Key/value separators; only populated when count > 1 (null means "no splitting").
	string[] separators;

	/// <summary>Creates an option accepting at most one value.</summary>
	protected Option (string prototype, string description)
		: this (prototype, description, 1)
	{
	}

	/// <summary>
	/// Creates an option from a prototype string.
	/// </summary>
	/// <param name="prototype">Pipe-separated aliases, optionally ending in '=' or ':'.</param>
	/// <param name="description">Help text; may be null.</param>
	/// <param name="maxValueCount">Maximum number of values (must be consistent with the prototype's value type).</param>
	/// <exception cref="ArgumentNullException">prototype is null.</exception>
	/// <exception cref="ArgumentException">prototype is empty or inconsistent with maxValueCount.</exception>
	/// <exception cref="ArgumentOutOfRangeException">maxValueCount is negative.</exception>
	protected Option (string prototype, string description, int maxValueCount)
	{
		if (prototype == null)
			throw new ArgumentNullException ("prototype");
		if (prototype.Length == 0)
			throw new ArgumentException ("Cannot be the empty string.", "prototype");
		if (maxValueCount < 0)
			throw new ArgumentOutOfRangeException ("maxValueCount");
		this.prototype = prototype;
		this.names = prototype.Split ('|');
		this.description = description;
		this.count = maxValueCount;
		// ParsePrototype also strips '='/':' suffixes from names and collects separators.
		this.type = ParsePrototype ();
		if (this.count == 0 && type != OptionValueType.None)
			throw new ArgumentException (
				"Cannot provide maxValueCount of 0 for OptionValueType.Required or " +
				"OptionValueType.Optional.",
				"maxValueCount");
		if (this.type == OptionValueType.None && maxValueCount > 1)
			throw new ArgumentException (
				string.Format ("Cannot provide maxValueCount of {0} for OptionValueType.None.", maxValueCount),
				"maxValueCount");
		// "<>" is the default handler that receives unmatched arguments; it cannot demand values.
		if (Array.IndexOf (names, "<>") >= 0 &&
			((names.Length == 1 && this.type != OptionValueType.None) ||
			 (names.Length > 1 && this.MaxValueCount > 1)))
			throw new ArgumentException (
				"The default option handler '<>' cannot require values.",
				"prototype");
		// NOTE(review): lower-casing makes option names case-insensitive, but
		// ToLower() without a culture is culture-sensitive (e.g. Turkish 'I');
		// upstream Mono.Options does not fold case at all — confirm this is intended.
		for (int i = 0; i < names.Length; i++)
			names[i] = names[i].ToLower();
	}

	/// <summary>The original, unparsed prototype string.</summary>
	public string Prototype { get { return prototype; } }
	/// <summary>Help text supplied at construction; may be null.</summary>
	public string Description {get {return description;}}
	/// <summary>Whether this option takes no value, an optional value, or a required value.</summary>
	public OptionValueType OptionValueType {get {return type;}}
	/// <summary>Maximum number of values this option accepts.</summary>
	public int MaxValueCount {get {return count;}}

	/// <summary>Returns a defensive copy of the option's aliases.</summary>
	public string[] GetNames ()
	{
		return (string[]) names.Clone ();
	}

	/// <summary>Returns a defensive copy of the value separators (empty if none).</summary>
	public string[] GetValueSeparators ()
	{
		if (separators == null)
			return new string [0];
		return (string[]) separators.Clone ();
	}

	/// <summary>
	/// Converts a raw string value to <typeparamref name="T"/> via its
	/// <c>TypeConverter</c>, wrapping conversion failures in an OptionException
	/// that names the offending option.
	/// </summary>
	protected static T Parse<T> (string value, OptionContext c)
	{
		TypeConverter conv = TypeDescriptor.GetConverter (typeof (T));
		T t = default (T);
		try {
			// null stays default(T); only non-null strings are converted.
			if (value != null)
				t = (T) conv.ConvertFromString (value);
		}
		catch (Exception e) {
			throw new OptionException (
					string.Format (
						c.OptionSet.MessageLocalizer ("Could not convert string `{0}' to type {1} for option `{2}'."),
						value, typeof (T).Name, c.OptionName),
					c.OptionName, e);
		}
		return t;
	}

	// Internal accessors used by OptionSet; no defensive copies for performance.
	internal string[] Names {get {return names;}}
	internal string[] ValueSeparators {get {return separators;}}

	// Characters that terminate a name within the prototype.
	static readonly char[] NameTerminator = new char[]{'=', ':'};

	/// <summary>
	/// Scans each alias for a '='/':' suffix, strips it, verifies all aliases
	/// agree on the value type, and gathers any {sep} separators that follow it.
	/// </summary>
	private OptionValueType ParsePrototype ()
	{
		char type = '\0';
		List<string> seps = new List<string> ();
		for (int i = 0; i < names.Length; ++i) {
			string name = names [i];
			if (name.Length == 0)
				throw new ArgumentException ("Empty option names are not supported.", "prototype");
			int end = name.IndexOfAny (NameTerminator);
			if (end == -1)
				continue;
			// Strip the terminator (and anything after it) from the stored alias.
			names [i] = name.Substring (0, end);
			if (type == '\0' || type == name [end])
				type = name [end];
			else
				throw new ArgumentException (
						string.Format ("Conflicting option types: '{0}' vs. '{1}'.", type, name [end]),
						"prototype");
			AddSeparators (name, end, seps);
		}
		if (type == '\0')
			return OptionValueType.None;
		// Separators are only meaningful for options taking more than one value.
		if (count <= 1 && seps.Count != 0)
			throw new ArgumentException (
					string.Format ("Cannot provide key/value separators for Options taking {0} value(s).", count),
					"prototype");
		if (count > 1) {
			if (seps.Count == 0)
				this.separators = new string[]{":", "="};
			else if (seps.Count == 1 && seps [0].Length == 0)
				// A lone "{}" means "do not split the value at all".
				this.separators = null;
			else
				this.separators = seps.ToArray ();
		}
		return type == '=' ? OptionValueType.Required : OptionValueType.Optional;
	}

	/// <summary>
	/// Collects the separator characters/groups that follow the name terminator:
	/// each bare character is a one-char separator; "{...}" encloses a multi-char one.
	/// </summary>
	private static void AddSeparators (string name, int end, ICollection<string> seps)
	{
		int start = -1; // index just past an unmatched '{', or -1 when outside a group
		for (int i = end+1; i < name.Length; ++i) {
			switch (name [i]) {
			case '{':
				if (start != -1)
					throw new ArgumentException (
							string.Format ("Ill-formed name/value separator found in \"{0}\".", name),
							"prototype");
				start = i+1;
				break;
			case '}':
				if (start == -1)
					throw new ArgumentException (
							string.Format ("Ill-formed name/value separator found in \"{0}\".", name),
							"prototype");
				seps.Add (name.Substring (start, i-start));
				start = -1;
				break;
			default:
				if (start == -1)
					seps.Add (name [i].ToString ());
				break;
			}
		}
		// An unclosed '{' is an error.
		if (start != -1)
			throw new ArgumentException (
					string.Format ("Ill-formed name/value separator found in \"{0}\".", name),
					"prototype");
	}

	/// <summary>
	/// Runs the option's handler for the values accumulated in the context,
	/// then resets the context for the next option.
	/// </summary>
	public void Invoke (OptionContext c)
	{
		OnParseComplete (c);
		c.OptionName = null;
		c.Option = null;
		c.OptionValues.Clear ();
	}

	/// <summary>Implemented by subclasses to consume the parsed values.</summary>
	protected abstract void OnParseComplete (OptionContext c);

	public override string ToString ()
	{
		return Prototype;
	}
}
/// <summary>
/// Thrown when a command-line argument cannot be parsed or converted.
/// Carries the name of the offending option and round-trips it through
/// binary serialization under the "OptionName" key.
/// </summary>
[Serializable]
public class OptionException : Exception {
	// Name of the option that triggered the failure; may be null.
	private string optionName;

	public OptionException ()
	{
	}

	public OptionException (string message, string optionName)
		: base (message)
	{
		this.optionName = optionName;
	}

	public OptionException (string message, string optionName, Exception innerException)
		: base (message, innerException)
	{
		this.optionName = optionName;
	}

	// Deserialization constructor: restores the name stored by GetObjectData.
	protected OptionException (SerializationInfo info, StreamingContext context)
		: base (info, context)
	{
		optionName = info.GetString ("OptionName");
	}

	/// <summary>The name of the option responsible for the exception (may be null).</summary>
	public string OptionName {
		get { return optionName; }
	}

	[SecurityPermission (SecurityAction.LinkDemand, SerializationFormatter = true)]
	public override void GetObjectData (SerializationInfo info, StreamingContext context)
	{
		base.GetObjectData (info, context);
		info.AddValue ("OptionName", optionName);
	}
}
/// <summary>Callback invoked with the key/value pair parsed for a two-value option.</summary>
public delegate void OptionAction<TKey, TValue> (TKey key, TValue value);
/// <summary>
/// A keyed collection of <see cref="Option"/>s that parses command-line
/// arguments. Every alias of an option is registered as a key; "&lt;>" acts as
/// a catch-all handler for otherwise unmatched arguments. Parsing here is
/// case-insensitive (arguments and names are lower-cased).
/// </summary>
public class OptionSet : KeyedCollection<string, Option>
{
	/// <summary>Creates a set whose message localizer is the identity function.</summary>
	public OptionSet ()
		: this (delegate (string f) {return f;})
	{
	}

	/// <summary>Creates a set using <paramref name="localizer"/> to translate messages.</summary>
	public OptionSet (Converter<string, string> localizer)
	{
		this.localizer = localizer;
	}

	Converter<string, string> localizer;

	/// <summary>Translator applied to error/help strings before display.</summary>
	public Converter<string, string> MessageLocalizer {
		get {return localizer;}
	}

	/// <summary>The primary key of an option is its first alias.</summary>
	protected override string GetKeyForItem (Option item)
	{
		if (item == null)
			throw new ArgumentNullException ("option");
		if (item.Names != null && item.Names.Length > 0)
			return item.Names [0];
		// This should never happen, as it's invalid for Option to be
		// constructed w/o any names.
		throw new InvalidOperationException ("Option has no names!");
	}

	/// <summary>Looks up an option by name, returning null instead of throwing.</summary>
	[Obsolete ("Use KeyedCollection.this[string]")]
	protected Option GetOptionForName (string option)
	{
		if (option == null)
			throw new ArgumentNullException ("option");
		try {
			return base [option];
		}
		catch (KeyNotFoundException) {
			return null;
		}
	}

	protected override void InsertItem (int index, Option item)
	{
		base.InsertItem (index, item);
		// Register the secondary aliases (index 0 is handled by KeyedCollection).
		AddImpl (item);
	}

	protected override void RemoveItem (int index)
	{
		base.RemoveItem (index);
		// NOTE(review): Items[index] is read AFTER the removal, so it refers to
		// the *next* option (or throws when removing the last one); later
		// Mono.Options fetches the item before calling base.RemoveItem — verify.
		Option p = Items [index];
		// KeyedCollection.RemoveItem() handles the 0th item
		for (int i = 1; i < p.Names.Length; ++i) {
			Dictionary.Remove (p.Names [i]);
		}
	}

	protected override void SetItem (int index, Option item)
	{
		base.SetItem (index, item);
		// NOTE(review): RemoveItem(index) removes the item that was just set
		// from the underlying collection before AddImpl re-registers aliases —
		// confirm the replaced item's aliases (not the new item) are the target.
		RemoveItem (index);
		AddImpl (item);
	}

	/// <summary>
	/// Registers the option's secondary aliases in the key dictionary,
	/// rolling back any partial registration on failure (e.g. duplicate alias).
	/// </summary>
	private void AddImpl (Option option)
	{
		if (option == null)
			throw new ArgumentNullException ("option");
		List<string> added = new List<string> (option.Names.Length);
		try {
			// KeyedCollection.InsertItem/SetItem handle the 0th name.
			for (int i = 1; i < option.Names.Length; ++i) {
				Dictionary.Add (option.Names [i], option);
				added.Add (option.Names [i]);
			}
		}
		catch (Exception) {
			foreach (string name in added)
				Dictionary.Remove (name);
			throw;
		}
	}

	/// <summary>Adds an option and returns this set, enabling fluent chaining.</summary>
	public new OptionSet Add (Option option)
	{
		base.Add (option);
		return this;
	}

	// Adapter that forwards parsed values to a caller-supplied delegate.
	sealed class ActionOption : Option {
		Action<OptionValueCollection> action;

		public ActionOption (string prototype, string description, int count, Action<OptionValueCollection> action)
			: base (prototype, description, count)
		{
			if (action == null)
				throw new ArgumentNullException ("action");
			this.action = action;
		}

		protected override void OnParseComplete (OptionContext c)
		{
			action (c.OptionValues);
		}
	}

	/// <summary>Adds a single-value option with no description.</summary>
	public OptionSet Add (string prototype, Action<string> action)
	{
		return Add (prototype, null, action);
	}

	/// <summary>Adds a single-value option invoking <paramref name="action"/> with the value.</summary>
	public OptionSet Add (string prototype, string description, Action<string> action)
	{
		if (action == null)
			throw new ArgumentNullException ("action");
		Option p = new ActionOption (prototype, description, 1,
				delegate (OptionValueCollection v) { action (v [0]); });
		base.Add (p);
		return this;
	}

	/// <summary>Adds a key/value option with no description.</summary>
	public OptionSet Add (string prototype, OptionAction<string, string> action)
	{
		return Add (prototype, null, action);
	}

	/// <summary>Adds a two-value (key/value) option.</summary>
	public OptionSet Add (string prototype, string description, OptionAction<string, string> action)
	{
		if (action == null)
			throw new ArgumentNullException ("action");
		Option p = new ActionOption (prototype, description, 2,
				delegate (OptionValueCollection v) {action (v [0], v [1]);});
		base.Add (p);
		return this;
	}

	// Adapter converting the single parsed value to T before invoking the delegate.
	sealed class ActionOption<T> : Option {
		Action<T> action;

		public ActionOption (string prototype, string description, Action<T> action)
			: base (prototype, description, 1)
		{
			if (action == null)
				throw new ArgumentNullException ("action");
			this.action = action;
		}

		protected override void OnParseComplete (OptionContext c)
		{
			action (Parse<T> (c.OptionValues [0], c));
		}
	}

	// Adapter converting the parsed key/value pair to TKey/TValue.
	sealed class ActionOption<TKey, TValue> : Option {
		OptionAction<TKey, TValue> action;

		public ActionOption (string prototype, string description, OptionAction<TKey, TValue> action)
			: base (prototype, description, 2)
		{
			if (action == null)
				throw new ArgumentNullException ("action");
			this.action = action;
		}

		protected override void OnParseComplete (OptionContext c)
		{
			action (
					Parse<TKey> (c.OptionValues [0], c),
					Parse<TValue> (c.OptionValues [1], c));
		}
	}

	/// <summary>Adds a strongly-typed single-value option with no description.</summary>
	public OptionSet Add<T> (string prototype, Action<T> action)
	{
		return Add (prototype, null, action);
	}

	/// <summary>Adds a strongly-typed single-value option.</summary>
	public OptionSet Add<T> (string prototype, string description, Action<T> action)
	{
		return Add (new ActionOption<T> (prototype, description, action));
	}

	/// <summary>Adds a strongly-typed key/value option with no description.</summary>
	public OptionSet Add<TKey, TValue> (string prototype, OptionAction<TKey, TValue> action)
	{
		return Add (prototype, null, action);
	}

	/// <summary>Adds a strongly-typed key/value option.</summary>
	public OptionSet Add<TKey, TValue> (string prototype, string description, OptionAction<TKey, TValue> action)
	{
		return Add (new ActionOption<TKey, TValue> (prototype, description, action));
	}

	/// <summary>Factory hook so subclasses can supply a custom parse context.</summary>
	protected virtual OptionContext CreateOptionContext ()
	{
		return new OptionContext (this);
	}

#if LINQ
	public List<string> Parse (IEnumerable<string> arguments)
	{
		bool process = true;
		OptionContext c = CreateOptionContext ();
		c.OptionIndex = -1;
		var def = GetOptionForName ("<>");
		var unprocessed =
			from argument in arguments
			where ++c.OptionIndex >= 0 && (process || def != null)
				? process
					? argument == "--"
						? (process = false)
						: !Parse (argument, c)
							? def != null
								? Unprocessed (null, def, c, argument)
								: true
							: false
					: def != null
						? Unprocessed (null, def, c, argument)
						: true
				: true
			select argument;
		List<string> r = unprocessed.ToList ();
		if (c.Option != null)
			c.Option.Invoke (c);
		return r;
	}
#else
	/// <summary>
	/// Parses <paramref name="arguments"/>, invoking the registered handlers,
	/// and returns the arguments that no option consumed. "--" stops option
	/// processing; everything after it is returned (or fed to the "&lt;>" handler).
	/// </summary>
	public List<string> Parse (IEnumerable<string> arguments)
	{
		OptionContext c = CreateOptionContext ();
		c.OptionIndex = -1;
		bool process = true;
		List<string> unprocessed = new List<string> ();
		Option def = Contains ("<>") ? this ["<>"] : null;
		foreach (string argument in arguments) {
			++c.OptionIndex;
			if (argument == "--") {
				process = false;
				continue;
			}
			if (!process) {
				Unprocessed (unprocessed, def, c, argument);
				continue;
			}
			// NOTE(review): lower-casing the WHOLE argument also lower-cases any
			// attached value (e.g. "--file=C:\Temp" -> value "c:\temp") and any
			// pending value for a Required option; upstream Mono.Options does not
			// fold case here. Likely only the name should be folded — verify.
			if (!Parse (argument.ToLower(), c))
				Unprocessed (unprocessed, def, c, argument);
		}
		// A trailing Required-value option with no value still gets invoked here.
		if (c.Option != null)
			c.Option.Invoke (c);
		return unprocessed;
	}
#endif

	/// <summary>
	/// Routes an unmatched argument either to the "&lt;>" default handler or
	/// into the list of leftover arguments.
	/// </summary>
	private static bool Unprocessed (ICollection<string> extra, Option def, OptionContext c, string argument)
	{
		if (def == null) {
			extra.Add (argument);
			return false;
		}
		c.OptionValues.Add (argument);
		c.Option = def;
		c.Option.Invoke (c);
		return false;
	}

	// Splits an argument into flag ("--", "-", "/"), name, separator and value.
	// NOTE(review): instance field holding an immutable Regex — could be static readonly.
	private readonly Regex ValueOption = new Regex (
		@"^(?<flag>--|-|/)(?<name>[^:=]+)((?<sep>[:=])(?<value>.*))?$");

	/// <summary>
	/// Decomposes an argument into its parts; returns false when the argument
	/// does not look like an option. sep/value are only set when both matched.
	/// </summary>
	protected bool GetOptionParts (string argument, out string flag, out string name, out string sep, out string value)
	{
		if (argument == null)
			throw new ArgumentNullException ("argument");
		flag = name = sep = value = null;
		Match m = ValueOption.Match (argument);
		if (!m.Success) {
			return false;
		}
		flag = m.Groups ["flag"].Value;
		name = m.Groups ["name"].Value;
		if (m.Groups ["sep"].Success && m.Groups ["value"].Success) {
			sep = m.Groups ["sep"].Value;
			value = m.Groups ["value"].Value;
		}
		return true;
	}

	/// <summary>
	/// Attempts to consume one argument: as a pending value for the previous
	/// option, as a registered option, as a bool toggle ("-x+"/"-x-"), or as a
	/// bundle of single-char options ("-abc"). Returns false if unmatched.
	/// </summary>
	protected virtual bool Parse (string argument, OptionContext c)
	{
		// A previous option is still waiting for its value(s).
		if (c.Option != null) {
			ParseValue (argument, c);
			return true;
		}
		string f, n, s, v;
		if (!GetOptionParts (argument, out f, out n, out s, out v))
			return false;
		Option p;
		if (Contains (n)) {
			p = this [n];
			c.OptionName = f + n;
			c.Option = p;
			switch (p.OptionValueType) {
			case OptionValueType.None:
				c.OptionValues.Add (n);
				c.Option.Invoke (c);
				break;
			case OptionValueType.Optional:
			case OptionValueType.Required:
				ParseValue (v, c);
				break;
			}
			return true;
		}
		// no match; is it a bool option?
		if (ParseBool (argument, n, c))
			return true;
		// is it a bundled option?
		if (ParseBundledValue (f, string.Concat (n + s + v), c))
			return true;
		return false;
	}

	/// <summary>
	/// Splits <paramref name="option"/> on the option's value separators,
	/// accumulates the pieces, and invokes the handler once enough values
	/// have arrived (or immediately for Optional-value options).
	/// </summary>
	private void ParseValue (string option, OptionContext c)
	{
		if (option != null)
			foreach (string o in c.Option.ValueSeparators != null
					? option.Split (c.Option.ValueSeparators, StringSplitOptions.None)
					: new string[]{option}) {
				c.OptionValues.Add (o);
			}
		if (c.OptionValues.Count == c.Option.MaxValueCount ||
				c.Option.OptionValueType == OptionValueType.Optional)
			c.Option.Invoke (c);
		else if (c.OptionValues.Count > c.Option.MaxValueCount) {
			throw new OptionException (localizer (string.Format (
							"Error: Found {0} option values when expecting {1}.",
							c.OptionValues.Count, c.Option.MaxValueCount)),
					c.OptionName);
		}
	}

	/// <summary>
	/// Handles "-name+" / "-name-" toggles for a registered option "name":
	/// '+' passes the argument as the value, '-' passes null.
	/// </summary>
	private bool ParseBool (string option, string n, OptionContext c)
	{
		Option p;
		string rn;
		if (n.Length >= 1 && (n [n.Length-1] == '+' || n [n.Length-1] == '-') &&
				Contains ((rn = n.Substring (0, n.Length-1)))) {
			p = this [rn];
			string v = n [n.Length-1] == '+' ? option : null;
			c.OptionName = option;
			c.Option = p;
			c.OptionValues.Add (v);
			p.Invoke (c);
			return true;
		}
		return false;
	}

	/// <summary>
	/// Handles bundled single-char options ("-abc") and inline values
	/// ("-dNAME"): the first value-taking option consumes the rest of the bundle.
	/// Only "-" flags may be bundled.
	/// </summary>
	private bool ParseBundledValue (string f, string n, OptionContext c)
	{
		if (f != "-")
			return false;
		for (int i = 0; i < n.Length; ++i) {
			Option p;
			string opt = f + n [i].ToString ();
			string rn = n [i].ToString ();
			if (!Contains (rn)) {
				// Not a bundle at all if the very first char is unknown.
				if (i == 0)
					return false;
				throw new OptionException (string.Format (localizer (
								"Cannot bundle unregistered option '{0}'."), opt), opt);
			}
			p = this [rn];
			switch (p.OptionValueType) {
			case OptionValueType.None:
				Invoke (c, opt, n, p);
				break;
			case OptionValueType.Optional:
			case OptionValueType.Required: {
				// Everything after this char is the option's value.
				string v = n.Substring (i+1);
				c.Option = p;
				c.OptionName = opt;
				ParseValue (v.Length != 0 ? v : null, c);
				return true;
			}
			default:
				throw new InvalidOperationException ("Unknown OptionValueType: " + p.OptionValueType);
			}
		}
		return true;
	}

	// Sets up the context for a flag option and invokes its handler.
	private static void Invoke (OptionContext c, string name, string value, Option option)
	{
		c.OptionName = name;
		c.Option = option;
		c.OptionValues.Add (value);
		option.Invoke (c);
	}

	// Column at which option descriptions start in WriteOptionDescriptions output.
	private const int OptionWidth = 29;

	/// <summary>
	/// Writes a formatted help listing of all registered options (the "&lt;>"
	/// default handler is skipped), wrapping descriptions to ~80 columns.
	/// </summary>
	public void WriteOptionDescriptions (TextWriter o)
	{
		foreach (Option p in this) {
			int written = 0;
			if (!WriteOptionPrototype (o, p, ref written))
				continue;
			if (written < OptionWidth)
				o.Write (new string (' ', OptionWidth - written));
			else {
				// Prototype too long: push the description to the next line.
				o.WriteLine ();
				o.Write (new string (' ', OptionWidth));
			}
			List<string> lines = GetLines (localizer (GetDescription (p.Description)));
			o.WriteLine (lines [0]);
			string prefix = new string (' ', OptionWidth+2);
			for (int i = 1; i < lines.Count; ++i) {
				o.Write (prefix);
				o.WriteLine (lines [i]);
			}
		}
	}

	/// <summary>
	/// Writes "-x, --example[=VALUE]"-style prototypes for an option.
	/// Returns false when the option has no displayable names (only "&lt;>").
	/// </summary>
	bool WriteOptionPrototype (TextWriter o, Option p, ref int written)
	{
		string[] names = p.Names;
		int i = GetNextOptionIndex (names, 0);
		if (i == names.Length)
			return false;
		// NOTE(review): writes names[0] although i may be > 0 when names[0] is
		// "<>"; later Mono.Options writes names[i] here — verify against upstream.
		if (names [i].Length == 1) {
			Write (o, ref written, "  -");
			Write (o, ref written, names [0]);
		}
		else {
			Write (o, ref written, "      --");
			Write (o, ref written, names [0]);
		}
		for ( i = GetNextOptionIndex (names, i+1);
				i < names.Length; i = GetNextOptionIndex (names, i+1)) {
			Write (o, ref written, ", ");
			Write (o, ref written, names [i].Length == 1 ? "-" : "--");
			Write (o, ref written, names [i]);
		}
		if (p.OptionValueType == OptionValueType.Optional ||
				p.OptionValueType == OptionValueType.Required) {
			if (p.OptionValueType == OptionValueType.Optional) {
				Write (o, ref written, localizer ("["));
			}
			Write (o, ref written, localizer ("=" + GetArgumentName (0, p.MaxValueCount, p.Description)));
			string sep = p.ValueSeparators != null && p.ValueSeparators.Length > 0
				? p.ValueSeparators [0]
				: " ";
			for (int c = 1; c < p.MaxValueCount; ++c) {
				Write (o, ref written, localizer (sep + GetArgumentName (c, p.MaxValueCount, p.Description)));
			}
			if (p.OptionValueType == OptionValueType.Optional) {
				Write (o, ref written, localizer ("]"));
			}
		}
		return true;
	}

	// Skips over the "<>" default-handler pseudo-name.
	static int GetNextOptionIndex (string[] names, int i)
	{
		while (i < names.Length && names [i] == "<>") {
			++i;
		}
		return i;
	}

	// Writes s and tracks how many characters have been emitted on the line.
	static void Write (TextWriter o, ref int n, string s)
	{
		n += s.Length;
		o.Write (s);
	}

	/// <summary>
	/// Extracts the display name for value slot <paramref name="index"/> from
	/// "{name}" / "{0:name}" markers in the description, defaulting to
	/// VALUE / VALUE1..VALUEn.
	/// </summary>
	private static string GetArgumentName (int index, int maxIndex, string description)
	{
		if (description == null)
			return maxIndex == 1 ? "VALUE" : "VALUE" + (index + 1);
		string[] nameStart;
		if (maxIndex == 1)
			nameStart = new string[]{"{0:", "{"};
		else
			nameStart = new string[]{"{" + index + ":"};
		for (int i = 0; i < nameStart.Length; ++i) {
			int start, j = 0;
			do {
				start = description.IndexOf (nameStart [i], j);
			} while (start >= 0 && j != 0 ? description [j++ - 1] == '{' : false);
			if (start == -1)
				continue;
			int end = description.IndexOf ("}", start);
			if (end == -1)
				continue;
			return description.Substring (start + nameStart [i].Length, end - start - nameStart [i].Length);
		}
		return maxIndex == 1 ? "VALUE" : "VALUE" + (index + 1);
	}

	/// <summary>
	/// Strips the "{...}" argument-name markup from a description:
	/// "{{" -> "{", "}}" -> "}", "{name}" -> "name", "{0:name}" -> "name".
	/// </summary>
	private static string GetDescription (string description)
	{
		if (description == null)
			return string.Empty;
		StringBuilder sb = new StringBuilder (description.Length);
		int start = -1; // start of the current "{...}" group, or -1 outside one
		for (int i = 0; i < description.Length; ++i) {
			switch (description [i]) {
			case '{':
				if (i == start) {
					// "{{" escape.
					sb.Append ('{');
					start = -1;
				}
				else if (start < 0)
					start = i + 1;
				break;
			case '}':
				if (start < 0) {
					// Must be a "}}" escape outside a group.
					if ((i+1) == description.Length || description [i+1] != '}')
						throw new InvalidOperationException ("Invalid option description: " + description);
					++i;
					sb.Append ("}");
				}
				else {
					sb.Append (description.Substring (start, i - start));
					start = -1;
				}
				break;
			case ':':
				// Inside a group, ':' separates the index prefix from the name.
				if (start < 0)
					goto default;
				start = i + 1;
				break;
			default:
				if (start < 0)
					sb.Append (description [i]);
				break;
			}
		}
		return sb.ToString ();
	}

	/// <summary>
	/// Word-wraps a description into lines that fit the help layout,
	/// hyphenating when a break falls mid-word.
	/// </summary>
	private static List<string> GetLines (string description)
	{
		List<string> lines = new List<string> ();
		if (string.IsNullOrEmpty (description)) {
			lines.Add (string.Empty);
			return lines;
		}
		int length = 80 - OptionWidth - 2;
		int start = 0, end;
		do {
			end = GetLineEnd (start, length, description);
			bool cont = false;
			if (end < description.Length) {
				char c = description [end];
				if (c == '-' || (char.IsWhiteSpace (c) && c != '\n'))
					++end;
				else if (c != '\n') {
					// Breaking mid-word: back up and hyphenate.
					cont = true;
					--end;
				}
			}
			lines.Add (description.Substring (start, end - start));
			if (cont) {
				lines [lines.Count-1] += "-";
			}
			start = end;
			if (start < description.Length && description [start] == '\n')
				++start;
		} while (end < description.Length);
		return lines;
	}

	/// <summary>
	/// Finds the best break position within [start, start+length):
	/// a '\n' wins outright, otherwise the last separator char, else a hard cut.
	/// </summary>
	private static int GetLineEnd (int start, int length, string description)
	{
		int end = Math.Min (start + length, description.Length);
		int sep = -1;
		for (int i = start; i < end; ++i) {
			switch (description [i]) {
			case ' ':
			case '\t':
			case '\v':
			case '-':
			case ',':
			case '.':
			case ';':
				sep = i;
				break;
			case '\n':
				return i;
			}
		}
		if (sep == -1 || end == description.Length)
			return end;
		return sep;
	}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics;
namespace Microsoft.Xml
{
using System;
/// <summary>
/// Incremental BinHex (hexadecimal) decoder: converts hex characters into
/// bytes, writing into a caller-supplied output buffer. XML whitespace is
/// skipped; an odd trailing nibble is cached between calls.
/// </summary>
internal class BinHexDecoder : IncrementalReadDecoder
{
    //
    // Fields
    //
    private byte[] _buffer;            // output buffer supplied via SetNextOutputBuffer
    private int _startIndex;           // first writable index in _buffer
    private int _curIndex;             // next write position in _buffer
    private int _endIndex;             // exclusive end of the writable region
    private bool _hasHalfByteCached;   // true when a high nibble is pending
    private byte _cachedHalfByte;      // the pending high nibble
    //
    // IncrementalReadDecoder interface
    //
    /// <summary>Number of bytes written to the current output buffer so far.</summary>
    internal override int DecodedCount
    {
        get
        {
            return _curIndex - _startIndex;
        }
    }
    /// <summary>True when the current output buffer has no remaining space.</summary>
    internal override bool IsFull
    {
        get
        {
            return _curIndex == _endIndex;
        }
    }
    /// <summary>
    /// Decodes up to <paramref name="len"/> chars from <paramref name="chars"/>
    /// into the current output buffer; returns the number of chars consumed.
    /// </summary>
    internal override unsafe int Decode(char[] chars, int startPos, int len)
    {
        if (chars == null)
        {
            throw new ArgumentNullException("chars");
        }
        if (len < 0)
        {
            throw new ArgumentOutOfRangeException("len");
        }
        if (startPos < 0)
        {
            throw new ArgumentOutOfRangeException("startPos");
        }
        if (chars.Length - startPos < len)
        {
            throw new ArgumentOutOfRangeException("len");
        }
        if (len == 0)
        {
            return 0;
        }
        int bytesDecoded, charsDecoded;
        fixed (char* pChars = &chars[startPos])
        {
            fixed (byte* pBytes = &_buffer[_curIndex])
            {
                Decode(pChars, pChars + len, pBytes, pBytes + (_endIndex - _curIndex),
                    ref _hasHalfByteCached, ref _cachedHalfByte, out charsDecoded, out bytesDecoded);
            }
        }
        _curIndex += bytesDecoded;
        return charsDecoded;
    }
    /// <summary>
    /// String overload of <see cref="Decode(char[],int,int)"/> with identical
    /// validation and semantics.
    /// </summary>
    internal override unsafe int Decode(string str, int startPos, int len)
    {
        if (str == null)
        {
            throw new ArgumentNullException("str");
        }
        if (len < 0)
        {
            throw new ArgumentOutOfRangeException("len");
        }
        if (startPos < 0)
        {
            throw new ArgumentOutOfRangeException("startPos");
        }
        if (str.Length - startPos < len)
        {
            throw new ArgumentOutOfRangeException("len");
        }
        if (len == 0)
        {
            return 0;
        }
        int bytesDecoded, charsDecoded;
        fixed (char* pChars = str)
        {
            fixed (byte* pBytes = &_buffer[_curIndex])
            {
                Decode(pChars + startPos, pChars + startPos + len, pBytes, pBytes + (_endIndex - _curIndex),
                    ref _hasHalfByteCached, ref _cachedHalfByte, out charsDecoded, out bytesDecoded);
            }
        }
        _curIndex += bytesDecoded;
        return charsDecoded;
    }
    /// <summary>
    /// Discards the cached half byte. Note the buffer indexes are intentionally
    /// left alone; they are re-established by SetNextOutputBuffer.
    /// </summary>
    internal override void Reset()
    {
        _hasHalfByteCached = false;
        _cachedHalfByte = 0;
    }
    /// <summary>Targets subsequent decoding at buffer[index .. index+count).</summary>
    internal override void SetNextOutputBuffer(Array buffer, int index, int count)
    {
        Debug.Assert(buffer != null);
        Debug.Assert(count >= 0);
        Debug.Assert(index >= 0);
        Debug.Assert(buffer.Length - index >= count);
        Debug.Assert((buffer as byte[]) != null);
        _buffer = (byte[])buffer;
        _startIndex = index;
        _curIndex = index;
        _endIndex = index + count;
    }
    //
    // Static methods
    //
    /// <summary>
    /// One-shot decode of a whole char array. Throws unless
    /// <paramref name="allowOddChars"/> is set and a trailing odd nibble remains.
    /// </summary>
    public static unsafe byte[] Decode(char[] chars, bool allowOddChars)
    {
        if (chars == null)
        {
            throw new ArgumentNullException("chars");
        }
        int len = chars.Length;
        if (len == 0)
        {
            return new byte[0];
        }
        byte[] bytes = new byte[(len + 1) / 2];
        int bytesDecoded, charsDecoded;
        bool hasHalfByteCached = false;
        byte cachedHalfByte = 0;
        fixed (char* pChars = &chars[0])
        {
            fixed (byte* pBytes = &bytes[0])
            {
                Decode(pChars, pChars + len, pBytes, pBytes + bytes.Length, ref hasHalfByteCached, ref cachedHalfByte, out charsDecoded, out bytesDecoded);
            }
        }
        if (hasHalfByteCached && !allowOddChars)
        {
            throw new XmlException(ResXml.Xml_InvalidBinHexValueOddCount, new string(chars));
        }
        // Shrink to the exact decoded length (whitespace shrinks the output).
        if (bytesDecoded < bytes.Length)
        {
            byte[] tmp = new byte[bytesDecoded];
            Array.Copy(bytes, 0, tmp, 0, bytesDecoded);
            bytes = tmp;
        }
        return bytes;
    }
    //
    // Private methods
    //
    /// <summary>
    /// Core decode loop: consumes hex digits (case-insensitive), skips XML
    /// whitespace, throws on any other character. Two nibbles form one output
    /// byte; a dangling nibble is carried in hasHalfByteCached/cachedHalfByte.
    /// Stops when either the input or the output range is exhausted.
    /// </summary>
    private static unsafe void Decode(char* pChars, char* pCharsEndPos,
                            byte* pBytes, byte* pBytesEndPos,
                            ref bool hasHalfByteCached, ref byte cachedHalfByte,
                            out int charsDecoded, out int bytesDecoded)
    {
#if DEBUG
        Debug.Assert(pCharsEndPos - pChars >= 0);
        Debug.Assert(pBytesEndPos - pBytes >= 0);
#endif
        char* pChar = pChars;
        byte* pByte = pBytes;
        XmlCharType xmlCharType = XmlCharType.Instance;
        while (pChar < pCharsEndPos && pByte < pBytesEndPos)
        {
            byte halfByte;
            char ch = *pChar++;
            if (ch >= 'a' && ch <= 'f')
            {
                halfByte = (byte)(ch - 'a' + 10);
            }
            else if (ch >= 'A' && ch <= 'F')
            {
                halfByte = (byte)(ch - 'A' + 10);
            }
            else if (ch >= '0' && ch <= '9')
            {
                halfByte = (byte)(ch - '0');
            }
            else if ((xmlCharType.charProperties[ch] & XmlCharType.fWhitespace) != 0)
            { // else if ( xmlCharType.IsWhiteSpace( ch ) ) {
                continue;
            }
            else
            {
                throw new XmlException(ResXml.Xml_InvalidBinHexValue, new string(pChars, 0, (int)(pCharsEndPos - pChars)));
            }
            if (hasHalfByteCached)
            {
                // Second nibble: combine with the cached high nibble.
                *pByte++ = (byte)((cachedHalfByte << 4) + halfByte);
                hasHalfByteCached = false;
            }
            else
            {
                // First nibble of a pair: stash it until the next digit arrives.
                cachedHalfByte = halfByte;
                hasHalfByteCached = true;
            }
        }
        bytesDecoded = (int)(pByte - pBytes);
        charsDecoded = (int)(pChar - pChars);
    }
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System.Collections.Specialized;
using System.Linq;
using System.Reactive.Linq;
using System.Threading.Tasks;
using Avalonia.Animation;
using Avalonia.Controls.Primitives;
using Avalonia.Controls.Utils;
using Avalonia.Data;
namespace Avalonia.Controls.Presenters
{
/// <summary>
/// Displays pages inside an <see cref="ItemsControl"/>.
/// </summary>
public class CarouselPresenter : ItemsPresenterBase
{
    /// <summary>
    /// Defines the <see cref="IsVirtualized"/> property.
    /// </summary>
    public static readonly StyledProperty<bool> IsVirtualizedProperty =
        Carousel.IsVirtualizedProperty.AddOwner<CarouselPresenter>();
    /// <summary>
    /// Defines the <see cref="SelectedIndex"/> property.
    /// </summary>
    public static readonly DirectProperty<CarouselPresenter, int> SelectedIndexProperty =
        SelectingItemsControl.SelectedIndexProperty.AddOwner<CarouselPresenter>(
            o => o.SelectedIndex,
            (o, v) => o.SelectedIndex = v);
    /// <summary>
    /// Defines the <see cref="PageTransition"/> property.
    /// </summary>
    public static readonly StyledProperty<IPageTransition> PageTransitionProperty =
        Carousel.PageTransitionProperty.AddOwner<CarouselPresenter>();
    // -1 means "no page selected".
    private int _selectedIndex = -1;
    // The in-flight page transition, or null when idle; also acts as the
    // "busy" flag for SelectedIndexChanged's queueing logic.
    private Task _currentTransition;
    // Index of a transition requested while one was running; -1 when none.
    private int _queuedTransitionIndex = -1;
    /// <summary>
    /// Initializes static members of the <see cref="CarouselPresenter"/> class.
    /// </summary>
    static CarouselPresenter()
    {
        SelectedIndexProperty.Changed.AddClassHandler<CarouselPresenter>(x => x.SelectedIndexChanged);
    }
    /// <summary>
    /// Gets or sets a value indicating whether the items in the carousel are virtualized.
    /// </summary>
    /// <remarks>
    /// When the carousel is virtualized, only the active page is held in memory.
    /// </remarks>
    public bool IsVirtualized
    {
        get { return GetValue(IsVirtualizedProperty); }
        set { SetValue(IsVirtualizedProperty, value); }
    }
    /// <summary>
    /// Gets or sets the index of the selected page.
    /// </summary>
    /// <remarks>
    /// Out-of-range values are coerced to -1 ("nothing selected"); the change
    /// notification is only raised when the effective value actually changes.
    /// </remarks>
    public int SelectedIndex
    {
        get
        {
            return _selectedIndex;
        }
        set
        {
            var old = SelectedIndex;
            var effective = (value >= 0 && value < Items?.Cast<object>().Count()) ? value : -1;
            if (old != effective)
            {
                _selectedIndex = effective;
                RaisePropertyChanged(SelectedIndexProperty, old, effective, BindingPriority.LocalValue);
            }
        }
    }
    /// <summary>
    /// Gets or sets a transition to use when switching pages.
    /// </summary>
    public IPageTransition PageTransition
    {
        get { return GetValue(PageTransitionProperty); }
        set { SetValue(PageTransitionProperty, value); }
    }
    /// <inheritdoc/>
    protected override void PanelCreated(IPanel panel)
    {
        // Fire-and-forget: show the initial page; CS4014 (unawaited task) is
        // deliberately suppressed.
#pragma warning disable 4014
        MoveToPage(-1, SelectedIndex);
#pragma warning restore 4014
    }
    /// <inheritdoc/>
    protected override void ItemsChanged(NotifyCollectionChangedEventArgs e)
    {
        switch (e.Action)
        {
            case NotifyCollectionChangedAction.Remove:
                if (!IsVirtualized)
                {
                    var generator = ItemContainerGenerator;
                    var containers = generator.RemoveRange(e.OldStartingIndex, e.OldItems.Count);
                    Panel.Children.RemoveAll(containers.Select(x => x.ContainerControl));
                    // NOTE(review): SelectedIndex may still reference the removed
                    // item's position here — confirm the desired page after removal.
#pragma warning disable 4014
                    MoveToPage(-1, SelectedIndex);
#pragma warning restore 4014
                }
                break;
            case NotifyCollectionChangedAction.Reset:
                {
                    // Drop every realized container, then re-show a sensible page:
                    // keep the current selection, else fall back to 0 or -1.
                    var generator = ItemContainerGenerator;
                    var containers = generator.Containers.ToList();
                    generator.Clear();
                    Panel.Children.RemoveAll(containers.Select(x => x.ContainerControl));
#pragma warning disable 4014
                    var newIndex = SelectedIndex;
                    if(SelectedIndex < 0)
                    {
                        if(Items != null && Items.Count() > 0)
                        {
                            newIndex = 0;
                        }
                        else
                        {
                            newIndex = -1;
                        }
                    }
                    MoveToPage(-1, newIndex);
#pragma warning restore 4014
                }
                break;
        }
    }
    /// <summary>
    /// Moves to the selected page, animating if a <see cref="PageTransition"/> is set.
    /// </summary>
    /// <param name="fromIndex">The index of the old page (-1 for none).</param>
    /// <param name="toIndex">The index of the new page (-1 for none).</param>
    /// <returns>A task tracking the animation.</returns>
    private async Task MoveToPage(int fromIndex, int toIndex)
    {
        if (fromIndex != toIndex)
        {
            var generator = ItemContainerGenerator;
            IControl from = null;
            IControl to = null;
            if (fromIndex != -1)
            {
                from = ItemContainerGenerator.ContainerFromIndex(fromIndex);
            }
            if (toIndex != -1)
            {
                // Materializes the target container on demand (virtualized mode).
                to = GetOrCreateContainer(toIndex);
            }
            if (PageTransition != null && (from != null || to != null))
            {
                // Direction of the animation follows the index ordering.
                await PageTransition.Start((Visual)from, (Visual)to, fromIndex < toIndex);
            }
            else if (to != null)
            {
                to.IsVisible = true;
            }
            if (from != null)
            {
                if (IsVirtualized)
                {
                    // Virtualized: drop the old page entirely.
                    Panel.Children.Remove(from);
                    generator.Dematerialize(fromIndex, 1);
                }
                else
                {
                    // Non-virtualized: just hide it, keeping the container alive.
                    from.IsVisible = false;
                }
            }
        }
    }
    /// <summary>
    /// Returns the realized container for <paramref name="index"/>,
    /// materializing it and adding it to the panel if necessary.
    /// </summary>
    private IControl GetOrCreateContainer(int index)
    {
        var container = ItemContainerGenerator.ContainerFromIndex(index);
        if (container == null)
        {
            var item = Items.Cast<object>().ElementAt(index);
            var materialized = ItemContainerGenerator.Materialize(index, item, MemberSelector);
            Panel.Children.Add(materialized.ContainerControl);
            container = materialized.ContainerControl;
        }
        return container;
    }
    /// <summary>
    /// Called when the <see cref="SelectedIndex"/> property changes.
    /// Serializes page transitions: if one is already running, the new target
    /// is queued (later requests overwrite earlier queued ones) and played
    /// after the current transition completes.
    /// </summary>
    /// <param name="e">The event args.</param>
    private async void SelectedIndexChanged(AvaloniaPropertyChangedEventArgs e)
    {
        if (Panel != null)
        {
            if (_currentTransition == null)
            {
                int fromIndex = (int)e.OldValue;
                int toIndex = (int)e.NewValue;
                for (;;)
                {
                    _currentTransition = MoveToPage(fromIndex, toIndex);
                    await _currentTransition;
                    if (_queuedTransitionIndex != -1)
                    {
                        // Chain into the queued transition.
                        fromIndex = toIndex;
                        toIndex = _queuedTransitionIndex;
                        _queuedTransitionIndex = -1;
                    }
                    else
                    {
                        _currentTransition = null;
                        break;
                    }
                }
            }
            else
            {
                // A transition is running: remember only the most recent target.
                _queuedTransitionIndex = (int)e.NewValue;
            }
        }
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Microsoft.VisualStudio.Debugger.Metadata;
using Roslyn.Utilities;
using Type = Microsoft.VisualStudio.Debugger.Metadata.Type;
namespace Microsoft.CodeAnalysis.ExpressionEvaluator
{
[Flags]
internal enum DeclarationInfo : byte
{
    /// <summary>
    /// A declaration with this name has not been encountered.
    /// </summary>
    None = 0,

    /// <summary>
    /// This member is defined on the declared type or one of its base classes.
    /// </summary>
    /// <remarks>
    /// Must be a distinct non-zero bit: a zero-valued flag would alias
    /// <see cref="None"/> and make <c>IsSet(FromDeclaredTypeOrBase)</c>
    /// vacuously true for every value.
    /// </remarks>
    FromDeclaredTypeOrBase = 1,

    /// <summary>
    /// This member is defined on a type that inherits from the declared type (is more derived).
    /// </summary>
    FromSubTypeOfDeclaredType = 1 << 1,

    /// <summary>
    /// This member should be hidden (under "Non-Public members" node), because Just My Code is on and
    /// no symbols have been loaded for the declaring type's module.
    /// </summary>
    HideNonPublic = 1 << 2,

    /// <summary>
    /// More than one non-virtual member with this name exists in the type hierarchy.
    /// The ResultProvider should include the declaring type of this member in the member name to disambiguate.
    /// </summary>
    IncludeTypeInMemberName = 1 << 3,

    /// <summary>
    /// The full name for this member access expression will require a cast to the declaring type.
    /// </summary>
    RequiresExplicitCast = 1 << 4,
}
/// <summary>
/// Helper extensions for testing <see cref="DeclarationInfo"/> flag bits.
/// </summary>
internal static class DeclarationInfoExtensions
{
    /// <summary>
    /// Returns true when every bit of <paramref name="value"/> is present in
    /// <paramref name="info"/>.
    /// </summary>
    internal static bool IsSet(this DeclarationInfo info, DeclarationInfo value)
    {
        DeclarationInfo masked = info & value;
        return masked == value;
    }
}
internal struct MemberAndDeclarationInfo
{
public static readonly IComparer<MemberAndDeclarationInfo> Comparer = new MemberNameComparer();
private readonly MemberInfo _member;
public readonly DkmClrDebuggerBrowsableAttributeState? BrowsableState;
public readonly bool HideNonPublic;
public readonly bool IncludeTypeInMemberName;
public readonly bool RequiresExplicitCast;
/// <summary>
/// Exists to correctly order fields with the same name from different types in the inheritance hierarchy.
/// </summary>
private readonly int _inheritanceLevel;
public MemberAndDeclarationInfo(MemberInfo member, DkmClrDebuggerBrowsableAttributeState? browsableState, DeclarationInfo info, int inheritanceLevel)
{
Debug.Assert(member != null);
_member = member;
this.BrowsableState = browsableState;
this.HideNonPublic = info.IsSet(DeclarationInfo.HideNonPublic);
this.IncludeTypeInMemberName = info.IsSet(DeclarationInfo.IncludeTypeInMemberName);
this.RequiresExplicitCast = info.IsSet(DeclarationInfo.RequiresExplicitCast);
_inheritanceLevel = inheritanceLevel;
}
public Type DeclaringType
{
get
{
return _member.DeclaringType;
}
}
public bool IsPublic
{
get
{
return _member.IsPublic();
}
}
public bool IsStatic
{
get
{
switch (_member.MemberType)
{
case MemberTypes.Field:
return ((FieldInfo)_member).IsStatic;
case MemberTypes.Property:
return ((PropertyInfo)_member).GetGetMethod(nonPublic: true).IsStatic;
default:
throw ExceptionUtilities.UnexpectedValue(_member.MemberType);
}
}
}
public MemberTypes MemberType
{
get
{
return _member.MemberType;
}
}
public string Name
{
get
{
return _member.Name;
}
}
public Type Type
{
get
{
return GetMemberType(_member);
}
}
public Type OriginalDefinitionType
{
get
{
return GetMemberType(_member.GetOriginalDefinition());
}
}
private static Type GetMemberType(MemberInfo member)
{
switch (member.MemberType)
{
case MemberTypes.Field:
return ((FieldInfo)member).FieldType;
case MemberTypes.Property:
return ((PropertyInfo)member).PropertyType;
default:
throw ExceptionUtilities.UnexpectedValue(member.MemberType);
}
}
/// <summary>
/// Custom type info derived from the member's custom attribute data.
/// Only fields and properties are supported.
/// </summary>
public DkmClrCustomTypeInfo TypeInfo
{
    get
    {
        var memberType = _member.MemberType;
        if (memberType != MemberTypes.Field && memberType != MemberTypes.Property)
        {
            // If we ever see a method, we'll have to use ReturnTypeCustomAttributes.
            throw ExceptionUtilities.UnexpectedValue(memberType);
        }

        return _member.GetCustomAttributesData().GetCustomTypeInfo();
    }
}
/// <summary>
/// If the wrapped property explicitly implements an interface member, returns the first such
/// interface type found and outputs the member name with the "Interface." prefix stripped.
/// Otherwise returns null and outputs the member's name unchanged.
/// </summary>
public Type GetExplicitlyImplementedInterface(out string memberName)
{
memberName = _member.Name;
// We only display fields and properties and fields never implement interface members.
if (_member.MemberType == MemberTypes.Property)
{
// A dot is neither necessary nor sufficient for determining whether a member explicitly
// implements an interface member, but it does characterize the set of members we're
// interested in displaying differently. For example, if the property is from VB, it will
// be an explicit interface implementation, but will not have a dot.
var dotPos = memberName.LastIndexOf('.');
if (dotPos >= 0)
{
var property = (PropertyInfo)_member;
var accessors = property.GetAccessors(nonPublic: true);
Debug.Assert(accessors.Length > 0);
// We'll just pick the first interface we find since we don't have a good way
// to display more than one.
foreach (var accessor in accessors)
{
foreach (var interfaceAccessor in accessor.GetExplicitInterfacesImplemented())
{
// Strip everything up to and including the last dot, leaving the bare member name.
memberName = memberName.Substring(dotPos + 1);
return interfaceAccessor.DeclaringType;
}
}
}
}
return null;
}
/// <summary>
/// Orders members by name (ordinal); ties between same-named members are broken by
/// inheritance level, with the greater _inheritanceLevel ordering first.
/// </summary>
private sealed class MemberNameComparer : IComparer<MemberAndDeclarationInfo>
{
    public int Compare(MemberAndDeclarationInfo x, MemberAndDeclarationInfo y)
    {
        int byName = string.Compare(x.Name, y.Name, StringComparison.Ordinal);
        if (byName != 0)
        {
            return byName;
        }

        // Same name: fall back to inheritance level so the ordering is deterministic.
        return y._inheritanceLevel - x._inheritanceLevel;
    }
}
}
}
| |
// <copyright file="LinearAlgebraProviderTests.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2015 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
using System;
using System.Collections.Generic;
using MathNet.Numerics.Distributions;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Factorization;
using MathNet.Numerics.LinearAlgebra.Single;
using MathNet.Numerics.Providers.LinearAlgebra;
using NUnit.Framework;
namespace MathNet.Numerics.UnitTests.LinearAlgebraProviderTests.Single
{
/// <summary>
/// Base class for linear algebra provider tests.
/// </summary>
[TestFixture, Category("LAProvider")]
public class LinearAlgebraProviderTests
{
/// <summary>
/// The Y float test vector.
/// </summary>
readonly float[] _y = {1.1f, 2.2f, 3.3f, 4.4f, 5.5f};
/// <summary>
/// The X float test vector.
/// </summary>
readonly float[] _x = {6.6f, 7.7f, 8.8f, 9.9f, 10.1f};
// Distribution used to fill the large random test matrices below.
static readonly IContinuousDistribution Dist = new Normal();
/// <summary>
/// Test matrix to use. Keys encode the matrix's character and dimensions
/// (e.g. "Tall3x2" is 3 rows by 2 columns; "Singular*" entries have linearly dependent rows/columns).
/// </summary>
readonly IDictionary<string, DenseMatrix> _matrices = new Dictionary<string, DenseMatrix>
{
{"Singular3x3", DenseMatrix.OfArray(new[,] {{1.0f, 1.0f, 2.0f}, {1.0f, 1.0f, 2.0f}, {1.0f, 1.0f, 2.0f}})},
{"Square3x3", DenseMatrix.OfArray(new[,] {{-1.1f, -2.2f, -3.3f}, {0.0f, 1.1f, 2.2f}, {-4.4f, 5.5f, 6.6f}})},
{"Square4x4", DenseMatrix.OfArray(new[,] {{-1.1f, -2.2f, -3.3f, -4.4f}, {0.0f, 1.1f, 2.2f, 3.3f}, {1.0f, 2.1f, 6.2f, 4.3f}, {-4.4f, 5.5f, 6.6f, -7.7f}})},
{"Singular4x4", DenseMatrix.OfArray(new[,] {{-1.1f, -2.2f, -3.3f, -4.4f}, {-1.1f, -2.2f, -3.3f, -4.4f}, {-1.1f, -2.2f, -3.3f, -4.4f}, {-1.1f, -2.2f, -3.3f, -4.4f}})},
{"Tall3x2", DenseMatrix.OfArray(new[,] {{-1.1f, -2.2f}, {0.0f, 1.1f}, {-4.4f, 5.5f}})},
{"Wide2x3", DenseMatrix.OfArray(new[,] {{-1.1f, -2.2f, -3.3f}, {0.0f, 1.1f, 2.2f}})},
{"Tall50000x10", DenseMatrix.CreateRandom(50000, 10, Dist)},
{"Wide10x50000", DenseMatrix.CreateRandom(10, 50000, Dist)},
{"Square1000x1000", DenseMatrix.CreateRandom(1000, 1000, Dist)}
};
/// <summary>
/// Can add a vector to a scaled vector, for scale factors 0, 1 and pi.
/// </summary>
[Test]
public void CanAddVectorToScaledVectorSingle()
{
    var actual = new float[_y.Length];

    // Scale factor 0: y + 0*x must reproduce y exactly.
    Control.LinearAlgebraProvider.AddVectorToScaledVector(_y, 0, _x, actual);
    for (var index = 0; index < _y.Length; index++)
    {
        Assert.AreEqual(_y[index], actual[index]);
    }

    // Scale factor 1: plain element-wise sum, computed in place.
    Array.Copy(_y, actual, _y.Length);
    Control.LinearAlgebraProvider.AddVectorToScaledVector(actual, 1, _x, actual);
    for (var index = 0; index < _y.Length; index++)
    {
        Assert.AreEqual(_y[index] + _x[index], actual[index]);
    }

    // Arbitrary scale factor (pi), checked with a relative tolerance.
    Array.Copy(_y, actual, _y.Length);
    Control.LinearAlgebraProvider.AddVectorToScaledVector(actual, (float) Math.PI, _x, actual);
    for (var index = 0; index < _y.Length; index++)
    {
        AssertHelpers.AlmostEqualRelative(_y[index] + ((float) Math.PI*_x[index]), actual[index], 5);
    }
}
/// <summary>
/// Can scale an array, both into a separate buffer and in place.
/// </summary>
[Test]
public void CanScaleArray()
{
    var scaled = new float[_y.Length];

    // Scaling by one must reproduce the source exactly.
    Control.LinearAlgebraProvider.ScaleArray(1, _y, scaled);
    for (var index = 0; index < _y.Length; index++)
    {
        Assert.AreEqual(_y[index], scaled[index]);
    }

    // Scaling by pi in place, verified with a relative tolerance.
    Array.Copy(_y, scaled, _y.Length);
    Control.LinearAlgebraProvider.ScaleArray((float) Math.PI, scaled, scaled);
    for (var index = 0; index < _y.Length; index++)
    {
        AssertHelpers.AlmostEqualRelative(_y[index]*(float) Math.PI, scaled[index], 5);
    }
}
/// <summary>
/// Can compute the dot product of the two test vectors.
/// </summary>
[Test]
public void CanComputeDotProduct()
{
    // Hand-computed: 1.1*6.6 + 2.2*7.7 + 3.3*8.8 + 4.4*9.9 + 5.5*10.1 = 152.35.
    var dot = Control.LinearAlgebraProvider.DotProduct(_x, _y);
    AssertHelpers.AlmostEqualRelative(152.35, dot, 5);
}
/// <summary>
/// Can add two arrays element-wise.
/// </summary>
[Test]
public void CanAddArrays()
{
    var sum = new float[_y.Length];
    Control.LinearAlgebraProvider.AddArrays(_x, _y, sum);

    for (var index = 0; index < sum.Length; index++)
    {
        Assert.AreEqual(_x[index] + _y[index], sum[index]);
    }
}
/// <summary>
/// Can subtract two arrays element-wise.
/// </summary>
[Test]
public void CanSubtractArrays()
{
    var difference = new float[_y.Length];
    Control.LinearAlgebraProvider.SubtractArrays(_x, _y, difference);

    for (var index = 0; index < difference.Length; index++)
    {
        Assert.AreEqual(_x[index] - _y[index], difference[index]);
    }
}
/// <summary>
/// Can pointwise multiply two arrays.
/// </summary>
[Test]
public void CanPointWiseMultiplyArrays()
{
    var product = new float[_y.Length];
    Control.LinearAlgebraProvider.PointWiseMultiplyArrays(_x, _y, product);

    for (var index = 0; index < product.Length; index++)
    {
        Assert.AreEqual(_x[index]*_y[index], product[index]);
    }
}
/// <summary>
/// Can pointwise divide two arrays (the _y divisors are all non-zero).
/// </summary>
[Test]
public void CanPointWiseDivideArrays()
{
    var quotient = new float[_y.Length];
    Control.LinearAlgebraProvider.PointWiseDivideArrays(_x, _y, quotient);

    for (var index = 0; index < quotient.Length; index++)
    {
        Assert.AreEqual(_x[index]/_y[index], quotient[index]);
    }
}
/// <summary>
/// Can compute the L1 (maximum absolute column sum) norm of a matrix.
/// </summary>
[Test]
public void CanComputeMatrixL1Norm()
{
    var matrix = _matrices["Square3x3"];

    // For Square3x3 the largest absolute column sum is 3.3 + 2.2 + 6.6 = 12.1.
    var computed = Control.LinearAlgebraProvider.MatrixNorm(Norm.OneNorm, matrix.RowCount, matrix.ColumnCount, matrix.Values);
    AssertHelpers.AlmostEqualRelative(12.1, computed, 5);
}
/// <summary>
/// Can compute the Frobenius (root of sum of squared entries) norm of a matrix.
/// </summary>
[Test]
public void CanComputeMatrixFrobeniusNorm()
{
    var matrix = _matrices["Square3x3"];

    var computed = Control.LinearAlgebraProvider.MatrixNorm(Norm.FrobeniusNorm, matrix.RowCount, matrix.ColumnCount, matrix.Values);
    AssertHelpers.AlmostEqual(10.777754868246, computed, 5);
}
/// <summary>
/// Can compute the infinity (maximum absolute row sum) norm of a matrix.
/// </summary>
[Test]
public void CanComputeMatrixInfinityNorm()
{
    var matrix = _matrices["Square3x3"];

    // For Square3x3 the largest absolute row sum is 4.4 + 5.5 + 6.6 = 16.5.
    var computed = Control.LinearAlgebraProvider.MatrixNorm(Norm.InfinityNorm, matrix.RowCount, matrix.ColumnCount, matrix.Values);
    Assert.AreEqual(16.5, computed);
}
/// <summary>
/// Can multiply two square matrices.
/// </summary>
[Test]
public void CanMultiplySquareMatrices()
{
    var left = _matrices["Singular3x3"];
    var right = _matrices["Square3x3"];
    var product = new DenseMatrix(left.RowCount, right.ColumnCount);
    Control.LinearAlgebraProvider.MatrixMultiply(left.Values, left.RowCount, left.ColumnCount, right.Values, right.RowCount, right.ColumnCount, product.Values);

    // Each entry must equal the dot product of the matching row and column.
    for (var row = 0; row < product.RowCount; row++)
    {
        for (var col = 0; col < product.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(left.Row(row)*right.Column(col), product[row, col], 5);
        }
    }
}
/// <summary>
/// Can multiply a wide matrix by a tall matrix.
/// </summary>
[Test]
public void CanMultiplyWideAndTallMatrices()
{
    var left = _matrices["Wide2x3"];
    var right = _matrices["Tall3x2"];
    var product = new DenseMatrix(left.RowCount, right.ColumnCount);
    Control.LinearAlgebraProvider.MatrixMultiply(left.Values, left.RowCount, left.ColumnCount, right.Values, right.RowCount, right.ColumnCount, product.Values);

    // Each entry must equal the dot product of the matching row and column.
    for (var row = 0; row < product.RowCount; row++)
    {
        for (var col = 0; col < product.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(left.Row(row)*right.Column(col), product[row, col], 5);
        }
    }
}
/// <summary>
/// Can multiply a tall matrix by a wide matrix.
/// </summary>
[Test]
public void CanMultiplyTallAndWideMatrices()
{
    var left = _matrices["Tall3x2"];
    var right = _matrices["Wide2x3"];
    var product = new DenseMatrix(left.RowCount, right.ColumnCount);
    Control.LinearAlgebraProvider.MatrixMultiply(left.Values, left.RowCount, left.ColumnCount, right.Values, right.RowCount, right.ColumnCount, product.Values);

    // Each entry must equal the dot product of the matching row and column.
    for (var row = 0; row < product.RowCount; row++)
    {
        for (var col = 0; col < product.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(left.Row(row)*right.Column(col), product[row, col], 5);
        }
    }
}
/// <summary>
/// Can multiply two square matrices via the alpha/beta update form (C = 2.2*A*B + 1.0*C, C starting at zero).
/// </summary>
[Test]
public void CanMultiplySquareMatricesWithUpdate()
{
    var left = _matrices["Singular3x3"];
    var right = _matrices["Square3x3"];
    var product = new DenseMatrix(left.RowCount, right.ColumnCount);
    Control.LinearAlgebraProvider.MatrixMultiplyWithUpdate(Transpose.DontTranspose, Transpose.DontTranspose, 2.2f, left.Values, left.RowCount, left.ColumnCount, right.Values, right.RowCount, right.ColumnCount, 1.0f, product.Values);

    for (var row = 0; row < product.RowCount; row++)
    {
        for (var col = 0; col < product.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(2.2f*left.Row(row)*right.Column(col), product[row, col], 5);
        }
    }
}
/// <summary>
/// Can multiply a wide matrix by a tall matrix via the alpha/beta update form.
/// </summary>
[Test]
public void CanMultiplyWideAndTallMatricesWithUpdate()
{
    var left = _matrices["Wide2x3"];
    var right = _matrices["Tall3x2"];
    var product = new DenseMatrix(left.RowCount, right.ColumnCount);
    Control.LinearAlgebraProvider.MatrixMultiplyWithUpdate(Transpose.DontTranspose, Transpose.DontTranspose, 2.2f, left.Values, left.RowCount, left.ColumnCount, right.Values, right.RowCount, right.ColumnCount, 1.0f, product.Values);

    for (var row = 0; row < product.RowCount; row++)
    {
        for (var col = 0; col < product.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(2.2f*left.Row(row)*right.Column(col), product[row, col], 5);
        }
    }
}
/// <summary>
/// Can multiply a tall matrix by a wide matrix via the alpha/beta update form.
/// </summary>
[Test]
public void CanMultiplyTallAndWideMatricesWithUpdate()
{
    var left = _matrices["Tall3x2"];
    var right = _matrices["Wide2x3"];
    var product = new DenseMatrix(left.RowCount, right.ColumnCount);
    Control.LinearAlgebraProvider.MatrixMultiplyWithUpdate(Transpose.DontTranspose, Transpose.DontTranspose, 2.2f, left.Values, left.RowCount, left.ColumnCount, right.Values, right.RowCount, right.ColumnCount, 1.0f, product.Values);

    for (var row = 0; row < product.RowCount; row++)
    {
        for (var col = 0; col < product.ColumnCount; col++)
        {
            var expected = 2.2f*left.Row(row)*right.Column(col);
            // Relative comparison is meaningless near zero, so skip entries where both sides vanish.
            if (Math.Abs(expected) < 1e-7 && Math.Abs(product[row, col]) < 1e-7)
            {
                continue;
            }
            AssertHelpers.AlmostEqualRelative(expected, product[row, col], 5);
        }
    }
}
/// <summary>
/// Can compute the LU factor of a matrix.
/// </summary>
[Test]
public void CanComputeLuFactor()
{
var matrix = _matrices["Square3x3"];
// Factor a copy so the shared test matrix is not overwritten in place.
var a = new float[matrix.RowCount*matrix.RowCount];
Array.Copy(matrix.Values, a, a.Length);
var ipiv = new int[matrix.RowCount];
Control.LinearAlgebraProvider.LUFactor(a, matrix.RowCount, ipiv);
// Expected combined L/U entries for Square3x3, precomputed externally
// (stored in the same flat layout as matrix.Values — presumably column-major; confirm).
AssertHelpers.AlmostEqual(a[0], -4.4, 5);
AssertHelpers.AlmostEqual(a[1], 0.25, 5);
AssertHelpers.AlmostEqual(a[2], 0, 5);
AssertHelpers.AlmostEqual(a[3], 5.5, 5);
AssertHelpers.AlmostEqual(a[4], -3.575, 5);
AssertHelpers.AlmostEqual(a[5], -0.307692307692308, 5);
AssertHelpers.AlmostEqual(a[6], 6.6, 5);
AssertHelpers.AlmostEqual(a[7], -4.95, 5);
AssertHelpers.AlmostEqual(a[8], 0.676923076923077, 5);
// Pivot indices produced by the partial-pivoting factorization.
Assert.AreEqual(ipiv[0], 2);
Assert.AreEqual(ipiv[1], 2);
Assert.AreEqual(ipiv[2], 2);
}
/// <summary>
/// Can compute the inverse of a matrix using LU factorization.
/// </summary>
[Test]
public void CanComputeLuInverse()
{
var matrix = _matrices["Square3x3"];
// Invert a copy in place so the shared test matrix is untouched.
var a = new float[matrix.RowCount*matrix.RowCount];
Array.Copy(matrix.Values, a, a.Length);
Control.LinearAlgebraProvider.LUInverse(a, matrix.RowCount);
// Expected inverse entries for Square3x3, precomputed externally.
AssertHelpers.AlmostEqual(a[0], -0.454545454545454, 5);
AssertHelpers.AlmostEqual(a[1], -0.909090909090908, 5);
AssertHelpers.AlmostEqual(a[2], 0.454545454545454, 5);
AssertHelpers.AlmostEqual(a[3], -0.340909090909090, 5);
AssertHelpers.AlmostEqual(a[4], -2.045454545454543, 5);
AssertHelpers.AlmostEqual(a[5], 1.477272727272726, 5);
AssertHelpers.AlmostEqual(a[6], -0.113636363636364, 5);
AssertHelpers.AlmostEqual(a[7], 0.227272727272727, 5);
AssertHelpers.AlmostEqual(a[8], -0.113636363636364, 5);
}
/// <summary>
/// Can compute the inverse of a matrix using LU factorization
/// using a previously factored matrix.
/// </summary>
[Test]
public void CanComputeLuInverseOnFactoredMatrix()
{
var matrix = _matrices["Square3x3"];
var a = new float[matrix.RowCount*matrix.RowCount];
Array.Copy(matrix.Values, a, a.Length);
var ipiv = new int[matrix.RowCount];
// Factor first, then invert from the factored form; the result must match CanComputeLuInverse.
Control.LinearAlgebraProvider.LUFactor(a, matrix.RowCount, ipiv);
Control.LinearAlgebraProvider.LUInverseFactored(a, matrix.RowCount, ipiv);
AssertHelpers.AlmostEqual(a[0], -0.454545454545454, 5);
AssertHelpers.AlmostEqual(a[1], -0.909090909090908, 5);
AssertHelpers.AlmostEqual(a[2], 0.454545454545454, 5);
AssertHelpers.AlmostEqual(a[3], -0.340909090909090, 5);
AssertHelpers.AlmostEqual(a[4], -2.045454545454543, 5);
AssertHelpers.AlmostEqual(a[5], 1.477272727272726, 5);
AssertHelpers.AlmostEqual(a[6], -0.113636363636364, 5);
AssertHelpers.AlmostEqual(a[7], 0.227272727272727, 5);
AssertHelpers.AlmostEqual(a[8], -0.113636363636364, 5);
}
/// <summary>
/// Can solve Ax=b using LU factorization.
/// </summary>
[Test]
public void CanSolveUsingLU()
{
var matrix = _matrices["Square3x3"];
var a = new float[matrix.RowCount*matrix.RowCount];
Array.Copy(matrix.Values, a, a.Length);
// b holds two right-hand-side columns of length 3; it is overwritten with the two solutions.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
Control.LinearAlgebraProvider.LUSolve(2, a, matrix.RowCount, b);
AssertHelpers.AlmostEqualRelative(b[0], -1.477272727272726, 5);
AssertHelpers.AlmostEqualRelative(b[1], -4.318181818181815, 5);
AssertHelpers.AlmostEqualRelative(b[2], 3.068181818181816, 5);
AssertHelpers.AlmostEqualRelative(b[3], -4.204545454545451, 5);
AssertHelpers.AlmostEqualRelative(b[4], -12.499999999999989, 5);
AssertHelpers.AlmostEqualRelative(b[5], 8.522727272727266, 5);
// NotModified is a fixture helper (defined elsewhere) — presumably verifies the provider left 'a' unchanged.
NotModified(matrix.RowCount, matrix.ColumnCount, a, matrix);
}
/// <summary>
/// Can solve Ax=b using LU factorization using a factored matrix.
/// </summary>
[Test]
public void CanSolveUsingLUOnFactoredMatrix()
{
var matrix = _matrices["Square3x3"];
var a = new float[matrix.RowCount*matrix.RowCount];
Array.Copy(matrix.Values, a, a.Length);
var ipiv = new int[matrix.RowCount];
Control.LinearAlgebraProvider.LUFactor(a, matrix.RowCount, ipiv);
// b holds two right-hand-side columns of length 3; solving overwrites it with the solutions,
// which must match those from CanSolveUsingLU.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
Control.LinearAlgebraProvider.LUSolveFactored(2, a, matrix.RowCount, ipiv, b);
AssertHelpers.AlmostEqualRelative(b[0], -1.477272727272726, 5);
AssertHelpers.AlmostEqualRelative(b[1], -4.318181818181815, 5);
AssertHelpers.AlmostEqualRelative(b[2], 3.068181818181816, 5);
AssertHelpers.AlmostEqualRelative(b[3], -4.204545454545451, 5);
AssertHelpers.AlmostEqualRelative(b[4], -12.499999999999989, 5);
AssertHelpers.AlmostEqualRelative(b[5], 8.522727272727266, 5);
}
/// <summary>
/// Can compute the <c>Cholesky</c> factorization.
/// </summary>
[Test]
public void CanComputeCholeskyFactor()
{
// Symmetric positive-definite 4x4 test matrix, stored flat.
var matrix = new float[] {1, 1, 1, 1, 1, 5, 5, 5, 1, 5, 14, 14, 1, 5, 14, 15};
Control.LinearAlgebraProvider.CholeskyFactor(matrix, 4);
// Expected triangular factor: columns [1,1,1,1], [0,2,2,2], [0,0,3,3], [0,0,0,1].
Assert.AreEqual(matrix[0], 1);
Assert.AreEqual(matrix[1], 1);
Assert.AreEqual(matrix[2], 1);
Assert.AreEqual(matrix[3], 1);
Assert.AreEqual(matrix[4], 0);
Assert.AreEqual(matrix[5], 2);
Assert.AreEqual(matrix[6], 2);
Assert.AreEqual(matrix[7], 2);
Assert.AreEqual(matrix[8], 0);
Assert.AreEqual(matrix[9], 0);
Assert.AreEqual(matrix[10], 3);
Assert.AreEqual(matrix[11], 3);
Assert.AreEqual(matrix[12], 0);
Assert.AreEqual(matrix[13], 0);
Assert.AreEqual(matrix[14], 0);
Assert.AreEqual(matrix[15], 1);
}
/// <summary>
/// Can solve Ax=b using Cholesky factorization.
/// </summary>
[Test]
public void CanSolveUsingCholesky()
{
// Symmetric positive-definite 3x3 system; 'matrix' keeps the original values for the NotModified check.
var matrix = new DenseMatrix(3, 3, new float[] {1, 1, 1, 1, 2, 3, 1, 3, 6});
var a = new float[] {1, 1, 1, 1, 2, 3, 1, 3, 6};
// b holds two right-hand-side columns of length 3; it is overwritten with the solutions.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
Control.LinearAlgebraProvider.CholeskySolve(a, 3, b, 2);
AssertHelpers.AlmostEqualRelative(b[0], 0, 5);
AssertHelpers.AlmostEqualRelative(b[1], 1, 5);
AssertHelpers.AlmostEqualRelative(b[2], 0, 5);
AssertHelpers.AlmostEqualRelative(b[3], 3, 5);
AssertHelpers.AlmostEqualRelative(b[4], 1, 5);
AssertHelpers.AlmostEqualRelative(b[5], 0, 5);
// NotModified is a fixture helper (defined elsewhere) — presumably verifies the provider left 'a' unchanged.
NotModified(3, 3, a, matrix);
}
/// <summary>
/// Can solve Ax=b using Cholesky factorization using a factored matrix.
/// (Summary previously said "LU factorization" — this test factors and solves with Cholesky.)
/// </summary>
[Test]
public void CanSolveUsingCholeskyOnFactoredMatrix()
{
var a = new float[] {1, 1, 1, 1, 2, 3, 1, 3, 6};
Control.LinearAlgebraProvider.CholeskyFactor(a, 3);
// Solve from the pre-factored form; results must match CanSolveUsingCholesky.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
Control.LinearAlgebraProvider.CholeskySolveFactored(a, 3, b, 2);
AssertHelpers.AlmostEqualRelative(b[0], 0, 5);
AssertHelpers.AlmostEqualRelative(b[1], 1, 5);
AssertHelpers.AlmostEqualRelative(b[2], 0, 5);
AssertHelpers.AlmostEqualRelative(b[3], 3, 5);
AssertHelpers.AlmostEqualRelative(b[4], 1, 5);
AssertHelpers.AlmostEqualRelative(b[5], 0, 5);
}
/// <summary>
/// Can compute the (full) QR factorization of a square matrix.
/// </summary>
[Test]
public void CanComputeQRFactorSquareMatrix()
{
    var matrix = _matrices["Square3x3"];

    // QRFactor overwrites the input copy with R and fills qWork with Q.
    var rWork = new float[matrix.RowCount*matrix.ColumnCount];
    Array.Copy(matrix.Values, rWork, rWork.Length);
    var tau = new float[3];
    var qWork = new float[matrix.RowCount*matrix.RowCount];
    Control.LinearAlgebraProvider.QRFactor(rWork, matrix.RowCount, matrix.ColumnCount, qWork, tau);

    // Reassembling Q*R must reproduce the original matrix entry by entry.
    var q = new DenseMatrix(matrix.RowCount, matrix.RowCount, qWork);
    var r = new DenseMatrix(matrix.RowCount, matrix.ColumnCount, rWork).UpperTriangle();
    var reconstructed = q*r;
    for (var row = 0; row < matrix.RowCount; row++)
    {
        for (var col = 0; col < matrix.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(matrix[row, col], reconstructed[row, col], 5);
        }
    }
}
/// <summary>
/// Can compute the (full) QR factorization of a tall matrix.
/// </summary>
[Test]
public void CanComputeQRFactorTallMatrix()
{
    var matrix = _matrices["Tall3x2"];

    // QRFactor overwrites the input copy with R and fills qWork with Q.
    var rWork = new float[matrix.RowCount*matrix.ColumnCount];
    Array.Copy(matrix.Values, rWork, rWork.Length);
    var tau = new float[3];
    var qWork = new float[matrix.RowCount*matrix.RowCount];
    Control.LinearAlgebraProvider.QRFactor(rWork, matrix.RowCount, matrix.ColumnCount, qWork, tau);

    // Reassembling Q*R must reproduce the original matrix entry by entry.
    var r = new DenseMatrix(matrix.RowCount, matrix.ColumnCount, rWork).UpperTriangle();
    var q = new DenseMatrix(matrix.RowCount, matrix.RowCount, qWork);
    var reconstructed = q*r;
    for (var row = 0; row < matrix.RowCount; row++)
    {
        for (var col = 0; col < matrix.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(matrix[row, col], reconstructed[row, col], 5);
        }
    }
}
/// <summary>
/// Can compute the (full) QR factorization of a wide matrix.
/// </summary>
[Test]
public void CanComputeQRFactorWideMatrix()
{
    var matrix = _matrices["Wide2x3"];

    // QRFactor overwrites the input copy with R and fills qWork with Q.
    var rWork = new float[matrix.RowCount*matrix.ColumnCount];
    Array.Copy(matrix.Values, rWork, rWork.Length);
    var tau = new float[3];
    var qWork = new float[matrix.RowCount*matrix.RowCount];
    Control.LinearAlgebraProvider.QRFactor(rWork, matrix.RowCount, matrix.ColumnCount, qWork, tau);

    // Reassembling Q*R must reproduce the original matrix entry by entry.
    var r = new DenseMatrix(matrix.RowCount, matrix.ColumnCount, rWork).UpperTriangle();
    var q = new DenseMatrix(matrix.RowCount, matrix.RowCount, qWork);
    var reconstructed = q*r;
    for (var row = 0; row < matrix.RowCount; row++)
    {
        for (var col = 0; col < matrix.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(matrix[row, col], reconstructed[row, col], 5);
        }
    }
}
/// <summary>
/// Can compute the thin QR factorization of a square matrix.
/// </summary>
[Test]
public void CanComputeThinQRFactorSquareMatrix()
{
    var matrix = _matrices["Square3x3"];

    // ThinQRFactor overwrites the input copy with Q and fills rWork with R.
    var rWork = new float[matrix.ColumnCount*matrix.ColumnCount];
    var tau = new float[3];
    var qWork = new float[matrix.RowCount*matrix.ColumnCount];
    Array.Copy(matrix.Values, qWork, qWork.Length);
    Control.LinearAlgebraProvider.ThinQRFactor(qWork, matrix.RowCount, matrix.ColumnCount, rWork, tau);

    // Reassembling Q*R must reproduce the original matrix entry by entry.
    var q = new DenseMatrix(matrix.RowCount, matrix.ColumnCount, qWork);
    var r = new DenseMatrix(matrix.ColumnCount, matrix.ColumnCount, rWork);
    var reconstructed = q*r;
    for (var row = 0; row < matrix.RowCount; row++)
    {
        for (var col = 0; col < matrix.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(matrix[row, col], reconstructed[row, col], 5);
        }
    }
}
/// <summary>
/// Can compute the thin QR factorization of a tall matrix.
/// </summary>
[Test]
public void CanComputeThinQRFactorTallMatrix()
{
    var matrix = _matrices["Tall3x2"];

    // ThinQRFactor overwrites the input copy with Q and fills rWork with R.
    var rWork = new float[matrix.ColumnCount*matrix.ColumnCount];
    var tau = new float[3];
    var qWork = new float[matrix.RowCount*matrix.ColumnCount];
    Array.Copy(matrix.Values, qWork, qWork.Length);
    Control.LinearAlgebraProvider.ThinQRFactor(qWork, matrix.RowCount, matrix.ColumnCount, rWork, tau);

    // Reassembling Q*R must reproduce the original matrix entry by entry.
    var q = new DenseMatrix(matrix.RowCount, matrix.ColumnCount, qWork);
    var r = new DenseMatrix(matrix.ColumnCount, matrix.ColumnCount, rWork);
    var reconstructed = q*r;
    for (var row = 0; row < matrix.RowCount; row++)
    {
        for (var col = 0; col < matrix.ColumnCount; col++)
        {
            AssertHelpers.AlmostEqualRelative(matrix[row, col], reconstructed[row, col], 5);
        }
    }
}
/// <summary>
/// Can solve Ax=b using QR factorization with a square A matrix.
/// </summary>
[Test]
public void CanSolveUsingQRSquareMatrix()
{
var matrix = _matrices["Square3x3"];
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
// b holds two right-hand-side columns of length 3; x receives the two solution columns.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var x = new float[matrix.ColumnCount*2];
Control.LinearAlgebraProvider.QRSolve(a, matrix.RowCount, matrix.ColumnCount, b, 2, x);
// NotModified is a fixture helper (defined elsewhere) — presumably verifies the provider left 'a' unchanged.
NotModified(3, 3, a, matrix);
// Multiply the solutions back through A; the products must reproduce b.
var mx = new DenseMatrix(matrix.ColumnCount, 2, x);
var mb = matrix*mx;
AssertHelpers.AlmostEqualRelative(mb[0, 0], b[0], 5);
AssertHelpers.AlmostEqualRelative(mb[1, 0], b[1], 5);
AssertHelpers.AlmostEqualRelative(mb[2, 0], b[2], 5);
AssertHelpers.AlmostEqualRelative(mb[0, 1], b[3], 5);
AssertHelpers.AlmostEqualRelative(mb[1, 1], b[4], 4);
AssertHelpers.AlmostEqualRelative(mb[2, 1], b[5], 4);
}
/// <summary>
/// Can solve Ax=b using QR factorization with a tall A matrix.
/// </summary>
[Test]
public void CanSolveUsingQRTallMatrix()
{
var matrix = _matrices["Tall3x2"];
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var x = new float[matrix.ColumnCount*2];
Control.LinearAlgebraProvider.QRSolve(a, matrix.RowCount, matrix.ColumnCount, b, 2, x);
// NotModified is a fixture helper (defined elsewhere) — presumably verifies the provider left 'a' unchanged.
NotModified(3, 2, a, matrix);
// Overdetermined system: compare against the normal-equations least-squares solution (A^T A)^-1 A^T b.
var mb = new DenseMatrix(matrix.RowCount, 2, b);
var test = (matrix.Transpose()*matrix).Inverse()*matrix.Transpose()*mb;
AssertHelpers.AlmostEqualRelative(test[0, 0], x[0], 5);
AssertHelpers.AlmostEqualRelative(test[1, 0], x[1], 5);
AssertHelpers.AlmostEqualRelative(test[0, 1], x[2], 5);
AssertHelpers.AlmostEqualRelative(test[1, 1], x[3], 5);
}
/// <summary>
/// Can solve Ax=b using QR factorization with a square A matrix
/// using a factored A matrix.
/// </summary>
[Test]
public void CanSolveUsingQRSquareMatrixOnFactoredMatrix()
{
var matrix = _matrices["Square3x3"];
// RowCount and ColumnCount coincide for this square input, so the mixed sizing below is equivalent.
var a = new float[matrix.RowCount*matrix.RowCount];
Array.Copy(matrix.Values, a, a.Length);
var tau = new float[matrix.ColumnCount];
var q = new float[matrix.ColumnCount*matrix.ColumnCount];
Control.LinearAlgebraProvider.QRFactor(a, matrix.RowCount, matrix.ColumnCount, q, tau);
// Solve from the pre-factored form, then multiply the solutions back through A to reproduce b.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var x = new float[matrix.ColumnCount*2];
Control.LinearAlgebraProvider.QRSolveFactored(q, a, matrix.RowCount, matrix.ColumnCount, tau, b, 2, x);
var mx = new DenseMatrix(matrix.ColumnCount, 2, x);
var mb = matrix*mx;
AssertHelpers.AlmostEqualRelative(mb[0, 0], b[0], 5);
AssertHelpers.AlmostEqualRelative(mb[1, 0], b[1], 5);
AssertHelpers.AlmostEqualRelative(mb[2, 0], b[2], 5);
AssertHelpers.AlmostEqualRelative(mb[0, 1], b[3], 5);
AssertHelpers.AlmostEqualRelative(mb[1, 1], b[4], 4);
AssertHelpers.AlmostEqualRelative(mb[2, 1], b[5], 4);
}
/// <summary>
/// Can solve Ax=b using QR factorization with a tall A matrix
/// using a factored A matrix.
/// </summary>
[Test]
public void CanSolveUsingQRTallMatrixOnFactoredMatrix()
{
var matrix = _matrices["Tall3x2"];
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
var tau = new float[matrix.ColumnCount];
var q = new float[matrix.RowCount*matrix.RowCount];
Control.LinearAlgebraProvider.QRFactor(a, matrix.RowCount, matrix.ColumnCount, q, tau);
// Solve from the pre-factored form.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var x = new float[matrix.ColumnCount*2];
Control.LinearAlgebraProvider.QRSolveFactored(q, a, matrix.RowCount, matrix.ColumnCount, tau, b, 2, x);
// Overdetermined system: compare against the normal-equations least-squares solution (A^T A)^-1 A^T b.
var mb = new DenseMatrix(matrix.RowCount, 2, b);
var test = (matrix.Transpose()*matrix).Inverse()*matrix.Transpose()*mb;
AssertHelpers.AlmostEqualRelative(test[0, 0], x[0], 5);
AssertHelpers.AlmostEqualRelative(test[1, 0], x[1], 5);
AssertHelpers.AlmostEqualRelative(test[0, 1], x[2], 5);
AssertHelpers.AlmostEqualRelative(test[1, 1], x[3], 5);
}
/// <summary>
/// Can solve Ax=b using thin QR factorization with a square A matrix.
/// </summary>
[Test]
public void CanSolveUsingThinQRSquareMatrix()
{
var matrix = _matrices["Square3x3"];
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
// b holds two right-hand-side columns of length 3; x receives the two solution columns.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var x = new float[matrix.ColumnCount*2];
Control.LinearAlgebraProvider.QRSolve(a, matrix.RowCount, matrix.ColumnCount, b, 2, x, QRMethod.Thin);
// NotModified is a fixture helper (defined elsewhere) — presumably verifies the provider left 'a' unchanged.
NotModified(3, 3, a, matrix);
// Multiply the solutions back through A; the products must reproduce b.
var mx = new DenseMatrix(matrix.ColumnCount, 2, x);
var mb = matrix*mx;
AssertHelpers.AlmostEqualRelative(mb[0, 0], b[0], 5);
AssertHelpers.AlmostEqualRelative(mb[1, 0], b[1], 5);
AssertHelpers.AlmostEqualRelative(mb[2, 0], b[2], 5);
AssertHelpers.AlmostEqualRelative(mb[0, 1], b[3], 5);
AssertHelpers.AlmostEqualRelative(mb[1, 1], b[4], 4);
AssertHelpers.AlmostEqualRelative(mb[2, 1], b[5], 4);
}
/// <summary>
/// Can solve Ax=b using thin QR factorization with a tall A matrix.
/// </summary>
[Test]
public void CanSolveUsingThinQRTallMatrix()
{
var matrix = _matrices["Tall3x2"];
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var x = new float[matrix.ColumnCount*2];
Control.LinearAlgebraProvider.QRSolve(a, matrix.RowCount, matrix.ColumnCount, b, 2, x, QRMethod.Thin);
// NotModified is a fixture helper (defined elsewhere) — presumably verifies the provider left 'a' unchanged.
NotModified(3, 2, a, matrix);
// Overdetermined system: compare against the normal-equations least-squares solution (A^T A)^-1 A^T b.
var mb = new DenseMatrix(matrix.RowCount, 2, b);
var test = (matrix.Transpose()*matrix).Inverse()*matrix.Transpose()*mb;
AssertHelpers.AlmostEqualRelative(test[0, 0], x[0], 5);
AssertHelpers.AlmostEqualRelative(test[1, 0], x[1], 5);
AssertHelpers.AlmostEqualRelative(test[0, 1], x[2], 5);
AssertHelpers.AlmostEqualRelative(test[1, 1], x[3], 5);
}
/// <summary>
/// Can solve Ax=b using thin QR factorization with a square A matrix
/// using a factored A matrix.
/// </summary>
[Test]
public void CanSolveUsingThinQRSquareMatrixOnFactoredMatrix()
{
var matrix = _matrices["Square3x3"];
// After ThinQRFactor, 'a' holds Q and 'r' holds R.
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
var tau = new float[matrix.ColumnCount];
var r = new float[matrix.ColumnCount*matrix.ColumnCount];
Control.LinearAlgebraProvider.ThinQRFactor(a, matrix.RowCount, matrix.ColumnCount, r, tau);
// Solve from the pre-factored form, then multiply the solutions back through A to reproduce b.
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var x = new float[matrix.ColumnCount*2];
Control.LinearAlgebraProvider.QRSolveFactored(a, r, matrix.RowCount, matrix.ColumnCount, tau, b, 2, x, QRMethod.Thin);
var mx = new DenseMatrix(matrix.ColumnCount, 2, x);
var mb = matrix*mx;
AssertHelpers.AlmostEqualRelative(mb[0, 0], b[0], 5);
AssertHelpers.AlmostEqualRelative(mb[1, 0], b[1], 5);
AssertHelpers.AlmostEqualRelative(mb[2, 0], b[2], 5);
AssertHelpers.AlmostEqualRelative(mb[0, 1], b[3], 5);
AssertHelpers.AlmostEqualRelative(mb[1, 1], b[4], 4);
AssertHelpers.AlmostEqualRelative(mb[2, 1], b[5], 4);
}
/// <summary>
/// Can solve Ax=b using thin QR factorization with a tall A matrix
/// using a factored A matrix.
/// </summary>
[Test]
public void CanSolveUsingThinQRTallMatrixOnFactoredMatrix()
{
var matrix = _matrices["Tall3x2"];
// After ThinQRFactor, 'a' holds Q and 'r' holds R.
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
var tau = new float[matrix.ColumnCount];
var r = new float[matrix.ColumnCount*matrix.ColumnCount];
Control.LinearAlgebraProvider.ThinQRFactor(a, matrix.RowCount, matrix.ColumnCount, r, tau);
var b = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var x = new float[matrix.ColumnCount*2];
Control.LinearAlgebraProvider.QRSolveFactored(a, r, matrix.RowCount, matrix.ColumnCount, tau, b, 2, x, QRMethod.Thin);
// Overdetermined system: compare against the normal-equations least-squares solution (A^T A)^-1 A^T b.
var mb = new DenseMatrix(matrix.RowCount, 2, b);
var test = (matrix.Transpose()*matrix).Inverse()*matrix.Transpose()*mb;
AssertHelpers.AlmostEqualRelative(test[0, 0], x[0], 5);
AssertHelpers.AlmostEqualRelative(test[1, 0], x[1], 5);
AssertHelpers.AlmostEqualRelative(test[0, 1], x[2], 5);
AssertHelpers.AlmostEqualRelative(test[1, 1], x[3], 5);
}
/// <summary>
/// Can compute the SVD factorization of a square matrix.
/// </summary>
[Test]
public void CanComputeSVDFactorizationOfSquareMatrix()
{
var matrix = _matrices["Square3x3"];
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
// s: singular values; u: left singular vectors; vt: right singular vectors, already transposed.
var s = new float[matrix.RowCount];
var u = new float[matrix.RowCount*matrix.RowCount];
var vt = new float[matrix.ColumnCount*matrix.ColumnCount];
Control.LinearAlgebraProvider.SingularValueDecomposition(true, a, matrix.RowCount, matrix.ColumnCount, s, u, vt);
// Place the singular values on the diagonal of W so U*W*V^T can be reassembled.
var w = new DenseMatrix(matrix.RowCount, matrix.ColumnCount);
for (var index = 0; index < s.Length; index++)
{
w[index, index] = s[index];
}
var mU = new DenseMatrix(matrix.RowCount, matrix.RowCount, u);
// vt already holds V transposed, so it is used directly in the product.
var mV = new DenseMatrix(matrix.ColumnCount, matrix.ColumnCount, vt);
var result = mU*w*mV;
// The reassembled product must reproduce the original matrix.
AssertHelpers.AlmostEqualRelative(matrix[0, 0], result[0, 0], 5);
AssertHelpers.AlmostEqualRelative(matrix[1, 0], result[1, 0], 5);
AssertHelpers.AlmostEqualRelative(matrix[2, 0], result[2, 0], 5);
AssertHelpers.AlmostEqualRelative(matrix[0, 1], result[0, 1], 5);
AssertHelpers.AlmostEqualRelative(matrix[1, 1], result[1, 1], 5);
AssertHelpers.AlmostEqualRelative(matrix[2, 1], result[2, 1], 5);
AssertHelpers.AlmostEqualRelative(matrix[0, 2], result[0, 2], 5);
AssertHelpers.AlmostEqualRelative(matrix[1, 2], result[1, 2], 5);
AssertHelpers.AlmostEqualRelative(matrix[2, 2], result[2, 2], 5);
}
/// <summary>
/// Can compute the SVD factorization of a tall matrix.
/// </summary>
[Test]
public void CanComputeSVDFactorizationOfTallMatrix()
{
var matrix = _matrices["Tall3x2"];
var a = new float[matrix.RowCount*matrix.ColumnCount];
Array.Copy(matrix.Values, a, a.Length);
// Tall matrix: only ColumnCount singular values exist.
var s = new float[matrix.ColumnCount];
var u = new float[matrix.RowCount*matrix.RowCount];
var vt = new float[matrix.ColumnCount*matrix.ColumnCount];
Control.LinearAlgebraProvider.SingularValueDecomposition(true, a, matrix.RowCount, matrix.ColumnCount, s, u, vt);
// Place the singular values on the diagonal of W so U*W*V^T can be reassembled.
var w = new DenseMatrix(matrix.RowCount, matrix.ColumnCount);
for (var index = 0; index < s.Length; index++)
{
w[index, index] = s[index];
}
var mU = new DenseMatrix(matrix.RowCount, matrix.RowCount, u);
// vt already holds V transposed, so it is used directly in the product.
var mV = new DenseMatrix(matrix.ColumnCount, matrix.ColumnCount, vt);
var result = mU*w*mV;
// The reassembled product must reproduce the original matrix.
AssertHelpers.AlmostEqualRelative(matrix[0, 0], result[0, 0], 5);
AssertHelpers.AlmostEqualRelative(matrix[1, 0], result[1, 0], 5);
AssertHelpers.AlmostEqualRelative(matrix[2, 0], result[2, 0], 5);
AssertHelpers.AlmostEqualRelative(matrix[0, 1], result[0, 1], 5);
AssertHelpers.AlmostEqualRelative(matrix[1, 1], result[1, 1], 5);
AssertHelpers.AlmostEqualRelative(matrix[2, 1], result[2, 1], 5);
}
/// <summary>
/// Verifies the provider's SVD of a wide (columns &gt; rows) matrix by
/// reconstructing A = U*S*Vt and comparing every entry against the original.
/// </summary>
[Test]
public void CanComputeSVDFactorizationOfWideMatrix()
{
var source = _matrices["Wide2x3"];
var packed = new float[source.RowCount*source.ColumnCount];
Array.Copy(source.Values, packed, packed.Length);
// A wide matrix has min(rows, columns) = RowCount singular values.
var singularValues = new float[source.RowCount];
var leftVectors = new float[source.RowCount*source.RowCount];
var rightVectorsT = new float[source.ColumnCount*source.ColumnCount];
Control.LinearAlgebraProvider.SingularValueDecomposition(true, packed, source.RowCount, source.ColumnCount, singularValues, leftVectors, rightVectorsT);
var sigma = new DenseMatrix(source.RowCount, source.ColumnCount);
for (var i = 0; i < singularValues.Length; i++)
{
sigma[i, i] = singularValues[i];
}
var u = new DenseMatrix(source.RowCount, source.RowCount, leftVectors);
var vt = new DenseMatrix(source.ColumnCount, source.ColumnCount, rightVectorsT);
var reconstructed = u*sigma*vt;
// Every entry of U*S*Vt must match the source matrix to 5 digits.
for (var column = 0; column < source.ColumnCount; column++)
{
for (var row = 0; row < source.RowCount; row++)
{
AssertHelpers.AlmostEqualRelative(source[row, column], reconstructed[row, column], 5);
}
}
}
/// <summary>
/// Can solve Ax=b for two right-hand sides via the provider's one-shot SVD solve,
/// with a square A matrix. Verifies A*x reproduces b and that A was not modified.
/// </summary>
[Test]
public void CanSolveUsingSVDSquareMatrix()
{
var coefficients = _matrices["Square3x3"];
var packed = new float[coefficients.RowCount*coefficients.ColumnCount];
Array.Copy(coefficients.Values, packed, packed.Length);
var rhs = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var solution = new float[coefficients.ColumnCount*2];
Control.LinearAlgebraProvider.SvdSolve(packed, coefficients.RowCount, coefficients.ColumnCount, rhs, 2, solution);
// The solver must leave the input matrix untouched.
NotModified(3, 3, packed, coefficients);
var solutionMatrix = new DenseMatrix(coefficients.ColumnCount, 2, solution);
var reconstructedRhs = coefficients*solutionMatrix;
// Later entries are checked with a looser tolerance (4 digits instead of 5).
AssertHelpers.AlmostEqual(reconstructedRhs[0, 0], rhs[0], 5);
AssertHelpers.AlmostEqual(reconstructedRhs[1, 0], rhs[1], 5);
AssertHelpers.AlmostEqual(reconstructedRhs[2, 0], rhs[2], 4);
AssertHelpers.AlmostEqual(reconstructedRhs[0, 1], rhs[3], 4);
AssertHelpers.AlmostEqual(reconstructedRhs[1, 1], rhs[4], 4);
AssertHelpers.AlmostEqual(reconstructedRhs[2, 1], rhs[5], 4);
}
/// <summary>
/// Can solve the least-squares problem Ax=b via the provider's one-shot SVD solve,
/// with a tall A matrix. The result is compared against the normal-equations
/// solution (A'A)^-1 A' b.
/// </summary>
[Test]
public void CanSolveUsingSVDTallMatrix()
{
var coefficients = _matrices["Tall3x2"];
var packed = new float[coefficients.RowCount*coefficients.ColumnCount];
Array.Copy(coefficients.Values, packed, packed.Length);
var rhs = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var solution = new float[coefficients.ColumnCount*2];
Control.LinearAlgebraProvider.SvdSolve(packed, coefficients.RowCount, coefficients.ColumnCount, rhs, 2, solution);
// The solver must leave the input matrix untouched.
NotModified(3, 2, packed, coefficients);
var rhsMatrix = new DenseMatrix(coefficients.RowCount, 2, rhs);
var expected = (coefficients.Transpose()*coefficients).Inverse()*coefficients.Transpose()*rhsMatrix;
// The solution array is packed column-major: solution[column*2 + row].
for (var column = 0; column < 2; column++)
{
for (var row = 0; row < 2; row++)
{
AssertHelpers.AlmostEqual(expected[row, column], solution[(column*2) + row], 5);
}
}
}
/// <summary>
/// Can solve Ax=b using a previously computed SVD factorization of a square A
/// matrix. Verifies A*x reproduces b.
/// </summary>
[Test]
public void CanSolveUsingSVDSquareMatrixOnFactoredMatrix()
{
var coefficients = _matrices["Square3x3"];
var packed = new float[coefficients.RowCount*coefficients.ColumnCount];
Array.Copy(coefficients.Values, packed, packed.Length);
// Factor first, then solve against the stored factors.
var singularValues = new float[coefficients.RowCount];
var leftVectors = new float[coefficients.RowCount*coefficients.RowCount];
var rightVectorsT = new float[coefficients.ColumnCount*coefficients.ColumnCount];
Control.LinearAlgebraProvider.SingularValueDecomposition(true, packed, coefficients.RowCount, coefficients.ColumnCount, singularValues, leftVectors, rightVectorsT);
var rhs = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var solution = new float[coefficients.ColumnCount*2];
Control.LinearAlgebraProvider.SvdSolveFactored(coefficients.RowCount, coefficients.ColumnCount, singularValues, leftVectors, rightVectorsT, rhs, 2, solution);
var solutionMatrix = new DenseMatrix(coefficients.ColumnCount, 2, solution);
var reconstructedRhs = coefficients*solutionMatrix;
// Later entries are checked with a looser tolerance (4 digits instead of 5).
AssertHelpers.AlmostEqual(reconstructedRhs[0, 0], rhs[0], 5);
AssertHelpers.AlmostEqual(reconstructedRhs[1, 0], rhs[1], 5);
AssertHelpers.AlmostEqual(reconstructedRhs[2, 0], rhs[2], 4);
AssertHelpers.AlmostEqual(reconstructedRhs[0, 1], rhs[3], 4);
AssertHelpers.AlmostEqual(reconstructedRhs[1, 1], rhs[4], 4);
AssertHelpers.AlmostEqual(reconstructedRhs[2, 1], rhs[5], 4);
}
/// <summary>
/// Can solve the least-squares problem Ax=b using a previously computed SVD
/// factorization of a tall A matrix. The result is compared against the
/// normal-equations solution (A'A)^-1 A' b.
/// </summary>
[Test]
public void CanSolveUsingSVDTallMatrixOnFactoredMatrix()
{
var coefficients = _matrices["Tall3x2"];
var packed = new float[coefficients.RowCount*coefficients.ColumnCount];
Array.Copy(coefficients.Values, packed, packed.Length);
// Factor first, then solve against the stored factors.
var singularValues = new float[coefficients.ColumnCount];
var leftVectors = new float[coefficients.RowCount*coefficients.RowCount];
var rightVectorsT = new float[coefficients.ColumnCount*coefficients.ColumnCount];
Control.LinearAlgebraProvider.SingularValueDecomposition(true, packed, coefficients.RowCount, coefficients.ColumnCount, singularValues, leftVectors, rightVectorsT);
var rhs = new[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
var solution = new float[coefficients.ColumnCount*2];
Control.LinearAlgebraProvider.SvdSolveFactored(coefficients.RowCount, coefficients.ColumnCount, singularValues, leftVectors, rightVectorsT, rhs, 2, solution);
var rhsMatrix = new DenseMatrix(coefficients.RowCount, 2, rhs);
var expected = (coefficients.Transpose()*coefficients).Inverse()*coefficients.Transpose()*rhsMatrix;
// The solution array is packed column-major: solution[column*2 + row].
for (var column = 0; column < 2; column++)
{
for (var row = 0; row < 2; row++)
{
AssertHelpers.AlmostEqualRelative(expected[row, column], solution[(column*2) + row], 5);
}
}
}
/// <summary>
/// Smoke-checks that matrix multiplication of large operands completes within
/// the five-second timeout. Explicit: only runs when requested.
/// </summary>
[TestCase("Wide10x50000", "Tall50000x10")]
[TestCase("Square1000x1000", "Square1000x1000")]
[Explicit, Timeout(1000*5)]
public void IsMatrixMultiplicationPerformant(string leftMatrixKey, string rightMatrixKey)
{
var left = _matrices[leftMatrixKey];
var right = _matrices[rightMatrixKey];
var product = left*right;
Assert.That(product, Is.Not.Null);
}
/// <summary>
/// Asserts that a column-major packed array still holds exactly the same values
/// as the given matrix (i.e. the provider did not modify its input).
/// </summary>
/// <param name="rows">number of rows.</param>
/// <param name="columns">number of columns.</param>
/// <param name="array">column-major packed array to check.</param>
/// <param name="matrix">matrix to check against.</param>
static void NotModified(int rows, int columns, IList<float> array, Matrix<float> matrix)
{
// The array is packed column-by-column, so walk columns in the outer loop.
var offset = 0;
for (var column = 0; column < columns; column++)
{
for (var row = 0; row < rows; row++)
{
Assert.AreEqual(array[offset], matrix[row, column]);
offset++;
}
}
}
}
}
| |
// ============================================================
// Name: UMADNAToBonePoseWindow
// Author: Eli Curtz
// Copyright: (c) 2016 Eli Curtz
// ============================================================
#if UNITY_EDITOR
using UnityEngine;
using UnityEditor;
using UMA;
namespace UMA.PoseTools
{
/// <summary>
/// Editor window that samples a source UMA character's DNA converter and bakes
/// the resulting skeleton differences into a set of UMABonePose assets, then
/// builds a MorphSetDnaAsset and converter prefab from them.
/// </summary>
public class UMADNAToBonePoseWindow : EditorWindow
{
// UMA character in an active scene whose DNA is sampled.
public UMAData sourceUMA;
// Project folder asset the generated pose set is written under.
public UnityEngine.Object outputFolder;
private string folderPath = "";
// Temporary avatar rebuilt with NO DNA applied — the reference skeleton.
private GameObject tempAvatarPreDNA;
// Temporary avatar with the selected DNA converter applied — the sampled skeleton.
private GameObject tempAvatarPostDNA;
// Set when a DNA value changed and the post-DNA avatar needs a rebuild next Update().
private bool avatarDNAisDirty = false;
private int selectedDNAIndex = -1;
private int selectedDNAHash = 0;
// State of the pose-saving sequence: -1 = starting pose not yet saved,
// [0, dnaCount) = saving "_0" poses, [dnaCount, 2*dnaCount) = saving "_1" poses.
private int poseSaveIndex = -1;
const string startingPoseName = "StartingPose";
private string poseSaveName = startingPoseName;
private static GUIContent sourceGUIContent = new GUIContent(
"Source UMA",
"UMA character in an active scene to collect DNA poses from.");
private static GUIContent converterGUIContent = new GUIContent(
"DNA Converter",
"DNA Converter Behavior being converted to poses.");
private static GUIContent folderGUIContent = new GUIContent(
"Output Folder",
"Parent folder where the new set of bone poses will be saved.");
private static GUIContent saveButtonGUIContent = new GUIContent(
"Save Pose Set",
"Generate the new poses (may take several seconds).");
// Draws the source/converter/folder pickers and the save button.
void OnGUI()
{
sourceUMA = EditorGUILayout.ObjectField(sourceGUIContent, sourceUMA, typeof(UMAData), true) as UMAData;
EditorGUI.indentLevel++;
selectedDNAHash = 0;
if (sourceUMA == null)
{
// No source selected: show a disabled, empty converter popup.
EditorGUI.BeginDisabledGroup(true);
GUIContent[] dnaNames = new GUIContent[1];
dnaNames[0] = new GUIContent("");
EditorGUILayout.Popup(converterGUIContent, selectedDNAIndex, dnaNames);
EditorGUI.EndDisabledGroup();
}
else
{
DnaConverterBehaviour[] dnaConverters = sourceUMA.umaRecipe.raceData.dnaConverterList;
GUIContent[] dnaNames = new GUIContent[dnaConverters.Length];
for (int i = 0; i < dnaConverters.Length; i++)
{
dnaNames[i] = new GUIContent(dnaConverters[i].name);
}
selectedDNAIndex = EditorGUILayout.Popup(converterGUIContent, selectedDNAIndex, dnaNames);
if ((selectedDNAIndex >= 0) && (selectedDNAIndex < dnaConverters.Length))
{
selectedDNAHash = dnaConverters[selectedDNAIndex].DNATypeHash;
}
}
EditorGUI.indentLevel--;
EditorGUILayout.Space();
outputFolder = EditorGUILayout.ObjectField(folderGUIContent, outputFolder, typeof(UnityEngine.Object), false) as UnityEngine.Object;
EnforceFolder(ref outputFolder);
// Saving requires a source UMA, a selected converter, and an output folder.
EditorGUI.BeginDisabledGroup((sourceUMA == null) || (selectedDNAHash == 0) || (outputFolder == null));
if (GUILayout.Button(saveButtonGUIContent))
{
SavePoseSet();
}
EditorGUI.EndDisabledGroup();
}
// Deferred rebuild: marks the post-DNA avatar dirty one editor tick after a
// DNA value was changed, which re-triggers CreateBonePoseCallback when it rebuilds.
void Update()
{
if (avatarDNAisDirty)
{
avatarDNAisDirty = false;
UMAData umaPostDNA = tempAvatarPostDNA.GetComponent<UMADynamicAvatar>().umaData;
if (umaPostDNA != null)
{
umaPostDNA.Dirty(true, false, false);
}
}
}
// This code is generally the same as used in the DynamicDNAConverterCustomizer
// Probably worth breaking it out at some point and having it generic
/// <summary>
/// Invoked each time the post-DNA avatar finishes rebuilding. Diffs the pre-
/// and post-DNA skeletons into a UMABonePose asset, then advances the DNA to
/// the next sample value (0.0 then 1.0 per entry); once all poses are saved,
/// builds the morph set asset and converter prefab.
/// </summary>
protected void CreateBonePoseCallback(UMAData umaData)
{
avatarDNAisDirty = false;
UMABonePose bonePose = ScriptableObject.CreateInstance<UMABonePose>();
UMAData umaPreDNA = tempAvatarPreDNA.GetComponent<UMADynamicAvatar>().umaData;
UMAData umaPostDNA = tempAvatarPostDNA.GetComponent<UMADynamicAvatar>().umaData;
UMADnaBase activeDNA = umaPostDNA.umaRecipe.GetDna(selectedDNAHash);
UMASkeleton skeletonPreDNA = umaPreDNA.skeleton;
UMASkeleton skeletonPostDNA = umaPostDNA.skeleton;
if (poseSaveIndex < 0)
{
poseSaveName = startingPoseName;
// Now that StartingPose has been generated
// add the active DNA to the pre DNA avatar
DnaConverterBehaviour activeConverter = sourceUMA.umaRecipe.raceData.GetConverter(sourceUMA.umaRecipe.GetDna(selectedDNAHash));
umaPreDNA.umaRecipe.raceData.dnaConverterList = new DnaConverterBehaviour[1];
umaPreDNA.umaRecipe.raceData.dnaConverterList[0] = activeConverter;
umaPreDNA.umaRecipe.raceData.UpdateDictionary();
umaPreDNA.umaRecipe.EnsureAllDNAPresent();
umaPreDNA.Dirty(true, false, true);
}
// Record every bone whose local transform differs between the two skeletons.
Transform transformPreDNA;
Transform transformPostDNA;
bool transformDirty;
int parentHash;
foreach (int boneHash in skeletonPreDNA.BoneHashes)
{
skeletonPreDNA.TryGetBoneTransform(boneHash, out transformPreDNA, out transformDirty, out parentHash);
skeletonPostDNA.TryGetBoneTransform(boneHash, out transformPostDNA, out transformDirty, out parentHash);
if ((transformPreDNA == null) || (transformPostDNA == null))
{
Debug.LogWarning("Bad bone hash in skeleton: " + boneHash);
continue;
}
if (!LocalTransformsMatch(transformPreDNA, transformPostDNA))
{
bonePose.AddBone(transformPreDNA, transformPostDNA.localPosition, transformPostDNA.localRotation, transformPostDNA.localScale);
}
}
// Reset every DNA entry to the 0.5 midpoint before staging the next sample.
int activeDNACount = activeDNA.Count;
for (int i = 0; i < activeDNACount; i++)
{
activeDNA.SetValue(i, 0.5f);
}
AssetDatabase.CreateAsset(bonePose, folderPath + "/" + poseSaveName + ".asset");
EditorUtility.SetDirty(bonePose);
AssetDatabase.SaveAssets();
poseSaveIndex++;
if (poseSaveIndex < activeDNACount)
{
// Phase 1: sample each DNA entry at 0.0.
poseSaveName = activeDNA.Names[poseSaveIndex] + "_0";
activeDNA.SetValue(poseSaveIndex, 0.0f);
avatarDNAisDirty = true;
}
else if (poseSaveIndex < (activeDNACount * 2))
{
// Phase 2: sample each DNA entry at 1.0.
int dnaIndex = poseSaveIndex - activeDNACount;
poseSaveName = activeDNA.Names[dnaIndex] + "_1";
activeDNA.SetValue(dnaIndex, 1.0f);
// NOTE(review): this branch dirties immediately AND defers via avatarDNAisDirty,
// while the "_0" branch only uses the deferred path — verify the extra Dirty()
// call is intentional.
umaPostDNA.Dirty();
avatarDNAisDirty = true;
}
else
{
// All poses saved: tear down the temp avatars and build the output assets.
UMAUtils.DestroySceneObject(tempAvatarPreDNA);
UMAUtils.DestroySceneObject(tempAvatarPostDNA);
// Build the morph set asset and populate it with the saved pose pairs
string assetName = "Morph Set";
string assetPath = AssetDatabase.GenerateUniqueAssetPath(folderPath + "/" + assetName + ".asset");
MorphSetDnaAsset asset = CustomAssetUtility.CreateAsset<MorphSetDnaAsset>(assetPath, false);
SerializedObject serializedAsset = new SerializedObject(asset);
SerializedProperty startingPose = serializedAsset.FindProperty("startingPose");
startingPose.objectReferenceValue = AssetDatabase.LoadAssetAtPath<UMABonePose>(folderPath + "/" + startingPoseName + ".asset");
SerializedProperty morphSetArray = serializedAsset.FindProperty("dnaMorphs");
morphSetArray.ClearArray();
for (int i = 0; i < activeDNACount; i++)
{
string posePairName = activeDNA.Names[i];
morphSetArray.InsertArrayElementAtIndex(i);
SerializedProperty posePair = morphSetArray.GetArrayElementAtIndex(i);
SerializedProperty dnaEntryName = posePair.FindPropertyRelative("dnaEntryName");
dnaEntryName.stringValue = posePairName;
SerializedProperty zeroPose = posePair.FindPropertyRelative("poseZero");
zeroPose.objectReferenceValue = AssetDatabase.LoadAssetAtPath<UMABonePose>(folderPath + "/" + posePairName + "_0.asset");
SerializedProperty onePose = posePair.FindPropertyRelative("poseOne");
onePose.objectReferenceValue = AssetDatabase.LoadAssetAtPath<UMABonePose>(folderPath + "/" + posePairName + "_1.asset");
}
serializedAsset.ApplyModifiedPropertiesWithoutUndo();
// Build a prefab DNA Converter and populate it with the morph set
string prefabName = "Converter Prefab";
string prefabPath = AssetDatabase.GenerateUniqueAssetPath(folderPath + "/" + prefabName + ".prefab");
GameObject tempConverterPrefab = new GameObject(prefabName);
MorphSetDnaConverterBehaviour converter = tempConverterPrefab.AddComponent<MorphSetDnaConverterBehaviour>();
SerializedObject serializedConverter = new SerializedObject(converter);
// NOTE(review): this looks up "morphSet" on serializedAsset, but the modified
// properties are applied via serializedConverter below — verify this should not
// be serializedConverter.FindProperty("morphSet").
SerializedProperty morphSet = serializedAsset.FindProperty("morphSet");
morphSet.objectReferenceValue = AssetDatabase.LoadAssetAtPath<MorphSetDnaAsset>(assetPath);
serializedConverter.ApplyModifiedPropertiesWithoutUndo();
PrefabUtility.CreatePrefab(prefabPath, tempConverterPrefab);
DestroyImmediate(tempConverterPrefab, false);
}
}
/// <summary>
/// Kicks off pose generation: prepares the output folder, then builds two
/// temporary avatars — one with no DNA converters (reference) and one with
/// only the selected converter. The second avatar's update callback drives
/// CreateBonePoseCallback.
/// </summary>
protected void SavePoseSet()
{
DnaConverterBehaviour activeConverter = sourceUMA.umaRecipe.raceData.GetConverter(sourceUMA.umaRecipe.GetDna(selectedDNAHash));
folderPath = AssetDatabase.GetAssetPath(outputFolder) + "/" + activeConverter.name;
if (!AssetDatabase.IsValidFolder(folderPath))
{
string folderGUID = AssetDatabase.CreateFolder(AssetDatabase.GetAssetPath(outputFolder), activeConverter.name);
folderPath = AssetDatabase.GUIDToAssetPath(folderGUID);
}
poseSaveIndex = -1;
// Build a temporary version of the Avatar with no DNA to get original state
SlotData[] activeSlots = sourceUMA.umaRecipe.GetAllSlots();
int slotIndex;
tempAvatarPreDNA = new GameObject("Temp Raw Avatar");
tempAvatarPreDNA.transform.parent = sourceUMA.transform.parent;
tempAvatarPreDNA.transform.localPosition = Vector3.zero;
tempAvatarPreDNA.transform.localRotation = sourceUMA.transform.localRotation;
UMADynamicAvatar tempAvatar = tempAvatarPreDNA.AddComponent<UMADynamicAvatar>();
tempAvatar.umaGenerator = sourceUMA.umaGenerator;
tempAvatar.Initialize();
tempAvatar.umaData.umaRecipe = new UMAData.UMARecipe();
tempAvatar.umaData.umaRecipe.raceData = ScriptableObject.CreateInstance<RaceData>();
tempAvatar.umaData.umaRecipe.raceData.raceName = "Temp Raw Race";
tempAvatar.umaData.umaRecipe.raceData.TPose = sourceUMA.umaRecipe.raceData.TPose;
tempAvatar.umaData.umaRecipe.raceData.umaTarget = sourceUMA.umaRecipe.raceData.umaTarget;
slotIndex = 0;
foreach (SlotData slotEntry in activeSlots) {
if ((slotEntry == null) || slotEntry.dontSerialize) continue;
tempAvatar.umaData.umaRecipe.SetSlot(slotIndex++, slotEntry);
}
tempAvatar.Show();
// Second avatar: identical slots but with only the selected DNA converter.
tempAvatarPostDNA = new GameObject("Temp DNA Avatar");
tempAvatarPostDNA.transform.parent = sourceUMA.transform.parent;
tempAvatarPostDNA.transform.localPosition = Vector3.zero;
tempAvatarPostDNA.transform.localRotation = sourceUMA.transform.localRotation;
UMADynamicAvatar tempAvatar2 = tempAvatarPostDNA.AddComponent<UMADynamicAvatar>();
tempAvatar2.umaGenerator = sourceUMA.umaGenerator;
tempAvatar2.Initialize();
tempAvatar2.umaData.umaRecipe = new UMAData.UMARecipe();
tempAvatar2.umaData.umaRecipe.raceData = ScriptableObject.CreateInstance<RaceData>();
tempAvatar2.umaData.umaRecipe.raceData.raceName = "Temp DNA Race";
tempAvatar2.umaData.umaRecipe.raceData.TPose = sourceUMA.umaRecipe.raceData.TPose;
tempAvatar2.umaData.umaRecipe.raceData.umaTarget = sourceUMA.umaRecipe.raceData.umaTarget;
tempAvatar2.umaData.umaRecipe.raceData.dnaConverterList = new DnaConverterBehaviour[1];
tempAvatar2.umaData.umaRecipe.raceData.dnaConverterList[0] = activeConverter;
tempAvatar2.umaData.umaRecipe.raceData.UpdateDictionary();
slotIndex = 0;
foreach (SlotData slotEntry in activeSlots) {
if ((slotEntry == null) || slotEntry.dontSerialize) continue;
tempAvatar2.umaData.umaRecipe.SetSlot(slotIndex++, slotEntry);
}
tempAvatar2.umaData.OnCharacterUpdated += CreateBonePoseCallback;
tempAvatar2.Show();
}
/// <summary>
/// Ensures the referenced object is a project folder. A non-asset reference is
/// cleared; a non-folder asset is replaced by its containing folder.
/// </summary>
public static void EnforceFolder(ref UnityEngine.Object folderObject)
{
if (folderObject != null)
{
string destpath = AssetDatabase.GetAssetPath(folderObject);
if (string.IsNullOrEmpty(destpath))
{
folderObject = null;
}
else if (!System.IO.Directory.Exists(destpath))
{
// Not a folder: fall back to the folder that contains the asset.
destpath = destpath.Substring(0, destpath.LastIndexOf('/'));
folderObject = AssetDatabase.LoadMainAssetAtPath(destpath);
}
}
}
// Tolerance compared against squared position/scale deltas below.
private const float bonePoseAccuracy = 0.0001f;
// True when two transforms have (approximately) identical local position,
// scale and rotation; rotation is compared with Quaternion equality.
private static bool LocalTransformsMatch(Transform t1, Transform t2)
{
if ((t1.localPosition - t2.localPosition).sqrMagnitude > bonePoseAccuracy) return false;
if ((t1.localScale - t2.localScale).sqrMagnitude > bonePoseAccuracy) return false;
if (t1.localRotation != t2.localRotation) return false;
return true;
}
[MenuItem("UMA/Pose Tools/Bone Pose DNA Extractor")]
public static void OpenUMADNAToBonePoseWindow()
{
EditorWindow win = EditorWindow.GetWindow(typeof(UMADNAToBonePoseWindow));
win.titleContent.text = "Pose Extractor";
}
}
}
#endif
| |
using System;
using System.IO;
/*
* $Id: ZStream.cs,v 1.1 2006-07-31 13:59:26 bouncy Exp $
*
Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
3. The names of the authors may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* This program is based on zlib-1.1.3, so all credit should go authors
* Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
* and contributors of zlib.
*/
namespace Org.BouncyCastle.Utilities.Zlib
{
/// <summary>
/// Base zlib stream state, ported from zlib-1.1.3's z_stream structure. Holds
/// the shared input/output buffer bookkeeping and Adler-32 checksum used by
/// both the Deflate and Inflate engines.
/// </summary>
public abstract class ZStream : Stream
{
private const int MAX_WBITS = 15; // 32K LZ77 window
private const int DEF_WBITS = MAX_WBITS;
private const int MAX_MEM_LEVEL = 9;
public byte[] next_in; // next input byte
public int next_in_index;
public int avail_in; // number of bytes available at next_in
public long total_in; // total nb of input bytes read so far
public byte[] next_out; // next output byte should be put there
public int next_out_index;
public int avail_out; // remaining free space at next_out
public long total_out; // total nb of bytes output so far
public String msg;
internal Deflate dstate;
internal Inflate istate;
//internal int data_type; // best guess about the data type: ascii or binary
// TODO: Make setter private
// Running Adler-32 checksum of the (de)compressed data.
public long Adler { get; set; }
//internal Adler32 _adler = new Adler32();
// largest prime smaller than 65536
private const int ADLER32_BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const int ADLER32_NMAX = 5552;
/// <summary>
/// Updates an Adler-32 checksum (RFC 1950) with len bytes of buf starting at
/// index, continuing from the running value adler. A null buffer returns the
/// Adler-32 initial value, 1.
/// </summary>
internal long Adler32(long adler, byte[] buf, int index, int len)
{
if (buf == null) { return 1L; }
long s1 = adler & 0xffff;
long s2 = (adler >> 16) & 0xffff;
int k;
while (len > 0)
{
// Process at most NMAX bytes before reducing modulo BASE, so the
// 32-bit-range accumulators cannot overflow.
k = len < ADLER32_NMAX ? len : ADLER32_NMAX;
len -= k;
// 16-way unrolled inner loop, mirroring zlib's reference implementation.
while (k >= 16)
{
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
s1 += buf[index++] & 0xff; s2 += s1;
k -= 16;
}
if (k != 0)
{
do
{
s1 += buf[index++] & 0xff; s2 += s1;
}
while (--k != 0);
}
s1 %= ADLER32_BASE;
s2 %= ADLER32_BASE;
}
return (s2 << 16) | s1;
}
// Folds len bytes of buf into the stored checksum, continuing from the
// supplied running value rather than the current Adler property.
internal void UpdateAdler(long adler, byte[] buf, int index, int len)
{
this.Adler = this.Adler32(adler, buf, index, len);
}
// Folds len bytes of buf into the stored checksum.
internal void UpdateAdler(byte[] buf, int index, int len)
{
this.Adler = this.Adler32(this.Adler, buf, index, len);
}
// Initializes inflation with the default 32K window.
protected ZLibStatus inflateInit()
{
return inflateInit(DEF_WBITS);
}
// Initializes inflation; nowrap selects a raw stream with no zlib header/trailer.
protected ZLibStatus inflateInit(bool nowrap)
{
return inflateInit(DEF_WBITS, nowrap);
}
// Initializes inflation with a window of 2^w bytes.
protected ZLibStatus inflateInit(int w)
{
return inflateInit(w, false);
}
// Initializes inflation; a negative window size signals raw (headerless) mode
// to the Inflate engine, matching zlib's convention.
protected ZLibStatus inflateInit(int w, bool nowrap)
{
istate = new Inflate();
return istate.InflateInit(this, nowrap ? -w : w);
}
protected ZLibStatus inflate(FlushType f)
{
if (istate == null) return ZLibStatus.Z_STREAM_ERROR;
return istate.inflate(this, f);
}
// Releases the inflate state; further inflate calls return Z_STREAM_ERROR.
protected ZLibStatus inflateEnd()
{
if (istate == null) return ZLibStatus.Z_STREAM_ERROR;
var ret = istate.InflateEnd(this);
istate = null;
return ret;
}
protected ZLibStatus inflateSync()
{
if (istate == null)
return ZLibStatus.Z_STREAM_ERROR;
return istate.InflateSync(this);
}
//protected ZLibStatus inflateSetDictionary(byte[] dictionary, int dictLength)
//{
// if (istate == null)
// return ZLibStatus.Z_STREAM_ERROR;
// return istate.InflateSetDictionary(this, dictionary, dictLength);
//}
// Initializes deflation with the default 32K window.
protected ZLibStatus deflateInit(CompressionLevel level)
{
return deflateInit(level, MAX_WBITS);
}
// Initializes deflation; nowrap selects a raw stream with no zlib header/trailer.
protected ZLibStatus deflateInit(CompressionLevel level, bool nowrap)
{
return deflateInit(level, MAX_WBITS, nowrap);
}
// Initializes deflation with a window of 2^bits bytes.
protected ZLibStatus deflateInit(CompressionLevel level, int bits)
{
return deflateInit(level, bits, false);
}
// Initializes deflation; a negative window size signals raw (headerless) mode
// to the Deflate engine, matching zlib's convention.
protected ZLibStatus deflateInit(CompressionLevel level, int bits, bool nowrap)
{
dstate = new Deflate();
return dstate.deflateInit(this, level, nowrap ? -bits : bits);
}
public ZLibStatus deflate(FlushType flush)
{
if (dstate == null)
{
return ZLibStatus.Z_STREAM_ERROR;
}
return dstate.deflate(this, flush);
}
// Releases the deflate state; further deflate calls return Z_STREAM_ERROR.
protected ZLibStatus deflateEnd()
{
if (dstate == null) return ZLibStatus.Z_STREAM_ERROR;
var ret = dstate.deflateEnd();
dstate = null;
return ret;
}
protected ZLibStatus deflateParams(CompressionLevel level, CompressionStrategy strategy)
{
if (dstate == null) return ZLibStatus.Z_STREAM_ERROR;
return dstate.deflateParams(this, level, strategy);
}
protected ZLibStatus deflateSetDictionary(byte[] dictionary, int dictLength)
{
if (dstate == null)
return ZLibStatus.Z_STREAM_ERROR;
return dstate.deflateSetDictionary(this, dictionary, dictLength);
}
// Flush as much pending output as possible. All deflate() output goes
// through this function so some applications may wish to modify it
// to avoid allocating a large strm->next_out buffer and copying into it.
// (See also read_buf()).
internal void flush_pending()
{
int len = dstate.pending;
if (len > avail_out) len = avail_out;
if (len == 0) return;
// The body of this bounds check was debug output in the original
// zlib/JZlib source and is intentionally empty in this port.
if (dstate.pending_buf.Length <= dstate.pending_out ||
next_out.Length <= next_out_index ||
dstate.pending_buf.Length < (dstate.pending_out + len) ||
next_out.Length < (next_out_index + len))
{
// System.out.println(dstate.pending_buf.length+", "+dstate.pending_out+
// ", "+next_out.length+", "+next_out_index+", "+len);
// System.out.println("avail_out="+avail_out);
}
System.Array.Copy(dstate.pending_buf, dstate.pending_out,
next_out, next_out_index, len);
next_out_index += len;
dstate.pending_out += len;
total_out += len;
avail_out -= len;
dstate.pending -= len;
if (dstate.pending == 0)
{
dstate.pending_out = 0;
}
}
// Read a new buffer from the current input stream, update the adler32
// and total number of bytes read. All deflate() input goes through
// this function so some applications may wish to modify it to avoid
// allocating a large strm->next_in buffer and copying from it.
// (See also flush_pending()).
// Returns the number of bytes actually copied into buf.
internal int read_buf(byte[] buf, int start, int size)
{
int len = avail_in;
if (len > size) len = size;
if (len == 0) return 0;
avail_in -= len;
// Only checksum input when writing a wrapped zlib stream (noheader == 0).
if (dstate.noheader == 0)
{
Adler = this.Adler32(Adler, next_in, next_in_index, len);
}
System.Array.Copy(next_in, next_in_index, buf, start, len);
next_in_index += len;
total_in += len;
return len;
}
}
}
| |
#region Disclaimer/Info
///////////////////////////////////////////////////////////////////////////////////////////////////
// Subtext WebLog
//
// Subtext is an open source weblog system that is a fork of the .TEXT
// weblog system.
//
// For updated news and information please visit http://subtextproject.com/
// Subtext is hosted at Google Code at http://code.google.com/p/subtext/
// The development mailing list is at subtext@googlegroups.com
//
// This project is licensed under the BSD license. See the License.txt file for more information.
///////////////////////////////////////////////////////////////////////////////////////////////////
#endregion
using System;
using System.Collections.Generic;
using Subtext.Extensibility;
using Subtext.Extensibility.Interfaces;
using Subtext.Framework.Configuration;
namespace Subtext.Framework.Components
{
/// <summary>
/// Summary description for Entry.
/// </summary>
[Serializable]
public class Entry : IEntryIdentity
{
// Backing field for DateSyndicated; NullValue.NullDateTime means "not syndicated".
DateTime _dateSyndicated = NullValue.NullDateTime;
/// <summary>
/// Creates a new entry of the specified post type belonging to the specified
/// blog. Dates start out as null dates and the id as a null int.
/// </summary>
public Entry(PostType postType, Blog blog)
{
Categories = new List<string>();
PostConfig = PostConfig.None;
DateModified = NullValue.NullDateTime;
DateCreated = NullValue.NullDateTime;
PostType = postType;
Blog = blog;
Id = NullValue.NullInt32;
}
/// <summary>
/// Creates a new entry of the specified post type for the current blog
/// (as resolved by Config.CurrentBlog).
/// </summary>
public Entry(PostType postType)
: this(postType, Config.CurrentBlog)
{
}
/// <summary>
/// Gets or sets the blog ID.
/// </summary>
/// <value>The blog ID.</value>
public int BlogId { get; set; }
/// <summary>
/// Gets or sets the blog this entry belongs to.
/// </summary>
public Blog Blog { get; set; }
/// <summary>
/// Gets a value indicating whether this instance has description.
/// </summary>
/// <value>
/// <c>true</c> if this instance has description; otherwise, <c>false</c>.
/// </value>
public bool HasDescription
{
get { return !String.IsNullOrEmpty(Description); }
}
/// <summary>
/// Gets or sets the description or excerpt for this blog post.
/// Some blogs like to syndicate description only.
/// </summary>
/// <value>The description.</value>
public string Description
{ // TODO: rename this property to Excerpt.
get;
set;
}
/// <summary>
/// Gets a value indicating whether this instance has entry name.
/// </summary>
/// <value>
/// <c>true</c> if this instance has entry name; otherwise, <c>false</c>.
/// </value>
public bool HasEntryName
{
get { return EntryName != null && EntryName.Trim().Length > 0; }
}
/// <summary>
/// Gets or sets the title of this post.
/// </summary>
/// <value>The title.</value>
public string Title { get; set; }
/// <summary>
/// Gets or sets the body of the Entry. This is the
/// main content of the entry.
/// </summary>
/// <value></value>
public string Body { get; set; }
/// <summary>
/// Gets or sets the author name of the entry.
/// For comments, this is the name given by the commenter.
/// </summary>
/// <value>The author.</value>
public string Author { get; set; }
/// <summary>
/// Gets or sets the email of the author.
/// </summary>
/// <value>The email.</value>
public string Email { get; set; }
/// <summary>
/// Gets or sets the date this entry was last updated.
/// </summary>
/// <value></value>
public DateTime DateModified { get; set; }
/// <summary>
/// Gets or sets the date the item was published. Setting this to a null
/// date also removes the entry from the main syndication feed.
/// </summary>
/// <value></value>
public DateTime DateSyndicated
{
get { return _dateSyndicated; }
set
{
if(NullValue.IsNull(value))
{
// Clearing the syndication date un-syndicates the entry.
IncludeInMainSyndication = false;
}
_dateSyndicated = value;
}
}
/// <summary>
/// Gets or sets a value indicating whether this entry is active.
/// Backed by the IsActive flag in <see cref="PostConfig"/>.
/// </summary>
/// <value><c>true</c> if this instance is active; otherwise, <c>false</c>.</value>
public bool IsActive
{
get { return EntryPropertyCheck(PostConfig.IsActive); }
set { PostConfigSetter(PostConfig.IsActive, value); }
}
/// <summary>
/// Gets or sets a value indicating whether this entry allows comments.
/// </summary>
/// <value><c>true</c> if [allows comments]; otherwise, <c>false</c>.</value>
public bool AllowComments
{
get { return EntryPropertyCheck(PostConfig.AllowComments); }
set { PostConfigSetter(PostConfig.AllowComments, value); }
}
/// <summary>
/// Gets or sets a value indicating whether this entry is displayed on the home page.
/// </summary>
/// <value><c>true</c> if [display on home page]; otherwise, <c>false</c>.</value>
public bool DisplayOnHomePage
{
get { return EntryPropertyCheck(PostConfig.DisplayOnHomepage); }
set { PostConfigSetter(PostConfig.DisplayOnHomepage, value); }
}
/// <summary>
/// Gets or sets a value indicating whether the description only should be syndicated.
/// </summary>
/// <value>
/// <c>true</c> if [syndicate description only]; otherwise, <c>false</c>.
/// </value>
public bool SyndicateDescriptionOnly
{
get { return EntryPropertyCheck(PostConfig.SyndicateDescriptionOnly); }
set { PostConfigSetter(PostConfig.SyndicateDescriptionOnly, value); }
}
/// <summary>
/// Gets or sets a value indicating whether [include in main syndication].
/// </summary>
/// <value>
/// <c>true</c> if [include in main syndication]; otherwise, <c>false</c>.
/// </value>
public bool IncludeInMainSyndication
{
get
{
return EntryPropertyCheck(PostConfig.IncludeInMainSyndication);
}
set
{
PostConfigSetter(PostConfig.IncludeInMainSyndication, value);
}
}
/// <summary>
/// Whether or not this entry is aggregated.
/// </summary>
public bool IsAggregated
{
get { return EntryPropertyCheck(PostConfig.IsAggregated); }
set { PostConfigSetter(PostConfig.IsAggregated, value); }
}
/// <summary>
/// True if comments have been closed. Otherwise false. Comments are closed
/// either explicitly or after by global age setting which overrides explicit settings
/// </summary>
public bool CommentingClosed
{
get
{
return (CommentingClosedByAge || EntryPropertyCheck(PostConfig.CommentsClosed));
}
set
{
// Closing By Age overrides explicit closing
if(!CommentingClosedByAge)
{
PostConfigSetter(PostConfig.CommentsClosed, value);
}
}
}
/// <summary>
/// Returns true if the comments for this entry are closed due
/// to the age of the entry. This is related to the DaysTillCommentsClose setting.
/// </summary>
public bool CommentingClosedByAge
{
get
{
// int.MaxValue is the sentinel for "comments never close by age".
if(Blog.DaysTillCommentsClose == int.MaxValue)
{
return false;
}
return Blog.TimeZone.Now > DateSyndicated.AddDays(Blog.DaysTillCommentsClose);
}
}
/// <summary>
/// Gets or sets the number of feedback items recorded for this entry.
/// NOTE(review): presumably populated from the data store rather than derived
/// from the Comments collection - confirm against callers.
/// </summary>
public int FeedBackCount { get; set; }
/// <summary>
/// Gets or sets the bit flags holding this entry's boolean settings.
/// Individual bits are exposed via the boolean convenience properties.
/// </summary>
public PostConfig PostConfig { get; set; }
/// <summary>
/// Returns the categories for this entry.
/// </summary>
public ICollection<string> Categories { get; private set; }
/// <summary>
/// Gets and sets the enclosure for the entry.
/// </summary>
public Enclosure Enclosure { get; set; }
/// <summary>
/// Gets or sets the entry ID.
/// </summary>
/// <value>The entry ID.</value>
public int Id { get; set; }
/// <summary>
/// Gets or sets the type of the post.
/// </summary>
/// <value>The type of the post.</value>
public PostType PostType { get; set; }
/// <summary>
/// Gets or sets the name of the entry. This is used
/// to create a friendly URL for this entry.
/// </summary>
/// <value>The name of the entry.</value>
public string EntryName { get; set; }
/// <summary>
/// Gets or sets the date this item was created.
/// </summary>
/// <value></value>
public DateTime DateCreated { get; set; }
/// <summary>
/// Determines whether every bit of <paramref name="ep"/> is set in the
/// current <see cref="PostConfig"/> flags value.
/// </summary>
/// <param name="ep">The flag (or combination of flags) to test for.</param>
/// <returns><c>true</c> when all requested bits are set; otherwise <c>false</c>.</returns>
protected bool EntryPropertyCheck(PostConfig ep)
{
    PostConfig masked = PostConfig & ep;
    return masked == ep;
}
/// <summary>
/// Sets or clears the given flag(s) on the <see cref="PostConfig"/> value.
/// </summary>
/// <param name="ep">The flag (or combination of flags) to modify.</param>
/// <param name="select"><c>true</c> to set the bits, <c>false</c> to clear them.</param>
protected void PostConfigSetter(PostConfig ep, bool select)
{
    PostConfig = select ? (PostConfig | ep) : (PostConfig & ~ep);
}
/// <summary>
/// Calculates a simple checksum of the specified text by summing its
/// UTF-16 code units. This is used for comment filtering purposes.
/// Once deployed, this algorithm shouldn't change.
/// </summary>
/// <param name="text">Text to checksum; must not be null.</param>
/// <returns>The sum of the character values of <paramref name="text"/>.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="text"/> is null.</exception>
public static int CalculateChecksum(string text)
{
    if(text == null)
    {
        throw new ArgumentNullException("text");
    }
    int sum = 0;
    for(int i = 0; i < text.Length; i++)
    {
        sum += text[i];
    }
    return sum;
}
/// <summary>
/// Gets the feedback items attached to this entry.
/// The backing list is created lazily on first access, so the property
/// never returns null.
/// </summary>
public ICollection<FeedbackItem> Comments
{
    get { return _comments ?? (_comments = new List<FeedbackItem>()); }
}
List<FeedbackItem> _comments;
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System.Management.Automation.Runspaces;
using System.Text;
using System.Threading;
using Dbg = System.Management.Automation.Diagnostics;
namespace System.Management.Automation.Remoting.Internal
{
/// <summary>
/// PSStreamObjectType is for internal (PowerShell) consumption and should not be treated as a public API.
/// </summary>
public enum PSStreamObjectType
{
    /// <summary>
    /// An object for the output stream.
    /// </summary>
    Output = 1,
    /// <summary>
    /// An ErrorRecord for the error stream.
    /// </summary>
    Error = 2,
    /// <summary>
    /// A ClientMethodExecutor to execute against the cmdlet.
    /// </summary>
    MethodExecutor = 3,
    /// <summary>
    /// A warning message (string) for the warning stream.
    /// </summary>
    Warning = 4,
    /// <summary>
    /// A CmdletMethodInvoker&lt;object&gt; invoked on the cmdlet thread.
    /// </summary>
    BlockingError = 5,
    /// <summary>
    /// A CmdletMethodInvoker&lt;bool&gt; invoked on the cmdlet thread.
    /// </summary>
    ShouldMethod = 6,
    /// <summary>
    /// A WarningRecord appended to the warning variable list.
    /// </summary>
    WarningRecord = 7,
    /// <summary>
    /// A debug message (string) for the debug stream.
    /// </summary>
    Debug = 8,
    /// <summary>
    /// A ProgressRecord for the progress stream.
    /// </summary>
    Progress = 9,
    /// <summary>
    /// A verbose message (string) for the verbose stream.
    /// </summary>
    Verbose = 10,
    /// <summary>
    /// An InformationRecord for the information stream.
    /// </summary>
    Information = 11,
    /// <summary>
    /// An exception that is rethrown when the stream object is handled.
    /// </summary>
    Exception = 12,
}
/// <summary>
/// Describes an object written to an ObjectStream: the wrapped value
/// together with the stream it belongs to - output, error, verbose,
/// debug, etc.
/// PSStreamObject is for internal (PowerShell) consumption
/// and should not be treated as a public API.
/// </summary>
public class PSStreamObject
{
    /// <summary>
    /// The stream this object was written to.
    /// </summary>
    public PSStreamObjectType ObjectType { get; set; }

    // The wrapped object; its runtime type is determined by ObjectType
    // (ErrorRecord for Error, string for Warning/Verbose/Debug, etc. -
    // see the casts in WriteStreamObject).
    internal object Value { get; set; }

    // Identifier of the source that produced this object; used as a job
    // instance id by the WriteStreamObject(cmdlet, instanceId, ...) overload.
    internal Guid Id { get; set; }

    // Full constructor: wraps 'value' as an object of the given stream type,
    // tagged with the given source identifier.
    internal PSStreamObject(PSStreamObjectType objectType, object value, Guid id)
    {
        ObjectType = objectType;
        Value = value;
        Id = id;
    }

    /// <summary>
    /// Creates a stream object with an empty source identifier.
    /// </summary>
    /// <param name="objectType">Stream the value belongs to.</param>
    /// <param name="value">The value to wrap.</param>
    public PSStreamObject(PSStreamObjectType objectType, object value) :
        this(objectType, value, Guid.Empty)
    {
    }

    /// <summary>
    /// Handle the object obtained from an ObjectStream's reader
    /// based on its type.
    /// </summary>
    /// <param name="cmdlet">Cmdlet to use for outputting the object.</param>
    /// <param name="overrideInquire">Used by Receive-Job to suppress inquire preference.</param>
    public void WriteStreamObject(Cmdlet cmdlet, bool overrideInquire = false)
    {
        if (cmdlet != null)
        {
            // Dispatch on the stream type. Record-typed streams are routed
            // through MshCommandRuntime (when available) so the inquire
            // preference can be overridden; otherwise the write is skipped.
            switch (this.ObjectType)
            {
                case PSStreamObjectType.Output:
                    {
                        cmdlet.WriteObject(this.Value);
                    }

                    break;
                case PSStreamObjectType.Error:
                    {
                        ErrorRecord errorRecord = (ErrorRecord)this.Value;
                        errorRecord.PreserveInvocationInfoOnce = true;
                        MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                        if (mshCommandRuntime != null)
                        {
                            mshCommandRuntime.WriteError(errorRecord, overrideInquire);
                        }
                    }

                    break;
                case PSStreamObjectType.Debug:
                    {
                        string debug = (string)Value;
                        DebugRecord debugRecord = new DebugRecord(debug);
                        MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                        if (mshCommandRuntime != null)
                        {
                            mshCommandRuntime.WriteDebug(debugRecord, overrideInquire);
                        }
                    }

                    break;
                case PSStreamObjectType.Warning:
                    {
                        string warning = (string)Value;
                        WarningRecord warningRecord = new WarningRecord(warning);
                        MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                        if (mshCommandRuntime != null)
                        {
                            mshCommandRuntime.WriteWarning(warningRecord, overrideInquire);
                        }
                    }

                    break;
                case PSStreamObjectType.Verbose:
                    {
                        string verbose = (string)Value;
                        VerboseRecord verboseRecord = new VerboseRecord(verbose);
                        MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                        if (mshCommandRuntime != null)
                        {
                            mshCommandRuntime.WriteVerbose(verboseRecord, overrideInquire);
                        }
                    }

                    break;
                case PSStreamObjectType.Progress:
                    {
                        MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                        if (mshCommandRuntime != null)
                        {
                            mshCommandRuntime.WriteProgress((ProgressRecord)Value, overrideInquire);
                        }
                    }

                    break;
                case PSStreamObjectType.Information:
                    {
                        MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                        if (mshCommandRuntime != null)
                        {
                            mshCommandRuntime.WriteInformation((InformationRecord)Value, overrideInquire);
                        }
                    }

                    break;
                case PSStreamObjectType.WarningRecord:
                    {
                        // Not written to the warning stream - only appended to
                        // the warning variable list.
                        WarningRecord warningRecord = (WarningRecord)Value;
                        MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                        if (mshCommandRuntime != null)
                        {
                            mshCommandRuntime.AppendWarningVarList(warningRecord);
                        }
                    }

                    break;
                case PSStreamObjectType.MethodExecutor:
                    {
                        Dbg.Assert(this.Value is ClientMethodExecutor,
                                   "Expected psstreamObject.value is ClientMethodExecutor");
                        ClientMethodExecutor methodExecutor = (ClientMethodExecutor)Value;
                        methodExecutor.Execute(cmdlet);
                    }

                    break;
                case PSStreamObjectType.BlockingError:
                    {
                        CmdletMethodInvoker<object> methodInvoker = (CmdletMethodInvoker<object>)Value;
                        InvokeCmdletMethodAndWaitForResults(methodInvoker, cmdlet);
                    }

                    break;
                case PSStreamObjectType.ShouldMethod:
                    {
                        CmdletMethodInvoker<bool> methodInvoker = (CmdletMethodInvoker<bool>)Value;
                        InvokeCmdletMethodAndWaitForResults(methodInvoker, cmdlet);
                    }

                    break;
                case PSStreamObjectType.Exception:
                    {
                        // Exceptions are surfaced by rethrowing on the caller's thread.
                        Exception e = (Exception)Value;
                        throw e;
                    }
            }
        }
        else if (ObjectType == PSStreamObjectType.Exception)
        {
            // Even without a cmdlet to write to, exceptions must not be lost.
            Exception e = (Exception)Value;
            throw e;
        }
    }

    // Parses an identifier-tagged message of the form
    // "<jobInstanceId>:<computerName>:<rest>" (split on at most 3 parts).
    // NOTE(review): CreateInformationalMessage below only prepends
    // "<guid>:" - presumably the embedded message already starts with the
    // computer name; confirm against the producers of these messages.
    // On any parse failure the outputs default to Guid.Empty / string.Empty.
    private static void GetIdentifierInfo(string message, out Guid jobInstanceId, out string computerName)
    {
        jobInstanceId = Guid.Empty;
        computerName = string.Empty;

        if (message == null) return;

        string[] parts = message.Split(Utils.Separators.Colon, 3);

        if (parts.Length != 3) return;

        if (!Guid.TryParse(parts[0], out jobInstanceId))
            jobInstanceId = Guid.Empty;

        computerName = parts[1];
    }

    /// <summary>
    /// Handle the object obtained from an ObjectStream's reader
    /// based on its type. Unlike the public overload, this one rewraps
    /// error/progress/information records as their Remoting* counterparts
    /// (carrying OriginInfo) when the origin can be recovered from the record.
    /// NOTE(review): unlike the public overload, PSStreamObjectType.Exception
    /// is not handled here (falls through silently) - confirm intended.
    /// </summary>
    /// <param name="cmdlet">Cmdlet to use for outputting the object.</param>
    /// <param name="instanceId">Source job instance id; when non-empty it is attached to output objects.</param>
    /// <param name="overrideInquire">Suppresses prompt on messages with Inquire preference.
    /// Needed for Receive-Job</param>
    internal void WriteStreamObject(Cmdlet cmdlet, Guid instanceId, bool overrideInquire = false)
    {
        switch (ObjectType)
        {
            case PSStreamObjectType.Output:
                {
                    if (instanceId != Guid.Empty)
                    {
                        PSObject o = Value as PSObject;
                        if (o != null)
                            AddSourceJobNoteProperty(o, instanceId);
                    }

                    cmdlet.WriteObject(Value);
                }

                break;
            case PSStreamObjectType.Error:
                {
                    ErrorRecord errorRecord = (ErrorRecord)this.Value;
                    RemotingErrorRecord remoteErrorRecord = errorRecord as RemotingErrorRecord;

                    if (remoteErrorRecord == null)
                    {
                        // if we get a base ErrorRecord object, check if the computerName is
                        // populated in the RecommendedAction field
                        if (errorRecord.ErrorDetails != null && !string.IsNullOrEmpty(errorRecord.ErrorDetails.RecommendedAction))
                        {
                            string computerName;
                            Guid jobInstanceId;
                            GetIdentifierInfo(errorRecord.ErrorDetails.RecommendedAction,
                                              out jobInstanceId, out computerName);

                            errorRecord = new RemotingErrorRecord(errorRecord,
                                                                  new OriginInfo(computerName, Guid.Empty,
                                                                                 jobInstanceId));
                        }
                    }
                    else
                    {
                        errorRecord = remoteErrorRecord;
                    }

                    errorRecord.PreserveInvocationInfoOnce = true;
                    MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                    if (mshCommandRuntime != null)
                    {
                        mshCommandRuntime.WriteError(errorRecord, overrideInquire);
                    }
                }

                break;
            case PSStreamObjectType.Warning:
                {
                    string warning = (string)Value;
                    WarningRecord warningRecord = new WarningRecord(warning);
                    MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                    if (mshCommandRuntime != null)
                    {
                        mshCommandRuntime.WriteWarning(warningRecord, overrideInquire);
                    }
                }

                break;
            case PSStreamObjectType.Verbose:
                {
                    string verbose = (string)Value;
                    VerboseRecord verboseRecord = new VerboseRecord(verbose);
                    MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                    if (mshCommandRuntime != null)
                    {
                        mshCommandRuntime.WriteVerbose(verboseRecord, overrideInquire);
                    }
                }

                break;
            case PSStreamObjectType.Progress:
                {
                    ProgressRecord progressRecord = (ProgressRecord)Value;

                    RemotingProgressRecord remotingProgressRecord = progressRecord as RemotingProgressRecord;
                    if (remotingProgressRecord == null)
                    {
                        // The origin, if any, is encoded in CurrentOperation
                        // (see GetIdentifierInfo for the expected format).
                        Guid jobInstanceId;
                        string computerName;
                        GetIdentifierInfo(progressRecord.CurrentOperation, out jobInstanceId,
                                          out computerName);
                        OriginInfo info = new OriginInfo(computerName, Guid.Empty, jobInstanceId);
                        progressRecord = new RemotingProgressRecord(progressRecord, info);
                    }
                    else
                    {
                        progressRecord = remotingProgressRecord;
                    }

                    MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                    if (mshCommandRuntime != null)
                    {
                        mshCommandRuntime.WriteProgress(progressRecord, overrideInquire);
                    }
                }

                break;
            case PSStreamObjectType.Debug:
                {
                    string debug = (string)Value;
                    DebugRecord debugRecord = new DebugRecord(debug);
                    MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                    if (mshCommandRuntime != null)
                    {
                        mshCommandRuntime.WriteDebug(debugRecord, overrideInquire);
                    }
                }

                break;
            case PSStreamObjectType.Information:
                {
                    InformationRecord informationRecord = (InformationRecord)this.Value;
                    RemotingInformationRecord remoteInformationRecord = informationRecord as RemotingInformationRecord;

                    if (remoteInformationRecord == null)
                    {
                        // if we get a base InformationRecord object, check if the computerName is
                        // populated in the Source field
                        if (!string.IsNullOrEmpty(informationRecord.Source))
                        {
                            string computerName;
                            Guid jobInstanceId;
                            GetIdentifierInfo(informationRecord.Source, out jobInstanceId, out computerName);
                            informationRecord = new RemotingInformationRecord(informationRecord,
                                                                              new OriginInfo(computerName, Guid.Empty,
                                                                                             jobInstanceId));
                        }
                    }
                    else
                    {
                        informationRecord = remoteInformationRecord;
                    }

                    MshCommandRuntime mshCommandRuntime = cmdlet.CommandRuntime as MshCommandRuntime;
                    if (mshCommandRuntime != null)
                    {
                        mshCommandRuntime.WriteInformation(informationRecord, overrideInquire);
                    }
                }

                break;
            case PSStreamObjectType.WarningRecord:
            case PSStreamObjectType.MethodExecutor:
            case PSStreamObjectType.BlockingError:
            case PSStreamObjectType.ShouldMethod:
                {
                    // These types carry no origin information to rewrap;
                    // delegate to the plain overload.
                    WriteStreamObject(cmdlet, overrideInquire);
                }

                break;
        }
    }

    /// <summary>
    /// Handle the object obtained from an ObjectStream's reader
    /// based on its type.
    /// </summary>
    /// <param name="cmdlet">Cmdlet to use for outputting the object.</param>
    /// <param name="writeSourceIdentifier">When true, the stored Id is attached to the output (see the Guid overload).</param>
    /// <param name="overrideInquire">Overrides the inquire preference, used in Receive-Job to suppress prompts.</param>
    internal void WriteStreamObject(Cmdlet cmdlet, bool writeSourceIdentifier, bool overrideInquire)
    {
        if (writeSourceIdentifier)
            WriteStreamObject(cmdlet, Id, overrideInquire);
        else
            WriteStreamObject(cmdlet, overrideInquire);
    }

    // Runs the invoker's Action on the current (cmdlet) thread, publishing the
    // result - or the thrown exception - under the invoker's SyncObject, and
    // always signals Finished so a thread blocked on it can resume.
    private static void InvokeCmdletMethodAndWaitForResults<T>(CmdletMethodInvoker<T> cmdletMethodInvoker, Cmdlet cmdlet)
    {
        Dbg.Assert(cmdletMethodInvoker != null, "Caller should verify cmdletMethodInvoker != null");

        cmdletMethodInvoker.MethodResult = default(T);
        try
        {
            T tmpMethodResult = cmdletMethodInvoker.Action(cmdlet);
            lock (cmdletMethodInvoker.SyncObject)
            {
                cmdletMethodInvoker.MethodResult = tmpMethodResult;
            }
        }
        catch (Exception e)
        {
            lock (cmdletMethodInvoker.SyncObject)
            {
                cmdletMethodInvoker.ExceptionThrownOnCmdletThread = e;
            }

            throw;
        }
        finally
        {
            if (cmdletMethodInvoker.Finished != null)
            {
                cmdletMethodInvoker.Finished.Set();
            }
        }
    }

    // Attaches (or replaces) the SourceJobInstanceId note property on the
    // given PSObject so consumers can tell which job produced it.
    internal static void AddSourceJobNoteProperty(PSObject psObj, Guid instanceId)
    {
        Dbg.Assert(psObj != null, "psObj is null trying to add a note property.");
        if (psObj.Properties[RemotingConstants.SourceJobInstanceId] != null)
        {
            psObj.Properties.Remove(RemotingConstants.SourceJobInstanceId);
        }

        psObj.Properties.Add(new PSNoteProperty(RemotingConstants.SourceJobInstanceId, instanceId));
    }

    // Builds "<instanceId>:<message>" - the tagged form that
    // GetIdentifierInfo later parses apart.
    internal static string CreateInformationalMessage(Guid instanceId, string message)
    {
        var newMessage = new StringBuilder(instanceId.ToString());
        newMessage.Append(":");
        newMessage.Append(message);
        return newMessage.ToString();
    }

    // Tags an ErrorRecord with its source id by prepending it to
    // ErrorDetails.RecommendedAction (creating ErrorDetails if needed).
    // Returns null when given null.
    internal static ErrorRecord AddSourceTagToError(ErrorRecord errorRecord, Guid sourceId)
    {
        if (errorRecord == null) return null;

        if (errorRecord.ErrorDetails == null) errorRecord.ErrorDetails = new ErrorDetails(string.Empty);

        errorRecord.ErrorDetails.RecommendedAction = CreateInformationalMessage(sourceId, errorRecord.ErrorDetails.RecommendedAction);
        return errorRecord;
    }
}
}
namespace System.Management.Automation.Remoting
{
    /// <summary>
    /// Bundles an action to run on the cmdlet's thread together with the
    /// synchronization state needed to invoke it from another thread and
    /// observe its result (see PSStreamObject.InvokeCmdletMethodAndWaitForResults).
    /// </summary>
    public class CmdletMethodInvoker<T>
    {
        /// <summary>
        /// The delegate to execute against the cmdlet.
        /// </summary>
        public Func<Cmdlet, T> Action { get; set; }

        /// <summary>
        /// Exception captured if <see cref="Action"/> threw on the cmdlet thread.
        /// </summary>
        public Exception ExceptionThrownOnCmdletThread { get; set; }

        /// <summary>
        /// Event signaled once the invocation has finished (success or failure).
        /// </summary>
        public ManualResetEventSlim Finished { get; set; }

        /// <summary>
        /// Lock guarding <see cref="MethodResult"/> and
        /// <see cref="ExceptionThrownOnCmdletThread"/>.
        /// </summary>
        public object SyncObject { get; set; }

        /// <summary>
        /// Result produced by <see cref="Action"/>.
        /// </summary>
        public T MethodResult { get; set; }
    }
}
| |
//
// System.Web.UI.WebControls.MenuItemBinding.cs
//
// Authors:
// Lluis Sanchez Gual (lluis@novell.com)
//
// (C) 2004 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// Copyright (C) 2004 Novell, Inc (http://www.novell.com)
//
#if NET_2_0
using System;
using System.Collections;
using System.Web.UI;
using System.ComponentModel;
namespace System.Web.UI.WebControls
{
/// <summary>
/// Describes how fields of a data item map onto the properties of a menu
/// item (Text, Value, NavigateUrl, ...). Every property is backed by the
/// private ViewState bag so bindings participate in view-state persistence:
/// each getter returns the stored value or the documented default when the
/// key has never been set.
/// </summary>
[DefaultProperty ("TextField")]
public sealed class MenuItemBinding: IStateManager, ICloneable, IDataSourceViewSchemaAccessor
{
	// Backing store for all properties; also drives the IStateManager
	// implementation and Clone below.
	StateBag ViewState = new StateBag ();

	[DefaultValue ("")]
	public string DataMember {
		get {
			object o = ViewState ["DataMember"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["DataMember"] = value;
		}
	}

	// -1 means the binding applies at any menu depth.
	[DefaultValue (-1)]
	public int Depth {
		get {
			object o = ViewState ["Depth"];
			if (o != null) return (int) o;
			return -1;
		}
		set {
			ViewState ["Depth"] = value;
		}
	}

	[DefaultValue (true)]
	public bool Enabled {
		get {
			object o = ViewState ["Enabled"];
			if (o != null) return (bool) o;
			return true;
		}
		set {
			ViewState ["Enabled"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string EnabledField {
		get {
			object o = ViewState ["EnabledField"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["EnabledField"] = value;
		}
	}

	[Localizable (true)]
	[DefaultValue ("")]
	public string FormatString {
		get {
			object o = ViewState ["FormatString"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["FormatString"] = value;
		}
	}

	[DefaultValue ("")]
	[UrlProperty]
	[Editor ("System.Web.UI.Design.ImageUrlEditor, " + Consts.AssemblySystem_Design, typeof (System.Drawing.Design.UITypeEditor))]
	public string ImageUrl {
		get {
			object o = ViewState ["ImageUrl"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["ImageUrl"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string ImageUrlField {
		get {
			object o = ViewState ["ImageUrlField"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["ImageUrlField"] = value;
		}
	}

	[DefaultValue ("")]
	[UrlProperty]
	[Editor ("System.Web.UI.Design.UrlEditor, " + Consts.AssemblySystem_Design, typeof (System.Drawing.Design.UITypeEditor))]
	public string NavigateUrl {
		get {
			object o = ViewState ["NavigateUrl"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["NavigateUrl"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string NavigateUrlField {
		get {
			object o = ViewState ["NavigateUrlField"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["NavigateUrlField"] = value;
		}
	}

	[DefaultValue (true)]
	public bool Selectable {
		get {
			object o = ViewState ["Selectable"];
			if (o != null) return (bool) o;
			return true;
		}
		set {
			ViewState ["Selectable"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string SelectableField {
		get {
			object o = ViewState ["SelectableField"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["SelectableField"] = value;
		}
	}

	[DefaultValue ("")]
	public string Target {
		get {
			object o = ViewState ["Target"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["Target"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string TargetField {
		get {
			object o = ViewState ["TargetField"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["TargetField"] = value;
		}
	}

	[Localizable (true)]
	[DefaultValue ("")]
	[WebSysDescription ("The display text of the menu item.")]
	public string Text {
		get {
			object o = ViewState ["Text"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["Text"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string TextField {
		get {
			object o = ViewState ["TextField"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["TextField"] = value;
		}
	}

	[DefaultValue ("")]
	[Localizable (true)]
	public string ToolTip {
		get {
			object o = ViewState ["ToolTip"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["ToolTip"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string ToolTipField {
		get {
			object o = ViewState ["ToolTipField"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["ToolTipField"] = value;
		}
	}

	[DefaultValue ("")]
	[Localizable (true)]
	public string Value {
		get {
			object o = ViewState ["Value"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["Value"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string ValueField {
		get {
			object o = ViewState ["ValueField"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["ValueField"] = value;
		}
	}

	[DefaultValue ("")]
	[UrlProperty]
	[Editor ("System.Web.UI.Design.UrlEditor, " + Consts.AssemblySystem_Design, typeof (System.Drawing.Design.UITypeEditor))]
	public string PopOutImageUrl {
		get {
			object o = ViewState ["PopOutImageUrl"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["PopOutImageUrl"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string PopOutImageUrlField {
		get {
			object o = ViewState ["PopOutImageUrlField"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["PopOutImageUrlField"] = value;
		}
	}

	[DefaultValue ("")]
	[UrlProperty]
	[Editor ("System.Web.UI.Design.UrlEditor, " + Consts.AssemblySystem_Design, typeof (System.Drawing.Design.UITypeEditor))]
	public string SeparatorImageUrl {
		get {
			object o = ViewState ["SeparatorImageUrl"];
			if (o != null) return (string) o;
			return "";
		}
		set {
			ViewState ["SeparatorImageUrl"] = value;
		}
	}

	[DefaultValue ("")]
	[TypeConverter ("System.Web.UI.Design.DataSourceViewSchemaConverter, " + Consts.AssemblySystem_Design)]
	public string SeparatorImageUrlField {
		get {
			object o = ViewState ["SeparatorImageUrlField"];
			if(o != null) return (string)o;
			return "";
		}
		set {
			ViewState ["SeparatorImageUrlField"] = value;
		}
	}

	// IStateManager: state round-trips through the single ViewState bag.
	void IStateManager.LoadViewState (object savedState)
	{
		ViewState.LoadViewState (savedState);
	}

	object IStateManager.SaveViewState ()
	{
		return ViewState.SaveViewState();
	}

	void IStateManager.TrackViewState ()
	{
		ViewState.TrackViewState ();
	}

	bool IStateManager.IsTrackingViewState {
		get { return ViewState.IsTrackingViewState; }
	}

	[MonoTODO]
	object IDataSourceViewSchemaAccessor.DataSourceViewSchema {
		get { throw new NotImplementedException (); }
		set { throw new NotImplementedException (); }
	}

	// Shallow clone: copies every view-state entry into a fresh binding.
	object ICloneable.Clone ()
	{
		MenuItemBinding bin = new MenuItemBinding ();
		foreach (DictionaryEntry e in ViewState)
			bin.ViewState [(string)e.Key] = e.Value;
		return bin;
	}

	// Marks every stored entry dirty so it is always serialized on save.
	internal void SetDirty ()
	{
		foreach (string key in ViewState.Keys)
			ViewState.SetItemDirty (key, true);
	}
}
#endif
| |
// ---------------------------------------------------------------------------
// <copyright file="WellKnownFolderName.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// ---------------------------------------------------------------------------
//-----------------------------------------------------------------------
// <summary>Defines the WellKnownFolderName enumeration.</summary>
//-----------------------------------------------------------------------
namespace Microsoft.Exchange.WebServices.Data
{
using System;
using System.Collections.Generic;
using System.Text;
// The values in this enumeration must match the values of the
// DistinguishedFolderIdNameType type in the schema.
/// <summary>
/// Defines well known folder names.
/// </summary>
public enum WellKnownFolderName
{
    /// <summary>
    /// The Calendar folder.
    /// </summary>
    [EwsEnum("calendar")]
    Calendar,

    /// <summary>
    /// The Contacts folder.
    /// </summary>
    [EwsEnum("contacts")]
    Contacts,

    /// <summary>
    /// The Deleted Items folder.
    /// </summary>
    [EwsEnum("deleteditems")]
    DeletedItems,

    /// <summary>
    /// The Drafts folder.
    /// </summary>
    [EwsEnum("drafts")]
    Drafts,

    /// <summary>
    /// The Inbox folder.
    /// </summary>
    [EwsEnum("inbox")]
    Inbox,

    /// <summary>
    /// The Journal folder.
    /// </summary>
    [EwsEnum("journal")]
    Journal,

    /// <summary>
    /// The Notes folder.
    /// </summary>
    [EwsEnum("notes")]
    Notes,

    /// <summary>
    /// The Outbox folder.
    /// </summary>
    [EwsEnum("outbox")]
    Outbox,

    /// <summary>
    /// The Sent Items folder.
    /// </summary>
    [EwsEnum("sentitems")]
    SentItems,

    /// <summary>
    /// The Tasks folder.
    /// </summary>
    [EwsEnum("tasks")]
    Tasks,

    /// <summary>
    /// The message folder root.
    /// </summary>
    [EwsEnum("msgfolderroot")]
    MsgFolderRoot,

    /// <summary>
    /// The root of the Public Folders hierarchy.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2007_SP1)]
    [EwsEnum("publicfoldersroot")]
    PublicFoldersRoot,

    /// <summary>
    /// The root of the mailbox.
    /// </summary>
    [EwsEnum("root")]
    Root,

    /// <summary>
    /// The Junk E-mail folder.
    /// </summary>
    [EwsEnum("junkemail")]
    JunkEmail,

    /// <summary>
    /// The Search Folders folder, also known as the Finder folder.
    /// </summary>
    [EwsEnum("searchfolders")]
    SearchFolders,

    /// <summary>
    /// The Voicemail folder.
    /// </summary>
    [EwsEnum("voicemail")]
    VoiceMail,

    /// <summary>
    /// The Dumpster 2.0 root folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("recoverableitemsroot")]
    RecoverableItemsRoot,

    /// <summary>
    /// The Dumpster 2.0 soft deletions folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("recoverableitemsdeletions")]
    RecoverableItemsDeletions,

    /// <summary>
    /// The Dumpster 2.0 versions folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("recoverableitemsversions")]
    RecoverableItemsVersions,

    /// <summary>
    /// The Dumpster 2.0 hard deletions folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("recoverableitemspurges")]
    RecoverableItemsPurges,

    /// <summary>
    /// The Dumpster 2.0 discovery hold folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013_SP1)]
    [EwsEnum("recoverableitemsdiscoveryholds")]
    RecoverableItemsDiscoveryHolds,

    /// <summary>
    /// The root of the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("archiveroot")]
    ArchiveRoot,

    /// <summary>
    /// The Inbox folder in the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013_SP1)]
    [EwsEnum("archiveinbox")]
    ArchiveInbox,

    /// <summary>
    /// The message folder root in the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("archivemsgfolderroot")]
    ArchiveMsgFolderRoot,

    /// <summary>
    /// The Deleted Items folder in the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("archivedeleteditems")]
    ArchiveDeletedItems,

    /// <summary>
    /// The Dumpster 2.0 root folder in the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("archiverecoverableitemsroot")]
    ArchiveRecoverableItemsRoot,

    /// <summary>
    /// The Dumpster 2.0 soft deletions folder in the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("archiverecoverableitemsdeletions")]
    ArchiveRecoverableItemsDeletions,

    /// <summary>
    /// The Dumpster 2.0 versions folder in the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("archiverecoverableitemsversions")]
    ArchiveRecoverableItemsVersions,

    /// <summary>
    /// The Dumpster 2.0 hard deletions folder in the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2010_SP1)]
    [EwsEnum("archiverecoverableitemspurges")]
    ArchiveRecoverableItemsPurges,

    /// <summary>
    /// The Dumpster 2.0 discovery hold folder in the archive mailbox.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013_SP1)]
    [EwsEnum("archiverecoverableitemsdiscoveryholds")]
    ArchiveRecoverableItemsDiscoveryHolds,

    /// <summary>
    /// The Sync Issues folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("syncissues")]
    SyncIssues,

    /// <summary>
    /// The Conflicts folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("conflicts")]
    Conflicts,

    /// <summary>
    /// The Local failures folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("localfailures")]
    LocalFailures,

    /// <summary>
    /// The Server failures folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("serverfailures")]
    ServerFailures,

    /// <summary>
    /// The Recipient Cache folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("recipientcache")]
    RecipientCache,

    /// <summary>
    /// The Quick Contacts folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("quickcontacts")]
    QuickContacts,

    /// <summary>
    /// Conversation history folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("conversationhistory")]
    ConversationHistory,

    /// <summary>
    /// AdminAuditLogs folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("adminauditlogs")]
    AdminAuditLogs,

    /// <summary>
    /// ToDo search folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("todosearch")]
    ToDoSearch,

    /// <summary>
    /// MyContacts folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("mycontacts")]
    MyContacts,

    /// <summary>
    /// Directory (GAL).
    /// It is not a mailbox folder. It only indicates any GAL operation.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013_SP1)]
    [EwsEnum("directory")]
    Directory,

    /// <summary>
    /// IMContactList folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("imcontactlist")]
    IMContactList,

    /// <summary>
    /// PeopleConnect folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("peopleconnect")]
    PeopleConnect,

    /// <summary>
    /// Favorites folder.
    /// </summary>
    [RequiredServerVersion(ExchangeVersion.Exchange2013)]
    [EwsEnum("favorites")]
    Favorites,

    //// Note when you adding new folder id here, please update sources\test\Services\src\ComponentTests\GlobalVersioningControl.cs
    //// IsExchange2013Folder method accordingly.
}
}
| |
// ********************************************************************************************************
// Product Name: DotSpatial.Projection
// Description: The basic module for MapWindow version 6.0
// ********************************************************************************************************
//
// The original content was ported from the C language from the 4.6 version of Proj4 libraries.
// Frank Warmerdam has released the full content of that version under the MIT license which is
// recognized as being approximately equivalent to public domain. The original work was done
// mostly by Gerald Evenden. The latest versions of the C libraries can be obtained here:
// http://trac.osgeo.org/proj/
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 8/13/2009 4:15:20 PM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
// Name | Date | Comment
// --------------------|------------|------------------------------------------------------------
// Ted Dunsford | 5/3/2010 | Updated project to DotSpatial.Projection and license to LGPL
// ********************************************************************************************************
using System;
namespace DotSpatial.Projections.Transforms
{
/// <summary>
/// Polyconic
/// </summary>
public class Polyconic : EllipticalTransform
{
#region Private Variables
private const double TOL = 1E-10;
private const double CONV = 1E-10;
private const int N_ITER = 10;
private const int ITER = 20;
private const double ITOL = 1E-12;
private double[] _en;
private double _ml0;
#endregion
#region Constructors
/// <summary>
/// Creates a new instance of Polygconic
/// </summary>
public Polyconic()
{
Proj4Name = "poly";
Name = "Polyconic";
}
#endregion
#region Methods
/// <summary>
/// Initializes the transform using the parameters from the specified coordinate system information
/// </summary>
/// <param name="projInfo">A ProjectionInfo class contains all the standard and custom parameters needed to initialize this transform</param>
protected override void OnInit(ProjectionInfo projInfo)
{
if (IsElliptical)
{
_en = Proj.Enfn(Es);
_ml0 = Proj.Mlfn(Phi0, Math.Sin(Phi0), Math.Cos(Phi0), _en);
}
else
{
_ml0 = -Phi0;
}
}
/// <inheritdoc />
protected override void EllipticalForward(double[] lp, double[] xy, int startIndex, int numPoints)
{
for (int i = startIndex; i < startIndex + numPoints; i++)
{
int phi = i * 2 + PHI;
int lam = i * 2 + LAMBDA;
int x = i * 2 + X;
int y = i * 2 + Y;
if (Math.Abs(lp[phi]) <= TOL)
{
xy[x] = lp[lam];
xy[y] = -_ml0;
}
else
{
double sp = Math.Sin(lp[phi]);
double cp;
double ms = Math.Abs(cp = Math.Cos(lp[phi])) > TOL ? Proj.Msfn(sp, cp, Es) / sp : 0;
xy[x] = ms * Math.Sin(lp[lam] *= sp);
xy[y] = (Proj.Mlfn(lp[phi], sp, cp, _en) - _ml0) + ms * (1 - Math.Cos(lp[lam]));
}
}
}
/// <inheritdoc />
protected override void SphericalForward(double[] lp, double[] xy, int startIndex, int numPoints)
{
for (int i = startIndex; i < startIndex + numPoints; i++)
{
int phi = i * 2 + PHI;
int lam = i * 2 + LAMBDA;
int x = i * 2 + X;
int y = i * 2 + Y;
if (Math.Abs(lp[phi]) <= TOL)
{
xy[x] = lp[lam];
xy[y] = _ml0;
}
else
{
double cot = 1 / Math.Tan(lp[phi]);
double e;
xy[x] = Math.Sin(e = lp[lam] * Math.Sin(lp[phi])) * cot;
xy[y] = lp[phi] - Phi0 + cot * (1 - Math.Cos(e));
}
}
}
/// <inheritdoc />
protected override void EllipticalInverse(double[] xy, double[] lp, int startIndex, int numPoints)
{
for (int i = startIndex; i < startIndex + numPoints; i++)
{
int phi = i * 2 + PHI;
int lam = i * 2 + LAMBDA;
int x = i * 2 + X;
int y = i * 2 + Y;
xy[y] += _ml0;
if (Math.Abs(xy[y]) <= TOL)
{
lp[lam] = xy[x];
lp[phi] = 0;
}
else
{
double c;
int j;
double r = xy[y] * xy[y] + xy[x] * xy[x];
for (lp[phi] = xy[y], j = ITER; j > 0; --j)
{
double sp = Math.Sin(lp[phi]);
double cp;
double s2Ph = sp * (cp = Math.Cos(lp[phi]));
if (Math.Abs(cp) < ITOL)
{
lp[lam] = double.NaN;
lp[phi] = double.NaN;
continue;
//ProjectionException(20);
}
double mlp;
c = sp * (mlp = Math.Sqrt(1 - Es * sp * sp)) / cp;
double ml = Proj.Mlfn(lp[phi], sp, cp, _en);
double mlb = ml * ml + r;
mlp = OneEs / (mlp * mlp * mlp);
double dPhi;
lp[phi] += (dPhi =
(ml + ml + c * mlb - 2 * xy[y] * (c * ml + 1)) / (Es * s2Ph * (mlb - 2 * xy[y] * ml) / c +
2 * (xy[y] - ml) * (c * mlp - 1 / s2Ph) - mlp - mlp));
if (Math.Abs(dPhi) <= ITOL)
break;
}
if (j == 0)
{
lp[lam] = double.NaN;
lp[phi] = double.NaN;
continue;
//ProjectionException(20);
}
c = Math.Sin(lp[phi]);
lp[lam] = Math.Asin(xy[x] * Math.Tan(lp[phi]) * Math.Sqrt(1 - Es * c * c)) / Math.Sin(lp[phi]);
}
}
}
/// <inheritdoc />
protected override void SphericalInverse(double[] xy, double[] lp, int startIndex, int numPoints)
{
for (int i = startIndex; i < startIndex + numPoints; i++)
{
int phi = i * 2 + PHI;
int lam = i * 2 + LAMBDA;
int x = i * 2 + X;
int y = i * 2 + Y;
if (Math.Abs(xy[y] = Phi0 + xy[y]) <= TOL)
{
lp[lam] = xy[x];
lp[phi] = 0;
}
else
{
lp[phi] = xy[y];
double b = xy[x] * xy[x] + xy[y] * xy[y];
int j = N_ITER;
double dphi;
do
{
double tp = Math.Tan(lp[phi]);
lp[phi] -= (dphi = (xy[y] * (lp[phi] * tp + 1) - lp[phi] -
.5 * (lp[phi] * lp[phi] + b) * tp) /
((lp[phi] - xy[y]) / tp - 1));
} while (Math.Abs(dphi) > CONV && --j > 0);
if (j == 0)
{
lp[lam] = double.NaN;
lp[phi] = double.NaN;
continue;
//ProjectionException(20);
}
lp[lam] = Math.Asin(xy[x] * Math.Tan(lp[phi])) / Math.Sin(lp[phi]);
}
}
}
#endregion
}
}
| |
//
// (C) Copyright 2003-2011 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//
using System;
using System.Collections.Generic;
using System.Text;
using System.Windows.Forms;
using Autodesk;
using Autodesk.Revit;
using Autodesk.Revit.DB;
using Autodesk.Revit.UI;
using Autodesk.Revit.DB.Structure;
namespace Revit.SDK.Samples.Loads.CS
{
/// <summary>
/// mainly deal class which give methods to connect Revit and the user operation on the form.
/// Wraps creation/deletion of LoadCombination and LoadUsage elements and keeps the
/// Loads data buffer (names, maps, usage lists) in sync with the document.
/// </summary>
public class LoadCombinationDeal
{
// Private Members
Loads m_dataBuffer; // Store the reference of Loads
Autodesk.Revit.ApplicationServices.Application m_revit; // Store the reference of revit
Autodesk.Revit.DB.Document m_document; // Store the reference of document
// Methods
/// <summary>
/// Default constructor of LoadCombinationDeal
/// </summary>
/// <param name="dataBuffer">Shared Loads data buffer supplying the Revit application.</param>
public LoadCombinationDeal(Loads dataBuffer)
{
m_dataBuffer = dataBuffer;
m_revit = dataBuffer.RevitApplication;
// Resolve the active document through the UI application wrapper.
UIApplication uiapplication = new UIApplication(m_revit);
m_document = uiapplication.ActiveUIDocument.Document;
}
/// <summary>
/// Find out all Load Combination and Usage in the existing document.
/// As specification require, prepare some Load Combination Usages if they are not in document
/// </summary>
public void PrepareData()
{
// Find out all Load Combination and Usage in the existing document.
IList<Element> elements = (new FilteredElementCollector(m_document)).OfClass(typeof(LoadCombination)).ToElements();
foreach (Element elem in elements)
{
LoadCombination combination = elem as LoadCombination;
if (null != combination)
{
// Add the Load Combination name.
m_dataBuffer.LoadCombinationNames.Add(combination.Name);
// Create LoadCombinationMap object.
LoadCombinationMap combinationMap = new LoadCombinationMap(combination);
// Add the LoadCombinationMap object to the array list.
m_dataBuffer.LoadCombinationMap.Add(combinationMap);
}
}
elements = (new FilteredElementCollector(m_document)).OfClass(typeof(LoadUsage)).ToElements();
foreach (Element elem in elements)
{
// Add Load Combination Usage information
LoadUsage usage = elem as LoadUsage;
if (null != usage)
{
// Add the Load Usage name
m_dataBuffer.LoadUsageNames.Add(usage.Name);
// Add the Load Usage object to a LoadUsageArray
m_dataBuffer.LoadUsages.Add(usage);
// Add the Load Usage information to UsageMap.
UsageMap usageMap = new UsageMap(m_dataBuffer, usage.Name);
m_dataBuffer.UsageMap.Add(usageMap);
}
}
// As specification require, some Load Combination Usages if they are not in document.
// NewLoadUsage silently returns false for names that already exist, so this is idempotent.
String[] initUsageArray = { "Gravity", "Lateral", "Steel", "Composite", "Concrete"};
foreach (String s in initUsageArray)
{
NewLoadUsage(s);
}
}
/// <summary>
/// Create new Load Combination
/// </summary>
/// <param name="name">The new Load Combination name</param>
/// <param name="typeId">The index of new Load Combination Type</param>
/// <param name="stateId">The index of new Load Combination State</param>
/// <returns>true if the creation was successful; otherwise, false (see ErrorInformation)</returns>
public Boolean NewLoadCombination(String name, int typeId, int stateId)
{
// Define some data for creation.
LoadUsageArray usageArray = new LoadUsageArray();
LoadCaseArray caseArray = new LoadCaseArray();
LoadCombinationArray combinations = new LoadCombinationArray();
double[] factorArray = new double[m_dataBuffer.FormulaMap.Count];
// First check whether the name has been used (a null name is rejected the same way).
foreach (String s in m_dataBuffer.LoadCombinationNames)
{
if (s == name || null == name)
{
m_dataBuffer.ErrorInformation = "the combination name has been used.";
return false;
}
}
// Get the usage information: collect every usage the user checked on the form.
foreach (UsageMap usageMap in m_dataBuffer.UsageMap)
{
if (true == usageMap.Set)
{
LoadUsage usage = FindUsageByName(usageMap.Name);
if (null != usage)
{
usageArray.Append(usage);
}
}
}
// Get the formula information: parallel arrays of factors and load cases.
for (int i = 0; i < m_dataBuffer.FormulaMap.Count; i++)
{
FormulaMap formulaMap = m_dataBuffer.FormulaMap[i];
factorArray[i] = formulaMap.Factor;
LoadCase loadCase = FindLoadCaseByName(formulaMap.Case);
if(null != loadCase)
{
caseArray.Append(loadCase);
}
}
// Begin to new a load combination
try
{
LoadCombination loadCombination = m_document.Create.NewLoadCombination(name,
typeId, stateId, factorArray, caseArray, combinations, usageArray);
if (null == loadCombination)
{
m_dataBuffer.ErrorInformation = "Get null reference after usage creation.";
return false;
}
// Store this load combination information for further use
m_dataBuffer.LoadCombinationNames.Add(loadCombination.Name);
LoadCombinationMap combinationMap = new LoadCombinationMap(loadCombination);
m_dataBuffer.LoadCombinationMap.Add(combinationMap);
}
catch (Exception e)
{
m_dataBuffer.ErrorInformation = e.Message;
return false;
}
// If create combination successful, reset the usage check state and clear the formula
foreach (UsageMap usageMap in m_dataBuffer.UsageMap)
{
usageMap.Set = false;
}
m_dataBuffer.FormulaMap.Clear();
return true;
}
/// <summary>
/// Delete the selected Load Combination
/// </summary>
/// <param name="index">The selected index in the DataGridView</param>
/// <returns>true if the delete operation was successful; otherwise, false</returns>
public Boolean DeleteCombination(int index)
{
// Get the name of the delete combination
String combinationName = m_dataBuffer.LoadCombinationNames[index];
// Find the combination by the name and delete the combination
LoadCombination combination;
IList<Element> elements = (new FilteredElementCollector(m_document)).OfClass(typeof(LoadCombination)).ToElements();
foreach (Element elem in elements)
{
// NOTE(review): the 'as' result is dereferenced without a null check; safe only
// because the collector is filtered to LoadCombination elements.
combination = elem as LoadCombination;
if (combinationName == combination.Name)
{
// Begin to delete the combination
try
{
m_document.Delete(combination);
}
catch (Exception e)
{
m_dataBuffer.ErrorInformation = e.ToString();
return false;
}
break;
}
}
// If delete is successful, Change the map and the string List
m_dataBuffer.LoadCombinationMap.RemoveAt(index);
m_dataBuffer.LoadCombinationNames.RemoveAt(index);
return true;
}
/// <summary>
/// Create a new load combination usage
/// </summary>
/// <param name="usageName">The new Load Usage name</param>
/// <returns>true if the process is successful; otherwise, false (see ErrorInformation)</returns>
public Boolean NewLoadUsage(String usageName)
{
// First check whether the name has been used
foreach (String s in m_dataBuffer.LoadUsageNames)
{
if(usageName == s)
{
m_dataBuffer.ErrorInformation = "the usage name has been used.";
return false;
}
}
// Begin to new a load combination usage
try
{
LoadUsage loadUsage = m_document.Create.NewLoadUsage(usageName);
if (null == loadUsage)
{
m_dataBuffer.ErrorInformation = "Get null reference after usage creation.";
return false;
}
// Store this load usage information for further use.
m_dataBuffer.LoadUsageNames.Add(loadUsage.Name);
m_dataBuffer.LoadUsages.Add(loadUsage);
// Add the Load Usage information to UsageMap.
UsageMap usageMap = new UsageMap(m_dataBuffer, loadUsage.Name);
m_dataBuffer.UsageMap.Add(usageMap);
}
catch (Exception e)
{
m_dataBuffer.ErrorInformation = e.Message;
return false;
}
return true;
}
/// <summary>
/// Delete the selected Load Usage
/// </summary>
/// <param name="index">The selected index in the DataGridView</param>
/// <returns>true if the delete operation was successful; otherwise, false</returns>
public Boolean DeleteUsage(int index)
{
// Get the delete usage
LoadUsage deleteUsage = m_dataBuffer.LoadUsages[index];
String usageName = deleteUsage.Name;
// Begin to delete the combination
try
{
m_document.Delete(deleteUsage);
}
catch (Exception e)
{
m_dataBuffer.ErrorInformation = e.ToString();
return false;
}
// Modify the data to show the delete operation
m_dataBuffer.LoadUsages.RemoveAt(index);
m_dataBuffer.LoadUsageNames.RemoveAt(index);
m_dataBuffer.UsageMap.RemoveAt(index);
// Need to delete corresponding in Combination.
// map.Usage holds the combination's usage names joined by a one-character
// separator (presumably) — TODO confirm against LoadCombinationMap; hence the
// "usageName.Length + 1" removals below also strip one separator character.
foreach (LoadCombinationMap map in m_dataBuffer.LoadCombinationMap)
{
String oldUsage = map.Usage;
int location = oldUsage.IndexOf(usageName);
if (-1 == location)
{
// This combination does not reference the deleted usage.
continue;
}
if (oldUsage.Length == usageName.Length)
{
// The deleted usage was the only entry: Remove(0) clears the string.
map.Usage = oldUsage.Remove(0);
continue;
}
if (0 == location)
{
// Leading entry: remove the name plus the separator that follows it.
map.Usage = oldUsage.Remove(location, usageName.Length + 1);
}
else
{
// Interior/trailing entry: remove the separator that precedes it plus the name.
map.Usage = oldUsage.Remove(location - 1, usageName.Length + 1);
}
}
return true;
}
/// <summary>
/// Change usage name when the user modify it on the form
/// </summary>
/// <param name="oldName">The name before modification</param>
/// <param name="newName">The name after modification</param>
/// <returns>true if the modification was successful; otherwise, false</returns>
public Boolean ModifyUsageName(String oldName, String newName)
{
// If the name is no change, just return true.
if (oldName == newName)
{
return true;
}
// Check whether the name has been used
foreach (String s in m_dataBuffer.LoadUsageNames)
{
if (s == newName)
{
MessageBox.Show("There is a same named usage already.");
return false;
}
}
// Begin to modify the name of the usage by writing the built-in name parameter.
foreach (LoadUsage usage in m_dataBuffer.LoadUsages)
{
if (oldName == usage.Name)
{
usage.get_Parameter(BuiltInParameter.LOAD_USAGE_NAME).Set(newName);
}
}
return true;
}
/// <summary>
/// Add a formula with the load case name
/// </summary>
/// <param name="caseName">The name of the load case</param>
/// <returns>true if the creation is successful; otherwise, false (always true today)</returns>
public Boolean AddFormula(String caseName)
{
// New a FormulaMap, and add it to m_dataBuffer.FormulaMap
// Note: the factor of the formula is always set 1
FormulaMap map = new FormulaMap(caseName);
m_dataBuffer.FormulaMap.Add(map);
return true;
}
/// <summary>
/// Find a load usage by the load usage name
/// </summary>
/// <param name="name">The name of load usage</param>
/// <returns>The reference of the LoadUsage, or null when no usage matches</returns>
private LoadUsage FindUsageByName(String name)
{
LoadUsage usage = null;
foreach (LoadUsage l in m_dataBuffer.LoadUsages)
{
if (name == l.Name)
{
usage = l;
break;
}
}
return usage;
}
/// <summary>
/// Find a load case by the load case name
/// </summary>
/// <param name="name">The name of load case</param>
/// <returns>The reference of the LoadCase, or null when no case matches</returns>
private LoadCase FindLoadCaseByName(String name)
{
LoadCase loadCase = null;
foreach (LoadCase l in m_dataBuffer.LoadCases)
{
if (name == l.Name)
{
loadCase = l;
break;
}
}
return loadCase;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
namespace ProjectEuler.Problems
{
class Problem54
{
    // Card rank character -> numeric value. Run() remaps T/J/Q/K/A to V/W/X/Y/Z so
    // that plain lexicographic character order coincides with rank order.
    private Dictionary<char, int> card;

    // Hand category -> base score. Categories are spaced 100 apart so a higher
    // category always outranks any per-card tie-break value added to the base.
    private Dictionary<string, int> hand;

    public Problem54()
    {
        card = new Dictionary<char, int>
        {
            { '2', 2 },
            { '3', 3 },
            { '4', 4 },
            { '5', 5 },
            { '6', 6 },
            { '7', 7 },
            { '8', 8 },
            { '9', 9 },
            { 'V', 10 }, // Ten
            { 'W', 11 }, // Jack
            { 'X', 12 }, // Queen
            { 'Y', 13 }, // King
            { 'Z', 14 }  // Ace
        };
        hand = new Dictionary<string, int>
        {
            { "High Card", 0 },
            { "One Pair", 100 },
            { "Two Pairs", 200 },
            { "Three of a Kind", 300 },
            { "Straight", 400 },
            { "Flush", 500 },
            { "Full House", 600 },
            { "Four of a Kind", 700 },
            { "Straight Flush", 800 },
            { "Royal Flush", 900 }
        };
    }

    /// <summary>
    /// Scores both five-card hands on one input line and reports whether player 1 wins.
    /// </summary>
    /// <param name="plays">30-character line: player 1's cards are the first 15
    /// characters, player 2's the remainder (after rank remapping in Run).</param>
    private bool Player1Wins(string plays)
    {
        string play1 = plays.Substring(0, 15);
        string play2 = plays.Substring(15);
        int play1Value = Eval(ref play1);
        int play2Value = Eval(ref play2);
        if (play1Value > play2Value)
        {
            if (play1Value > 0 && play2Value > 0)
            {
                Console.WriteLine(play1 + " > " + play2);
            }
            return true;
        }
        if (play1Value < play2Value && play1Value > 0 && play2Value > 0)
        {
            Console.WriteLine(play1 + " < " + play2);
        }
        return false;
    }

    /// <summary>
    /// Scores a five-card hand; a higher score beats a lower one. The hand string is
    /// replaced (via ref) by its space-separated, rank-sorted form for later printing.
    /// Ace-low straights (A-2-3-4-5) are not recognized — acceptable for the
    /// Project Euler 54 data set, which always yields a decisive comparison.
    /// </summary>
    /// <param name="playString">Five cards like "8C VS YC 9H 4S" (ranks already remapped).</param>
    /// <returns>Category base score plus a card-value tie-break.</returns>
    private int Eval(ref string playString)
    {
        // Split into cards, dropping empty tokens, then sort; because ranks were
        // remapped, character order equals rank order.
        List<string> play = new List<string>();
        foreach (string p in playString.Split(' '))
        {
            if (p != "")
            {
                play.Add(p);
            }
        }
        play.Sort();
        string sortedPlay = "";
        foreach (string p in play)
        {
            sortedPlay += p + " ";
        }
        playString = sortedPlay;
        // Flush family: all five suit characters (index 1) identical.
        if (play[0][1] == play[1][1]
            && play[0][1] == play[2][1]
            && play[0][1] == play[3][1]
            && play[0][1] == play[4][1])
        {
            // Royal Flush: ten through ace after remapping.
            if (play[0][0] == 'V'
                && play[1][0] == 'W'
                && play[2][0] == 'X'
                && play[3][0] == 'Y'
                && play[4][0] == 'Z')
            {
                return hand["Royal Flush"];
            }
            // Straight Flush: five consecutive ranks.
            else if (
                card[play[0][0]] + 1 == card[play[1][0]]
                && card[play[0][0]] + 2 == card[play[2][0]]
                && card[play[0][0]] + 3 == card[play[3][0]]
                && card[play[0][0]] + 4 == card[play[4][0]]
                )
            {
                return hand["Straight Flush"] + card[play[4][0]];
            }
            // Regular Flush: tie-break on the highest card.
            else
            {
                return hand["Flush"] + card[play[4][0]];
            }
        }
        else
        {
            // Four of a Kind: the middle three match, plus either neighbor.
            if (play[1][0] == play[2][0] && play[2][0] == play[3][0] &&
                (play[0][0] == play[1][0] || play[3][0] == play[4][0]))
            {
                return hand["Four of a Kind"] + card[play[1][0]];
            }
            // Full House (triple low / pair high).
            else if (play[0][0] == play[1][0] && play[1][0] == play[2][0] && play[3][0] == play[4][0])
            {
                return hand["Full House"] + 10 * card[play[2][0]] + card[play[4][0]];
            }
            // Full House (pair low / triple high).
            else if (play[0][0] == play[1][0] && play[2][0] == play[3][0] && play[3][0] == play[4][0])
            {
                return hand["Full House"] + 10 * card[play[2][0]] + card[play[0][0]];
            }
            // Straight: five consecutive ranks, mixed suits.
            else if (card[play[0][0]] + 1 == card[play[1][0]]
                && card[play[0][0]] + 2 == card[play[2][0]]
                && card[play[0][0]] + 3 == card[play[3][0]]
                && card[play[0][0]] + 4 == card[play[4][0]])
            {
                return hand["Straight"] + card[play[4][0]];
            }
            // Three of a Kind: in sorted order the triple is always contiguous,
            // so play[2] is always part of it.
            else if (
                play[0][0] == play[1][0] && play[1][0] == play[2][0]
                || play[1][0] == play[2][0] && play[2][0] == play[3][0]
                || play[2][0] == play[3][0] && play[3][0] == play[4][0])
            {
                return hand["Three of a Kind"] + card[play[2][0]];
            }
            // Two Pairs: the three possible positions of two pairs in sorted order;
            // tie-break on the higher pair.
            else if (play[0][0] == play[1][0] && play[2][0] == play[3][0])
            {
                int max = card[play[0][0]] > card[play[3][0]] ? card[play[0][0]] : card[play[3][0]];
                return hand["Two Pairs"] + max;
            }
            else if (play[0][0] == play[1][0] && play[3][0] == play[4][0])
            {
                int max = card[play[0][0]] > card[play[4][0]] ? card[play[0][0]] : card[play[4][0]];
                return hand["Two Pairs"] + max;
            }
            else if (play[1][0] == play[2][0] && play[3][0] == play[4][0])
            {
                int max = card[play[1][0]] > card[play[4][0]] ? card[play[1][0]] : card[play[4][0]];
                return hand["Two Pairs"] + max;
            }
            // One Pair: tie-break on the pair's rank.
            else if (play[0][0] == play[1][0])
            {
                return hand["One Pair"] + card[play[0][0]];
            }
            else if (play[1][0] == play[2][0])
            {
                return hand["One Pair"] + card[play[1][0]];
            }
            else if (play[2][0] == play[3][0])
            {
                return hand["One Pair"] + card[play[2][0]];
            }
            else if (play[3][0] == play[4][0])
            {
                return hand["One Pair"] + card[play[3][0]];
            }
            else
            {
                // High Card: tie-break on the highest rank in the hand.
                int max = 0;
                for (int i = 0; i < play.Count; i++)
                {
                    if (card[play[i][0]] > max)
                    {
                        max = card[play[i][0]];
                    }
                }
                return hand["High Card"] + max;
            }
        }
    }

    /// <summary>
    /// Counts how many of the poker hands in Input/p054_poker.txt player 1 wins.
    /// </summary>
    /// <returns>The win count as a string; "0" when the file cannot be read.</returns>
    public string Run()
    {
        int count = 0;
        try
        {
            using (StreamReader sr = new StreamReader("Input/p054_poker.txt"))
            {
                string bothHands;
                // Read until EOF instead of assuming exactly 1000 lines: the original
                // do/while dereferenced the null returned by ReadLine() at end of
                // stream on shorter files (NullReferenceException swallowed below).
                while ((bothHands = sr.ReadLine()) != null)
                {
                    if (bothHands.Length > 15)
                    {
                        // Remap multi-character-ambiguous ranks so char order == rank order.
                        bothHands = bothHands.Replace("T", "V").Replace("J", "W").Replace("Q", "X").Replace("K", "Y").Replace("A", "Z");
                        if (Player1Wins(bothHands))
                        {
                            count++;
                        }
                    }
                }
            }
        }
        catch (Exception e)
        {
            Console.WriteLine("The file could not be read:");
            Console.WriteLine(e.Message);
        }
        return count.ToString();
    }
}
}
| |
// The MIT License (MIT)
// Copyright 2015 Siney/Pangweiwei siney@yeah.net
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
namespace SLua
{
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System;
using System.Reflection;
using UnityEditor;
using LuaInterface;
using System.Text;
using System.Text.RegularExpressions;
/// <summary>
/// Unity editor menu entry points that drive generation (and clearing) of the SLua
/// C# binding sources under <see cref="Path"/>.
/// </summary>
public class LuaCodeGen : MonoBehaviour
{
    // Root folder for all generated binding sources.
    public const string Path = "Assets/LuaObject/";

    // Callback used by CustomExport to request generation of an extra type.
    public delegate void ExportGenericDelegate(Type t, string ns);

    // Suppresses the per-step AssetDatabase.Refresh while GenerateAll runs a batch;
    // a single refresh is issued at the end instead.
    static bool autoRefresh = true;

    // True while the editor is compiling scripts; generation is skipped then.
    static bool IsCompiling {
        get {
            if (EditorApplication.isCompiling) {
                Debug.Log("Unity Editor is compiling, please wait.");
            }
            return EditorApplication.isCompiling;
        }
    }

    [InitializeOnLoad]
    public class Startup
    {
        // Runs when the editor loads: offers to generate the bindings when the
        // output folder does not exist yet.
        static Startup()
        {
            bool ok = System.IO.Directory.Exists(Path);
            if (!ok && EditorUtility.DisplayDialog("Slua", "Not found lua interface for Unity, generate it now?", "Generate", "No"))
            {
                GenerateAll();
            }
        }
    }

    /// <summary>Generates every binding category in one batch with a single asset refresh.</summary>
    [MenuItem("SLua/All/Make")]
    static public void GenerateAll()
    {
        autoRefresh = false;
        Generate();
        GenerateUI();
        Custom();
        Generate3rdDll();
        autoRefresh = true;
        AssetDatabase.Refresh();
    }

    /// <summary>Generates bindings for the UnityEngine assembly's exported types.</summary>
    [MenuItem("SLua/Unity/Make UnityEngine")]
    static public void Generate()
    {
        if (IsCompiling) {
            return;
        }
        Assembly assembly = Assembly.Load("UnityEngine");
        Type[] types = assembly.GetExportedTypes();
        List<string> uselist;
        List<string> noUseList;
        // NOTE(review): assumes CustomExport never outputs a null list — TODO confirm.
        CustomExport.OnGetNoUseList(out noUseList);
        CustomExport.OnGetUseList(out uselist);
        List<Type> exports = new List<Type>();
        string path = Path + "Unity/";
        foreach (Type t in types)
        {
            bool export = true;
            // A non-empty use-list is an explicit whitelist; otherwise the
            // no-use-list acts as a substring blacklist on the full type name.
            if (uselist != null && uselist.Count > 0)
            {
                export = false;
                foreach (string str in uselist)
                {
                    if (t.FullName == str)
                    {
                        export = true;
                        break;
                    }
                }
            }
            else
            {
                // check type not in nouselist
                foreach (string str in noUseList)
                {
                    if (t.FullName.Contains(str))
                    {
                        export = false;
                        break;
                    }
                }
            }
            if (export)
            {
                if (Generate(t, path))
                    exports.Add(t);
            }
        }
        GenerateBind(exports, "BindUnity", 0, path);
        if (autoRefresh)
            AssetDatabase.Refresh();
        Debug.Log("Generate engine interface finished");
    }

    /// <summary>Generates bindings for UnityEngine.UI, skipping a few known-bad types.</summary>
    [MenuItem("SLua/Unity/Make UI (for Unity4.6+)")]
    static public void GenerateUI()
    {
        if (IsCompiling) {
            return;
        }
        List<string> noUseList = new List<string>
        {
            "CoroutineTween",
            "GraphicRebuildTracker",
        };
        Assembly assembly = Assembly.Load("UnityEngine.UI");
        Type[] types = assembly.GetExportedTypes();
        List<Type> exports = new List<Type>();
        string path = Path + "Unity/";
        foreach (Type t in types)
        {
            bool export = true;
            foreach (string str in noUseList)
            {
                if (t.FullName.Contains(str))
                    export = false;
            }
            if (export)
            {
                if (Generate(t, path))
                    exports.Add(t);
            }
        }
        GenerateBind(exports, "BindUnityUI", 1, path);
        if (autoRefresh)
            AssetDatabase.Refresh();
        Debug.Log("Generate UI interface finished");
    }

    /// <summary>Deletes the generated Unity/UI binding sources.</summary>
    // Fixed menu-label typo: "Clear Uinty UI" -> "Clear Unity UI".
    [MenuItem("SLua/Unity/Clear Unity UI")]
    static public void ClearUnity()
    {
        clear(new string[] { Path + "Unity" });
        Debug.Log("Clear Unity & UI complete.");
    }

    /// <summary>True when the member carries [Obsolete].</summary>
    static public bool IsObsolete(MemberInfo t)
    {
        return t.GetCustomAttributes(typeof(ObsoleteAttribute), false).Length > 0;
    }

    /// <summary>Generates bindings for project types marked [CustomLuaClass] plus CustomExport additions.</summary>
    [MenuItem("SLua/Custom/Make")]
    static public void Custom()
    {
        if (IsCompiling) {
            return;
        }
        List<Type> exports = new List<Type>();
        string path = Path + "Custom/";
        if (!Directory.Exists(path))
        {
            Directory.CreateDirectory(path);
        }
        ExportGenericDelegate fun = (Type t, string ns) =>
        {
            if (Generate(t, ns, path))
                exports.Add(t);
        };
        // export self-dll
        Assembly assembly = Assembly.Load("Assembly-CSharp");
        Type[] types = assembly.GetExportedTypes();
        foreach (Type t in types)
        {
            if (t.GetCustomAttributes(typeof(CustomLuaClassAttribute), false).Length > 0)
            {
                fun(t, null);
            }
        }
        CustomExport.OnAddCustomClass(fun);
        GenerateBind(exports, "BindCustom", 3, path);
        if (autoRefresh)
            AssetDatabase.Refresh();
        Debug.Log("Generate custom interface finished");
    }

    /// <summary>Generates bindings for every exported type of assemblies named by CustomExport.</summary>
    [MenuItem("SLua/3rdDll/Make")]
    static public void Generate3rdDll()
    {
        if (IsCompiling) {
            return;
        }
        List<Type> cust = new List<Type>();
        Assembly assembly = Assembly.Load("Assembly-CSharp");
        Type[] types = assembly.GetExportedTypes();
        List<string> assemblyList = new List<string>();
        CustomExport.OnAddCustomAssembly(ref assemblyList);
        foreach (string assemblyItem in assemblyList)
        {
            assembly = Assembly.Load(assemblyItem);
            types = assembly.GetExportedTypes();
            foreach (Type t in types)
            {
                cust.Add(t);
            }
        }
        if (cust.Count > 0)
        {
            List<Type> exports = new List<Type>();
            string path = Path + "Dll/";
            if (!Directory.Exists(path))
            {
                Directory.CreateDirectory(path);
            }
            foreach (Type t in cust)
            {
                if (Generate(t, path))
                    exports.Add(t);
            }
            GenerateBind(exports, "BindDll", 2, path);
            if (autoRefresh)
                AssetDatabase.Refresh();
            Debug.Log("Generate 3rdDll interface finished");
        }
    }

    /// <summary>Deletes the generated third-party-assembly binding sources.</summary>
    [MenuItem("SLua/3rdDll/Clear")]
    static public void Clear3rdDll()
    {
        clear(new string[] { Path + "Dll" });
        Debug.Log("Clear AssemblyDll complete.");
    }

    /// <summary>Deletes the generated custom binding sources.</summary>
    [MenuItem("SLua/Custom/Clear")]
    static public void ClearCustom()
    {
        clear(new string[] { Path + "Custom" });
        Debug.Log("Clear custom complete.");
    }

    /// <summary>Deletes the whole generated-binding folder.</summary>
    [MenuItem("SLua/All/Clear")]
    static public void ClearALL()
    {
        // Trim the trailing '/' so the folder itself is deleted.
        clear(new string[] { Path.Substring(0, Path.Length - 1) });
        Debug.Log("Clear all complete.");
    }

    // Best-effort recursive delete of the given folders; failures (e.g. a folder
    // that does not exist) are deliberately ignored, then assets are refreshed.
    static void clear(string[] paths)
    {
        try
        {
            foreach (string path in paths)
            {
                System.IO.Directory.Delete(path, true);
            }
        }
        catch
        {
        }
        AssetDatabase.Refresh();
    }

    // Convenience overload without an explicit namespace.
    static bool Generate(Type t, string path)
    {
        return Generate(t, null, path);
    }

    // Generates one type's binding source; interfaces are never exported.
    static bool Generate(Type t, string ns, string path)
    {
        if (t.IsInterface)
            return false;
        CodeGenerator cg = new CodeGenerator();
        cg.givenNamespace = ns;
        cg.path = path;
        return cg.Generate(t);
    }

    // Emits the Bind<name>.cs registration file for the exported types.
    static void GenerateBind(List<Type> list, string name, int order, string path)
    {
        CodeGenerator cg = new CodeGenerator();
        cg.path = path;
        cg.GenerateBind(list, name, order);
    }
}
class CodeGenerator
{
// Members that must never get a generated binding: platform-missing APIs,
// il2cpp gaps, and Unity-version-specific removals. Entries are "Type.member".
static List<string> memberFilter = new List<string>
{
"AnimationClip.averageDuration",
"AnimationClip.averageAngularSpeed",
"AnimationClip.averageSpeed",
"AnimationClip.apparentSpeed",
"AnimationClip.isLooping",
"AnimationClip.isAnimatorMotion",
"AnimationClip.isHumanMotion",
"AnimatorOverrideController.PerformOverrideClipListCleanup",
"Caching.SetNoBackupFlag",
"Caching.ResetNoBackupFlag",
"Light.areaSize",
"Security.GetChainOfTrustValue",
"Texture2D.alphaIsTransparency",
"WWW.movie",
"WebCamTexture.MarkNonReadable",
"WebCamTexture.isReadable",
// i don't why below 2 functions missed in iOS platform
"Graphic.OnRebuildRequested",
"Text.OnRebuildRequested",
// il2cpp not exixts
"Application.ExternalEval",
"GameObject.networkView",
"Component.networkView",
// unity5
"AnimatorControllerParameter.name",
"Input.IsJoystickPreconfigured",
"Resources.LoadAssetAtPath",
#if UNITY_4_6
"Motion.ValidateIfRetargetable",
"Motion.averageDuration",
"Motion.averageAngularSpeed",
"Motion.averageSpeed",
"Motion.apparentSpeed",
"Motion.isLooping",
"Motion.isAnimatorMotion",
"Motion.isHumanMotion",
#endif
};
// Names of wrapper functions emitted so far for the current type (avoids duplicates).
HashSet<string> funcname = new HashSet<string>();
// Function name -> whether it is emitted as a direct (non-overloaded) call.
Dictionary<string, bool> directfunc = new Dictionary<string, bool>();
// Namespace override supplied by the caller (null = use the type's own namespace).
public string givenNamespace;
// Output folder for the generated source files.
public string path;
// Getter/setter wrapper names collected for one property while generating a type.
class PropPair
{
public string get = "null";
public string set = "null";
public bool isInstance = true;
}
// Property name -> its get/set wrapper pair for the current type.
Dictionary<string, PropPair> propname = new Dictionary<string, PropPair>();
// Current indentation level used by the Write helpers.
int indent = 0;
/// <summary>
/// Writes the Bind&lt;name&gt;.cs file that registers every exported type with Lua,
/// wrapped in a [LuaBinder(order)] class with a static Bind(IntPtr) entry point.
/// </summary>
/// <param name="list">Types that were exported in this generation run.</param>
/// <param name="name">Class/file name of the generated binder.</param>
/// <param name="order">Binder ordering value emitted into the LuaBinder attribute.</param>
public void GenerateBind(List<Type> list, string name, int order)
{
// NOTE(review): despite its name, this set tracks types whose reg() line has
// already been written (it is passed as WriteBindType's 'binded' parameter);
// the export list is 'list'.
HashSet<Type> exported = new HashSet<Type>();
string f = path + name + ".cs";
StreamWriter file = new StreamWriter(f, false, Encoding.UTF8);
Write(file, "using System;");
Write(file, "namespace SLua {");
Write(file, "[LuaBinder({0})]", order);
Write(file, "public class {0} {{", name);
Write(file, "public static void Bind(IntPtr l) {");
foreach (Type t in list)
{
WriteBindType(file, t, list, exported);
}
Write(file, "}");
Write(file, "}");
Write(file, "}");
file.Close();
}
/// <summary>
/// Emits the reg() registration line for <paramref name="t"/>, recursing so that
/// base types are registered before derived ones and no type is emitted twice.
/// </summary>
/// <param name="file">Writer for the generated bind source.</param>
/// <param name="t">Type to register; ignored when null, already bound, or not exported.</param>
/// <param name="exported">All types exported in this run.</param>
/// <param name="binded">Types whose registration line has already been written.</param>
void WriteBindType(StreamWriter file, Type t, List<Type> exported, HashSet<Type> binded)
{
    if (t == null || binded.Contains(t) || !exported.Contains(t))
        return;
    // Register the base chain first so Lua sees parents before children.
    WriteBindType(file, t.BaseType, exported, binded);
    // Fix: the original passed a stray extra 'binded' argument to this format
    // call; string.Format ignored it, but it was dead and misleading.
    Write(file, "{0}.reg(l);", ExportName(t));
    binded.Add(t);
}
/// <summary>
/// Generates the binding source file(s) for one type. Enums, delegates and
/// ordinary classes each take a different code path; delegates and UnityEvent`1
/// subclasses get extra standalone wrapper files.
/// </summary>
/// <param name="t">The type to generate a binding for.</param>
/// <returns>true when the type should be registered in the bind file; false for
/// skipped types and for delegates (which only produce a wrapper file).</returns>
public bool Generate(Type t)
{
if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
}
// Skip open generics, [Obsolete] types and coroutine primitives — except that a
// MulticastDelegate subclass is always handled (second half of the condition).
if (!t.IsGenericTypeDefinition && (!IsObsolete(t) && t != typeof(YieldInstruction) && t != typeof(Coroutine))
|| (t.BaseType != null && t.BaseType == typeof(System.MulticastDelegate)))
{
if (t.IsEnum)
{
StreamWriter file = Begin(t);
WriteHead(t, file);
RegEnumFunction(t, file);
End(file);
}
else if (t.BaseType == typeof(System.MulticastDelegate))
{
// Delegates get a dedicated LuaDelegate_*/Lua<Generic>_* wrapper file and
// are NOT added to the bind list (returns false below).
string f;
if (t.IsGenericType)
{
// Unbound generic delegates cannot be wrapped.
if (t.ContainsGenericParameters)
return false;
f = path + string.Format("Lua{0}_{1}.cs", _Name(GenericBaseName(t)), _Name(GenericName(t)));
}
else
{
f = path + "LuaDelegate_" + _Name(t.FullName) + ".cs";
}
StreamWriter file = new StreamWriter(f, false, Encoding.UTF8);
WriteDelegate(t, file);
file.Close();
return false;
}
else
{
// Ordinary class/struct: reset per-type state, then emit constructor,
// instance and static functions, fields/properties and the reg function.
funcname.Clear();
propname.Clear();
directfunc.Clear();
StreamWriter file = Begin(t);
WriteHead(t, file);
WriteConstructor(t, file);
WriteFunction(t, file);
WriteFunction(t, file, true);
WriteField(t, file);
RegFunction(t, file);
End(file);
if (t.BaseType != null && t.BaseType.Name == "UnityEvent`1")
{
// UnityEvent<T> subclasses also need a LuaUnityEvent_* helper file.
string f = path + "LuaUnityEvent_" + _Name(GenericName(t.BaseType)) + ".cs";
file = new StreamWriter(f, false, Encoding.UTF8);
WriteEvent(t, file);
file.Close();
}
}
return true;
}
return false;
}
/// <summary>
/// Emits the LuaDelegation.checkDelegate overload that converts a Lua value on the
/// stack into a strongly-typed delegate of type <paramref name="t"/>. The template
/// below handles nil, userdata and LuaDelegate cases; the code appended afterwards
/// builds the lambda body that marshals arguments and results through pcall.
/// </summary>
void WriteDelegate(Type t, StreamWriter file)
{
// Verbatim template; $FN / $ARGS are substituted below ($TN is substituted
// too but does not occur in this template).
string temp = @"
using System;
using System.Collections.Generic;
using LuaInterface;
using UnityEngine;
namespace SLua
{
public partial class LuaDelegation : LuaObject
{
static internal int checkDelegate(IntPtr l,int p,out $FN ua) {
int op = extractFunction(l,p);
if(LuaDLL.lua_isnil(l,p)) {
ua=null;
return op;
}
else if (LuaDLL.lua_isuserdata(l, p)==1)
{
ua = ($FN)checkObj(l, p);
return op;
}
LuaDelegate ld;
checkType(l, -1, out ld);
if(ld.d!=null)
{
ua = ($FN)ld.d;
return op;
}
LuaDLL.lua_pop(l,1);
l = LuaState.get(l).L;
ua = ($ARGS) =>
{
int error = pushTry(l);
";
temp = temp.Replace("$TN", t.Name);
temp = temp.Replace("$FN", SimpleType(t));
// The delegate's signature comes from its Invoke method.
MethodInfo mi = t.GetMethod("Invoke");
List<int> outindex = new List<int>();
List<int> refindex = new List<int>();
temp = temp.Replace("$ARGS", ArgsList(mi, ref outindex, ref refindex));
Write(file, temp);
this.indent = 4;
// Push every argument except pure 'out' parameters onto the Lua stack.
for (int n = 0; n < mi.GetParameters().Length; n++)
{
if (!outindex.Contains(n))
Write(file, "pushValue(l,a{0});", n + 1);
}
Write(file, "ld.pcall({0}, error);", mi.GetParameters().Length - outindex.Count);
// Read back the return value first, then out/ref parameters, from the stack.
if (mi.ReturnType != typeof(void))
WriteValueCheck(file, mi.ReturnType, 1, "ret", "error+");
foreach (int i in outindex)
{
string a = string.Format("a{0}", i + 1);
WriteCheckType(file, mi.GetParameters()[i].ParameterType, i + 1, a, "error+");
}
foreach (int i in refindex)
{
string a = string.Format("a{0}", i + 1);
WriteCheckType(file, mi.GetParameters()[i].ParameterType, i + 1, a, "error+");
}
// Restore the Lua stack and close the generated lambda/method/class/namespace.
Write(file, "LuaDLL.lua_settop(l, error-1);");
if (mi.ReturnType != typeof(void))
Write(file, "return ret;");
Write(file, "};");
Write(file, "ld.d=ua;");
Write(file, "return op;");
Write(file, "}");
Write(file, "}");
Write(file, "}");
}
// Builds the C# parameter list (e.g. "int a1,out float a2") for the
// delegate's Invoke method, recording the positions of out and ref
// parameters in the caller-supplied index lists.
string ArgsList(MethodInfo m, ref List<int> outindex, ref List<int> refindex)
{
    StringBuilder sb = new StringBuilder();
    ParameterInfo[] pars = m.GetParameters();
    for (int i = 0; i < pars.Length; i++)
    {
        if (i > 0)
            sb.Append(",");
        ParameterInfo p = pars[i];
        string typeName = SimpleType(p.ParameterType);
        if (p.ParameterType.IsByRef && p.IsOut)
        {
            sb.AppendFormat("out {0} a{1}", typeName, i + 1);
            outindex.Add(i);
        }
        else if (p.ParameterType.IsByRef)
        {
            sb.AppendFormat("ref {0} a{1}", typeName, i + 1);
            refindex.Add(i);
        }
        else
        {
            sb.AppendFormat("{0} a{1}", typeName, i + 1);
        }
    }
    return sb.ToString();
}
// If t is a delegate type, runs a nested generator so its LuaDelegation
// glue file is emitted alongside the type currently being exported.
void tryMake(Type t)
{
    if (t.BaseType != typeof(System.MulticastDelegate))
        return;
    CodeGenerator gen = new CodeGenerator();
    gen.path = this.path;
    gen.Generate(t);
}
// Emits the LuaUnityEvent_<T> helper class for a UnityEvent<T> base type:
// AddListener/RemoveListener/Invoke wrappers plus a checkType that adapts a
// Lua function into a UnityAction<T>. The whole class is a verbatim template
// with placeholders substituted at the end.
void WriteEvent(Type t, StreamWriter file)
{
string temp = @"
using System;
using System.Collections.Generic;
using LuaInterface;
using UnityEngine;
using UnityEngine.EventSystems;
namespace SLua
{
public class LuaUnityEvent_$CLS : LuaObject
{
[MonoPInvokeCallbackAttribute(typeof(LuaCSFunction))]
static public int AddListener(IntPtr l)
{
try
{
UnityEngine.Events.UnityEvent<$GN> self = checkSelf<UnityEngine.Events.UnityEvent<$GN>>(l);
UnityEngine.Events.UnityAction<$GN> a1;
checkType(l, 2, out a1);
self.AddListener(a1);
return 0;
}
catch (Exception e)
{
LuaDLL.luaL_error(l, e.ToString());
return 0;
}
}
[MonoPInvokeCallbackAttribute(typeof(LuaCSFunction))]
static public int RemoveListener(IntPtr l)
{
try
{
UnityEngine.Events.UnityEvent<$GN> self = checkSelf<UnityEngine.Events.UnityEvent<$GN>>(l);
UnityEngine.Events.UnityAction<$GN> a1;
checkType(l, 2, out a1);
self.RemoveListener(a1);
return 0;
}
catch (Exception e)
{
LuaDLL.luaL_error(l, e.ToString());
return 0;
}
}
[MonoPInvokeCallbackAttribute(typeof(LuaCSFunction))]
static public int Invoke(IntPtr l)
{
try
{
UnityEngine.Events.UnityEvent<$GN> self = checkSelf<UnityEngine.Events.UnityEvent<$GN>>(l);
$GN o;
checkType(l,2,out o);
self.Invoke(o);
return 0;
}
catch (Exception e)
{
LuaDLL.luaL_error(l, e.ToString());
return 0;
}
}
static public void reg(IntPtr l)
{
getTypeTable(l, typeof(LuaUnityEvent_$CLS).FullName);
addMember(l, AddListener);
addMember(l, RemoveListener);
addMember(l, Invoke);
createTypeMetatable(l, null, typeof(LuaUnityEvent_$CLS), typeof(UnityEngine.Events.UnityEventBase));
}
static bool checkType(IntPtr l,int p,out UnityEngine.Events.UnityAction<$GN> ua) {
LuaDLL.luaL_checktype(l, p, LuaTypes.LUA_TFUNCTION);
LuaDelegate ld;
checkType(l, p, out ld);
if (ld.d != null)
{
ua = (UnityEngine.Events.UnityAction<$GN>)ld.d;
return true;
}
l = LuaState.get(l).L;
ua = ($GN v) =>
{
int error = pushTry(l);
pushValue(l, v);
ld.pcall(1, error);
LuaDLL.lua_settop(l,error - 1);
};
ld.d = ua;
return true;
}
}
}";
// $CLS: sanitized generic-argument name; $GN: C# spelling of the generic
// argument. NOTE(review): $FNAME does not appear in the template above, so
// that replacement looks like a no-op — confirm before removing.
temp = temp.Replace("$CLS", _Name(GenericName(t.BaseType)));
temp = temp.Replace("$FNAME", FullName(t));
temp = temp.Replace("$GN", GenericName(t.BaseType));
Write(file, temp);
}
// Emits the enum registration function: fills a Lua table with the enum's
// name -> integer value pairs.
void RegEnumFunction(Type t, StreamWriter file)
{
// Write export function
Write(file, "static public void reg(IntPtr l) {");
Write(file, "getEnumTable(l,\"{0}\");", string.IsNullOrEmpty(givenNamespace) ? FullName(t) : givenNamespace);
FieldInfo[] fields = t.GetFields();
foreach (FieldInfo f in fields)
{
// "value__" is the synthetic backing field every enum carries; skip it.
if (f.Name == "value__") continue;
Write(file, "addMember(l,{0},\"{1}\");", (int)f.GetValue(null), f.Name);
}
Write(file, "LuaDLL.lua_pop(l, 1);");
Write(file, "}");
}
// Opens the UTF-8 output file for the exported wrapper class of t.
StreamWriter Begin(Type t)
{
    string target = path + ExportName(t) + ".cs";
    return new StreamWriter(target, false, Encoding.UTF8);
}
// Closes the class brace opened by WriteHead, then flushes and releases the
// output file.
private void End(StreamWriter file)
{
Write(file, "}");
file.Flush();
file.Close();
}
// Emits the fixed using block and the wrapper class declaration.
// NOTE(review): the generated class is placed in the global namespace.
private void WriteHead(Type t, StreamWriter file)
{
Write(file, "using UnityEngine;");
Write(file, "using System;");
Write(file, "using LuaInterface;");
Write(file, "using SLua;");
Write(file, "using System.Collections.Generic;");
Write(file, "public class {0} : LuaObject {{", ExportName(t));
}
// Emits wrappers for t's declared public methods — instance methods by
// default, static methods when writeStatic is true. Exported names are
// accumulated in `funcname` so RegFunction can register them later.
private void WriteFunction(Type t, StreamWriter file, bool writeStatic = false)
{
BindingFlags bf = BindingFlags.Public | BindingFlags.DeclaredOnly;
if (writeStatic)
bf |= BindingFlags.Static;
else
bf |= BindingFlags.Instance;
MethodInfo[] members = t.GetMethods(bf);
foreach (MethodInfo mi in members)
{
bool instanceFunc;
// Methods already written as raw Lua C functions are registered
// directly via `directfunc` instead of getting a generated wrapper.
if (writeStatic && isPInvoke(mi, out instanceFunc))
{
directfunc.Add(t.FullName + "." + mi.Name, instanceFunc);
continue;
}
// Static methods get a "_s" suffix to avoid clashing with instance names.
string fn = writeStatic ? staticName(mi.Name) : mi.Name;
if (mi.MemberType == MemberTypes.Method
&& !IsObsolete(mi)
&& !DontExport(mi)
&& !funcname.Contains(fn)
&& isUsefullMethod(mi)
&& !MemberInFilter(t, mi))
{
WriteFunctionDec(file, fn);
WriteFunctionImpl(file, mi, t, bf);
funcname.Add(fn);
}
}
}
// Detects methods that are already hand-written Lua C functions (tagged with
// MonoPInvokeCallbackAttribute). instanceFunc is false only when the method
// is additionally tagged with StaticExportAttribute.
bool isPInvoke(MethodInfo mi, out bool instanceFunc)
{
    instanceFunc = true;
    if (mi.GetCustomAttributes(typeof(MonoPInvokeCallbackAttribute), false).Length == 0)
        return false;
    instanceFunc = mi.GetCustomAttributes(typeof(StaticExportAttribute), false).Length == 0;
    return true;
}
// Name used in the static-method table: operator methods keep their CLR name,
// everything else gets a "_s" suffix so it cannot clash with the instance
// method of the same name.
string staticName(string name)
{
    // Ordinal comparison: "op_" is a CLR metadata prefix, not linguistic text
    // (culture-sensitive StartsWith can misbehave under some locales — CA1310).
    if (name.StartsWith("op_", StringComparison.Ordinal))
        return name;
    return name + "_s";
}
// True when "TypeName.MemberName" appears in the user-configured export filter.
bool MemberInFilter(Type t, MemberInfo mi)
{
    string key = t.Name + "." + mi.Name;
    return memberFilter.Contains(key);
}
// Forwards obsolete detection to LuaCodeGen so the rule lives in one place.
bool IsObsolete(MemberInfo mi)
{
return LuaCodeGen.IsObsolete(mi);
}
// Emits the reg() function that registers every accumulated method, direct
// function, and property pair, then creates the type metatable (chained to
// the base type's metatable unless the base is System.Object).
void RegFunction(Type t, StreamWriter file)
{
// Write export function
Write(file, "static public void reg(IntPtr l) {");
if (t.BaseType != null && t.BaseType.Name.Contains("UnityEvent`"))
{
// NOTE(review): the {0} argument (FullName(t)) is never referenced by
// this format string — looks vestigial; confirm before removing.
Write(file, "LuaUnityEvent_{1}.reg(l);", FullName(t), _Name((GenericName(t.BaseType))));
}
Write(file, "getTypeTable(l,\"{0}\");", string.IsNullOrEmpty(givenNamespace) ? FullName(t) : givenNamespace);
foreach (string f in funcname)
{
Write(file, "addMember(l,{0});", f);
}
foreach (string f in directfunc.Keys)
{
bool instance = directfunc[f];
Write(file, "addMember(l,{0},{1});", f, instance ? "true" : "false");
}
foreach (string f in propname.Keys)
{
PropPair pp = propname[f];
Write(file, "addMember(l,\"{0}\",{1},{2},{3});", f, pp.get, pp.set, pp.isInstance ? "true" : "false");
}
if (t.BaseType != null && !CutBase(t.BaseType))
{
if (t.BaseType.Name.Contains("UnityEvent`1"))
Write(file, "createTypeMetatable(l,{2}, typeof({0}),typeof(LuaUnityEvent_{1}));", TypeDecl(t), _Name(GenericName(t.BaseType)), constructorOrNot(t));
else
Write(file, "createTypeMetatable(l,{2}, typeof({0}),typeof({1}));", TypeDecl(t), TypeDecl(t.BaseType), constructorOrNot(t));
}
else
Write(file, "createTypeMetatable(l,{1}, typeof({0}));", TypeDecl(t), constructorOrNot(t));
Write(file, "}");
}
// Returns the literal to pass for the constructor slot of the metatable:
// "constructor" when the type can be new'ed from Lua, otherwise "null".
string constructorOrNot(Type t)
{
    bool constructible = GetValidConstructor(t).Length > 0 || t.IsValueType;
    return constructible ? "constructor" : "null";
}
// True when the inheritance chain should stop here (System.Object adds no
// useful members to the generated metatable chain).
bool CutBase(Type t)
{
    // Ordinal comparison: type names are identifiers, not linguistic text,
    // so culture-sensitive StartsWith is inappropriate here (CA1310).
    return t.FullName.StartsWith("System.Object", StringComparison.Ordinal);
}
// Emits the assignment statement(s) inside a generated setter. For
// delegate-typed members an `op` code selects the operation so Lua can
// subscribe/unsubscribe: 0 = assign, 1 = +=, 2 = -=.
void WriteSet(StreamWriter file, Type t, string cls, string fn, bool isstatic = false)
{
if (t.BaseType == typeof(MulticastDelegate))
{
if (isstatic)
{
Write(file, "if(op==0) {0}.{1}=v;", cls, fn);
Write(file, "else if(op==1) {0}.{1}+=v;", cls, fn);
Write(file, "else if(op==2) {0}.{1}-=v;", cls, fn);
}
else
{
Write(file, "if(op==0) self.{0}=v;", fn);
Write(file, "else if(op==1) self.{0}+=v;", fn);
Write(file, "else if(op==2) self.{0}-=v;", fn);
}
}
else
{
if (isstatic)
{
Write(file, "{0}.{1}=v;", cls, fn);
}
else
{
Write(file, "self.{0}=v;", fn);
}
}
}
// Emits get_/set_ accessor wrappers for t's declared public fields and
// properties, accumulating them in `propname` for registration. Indexer
// properties ("Item", and "Chars" on String) are collected separately and
// emitted by WriteItemFunc.
private void WriteField(Type t, StreamWriter file)
{
// Write field set/get
FieldInfo[] fields = t.GetFields(BindingFlags.Static | BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly);
foreach (FieldInfo fi in fields)
{
if (DontExport(fi) || IsObsolete(fi))
continue;
PropPair pp = new PropPair();
pp.isInstance = !fi.IsStatic;
// Delegate-typed fields get no getter (they are set-only from Lua).
if (fi.FieldType.BaseType != typeof(MulticastDelegate))
{
WriteFunctionAttr(file);
Write(file, "static public int get_{0}(IntPtr l) {{", fi.Name);
WriteTry(file);
if (fi.IsStatic)
{
WritePushValue(fi.FieldType, file, string.Format("{0}.{1}", TypeDecl(t), fi.Name));
}
else
{
WriteCheckSelf(file, t);
WritePushValue(fi.FieldType, file, string.Format("self.{0}", fi.Name));
}
Write(file, "return 1;");
WriteCatchExecption(file);
Write(file, "}");
pp.get = "get_" + fi.Name;
}
// const (IsLiteral) and readonly (IsInitOnly) fields get no setter.
if (!fi.IsLiteral && !fi.IsInitOnly)
{
WriteFunctionAttr(file);
Write(file, "static public int set_{0}(IntPtr l) {{", fi.Name);
WriteTry(file);
if (fi.IsStatic)
{
Write(file, "{0} v;", TypeDecl(fi.FieldType));
WriteCheckType(file, fi.FieldType, 2);
WriteSet(file, fi.FieldType, TypeDecl(t), fi.Name, true);
}
else
{
WriteCheckSelf(file, t);
Write(file, "{0} v;", TypeDecl(fi.FieldType));
WriteCheckType(file, fi.FieldType, 2);
WriteSet(file, fi.FieldType, t.FullName, fi.Name);
}
// Value types are copied into `self`; write the mutated copy back.
if (t.IsValueType && !fi.IsStatic)
Write(file, "setBack(l,self);");
Write(file, "return 0;");
WriteCatchExecption(file);
Write(file, "}");
pp.set = "set_" + fi.Name;
}
propname.Add(fi.Name, pp);
tryMake(fi.FieldType);
}
//for this[]
List<PropertyInfo> getter = new List<PropertyInfo>();
List<PropertyInfo> setter = new List<PropertyInfo>();
// Write property set/get
PropertyInfo[] props = t.GetProperties(BindingFlags.Static | BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly);
foreach (PropertyInfo fi in props)
{
//if (fi.Name == "Item" || IsObsolete(fi) || MemberInFilter(t,fi) || DontExport(fi))
if (IsObsolete(fi) || MemberInFilter(t, fi) || DontExport(fi))
continue;
if (fi.Name == "Item"
|| (t.Name == "String" && fi.Name == "Chars")) // for string[]
{
//for this[]
// Only single-argument instance indexers are supported.
if (!fi.GetGetMethod().IsStatic && fi.GetIndexParameters().Length == 1)
{
if (fi.CanRead && !IsNotSupport(fi.PropertyType))
getter.Add(fi);
if (fi.CanWrite && fi.GetSetMethod() != null)
setter.Add(fi);
}
continue;
}
PropPair pp = new PropPair();
bool isInstance = true;
if (fi.CanRead && fi.GetGetMethod() != null)
{
if (!IsNotSupport(fi.PropertyType))
{
WriteFunctionAttr(file);
Write(file, "static public int get_{0}(IntPtr l) {{", fi.Name);
WriteTry(file);
if (fi.GetGetMethod().IsStatic)
{
isInstance = false;
WritePushValue(fi.PropertyType, file, string.Format("{0}.{1}", TypeDecl(t), fi.Name));
}
else
{
WriteCheckSelf(file, t);
WritePushValue(fi.PropertyType, file, string.Format("self.{0}", fi.Name));
}
Write(file, "return 1;");
WriteCatchExecption(file);
Write(file, "}");
pp.get = "get_" + fi.Name;
}
}
if (fi.CanWrite && fi.GetSetMethod() != null)
{
WriteFunctionAttr(file);
Write(file, "static public int set_{0}(IntPtr l) {{", fi.Name);
WriteTry(file);
if (fi.GetSetMethod().IsStatic)
{
WriteValueCheck(file, fi.PropertyType, 2);
WriteSet(file, fi.PropertyType, TypeDecl(t), fi.Name, true);
isInstance = false;
}
else
{
WriteCheckSelf(file, t);
WriteValueCheck(file, fi.PropertyType, 2);
WriteSet(file, fi.PropertyType, TypeDecl(t), fi.Name);
}
if (t.IsValueType)
Write(file, "setBack(l,self);");
Write(file, "return 0;");
WriteCatchExecption(file);
Write(file, "}");
pp.set = "set_" + fi.Name;
}
pp.isInstance = isInstance;
propname.Add(fi.Name, pp);
tryMake(fi.PropertyType);
}
//for this[]
WriteItemFunc(t, file, getter, setter);
}
// Emits getItem/setItem wrappers for collected indexer properties. With a
// single overload the call is emitted directly; with several, runtime
// matchType dispatch on the Lua argument type selects the overload.
void WriteItemFunc(Type t, StreamWriter file, List<PropertyInfo> getter, List<PropertyInfo> setter)
{
//Write property this[] set/get
if (getter.Count > 0)
{
//get
bool first_get = true;
WriteFunctionAttr(file);
Write(file, "static public int getItem(IntPtr l) {");
WriteTry(file);
WriteCheckSelf(file, t);
if (getter.Count == 1)
{
PropertyInfo _get = getter[0];
ParameterInfo[] infos = _get.GetIndexParameters();
WriteValueCheck(file, infos[0].ParameterType, 2, "v");
Write(file, "var ret = self[v];");
WritePushValue(_get.PropertyType, file, "ret");
Write(file, "return 1;");
}
else
{
Write(file, "LuaTypes t = LuaDLL.lua_type(l, 2);");
for (int i = 0; i < getter.Count; i++)
{
PropertyInfo fii = getter[i];
ParameterInfo[] infos = fii.GetIndexParameters();
Write(file, "{0}(matchType(l,2,t,typeof({1}))){{", first_get ? "if" : "else if", infos[0].ParameterType);
WriteValueCheck(file, infos[0].ParameterType, 2, "v");
Write(file, "var ret = self[v];");
WritePushValue(fii.PropertyType, file, "ret");
Write(file, "return 1;");
Write(file, "}");
first_get = false;
}
Write(file, "LuaDLL.luaL_error(l,\"No matched override function to call\");");
Write(file, "return 0;");
}
WriteCatchExecption(file);
Write(file, "}");
funcname.Add("getItem");
}
if (setter.Count > 0)
{
bool first_set = true;
WriteFunctionAttr(file);
Write(file, "static public int setItem(IntPtr l) {");
WriteTry(file);
WriteCheckSelf(file, t);
if (setter.Count == 1)
{
PropertyInfo _set = setter[0];
ParameterInfo[] infos = _set.GetIndexParameters();
WriteValueCheck(file, infos[0].ParameterType, 2);
WriteValueCheck(file, _set.PropertyType, 3, "c");
Write(file, "self[v]=c;");
}
else
{
Write(file, "LuaTypes t = LuaDLL.lua_type(l, 2);");
for (int i = 0; i < setter.Count; i++)
{
PropertyInfo fii = setter[i];
// NOTE(review): this delegate check and the setBack emission below
// sit INSIDE the per-overload loop, so setBack can be emitted once
// per overload and the setter is skipped entirely for delegate-based
// types — looks misplaced relative to the getter path; verify intent.
if (t.BaseType != typeof(MulticastDelegate))
{
ParameterInfo[] infos = fii.GetIndexParameters();
Write(file, "{0}(matchType(l,2,t,typeof({1}))){{", first_set ? "if" : "else if", infos[0].ParameterType);
WriteValueCheck(file, infos[0].ParameterType, 2, "v");
WriteValueCheck(file, fii.PropertyType, 3, "c");
Write(file, "self[v]=c;");
Write(file, "return 0;");
Write(file, "}");
first_set = false;
}
if (t.IsValueType)
Write(file, "setBack(l,self);");
}
Write(file, "LuaDLL.luaL_error(l,\"No matched override function to call\");");
}
Write(file, "return 0;");
WriteCatchExecption(file);
Write(file, "}");
funcname.Add("setItem");
}
}
// Opens the try block paired with WriteCatchExecption in generated wrappers.
void WriteTry(StreamWriter file)
{
Write(file, "try {");
}
// Closes the try block and emits a catch-all that reports the exception to
// Lua via luaL_error. (The misspelled name is kept: call sites throughout
// this generator depend on it.)
void WriteCatchExecption(StreamWriter file)
{
Write(file, "}");
Write(file, "catch(Exception e) {");
Write(file, "LuaDLL.luaL_error(l, e.ToString());");
Write(file, "return 0;");
Write(file, "}");
}
// Emits the statement that reads Lua stack slot {nprefix}{n} into variable v,
// dispatching on the value's kind: enum, delegate, or plain type.
void WriteCheckType(StreamWriter file, Type t, int n, string v = "v", string nprefix = "")
{
if (t.IsEnum)
Write(file, "checkEnum(l,{2}{0},out {1});", n, v, nprefix);
else if (t.BaseType == typeof(System.MulticastDelegate))
// Declares `op` used by WriteSet's delegate assign/+=/-= dispatch.
Write(file, "int op=LuaDelegation.checkDelegate(l,{2}{0},out {1});", n, v, nprefix);
else
Write(file, "checkType(l,{2}{0},out {1});", n, v, nprefix);
}
// Emits a local variable declaration for v followed by the stack read that
// fills it (see WriteCheckType).
void WriteValueCheck(StreamWriter file, Type t, int n, string v = "v", string nprefix = "")
{
Write(file, "{0} {1};", SimpleType(t), v);
WriteCheckType(file, t, n, v, nprefix);
}
// Emits the AOT-safe callback attribute required on every generated Lua entry.
private void WriteFunctionAttr(StreamWriter file)
{
Write(file, "[MonoPInvokeCallbackAttribute(typeof(LuaCSFunction))]");
}
// Collects the public instance constructors of t that may be exposed to Lua.
// Static classes and MonoBehaviour subclasses return an empty set: the former
// cannot be constructed at all, the latter must go through AddComponent.
ConstructorInfo[] GetValidConstructor(Type t)
{
    List<ConstructorInfo> valid = new List<ConstructorInfo>();
    // abstract + sealed (+ no default ctor) identifies a static class.
    if (t.IsAbstract && t.IsSealed && t.GetConstructor(Type.EmptyTypes) == null)
        return valid.ToArray();
    if (t.BaseType != null && t.BaseType.Name == "MonoBehaviour")
        return valid.ToArray();
    foreach (ConstructorInfo ci in t.GetConstructors(BindingFlags.Instance | BindingFlags.Public))
    {
        if (IsObsolete(ci) || DontExport(ci) || ContainUnsafe(ci))
            continue;
        valid.Add(ci);
    }
    return valid.ToArray();
}
// True when any parameter is a pointer type (pointer type names contain '*'),
// which cannot be marshalled to Lua.
bool ContainUnsafe(MethodBase mi)
{
    ParameterInfo[] pars = mi.GetParameters();
    for (int i = 0; i < pars.Length; i++)
    {
        if (pars[i].ParameterType.FullName.Contains("*"))
            return true;
    }
    return false;
}
// True when the member is tagged [DoNotToLua] and must be excluded from export.
bool DontExport(MemberInfo mi)
{
return mi.GetCustomAttributes(typeof(DoNotToLuaAttribute), false).Length > 0;
}
// Emits the `constructor` Lua entry for t. Multiple constructors dispatch on
// argument count (when unambiguous) or on runtime matchType checks; value
// types with no explicit constructor get a default-constructed instance.
private void WriteConstructor(Type t, StreamWriter file)
{
ConstructorInfo[] cons = GetValidConstructor(t);
if (cons.Length > 0)
{
WriteFunctionAttr(file);
Write(file, "static public int constructor(IntPtr l) {");
WriteTry(file);
if (cons.Length > 1)
Write(file, "int argc = LuaDLL.lua_gettop(l);");
Write(file, "{0} o;", TypeDecl(t));
bool first = true;
for (int n = 0; n < cons.Length; n++)
{
ConstructorInfo ci = cons[n];
ParameterInfo[] pars = ci.GetParameters();
if (cons.Length > 1)
{
// +1 accounts for the type table passed as the first Lua argument.
if (isUniqueArgsCount(cons, ci))
Write(file, "{0}(argc=={1}){{", first ? "if" : "else if", ci.GetParameters().Length + 1);
else
Write(file, "{0}(matchType(l,argc,2{1})){{", first ? "if" : "else if", TypeDecl(pars));
}
for (int k = 0; k < pars.Length; k++)
{
ParameterInfo p = pars[k];
bool hasParams = p.GetCustomAttributes(typeof(ParamArrayAttribute), false).Length > 0;
CheckArgument(file, p.ParameterType, k, 2, p.IsOut, hasParams);
}
Write(file, "o=new {0}({1});", TypeDecl(t), FuncCall(ci));
if (t.Name == "String") // if export system.string, push string as ud not lua string
Write(file, "pushObject(l,o);");
else
Write(file, "pushValue(l,o);");
Write(file, "return 1;");
if (cons.Length == 1)
WriteCatchExecption(file);
Write(file, "}");
first = false;
}
if (cons.Length > 1)
{
Write(file, "LuaDLL.luaL_error(l,\"New object failed.\");");
Write(file, "return 0;");
WriteCatchExecption(file);
Write(file, "}");
}
}
else if (t.IsValueType) // default constructor
{
WriteFunctionAttr(file);
Write(file, "static public int constructor(IntPtr l) {");
WriteTry(file);
Write(file, "{0} o;", FullName(t));
Write(file, "o=new {0}();", FullName(t));
Write(file, "pushValue(l,o);");
Write(file, "return 1;");
WriteCatchExecption(file);
Write(file, "}");
}
}
// Delegate-typed properties are not supported as plain getters/setters; they
// are handled through LuaDelegation instead.
bool IsNotSupport(Type t)
{
    return t.IsSubclassOf(typeof(Delegate));
}
// Namespace prefixes stripped from type-name strings by RemoveRef
// (only index 0 is currently consulted).
string[] prefix = new string[] { "System.Collections.Generic" };
// Normalizes a CLR type-name string for C# source output: drops a by-ref '&',
// optionally drops a trailing "[]", strips the System.Collections.Generic
// prefix, converts nested-type '+' to '.', and rewrites CLR generic notation
// (e.g. "List`1[Int32]") into angle brackets.
string RemoveRef(string s, bool removearray = true)
{
    if (s.EndsWith("&"))
        s = s.Substring(0, s.Length - 1);
    if (removearray && s.EndsWith("[]"))
        s = s.Substring(0, s.Length - 2);
    if (s.StartsWith(prefix[0]))
        s = s.Substring(prefix[0].Length + 1);
    s = s.Replace("+", ".");
    if (s.Contains("`"))
    {
        // "`<digit>" is the generic arity marker; the bracketed argument
        // list that follows becomes the <...> list.
        Regex arity = new Regex(@"`\d", RegexOptions.None);
        s = arity.Replace(s, "");
        s = s.Replace("[", "<").Replace("]", ">");
    }
    return s;
}
// Returns t's full name with any bracketed generic-argument list removed
// (keeps the arity marker, e.g. "List`1") and nested '+' converted to '.'.
string GenericBaseName(Type t)
{
    string full = t.FullName;
    int bracket = full.IndexOf('[');
    if (bracket > 0)
        full = full.Substring(0, bracket);
    return full.Replace("+", ".");
}
// Joins the simple names of t's generic arguments with '_' (e.g. "int_string")
// for use in generated file/class names; returns "" on reflection failure.
string GenericName(Type t)
{
    try
    {
        Type[] args = t.GetGenericArguments();
        string joined = "";
        for (int i = 0; i < args.Length; i++)
        {
            if (i > 0)
                joined += "_";
            joined += SimpleType(args[i]);
        }
        return joined;
    }
    catch (Exception e)
    {
        Debug.Log(e.ToString());
        return "";
    }
}
// Sanitizes an arbitrary type-name string into a valid C# identifier fragment
// by replacing every non-alphanumeric character with '_'.
string _Name(string n)
{
    // StringBuilder avoids the accidental O(n^2) cost of += on a string
    // in a loop (long generic names can be sizable).
    StringBuilder sb = new StringBuilder(n.Length);
    for (int i = 0; i < n.Length; i++)
    {
        sb.Append(char.IsLetterOrDigit(n[i]) ? n[i] : '_');
    }
    return sb.ToString();
}
// Builds the ",typeof(X),typeof(Y)" suffix used by matchType dispatch in the
// generated overload selection; out parameters match the LuaOut placeholder
// rather than their declared type.
string TypeDecl(ParameterInfo[] pars)
{
    StringBuilder sb = new StringBuilder();
    foreach (ParameterInfo p in pars)
    {
        string inner = p.IsOut ? "LuaOut" : SimpleType(p.ParameterType);
        sb.Append(",typeof(").Append(inner).Append(")");
    }
    return sb.ToString();
}
// Filters out methods Lua never needs: Object housekeeping, collection
// plumbing, implicit conversions, property/event accessors, obsolete and
// generic methods.
bool isUsefullMethod(MethodInfo method)
{
    switch (method.Name)
    {
        case "GetType":
        case "GetHashCode":
        case "Equals":
        case "ToString":
        case "Clone":
        case "GetEnumerator":
        case "CopyTo":
        case "op_Implicit":
            return false;
    }
    // Accessor methods are exported through their property/event instead.
    if (method.Name.StartsWith("get_", StringComparison.Ordinal)
        || method.Name.StartsWith("set_", StringComparison.Ordinal)
        || method.Name.StartsWith("add_", StringComparison.Ordinal)
        || method.Name.StartsWith("remove_", StringComparison.Ordinal))
        return false;
    if (IsObsolete(method) || method.IsGenericMethod)
        return false;
    return true;
}
// Emits the attribute plus opening line of one generated Lua entry function.
void WriteFunctionDec(StreamWriter file, string name)
{
WriteFunctionAttr(file);
Write(file, "static public int {0}(IntPtr l) {{", name);
}
// Collects every exportable overload of `name` on t, sorted by ascending
// parameter count so the generated dispatcher tries cheap matches first.
MethodBase[] GetMethods(Type t, string name, BindingFlags bf)
{
    List<MethodBase> overloads = new List<MethodBase>();
    foreach (MemberInfo member in t.GetMember(name, bf))
    {
        if (member.MemberType != MemberTypes.Method)
            continue;
        MethodInfo mi = (MethodInfo)member;
        if (IsObsolete(mi) || DontExport(mi) || !isUsefullMethod(mi))
            continue;
        overloads.Add(mi);
    }
    overloads.Sort(delegate(MethodBase a, MethodBase b)
    {
        return a.GetParameters().Length - b.GetParameters().Length;
    });
    return overloads.ToArray();
}
// Emits the body of one generated Lua entry: a direct call when the method
// has a single overload, otherwise runtime dispatch on argument count
// (isUniqueArgsCount) or matchType checks. Generic methods are rejected with
// a Lua error.
void WriteFunctionImpl(StreamWriter file, MethodInfo m, Type t, BindingFlags bf)
{
WriteTry(file);
MethodBase[] cons = GetMethods(t, m.Name, bf);
if (cons.Length == 1) // no override function
{
if (isUsefullMethod(m) && !m.ReturnType.ContainsGenericParameters && !m.ContainsGenericParameters) // don't support generic method
WriteFunctionCall(m, file, t);
else
{
Write(file, "LuaDLL.luaL_error(l,\"No matched override function to call\");");
Write(file, "return 0;");
}
}
else // 2 or more override function
{
Write(file, "int argc = LuaDLL.lua_gettop(l);");
bool first = true;
for (int n = 0; n < cons.Length; n++)
{
if (cons[n].MemberType == MemberTypes.Method)
{
MethodInfo mi = cons[n] as MethodInfo;
ParameterInfo[] pars = mi.GetParameters();
if (isUsefullMethod(mi)
&& !mi.ReturnType.ContainsGenericParameters
/*&& !ContainGeneric(pars)*/) // don't support generic method
{
// Instance calls consume one extra Lua stack slot for `self`.
if (isUniqueArgsCount(cons, mi))
Write(file, "{0}(argc=={1}){{", first ? "if" : "else if", mi.IsStatic ? mi.GetParameters().Length : mi.GetParameters().Length + 1);
else
Write(file, "{0}(matchType(l,argc,{1}{2})){{", first ? "if" : "else if", mi.IsStatic ? 1 : 2, TypeDecl(pars));
WriteFunctionCall(mi, file, t);
Write(file, "}");
first = false;
}
}
}
Write(file, "LuaDLL.luaL_error(l,\"No matched override function to call\");");
Write(file, "return 0;");
}
WriteCatchExecption(file);
Write(file, "}");
}
// True when no OTHER overload in cons has the same parameter count as mi,
// letting the generated dispatcher match on argc alone instead of matchType.
bool isUniqueArgsCount(MethodBase[] cons, MethodBase mi)
{
    // Hoist mi's count out of the loop; the original also carried a
    // redundant MethodBase cast of an already-MethodBase element.
    int count = mi.GetParameters().Length;
    foreach (MethodBase m in cons)
    {
        if (m != mi && m.GetParameters().Length == count)
            return false;
    }
    return true;
}
// True when any parameter involves generics (closed generic type, open type
// parameter, or generic definition).
bool ContainGeneric(ParameterInfo[] pars)
{
    foreach (ParameterInfo p in pars)
    {
        Type pt = p.ParameterType;
        if (pt.IsGenericType || pt.IsGenericParameter || pt.IsGenericTypeDefinition)
            return true;
    }
    return false;
}
// Emits the statement reading `self` from Lua stack slot 1: value types are
// copied out via checkType, reference types come from checkSelf with a cast.
void WriteCheckSelf(StreamWriter file, Type t)
{
if (t.IsValueType)
{
Write(file, "{0} self;", TypeDecl(t));
Write(file, "checkType(l,1,out self);");
}
else
Write(file, "{0} self=({0})checkSelf(l);", TypeDecl(t));
}
// Emits one concrete call: argument checks, the invocation itself (operator
// methods are rewritten to their C# operator form), then pushes the return
// value and any by-ref parameters back to Lua, returning the pushed count.
private void WriteFunctionCall(MethodInfo m, StreamWriter file, Type t)
{
bool hasref = false;
ParameterInfo[] pars = m.GetParameters();
int argIndex = 1;
if (!m.IsStatic)
{
WriteCheckSelf(file, t);
argIndex++;
}
for (int n = 0; n < pars.Length; n++)
{
ParameterInfo p = pars[n];
string pn = p.ParameterType.Name;
if (pn.EndsWith("&"))
{
hasref = true;
}
bool hasParams = p.GetCustomAttributes(typeof(ParamArrayAttribute), false).Length > 0;
CheckArgument(file, p.ParameterType, n, argIndex, p.IsOut, hasParams);
}
string ret = "";
if (m.ReturnType != typeof(void))
{
ret = "var ret=";
}
if (m.IsStatic)
{
// Operator methods cannot be invoked by name in generated C#; spell the
// operator out. GreaterThan/-OrEqual are written as flipped LessThan so
// only operator< / operator<= need to exist on the type.
if (m.Name == "op_Multiply")
Write(file, "{0}a1*a2;", ret);
else if (m.Name == "op_Subtraction")
Write(file, "{0}a1-a2;", ret);
else if (m.Name == "op_Addition")
Write(file, "{0}a1+a2;", ret);
else if (m.Name == "op_Division")
Write(file, "{0}a1/a2;", ret);
else if (m.Name == "op_UnaryNegation")
Write(file, "{0}-a1;", ret);
else if (m.Name == "op_Equality")
Write(file, "{0}(a1==a2);", ret);
else if (m.Name == "op_Inequality")
Write(file, "{0}(a1!=a2);", ret);
else if (m.Name == "op_LessThan")
Write(file, "{0}(a1<a2);", ret);
else if (m.Name == "op_GreaterThan")
Write(file, "{0}(a2<a1);", ret);
else if (m.Name == "op_LessThanOrEqual")
Write(file, "{0}(a1<=a2);", ret);
else if (m.Name == "op_GreaterThanOrEqual")
Write(file, "{0}(a2<=a1);", ret);
else
Write(file, "{3}{2}.{0}({1});", m.Name, FuncCall(m), TypeDecl(t), ret);
}
else
Write(file, "{2}self.{0}({1});", m.Name, FuncCall(m), ret);
int retcount = 0;
if (m.ReturnType != typeof(void))
{
WritePushValue(m.ReturnType, file);
retcount = 1;
}
// push out/ref value for return value
if (hasref)
{
for (int n = 0; n < pars.Length; n++)
{
ParameterInfo p = pars[n];
if (p.ParameterType.IsByRef)
{
WritePushValue(p.ParameterType, file, string.Format("a{0}", n + 1));
retcount++;
}
}
}
// Mutating call on a value-type copy: write the copy back to Lua.
if (t.IsValueType && m.ReturnType == typeof(void) && !m.IsStatic)
Write(file, "setBack(l,self);");
Write(file, "return {0};", retcount);
}
// Maps a CLR type to its C# keyword spelling where one exists; everything
// else falls through to TypeDecl with common namespace noise trimmed.
string SimpleType_(Type t)
{
    switch (t.Name)
    {
        case "Single": return "float";
        case "String": return "string";
        case "Double": return "double";
        case "Boolean": return "bool";
        case "Int32": return "int";
        case "Object": return FullName(t);
    }
    string decl = TypeDecl(t);
    decl = decl.Replace("System.Collections.Generic.", "");
    decl = decl.Replace("System.Object", "object");
    return decl;
}
// Public entry over SimpleType_ (kept as a seam for future post-processing).
string SimpleType(Type t)
{
string ret = SimpleType_(t);
return ret;
}
// Emits the push of the implicit `ret` local; enums are pushed as ints.
void WritePushValue(Type t, StreamWriter file)
{
if (t.IsEnum)
Write(file, "pushEnum(l,(int)ret);");
else
Write(file, "pushValue(l,ret);");
}
// Emits the push of an arbitrary expression `ret`; enums are pushed as ints.
void WritePushValue(Type t, StreamWriter file, string ret)
{
if (t.IsEnum)
Write(file, "pushEnum(l,(int){0});", ret);
else
Write(file, "pushValue(l,{0});", ret);
}
// Writes one line at the current indent level. A line starting with '}'
// dedents before writing and a line ending with '{' indents after, giving the
// generated file brace-driven formatting automatically.
void Write(StreamWriter file, string fmt, params object[] args)
{
if (fmt.StartsWith("}")) indent--;
for (int n = 0; n < indent; n++)
file.Write("\t");
// With no args, fmt is written verbatim (it may contain literal braces).
if (args.Length == 0)
file.WriteLine(fmt);
else
{
string line = string.Format(fmt, args);
file.WriteLine(line);
}
if (fmt.EndsWith("{")) indent++;
}
// Emits the declaration of argument local a{n+1} and — unless it is an out
// parameter — the stack read that fills it (enum / delegate / params / plain).
private void CheckArgument(StreamWriter file, Type t, int n, int argstart, bool isout, bool isparams)
{
Write(file, "{0} a{1};", TypeDecl(t), n + 1);
if (!isout)
{
if (t.IsEnum)
Write(file, "checkEnum(l,{0},out a{1});", n + argstart, n + 1);
else if (t.BaseType == typeof(System.MulticastDelegate))
{
// Make sure the delegate's own glue file exists before referencing it.
tryMake(t);
Write(file, "LuaDelegation.checkDelegate(l,{0},out a{1});", n + argstart, n + 1);
}
else if (isparams)
Write(file, "checkParams(l,{0},out a{1});", n + argstart, n + 1);
else
Write(file, "checkType(l,{0},out a{1});", n + argstart, n + 1);
}
}
// Converts a CLR full-name string into its C# source spelling (nested '+'
// becomes '.', generic notation rewritten by RemoveRef).
string FullName(string str)
{
    if (str == null)
    {
        // Explicit argument validation; NullReferenceException must never be
        // thrown by hand (it is reserved for the runtime).
        throw new ArgumentNullException("str");
    }
    return RemoveRef(str.Replace("+", "."));
}
// Renders t as a C# type expression: generic types get their argument list
// spliced in place of the arity marker, arrays recurse on the element type,
// and everything else goes through RemoveRef normalization.
string TypeDecl(Type t)
{
    if (t.IsGenericType)
    {
        Type[] args = t.GetGenericArguments();
        StringBuilder generics = new StringBuilder("<");
        for (int i = 0; i < args.Length; i++)
        {
            if (i > 0)
                generics.Append(",");
            generics.Append(TypeDecl(args[i]));
        }
        generics.Append(">");
        // Swap the "`<digit>" arity marker for the rendered argument list.
        return Regex.Replace(GenericBaseName(t), @"`\d", generics.ToString());
    }
    if (t.IsArray)
        return TypeDecl(t.GetElementType()) + "[]";
    return RemoveRef(t.ToString(), false);
}
// Derives the wrapper class name ("Lua_Namespace_Type") for t; generic types
// include a sanitized argument suffix so each closed type is unique.
string ExportName(Type t)
{
    if (t.IsGenericType)
        return string.Format("Lua_{0}_{1}", _Name(GenericBaseName(t)), _Name(GenericName(t)));
    string flat = "Lua_" + RemoveRef(t.FullName, true);
    return flat.Replace(".", "_");
}
// Type-based overload of FullName. Open generic parameters have a null
// FullName; fall back to the short name (and log it for diagnosis).
string FullName(Type t)
{
    if (t.FullName != null)
        return FullName(t.FullName);
    Debug.Log(t.Name);
    return t.Name;
}
// Builds the argument list for the generated invocation ("a1,out a2,ref a3"),
// matching the locals declared by CheckArgument.
string FuncCall(MethodBase m)
{
    ParameterInfo[] pars = m.GetParameters();
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < pars.Length; i++)
    {
        if (i > 0)
            sb.Append(",");
        ParameterInfo p = pars[i];
        if (p.ParameterType.IsByRef && p.IsOut)
            sb.Append("out ");
        else if (p.ParameterType.IsByRef)
            sb.Append("ref ");
        sb.Append("a").Append(i + 1);
    }
    return sb.ToString();
}
}
}
| |
// Copyright (c) Rotorz Limited. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root.
using System;
using System.IO;
using System.Text.RegularExpressions;
using UnityEditor;
namespace Rotorz.Games.EditorExtensions
{
/// <summary>
/// Utility functions for accessing package assets.
/// </summary>
public static class PackageUtility
{
private const string PackagesAssetPath = "Assets/Plugins/Packages";
private const string PackageDataAssetPath = "Assets/Plugins/PackageData";
private const string PackageNameRegex_ScopeName = @"[@a-z_\-][a-z0-9_\-]+";
private const string PackageNameRegex_PackageName = @"[a-z_\-][a-z0-9_\-]+";
// Anchored with ^...$ so Regex.IsMatch validates the WHOLE argument: an
// unanchored pattern accepts any input merely CONTAINING a valid substring
// (e.g. "x/../evil" would have slipped through the checks below).
private static readonly Regex s_PackageNameRegex = new Regex("^" + PackageNameRegex_ScopeName + "(/" + PackageNameRegex_PackageName + ")?$");
// One-or-more file-name characters; the original single-character class,
// combined with an unanchored match, matched nearly any non-empty string.
private const string AssetFileNameRegex = @"[A-Za-z0-9_\-\.]+";
private static readonly Regex s_AssetFileNameRegex = new Regex("^" + AssetFileNameRegex + "$");
private static readonly Regex s_RelativeAssetPathRegex = new Regex("^" + AssetFileNameRegex + "(/" + AssetFileNameRegex + ")*$");
/// <summary>
/// Checks the value of a package name argument for validity.
/// </summary>
/// <param name="packageName">The name of the package.</param>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="packageName"/> is <c>null</c>.
/// </exception>
/// <exception cref="System.ArgumentException">
/// If <paramref name="packageName"/> has an invalid value.
/// </exception>
public static void CheckPackageNameArgument(string packageName)
{
    if (packageName == null) {
        throw new ArgumentNullException("packageName");
    }
    if (!s_PackageNameRegex.IsMatch(packageName)) {
        throw new ArgumentException(string.Format("Invalid package name '{0}'.", packageName), "packageName");
    }
}
// Validates an optional asset file name argument; null is allowed and means
// "no file name supplied".
private static void CheckAssetFileNameArgument(string assetFileName)
{
    if (assetFileName != null && !s_AssetFileNameRegex.IsMatch(assetFileName)) {
        throw new ArgumentException(string.Format("Invalid asset file name '{0}'.", assetFileName), "assetFileName");
    }
}
// Validates an optional relative folder path argument; null is allowed and
// means "no sub-folder supplied".
private static void CheckRelativeFolderPathArgument(string relativeFolderPath)
{
    if (relativeFolderPath != null && !s_RelativeAssetPathRegex.IsMatch(relativeFolderPath)) {
        throw new ArgumentException(string.Format("Invalid relative asset folder path '{0}'.", relativeFolderPath), "relativeFolderPath");
    }
}
// Converts asset-database style '/' separators into the platform's native
// directory separator.
private static string NormalizeDirectorySeparatorForOS(string path)
{
    char native = Path.DirectorySeparatorChar;
    return path.Replace('/', native);
}
// Maps a project-relative asset path onto an absolute file system path with
// native directory separators.
private static string AssetPathToAbsolutePath(string assetPath)
{
    string combined = Path.Combine(Directory.GetCurrentDirectory(), assetPath);
    return NormalizeDirectorySeparatorForOS(combined);
}
// Creates every missing folder along folderAssetPath (which must begin with
// "Assets"), mirroring Directory.CreateDirectory but through the Unity
// AssetDatabase so the folders are properly imported.
private static void EnsureFolderAssetPathExists(string folderAssetPath)
{
    string[] segments = folderAssetPath.Split('/');
    if (segments[0] != "Assets") {
        throw new ArgumentException(string.Format("Invalid asset folder path '{0}'.", folderAssetPath), "folderAssetPath");
    }
    string current = "Assets";
    for (int i = 1; i < segments.Length; ++i) {
        string parent = current;
        string folderName = segments[i];
        current = current + "/" + folderName;
        if (!AssetDatabase.IsValidFolder(current)) {
            AssetDatabase.CreateFolder(parent, folderName);
        }
    }
}
/// <summary>
/// Resolves asset path of a package specific folder or asset.
/// </summary>
/// <example>
/// <code language="csharp"><![CDATA[
/// Debug.Log(PackageUtility.ResolveAssetPath("@vendor-name/package-name", "Language", "en-US.txt"));
/// // Assets/Plugins/Packages/@vendor-name/package-name/Language/en-US.txt
/// ]]></code>
/// </example>
/// <param name="packageName">The name of the package.</param>
/// <param name="relativeFolderPath">Relative folder path inside package data
/// folder (optional).</param>
/// <param name="assetFileName">Name of asset file (optional).</param>
/// <returns>
/// The resolved asset path of the package specific path.
/// </returns>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="packageName"/> is <c>null</c>.
/// </exception>
/// <exception cref="System.ArgumentException">
/// If <paramref name="packageName"/>, <paramref name="relativeFolderPath"/> or
/// <paramref name="assetFileName"/> has an invalid value.
/// </exception>
public static string ResolveAssetPath(string packageName, string relativeFolderPath = null, string assetFileName = null)
{
    CheckPackageNameArgument(packageName);
    CheckRelativeFolderPathArgument(relativeFolderPath);
    CheckAssetFileNameArgument(assetFileName);
    // Optional segments are simply omitted from the joined path.
    string result = PackagesAssetPath + "/" + packageName;
    if (relativeFolderPath != null) {
        result = result + "/" + relativeFolderPath;
    }
    if (assetFileName != null) {
        result = result + "/" + assetFileName;
    }
    return result;
}
/// <summary>
/// Resolves absolute file system path of a package specific folder or asset.
/// </summary>
/// <example>
/// <code language="csharp"><![CDATA[
/// Debug.Log(PackageUtility.ResolveAssetPathAbsolute("@vendor-name/package-name", "Language", "en-US.txt"));
/// // C:\MyProject\Assets\Plugins\Packages\@vendor-name\package-name\Language\en-US.txt
/// ]]></code>
/// </example>
/// <param name="packageName">The name of the package.</param>
/// <param name="relativeFolderPath">Relative folder path inside package data
/// folder (optional).</param>
/// <param name="assetFileName">Name of asset file (optional).</param>
/// <returns>
/// The resolved asset path of the package specific path.
/// </returns>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="packageName"/> is <c>null</c>.
/// </exception>
/// <exception cref="System.ArgumentException">
/// If <paramref name="packageName"/>, <paramref name="relativeFolderPath"/> or
/// <paramref name="assetFileName"/> has an invalid value.
/// </exception>
public static string ResolveAssetPathAbsolute(string packageName, string relativeFolderPath = null, string assetFileName = null)
{
    // Argument validation happens inside ResolveAssetPath.
    return AssetPathToAbsolutePath(
        ResolveAssetPath(packageName, relativeFolderPath, assetFileName)
    );
}
/// <summary>
/// Resolves asset path of a package specific data folder or asset.
/// </summary>
/// <example>
/// <code language="csharp"><![CDATA[
/// Debug.Log(PackageUtility.ResolveDataAssetPath("@vendor-name/package-name", "Presets", "NewPreset.asset"));
/// // Assets/Plugins/PackageData/@vendor-name/package-name/Presets/NewPreset.asset
/// ]]></code>
/// </example>
/// <param name="packageName">The name of the package.</param>
/// <param name="relativeFolderPath">Relative folder path inside package data
/// folder (optional).</param>
/// <param name="assetFileName">Name of asset file (optional).</param>
/// <returns>
/// The resolved asset path of the package specific data path.
/// </returns>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="packageName"/> is <c>null</c>.
/// </exception>
/// <exception cref="System.ArgumentException">
/// If <paramref name="packageName"/>, <paramref name="relativeFolderPath"/> or
/// <paramref name="assetFileName"/> has an invalid value.
/// </exception>
public static string ResolveDataAssetPath(string packageName, string relativeFolderPath = null, string assetFileName = null)
{
    CheckPackageNameArgument(packageName);
    CheckRelativeFolderPathArgument(relativeFolderPath);
    CheckAssetFileNameArgument(assetFileName);
    // Same layout as ResolveAssetPath, but rooted at the data folder.
    string result = PackageDataAssetPath + "/" + packageName;
    if (relativeFolderPath != null) {
        result = result + "/" + relativeFolderPath;
    }
    if (assetFileName != null) {
        result = result + "/" + assetFileName;
    }
    return result;
}
/// <summary>
/// Resolves absolute file system path of a package specific data folder or asset.
/// </summary>
/// <example>
/// <code language="csharp"><![CDATA[
/// Debug.Log(PackageUtility.ResolveDataPathAbsolute("@vendor-name/package-name", "Presets", "NewPreset.asset"));
/// // C:\MyProject\Assets\Plugins\PackageData\@vendor-name\package-name\Presets\NewPreset.asset
/// ]]></code>
/// </example>
/// <param name="packageName">The name of the package.</param>
/// <param name="relativeFolderPath">Relative folder path inside package data
/// folder (optional).</param>
/// <param name="assetFileName">Name of asset file (optional).</param>
/// <returns>
/// The resolved asset path of the package specific data path.
/// </returns>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="packageName"/> is <c>null</c>.
/// </exception>
/// <exception cref="System.ArgumentException">
/// If <paramref name="packageName"/>, <paramref name="relativeFolderPath"/> or
/// <paramref name="assetFileName"/> has an invalid value.
/// </exception>
public static string ResolveDataPathAbsolute(string packageName, string relativeFolderPath = null, string assetFileName = null)
{
    // Argument validation happens inside ResolveDataAssetPath.
    return AssetPathToAbsolutePath(
        ResolveDataAssetPath(packageName, relativeFolderPath, assetFileName)
    );
}
/// <summary>
/// Gets asset path of a package specific data folder or asset and ensures that
/// the path exists on the file system.
/// </summary>
/// <example>
/// <code language="csharp"><![CDATA[
/// Debug.Log(PackageUtility.GetDataAssetPath("@vendor-name/package-name", "Presets", "NewPreset.asset"));
/// // Assets/Plugins/PackageData/@vendor-name/package-name/Presets/NewPreset.asset
/// ]]></code>
/// </example>
/// <param name="packageName">The name of the package.</param>
/// <param name="relativeFolderPath">Relative folder path inside package data
/// folder (optional).</param>
/// <param name="assetFileName">Name of asset file (optional).</param>
/// <returns>
/// The resolved asset path of the package specific data path.
/// </returns>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="packageName"/> is <c>null</c>.
/// </exception>
/// <exception cref="System.ArgumentException">
/// If <paramref name="packageName"/>, <paramref name="relativeFolderPath"/> or
/// <paramref name="assetFileName"/> has an invalid value.
/// </exception>
public static string GetDataAssetPath(string packageName, string relativeFolderPath = null, string assetFileName = null)
{
    // Resolve the full path first so that all arguments (including the file
    // name) are validated before any folders are created on disk.
    string resolvedPath = ResolveDataAssetPath(packageName, relativeFolderPath, assetFileName);
    string containingFolderPath = ResolveDataAssetPath(packageName, relativeFolderPath);
    EnsureFolderAssetPathExists(containingFolderPath);
    return resolvedPath;
}
/// <summary>
/// Gets absolute file system path of a package specific data folder or asset and
/// ensures that the path exists on the file system.
/// </summary>
/// <example>
/// <code language="csharp"><![CDATA[
/// Debug.Log(PackageUtility.GetDataPathAbsolute("@vendor-name/package-name", "Presets", "NewPreset.asset"));
/// // C:\MyProject\Assets\Plugins\PackageData\@vendor-name\package-name\Presets\NewPreset.asset
/// ]]></code>
/// </example>
/// <param name="packageName">The name of the package.</param>
/// <param name="relativeFolderPath">Relative folder path inside package data
/// folder (optional).</param>
/// <param name="assetFileName">Name of asset file (optional).</param>
/// <returns>
/// The resolved asset path of the package specific data path.
/// </returns>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="packageName"/> is <c>null</c>.
/// </exception>
/// <exception cref="System.ArgumentException">
/// If <paramref name="packageName"/>, <paramref name="relativeFolderPath"/> or
/// <paramref name="assetFileName"/> has an invalid value.
/// </exception>
public static string GetDataPathAbsolute(string packageName, string relativeFolderPath = null, string assetFileName = null)
{
    // GetDataAssetPath validates arguments and creates missing folders.
    return AssetPathToAbsolutePath(
        GetDataAssetPath(packageName, relativeFolderPath, assetFileName)
    );
}
/// <summary>
/// Delete a data folder but only if it is empty.
/// </summary>
/// <param name="packageName">The name of the package.</param>
/// <param name="relativeFolderPath">Relative folder path inside package data
/// folder.</param>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="packageName"/> or <paramref name="relativeFolderPath"/> is
/// <c>null</c>.
/// </exception>
/// <exception cref="System.ArgumentException">
/// If <paramref name="packageName"/> or <paramref name="relativeFolderPath"/>
/// has an invalid value.
/// </exception>
public static void DeleteDataFolderIfEmpty(string packageName, string relativeFolderPath)
{
    if (relativeFolderPath == null) {
        throw new ArgumentNullException("relativeFolderPath");
    }
    // packageName is validated inside the resolve call below.
    string absoluteFolderPath = ResolveDataPathAbsolute(packageName, relativeFolderPath);
    // Nothing to delete when the folder does not exist on disk.
    if (!Directory.Exists(absoluteFolderPath)) {
        return;
    }
    // Keep the folder when it still holds any file...
    if (Directory.GetFiles(absoluteFolderPath).Length > 0) {
        return;
    }
    // ...or any sub-directory.
    if (Directory.GetDirectories(absoluteFolderPath).Length > 0) {
        return;
    }
    AssetDatabase.DeleteAsset(ResolveDataAssetPath(packageName, relativeFolderPath));
}
}
}
| |
// Runtime.cs
// Script#/Libraries/CoreLib
// This source code is subject to terms and conditions of the Apache License, Version 2.0.
//
using System;
using System.Linq.Expressions;
using System.Reflection;
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
namespace System {
// NOTE(review): the types in this file appear to be metadata mirrors of .NET
// BCL types for the Script# compiler; [Imported]/[NonScriptable] presumably
// suppress script generation for them — confirm against Script# docs.

/// <summary>Indicates that an enum represents a set of bit flags.</summary>
[AttributeUsage(AttributeTargets.Enum, Inherited = false, AllowMultiple = false)]
[NonScriptable]
[Imported]
public sealed class FlagsAttribute : Attribute {
}
/// <summary>Base class for remoting-capable objects; declaration only.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public abstract class MarshalByRefObject {
}
/// <summary>Base class of all value types; bound to the script name "Object".</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[Imported]
[IgnoreNamespace]
[ScriptName("Object")]
public abstract class ValueType {
}
/// <summary>Platform-sized signed integer; declaration only.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public struct IntPtr {
}
/// <summary>Platform-sized unsigned integer; declaration only.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public struct UIntPtr {
}
/// <summary>Runtime handle for a type; declaration only.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public struct RuntimeTypeHandle {
}
/// <summary>Runtime handle for a field; declaration only.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public struct RuntimeFieldHandle {
}
/// <summary>Runtime handle for a method; declaration only.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public struct RuntimeMethodHandle {
}
/// <summary>Marks a parameter as a variable-length argument list (params).</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class ParamArrayAttribute : Attribute {
}
/// <summary>
/// Marks a program element as obsolete, optionally with a message and an
/// error flag (true turns use of the element into a compile error).
/// </summary>
[AttributeUsage(AttributeTargets.Delegate | AttributeTargets.Interface | AttributeTargets.Event | AttributeTargets.Field | AttributeTargets.Property | AttributeTargets.Method | AttributeTargets.Constructor | AttributeTargets.Enum | AttributeTargets.Struct | AttributeTargets.Class, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class ObsoleteAttribute : Attribute {
private bool _error;
private string _message;
public ObsoleteAttribute() {
}
public ObsoleteAttribute(string message) {
_message = message;
}
public ObsoleteAttribute(string message, bool error) {
_message = message;
_error = error;
}
/// <summary>Whether use of the obsolete element is treated as an error.</summary>
public bool IsError {
get {
return _error;
}
}
/// <summary>The explanatory message supplied at the use site, if any.</summary>
public string Message {
get {
return _message;
}
}
}
/// <summary>Indicates whether a program element is CLS-compliant.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[AttributeUsage(AttributeTargets.All, Inherited = true, AllowMultiple = false)]
[NonScriptable]
[Imported]
public sealed class CLSCompliantAttribute : Attribute {
private bool _isCompliant;
public CLSCompliantAttribute(bool isCompliant) {
_isCompliant = isCompliant;
}
public bool IsCompliant {
get {
return _isCompliant;
}
}
}
}
namespace System.CodeDom.Compiler {
/// <summary>
/// Identifies code produced by a tool, recording the tool's name and version.
/// Metadata mirror of the BCL type; declaration only.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[AttributeUsage(AttributeTargets.All, Inherited = false, AllowMultiple = false)]
[NonScriptable]
[Imported]
public sealed class GeneratedCodeAttribute : Attribute {
private string _tool;
private string _version;
public GeneratedCodeAttribute(string tool, string version) {
_tool = tool;
_version = version;
}
/// <summary>Name of the tool that generated the code.</summary>
public string Tool {
get {
return _tool;
}
}
/// <summary>Version of the tool that generated the code.</summary>
public string Version {
get {
return _version;
}
}
}
}
namespace System.ComponentModel {
/// <summary>
/// This attribute marks a field, property, event or method as
/// "browsable", i.e. present in the type descriptor associated with
/// the type.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[AttributeUsage(AttributeTargets.Field | AttributeTargets.Property | AttributeTargets.Event | AttributeTargets.Method, Inherited = true, AllowMultiple = false)]
[NonScriptable]
[Imported]
public sealed class BrowsableAttribute : Attribute {
}
/// <summary>
/// Controls whether a program element is shown in designer/IntelliSense
/// browsing tools; carries the requested <see cref="EditorBrowsableState"/>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Enum | AttributeTargets.Constructor | AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Event | AttributeTargets.Delegate | AttributeTargets.Interface)]
[NonScriptable]
[Imported]
public sealed class EditorBrowsableAttribute : Attribute {
private EditorBrowsableState _browsableState;
public EditorBrowsableAttribute(EditorBrowsableState state) {
_browsableState = state;
}
public EditorBrowsableState State {
get {
return _browsableState;
}
}
}
/// <summary>Visibility levels understood by browsing tools.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public enum EditorBrowsableState {
Always = 0,
Never = 1,
Advanced = 2
}
}
namespace System.Reflection {
/// <summary>
/// Declares the default member (e.g. the indexer) of a type.
/// FIX: an attribute class must derive from <see cref="Attribute"/>;
/// the base class was missing here (the BCL type derives from Attribute).
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class DefaultMemberAttribute : Attribute {
private string _memberName;
public DefaultMemberAttribute(string memberName) {
_memberName = memberName;
}
/// <summary>The name of the default member.</summary>
public string MemberName {
get {
return _memberName;
}
}
}
/// <summary>Assembly-level copyright string; metadata mirror, declaration only.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyCopyrightAttribute : Attribute {
private string _copyright;
public AssemblyCopyrightAttribute(string copyright) {
_copyright = copyright;
}
public string Copyright {
get {
return _copyright;
}
}
}
/// <summary>Assembly-level trademark string.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyTrademarkAttribute : Attribute {
private string _trademark;
public AssemblyTrademarkAttribute(string trademark) {
_trademark = trademark;
}
public string Trademark {
get {
return _trademark;
}
}
}
/// <summary>Assembly-level product name.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyProductAttribute : Attribute {
private string _product;
public AssemblyProductAttribute(string product) {
_product = product;
}
public string Product {
get {
return _product;
}
}
}
/// <summary>Assembly-level company name.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyCompanyAttribute : Attribute {
private string _company;
public AssemblyCompanyAttribute(string company) {
_company = company;
}
public string Company {
get {
return _company;
}
}
}
/// <summary>Assembly-level description text.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyDescriptionAttribute : Attribute {
private string _description;
public AssemblyDescriptionAttribute(string description) {
_description = description;
}
public string Description {
get {
return _description;
}
}
}
/// <summary>Assembly-level title.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyTitleAttribute : Attribute {
private string _title;
public AssemblyTitleAttribute(string title) {
_title = title;
}
public string Title {
get {
return _title;
}
}
}
/// <summary>Assembly-level build configuration (e.g. Debug/Release).</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyConfigurationAttribute : Attribute {
private string _configuration;
public AssemblyConfigurationAttribute(string configuration) {
_configuration = configuration;
}
public string Configuration {
get {
return _configuration;
}
}
}
/// <summary>Win32 file version of the assembly.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyFileVersionAttribute : Attribute {
private string _version;
public AssemblyFileVersionAttribute(string version) {
_version = version;
}
public string Version {
get {
return _version;
}
}
}
/// <summary>Informational (display) version of the assembly.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyInformationalVersionAttribute : Attribute {
private string _informationalVersion;
public AssemblyInformationalVersionAttribute(string informationalVersion) {
_informationalVersion = informationalVersion;
}
public string InformationalVersion {
get {
return _informationalVersion;
}
}
}
/// <summary>Culture supported by the assembly.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyCultureAttribute : Attribute {
private string _culture;
public AssemblyCultureAttribute(string culture) {
_culture = culture;
}
public string Culture {
get {
return _culture;
}
}
}
/// <summary>Version number of the assembly.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyVersionAttribute : Attribute {
private string _version;
public AssemblyVersionAttribute(string version) {
_version = version;
}
public string Version {
get {
return _version;
}
}
}
/// <summary>Path of the key file used to sign the assembly.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyKeyFileAttribute : Attribute {
private string _keyFile;
public AssemblyKeyFileAttribute(string keyFile) {
_keyFile = keyFile;
}
public string KeyFile {
get {
return _keyFile;
}
}
}
/// <summary>Whether the assembly is delay-signed.</summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false)]
[NonScriptable]
[Imported]
public sealed class AssemblyDelaySignAttribute : Attribute {
private bool _delaySign;
public AssemblyDelaySignAttribute(bool delaySign) {
_delaySign = delaySign;
}
public bool DelaySign {
get {
return _delaySign;
}
}
}
}
namespace System.Runtime.CompilerServices {
/// <summary>Marks compiler-generated program elements.</summary>
[AttributeUsage(AttributeTargets.All, Inherited = true)]
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class CompilerGeneratedAttribute : Attribute {
public CompilerGeneratedAttribute() {
}
}
/// <summary>
/// Stores a decimal constant in metadata. The Value property here is a
/// placeholder that always returns 0m (declaration only).
/// </summary>
[AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Field, Inherited = false)]
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class DecimalConstantAttribute : Attribute {
public DecimalConstantAttribute(byte scale, byte sign, int hi, int mid, int low) {
}
public DecimalConstantAttribute(byte scale, byte sign, uint hi, uint mid, uint low) {
}
public decimal Value {
get {
return 0m;
}
}
}
/// <summary>Emitted by the compiler to mark extension methods and their containers.</summary>
[AttributeUsage(AttributeTargets.Assembly|AttributeTargets.Class|AttributeTargets.Method)]
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class ExtensionAttribute : Attribute {
}
/// <summary>Marks uses of the dynamic type in signatures; members are placeholders.</summary>
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Parameter | AttributeTargets.ReturnValue)]
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class DynamicAttribute : Attribute {
public IList<bool> TransformFlags { get { return null; } }
public DynamicAttribute() {}
public DynamicAttribute(bool[] transformFlags) {}
}
/// <summary>Describes a fixed-size buffer field; members are placeholders.</summary>
[AttributeUsage(AttributeTargets.Field, Inherited = false)]
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class FixedBufferAttribute : Attribute {
public Type ElementType { get { return null; } }
public int Length { get { return 0; } }
public FixedBufferAttribute(Type elementType, int length) {}
}
/// <summary>Assembly-level runtime compatibility switches.</summary>
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = false, Inherited = false)]
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class RuntimeCompatibilityAttribute : Attribute {
public bool WrapNonExceptionThrows { get; set; }
}
/// <summary>Dynamic call site base type; all members are stubs returning null.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public class CallSite {
public CallSiteBinder Binder { get { return null; } }
public static CallSite Create(Type delegateType, CallSiteBinder binder) {
return null;
}
}
/// <summary>Typed dynamic call site; all members are stubs.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class CallSite<T> : CallSite where T : class {
public T Update { get { return null; } }
public T Target;
public static CallSite<T> Create(CallSiteBinder binder) {
return null;
}
}
/// <summary>Base class for dynamic call site binders; members are stubs.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public abstract class CallSiteBinder {
public static LabelTarget UpdateLabel { get { return null; } }
public virtual T BindDelegate<T>(CallSite<T> site, object[] args) where T : class {
return null;
}
}
/// <summary>Supplies the metadata name used for an indexer property.</summary>
[NonScriptable]
[Imported]
[EditorBrowsable(EditorBrowsableState.Never)]
[AttributeUsage(AttributeTargets.Property)]
public class IndexerNameAttribute : Attribute {
public IndexerNameAttribute(string indexerName) {
this.Value = indexerName;
}
public string Value { get; private set; }
}
// NOTE(review): the three async method builders below match the member
// pattern the C# compiler binds async methods against; their bodies are
// intentionally empty no-ops here (declarations for compilation only).
[NonScriptable]
[EditorBrowsable(EditorBrowsableState.Never)]
public struct AsyncVoidMethodBuilder {
public static AsyncVoidMethodBuilder Create(){
return default(AsyncVoidMethodBuilder);
}
public void Start<TStateMachine>(ref TStateMachine stateMachine) where TStateMachine : IAsyncStateMachine {
}
public void SetStateMachine(IAsyncStateMachine stateMachine) {
}
public void AwaitOnCompleted<TAwaiter, TStateMachine>(ref TAwaiter awaiter, ref TStateMachine stateMachine) where TAwaiter : INotifyCompletion where TStateMachine : IAsyncStateMachine {
}
public void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(ref TAwaiter awaiter, ref TStateMachine stateMachine) where TAwaiter : ICriticalNotifyCompletion where TStateMachine : IAsyncStateMachine {
}
public void SetResult() {
}
public void SetException(Exception exception) {
}
}
/// <summary>Builder for async methods returning Task; no-op placeholder members.</summary>
[NonScriptable]
[EditorBrowsable(EditorBrowsableState.Never)]
public struct AsyncTaskMethodBuilder {
public Task Task { get { return null; } }
public static AsyncTaskMethodBuilder Create() {
return default(AsyncTaskMethodBuilder);
}
public void Start<TStateMachine>(ref TStateMachine stateMachine) where TStateMachine : IAsyncStateMachine {
}
public void SetStateMachine(IAsyncStateMachine stateMachine) {
}
public void AwaitOnCompleted<TAwaiter, TStateMachine>(ref TAwaiter awaiter, ref TStateMachine stateMachine) where TAwaiter : INotifyCompletion where TStateMachine : IAsyncStateMachine {
}
public void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(ref TAwaiter awaiter, ref TStateMachine stateMachine) where TAwaiter : ICriticalNotifyCompletion where TStateMachine : IAsyncStateMachine {
}
public void SetResult() {
}
public void SetException(Exception exception) {
}
}
/// <summary>Builder for async methods returning Task&lt;TResult&gt;; no-op placeholder members.</summary>
[NonScriptable]
[EditorBrowsable(EditorBrowsableState.Never)]
public struct AsyncTaskMethodBuilder<TResult> {
public Task<TResult> Task { get { return null; } }
public static AsyncTaskMethodBuilder<TResult> Create() {
return default(AsyncTaskMethodBuilder<TResult>);
}
public void Start<TStateMachine>(ref TStateMachine stateMachine) where TStateMachine : IAsyncStateMachine {
}
public void SetStateMachine(IAsyncStateMachine stateMachine) {
}
public void AwaitOnCompleted<TAwaiter, TStateMachine>(ref TAwaiter awaiter, ref TStateMachine stateMachine) where TAwaiter : INotifyCompletion where TStateMachine : IAsyncStateMachine {
}
public void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(ref TAwaiter awaiter, ref TStateMachine stateMachine) where TAwaiter : ICriticalNotifyCompletion where TStateMachine : IAsyncStateMachine {
}
public void SetResult(TResult result) {
}
public void SetException(Exception exception) {
}
}
/// <summary>Contract implemented by compiler-generated async state machines.</summary>
[NonScriptable]
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IAsyncStateMachine {
void MoveNext();
void SetStateMachine(IAsyncStateMachine stateMachine);
}
/// <summary>Awaiter continuation scheduling contract.</summary>
[NonScriptable]
[EditorBrowsable(EditorBrowsableState.Never)]
public interface INotifyCompletion {
void OnCompleted(Action continuation);
}
/// <summary>Awaiter contract that may skip security context flow.</summary>
[NonScriptable]
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ICriticalNotifyCompletion : INotifyCompletion {
void UnsafeOnCompleted(Action continuation);
}
}
namespace System.Runtime.InteropServices {
/// <summary>
/// Marks a parameter as output-only for marshaling.
/// FIX: an attribute class must derive from <see cref="Attribute"/>; the
/// base class was missing here (the BCL type derives from Attribute).
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public class OutAttribute : Attribute {
}
/// <summary>Controls the memory layout of a struct/class; members are placeholders.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class StructLayoutAttribute : Attribute
{
public int Pack;
public int Size;
public CharSet CharSet;
public LayoutKind Value { get { return LayoutKind.Auto; } }
public StructLayoutAttribute(LayoutKind layoutKind) {}
public StructLayoutAttribute(short layoutKind) {}
}
/// <summary>Character set used when marshaling strings.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public enum CharSet {
None = 1,
Ansi = 2,
Unicode = 3,
Auto = 4,
}
/// <summary>Memory layout strategies for StructLayoutAttribute.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public enum LayoutKind {
Sequential = 0,
Explicit = 2,
Auto = 3,
}
}
namespace System.Runtime.Versioning {
/// <summary>
/// Records the .NET framework version an assembly was built against,
/// with an optional display name. Metadata mirror; declaration only.
/// </summary>
[AttributeUsage(AttributeTargets.Assembly, Inherited = false, AllowMultiple = false)]
[NonScriptable]
[Imported]
public sealed class TargetFrameworkAttribute : Attribute {
private string _frameworkName;
private string _frameworkDisplayName;
public TargetFrameworkAttribute(string frameworkName) {
_frameworkName = frameworkName;
}
/// <summary>Human-readable framework name; settable after construction.</summary>
public string FrameworkDisplayName {
get {
return _frameworkDisplayName;
}
set {
_frameworkDisplayName = value;
}
}
/// <summary>Canonical framework moniker supplied to the constructor.</summary>
public string FrameworkName {
get {
return _frameworkName;
}
}
}
}
namespace System.Threading {
/// <summary>Atomic operations; stubs only (return 0/null), declaration for compilation.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public static class Interlocked {
public static int CompareExchange(ref int location1, int value, int comparand) {
return 0;
}
public static T CompareExchange<T>(ref T location1, T value, T comparand) where T : class {
return null;
}
}
/// <summary>Lock primitives used by the C# lock statement; no-op stubs here.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public static class Monitor {
public static void Enter(object obj) {
}
public static void Enter(object obj, ref bool b) {
}
public static void Exit(object obj) {
}
}
/// <summary>Thread type; placeholder members only.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public class Thread {
public int ManagedThreadId { get { return 0; } }
public static Thread CurrentThread { get { return null; } }
}
}
namespace System.Security.Permissions {
/// <summary>
/// Code access security actions; metadata mirror of the BCL enum
/// (values match the BCL numbering, starting at Demand = 2).
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public enum SecurityAction {
Demand = 2,
Assert = 3,
Deny = 4,
PermitOnly = 5,
LinkDemand = 6,
InheritanceDemand = 7,
RequestMinimum = 8,
RequestOptional = 9,
RequestRefuse = 10,
}
}
namespace Microsoft.CSharp.RuntimeBinder
{
/// <summary>
/// Factory for the call-site binders the C# compiler emits for dynamic
/// operations. Every method here is a stub returning null — these are
/// declarations for compilation only; the real binders live in the
/// imported runtime.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public static class Binder {
public static CallSiteBinder BinaryOperation(CSharpBinderFlags flags, ExpressionType operation, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
public static CallSiteBinder Convert(CSharpBinderFlags flags, Type type, Type context) {
return null;
}
public static CallSiteBinder GetIndex(CSharpBinderFlags flags, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
public static CallSiteBinder GetMember(CSharpBinderFlags flags, string name, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
public static CallSiteBinder Invoke(CSharpBinderFlags flags, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
public static CallSiteBinder InvokeMember(CSharpBinderFlags flags, string name, IEnumerable<Type> typeArguments, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
public static CallSiteBinder InvokeConstructor(CSharpBinderFlags flags, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
public static CallSiteBinder IsEvent(CSharpBinderFlags flags, string name, Type context) {
return null;
}
public static CallSiteBinder SetIndex(CSharpBinderFlags flags, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
public static CallSiteBinder SetMember(CSharpBinderFlags flags, string name, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
public static CallSiteBinder UnaryOperation(CSharpBinderFlags flags, ExpressionType operation, Type context, IEnumerable<CSharpArgumentInfo> argumentInfo) {
return null;
}
}
/// <summary>Flags describing a dynamic operation at its call site.</summary>
[Flags]
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public enum CSharpBinderFlags {
None = 0,
CheckedContext = 1,
InvokeSimpleName = 2,
InvokeSpecialName = 4,
BinaryOperationLogical = 8,
ConvertExplicit = 16,
ConvertArrayIndex = 32,
ResultIndexed = 64,
ValueFromCompoundAssignment = 128,
ResultDiscarded = 256,
}
/// <summary>Describes one argument of a dynamic operation; Create is a stub.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class CSharpArgumentInfo {
public static CSharpArgumentInfo Create(CSharpArgumentInfoFlags flags, string name) {
return null;
}
}
/// <summary>Flags describing a single dynamic-operation argument.</summary>
[Flags]
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public enum CSharpArgumentInfoFlags {
None = 0,
UseCompileTimeType = 1,
Constant = 2,
NamedArgument = 4,
IsRef = 8,
IsOut = 16,
IsStaticType = 32,
}
}
namespace System.Linq.Expressions
{
/// <summary>
/// Expression tree node kinds consumed by the dynamic binder API above.
/// Metadata mirror of the BCL enum; member order must match the BCL so
/// the implicit numeric values line up.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public enum ExpressionType {
Add,
AddChecked,
And,
AndAlso,
ArrayLength,
ArrayIndex,
Call,
Coalesce,
Conditional,
Constant,
Convert,
ConvertChecked,
Divide,
Equal,
ExclusiveOr,
GreaterThan,
GreaterThanOrEqual,
Invoke,
Lambda,
LeftShift,
LessThan,
LessThanOrEqual,
ListInit,
MemberAccess,
MemberInit,
Modulo,
Multiply,
MultiplyChecked,
Negate,
UnaryPlus,
NegateChecked,
New,
NewArrayInit,
NewArrayBounds,
Not,
NotEqual,
Or,
OrElse,
Parameter,
Power,
Quote,
RightShift,
Subtract,
SubtractChecked,
TypeAs,
TypeIs,
Assign,
Block,
DebugInfo,
Decrement,
Dynamic,
Default,
Extension,
Goto,
Increment,
Index,
Label,
RuntimeVariables,
Loop,
Switch,
Throw,
Try,
Unbox,
AddAssign,
AndAssign,
DivideAssign,
ExclusiveOrAssign,
LeftShiftAssign,
ModuloAssign,
MultiplyAssign,
OrAssign,
PowerAssign,
RightShiftAssign,
SubtractAssign,
AddAssignChecked,
MultiplyAssignChecked,
SubtractAssignChecked,
PreIncrementAssign,
PreDecrementAssign,
PostIncrementAssign,
PostDecrementAssign,
TypeEqual,
OnesComplement,
IsTrue,
IsFalse,
}
/// <summary>Jump target used by CallSiteBinder.UpdateLabel; members are stubs.</summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[NonScriptable]
[Imported]
public sealed class LabelTarget {
public string Name { get { return null; } }
public Type Type { get { return null; } }
}
}
namespace System.Diagnostics {
/// <summary>
/// Tells debuggers to step through the marked code rather than into it.
/// Metadata mirror; declaration only.
/// </summary>
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Constructor | AttributeTargets.Method, Inherited = false)]
[NonScriptable]
[EditorBrowsable(EditorBrowsableState.Never)]
[Serializable]
public sealed class DebuggerStepThroughAttribute : Attribute {
public DebuggerStepThroughAttribute() {}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Linq;
namespace Shielded
{
internal class SimpleHashSet : ISet<IShielded>
{
// Number of items currently stored in _array.
private int _count;
// log2 of the initial table size.
private const int SizeShift = 3;
// Initial table capacity (8); capacities are always powers of two.
private const int InitSize = 1 << SizeShift;
// Always capacity - 1, so "hash & _mask" maps a hash into the table.
private int _mask;
// Open-addressed backing store; null slots are empty.
private IShielded[] _array;
// 32-bit bloom filter over item hashes, used for fast negative Contains.
private int _bloom;
/// <summary>
/// Creates an empty set with the initial capacity of <see cref="InitSize"/>.
/// </summary>
public SimpleHashSet()
{
    _array = new IShielded[InitSize];
    _mask = InitSize - 1;
}
// Adds the item unless it is already present. On success also records the
// item's hash in the bloom filter and grows the table once the count
// reaches three quarters of capacity (_mask ^ (_mask >> 2)).
bool AddInternal(IShielded item)
{
    if (!Place(item))
        return false;
    _bloom |= 1 << (item.GetHashCode() & 0x1F);
    _count = _count + 1;
    if (_count >= (_mask ^ (_mask >> 2)))
        Increase();
    return true;
}
// Linear-probe insert: walks from the item's home slot until it finds
// either the item itself or an empty slot. Returns true if the item was
// stored, false if it was already in the table. (The growth policy in
// AddInternal keeps free slots available, so the probe terminates.)
bool Place(IShielded item)
{
    int i = item.GetHashCode() & _mask;
    while (_array[i] != null && _array[i] != item)
        i = (i + 1) & _mask;
    if (_array[i] != null)
        return false;
    _array[i] = item;
    return true;
}
// Doubles the table capacity and rehashes every occupied slot into the
// new array. The bloom filter and _count are unaffected.
void Increase()
{
    int oldSize = _mask + 1;
    int newSize = oldSize << 1;
    var previous = _array;
    _array = new IShielded[newSize];
    _mask = newSize - 1;
    foreach (var item in previous)
    {
        if (item != null)
            Place(item);
    }
}
/// <summary>
/// Performs the commit check on the enlisted items; false as soon as any
/// item refuses to commit under the given write stamp.
/// </summary>
public bool CanCommit(WriteStamp ws)
{
    foreach (var item in _array)
    {
        if (item != null && !item.CanCommit(ws))
            return false;
    }
    return true;
}
/// <summary>
/// Commits the enlisted items, returning the subset that reported changes
/// (checked just before each item's Commit call).
/// </summary>
public List<IShielded> Commit()
{
    var changed = new List<IShielded>();
    foreach (var item in _array)
    {
        if (item == null)
            continue;
        if (item.HasChanges)
            changed.Add(item);
        item.Commit();
    }
    return changed;
}
/// <summary>
/// Commits items without preparing a list of changed ones, to use
/// when you know that there are no changes.
/// </summary>
public void CommitWoChanges()
{
    foreach (var item in _array)
    {
        if (item != null)
            item.Commit();
    }
}
/// <summary>
/// Rolls the enlisted items back.
/// </summary>
public void Rollback()
{
for (int i = 0; i < _array.Length; i++)
if (_array[i] != null)
_array[i].Rollback();
}
/// <summary>
/// Helper for trimming.
/// </summary>
public void TrimCopies(long minOpenTransaction)
{
for (int i = 0; i < _array.Length; i++)
if (_array[i] != null)
_array[i].TrimCopies(minOpenTransaction);
}
#region IEnumerable implementation
IEnumerator IEnumerable.GetEnumerator()
{
return ((IEnumerable<IShielded>)this).GetEnumerator();
}
#endregion
#region IEnumerable implementation
public IEnumerator<IShielded> GetEnumerator()
{
for (int i = 0; i < _array.Length; i++)
{
if (_array[i] != null)
yield return _array[i];
}
}
#endregion
#region ICollection implementation
void ICollection<IShielded>.Add(IShielded item)
{
throw new System.NotImplementedException();
}
void ICollection<IShielded>.Clear()
{
throw new System.NotImplementedException();
}
public bool Contains(IShielded item)
{
var hash = item.GetHashCode();
if (((1 << (hash & 0x1F)) & _bloom) == 0)
return false;
var i = hash & _mask;
for ( ; _array[i] != null && _array[i] != item; i = (++i & _mask)) ;
return _array[i] != null;
}
void ICollection<IShielded>.CopyTo(IShielded[] target, int arrayIndex)
{
if (_count + arrayIndex > target.Length)
throw new IndexOutOfRangeException();
for (int i = 0; i < _array.Length; i++)
if (_array[i] != null)
target[arrayIndex++] = _array[i];
}
bool ICollection<IShielded>.Remove(IShielded item)
{
throw new System.NotImplementedException();
}
public int Count
{
get
{
return _count;
}
}
bool ICollection<IShielded>.IsReadOnly
{
get
{
return false;
}
}
#endregion
#region ISet implementation
public bool Add(IShielded item)
{
return AddInternal(item);
}
void ISet<IShielded>.ExceptWith(IEnumerable<IShielded> other)
{
throw new System.NotImplementedException();
}
void ISet<IShielded>.IntersectWith(IEnumerable<IShielded> other)
{
throw new System.NotImplementedException();
}
bool ISet<IShielded>.IsProperSubsetOf(IEnumerable<IShielded> other)
{
throw new System.NotImplementedException();
}
bool ISet<IShielded>.IsProperSupersetOf(IEnumerable<IShielded> other)
{
throw new System.NotImplementedException();
}
bool ISet<IShielded>.IsSubsetOf(IEnumerable<IShielded> other)
{
throw new System.NotImplementedException();
}
bool ISet<IShielded>.IsSupersetOf(IEnumerable<IShielded> other)
{
throw new System.NotImplementedException();
}
public bool Overlaps(IEnumerable<IShielded> other)
{
var otherAsSet = other as SimpleHashSet;
if (otherAsSet == null)
return other.Any(Contains);
if ((otherAsSet._bloom & this._bloom) == 0)
return false;
for (int i = 0; i < otherAsSet._array.Length; i++)
if (otherAsSet._array[i] != null && Contains(otherAsSet._array[i]))
return true;
return false;
}
public bool SetEquals(IEnumerable<IShielded> other)
{
var otherAsSet = other as SimpleHashSet;
if (otherAsSet == null)
{
int counter = 0;
foreach (var item in other)
{
if (++counter > _count || !Contains(item))
return false;
}
return counter == _count;
}
if (otherAsSet._bloom != this._bloom || otherAsSet._count != _count)
return false;
for (int i = 0; i < otherAsSet._array.Length; i++)
if (otherAsSet._array[i] != null && !Contains(otherAsSet._array[i]))
return false;
return true;
}
void ISet<IShielded>.SymmetricExceptWith(IEnumerable<IShielded> other)
{
throw new System.NotImplementedException();
}
public void UnionWith(IEnumerable<IShielded> other)
{
var otherAsSet = other as SimpleHashSet;
if (otherAsSet != null)
{
for (int i = 0; i < otherAsSet._array.Length; i++)
if (otherAsSet._array[i] != null)
AddInternal(otherAsSet._array[i]);
return;
}
var otherAsList = other as List<IShielded>;
if (otherAsList != null)
{
for (int i = 0; i < otherAsList.Count; i++)
AddInternal(otherAsList[i]);
return;
}
foreach (var item in other)
AddInternal(item);
}
#endregion
}
}
| |
using Alphaleonis.VSProjectSetMgr.Controls;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
namespace Alphaleonis.VSProjectSetMgr.ViewModels.Nodes
{
/// <summary>
/// Implemented by view-model types that wrap a single model instance and can hand
/// the wrapped model back to callers.
/// </summary>
interface IModelWrapper<TModel>
{
// Returns the model instance this view-model wraps.
TModel GetModel();
}
/// <summary>
/// Read-only list of view-models that mirrors an observable collection of models.
/// Each model maps to exactly one view-model, cached in a ConditionalWeakTable so the
/// mapping dies with the model. Changes to the source collection are replayed as the
/// corresponding CollectionChanged events over the view-models. All IList mutation
/// members throw NotSupportedException: the collection is driven only by its source.
/// </summary>
class ViewModelCollection<TViewModel, TModel> : ObservableBase, IReadOnlyList<TViewModel>, INotifyCollectionChanged, System.Collections.IList
    where TViewModel : class
    where TModel : class
{
    #region Private Fields
    private List<TViewModel> m_viewModels;                           // materialized view-models, index-aligned with m_models
    private IList<TModel> m_models;
    private ConditionalWeakTable<TModel, TViewModel> m_viewModelMap; // model -> view-model identity map
    private readonly Func<TModel, TViewModel> m_viewModelFactory;
    #endregion
    #region Events
    public event NotifyCollectionChangedEventHandler CollectionChanged;
    #endregion
    #region Construction
    /// <summary>
    /// Creates a wrapper over <paramref name="modelCollection"/>; the generic
    /// constraint guarantees the source is both indexable and observable.
    /// </summary>
    /// <exception cref="ArgumentNullException">Either argument is null.</exception>
    public static ViewModelCollection<TViewModel, TModel> Create<TCollection>(TCollection modelCollection, Func<TModel, TViewModel> viewModelFactory)
        where TCollection : class, IList<TModel>, INotifyCollectionChanged
    {
        if (modelCollection == null)
            throw new ArgumentNullException("modelCollection", "modelCollection is null.");
        if (viewModelFactory == null)
            throw new ArgumentNullException("viewModelFactory", "viewModelFactory is null.");
        return new ViewModelCollection<TViewModel, TModel>(modelCollection, viewModelFactory);
    }
    private ViewModelCollection(IList<TModel> models, Func<TModel, TViewModel> viewModelFactory)
    {
        // Weak event subscription so this wrapper does not keep the source alive.
        CollectionChangedEventManager.AddHandler((INotifyCollectionChanged)models, SourceCollectionChanged);
        m_models = models;
        m_viewModelFactory = viewModelFactory;
        m_viewModelMap = new ConditionalWeakTable<TModel, TViewModel>();
        m_viewModels = new List<TViewModel>(models.Select(m => GetOrCreateVM(m)));
    }
    #endregion
    #region Properties
    public TViewModel this[int index]
    {
        get
        {
            return m_viewModels[index];
        }
    }
    public int Count
    {
        get
        {
            return m_viewModels.Count;
        }
    }
    #endregion
    #region Methods
    public IEnumerator<TViewModel> GetEnumerator()
    {
        return m_viewModels.GetEnumerator();
    }
    System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }
    // Dispatches a source-collection change to the matching projection handler.
    private void SourceCollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
    {
        switch (e.Action)
        {
            case NotifyCollectionChangedAction.Add:
                ModelItemsAdded(e);
                break;
            case NotifyCollectionChangedAction.Remove:
                ModelItemsRemoved(e);
                break;
            case NotifyCollectionChangedAction.Replace:
                ModelItemsReplaced(e);
                break;
            case NotifyCollectionChangedAction.Move:
                ModelItemsMoved(e);
                break;
            case NotifyCollectionChangedAction.Reset:
                ModelItemsReset(e);
                break;
        }
    }
    // Rebuilds the whole projection and raises a Reset.
    private void ModelItemsReset(NotifyCollectionChangedEventArgs e)
    {
        m_viewModels.Clear();
        m_viewModels.AddRange(m_models.Select(m => GetOrCreateVM(m)));
        OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Reset));
    }
    // Mirrors a Move of a contiguous range of items.
    private void ModelItemsMoved(NotifyCollectionChangedEventArgs e)
    {
        var viewModelsMoved = m_viewModels.GetRange(e.OldStartingIndex, e.OldItems.Count);
        m_viewModels.RemoveRange(e.OldStartingIndex, e.OldItems.Count);
        m_viewModels.InsertRange(e.NewStartingIndex, viewModelsMoved);
        // BUGFIX: the Move constructor takes (changedItems, newIndex, oldIndex);
        // the two indices were previously passed in swapped order.
        OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Move, viewModelsMoved, e.NewStartingIndex, e.OldStartingIndex));
    }
    // Mirrors a Replace: removes the view-models of the old items and inserts
    // view-models for the new items.
    private void ModelItemsReplaced(NotifyCollectionChangedEventArgs e)
    {
        List<TViewModel> viewModelsRemoved = new List<TViewModel>();
        foreach (TModel modelItemRemoved in e.OldItems.Cast<TModel>())
        {
            int index = m_viewModels.IndexOf(GetOrCreateVM(modelItemRemoved));
            if (index != -1)
            {
                viewModelsRemoved.Add(m_viewModels[index]);
                // BUGFIX: remove the replaced view-model from the mirrored list;
                // the original removed from viewModelsRemoved instead, leaving a
                // stale entry in m_viewModels (and could throw on RemoveAt).
                m_viewModels.RemoveAt(index);
            }
        }
        IList<TViewModel> viewModelsAdded = e.NewItems.Cast<TModel>().Select(m => GetOrCreateVM(m)).ToArray();
        if (e.NewStartingIndex != -1)
            m_viewModels.InsertRange(e.NewStartingIndex, viewModelsAdded);
        else
            m_viewModels.AddRange(viewModelsAdded);
        // BUGFIX: the Replace constructor takes (newItems, oldItems, index);
        // the added/removed lists were previously passed in swapped order.
        OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Replace, (System.Collections.IList)viewModelsAdded, viewModelsRemoved, e.NewStartingIndex));
    }
    // Mirrors a Remove, either by index range (fast path) or by item lookup when
    // the source does not report an index.
    private void ModelItemsRemoved(NotifyCollectionChangedEventArgs e)
    {
        List<TViewModel> removedItems = new List<TViewModel>();
        if (e.OldStartingIndex == -1)
        {
            foreach (var removedItem in e.OldItems.Cast<TModel>())
            {
                TViewModel viewModel = GetOrCreateVM(removedItem);
                int removalIndex = m_viewModels.IndexOf(viewModel);
                if (removalIndex != -1)
                {
                    m_viewModels.RemoveAt(removalIndex);
                    removedItems.Add(viewModel);
                }
            }
        }
        else
        {
            removedItems.AddRange(m_viewModels.Skip(e.OldStartingIndex).Take(e.OldItems.Count));
            m_viewModels.RemoveRange(e.OldStartingIndex, e.OldItems.Count);
        }
        if (removedItems.Count > 0)
        {
            NotifyCollectionChangedEventArgs ne = new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, removedItems);
            OnCollectionChanged(ne);
        }
    }
    // Mirrors an Add; when the source reports no index (-1), appends at the end.
    private void ModelItemsAdded(NotifyCollectionChangedEventArgs e)
    {
        int index = e.NewStartingIndex;
        if (index == -1)
            index = Count;
        TViewModel[] list = e.NewItems.Cast<TModel>().Select(m => GetOrCreateVM(m)).ToArray();
        // BUGFIX: use the normalized 'index'; the original passed e.NewStartingIndex,
        // so a source event with index -1 made InsertRange throw.
        m_viewModels.InsertRange(index, list);
        OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, list, index));
    }
    // Returns the one view-model for this model, creating and caching it on first use.
    private TViewModel GetOrCreateVM(TModel modelItem)
    {
        return m_viewModelMap.GetValue(modelItem, (mi) => m_viewModelFactory(mi));
    }
    protected virtual void OnCollectionChanged(NotifyCollectionChangedEventArgs e)
    {
        NotifyCollectionChangedEventHandler handler = CollectionChanged;
        if (handler != null)
            handler(this, e);
    }
    #endregion
    int System.Collections.IList.Add(object value)
    {
        throw new NotSupportedException("Cannot modify a read-only collection.");
    }
    void System.Collections.IList.Clear()
    {
        throw new NotSupportedException("Cannot modify a read-only collection.");
    }
    bool System.Collections.IList.Contains(object value)
    {
        return m_viewModels.Contains(value as TViewModel);
    }
    int System.Collections.IList.IndexOf(object value)
    {
        return m_viewModels.IndexOf(value as TViewModel);
    }
    void System.Collections.IList.Insert(int index, object value)
    {
        throw new NotSupportedException("Cannot modify a read-only collection.");
    }
    bool System.Collections.IList.IsFixedSize
    {
        get
        {
            return false;
        }
    }
    bool System.Collections.IList.IsReadOnly
    {
        get
        {
            return true;
        }
    }
    void System.Collections.IList.Remove(object value)
    {
        throw new NotSupportedException("Cannot modify a read-only collection.");
    }
    void System.Collections.IList.RemoveAt(int index)
    {
        throw new NotSupportedException("Cannot modify a read-only collection.");
    }
    object System.Collections.IList.this[int index]
    {
        get
        {
            return m_viewModels[index];
        }
        set
        {
            throw new NotSupportedException("Cannot modify a read-only collection.");
        }
    }
    void System.Collections.ICollection.CopyTo(Array array, int index)
    {
        throw new NotSupportedException();
    }
    bool System.Collections.ICollection.IsSynchronized
    {
        get
        {
            return false;
        }
    }
    object System.Collections.ICollection.SyncRoot
    {
        get
        {
            return m_viewModels;
        }
    }
}
}
| |
using UnityEngine;
using System.Collections;
/// <summary>
/// Third-person follow camera for the Pickup demo. Runs only on the client that owns
/// the photonView; remote instances disable themselves in OnEnable. The camera follows
/// behind the target at a fixed distance/height, smoothing angle and height separately.
/// </summary>
public class PickupCamera : Photon.MonoBehaviour
{
    public Transform cameraTransform;
    private Transform _target;
    // The distance in the x-z plane to the target
    public float distance = 7.0f;
    // the height we want the camera to be above the target
    public float height = 3.0f;
    public float angularSmoothLag = 0.3f;
    public float angularMaxSpeed = 15.0f;
    public float heightSmoothLag = 0.3f;
    public float snapSmoothLag = 0.2f;
    public float snapMaxSpeed = 720.0f;
    public float clampHeadPositionScreenSpace = 0.75f;
    public float lockCameraTimeout = 0.2f;
    private Vector3 headOffset = Vector3.zero;
    private Vector3 centerOffset = Vector3.zero;
    private float heightVelocity = 0.0f;
    private float angleVelocity = 0.0f;
    private bool snap = false;
    private PickupController controller;
    private float targetHeight = 100000.0f;
    private Camera m_CameraTransformCamera;

    void OnEnable()
    {
        // Only the owning client drives this camera.
        if( this.photonView != null && !this.photonView.isMine )
        {
            this.enabled = false;
            return;
        }
        if( !cameraTransform && Camera.main )
            cameraTransform = Camera.main.transform;
        if( !cameraTransform )
        {
            Debug.Log( "Please assign a camera to the ThirdPersonCamera script." );
            enabled = false;
            // BUGFIX: must bail out here. Setting enabled = false does not abort the
            // current call, and the next line dereferences the missing cameraTransform,
            // which would throw a NullReferenceException.
            return;
        }
        m_CameraTransformCamera = cameraTransform.GetComponent<Camera>();
        _target = transform;
        if( _target )
        {
            controller = _target.GetComponent<PickupController>();
        }
        if( controller )
        {
            // Derive body-center and head offsets from the character collider bounds.
            CharacterController characterController = (CharacterController)_target.GetComponent<Collider>();
            centerOffset = characterController.bounds.center - _target.position;
            headOffset = centerOffset;
            headOffset.y = characterController.bounds.max.y - _target.position.y;
        }
        else
            Debug.Log( "Please assign a target to the camera that has a ThirdPersonController script attached." );
        Cut( _target, centerOffset );
    }

    // Debug helper: visualizes the head offset in the scene view.
    void DebugDrawStuff()
    {
        Debug.DrawLine( _target.position, _target.position + headOffset );
    }

    // Absolute angular difference between two angles, both normalized to [0, 360).
    float AngleDistance( float a, float b )
    {
        a = Mathf.Repeat( a, 360 );
        b = Mathf.Repeat( b, 360 );
        return Mathf.Abs( b - a );
    }

    // Repositions the camera behind the target for this frame. The parameters are
    // unused (kept for signature compatibility); the method reads _target directly.
    void Apply( Transform dummyTarget, Vector3 dummyCenter )
    {
        // Early out if we don't have a target
        if( !controller )
            return;
        Vector3 targetCenter = _target.position + centerOffset;
        Vector3 targetHead = _target.position + headOffset;
        // DebugDrawStuff();
        // Calculate the current & target rotation angles
        float originalTargetAngle = _target.eulerAngles.y;
        float currentAngle = cameraTransform.eulerAngles.y;
        // Adjust real target angle when camera is locked
        float targetAngle = originalTargetAngle;
        // When pressing Fire2 (alt) the camera will snap to the target direction real quick.
        // It will stop snapping when it reaches the target
        if( Input.GetButton( "Fire2" ) )
            snap = true;
        if( snap )
        {
            // We are close to the target, so we can stop snapping now!
            if( AngleDistance( currentAngle, originalTargetAngle ) < 3.0f )
                snap = false;
            currentAngle = Mathf.SmoothDampAngle( currentAngle, targetAngle, ref angleVelocity, snapSmoothLag, snapMaxSpeed );
        }
        // Normal camera motion
        else
        {
            if( controller.GetLockCameraTimer() < lockCameraTimeout )
            {
                targetAngle = currentAngle;
            }
            // Lock the camera when moving backwards!
            // * It is really confusing to do 180 degree spins when turning around.
            if( AngleDistance( currentAngle, targetAngle ) > 160 && controller.IsMovingBackwards() )
                targetAngle += 180;
            currentAngle = Mathf.SmoothDampAngle( currentAngle, targetAngle, ref angleVelocity, angularSmoothLag, angularMaxSpeed );
        }
        // When jumping don't move camera upwards but only down!
        if( controller.IsJumping() )
        {
            // We'd be moving the camera upwards, do that only if it's really high
            float newTargetHeight = targetCenter.y + height;
            if( newTargetHeight < targetHeight || newTargetHeight - targetHeight > 5 )
                targetHeight = targetCenter.y + height;
        }
        // When walking always update the target height
        else
        {
            targetHeight = targetCenter.y + height;
        }
        // Damp the height
        float currentHeight = cameraTransform.position.y;
        currentHeight = Mathf.SmoothDamp( currentHeight, targetHeight, ref heightVelocity, heightSmoothLag );
        // Convert the angle into a rotation, by which we then reposition the camera
        Quaternion currentRotation = Quaternion.Euler( 0, currentAngle, 0 );
        // Set the position of the camera on the x-z plane to:
        // distance meters behind the target
        cameraTransform.position = targetCenter;
        cameraTransform.position += currentRotation * Vector3.back * distance;
        // Set the height of the camera
        cameraTransform.position = new Vector3( cameraTransform.position.x, currentHeight, cameraTransform.position.z );
        // Always look at the target
        SetUpRotation( targetCenter, targetHead );
    }

    // LateUpdate so the camera moves after the character has moved this frame.
    void LateUpdate()
    {
        Apply( transform, Vector3.zero );
    }

    // Instantly jumps the camera to the target by running Apply with near-zero
    // smoothing, then restores the normal smoothing parameters.
    void Cut( Transform dummyTarget, Vector3 dummyCenter )
    {
        float oldHeightSmooth = heightSmoothLag;
        float oldSnapMaxSpeed = snapMaxSpeed;
        float oldSnapSmooth = snapSmoothLag;
        snapMaxSpeed = 10000;
        snapSmoothLag = 0.001f;
        heightSmoothLag = 0.001f;
        snap = true;
        Apply( transform, Vector3.zero );
        heightSmoothLag = oldHeightSmooth;
        snapMaxSpeed = oldSnapMaxSpeed;
        snapSmoothLag = oldSnapSmooth;
    }

    void SetUpRotation( Vector3 centerPos, Vector3 headPos )
    {
        // Now it's getting hairy. The devil is in the details here, the big issue is jumping of course.
        // * When jumping up and down we don't want to center the guy in screen space.
        //   This is important to give a feel for how high you jump and avoiding large camera movements.
        //
        // * At the same time we dont want him to ever go out of screen and we want all rotations to be totally smooth.
        //
        // So here is what we will do:
        //
        // 1. We first find the rotation around the y axis. Thus he is always centered on the y-axis
        // 2. When grounded we make him be centered
        // 3. When jumping we keep the camera rotation but rotate the camera to get him back into view if his head is above some threshold
        // 4. When landing we smoothly interpolate towards centering him on screen
        Vector3 cameraPos = cameraTransform.position;
        Vector3 offsetToCenter = centerPos - cameraPos;
        // Generate base rotation only around y-axis
        Quaternion yRotation = Quaternion.LookRotation( new Vector3( offsetToCenter.x, 0, offsetToCenter.z ) );
        Vector3 relativeOffset = Vector3.forward * distance + Vector3.down * height;
        cameraTransform.rotation = yRotation * Quaternion.LookRotation( relativeOffset );
        // Calculate the projected center position and top position in world space
        Ray centerRay = m_CameraTransformCamera.ViewportPointToRay( new Vector3( 0.5f, 0.5f, 1 ) );
        Ray topRay = m_CameraTransformCamera.ViewportPointToRay( new Vector3( 0.5f, clampHeadPositionScreenSpace, 1 ) );
        Vector3 centerRayPos = centerRay.GetPoint( distance );
        Vector3 topRayPos = topRay.GetPoint( distance );
        float centerToTopAngle = Vector3.Angle( centerRay.direction, topRay.direction );
        float heightToAngle = centerToTopAngle / ( centerRayPos.y - topRayPos.y );
        float extraLookAngle = heightToAngle * ( centerRayPos.y - centerPos.y );
        if( extraLookAngle < centerToTopAngle )
        {
            extraLookAngle = 0;
        }
        else
        {
            // Tilt up only by the amount exceeding the clamp threshold.
            extraLookAngle = extraLookAngle - centerToTopAngle;
            cameraTransform.rotation *= Quaternion.Euler( -extraLookAngle, 0, 0 );
        }
    }

    Vector3 GetCenterOffset()
    {
        return centerOffset;
    }
}
| |
//------------------------------------------------------------------------------
// <copyright file="XmlWellFormedWriterHelpers.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <owner current="true" primary="true">[....]</owner>
//------------------------------------------------------------------------------
using System;
using System.Text;
using System.Diagnostics;
using System.Collections.Generic;
namespace System.Xml {
internal partial class XmlWellFormedWriter : XmlWriter {
//
// Private types
//
/// <summary>
/// Exposes the writer's in-scope namespace bindings through IXmlNamespaceResolver
/// without handing callers the writer itself. Only prefix/namespace lookup is
/// supported; enumerating all in-scope namespaces is not implemented.
/// </summary>
class NamespaceResolverProxy : IXmlNamespaceResolver {
    readonly XmlWellFormedWriter _writer;

    internal NamespaceResolverProxy(XmlWellFormedWriter wfWriter) {
        _writer = wfWriter;
    }

    IDictionary<string, string> IXmlNamespaceResolver.GetNamespacesInScope(XmlNamespaceScope scope) {
        // Not needed by the writer's callers.
        throw new NotImplementedException();
    }

    string IXmlNamespaceResolver.LookupNamespace(string prefix) {
        return _writer.LookupNamespace(prefix);
    }

    string IXmlNamespaceResolver.LookupPrefix(string namespaceName) {
        return _writer.LookupPrefix(namespaceName);
    }
}
// Saved writer state for one open element on the element stack; restored when the
// element is closed.
partial struct ElementScope {
// top of the namespace stack before this element's declarations were pushed
internal int prevNSTop;
internal string prefix;
internal string localName;
internal string namespaceUri;
// xml:space value seen on this element; sentinel -1 (set below) means "not set here"
internal XmlSpace xmlSpace;
// xml:lang value seen on this element; null means "not set here"
internal string xmlLang;
internal void Set(string prefix, string localName, string namespaceUri, int prevNSTop) {
this.prevNSTop = prevNSTop;
this.prefix = prefix;
this.namespaceUri = namespaceUri;
this.localName = localName;
// cast of -1 is intentional: a sentinel outside the XmlSpace enum range
this.xmlSpace = (System.Xml.XmlSpace)(int)-1;
this.xmlLang = null;
}
// Delegates end-element output for this element to the raw writer.
internal void WriteEndElement(XmlRawWriter rawWriter) {
rawWriter.WriteEndElement(prefix, localName, namespaceUri);
}
// Delegates full (non-empty-tag) end-element output to the raw writer.
internal void WriteFullEndElement(XmlRawWriter rawWriter) {
rawWriter.WriteFullEndElement(prefix, localName, namespaceUri);
}
}
// How a namespace record entered scope and whether its declaration still needs
// to be emitted (see Namespace.WriteDecl, which asserts NeedToWrite).
enum NamespaceKind {
Written,     // declaration has already been written out
NeedToWrite, // declaration must still be emitted
Implied,     // in scope without an explicit declaration here
Special,     // reserved bindings (presumably xml/xmlns — confirm against the writer)
}
// One prefix -> namespace binding on the writer's namespace stack.
partial struct Namespace {
internal string prefix;
internal string namespaceUri;
internal NamespaceKind kind;
// index of a previous binding of the same prefix; initialized to -1 in Set
internal int prevNsIndex;
internal void Set(string prefix, string namespaceUri, NamespaceKind kind) {
this.prefix = prefix;
this.namespaceUri = namespaceUri;
this.kind = kind;
this.prevNsIndex = -1;
}
// Emits this binding as xmlns="..." or xmlns:prefix="...". Only valid for
// records still marked NeedToWrite.
internal void WriteDecl(XmlWriter writer, XmlRawWriter rawWriter) {
Debug.Assert(kind == NamespaceKind.NeedToWrite);
if (null != rawWriter) {
// fast path: the raw writer emits the declaration directly
rawWriter.WriteNamespaceDeclaration(prefix, namespaceUri);
}
else {
// general path: write the declaration as a regular attribute
if (prefix.Length == 0) {
writer.WriteStartAttribute(string.Empty, "xmlns", XmlReservedNs.NsXmlNs);
}
else {
writer.WriteStartAttribute("xmlns", prefix, XmlReservedNs.NsXmlNs);
}
writer.WriteString(namespaceUri);
writer.WriteEndAttribute();
}
}
}
/// <summary>
/// Records one attribute name (prefix / local name / namespace) written on the
/// current element, plus a link to the previous entry in the writer's
/// duplicate-detection chain.
/// </summary>
struct AttrName {
    internal string prefix;
    internal string namespaceUri;
    internal string localName;
    internal int prev;

    internal void Set(string prefix, string localName, string namespaceUri) {
        this.localName = localName;
        this.prefix = prefix;
        this.namespaceUri = namespaceUri;
        this.prev = 0;
    }

    internal bool IsDuplicate(string prefix, string localName, string namespaceUri) {
        // Different local names can never collide.
        if (this.localName != localName) {
            return false;
        }
        // Same local name is a duplicate when either the prefix or the namespace matches.
        return (this.prefix == prefix) || (this.namespaceUri == namespaceUri);
    }
}
// Classifies the attribute currently being written when it needs special handling.
enum SpecialAttribute {
No = 0,        // ordinary attribute
DefaultXmlns,  // xmlns="..."
PrefixedXmlns, // xmlns:foo="..."
XmlSpace,      // xml:space
XmlLang        // xml:lang
}
// Caches every writer call that makes up one attribute's value so the value can be
// inspected as a string (StringValue) and replayed onto another writer later.
// A single WriteString call is special-cased into singleStringValue; any other call
// pattern materializes the items list via StartComplexValue.
partial class AttributeValueCache {
// kind of cached writer call; the runtime type of Item.data depends on this tag
enum ItemType {
EntityRef,
CharEntity,
SurrogateCharEntity,
Whitespace,
String,
StringChars,
Raw,
RawChars,
ValueString,
}
// one recorded writer call: the call kind plus its (boxed) argument
class Item {
internal ItemType type;
internal object data;
internal Item() { }
internal void Set(ItemType type, object data) {
this.type = type;
this.data = data;
}
}
// slice of a caller-supplied char buffer; index/count are narrowed by Trim
class BufferChunk {
internal char[] buffer;
internal int index;
internal int count;
internal BufferChunk(char[] buffer, int index, int count) {
this.buffer = buffer;
this.index = index;
this.count = count;
}
}
// accumulated text of all cached calls (used once more than one call was made)
StringBuilder stringValue = new StringBuilder();
string singleStringValue; // special-case for a single WriteString call
// recorded calls; only the range [firstItem, lastItem] is replayed
Item[] items;
int firstItem;
int lastItem = -1;
// Text of the whole attribute value as cached so far.
internal string StringValue {
get {
if (singleStringValue != null) {
return singleStringValue;
}
else {
return stringValue.ToString();
}
}
}
// Caches an entity reference; the five built-in entities are expanded into
// stringValue, others are appended in &name; form.
internal void WriteEntityRef(string name) {
if (singleStringValue != null) {
StartComplexValue();
}
switch (name) {
case "lt":
stringValue.Append('<');
break;
case "gt":
stringValue.Append('>');
break;
case "quot":
stringValue.Append('"');
break;
case "apos":
stringValue.Append('\'');
break;
case "amp":
stringValue.Append('&');
break;
default:
stringValue.Append('&');
stringValue.Append(name);
stringValue.Append(';');
break;
}
AddItem(ItemType.EntityRef, name);
}
// Caches a character entity.
internal void WriteCharEntity(char ch) {
if (singleStringValue != null) {
StartComplexValue();
}
stringValue.Append(ch);
AddItem(ItemType.CharEntity, ch);
}
// Caches a surrogate pair; note the high surrogate is appended first.
internal void WriteSurrogateCharEntity(char lowChar, char highChar) {
if (singleStringValue != null) {
StartComplexValue();
}
stringValue.Append(highChar);
stringValue.Append(lowChar);
AddItem(ItemType.SurrogateCharEntity, new char[] { lowChar, highChar });
}
// Caches a whitespace run.
internal void WriteWhitespace(string ws) {
if (singleStringValue != null) {
StartComplexValue();
}
stringValue.Append(ws);
AddItem(ItemType.Whitespace, ws);
}
// Caches a string; the very first string goes into the singleStringValue fast path.
internal void WriteString(string text) {
if (singleStringValue != null) {
StartComplexValue();
}
else {
// special-case for a single WriteString
if (lastItem == -1) {
singleStringValue = text;
return;
}
}
stringValue.Append(text);
AddItem(ItemType.String, text);
}
// Caches a char-buffer slice (the buffer is referenced, not copied).
internal void WriteChars(char[] buffer, int index, int count) {
if (singleStringValue != null) {
StartComplexValue();
}
stringValue.Append(buffer, index, count);
AddItem(ItemType.StringChars, new BufferChunk(buffer, index, count));
}
// Caches a raw char-buffer slice.
internal void WriteRaw(char[] buffer, int index, int count) {
if (singleStringValue != null) {
StartComplexValue();
}
stringValue.Append(buffer, index, count);
AddItem(ItemType.RawChars, new BufferChunk(buffer, index, count));
}
// Caches a raw string.
internal void WriteRaw(string data) {
if (singleStringValue != null) {
StartComplexValue();
}
stringValue.Append(data);
AddItem(ItemType.Raw, data);
}
// Caches a typed-value string.
internal void WriteValue(string value) {
if (singleStringValue != null) {
StartComplexValue();
}
stringValue.Append(value);
AddItem(ItemType.ValueString, value);
}
// Re-issues the cached calls, in order, against the given writer.
internal void Replay(XmlWriter writer) {
if (singleStringValue != null) {
writer.WriteString(singleStringValue);
return;
}
BufferChunk bufChunk;
for (int i = firstItem; i <= lastItem; i++) {
Item item = items[i];
switch (item.type) {
case ItemType.EntityRef:
writer.WriteEntityRef((string)item.data);
break;
case ItemType.CharEntity:
writer.WriteCharEntity((char)item.data);
break;
case ItemType.SurrogateCharEntity:
char[] chars = (char[])item.data;
writer.WriteSurrogateCharEntity(chars[0], chars[1]);
break;
case ItemType.Whitespace:
writer.WriteWhitespace((string)item.data);
break;
case ItemType.String:
writer.WriteString((string)item.data);
break;
case ItemType.StringChars:
bufChunk = (BufferChunk)item.data;
writer.WriteChars(bufChunk.buffer, bufChunk.index, bufChunk.count);
break;
case ItemType.Raw:
writer.WriteRaw((string)item.data);
break;
case ItemType.RawChars:
bufChunk = (BufferChunk)item.data;
// NOTE(review): RawChars is replayed via WriteChars, not WriteRaw — this is
// asymmetric with the Raw case above; confirm the re-escaping is intended.
writer.WriteChars(bufChunk.buffer, bufChunk.index, bufChunk.count);
break;
case ItemType.ValueString:
writer.WriteValue((string)item.data);
break;
default:
Debug.Assert(false, "Unexpected ItemType value.");
break;
}
}
}
// This method trims whitespaces from the beginning and the end of the string and cached writer events
internal void Trim() {
// if only one string value -> trim the write spaces directly
if (singleStringValue != null) {
singleStringValue = XmlConvert.TrimString(singleStringValue);
return;
}
// trim the string in StringBuilder
string valBefore = stringValue.ToString();
string valAfter = XmlConvert.TrimString(valBefore);
if (valBefore != valAfter) {
stringValue = new StringBuilder(valAfter);
}
// trim the beginning of the recorded writer events; the loop condition keeps
// going only while the current item is still being dropped/trimmed from the front
XmlCharType xmlCharType = XmlCharType.Instance;
int i = firstItem;
while (i == firstItem && i <= lastItem) {
Item item = items[i];
switch (item.type) {
case ItemType.Whitespace:
firstItem++;
break;
case ItemType.String:
case ItemType.Raw:
case ItemType.ValueString:
item.data = XmlConvert.TrimStringStart((string)item.data);
if (((string)item.data).Length == 0) {
// no characters left -> move the firstItem index to exclude it from the Replay
firstItem++;
}
break;
case ItemType.StringChars:
case ItemType.RawChars:
BufferChunk bufChunk = (BufferChunk)item.data;
int endIndex = bufChunk.index + bufChunk.count;
// narrow the chunk in place instead of copying the buffer
while (bufChunk.index < endIndex && xmlCharType.IsWhiteSpace(bufChunk.buffer[bufChunk.index])) {
bufChunk.index++;
bufChunk.count--;
}
if (bufChunk.index == endIndex) {
// no characters left -> move the firstItem index to exclude it from the Replay
firstItem++;
}
break;
}
i++;
}
// trim the end of the recorded writer events (mirror image of the loop above)
i = lastItem;
while (i == lastItem && i >= firstItem) {
Item item = items[i];
switch (item.type) {
case ItemType.Whitespace:
lastItem--;
break;
case ItemType.String:
case ItemType.Raw:
case ItemType.ValueString:
item.data = XmlConvert.TrimStringEnd((string)item.data);
if (((string)item.data).Length == 0) {
// no characters left -> move the lastItem index to exclude it from the Replay
lastItem--;
}
break;
case ItemType.StringChars:
case ItemType.RawChars:
BufferChunk bufChunk = (BufferChunk)item.data;
while (bufChunk.count > 0 && xmlCharType.IsWhiteSpace(bufChunk.buffer[bufChunk.index + bufChunk.count - 1])) {
bufChunk.count--;
}
if (bufChunk.count == 0) {
// no characters left -> move the lastItem index to exclude it from the Replay
lastItem--;
}
break;
}
i--;
}
}
// Resets the cache for the next attribute; Item instances in 'items' are kept
// and reused by AddItem.
internal void Clear() {
singleStringValue = null;
lastItem = -1;
firstItem = 0;
stringValue.Length = 0;
}
// Converts the single-string fast path into the general item-list form.
private void StartComplexValue() {
Debug.Assert(singleStringValue != null);
Debug.Assert(lastItem == -1);
stringValue.Append( singleStringValue );
AddItem(ItemType.String, singleStringValue);
singleStringValue = null;
}
// Appends a recorded call, doubling the items array as needed and reusing Item
// instances left over from earlier attributes (see Clear).
void AddItem(ItemType type, object data) {
int newItemIndex = lastItem + 1;
if (items == null) {
items = new Item[4];
}
else if (items.Length == newItemIndex) {
Item[] newItems = new Item[newItemIndex * 2];
Array.Copy(items, newItems, newItemIndex);
items = newItems;
}
if (items[newItemIndex] == null) {
items[newItemIndex] = new Item();
}
items[newItemIndex].Set(type, data);
lastItem = newItemIndex;
}
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Web.Caching;
using System.Xml;
using System.Xml.Serialization;
using newtelligence.DasBlog.Runtime;
using newtelligence.DasBlog.Runtime.Proxies;
using newtelligence.DasBlog.Util;
using newtelligence.DasBlog.Web.Core;
namespace newtelligence.DasBlog.Web
{
/*
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// STOP! STOP! STOP! STOP!
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// If you want to add entries, please add the properties
// just above the comment tag at the bottom. SiteConfig
// is now exposed through a WebService and not putting
// the properties at the right place will break clients.
*/
[XmlType(Namespace = "urn:newtelligence-com:dasblog:config")]
[XmlRoot(Namespace = "urn:newtelligence-com:dasblog:config")]
public class SiteConfig
{
string title = "(none)";
string subtitle = "(none)";
string contact = "(none)";
string theme = "default";
string root = "(none)";
string copyright = "(none)";
string description = "";
string frontPageCategory = "";
int frontPageDayCount = 10;
int frontPageEntryCount = 50;
bool categoryAllEntries = true;
int rssDayCount = 10;
int rssMainEntryCount = 50;
int rssEntryCount = 50;
bool enableRssItemFooters = false;
string rssItemFooter;
bool alwaysIncludeContentInRSS = false;
bool entryTitleAsLink = false;
bool notifyWebLogsDotCom = false;
bool notifyBloGs = false;
bool obfuscateEmail = true;
string notificationEmailAddress = null;
// ---- Notification-by-email switches ----
bool sendCommentsByEmail = false;
bool sendPingbacksByEmail = false;
bool sendTrackbacksByEmail = false;
bool sendReferralsByEmail = false;
bool sendPostsByEmail = false;
// ---- Feature toggles for APIs, comments, and services ----
bool enableBloggerApi = true;
bool enableComments = true;
bool enableCommentApi = true;
bool enableConfigEditService = false;
bool showCommentCount = true;
bool enableAutoPingback = true;
bool enableEditService = false;
bool enableTrackbackService = true;
bool enablePingbackService = true;
bool applyContentFiltersToWeb = true;
bool applyContentFiltersToRSS = false;
bool showItemDescriptionInAggregatedViews = false;
bool enableStartPageCaching = false;
bool enableBlogrollDescription = false;
bool enableUrlRewriting = false;
bool enableFtb = true;
bool useUserCulture = true;
int displayTimeZoneIndex = 90; // this is GMT
bool adjustDisplayTimeZone = false;
// ---- Credentials and directory locations (normalized by the *Dir properties) ----
string editPassword;
string contentDir;
string logDir;
string binariesDir;
string profilesDir;
// ---- Mail (SMTP/POP3) settings ----
string smtpServer = null;
bool enablePop3 = false;
string pop3Server = null;
string pop3Username = null;
string pop3Password = null;
string pop3SubjectPrefix = null;
bool pop3InlineAttachedPictures = false;
int pop3InlinedAttachedPicturesThumbHeight = 0;
int pop3Interval = 240;
// ---- XSS (XML Storage System) upstreaming ----
bool enableXSSUpstream = false;
string xssUpstreamEndpoint = "http://radio.xmlstoragesystem.com/RPC2";
string xssUpstreamUsername = null;
string xssUpstreamPassword = null;
string xssRSSFilename = "rss-dasblog.xml";
int xssUpstreamInterval = 60 * 60;
// ---- Tracking, crossposting, and permalink shape ----
bool enableClickThrough = false;
bool enableAggregatorBugging = false;
bool enableCrossposts = true;
bool enableCrossPostFooter = false;
string crossPostFooter = null;
bool extensionlessUrls = false;
bool enableTitlePermaLink = false;
bool enableTitlePermaLinkUnique = false;
bool enableTitlePermaLinkSpaces = false;
bool enableEntryReferrals = false;
PingServiceCollection pingServices = new PingServiceCollection();
int daysCommentsAllowed = 0;
bool enableCommentDays = false;
bool logBlockedReferrals = false;
bool showCommentsWhenViewingEntry = false;
bool enableAutoSave = false;
private string titlePermalinkSpaceReplacement = TitleMapperModule.DefaultTitlePermalinkSpaceReplacement;
bool enableCoComment = true;
bool enableAMPPages = false;
string rssEndPointRewrite = string.Empty;
string cheesySpamA = string.Empty;
string cheesySpamQ = string.Empty;
//paulb changed to comments
bool commentsRequireApproval;
bool commentsAllowHtml;
ValidTagCollection allowedTags;
// default tags allowed in comment HTML ("tag@attr@attr" syntax permits attributes)
private const string defaultAllowedTags = "b,i,u,a@href@title,strong,blockquote@cite,em,strike,sup,sub";
//allow gravatar integration
bool commentsAllowGravatar = false;
string commentsGravatarNoImgPath = null;
string commentsGravatarSize = null;
string commentsGravatarBorder = null;
string commentsGravatarRating = null;
// suppress email address display
bool supressEmailAddressDisplay = false;
// Allow user to choose default blogging API
// Can be "Moveable Type", "MetaWeblog" or "Blogger"
string preferredBloggingAPI = "Moveable Type";
ContentFilterCollection contentFilters = new ContentFilterCollection();
CrosspostSiteCollection crosspostSites = new CrosspostSiteCollection();
/// <summary>
/// Serializes the given configuration back to site.config on disk,
/// writing under the impersonated identity.
/// </summary>
/// <param name="siteConfig">The configuration instance to persist.</param>
public static void Save(SiteConfig siteConfig)
{
    System.Security.Principal.WindowsImpersonationContext wi = Impersonation.Impersonate();
    try
    {
        XmlSerializer ser = new XmlSerializer(typeof(SiteConfig));
        using (StreamWriter writer = new StreamWriter(SiteConfig.GetConfigFilePathFromCurrentContext()))
        {
            ser.Serialize(writer, siteConfig);
        }
    }
    finally
    {
        // Always revert impersonation; the original code skipped Undo() when
        // serialization threw, leaking the impersonated security context.
        wi.Undo();
    }
}
/// <summary>
/// Returns the current site configuration, served from the shared cache.
/// The cache entry is invalidated automatically when site.config changes.
/// </summary>
public static SiteConfig GetSiteConfig()
{
    DataCache cache = CacheFactory.GetCache();
    SiteConfig config = (SiteConfig)cache["SiteConfig"];
    if (config == null)
    {
        // Cache miss: load from disk and re-insert with a file dependency so
        // edits to site.config evict the cached instance.
        string path = GetConfigFilePathFromCurrentContext();
        config = GetSiteConfig(path);
        cache.Insert("SiteConfig", config, new CacheDependency(path));
    }
    return config;
}
/// <summary>
/// Loads and deserializes a SiteConfig from the given config file path.
/// </summary>
/// <param name="configPath">Full path to a site.config XML file.</param>
/// <returns>The deserialized configuration, or null if the XML root does not match.</returns>
public static SiteConfig GetSiteConfig(string configPath)
{
    // Note: an XmlNamespaceUpgradeReader was used here historically to migrate
    // old namespaces, but it required FullTrust, so the file is read directly.
    XmlSerializer serializer = new XmlSerializer(typeof(SiteConfig));
    using (StreamReader configReader = new StreamReader(configPath))
    {
        return serializer.Deserialize(configReader) as SiteConfig;
    }
}
/// <summary>Absolute path of site.config for the current web context.</summary>
public static string GetConfigFilePathFromCurrentContext()
{
    return SiteUtilities.MapPath("~/SiteConfig/site.config");
}
/// <summary>Absolute path of siteSecurity.config for the current web context.</summary>
public static string GetSecurityFilePathFromCurrentContext()
{
    return SiteUtilities.MapPath("~/SiteConfig/siteSecurity.config");
}
/// <summary>Absolute path of the SiteConfig directory.</summary>
public static string GetConfigPathFromCurrentContext()
{
    return SiteUtilities.MapPath("~/SiteConfig/");
}
/// <summary>Absolute path of the configured content directory.</summary>
public static string GetContentPathFromCurrentContext()
{
    return SiteUtilities.MapPath(GetSiteConfig().ContentDir);
}
/// <summary>Absolute path of the configured log directory.</summary>
public static string GetLogPathFromCurrentContext()
{
    return SiteUtilities.MapPath(GetSiteConfig().LogDir);
}
/// <summary>Absolute path of the configured binaries directory.</summary>
public static string GetBinariesPathFromCurrentContext()
{
    return SiteUtilities.MapPath(GetSiteConfig().BinariesDir);
}
/// <summary>Absolute path of the configured profiles directory.</summary>
public static string GetProfilesPathFromCurrentContext()
{
    return SiteUtilities.MapPath(GetSiteConfig().ProfilesDir);
}
/// <summary>
/// Normalizes a configured directory path so it is app-rooted ("~/...") and
/// ends with a trailing slash. Null or empty input is returned unchanged;
/// the original indexed <c>path[0]</c> and threw on an empty string.
/// </summary>
/// <param name="path">The raw path from configuration; may be null or empty.</param>
/// <returns>The normalized path, or the input when it is null or empty.</returns>
private string CheckTrailingSlashAndRooted(string path)
{
    if (string.IsNullOrEmpty(path))
        return path;
    if (path[0] != '~' && path[0] != '/')
        path = "~/" + path;
    if (path[path.Length - 1] != '/')
        return path + '/';
    else
        return path;
}
// Lazily-resolved display time zone; refreshed when DisplayTimeZoneIndex is set.
private WindowsTimeZone windowsTimeZone = null;
/// <summary>
/// Returns the configured display time zone, resolving it from
/// <c>displayTimeZoneIndex</c> on first use and caching the result.
/// </summary>
public WindowsTimeZone GetConfiguredTimeZone()
{
    if (windowsTimeZone == null)
    {
        windowsTimeZone = WindowsTimeZone.TimeZones.GetByZoneIndex(displayTimeZoneIndex) as WindowsTimeZone;
    }
    return windowsTimeZone;
}
// ---- Site identity and front-page settings ----
public string Title { get => title; set => title = value; }
public string Subtitle { get => subtitle; set => subtitle = value; }
public string Theme { get => theme; set => theme = value; }
public string Description { get => description; set => description = value; }
public string Contact { get => contact; set => contact = value; }
/// <summary>
/// Root URL of the site. Both accessors normalize the stored value so it
/// always carries a trailing slash.
/// </summary>
public string Root
{
    get
    {
        if (root[root.Length - 1] != '/')
        {
            root += "/";
        }
        return root;
    }
    set
    {
        root = value;
        if (root[root.Length - 1] != '/')
        {
            root += "/";
        }
    }
}
public string Copyright { get => copyright; set => copyright = value; }
public int RssDayCount { get => rssDayCount; set => rssDayCount = value; }
public int RssMainEntryCount { get => rssMainEntryCount; set => rssMainEntryCount = value; }
public int RssEntryCount { get => rssEntryCount; set => rssEntryCount = value; }
public bool EnableRssItemFooters { get => enableRssItemFooters; set => enableRssItemFooters = value; }
public string RssItemFooter { get => rssItemFooter; set => rssItemFooter = value; }
public int FrontPageDayCount { get => frontPageDayCount; set => frontPageDayCount = value; }
public int FrontPageEntryCount { get => frontPageEntryCount; set => frontPageEntryCount = value; }
public bool CategoryAllEntries { get => categoryAllEntries; set => categoryAllEntries = value; }
public string FrontPageCategory { get => frontPageCategory; set => frontPageCategory = value; }
public bool AlwaysIncludeContentInRSS { get => alwaysIncludeContentInRSS; set => alwaysIncludeContentInRSS = value; }
public bool EntryTitleAsLink { get => entryTitleAsLink; set => entryTitleAsLink = value; }
// Superseded by the PingServices collection; kept for old config files.
[Obsolete]
public bool NotifyWebLogsDotCom { get => notifyWebLogsDotCom; set => notifyWebLogsDotCom = value; }
[Obsolete]
public bool NotifyBloGs { get => notifyBloGs; set => notifyBloGs = value; }
public bool ObfuscateEmail { get => obfuscateEmail; set => obfuscateEmail = value; }
public string NotificationEMailAddress { get => notificationEmailAddress; set => notificationEmailAddress = value; }
// ---- Email notification and service enablement switches ----
public bool SendCommentsByEmail { get => sendCommentsByEmail; set => sendCommentsByEmail = value; }
public bool SendReferralsByEmail { get => sendReferralsByEmail; set => sendReferralsByEmail = value; }
public bool SendTrackbacksByEmail { get => sendTrackbacksByEmail; set => sendTrackbacksByEmail = value; }
public bool SendPingbacksByEmail { get => sendPingbacksByEmail; set => sendPingbacksByEmail = value; }
public bool SendPostsByEmail { get => sendPostsByEmail; set => sendPostsByEmail = value; }
public bool EnableBloggerApi { get => enableBloggerApi; set => enableBloggerApi = value; }
public bool EnableComments { get => enableComments; set => enableComments = value; }
public bool EnableCommentApi { get => enableCommentApi; set => enableCommentApi = value; }
public bool EnableConfigEditService { get => enableConfigEditService; set => enableConfigEditService = value; }
public bool EnableEditService { get => enableEditService; set => enableEditService = value; }
public bool EnableAutoPingback { get => enableAutoPingback; set => enableAutoPingback = value; }
public bool ShowCommentCount { get => showCommentCount; set => showCommentCount = value; }
public bool EnableTrackbackService { get => enableTrackbackService; set => enableTrackbackService = value; }
public bool EnablePingbackService { get => enablePingbackService; set => enablePingbackService = value; }
public bool EnableStartPageCaching { get => enableStartPageCaching; set => enableStartPageCaching = value; }
public bool EnableBlogrollDescription { get => enableBlogrollDescription; set => enableBlogrollDescription = value; }
public bool EnableUrlRewriting { get => enableUrlRewriting; set => enableUrlRewriting = value; }
[Obsolete("FreeTextBox is always enabled")]
public bool EnableFtb { get => enableFtb; set => enableFtb = value; }
public bool EnableCrossposts { get => enableCrossposts; set => enableCrossposts = value; }
public bool UseUserCulture { get => useUserCulture; set => useUserCulture = value; }
public bool ShowItemDescriptionInAggregatedViews { get => showItemDescriptionInAggregatedViews; set => showItemDescriptionInAggregatedViews = value; }
public bool EnableClickThrough { get => enableClickThrough; set => enableClickThrough = value; }
public bool EnableAggregatorBugging { get => enableAggregatorBugging; set => enableAggregatorBugging = value; }
/// <summary>
/// Index of the time zone used for display. Setting it also refreshes the
/// cached <c>windowsTimeZone</c> immediately.
/// </summary>
public int DisplayTimeZoneIndex
{
    get => displayTimeZoneIndex;
    set
    {
        displayTimeZoneIndex = value;
        windowsTimeZone = WindowsTimeZone.TimeZones.GetByZoneIndex(displayTimeZoneIndex) as WindowsTimeZone;
    }
}
public bool AdjustDisplayTimeZone { get => adjustDisplayTimeZone; set => adjustDisplayTimeZone = value; }
public string EditPassword { get => editPassword; set => editPassword = value; }
// Directory properties normalize to app-rooted, slash-terminated paths on read.
public string ContentDir { get => CheckTrailingSlashAndRooted(contentDir); set => contentDir = value; }
public string LogDir { get => CheckTrailingSlashAndRooted(logDir); set => logDir = value; }
public string BinariesDir { get => CheckTrailingSlashAndRooted(binariesDir); set => binariesDir = value; }
public string ProfilesDir { get => CheckTrailingSlashAndRooted(profilesDir); set => profilesDir = value; }
/// <summary>Binaries directory with the leading "~" and "/" stripped (site-relative form).</summary>
public string BinariesDirRelative => BinariesDir.TrimStart('~').TrimStart('/');
public string SmtpServer { get => smtpServer; set => smtpServer = value; }
// ---- POP3 blog-by-mail settings ----
public bool EnablePop3 { get => enablePop3; set => enablePop3 = value; }
public string Pop3Server { get => pop3Server; set => pop3Server = value; }
public string Pop3Username { get => pop3Username; set => pop3Username = value; }
public string Pop3Password { get => pop3Password; set => pop3Password = value; }
public string Pop3SubjectPrefix { get => pop3SubjectPrefix; set => pop3SubjectPrefix = value; }
public int Pop3Interval { get => pop3Interval; set => pop3Interval = value; }
public bool Pop3InlineAttachedPictures { get => pop3InlineAttachedPictures; set => pop3InlineAttachedPictures = value; }
public int Pop3InlinedAttachedPicturesThumbHeight { get => pop3InlinedAttachedPicturesThumbHeight; set => pop3InlinedAttachedPicturesThumbHeight = value; }
// ---- Content-filter application and XSS upstreaming ----
public bool ApplyContentFiltersToWeb { get => applyContentFiltersToWeb; set => applyContentFiltersToWeb = value; }
public bool ApplyContentFiltersToRSS { get => applyContentFiltersToRSS; set => applyContentFiltersToRSS = value; }
public bool EnableXSSUpstream { get => enableXSSUpstream; set => enableXSSUpstream = value; }
public string XSSUpstreamEndpoint { get => xssUpstreamEndpoint; set => xssUpstreamEndpoint = value; }
public string XSSUpstreamUsername { get => xssUpstreamUsername; set => xssUpstreamUsername = value; }
public string XSSUpstreamPassword { get => xssUpstreamPassword; set => xssUpstreamPassword = value; }
public string XSSRSSFilename { get => xssRSSFilename; set => xssRSSFilename = value; }
public int XSSUpstreamInterval { get => xssUpstreamInterval; set => xssUpstreamInterval = value; }
/// <summary>Runtime view of the content filters (not serialized directly).</summary>
[XmlIgnore]
public ContentFilterCollection ContentFilters => contentFilters;
/// <summary>XML-serialization surface for <see cref="ContentFilters"/>.</summary>
[XmlArray("ContentFilters")]
public ContentFilter[] ContentFilterArray
{
    get => new List<ContentFilter>(contentFilters).ToArray();
    set => contentFilters = value == null ? new ContentFilterCollection() : new ContentFilterCollection(value);
}
/// <summary>Runtime view of the crosspost sites (not serialized directly).</summary>
[XmlIgnore]
public CrosspostSiteCollection CrosspostSites => crosspostSites;
/// <summary>XML-serialization surface for <see cref="CrosspostSites"/>.</summary>
[XmlArray("CrosspostSites")]
public CrosspostSite[] CrosspostSiteArray
{
    get => new List<CrosspostSite>(CrosspostSites).ToArray();
    set => crosspostSites = value == null ? new CrosspostSiteCollection() : new CrosspostSiteCollection(value);
}
bool pop3DeleteAllMessages = false;
public bool Pop3DeleteAllMessages { get => pop3DeleteAllMessages; set => pop3DeleteAllMessages = value; }
bool pop3LogIgnoredEmails = true;
public bool Pop3LogIgnoredEmails { get => pop3LogIgnoredEmails; set => pop3LogIgnoredEmails = value; }
// ---- Referral blacklisting and spam countermeasures ----
bool enableReferralUrlBlackList = false;
public bool EnableReferralUrlBlackList { get => enableReferralUrlBlackList; set => enableReferralUrlBlackList = value; }
private string referralUrlBlackList = String.Empty;
public string ReferralUrlBlackList { get => referralUrlBlackList; set => referralUrlBlackList = value; }
/// <summary>The blacklist entries split on ';' (not serialized).</summary>
[XmlIgnore]
public string[] ReferralUrlBlackListArray => referralUrlBlackList.Split(';');
bool enableCaptcha = true;
public bool EnableCaptcha { get => enableCaptcha; set => enableCaptcha = value; }
bool enableReferralUrlBlackList404s = true;
public bool EnableReferralUrlBlackList404s { get => enableReferralUrlBlackList404s; set => enableReferralUrlBlackList404s = value; }
bool enableMovableTypeBlackList = false;
public bool EnableMovableTypeBlackList { get => enableMovableTypeBlackList; set => enableMovableTypeBlackList = value; }
private string channelImageUrl;
public string ChannelImageUrl { get => channelImageUrl; set => channelImageUrl = value; }
public bool EnableCrossPostFooter { get => enableCrossPostFooter; set => enableCrossPostFooter = value; }
public string CrossPostFooter { get => crossPostFooter; set => crossPostFooter = value; }
// ---- Permalink shape options ----
public bool ExtensionlessUrls { get => extensionlessUrls; set => extensionlessUrls = value; }
public bool EnableTitlePermaLink { get => enableTitlePermaLink; set => enableTitlePermaLink = value; }
public bool EnableTitlePermaLinkUnique { get => enableTitlePermaLinkUnique; set => enableTitlePermaLinkUnique = value; }
public bool EnableTitlePermaLinkSpaces { get => enableTitlePermaLinkSpaces; set => enableTitlePermaLinkSpaces = value; }
bool encryptLoginPassword = false;
public bool EncryptLoginPassword { get => encryptLoginPassword; set => encryptLoginPassword = value; }
// SMTP authentication settings (added by RyanG).
bool enableSmtpAuthentication = false;
public bool EnableSmtpAuthentication { get => enableSmtpAuthentication; set => enableSmtpAuthentication = value; }
string smtpUserName = null;
public string SmtpUserName { get => smtpUserName; set => smtpUserName = value; }
string smtpPassword = null;
public string SmtpPassword { get => smtpPassword; set => smtpPassword = value; }
string rssLanguage = null;
public string RssLanguage { get => rssLanguage; set => rssLanguage = value; }
bool enableSearchHighlight = true;
public bool EnableSearchHighlight { get => enableSearchHighlight; set => enableSearchHighlight = value; }
public bool EnableEntryReferrals { get => enableEntryReferrals; set => enableEntryReferrals = value; }
/// <summary>XML-serialization surface for <see cref="PingServices"/>.</summary>
[XmlArray("PingServices")]
public PingService[] PingServiceArray
{
    get => new List<PingService>(PingServices).ToArray();
    set => pingServices = value == null ? new PingServiceCollection() : new PingServiceCollection(value);
}
/// <summary>
/// The list of ping services to notify on publish (not serialized directly).
/// </summary>
[XmlIgnore]
public PingServiceCollection PingServices
{
    get
    {
        // Upgrade path: older configs used the obsolete notify* flags instead of
        // an explicit ping-service list, so honor those flags when the list is empty.
        if (pingServices.Count == 0)
        {
            if (notifyBloGs)
            {
                pingServices.Add(PingService.GetBloGs());
            }
            if (notifyWebLogsDotCom)
            {
                pingServices.Add(PingService.GetWebLogsDotCom());
            }
        }
        return pingServices;
    }
    set => pingServices = value;
}
string feedBurnerName = null;
public string FeedBurnerName { get => feedBurnerName; set => feedBurnerName = value; }
public int DaysCommentsAllowed { get => daysCommentsAllowed; set => daysCommentsAllowed = value; }
public bool EnableCommentDays { get => enableCommentDays; set => enableCommentDays = value; }
public bool SupressEmailAddressDisplay { get => supressEmailAddressDisplay; set => supressEmailAddressDisplay = value; }
public string EntryEditControl { get; set; }
public bool LogBlockedReferrals { get => logBlockedReferrals; set => logBlockedReferrals = value; }
public bool ShowCommentsWhenViewingEntry { get => showCommentsWhenViewingEntry; set => showCommentsWhenViewingEntry = value; }
bool useFeedSchemeForSyndicationLinks;
public bool UseFeedSchemeForSyndication { get => useFeedSchemeForSyndicationLinks; set => useFeedSchemeForSyndicationLinks = value; }
private int _contentLookaheadDays = 0;
public int ContentLookaheadDays { get => _contentLookaheadDays; set => _contentLookaheadDays = value; }
public bool EnableAutoSave { get => enableAutoSave; set => enableAutoSave = value; }
private int _smtpPort = 25;
public int SmtpPort { get => _smtpPort; set => _smtpPort = value; }
// ---- Gravatar display options for comments ----
public bool CommentsAllowGravatar { get => commentsAllowGravatar; set => commentsAllowGravatar = value; }
public string CommentsGravatarNoImgPath { get => commentsGravatarNoImgPath; set => commentsGravatarNoImgPath = value; }
public string CommentsGravatarSize { get => commentsGravatarSize; set => commentsGravatarSize = value; }
public string CommentsGravatarBorder { get => commentsGravatarBorder; set => commentsGravatarBorder = value; }
public string CommentsGravatarRating { get => commentsGravatarRating; set => commentsGravatarRating = value; }
/// <summary>Whether comments require approval before being published.</summary>
public bool CommentsRequireApproval { get => commentsRequireApproval; set => commentsRequireApproval = value; }
/// <summary>Whether HTML is allowed in comments.</summary>
public bool CommentsAllowHtml { get => commentsAllowHtml; set => commentsAllowHtml = value; }
/// <summary>
/// XML-serialization surface for the tags allowed in comments
/// (serialized as &lt;validCommentTags&gt;&lt;tag&gt;... elements).
/// </summary>
[XmlArray("validCommentTags", IsNullable = true)]
[XmlArrayItem("tag")]
public ValidTagCollection XmlAllowedTagsArray { get => allowedTags; set => allowedTags = value; }
/// <summary>
/// The tags allowed in comments. Falls back to the built-in default set when
/// the configured list is missing or empty.
/// </summary>
[XmlIgnore]
public ValidTagCollection AllowedTags
{
    get
    {
        if (allowedTags == null || allowedTags.Count == 0)
        {
            allowedTags = new ValidTagCollection(defaultAllowedTags);
        }
        return allowedTags;
    }
}
/// <summary>
/// Legacy comma-separated tag list, retained only so old config files still
/// deserialize. Reads always return the defaults and writes are ignored.
/// </summary>
[XmlElement("AllowedTags")]
[Obsolete("Please use the AllowedTags property.")]
public string XmlAllowedTags
{
    get => defaultAllowedTags;
    set { }
}
public bool EnableCoComment { get => enableCoComment; set => enableCoComment = value; }
private bool enableSpamBlockingService;
public bool EnableSpamBlockingService
{
    get { return enableSpamBlockingService; }
    set { enableSpamBlockingService = value; }
}
private string spamBlockingServiceApiKey;
public string SpamBlockingServiceApiKey
{
    get { return spamBlockingServiceApiKey; }
    set { spamBlockingServiceApiKey = value; }
}
/// <summary>
/// The configured spam-blocking service, or null when the feature is disabled
/// or no API key has been set.
/// </summary>
[XmlIgnore]
public ISpamBlockingService SpamBlockingService
{
    get
    {
        //TODO: this may eventually be configurable, if Akismet alternatives show up
        // Fixed: use IsNullOrEmpty — the key field defaults to null, so the
        // original `.Length == 0` check threw a NullReferenceException when the
        // service was enabled without a key configured.
        if (!enableSpamBlockingService || string.IsNullOrEmpty(spamBlockingServiceApiKey))
        {
            return null;
        }
        return new AkismetSpamBlockingService(this.spamBlockingServiceApiKey, this.root);
    }
}
private bool enableSpamModeration = true;
public bool EnableSpamModeration { get => enableSpamModeration; set => enableSpamModeration = value; }
private int _entriesPerPage = 5;
public int EntriesPerPage { get => _entriesPerPage; set => _entriesPerPage = value; }
private bool enableDailyEmailReport = false;
public bool EnableDailyReportEmail { get => enableDailyEmailReport; set => enableDailyEmailReport = value; }
bool useSSLForSMTP = false;
public bool UseSSLForSMTP { get => useSSLForSMTP; set => useSSLForSMTP = value; }
public string PreferredBloggingAPI { get => preferredBloggingAPI; set => preferredBloggingAPI = value; }
// ---- Geo/maps settings ----
bool enableGoogleMaps = false;
public bool EnableGoogleMaps { get => enableGoogleMaps; set => enableGoogleMaps = value; }
string googleMapsApiKey = string.Empty;
public string GoogleMapsApiKey { get => googleMapsApiKey; set => googleMapsApiKey = value; }
bool enableGeoRss = false;
public bool EnableGeoRss { get => enableGeoRss; set => enableGeoRss = value; }
double defaultLatitude = 0;
public double DefaultLatitude { get => defaultLatitude; set => defaultLatitude = value; }
double defaultLongitude = 0;
public double DefaultLongitude { get => defaultLongitude; set => defaultLongitude = value; }
bool enableDefaultLatLongForNonGeoCodedPosts = false;
public bool EnableDefaultLatLongForNonGeoCodedPosts { get => enableDefaultLatLongForNonGeoCodedPosts; set => enableDefaultLatLongForNonGeoCodedPosts = value; }
bool htmlTidyContent = true;
public bool HtmlTidyContent { get => htmlTidyContent; set => htmlTidyContent = value; }
bool resolveCommenterIP = true;
public bool ResolveCommenterIP { get => resolveCommenterIP; set => resolveCommenterIP = value; }
// ---- OpenID options ----
bool allowOpenIdComments = false;
/// <summary>Whether commenters can log in using OpenID.</summary>
public bool AllowOpenIdComments { get => allowOpenIdComments; set => allowOpenIdComments = value; }
bool allowOpenIdAdmin = false;
/// <summary>Whether admins can log in using OpenID.</summary>
public bool AllowOpenIdAdmin { get => allowOpenIdAdmin; set => allowOpenIdAdmin = value; }
bool bypassSpamOpenIdComment = false;
/// <summary>Whether OpenID-authenticated commenters bypass the spam check.</summary>
public bool BypassSpamOpenIdComment { get => bypassSpamOpenIdComment; set => bypassSpamOpenIdComment = value; }
/// <summary>Replacement string used for spaces in title permalinks.</summary>
public string TitlePermalinkSpaceReplacement { get => titlePermalinkSpaceReplacement; set => titlePermalinkSpaceReplacement = value; }
public bool AMPPagesEnabled { get => enableAMPPages; set => enableAMPPages = value; }
public string RSSEndPointRewrite { get => rssEndPointRewrite; set => rssEndPointRewrite = value; }
// Cheesy question/answer comment spam challenge.
public string CheesySpamQ { get => cheesySpamQ; set => cheesySpamQ = value; }
public string CheesySpamA { get => cheesySpamA; set => cheesySpamA = value; }
/*!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!*/
/* Add new properties just above this comment*/
/*!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!*/
// Catch-all buckets so unrecognized XML elements/attributes in site.config
// survive a load/save round-trip instead of being silently dropped.
[XmlAnyElement]
public XmlElement[] anyElements;
[XmlAnyAttribute]
public XmlAttribute[] anyAttributes;
}
[Serializable]
[XmlType(Namespace = "urn:newtelligence-com:dasblog:config")]
[XmlRoot(Namespace = "urn:newtelligence-com:dasblog:config")]
public class ContentFilter
{
    string expression = "";
    string mapTo;
    bool isRegex = false;

    /// <summary>Parameterless constructor required by XML serialization.</summary>
    public ContentFilter()
    {
    }

    /// <summary>Creates a filter that replaces <paramref name="expression"/> with <paramref name="mapTo"/>.</summary>
    public ContentFilter(string expression, string mapTo)
    {
        this.expression = expression;
        this.mapTo = mapTo;
    }

    /// <summary>The text or pattern to search for.</summary>
    [XmlAttribute("find")]
    public string Expression { get => expression; set => expression = value; }

    /// <summary>The replacement text.</summary>
    [XmlAttribute("replace")]
    public string MapTo { get => mapTo; set => mapTo = value; }

    /// <summary>Whether <see cref="Expression"/> is interpreted as a regular expression.</summary>
    [XmlAttribute("isregex")]
    public bool IsRegEx { get => isRegex; set => isRegex = value; }

    // Preserve unknown XML so round-tripping a config file is lossless.
    [XmlAnyElement]
    public XmlElement[] anyElements;
    [XmlAnyAttribute]
    public XmlAttribute[] anyAttributes;
}
/// <summary>
/// A collection of elements of type ContentFilter
/// </summary>
[Serializable]
[XmlType(Namespace = "urn:newtelligence-com:dasblog:config")]
[XmlRoot(Namespace = "urn:newtelligence-com:dasblog:config")]
public class ContentFilterCollection : CollectionBase, IEnumerable<ContentFilter>
{
    /// <summary>Initializes a new empty ContentFilterCollection.</summary>
    public ContentFilterCollection()
    {
        // empty
    }

    /// <summary>Initializes a new collection containing elements copied from an array.</summary>
    /// <param name="items">The array whose elements are added to the new collection.</param>
    public ContentFilterCollection(ContentFilter[] items)
    {
        this.AddRange(items);
    }

    /// <summary>Initializes a new collection containing elements copied from another instance.</summary>
    /// <param name="items">The collection whose elements are added to the new collection.</param>
    public ContentFilterCollection(ContentFilterCollection items)
    {
        this.AddRange(items);
    }

    /// <summary>Adds the elements of an array to the end of this collection.</summary>
    public virtual void AddRange(ContentFilter[] items)
    {
        foreach (ContentFilter item in items)
        {
            this.List.Add(item);
        }
    }

    /// <summary>Adds the elements of another ContentFilterCollection to the end of this collection.</summary>
    public virtual void AddRange(ContentFilterCollection items)
    {
        foreach (ContentFilter item in items)
        {
            this.List.Add(item);
        }
    }

    /// <summary>Adds a ContentFilter to the end of this collection.</summary>
    public virtual void Add(ContentFilter value)
    {
        this.List.Add(value);
    }

    /// <summary>Determines whether a specific ContentFilter is in this collection.</summary>
    public virtual bool Contains(ContentFilter value)
    {
        return this.List.Contains(value);
    }

    /// <summary>
    /// Returns the zero-based index of the first occurrence of a value in this
    /// collection, or -1 when the value is not found.
    /// </summary>
    public virtual int IndexOf(ContentFilter value)
    {
        return this.List.IndexOf(value);
    }

    /// <summary>Inserts an element into the collection at the specified index.</summary>
    public virtual void Insert(int index, ContentFilter value)
    {
        this.List.Insert(index, value);
    }

    /// <summary>Gets or sets the ContentFilter at the given index.</summary>
    public virtual ContentFilter this[int index]
    {
        get { return (ContentFilter)this.List[index]; }
        set { this.List[index] = value; }
    }

    /// <summary>Removes the first occurrence of a specific ContentFilter from this collection.</summary>
    public virtual void Remove(ContentFilter value)
    {
        this.List.Remove(value);
    }

    /// <summary>
    /// Type-specific enumeration class, used by ContentFilterCollection.GetEnumerator.
    /// </summary>
    public class Enumerator : IEnumerator<ContentFilter>
    {
        private IEnumerator wrapped;

        public Enumerator(ContentFilterCollection collection)
        {
            this.wrapped = ((CollectionBase)collection).GetEnumerator();
        }

        public ContentFilter Current
        {
            get { return (ContentFilter)(this.wrapped.Current); }
        }

        object IEnumerator.Current
        {
            get { return (ContentFilter)(this.wrapped.Current); }
        }

        ContentFilter IEnumerator<ContentFilter>.Current
        {
            get { return (ContentFilter)(this.wrapped.Current); }
        }

        public bool MoveNext()
        {
            return this.wrapped.MoveNext();
        }

        public void Reset()
        {
            this.wrapped.Reset();
        }

        void IDisposable.Dispose()
        {
            // Bug fix: the original implementation called wrapped.Reset() here,
            // rewinding the enumerator on dispose (which runs at the end of every
            // foreach) instead of releasing it. The non-generic enumerator
            // returned by CollectionBase holds no resources, so Dispose is a no-op.
        }
    }

    /// <summary>Returns a type-specific enumerator over this collection.</summary>
    public new virtual Enumerator GetEnumerator()
    {
        return new Enumerator(this);
    }

    IEnumerator<ContentFilter> IEnumerable<ContentFilter>.GetEnumerator()
    {
        return new Enumerator(this);
    }

    IEnumerator IEnumerable.GetEnumerator()
    {
        return this.GetEnumerator();
    }
}
/// <summary>
/// Thrown when a caller invokes a service that is disabled in the site configuration.
/// </summary>
[XmlType(Namespace = "urn:newtelligence-com:dasblog:config")]
[XmlRoot(Namespace = "urn:newtelligence-com:dasblog:config")]
public class ServiceDisabledException : Exception
{
    public ServiceDisabledException() : base("Service disabled")
    {
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace AngularAzureSearch.WebAPI.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class
/// with empty sample maps and the default sample-object factory.
/// </summary>
public HelpPageSampleGenerator()
{
    this.ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
    this.ActionSamples = new Dictionary<HelpPageSampleKey, object>();
    this.SampleObjects = new Dictionary<Type, object>();
    this.SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>> { DefaultSampleObjectFactory };
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
// NOTE(review): setter is internal — populated by the constructor; confirm where else it is assigned.
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langref="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns></returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Impl.Portable
{
using System;
using System.Collections.Generic;
using System.Reflection;
using Apache.Ignite.Core.Portable;
/// <summary>
/// Portable serializer which reflectively writes all fields except of ones with
/// <see cref="System.NonSerializedAttribute"/>.
/// <para />
/// Note that Java platform stores dates as a difference between current time
/// and predefined absolute UTC date. Therefore, this difference is always the
/// same for all time zones. .Net, in contrast, stores dates as a difference
/// between current time and some predefined date relative to the current time
/// zone. It means that this difference will be different as you change time zones.
/// To overcome this discrepancy Ignite always converts .Net date to UTC form
/// before serializing and allows user to decide whether to deserialize them
/// in UTC or local form using <c>ReadDate(..., true/false)</c> methods in
/// <see cref="IPortableReader"/> and <see cref="IPortableRawReader"/>.
/// This serializer always read dates in UTC form. It means that if you have
/// local date in any field/property, it will be implicitly converted to UTC
/// form after the first serialization-deserialization cycle.
/// </summary>
internal class PortableReflectiveSerializer : IPortableSerializer
{
/** Cached binding flags: all declared instance fields (public and non-public), per type in the hierarchy. */
private static readonly BindingFlags Flags = BindingFlags.Instance | BindingFlags.Public |
BindingFlags.NonPublic | BindingFlags.DeclaredOnly;
/** Cached type descriptors. */
private readonly IDictionary<Type, Descriptor> _types = new Dictionary<Type, Descriptor>();
/// <summary>
/// Write portable object. Types implementing <see cref="IPortableMarshalAware"/>
/// serialize themselves; all others go through the cached reflective descriptor.
/// </summary>
/// <param name="obj">Object.</param>
/// <param name="writer">Portable writer.</param>
/// <exception cref="PortableException">Thrown when the type is not registered in the serializer.</exception>
public void WritePortable(object obj, IPortableWriter writer)
{
var portableMarshalAware = obj as IPortableMarshalAware;
if (portableMarshalAware != null)
portableMarshalAware.WritePortable(writer);
else
GetDescriptor(obj).Write(obj, writer);
}
/// <summary>
/// Read portable object. Mirrors <see cref="WritePortable"/>: marshal-aware types
/// read themselves; all others use the cached reflective descriptor.
/// </summary>
/// <param name="obj">Instantiated empty object.</param>
/// <param name="reader">Portable reader.</param>
/// <exception cref="PortableException">Thrown when the type is not registered in the serializer.</exception>
public void ReadPortable(object obj, IPortableReader reader)
{
var portableMarshalAware = obj as IPortableMarshalAware;
if (portableMarshalAware != null)
portableMarshalAware.ReadPortable(reader);
else
GetDescriptor(obj).Read(obj, reader);
}
/// <summary>Register type. Collects all serializable fields up the inheritance chain,
/// verifies their IDs do not collide, and caches a read/write descriptor.</summary>
/// <param name="type">Type.</param>
/// <param name="typeId">Type ID.</param>
/// <param name="converter">Name converter.</param>
/// <param name="idMapper">ID mapper.</param>
/// <exception cref="PortableException">Thrown when two fields map to the same field ID.</exception>
public void Register(Type type, int typeId, IPortableNameMapper converter,
IPortableIdMapper idMapper)
{
// Marshal-aware types serialize themselves; no descriptor needed.
if (type.GetInterface(typeof(IPortableMarshalAware).Name) != null)
return;
List<FieldInfo> fields = new List<FieldInfo>();
Type curType = type;
// Walk the full inheritance chain; DeclaredOnly flags prevent duplicates from base types.
while (curType != null)
{
foreach (FieldInfo field in curType.GetFields(Flags))
{
// [NonSerialized] fields are skipped.
if (!field.IsNotSerialized)
fields.Add(field);
}
curType = curType.BaseType;
}
IDictionary<int, string> idMap = new Dictionary<int, string>();
foreach (FieldInfo field in fields)
{
string fieldName = PortableUtils.CleanFieldName(field.Name);
int fieldId = PortableUtils.FieldId(typeId, fieldName, converter, idMapper);
if (idMap.ContainsKey(fieldId))
{
throw new PortableException("Conflicting field IDs [type=" +
type.Name + ", field1=" + idMap[fieldId] + ", field2=" + fieldName +
", fieldId=" + fieldId + ']');
}
idMap[fieldId] = fieldName;
}
// Sort fields by cleaned name so the write/read order is deterministic.
fields.Sort(Compare);
Descriptor desc = new Descriptor(fields);
_types[type] = desc;
}
/// <summary>
/// Gets the descriptor for an object.
/// </summary>
/// <exception cref="PortableException">Thrown when the object's type was never registered.</exception>
private Descriptor GetDescriptor(object obj)
{
var type = obj.GetType();
Descriptor desc;
if (!_types.TryGetValue(type, out desc))
throw new PortableException("Type is not registered in serializer: " + type.Name);
return desc;
}
/// <summary>
/// Compare two FieldInfo instances by their cleaned names, ignoring case.
/// </summary>
private static int Compare(FieldInfo info1, FieldInfo info2) {
string name1 = PortableUtils.CleanFieldName(info1.Name);
string name2 = PortableUtils.CleanFieldName(info2.Name);
return string.Compare(name1, name2, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Type descriptor. Holds pre-compiled per-field read and write actions in the
/// same (sorted) order, so Write and Read visit fields symmetrically.
/// </summary>
private class Descriptor
{
/** Write actions to be performed. */
private readonly List<PortableReflectiveWriteAction> _wActions;
/** Read actions to be performed. */
private readonly List<PortableReflectiveReadAction> _rActions;
/// <summary>
/// Constructor. Resolves a write and a read action for every field up front.
/// </summary>
/// <param name="fields">Fields.</param>
public Descriptor(List<FieldInfo> fields)
{
_wActions = new List<PortableReflectiveWriteAction>(fields.Count);
_rActions = new List<PortableReflectiveReadAction>(fields.Count);
foreach (FieldInfo field in fields)
{
PortableReflectiveWriteAction writeAction;
PortableReflectiveReadAction readAction;
PortableReflectiveActions.TypeActions(field, out writeAction, out readAction);
_wActions.Add(writeAction);
_rActions.Add(readAction);
}
}
/// <summary>
/// Write object.
/// </summary>
/// <param name="obj">Object.</param>
/// <param name="writer">Portable writer.</param>
public void Write(object obj, IPortableWriter writer)
{
int cnt = _wActions.Count;
for (int i = 0; i < cnt; i++)
_wActions[i](obj, writer);
}
/// <summary>
/// Read object.
/// </summary>
/// <param name="obj">Object.</param>
/// <param name="reader">Portable reader.</param>
public void Read(object obj, IPortableReader reader)
{
int cnt = _rActions.Count;
for (int i = 0; i < cnt; i++ )
_rActions[i](obj, reader);
}
}
}
}
| |
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Smidge.Cache;
using Smidge.Options;
using Smidge.Models;
using Smidge.FileProcessors;
using Smidge.Nuglify;
using Microsoft.Extensions.Hosting;
using Smidge.InMemory;
using Microsoft.Extensions.FileProviders;
using System.IO;
namespace Smidge.Web
{
//public class DotlessPreProcessor : IPreProcessor
//{
// private readonly IHostingEnvironment _hostingEnvironment;
// public DotlessPreProcessor(IHostingEnvironment hostingEnvironment)
// {
// _hostingEnvironment = hostingEnvironment;
// }
// public async Task ProcessAsync(FileProcessContext fileProcessContext, PreProcessorDelegate next)
// {
// if (Path.GetExtension(fileProcessContext.WebFile.FilePath) == ".less")
// {
// var result = dotless.Core.Less.Parse(fileProcessContext.FileContent);
// fileProcessContext.Update(result);
// }
// await next(fileProcessContext);
// }
//}
public class Startup
{
// Entry point for the application.
public static void Main(string[] args)
{
BuildWebHost(args).Run();
}
// Builds the host using the default builder (Kestrel, logging, configuration providers)
// with this class supplying DI and middleware configuration.
public static IWebHost BuildWebHost(string[] args) =>
WebHost.CreateDefaultBuilder(args)
.UseStartup<Startup>()
.Build();
// Root of the configuration loaded in the constructor (appsettings.json + env-specific overlay).
public IConfigurationRoot Configuration { get; }
// Captured at construction so Configure can resolve content-root-relative paths.
public IWebHostEnvironment CurrentEnvironment { get; }
/// <summary>
/// Constructor sets up the configuration - for our example we'll load in the config from appsettings.json with
/// a sub configuration value of 'smidge'
/// </summary>
/// <param name="env"></param>
public Startup(IWebHostEnvironment env)
{
var builder = new ConfigurationBuilder()
.SetBasePath(env.ContentRootPath)
.AddJsonFile("appsettings.json")
.AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true);
Configuration = builder.Build();
CurrentEnvironment = env;
}
/// <summary>
/// Registers MVC, a composite Smidge file provider (web root + an extra physical
/// "Smidge/Static" folder), and the Smidge/Nuglify/in-memory services.
/// </summary>
public void ConfigureServices(IServiceCollection services)
{
services.AddMvc();
services.AddSingleton<ISmidgeFileProvider>(f =>
{
var hostEnv = f.GetRequiredService<IWebHostEnvironment>();
return new SmidgeFileProvider(
hostEnv.WebRootFileProvider,
new PhysicalFileProvider(Path.Combine(hostEnv.ContentRootPath, "Smidge", "Static")));
});
// Or use services.AddSmidge() to test from smidge.json config.
services.AddSmidge(Configuration.GetSection("smidge"));
// We could replace a processor in the default pipeline like this
//services.Configure<SmidgeOptions>(opt =>
//{
// opt.PipelineFactory.OnCreateDefault = (type, pipeline) => pipeline.Replace<JsMinifier, NuglifyJs>(opt.PipelineFactory);
//});
// We could change a lot of defaults like this
services.Configure<SmidgeOptions>(options =>
{
//options.PipelineFactory.OnCreateDefault = (type, processors) =>
//options.FileWatchOptions.Enabled = true;
//options.PipelineFactory.OnCreateDefault = GetDefaultPipelineFactory;
options.DefaultBundleOptions.DebugOptions.SetCacheBusterType<AppDomainLifetimeCacheBuster>();
options.DefaultBundleOptions.ProductionOptions.SetCacheBusterType<AppDomainLifetimeCacheBuster>();
});
services.AddSmidgeNuglify();
services.AddSmidgeInMemory();
//services.AddSingleton<IPreProcessor, DotlessPreProcessor>();
}
/// <summary>
/// Configures the request pipeline: error handling, static files (including the extra
/// "/smidge-static" mapping), routing, and the pre-defined Smidge bundles.
/// </summary>
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
// Add the following to the request pipeline only in development environment.
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
else
{
// Add Error handling middleware which catches all application specific errors and
// sends the request to the following path or controller action.
app.UseExceptionHandler("/Home/Error");
}
app.UseStaticFiles();
// Serve files under <content root>/Smidge/Static at the "/smidge-static" request path.
app.UseStaticFiles(new StaticFileOptions
{
FileProvider = new PhysicalFileProvider(Path.Combine(CurrentEnvironment.ContentRootPath, "Smidge", "Static")),
RequestPath = "/smidge-static"
});
app.UseRouting();
app.UseEndpoints(endpoints =>
{
endpoints.MapControllerRoute(
name: "Default",
pattern: "{controller=Home}/{action=Index}/{id?}");
});
app.UseSmidge(bundles =>
{
//Create pre-defined bundles
//var lessPipeline = bundles.PipelineFactory.DefaultCss();
//lessPipeline.Processors.Insert(0, bundles.PipelineFactory.Resolve<DotlessPreProcessor>());
//bundles.CreateCss(
// "less-test",
// lessPipeline,
// "~/Css/test.less")
// .WithEnvironmentOptions(BundleEnvironmentOptions.Create()
// .ForDebug(builder => builder.EnableCompositeProcessing().SetCacheBusterType<AppDomainLifetimeCacheBuster>())
// .Build());
bundles.Create("test-bundle-1",
new JavaScriptFile("~/Js/Bundle1/a1.js"),
new JavaScriptFile("~/Js/Bundle1/a2.js"),
//NOTE: This is already min'd based on it's file name, therefore
// by convention JsMin should be removed
new JavaScriptFile("~/Js/Bundle1/a3.min.js"))
.WithEnvironmentOptions(bundles.DefaultBundleOptions)
.OnOrdering(collection =>
{
//return some custom ordering
return collection.OrderBy(x => x.FilePath);
});
bundles.CreateJs("test-bundle-2", "~/Js/Bundle2")
.WithEnvironmentOptions(BundleEnvironmentOptions.Create()
.ForDebug(builder => builder
.EnableCompositeProcessing()
.EnableFileWatcher()
.SetCacheBusterType<AppDomainLifetimeCacheBuster>()
.CacheControlOptions(enableEtag: false, cacheControlMaxAge: 0))
.Build()
);
bundles.Create("test-bundle-3", WebFileType.Js, "~/Js/Bundle2");
bundles.Create("test-bundle-4",
new CssFile("~/Css/Bundle1/a1.css"),
new CssFile("~/Css/Bundle1/a2.css"));
bundles.CreateJs("libs-js",
//Here we can change the default pipeline to use Nuglify for this single bundle
bundles.PipelineFactory.Create<NuglifyJs>(),
"~/Js/Libs/jquery-1.12.2.js", "~/Js/Libs/knockout-es5.js");
bundles.CreateCss("libs-css",
//Here we can change the default pipeline to use Nuglify for this single bundle (we'll replace the default)
bundles.PipelineFactory.DefaultCss().Replace<CssMinifier, NuglifyCss>(bundles.PipelineFactory),
"~/Css/Libs/font-awesome.css");
bundles.Create("test-bundle-10", new JavaScriptFile("~/test10.js")
{
RequestPath = "/smidge-static"
});
});
app.UseSmidgeNuglify();
}
}
}
| |
//
// Transcoder.cs
//
// Author:
// Aaron Bockover <abockover@novell.com>
//
// Copyright (C) 2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Threading;
using System.Runtime.InteropServices;
using Mono.Unix;
using Hyena;
using Banshee.Base;
using Banshee.Collection;
using Banshee.ServiceStack;
using Banshee.MediaEngine;
using Banshee.MediaProfiles;
using Banshee.Configuration.Schema;
namespace Banshee.GStreamer
{
public class Transcoder : ITranscoder
{
// Raised with transcode progress (fraction of the current track).
public event TranscoderProgressHandler Progress;
// Raised when the current track has finished transcoding.
public event TranscoderTrackFinishedHandler TrackFinished;
// Raised when the native transcoder reports an error.
public event TranscoderErrorHandler Error;
// Handle to the native gst_transcoder instance.
private HandleRef handle;
// Callback delegates are stored in fields and registered with the native side
// (presumably to keep them alive for the duration of the native calls — confirm).
private GstTranscoderProgressCallback ProgressCallback;
private GstTranscoderFinishedCallback FinishedCallback;
private GstTranscoderErrorCallback ErrorCallback;
// Track currently being transcoded; set in TranscodeTrack, read by native callbacks.
private TrackInfo current_track;
// Last error text reported by the native side; null while no error has occurred.
private string error_message;
// Output URI of the in-flight transcode; used by the finished/error callbacks.
private SafeUri managed_output_uri;
// Creates the native transcoder and wires up the progress/finished/error callbacks.
public Transcoder ()
{
IntPtr ptr = gst_transcoder_new();
if(ptr == IntPtr.Zero) {
throw new NullReferenceException(Catalog.GetString("Could not create transcoder"));
}
handle = new HandleRef(this, ptr);
ProgressCallback = new GstTranscoderProgressCallback(OnNativeProgress);
FinishedCallback = new GstTranscoderFinishedCallback(OnNativeFinished);
ErrorCallback = new GstTranscoderErrorCallback(OnNativeError);
gst_transcoder_set_progress_callback(handle, ProgressCallback);
gst_transcoder_set_finished_callback(handle, FinishedCallback);
gst_transcoder_set_error_callback(handle, ErrorCallback);
}
// Frees the native transcoder; the handle is zeroed so further use is invalid.
public void Finish ()
{
gst_transcoder_free(handle);
handle = new HandleRef (this, IntPtr.Zero);
}
// Cancels an in-flight transcode; the handle is zeroed so further use is invalid.
public void Cancel ()
{
gst_transcoder_cancel(handle);
handle = new HandleRef (this, IntPtr.Zero);
}
// Starts transcoding a single track to outputUri using the "gstreamer" pipeline
// from the given profile configuration. Throws if a transcode is already running.
public void TranscodeTrack (TrackInfo track, SafeUri outputUri, ProfileConfiguration config)
{
if(IsTranscoding) {
throw new ApplicationException("Transcoder is busy");
}
Log.DebugFormat ("Transcoding {0} to {1}", track.Uri, outputUri);
SafeUri inputUri = track.Uri;
managed_output_uri = outputUri;
// URIs are marshalled to native GLib strings and freed after the call.
IntPtr input_uri = GLib.Marshaller.StringToPtrGStrdup(inputUri.AbsoluteUri);
IntPtr output_uri = GLib.Marshaller.StringToPtrGStrdup(outputUri.AbsoluteUri);
error_message = null;
current_track = track;
gst_transcoder_transcode(handle, input_uri, output_uri, config.Profile.Pipeline.GetProcessById("gstreamer"));
GLib.Marshaller.Free(input_uri);
GLib.Marshaller.Free(output_uri);
}
// Native callback: forwards progress for the track set by TranscodeTrack.
private void OnNativeProgress(IntPtr transcoder, double fraction)
{
OnProgress (current_track, fraction);
}
// Native callback: the current track finished writing to managed_output_uri.
private void OnNativeFinished(IntPtr transcoder)
{
OnTrackFinished (current_track, managed_output_uri);
}
// Native callback: builds the error message (optionally appending debug detail),
// best-effort deletes the partial output file, then raises Error.
private void OnNativeError(IntPtr transcoder, IntPtr error, IntPtr debug)
{
error_message = GLib.Marshaller.Utf8PtrToString(error);
if(debug != IntPtr.Zero) {
string debug_string = GLib.Marshaller.Utf8PtrToString(debug);
if(!String.IsNullOrEmpty (debug_string)) {
error_message = String.Format ("{0}: {1}", error_message, debug_string);
}
}
try {
// Remove the partial output; failure to delete is deliberately ignored.
Banshee.IO.File.Delete (managed_output_uri);
} catch {}
OnError (current_track, error_message);
}
// Raises Progress if anyone is subscribed.
protected virtual void OnProgress (TrackInfo track, double fraction)
{
TranscoderProgressHandler handler = Progress;
if (handler != null) {
handler (this, new TranscoderProgressArgs (track, fraction, track.Duration));
}
}
// Raises TrackFinished if anyone is subscribed.
protected virtual void OnTrackFinished (TrackInfo track, SafeUri outputUri)
{
TranscoderTrackFinishedHandler handler = TrackFinished;
if (handler != null) {
handler (this, new TranscoderTrackFinishedArgs (track, outputUri));
}
}
// Raises Error if anyone is subscribed.
protected virtual void OnError (TrackInfo track, string message)
{
TranscoderErrorHandler handler = Error;
if (handler != null) {
handler (this, new TranscoderErrorArgs (track, message));
}
}
// True while the native transcoder reports an active transcode.
public bool IsTranscoding {
get { return gst_transcoder_get_is_transcoding(handle); }
}
// Last error text reported, or null if no error has occurred since the last transcode started.
public string ErrorMessage {
get { return error_message; }
}
private delegate void GstTranscoderProgressCallback(IntPtr transcoder, double progress);
private delegate void GstTranscoderFinishedCallback(IntPtr transcoder);
private delegate void GstTranscoderErrorCallback(IntPtr transcoder, IntPtr error, IntPtr debug);
[DllImport(PlayerEngine.LibBansheeLibrary, CallingConvention = CallingConvention.Cdecl)]
private static extern IntPtr gst_transcoder_new();
[DllImport(PlayerEngine.LibBansheeLibrary, CallingConvention = CallingConvention.Cdecl)]
private static extern void gst_transcoder_free(HandleRef handle);
[DllImport(PlayerEngine.LibBansheeLibrary, CallingConvention = CallingConvention.Cdecl)]
private static extern void gst_transcoder_transcode(HandleRef handle, IntPtr input_uri,
IntPtr output_uri, string encoder_pipeline);
[DllImport(PlayerEngine.LibBansheeLibrary, CallingConvention = CallingConvention.Cdecl)]
private static extern void gst_transcoder_cancel(HandleRef handle);
[DllImport(PlayerEngine.LibBansheeLibrary, CallingConvention = CallingConvention.Cdecl)]
private static extern void gst_transcoder_set_progress_callback(HandleRef handle,
GstTranscoderProgressCallback cb);
[DllImport(PlayerEngine.LibBansheeLibrary, CallingConvention = CallingConvention.Cdecl)]
private static extern void gst_transcoder_set_finished_callback(HandleRef handle,
GstTranscoderFinishedCallback cb);
[DllImport(PlayerEngine.LibBansheeLibrary, CallingConvention = CallingConvention.Cdecl)]
private static extern void gst_transcoder_set_error_callback(HandleRef handle,
GstTranscoderErrorCallback cb);
[DllImport(PlayerEngine.LibBansheeLibrary, CallingConvention = CallingConvention.Cdecl)]
private static extern bool gst_transcoder_get_is_transcoding(HandleRef handle);
}
}
| |
#region License
/*
* Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion
#region Imports
using System.Collections;
using System.Collections.Generic;
using NUnit.Framework;
using Rhino.Mocks;
using Spring.Objects.Factory.Config;
using Spring.Objects.Factory.Support;
using Spring.Objects.Factory.Xml;
#endregion
namespace Spring.Objects.Factory
{
/// <summary>
/// Unit tests for the ObjectFactoryUtils class.
/// </summary>
/// <author>Rod Johnson</author>
/// <author>Simon White (.NET)</author>
/// <author>Rick Evans (.NET)</author>
[TestFixture]
public sealed class ObjectFactoryUtilsTests
{
// Leaf of a three-level factory hierarchy (root.xml -> middle.xml ->
// leaf.xml); rebuilt before every test by SetUp.
private IConfigurableListableObjectFactory _factory;
[SetUp]
public void SetUp()
{
// Chain three XML-backed factories; tests interrogate the leaf, which
// sees ancestor definitions through its parent links.
IObjectFactory grandparent = new XmlObjectFactory(new ReadOnlyXmlTestResource("root.xml", GetType()));
IObjectFactory parent = new XmlObjectFactory(new ReadOnlyXmlTestResource("middle.xml", GetType()), grandparent);
IConfigurableListableObjectFactory child = new XmlObjectFactory(new ReadOnlyXmlTestResource("leaf.xml", GetType()), parent);
_factory = child;
}
/// <summary>
/// Check that override doesn't count as two separate objects.
/// </summary>
[Test]
public void CountObjectsIncludingAncestors()
{
// leaf count...
Assert.AreEqual(1, _factory.ObjectDefinitionCount);
// count minus duplicate...
Assert.AreEqual(6, ObjectFactoryUtils.CountObjectsIncludingAncestors(_factory),
"Should count 6 objects, not " + ObjectFactoryUtils.CountObjectsIncludingAncestors(_factory));
}
// Name listing across the whole hierarchy must match the de-duplicated
// count above.
[Test]
public void ObjectNamesIncludingAncestors()
{
IList<string> names = ObjectFactoryUtils.ObjectNamesIncludingAncestors(_factory);
Assert.AreEqual(6, names.Count);
}
[Test]
public void ObjectNamesForTypeIncludingAncestors()
{
IList<string> names = ObjectFactoryUtils.ObjectNamesForTypeIncludingAncestors(_factory, typeof(ITestObject));
// includes 2 TestObjects from IFactoryObjects (DummyFactory definitions)
Assert.AreEqual(4, names.Count);
Assert.IsTrue(names.Contains("test"));
Assert.IsTrue(names.Contains("test3"));
Assert.IsTrue(names.Contains("testFactory1"));
Assert.IsTrue(names.Contains("testFactory2"));
}
// A child definition with the same name shadows the parent definition
// entirely, even when only the parent's type would have matched.
[Test]
public void ObjectNamesForTypeIncludingAncestorsExcludesObjectsFromParentWhenLocalObjectDefined()
{
DefaultListableObjectFactory root = new DefaultListableObjectFactory();
root.RegisterObjectDefinition("excludeLocalObject", new RootObjectDefinition(typeof(ArrayList)));
DefaultListableObjectFactory child = new DefaultListableObjectFactory(root);
child.RegisterObjectDefinition("excludeLocalObject", new RootObjectDefinition(typeof(Hashtable)));
IList<string> names = ObjectFactoryUtils.ObjectNamesForTypeIncludingAncestors(child, typeof(ArrayList));
// "excludeLocalObject" matches on the parent, but not the local object definition
Assert.AreEqual(0, names.Count);
names = ObjectFactoryUtils.ObjectNamesForTypeIncludingAncestors(child, typeof(ArrayList), true, true);
// "excludeLocalObject" matches on the parent, but not the local object definition
Assert.AreEqual(0, names.Count);
}
// A non-hierarchical factory simply counts its own definitions.
[Test]
public void CountObjectsIncludingAncestorsWithNonHierarchicalFactory()
{
StaticListableObjectFactory lof = new StaticListableObjectFactory();
lof.AddObject("t1", new TestObject());
lof.AddObject("t2", new TestObject());
Assert.IsTrue(ObjectFactoryUtils.CountObjectsIncludingAncestors(lof) == 2);
}
// Exercises ObjectsOfTypeIncludingAncestors across both boolean flag
// combinations (presumably include-prototypes / include-factory-objects
// — confirm against the ObjectFactoryUtils API docs).
[Test]
public void HierarchicalResolutionWithOverride()
{
object test3 = _factory.GetObject("test3");
object test = _factory.GetObject("test");
object testFactory1 = _factory.GetObject("testFactory1");
IDictionary<string, object> objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(_factory, typeof(ITestObject), true, false);
Assert.AreEqual(3, objects.Count);
Assert.AreEqual(test3, objects["test3"]);
Assert.AreEqual(test, objects["test"]);
Assert.AreEqual(testFactory1, objects["testFactory1"]);
objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(_factory, typeof(ITestObject), false, false);
Assert.AreEqual(2, objects.Count);
Assert.AreEqual(test, objects["test"]);
Assert.AreEqual(testFactory1, objects["testFactory1"]);
objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(_factory, typeof(ITestObject), false, true);
Assert.AreEqual(2, objects.Count);
Assert.AreEqual(test, objects["test"]);
Assert.AreEqual(testFactory1, objects["testFactory1"]);
objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(_factory, typeof(ITestObject), true, true);
Assert.AreEqual(4, objects.Count);
Assert.AreEqual(test3, objects["test3"]);
Assert.AreEqual(test, objects["test"]);
Assert.AreEqual(testFactory1, objects["testFactory1"]);
Assert.IsTrue(objects["testFactory2"] is ITestObject);
// The "&" prefix dereferences the factory object itself rather than
// its product.
objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(_factory, typeof(DummyFactory), true, true);
Assert.AreEqual(2, objects.Count);
Assert.AreEqual(_factory.GetObject("&testFactory1"), objects["&testFactory1"]);
Assert.AreEqual(_factory.GetObject("&testFactory2"), objects["&testFactory2"]);
objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(_factory, typeof(IFactoryObject), true, true);
Assert.AreEqual(2, objects.Count);
Assert.AreEqual(_factory.GetObject("&testFactory1"), objects["&testFactory1"]);
Assert.AreEqual(_factory.GetObject("&testFactory2"), objects["&testFactory2"]);
}
// Asking for the single object of a type that has four matches must
// fail with the exact message asserted below.
[Test]
[ExpectedException(typeof(NoSuchObjectDefinitionException),
ExpectedMessage = "No unique object of type [Spring.Objects.ITestObject] is defined : Expected single object but found 4")]
public void ObjectOfTypeIncludingAncestorsWithMoreThanOneObjectOfType()
{
ObjectFactoryUtils.ObjectOfTypeIncludingAncestors(_factory, typeof(ITestObject), true, true);
}
[Test]
public void ObjectOfTypeIncludingAncestorsExcludesObjectsFromParentWhenLocalObjectDefined()
{
DefaultListableObjectFactory root = new DefaultListableObjectFactory();
root.RegisterObjectDefinition("excludeLocalObject", new RootObjectDefinition(typeof(ArrayList)));
DefaultListableObjectFactory child = new DefaultListableObjectFactory(root);
child.RegisterObjectDefinition("excludeLocalObject", new RootObjectDefinition(typeof(Hashtable)));
IDictionary<string, object> objectEntries = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(child, typeof(ArrayList), true, true);
// "excludeLocalObject" matches on the parent, but not the local object definition
Assert.AreEqual(0, objectEntries.Count);
}
// No matches yields an empty dictionary, never null.
[Test]
public void NoObjectsOfTypeIncludingAncestors()
{
StaticListableObjectFactory lof = new StaticListableObjectFactory();
lof.AddObject("foo", new object());
IDictionary<string, object> objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(lof, typeof(ITestObject), true, false);
Assert.IsTrue(objects.Count == 0);
}
// Mixes plain objects with singleton (t3) and prototype (t4) factory
// objects and checks how the two flags change the result set.
[Test]
public void ObjectsOfTypeIncludingAncestorsWithStaticFactory()
{
StaticListableObjectFactory lof = new StaticListableObjectFactory();
TestObject t1 = new TestObject();
TestObject t2 = new TestObject();
DummyFactory t3 = new DummyFactory();
DummyFactory t4 = new DummyFactory();
t4.IsSingleton = false;
lof.AddObject("t1", t1);
lof.AddObject("t2", t2);
lof.AddObject("t3", t3);
t3.AfterPropertiesSet(); // StaticListableObjectFactory does support lifecycle calls.
lof.AddObject("t4", t4);
t4.AfterPropertiesSet(); // StaticListableObjectFactory does support lifecycle calls.
IDictionary<string, object> objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(lof, typeof(ITestObject), true, false);
Assert.AreEqual(2, objects.Count);
Assert.AreEqual(t1, objects["t1"]);
Assert.AreEqual(t2, objects["t2"]);
objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(lof, typeof(ITestObject), false, true);
Assert.AreEqual(3, objects.Count);
Assert.AreEqual(t1, objects["t1"]);
Assert.AreEqual(t2, objects["t2"]);
Assert.AreEqual(t3.GetObject(), objects["t3"]);
objects = ObjectFactoryUtils.ObjectsOfTypeIncludingAncestors(lof, typeof(ITestObject), true, true);
Assert.AreEqual(4, objects.Count);
Assert.AreEqual(t1, objects["t1"]);
Assert.AreEqual(t2, objects["t2"]);
Assert.AreEqual(t3.GetObject(), objects["t3"]);
Assert.IsTrue(objects["t4"] is TestObject);
}
[Test]
public void IsFactoryDereferenceWithNonFactoryObjectName()
{
Assert.IsFalse(ObjectFactoryUtils.IsFactoryDereference("roob"),
"Name that didn't start with the factory object prefix is being reported " +
"(incorrectly) as a factory object dereference.");
}
[Test]
public void IsFactoryDereferenceWithNullName()
{
Assert.IsFalse(ObjectFactoryUtils.IsFactoryDereference(null),
"Null name that (obviously) didn't start with the factory object prefix is being reported " +
"(incorrectly) as a factory object dereference.");
}
[Test]
public void IsFactoryDereferenceWithEmptyName()
{
Assert.IsFalse(ObjectFactoryUtils.IsFactoryDereference(string.Empty),
"String.Empty name that (obviously) didn't start with the factory object prefix is being reported " +
"(incorrectly) as a factory object dereference.");
}
// The bare prefix with no trailing name is not a dereference.
[Test]
public void IsFactoryDereferenceWithJustTheFactoryObjectPrefixCharacter()
{
Assert.IsFalse(ObjectFactoryUtils.IsFactoryDereference(
ObjectFactoryUtils.FactoryObjectPrefix),
"Name that consisted solely of the factory object prefix is being reported " +
"(incorrectly) as a factory object dereference.");
}
[Test]
public void IsFactoryDereferenceSunnyDay()
{
Assert.IsTrue(ObjectFactoryUtils.IsFactoryDereference(
ObjectFactoryUtils.FactoryObjectPrefix + "roob"),
"Name that did start with the factory object prefix is not being reported " +
"(incorrectly) as a factory object dereference.");
}
}
}
| |
namespace UnitTests
{
using Jose;
using Jose.keys;
using Microsoft.IdentityModel.Tokens;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using Xunit;
using Xunit.Abstractions;
public class JweTest
{
// Test-output adapter. The field name shadows System.Console so the
// Console.Out.WriteLine calls throughout this class route to xUnit's
// per-test output — presumably intentional; confirm TestConsole's API.
private TestConsole Console;
public JweTest(ITestOutputHelper output)
{
this.Console = new TestConsole(output);
}
/// <summary>
/// Round-trips a small byte payload through a single-recipient JWE in
/// both Compact and General JSON serialization and verifies the
/// decrypted bytes match the original plaintext.
/// </summary>
[Theory]
[InlineData(SerializationMode.Compact)]
[InlineData(SerializationMode.Json)]
public void EncryptDecrypt_RoundTripOneRecipient_PlaintextSurvives(SerializationMode mode)
{
    //given
    byte[] originalPlaintext = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
    var singleRecipient = new JweRecipient[] { recipientAes256KW1 };
    var protectedHeaders = new Dictionary<string, object>();
    protectedHeaders.Add("cty", "application/octet-string");

    //when
    var jwe = JWE.EncryptBytes(
        plaintext: originalPlaintext,
        recipients: singleRecipient,
        JweEncryption.A256GCM,
        mode: mode,
        extraProtectedHeaders: protectedHeaders);
    Console.Out.WriteLine("[{0}][A256GCM] = {1}", mode, jwe);
    var decrypted = JWE.Decrypt(jwe, aes256KWKey1);

    //then
    Assert.Equal(originalPlaintext, decrypted.PlaintextBytes);
}
// One decryption key per recipient used by the multi-recipient
// round-trip Theory below: the two AES-256-KW shared keys and the RSA
// private key. Each must be able to open its own recipient entry.
public static IEnumerable<object[]> TestDataModeGeneralJsonRoundTripMultipleRecipients =>
new List<object[]>
{
new object[] { aes256KWKey1 },
new object[] { aes256KWKey2 },
new object[] { PrivKey() },
};
/// <summary>
/// Encrypts one payload for three recipients (two AES-256-KW, one RSA)
/// using the default General JSON serialization and checks that each
/// recipient's key can recover the plaintext.
/// </summary>
[Theory]
[MemberData(nameof(TestDataModeGeneralJsonRoundTripMultipleRecipients))]
public void EncryptDecrypt_ModeGeneralJsonRoundTripMultipleRecipients_ValidRecipientsCanDecrypt(object decryptKey)
{
    //given
    byte[] originalPlaintext = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
    var allRecipients = new JweRecipient[] { recipientAes256KW1, recipientAes256KW2, recipientRsa1 };
    var protectedHeaders = new Dictionary<string, object>();
    protectedHeaders.Add("cty", "application/octet-string");

    //when
    var jwe = JWE.EncryptBytes(
        plaintext: originalPlaintext,
        recipients: allRecipients,
        JweEncryption.A256GCM,
        extraProtectedHeaders: protectedHeaders);
    Console.Out.WriteLine("[Json][Multiple][A256GCM] = {0}", jwe);
    var decrypted = JWE.Decrypt(jwe, decryptKey);

    //then
    Assert.Equal(originalPlaintext, decrypted.PlaintextBytes);
}
// Decrypting while pinning an expected alg/enc that does not match the
// token's headers must fail with InvalidAlgorithmException and the
// exact message supplied per case.
[Theory]
[InlineData(JweEncryption.A256GCM, JweAlgorithm.ECDH_ES_A256KW, "The algorithm type passed to the Decrypt method did not match the algorithm type in the header.")]
[InlineData(JweEncryption.A192GCM, JweAlgorithm.A256KW, "The encryption type passed to the Decrypt method did not match the encryption type in the header.")]
public void Decrypt_MultipleRecipients_MismatchEncOrAlgThrows(JweEncryption expectedJweEnc, JweAlgorithm expectedJweAlg, string expectedMessage)
{
//given
byte[] payload = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
var recipients = new JweRecipient[]
{
recipientAes256KW1,
recipientAes256KW2,
recipientRsa1,
};
var sharedProtectedHeaders = new Dictionary<string, object>
{
{ "cty", "application/octet-string"},
};
var jwe = JWE.EncryptBytes(
plaintext: payload,
recipients: recipients,
JweEncryption.A256GCM,
extraProtectedHeaders: sharedProtectedHeaders);
//when
// aes256KWKey2 is a valid recipient key; only the pinned alg/enc is wrong.
var exception = Record.Exception(() => JWE.Decrypt(jwe, aes256KWKey2, expectedJweAlg, expectedJweEnc));
//then
Assert.IsType<InvalidAlgorithmException>(exception);
Assert.Equal(expectedMessage, exception.Message);
}
/// <summary>
/// Attempting to decrypt with a private key not matching any of the recipients.
/// </summary>
[Fact]
public void Decrypt_NoMatchingRecipient_Throws()
{
//given
byte[] payload = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
var recipients = new JweRecipient[]
{
recipientAes256KW1,
recipientAes256KW2,
};
var sharedProtectedHeaders = new Dictionary<string, object>
{
{ "cty", "application/octet-string"},
};
var jwe = JWE.EncryptBytes(
plaintext: payload,
recipients: recipients,
JweEncryption.A256GCM,
extraProtectedHeaders: sharedProtectedHeaders);
//when
// aes256KWKey3 does not correspond to either recipient; the AES key
// unwrap integrity check is expected to fail.
var exception = Record.Exception(() => { JWE.Decrypt(jwe, aes256KWKey3); });
//then
Assert.IsType<IntegrityException>(exception);
Assert.Equal("AesKeyWrap integrity check failed.", exception.Message);
}
/// <summary>
/// Compact serialization supports exactly one recipient; encrypting for
/// two must be rejected with a JoseException carrying the given message.
/// </summary>
[Theory]
[InlineData(SerializationMode.Compact, "Only one recipient is supported by the JWE Compact Serialization.")]
public void Encrypt_WithMoreThanOneRecipient_Throws(SerializationMode mode, string expectedMessage)
{
    //given
    byte[] data = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
    var twoRecipients = new JweRecipient[] { recipientAes256KW1, recipientAes256KW2 };

    //when
    var exception = Record.Exception(() => JWE.EncryptBytes(
        plaintext: data,
        recipients: twoRecipients,
        JweEncryption.A256GCM,
        mode: mode));

    //then
    Assert.IsType<JoseException>(exception);
    Assert.Equal(expectedMessage, exception.Message);
}
// Compact serialization of an EMPTY payload with A128KW / A128CBC-HS256:
// verifies the five-part dotted structure, the static protected header,
// and the base64url lengths of each component, then round-trips back to
// an empty byte array.
[Fact]
public void Encrypt_ModeCompactWithEmptyBytesA128KW_A128CBC_HS256_ExpectedResults()
{
//given
byte[] plaintext = { };
//when
var jwe = JWE.EncryptBytes(
plaintext: plaintext,
recipients: new JweRecipient[] { recipientAes128KW },
JweEncryption.A128CBC_HS256,
mode: SerializationMode.Compact
);
//then
Console.Out.WriteLine("Empty bytes A128KW_A128CBC_HS256 = {0}", jwe);
string[] parts = jwe.Split('.');
Assert.Equal(5, parts.Length); //Make sure 5 parts
Assert.Equal("{\"alg\":\"A128KW\",\"enc\":\"A128CBC-HS256\"}",
UTF8Encoding.UTF8.GetString(Base64Url.Decode(parts[0])));
Assert.Equal("eyJhbGciOiJBMTI4S1ciLCJlbmMiOiJBMTI4Q0JDLUhTMjU2In0", parts[0]); //Header is non-encrypted and static text
// Lengths below are base64url character counts, not raw byte counts.
Assert.Equal(54, parts[1].Length); //CEK size
Assert.Equal(22, parts[2].Length); //IV size
Assert.Equal(22, parts[3].Length); //cipher text size
Assert.Equal(22, parts[4].Length); //auth tag size
Assert.Equal(new byte[0], JWE.Decrypt(jwe, aes128KWKey).PlaintextBytes);
}
// Two identical recipients force the General JSON serialization
// ("recipients" array); checks the shared protected header, per-recipient
// headers, component lengths, and the empty-payload round trip.
[Fact]
public void Encrypt_ModeJsonTwoRecipientsWithEmptyBytesA128KW_A128CBC_HS256_ExpectedResults()
{
//given
byte[] plaintext = { };
//when
var jwe = JWE.EncryptBytes(
plaintext: plaintext,
recipients: new JweRecipient[] { recipientAes128KW, recipientAes128KW },
JweEncryption.A128CBC_HS256
);
//then
Console.Out.WriteLine("Empty bytes A128KW_A128CBC_HS256 (General Json Serialization) = {0}", jwe);
JObject deserialized = JObject.Parse(jwe);
// Only "enc" is shared/protected; "alg" moves into each recipient header.
Assert.Equal("{\"enc\":\"A128CBC-HS256\"}",
UTF8Encoding.UTF8.GetString(Base64Url.Decode((string)deserialized["protected"])));
Assert.True(deserialized["recipients"] is JArray);
Assert.Equal(2, ((JArray)deserialized["recipients"]).Count);
var recipient0 = ((JArray)deserialized["recipients"])[0];
Assert.True(recipient0["header"] is JObject);
Assert.Equal("{\"alg\":\"A128KW\"}", recipient0["header"].ToString(Newtonsoft.Json.Formatting.None));
Assert.Equal("A128KW", recipient0["header"]["alg"]);
// Lengths below are base64url character counts, not raw byte counts.
Assert.Equal(54, ((string)recipient0["encrypted_key"]).Length); //CEK size
Assert.Equal(22, ((string)deserialized["iv"]).Length); //IV size
Assert.Equal(22, ((string)deserialized["ciphertext"]).Length); //cipher text size
Assert.Equal(22, ((string)deserialized["tag"]).Length); //auth tag size
Assert.Equal(new byte[0], JWE.Decrypt(jwe, aes128KWKey).PlaintextBytes);
}
// A single recipient in JSON mode produces the Flattened serialization:
// top-level "header"/"encrypted_key" members instead of a "recipients"
// array. Verifies structure, lengths, and the empty-payload round trip.
[Fact]
public void Encrypt_ModeJsonOneRecipientWithEmptyBytesA128KW_A128CBC_HS256_ExpectedResults()
{
//given
byte[] plaintext = { };
//when
var jwe = JWE.EncryptBytes(
plaintext: plaintext,
recipients: new JweRecipient[] { recipientAes128KW },
JweEncryption.A128CBC_HS256
);
//then
Console.Out.WriteLine("Empty bytes A128KW_A128CBC_HS256 (Flattened Json Serialization) = {0}", jwe);
JObject deserialized = JObject.Parse(jwe);
Assert.Equal("{\"enc\":\"A128CBC-HS256\"}",
UTF8Encoding.UTF8.GetString(Base64Url.Decode((string)deserialized["protected"])));
Assert.True(deserialized["header"] is JObject);
Assert.Equal("{\"alg\":\"A128KW\"}", deserialized["header"].ToString(Newtonsoft.Json.Formatting.None));
Assert.Equal("A128KW", deserialized["header"]["alg"]);
// Lengths below are base64url character counts, not raw byte counts.
Assert.Equal(54, ((string)deserialized["encrypted_key"]).Length); //CEK size
Assert.Equal(22, ((string)deserialized["iv"]).Length); //IV size
Assert.Equal(22, ((string)deserialized["ciphertext"]).Length); //cipher text size
Assert.Equal(22, ((string)deserialized["tag"]).Length); //auth tag size
Assert.Equal(new byte[0], JWE.Decrypt(jwe, aes128KWKey).PlaintextBytes);
}
// Decrypts the RFC 7516 Appendix A.4.7 example token using the first
// recipient's (RSA1_5) key and checks the merged JoseHeader view.
[Fact]
public void Decrypt_Rfc7516AppendixA23DecryptWithFirstRecipient_ExpectedResults()
{
//given
var key = GetLegacyKeyObjectFromJwk(new JsonWebKey(Rfc7516_A_2_3_ExampleJwk));
//when
var decrypted = JWE.Decrypt(Rfc7516_A_4_7_ExampleJwe, key);
//then
Assert.Equal("Live long and prosper.", decrypted.Plaintext);
// JoseHeader merges protected, unprotected, and per-recipient headers.
Assert.Equal(4, decrypted.Recipient.JoseHeader.Count);
Assert.Equal("RSA1_5", decrypted.Recipient.JoseHeader["alg"]);
Assert.Equal("2011-04-29", decrypted.Recipient.JoseHeader["kid"]);
Assert.Equal("A128CBC-HS256", decrypted.Recipient.JoseHeader["enc"]);
Assert.Equal("https://server.example.com/keys.jwks", decrypted.Recipient.JoseHeader["jku"]);
}
// Same RFC 7516 A.4.7 token, decrypted via the second recipient's
// (A128KW) key; also checks that no AAD was present in the example.
[Fact]
public void Decrypt_Rfc7516AppendixA23DecryptWithSecondRecipient_ExpectedResults()
{
//given
var key = GetLegacyKeyObjectFromJwk(new JsonWebKey(Rfc7516_A_3_3_ExampleJwk));
//when
var decrypted = JWE.Decrypt(Rfc7516_A_4_7_ExampleJwe, key);
//then
Assert.Equal("Live long and prosper.", decrypted.Plaintext);
Assert.Equal(4, decrypted.Recipient.JoseHeader.Count);
Assert.Equal("A128KW", decrypted.Recipient.JoseHeader["alg"]);
Assert.Equal("7", decrypted.Recipient.JoseHeader["kid"]);
Assert.Equal("A128CBC-HS256", decrypted.Recipient.JoseHeader["enc"]);
Assert.Equal("https://server.example.com/keys.jwks", decrypted.Recipient.JoseHeader["jku"]);
Assert.Null(decrypted.Aad);
}
// Supplying explicit AAD must surface as the base64url "aad" member of
// the serialized token and must not prevent decryption.
[Fact]
public void Encrypt_WithAdditionalAuthenticatedData_PopulatesAad()
{
//given
var key = GetLegacyKeyObjectFromJwk(new JsonWebKey(Rfc7520_5_8_1_Figure151_ExampleJwk));
//when
var jwe = JWE.EncryptBytes(
UTF8Encoding.UTF8.GetBytes(Rfc7520_Figure72_ExamplePlaintext),
new JweRecipient[] { new JweRecipient(JweAlgorithm.A128KW, key) },
JweEncryption.A128CBC_HS256,
aad: Base64Url.Decode(Rfc7520_Figure176_ExampleBase64UrlEncodedAad)
);
//then
JObject deserialized = JObject.Parse(jwe);
var base64UrlAad = (string)deserialized["aad"];
Assert.NotNull(base64UrlAad);
Assert.Equal(Rfc7520_Figure176_ExampleBase64UrlEncodedAad, base64UrlAad);
var decrypted = JWE.Decrypt(jwe, key);
Assert.Equal(Rfc7520_Figure72_ExamplePlaintext, decrypted.Plaintext);
}
/// <summary>
/// Round-trips the RFC 7520 example plaintext with explicit AAD and
/// checks the decrypted text equals the input.
/// </summary>
[Fact]
public void EncryptDecrypt_WithAdditionalAuthenticatedData_RoundtripOk()
{
    //given
    var jwk = new JsonWebKey(Rfc7520_5_8_1_Figure151_ExampleJwk);
    var key = GetLegacyKeyObjectFromJwk(jwk);
    var expectedPlaintext = Rfc7520_Figure72_ExamplePlaintext;

    //when
    var jwe = JWE.EncryptBytes(
        UTF8Encoding.UTF8.GetBytes(Rfc7520_Figure72_ExamplePlaintext),
        new JweRecipient[] { new JweRecipient(JweAlgorithm.A128KW, key) },
        JweEncryption.A128CBC_HS256,
        aad: Base64Url.Decode(Rfc7520_Figure176_ExampleBase64UrlEncodedAad)
    );
    var decrypted = JWE.Decrypt(jwe, key);

    //then
    Assert.Equal(expectedPlaintext, decrypted.Plaintext);
}
// Decrypts the fixed RFC 7520 section 5.10 example and verifies the
// plaintext, the merged header fields, and that the AAD survives intact.
[Fact]
public void Decrypt_WithAdditionalAuthenticatedDataOk_ReturnsExpectedResults()
{
//given
var jwk = new JsonWebKey(Rfc7520_5_8_1_Figure151_ExampleJwk);
var key = GetLegacyKeyObjectFromJwk(jwk);
var kid = jwk.Kid;
//when
var decrypted = JWE.Decrypt(Rfc7520_5_10_ExampleJwe, key);
//then
Assert.Equal(Rfc7520_Figure72_ExamplePlaintext, decrypted.Plaintext);
Assert.Equal(3, decrypted.Recipient.JoseHeader.Count);
Assert.Equal(jwk.Alg, decrypted.Recipient.JoseHeader["alg"]);
Assert.Equal(jwk.Kid, decrypted.Recipient.JoseHeader["kid"]);
Assert.Equal("A128GCM", decrypted.Recipient.JoseHeader["enc"]);
Assert.Equal(Rfc7520_5_10_1_ExampleAadString, UTF8Encoding.UTF8.GetString(decrypted.Aad));
}
/// <summary>
/// Flipping one character of the token's base64url "aad" member must
/// break tag verification and surface as an EncryptionException.
/// </summary>
[Fact]
public void Decrypt_WithAdditionalAuthenticatedDataTampered_Throws()
{
    //given
    var decryptionKey = GetLegacyKeyObjectFromJwk(new JsonWebKey(Rfc7520_5_8_1_Figure151_ExampleJwk));
    var corruptedJwe = Rfc7520_5_10_ExampleJwe.Replace("aad\": \"W", "aad\": \"V");

    //when
    var exception = Record.Exception(() => JWE.Decrypt(corruptedJwe, decryptionKey));

    //then
    Assert.IsType<EncryptionException>(exception);
    Assert.Equal("Unable to decrypt content or authentication tag do not match.", exception.Message);
}
/// <summary>
/// Cases for direct-encryption recipient handling: a lone direct
/// recipient is fine, direct-as-first-of-many is currently tolerated,
/// and direct-after-another-recipient must be rejected.
/// </summary>
public static IEnumerable<object[]> TestDataMultipleRecipientDirectEncryption()
{
    // Return the case list directly; no intermediate local is needed.
    return new List<object[]>
    {
        new object[] { new JweRecipient[] { recipientDirectEncyption1 }, null }, // (Single direct encryption is ok)
        new object[] { new JweRecipient[] { recipientDirectEncyption1, recipientAes256KW1 }, null }, // (Direct recipient currently allowed as first receipient)
        new object[] { new JweRecipient[] { recipientAes256KW1, recipientDirectEncyption1 }, "Direct Encryption not supported for multi-recipient JWE.", }, // (Direct recipient in multi not supported)
    };
}
/// <summary>
/// Runs the direct-encryption recipient cases: when expectedError is
/// null the encryption must succeed, otherwise it must fail with a
/// JoseException carrying exactly that message.
/// </summary>
[Theory()]
[MemberData(nameof(TestDataMultipleRecipientDirectEncryption))]
public void Encrypt_MultipleRecipient_SpecialCasesHandled(JweRecipient[] recipients, string expectedError)
{
    //given
    byte[] emptyPlaintext = { };

    //when
    var exception = Record.Exception(() => JWE.EncryptBytes(
        plaintext: emptyPlaintext,
        recipients: recipients,
        JweEncryption.A128CBC_HS256));

    //then
    if (expectedError != null)
    {
        Assert.IsType<JoseException>(exception);
        Assert.Equal(expectedError, exception.Message);
    }
    else
    {
        Assert.Null(exception);
    }
}
/// <summary>
/// Enforce uniqueness of header names - as per https://tools.ietf.org/html/rfc7516#section-4
/// Here passed into extraHeaders
/// </summary>
[Theory]
[InlineData("example.com:extra_recipient_header")]
[InlineData("alg")]
[InlineData("enc")]
public void Encrypt_WithNonUniqueHeaderParameterNamesInExtraHeaders_Throws(string injectedHeaderName)
{
//given
byte[] plaintext = { };
//when
// The injected name collides either with a recipient header
// ("example.com:extra_recipient_header") or a reserved name (alg/enc).
var exception = Record.Exception(() => JWE.EncryptBytes(
plaintext: plaintext,
recipients: new JweRecipient[]
{
new JweRecipient(
JweAlgorithm.A256KW,
aes256KWKey1,
new Dictionary<string, object>
{
{ "kid", "my_key_reference" },
{ "example.com:extra_recipient_header", "value1" },
})
},
JweEncryption.A128CBC_HS256,
extraProtectedHeaders: new Dictionary<string, object>
{
{ "cty", "text/plain" },
{ "example.com:extra_header", "another value" },
{ injectedHeaderName, string.Empty },
}));
//then
Assert.NotNull(exception);
Assert.IsType<ArgumentException>(exception);
Assert.StartsWith("An item with the same key has already been added.", exception.Message);
}
/// <summary>
/// Enforce uniqueness of header names - as per https://tools.ietf.org/html/rfc7516#section-4
/// Here passed into recipient's headers
/// </summary>
[Theory]
[InlineData("example.com:extra_header")]
[InlineData("alg")]
[InlineData("enc")]
// FIX: method was implicitly private; xUnit only discovers public test
// methods, so this Theory was silently never executed.
public void Encrypt_WithNonUniqueHeaderParameterNamesInRecipientHeaders_Throws(string injectedHeaderName)
{
    //given
    byte[] plaintext = { };

    //when
    // The injected name collides either with the shared extra header
    // ("example.com:extra_header") or a reserved name (alg/enc).
    var exception = Record.Exception(() => JWE.EncryptBytes(
        plaintext: plaintext,
        recipients: new JweRecipient[]
        {
            new JweRecipient(
                JweAlgorithm.A256KW,
                aes256KWKey1,
                new Dictionary<string, object>
                {
                    { "kid", "my_key_reference" },
                    { "example.com:extra_recipient_header", "value1" },
                    { injectedHeaderName, string.Empty },
                })
        },
        JweEncryption.A128CBC_HS256,
        extraProtectedHeaders: new Dictionary<string, object>
        {
            { "cty", "text/plain" },
            { "example.com:extra_header", "another value" },
        }));

    //then
    Assert.NotNull(exception);
    Assert.IsType<ArgumentException>(exception);
    Assert.StartsWith("An item with the same key has already been added.", exception.Message);
}
// JWE.Headers parses header metadata WITHOUT verifying the token
// ("unsafe"); a compact token yields a single recipient whose merged
// header holds exactly alg and enc.
[Fact]
public void UnsafeJoseHeaders_ModeCompactWithEmptyBytesA128KW_A128CBC_HS256_ExpectedResults()
{
//given
byte[] plaintext = { };
var jwe = JWE.EncryptBytes(
plaintext: plaintext,
recipients: new JweRecipient[] { recipientAes128KW },
JweEncryption.A128CBC_HS256,
mode: SerializationMode.Compact);
//when
var test = JWE.Headers(jwe);
//then
Assert.Single(test.Recipients);
Assert.Equal(2, test.Recipients[0].JoseHeader.Count());
Assert.Equal("A128CBC-HS256", test.Recipients[0].JoseHeader["enc"]);
Assert.Equal("A128KW", test.Recipients[0].JoseHeader["alg"]);
}
// Header-only parse of the RFC 7516 A.4.7 two-recipient example:
// shared protected values (enc, jku) merge into each recipient's view
// alongside the per-recipient alg and kid.
[Fact]
public void UnsafeJoseHeaders_Rfc7516AppendixA23_ExpectedResults()
{
//when
var test = JWE.Headers(Rfc7516_A_4_7_ExampleJwe);
//then
Assert.Equal(2, test.Recipients.Count());
Assert.Equal(4, test.Recipients[0].JoseHeader.Count());
Assert.Equal("A128CBC-HS256", test.Recipients[0].JoseHeader["enc"]);
Assert.Equal("https://server.example.com/keys.jwks", test.Recipients[0].JoseHeader["jku"]);
Assert.Equal("RSA1_5", test.Recipients[0].JoseHeader["alg"]);
Assert.Equal("2011-04-29", test.Recipients[0].JoseHeader["kid"]);
Assert.Equal(4, test.Recipients[1].JoseHeader.Count());
Assert.Equal("A128CBC-HS256", test.Recipients[1].JoseHeader["enc"]);
Assert.Equal("https://server.example.com/keys.jwks", test.Recipients[1].JoseHeader["jku"]);
Assert.Equal("A128KW", test.Recipients[1].JoseHeader["alg"]);
Assert.Equal("7", test.Recipients[1].JoseHeader["kid"]);
}
/// <summary>
/// Decodes a flattened-JSON token whose metadata lives only in the
/// per-recipient "header" member (no "protected" part) and checks the
/// merged JoseHeader contents.
/// </summary>
[Fact]
public void DecodeSingleRecipientProtectedHeader()
{
    var token = @"{""ciphertext"":""tzh1xXdNDke99sLmZEnmYw"",""encrypted_key"":""DNszn45AFTiUAWsPeLi-AZd4oSkUKLK95FrRMpDv9qEe9TIA6QOPezOh7NrOzTXa8AdrbnDRQJwO7S_0i4p5xQrEukjkzelD"",""header"":{""alg"":""A256KW"",""enc"":""A256CBC-HS512""},""iv"":""480QxkaQPCiaEmxJFPxgsg"",""tag"":""dHeG5UCb4nCSbysUKva_4I_Z4D2WfYUaeasxOsJXTYg""}";

    var payload = Jose.JWE.Decrypt(token, sharedKey);

    Assert.Equal("Hello World", payload.Plaintext);
    // FIX: xUnit's Assert.Equal signature is (expected, actual); the
    // arguments were reversed, producing misleading failure messages
    // (analyzer rule xUnit2000) and diverging from the rest of the file.
    Assert.Equal(2, payload.Recipient.JoseHeader.Count);
    Assert.Equal("A256CBC-HS512", payload.Recipient.JoseHeader["enc"]);
    Assert.Equal("A256KW", payload.Recipient.JoseHeader["alg"]);
}
/// <summary>
/// Decodes a flattened-JSON token carrying an "aad" member plus both a
/// "protected" header and a per-recipient "header", and checks the four
/// merged JoseHeader entries.
/// </summary>
[Fact]
public void DecodeAAD()
{
    var token = @"{""aad"":""ZXlKaGJHY2lPaUpCTVRJNFMxY2lMQ0psYm1NaU9pSkJNVEk0UTBKRExVaFRNalUySW4w"",""ciphertext"":""02VvoX1sUsmFi2ZpIbTI8g"",""encrypted_key"":""kH4te-O3DNZoDlxeDnBXM9CNx2d5IgVGO-cVMmqTRW_ws0EG_RKDQ7FLLztMM83z2s-pSNSZtFf3bx9Aky8XOzhIYCIU7XvmiQ0pp5z1FRdrwO-RxEOJfb2hAjD-hE5lCJkkY722QGs4IrUQ5N5Atc9h9-0vDcg-gksFIuaLMeRQj3LxivhwJO-QWFd6sG0FY6fBCwS1X6zsrZo-m9DNvrB6FhMpkLPBDOlCNnjKf1_Mz_jAuXIwnVUhoq59m8tvxQY1Fyngiug6zSnM207-0BTXzuCTnPgPAwGWGDLO7o0ttPT6RI_tLvYE6AuOynsqsHDaecyIkJ26dif3iRmkeg"",""header"":{""alg"":""RSA-OAEP-256"",""kid"":""Ex-p1KJFz8hQE1S76SzkhHcaObCKoDPrtAPJdWuTcTc""},""iv"":""E1BAiqIeAH_0eInT59zb8w"",""protected"":""eyJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwidHlwIjoiSldFIn0"",""tag"":""yYBiajF5oMtyK3mRVQyPnlJL25hXW8Ct8ZMcFK5ehDY""}";

    var payload = Jose.JWE.Decrypt(token, PrivKey());

    Assert.Equal("Hello World", payload.Plaintext);
    // FIX: arguments swapped to the (expected, actual) order required by
    // xUnit (analyzer rule xUnit2000); previously reversed.
    Assert.Equal(4, payload.Recipient.JoseHeader.Count);
    Assert.Equal("A256CBC-HS512", payload.Recipient.JoseHeader["enc"]);
    Assert.Equal("RSA-OAEP-256", payload.Recipient.JoseHeader["alg"]);
    Assert.Equal("Ex-p1KJFz8hQE1S76SzkhHcaObCKoDPrtAPJdWuTcTc", payload.Recipient.JoseHeader["kid"]);
    Assert.Equal("JWE", payload.Recipient.JoseHeader["typ"]);
}
/// <summary>
/// Decodes a general-JSON token with two recipients and NO "protected"
/// header; each recipient carries its full metadata in its own "header"
/// and each key decrypts its own entry.
/// </summary>
[Fact]
public void DecodeMultipleRecipientsNoProtectedHeader()
{
    var token = @"{
""ciphertext"": ""zKxWBKEUDF4cucE"",
""iv"": ""vEF_GqDbgyblOZ-i"",
""recipients"": [
{
""encrypted_key"": ""UDhZR9USzDByzrhxFtKYxzi5lUVvsze7kpjdfxMhDxyG5cKf2ldmqQ"",
""header"": {
""alg"": ""A256KW"",
""enc"": ""A256GCM""
}
},
{
""encrypted_key"": ""euWT-ji1Iud_oCCvnTrHTSZ37kh_6cUNaGXLqCQWClCNRksCkcFUasoDWwQBby_kpOihe0yoc-AL3Jm_jrdO1YyovumKLBgygwDyBttdl5sBW9EvabyJWu9Q6tAv_fJYhx-icOJyTtG133zHeymx_vTyBKtJ9-S3zCfMPBFl2Yy0mn34f9EIxfENBuYmpKyEcTvXO9LXpjQuTyBpxxTd6jjoMjESHzH0xL4WWugQCGpgY2zilW_bOBZRmZ8wxEhB0oKSCjY13b9ZeGcWgK0rQAt-ekw9dKAp2rz7jOLceFwyvbhpKrtBgbWHiaMBKNoRb6djRX0Z_Tn79Fc-VUvVBA"",
""header"": {
""alg"": ""RSA-OAEP-256"",
""enc"": ""A256GCM"",
""typ"": ""JWE""
}
}
],
""tag"": ""9UFM1EdyzU3ExyrtLaWaQg""
}";

    // FIX throughout: Assert.Equal arguments swapped to the (expected,
    // actual) order required by xUnit (analyzer rule xUnit2000).
    var firstRecipient = Jose.JWE.Decrypt(token, sharedKey);
    Assert.Equal("Hello World", firstRecipient.Plaintext);
    Assert.Equal(2, firstRecipient.Recipient.JoseHeader.Count);
    Assert.Equal("A256GCM", firstRecipient.Recipient.JoseHeader["enc"]);
    Assert.Equal("A256KW", firstRecipient.Recipient.JoseHeader["alg"]);

    var secondRecipient = Jose.JWE.Decrypt(token, PrivKey());
    Assert.Equal("Hello World", secondRecipient.Plaintext);
    Assert.Equal(3, secondRecipient.Recipient.JoseHeader.Count);
    Assert.Equal("A256GCM", secondRecipient.Recipient.JoseHeader["enc"]);
    Assert.Equal("JWE", secondRecipient.Recipient.JoseHeader["typ"]);
    Assert.Equal("RSA-OAEP-256", secondRecipient.Recipient.JoseHeader["alg"]);
}
/// <summary>
/// Decodes a general-JSON token with two recipients where enc/typ live
/// in the shared "protected" header and alg (plus kid for the RSA
/// recipient) lives per-recipient; checks the merged header each key sees.
/// </summary>
[Fact]
public void DecodeMultipleRecipientsWithProtectedHeader()
{
    var token = @"{
""ciphertext"": ""gVZ-iyqX3o8xlFzZD3e58g"",
""iv"": ""iv7cQBIEzM5Jdvt1nakgvw"",
""protected"": ""eyJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwidHlwIjoiSldFIn0"",
""recipients"": [
{
""encrypted_key"": ""5xCobIXzGwDTSITcStYvDc8C636p4i4PjHsvfTCD2yaHjXuA-0YDxRj6tPDTn2rkhnRII3hhDC6XO0b_ir-OZ2FWKr01nC3a"",
""header"": {
""alg"": ""A256KW""
}
},
{
""encrypted_key"": ""Vx6HmM8aoem03w67iQOGiBI2B-thcLwVIZWLZavwDWRub3yZNTHlsM0FNGXhX9qhenJ-3eIBbsAwQnbdkBQaOugxHANp-xoYbWqq1FXcHiaQSRs9K1vCd-xgyJbNuqJHD3h1gEupIoxCJNAu6dypzrUcC_nLX8L6Y-H4ST_18bPFfSMbD3YatvS9k879NJzru_gigvaoyCrwW0LD1Fry05cPEl9hkyiKpnr63MmOVfGHYQvqO_xAKq02w5-LcYmuloPfpFOZEAoF3OB_4zKAcEEhEmRujSvIPrsaG3mJiRRchryiRSt5TIDO_gOkaySGQ8JFULt8zK_k5Sl0SdhZ-Q"",
""header"": {
""alg"": ""RSA-OAEP-256"",
""kid"": ""Ex-p1KJFz8hQE1S76SzkhHcaObCKoDPrtAPJdWuTcTc""
}
}
],
""tag"": ""UxOqwzlsIQsbR3W0nin1EAtez0MMgJbuNr2ZjCtmMIE""
}";

    // FIX throughout: Assert.Equal arguments swapped to the (expected,
    // actual) order required by xUnit (analyzer rule xUnit2000).
    var firstRecipient = Jose.JWE.Decrypt(token, sharedKey);
    Assert.Equal("Hello World", firstRecipient.Plaintext);
    Assert.Equal(3, firstRecipient.Recipient.JoseHeader.Count);
    Assert.Equal("A256CBC-HS512", firstRecipient.Recipient.JoseHeader["enc"]);
    Assert.Equal("JWE", firstRecipient.Recipient.JoseHeader["typ"]);
    Assert.Equal("A256KW", firstRecipient.Recipient.JoseHeader["alg"]);

    var secondRecipient = Jose.JWE.Decrypt(token, PrivKey());
    Assert.Equal("Hello World", secondRecipient.Plaintext);
    Assert.Equal(4, secondRecipient.Recipient.JoseHeader.Count);
    Assert.Equal("A256CBC-HS512", secondRecipient.Recipient.JoseHeader["enc"]);
    Assert.Equal("JWE", secondRecipient.Recipient.JoseHeader["typ"]);
    Assert.Equal("RSA-OAEP-256", secondRecipient.Recipient.JoseHeader["alg"]);
    Assert.Equal("Ex-p1KJFz8hQE1S76SzkhHcaObCKoDPrtAPJdWuTcTc", secondRecipient.Recipient.JoseHeader["kid"]);
}
/// <summary>
/// Decrypts a two-recipient JSON-serialized JWE where "typ" is protected and "enc"
/// lives in the shared unprotected header; each recipient must see the merged JOSE
/// header, including algorithm-specific parameters (p2c/p2s, epk).
/// </summary>
[Fact]
public void DecodeMultipleRecipientsWithUnprotectedHeader()
{
    var token = @"{
""ciphertext"": ""wnecd9ceRDb0PqFdvNkjUw"",
""iv"": ""d-F9AVZ7W6M5bWp45G_okw"",
""protected"": ""eyJ0eXAiOiJKV0UifQ"",
""recipients"": [
{
""encrypted_key"": ""gk0a-lu_f588KjKomSl8v4ULeNEXktECpLWkTyxpmtFXMDyO-BtARt1fuBkFJsYqAwUNxz4uh1u4i3QCpKxdl01tZRW1yyxR"",
""header"": {
""alg"": ""PBES2-HS256+A128KW"",
""p2c"": 8192,
""p2s"": ""kpL8s71MjhPnBExCF-cIMA""
}
},
{
""encrypted_key"": ""WDt1HtoyK0lazAF84EBoL7OWtkCyKBEj2hG_QEgX0hx2QDAgFh7HGiR5NnnChFTwdpXIA-8tBDzhWFLd6aEU8w8sqjC4txoc"",
""header"": {
""alg"": ""ECDH-ES+A128KW"",
""epk"": {
""crv"": ""P-256"",
""kty"": ""EC"",
""x"": ""WOqJxZwzivLSO-r3qRkBVDd9uA_de_AIu3G3hkIQg1M"",
""y"": ""aFbCEl231v5IeA_Zjg8kMVJXxZWhpEHibtvHnq7Kk9k""
}
}
}
],
""tag"": ""zJxGA445Q4LBp4WAXo0vdCfD8ZdrWVLGRPkUH8Sv_6I"",
""unprotected"": {
""enc"": ""A256CBC-HS512""
}
}";

    // Recipient #1: password-based key wrap (PBES2).
    var firstRecipient = Jose.JWE.Decrypt(token, "secret");

    // xUnit convention (xUnit2000): expected value comes first.
    Assert.Equal("Hello World", firstRecipient.Plaintext);
    Assert.Equal(5, firstRecipient.Recipient.JoseHeader.Count);
    Assert.Equal("A256CBC-HS512", firstRecipient.Recipient.JoseHeader["enc"]);
    Assert.Equal("JWE", firstRecipient.Recipient.JoseHeader["typ"]);
    Assert.Equal("PBES2-HS256+A128KW", firstRecipient.Recipient.JoseHeader["alg"]);
    Assert.Equal(8192, firstRecipient.Recipient.JoseHeader["p2c"]);
    Assert.Equal("kpL8s71MjhPnBExCF-cIMA", firstRecipient.Recipient.JoseHeader["p2s"]);

    // Recipient #2: ECDH-ES+A128KW carrying an ephemeral P-256 key in "epk".
    var secondRecipient = Jose.JWE.Decrypt(token, Ecc256Private());

    Assert.Equal("Hello World", secondRecipient.Plaintext);
    Assert.Equal(4, secondRecipient.Recipient.JoseHeader.Count);
    Assert.Equal("A256CBC-HS512", secondRecipient.Recipient.JoseHeader["enc"]);
    Assert.Equal("JWE", secondRecipient.Recipient.JoseHeader["typ"]);
    Assert.Equal("ECDH-ES+A128KW", secondRecipient.Recipient.JoseHeader["alg"]);
    Assert.True(secondRecipient.Recipient.JoseHeader.ContainsKey("epk"));

    var epk = (IDictionary<string, object>)secondRecipient.Recipient.JoseHeader["epk"];
    Assert.Equal(4, epk.Count);
    Assert.Equal("P-256", epk["crv"]);
    Assert.Equal("EC", epk["kty"]);
    Assert.Equal("WOqJxZwzivLSO-r3qRkBVDd9uA_de_AIu3G3hkIQg1M", epk["x"]);
    Assert.Equal("aFbCEl231v5IeA_Zjg8kMVJXxZWhpEHibtvHnq7Kk9k", epk["y"]);
}
/// <summary>
/// The protected header (decoded: {"enc":"A256CBC-HS512","typ":"JWE"}) and the first
/// recipient's per-recipient header both carry "typ", so decryption must be rejected
/// with a JoseException (duplicate header parameter across header locations).
/// </summary>
[Fact]
public void DecodeDuplicateKeys_ProtectedHeader_ReceipientHeader()
{
var token = @"{
""ciphertext"": ""hPHYxxZWLWxI5g224mPnAA"",
""iv"": ""r_DCANXTkVo1TEwkd-Cx1w"",
""protected"": ""eyJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwidHlwIjoiSldFIn0"",
""recipients"": [
{
""encrypted_key"": ""KqEaCvRWCxZW9kG3eaf4ekL1nf5YWjv_m96QVjOaSV0H5O1lORQkDCkuNrWYwHLMGAEgXSaGGRXFIdFuG68zgVQJ5u1I7Ona"",
""header"": {
""alg"": ""A256KW"",
""typ"": ""JWE""
}
},
{
""encrypted_key"": ""EYPZerMlLRu0LU1yfNiNNnl92Stz36hzM-NMNiBHmBLyysg6JTOi8PB2QOh4FUKO-YWpq80iacMiUniGmEnRrK8x4n4_acYADtj_36aKf5guJ3XOWjpm8BfTRtLJ-D7OlrDlLnn23pQHYlYHAXZMEky1JRbUbpt-1Jf1raHUUZIxSS2s2aZxkxpQR8lgfId3aPwzGdIqPWgWvKsNtR510E8RSKJVatNL5uGwDDo1F5gpxIThdUcNAAoINaBlpbBUWQvefRAQYzOT25jcmCuNQmKMPJrhsZZpyC4QVvjJ5nXqi027xHKelOIaUkpliPFmnq2rFp0RDFe_Kcq7_hk86A"",
""header"": {
""alg"": ""RSA-OAEP-256"",
""kid"": ""Ex-p1KJFz8hQE1S76SzkhHcaObCKoDPrtAPJdWuTcTc""
}
}
],
""tag"": ""q_8tx6Ud3q-X1K6NKaYF_qfUriicAm8M4eRX7H75N04""
}";
//then
Assert.Throws<JoseException>(() => Jose.JWE.Decrypt(token, sharedKey));
}
/// <summary>
/// The protected header (decoded: {"typ":"JWE"}) and the shared unprotected header
/// both carry "typ", so decryption must be rejected with a JoseException
/// (duplicate header parameter across header locations).
/// </summary>
[Fact]
public void DecodeDuplicateKeys_ProtectedHeader_UnprotectedHeader()
{
var token = @"{
""ciphertext"": ""z95vPJ_gXxejpFsno9EBCQ"",
""iv"": ""jGdsbNjl-_uHT4V86MdFBA"",
""protected"": ""eyJ0eXAiOiJKV0UifQ"",
""recipients"": [
{
""encrypted_key"": ""Kpr6FHWViJNnGCuDEEl27dsCiyWHRjiYuB2dOque06oqJZGVYgu9yif0L6OKd9gWvltrGJdo_byafGF5lwIvcl6ZGCNfRF3s"",
""header"": {
""alg"": ""PBES2-HS256+A128KW"",
""p2c"": 8192,
""p2s"": ""C5Hn0y-ho1mwygXPVfDynQ""
}
},
{
""encrypted_key"": ""VuzPor1OEenPP-w0qg__uGS0w4h6Yt7K2ZHtzjqj0mnAzhNzTHumYFjaivk0dUwk1H2jxieEO9FYdC48BOMMjMcylnVGTgAV"",
""header"": {
""alg"": ""ECDH-ES+A128KW"",
""epk"": {
""crv"": ""P-256"",
""kty"": ""EC"",
""x"": ""LqM-HYhs3GcIPKRdiR2R7CuPx-aPVwBohgzP9l2WdfA"",
""y"": ""0hP45SduS8HPQaZ8RAyikZTuvYCjKaknhcCSVK_tIIY""
}
}
}
],
""tag"": ""cbKJYp4ZRWWPWVHDyL2vuUjAZ3oAHXT1I75t1j9rCKI"",
""unprotected"": {
""enc"": ""A256CBC-HS512"",
""typ"": ""JWE""
}
}";
//then
Assert.Throws<JoseException>(() => Jose.JWE.Decrypt(token, sharedKey));
}
/// <summary>
/// The shared unprotected header and the second recipient's per-recipient header
/// both carry "alg" ("ECDH-ES+A128KW"), so decryption must be rejected with a
/// JoseException (duplicate header parameter across header locations).
/// </summary>
[Fact]
public void DecodeDuplicateKeys_UnprotectedHeader_RecipientHeader()
{
var token = @"{
""ciphertext"": ""z95vPJ_gXxejpFsno9EBCQ"",
""iv"": ""jGdsbNjl-_uHT4V86MdFBA"",
""protected"": ""eyJ0eXAiOiJKV0UifQ"",
""recipients"": [
{
""encrypted_key"": ""Kpr6FHWViJNnGCuDEEl27dsCiyWHRjiYuB2dOque06oqJZGVYgu9yif0L6OKd9gWvltrGJdo_byafGF5lwIvcl6ZGCNfRF3s"",
""header"": {
""alg"": ""PBES2-HS256+A128KW"",
""p2c"": 8192,
""p2s"": ""C5Hn0y-ho1mwygXPVfDynQ""
}
},
{
""encrypted_key"": ""VuzPor1OEenPP-w0qg__uGS0w4h6Yt7K2ZHtzjqj0mnAzhNzTHumYFjaivk0dUwk1H2jxieEO9FYdC48BOMMjMcylnVGTgAV"",
""header"": {
""alg"": ""ECDH-ES+A128KW"",
""epk"": {
""crv"": ""P-256"",
""kty"": ""EC"",
""x"": ""LqM-HYhs3GcIPKRdiR2R7CuPx-aPVwBohgzP9l2WdfA"",
""y"": ""0hP45SduS8HPQaZ8RAyikZTuvYCjKaknhcCSVK_tIIY""
}
}
}
],
""tag"": ""cbKJYp4ZRWWPWVHDyL2vuUjAZ3oAHXT1I75t1j9rCKI"",
""unprotected"": {
""enc"": ""A256CBC-HS512"",
""alg"": ""ECDH-ES+A128KW""
}
}";
//then
Assert.Throws<JoseException>(() => Jose.JWE.Decrypt(token, Ecc256Private()));
}
/// <summary>
/// Encrypts a single-recipient JSON-serialized JWE with A256KW + A256GCM, validates
/// the produced serialization field-by-field (header placement and base64url field
/// lengths), then round-trips it back to the original plaintext.
/// </summary>
[Fact]
public void EncodeSingleRecipient()
{
    var payload = "Hello World !";
    JweRecipient r = new JweRecipient(JweAlgorithm.A256KW, sharedKey);

    string token = JWE.Encrypt(payload, new[] { r }, JweEncryption.A256GCM);

    Console.Out.WriteLine("[JSON][A256KW][A256GCM]: {0}", token);

    JObject deserialized = JObject.Parse(token);

    // "enc" belongs to the protected header; "alg" to the per-recipient header.
    Assert.Equal("{\"enc\":\"A256GCM\"}",
        Encoding.UTF8.GetString(Base64Url.Decode((string)deserialized["protected"])));
    Assert.True(deserialized["header"] is JObject);
    Assert.Equal("{\"alg\":\"A256KW\"}", deserialized["header"].ToString(Newtonsoft.Json.Formatting.None));
    Assert.Equal("A256KW", deserialized["header"]["alg"]);
    Assert.Equal(54, ((string)deserialized["encrypted_key"]).Length); //CEK size
    Assert.Equal(16, ((string)deserialized["iv"]).Length); //IV size
    Assert.Equal(18, ((string)deserialized["ciphertext"]).Length); //cipher text size
    Assert.Equal(22, ((string)deserialized["tag"]).Length); //auth tag size

    var decoded = JWE.Decrypt(token, sharedKey);
    // xUnit convention (xUnit2000): expected value comes first.
    Assert.Equal(payload, decoded.Plaintext);
}
/// <summary>
/// Encrypts a single-recipient JSON-serialized JWE with caller-supplied additional
/// authenticated data (AAD), validates the serialization, and round-trips it.
/// </summary>
[Fact]
public void EncodeWithAAD()
{
    var payload = "Hello World !";
    JweRecipient r = new JweRecipient(JweAlgorithm.A256KW, sharedKey);
    var aad = new byte[] { 101, 121, 74, 104, 98, 71, 99, 105, 79, 105, 74, 66, 77, 84, 73, 52, 83, 49, 99, 105, 76, 67, 74, 108, 98, 109, 77, 105, 79, 105, 74, 66, 77, 84, 73, 52, 81, 48, 74, 68, 76, 85, 104, 84, 77, 106, 85, 50, 73, 110, 48 };

    string token = JWE.Encrypt(payload, new[] { r }, JweEncryption.A256GCM, aad);

    Console.Out.WriteLine("[JSON][A256KW][A256GCM][AAD]: {0}", token);

    JObject deserialized = JObject.Parse(token);

    Assert.Equal("{\"enc\":\"A256GCM\"}",
        Encoding.UTF8.GetString(Base64Url.Decode((string)deserialized["protected"])));
    Assert.True(deserialized["header"] is JObject);
    Assert.Equal("{\"alg\":\"A256KW\"}", deserialized["header"].ToString(Newtonsoft.Json.Formatting.None));
    Assert.Equal("A256KW", deserialized["header"]["alg"]);
    Assert.Equal(54, ((string)deserialized["encrypted_key"]).Length); //CEK size
    Assert.Equal(16, ((string)deserialized["iv"]).Length); //IV size
    Assert.Equal(18, ((string)deserialized["ciphertext"]).Length); //cipher text size
    Assert.Equal(22, ((string)deserialized["tag"]).Length); //auth tag size

    var decoded = JWE.Decrypt(token, sharedKey);
    // xUnit convention (xUnit2000): expected value comes first.
    Assert.Equal(payload, decoded.Plaintext);
}
/// <summary>
/// Encrypts one payload for three recipients (PBES2, ECDH-ES+A128KW, RSA-OAEP-256)
/// in general JSON serialization, validates each recipient entry's header and
/// encrypted_key, then decrypts with each recipient's key.
/// </summary>
[Fact]
public void EncodeMultipleRecipients()
{
    var payload = "Hello World !";
    JweRecipient r1 = new JweRecipient(JweAlgorithm.PBES2_HS256_A128KW, "secret");
    JweRecipient r2 = new JweRecipient(JweAlgorithm.ECDH_ES_A128KW, Ecc256Public());
    JweRecipient r3 = new JweRecipient(JweAlgorithm.RSA_OAEP_256, PubKey());

    string token = JWE.Encrypt(payload, new[] { r1, r2, r3 }, JweEncryption.A256GCM, mode: SerializationMode.Json);

    Console.Out.WriteLine("[JSON][PBES2_HS256_A128KW, ECDH-ES+A128KW, RSA_OAEP_256][A256GCM]: {0}", token);

    JObject deserialized = JObject.Parse(token);

    Assert.Equal("{\"enc\":\"A256GCM\"}",
        Encoding.UTF8.GetString(Base64Url.Decode((string)deserialized["protected"])));
    Assert.Equal(16, ((string)deserialized["iv"]).Length); //IV size
    Assert.Equal(18, ((string)deserialized["ciphertext"]).Length); //cipher text size
    Assert.Equal(22, ((string)deserialized["tag"]).Length); //auth tag size

    Assert.True(deserialized["recipients"] is JArray);
    Assert.Equal(3, ((JArray)deserialized["recipients"]).Count);

    var rec0 = ((JArray)deserialized["recipients"])[0];
    var rec1 = ((JArray)deserialized["recipients"])[1];
    var rec2 = ((JArray)deserialized["recipients"])[2];

    // Recipient #1: PBES2 carries its iteration count and salt per-recipient.
    Assert.True(rec0["header"] is JObject);
    Assert.Equal("PBES2-HS256+A128KW", rec0["header"]["alg"]);
    Assert.Equal(8192, rec0["header"]["p2c"]);
    Assert.Equal(16, ((string)rec0["header"]["p2s"]).Length);
    Assert.Equal(54, ((string)rec0["encrypted_key"]).Length);

    // Recipient #2: ECDH-ES carries the ephemeral P-256 public key in "epk".
    Assert.True(rec1["header"] is JObject);
    Assert.True(rec1["header"]["epk"] is JObject);
    Assert.Equal("ECDH-ES+A128KW", rec1["header"]["alg"]);
    Assert.Equal("EC", rec1["header"]["epk"]["kty"]);
    Assert.Equal("P-256", rec1["header"]["epk"]["crv"]);
    Assert.Equal(43, ((string)rec1["header"]["epk"]["x"]).Length);
    Assert.Equal(43, ((string)rec1["header"]["epk"]["y"]).Length);
    Assert.Equal(54, ((string)rec1["encrypted_key"]).Length);

    // Recipient #3: RSA-OAEP-256 (2048-bit key => 342-char base64url encrypted key).
    Assert.True(rec2["header"] is JObject);
    Assert.Equal("RSA-OAEP-256", rec2["header"]["alg"]);
    Assert.Equal(342, ((string)rec2["encrypted_key"]).Length);

    // Every recipient's key must decrypt the same payload.
    // xUnit convention (xUnit2000): expected value comes first.
    Assert.Equal(payload, JWE.Decrypt(token, "secret").Plaintext);
    Assert.Equal(payload, JWE.Decrypt(token, PrivKey()).Plaintext);
    Assert.Equal(payload, JWE.Decrypt(token, Ecc256Private()).Plaintext);
}
/// <summary>
/// Encrypts with a caller-supplied shared unprotected header and verifies it is
/// emitted verbatim in the "unprotected" member of the JSON serialization.
/// </summary>
[Fact]
public void EncodeUnprotectedHeader()
{
    var payload = "Hello World !";
    var unprotected = new Dictionary<string, object>
    {
        { "jku", "https://server.example.com/keys.jwks" }
    };
    JweRecipient r = new JweRecipient(JweAlgorithm.RSA_OAEP_256, PubKey());

    string token = JWE.Encrypt(payload, new[] { r }, JweEncryption.A256GCM, unprotectedHeaders: unprotected);

    Console.Out.WriteLine("[JSON][RSA_OAEP_256][A256GCM]: {0}", token);

    JObject deserialized = JObject.Parse(token);

    Assert.Equal("{\"enc\":\"A256GCM\"}",
        Encoding.UTF8.GetString(Base64Url.Decode((string)deserialized["protected"])));
    Assert.True(deserialized["header"] is JObject);
    Assert.Equal("{\"alg\":\"RSA-OAEP-256\"}", deserialized["header"].ToString(Newtonsoft.Json.Formatting.None));
    Assert.True(deserialized["unprotected"] is JObject);
    Assert.Equal("{\"jku\":\"https://server.example.com/keys.jwks\"}", deserialized["unprotected"].ToString(Newtonsoft.Json.Formatting.None));
    Assert.Equal(16, ((string)deserialized["iv"]).Length); //IV size
    Assert.Equal(18, ((string)deserialized["ciphertext"]).Length); //cipher text size
    Assert.Equal(22, ((string)deserialized["tag"]).Length); //auth tag size

    // xUnit convention (xUnit2000): expected value comes first.
    Assert.Equal(payload, Encoding.UTF8.GetString(JWE.Decrypt(token, PrivKey()).PlaintextBytes));
}
/// <summary>
/// Encrypts with extra caller-supplied protected headers and verifies they are
/// merged into the integrity-protected header alongside "enc".
/// </summary>
[Fact]
public void EncodeExtraProtectedHeaders()
{
    var payload = "Hello World !";
    var extra = new Dictionary<string, object>
    {
        { "jku", "https://server.example.com/keys.jwks" }
    };
    JweRecipient r = new JweRecipient(JweAlgorithm.RSA_OAEP_256, PubKey());

    string token = JWE.Encrypt(payload, new[] { r }, JweEncryption.A256GCM, extraProtectedHeaders: extra);

    Console.Out.WriteLine("[JSON][RSA_OAEP_256][A256GCM]: {0}", token);

    JObject deserialized = JObject.Parse(token);

    Assert.Equal("{\"enc\":\"A256GCM\",\"jku\":\"https://server.example.com/keys.jwks\"}",
        Encoding.UTF8.GetString(Base64Url.Decode((string)deserialized["protected"])));
    Assert.True(deserialized["header"] is JObject);
    Assert.Equal("{\"alg\":\"RSA-OAEP-256\"}", deserialized["header"].ToString(Newtonsoft.Json.Formatting.None));
    Assert.Equal(16, ((string)deserialized["iv"]).Length); //IV size
    Assert.Equal(18, ((string)deserialized["ciphertext"]).Length); //cipher text size
    Assert.Equal(22, ((string)deserialized["tag"]).Length); //auth tag size

    // xUnit convention (xUnit2000): expected value comes first.
    Assert.Equal(payload, Encoding.UTF8.GetString(JWE.Decrypt(token, PrivKey()).PlaintextBytes));
}
/// <summary>
/// Encrypts with extra per-recipient headers and verifies they are merged into the
/// recipient's "header" member next to "alg".
/// </summary>
[Fact]
public void EncodeExtraRecipientHeaders()
{
    var payload = "Hello World !";
    var extra = new Dictionary<string, object>
    {
        { "kid", "2011-04-29" }
    };
    JweRecipient r = new JweRecipient(JweAlgorithm.RSA_OAEP_256, PubKey(), header: extra);

    string token = JWE.Encrypt(payload, new[] { r }, JweEncryption.A256GCM);

    Console.Out.WriteLine("[JSON][RSA_OAEP_256][A256GCM]: {0}", token);

    JObject deserialized = JObject.Parse(token);

    Assert.Equal("{\"enc\":\"A256GCM\"}",
        Encoding.UTF8.GetString(Base64Url.Decode((string)deserialized["protected"])));
    Assert.True(deserialized["header"] is JObject);
    Assert.Equal("{\"alg\":\"RSA-OAEP-256\",\"kid\":\"2011-04-29\"}", deserialized["header"].ToString(Newtonsoft.Json.Formatting.None));
    Assert.Equal(16, ((string)deserialized["iv"]).Length); //IV size
    Assert.Equal(18, ((string)deserialized["ciphertext"]).Length); //cipher text size
    Assert.Equal(22, ((string)deserialized["tag"]).Length); //auth tag size

    // xUnit convention (xUnit2000): expected value comes first.
    Assert.Equal(payload, Encoding.UTF8.GetString(JWE.Decrypt(token, PrivKey()).PlaintextBytes));
}
/// <summary>
/// "enc" is always placed in the protected header, so supplying it again via the
/// per-recipient header must be rejected with an ArgumentException.
/// </summary>
[Fact]
public void EncodeDuplicateHeaders_Protected_PerRecipient()
{
    var perRecipientHeaders = new Dictionary<string, object>
    {
        ["enc"] = "A256GCM"
    };
    var recipient = new JweRecipient(JweAlgorithm.RSA_OAEP_256, PubKey(), perRecipientHeaders);

    Assert.Throws<ArgumentException>(
        () => JWE.Encrypt("Hello World !", new[] { recipient }, JweEncryption.A256GCM));
}
/// <summary>
/// "enc" is always placed in the protected header, so supplying it again via the
/// shared unprotected header must be rejected with an ArgumentException.
/// </summary>
[Fact]
public void EncodeDuplicateHeaders_Protected_Unprotected()
{
    var sharedUnprotected = new Dictionary<string, object>
    {
        ["enc"] = "A256GCM"
    };
    var recipient = new JweRecipient(JweAlgorithm.RSA_OAEP_256, PubKey());

    Assert.Throws<ArgumentException>(
        () => JWE.Encrypt("Hello World !", new[] { recipient }, JweEncryption.A256GCM, unprotectedHeaders: sharedUnprotected));
}
/// <summary>
/// The same key ("jku") supplied in both the shared unprotected header and a
/// recipient's per-recipient header must be rejected with an ArgumentException.
/// </summary>
[Fact]
public void EncodeDuplicateHeaders_Unprotected_PerRecipient()
{
    var perRecipientHeaders = new Dictionary<string, object>
    {
        ["jku"] = "https://server.example.com/keys.jwks"
    };
    var sharedUnprotected = new Dictionary<string, object>
    {
        ["jku"] = "https://server.example.com/keys.jwks"
    };
    var recipient = new JweRecipient(JweAlgorithm.RSA_OAEP_256, PubKey(), perRecipientHeaders);

    Assert.Throws<ArgumentException>(
        () => JWE.Encrypt("Hello World !", new[] { recipient }, JweEncryption.A256GCM, unprotectedHeaders: sharedUnprotected));
}
/// <summary>
/// Converts a JsonWebKey into the legacy key object the library accepts directly:
/// an <see cref="RSA"/> instance for "RSA" keys (all private CRT parameters are
/// imported) or the raw key bytes for "oct" keys. Other key types throw
/// <see cref="NotImplementedException"/>.
/// </summary>
private static object GetLegacyKeyObjectFromJwk(JsonWebKey jwk)
{
    if (jwk.Kty == "RSA")
    {
        var rsa = RSA.Create();
        rsa.ImportParameters(new RSAParameters()
        {
            Modulus = Base64Url.Decode(jwk.N),
            Exponent = Base64Url.Decode(jwk.E),
            D = Base64Url.Decode(jwk.D),
            P = Base64Url.Decode(jwk.P),
            Q = Base64Url.Decode(jwk.Q),
            DP = Base64Url.Decode(jwk.DP),
            DQ = Base64Url.Decode(jwk.DQ),
            InverseQ = Base64Url.Decode(jwk.QI),
        });
        return rsa;
    }

    if (jwk.Kty == "oct")
    {
        return Base64Url.Decode(jwk.K);
    }

    throw new NotImplementedException($"Key type not implemented: {jwk.Kty}");
}
// RSA private key extracted from the bundled test certificate.
private static RSA PrivKey() => X509().GetRSAPrivateKey();
// RSA public key extracted from the bundled test certificate.
private static RSA PubKey() => X509().GetRSAPublicKey();
// Loads the bundled 2048-bit RSA test certificate (password "1") with an
// exportable, machine-store-backed key so both key halves are usable in tests.
private static X509Certificate2 X509() =>
    new X509Certificate2("jwt-2048.p12", "1", X509KeyStorageFlags.Exportable | X509KeyStorageFlags.MachineKeySet);
/// <summary>
/// Builds the P-256 public key (key-agreement usage) for the fixed test key pair.
/// The private scalar is intentionally not included here — see Ecc256Private().
/// </summary>
private static CngKey Ecc256Public()
{
    // Fix: the original also declared the private scalar `d` here but never used
    // it; a public-key factory should not carry the private component at all.
    byte[] x = { 4, 114, 29, 223, 58, 3, 191, 170, 67, 128, 229, 33, 242, 178, 157, 150, 133, 25, 209, 139, 166, 69, 55, 26, 84, 48, 169, 165, 67, 232, 98, 9 };
    byte[] y = { 131, 116, 8, 14, 22, 150, 18, 75, 24, 181, 159, 78, 90, 51, 71, 159, 214, 186, 250, 47, 207, 246, 142, 127, 54, 183, 72, 72, 253, 21, 88, 53 };

    return EccKey.New(x, y, usage: CngKeyUsages.KeyAgreement);
}
/// <summary>
/// Builds the P-256 private key (key-agreement usage) for the same fixed test key
/// pair used by Ecc256Public().
/// </summary>
private static CngKey Ecc256Private()
{
    byte[] xCoord = { 4, 114, 29, 223, 58, 3, 191, 170, 67, 128, 229, 33, 242, 178, 157, 150, 133, 25, 209, 139, 166, 69, 55, 26, 84, 48, 169, 165, 67, 232, 98, 9 };
    byte[] yCoord = { 131, 116, 8, 14, 22, 150, 18, 75, 24, 181, 159, 78, 90, 51, 71, 159, 214, 186, 250, 47, 207, 246, 142, 127, 54, 183, 72, 72, 253, 21, 88, 53 };
    byte[] privateScalar = { 42, 148, 231, 48, 225, 196, 166, 201, 23, 190, 229, 199, 20, 39, 226, 70, 209, 148, 29, 70, 125, 14, 174, 66, 9, 198, 80, 251, 95, 107, 98, 206 };

    return EccKey.New(xCoord, yCoord, privateScalar, CngKeyUsages.KeyAgreement);
}
// 256-bit symmetric key used across the JWE tests (A256KW wrap / direct decrypt).
private static readonly byte[] sharedKey = new byte[] { 21, 26, 196, 88, 134, 11, 137, 127, 215, 118, 142, 180, 138, 115, 246, 247, 179, 182, 140, 136, 76, 33, 206, 189, 255, 22, 243, 100, 251, 74, 254, 161 };
// Three distinct 256-bit AES key-wrap keys (differ only in the first byte).
private static readonly byte[] aes256KWKey1 = new byte[] { 194, 164, 235, 6, 138, 248, 171, 239, 24, 216, 11, 22, 137, 199, 215, 133, 194, 164, 235, 6, 138, 248, 171, 239, 24, 216, 11, 22, 137, 199, 215, 133, };
private static readonly byte[] aes256KWKey2 = new byte[] { 94, 164, 235, 6, 138, 248, 171, 239, 24, 216, 11, 22, 137, 199, 215, 133, 194, 164, 235, 6, 138, 248, 171, 239, 24, 216, 11, 22, 137, 199, 215, 133, };
private static readonly byte[] aes256KWKey3 = new byte[] { 4, 164, 235, 6, 138, 248, 171, 239, 24, 216, 11, 22, 137, 199, 215, 133, 194, 164, 235, 6, 138, 248, 171, 239, 24, 216, 11, 22, 137, 199, 215, 133, };
// 128-bit AES key-wrap key.
private static byte[] aes128KWKey = new byte[] { 194, 164, 235, 6, 138, 248, 171, 239, 24, 216, 11, 22, 137, 199, 215, 133 };
// Ready-made recipients for the algorithms exercised in these tests. Declared as
// get-only properties so each access builds a fresh JweRecipient.
private static JweRecipient recipientEcdhEs1 => new JweRecipient(JweAlgorithm.ECDH_ES, Ecc256Public());
private static JweRecipient recipientAes256KW1 => new JweRecipient(JweAlgorithm.A256KW, aes256KWKey1);
private static JweRecipient recipientAes256KW2 => new JweRecipient(JweAlgorithm.A256KW, aes256KWKey2);
private static JweRecipient recipientAes128KW => new JweRecipient(JweAlgorithm.A128KW, aes128KWKey);
private static JweRecipient recipientDirectEncyption1 => new JweRecipient(JweAlgorithm.DIR, aes256KWKey1);
private static JweRecipient recipientRsa1 => new JweRecipient(JweAlgorithm.RSA1_5, PubKey());
// Two-recipient (RSA1_5 and A128KW) general JSON serialization example; per its
// name, taken from RFC 7516 Appendix A.4.7.
private static string Rfc7516_A_4_7_ExampleJwe = @"
{
""protected"":
""eyJlbmMiOiJBMTI4Q0JDLUhTMjU2In0"",
""unprotected"":
{ ""jku"":""https://server.example.com/keys.jwks""},
""recipients"":[
{""header"":
{ ""alg"":""RSA1_5"",""kid"":""2011-04-29""},
""encrypted_key"":
""UGhIOguC7IuEvf_NPVaXsGMoLOmwvc1GyqlIKOK1nN94nHPoltGRhWhw7Zx0-kFm1NJn8LE9XShH59_i8J0PH5ZZyNfGy2xGdULU7sHNF6Gp2vPLgNZ__deLKxGHZ7PcHALUzoOegEI-8E66jX2E4zyJKx-YxzZIItRzC5hlRirb6Y5Cl_p-ko3YvkkysZIFNPccxRU7qve1WYPxqbb2Yw8kZqa2rMWI5ng8OtvzlV7elprCbuPhcCdZ6XDP0_F8rkXds2vE4X-ncOIM8hAYHHi29NX0mcKiRaD0-D-ljQTP-cFPgwCp6X-nZZd9OHBv-B3oWh2TbqmScqXMR4gp_A""},
{""header"":
{ ""alg"":""A128KW"",""kid"":""7""},
""encrypted_key"":
""6KB707dM9YTIgHtLvtgWQ8mKwboJW3of9locizkDTHzBC2IlrT1oOQ""}],
""iv"":
""AxY8DCtDaGlsbGljb3RoZQ"",
""ciphertext"":
""KDlTtXchhZTGufMYmOYGS4HffxPSUrfmqCHXaI9wOGY"",
""tag"":
""Mz-VPPyU4RlcuYv1IwIvzw""
}";
// RSA private key JWK (full CRT parameters); per its name, from RFC 7516 A.2.3.
private static string Rfc7516_A_2_3_ExampleJwk = @"
{""kty"":""RSA"",
""n"":""sXchDaQebHnPiGvyDOAT4saGEUetSyo9MKLOoWFsueri23bOdgWp4Dy1WlUzewbgBHod5pcM9H95GQRV3JDXboIRROSBigeC5yjU1hGzHHyXss8UDprecbAYxknTcQkhslANGRUZmdTOQ5qTRsLAt6BTYuyvVRdhS8exSZEy_c4gs_7svlJJQ4H9_NxsiIoLwAEk7-Q3UXERGYw_75IDrGA84-lA_-Ct4eTlXHBIY2EaV7t7LjJaynVJCpkv4LKjTTAumiGUIuQhrNhZLuF_RJLqHpM2kgWFLU7-VTdL1VbC2tejvcI2BlMkEpk1BzBZI0KQB0GaDWFLN-aEAw3vRw"",
""e"":""AQAB"",
""d"":""VFCWOqXr8nvZNyaaJLXdnNPXZKRaWCjkU5Q2egQQpTBMwhprMzWzpR8Sxq1OPThh_J6MUD8Z35wky9b8eEO0pwNS8xlh1lOFRRBoNqDIKVOku0aZb-rynq8cxjDTLZQ6Fz7jSjR1Klop-YKaUHc9GsEofQqYruPhzSA-QgajZGPbE_0ZaVDJHfyd7UUBUKunFMScbflYAAOYJqVIVwaYR5zWEEceUjNnTNo_CVSj-VvXLO5VZfCUAVLgW4dpf1SrtZjSt34YLsRarSb127reG_DUwg9Ch-KyvjT1SkHgUWRVGcyly7uvVGRSDwsXypdrNinPA4jlhoNdizK2zF2CWQ"",
""p"":""9gY2w6I6S6L0juEKsbeDAwpd9WMfgqFoeA9vEyEUuk4kLwBKcoe1x4HG68ik918hdDSE9vDQSccA3xXHOAFOPJ8R9EeIAbTi1VwBYnbTp87X-xcPWlEPkrdoUKW60tgs1aNd_Nnc9LEVVPMS390zbFxt8TN_biaBgelNgbC95sM"",
""q"":""uKlCKvKv_ZJMVcdIs5vVSU_6cPtYI1ljWytExV_skstvRSNi9r66jdd9-yBhVfuG4shsp2j7rGnIio901RBeHo6TPKWVVykPu1iYhQXw1jIABfw-MVsN-3bQ76WLdt2SDxsHs7q7zPyUyHXmps7ycZ5c72wGkUwNOjYelmkiNS0"",
""dp"":""w0kZbV63cVRvVX6yk3C8cMxo2qCM4Y8nsq1lmMSYhG4EcL6FWbX5h9yuvngs4iLEFk6eALoUS4vIWEwcL4txw9LsWH_zKI-hwoReoP77cOdSL4AVcraHawlkpyd2TWjE5evgbhWtOxnZee3cXJBkAi64Ik6jZxbvk-RR3pEhnCs"",
""dq"":""o_8V14SezckO6CNLKs_btPdFiO9_kC1DsuUTd2LAfIIVeMZ7jn1Gus_Ff7B7IVx3p5KuBGOVF8L-qifLb6nQnLysgHDh132NDioZkhH7mI7hPG-PYE_odApKdnqECHWw0J-F0JWnUd6D2B_1TvF9mXA2Qx-iGYn8OVV1Bsmp6qU"",
""qi"":""eNho5yRBEBxhGBtQRww9QirZsB66TrfFReG_CcteI1aCneT0ELGhYlRlCtUkTRclIfuEPmNsNDPbLoLqqCVznFbvdB7x-Tl-m0l_eFTj2KiqwGqE9PZB9nNTwMVvH3VRRSLWACvPnSiwP8N5Usy-WRXS-V7TbpxIhvepTfE0NNo""
}";
// Symmetric ("oct") JWK; per its name, from RFC 7516 A.3.3.
private static string Rfc7516_A_3_3_ExampleJwk = @"
{""kty"":""oct"",
""k"":""GawgguFyGrWKav7AX4VKUg""
}";
// JWE with an "aad" member (additional authenticated data); per its name, from
// RFC 7520 section 5.10.
private static string Rfc7520_5_10_ExampleJwe = @"
{
""recipients"": [
{
""encrypted_key"": ""4YiiQ_ZzH76TaIkJmYfRFgOV9MIpnx4X""
}
],
""protected"": ""eyJhbGciOiJBMTI4S1ciLCJraWQiOiI4MWIyMDk2NS04MzMyLTQzZDktYTQ2OC04MjE2MGFkOTFhYzgiLCJlbmMiOiJBMTI4R0NNIn0"",
""iv"": ""veCx9ece2orS7c_N"",
""aad"": ""WyJ2Y2FyZCIsW1sidmVyc2lvbiIse30sInRleHQiLCI0LjAiXSxbImZuIix7fSwidGV4dCIsIk1lcmlhZG9jIEJyYW5keWJ1Y2siXSxbIm4iLHt9LCJ0ZXh0IixbIkJyYW5keWJ1Y2siLCJNZXJpYWRvYyIsIk1yLiIsIiJdXSxbImJkYXkiLHt9LCJ0ZXh0IiwiVEEgMjk4MiJdLFsiZ2VuZGVyIix7fSwidGV4dCIsIk0iXV1d"",
""ciphertext"": ""Z_3cbr0k3bVM6N3oSNmHz7Lyf3iPppGf3Pj17wNZqteJ0Ui8p74SchQP8xygM1oFRWCNzeIa6s6BcEtp8qEFiqTUEyiNkOWDNoF14T_4NFqF-p2Mx8zkbKxI7oPK8KNarFbyxIDvICNqBLba-v3uzXBdB89fzOI-Lv4PjOFAQGHrgv1rjXAmKbgkft9cB4WeyZw8MldbBhc-V_KWZslrsLNygon_JJWd_ek6LQn5NRehvApqf9ZrxB4aq3FXBxOxCys35PhCdaggy2kfUfl2OkwKnWUbgXVD1C6HxLIlqHhCwXDG59weHrRDQeHyMRoBljoV3X_bUTJDnKBFOod7nLz-cj48JMx3SnCZTpbQAkFV"",
""tag"": ""vOaH_Rajnpy_3hOtqvZHRA""
}";
// A128KW symmetric JWK matching the "kid" in the fixture above; per its name,
// from RFC 7520 section 5.8.1, figure 151.
private static string Rfc7520_5_8_1_Figure151_ExampleJwk = @"
{
""kty"": ""oct"",
""kid"": ""81b20965-8332-43d9-a468-82160ad91ac8"",
""use"": ""enc"",
""alg"": ""A128KW"",
""k"": ""GZy6sIZ6wl9NJOKB-jnmVQ""
}";
// Plaintext fixture (note: \x2013 is an en dash escape); per its name, from
// RFC 7520 figure 72.
private static string Rfc7520_Figure72_ExamplePlaintext =
"You can trust us to stick with you through thick and "
+ "thin\x2013to the bitter end. And you can trust us to "
+ "keep any secret of yours\x2013closer than you keep it "
+ "yourself. But you cannot trust us to let you face trouble "
+ "alone, and go off without a word. We are your friends, Frodo.";
// Base64url-encoded AAD; per its name, from RFC 7520 figure 176.
private static string Rfc7520_Figure176_ExampleBase64UrlEncodedAad =
"WyJ2Y2FyZCIsW1sidmVyc2lvbiIse30sInRleHQiLCI0LjAiXSxbImZuIix7fS"
+ "widGV4dCIsIk1lcmlhZG9jIEJyYW5keWJ1Y2siXSxbIm4iLHt9LCJ0ZXh0Iixb"
+ "IkJyYW5keWJ1Y2siLCJNZXJpYWRvYyIsIk1yLiIsIiJdXSxbImJkYXkiLHt9LC"
+ "J0ZXh0IiwiVEEgMjk4MiJdLFsiZ2VuZGVyIix7fSwidGV4dCIsIk0iXV1d";
// The decoded JSON form of the same AAD (a jCard array).
private static string Rfc7520_5_10_1_ExampleAadString =
"[\"vcard\",[[\"version\",{},\"text\",\"4.0\"],[\"fn\",{},\"text\",\"Meriadoc Brandybuck\"],[\"n\",{},\"text\",[\"Brandybuck\",\"Meriadoc\",\"Mr.\",\"\"]],[\"bday\",{},\"text\",\"TA 2982\"],[\"gender\",{},\"text\",\"M\"]]]";
};
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.